From: Christian Van Wambeke Date: Mon, 30 Apr 2018 17:14:10 +0000 (+0200) Subject: fix doxstring :param: :result: X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=02f5ea6f157517e05486a67b71dd73bd57016bfa;p=tools%2Fsat.git fix doxstring :param: :result: --- diff --git a/AllTestLauncherSat.py b/AllTestLauncherSat.py index a0288db..3ec8b8e 100755 --- a/AllTestLauncherSat.py +++ b/AllTestLauncherSat.py @@ -272,10 +272,10 @@ def getParser(): ) parser.add_argument( '-r', '--rootPath', - help='''\ + help="""\ dir name with absolute or relative path stand for root directory of recursive searching unittest python files -''', +""", default=satdir, metavar='dirPath' ) @@ -294,7 +294,8 @@ of recursive searching unittest python files ) parser.add_argument( '-n', '--name', - help="""(only for type xml) + help="""\ +(only for type xml) name of directory output: ['test_reports'|...]. If name = 'stdout' then all-in-one xml output at 'sys.stdout'. For pipe redirection: '>> AllTestLauncher.py -t xml -n stdout > tmp.xml' diff --git a/__init__.py b/__init__.py index e69de29..a76d25d 100644 --- a/__init__.py +++ b/__init__.py @@ -0,0 +1 @@ +__all__ = ['src', 'commands', ] \ No newline at end of file diff --git a/commands/application.py b/commands/application.py index a145061..720b342 100644 --- a/commands/application.py +++ b/commands/application.py @@ -36,15 +36,15 @@ from src.salomeTools import _BaseCommand # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The application command creates a SALOME application. - WARNING: - It works only for SALOME 6. - Use the 'launcher' command for newer versions of SALOME - - examples: - >> sat application SALOME-6.6.0 + | Warning: + | It works only for SALOME 6. 
+ | Use the 'launcher' command for newer versions of SALOME + | + | Examples: + | >> sat application SALOME-6.6.0 """ name = "application" @@ -349,7 +349,7 @@ def get_step(logger, message, pad=50): returns 'message ........ ' with pad 50 by default avoid colors '' for now in message """ - return "%s %s " % (message, '.' * (pad - len(message.decode("UTF-8"))) + return "%s %s " % (message, '.' * (pad - len(message.decode("UTF-8")))) ## # Creates a SALOME application. @@ -371,7 +371,7 @@ def create_application(config, appli_dir, catalog, logger, display=True): cmd = UTS.label("%s/salome" % appli_dir) if display: - msg = _("To launch the application, type:")) + msg = _("To launch the application, type:") logger.info("\n%s\n %s\n" % (msg, cmd)) return retcode @@ -422,7 +422,7 @@ def generate_launch_file(config, appli_dir, catalog, logger, l_SALOME_modules): # build the application (the name depends upon salome version env_file = os.path.join(config.APPLICATION.workdir, "env_launch." + env_ext) - logger.info(get_step(_("Building application")) + logger.info(get_step(_("Building application"))) cf = create_config_file(config, l_SALOME_modules, env_file, logger) # create the application directory diff --git a/commands/check.py b/commands/check.py index e664bdf..0ec831b 100644 --- a/commands/check.py +++ b/commands/check.py @@ -27,14 +27,14 @@ CHECK_PROPERTY = "has_unit_tests" # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The check command executes the 'check' command in the build directory of all the products of the application. It is possible to reduce the list of products to check by using the --products option examples: - >> sat check SALOME --products KERNEL,GUI,GEOM + >> sat check SALOME --products KERNEL,GUI,GEOM """ name = "check" @@ -104,17 +104,17 @@ Optional: products to configure. 
def get_products_list(options, cfg, logger): - '''method that gives the product list with their informations from - configuration regarding the passed options. - - :param options Options: The Options instance that stores the commands - arguments - :param cfg Config: The global configuration - :param logger Logger: The logger instance to use for the display and - logging - :return: The list of (product name, product_informations). - :rtype: List - ''' + """ + method that gives the product list with their informations from + configuration regarding the passed options. + + :param options: (Options) The Options instance that stores + the commands arguments + :param cfg: (Config) The global configuration + :param logger: (Logger) The logger instance to use + for the display and logging + :return: (list) The list of (product name, product_informations). + """ # Get the products to be prepared, regarding the options if options.products is None: # No options, get all products sources @@ -150,16 +150,17 @@ def log_res_step(logger, res): logger.debug("\n") def check_all_products(config, products_infos, logger): - '''Execute the proper configuration commands - in each product build directory. - - :param config Config: The global configuration - :param products_info list: List of - (str, Config) => (product_name, product_info) - :param logger Logger: The logger instance to use for the display and logging - :return: the number of failing commands. - :rtype: int - ''' + """ + Execute the proper configuration commands + in each product build directory. + + :param config: (Config) The global configuration + :param products_info: (list) + List of (str, Config) => (product_name, product_info) + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) the number of failing commands. 
+ """ res = 0 for p_name_info in products_infos: res_prod = check_product(p_name_info, config, logger) @@ -168,16 +169,17 @@ def check_all_products(config, products_infos, logger): return res def check_product(p_name_info, config, logger): - '''Execute the proper configuration command(s) - in the product build directory. - - :param p_name_info tuple: (str, Config) => (product_name, product_info) - :param config Config: The global configuration - :param logger Logger: The logger instance to use for the display - and logging - :return: 1 if it fails, else 0. - :rtype: int - ''' + """ + Execute the proper configuration command(s) + in the product build directory. + + :param p_name_info: (tuple) + (str, Config) => (product_name, product_info) + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) 1 if it fails, else 0. + """ p_name, p_info = p_name_info @@ -214,7 +216,7 @@ is not defined in the definition of %(name)\n""") % p_name if ignored or not cmd_found: log_step(logger, header, "ignored") - logger.debug("==== %s %s\n" % (p_name, "IGNORED") + logger.debug("==== %s %s\n" % (p_name, "IGNORED")) if not cmd_found: return 1 return 0 diff --git a/commands/clean.py b/commands/clean.py index 4a06a17..13a209b 100644 --- a/commands/clean.py +++ b/commands/clean.py @@ -36,14 +36,14 @@ PROPERTY_EXPRESSION = "^.+:.+$" # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The clean command suppresses the source, build, or install directories of the application products. 
Use the options to define what directories you want to suppress and to reduce the list of products examples: - >> sat clean SALOME --build --install --properties is_salome_module:yes + >> sat clean SALOME --build --install --properties is_salome_module:yes """ name = "clean" @@ -149,16 +149,16 @@ The '--properties' options must have the following syntax: def get_source_directories(products_infos, without_dev): - '''Returns the list of directory source paths corresponding to the list of - product information given as input. If without_dev (bool), then - the dev products are ignored. + """ + Returns the list of directory source paths corresponding to the list of + product information given as input. If without_dev (bool), then + the dev products are ignored. - :param products_infos list: The list of (name, config) corresponding to one - product. - :param without_dev boolean: If True, then ignore the dev products. - :return: the list of source paths. - :rtype: list - ''' + :param products_infos: (list) + The list of (name, config) corresponding to one product. + :param without_dev: (boolean) If True, then ignore the dev products. + :return: (list) the list of source paths. + """ l_dir_source = [] for __, product_info in products_infos: if product_has_dir(product_info, without_dev): @@ -166,14 +166,14 @@ def get_source_directories(products_infos, without_dev): return l_dir_source def get_build_directories(products_infos): - '''Returns the list of directory build paths corresponding to the list of - product information given as input. + """ + Returns the list of directory build paths corresponding to the list of + product information given as input. - :param products_infos list: The list of (name, config) corresponding to one - product. - :return: the list of build paths. - :rtype: list - ''' + :param products_infos: (list) + The list of (name, config) corresponding to one product. + :return: (list) the list of build paths. 
+ """ l_dir_build = [] for __, product_info in products_infos: if product_has_dir(product_info): @@ -182,14 +182,14 @@ def get_build_directories(products_infos): return l_dir_build def get_install_directories(products_infos): - '''Returns the list of directory install paths corresponding to the list of - product information given as input. + """ + Returns the list of directory install paths corresponding to the list of + product information given as input. - :param products_infos list: The list of (name, config) corresponding to one - product. - :return: the list of install paths. - :rtype: list - ''' + :param products_infos: (list) + The list of (name, config) corresponding to one product. + :return: (list) the list of install paths. + """ l_dir_install = [] for __, product_info in products_infos: if product_has_dir(product_info): @@ -197,14 +197,16 @@ def get_install_directories(products_infos): return l_dir_install def product_has_dir(product_info, without_dev=False): - '''Returns a boolean at True if there is a source, build and install - directory corresponding to the product described by product_info. + """ + Returns a boolean at True if there is a source, build and install + directory corresponding to the product described by product_info. - :param products_info Config: The config corresponding to the product. - :return: True if there is a source, build and install - directory corresponding to the product described by product_info. - :rtype: boolean - ''' + :param products_info: (Config) + The config corresponding to the product. + :return: (bool) + True if there is a source, build and install + directory corresponding to the product described by product_info. + """ if (src.product.product_is_native(product_info) or src.product.product_is_fixed(product_info)): return False @@ -214,12 +216,12 @@ def product_has_dir(product_info, without_dev=False): return True def suppress_directories(l_paths, logger): - '''Suppress the paths given in the list in l_paths. 
+ """Suppress the paths given in the list in l_paths. - :param l_paths list: The list of Path to be suppressed - :param logger Logger: The logger instance to use for the display and - logging - ''' + :param l_paths: (list) The list of Path to be suppressed + :param logger: (Logger) + The logger instance to use for the display and logging + """ for path in l_paths: strpath = str(path) if not path.isdir(): diff --git a/commands/compile.py b/commands/compile.py index e0f6e0b..74919c8 100644 --- a/commands/compile.py +++ b/commands/compile.py @@ -125,7 +125,7 @@ class Command(_BaseCommand): srcDir = os.path.join(config.APPLICATION.workdir, 'SOURCES') buildDir = os.path.join(config.APPLICATION.workdir, 'BUILD') - msg = _("Application %s, executing compile commands in build directories of products.\n" + msg = _("Application %s, executing compile commands in build directories of products.\n") logger.info(msg % UTS.label(nameApp)) info = [ (_("SOURCE directory"), srcDir), @@ -170,17 +170,17 @@ class Command(_BaseCommand): def get_products_list(options, cfg, logger): - '''method that gives the product list with their informations from - configuration regarding the passed options. + """ + method that gives the product list with their informations from + configuration regarding the passed options. - :param options Options: The Options instance that stores the commands - arguments - :param cfg Config: The global configuration - :param logger Logger: The logger instance to use for the display and - logging - :return: The list of (product name, product_informations). - :rtype: List - ''' + :param options: (Options) + The Options instance that stores the commands arguments + :param cfg: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (list) The list of (product name, product_informations). 
+ """ # Get the products to be prepared, regarding the options if options.products is None: # No options, get all products sources @@ -217,15 +217,15 @@ def get_children(config, p_name_p_info): return l_res def get_recursive_children(config, p_name_p_info, without_native_fixed=False): - """ Get the recursive list of the product that depend on - the product defined by prod_info + """ + Get the recursive list of the product that depend on + the product defined by prod_info - :param config Config: The global configuration - :param prod_info Config: The specific config of the product - :param without_native_fixed boolean: If true, do not include the fixed - or native products in the result - :return: The list of product_informations. - :rtype: List + :param config: (Config) The global configuration + :param prod_info: (Config) The specific config of the product + :param without_native_fixed: (bool) + If true, do not include the fixed or native products in the result + :return: (list) The list of product_informations. """ p_name, __ = p_name_p_info # Initialization of the resulting list @@ -268,15 +268,15 @@ is not present in application %(appli_name)s.""" % return l_children def get_recursive_fathers(config, p_name_p_info, without_native_fixed=False): - """ Get the recursive list of the dependencies of the product defined by - prod_info + """ + Get the recursive list of the dependencies of the product defined + by prod_info - :param config Config: The global configuration - :param prod_info Config: The specific config of the product - :param without_native_fixed boolean: If true, do not include the fixed - or native products in the result - :return: The list of product_informations. - :rtype: List + :param config: (Config) The global configuration + :param prod_info: (Config) The specific config of the product + :param without_native_fixed: (bool) + If true, do not include the fixed or native products in the result + :return: (list) The list of product_informations. 
""" p_name, p_info = p_name_p_info # Initialization of the resulting list @@ -317,10 +317,11 @@ def get_recursive_fathers(config, p_name_p_info, without_native_fixed=False): return l_fathers def sort_products(config, p_infos): - """ Sort the p_infos regarding the dependencies between the products + """Sort the p_infos regarding the dependencies between the products - :param config Config: The global configuration - :param p_infos list: List of (str, Config) => (product_name, product_info) + :param config: (Config) The global configuration + :param p_infos: (list) + List of (str, Config) => (product_name, product_info) """ l_prod_sorted = src.deepcopy_list(p_infos) for prod in p_infos: @@ -383,16 +384,16 @@ def log_res_step(logger, res): def compile_all_products(sat, config, options, products_infos, logger): - """\ + """ Execute the proper configuration commands in each product build directory. - :param config Config: The global configuration - :param products_info list: List of - (str, Config) => (product_name, product_info) - :param logger Logger: The logger instance to use for the display and logging - :return: the number of failing commands. - :rtype: int + :param config: (Config) The global configuration + :param products_info: (list) + List of (str, Config) => (product_name, product_info) + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) the number of failing commands. 
""" res = 0 for p_name_info in products_infos: @@ -501,12 +502,12 @@ def compile_all_products(sat, config, options, products_infos, logger): # Log the result if res_prod > 0: logger.info("\r%s%s" % (header, " " * len_end_line)) - logger.info("\r" + header + " ") + error_step) - logger.debug("\n==== in compile of %s\n" % p_name + logger.info("\r" + header + " " + error_step) + logger.debug("\n==== in compile of %s\n" % p_name) if error_step == "CHECK": logger.info(_("\nINSTALL directory = %s") % p_info.install_dir) else: - logger.info("\r%s%s" % (header, " " * len_end_line) + logger.info("\r%s%s" % (header, " " * len_end_line)) logger.info("\r" + header + "") logger.info(_("\nINSTALL directory = %s") % p_info.install_dir) logger.debug("\n==== in compile of %s\n" % p_name) @@ -519,18 +520,18 @@ def compile_all_products(sat, config, options, products_infos, logger): return res def compile_product(sat, p_name_info, config, options, logger, header, len_end): - '''Execute the proper configuration command(s) - in the product build directory. + """ + Execute the proper configuration command(s) + in the product build directory. - :param p_name_info tuple: (str, Config) => (product_name, product_info) - :param config Config: The global configuration - :param logger Logger: The logger instance to use for the display - and logging - :param header Str: the header to display when logging - :param len_end Int: the lenght of the the end of line (used in display) - :return: 1 if it fails, else 0. - :rtype: int - ''' + :param p_name_info: (tuple) (str, Config) => (product_name, product_info) + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :param header: (str) the header to display when logging + :param len_end: (int) the lenght of the the end of line (used in display) + :return: (int) 1 if it fails, else 0. 
+ """ p_name, p_info = p_name_info @@ -593,18 +594,19 @@ def compile_product_cmake_autotools(sat, logger, header, len_end): - '''Execute the proper build procedure for autotools or cmake - in the product build directory. + """ + Execute the proper build procedure for autotools or cmake + in the product build directory. - :param p_name_info tuple: (str, Config) => (product_name, product_info) - :param config Config: The global configuration - :param logger Logger: The logger instance to use for the display - and logging - :param header Str: the header to display when logging - :param len_end Int: the lenght of the the end of line (used in display) - :return: 1 if it fails, else 0. - :rtype: int - ''' + :param p_name_info: (tuple) + (str, Config) => (product_name, product_info) + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :param header: (str) the header to display when logging + :param len_end: (int) the length of the the end of line (used in display) + :return: (int) 1 if it fails, else 0. + """ p_name, p_info = p_name_info # Execute "sat configure", "sat make" and "sat install" @@ -671,17 +673,17 @@ def compile_product_script(sat, logger, header, len_end): - '''Execute the script build procedure in the product build directory. + """Execute the script build procedure in the product build directory. - :param p_name_info tuple: (str, Config) => (product_name, product_info) - :param config Config: The global configuration - :param logger Logger: The logger instance to use for the display - and logging - :param header Str: the header to display when logging - :param len_end Int: the lenght of the the end of line (used in display) - :return: 1 if it fails, else 0. 
- :rtype: int - ''' + :param p_name_info: (tuple) + (str, Config) => (product_name, product_info) + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :param header: (str) the header to display when logging + :param len_end: (int) the lenght of the the end of line (used in display) + :return: (int) 1 if it fails, else 0. + """ p_name, p_info = p_name_info # Execute "sat configure", "sat make" and "sat install" @@ -699,12 +701,13 @@ def compile_product_script(sat, return res, len_end_line, error_step def add_compile_config_file(p_info, config): - '''Execute the proper configuration command(s) - in the product build directory. + """ + Execute the proper configuration command(s) + in the product build directory. - :param p_info Config: The specific config of the product - :param config Config: The global configuration - ''' + :param p_info: (Config) The specific config of the product + :param config: (Config) The global configuration + """ # Create the compile config compile_cfg = PYCONF.Config() for prod_name in p_info.depend: diff --git a/commands/config.py b/commands/config.py index fa0b58a..b66203d 100644 --- a/commands/config.py +++ b/commands/config.py @@ -30,17 +30,17 @@ import src.system as SYSS # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The config command allows manipulation and operation on config '.pyconf' files. 
- examples: - >> sat config --list - >> sat config SALOME --edit - >> sat config SALOME --copy SALOME-new - >> sat config SALOME --value VARS - >> sat config SALOME --debug VARS - >> sat config SALOME --info ParaView - >> sat config SALOME --show_patchs + | examples: + | >> sat config --list + | >> sat config SALOME --edit + | >> sat config SALOME --copy SALOME-new + | >> sat config SALOME --value VARS + | >> sat config SALOME --debug VARS + | >> sat config SALOME --info ParaView + | >> sat config SALOME --show_patchs """ name = "config" diff --git a/commands/configure.py b/commands/configure.py index 5987571..0e87ddf 100644 --- a/commands/configure.py +++ b/commands/configure.py @@ -26,18 +26,18 @@ from src.salomeTools import _BaseCommand # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The configure command executes in the build directory commands corresponding to the compilation mode of the application products. The possible compilation modes are 'cmake', 'autotools', or 'script'. Here are the commands to be run: - autotools: >> build_configure and configure - cmake: >> cmake - script: (do nothing) + autotools: >> build_configure and configure + cmake: >> cmake + script: (do nothing) examples: - >> sat configure SALOME --products KERNEL,GUI,PARAVIS + >> sat configure SALOME --products KERNEL,GUI,PARAVIS """ name = "configure" @@ -110,16 +110,17 @@ class Command(_BaseCommand): def get_products_list(options, cfg, logger): - '''method that gives the product list with their informations from - configuration regarding the passed options. - - :param options Options: The Options instance that stores the commands - arguments - :param cfg Config: The global configuration - :param logger Logger: The logger instance to use for the display and logging - :return: The list of (product name, product_informations). 
- :rtype: List - ''' + """ + method that gives the product list with their informations from + configuration regarding the passed options. + + :param options: (Options) + The Options instance that stores the commands arguments + :param cfg: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (list) The list of (product name, product_informations). + """ # Get the products to be prepared, regarding the options if options.products is None: # No options, get all products sources @@ -155,17 +156,17 @@ def log_res_step(logger, res): logger.debug("") def configure_all_products(config, products_infos, conf_option, logger): - '''Execute the proper configuration commands - in each product build directory. - - :param config Config: The global configuration - :param products_info list: List of - (str, Config) => (product_name, product_info) - :param conf_option str: The options to add to the command - :param logger Logger: The logger instance to use for the display and logging - :return: the number of failing commands. - :rtype: int - ''' + """ + Execute the proper configuration commands + in each product build directory. + + :param config: (Config) The global configuration + :param products_info: (list) + List of (str, Config) => (product_name, product_info) + :param conf_option: (str) The options to add to the command + :param logger: (Logger) The logger instance to use for the display and logging + :return: (int) the number of failing commands. + """ res = 0 for p_name_info in products_infos: res_prod = configure_product(p_name_info, conf_option, config, logger) @@ -174,17 +175,18 @@ def configure_all_products(config, products_infos, conf_option, logger): return res def configure_product(p_name_info, conf_option, config, logger): - '''Execute the proper configuration command(s) - in the product build directory. 
- - :param p_name_info tuple: (str, Config) => (product_name, product_info) - :param conf_option str: The options to add to the command - :param config Config: The global configuration - :param logger Logger: The logger instance to use for the display - and logging - :return: 1 if it fails, else 0. - :rtype: int - ''' + """ + Execute the proper configuration command(s) + in the product build directory. + + :param p_name_info: (tuple) + (str, Config) => (product_name, product_info) + :param conf_option: (str) The options to add to the command + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) 1 if it fails, else 0. + """ p_name, p_info = p_name_info diff --git a/commands/environ.py b/commands/environ.py index 0a1de0a..8e8467c 100644 --- a/commands/environ.py +++ b/commands/environ.py @@ -32,11 +32,11 @@ C_ALL_SHELL = [ "bash", "bat", "cfg" ] # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The environ command generates the environment files of your application. examples: - >> sat environ SALOME + >> sat environ SALOME """ name = "environ" @@ -113,20 +113,22 @@ def write_all_source_files(config, shells=["bash"], prefix="env", env_info=None): - '''Generates the environment files. + """Generates the environment files. - :param config Config: The global configuration - :param logger Logger: The logger instance to use for the display - and logging - :param out_dir str: The path to the directory where the files will be put - :param src_root str: The path to the directory where the sources are - :param silent boolean: If True, do not print anything in the terminal - :param shells list: The list of shells to generate - :param prefix str: The prefix to add to the file names. - :param env_info str: The list of products to add in the files. - :return: The list of the generated files. 
- :rtype: List - ''' + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :param out_dir: (str) + The path to the directory where the files will be put + :param src_root: (str) + The path to the directory where the sources are + :param silent: (bool) + If True, do not print anything in the terminal + :param shells: (list) The list of shells to generate + :param prefix: (str) The prefix to add to the file names. + :param env_info: (str) The list of products to add in the files. + :return: (list) The list of the generated files. + """ if not out_dir: out_dir = config.APPLICATION.workdir diff --git a/commands/find_duplicates.py b/commands/find_duplicates.py index 494212e..58eed40 100644 --- a/commands/find_duplicates.py +++ b/commands/find_duplicates.py @@ -195,7 +195,7 @@ class Command(_BaseCommand): l_path.append(fic_path[1]) dic_fic_paths[the_file] = l_path - logger.info("\n') + logger.info("\n") # End the execution if no duplicates were found if len(dic_fic_paths) == 0: @@ -224,14 +224,15 @@ class Command(_BaseCommand): def list_directory(lpath, extension_ignored, files_ignored, directories_ignored): """Make the list of all files and paths that are not filtered - :param lpath List: The list of path to of the directories where to - search for duplicates - :param extension_ignored List: The list of extensions to ignore - :param files_ignored List: The list of files to ignore - :param directories_ignored List: The list of directory paths to ignore - :return: files_arb_out is the list of [file, path] - and files_out is is the list of files - :rtype: List, List + :param lpath: (list) + The list of path to of the directories where to search for duplicates + :param extension_ignored: (list) The list of extensions to ignore + :param files_ignored: (list) The list of files to ignore + :param directories_ignored: (list) + The list of directory paths to ignore + :return: (list, list) + 
files_arb_out is the list of [file, path] + and files_out is is the list of files """ files_out = [] files_arb_out=[] @@ -251,28 +252,28 @@ def list_directory(lpath, extension_ignored, files_ignored, directories_ignored) return files_arb_out, files_out def format_list_of_str(l_str): - '''Make a list from a string + """Make a list from a string - :param l_str List or Str: The variable to format - :return: the formatted variable - :rtype: List - ''' + :param l_str: (list or str) The variable to format + :return: (list) the formatted variable + """ if not isinstance(l_str, list): return l_str return ",".join(l_str) class Progress_bar: - "Create a progress bar in the terminal" - + """ + Create a progress bar in the terminal + """ def __init__(self, name, valMin, valMax, logger, length = 50): - '''Initialization of the progress bar. + """Initialization of the progress bar. - :param name str: The name of the progress bar - :param valMin float: the minimum value of the variable - :param valMax float: the maximum value of the variable - :param logger Logger: the logger instance - :param length int: the lenght of the progress bar - ''' + :param name: (str) The name of the progress bar + :param valMin: (float) the minimum value of the variable + :param valMax: (float) the maximum value of the variable + :param logger: (Logger) the logger instance + :param length: (int) the lenght of the progress bar + """ self.name = name self.valMin = valMin self.valMax = valMax @@ -283,12 +284,12 @@ class Progress_bar: raise Exception(out_err) def display_value_progression(self,val): - '''Display the progress bar. + """Display the progress bar. - :param val float: val must be between valMin and valMax. - ''' + :param val: (float) val must be between valMin and valMax. 
+ """ if val < self.valMin or val > self.valMax: - self.logger.error(_("Wrong value for the progress bar.\n'))) + self.logger.error(_("Wrong value for the progress bar.\n")) else: perc = (float(val-self.valMin) / (self.valMax - self.valMin)) * 100. nb_equals = int(perc * self.length / 100) diff --git a/commands/generate.py b/commands/generate.py index ced2b8d..86404db 100644 --- a/commands/generate.py +++ b/commands/generate.py @@ -329,9 +329,9 @@ def build_context(config, logger): def check_module_generator(directory=None): """Check if module_generator is available. - :param directory str: The directory of YACSGEN. - :return: The YACSGEN path if the module_generator is available, else None - :rtype: str + :param directory: (str) The directory of YACSGEN. + :return: (str) + The YACSGEN path if the module_generator is available, else None """ undo = False if directory is not None and directory not in sys.path: @@ -353,10 +353,11 @@ def check_module_generator(directory=None): def check_yacsgen(config, directory, logger): """Check if YACSGEN is available. - :param config Config: The global configuration. - :param directory str: The directory given by option --yacsgen - :param logger Logger: The logger instance - :return: RCO.ReturnCode with value The path to yacsgen directory if ok + :param config: (Config) The global configuration. 
+ :param directory: (str) The directory given by option --yacsgen + :param logger: (Logger) The logger instance + :return: (RCO.ReturnCode) + with value The path to yacsgen directory if ok """ # first check for YACSGEN (command option, then product, then environment) yacsgen_dir = None @@ -375,7 +376,7 @@ def check_yacsgen(config, directory, logger): if yacsgen_dir is None: RCO.ReturnCode("KO", _("The generate command requires YACSGEN.")) - logger.info(" %s in %s" % (yacs_src, yacsgen_dir) + logger.info(" %s in %s" % (yacs_src, yacsgen_dir)) if not os.path.exists(yacsgen_dir): msg = _("YACSGEN directory not found: '%s'") % yacsgen_dir diff --git a/commands/init.py b/commands/init.py index 3a04ccc..15154ad 100644 --- a/commands/init.py +++ b/commands/init.py @@ -105,14 +105,13 @@ class Command(_BaseCommand): def set_local_value(config, key, value, logger): - """ Edit the site.pyconf file and change a value. + """Edit the site.pyconf file and change a value. - :param config Config: The global configuration. - :param key Str: The key from which to change the value. - :param value Str: The path to change. - :param logger Logger: The logger instance. - :return: 0 if all is OK, else 1 - :rtype: int + :param config: (Config) The global configuration. + :param key: (str) The key from which to change the value. + :param value: (str) The path to change. + :param logger: (Logger) The logger instance. + :return: (int) 0 if all is OK, else 1 """ local_file_path = os.path.join(config.VARS.datadir, "local.pyconf") # Update the local.pyconf file @@ -133,11 +132,11 @@ def set_local_value(config, key, value, logger): return RCO.ReturnCode("OK") def display_local_values(config, logger): - """ Display the base path + """Display the base path - :param config Config: The global configuration. - :param key Str: The key from which to change the value. - :param logger Logger: The logger instance. + :param config: (Config) The global configuration. 
+ :param key: (str) The key from which to change the value. + :param logger: (Logger) The logger instance. """ info = [("base", config.LOCAL.base), ("workdir", config.LOCAL.workdir), @@ -150,10 +149,10 @@ def display_local_values(config, logger): return 0 def check_path(path_to_check, logger): - """ Verify that the given path is not a file and can be created. + """Verify that the given path is not a file and can be created. - :param path_to_check Str: The path to check. - :param logger Logger: The logger instance. + :param path_to_check: (str) The path to check. + :param logger: (Logger) The logger instance. """ if path_to_check == "default": return 0 @@ -176,7 +175,7 @@ Please provide a path to a directory\n""") % UTS.blue(path_to_check) msg = _("""\ Unable to create the directory %s: -%s\n""") % (UTS.blue(str(path)), UTS.yellow(e) +%s\n""") % (UTS.blue(str(path)), UTS.yellow(e)) logger.error(msg) return 1 diff --git a/commands/job.py b/commands/job.py index a767eba..f5817a3 100644 --- a/commands/job.py +++ b/commands/job.py @@ -31,7 +31,7 @@ class Command(_BaseCommand): in the jobs configuration file\ examples: - >> sat job --jobs_config my_jobs --name my_job" + >> sat job --jobs_config my_jobs --name my_job" """ name = "job" diff --git a/commands/jobs.py b/commands/jobs.py index 4f88f42..ce0fc93 100644 --- a/commands/jobs.py +++ b/commands/jobs.py @@ -64,7 +64,7 @@ def getParamiko(logger=None): # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The jobs command command launches maintenances that are described in the dedicated jobs configuration file. 
@@ -292,7 +292,7 @@ Use the --list option to get the possible files.\n""") % config_file class Machine(object): - """\ + """ Manage a ssh connection on a machine """ def __init__(self, @@ -314,11 +314,11 @@ class Machine(object): self._connection_successful = None def connect(self, logger): - '''Initiate the ssh connection to the remote machine + """Initiate the ssh connection to the remote machine :param logger: The logger instance - :return: Nothing - ''' + :return: None + """ self._connection_successful = False self.ssh.load_system_host_keys() @@ -344,12 +344,11 @@ class Machine(object): return message def successfully_connected(self, logger): - """\ + """ Verify if the connection to the remote machine has succeed :param logger: The logger instance - :return: True if the connection has succeed, False if not - :rtype: bool + :return: (bool) True if the connection has succeed, False if not """ if self._connection_successful == None: message = _("""\ @@ -381,10 +380,11 @@ whereas there were no connection request""" % \ return res def put_dir(self, source, target, filters = []): - '''Uploads the contents of the source directory to the target path. + """ + Uploads the contents of the source directory to the target path. The target directory needs to exists. All sub-directories in source are created under target. 
- ''' + """ for item in os.listdir(source): if item in filters: continue @@ -408,8 +408,9 @@ whereas there were no connection request""" % \ self.put_dir(source_path, destination_path) def mkdir(self, path, mode=511, ignore_existing=False): - '''As mkdir by adding an option to not fail if the folder exists - ''' + """ + As mkdir by adding an option to not fail if the folder exists + """ try: self.sftp.mkdir(path, mode) except IOError: @@ -419,15 +420,14 @@ whereas there were no connection request""" % \ raise def exec_command(self, command, logger): - '''Execute the command on the remote machine + """Execute the command on the remote machine - :param command str: The command to be run + :param command: (str) The command to be run :param logger: The logger instance - :return: the stdin, stdout, and stderr of the executing command, - as a 3-tuple - :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile, - paramiko.channel.ChannelFile) - ''' + :return: (paramiko.channel.ChannelFile, etc) + the stdin, stdout, and stderr of the executing command, + as a 3-tuple + """ import traceback try: # Does not wait the end of the command @@ -447,20 +447,16 @@ whereas there were no connection request""" % \ return (stdin, stdout, stderr) def close(self): - '''Close the ssh connection - - :rtype: N\A - ''' + """Close the ssh connection""" self.ssh.close() def write_info(self, logger): - """\ + """ Prints the informations relative to the machine in the logger (terminal traces and log file) :param logger: The logger instance - :return: Nothing - :rtype: N\A + :return: None """ if self.successfully_connected(logger): msg = "" @@ -469,11 +465,11 @@ whereas there were no connection request""" % \ msg += "host: %s, " % self.host msg += "port: %s, " % str(self.port) msg += "user: %s" % str(self.user) - logger.info("Connection %s" % msg ) + logger.info("Connection %s" % msg ) class Job(object): - """\ + """ Class to manage one job """ def __init__(self, @@ -532,11 +528,11 @@ 
class Job(object): self.command = prefix + ' "' + self.command +'"' def get_pids(self): - """ Get the pid(s) corresponding to the command that have been launched - On the remote machine + """ + Get the pid(s) corresponding to the command that have been launched + On the remote machine - :return: The list of integers corresponding to the found pids - :rtype: List + :return: (list) The list of integers corresponding to the found pids """ pids = [] cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\'' @@ -547,11 +543,10 @@ class Job(object): return pids def kill_remote_process(self, wait=1): - '''Kills the process on the remote machine. + """Kills the process on the remote machine. - :return: (the output of the kill, the error of the kill) - :rtype: (str, str) - ''' + :return: (str, str) the output of the kill, the error of the kill + """ try: pids = self.get_pids() except: @@ -564,21 +559,20 @@ class Job(object): return (out_kill.read().decode(), err_kill.read().decode()) def has_begun(self): - '''Returns True if the job has already begun + """Returns True if the job has already begun - :return: True if the job has already begun - :rtype: bool - ''' + :return: (bool) True if the job has already begun + """ return self._has_begun def has_finished(self): - '''Returns True if the job has already finished - (i.e. all the commands have been executed) - If it is finished, the outputs are stored in the fields out and err. + """ + Returns True if the job has already finished + (i.e. all the commands have been executed) + If it is finished, the outputs are stored in the fields out and err. 
- :return: True if the job has already finished - :rtype: bool - ''' + :return: (bool) True if the job has already finished + """ # If the method has already been called and returned True if self._has_finished: @@ -604,7 +598,7 @@ class Job(object): return self._has_finished def get_log_files(self): - """\ + """ Get the log files produced by the command launched on the remote machine, and put it in the log directory of the user, so they can be accessible from @@ -677,14 +671,14 @@ class Job(object): {"1": str(job_path_remote), "2": str(e)} def has_failed(self): - '''Returns True if the job has failed. - A job is considered as failed if the machine could not be reached, - if the remote command failed, - or if the job finished with a time out. - - :return: True if the job has failed - :rtype: bool - ''' + """ + Returns True if the job has failed. + A job is considered as failed if the machine could not be reached, + if the remote command failed, + or if the job finished with a time out. + + :return: (bool) True if the job has failed + """ if not self.has_finished(): return False if not self.machine.successfully_connected(self.logger): @@ -696,8 +690,9 @@ class Job(object): return False def cancel(self): - """In case of a failing job, one has to cancel every job that depend - on it. This method put the job as failed and will not be executed. + """ + In case of a failing job, one has to cancel every job that depend on it. + This method put the job as failed and will not be executed. 
""" if self.cancelled: return @@ -709,19 +704,17 @@ class Job(object): self.err += msg def is_running(self): - '''Returns True if the job commands are running + """Returns True if the job commands are running - :return: True if the job is running - :rtype: bool - ''' + :return: (bool) True if the job is running + """ return self.has_begun() and not self.has_finished() def is_timeout(self): - '''Returns True if the job commands has finished with timeout + """Returns True if the job commands has finished with timeout - :return: True if the job has finished with timeout - :rtype: bool - ''' + :return: (bool) True if the job has finished with timeout + """ return self._has_timouted def time_elapsed(self): @@ -736,8 +729,9 @@ class Job(object): return T_now - self._T0 def check_time(self): - """Verify that the job has not exceeded its timeout. - If it has, kill the remote command and consider the job as finished. + """ + Verify that the job has not exceeded its timeout. + If it has, kill the remote command and consider the job as finished. """ if not self.has_begun(): return @@ -754,16 +748,15 @@ class Job(object): self.err += _("Unable to get remote log files!\n%s\n" % str(e)) def total_duration(self): - """\ - Give the total duration of the job + """ + Gives the total duration of the job - :return: the total duration of the job in seconds - :rtype: int + :return: (int) the total duration of the job in seconds """ return self._Tf - self._T0 def run(self): - """\ + """ Launch the job by executing the remote command. 
""" @@ -802,7 +795,7 @@ class Job(object): self._has_begun = True def write_results(self): - """\ + """ Display on the terminal all the job's information """ msg = "name : %s\n" % self.name @@ -810,10 +803,10 @@ class Job(object): msg += "after : %s\n" % self.after msg += "Time elapsed : %4imin %2is \n" % (self.total_duration()//60 , self.total_duration()%60) if self._T0 != -1: - msg += "Begin time : %s\n" % + msg += "Begin time : %s\n" % \ time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._T0)) if self._Tf != -1: - msg += "End time : %s\n\n" % + msg += "End time : %s\n\n" % \ time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf)) self.logger.info(msg) @@ -828,15 +821,13 @@ class Job(object): msg += "Unable to get output\n" else: msg += self.out + "\n" - msg += "err :\n%s\n" % .err + msg += "err :\n%s\n" % self.err self.logger.info(msg) def get_status(self): - """\ - Get the status of the job (used by the Gui for xml display) + """Get the status of the job (used by the Gui for xml display) - :return: The current status of the job - :rtype: String + :return: (str) The current status of the job """ if not self.machine.successfully_connected(self.logger): return "SSH connection KO" @@ -852,7 +843,7 @@ class Job(object): return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf)) class Jobs(object): - """\ + """ Class to manage the jobs to be run """ def __init__(self, @@ -888,14 +879,13 @@ class Jobs(object): self.determine_jobs_and_machines() def define_job(self, job_def, machine): - """\ + """ Takes a pyconf job definition and a machine (from class machine) and returns the job instance corresponding to the definition. 
- :param job_def src.config.Mapping: a job definition
- :param machine machine: the machine on which the job will run
- :return: The corresponding job in a job class instance
- :rtype: job
+ :param job_def: (Mapping) a job definition
+ :param machine: (Machine) the machine on which the job will run
+ :return: (Job) The corresponding job in a job class instance
"""
name = job_def.name
cmmnds = job_def.commands
@@ -929,7 +919,7 @@ class Jobs(object):
prefix = prefix)

def determine_jobs_and_machines(self):
- """\
+ """
Reads the pyconf jobs definition and instantiates all
the machines and jobs to be done today.

@@ -1015,11 +1005,9 @@ The job will not be launched.
self.lhosts = host_list

def ssh_connection_all_machines(self, pad=50):
- """\
- Do the ssh connection to every machine to be used today.
+ """Do the ssh connection to every machine to be used today.

- :return: Nothing
- :rtype: N\A
+ :return: None
"""
self.logger.info( "Establishing connection with all the machines :\n")
for machine in self.lmachines:
@@ -1089,14 +1077,15 @@ The job will not be launched.


def is_occupied(self, hostname):
- '''Function that returns True if a job is running on
- the machine defined by its host and its port.
-
- :param hostname (str, int): the pair (host, port)
- :return: the job that is running on the host,
- or false if there is no job running on the host.
- :rtype: job / bool
- '''
+ """
+ Returns True if a job is running on
+ the machine defined by its host and its port.
+
+ :param hostname: (str, int) the pair (host, port)
+ :return: (Job or bool)
+ the job that is running on the host,
+ or false if there is no job running on the host.
+ """
host = hostname[0]
port = hostname[1]
for jb in self.ljobs:
@@ -1106,12 +1095,12 @@ The job will not be launched.
return False

def update_jobs_states_list(self):
- '''Function that updates the lists that store the currently
- running jobs and the jobs that have already finished.
+ """ + Updates the lists that store the currently + running jobs and the jobs that have already finished. - :return: Nothing. - :rtype: N\A - ''' + :return: None + """ jobs_finished_list = [] jobs_running_list = [] for jb in self.ljobs: @@ -1130,11 +1119,10 @@ The job will not be launched. return nb_job_finished_now > nb_job_finished_before def cancel_dependencies_of_failing_jobs(self): - '''Function that cancels all the jobs that depend on a failing one. + """Cancels all the jobs that depend on a failing one. - :return: Nothing. - :rtype: N\A - ''' + :return: None + """ for job in self.ljobs: if job.after is None: @@ -1144,12 +1132,11 @@ The job will not be launched. job.cancel() def find_job_that_has_name(self, name): - '''Returns the job by its name. + """Returns the job by its name. - :param name str: a job name - :return: the job that has the name. - :rtype: job - ''' + :param name: (str) a job name + :return: (Job) the job that has the name. + """ for jb in self.ljobs: if jb.name == name: return jb @@ -1157,14 +1144,14 @@ The job will not be launched. return None def str_of_length(self, text, length): - '''Takes a string text of any length and returns - the most close string of length "length". - - :param text str: any string - :param length int: a length for the returned string - :return: the most close string of length "length" - :rtype: str - ''' + """ + Takes a string text of any length and returns + the most close string of length "length". + + :param text: (str) any string + :param length: (int) a length for the returned string + :return: (str) the most close string of length "length" + """ if len(text) > length: text_out = text[:length-3] + '...' else: @@ -1176,14 +1163,13 @@ The job will not be launched. return text_out def display_status(self, len_col): - """\ + """ Takes a lenght and construct the display of the current status of the jobs in an array that has a column for each host. 
It displays the job that is currently running on the host of the column. - :param len_col int: the size of the column - :return: Nothing - :rtype: N\A + :param len_col: (int) the size of the column + :return: None """ display_line = "" for host_port in self.lhosts: @@ -1199,15 +1185,14 @@ The job will not be launched. def run_jobs(self): - """\ + """ The main method. Runs all the jobs on every host. For each host, at a given time, only one job can be running. The jobs that have the field after (that contain the job that has to be run before it) are run after the previous job. This method stops when all the jobs are finished. - :return: Nothing - :rtype: N\A + :return: None """ # Print header self.logger.info(_('Executing the jobs :\n')) @@ -1274,11 +1259,9 @@ The job will not be launched. self.gui.last_update() def write_all_results(self): - """\ - Display all the jobs outputs. + """Display all the jobs outputs. - :return: Nothing - :rtype: N\A + :return: None """ for jb in self.ljobs: self.logger.info("#------- Results for job %s -------#\n" % jb.name) @@ -1286,7 +1269,7 @@ The job will not be launched. 
self.logger.info("\n\n") class Gui(object): - """\ + """ Class to manage the the xml data that can be displayed in a browser to see the jobs states """ @@ -1297,13 +1280,15 @@ class Gui(object): prefix, logger, file_boards=""): - """\ - Initialization - - :param xml_dir_path str: The path to the directory where to put the xml resulting files - :param l_jobs List: the list of jobs that run today - :param l_jobs_not_today List: the list of jobs that do not run today - :param file_boards str: the file path from which to read the expected boards + """Initialization + + :param xml_dir_path: (str) + The path to the directory where to put the xml resulting files + :param l_jobs: (list) the list of jobs that run today + :param l_jobs_not_today: (list) + the list of jobs that do not run today + :param file_boards: (str) + the file path from which to read the expected boards """ # The logging instance self.logger = logger @@ -1343,9 +1328,11 @@ class Gui(object): self.update_xml_files(l_jobs) def add_xml_board(self, name): - '''Add a board to the board list - :param name str: the board name - ''' + """ + Add a board to the board list + + :param name: (str) the board name + """ xml_board_path = os.path.join(self.xml_dir_path, name + ".xml") self.d_xml_board_files[name] = XMLMGR.XmlLogFile(xml_board_path,"JobsReport") self.d_xml_board_files[name].add_simple_node("distributions") @@ -1353,12 +1340,12 @@ class Gui(object): self.d_xml_board_files[name].add_simple_node("board", text=name) def initialize_boards(self, l_jobs, l_jobs_not_today): - """\ + """ Get all the first information needed for each file and write the first version of the files - :param l_jobs List: the list of jobs that run today - :param l_jobs_not_today List: the list of jobs that do not run today + :param l_jobs: (list) the list of jobs that run today + :param l_jobs_not_today: (list) the list of jobs that do not run today """ # Get the boards to fill and put it in a dictionary # {board_name : xml instance 
corresponding to the board} @@ -1489,12 +1476,15 @@ class Gui(object): attrib={"distribution" : row, "application" : column } ) def find_history(self, l_jobs, l_jobs_not_today): - """find, for each job, in the existent xml boards the results for the - job. Store the results in the dictionnary self.history = {name_job : - list of (date, status, list links)} - - :param l_jobs List: the list of jobs to run today - :param l_jobs_not_today List: the list of jobs that do not run today + """ + find, for each job, in the existent xml boards the results for the job. + Store the results in the dictionary + self.history = {name_job : list of (date, status, list links)} + + :param l_jobs: (list) + the list of jobs to run today + :param l_jobs_not_today: (list) + the list of jobs that do not run today """ # load the all the history expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$" @@ -1509,7 +1499,7 @@ class Gui(object): l_globalxml.append(global_xml) except Exception as e: msg = _("The file '%s' can not be read, it will be ignored\n%s") % \ - (file_path, e}) + (file_path, e) self.logger.warning("%s\n" % msg) # Construct the dictionnary self.history @@ -1530,12 +1520,14 @@ class Gui(object): self.history[job.name] = l_links def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs): - """\ + """ Get all the first information needed for each file and write the first version of the files - :param xml_node_jobs etree.Element: the node corresponding to a job - :param l_jobs_not_today List: the list of jobs that do not run today + :param xml_node_jobs: (etree.Element) + the node corresponding to a job + :param l_jobs_not_today: (list) + the list of jobs that do not run today """ ASNODE = XMLMGR.add_simple_node # shortcut @@ -1563,10 +1555,11 @@ class Gui(object): attrib={"date" : date, "res" : res_job, "last" : "no"} ) def parse_csv_boards(self, today): - """ Parse the csv file that describes the boards to produce and fill - the dict d_input_boards that contain the 
csv file contain + """ + Parse the csv file that describes the boards to produce and fill + the dict d_input_boards that contain the csv file contain - :param today int: the current day of the week + :param today: (int) the current day of the week """ # open the csv file and read its content l_read = [] @@ -1614,10 +1607,10 @@ class Gui(object): self.d_input_boards = d_boards def update_xml_files(self, l_jobs): - '''Write all the xml files with updated information about the jobs + """Write all the xml files with updated information about the jobs - :param l_jobs List: the list of jobs that run today - ''' + :param l_jobs: (list) the list of jobs that run today + """ for xml_file in [self.xml_global_file] + list( self.d_xml_board_files.values()): self.update_xml_file(l_jobs, xml_file) @@ -1626,11 +1619,12 @@ class Gui(object): self.write_xml_files() def update_xml_file(self, l_jobs, xml_file): - '''update information about the jobs for the file xml_file + """update information about the jobs for the file xml_file - :param l_jobs List: the list of jobs that run today - :param xml_file xmlManager.XmlLogFile: the xml instance to update - ''' + :param l_jobs: (list) the list of jobs that run today + :param xml_file: (xmlManager.XmlLogFile) + the xml instance to update + """ xml_node_jobs = xml_file.xmlroot.find('jobs') # Update the job names and status node @@ -1721,14 +1715,15 @@ class Gui(object): def find_test_log(self, l_remote_log_files): - '''Find if there is a test log (board) in the remote log files and - the path to it. There can be several test command, so the result is - a list. + """ + Find if there is a test log (board) in the remote log files and + the path to it. There can be several test command, + so the result is a list. 
- :param l_remote_log_files List: the list of all remote log files - :return: the list of (test log files path, res of the command) - :rtype: List - ''' + :param l_remote_log_files: (list) the list of all remote log files + :return: (list) + the list of tuples (test log files path, res of the command) + """ res = [] for file_path in l_remote_log_files: dirname = os.path.basename(os.path.dirname(file_path)) @@ -1749,11 +1744,11 @@ class Gui(object): return res def last_update(self, finish_status = "finished"): - '''update information about the jobs for the file xml_file + """update information about the jobs for the file xml_file - :param l_jobs List: the list of jobs that run today - :param xml_file xmlManager.XmlLogFile: the xml instance to update - ''' + :param l_jobs: (list) the list of jobs that run today + :param xml_file: (xmlManager.XmlLogFile) the xml instance to update + """ for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()): xml_node_infos = xml_file.xmlroot.find('infos') XMLMGR.append_node_attrib(xml_node_infos, @@ -1762,8 +1757,9 @@ class Gui(object): self.write_xml_files() def write_xml_file(self, xml_file, stylesheet): - ''' Write one xml file and the same file with prefix - ''' + """ + Write one xml file and the same file with prefix + """ xml_file.write_tree(stylesheet) file_path = xml_file.logFile file_dir = os.path.dirname(file_path) @@ -1773,8 +1769,9 @@ class Gui(object): file_name_with_prefix)) def write_xml_files(self): - ''' Write the xml files - ''' + """ + Write the xml files + """ self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL) for xml_file in self.d_xml_board_files.values(): self.write_xml_file(xml_file, STYLESHEET_BOARD) @@ -1799,10 +1796,11 @@ def get_config_file_path(job_config_name, l_cfg_dir): return found, file_jobs_cfg def develop_factorized_jobs(config_jobs): - '''update information about the jobs for the file xml_file + """update information about the jobs for the file xml_file - :param 
config_jobs Config: the config corresponding to the jos description
- '''
+ :param config_jobs: (Config)
+ the config corresponding to the jobs description
+ """
developed_jobs_list = []
for jb in config_jobs.jobs:
# case where the jobs are not developed
diff --git a/commands/launcher.py b/commands/launcher.py
index 898207e..d9d0eda 100644
--- a/commands/launcher.py
+++ b/commands/launcher.py
@@ -31,11 +31,11 @@ from src.salomeTools import _BaseCommand
# Command class
########################################################################
class Command(_BaseCommand):
- """\
+ """
The launcher command generates a SALOME launcher.

examples:
- >> sat launcher SALOME
+ >> sat launcher SALOME
"""

name = "launcher"
@@ -104,19 +104,18 @@ def generate_launch_file(config,
pathlauncher,
display=True,
additional_env={}):
- '''Generates the launcher file.
+ """Generates the launcher file.

- :param config Config: The global configuration
- :param logger Logger: The logger instance to use for the display
- and logging
- :param launcher_name str: The name of the launcher to generate
- :param pathlauncher str: The path to the launcher to generate
- :param display boolean: If False, do not print anything in the terminal
- :param additional_env dict: The dict giving additional
- environment variables
- :return: The launcher file path.
- :rtype: str
- '''
+ :param config: (Config) The global configuration
+ :param logger: (Logger)
+ The logger instance to use for the display and logging
+ :param launcher_name: (str) The name of the launcher to generate
+ :param pathlauncher: (str) The path to the launcher to generate
+ :param display: (bool) If False, do not print anything in the terminal
+ :param additional_env: (dict)
+ The dict giving additional environment variables
+ :return: (str) The launcher file path.
+ """ # Compute the default launcher path if it is not provided in pathlauncher # parameter @@ -188,12 +187,11 @@ def generate_launch_file(config, def generate_catalog(machines, config, logger): """Generates an xml catalog file from a list of machines. - :param machines List: The list of machines to add in the catalog - :param config Config: The global configuration - :param logger Logger: The logger instance to use for the display - and logging - :return: The catalog file path. - :rtype: str + :param machines: (list) The list of machines to add in the catalog + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (str) The catalog file path. """ # remove empty machines machines = map(lambda l: l.strip(), machines) @@ -201,7 +199,7 @@ def generate_catalog(machines, config, logger): # log something logger.debug(" %s = %s\n" % \ - (_("Generate Resources Catalog"),", ".join(machines)) + (_("Generate Resources Catalog"), ", ".join(machines)) ) # The command to execute on each machine in order to get some information cmd = '"cat /proc/cpuinfo | grep MHz ; cat /proc/meminfo | grep MemTotal"' @@ -258,10 +256,10 @@ def generate_catalog(machines, config, logger): def copy_catalog(config, catalog_path): """Copy the xml catalog file into the right location - :param config Config: The global configuration - :param catalog_path str: the catalog file path - :return: The environment dictionary corresponding to the file path. - :rtype: Dict + :param config: (Config) The global configuration + :param catalog_path: (str) the catalog file path + :return: (dict) + The environment dictionary corresponding to the file path. 
""" # Verify the existence of the file if not os.path.exists(catalog_path): diff --git a/commands/log.py b/commands/log.py index 4b9baf6..c32aca2 100644 --- a/commands/log.py +++ b/commands/log.py @@ -43,11 +43,11 @@ except NameError: # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The log command gives access to the logs produced by the salomeTools commands. examples: - >> sat log + >> sat log """ name = "log" @@ -237,14 +237,13 @@ class Command(_BaseCommand): return RCO.ReturnCode("OK", "option no browser") def get_last_log_file(logDir, notShownCommands): - """\ + """ Used in case of last option. Get the last log command file path. - :param logDir str: The directory where to search the log files - :param notShownCommands list: the list of commands to ignore - :return: the path to the last log file - :rtype: str + :param logDir: (str) The directory where to search the log files + :param notShownCommands: (list) the list of commands to ignore + :return: (str) the path to the last log file """ last = (_, 0) for fileName in os.listdir(logDir): @@ -267,22 +266,22 @@ def get_last_log_file(logDir, notShownCommands): return res def remove_log_file(filePath, logger): - '''if it exists, print a warning and remove the input file + """if it exists, print a warning and remove the input file :param filePath: the path of the file to delete - :param logger Logger: the logger instance to use for the print - ''' + :param logger: (Logger) the logger instance to use for the print + """ if os.path.exists(filePath): logger.debug(UTS.red("Removing %s\n" % filePath)) os.remove(filePath) def print_log_command_in_terminal(filePath, logger): - '''Print the contain of filePath. It contains a command log in xml format. + """Print the contain of filePath. It contains a command log in xml format. 
- :param filePath: The command xml file from which extract the commands - context and traces - :param logger Logger: the logging instance to use in order to print. - ''' + :param filePath: + The command xml file from which extract the commands context and traces + :param logger: (Logger) the logging instance to use in order to print. + """ logger.debug(_("Reading %s\n") % filePath) # Instantiate the ReadXmlFile class that reads xml files xmlRead = XMLMGR.ReadXmlFile(filePath) @@ -370,12 +369,12 @@ def show_product_last_logs(logger, config, product_log_dir): SYSS.show_in_editor(config.USER.editor, log_file_path, logger) def ask_value(nb): - '''Ask for an int n. 0> sat make SALOME --products Python,KERNEL,GUI + >> sat make SALOME --products Python,KERNEL,GUI """ name = "make" @@ -103,17 +103,17 @@ class Command(_BaseCommand): def get_products_list(options, cfg, logger): - '''method that gives the product list with their informations from - configuration regarding the passed options. + """ + method that gives the product list with their informations from + configuration regarding the passed options. - :param options Options: The Options instance that stores the commands - arguments - :param cfg Config: The global configuration - :param logger Logger: The logger instance to use for the display and - logging - :return: The list of (product name, product_informations). - :rtype: List - ''' + :param options: (Options) + The Options instance that stores the commands arguments + :param cfg: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (list) The list of tuples (product name, product_informations). 
+ """ # Get the products to be prepared, regarding the options if options.products is None: # No options, get all products sources @@ -152,17 +152,18 @@ def log_res_step(logger, res): def make_all_products(config, products_infos, make_option, logger): - '''Execute the proper configuration commands - in each product build directory. + """ + Execute the proper configuration commands + in each product build directory. - :param config Config: The global configuration - :param products_info list: List of - (str, Config) => (product_name, product_info) - :param make_option str: The options to add to the command - :param logger Logger: The logger instance to use for the display and logging - :return: the number of failing commands. - :rtype: int - ''' + :param config: (Config) The global configuration + :param products_info: (list) + List of (str, Config) => (product_name, product_info) + :param make_option: (str) The options to add to the command + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) the number of failing commands. + """ res = 0 for p_name_info in products_infos: res_prod = make_product(p_name_info, make_option, config, logger) @@ -171,17 +172,17 @@ def make_all_products(config, products_infos, make_option, logger): return res def make_product(p_name_info, make_option, config, logger): - '''Execute the proper configuration command(s) - in the product build directory. + """ + Execute the proper configuration command(s) + in the product build directory. - :param p_name_info tuple: (str, Config) => (product_name, product_info) - :param make_option str: The options to add to the command - :param config Config: The global configuration - :param logger Logger: The logger instance to use for the display - and logging - :return: 1 if it fails, else 0. 
- :rtype: int - ''' + :param p_name_info: (tuple) (str, Config) => (product_name, product_info) + :param make_option: (str) The options to add to the command + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) 1 if it fails, else 0. + """ p_name, p_info = p_name_info diff --git a/commands/makeinstall.py b/commands/makeinstall.py index 2d0f79b..f445579 100644 --- a/commands/makeinstall.py +++ b/commands/makeinstall.py @@ -26,13 +26,13 @@ from src.salomeTools import _BaseCommand # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The makeinstall command executes the 'make install' command in the build directory. In case of product constructed using a script (build_source : 'script'), then the makeinstall command do nothing. examples: - >> sat makeinstall SALOME --products KERNEL,GUI + >> sat makeinstall SALOME --products KERNEL,GUI """ name = "makeinstall" @@ -98,16 +98,17 @@ class Command(_BaseCommand): def get_products_list(options, cfg, logger): - '''method that gives the product list with their informations from - configuration regarding the passed options. + """ + method that gives the product list with their informations from + configuration regarding the passed options. - :param options Options: The Options instance that stores the commands - arguments - :param cfg Config: The global configuration - :param logger Logger: The logger instance to use for the display and logging - :return: The list of (product name, product_informations). - :rtype: List - ''' + :param options: (Options) + The Options instance that stores the commands arguments + :param cfg: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (list) The list of (product name, product_informations). 
+ """ # Get the products to be prepared, regarding the options if options.products is None: # No options, get all products sources @@ -142,16 +143,17 @@ def log_res_step(logger, res): logger.debug("\n") def makeinstall_all_products(config, products_infos, logger): - '''Execute the proper configuration commands - in each product build directory. - - :param config Config: The global configuration - :param products_info list: List of - (str, Config) => (product_name, product_info) - :param logger Logger: The logger instance to use for the display and logging - :return: the number of failing commands. - :rtype: int - ''' + """ + Execute the proper configuration commands + in each product build directory. + + :param config: (Config) The global configuration + :param products_info: (list) + List of (str, Config) => (product_name, product_info) + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) the number of failing commands. + """ res = 0 for p_name_info in products_infos: res_prod = makeinstall_product(p_name_info, config, logger) @@ -160,16 +162,17 @@ def makeinstall_all_products(config, products_infos, logger): return res def makeinstall_product(p_name_info, config, logger): - '''Execute the proper configuration command(s) - in the product build directory. + """ + Execute the proper configuration command(s) + in the product build directory. - :param p_name_info tuple: (str, Config) => (product_name, product_info) - :param config Config: The global configuration - :param logger Logger: The logger instance to use for the display - and logging - :return: 1 if it fails, else 0. - :rtype: int - ''' + :param p_name_info: (tuple) + (str, Config) => (product_name, product_info) + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) 1 if it fails, else 0. 
+ """ p_name, p_info = p_name_info diff --git a/commands/package.py b/commands/package.py index b424b89..1c3beb5 100644 --- a/commands/package.py +++ b/commands/package.py @@ -93,17 +93,16 @@ project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep""" # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The package command creates an archive. There are 4 kinds of archive, which can be mixed: - 1- The binary archive. It contains all the product installation directories and a launcher. - 2- The sources archive. It contains the products archives, - a project corresponding to the application and salomeTools. - 3- The project archive. It contains a project (give the project file path as argument). - 4- The salomeTools archive. It contains salomeTools. + 1- The binary archive. It contains all the product installation directories and a launcher. + 2- The sources archive. It contains the products archives, a project corresponding to the application and salomeTools. + 3- The project archive. It contains a project (give the project file path as argument). + 4- The salomeTools archive. It contains salomeTools. 
examples: - >> sat package SALOME --binaries --sources + >> sat package SALOME --binaries --sources """ name = "package" @@ -253,7 +252,7 @@ check if at least one of the following options was selected: path_targz = os.path.join(dir_name, archive_name + ".tgz") - logger.info(" Package path = %s\n" UTS.blue(path_targz)) + logger.info(" Package path = %s\n" % UTS.blue(path_targz)) # Create a working directory for all files that are produced during the # package creation and that will be removed at the end of the command @@ -345,7 +344,7 @@ check if at least one of the following options was selected: tar.close() except KeyboardInterrupt: - logger.critical(UTS.red(_("KeyboardInterrupt forced interruption\n")) + logger.critical(UTS.red(_("KeyboardInterrupt forced interruption\n"))) logger.info(_("Removing the temporary working directory ... ")) # remove the working directory shutil.rmtree(tmp_working_dir) @@ -356,26 +355,24 @@ check if at least one of the following options was selected: shutil.rmtree(tmp_working_dir) # Print again the path of the package - logger.info(" Package path = %s\n" UTS.blue(path_targz)) + logger.info(" Package path = %s\n" % UTS.blue(path_targz)) return res def add_files(tar, name_archive, d_content, logger, f_exclude=None): - """\ + """ Create an archive containing all directories and files that are given in the d_content argument. - :param tar tarfile: The tarfile instance used to make the archive. - :param name_archive str: The name of the archive to make. - :param d_content dict: The dictionary that contain all directories and files - to add in the archive. - d_content[label] = - (path_on_local_machine, path_in_archive) - :param logger Logger: the logging instance - :param f_exclude Function: the function that filters - :return: 0 if success, 1 if not. - :rtype: int + :param tar: (tarfile) The tarfile instance used to make the archive. + :param name_archive: (str) The name of the archive to make. 
+ :param d_content: (dict) + The dictionary that contain all directories and files to add in the archive. + d_content[label] = (path_on_local_machine, path_in_archive) + :param logger: (Logger) the logging instance + :param f_exclude: (function) the function that filters + :return: (int) 0 if success, 1 if not. """ # get the max length of the messages in order to make the display max_len = len(max(d_content.keys(), key=len)) @@ -395,18 +392,17 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None): tar.add(local_path, arcname=in_archive, exclude=f_exclude) logger.info("\n") except Exception as e: - logger.info(" %s\n" str(e)) + logger.info(" %s\n" % str(e)) success = 1 return success def exclude_VCS_and_extensions(filename): - """\ + """ The function that is used to exclude from package the link to the VCS repositories (like .git) - :param filename Str: The filname to exclude (or not). - :return: True if the file has to be exclude - :rtype: Boolean + :param filename: (str) The filname to exclude (or not). + :return: (bool) True if the file has to be exclude """ for dir_name in IGNORED_DIRS: if dir_name in filename: @@ -422,18 +418,18 @@ def produce_relative_launcher(config, file_name, binaries_dir_name, with_commercial=True): - '''Create a specific SALOME launcher for the binary package. This launcher - uses relative paths. + """ + Create a specific SALOME launcher for the binary package. + This launcher uses relative paths. - :param config Config: The global configuration. - :param logger Logger: the logging instance - :param file_dir str: the directory where to put the launcher - :param file_name str: The launcher name - :param binaries_dir_name str: the name of the repository where the binaries - are, in the archive. - :return: the path of the produced launcher - :rtype: str - ''' + :param config: (Config) The global configuration. 
+ :param logger: (Logger) the logging instance + :param file_dir: (str) the directory where to put the launcher + :param file_name: (str) The launcher name + :param binaries_dir_name: (str) + the name of the repository where the binaries are, in the archive. + :return: (str) the path of the produced launcher + """ # get KERNEL installation path kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL") @@ -495,10 +491,10 @@ def produce_relative_launcher(config, return filepath def hack_for_distene_licence(filepath): - '''Replace the distene licence env variable by a call to a file. + """Replace the distene licence env variable by a call to a file. - :param filepath Str: The path to the launcher to modify. - ''' + :param filepath: (str) The path to the launcher to modify. + """ shutil.move(filepath, filepath + "_old") fileout= filepath filein = filepath + "_old" @@ -521,12 +517,12 @@ def hack_for_distene_licence(filepath): del text[num_line +1] del text[num_line +1] text_to_insert ="""\ - import imp - try: - distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py') - distene.set_distene_variables(context) - except: - pass +import imp +try: + distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py') + distene.set_distene_variables(context) +except: + pass """ text.insert(num_line + 1, text_to_insert) for line in text: @@ -539,17 +535,17 @@ def produce_relative_env_files(config, logger, file_dir, binaries_dir_name): - '''Create some specific environment files for the binary package. These - files use relative paths. + """ + Create some specific environment files for the binary package. + These files use relative paths. - :param config Config: The global configuration. 
- :param logger Logger: the logging instance - :param file_dir str: the directory where to put the files - :param binaries_dir_name str: the name of the repository where the binaries - are, in the archive. - :return: the list of path of the produced environment files - :rtype: List - ''' + :param config: (Config) The global configuration. + :param logger: (Logger) the logging instance + :param file_dir: (str) the directory where to put the files + :param binaries_dir_name: (str) + The name of the repository where the binaries are, in the archive. + :return: (list) The list of path of the produced environment files + """ # create an environment file writer writer = src.environment.FileEnvWriter(config, logger, @@ -582,17 +578,18 @@ def produce_install_bin_file(config, file_dir, d_sub, file_name): - '''Create a bash shell script which do substitutions in BIRARIES dir - in order to use it for extra compilations. + """ + Create a bash shell script which do substitutions in BIRARIES dir + in order to use it for extra compilations. - :param config Config: The global configuration. - :param logger Logger: the logging instance - :param file_dir str: the directory where to put the files - :param d_sub, dict: the dictionnary that contains the substitutions to be done - :param file_name str: the name of the install script file - :return: the produced file - :rtype: str - ''' + :param config: (Config) The global configuration. 
+ :param logger: (Logger) the logging instance + :param file_dir: (str) the directory where to put the files + :param d_sub: (dict) + the dictionnary that contains the substitutions to be done + :param file_name: (str) the name of the install script file + :return: (str) the produced file + """ # Write filepath = os.path.join(file_dir, file_name) # open the file and write into it @@ -635,17 +632,17 @@ def product_appli_creation_script(config, logger, file_dir, binaries_dir_name): - '''Create a script that can produce an application (EDF style) in the binary - package. + """ + Create a script that can produce an application (EDF style) + in the binary package. - :param config Config: The global configuration. - :param logger Logger: the logging instance - :param file_dir str: the directory where to put the file - :param binaries_dir_name str: the name of the repository where the binaries - are, in the archive. - :return: the path of the produced script file - :rtype: Str - ''' + :param config: (Config) The global configuration. + :param logger: (Logger) the logging instance + :param file_dir: (str) the directory where to put the file + :param binaries_dir_name: (str) + The name of the repository where the binaries are, in the archive. + :return: (str) The path of the produced script file + """ template_name = "create_appli.py.for_bin_packages.template" template_path = os.path.join(config.VARS.internal_dir, template_name) text_to_fill = open(template_path, "r").read() @@ -697,20 +694,21 @@ def product_appli_creation_script(config, return tmp_file_path def binary_package(config, logger, options, tmp_working_dir): - '''Prepare a dictionary that stores all the needed directories and files to - add in a binary package. + """ + Prepare a dictionary that stores all the needed directories and files + to add in a binary package. - :param config Config: The global configuration. 
- :param logger Logger: the logging instance - :param options OptResult: the options of the launched command - :param tmp_working_dir str: The temporary local directory containing some - specific directories or files needed in the - binary package - :return: the dictionary that stores all the needed directories and files to - add in a binary package. - {label : (path_on_local_machine, path_in_archive)} - :rtype: dict - ''' + :param config: (Config) The global configuration. + :param logger: (Logger) the logging instance + :param options: (OptResult) the options of the launched command + :param tmp_working_dir: (str) + The temporary local directory containing some specific directories + or files needed in the binary package + :return: (dict) + The dictionary that stores all the needed directories and files + to add in a binary package. + {label : (path_on_local_machine, path_in_archive)} + """ # Get the list of product installation to add to the archive l_products_name = config.APPLICATION.products.keys() @@ -759,7 +757,7 @@ def binary_package(config, logger, options, tmp_working_dir): text_missing_prods += "-" + p_name + "\n" msg = _("There are missing products installations:\n") - logger.warning(msg + text_missing_prods)) + logger.warning(msg + text_missing_prods) if not options.force_creation: return None @@ -828,20 +826,21 @@ def binary_package(config, logger, options, tmp_working_dir): return d_products def source_package(sat, config, logger, options, tmp_working_dir): - '''Prepare a dictionary that stores all the needed directories and files to - add in a source package. + """ + Prepare a dictionary that stores all the needed directories and files + to add in a source package. - :param config Config: The global configuration. 
- :param logger Logger: the logging instance - :param options OptResult: the options of the launched command - :param tmp_working_dir str: The temporary local directory containing some - specific directories or files needed in the - binary package - :return: the dictionary that stores all the needed directories and files to - add in a source package. - {label : (path_on_local_machine, path_in_archive)} - :rtype: dict - ''' + :param config: (Config) The global configuration. + :param logger: (Logger) the logging instance + :param options: (OptResult) the options of the launched command + :param tmp_working_dir: (str) + The temporary local directory containing some specific directories + or files needed in the binary package + :return: (dict) + the dictionary that stores all the needed directories and files + to add in a source package. + {label : (path_on_local_machine, path_in_archive)} + """ # Get all the products that are prepared using an archive logger.info("Find archive products ... ") @@ -888,17 +887,17 @@ def source_package(sat, config, logger, options, tmp_working_dir): return d_source def get_archives(config, logger): - '''Find all the products that are get using an archive and all the products - that are get using a vcs (git, cvs, svn) repository. + """ + Find all the products from an archive and all the products + from a VCS (git, cvs, svn) repository. - :param config Config: The global configuration. - :param logger Logger: the logging instance - :return: the dictionary {name_product : - (local path of its archive, path in the package of its archive )} - and the list of specific configuration corresponding to the vcs - products - :rtype: (Dict, List) - ''' + :param config: (Config) The global configuration. 
+ :param logger: (Logger) The logging instance + :return: (Dict, List) + The dictionary + {name_product : (local path of its archive, path in the package of its archive )} + and the list of specific configuration corresponding to the vcs products + """ # Get the list of product informations l_products_name = config.APPLICATION.products.keys() l_product_info = src.product.get_products_infos(l_products_name, @@ -921,16 +920,17 @@ def get_archives(config, logger): return d_archives, l_pinfo_vcs def add_salomeTools(config, tmp_working_dir): - '''Prepare a version of salomeTools that has a specific local.pyconf file - configured for a source package. - - :param config Config: The global configuration. - :param tmp_working_dir str: The temporary local directory containing some - specific directories or files needed in the - source package - :return: The path to the local salomeTools directory to add in the package - :rtype: str - ''' + """ + Prepare a version of salomeTools that has a specific local.pyconf file + configured for a source package. + + :param config: (Config) The global configuration. + :param tmp_working_dir: (str) + The temporary local directory containing some specific directories + or files needed in the source package + :return: (str) + The path to the local salomeTools directory to add in the package + """ # Copy sat in the temporary working directory sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools")) sat_running_path = src.Path(config.VARS.salometoolsway) @@ -958,24 +958,25 @@ def add_salomeTools(config, tmp_working_dir): return sat_tmp_path.path def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir): - '''For sources package that require that all products are get using an - archive, one has to create some archive for the vcs products. - So this method calls the clean and source command of sat and then create - the archives. 
- - :param l_pinfo_vcs List: The list of specific configuration corresponding to - each vcs product - :param sat Sat: The Sat instance that can be called to clean and source the - products - :param config Config: The global configuration. - :param logger Logger: the logging instance - :param tmp_working_dir str: The temporary local directory containing some - specific directories or files needed in the - source package - :return: the dictionary that stores all the archives to add in the source - package. {label : (path_on_local_machine, path_in_archive)} - :rtype: dict - ''' + """ + For sources package that require that all products from an archive, + one has to create some archive for the vcs products. + So this method calls the clean and source command of sat + and then create the archives. + + :param l_pinfo_vcs: (list) + The list of specific configuration corresponding to each vcs product + :param sat: (Sat) + The Sat instance that can be called to clean and source the products + :param config: (Config) The global configuration. + :param logger: (Logger) The logging instance + :param tmp_working_dir: (str) + The temporary local directory containing some specific directories + or files needed in the source package + :return: (dict) + The dictionary that stores all the archives to add in the sourcepackage. + {label : (path_on_local_machine, path_in_archive)} + """ # clean the source directory of all the vcs products, then use the source # command and thus construct an archive that will not contain the patches l_prod_names = [pn for pn, __ in l_pinfo_vcs] @@ -1001,16 +1002,15 @@ def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir): return d_archives_vcs def make_archive(prod_name, prod_info, where): - '''Create an archive of a product by searching its source directory. - - :param prod_name str: The name of the product. 
- :param prod_info Config: The specific configuration corresponding to the - product - :param where str: The path of the repository where to put the resulting - archive - :return: The path of the resulting archive - :rtype: str - ''' + """Create an archive of a product by searching its source directory. + + :param prod_name: (str) The name of the product. + :param prod_info: (Config) + The specific configuration corresponding to the product + :param where: (str) + The path of the repository where to put the resulting archive + :return: (str) The path of the resulting archive + """ path_targz_prod = os.path.join(where, prod_name + ".tgz") tar_prod = tarfile.open(path_targz_prod, mode='w:gz') local_path = prod_info.source_dir @@ -1021,18 +1021,19 @@ def make_archive(prod_name, prod_info, where): return path_targz_prod def create_project_for_src_package(config, tmp_working_dir, with_vcs): - '''Create a specific project for a source package. - - :param config Config: The global configuration. - :param tmp_working_dir str: The temporary local directory containing some - specific directories or files needed in the - source package - :param with_vcs boolean: True if the package is with vcs products (not - transformed into archive products) - :return: The dictionary - {"project" : (produced project, project path in the archive)} - :rtype: Dict - ''' + """Create a specific project for a source package. + + :param config: (Config) The global configuration. 
+ :param tmp_working_dir: (str) + The temporary local directory containing some specific directories + or files needed in the source package + :param with_vcs: (bool) + True if the package is with vcs products + (not transformed into archive products) + :return: (dict) + The dictionary + {"project" : (produced project, project path in the archive)} + """ # Create in the working temporary directory the full project tree project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR) @@ -1091,26 +1092,28 @@ def find_product_scripts_and_pyconf(p_name, env_scripts_tmp_dir, patches_tmp_dir, products_pyconf_tmp_dir): - '''Create a specific pyconf file for a given product. Get its environment - script, its compilation script and patches and put it in the temporary - working directory. This method is used in the source package in order to - construct the specific project. - - :param p_name str: The name of the product. - :param p_info Config: The specific configuration corresponding to the - product - :param config Config: The global configuration. - :param with_vcs boolean: True if the package is with vcs products (not - transformed into archive products) - :param compil_scripts_tmp_dir str: The path to the temporary compilation - scripts directory of the project. - :param env_scripts_tmp_dir str: The path to the temporary environment script - directory of the project. - :param patches_tmp_dir str: The path to the temporary patch scripts - directory of the project. - :param products_pyconf_tmp_dir str: The path to the temporary product - scripts directory of the project. - ''' + """ + Create a specific pyconf file for a given product. + Get its environment script, its compilation script + and patches and put it in the temporary working directory. + This method is used in the source package in order to + construct the specific project. + + :param p_name: (str) The name of the product. 
+ :param p_info: (Config) The specific configuration corresponding to the product + :param config: (Config) The global configuration. + :param with_vcs: (bool) + True if the package is with vcs products + (not transformed into archive products) + :param compil_scripts_tmp_dir: (str) + The path to the temporary compilation scripts directory of the project. + :param env_scripts_tmp_dir: (str) + The path to the temporary environment script directory of the project. + :param patches_tmp_dir: (str) + The path to the temporary patch scripts directory of the project. + :param products_pyconf_tmp_dir: (str) + The path to the temporary product scripts directory of the project. + """ # read the pyconf of the product product_pyconf_path = UTS.find_file_in_lpath(p_name + ".pyconf", @@ -1166,13 +1169,14 @@ def find_product_scripts_and_pyconf(p_name, ff.close() def find_application_pyconf(config, application_tmp_dir): - '''Find the application pyconf file and put it in the specific temporary - directory containing the specific project of a source package. + """ + Find the application pyconf file and put it in the specific temporary + directory containing the specific project of a source package. - :param config Config: The global configuration. - :param application_tmp_dir str: The path to the temporary application - scripts directory of the project. - ''' + :param config: 'Config) The global configuration. + :param application_tmp_dir: (str) + The path to the temporary application scripts directory of the project. + """ # read the pyconf of the application application_name = config.VARS.application application_pyconf_path = UTS.find_file_in_lpath( @@ -1198,18 +1202,19 @@ def find_application_pyconf(config, application_tmp_dir): ff.close() def project_package(project_file_path, tmp_working_dir): - '''Prepare a dictionary that stores all the needed directories and files to - add in a project package. 
+ """ + Prepare a dictionary that stores all the needed directories and files + to add in a project package. - :param project_file_path str: The path to the local project. - :param tmp_working_dir str: The temporary local directory containing some - specific directories or files needed in the - project package - :return: the dictionary that stores all the needed directories and files to - add in a project package. - {label : (path_on_local_machine, path_in_archive)} - :rtype: dict - ''' + :param project_file_path: (str) The path to the local project. + :param tmp_working_dir: (str) + The temporary local directory containing some specific directories + or files needed in the project package + :return: (dict) + The dictionary that stores all the needed directories and files + to add in a project package. + {label : (path_on_local_machine, path_in_archive)} + """ d_project = {} # Read the project file and get the directories to add to the package project_pyconf_cfg = PYCONF.Config(project_file_path) @@ -1341,12 +1346,14 @@ The procedure to do it is: return readme_path def update_config(config, prop, value): - '''Remove from config.APPLICATION.products the products that have the property given as input. + """ + Remove from config.APPLICATION.products the products + that have the property given as input. - :param config Config: The global config. - :param prop str: The property to filter - :param value str: The value of the property to filter - ''' + :param config: (Config) The global config. 
+ :param prop: (str) The property to filter + :param value: (str) The value of the property to filter + """ src.check_config_has_application(config) l_product_to_remove = [] for product_name in config.APPLICATION.products.keys(): diff --git a/commands/patch.py b/commands/patch.py index b802885..8c5dc1c 100644 --- a/commands/patch.py +++ b/commands/patch.py @@ -30,12 +30,12 @@ import commands.prepare # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The patch command apply the patches on the sources of the application products if there is any. examples: - >> sat patch SALOME --products qt,boost + >> sat patch SALOME --products qt,boost """ name = "patch" @@ -115,14 +115,14 @@ class Command(_BaseCommand): def apply_patch(config, product_info, max_product_name_len, logger): - """\ - The method called to apply patches on a product - - :param config Config: The global configuration - :param product_info Config: The configuration specific to - the product to be patched - :param logger Logger: The logger instance to use for the display and logging - :return: RCO.ReturnCode + """The method called to apply patches on a product + + :param config: (Config) The global configuration + :param product_info: (Config) + The configuration specific to the product to be patched + :param logger: (Logger: + The logger instance to use for the display and logging + :return: (RCO.ReturnCode) """ # if the product is native, do not apply patch diff --git a/commands/prepare.py b/commands/prepare.py index 859516e..240a250 100644 --- a/commands/prepare.py +++ b/commands/prepare.py @@ -28,12 +28,12 @@ from src.salomeTools import _BaseCommand # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The prepare command gets the sources of the application products and apply the patches if there is any. 
examples: - >> sat prepare SALOME --products KERNEL,GUI + >> sat prepare SALOME --products KERNEL,GUI """ name = "prepare" @@ -157,15 +157,14 @@ Use the --force_patch option to overwrite it. def remove_products(arguments, l_products_info, logger): - """ - function that removes the products in l_products_info from arguments list. + """Removes the products in l_products_info from arguments list. - :param arguments str: The arguments from which to remove products - :param l_products_info list: List of - (str, Config) => (product_name, product_info) - :param logger Logger: The logger instance to use for the display and logging - :return: The updated arguments. - :rtype: str + :param arguments: (str) The arguments from which to remove products + :param l_products_info: (list) + List of (str, Config) => (product_name, product_info) + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (str) The updated arguments. """ args = arguments for i, (product_name, __) in enumerate(l_products_info): @@ -177,14 +176,13 @@ def remove_products(arguments, l_products_info, logger): return args def find_products_already_getted(l_products): - '''function that returns the list of products that have an existing source - directory. + """Returns the list of products that have an existing source directory. - :param l_products List: The list of products to check - :return: The list of product configurations that have an existing source - directory. - :rtype: List - ''' + :param l_products: (list) The list of products to check + :return: (list) + The list of product configurations + that have an existing source directory. + """ l_res = [] for p_name_p_cfg in l_products: __, prod_cfg = p_name_p_cfg @@ -193,12 +191,13 @@ def find_products_already_getted(l_products): return l_res def find_products_with_patchs(l_products): - '''function that returns the list of products that have one or more patches. 
+ """Returns the list of products that have one or more patches. - :param l_products List: The list of products to check - :return: The list of product configurations that have one or more patches. - :rtype: List - ''' + :param l_products: (list) The list of products to check + :return: (list) + The list of product configurations + that have one or more patches. + """ l_res = [] for p_name_p_cfg in l_products: __, prod_cfg = p_name_p_cfg diff --git a/commands/profile.py b/commands/profile.py index 7776f64..599efb9 100644 --- a/commands/profile.py +++ b/commands/profile.py @@ -34,11 +34,11 @@ class Command(_BaseCommand): examples: >> sat profile [PRODUCT] - [-p | --prefix (string)] - [-n | --name (string)] - [-f | --force] - [-v | --version (string)] - [-s | --slogan (string)] + >> sat profile --prefix (string) + >> sat profile --name (string) + >> sat profile --force + >> sat profile --version (string) + >> sat profile --slogan (string) """ name = "profile" @@ -210,11 +210,10 @@ def update_pyconf( config, options, logger ): """ Updates the pyconf """ - #Save previous version pyconf = config.VARS.product + '.pyconf' pyconfBackup = config.VARS.product + '-backup.pyconf' - logger.info( _("Updating %s (previous version saved as %s." ) % (pyconf, pyconfBackup) + logger.info( _("Updating %s (previous version saved as %s." ) % (pyconf, pyconfBackup)) path = config.getPath( pyconf ) shutil.copyfile( os.path.join( path, pyconf ), os.path.join( path, pyconfBackup ) ) diff --git a/commands/script.py b/commands/script.py index f3eec3b..dd3da47 100644 --- a/commands/script.py +++ b/commands/script.py @@ -108,17 +108,17 @@ class Command(_BaseCommand): def get_products_list(options, cfg, logger): - '''method that gives the product list with their informations from - configuration regarding the passed options. 
- - :param options Options: The Options instance that stores the commands - arguments - :param cfg Config: The global configuration - :param logger Logger: The logger instance to use for the display and - logging - :return: The list of (product name, product_informations). - :rtype: List - ''' + """ + Gives the product list with their informations from + configuration regarding the passed options. + + :param options: (Options) + The Options instance that stores the commands arguments + :param cfg: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (list) The list of (product name, product_informations). + """ # Get the products to be prepared, regarding the options if options.products is None: # No options, get all products sources @@ -155,16 +155,16 @@ def log_res_step(logger, res): logger.debug("\n") def run_script_all_products(config, products_infos, nb_proc, logger): - '''Execute the script in each product build directory. - - :param config Config: The global configuration - :param products_info list: List of - (str, Config) => (product_name, product_info) - :param nb_proc int: The number of processors to use - :param logger Logger: The logger instance to use for the display and logging - :return: the number of failing commands. - :rtype: int - ''' + """Execute the script in each product build directory. + + :param config: (Config) The global configuration + :param products_info: (list) + List of (str, Config) => (product_name, product_info) + :param nb_proc: (int) The number of processors to use + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) The number of failing commands. 
+ """ res = 0 for p_name_info in products_infos: res_prod = run_script_of_product(p_name_info, @@ -176,17 +176,18 @@ def run_script_all_products(config, products_infos, nb_proc, logger): return res def run_script_of_product(p_name_info, nb_proc, config, logger): - '''Execute the proper configuration command(s) - in the product build directory. - - :param p_name_info tuple: (str, Config) => (product_name, product_info) - :param nb_proc int: The number of processors to use - :param config Config: The global configuration - :param logger Logger: The logger instance to use for the display - and logging - :return: 1 if it fails, else 0. - :rtype: int - ''' + """ + Execute the proper configuration command(s) + in the product build directory. + + :param p_name_info: (tuple) + (str, Config) => (product_name, product_info) + :param nb_proc: (int) The number of processors to use + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) 1 if it fails, else 0. + """ p_name, p_info = p_name_info diff --git a/commands/source.py b/commands/source.py index 6a802f0..d83b447 100644 --- a/commands/source.py +++ b/commands/source.py @@ -25,19 +25,20 @@ import src.returnCode as RCO from src.salomeTools import _BaseCommand import src.system as SYSS + ######################################################################## # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The source command gets the sources of the application products from cvs, git or an archive. 
examples: - >> sat source SALOME --products KERNEL,GUI + >> sat source SALOME --products KERNEL,GUI """ - name = "sourcre" + name = "source" def getParser(self): """Define all options for command 'sat source '""" @@ -106,15 +107,15 @@ def get_source_for_dev(config, product_info, source_dir, logger, pad): """\ Called if the product is in development mode - :param config Config: The global configuration - :param product_info Config: The configuration specific to - the product to be prepared - :param source_dir Path: The Path instance corresponding to the - directory where to put the sources - :param logger Logger: The logger instance to use for the display and logging - :param pad int: The gap to apply for the terminal display - :return: True if it succeed, else False - :rtype: boolean + :param config: (Config) The global configuration + :param product_info: (Config) + The configuration specific to the product to be prepared + :param source_dir: (Path) + The Path instance corresponding to the directory where to put the sources + :param logger: (Logger) + The logger instance to use for the display and logging + :param pad: (int) The gap to apply for the terminal display + :return: (bool) True if it succeed, else False """ # Call the function corresponding to get the sources with True checkout @@ -137,20 +138,21 @@ def get_source_from_git(product_info, pad, is_dev=False, environ = None): - """\ + """ Called if the product is to be get in git mode - :param product_info Config: The configuration specific to - the product to be prepared - :param source_dir Path: The Path instance corresponding to the - directory where to put the sources - :param logger Logger: The logger instance to use for the display and logging - :param pad int: The gap to apply for the terminal display - :param is_dev boolean: True if the product is in development mode - :param environ src.environment.Environ: The environment to source when - extracting. 
- :return: True if it succeed, else False - :rtype: boolean + :param product_info: (Config) + The configuration specific to the product to be prepared + :param source_dir: (Path) + The Path instance corresponding to the + directory where to put the sources + :param logger Logger: (Logger) + The logger instance to use for the display and logging + :param pad: (int) The gap to apply for the terminal display + :param is_dev: (bool) True if the product is in development mode + :param environ: (src.environment.Environ) + The environment to source when extracting. + :return: (bool) True if it succeed, else False """ # The str to display coflag = 'git' @@ -177,16 +179,18 @@ def get_source_from_git(product_info, return retcode def get_source_from_archive(product_info, source_dir, logger): - '''The method called if the product is to be get in archive mode + """The method called if the product is to be get in archive mode - :param product_info Config: The configuration specific to - the product to be prepared - :param source_dir Path: The Path instance corresponding to the - directory where to put the sources - :param logger Logger: The logger instance to use for the display and logging - :return: True if it succeed, else False - :rtype: boolean - ''' + :param product_info: (Config) + The configuration specific to + the product to be prepared + :param source_dir: (Path) + The Path instance corresponding to the directory + where to put the sources + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (bool) True if it succeed, else False + """ # check archive exists if not os.path.exists(product_info.archive_info.archive_name): raise Exception(_("Archive not found: '%s'") % \ @@ -240,21 +244,23 @@ def get_source_from_cvs(user, logger, pad, environ = None): - '''The method called if the product is to be get in cvs mode + """ + The method called if the product is to be get in cvs mode - :param user str: The user to use in for the cvs 
command - :param product_info Config: The configuration specific to - the product to be prepared - :param source_dir Path: The Path instance corresponding to the - directory where to put the sources - :param checkout boolean: If True, get the source in checkout mode - :param logger Logger: The logger instance to use for the display and logging - :param pad int: The gap to apply for the terminal display - :param environ src.environment.Environ: The environment to source when - extracting. - :return: True if it succeed, else False - :rtype: boolean - ''' + :param user: (str) The user to use in for the cvs command + :param product_info: (Config) + The configuration specific to the product to be prepared + :param source_dir: (Path) + The Path instance corresponding to the directory + where to put the sources + :param checkout: (bool) If True, get the source in checkout mode + :param logger: (Logger) + The logger instance to use for the display and logging + :param pad: (int) The gap to apply for the terminal display + :param environ: (src.environment.Environ) + The environment to source when extracting. + :return: (bool) True if it succeed, else False + """ # Get the protocol to use in the command if "protocol" in product_info.cvs_info: protocol = product_info.cvs_info.protocol @@ -299,20 +305,22 @@ def get_source_from_svn(user, checkout, logger, environ = None): - '''The method called if the product is to be get in svn mode + """The method called if the product is to be get in svn mode - :param user str: The user to use in for the svn command - :param product_info Config: The configuration specific to - the product to be prepared - :param source_dir Path: The Path instance corresponding to the - directory where to put the sources - :param checkout boolean: If True, get the source in checkout mode - :param logger Logger: The logger instance to use for the display and logging - :param environ src.environment.Environ: The environment to source when - extracting. 
- :return: True if it succeed, else False - :rtype: boolean - ''' + :param user: (str) The user to use in for the svn command + :param product_info: (Config) + The configuration specific to the product to be prepared + :param source_dir: (Path) + The Path instance corresponding to the directory + where to put the sources + :param checkout: (boolean) + If True, get the source in checkout mode + :param logger: (Logger) + The logger instance to use for the display and logging + :param environ: (src.environment.Environ) + The environment to source when extracting. + :return: (bool) True if it succeed, else False + """ coflag = 'svn' if checkout: coflag = coflag.upper() @@ -335,20 +343,21 @@ def get_product_sources(config, logger, pad, checkout=False): - '''Get the product sources. + """Get the product sources. - :param config Config: The global configuration - :param product_info Config: The configuration specific to - the product to be prepared - :param is_dev boolean: True if the product is in development mode - :param source_dir Path: The Path instance corresponding to the - directory where to put the sources - :param logger Logger: The logger instance to use for the display and logging - :param pad int: The gap to apply for the terminal display - :param checkout boolean: If True, get the source in checkout mode - :return: True if it succeed, else False - :rtype: boolean - ''' + :param config: (Config) The global configuration + :param product_info: (Config) + The configuration specific to the product to be prepared + :param is_dev: (bool) True if the product is in development mode + :param source_dir: (Path) + The Path instance corresponding to the directory + where to put the sources + :param logger: (Logger) + The logger instance to use for the display and logging + :param pad: (int) The gap to apply for the terminal display + :param checkout: (bool) If True, get the source in checkout mode + :return: (bool) True if it succeed, else False + """ # Get the 
application environment logger.info(_("Set the application environment\n")) @@ -410,14 +419,16 @@ def get_product_sources(config, return False def get_all_product_sources(config, products, logger): - '''Get all the product sources. + """Get all the product sources. - :param config Config: The global configuration - :param products List: The list of tuples (product name, product informations) - :param logger Logger: The logger instance to be used for the logging - :return: the tuple (number of success, dictionary product_name/success_fail) - :rtype: (int,dict) - ''' + :param config: (Config) The global configuration + :param products: (list) + The list of tuples (product name, product informations) + :param logger: (Logger) + The logger instance to be used for the logging + :return: (int,dict) + The tuple (number of success, dictionary product_name/success_fail) + """ # Initialize the variables that will count the fails and success results = dict() @@ -462,11 +473,11 @@ def get_all_product_sources(config, products, logger): max_product_name_len, checkout=False) - ''' + """ if 'no_rpath' in product_info.keys(): if product_info.no_rpath: hack_no_rpath(config, product_info, logger) - ''' + """ # Check that the sources are correctly get using the files to be tested # in product information @@ -496,14 +507,17 @@ def get_all_product_sources(config, products, logger): return good_result, results def check_sources(product_info, logger): - '''Check that the sources are correctly get, using the files to be tested - in product information + """ + Check that the sources are correctly get, + using the files to be tested in product information - :param product_info Config: The configuration specific to - the product to be prepared - :return: True if the files exists (or no files to test is provided). 
- :rtype: boolean - ''' + :param product_info: (Config) + The configuration specific to the product to be prepared + :param logger: (Logger) + The logger instance to be used for the logging + :return: (bool) + True if the files exists (or no files to test is provided). + """ # Get the files to test if there is any if ("present_files" in product_info and "source" in product_info.present_files): diff --git a/commands/template.py b/commands/template.py index 6844c2a..16d0e8c 100644 --- a/commands/template.py +++ b/commands/template.py @@ -41,11 +41,11 @@ except NameError: # Command class ######################################################################## class Command(_BaseCommand): - """\ + """ The template command creates the sources for a SALOME module from a template. examples: - >> sat template --name my_product_name --template PythonComponent --target /tmp + >> sat template --name my_product_name --template PythonComponent --target /tmp """ name = "template" @@ -359,7 +359,7 @@ def prepare_from_template(config, tsettings = TemplateSettings(compo_name, settings_file, target_dir) # first rename the files - logger.debug(UTS.label(_("Rename files\n")) + logger.debug(UTS.label(_("Rename files\n"))) for root, dirs, files in os.walk(target_dir): for fic in files: ff = fic.replace(tsettings.file_subst, compo_name) @@ -462,7 +462,7 @@ def get_template_info(config, template_name, logger): msg += "\n= Configuration\n" msg += " file substitution key = %s\n" % tsettings.file_subst - msg += " substitution key = '%s'\n" % tsettings.delimiter_char) + msg += " substitution key = '%s'\n" % tsettings.delimiter_char if len(tsettings.ignore_filters) > 0: msg += " Ignore Filter = %s\n" % ', '.join(tsettings.ignore_filters) @@ -483,7 +483,7 @@ def get_template_info(config, template_name, logger): retcode = 0 msg = skip - msg += "= Verification\n",) + msg += "= Verification\n" if tsettings.file_subst not in pnames: msg += "file substitution key not defined as a parameter: %s\n" % 
\ tsettings.file_subst diff --git a/commands/test.py b/commands/test.py index 3d9f262..c981016 100644 --- a/commands/test.py +++ b/commands/test.py @@ -74,7 +74,7 @@ Optional: set the display where to launch SALOME. def check_option(self, options): """Check the options - :param options: The options + :param options: (Options) The options :return: None """ if not options.launcher: @@ -288,7 +288,8 @@ Please specify an application or a launcher def ask_a_path(): - """ + """ + interactive as using 'raw_input' """ path = raw_input("enter a path where to save the result: ") if path == "": @@ -425,13 +426,15 @@ def check_remote_machine(machine_name, logger): else: logger.debug("\n") -## -# Creates the XML report for a product. + def create_test_report(config, xml_history_path, dest_path, retcode, xmlname=""): + """ + Creates the XML report for a product. + """ # get the date and hour of the launching of the command, in order to keep # history date_hour = config.VARS.datehour @@ -452,10 +455,6 @@ def create_test_report(config, prod_node.attrib["history_file"] = os.path.basename(xml_history_path) prod_node.attrib["global_res"] = retcode - - # OP 14/11/2017 Ajout de traces pour essayer de decouvrir le pb - # de remontee de log des tests - print "TRACES OP - test.py/create_test_report() : xml_history_path = '#%s#'" %xml_history_path ASNODE = XMLMGR.add_simple_node # shortcut @@ -717,13 +716,13 @@ def create_test_report(config, return src.OK_STATUS def generate_history_xml_path(config, test_base): - """Generate the name of the xml file that contain the history of the tests - on the machine with the current APPLICATION and the current test base. + """ + Generate the name of the xml file that contain the history of the tests + on the machine with the current APPLICATION and the current test base. 
- :param config Config: The global configuration - :param test_base Str: The test base name (or path) - :return: the full path of the history xml file - :rtype: Str + :param config: (Config) The global configuration + :param test_base: (str) The test base name (or path) + :return: (str) the full path of the history xml file """ history_xml_name = "" if "APPLICATION" in config: diff --git a/doc/Makefile b/doc/Makefile index a514380..331eebc 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -138,5 +138,13 @@ doctest: @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." +# splitin many files rst +#apidoc: +# @sphinx-apidoc --separate -o src/apidoc_src ../src +# @sphinx-apidoc --separate -o src/apidoc_commands ../commands + apidoc: - @sphinx-apidoc -o src/commands/apidoc ../src + @sphinx-apidoc -o src/apidoc_src ../src + @sphinx-apidoc -o src/apidoc_commands ../commands + + diff --git a/doc/build/doctrees/apidoc_commands/commands.doctree b/doc/build/doctrees/apidoc_commands/commands.doctree new file mode 100644 index 0000000..1b1f5f4 Binary files /dev/null and b/doc/build/doctrees/apidoc_commands/commands.doctree differ diff --git a/doc/build/doctrees/apidoc_commands/modules.doctree b/doc/build/doctrees/apidoc_commands/modules.doctree new file mode 100644 index 0000000..2c42178 Binary files /dev/null and b/doc/build/doctrees/apidoc_commands/modules.doctree differ diff --git a/doc/build/doctrees/apidoc_src/modules.doctree b/doc/build/doctrees/apidoc_src/modules.doctree new file mode 100644 index 0000000..bcbbc7c Binary files /dev/null and b/doc/build/doctrees/apidoc_src/modules.doctree differ diff --git a/doc/build/doctrees/apidoc_src/src.colorama.doctree b/doc/build/doctrees/apidoc_src/src.colorama.doctree new file mode 100644 index 0000000..ff45f4e Binary files /dev/null and b/doc/build/doctrees/apidoc_src/src.colorama.doctree differ diff --git a/doc/build/doctrees/apidoc_src/src.doctree 
b/doc/build/doctrees/apidoc_src/src.doctree new file mode 100644 index 0000000..c17d100 Binary files /dev/null and b/doc/build/doctrees/apidoc_src/src.doctree differ diff --git a/doc/build/doctrees/apidoc_src/src.example.doctree b/doc/build/doctrees/apidoc_src/src.example.doctree new file mode 100644 index 0000000..62fe49b Binary files /dev/null and b/doc/build/doctrees/apidoc_src/src.example.doctree differ diff --git a/doc/build/doctrees/commands/apidoc/modules.doctree b/doc/build/doctrees/commands/apidoc/modules.doctree deleted file mode 100644 index 512c851..0000000 Binary files a/doc/build/doctrees/commands/apidoc/modules.doctree and /dev/null differ diff --git a/doc/build/doctrees/commands/apidoc/src.colorama.doctree b/doc/build/doctrees/commands/apidoc/src.colorama.doctree deleted file mode 100644 index 6d1698c..0000000 Binary files a/doc/build/doctrees/commands/apidoc/src.colorama.doctree and /dev/null differ diff --git a/doc/build/doctrees/commands/apidoc/src.doctree b/doc/build/doctrees/commands/apidoc/src.doctree deleted file mode 100644 index a025616..0000000 Binary files a/doc/build/doctrees/commands/apidoc/src.doctree and /dev/null differ diff --git a/doc/build/doctrees/commands/apidoc/src.example.doctree b/doc/build/doctrees/commands/apidoc/src.example.doctree deleted file mode 100644 index 7e279da..0000000 Binary files a/doc/build/doctrees/commands/apidoc/src.example.doctree and /dev/null differ diff --git a/doc/build/doctrees/commands/application.doctree b/doc/build/doctrees/commands/application.doctree index acfae09..503e3f7 100644 Binary files a/doc/build/doctrees/commands/application.doctree and b/doc/build/doctrees/commands/application.doctree differ diff --git a/doc/build/doctrees/commands/clean.doctree b/doc/build/doctrees/commands/clean.doctree index a2756bc..843bf82 100644 Binary files a/doc/build/doctrees/commands/clean.doctree and b/doc/build/doctrees/commands/clean.doctree differ diff --git a/doc/build/doctrees/commands/config.doctree 
b/doc/build/doctrees/commands/config.doctree index 0c89c97..326cbf2 100644 Binary files a/doc/build/doctrees/commands/config.doctree and b/doc/build/doctrees/commands/config.doctree differ diff --git a/doc/build/doctrees/commands/prepare.doctree b/doc/build/doctrees/commands/prepare.doctree index cf11a63..ba7e36d 100644 Binary files a/doc/build/doctrees/commands/prepare.doctree and b/doc/build/doctrees/commands/prepare.doctree differ diff --git a/doc/build/doctrees/environment.pickle b/doc/build/doctrees/environment.pickle index 198b5cd..2ad2b55 100644 Binary files a/doc/build/doctrees/environment.pickle and b/doc/build/doctrees/environment.pickle differ diff --git a/doc/build/doctrees/index.doctree b/doc/build/doctrees/index.doctree index a4913f7..df9b1bf 100644 Binary files a/doc/build/doctrees/index.doctree and b/doc/build/doctrees/index.doctree differ diff --git a/doc/build/doctrees/installation_of_sat.doctree b/doc/build/doctrees/installation_of_sat.doctree index a177464..639a0ac 100644 Binary files a/doc/build/doctrees/installation_of_sat.doctree and b/doc/build/doctrees/installation_of_sat.doctree differ diff --git a/doc/build/doctrees/release_notes/release_notes_5.0.0.doctree b/doc/build/doctrees/release_notes/release_notes_5.0.0.doctree index 4857485..00fe84e 100644 Binary files a/doc/build/doctrees/release_notes/release_notes_5.0.0.doctree and b/doc/build/doctrees/release_notes/release_notes_5.0.0.doctree differ diff --git a/doc/build/html/_modules/commands/application.html b/doc/build/html/_modules/commands/application.html new file mode 100644 index 0000000..d882b9b --- /dev/null +++ b/doc/build/html/_modules/commands/application.html @@ -0,0 +1,597 @@ + + + + + + + + commands.application — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.application

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2018  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+"""
+Is a salomeTools command module
+see Command class docstring, also used for help
+"""
+
+import os
+import getpass
+import subprocess
+
+import src.ElementTree as ET
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """ + The application command creates a SALOME application. + + | Warning: + | It works only for SALOME 6. + | Use the 'launcher' command for newer versions of SALOME + | + | Examples: + | >> sat application SALOME-6.6.0 + """ + + name = "application" + +
[docs] def getParser(self): + """Define all options for command 'sat application <options>'""" + parser = self.getParserWithHelp() + parser.add_option('n', 'name', 'string', 'name', + _("""\ +Optional: The name of the application + (default is APPLICATION.virtual_app.name or runAppli)""") ) + parser.add_option('c', 'catalog', 'string', 'catalog', + _('Optional: The resources catalog to use') ) + parser.add_option('t', 'target', 'string', 'target', + _("""\ +Optional: The directory where to create the application + (default is APPLICATION.workdir)""") ) + parser.add_option('', 'gencat', 'string', 'gencat', + _("""\ +Optional: Create a resources catalog for the specified machines (separated with ',') +NOTICE: this command will ssh to retrieve information to each machine in the list""") ) + parser.add_option('m', 'module', 'list2', 'modules', + _("Optional: the restricted list of module(s) to include in the application") ) + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat application <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + # check for product + src.check_config_has_application( config ) + + application = config.VARS.application + logger.info(_("Building application for <header>%s<reset>\n") % application) + + # if section APPLICATION.virtual_app does not exists create one + if "virtual_app" not in config.APPLICATION: + msg = _("The section APPLICATION.virtual_app is not defined in the product.") + logger.error(UTS.red(msg)) + return RCO.ReturnCode("KO", msg) + + # get application dir + target_dir = config.APPLICATION.workdir + if options.target: + target_dir = options.target + + # set list of modules + if options.modules: + config.APPLICATION.virtual_app['modules'] = options.modules + + # set name and application_name + if options.name: + config.APPLICATION.virtual_app['name'] = options.name + config.APPLICATION.virtual_app['application_name'] = options.name + "_appdir" + + application_name = src.get_cfg_param(config.APPLICATION.virtual_app, + "application_name", + config.APPLICATION.virtual_app.name + "_appdir") + appli_dir = os.path.join(target_dir, application_name) + + fmt = " %s = %s\n" # as " label = value\n" + logger.info(fmt % (_("Application directory"), appli_dir)) + + # get catalog + catalog, catalog_src = "", "" + if options.catalog: + # use catalog specified in the command line + catalog = options.catalog + elif options.gencat: + # generate catalog for given list of 
computers + catalog_src = options.gencat + catalog = generate_catalog(options.gencat.split(","), + config,logger) + elif 'catalog' in config.APPLICATION.virtual_app: + # use catalog specified in the product + if config.APPLICATION.virtual_app.catalog.endswith(".xml"): + # catalog as a file + catalog = config.APPLICATION.virtual_app.catalog + else: + # catalog as a list of computers + catalog_src = config.APPLICATION.virtual_app.catalog + mlist = filter(lambda l: len(l.strip()) > 0, + config.APPLICATION.virtual_app.catalog.split(",")) + if len(mlist) > 0: + catalog = generate_catalog(config.APPLICATION.virtual_app.catalog.split(","), + config, logger) + + # display which catalog is used + if len(catalog) > 0: + catalog = os.path.realpath(catalog) + if len(catalog_src) > 0: + logger.info(fmt % (_("Resources Catalog"), catalog_src)) + else: + logger.info(fmt % (_("Resources Catalog"), catalog)) + + details = [] + + # remove previous application + if os.path.exists(appli_dir): + logger.info(get_step(_("Removing previous application directory"))) + rres = "<KO>" + try: + shutil.rmtree(appli_dir) + rres = "<OK>" + finally: + logger.info(rres + "\n") + + # generate the application + try: + try: # try/except/finally not supported in all version of python + retcode = create_application(config, appli_dir, catalog, logger) + except Exception as exc: + details.append(str(exc)) + raise + finally: + logger.info("\n") + + return RCO.ReturnCode("OK")
+ + + +# Creates an alias for runAppli. +
[docs]def make_alias(appli_path, alias_path, force=False): + assert len(alias_path) > 0, "Bad name for alias" + if os.path.exists(alias_path) and not force: + raise Exception(_("Cannot create the alias '%s'\n") % alias_path) + else: # find relative path + os.symlink(appli_path, alias_path)
+ +
[docs]def add_module_to_appli(out, module, has_gui, module_path, logger, flagline): + """add the definition of a module to out stream.""" + if not os.path.exists(module_path): + if not flagline: + logger.info("\n") + flagline = True + logger.warning(" %s\n" + _("module %s not installed") % module) + + out.write(' <module name="%s" gui="%s" path="%s"/>\n' % \ + (module, has_gui, module_path)) + return flagline
+ +## +# Creates the config file to create an application with the list of modules. +
[docs]def create_config_file(config, modules, env_file, logger): + + samples = "" + if 'SAMPLES' in config.APPLICATION.products: + samples = src.product.get_product_config(config, 'SAMPLES').source_dir + + config_file = src.get_tmp_filename(config, "appli_config.xml") + f = open(config_file, "w") + + f.write('<application>\n') + if env_file.endswith("cfg"): + f.write('<context path="%s"/>\n' % env_file) + else: + f.write('<prerequisites path="%s"/>\n' % env_file) + f.write('<resources path="CatalogResources.xml"/>\n') + f.write('<modules>\n') + + flagline = False + for m in modules: + mm = src.product.get_product_config(config, m) + if src.product.product_is_smesh_plugin(mm): + continue + + if 'install_dir' in mm and bool(mm.install_dir): + if src.product.product_is_cpp(mm): + # cpp module + for aa in src.product.get_product_components(mm): + install_dir = os.path.join(config.APPLICATION.workdir, + "INSTALL") + mp = os.path.join(install_dir, aa) + flagline = add_module_to_appli(f, + aa, + "yes", + mp, + logger, + flagline) + else: + # regular module + mp = mm.install_dir + gui = src.get_cfg_param(mm, "has_gui", "yes") + flagline = add_module_to_appli(f, m, gui, mp, logger, flagline) + + f.write('</modules>\n') + f.write('<samples path="%s"/>\n' % samples) + f.write('</application>\n') + f.close() + + return config_file
+ + +
[docs]def customize_app(config, appli_dir, logger): + """Customizes the application by editing SalomeApp.xml.""" + if 'configure' not in config.APPLICATION.virtual_app \ + or len(config.APPLICATION.virtual_app.configure) == 0: + return + + def get_element(parent, name, strtype): + """shortcut to get an element (section or parameter) from parent.""" + for c in parent.getchildren(): + if c.attrib['name'] == name: + return c + + # element not found create it + elt = add_simple_node(parent, strtype) + elt.attrib['name'] = name + return elt + + def add_simple_node(parent, node_name, text=None): + """shortcut method to create a node""" + n = etree.Element(node_name) + if text is not None: + try: + n.text = text.strip("\n\t").decode("UTF-8") + except: + logger.error("problem decode UTF8 %s:\n%s\n" % \ + (node_name, UTS.toHex(text))) + n.text = "?" + parent.append(n) + return n + + # read the app file + app_file = os.path.join(appli_dir, "SalomeApp.xml") + tree = etree.parse(app_file) + document = tree.getroot() + assert document is not None, "document tag not found" + + logger.info("\n") + for section_name in config.APPLICATION.virtual_app.configure: + for parameter_name in config.APPLICATION.virtual_app.configure[section_name]: + parameter_value = config.APPLICATION.virtual_app.configure[section_name][parameter_name] + logger.info(" configure: %s/%s = %s\n" % (section_name, + parameter_name, + parameter_value)) + section = get_element(document, section_name, "section") + parameter = get_element(section, parameter_name, "parameter") + parameter.attrib['value'] = parameter_value + + # write the file + f = open(app_file, "w") + f.write("<?xml version='1.0' encoding='utf-8'?>\n") + f.write(etree.tostring(document, encoding='utf-8')) + f.close()
+ +## +# Generates the application with the config_file. +
[docs]def generate_application(config, appli_dir, config_file, logger): + target_dir = os.path.dirname(appli_dir) + + install_KERNEL_dir = src.product.get_product_config(config, + 'KERNEL').install_dir + script = os.path.join(install_KERNEL_dir, "bin", "salome", "appli_gen.py") + if not os.path.exists(script): + raise Exception(_("KERNEL is not installed")) + + # Add SALOME python in the environment in order to avoid python version + # problems at appli_gen.py call + if 'Python' in config.APPLICATION.products: + envi = src.environment.SalomeEnviron(config, + src.environment.Environ( + dict(os.environ)), + True) + envi.set_a_product('Python', logger) + + command = "python %s --prefix=%s --config=%s" % (script, + appli_dir, + config_file) + logger.debug("\n>" + command + "\n") + res = subprocess.call(command, + shell=True, + cwd=target_dir, + env=envi.environ.environ, + stdout=logger.logTxtFile, + stderr=subprocess.STDOUT) + + if res != 0: + raise Exception(_("Cannot create application, code = %d\n") % res) + + return res
+ +
[docs]def get_step(logger, message, pad=50): + """ + returns 'message ........ ' with pad 50 by default + avoid colors '<color>' for now in message + """ + return "%s %s " % (message, '.' * (pad - len(message.decode("UTF-8"))))
+ +## +# Creates a SALOME application. +
[docs]def create_application(config, appli_dir, catalog, logger, display=True): + + SALOME_modules = get_SALOME_modules(config) + + warn = ['KERNEL', 'GUI'] + if display: + for w in warn: + if w not in SALOME_modules: + msg = _("module %s is required to create application\n") % w + logger.warning(msg) + + # generate the launch file + retcode = generate_launch_file(config, appli_dir, catalog, logger, SALOME_modules) + + if retcode == 0: + cmd = UTS.label("%s/salome" % appli_dir) + + if display: + msg = _("To launch the application, type:") + logger.info("\n%s\n %s\n" % (msg, cmd)) + return retcode
+ +
[docs]def get_SALOME_modules(config): + l_modules = [] + for product in config.APPLICATION.products: + product_info = src.product.get_product_config(config, product) + if (src.product.product_is_SALOME(product_info) or + src.product.product_is_generated(product_info)): + l_modules.append(product) + return l_modules
+ +
[docs]def generate_launch_file(config, appli_dir, catalog, logger, l_SALOME_modules): + """ + Obsolescent way of creating the application. + This method will use appli_gen to create the application directory. + """ + retcode = -1 + + if len(catalog) > 0 and not os.path.exists(catalog): + raise IOError(_("Catalog not found: %s") % catalog) + + logger.info(get_step(_("Creating environment files"))) + status = "<KO>" + + VersionSalome = src.get_salome_version(config) + if VersionSalome >= 820: + # for salome 8+ we use a salome context file for the virtual app + app_shell="cfg" + env_ext="cfg" + else: + app_shell="bash" + env_ext="sh" + + try: + import environ + # generate only shells the user wants (by default bash, csh, batch) + # the environ command will only generate file compatible + # with the current system. + environ.write_all_source_files(config, + logger, + shells=[app_shell], + silent=True) + status = "<OK>" + finally: + logger.info(status + "\n") + + # build the application (the name depends upon salome version + env_file = os.path.join(config.APPLICATION.workdir, "env_launch." + env_ext) + + logger.info(get_step(_("Building application"))) + cf = create_config_file(config, l_SALOME_modules, env_file, logger) + + # create the application directory + os.makedirs(appli_dir) + + # generate the application + status = "<KO>" + try: + retcode = generate_application(config, appli_dir, cf, logger) + customize_app(config, appli_dir, logger) + status = "<OK>" + finally: + logger.info(status + "\n") + + # copy the catalog if one + if len(catalog) > 0: + shutil.copy(catalog, os.path.join(appli_dir, "CatalogResources.xml")) + + return retcode
+ +
def generate_catalog(machines, config, logger):
    """Generate a CatalogResources.xml file from a list of machines.

    Each machine is probed over ssh for CPU frequency, processor count
    and memory; one <machine> entry is written per reachable host.

    :param machines: (list) The machine host names.
    :param config: (Config) The global configuration.
    :param logger: (Logger) The logger instance to use
      for the display and logging.
    :return: (str) The path of the generated catalog file.
    """
    # remove empty machines
    # FIX: materialize as a list — on python 3, map/filter return one-shot
    # iterators that would be exhausted by the join() below, leaving the
    # main loop with nothing to iterate
    machines = [m for m in (l.strip() for l in machines) if len(m) > 0]

    # FIX: the second value was passed as an extra positional argument to
    # logger.debug(), so the "%s = %s" % (...) formatting raised TypeError
    logger.debug("  %s = %s" % (_("Generate Resources Catalog"),
                                ", ".join(machines)))

    # remote command probing CPU and memory of each host
    cmd = '"cat /proc/cpuinfo | grep MHz ; cat /proc/meminfo | grep MemTotal"'
    user = getpass.getuser()

    catfile = src.get_tmp_filename(config, "CatalogResources.xml")
    # FIX: the 'file' builtin does not exist on python 3; use open()
    catalog = open(catfile, "w")
    catalog.write("""\
<!DOCTYPE ResourcesCatalog>
<resources>
""")

    for k in machines:
        logger.info("  ssh %s " % (k + " ").ljust(20, '.'), 4)

        ssh_cmd = 'ssh -o "StrictHostKeyChecking no" %s %s' % (k, cmd)
        p = subprocess.Popen(ssh_cmd, shell=True,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        p.wait()

        if p.returncode != 0:
            logger.error("<KO>\n%s\n" % UTS.red(p.stderr.read()))
        else:
            logger.info("<OK>\n")
            lines = p.stdout.readlines()
            # first line: "cpu MHz : 2400.000" -> keep the integer part
            freq = lines[0][:-1].split(':')[-1].split('.')[0].strip()
            # one "cpu MHz" line per core, plus the trailing MemTotal line
            nb_proc = len(lines) - 1
            memory = lines[-1].split(':')[-1].split()[0].strip()
            # kB -> MB; '//' keeps the python 2 integer-division semantics
            memory = int(memory) // 1000

            msg = """\
    <machine
        protocol="ssh"
        nbOfNodes="1"
        mode="interactif"
        OS="LINUX"
        CPUFreqMHz="%s"
        nbOfProcPerNode="%s"
        memInMB="%s"
        userName="%s"
        name="%s"
        hostname="%s"
    >
    </machine>
"""
            msg = msg % (freq, nb_proc, memory, user, k, k)
            catalog.write(msg)

    catalog.write("</resources>\n")
    catalog.close()
    return catfile
+ +
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/check.html b/doc/build/html/_modules/commands/check.html new file mode 100644 index 0000000..8bc6e5e --- /dev/null +++ b/doc/build/html/_modules/commands/check.html @@ -0,0 +1,344 @@ + + + + + + + + commands.check — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.check

+#!/usr/bin/env python
+
+#-*- coding:utf-8 -*-
+#  Copyright (C) 2010-2018  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+
+CHECK_PROPERTY = "has_unit_tests"
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The check command executes the 'check' command in the build directory of
    all the products of the application.
    It is possible to reduce the list of products to check
    by using the --products option

    examples:
      >> sat check SALOME --products KERNEL,GUI,GEOM
    """

    # command name as invoked on the 'sat' command line
    name = "check"

    def getParser(self):
        """Define all options for the check command 'sat check <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option('p', 'products', 'list2', 'products',
            _("""\
Optional: products to configure.
  This option can be passed several time to configure several products."""))
        return parser

    def run(self, cmd_arguments):
        """method called for command 'sat check <options>'"""
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # check that the command has been called with an application
        # NOTE(review): 'src', 'UTS' and 'os' are used below but no
        # corresponding import is visible in this module — confirm the
        # imports at the top of the file
        src.check_config_has_application( config )

        # Get the list of products to treat
        products_infos = get_products_list(options, config, logger)

        # Print some informations
        msg = _('Executing the check command in the build directories of the application')
        logger.info("%s %s\n" % (msg, UTS.label(config.VARS.application)))

        info = [(_("BUILD directory"),
                 os.path.join(config.APPLICATION.workdir, 'BUILD'))]
        UTS.logger_info_tuples(logger, info)

        # Call the function that will loop over all the products and execute
        # the right command(s)
        res = check_all_products(config, products_infos, logger)

        # Print the final state: res is the number of failing products
        nb_products = len(products_infos)
        if res == 0:
            final_status = "<OK>"
        else:
            final_status = "<KO>"

        logger.info(_("\nCheck: %(status)s (%(1)d/%(2)d)\n") % \
            { 'status': final_status,
              '1': nb_products - res,
              '2': nb_products })

        return res
+ + +
def get_products_list(options, cfg, logger):
    """
    Give the product list with their informations from
    configuration regarding the passed options.

    :param options: (Options) The Options instance that stores
      the commands arguments
    :param cfg: (Config) The global configuration
    :param logger: (Logger) The logger instance to use
      for the display and logging
    :return: (list) The list of (product name, product_informations).
    """
    if options.products is None:
        # no --products option: treat every product of the application
        wanted = cfg.APPLICATION.products
    else:
        # with --products, each requested product must belong
        # to the application
        wanted = options.products
        for candidate in wanted:
            if candidate not in cfg.APPLICATION.products:
                raise Exception(
                    _("Product %(1)s not defined in application %(2)s")
                    % {'1': candidate, '2': cfg.VARS.application})

    # build the (name, definition) tuples, dropping native and
    # fixed products which have no build directory to check
    infos = src.product.get_products_infos(wanted, cfg)

    def _checkable(info):
        return not (src.product.product_is_native(info)
                    or src.product.product_is_fixed(info))

    return [(name, info) for name, info in infos if _checkable(info)]
+ +
def log_step(logger, header, step):
    """Overwrite the current console line with 'header' followed by 'step'."""
    blank = " " * 20
    # first wipe the previous step text, then write the new one
    for tail in (blank, step):
        logger.info("\r" + header + tail)
+ +
def log_res_step(logger, res):
    """Log '<OK>' when res is 0, '<KO>' otherwise (debug level)."""
    status = "<OK>" if res == 0 else "<KO>"
    logger.debug(status + "\n")
+ +
def check_all_products(config, products_infos, logger):
    """
    Execute the proper configuration commands
    in each product build directory.

    :param config: (Config) The global configuration
    :param products_infos: (list)
      List of (str, Config) => (product_name, product_info)
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :return: (int) the number of failing commands.
    """
    # count one failure per product whose check command fails;
    # products are still treated in list order
    return sum(1 for pni in products_infos
               if check_product(pni, config, logger) != 0)
+ +
def check_product(p_name_info, config, logger):
    """
    Execute the proper configuration command(s)
    in the product build directory.

    :param p_name_info: (tuple)
      (str, Config) => (product_name, product_info)
    :param config: (Config) The global configuration
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :return: (int) 1 if it fails, else 0.
    """
    p_name, p_info = p_name_info

    header = _("Check of %s") % UTS.label(p_name)
    header += " %s " % ("." * (20 - len(p_name)))
    logger.info(header)

    # Verify if the command has to be launched or not
    ignored = False
    # FIX: was 'msg += ""' on a name that was never assigned,
    # raising UnboundLocalError on every call
    msg = ""
    if not src.get_property_in_product_cfg(p_info, CHECK_PROPERTY):
        msg += _("The product %s is defined as not having tests: product ignored.\n") % p_name
        ignored = True
    if "build_dir" not in p_info:
        msg += _("The product %s have no 'build_dir' key: product ignored.\n") % p_name
        ignored = True
    if not src.product.product_compiles(p_info):
        msg += _("The product %s is defined as not compiling: product ignored.\n") % p_name
        ignored = True

    logger.info("%s\n" % msg)

    # Get the command to execute for script products
    cmd_found = True
    command = ""
    if src.product.product_has_script(p_info) and not ignored:
        command = src.get_cfg_param(p_info, "test_build", "Not found")
        if command == "Not found":
            cmd_found = False
            # FIX: the original format string mixed a positional '%s' with a
            # bare '%(name)' (missing conversion type) and applied '%' to a
            # plain string; use named formatting with a mapping throughout
            msg = _("""\
The product %(name)s is defined as having tests.
But it is compiled using a script and the key 'test_build'
is not defined in the definition of %(name)s\n""") % {"name": p_name}
            logger.warning(msg)

    if ignored or not cmd_found:
        log_step(logger, header, "ignored")
        logger.debug("==== %s %s\n" % (p_name, "IGNORED"))
        # a missing 'test_build' key is an error, a deliberately
        # ignored product is not
        if not cmd_found:
            return 1
        return 0

    # Instantiate the class that manages all the construction commands
    # like cmake, check, make install, make test, environment management, etc...
    builder = src.compilation.Builder(config, logger, p_info)

    # Prepare the environment
    log_step(logger, header, "PREPARE ENV")
    res_prepare = builder.prepare()
    log_res_step(logger, res_prepare)

    len_end_line = 20

    # Launch the check
    log_step(logger, header, "CHECK")
    res = builder.check(command=command)
    log_res_step(logger, res)

    # Log the result
    if res > 0:
        logger.info("\r%s%s" % (header, " " * len_end_line))
        logger.info("\r" + header + "<KO>\n")
        logger.debug("==== <KO> in check of %s\n" % p_name)
    else:
        logger.info("\r%s%s" % (header, " " * len_end_line))
        logger.info("\r" + header + "<OK>\n")
        logger.debug("==== <OK> in check of %s\n" % p_name)
    logger.info("\n")

    return res
+ +
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/clean.html b/doc/build/html/_modules/commands/clean.html new file mode 100644 index 0000000..7ccaae4 --- /dev/null +++ b/doc/build/html/_modules/commands/clean.html @@ -0,0 +1,326 @@ + + + + + + + + commands.clean — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.clean

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2018  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import re
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+
+# Compatibility python 2/3 for input function
+# input stays input for python 3 and input = raw_input for python 2
+try: 
+    input = raw_input
+except NameError: 
+    pass
+
+PROPERTY_EXPRESSION = "^.+:.+$"
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The clean command suppresses the source, build, or install directories
    of the application products.
    Use the options to define what directories you want to suppress and
    to reduce the list of products

    examples:
      >> sat clean SALOME --build --install --properties is_salome_module:yes
    """

    # command name as invoked on the 'sat' command line
    name = "clean"

    def getParser(self):
        """Define all options for the command 'sat clean <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option('p', 'products', 'list2', 'products',
            _('Optional: Products to clean. This option can be'
              ' passed several time to clean several products.'))
        parser.add_option('', 'properties', 'string', 'properties',
            _('Optional: Filter the products by their properties.\n'
              '\tSyntax: --properties <property>:<value>'))
        parser.add_option('s', 'sources', 'boolean', 'sources',
            _("Optional: Clean the product source directories."))
        parser.add_option('b', 'build', 'boolean', 'build',
            _("Optional: Clean the product build directories."))
        parser.add_option('i', 'install', 'boolean', 'install',
            _("Optional: Clean the product install directories."))
        parser.add_option('a', 'all', 'boolean', 'all',
            _("Optional: Clean the product source, build and install directories."))
        parser.add_option('', 'sources_without_dev', 'boolean', 'sources_without_dev',
            _("Optional: do not clean the products in development mode."))
        return parser

    def run(self, cmd_arguments):
        """method called for command 'sat clean <options>'"""
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # check that the command has been called with an application
        # NOTE(review): 'src' is used here but no bare 'import src' is
        # visible in this module — confirm the imports at the top of the file
        src.check_config_has_application( config )

        # Verify the --properties option: it must match '<property>:<value>'
        if options.properties:
            oExpr = re.compile(PROPERTY_EXPRESSION)
            if not oExpr.search(options.properties):
                msg = _("""\
The '--properties' options must have the following syntax:
  --properties <property>:<value>\n""")
                logger.error(msg)
                # an ill-formed filter is dropped, not fatal
                options.properties = None

        # Get the list of products to treat
        products_infos = self.get_products_list(options, config, logger)

        # Construct the list of directories to suppress
        l_dir_to_suppress = []
        if options.all:
            l_dir_to_suppress += (get_source_directories(products_infos,
                                        options.sources_without_dev) +
                                  get_build_directories(products_infos) +
                                  get_install_directories(products_infos))
        else:
            if options.install:
                l_dir_to_suppress += get_install_directories(products_infos)

            if options.build:
                l_dir_to_suppress += get_build_directories(products_infos)

            if options.sources or options.sources_without_dev:
                l_dir_to_suppress += get_source_directories(products_infos,
                                            options.sources_without_dev)

        if len(l_dir_to_suppress) == 0:
            sat_command = ("sat -h clean")
            msg = _("Nothing to suppress, Please specify what you want to suppress.")
            logger.error(msg + "\nsee: '%s'\n" % sat_command)
            return RCO.ReturnCode("KO", "specify what you want to suppress")

        # Check with the user if he really wants to suppress the directories
        # (skipped in batch mode)
        if not runner.options.batch:
            msg = _("Remove the following directories ?\n")
            for directory in l_dir_to_suppress:
                msg += "  %s\n" % directory
            logger.info(msg)
            rep = input(_("Are you sure you want to continue? [Yes/No] "))
            if rep.upper() != _("YES"):
                return RCO.ReturnCode("OK", "user do not want to continue")

        # Suppress the list of paths
        suppress_directories(l_dir_to_suppress, logger)

        return RCO.ReturnCode("OK", "clean done")
+ + +
def get_source_directories(products_infos, without_dev):
    """
    Return the list of source directory paths corresponding to the list of
    product information given as input. If without_dev (bool), then
    the dev products are ignored.

    :param products_infos: (list)
      The list of (name, config) corresponding to one product.
    :param without_dev: (boolean) If True, then ignore the dev products.
    :return: (list) the list of source paths.
    """
    return [src.Path(info.source_dir)
            for __, info in products_infos
            if product_has_dir(info, without_dev)]
+ +
def get_build_directories(products_infos):
    """
    Return the list of build directory paths corresponding to the list of
    product information given as input.

    :param products_infos: (list)
      The list of (name, config) corresponding to one product.
    :return: (list) the list of build paths.
    """
    # only products that actually declare a 'build_dir' key contribute
    return [src.Path(info.build_dir)
            for __, info in products_infos
            if product_has_dir(info) and "build_dir" in info]
+ +
def get_install_directories(products_infos):
    """
    Return the list of install directory paths corresponding to the list of
    product information given as input.

    :param products_infos: (list)
      The list of (name, config) corresponding to one product.
    :return: (list) the list of install paths.
    """
    return [src.Path(info.install_dir)
            for __, info in products_infos
            if product_has_dir(info)]
+ +
def product_has_dir(product_info, without_dev=False):
    """
    Tell whether the product owns source/build/install directories,
    i.e. it is neither native nor fixed (and, when without_dev is True,
    not in development mode either).

    :param product_info: (Config) The config corresponding to the product.
    :param without_dev: (bool) If True, dev products are excluded too.
    :return: (bool) True when the product has its own directories.
    """
    # native and fixed products have no directories managed by sat
    if src.product.product_is_native(product_info):
        return False
    if src.product.product_is_fixed(product_info):
        return False
    return not (without_dev and src.product.product_is_dev(product_info))
+ +
def suppress_directories(l_paths, logger):
    """Suppress the paths given in the list in l_paths.

    :param l_paths: (list) The list of Path to be suppressed
    :param logger: (Logger)
      The logger instance to use for the display and logging
    """
    for path in l_paths:
        name = str(path)
        if path.isdir():
            logger.info(_("Removing %s ...") % name)
            path.rm()
            logger.info('<OK>\n')
        else:
            # an absent path (or a non-directory) is only reported,
            # it does not abort the cleaning of the remaining paths
            logger.warning(
                _("The path %s does not exists (or is not a directory)\n") % name)
+ +
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/compile.html b/doc/build/html/_modules/commands/compile.html new file mode 100644 index 0000000..b689ce6 --- /dev/null +++ b/doc/build/html/_modules/commands/compile.html @@ -0,0 +1,817 @@ + + + + + + + + commands.compile — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.compile

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2018  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import os
+import src.debug as DBG
+import src.returnCode as RCO
+import src.pyconf as PYCONF
+from src.salomeTools import _BaseCommand
+
+# Compatibility python 2/3 for input function
+# input stays input for python 3 and input = raw_input for python 2
+try: 
+    input = raw_input
+except NameError: 
+    pass
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The compile command constructs the products of the application

    examples:
      >> sat compile SALOME --products KERNEL,GUI,MEDCOUPLING --clean_all
    """

    # command name as invoked on the 'sat' command line
    name = "compile"

    def getParser(self):
        """Define all options for the command 'sat compile <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option(
            'p', 'products', 'list2', 'products',
            _('Optional: products to configure. This option can be passed several time to configure several products.'))
        parser.add_option(
            '', 'with_fathers', 'boolean', 'fathers',
            _("Optional: build all necessary products to the given product (KERNEL is build before building GUI)."),
            False)
        parser.add_option(
            '', 'with_children', 'boolean', 'children',
            _("Optional: build all products using the given product (all SMESH plugins are build after SMESH)."),
            False)
        parser.add_option(
            '', 'clean_all', 'boolean', 'clean_all',
            _("Optional: clean BUILD dir and INSTALL dir before building product."),
            False)
        parser.add_option(
            '', 'clean_install', 'boolean', 'clean_install',
            _("Optional: clean INSTALL dir before building product."), False)
        parser.add_option(
            '', 'make_flags', 'string', 'makeflags',
            _("Optional: add extra options to the 'make' command."))
        parser.add_option(
            '', 'show', 'boolean', 'no_compile',
            _("Optional: DO NOT COMPILE just show if products are installed or not."),
            False)
        parser.add_option(
            '', 'stop_first_fail', 'boolean', 'stop_first_fail', _(
            "Optional: Stops the command at first product compilation fail."),
            False)
        parser.add_option(
            '', 'check', 'boolean', 'check',
            _("Optional: execute the unit tests after compilation"),
            False)
        parser.add_option(
            '', 'clean_build_after', 'boolean', 'clean_build_after',
            _('Optional: remove the build directory after successful compilation'),
            False)
        return parser

    def run(self, cmd_arguments):
        """method called for command 'sat compile <options>'"""
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # Warn the user if he invoked the clean_all option
        # without --products option
        if (options.clean_all and
            options.products is None and
            not runner.options.batch):
            rep = input(_("You used --clean_all without specifying a product"
                          " are you sure you want to continue? [Yes/No] "))
            if rep.upper() != _("YES").upper():
                # NOTE(review): returns a bare int here whereas the other
                # early exits return RCO.ReturnCode — confirm callers
                # accept both
                return 0

        # check that the command has been called with an application
        # NOTE(review): 'src' and 'UTS' are used below but no matching
        # import is visible in this module — confirm the file-top imports
        src.check_config_has_application( config )

        # Print some informations
        nameApp = str(config.VARS.application)
        srcDir = os.path.join(config.APPLICATION.workdir, 'SOURCES')
        buildDir = os.path.join(config.APPLICATION.workdir, 'BUILD')

        msg = _("Application %s, executing compile commands in build directories of products.\n")
        logger.info(msg % UTS.label(nameApp))

        info = [ (_("SOURCE directory"), srcDir),
                 (_("BUILD directory"), buildDir) ]
        UTS.logger_info_tuples(logger, info)

        # Get the list of products to treat
        products_infos = get_products_list(options, config, logger)

        if options.fathers:
            # Extend the list with all recursive dependencies of the given products
            products_infos = extend_with_fathers(config, products_infos)

        if options.children:
            # Extend the list with all products that use the given products
            products_infos = extend_with_children(config, products_infos)

        # Sort the list regarding the dependencies of the products
        products_infos = sort_products(config, products_infos)

        # Call the function that will loop over all the products and execute
        # the right command(s)
        res = compile_all_products(runner, config, options, products_infos, logger)

        # Print the final state: res is the number of failing products
        nb_products = len(products_infos)
        if res == 0:
            final_status = "<OK>"
        else:
            final_status = "<KO>"

        logger.info(_("\nCompilation: %(status)s (%(1)d/%(2)d)\n") % \
            { 'status': final_status,
              '1': nb_products - res,
              '2': nb_products })

        # collapse the failure count to a 0/1 exit code
        code = res
        if code != 0:
            code = 1
        return code
+ + +
def get_products_list(options, cfg, logger):
    """
    Give the product list with their informations from
    configuration regarding the passed options.

    :param options: (Options)
      The Options instance that stores the commands arguments
    :param cfg: (Config) The global configuration
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :return: (list) The list of (product name, product_informations).
    """
    if options.products is None:
        # no --products option: compile every product of the application
        wanted = cfg.APPLICATION.products
    else:
        # with --products, each requested product must belong
        # to the application
        wanted = options.products
        for candidate in wanted:
            if candidate not in cfg.APPLICATION.products:
                raise Exception(
                    _("Product %(product)s not defined in application %(application)s") %
                    { 'product': candidate, 'application': cfg.VARS.application} )

    # build the (name, definition) tuples; fixed products are never compiled
    infos = src.product.get_products_infos(wanted, cfg)
    return [(name, info) for name, info in infos
            if not src.product.product_is_fixed(info)]
+ +
def get_children(config, p_name_p_info):
    """Return the names of the products that directly depend
    on the given product."""
    parent_name, __ = p_name_p_info
    # scan every product of the application and keep those whose
    # 'depend' list mentions the parent
    all_infos = src.product.get_products_infos(
        config.APPLICATION.products, config)
    return [name for name, info in all_infos
            if "depend" in info and parent_name in info.depend]
+ +
def get_recursive_children(config, p_name_p_info, without_native_fixed=False):
    """
    Get the recursive list of the products that depend on
    the product defined by prod_info

    :param config: (Config) The global configuration
    :param p_name_p_info: (tuple) (product_name, product_info)
    :param without_native_fixed: (bool)
      If true, do not include the fixed or native products in the result
    :return: (list) The list of product_informations.
    """
    p_name, __ = p_name_p_info
    # Initialization of the resulting list
    l_children = []

    # Get the direct children (not recursive)
    l_direct_children = get_children(config, p_name_p_info)
    # Minimal case : no child
    if l_direct_children == []:
        return []
    # Add the children and call the function to get the children of the
    # children
    for child_name in l_direct_children:
        l_children_name = [pn_pi[0] for pn_pi in l_children]
        if child_name not in l_children_name:
            if child_name not in config.APPLICATION.products:
                # FIX: p_name is a plain string; the original read
                # 'p_name.name' which raised AttributeError.  Also translate
                # first and format afterwards, as the sibling
                # get_recursive_fathers does.
                msg = _("""\
The product %(child_name)s that is in %(product_name)s children
is not present in application %(appli_name)s.""") % \
                    {"child_name": child_name,
                     "product_name": p_name,
                     "appli_name": config.VARS.application}
                raise Exception(msg)
            prod_info_child = src.product.get_product_config(config,
                                                             child_name)
            pname_pinfo_child = (prod_info_child.name, prod_info_child)
            # Do not append the child if it is native or fixed and
            # the corresponding parameter is called
            if without_native_fixed:
                if not(src.product.product_is_native(prod_info_child) or
                       src.product.product_is_fixed(prod_info_child)):
                    l_children.append(pname_pinfo_child)
            else:
                l_children.append(pname_pinfo_child)
            # Get the children of the children
            l_grand_children = get_recursive_children(
                config,
                pname_pinfo_child,
                without_native_fixed = without_native_fixed)
            l_children += l_grand_children
    return l_children
+ +
def get_recursive_fathers(config, p_name_p_info, without_native_fixed=False):
    """
    Get the recursive list of the dependencies of the product defined
    by prod_info

    :param config: (Config) The global configuration
    :param p_name_p_info: (tuple) (product_name, product_info)
    :param without_native_fixed: (bool)
      If true, do not include the fixed or native products in the result
    :return: (list) The list of product_informations.
    """
    p_name, p_info = p_name_p_info
    # Initialization of the resulting list
    l_fathers = []
    # Minimal case : no dependencies
    if "depend" not in p_info or p_info.depend == []:
        return []
    # Add the dependencies and call the function to get the dependencies of the
    # dependencies
    for father_name in p_info.depend:
        # avoid duplicates: only treat a dependency not collected yet
        l_fathers_name = [pn_pi[0] for pn_pi in l_fathers]
        if father_name not in l_fathers_name:
            # every dependency must be part of the application
            if father_name not in config.APPLICATION.products:
                msg = _("The product %(father_name)s that is in %(product_nam"
                        "e)s dependencies is not present in application "
                        "%(appli_name)s" % {"father_name" : father_name,
                                            "product_name" : p_name,
                                            "appli_name" : config.VARS.application})
                raise Exception(msg)
            prod_info_father = src.product.get_product_config(config,
                                                              father_name)
            pname_pinfo_father = (prod_info_father.name, prod_info_father)
            # Do not append the father if it is native or fixed and
            # the corresponding parameter is called
            if without_native_fixed:
                if not(src.product.product_is_native(prod_info_father) or
                       src.product.product_is_fixed(prod_info_father)):
                    l_fathers.append(pname_pinfo_father)
            else:
                l_fathers.append(pname_pinfo_father)
            # Get the dependencies of the dependency (recursion),
            # appending only items not already collected
            l_grand_fathers = get_recursive_fathers(config,
                                                    pname_pinfo_father,
                                                    without_native_fixed = without_native_fixed)
            for item in l_grand_fathers:
                if item not in l_fathers:
                    l_fathers.append(item)
    return l_fathers
+ +
def sort_products(config, p_infos):
    """Sort the p_infos regarding the dependencies between the products

    :param config: (Config) The global configuration
    :param p_infos: (list)
      List of (str, Config) => (product_name, product_info)
    :return: (list) the sorted list (dependencies before dependents)
    """
    l_prod_sorted = src.deepcopy_list(p_infos)
    for prod in p_infos:
        l_fathers = get_recursive_fathers(config,
                                          prod,
                                          without_native_fixed=True)
        # only dependencies that belong to the treated list matter here
        l_fathers = [father for father in l_fathers if father in p_infos]
        if l_fathers == []:
            continue
        # walk the current ordering; once every dependency of 'prod' has
        # been seen, move 'prod' right after the last of them
        # (note: l_prod_sorted is mutated while being iterated, but the
        # 'break' leaves the loop immediately after the mutation)
        for p_sorted in l_prod_sorted:
            if p_sorted in l_fathers:
                l_fathers.remove(p_sorted)
                if l_fathers==[]:
                    l_prod_sorted.remove(prod)
                    l_prod_sorted.insert(l_prod_sorted.index(p_sorted)+1, prod)
                    break

    return l_prod_sorted
+ +
def extend_with_fathers(config, p_infos):
    """Return p_infos extended with every recursive dependency of its
    products (native and fixed products excluded)."""
    extended = src.deepcopy_list(p_infos)
    for entry in p_infos:
        # append each not-yet-present dependency, preserving order
        for father in get_recursive_fathers(config, entry,
                                            without_native_fixed=True):
            if father not in extended:
                extended.append(father)
    return extended
+ +
def extend_with_children(config, p_infos):
    """Return p_infos extended with every product that recursively uses
    one of its products (native and fixed products excluded)."""
    extended = src.deepcopy_list(p_infos)
    for entry in p_infos:
        # append each not-yet-present dependent product, preserving order
        for child in get_recursive_children(config, entry,
                                            without_native_fixed=True):
            if child not in extended:
                extended.append(child)
    return extended
+ +
def check_dependencies(config, p_name_p_info):
    """Return the names of the recursive dependencies of the given product
    that are not installed yet."""
    fathers = get_recursive_fathers(config, p_name_p_info,
                                    without_native_fixed=True)
    return [name for name, info in fathers
            if not src.product.check_installation(info)]
+ +
[docs]def log_step(logger, header, step): + logger.info("\r%s%s" % (header, " " * 30)) + logger.info("\r%s%s" % (header, step)) + logger.debug("\n==== %s \n" % step)
+ +
[docs]def log_res_step(logger, res): + if res == 0: + logger.debug("<OK>\n") + else: + logger.debug("<KO>\n")
+ + +
[docs]def compile_all_products(sat, config, options, products_infos, logger): + """ + Execute the proper configuration commands + in each product build directory. + + :param config: (Config) The global configuration + :param products_info: (list) + List of (str, Config) => (product_name, product_info) + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) the number of failing commands. + """ + res = 0 + for p_name_info in products_infos: + + p_name, p_info = p_name_info + + # Logging + len_end_line = 30 + header = _("Compilation of %s") % UTS.label(p_name) + header += " %s \n" % ("." * (len_end_line - len(p_name))) + logger.info(header) + + # Do nothing if the product is not compilable + if ("properties" in p_info and \ + "compilation" in p_info.properties and \ + p_info.properties.compilation == "no"): + + log_step(logger, header, "ignored") + logger.info("\n") + continue + + # Do nothing if the product is native + if src.product.product_is_native(p_info): + log_step(logger, header, "native") + logger.info("\n") + continue + + # Clean the build and the install directories + # if the corresponding options was called + if options.clean_all: + log_step(logger, header, "CLEAN BUILD AND INSTALL") + sat.clean(config.VARS.application + + " --products " + p_name + + " --build --install", + batch=True, + verbose=0, + logger_add_link = logger) + + # Clean the the install directory + # if the corresponding option was called + if options.clean_install and not options.clean_all: + log_step(logger, header, "CLEAN INSTALL") + sat.clean(config.VARS.application + + " --products " + p_name + + " --install", + batch=True, + verbose=0, + logger_add_link = logger) + + # Recompute the product information to get the right install_dir + # (it could change if there is a clean of the install directory) + p_info = src.product.get_product_config(config, p_name) + + # Check if it was already successfully installed + if 
src.product.check_installation(p_info): + logger.info(_("Already installed\n")) + continue + + # If the show option was called, do not launch the compilation + if options.no_compile: + logger.info(_("Not installed\n")) + continue + + # Check if the dependencies are installed + l_depends_not_installed = check_dependencies(config, p_name_info) + if len(l_depends_not_installed) > 0: + log_step(logger, header, "") + msg = _("the following products are mandatory:\n") + for prod_name in l_depends_not_installed: + msg += "%s\n" % prod_name + logger.error(msg) + continue + + # Call the function to compile the product + res_prod, len_end_line, error_step = compile_product(sat, + p_name_info, + config, + options, + logger, + header, + len_end_line) + + if res_prod != 0: + res += 1 + + if error_step != "CHECK": + # Clean the install directory if there is any + logger.debug(_("Cleaning the install directory if there is any\n")) + sat.clean(config.VARS.application + + " --products " + p_name + + " --install", + batch=True, + verbose=0, + logger_add_link = logger) + else: + # Clean the build directory if the compilation and tests succeed + if options.clean_build_after: + log_step(logger, header, "CLEAN BUILD") + sat.clean(config.VARS.application + + " --products " + p_name + + " --build", + batch=True, + verbose=0, + logger_add_link = logger) + + # Log the result + if res_prod > 0: + logger.info("\r%s%s" % (header, " " * len_end_line)) + logger.info("\r" + header + "<KO> " + error_step) + logger.debug("\n==== <KO> in compile of %s\n" % p_name) + if error_step == "CHECK": + logger.info(_("\nINSTALL directory = %s") % p_info.install_dir) + else: + logger.info("\r%s%s" % (header, " " * len_end_line)) + logger.info("\r" + header + "<OK>") + logger.info(_("\nINSTALL directory = %s") % p_info.install_dir) + logger.debug("\n==== <OK> in compile of %s\n" % p_name) + logger.info("\n") + + + if res_prod != 0 and options.stop_first_fail: + break + + return res
+ +
[docs]def compile_product(sat, p_name_info, config, options, logger, header, len_end): + """ + Execute the proper configuration command(s) + in the product build directory. + + :param p_name_info: (tuple) (str, Config) => (product_name, product_info) + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :param header: (str) the header to display when logging + :param len_end: (int) the lenght of the the end of line (used in display) + :return: (int) 1 if it fails, else 0. + """ + + p_name, p_info = p_name_info + + # Get the build procedure from the product configuration. + # It can be : + # build_sources : autotools -> build_configure, configure, make, make install + # build_sources : cmake -> cmake, make, make install + # build_sources : script -> script executions + res = 0 + if (src.product.product_is_autotools(p_info) or + src.product.product_is_cmake(p_info)): + res, len_end_line, error_step = compile_product_cmake_autotools(sat, + p_name_info, + config, + options, + logger, + header, + len_end) + if src.product.product_has_script(p_info): + res, len_end_line, error_step = compile_product_script(sat, + p_name_info, + config, + options, + logger, + header, + len_end) + + # Check that the install directory exists + if res==0 and not(os.path.exists(p_info.install_dir)): + res = 1 + error_step = "NO INSTALL DIR" + msg = _("despite all the steps ended successfully, no install directory was found\n") + logger.error(msg) + return res, len_end, error_step + + # Add the config file corresponding to the dependencies/versions of the + # product that have been successfully compiled + if res==0: + logger.debug(_("Add the config file in installation directory\n")) + add_compile_config_file(p_info, config) + + if options.check: + # Do the unit tests (call the check command) + log_step(logger, header, "CHECK") + res_check = sat.check( + config.VARS.application + " --products " + p_name, + 
verbose = 0, + logger_add_link = logger) + if res_check != 0: + error_step = "CHECK" + + res += res_check + + return res, len_end_line, error_step
+ +
[docs]def compile_product_cmake_autotools(sat, + p_name_info, + config, + options, + logger, + header, + len_end): + """ + Execute the proper build procedure for autotools or cmake + in the product build directory. + + :param p_name_info: (tuple) + (str, Config) => (product_name, product_info) + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :param header: (str) the header to display when logging + :param len_end: (int) the length of the the end of line (used in display) + :return: (int) 1 if it fails, else 0. + """ + p_name, p_info = p_name_info + + # Execute "sat configure", "sat make" and "sat install" + res = 0 + error_step = "" + + # Logging and sat command call for configure step + len_end_line = len_end + log_step(logger, header, "CONFIGURE") + res_c = sat.configure(config.VARS.application + " --products " + p_name, + verbose = 0, + logger_add_link = logger) + log_res_step(logger, res_c) + res += res_c + + if res_c > 0: + error_step = "CONFIGURE" + else: + # Logging and sat command call for make step + # Logging take account of the fact that the product has a compilation + # script or not + if src.product.product_has_script(p_info): + # if the product has a compilation script, + # it is executed during make step + scrit_path_display = UTS.label( + p_info.compil_script) + log_step(logger, header, "SCRIPT " + scrit_path_display) + len_end_line = len(scrit_path_display) + else: + log_step(logger, header, "MAKE") + make_arguments = config.VARS.application + " --products " + p_name + # Get the make_flags option if there is any + if options.makeflags: + make_arguments += " --option -j" + options.makeflags + res_m = sat.make(make_arguments, + verbose = 0, + logger_add_link = logger) + log_res_step(logger, res_m) + res += res_m + + if res_m > 0: + error_step = "MAKE" + else: + # Logging and sat command call for make install step + log_step(logger, header, "MAKE INSTALL") + res_mi 
= sat.makeinstall(config.VARS.application + + " --products " + + p_name, + verbose = 0, + logger_add_link = logger) + + log_res_step(logger, res_mi) + res += res_mi + + if res_mi > 0: + error_step = "MAKE INSTALL" + + return res, len_end_line, error_step
+ +
[docs]def compile_product_script(sat, + p_name_info, + config, + options, + logger, + header, + len_end): + """Execute the script build procedure in the product build directory. + + :param p_name_info: (tuple) + (str, Config) => (product_name, product_info) + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :param header: (str) the header to display when logging + :param len_end: (int) the lenght of the the end of line (used in display) + :return: (int) 1 if it fails, else 0. + """ + p_name, p_info = p_name_info + + # Execute "sat configure", "sat make" and "sat install" + error_step = "" + + # Logging and sat command call for the script step + scrit_path_display = UTS.label(p_info.compil_script) + log_step(logger, header, "SCRIPT " + scrit_path_display) + len_end_line = len_end + len(scrit_path_display) + res = sat.script(config.VARS.application + " --products " + p_name, + verbose = 0, + logger_add_link = logger) + log_res_step(logger, res) + + return res, len_end_line, error_step
+ +
[docs]def add_compile_config_file(p_info, config): + """ + Execute the proper configuration command(s) + in the product build directory. + + :param p_info: (Config) The specific config of the product + :param config: (Config) The global configuration + """ + # Create the compile config + compile_cfg = PYCONF.Config() + for prod_name in p_info.depend: + if prod_name not in compile_cfg: + compile_cfg.addMapping(prod_name, + PYCONF.Mapping(compile_cfg), + "") + prod_dep_info = src.product.get_product_config(config, prod_name, False) + compile_cfg[prod_name] = prod_dep_info.version + # Write it in the install directory of the product + compile_cfg_path = os.path.join(p_info.install_dir, src.CONFIG_FILENAME) + f = open(compile_cfg_path, 'w') + compile_cfg.__save__(f) + f.close()
+ +
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/config.html b/doc/build/html/_modules/commands/config.html new file mode 100644 index 0000000..d8d7e15 --- /dev/null +++ b/doc/build/html/_modules/commands/config.html @@ -0,0 +1,324 @@ + + + + + + + + commands.config — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.config

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import os
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+import src.configManager as CFGMGR
+import src.system as SYSS
+
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """ + The config command allows manipulation and operation on config '.pyconf' files. + + | examples: + | >> sat config --list + | >> sat config SALOME --edit + | >> sat config SALOME --copy SALOME-new + | >> sat config SALOME --value VARS + | >> sat config SALOME --debug VARS + | >> sat config SALOME --info ParaView + | >> sat config SALOME --show_patchs + """ + + name = "config" + +
[docs] def getParser(self): + """Define all options for command 'sat config <options>'""" + parser = self.getParserWithHelp() + parser.add_option('v', 'value', 'string', 'value', + _("Optional: print the value of CONFIG_VARIABLE.")) + parser.add_option('d', 'debug', 'string', 'debug', + _("Optional: print the debugging value of CONFIG_VARIABLE.")) + parser.add_option('e', 'edit', 'boolean', 'edit', + _("Optional: edit the product configuration file.")) + parser.add_option('i', 'info', 'string', 'info', + _("Optional: get information on a product.")) + parser.add_option('l', 'list', 'boolean', 'list', + _("Optional: list all available applications.")) + parser.add_option('p', 'show_patchs', 'boolean', 'show_patchs', + _("Optional: synthetic view of all patches used in the application")) + parser.add_option('c', 'copy', 'boolean', 'copy', + _("""\ +Optional: copy a config file (.pyconf) to the personal config files directory. +Warning: the included files are not copied. +If a name is given the new config file takes the given name.""")) + parser.add_option('n', 'no_label', 'boolean', 'no_label', + _("Internal use: do not print labels, Works only with --value and --list.")) + parser.add_option('o', 'completion', 'boolean', 'completion', + _("Internal use: print only keys, works only with --value.")) + parser.add_option('s', 'schema', 'boolean', 'schema', + _("Internal use.")) + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat config <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + if config is None: + return RCO.ReturnCode("KO", "config is None") + + # Only useful for completion mechanism : print the keys of the config + if options.schema: + get_config_children(config, args) + return RCO.ReturnCode("OK", "completion mechanism") + + # case : print a value of the config + if options.value: + if options.value == ".": + # if argument is ".", print all the config + for val in sorted(config.keys()): + CFGMGR.print_value(config, val, logger, not options.no_label) + else: + CFGMGR.print_value(config, options.value, logger, not options.no_label, + level=0, show_full_path=False) + + if options.debug: + CFGMGR.print_debug(config, str(options.debug), logger, not options.no_label, + level=0, show_full_path=False) + + # case : edit user pyconf file or application file + elif options.edit: + editor = config.USER.editor + if ('APPLICATION' not in config and + 'open_application' not in config): # edit user pyconf + usercfg = os.path.join(config.VARS.personalDir, 'SAT.pyconf') + logger.info(_("Opening %s\n") % usercfg) + SYSS.show_in_editor(editor, usercfg, logger) + else: + # search for file <application>.pyconf and open it + for path in config.PATHS.APPLICATIONPATH: + pyconf_path = os.path.join(path, config.VARS.application + ".pyconf") + if os.path.exists(pyconf_path): + logger.info(_("Opening %s\n") % pyconf_path) + SYSS.show_in_editor(editor, pyconf_path, 
logger) + break + + # case : give information about the product in parameter + elif options.info: + src.check_config_has_application(config) + if options.info in config.APPLICATION.products: + show_product_info(config, options.info, logger) + return RCO.ReturnCode("OK", "options.info") + raise Exception( + _("%(product_name)s is not a product of %(application_name)s.") % \ + {'product_name' : options.info, 'application_name' : config.VARS.application} ) + + # case : copy an existing <application>.pyconf + # to ~/.salomeTools/Applications/LOCAL_<application>.pyconf + elif options.copy: + # product is required + src.check_config_has_application( config ) + + # get application file path + source = config.VARS.application + '.pyconf' + source_full_path = "" + for path in config.PATHS.APPLICATIONPATH: + # ignore personal directory + if path == config.VARS.personalDir: + continue + # loop on all directories that can have pyconf applications + zz = os.path.join(path, source) + if os.path.exists(zz): + source_full_path = zz + break + + if len(source_full_path) == 0: + raise Exception( + _("Config file for product %s not found\n") % source ) + else: + if len(args) > 0: + # a name is given as parameter, use it + dest = args[0] + elif 'copy_prefix' in config.INTERNAL.config: + # use prefix + dest = (config.INTERNAL.config.copy_prefix + + config.VARS.application) + else: + # use same name as source + dest = config.VARS.application + + # the full path + dest_file = os.path.join( + config.VARS.personalDir, 'Applications', dest + '.pyconf' ) + if os.path.exists(dest_file): + raise Exception( + _("A personal application '%s' already exists") % dest ) + + # perform the copy + shutil.copyfile(source_full_path, dest_file) + logger.info(_("%s has been created.\n") % dest_file) + + # case : display all the available pyconf applications + elif options.list: + lproduct = list() + # search in all directories that can have pyconf applications + for path in config.PATHS.APPLICATIONPATH: + # 
print a header + if not options.no_label: + logger.info("<header>------ %s<reset>" % path) + msg = "" # only one multiline info + if not os.path.exists(path): + msg += ("<red>" + _("Directory not found") + "<reset>\n" ) + else: + for f in sorted(os.listdir(path)): + # ignore file that does not ends with .pyconf + if not f.endswith('.pyconf'): + continue + + appliname = f[:-len('.pyconf')] + if appliname not in lproduct: + lproduct.append(appliname) + if path.startswith(config.VARS.personalDir) \ + and not options.no_label: + msg += "%s*\n" % appliname + else: + msg += "%s\n" % appliname + + logger.info(msg) + + # case : give a synthetic view of all patches used in the application + elif options.show_patchs: + src.check_config_has_application(config) + # Print some informations + logger.info(_('Show the patchs of application %s\n') % \ + UTS.label(config.VARS.application)) + show_patchs(config, logger) + + # case: print all the products name of the application (internal use for completion) + elif options.completion: + for product_name in config.APPLICATION.products.keys(): + logger.info("%s\n" % product_name) + + return RCO.ReturnCode("OK", "config command done")
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/configure.html b/doc/build/html/_modules/commands/configure.html new file mode 100644 index 0000000..4d3ddeb --- /dev/null +++ b/doc/build/html/_modules/commands/configure.html @@ -0,0 +1,337 @@ + + + + + + + + commands.configure — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.configure

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """ + The configure command executes in the build directory commands + corresponding to the compilation mode of the application products. + The possible compilation modes are 'cmake', 'autotools', or 'script'. + + Here are the commands to be run: + autotools: >> build_configure and configure + cmake: >> cmake + script: (do nothing) + + examples: + >> sat configure SALOME --products KERNEL,GUI,PARAVIS + """ + + name = "configure" + +
[docs] def getParser(self): + """Define all options for command 'sat configure <options>'""" + parser = self.getParserWithHelp() + parser.add_option('p', 'products', 'list2', 'products', + _('Optional: products to configure. This option can be' + ' passed several time to configure several products.')) + parser.add_option('o', 'option', 'string', 'option', + _('Optional: Option to add to the configure or cmake command.'), "") + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat configure <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + + # check that the command has been called with an application + src.check_config_has_application( config ) + + # Get the list of products to treat + products_infos = get_products_list(options, config, logger) + + # Print some informations + logger.info(_('Configuring the sources of the application %s\n') % + UTS.label(config.VARS.application)) + + info = [(_("BUILD directory"), + os.path.join(config.APPLICATION.workdir, 'BUILD'))] + UTS.logger_info_tuples(logger, info) + + # Call the function that will loop over all the products and execute + # the right command(s) + if options.option is None: + options.option = "" + res = configure_all_products(config, products_infos, options.option, logger) + + # Print the final state + nb_products = len(products_infos) + if res == 0: + final_status = "<OK>" + else: + final_status = "<KO>" + + logger.info(_("\nConfiguration: %(status)s (%(1)d/%(2)d)\n") % \ + { 'status': final_status, + '1': nb_products - res, + '2': nb_products }, 1) + + return res
+ + +
[docs]def get_products_list(options, cfg, logger): + """ + method that gives the product list with their informations from + configuration regarding the passed options. + + :param options: (Options) + The Options instance that stores the commands arguments + :param cfg: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (list) The list of (product name, product_informations). + """ + # Get the products to be prepared, regarding the options + if options.products is None: + # No options, get all products sources + products = cfg.APPLICATION.products + else: + # if option --products, check that all products of the command line + # are present in the application. + products = options.products + for p in products: + if p not in cfg.APPLICATION.products: + raise Exception( + _("Product %(product)s not defined in application %(application)s") % + {'product': p, 'application': cfg.VARS.application} ) + + # Construct the list of tuple containing + # the products name and their definition + products_infos = src.product.get_products_infos(products, cfg) + + products_infos = [pi for pi in products_infos if not(src.product.product_is_native(pi[1]) or src.product.product_is_fixed(pi[1]))] + + return products_infos
+ +
[docs]def log_step(logger, header, step): + logger.info("\r%s%s" % (header, " " * 20)) + logger.info("\r%s%s" % (header, step)) + logger.debug("\n==== %s \n" % UTS.info(step)) + logger.flush()
+ +
[docs]def log_res_step(logger, res): + if res == 0: + logger.debug("<OK>") + else: + logger.debug("<KO>")
+ +
[docs]def configure_all_products(config, products_infos, conf_option, logger): + """ + Execute the proper configuration commands + in each product build directory. + + :param config: (Config) The global configuration + :param products_info: (list) + List of (str, Config) => (product_name, product_info) + :param conf_option: (str) The options to add to the command + :param logger: (Logger) The logger instance to use for the display and logging + :return: (int) the number of failing commands. + """ + res = 0 + for p_name_info in products_infos: + res_prod = configure_product(p_name_info, conf_option, config, logger) + if res_prod != 0: + res += 1 + return res
+ +
[docs]def configure_product(p_name_info, conf_option, config, logger): + """ + Execute the proper configuration command(s) + in the product build directory. + + :param p_name_info: (tuple) + (str, Config) => (product_name, product_info) + :param conf_option: (str) The options to add to the command + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) 1 if it fails, else 0. + """ + + p_name, p_info = p_name_info + + # Logging + header = _("Configuration of %s") % UTS.label(p_name) + header += " %s " % ("." * (20 - len(p_name))) + logger.info(header) + + # Do nothing if he product is not compilable + if ("properties" in p_info and \ + "compilation" in p_info.properties and \ + p_info.properties.compilation == "no"): + + log_step(logger, header, "ignored") + logger.info("\n") + return 0 + + # Instantiate the class that manages all the construction commands + # like cmake, make, make install, make test, environment management, etc... 
+ builder = src.compilation.Builder(config, logger, p_info) + + # Prepare the environment + log_step(logger, header, "PREPARE ENV") + res_prepare = builder.prepare() + log_res_step(logger, res_prepare) + + # Execute buildconfigure, configure if the product is autotools + # Execute cmake if the product is cmake + res = 0 + if src.product.product_is_autotools(p_info): + log_step(logger, header, "BUILDCONFIGURE") + res_bc = builder.build_configure() + log_res_step(logger, res_bc) + res += res_bc + log_step(logger, header, "CONFIGURE") + res_c = builder.configure(conf_option) + log_res_step(logger, res_c) + res += res_c + if src.product.product_is_cmake(p_info): + log_step(logger, header, "CMAKE") + res_cm = builder.cmake(conf_option) + log_res_step(logger, res_cm) + res += res_cm + + # Log the result + if res > 0: + logger.info("\r%s%s" % (header, " " * 20)) + logger.info("\r" + header + "<KO>") + logger.debug("==== <KO> in configuration of %s\n" % p_name) + else: + logger.info("\r%s%s" % (header, " " * 20)) + logger.info("\r" + header + "<OK>") + logger.debug("==== <OK> in configuration of %s\n" % p_name) + logger.info("\n") + + return res
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/environ.html b/doc/build/html/_modules/commands/environ.html new file mode 100644 index 0000000..ea14d16 --- /dev/null +++ b/doc/build/html/_modules/commands/environ.html @@ -0,0 +1,268 @@ + + + + + + + + commands.environ — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.environ

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+
+# list of available shells with extensions
+C_SHELLS = { "bash": "sh", "bat": "bat", "cfg" : "cfg" }
+C_ALL_SHELL = [ "bash", "bat", "cfg" ]
+
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """ + The environ command generates the environment files of your application. + + examples: + >> sat environ SALOME + """ + + name = "environ" + +
[docs] def getParser(self): + """Define all options for command 'sat environ <options>'""" + parser = self.getParserWithHelp() + parser.add_option('', 'shell', 'list2', 'shell', + _("Optional: Generates the environment files for the given format: " + "bash (default), bat (for windows), cfg (salome context file) or all."), []) + parser.add_option('p', 'products', 'list2', 'products', + _("Optional: Includes only the specified products.")) + parser.add_option('', 'prefix', 'string', 'prefix', + _("Optional: Specifies the prefix for the environment files."), "env") + parser.add_option('t', 'target', 'string', 'out_dir', + _("Optional: Specifies the directory path where to put the environment files."), + None) + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat environ <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + # check that the command was called with an application + src.check_config_has_application( config ) + + if options.products is None: + environ_info = None + else: + # add products specified by user (only products + # included in the application) + environ_info = filter(lambda l: + l in config.APPLICATION.products.keys(), + options.products) + + if options.shell == []: + shell = ["bash"] + if src.architecture.is_windows(): + shell = ["bat"] + else: + shell = options.shell + + out_dir = options.out_dir + if out_dir: + out_dir = os.path.abspath(out_dir) + + write_all_source_files(config, logger, out_dir=out_dir, shells=shell, + prefix=options.prefix, env_info=environ_info) + logger.info("\n")
+ #TODO return code + +
[docs]def write_all_source_files(config, + logger, + out_dir=None, + src_root=None, + silent=False, + shells=["bash"], + prefix="env", + env_info=None): + """Generates the environment files. + + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :param out_dir: (str) + The path to the directory where the files will be put + :param src_root: (str) + The path to the directory where the sources are + :param silent: (bool) + If True, do not print anything in the terminal + :param shells: (list) The list of shells to generate + :param prefix: (str) The prefix to add to the file names. + :param env_info: (str) The list of products to add in the files. + :return: (list) The list of the generated files. + """ + + if not out_dir: + out_dir = config.APPLICATION.workdir + + if not os.path.exists(out_dir): + raise Exception(_("Target directory not found: %s") % out_dir) + + if not silent: + logger.info(_("Creating environment files for %s\n") % \ + UTS.header(config.APPLICATION.name)) + logger.info(" %s = %s\n\n" % (_("Target"), out_dir)) + + shells_list = [] + all_shells = C_ALL_SHELL + if "all" in shells: + shells = all_shells + else: + shells = filter(lambda l: l in all_shells, shells) + + for shell in shells: + if shell not in C_SHELLS: + logger.warning(_("Unknown shell: %s\n") % shell) + else: + shells_list.append(src.environment.Shell(shell, C_SHELLS[shell])) + + writer = src.environment.FileEnvWriter(config, + logger, + out_dir, + src_root, + env_info) + writer.silent = silent + files = [] + for_build = True + for_launch = False + for shell in shells_list: + files.append(writer.write_env_file("%s_launch.%s" % + (prefix, shell.extension), + for_launch, + shell.name)) + files.append(writer.write_env_file("%s_build.%s" % + (prefix, shell.extension), + for_build, + shell.name)) + + return files
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/find_duplicates.html b/doc/build/html/_modules/commands/find_duplicates.html new file mode 100644 index 0000000..afb83f6 --- /dev/null +++ b/doc/build/html/_modules/commands/find_duplicates.html @@ -0,0 +1,391 @@ + + + + + + + + commands.find_duplicates — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.find_duplicates

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+
+
+default_extension_ignored = \
+    'html png txt js xml cmake gif m4 in pyo pyc doctree css'.split()
+
+default_files_ignored = \
+    '__init__.py Makefile.am VERSION build_configure README AUTHORS NEWS COPYING ChangeLog'.split()
+
+default_directories_ignored = []
+
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """\ + The find_duplicates command search recursively for all duplicates files + in INSTALL directory (or the optionally given directory) and + prints the found files to the terminal. + + examples: + >> sat find_duplicates --path /tmp + """ + + name = "find_duplicates" + +
[docs] def getParser(self): + """Define all options for command 'sat find_duplicates <options>'""" + parser = self.getParserWithHelp() + parser.add_option( + "s", + "sources", + "boolean", + "sources", + _("Search the duplicate files in the SOURCES directory.") ) + parser.add_option( + "p", + "path", + "list2", + "path", + _("Optional: Search the duplicate files in the given directory paths.") ) + parser.add_option( + "", + "exclude-file", + "list2", + "exclude_file", + _("Optional: Override the default list of filtered files.") ) + parser.add_option( + "", + "exclude-extension", + "list2", + "exclude_extension", + _("Optional: Override the default list of filtered extensions.") ) + parser.add_option( + "", + "exclude-path", + "list2", + "exclude_path", + _("Optional: Override the default list of filtered paths.") ) + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat find_duplicates <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + # Determine the directory path where to search + # for duplicates files regarding the options + if options.path: + l_dir_path = options.path + else: + src.check_config_has_application(config) + if options.sources: + l_dir_path = [os.path.join(config.APPLICATION.workdir, "SOURCES")] + else: + # find all installation paths + all_products = config.APPLICATION.products.keys() + l_product_cfg = src.product.get_products_infos(all_products, config) + l_dir_path = [pi.install_dir for __, pi in l_product_cfg] + + # Get the files to ignore during the searching + files_ignored = default_files_ignored + if options.exclude_file: + files_ignored = options.exclude_file + + # Get the extension to ignore during the searching + extension_ignored = default_extension_ignored + if options.exclude_extension: + extension_ignored = options.exclude_extension + + # Get the directory paths to ignore during the searching + directories_ignored = default_directories_ignored + if options.exclude_path: + directories_ignored = options.exclude_path + + # Check the directories + l_path = src.deepcopy_list(l_dir_path) + l_dir_path = [] + for dir_path in l_path: + if not(os.path.isdir(dir_path)): + msg = _("%s does not exists or is not a directory path: it will be ignored" % + dir_path) + logger.warning("%s\n" % msg) + continue + l_dir_path.append(dir_path) + + + # Display some information + 
info = [(_("Directories"), "\n".join(l_dir_path)), + (_("Ignored files"), files_ignored), + (_("Ignored extensions"), extension_ignored), + (_("Ignored directories"), directories_ignored) + ] + UTS.logger_info_tuples(logger, info) + + # Get all the files and paths + logger.info(_("Store all file paths ... "), 3) + dic, fic = list_directory(l_dir_path, + extension_ignored, + files_ignored, + directories_ignored) + logger.info("<OK>\n") + + # Eliminate all the singletons + len_fic = len(fic) + range_fic = range(0,len_fic) + range_fic.reverse() + my_bar = Progress_bar(_('Eliminate the files that are not duplicated'), + 0, + len_fic, + logger, + length = 50) + for i in range_fic: + my_bar.display_value_progression(len_fic - i) + if fic.count(fic[i])==1: + fic.remove(fic[i]) + dic.remove(dic[i]) + + # Format the resulting variable to get a dictionary + logger.info(_("\n\nCompute the dict for file -> list of paths ... ")) + fic.sort() + len_fic = len(fic) + rg_fic = range(0,len_fic) + rg_fic.reverse() + for i in rg_fic: + if fic[i-1] != fic[i]: + fic.remove(fic[i]) + + dic_fic_paths = {} + for fichier in fic: + the_file = fichier[0] + l_path = [] + for fic_path in dic: + if fic_path[0] == the_file: + l_path.append(fic_path[1]) + dic_fic_paths[the_file] = l_path + + logger.info("<OK>\n") + + # End the execution if no duplicates were found + if len(dic_fic_paths) == 0: + logger.info(_("No duplicate files found.\n")) + return 0 + + # Check that there are no singletons in the result (it would be a bug) + for elem in dic_fic_paths: + if len(dic_fic_paths[elem])<2: + logger.warning(_("Element %s has not more than two paths.\n") % elem) + + + # Display the results + logger.info(_('\nResults:\n\n')) + max_file_name_lenght = max(map(lambda l: len(l), dic_fic_paths.keys())) + for fich in dic_fic_paths: + sp = " " * (max_file_name_lenght - len(fich)) + msg = UTS.label(fich) + sp + for rep in dic_fic_paths[fich]: + msg += rep + " " + logger.info(msg + "\n") + + return 0
+ + +
[docs]def list_directory(lpath, extension_ignored, files_ignored, directories_ignored): + """Make the list of all files and paths that are not filtered + + :param lpath: (list) + The list of path to of the directories where to search for duplicates + :param extension_ignored: (list) The list of extensions to ignore + :param files_ignored: (list) The list of files to ignore + :param directories_ignored: (list) + The list of directory paths to ignore + :return: (list, list) + files_arb_out is the list of [file, path] + and files_out is is the list of files + """ + files_out = [] + files_arb_out=[] + for path in lpath: + for root, __, files in os.walk(path): + for fic in files: + extension = fic.split('.')[-1] + if (extension not in extension_ignored and + fic not in files_ignored): + in_ignored_dir = False + for rep in directories_ignored: + if rep in root: + in_ignored_dir = True + if not in_ignored_dir: + files_out.append([fic]) + files_arb_out.append([fic, root]) + return files_arb_out, files_out
+ +
[docs]def format_list_of_str(l_str): + """Make a list from a string + + :param l_str: (list or str) The variable to format + :return: (list) the formatted variable + """ + if not isinstance(l_str, list): + return l_str + return ",".join(l_str)
+ +
[docs]class Progress_bar: + """ + Create a progress bar in the terminal + """ + def __init__(self, name, valMin, valMax, logger, length = 50): + """Initialization of the progress bar. + + :param name: (str) The name of the progress bar + :param valMin: (float) the minimum value of the variable + :param valMax: (float) the maximum value of the variable + :param logger: (Logger) the logger instance + :param length: (int) the lenght of the progress bar + """ + self.name = name + self.valMin = valMin + self.valMax = valMax + self.length = length + self.logger = logger + if (self.valMax - self.valMin) <= 0 or length <= 0: + out_err = _('ERROR: Wrong init values for the progress bar\n') + raise Exception(out_err) + +
[docs] def display_value_progression(self,val): + """Display the progress bar. + + :param val: (float) val must be between valMin and valMax. + """ + if val < self.valMin or val > self.valMax: + self.logger.error(_("Wrong value for the progress bar.\n")) + else: + perc = (float(val-self.valMin) / (self.valMax - self.valMin)) * 100. + nb_equals = int(perc * self.length / 100) + out = '\r %s : %3d %% [%s%s]' % (self.name, perc, nb_equals*'=', + (self.length - nb_equals)*' ' ) + self.logger.info(out)
+ +
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/generate.html b/doc/build/html/_modules/commands/generate.html new file mode 100644 index 0000000..f110cfb --- /dev/null +++ b/doc/build/html/_modules/commands/generate.html @@ -0,0 +1,493 @@ + + + + + + + + commands.generate — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.generate

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+import src.pyconf as PYCONF
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """\ + The generate command generates SALOME modules from 'pure cpp' products. + WARNING: this command NEEDS YACSGEN to run. + + examples: + >> sat generate SALOME --products FLICACPP + """ + + name = "generate" + +
[docs] def getParser(self): + """Define all options for command 'sat generate <options>'""" + parser = self.getParserWithHelp() + parser.add_option('p', 'products', 'list2', 'products', + _("Optional: the list of products to generate")) + parser.add_option('', 'yacsgen', 'string', 'yacsgen', + _("Optional: path to YACSGEN's module_generator package")) + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat generate <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + # Check that the command has been called with an application + src.check_config_has_application(config) + + logger.info( _('Generation of SALOME modules for application %s\n') % \ + UTS.label(config.VARS.application) ) + + status = src.KO_STATUS + + # verify that YACSGEN is available + returnCode = check_yacsgen(config, options.yacsgen, logger) + + if not returnCode.isOk(): + logger.error(returnCode.getWhy()) + return returnCode + else: + yacsgen_dir = returnCode.getValue() + + # Make the generator module visible by python + sys.path.insert(0, yacsgen_dir) + + logger.info(" insert directory PATH %s = %s\n" % \ + ("YACSGEN", UTS.blue(yacsgen_dir)) ) + + products = config.APPLICATION.products + if options.products: + products = options.products + + details = [] + nbgen = 0 + + context = build_context(config, logger) + lprod = UTS.label(product) + for product in products: + header = _("Generating %s") % lprod + header += " %s " % ("." 
* (20 - len(product))) + logger.info(header) + + if product not in config.PRODUCTS: + logger.error(_("Unknown product %s") % lprod) + continue + + pi = src.product.get_product_config(config, product) + if not src.product.product_is_generated(pi): + logger.info(_("not a generated product %s") % lprod) + continue + + nbgen += 1 + try: + result = generate_component_list(config, pi, context, logger) + except Exception as exc: + result = str(exc) + + if result != src.OK_STATUS: + result = _("ERROR: %s") % result + details.append([product, result]) + + if len(details) != 0: + msg = _("The following modules were not generated correctly:\n") + for d in details: + msg += " %s: %s\n" % (d[0], d[1]) + logger.error(msg) + return RCO.ReturnCode("KO", msg) + else: + return RCO.ReturnCode("OK", "generate command done")
+ + +
[docs]def generate_component_list(config, product_info, context, logger): + res = "?" + logger.info("\n") + for compo in src.product.get_product_components(product_info): + header = " %s %s " % (UTS.label(compo), "." * (20 - len(compo))) + res = generate_component(config, + compo, + product_info, + context, + header, + logger) + if config.USER.output_verbose_level == 3: + logger.info("\r%s%s\r%s" % (header, " " * 20, header)) + logger.info(res + "\n") + return res
+ +
[docs]def generate_component(config, compo, product_info, context, header, logger): +# get from config include file name and librairy name, or take default value + if "hxxfile" in product_info: + hxxfile = product_info.hxxfile + else: + hxxfile = compo + ".hxx" + if "cpplib" in product_info: + cpplib = product_info.cpplib + else: + cpplib = "lib" + compo + "CXX.so" + cpp_path = product_info.install_dir + + msg = "" + msg += "%s\n" % UTS.blue(header) + msg += "hxxfile = %s\n" % hxxfile + msg += "cpplib = %s\n" % cpplib + msg += "cpp_path = %s\n" % cpp_path + logger.debug(msg) + + # create a product_info at runtime + compo_info = PYCONF.Mapping(config) + compo_info.name = compo + compo_info.nb_proc = 1 + generate_dir = os.path.join(config.APPLICATION.workdir, "GENERATED") + install_dir = os.path.join(config.APPLICATION.workdir, "INSTALL") + build_dir = os.path.join(config.APPLICATION.workdir, "BUILD") + compo_info.source_dir = os.path.join(generate_dir, compo + "_SRC") + compo_info.install_dir = os.path.join(install_dir, compo) + compo_info.build_dir = os.path.join(build_dir, compo) + compo_info.depend = product_info.depend + compo_info.depend.append(product_info.name, "") # add cpp module + compo_info.opt_depend = product_info.opt_depend + + config.PRODUCTS.addMapping(compo, PYCONF.Mapping(config), "") + config.PRODUCTS[compo].default = compo_info + + builder = src.compilation.Builder(config, logger, compo_info, check_src=False) + builder.header = header + + # generate the component + # create GENERATE dir if necessary + if not os.path.exists(generate_dir): + os.mkdir(generate_dir) + + # delete previous generated directory if it already exists + if os.path.exists(compo_info.source_dir): + logger.debug(" delete %s" % compo_info.source_dir) + shutil.rmtree(compo_info.source_dir) + + # generate generates in the current directory => change for generate dir + curdir = os.curdir + os.chdir(generate_dir) + + # inline class to override bootstrap method + import 
module_generator + class sat_generator(module_generator.Generator): + # old bootstrap for automake (used if salome version <= 7.4) + def bootstrap(self, source_dir, log_file): + # replace call to default bootstrap() by using subprocess call (cleaner) + command = "sh autogen.sh" + ier = subprocess.call(command, shell=True, cwd=source_dir, + stdout=log_file, stderr=subprocess.STDOUT) + if ier != 0: + raise Exception("bootstrap has ended in error") + + + # determine salome version + VersionSalome = src.get_salome_version(config) + if VersionSalome >= 750 : + use_autotools=False + builder.log('USE CMAKE', 3) + else: + use_autotools=True + builder.log('USE AUTOTOOLS', 3) + + result = "GENERATE" + builder.log('GENERATE', 3) + + prevstdout = sys.stdout + prevstderr = sys.stderr + + try: + sys.stdout = logger.logTxtFile + sys.stderr = logger.logTxtFile + + if src.product.product_is_mpi(product_info): + salome_compo = module_generator.HXX2SALOMEParaComponent(hxxfile, + cpplib, + cpp_path) + else: + salome_compo = module_generator.HXX2SALOMEComponent(hxxfile, + cpplib, + cpp_path) + + if src.product.product_has_salome_gui(product_info): + # get files to build a template GUI + gui_files = salome_compo.getGUIfilesTemplate(compo) + else: + gui_files = None + + mg = module_generator.Module(compo, components=[salome_compo], + prefix=generate_dir, gui=gui_files) + g = sat_generator(mg, context) + g.generate() + + if use_autotools: + result = "BUID_CONFIGURE" + builder.log('BUID_CONFIGURE (no bootstrap)', 3) + g.bootstrap(compo_info.source_dir, logger.logTxtFile) + + result = src.OK_STATUS + finally: + sys.stdout = prevstdout + sys.stderr = prevstderr + + # go back to previous directory + os.chdir(curdir) + + # do the compilation using the builder object + if builder.prepare()!= 0: return "Error in prepare" + if use_autotools: + if builder.configure()!= 0: return "Error in configure" + else: + if builder.cmake()!= 0: return "Error in cmake" + + if builder.make(config.VARS.nb_proc, 
"")!=0: return "Error in make" + if builder.install()!=0: return "Error in make install" + + # copy specified logo in generated component install directory + # rem : logo is not copied in source dir because this would require + # to modify the generated makefile + logo_path = src.product.product_has_logo(product_info) + if logo_path: + destlogo = os.path.join(compo_info.install_dir, "share", "salome", + "resources", compo.lower(), compo + ".png") + src.Path(logo_path).copyfile(destlogo) + + return result
+ +
[docs]def build_context(config, logger): + products_list = [ 'KERNEL', 'GUI' ] + ctxenv = src.environment.SalomeEnviron(config, + src.environment.Environ(dict( + os.environ)), + True) + ctxenv.silent = True + ctxenv.set_full_environ(logger, config.APPLICATION.products.keys()) + + dicdir = {} + for p in products_list: + prod_env = p + "_ROOT_DIR" + val = os.getenv(prod_env) + if os.getenv(prod_env) is None: + if p not in config.APPLICATION.products: + msg = _("product %s is not defined. Include it in the application or define $%s.") % \ + (p, prod_env) + logger.error(UTS.red(msg)) + val = "" + val = ctxenv.environ.environ[prod_env] + dicdir[p] = val + + # the dictionary requires all keys + # but the generation requires only values for KERNEL and GUI + context = { + "update": 1, + "makeflags": "-j2", + "kernel": dicdir["KERNEL"], + "gui": dicdir["GUI"], + "yacs": "", + "med": "", + "mesh": "", + "visu": "", + "geom": "", + } + return context
+ +
[docs]def check_module_generator(directory=None): + """Check if module_generator is available. + + :param directory: (str) The directory of YACSGEN. + :return: (str) + The YACSGEN path if the module_generator is available, else None + """ + undo = False + if directory is not None and directory not in sys.path: + sys.path.insert(0, directory) + undo = True + + res = None + try: + #import module_generator + info = imp.find_module("module_generator") + res = info[1] + except ImportError: + if undo: + sys.path.remove(directory) + res = None + + return res
+ +
[docs]def check_yacsgen(config, directory, logger): + """Check if YACSGEN is available. + + :param config: (Config) The global configuration. + :param directory: (str) The directory given by option --yacsgen + :param logger: (Logger) The logger instance + :return: (RCO.ReturnCode) + with value The path to yacsgen directory if ok + """ + # first check for YACSGEN (command option, then product, then environment) + yacsgen_dir = None + yacs_src = "?" + if directory is not None: + yacsgen_dir = directory + yacs_src = _("Using YACSGEN from command line") + elif 'YACSGEN' in config.APPLICATION.products: + yacsgen_info = src.product.get_product_config(config, 'YACSGEN') + yacsgen_dir = yacsgen_info.install_dir + yacs_src = _("Using YACSGEN from application") + elif os.environ.has_key("YACSGEN_ROOT_DIR"): + yacsgen_dir = os.getenv("YACSGEN_ROOT_DIR") + yacs_src = _("Using YACSGEN from environment") + + if yacsgen_dir is None: + RCO.ReturnCode("KO", _("The generate command requires YACSGEN.")) + + logger.info(" %s in %s" % (yacs_src, yacsgen_dir)) + + if not os.path.exists(yacsgen_dir): + msg = _("YACSGEN directory not found: '%s'") % yacsgen_dir + RCO.ReturnCode("KO", msg) + + # load module_generator + c = check_module_generator(yacsgen_dir) + if c is not None: + return RCO.ReturnCode("OK", "check_module_generator on %s" % yacsgen_dir, c) + + pv = os.getenv("PYTHON_VERSION") + if pv is None: + python_info = src.product.get_product_config(config, "Python") + pv = '.'.join(python_info.version.split('.')[:2]) + assert pv is not None, "$PYTHON_VERSION not defined" + yacsgen_dir = os.path.join(yacsgen_dir, "lib", "python%s" % pv, "site-packages") + c = check_module_generator(yacsgen_dir) + if c is not None: + return RCO.ReturnCode("OK", "check_module_generator on %s" % yacsgen_dir, c) + + return RCO.ReturnCode("KO", _("The python module module_generator was not found in YACSGEN"))
+ +
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/init.html b/doc/build/html/_modules/commands/init.html new file mode 100644 index 0000000..68a6106 --- /dev/null +++ b/doc/build/html/_modules/commands/init.html @@ -0,0 +1,274 @@ + + + + + + + + commands.init — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.init

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+import src.pyconf as PYCONF
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """\ + The init command Changes the local settings of SAT + """ + + name = "init" + +
[docs] def getParser(self): + """Define all options for command 'sat init <options>'""" + parser = self.getParserWithHelp() + parser.add_option('b', 'base', 'string', 'base', + _('Optional: The path to the products base')) + parser.add_option('w', 'workdir', 'string', 'workdir', + _('Optional: The path to the working directory ' + '(where to install the applications')) + parser.add_option('a', 'archive_dir', 'string', 'archive_dir', + _('Optional: The path to the local archive directory ' + '(where to install local source archives')) + parser.add_option('v', 'VCS', 'string', 'VCS', + _('Optional: The address of the repository of SAT ' + '(only informative)')) + parser.add_option('t', 'tag', 'string', 'tag', + _('Optional: The tag of SAT (only informative)')) + parser.add_option('l', 'log_dir', 'string', 'log_dir', + _('Optional: The directory where to put all the logs of SAT')) + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat init <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + # Print some informations + logger.info(_('Local Settings of SAT %s') % UTS.label(config.VARS.salometoolsway)) + + res = 0 + + # Set the options corresponding to a directory + for opt in [("base" , options.base), + ("workdir", options.workdir), + ("log_dir", options.log_dir), + ("archive_dir", options.archive_dir)]: + key, value = opt + if value: + res_check = check_path(value, logger) + res += res_check + if res_check == 0: + res_set = set_local_value(config, key, value, logger) + res += res_set + + # Set the options corresponding to an informative value + for opt in [("VCS", options.VCS), ("tag", options.tag)]: + key, value = opt + res_set = set_local_value(config, key, value, logger) + res += res_set + + display_local_values(config, logger) + + return res
+ + +
[docs]def set_local_value(config, key, value, logger): + """Edit the site.pyconf file and change a value. + + :param config: (Config) The global configuration. + :param key: (str) The key from which to change the value. + :param value: (str) The path to change. + :param logger: (Logger) The logger instance. + :return: (int) 0 if all is OK, else 1 + """ + local_file_path = os.path.join(config.VARS.datadir, "local.pyconf") + # Update the local.pyconf file + try: + local_cfg = PYCONF.Config(local_file_path) + local_cfg.LOCAL[key] = value + ff = open(local_file_path, 'w') + local_cfg.__save__(ff, 1) + ff.close() + if key != "log_dir": + config.LOCAL[key] = value + except Exception as e: + err = str(e) + msg = _("Unable to update the local.pyconf file: %s\n") % err + logger.error(msg) + return RCO.ReturnCode("KO", msg) + + return RCO.ReturnCode("OK")
+ +
[docs]def display_local_values(config, logger): + """Display the base path + + :param config: (Config) The global configuration. + :param key: (str) The key from which to change the value. + :param logger: (Logger) The logger instance. + """ + info = [("base", config.LOCAL.base), + ("workdir", config.LOCAL.workdir), + ("log_dir", config.LOCAL.log_dir), + ("archive_dir", config.LOCAL.archive_dir), + ("VCS", config.LOCAL.VCS), + ("tag", config.LOCAL.tag)] + UTS.logger_info_tuples(logger, info) + + return 0
+ +
[docs]def check_path(path_to_check, logger): + """Verify that the given path is not a file and can be created. + + :param path_to_check: (str) The path to check. + :param logger: (Logger) The logger instance. + """ + if path_to_check == "default": + return 0 + + # Get the path + path = src.Path(path_to_check) + + # If it is a file, do nothing and return error + if path.isfile(): + msg = _("""\ +The given path is a file: %s +Please provide a path to a directory\n""") % UTS.blue(path_to_check) + logger.error(msg) + return 1 + + # Try to create the given path + try: + UTS.ensure_path_exists(str(path)) + except Exception as e: + msg = _("""\ +Unable to create the directory %s: + +%s\n""") % (UTS.blue(str(path)), UTS.yellow(e)) + logger.error(msg) + return 1 + + return 0
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/job.html b/doc/build/html/_modules/commands/job.html new file mode 100644 index 0000000..5edc609 --- /dev/null +++ b/doc/build/html/_modules/commands/job.html @@ -0,0 +1,276 @@ + + + + + + + + commands.job — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.job

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """\ + The job command executes the commands of the job defined + in the jobs configuration file\ + + examples: + >> sat job --jobs_config my_jobs --name my_job" + """ + + name = "job" + +
[docs] def getParser(self): + """Define all options for command 'sat job <options>'""" + parser = self.getParserWithHelp() + parser.add_option( + 'j', 'jobs_config', 'string', 'jobs_cfg', + _('Mandatory: The name of the config file that contains the jobs configuration') ) + parser.add_option( + '', 'name', 'string', 'job', + _('Mandatory: The job name from which to execute commands.'), "" ) + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat job <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + l_cfg_dir = config.PATHS.JOBPATH + + # Make sure the jobs_config option has been called + if not options.jobs_cfg: + message = _("The option --jobs_config is required\n") + logger.error(message) + return 1 + + # Make sure the name option has been called + if not options.job: + message = _("The option --name is required\n") + logger.error(message) + return 1 + + # Find the file in the directories + found = True + fPyconf = options.jobs_cfg + if not file_jobs_cfg.endswith('.pyconf'): + fPyconf += '.pyconf' + + for cfg_dir in l_cfg_dir: + file_jobs_cfg = os.path.join(cfg_dir, fPyconf) + if os.path.exists(file_jobs_cfg): + found = True + break + + if not found: + msg = _("""\ +The job file configuration %s was not found. 
+Use the --list option to get the possible files.""") % UTS.blue(fPyconf) + logger.error(msg) + return 1 + + info = [ (_("Platform"), config.VARS.dist), + (_("File containing the jobs configuration"), file_jobs_cfg) ] + UTS.logger_info_tuples(logger, info) + + # Read the config that is in the file + config_jobs = src.read_config_from_a_file(file_jobs_cfg) + + # Find the job and its commands + found = False + for job in config_jobs.jobs: + if job.name == options.job: + commands = job.commands + found = True + break + if not found: + msg = _("Impossible to find the job %s in %s\n") % (options.job, file_jobs_cfg) + logger.error(msg) + return 1 + + # Find the maximum length of the commands in order to format the display + len_max_command = max([len(cmd) for cmd in commands]) + + # Loop over the commands and execute it + res = 0 + nb_pass = 0 + for command in commands: + specific_option = False + # Determine if it is a sat command or a shell command + cmd_exe = command.split(" ")[0] # first part + if cmd_exe == "sat": + # use the salomeTools parser to get the options of the command + sat_parser = salomeTools.parser + input_parser = src.remove_item_from_list(command.split(' ')[1:], "") + (options, argus) = sat_parser.parse_args(input_parser) + # Verify if there is a changed option + for attr in dir(options): + if attr.startswith("__"): + continue + if options.__getattr__(attr) != None: + specific_option = True + sat_command_name = argus[0] + end_cmd = " ".join(argus[1:]) + else: + sat_command_name = "shell" + end_cmd = ["--command", command] + # Do not change the options if no option was called in the command + if not(specific_option): + options = None + + # Get dynamically the command function to call + sat_command = runner.__getattr__(sat_command_name) + + logger.info("Executing " + UTS.label(command) + " " + + "." 
* (len_max_command - len(command)) + " ") + + error = "" + # Execute the command + code = sat_command(end_cmd, + options = options, + batch = True, + verbose = 0, + logger_add_link = logger) + + # Print the status of the command + if code == 0: + nb_pass += 1 + logger.info("<OK>\n") + else: + if sat_command_name != "test": + res = 1 + logger.info('<KO>: %s\n' % error) + + # Print the final state + if res == 0: + final_status = "OK" + else: + final_status = "KO" + + msg = "Commands: <%s> (%d/%d)" % (final_status, nb_pass, len(commands)) + logger.info(msg) + return RCO.ReturnCode(final_status, msg)
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/jobs.html b/doc/build/html/_modules/commands/jobs.html new file mode 100644 index 0000000..9deb005 --- /dev/null +++ b/doc/build/html/_modules/commands/jobs.html @@ -0,0 +1,1921 @@ + + + + + + + + commands.jobs — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.jobs

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import os
+import sys
+import tempfile
+import traceback
+import datetime
+import time
+import csv
+import shutil
+import itertools
+import re
+
+# import paramiko later
+  
+import src.ElementTree as etree
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+import src.pyconf as PYCONF
+import src.xmlManager as XMLMGR
+from src.salomeTools import _BaseCommand
+
+STYLESHEET_GLOBAL = "jobs_global_report.xsl"
+STYLESHEET_BOARD = "jobs_board_report.xsl"
+
+DAYS_SEPARATOR = ","
+CSV_DELIMITER = ";"
+
+_PARAMIKO = []
+
+
[docs]def getParamiko(logger=None): + if len(_PARAMIKO) == 0: + try: + import paramiko as PARAMIKO + _PARAMIKO.append(PARAMIKO) + return PARAMIKO + except Exception as e: + if logger is not None: + logger.critical("Problem import paramiko. No jobs if not 'pip install paramiko'") + return None + else: + return _PARAMIKO[0]
+ + +######################################################################## +# Command class +######################################################################## +
[docs]class Command(_BaseCommand): + """ + The jobs command command launches maintenances that are described in + the dedicated jobs configuration file. + + examples: + >> sat jobs --name my_jobs --publish + """ + + name = "jobs" + +
[docs] def getParser(self): + """Define all options for command 'sat jobs <options>'""" + parser = self.getParserWithHelp() + parser.add_option( + 'n', 'name', 'list2', 'jobs_cfg', + _('Mandatory: The name of the config file that contains the jobs configuration. Can be a list.') ) + parser.add_option( + 'o', 'only_jobs', 'list2', 'only_jobs', + _('Optional: the list of jobs to launch, by their name. ') ) + parser.add_option( + 'l', 'list', 'boolean', 'list', + _('Optional: list all available config files.') ) + parser.add_option( + 't', 'test_connection', 'boolean', 'test_connection', + _("Optional: try to connect to the machines. Not executing the jobs."), + False ) + parser.add_option( + 'p', 'publish', 'boolean', 'publish', + _("Optional: generate an xml file that can be read in a browser to display the jobs status."), + False ) + parser.add_option( + 'i', 'input_boards', 'string', 'input_boards', _("Optional: " + "the path to csv file that contain the expected boards."), + "" ) + parser.add_option( + '', 'completion', 'boolean', 'no_label', + _("Optional (internal use): do not print labels, Works only with --list."), + False ) + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat jobs <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + l_cfg_dir = config.PATHS.JOBPATH + + # list option : display all the available config files + if options.list: + for cfg_dir in l_cfg_dir: + if not options.no_label: + logger.info("------ %s\n" % UTS.blue(cfg_dir)) + if not os.path.exists(cfg_dir): + continue + for f in sorted(os.listdir(cfg_dir)): + if not f.endswith('.pyconf'): + continue + cfilename = f[:-7] + logger.info("%s\n" % cfilename) + return RCO.ReturnCode("OK", "jobs command done") + + # Make sure the jobs_config option has been called + if not options.jobs_cfg: + msg = _("The option --jobs_config is required\n") + logger.error(message) + return RCO.ReturnCode("KO", msg) + + # Find the file in the directories, unless it is a full path + # merge all in a config + merger = PYCONF.ConfigMerger() + config_jobs = PYCONF.Config() + l_conf_files_path = [] + for config_file in options.jobs_cfg: + found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir) + if not found: + msg = _("""\ +The file configuration %s was not found. 
+Use the --list option to get the possible files.\n""") % config_file + logger.error(msg) + return RCO.ReturnCode("KO", msg) + l_conf_files_path.append(file_jobs_cfg) + # Read the config that is in the file + one_config_jobs = src.read_config_from_a_file(file_jobs_cfg) + merger.merge(config_jobs, one_config_jobs) + + info = [(_("Platform"), config.VARS.dist), + (_("Files containing the jobs configuration"), l_conf_files_path)] + UTS.logger_info_tuples(logger, info) + + if options.only_jobs: + l_jb = PYCONF.Sequence() + for jb in config_jobs.jobs: + if jb.name in options.only_jobs: + l_jb.append(jb, + "Job that was given in only_jobs option parameters\n") + config_jobs.jobs = l_jb + + # Parse the config jobs in order to develop all the factorized jobs + develop_factorized_jobs(config_jobs) + + # Make a unique file that contain all the jobs in order to use it + # on every machine + name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')] + for path in l_conf_files_path]) + ".pyconf" + path_pyconf = src.get_tmp_filename(config, name_pyconf) + #Save config + f = file( path_pyconf , 'w') + config_jobs.__save__(f) + + # log the paramiko problems + log_dir = UTS.get_log_path(config) + paramiko_log_dir_path = os.path.join(log_dir, "JOBS") + UTS.ensure_path_exists(paramiko_log_dir_path) + paramiko = getParamiko(logger) + paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path, + logger.txtFileName)) + + # Initialization + today_jobs = Jobs(runner, logger, path_pyconf, config_jobs) + + # SSH connection to all machines + today_jobs.ssh_connection_all_machines() + if options.test_connection: + return RCO.ReturnCode("OK", "jobs ssh_connection done") + + gui = None + if options.publish: + logger.debug(_("Initialize the xml boards : ")) + + # Copy the stylesheets in the log directory + log_dir = log_dir + xsl_dir = os.path.join(config.VARS.srcDir, 'xsl') + files_to_copy = [] + files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL)) + 
files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD)) + files_to_copy.append(os.path.join(xsl_dir, "command.xsl")) + files_to_copy.append(os.path.join(xsl_dir, "running.gif")) + for file_path in files_to_copy: + # OP We use copy instead of copy2 to update the creation date + # So we can clean the LOGS directories easily + shutil.copy(file_path, log_dir) + + # Instanciate the Gui in order to produce the xml files that contain all + # the boards + gui = Gui(log_dir, + today_jobs.ljobs, + today_jobs.ljobs_not_today, + config.VARS.datehour, + logger, + file_boards = options.input_boards) + + logger.debug("<OK>\n\n") + + # Display the list of the xml files + logger.info(("List of published files:\n%s\n") % gui.xml_global_file.logFile) + msg = "" + for board in gui.d_xml_board_files.keys(): + file_path = gui.d_xml_board_files[board].logFile + file_name = os.path.basename(file_path) + msg += "%s\n" % file_path + logger.add_link(file_name, "board", 0, board) + + logger.info(msg) + + today_jobs.gui = gui + + interruped = False + try: + # Run all the jobs contained in config_jobs + today_jobs.run_jobs() + except KeyboardInterrupt: + interruped = True + logger.critical(UTS.red(_("KeyboardInterrupt forced interruption"))) + except Exception as e: + # verbose debug message with traceback + msg = _("Exception raised, the jobs loop has been interrupted:\n\n%s") + logger.critical(msg % UTS.yellow(traceback.format_exc())) + finally: + # make clear kill subprocess + res = RCO.ReturnCode("OK", "jobs command finally done") + if interruped: + msg = _("Killing the running jobs and trying to get the corresponding logs\n") + logger.warning(UTS.red(msg)) + res = RCO.ReturnCode("KO", msg) + + # find the potential not finished jobs and kill them + for jb in today_jobs.ljobs: + if not jb.has_finished(): + res += RCO.ReturnCode("KO", "job %s has not finished" % jb.name) + try: + jb.kill_remote_process() + except Exception as e: + msg = _("Failed to kill job %s: %s\n") % (jb.name, e) + 
logger.warning(UTS.red(msg)) + res += RCO.ReturnCode("KO", msg) + if jb.res_job != "0": + res += RCO.ReturnCode("KO", "job %s fail" % jb.name) + if interruped: + if today_jobs.gui: + today_jobs.gui.last_update(_("Forced interruption")) + else: + if today_jobs.gui: + today_jobs.gui.last_update() + # Output the results + today_jobs.write_all_results() + # Remove the temporary pyconf file + if os.path.exists(path_pyconf): + os.remove(path_pyconf) + return res
+ + +
[docs]class Machine(object): + """ + Manage a ssh connection on a machine + """ + def __init__(self, + name, + host, + user, + port=22, + passwd=None, + sat_path="salomeTools"): + self.name = name + self.host = host + self.port = port + self.distribution = None # Will be filled after copying SAT on the machine + self.user = user + self.password = passwd + self.sat_path = sat_path + self.paramiko = getParamiko() + self.ssh = self.paramiko.SSHClient() + self._connection_successful = None + +
[docs] def connect(self, logger): + """Initiate the ssh connection to the remote machine + + :param logger: The logger instance + :return: None + """ + + self._connection_successful = False + self.ssh.load_system_host_keys() + self.ssh.set_missing_host_key_policy(self.paramiko.AutoAddPolicy()) + try: + self.ssh.connect(self.host, + port=self.port, + username=self.user, + password = self.password) + except self.paramiko.AuthenticationException: + message = src.KO_STATUS + _("Authentication failed") + except self.paramiko.BadHostKeyException: + message = (src.KO_STATUS + + _("The server's host key could not be verified")) + except self.paramiko.SSHException: + message = ( _("SSHException error connecting or " + "establishing an SSH session")) + except: + message = ( _("Error connecting or establishing an SSH session")) + else: + self._connection_successful = True + message = "" + return message
+ +
[docs] def successfully_connected(self, logger): + """ + Verify if the connection to the remote machine has succeed + + :param logger: The logger instance + :return: (bool) True if the connection has succeed, False if not + """ + if self._connection_successful == None: + message = _("""\ +Ask if the connection +(name: %(1)s host: %(2)s, port: %(3)s, user: %(4)s) is OK +whereas there were no connection request""" % \ + {"1": self.name, "2": self.host, "3": self.port, "4": self.user} ) + logger.critical(UTS.red(message)) + return self._connection_successful
+ +
[docs] def copy_sat(self, sat_local_path, job_file): + """Copy salomeTools to the remote machine in self.sat_path""" + res = 0 + try: + # open a sftp connection + self.sftp = self.ssh.open_sftp() + # Create the sat directory on remote machine if it is not existing + self.mkdir(self.sat_path, ignore_existing=True) + # Put sat + self.put_dir(sat_local_path, self.sat_path, filters = ['.git']) + # put the job configuration file in order to make it reachable + # on the remote machine + remote_job_file_name = ".%s" % os.path.basename(job_file) + self.sftp.put(job_file, os.path.join(self.sat_path, remote_job_file_name)) + except Exception as e: + res = str(e) + self._connection_successful = False + + return res
+ +
[docs] def put_dir(self, source, target, filters = []): + """ + Uploads the contents of the source directory to the target path. + The target directory needs to exists. + All sub-directories in source are created under target. + """ + for item in os.listdir(source): + if item in filters: + continue + source_path = os.path.join(source, item) + destination_path = os.path.join(target, item) + if os.path.islink(source_path): + linkto = os.readlink(source_path) + try: + self.sftp.symlink(linkto, destination_path) + self.sftp.chmod(destination_path, + os.stat(source_path).st_mode) + except IOError: + pass + else: + if os.path.isfile(source_path): + self.sftp.put(source_path, destination_path) + self.sftp.chmod(destination_path, + os.stat(source_path).st_mode) + else: + self.mkdir(destination_path, ignore_existing=True) + self.put_dir(source_path, destination_path)
+ +
[docs] def mkdir(self, path, mode=511, ignore_existing=False): + """ + As mkdir by adding an option to not fail if the folder exists + """ + try: + self.sftp.mkdir(path, mode) + except IOError: + if ignore_existing: + pass + else: + raise
+ +
[docs] def exec_command(self, command, logger): + """Execute the command on the remote machine + + :param command: (str) The command to be run + :param logger: The logger instance + :return: (paramiko.channel.ChannelFile, etc) + the stdin, stdout, and stderr of the executing command, + as a 3-tuple + """ + import traceback + try: + # Does not wait the end of the command + (stdin, stdout, stderr) = self.ssh.exec_command(command) + except self.paramiko.SSHException: + msg = _("<KO>: the paramiko server failed to execute the command\n") + msg += "command: '%s'\n" % command + msg += "\n%s\n" % UTS.yellow(traceback.format_exc()) + logger.critical(msg) + return (None, None, None) + except Exception as e: + msg = _("<KO>: an exception raised on ssh.exec_command:\n") + msg += "command: '%s'\n" % command + msg += "\n%s\n" % UTS.yellow(traceback.format_exc()) + logger.critical(msg) + return (None, None, None) + return (stdin, stdout, stderr)
+ +
[docs] def close(self): + """Close the ssh connection""" + self.ssh.close()
+ +
[docs] def write_info(self, logger): + """ + Prints the informations relative to the machine in the logger + (terminal traces and log file) + + :param logger: The logger instance + :return: None + """ + if self.successfully_connected(logger): + msg = "<OK>" + else: + msg = "<KO>" + msg += "host: %s, " % self.host + msg += "port: %s, " % str(self.port) + msg += "user: %s" % str(self.user) + logger.info("Connection %s" % msg )
+ + +
[docs]class Job(object): + """ + Class to manage one job + """ + def __init__(self, + name, + machine, + application, + board, + commands, + timeout, + config, + job_file_path, + logger, + after=None, + prefix=None): + + self.name = name + self.machine = machine + self.after = after + self.timeout = timeout + self.application = application + self.board = board + self.config = config + self.logger = logger + # The list of log files to download from the remote machine + self.remote_log_files = [] + + # The remote command status + # -1 means that it has not been launched, + # 0 means success and 1 means fail + self.res_job = "-1" + self.cancelled = False + + self._T0 = -1 + self._Tf = -1 + self._has_begun = False + self._has_finished = False + self._has_timouted = False + self._stdin = None # Store the command inputs field + self._stdout = None # Store the command outputs field + self._stderr = None # Store the command errors field + + self.out = "" + self.err = "" + + self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path) + self.commands = commands + self.command = (os.path.join(self.machine.sat_path, "sat") + + " -l " + + os.path.join(self.machine.sat_path, + "list_log_files.txt") + + " job --jobs_config " + + os.path.join(self.machine.sat_path, + self.name_remote_jobs_pyconf) + + " --name " + self.name) + if prefix: + self.command = prefix + ' "' + self.command +'"' + +
[docs] def get_pids(self): + """ + Get the pid(s) corresponding to the command that have been launched + On the remote machine + + :return: (list) The list of integers corresponding to the found pids + """ + pids = [] + cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\'' + (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger) + pids_cmd = out_pid.readlines() + pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd] + pids+=pids_cmd + return pids
+ +
[docs] def kill_remote_process(self, wait=1): + """Kills the process on the remote machine. + + :return: (str, str) the output of the kill, the error of the kill + """ + try: + pids = self.get_pids() + except: + return ("Unable to get the pid of the command.", "") + + cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids]) + (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill, + self.logger) + time.sleep(wait) + return (out_kill.read().decode(), err_kill.read().decode())
+ +
[docs] def has_begun(self): + """Returns True if the job has already begun + + :return: (bool) True if the job has already begun + """ + return self._has_begun
+ +
[docs] def has_finished(self): + """ + Returns True if the job has already finished + (i.e. all the commands have been executed) + If it is finished, the outputs are stored in the fields out and err. + + :return: (bool) True if the job has already finished + """ + + # If the method has already been called and returned True + if self._has_finished: + return True + + # If the job has not begun yet + if not self.has_begun(): + return False + + if self._stdout.channel.closed: + self._has_finished = True + # Store the result outputs + self.out += self._stdout.read().decode() + self.err += self._stderr.read().decode() + # Put end time + self._Tf = time.time() + # And get the remote command status and log files + try: + self.get_log_files() + except Exception as e: + self.err += _("Unable to get remote log files: %s") % e + + return self._has_finished
+ +
[docs] def get_log_files(self): + """ + Get the log files produced by the command launched + on the remote machine, and put it in the log directory of the user, + so they can be accessible from + """ + # Do not get the files if the command is not finished + if not self.has_finished(): + msg = _("Trying to get log files whereas the job is not finished.") + self.logger.warning(UTS.red(msg)) + return + + # First get the file that contains the list of log files to get + tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt") + remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt") + self.machine.sftp.get(remote_path, tmp_file_path) + + # Read the file and get the result of the command and all the log files + # to get + fstream_tmp = open(tmp_file_path, "r") + file_lines = fstream_tmp.readlines() + file_lines = [line.replace("\n", "") for line in file_lines] + fstream_tmp.close() + os.remove(tmp_file_path) + + try : + # The first line is the result of the command (0 success or 1 fail) + self.res_job = file_lines[0] + except Exception as e: + self.err += _("Unable to get status from remote file '%(1)s': %(2)s") % \ + {"1": remote_path, "2": str(e)} + + for i, job_path_remote in enumerate(file_lines[1:]): + try: + # For each command, there is two files to get : + # 1- The xml file describing the command and giving the + # internal traces. 
+ # 2- The txt file containing the system command traces (like + # traces produced by the "make" command) + # 3- In case of the test command, there is another file to get : + # the xml board that contain the test results + dirname = os.path.basename(os.path.dirname(job_path_remote)) + if dirname != 'OUT' and dirname != 'TEST': + # Case 1- + local_path = os.path.join(os.path.dirname( + self.logger.logFilePath), + os.path.basename(job_path_remote)) + if i==0: # The first is the job command + self.logger.add_link(os.path.basename(job_path_remote), + "job", + self.res_job, + self.command) + elif dirname == 'OUT': + # Case 2- + local_path = os.path.join(os.path.dirname( + self.logger.logFilePath), + 'OUT', + os.path.basename(job_path_remote)) + elif dirname == 'TEST': + # Case 3- + local_path = os.path.join(os.path.dirname( + self.logger.logFilePath), + 'TEST', + os.path.basename(job_path_remote)) + + # Get the file + if not os.path.exists(local_path): + self.machine.sftp.get(job_path_remote, local_path) + self.remote_log_files.append(local_path) + except Exception as e: + self.err += _("Unable to get %(1)s log file from remote: %(2)s") % \ + {"1": str(job_path_remote), "2": str(e)}
+ +
[docs] def has_failed(self): + """ + Returns True if the job has failed. + A job is considered as failed if the machine could not be reached, + if the remote command failed, + or if the job finished with a time out. + + :return: (bool) True if the job has failed + """ + if not self.has_finished(): + return False + if not self.machine.successfully_connected(self.logger): + return True + if self.is_timeout(): + return True + if self.res_job == "1": + return True + return False
+ +
[docs] def cancel(self): + """ + In case of a failing job, one has to cancel every job that depend on it. + This method put the job as failed and will not be executed. + """ + if self.cancelled: + return + self._has_begun = True + self._has_finished = True + self.cancelled = True + msg = _("This job was not launched because its father has failed.") + self.out += msg + self.err += msg
+ +
[docs] def is_running(self): + """Returns True if the job commands are running + + :return: (bool) True if the job is running + """ + return self.has_begun() and not self.has_finished()
+ +
[docs] def is_timeout(self): + """Returns True if the job commands has finished with timeout + + :return: (bool) True if the job has finished with timeout + """ + return self._has_timouted
+ +
[docs] def time_elapsed(self): + """Get the time elapsed since the job launching + + :return: The number of seconds + :rtype: int + """ + if not self.has_begun(): + return -1 + T_now = time.time() + return T_now - self._T0
+ +
[docs] def check_time(self): + """ + Verify that the job has not exceeded its timeout. + If it has, kill the remote command and consider the job as finished. + """ + if not self.has_begun(): + return + if self.time_elapsed() > self.timeout: + self._has_finished = True + self._has_timouted = True + self._Tf = time.time() + (out_kill, __) = self.kill_remote_process() + self.out += "TIMEOUT \n" + out_kill + self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout) + try: + self.get_log_files() + except Exception as e: + self.err += _("Unable to get remote log files!\n%s\n" % str(e))
+ +
[docs] def total_duration(self): + """ + Gives the total duration of the job + + :return: (int) the total duration of the job in seconds + """ + return self._Tf - self._T0
+ +
[docs] def run(self): + """ + Launch the job by executing the remote command. + """ + + # Prevent multiple run + if self.has_begun(): + msg = _("A job can only be launched one time") + msg2 = _("Trying to launch the job '%s' whereas it has already been launched.") % self.name + self.logger.warning( UTS.red("%s\n%s\n" % (msg,msg2)) ) + return RCO.ReturnCode("KO", msg2) + + # Do not execute the command if the machine could not be reached + if not self.machine.successfully_connected(self.logger): + self._has_finished = True + self.out = "N\A" + self.err += ("Connection to machine (name : %s, host: %s, port:" + " %s, user: %s) has failed\nUse the log command " + "to get more information." + % (self.machine.name, + self.machine.host, + self.machine.port, + self.machine.user)) + else: + # Usual case : Launch the command on remote machine + self._T0 = time.time() + self._stdin, self._stdout, self._stderr = self.machine.exec_command( + self.command, + self.logger) + # If the results are not initialized, finish the job + if (self._stdin, self._stdout, self._stderr) == (None, None, None): + self._has_finished = True + self._Tf = time.time() + self.out += "N\A" + self.err += "The server failed to execute the command" + + # Put the beginning flag to true. + self._has_begun = True
+ +
[docs] def write_results(self): + """ + Display on the terminal all the job's information + """ + msg = "name : %s\n" % self.name + if self.after: + msg += "after : %s\n" % self.after + msg += "Time elapsed : %4imin %2is \n" % (self.total_duration()//60 , self.total_duration()%60) + if self._T0 != -1: + msg += "Begin time : %s\n" % \ + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._T0)) + if self._Tf != -1: + msg += "End time : %s\n\n" % \ + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf)) + + self.logger.info(msg) + + machine_head = "Informations about connection :\n" + underline = (len(machine_head) - 2) * "-" + self.logger.info(machine_head+underline) + self.machine.write_info(self.logger) + + msg = "out : \n" + if self.out == "": + msg += "Unable to get output\n" + else: + msg += self.out + "\n" + msg += "err :\n%s\n" % self.err + self.logger.info(msg)
+ +
[docs] def get_status(self): + """Get the status of the job (used by the Gui for xml display) + + :return: (str) The current status of the job + """ + if not self.machine.successfully_connected(self.logger): + return "SSH connection KO" + if not self.has_begun(): + return "Not launched" + if self.cancelled: + return "Cancelled" + if self.is_running(): + return "running since " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._T0)) + if self.has_finished(): + if self.is_timeout(): + return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf)) + return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf))
+ +
[docs]class Jobs(object): + """ + Class to manage the jobs to be run + """ + def __init__(self, + runner, + logger, + job_file_path, + config_jobs, + lenght_columns = 20): + # The jobs configuration + self.cfg_jobs = config_jobs + self.job_file_path = job_file_path + # The machine that will be used today + self.lmachines = [] + # The list of machine (hosts, port) that will be used today + # (a same host can have several machine instances since there + # can be several ssh parameters) + self.lhosts = [] + # The jobs to be launched today + self.ljobs = [] + # The jobs that will not be launched today + self.ljobs_not_today = [] + self.runner = runner + self.logger = logger + self.len_columns = lenght_columns + + # the list of jobs that have not been run yet + self._l_jobs_not_started = [] + # the list of jobs that have already ran + self._l_jobs_finished = [] + # the list of jobs that are running + self._l_jobs_running = [] + + self.determine_jobs_and_machines() + +
[docs] def define_job(self, job_def, machine): + """ + Takes a pyconf job definition and a machine (from class machine) + and returns the job instance corresponding to the definition. + + :param job_def: (Mapping a job definition + :param machine: (Machine) the machine on which the job will run + :return: (Job) The corresponding job in a job class instance + """ + name = job_def.name + cmmnds = job_def.commands + if not "timeout" in job_def: + timeout = 4*60*60 # default timeout = 4h + else: + timeout = job_def.timeout + after = None + if 'after' in job_def: + after = job_def.after + application = None + if 'application' in job_def: + application = job_def.application + board = None + if 'board' in job_def: + board = job_def.board + prefix = None + if "prefix" in job_def: + prefix = job_def.prefix + + return Job(name, + machine, + application, + board, + cmmnds, + timeout, + self.runner.cfg, + self.job_file_path, + self.logger, + after = after, + prefix = prefix)
+ +
[docs] def determine_jobs_and_machines(self): + """ + Reads the pyconf jobs definition and instantiates all + the machines and jobs to be done today. + + :return: None + """ + today = datetime.date.weekday(datetime.date.today()) + host_list = [] + + for job_def in self.cfg_jobs.jobs : + + if not "machine" in job_def: + msg = _("""\ +The job '%s' do not have the key 'machine'. +This job is ignored. +""") % job_def.name + self.logger.warning(msg) + continue + name_machine = job_def.machine + + a_machine = None + for mach in self.lmachines: + if mach.name == name_machine: + a_machine = mach + break + + if a_machine == None: + for machine_def in self.cfg_jobs.machines: + if machine_def.name == name_machine: + if 'host' not in machine_def: + host = self.runner.cfg.VARS.hostname + else: + host = machine_def.host + + if 'user' not in machine_def: + user = self.runner.cfg.VARS.user + else: + user = machine_def.user + + if 'port' not in machine_def: + port = 22 + else: + port = machine_def.port + + if 'password' not in machine_def: + passwd = None + else: + passwd = machine_def.password + + if 'sat_path' not in machine_def: + sat_path = "salomeTools" + else: + sat_path = machine_def.sat_path + + a_machine = Machine( + machine_def.name, + host, + user, + port=port, + passwd=passwd, + sat_path=sat_path + ) + + self.lmachines.append(a_machine) + if (host, port) not in host_list: + host_list.append((host, port)) + + if a_machine == None: + msg = _("""\ +The job '%(job)s' requires the machine '%(machine)s'. +This machine is not defined in the configuration file. +The job will not be launched. +""") % {"job" : job_def.name, "machine" : name_machine} + self.logger.warning(msg) + continue + + a_job = self.define_job(job_def, a_machine) + + if today in job_def.when: + self.ljobs.append(a_job) + else: # today in job_def.when + self.ljobs_not_today.append(a_job) + + self.lhosts = host_list
+ +
[docs] def ssh_connection_all_machines(self, pad=50): + """Do the ssh connection to every machine to be used today. + + :return: None + """ + self.logger.info( "Establishing connection with all the machines :\n") + for machine in self.lmachines: + # little algorithm in order to display traces + begin_line = (_("Connection to %s: ") % machine.name) + if pad - len(begin_line) < 0: + endline = " " + else: + endline = (pad - len(begin_line)) * "." + " " + + step = "SSH connection" + self.logger.info( begin_line + endline + step) + # the call to the method that initiate the ssh connection + msg = machine.connect(self.logger) + + # Copy salomeTools to the remote machine + if machine.successfully_connected(self.logger): + step = _("Remove SAT") + self.logger.info('\r%s%s%s' % (begin_line, endline, 20 * " ")) + self.logger.info('\r%s%s%s' % (begin_line, endline, step)) + (__, out_dist, __) = machine.exec_command( + "rm -rf %s" % machine.sat_path, self.logger) + out_dist.read() + + step = _("Copy SAT") + self.logger.info('\r%s%s%s' % (begin_line, endline, 20 * " ")) + self.logger.info('\r%s%s%s' % (begin_line, endline, step)) + + res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway, + self.job_file_path) + + # set the local settings of sat on the remote machine using + # the init command + (__, out_dist, __) = machine.exec_command( + os.path.join(machine.sat_path, + "sat init --base default --workdir" + " default --log_dir default"), + self.logger) + out_dist.read() + + # get the remote machine distribution using a sat command + (__, out_dist, __) = machine.exec_command( + os.path.join(machine.sat_path, + "sat config --value VARS.dist --no_label"), + self.logger) + machine.distribution = out_dist.read().decode().replace("\n", + "") + + # Print the status of the copy + if res_copy == 0: + self.logger.info('\r%s' % \ + ((len(begin_line)+len(endline)+20) * " ")) + self.logger.info('\r%s%s%s' % (begin_line, endline, "<OK>")) + else: + self.logger.info('\r%s' % \ + 
((len(begin_line)+len(endline)+20) * " "), 3) + self.logger.info('\r%s%s%s %s' % \ + (begin_line, endline, "<KO>", + _("Copy of SAT failed: %s") % res_copy)) + else: + self.logger.info('\r%s' % + ((len(begin_line)+len(endline)+20) * " ")) + self.logger.info('\r%s%s%s %s' % (begin_line, endline, "<KO>", msg)) + self.logger.info("\n") + + self.logger.info("\n")
+ + +
[docs] def is_occupied(self, hostname): + """ + Returns True if a job is running on + the machine defined by its host and its port. + + :param hostname: (str, int) the pair (host, port) + :return: (Job or bool) + the job that is running on the host, + or false if there is no job running on the host. + """ + host = hostname[0] + port = hostname[1] + for jb in self.ljobs: + if jb.machine.host == host and jb.machine.port == port: + if jb.is_running(): + return jb + return False
+ +
[docs] def update_jobs_states_list(self): + """ + Updates the lists that store the currently + running jobs and the jobs that have already finished. + + :return: None + """ + jobs_finished_list = [] + jobs_running_list = [] + for jb in self.ljobs: + if jb.is_running(): + jobs_running_list.append(jb) + jb.check_time() + if jb.has_finished(): + jobs_finished_list.append(jb) + + nb_job_finished_before = len(self._l_jobs_finished) + self._l_jobs_finished = jobs_finished_list + self._l_jobs_running = jobs_running_list + + nb_job_finished_now = len(self._l_jobs_finished) + + return nb_job_finished_now > nb_job_finished_before
+ +
[docs] def cancel_dependencies_of_failing_jobs(self): + """Cancels all the jobs that depend on a failing one. + + :return: None + """ + + for job in self.ljobs: + if job.after is None: + continue + father_job = self.find_job_that_has_name(job.after) + if father_job is not None and father_job.has_failed(): + job.cancel()
+ +
[docs] def find_job_that_has_name(self, name): + """Returns the job by its name. + + :param name: (str) a job name + :return: (Job) the job that has the name. + """ + for jb in self.ljobs: + if jb.name == name: + return jb + # the following is executed only if the job was not found + return None
+ +
[docs] def str_of_length(self, text, length): + """ + Takes a string text of any length and returns + the most close string of length "length". + + :param text: (str) any string + :param length: (int) a length for the returned string + :return: (str) the most close string of length "length" + """ + if len(text) > length: + text_out = text[:length-3] + '...' + else: + diff = length - len(text) + before = " " * (diff//2) + after = " " * (diff//2 + diff%2) + text_out = before + text + after + + return text_out
+ +
[docs] def display_status(self, len_col): + """ + Takes a lenght and construct the display of the current status + of the jobs in an array that has a column for each host. + It displays the job that is currently running on the host of the column. + + :param len_col: (int) the size of the column + :return: None + """ + display_line = "" + for host_port in self.lhosts: + jb = self.is_occupied(host_port) + if not jb: # nothing running on the host + empty = self.str_of_length("empty", len_col) + display_line += "|" + empty + else: + display_line += "|" + UTS.info( + self.str_of_length(jb.name, len_col)) + + self.logger.info("\r" + display_line + "|")
+ + +
[docs] def run_jobs(self): + """ + The main method. Runs all the jobs on every host. + For each host, at a given time, only one job can be running. + The jobs that have the field after (that contain the job that has + to be run before it) are run after the previous job. + This method stops when all the jobs are finished. + + :return: None + """ + # Print header + self.logger.info(_('Executing the jobs :\n')) + text_line = "" + for host_port in self.lhosts: + host = host_port[0] + port = host_port[1] + if port == 22: # default value + text_line += "|" + self.str_of_length(host, self.len_columns) + else: + text_line += "|" + self.str_of_length( + "("+host+", "+str(port)+")", self.len_columns) + + tiret_line = " " + "-"*(len(text_line)-1) + "\n" + self.logger.info(tiret_line + text_line + "|\n" + tiret_line) + + # The infinite loop that runs the jobs + l_jobs_not_started = src.deepcopy_list(self.ljobs) + while len(self._l_jobs_finished) != len(self.ljobs): + new_job_start = False + for host_port in self.lhosts: + + if self.is_occupied(host_port): + continue + + for jb in l_jobs_not_started: + if (jb.machine.host, jb.machine.port) != host_port: + continue + if jb.after == None: + jb.run() + l_jobs_not_started.remove(jb) + new_job_start = True + break + else: + jb_before = self.find_job_that_has_name(jb.after) + if jb_before is None: + jb.cancel() + msg = _("This job was not launched because its " + "father is not in the jobs list.") + jb.out = msg + jb.err = msg + break + if jb_before.has_finished(): + jb.run() + l_jobs_not_started.remove(jb) + new_job_start = True + break + self.cancel_dependencies_of_failing_jobs() + new_job_finished = self.update_jobs_states_list() + + if new_job_start or new_job_finished: + if self.gui: + self.gui.update_xml_files(self.ljobs) + # Display the current status + self.display_status(self.len_columns) + + # Make sure that the proc is not entirely busy + time.sleep(0.001) + + self.logger.info("\n" + tiret_line + "\n\n") + + if self.gui: + 
self.gui.update_xml_files(self.ljobs) + self.gui.last_update()
+ +
[docs] def write_all_results(self): + """Display all the jobs outputs. + + :return: None + """ + for jb in self.ljobs: + self.logger.info("#------- Results for job %s -------#\n" % jb.name) + jb.write_results() + self.logger.info("\n\n")
+ +
[docs]class Gui(object): + """ + Class to manage the the xml data that can be displayed in a browser + to see the jobs states + """ + def __init__(self, + xml_dir_path, + l_jobs, + l_jobs_not_today, + prefix, + logger, + file_boards=""): + """Initialization + + :param xml_dir_path: (str) + The path to the directory where to put the xml resulting files + :param l_jobs: (list) the list of jobs that run today + :param l_jobs_not_today: (list) + the list of jobs that do not run today + :param file_boards: (str) + the file path from which to read the expected boards + """ + # The logging instance + self.logger = logger + + # The prefix to add to the xml files : date_hour + self.prefix = prefix + + # The path of the csv files to read to fill the expected boards + self.file_boards = file_boards + + if file_boards != "": + today = datetime.date.weekday(datetime.date.today()) + self.parse_csv_boards(today) + else: + self.d_input_boards = {} + + # The path of the global xml file + self.xml_dir_path = xml_dir_path + # Initialize the xml files + self.global_name = "global_report" + xml_global_path = os.path.join(self.xml_dir_path, + self.global_name + ".xml") + self.xml_global_file = XMLMGR.XmlLogFile(xml_global_path, "JobsReport") + + # Find history for each job + self.history = {} + self.find_history(l_jobs, l_jobs_not_today) + + # The xml files that corresponds to the boards. + # {name_board : xml_object}} + self.d_xml_board_files = {} + + # Create the lines and columns + self.initialize_boards(l_jobs, l_jobs_not_today) + + # Write the xml file + self.update_xml_files(l_jobs) + +
[docs] def add_xml_board(self, name): + """ + Add a board to the board list + + :param name: (str) the board name + """ + xml_board_path = os.path.join(self.xml_dir_path, name + ".xml") + self.d_xml_board_files[name] = XMLMGR.XmlLogFile(xml_board_path,"JobsReport") + self.d_xml_board_files[name].add_simple_node("distributions") + self.d_xml_board_files[name].add_simple_node("applications") + self.d_xml_board_files[name].add_simple_node("board", text=name)
+ +
[docs] def initialize_boards(self, l_jobs, l_jobs_not_today): + """ + Get all the first information needed for each file and write the + first version of the files + + :param l_jobs: (list) the list of jobs that run today + :param l_jobs_not_today: (list) the list of jobs that do not run today + """ + # Get the boards to fill and put it in a dictionary + # {board_name : xml instance corresponding to the board} + for job in l_jobs + l_jobs_not_today: + board = job.board + if (board is not None and + board not in self.d_xml_board_files.keys()): + self.add_xml_board(board) + + # Verify that the boards given as input are done + for board in list(self.d_input_boards.keys()): + if board not in self.d_xml_board_files: + self.add_xml_board(board) + root_node = self.d_xml_board_files[board].xmlroot + XMLMGR.append_node_attrib(root_node, {"input_file" : self.file_boards}) + + # Loop over all jobs in order to get the lines and columns for each + # xml file + d_dist = {} + d_application = {} + for board in self.d_xml_board_files: + d_dist[board] = [] + d_application[board] = [] + + l_hosts_ports = [] + + ASNODE = XMLMGR.add_simple_node # shortcut + + for job in l_jobs + l_jobs_not_today: + + if (job.machine.host, job.machine.port) not in l_hosts_ports: + l_hosts_ports.append((job.machine.host, job.machine.port)) + + distrib = job.machine.distribution + application = job.application + + board_job = job.board + if board is None: + continue + for board in self.d_xml_board_files: + if board_job == board: + if (distrib not in [None, ''] and + distrib not in d_dist[board]): + d_dist[board].append(distrib) + ASNODE( self.d_xml_board_files[board].xmlroot.find('distributions'), + "dist", attrib={"name" : distrib} ) + + if board_job == board: + if (application not in [None, ''] and + application not in d_application[board]): + d_application[board].append(application) + ASNODE( self.d_xml_board_files[board].xmlroot.find('applications'), + "application", attrib={"name" : application} ) + 
+ # Verify that there are no missing application or distribution in the + # xml board files (regarding the input boards) + for board in self.d_xml_board_files: + l_dist = d_dist[board] + if board not in self.d_input_boards.keys(): + continue + for dist in self.d_input_boards[board]["rows"]: + if dist not in l_dist: + ASNODE( self.d_xml_board_files[board].xmlroot.find('distributions'), + "dist", attrib={"name" : dist} ) + l_appli = d_application[board] + for appli in self.d_input_boards[board]["columns"]: + if appli not in l_appli: + ASNODE( self.d_xml_board_files[board].xmlroot.find('applications'), + "application", attrib={"name" : appli} ) + + # Initialize the hosts_ports node for the global file + self.xmlhosts_ports = self.xml_global_file.add_simple_node( "hosts_ports") + for host, port in l_hosts_ports: + host_port = "%s:%i" % (host, port) + ASNODE(self.xmlhosts_ports, "host_port", attrib={"name" : host_port}) + + # Initialize the jobs node in all files + for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()): + xml_jobs = xml_file.add_simple_node("jobs") + # Get the jobs present in the config file but + # that will not be launched today + self.put_jobs_not_today(l_jobs_not_today, xml_jobs) + + # add also the infos node + xml_file.add_simple_node( + "infos", attrib={"name" : "last update", "JobsCommandStatus" : "running"} ) + + # and put the history node + history_node = xml_file.add_simple_node("history") + name_board = os.path.basename(xml_file.logFile)[:-len(".xml")] + # serach for board files + expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$" + oExpr = re.compile(expression) + # Get the list of xml borad files that are in the log directory + for file_name in os.listdir(self.xml_dir_path): + if oExpr.search(file_name): + date = os.path.basename(file_name).split("_")[0] + file_path = os.path.join(self.xml_dir_path, file_name) + ASNODE(history_node, "link", text=file_path, attrib={"date" : date}) + + + # Find in each board 
the squares that needs to be filled regarding the + # input csv files but that are not covered by a today job + for board in self.d_input_boards.keys(): + xml_root_board = self.d_xml_board_files[board].xmlroot + # Find the missing jobs for today + xml_missing = ASNODE(xml_root_board, "missing_jobs") + for row, column in self.d_input_boards[board]["jobs"]: + found = False + for job in l_jobs: + if (job.application == column and + job.machine.distribution == row): + found = True + break + if not found: + ASNODE(xml_missing, "job", attrib={"distribution" : row, "application" : column }) + # Find the missing jobs not today + xml_missing_not_today = ASNODE( xml_root_board, "missing_jobs_not_today") + for row, column in self.d_input_boards[board]["jobs_not_today"]: + found = False + for job in l_jobs_not_today: + if (job.application == column and + job.machine.distribution == row): + found = True + break + if not found: + ASNODE( xml_missing_not_today, "job", + attrib={"distribution" : row, "application" : column } )
+ +
[docs] def find_history(self, l_jobs, l_jobs_not_today): + """ + find, for each job, in the existent xml boards the results for the job. + Store the results in the dictionary + self.history = {name_job : list of (date, status, list links)} + + :param l_jobs: (list) + the list of jobs to run today + :param l_jobs_not_today: (list) + the list of jobs that do not run today + """ + # load the all the history + expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$" + oExpr = re.compile(expression) + # Get the list of global xml that are in the log directory + l_globalxml = [] + for file_name in os.listdir(self.xml_dir_path): + if oExpr.search(file_name): + file_path = os.path.join(self.xml_dir_path, file_name) + try: + global_xml = XMLMGR.ReadXmlFile(file_path) + l_globalxml.append(global_xml) + except Exception as e: + msg = _("The file '%s' can not be read, it will be ignored\n%s") % \ + (file_path, e) + self.logger.warning("%s\n" % msg) + + # Construct the dictionnary self.history + for job in l_jobs + l_jobs_not_today: + l_links = [] + for global_xml in l_globalxml: + date = os.path.basename(global_xml.filePath).split("_")[0] + global_root_node = global_xml.xmlroot.find("jobs") + job_node = XMLMGR.find_node_by_attrib( + global_root_node, "job", "name", job.name ) + if job_node: + if job_node.find("remote_log_file_path") is not None: + link = job_node.find("remote_log_file_path").text + res_job = job_node.find("res").text + if link != "nothing": + l_links.append((date, res_job, link)) + l_links = sorted(l_links, reverse=True) + self.history[job.name] = l_links
+ +
[docs] def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs): + """ + Get all the first information needed for each file and write the + first version of the files + + :param xml_node_jobs: (etree.Element) + the node corresponding to a job + :param l_jobs_not_today: (list) + the list of jobs that do not run today + """ + + ASNODE = XMLMGR.add_simple_node # shortcut + + for job in l_jobs_not_today: + xmlj = ASNODE(xml_node_jobs, "job", attrib={"name" : job.name}) + ASNODE(xmlj, "application", job.application) + ASNODE(xmlj, "distribution", job.machine.distribution) + ASNODE(xmlj, "board", job.board) + ASNODE(xmlj, "commands", " ; ".join(job.commands)) + ASNODE(xmlj, "state", "Not today") + ASNODE(xmlj, "machine", job.machine.name) + ASNODE(xmlj, "host", job.machine.host) + ASNODE(xmlj, "port", str(job.machine.port)) + ASNODE(xmlj, "user", job.machine.user) + ASNODE(xmlj, "sat_path", job.machine.sat_path) + xml_history = ASNODE(xmlj, "history") + for i, (date, res_job, link) in enumerate(self.history[job.name]): + if i==0: + # tag the first one (the last one) + ASNODE( xml_history, "link", text=link, + attrib={"date" : date, "res" : res_job, "last" : "yes"} ) + else: + ASNODE( xml_history, "link", text=link, + attrib={"date" : date, "res" : res_job, "last" : "no"} )
+ +
[docs] def parse_csv_boards(self, today): + """ + Parse the csv file that describes the boards to produce and fill + the dict d_input_boards that contain the csv file contain + + :param today: (int) the current day of the week + """ + # open the csv file and read its content + l_read = [] + with open(self.file_boards, 'r') as f: + reader = csv.reader(f,delimiter=CSV_DELIMITER) + for row in reader: + l_read.append(row) + # get the delimiter for the boards (empty line) + boards_delimiter = [''] * len(l_read[0]) + # Make the list of boards, by splitting with the delimiter + l_boards = [list(y) for x, y in itertools.groupby(l_read, + lambda z: z == boards_delimiter) if not x] + + # loop over the csv lists of lines and get the rows, columns and jobs + d_boards = {} + for input_board in l_boards: + # get board name + board_name = input_board[0][0] + + # Get columns list + columns = input_board[0][1:] + + rows = [] + jobs = [] + jobs_not_today = [] + for line in input_board[1:]: + row = line[0] + rows.append(row) + for i, square in enumerate(line[1:]): + if square=='': + continue + days = square.split(DAYS_SEPARATOR) + days = [int(day) for day in days] + job = (row, columns[i]) + if today in days: + jobs.append(job) + else: + jobs_not_today.append(job) + + d_boards[board_name] = {"rows" : rows, + "columns" : columns, + "jobs" : jobs, + "jobs_not_today" : jobs_not_today} + + self.d_input_boards = d_boards
+ +
[docs] def update_xml_files(self, l_jobs): + """Write all the xml files with updated information about the jobs + + :param l_jobs: (list) the list of jobs that run today + """ + for xml_file in [self.xml_global_file] + list( + self.d_xml_board_files.values()): + self.update_xml_file(l_jobs, xml_file) + + # Write the file + self.write_xml_files()
+ +
[docs] def update_xml_file(self, l_jobs, xml_file): + """update information about the jobs for the file xml_file + + :param l_jobs: (list) the list of jobs that run today + :param xml_file: (xmlManager.XmlLogFile) + the xml instance to update + """ + + xml_node_jobs = xml_file.xmlroot.find('jobs') + # Update the job names and status node + for job in l_jobs: + # Find the node corresponding to the job and delete it + # in order to recreate it + for xmljob in xml_node_jobs.findall('job'): + if xmljob.attrib['name'] == job.name: + xml_node_jobs.remove(xmljob) + + T0 = str(job._T0) + if T0 != "-1": + T0 = time.strftime('%Y-%m-%d %H:%M:%S', + time.localtime(job._T0)) + Tf = str(job._Tf) + if Tf != "-1": + Tf = time.strftime('%Y-%m-%d %H:%M:%S', + time.localtime(job._Tf)) + + # recreate the job node + xmlj = ASNODE(xml_node_jobs, "job", attrib={"name" : job.name}) + ASNODE(xmlj, "machine", job.machine.name) + ASNODE(xmlj, "host", job.machine.host) + ASNODE(xmlj, "port", str(job.machine.port)) + ASNODE(xmlj, "user", job.machine.user) + xml_history = ASNODE(xmlj, "history") + for date, res_job, link in self.history[job.name]: + ASNODE( xml_history, "link", text=link, + attrib={"date" : date, "res" : res_job} ) + + ASNODE(xmlj, "sat_path", job.machine.sat_path) + ASNODE(xmlj, "application", job.application) + ASNODE(xmlj, "distribution", job.machine.distribution) + ASNODE(xmlj, "board", job.board) + ASNODE(xmlj, "timeout", str(job.timeout)) + ASNODE(xmlj, "commands", " ; ".join(job.commands)) + ASNODE(xmlj, "state", job.get_status()) + ASNODE(xmlj, "begin", T0) + ASNODE(xmlj, "end", Tf) + ASNODE(xmlj, "out", UTS.cleancolor(job.out)) + ASNODE(xmlj, "err", UTS.cleancolor(job.err)) + ASNODE(xmlj, "res", str(job.res_job)) + if len(job.remote_log_files) > 0: + ASNODE(xmlj, "remote_log_file_path", job.remote_log_files[0]) + else: + ASNODE(xmlj, "remote_log_file_path", "nothing") + # Search for the test log if there is any + l_test_log_files = 
self.find_test_log(job.remote_log_files) + xml_test = ASNODE(xmlj, "test_log_file_path") + for test_log_path, res_test, nb_fails in l_test_log_files: + test_path_node = ASNODE(xml_test, "path", test_log_path) + test_path_node.attrib["res"] = res_test + test_path_node.attrib["nb_fails"] = nb_fails + + xmlafter = ASNODE(xmlj, "after", job.after) + # get the job father + if job.after is not None: + job_father = None + for jb in l_jobs: + if jb.name == job.after: + job_father = jb + + if (job_father is not None and + len(job_father.remote_log_files) > 0): + link = job_father.remote_log_files[0] + else: + link = "nothing" + XMLMGR.append_node_attrib(xmlafter, {"link" : link}) + + # Verify that the job is to be done today regarding the input csv + # files + if job.board and job.board in self.d_input_boards.keys(): + found = False + for dist, appli in self.d_input_boards[job.board]["jobs"]: + if (job.machine.distribution == dist + and job.application == appli): + found = True + ASNODE(xmlj, "extra_job", "no") + break + if not found: + ASNODE(xmlj, "extra_job", "yes") + + + # Update the date + xml_node_infos = xml_file.xmlroot.find('infos') + XMLMGR.append_node_attrib( xml_node_infos, + attrib={"value" : datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")} )
+ + +
[docs] def find_test_log(self, l_remote_log_files): + """ + Find if there is a test log (board) in the remote log files and + the path to it. There can be several test command, + so the result is a list. + + :param l_remote_log_files: (list) the list of all remote log files + :return: (list) + the list of tuples (test log files path, res of the command) + """ + res = [] + for file_path in l_remote_log_files: + dirname = os.path.basename(os.path.dirname(file_path)) + file_name = os.path.basename(file_path) + regex = UTS._log_all_command_file_expression + oExpr = re.compile(regex) + if dirname == "TEST" and oExpr.search(file_name): + # find the res of the command + prod_node = etree.parse(file_path).getroot().find("product") + res_test = prod_node.attrib["global_res"] + # find the number of fails + testbase_node = prod_node.find("tests").find("testbase") + nb_fails = int(testbase_node.attrib["failed"]) + # put the file path, the res of the test command and the number + # of fails in the output + res.append((file_path, res_test, nb_fails)) + + return res
+ +
[docs] def last_update(self, finish_status = "finished"): + """update information about the jobs for the file xml_file + + :param l_jobs: (list) the list of jobs that run today + :param xml_file: (xmlManager.XmlLogFile) the xml instance to update + """ + for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()): + xml_node_infos = xml_file.xmlroot.find('infos') + XMLMGR.append_node_attrib(xml_node_infos, + attrib={"JobsCommandStatus" : finish_status}) + # Write the file + self.write_xml_files()
+ +
[docs] def write_xml_file(self, xml_file, stylesheet): + """ + Write one xml file and the same file with prefix + """ + xml_file.write_tree(stylesheet) + file_path = xml_file.logFile + file_dir = os.path.dirname(file_path) + file_name = os.path.basename(file_path) + file_name_with_prefix = self.prefix + "_" + file_name + xml_file.write_tree(stylesheet, os.path.join(file_dir, + file_name_with_prefix))
+ +
[docs] def write_xml_files(self): + """ + Write the xml files + """ + self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL) + for xml_file in self.d_xml_board_files.values(): + self.write_xml_file(xml_file, STYLESHEET_BOARD)
+ +
[docs]def get_config_file_path(job_config_name, l_cfg_dir): + found = False + file_jobs_cfg = None + if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"): + found = True + file_jobs_cfg = job_config_name + else: + for cfg_dir in l_cfg_dir: + file_jobs_cfg = os.path.join(cfg_dir, job_config_name) + if not file_jobs_cfg.endswith('.pyconf'): + file_jobs_cfg += '.pyconf' + + if not os.path.exists(file_jobs_cfg): + continue + else: + found = True + break + return found, file_jobs_cfg
+ +
[docs]def develop_factorized_jobs(config_jobs): + """update information about the jobs for the file xml_file + + :param config_jobs: (Config) + the config corresponding to the jos description + """ + developed_jobs_list = [] + for jb in config_jobs.jobs: + # case where the jobs are not developed + if type(jb.machine) == type(""): + developed_jobs_list.append(jb) + continue + # Case where the jobs must be developed + # Example: + # machine : ["CO7.2 physique", ["CO6.4 physique", $MONDAY, $TUESDAY ], "FD22"] + name_job = jb.name + for machine in jb.machine: + new_job = PYCONF.deepCopyMapping(jb) + # case where there is a jobs on the machine corresponding to all + # days in when variable. + if type(machine) == type(""): + new_job.machine = machine + new_job.name = name_job + " / " + machine + else: + # case the days are re defined + new_job.machine = machine[0] + new_job.name = name_job + " / " + machine[0] + new_job.when = machine[1:] + developed_jobs_list.append(new_job) + + config_jobs.jobs = developed_jobs_list
+ +
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/launcher.html b/doc/build/html/_modules/commands/launcher.html new file mode 100644 index 0000000..6d6ad7d --- /dev/null +++ b/doc/build/html/_modules/commands/launcher.html @@ -0,0 +1,365 @@ + + + + + + + + commands.launcher — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.launcher

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import platform
+import shutil
+import getpass
+import subprocess
+import stat
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """ + The launcher command generates a SALOME launcher. + + examples: + >> sat launcher SALOME + """ + + name = "launcher" + +
[docs] def getParser(self): + """Define all possible options for command 'sat launcher <options>'""" + parser = self.getParserWithHelp() + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat launcher <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + # Verify that the command was called with an application + src.check_config_has_application( config ) + + # Determine the launcher name (from option, profile section or by default "salome") + if options.name: + launcher_name = options.name + else: + launcher_name = src.get_launcher_name(config) + + # set the launcher path + launcher_path = config.APPLICATION.workdir + + # Copy a catalog if the option is called + additional_environ = {} + if options.catalog: + additional_environ = copy_catalog(config, options.catalog) + + # Generate a catalog of resources if the corresponding option was called + if options.gencat: + catalog_path = generate_catalog(options.gencat.split(","), config, logger) + additional_environ = copy_catalog(config, catalog_path) + + # Generate the launcher + launcherPath = generate_launch_file( config, + logger, + launcher_name, + launcher_path, + additional_env = additional_environ ) + + return 0
+ + +
[docs]def generate_launch_file(config, + logger, + launcher_name, + pathlauncher, + display=True, + additional_env={}): + """Generates the launcher file. + + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :param launcher_name: (str) The name of the launcher to generate + :param pathlauncher: (str) The path to the launcher to generate + :param display: (bool) If False, do not print anything in the terminal + :param additional_env: (dict) + The dict giving additional environment variables + :return: (str) The launcher file path. + """ + + # Compute the default launcher path if it is not provided in pathlauncher + # parameter + filepath = os.path.join(pathlauncher, launcher_name) + + # Remove the file if it exists in order to replace it + if os.path.exists(filepath): + os.remove(filepath) + + # Add the APPLI variable + additional_env['APPLI'] = filepath + + + # get KERNEL bin installation path + # (in order for the launcher to get python salomeContext API) + kernel_cfg = src.product.get_product_config(config, "KERNEL") + if not src.product.check_installation(kernel_cfg): + raise Exception(_("KERNEL is not installed")) + kernel_root_dir = kernel_cfg.install_dir + + # set kernel bin dir (considering fhs property) + if src.get_property_in_product_cfg(kernel_cfg, "fhs"): + bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") + else: + bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") + + # Get the launcher template + withProfile = src.fileEnviron.withProfile\ + .replace("BIN_KERNEL_INSTALL_DIR", bin_kernel_install_dir)\ + .replace("KERNEL_INSTALL_DIR", kernel_root_dir) + + before, after = withProfile.split( + "# here your local standalone environment\n") + + # create an environment file writer + writer = src.environment.FileEnvWriter(config, + logger, + pathlauncher, + src_root=None, + env_info=None) + + # Display some information + if display: + # Write the 
launcher file + msg = _("Generating launcher for %s :\n %s\n") % \ + (UTS.label(config.VARS.application), UTS.label(filepath)) + logger.info(msg) + + # open the file and write into it + launch_file = open(filepath, "w") + launch_file.write(before) + # Write + writer.write_cfgForPy_file(launch_file, additional_env=additional_env) + launch_file.write(after) + launch_file.close() + + # change the rights in order to make the file executable for everybody + os.chmod(filepath, + stat.S_IRUSR | + stat.S_IRGRP | + stat.S_IROTH | + stat.S_IWUSR | + stat.S_IXUSR | + stat.S_IXGRP | + stat.S_IXOTH) + return filepath
+ + +
[docs]def generate_catalog(machines, config, logger): + """Generates an xml catalog file from a list of machines. + + :param machines: (list) The list of machines to add in the catalog + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (str) The catalog file path. + """ + # remove empty machines + machines = map(lambda l: l.strip(), machines) + machines = filter(lambda l: len(l) > 0, machines) + + # log something + logger.debug(" %s = %s\n" % \ + (_("Generate Resources Catalog"), ", ".join(machines)) ) + + # The command to execute on each machine in order to get some information + cmd = '"cat /proc/cpuinfo | grep MHz ; cat /proc/meminfo | grep MemTotal"' + user = getpass.getuser() + + # Create the catalog path + catfile = src.get_tmp_filename(config, "CatalogResources.xml") + catalog = file(catfile, "w") + + # Write into it + catalog.write("<!DOCTYPE ResourcesCatalog>\n<resources>\n") + for k in machines: + logger.debug(" ssh %s " % (k + " ").ljust(20, '.')) + + # Verify that the machine is accessible + ssh_cmd = 'ssh -o "StrictHostKeyChecking no" %s %s' % (k, cmd) + p = subprocess.Popen(ssh_cmd, shell=True, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + p.wait() + + if p.returncode != 0: # The machine is not accessible + logger.error("<KO>: The machine %s is not accessible:\n%s\n" % k + + UTS.red(p.stderr.read())) + else: + # The machine is accessible, write the corresponding section on + # the xml file + logger.debug("<OK>: The machine %s is accessible:\n" % k) + lines = p.stdout.readlines() + freq = lines[0][:-1].split(':')[-1].split('.')[0].strip() + nb_proc = len(lines) -1 + memory = lines[-1].split(':')[-1].split()[0].strip() + memory = int(memory) / 1000 + + catalog.write(" <machine\n") + catalog.write(" protocol=\"ssh\"\n") + catalog.write(" nbOfNodes=\"1\"\n") + catalog.write(" mode=\"interactif\"\n") + catalog.write(" 
OS=\"LINUX\"\n") + catalog.write(" CPUFreqMHz=\"%s\"\n" % freq) + catalog.write(" nbOfProcPerNode=\"%s\"\n" % nb_proc) + catalog.write(" memInMB=\"%s\"\n" % memory) + catalog.write(" userName=\"%s\"\n" % user) + catalog.write(" name=\"%s\"\n" % k) + catalog.write(" hostname=\"%s\"\n" % k) + catalog.write(" >\n") + catalog.write(" </machine>\n") + + catalog.write("</resources>\n") + catalog.close() + return catfile
+ +
[docs]def copy_catalog(config, catalog_path): + """Copy the xml catalog file into the right location + + :param config: (Config) The global configuration + :param catalog_path: (str) the catalog file path + :return: (dict) + The environment dictionary corresponding to the file path. + """ + # Verify the existence of the file + if not os.path.exists(catalog_path): + raise IOError(_("Catalog not found: %s") % catalog_path) + # Get the application directory and copy catalog inside + out_dir = config.APPLICATION.workdir + new_catalog_path = os.path.join(out_dir, "CatalogResources.xml") + # Do the copy + shutil.copy(catalog_path, new_catalog_path) + additional_environ = {'USER_CATALOG_RESOURCES_FILE' : new_catalog_path} + return additional_environ
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/log.html b/doc/build/html/_modules/commands/log.html new file mode 100644 index 0000000..e658aff --- /dev/null +++ b/doc/build/html/_modules/commands/log.html @@ -0,0 +1,483 @@ + + + + + + + + commands.log — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.log

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import os
+import shutil
+import re
+import glob
+import datetime
+import stat
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+import src.xmlManager as XMLMGR
+import src.system as SYSS
+from src.salomeTools import _BaseCommand
+
+# Compatibility python 2/3 for input function
+# input stays input for python 3 and input = raw_input for python 2
+try: 
+    input = raw_input
+except NameError: 
+    pass
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """ + The log command gives access to the logs produced by the salomeTools commands. + + examples: + >> sat log + """ + + name = "log" + +
[docs] def getParser(self): + """Define all options for command 'sat log <options>'""" + parser = self.getParserWithHelp() + parser.add_option( + 't', 'terminal', 'boolean', 'terminal', + "Optional: Show sat instances logs, no browser.") + parser.add_option( + 'l', 'last', 'boolean', 'last', + "Show the log of the last launched command.") + parser.add_option( + 'x', 'last_terminal', 'boolean', 'last_terminal', + """Optional: Show compile log of products, no browser.""") + parser.add_option( + 'f', 'full', 'boolean', 'full', + "Optional: Show the logs of ALL the launched commands.") + parser.add_option( + 'c', 'clean', 'int', 'clean', + "Optional: Erase the n most ancient log files.") + parser.add_option( + 'n', 'no_browser', 'boolean', 'no_browser', + "Optional: Do not launch the browser at the end of the command. Only update the hat file.") + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat log <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + + # get the log directory. + logDir = UTS.get_log_path(config) + + # Print a header + nb_files_log_dir = len(glob.glob(os.path.join(logDir, "*"))) + info = [("log directory", logDir), + ("number of log files", nb_files_log_dir)] + UTS.logger_info_tuples(logger, info) + + # If the clean options is invoked, + # do nothing but deleting the concerned files. + if options.clean: + nbClean = options.clean + # get the list of files to remove + lLogs = UTS.list_log_file(logDir, UTS._log_all_command_file_expression) + nbLogFiles = len(lLogs) + # Delete all if the invoked number is bigger than the number of log files + if nbClean > nbLogFiles: + nbClean = nbLogFiles + # Get the list to delete and do the removing + lLogsToDelete = sorted(lLogs)[:nbClean] + for filePath, __, __, __, __, __, __ in lLogsToDelete: + # remove the xml log file + remove_log_file(filePath, logger) + # remove also the corresponding txt file in OUT directory + txtFilePath = os.path.join(os.path.dirname(filePath), + 'OUT', + os.path.basename(filePath)[:-len('.xml')] + '.txt') + remove_log_file(txtFilePath, logger) + # remove also the corresponding pyconf (do not exist 2016-06) + # file in OUT directory + pyconfFilePath = os.path.join(os.path.dirname(filePath), + 'OUT', + os.path.basename(filePath)[:-len('.xml')] + '.pyconf') + remove_log_file(pyconfFilePath, logger) + + msg = "%i logs deleted" % nbClean + 
logger.info("<OK>\n%s\n" % msg) + return RCO.ReturnCode("OK", msg) + + # determine the commands to show in the hat log + notShownCommands = list(config.INTERNAL.log.not_shown_commands) + if options.full: + notShownCommands = [] + + # Find the stylesheets Directory and files + xslDir = os.path.join(config.VARS.srcDir, 'xsl') + xslCommand = os.path.join(xslDir, "command.xsl") + xslHat = os.path.join(xslDir, "hat.xsl") + xsltest = os.path.join(xslDir, "test.xsl") + imgLogo = os.path.join(xslDir, "LOGO-SAT.png") + + # copy the stylesheets in the log directory + # OP We use copy instead of copy2 to update the creation date + # So we can clean the LOGS directories easily + shutil.copy(xslCommand, logDir) + shutil.copy(xslHat, logDir) + UTS.ensure_path_exists(os.path.join(logDir, "TEST")) + shutil.copy(xsltest, os.path.join(logDir, "TEST")) + shutil.copy(imgLogo, logDir) + + # If the last option is invoked, just, show the last log file + if options.last_terminal: + src.check_config_has_application(config) + rootLogDir = os.path.join(config.APPLICATION.workdir, 'LOGS') + UTS.ensure_path_exists(rootLogDir) + log_dirs = os.listdir(rootLogDir) + if log_dirs == []: + raise Exception("log directory empty") + log_dirs= sorted(log_dirs) + res = show_last_logs(logger, config, log_dirs) + return res + + # If the last option is invoked, just, show the last log file + if options.last: + lastLogFilePath = get_last_log_file( + logDir, notShownCommands + ["config"]) + if lastLogFilePath is None: + raise Exception("last log file not found in '%s'" % logDir) + if options.terminal: + # Show the log corresponding to the selected command call + res = print_log_command_in_terminal(lastLogFilePath, logger) + else: + # open the log xml file in the user editor + res = SYSS.show_in_editor(config.USER.browser, + lastLogFilePath, logger) + return res + + # If the user asks for a terminal display + if options.terminal: + # Parse the log directory in order to find + # all the files corresponding to 
the commands + lLogs = UTS.list_log_file(logDir, UTS._log_macro_command_file_expression) + lLogsFiltered = [] + for filePath, __, date, __, hour, cmd, __ in lLogs: + showLog = UTS.show_command_log(filePath, cmd, config.VARS.application, notShownCommands) + # showLog, cmdAppli, __ = UTS.show_command_log(filePath, cmd, + # config.VARS.application, notShownCommands) + cmdAppli = showLog.getValue()[0] + if showLog.isOk(): + lLogsFiltered.append((filePath, date, hour, cmd, cmdAppli)) + + lLogsFiltered = sorted(lLogsFiltered) + nb_logs = len(lLogsFiltered) + index = 0 + # loop on all files and print it with date, time and command name + for __, date, hour, cmd, cmdAppli in lLogsFiltered: + num = UTS.label("%2d" % (nb_logs - index)) + logger.info("%s: %13s %s %s %s\n" % (num, cmd, date, hour, cmdAppli)) + index += 1 + + # ask the user what for what command he wants to be displayed + x = -1 + while (x < 0): + x = ask_value(nb_logs) + if x > 0: + index = len(lLogsFiltered) - int(x) + # Show the log corresponding to the selected command call + print_log_command_in_terminal(lLogsFiltered[index][0], logger) + x = 0 + + return RCO.ReturnCode("OK", "end from user") + + # Create or update the hat xml that gives access to all the commands log files + logger.info(_("Generating the hat log file (can be long) ... ")) + xmlHatFilePath = os.path.join(logDir, 'hat.xml') + UTS.update_hat_xml(logDir, + application = config.VARS.application, + notShownCommands = notShownCommands) + logger.info("<OK>\n") + + # open the hat xml in the user editor + if not options.no_browser: + logger.info(_("Opening the log file")) + res = SYSS.show_in_editor(config.USER.browser, xmlHatFilePath, logger) + return res + + return RCO.ReturnCode("OK", "option no browser")
+ +
[docs]def get_last_log_file(logDir, notShownCommands): + """ + Used in case of last option. + Get the last log command file path. + + :param logDir: (str) The directory where to search the log files + :param notShownCommands: (list) the list of commands to ignore + :return: (str) the path to the last log file + """ + last = (_, 0) + for fileName in os.listdir(logDir): + # YYYYMMDD_HHMMSS_namecmd.xml + sExpr = UTS._log_macro_command_file_expression + oExpr = re.compile(sExpr) + if oExpr.search(fileName): + # get date and hour and format it + date_hour_cmd = fileName.split('_') + datehour = date_hour_cmd[0] + date_hour_cmd[1] + cmd = date_hour_cmd[2] + if cmd in notShownCommands: + continue + if int(datehour) > last[1]: + last = (fileName, int(datehour)) + if last[1] != 0: + res = os.path.join(logDir, last[0]) + else: + res = None #no log file + return res
+ +
[docs]def remove_log_file(filePath, logger): + """if it exists, print a warning and remove the input file + + :param filePath: the path of the file to delete + :param logger: (Logger) the logger instance to use for the print + """ + if os.path.exists(filePath): + logger.debug(UTS.red("Removing %s\n" % filePath)) + os.remove(filePath)
+ + + +
[docs]def getMaxFormat(aListOfStr, offset=1): + """returns format for columns width as '%-30s"' for example""" + maxLen = max([len(i) for i in aListOfStr]) + offset + fmt = "%-" + str(maxLen) + "s" # "%-30s" for example + return fmt, maxLen
+ +
[docs]def show_last_logs(logger, config, log_dirs): + """Show last compilation logs""" + log_dir = os.path.join(config.APPLICATION.workdir, 'LOGS') + # list the logs + nb = len(log_dirs) + fmt1, maxLen = getMaxFormat(log_dirs, offset=1) + fmt2 = "%s: " + fmt1 + "\n" # "%s: %-30s\n" for example + nb_cols = 5 + # line ~ no more 100 chars + if maxLen > 20: nb_cols = 4 + if maxLen > 25: nb_cols = 3 + if maxLen > 33: nb_cols = 2 + if maxLen > 50: nb_cols = 1 + col_size = (nb / nb_cols) + 1 + for index in range(0, col_size): + msg = "" + for i in range(0, nb_cols): + k = index + i * col_size + if k < nb: + l = log_dirs[k] + str_indice = UTS.label("%2d" % (k+1)) + log_name = l + msg += fmt2 % (str_indice, log_name) + logger.info(msg + "\n") + + # loop till exit + x = -1 + while (x < 0): + x = ask_value(nb) + if x > 0: + product_log_dir = os.path.join(log_dir, log_dirs[x-1]) + show_product_last_logs(logger, config, product_log_dir)
+ +
[docs]def show_product_last_logs(logger, config, product_log_dir): + """Show last compilation logs of a product""" + # sort the files chronologically + l_time_file = [] + for file_n in os.listdir(product_log_dir): + my_stat = os.stat(os.path.join(product_log_dir, file_n)) + l_time_file.append( + (datetime.datetime.fromtimestamp(my_stat[stat.ST_MTIME]), file_n)) + + # display the available logs + for i, (__, file_name) in enumerate(sorted(l_time_file)): + str_indice = UTS.label("%2d" % (i+1)) + opt = [] + my_stat = os.stat(os.path.join(product_log_dir, file_name)) + opt.append(str(datetime.datetime.fromtimestamp(my_stat[stat.ST_MTIME]))) + + opt.append("(%8.2f)" % (my_stat[stat.ST_SIZE] / 1024.0)) + logger.info(" %-35s" % " ".join(opt)) + logger.info("%s: %-30s\n" % (str_indice, file_name)) + + # loop till exit + x = -1 + while (x < 0): + x = ask_value(len(l_time_file)) + if x > 0: + (__, file_name) = sorted(l_time_file)[x-1] + log_file_path = os.path.join(product_log_dir, file_name) + SYSS.show_in_editor(config.USER.editor, log_file_path, logger)
+ +
[docs]def ask_value(nb): + """Ask for an int n. 0<n<nb + + :param nb: (int) The maximum value of the value to be returned by the user. + :return: (int) + the value entered by the user. Return -1 if it is not as expected + """ + try: + # ask for a value + rep = input(_("Which one (enter or 0 to quit)? ")) + # Verify it is on the right range + if len(rep) == 0: + x = 0 + else: + x = int(rep) + if x > nb: + x = -1 + except: + x = -1 + + return x
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/make.html b/doc/build/html/_modules/commands/make.html new file mode 100644 index 0000000..3134317 --- /dev/null +++ b/doc/build/html/_modules/commands/make.html @@ -0,0 +1,351 @@ + + + + + + + + commands.make — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.make

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import os
+import re
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """ + The make command executes the 'make' command in the build directory. + + examples: + >> sat make SALOME --products Python,KERNEL,GUI + """ + + name = "make" + +
[docs] def getParser(self): + """Define all options for the command 'sat make <options>'""" + parser = self.getParserWithHelp() + parser.add_option('p', 'products', 'list2', 'products', + _('Optional: products to configure. This option can be' + ' passed several time to configure several products.')) + parser.add_option('o', 'option', 'string', 'option', + _('Optional: Option to add to the make command.'), "") + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat make <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + # check that the command has been called with an application + src.check_config_has_application( config ) + + # Get the list of products to treat + products_infos = get_products_list(options, config, logger) + + # Print some informations + logger.info( + _('Executing the make command in the build directories of the application %s\n') % \ + UTS.label(config.VARS.application)) + + info = [(_("BUILD directory"), os.path.join(config.APPLICATION.workdir, 'BUILD'))] + UTS.logger_info_tuples(logger, info) + + # Call the function that will loop over all the products and execute + # the right command(s) + if options.option is None: + options.option = "" + res = make_all_products(config, products_infos, options.option, logger) + + # Print the final state + nb_products = len(products_infos) + if res == 0: + final_status = "OK" + else: + final_status = "KO" + + msg = _("\nMake: <%s> (%d/%d)\n") % (final_status, nb_products - res, nb_products) + logger.info(msg) + + return RCO.ReturnCode(final_status, msg)
+ + +
[docs]def get_products_list(options, cfg, logger): + """ + method that gives the product list with their informations from + configuration regarding the passed options. + + :param options: (Options) + The Options instance that stores the commands arguments + :param cfg: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (list) The list of tuples (product name, product_informations). + """ + # Get the products to be prepared, regarding the options + if options.products is None: + # No options, get all products sources + products = cfg.APPLICATION.products + else: + # if option --products, check that all products of the command line + # are present in the application. + products = options.products + for p in products: + if p not in cfg.APPLICATION.products: + raise Exception(_("Product %(product)s " + "not defined in application %(application)s") % + { 'product': p, 'application': cfg.VARS.application} ) + + # Construct the list of tuple containing + # the products name and their definition + products_infos = src.product.get_products_infos(products, cfg) + + products_infos = [pi for pi in products_infos if not( + src.product.product_is_native(pi[1]) or + src.product.product_is_fixed(pi[1]))] + + return products_infos
+ +
[docs]def log_step(logger, header, step): + msg = "\r%s%s" % (header, " " * 20) + msg += "\r%s%s" % (header, step) + logger.info(msg) + logger.debug("\n==== %s \n" % UTS.info(step))
+ +
[docs]def log_res_step(logger, res): + if res == 0: + logger.debug("<OK>\n") + else: + logger.debug("<KO>\n")
+ + +
[docs]def make_all_products(config, products_infos, make_option, logger): + """ + Execute the proper configuration commands + in each product build directory. + + :param config: (Config) The global configuration + :param products_info: (list) + List of (str, Config) => (product_name, product_info) + :param make_option: (str) The options to add to the command + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) the number of failing commands. + """ + res = 0 + for p_name_info in products_infos: + res_prod = make_product(p_name_info, make_option, config, logger) + if res_prod != 0: + res += 1 + return res
+ +
[docs]def make_product(p_name_info, make_option, config, logger): + """ + Execute the proper configuration command(s) + in the product build directory. + + :param p_name_info: (tuple) (str, Config) => (product_name, product_info) + :param make_option: (str) The options to add to the command + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) 1 if it fails, else 0. + """ + + p_name, p_info = p_name_info + + # Logging + header = _("Make of %s") % UTS.label(p_name) + header += " %s " % ("." * (20 - len(p_name))) + logger.info(header) + + # Do nothing if he product is not compilable + if ("properties" in p_info and \ + "compilation" in p_info.properties and \ + p_info.properties.compilation == "no"): + log_step(logger, header, "ignored") + return 0 + + # Instantiate the class that manages all the construction commands + # like cmake, make, make install, make test, environment management, etc... 
+ builder = src.compilation.Builder(config, logger, p_info) + + # Prepare the environment + log_step(logger, header, "PREPARE ENV") + res_prepare = builder.prepare() + log_res_step(logger, res_prepare) + + # Execute buildconfigure, configure if the product is autotools + # Execute cmake if the product is cmake + len_end_line = 20 + + nb_proc, make_opt_without_j = get_nb_proc(p_info, config, make_option) + log_step(logger, header, "MAKE -j" + str(nb_proc)) + if src.architecture.is_windows(): + res = builder.wmake(nb_proc, make_opt_without_j) + else: + res = builder.make(nb_proc, make_opt_without_j) + log_res_step(logger, res) + + # Log the result + if res > 0: + logger.info("\r%s%s" % (header, " " * len_end_line)) + logger.info("\r" + header + "<KO>") + logger.debug("==== <KO> in make of %s\n" % p_name) + else: + logger.info("\r%s%s" % (header, " " * len_end_line)) + logger.info("\r" + header + "<OK>") + logger.debug("==== <OK> in make of %s\n" % p_name) + logger.info("\n") + return res
+ +
[docs]def get_nb_proc(product_info, config, make_option): + + opt_nb_proc = None + new_make_option = make_option + if "-j" in make_option: + oExpr = re.compile("-j[0-9]+") + found = oExpr.search(make_option) + opt_nb_proc = int(re.findall('\d+', found.group())[0]) + new_make_option = make_option.replace(found.group(), "") + + nbproc = -1 + if "nb_proc" in product_info: + # nb proc is specified in module definition + nbproc = product_info.nb_proc + if opt_nb_proc and opt_nb_proc < product_info.nb_proc: + # use command line value only if it is lower than module definition + nbproc = opt_nb_proc + else: + # nb proc is not specified in module definition + if opt_nb_proc: + nbproc = opt_nb_proc + else: + nbproc = config.VARS.nb_proc + + assert nbproc > 0 + return nbproc, new_make_option
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/makeinstall.html b/doc/build/html/_modules/commands/makeinstall.html new file mode 100644 index 0000000..3af8980 --- /dev/null +++ b/doc/build/html/_modules/commands/makeinstall.html @@ -0,0 +1,312 @@ + + + + + + + + commands.makeinstall — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.makeinstall

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """ + The makeinstall command executes the 'make install' command in the build directory. + In case of product constructed using a script (build_source : 'script'), + then the makeinstall command do nothing. + + examples: + >> sat makeinstall SALOME --products KERNEL,GUI + """ + + name = "makeinstall" + +
[docs] def getParser(self): + """Define all options for the command 'sat makeinstall <options>'""" + parser = self.getParserWithHelp() + parser.add_option('p', 'products', 'list2', 'products', + _('Optional: products to install. This option can be' + ' passed several time to install several products.')) + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat makeinstall <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + # check that the command has been called with an application + src.check_config_has_application( config ) + + # Get the list of products to treat + products_infos = get_products_list(options, config, logger) + + # Print some informations + logger.info(_('Executing the make install command in the build directories of the application %s\n') % \ + UTS.label(config.VARS.application)) + + info = [(_("BUILD directory"), + os.path.join(config.APPLICATION.workdir, 'BUILD'))] + UTS.logger_info_tuples(logger, info) + + # Call the function that will loop over all the products and execute + # the right command(s) + res = makeinstall_all_products(config, products_infos, logger) + + # Print the final state + nb_products = len(products_infos) + if res == 0: + final_status = "OK" + else: + final_status = "KO" + + msg = _("\nMake install: <%s> (%d/%d)\n") % (final_status, nb_products - res, nb_products) + logger.info(msg) + + return RCO.ReturnCode(final_status, msg)
+ + +
[docs]def get_products_list(options, cfg, logger): + """ + method that gives the product list with their informations from + configuration regarding the passed options. + + :param options: (Options) + The Options instance that stores the commands arguments + :param cfg: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (list) The list of (product name, product_informations). + """ + # Get the products to be prepared, regarding the options + if options.products is None: + # No options, get all products sources + products = cfg.APPLICATION.products + else: + # if option --products, check that all products of the command line + # are present in the application. + products = options.products + for p in products: + if p not in cfg.APPLICATION.products: + raise Exception(_("Product %(product)s " + "not defined in application %(application)s") % + { 'product': p, 'application': cfg.VARS.application} ) + + # Construct the list of tuple containing + # the products name and their definition + products_infos = src.product.get_products_infos(products, cfg) + + products_infos = [pi for pi in products_infos if not(src.product.product_is_native(pi[1]) or src.product.product_is_fixed(pi[1]))] + + return products_infos
+ +
[docs]def log_step(logger, header, step): + logger.info("\r%s%s" % (header, " " * 20), 3) + logger.info("\r%s%s" % (header, step), 3) + logger.debug("\n==== %s \n" % UTS.info(step), 4)
+ +
[docs]def log_res_step(logger, res): + if res == 0: + logger.debug("<OK>\n") + else: + logger.debug("<KO>\n")
+ +
[docs]def makeinstall_all_products(config, products_infos, logger): + """ + Execute the proper configuration commands + in each product build directory. + + :param config: (Config) The global configuration + :param products_info: (list) + List of (str, Config) => (product_name, product_info) + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) the number of failing commands. + """ + res = 0 + for p_name_info in products_infos: + res_prod = makeinstall_product(p_name_info, config, logger) + if res_prod != 0: + res += 1 + return res
+ +
[docs]def makeinstall_product(p_name_info, config, logger): + """ + Execute the proper configuration command(s) + in the product build directory. + + :param p_name_info: (tuple) + (str, Config) => (product_name, product_info) + :param config: (Config) The global configuration + :param logger: (Logger) + The logger instance to use for the display and logging + :return: (int) 1 if it fails, else 0. + """ + + p_name, p_info = p_name_info + + # Logging + header = _("Make install of %s") % UTS.label(p_name) + header += " %s " % ("." * (20 - len(p_name))) + logger.info(header) + + # Do nothing if he product is not compilable + if ("properties" in p_info and \ + "compilation" in p_info.properties and \ + p_info.properties.compilation == "no"): + log_step(logger, header, "ignored") + return RCO.ReturnCode("OK", "product %s is not compilable" % p_name) + + # Instantiate the class that manages all the construction commands + # like cmake, make, make install, make test, environment management, etc... + builder = src.compilation.Builder(config, logger, p_info) + + # Prepare the environment + log_step(logger, header, "PREPARE ENV") + res_prepare = builder.prepare() + log_res_step(logger, res_prepare) + + # Execute buildconfigure, configure if the product is autotools + # Execute cmake if the product is cmake + res = 0 + if not src.product.product_has_script(p_info): + log_step(logger, header, "MAKE INSTALL") + res_m = builder.install() + log_res_step(logger, res_m) + res += res_m + + # Log the result + if res > 0: + logger.info("\r%s%s" % (header, " " * 20)) + logger.info("\r" + header + "<KO>") + logger.debug("==== <KO> in make install of s\n" % p_name) + else: + logger.info("\r%s%s" % (header, " " * 20)) + logger.info("\r" + header + "<OK>") + logger.debug("==== <OK> in make install of %s\n" % p_name) + logger.info("\n") + + return res
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/package.html b/doc/build/html/_modules/commands/package.html new file mode 100644 index 0000000..cfdd9e0 --- /dev/null +++ b/doc/build/html/_modules/commands/package.html @@ -0,0 +1,1456 @@ + + + + + + + + commands.package — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.package

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import os
+import stat
+import shutil
+import datetime
+import tarfile
+import codecs
+import string
+import traceback
+
+from commands.application import get_SALOME_modules
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+import src.pyconf as PYCONF
+import src.utilsSat as UTS
+
# Package kind identifiers used when composing the archive content.
BINARY = "binary"
SOURCE = "Source"
PROJECT = "Project"
SAT = "Sat"

# Directory names used inside the produced archive.
ARCHIVE_DIR = "ARCHIVES"
PROJECT_DIR = "PROJECT"

# Filters applied when adding files to the archive:
# directories whose name appears anywhere in a path are skipped,
# as are files ending with one of the listed extensions.
IGNORED_DIRS = [".git", ".svn"]
IGNORED_EXTENSIONS = []

# pyconf template for the project file shipped in a source package.
PROJECT_TEMPLATE = """\
#!/usr/bin/env python
#-*- coding:utf-8 -*-

# The path to the archive root directory
root_path : $PWD + "/../"
# path to the PROJECT
project_path : $PWD + "/"

# Where to search the archives of the products
ARCHIVEPATH : $root_path + "ARCHIVES"
# Where to search the pyconf of the applications
APPLICATIONPATH : $project_path + "applications/"
# Where to search the pyconf of the products
PRODUCTPATH : $project_path + "products/"
# Where to search the pyconf of the jobs of the project
JOBPATH : $project_path + "jobs/"
# Where to search the pyconf of the machines of the project
MACHINEPATH : $project_path + "machines/"
"""

# pyconf template for the local.pyconf of the salomeTools copy shipped
# in a source package; it points to the packaged PROJECT directory.
LOCAL_TEMPLATE = ("""\
#!/usr/bin/env python
#-*- coding:utf-8 -*-

  LOCAL :
  {
    base : 'default'
    workdir : 'default'
    log_dir : 'default'
    archive_dir : 'default'
    VCS : None
    tag : None
  }

PROJECTS :
{
project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
""" + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
}
""")
+  
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The package command creates an archive.
    There are 4 kinds of archive, which can be mixed:
    1- The binary archive. It contains all the product installation directories and a launcher.
    2- The sources archive. It contains the products archives, a project corresponding to the application and salomeTools.
    3- The project archive. It contains a project (give the project file path as argument).
    4- The salomeTools archive. It contains salomeTools.

    examples:
      >> sat package SALOME --binaries --sources
    """

    name = "package"

    def getParser(self):
        """Define all options for command 'sat package <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option('b', 'binaries', 'boolean', 'binaries',
            _('Optional: Produce a binary package.'), False)
        parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
            _('Optional: Only binary package: produce the archive even if '
              'there are some missing products.'), False)
        parser.add_option('s', 'sources', 'boolean', 'sources',
            _('Optional: Produce a compilable archive of the sources of the '
              'application.'), False)
        parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
            _('Optional: Only source package: do not make archive of vcs products.'),
            False)
        parser.add_option('p', 'project', 'string', 'project',
            _('Optional: Produce an archive that contains a project.'), "")
        parser.add_option('t', 'salometools', 'boolean', 'sat',
            _('Optional: Produce an archive that contains salomeTools.'), False)
        parser.add_option('n', 'name', 'string', 'name',
            _('Optional: The name or full path of the archive.'), None)
        parser.add_option('', 'add_files', 'list2', 'add_files',
            _('Optional: The list of additional files to add to the archive.'), [])
        parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
            _('Optional: do not add commercial licence.'), False)
        # NOTE(review): unlike its siblings, this declaration passes no default
        # value -- confirm parser.add_option tolerates the missing argument.
        parser.add_option('', 'without_property', 'string', 'without_property',
            _('Optional: Filter the products by their properties.\n'
              '\tSyntax: --without_property <property>:<value>'))
        return parser

    def run(self, cmd_arguments):
        """
        method called for command 'sat package <options>'

        :param cmd_arguments: (str or list) the command-line arguments
        :return: RCO.ReturnCode for the help short-cuts, otherwise an int
          (0/1) status.
        """
        # NOTE(review): this method mixes ReturnCode and int return values;
        # callers apparently accept both -- confirm before unifying.
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # Check that a type of package is called, and only one
        all_option_types = (options.binaries,
                            options.sources,
                            options.project not in ["", None],
                            options.sat)

        # Check if no option for package type
        if all_option_types.count(True) == 0:
            msg = _("""\
Needs a type for the package
Use one of the following options:
  '--binaries' '--sources' '--project' or '--salometools'\n""")
            logger.error(msg)
            return 1

        # The repository where to put the package if not Binary or Source
        package_default_path = config.LOCAL.workdir

        # if the package contains binaries or sources:
        if options.binaries or options.sources:
            # Check that the command has been called with an application
            src.check_config_has_application(config)

            # Display information
            # NOTE(review): the trailing '1' looks like a verbosity level of a
            # project-specific logger API -- confirm, a stdlib logger would
            # treat it as a lazy %-formatting argument.
            logger.info(_("Packaging application %s\n") % \
                UTS.label(config.VARS.application), 1)

            # Get the default directory where to put the packages
            package_default_path = os.path.join(config.APPLICATION.workdir, "PACKAGE")
            UTS.ensure_path_exists(package_default_path)

        # if the package contains a project:
        if options.project:
            # check that the project is visible by SAT
            if options.project not in config.PROJECTS.project_file_paths:
                local_path = os.path.join(
                    config.VARS.salometoolsway, "data", "local.pyconf")
                msg = _("""\
The project %s is not visible by salomeTools.
Please add it in the %s file.\n""") % (options.project, local_path)
                logger.error(msg)
                return 1

        # Remove the products that are filtered by the --without_property option
        if options.without_property:
            [prop, value] = options.without_property.split(":")
            update_config(config, prop, value)

        # get the name of the archive or build it
        if options.name:
            if os.path.basename(options.name) == options.name:
                # only a name (not a path)
                archive_name = options.name
                dir_name = package_default_path
            else:
                archive_name = os.path.basename(options.name)
                dir_name = os.path.dirname(options.name)

            # suppress extension
            if archive_name[-len(".tgz"):] == ".tgz":
                archive_name = archive_name[:-len(".tgz")]
            if archive_name[-len(".tar.gz"):] == ".tar.gz":
                archive_name = archive_name[:-len(".tar.gz")]

        else:
            archive_name=""
            dir_name = package_default_path
            if options.binaries or options.sources:
                archive_name = config.APPLICATION.name

            if options.binaries:
                archive_name += "-"+config.VARS.dist

            if options.sources:
                archive_name += "-SRC"
                if options.with_vcs:
                    archive_name += "-VCS"

            if options.project:
                project_name, __ = os.path.splitext(
                    os.path.basename(options.project))
                archive_name += ("PROJECT-" + project_name)

            if options.sat:
                archive_name += ("salomeTools_" + config.INTERNAL.sat_version)
            if len(archive_name)==0: # no option worked
                msg = _("""\
Cannot name the archive.
check if at least one of the following options was selected:
  '--binaries' '--sources' '--project' or '--salometools'\n""")
                logger.error(msg)
                return 1

        path_targz = os.path.join(dir_name, archive_name + ".tgz")

        logger.info("  Package path = %s\n" % UTS.blue(path_targz))

        # Create a working directory for all files that are produced during the
        # package creation and that will be removed at the end of the command
        tmp_working_dir = os.path.join(config.VARS.tmp_root, config.VARS.datehour)
        UTS.ensure_path_exists(tmp_working_dir)
        logger.debug(_("The temporary working directory: %s\n") % tmp_working_dir)

        msg = _("Preparation of files to add to the archive")
        logger.info(UTS.label(msg))

        d_files_to_add={} # content of the archive

        # a dict to hold paths that will need to be substitute for users recompilations
        d_paths_to_substitute={}

        if options.binaries:
            d_bin_files_to_add = binary_package(config, logger, options, tmp_working_dir)
            # for all binaries dir, store the substitution that will be required
            # for extra compilations
            for key in d_bin_files_to_add:
                if key.endswith("(bin)"):
                    source_dir = d_bin_files_to_add[key][0]
                    path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + config.VARS.dist,"INSTALL")
                    if os.path.basename(source_dir)==os.path.basename(path_in_archive):
                        # if basename is the same we will just substitute the dirname
                        d_paths_to_substitute[os.path.dirname(source_dir)]=\
                            os.path.dirname(path_in_archive)
                    else:
                        d_paths_to_substitute[source_dir]=path_in_archive

            d_files_to_add.update(d_bin_files_to_add)

        if options.sources:
            d_files_to_add.update(source_package(runner,
                                                 config,
                                                 logger,
                                                 options,
                                                 tmp_working_dir))
            if options.binaries:
                # for archives with bin and sources we provide a shell script able to
                # install binaries for compilation
                file_install_bin=produce_install_bin_file(config,logger,
                                                          tmp_working_dir,
                                                          d_paths_to_substitute,
                                                          "install_bin.sh")
                d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
                logger.debug("substitutions to be done later:\n%s\n" % str(d_paths_to_substitute))

        else:
            # --salomeTool option is not considered when --sources is selected, as this option
            # already brings salomeTool!
            if options.sat:
                d_files_to_add.update({"salomeTools" : (config.VARS.salometoolsway, "")})


        if options.project:
            d_files_to_add.update(project_package(options.project, tmp_working_dir))

        if not(d_files_to_add):
            msg = _("Empty dictionary to build the archive.\n")
            logger.error(msg)
            return 1

        # Add the README file in the package
        local_readme_tmp_path = add_readme(config, options, tmp_working_dir)
        d_files_to_add["README"] = (local_readme_tmp_path, "README")

        # Add the additional files of option add_files
        if options.add_files:
            for file_path in options.add_files:
                if not os.path.exists(file_path):
                    # NOTE(review): msg is built but never logged, so missing
                    # files are skipped silently -- looks like a logger call
                    # is missing here; confirm the intent.
                    msg = _("The file %s is not accessible.\n") % file_path
                    continue
                file_name = os.path.basename(file_path)
                d_files_to_add[file_name] = (file_path, file_name)

        msg = UTS.label(_("Actually do the package"))
        logger.info("\n%s\n" % msg)

        try:
            # Creating the object tarfile
            tar = tarfile.open(path_targz, mode='w:gz')

            # get the filtering function if needed
            filter_function = exclude_VCS_and_extensions

            # Add the files to the tarfile object
            res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
            tar.close()

        except KeyboardInterrupt:
            logger.critical(UTS.red(_("KeyboardInterrupt forced interruption\n")))
            logger.info(_("Removing the temporary working directory ... "))
            # remove the working directory
            shutil.rmtree(tmp_working_dir)
            logger.info("<OK>")
            return 1

        # remove the working directory
        shutil.rmtree(tmp_working_dir)

        # Print again the path of the package
        logger.info("  Package path = %s\n" % UTS.blue(path_targz))

        return res
def add_files(tar, name_archive, d_content, logger, f_exclude=None):
    """
    Create an archive containing all directories and files that are given
    in the d_content argument.

    :param tar: (tarfile) The tarfile instance used to make the archive.
    :param name_archive: (str) The name of the archive to make.
    :param d_content: (dict)
      The dictionary that contain all directories and files to add in the archive.
      d_content[label] = (path_on_local_machine, path_in_archive)
    :param logger: (Logger) the logging instance
    :param f_exclude: (function) predicate on a file name;
      returns True for entries that must be left out of the archive.
    :return: (int) 0 if success, 1 if not.
    """
    # nothing to add: avoid max() on an empty sequence below
    if not d_content:
        return 0

    # get the max length of the messages in order to make the display
    max_len = len(max(d_content.keys(), key=len))

    # TarFile.add(exclude=...) was removed in Python 3.7; express the
    # exclusion predicate through the 'filter' callback instead.
    # Note: 'filter' receives the name inside the archive rather than the
    # local path, which is equivalent for the VCS-dir/extension filtering
    # this function is used with.
    if f_exclude is None:
        tar_filter = None
    else:
        tar_filter = lambda tarinfo: None if f_exclude(tarinfo.name) else tarinfo

    success = 0
    # loop over each directory or file stored in the d_content dictionary
    for name in d_content.keys():
        # display information
        len_points = max_len - len(name)
        logger.info(name + " " + len_points * "." + " ")
        # Get the local path and the path in archive
        # of the directory or file to add
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        # Add it in the archive
        try:
            tar.add(local_path, arcname=in_archive, filter=tar_filter)
            logger.info("<OK>\n")
        except Exception as e:
            logger.info("<KO> %s\n" % str(e))
            success = 1
    return success
+ +
def exclude_VCS_and_extensions(filename):
    """
    Filtering predicate used while packaging: decide whether a file must
    be kept out of the archive because it belongs to a VCS bookkeeping
    directory (e.g. .git) or carries an ignored extension.

    :param filename: (str) The filname to exclude (or not).
    :return: (bool) True if the file has to be exclude
    """
    # excluded when the path contains any of the VCS directory names...
    if any(ignored in filename for ignored in IGNORED_DIRS):
        return True
    # ...or ends with one of the ignored extensions
    return any(filename.endswith(ext) for ext in IGNORED_EXTENSIONS)
+ +
def produce_relative_launcher(config,
                              logger,
                              file_dir,
                              file_name,
                              binaries_dir_name,
                              with_commercial=True):
    """
    Create a specific SALOME launcher for the binary package.
    This launcher uses relative paths.

    :param config: (Config) The global configuration.
    :param logger: (Logger) the logging instance
    :param file_dir: (str) the directory where to put the launcher
    :param file_name: (str) The launcher name
    :param binaries_dir_name: (str)
      the name of the repository where the binaries are, in the archive.
    :param with_commercial: (bool)
      whether the commercial licence handling is written in the launcher.
    :return: (str) the path of the produced launcher
    """

    # get KERNEL installation path
    kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")

    # set kernel bin dir (considering fhs property)
    kernel_cfg = src.product.get_product_config(config, "KERNEL")
    if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
    else:
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")

    # Get the launcher template and do substitutions
    withProfile = src.fileEnviron.withProfile

    # rewrite the absolute KERNEL paths of the template as paths relative
    # to out_dir_Path (the root of the unpacked archive)
    withProfile = withProfile.replace(
        "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
        "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + kernel_root_dir + "'")
    withProfile = withProfile.replace(
        " 'BIN_KERNEL_INSTALL_DIR'",
        " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")

    before, after = withProfile.split(
        "# here your local standalone environment\n")

    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
                                           logger,
                                           file_dir,
                                           src_root=None)

    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it: template head, then the generated
    # environment, then the template tail
    launch_file = open(filepath, "w")
    launch_file.write(before)
    # Write
    writer.write_cfgForPy_file(launch_file,
                               for_package = binaries_dir_name,
                               with_commercial=with_commercial)
    launch_file.write(after)
    launch_file.close()

    # Little hack to put out_dir_Path outside the strings
    src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )

    # A hack to put a call to a file for distene licence.
    # It does nothing to an application that has no distene product
    hack_for_distene_licence(filepath)

    # change the rights in order to make the file executable for everybody
    os.chmod(filepath,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)

    return filepath
+ +
def hack_for_distene_licence(filepath):
    """Replace the distene licence env variable by a call to a file.

    The original launcher is kept next to the modified one with an
    ``_old`` suffix.

    :param filepath: (str) The path to the launcher to modify.
    """
    # work on a renamed copy and rewrite the launcher in place
    backup_path = filepath + "_old"
    shutil.move(filepath, backup_path)
    with open(backup_path, "r") as fin:
        lines = fin.readlines()

    # locate the Distene section marker
    marker_idx = -1
    for idx, line in enumerate(lines):
        if "# Set DISTENE License" in line:
            marker_idx = idx
            break

    if marker_idx == -1:
        # No distene product, there is nothing to do:
        # write the content back unchanged
        with open(filepath, "w") as fout:
            fout.writelines(lines)
        return

    # drop the two lines following the marker and replace them by a call
    # to an external licence file
    del lines[marker_idx + 1]
    del lines[marker_idx + 1]
    text_to_insert ="""\
import imp
try:
    distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py')
    distene.set_distene_variables(context)
except:
    pass
"""
    lines.insert(marker_idx + 1, text_to_insert)
    with open(filepath, "w") as fout:
        fout.writelines(lines)
    return
+ +
def produce_relative_env_files(config,
                               logger,
                               file_dir,
                               binaries_dir_name):
    """
    Create a specific environment file for the binary package.
    This file uses relative paths.

    :param config: (Config) The global configuration.
    :param logger: (Logger) the logging instance
    :param file_dir: (str) the directory where to put the files
    :param binaries_dir_name: (str)
      The name of the repository where the binaries are, in the archive.
    :return: (str) The path of the produced environment file.
    """
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
                                           logger,
                                           file_dir,
                                           src_root=None)

    # Write
    filepath = writer.write_env_file("env_launch.sh",
                                     False, # for launch
                                     "bash",
                                     for_package = binaries_dir_name)

    # Little hack to put out_dir_Path as environment variable
    src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )

    # change the rights in order to make the file executable for everybody
    os.chmod(filepath,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)

    return filepath
+ +
def produce_install_bin_file(config,
                             logger,
                             file_dir,
                             d_sub,
                             file_name):
    """
    Create a bash shell script which do substitutions in BINARIES dir
    in order to use it for extra compilations.

    :param config: (Config) The global configuration.
    :param logger: (Logger) the logging instance
    :param file_dir: (str) the directory where to put the files
    :param d_sub: (dict)
      the dictionnary that contains the substitutions to be done
    :param file_name: (str) the name of the install script file
    :return: (str) the produced file
    """
    # Write
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    # use codec utf-8 as sat variables are in unicode
    with codecs.open(filepath, "w", 'utf-8') as installbin_file:
        installbin_template_path = os.path.join(config.VARS.internal_dir,
                                                "INSTALL_BIN.template")

        # build the name of the directory that will contain the binaries
        binaries_dir_name = "BINARIES-" + config.VARS.dist
        # build the substitution loop: a grep over all files referencing one
        # of the old paths, piped into a sed replacing each of them
        loop_cmd = "for f in $(grep -RIl"
        for key in d_sub:
            loop_cmd += " -e "+ key
        loop_cmd += ' INSTALL); do\n  sed -i "\n'
        for key in d_sub:
            loop_cmd += "  s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
        loop_cmd += ' " $f\ndone'

        d={}
        d["BINARIES_DIR"] = binaries_dir_name
        d["SUBSTITUTION_LOOP"]=loop_cmd

        # substitute the template and write it in file
        content=src.template.substitute(installbin_template_path, d)
        installbin_file.write(content)
    # change the rights in order to make the file executable for everybody
    os.chmod(filepath,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)

    return filepath
+ +
def product_appli_creation_script(config,
                                  logger,
                                  file_dir,
                                  binaries_dir_name):
    """
    Create a script that can produce an application (EDF style)
    in the binary package.

    :param config: (Config) The global configuration.
    :param logger: (Logger) the logging instance
    :param file_dir: (str) the directory where to put the file
    :param binaries_dir_name: (str)
      The name of the repository where the binaries are, in the archive.
    :return: (str) The path of the produced script file
    """
    template_name = "create_appli.py.for_bin_packages.template"
    template_path = os.path.join(config.VARS.internal_dir, template_name)
    text_to_fill = open(template_path, "r").read()
    text_to_fill = text_to_fill.replace("TO BE FILLED 1",
                                        '"' + binaries_dir_name + '"')

    # build the <module .../> lines of the application configuration
    text_to_add = ""
    for product_name in get_SALOME_modules(config):
        product_info = src.product.get_product_config(config, product_name)

        if src.product.product_is_smesh_plugin(product_info):
            continue

        if 'install_dir' in product_info and bool(product_info.install_dir):
            if src.product.product_is_cpp(product_info):
                # cpp module
                # NOTE(review): only the line of the LAST component survives
                # this loop (text_to_add is appended after it) -- looks like
                # the append belongs inside the loop; confirm.
                for cpp_name in src.product.get_product_components(product_info):
                    line_to_add = ("<module name=\"" +
                                   cpp_name +
                                   "\" gui=\"yes\" path=\"''' + "
                                   "os.path.join(dir_bin_name, \"" +
                                   cpp_name + "\") + '''\"/>")
            else:
                # regular module
                line_to_add = ("<module name=\"" +
                               product_name +
                               "\" gui=\"yes\" path=\"''' + "
                               "os.path.join(dir_bin_name, \"" +
                               product_name + "\") + '''\"/>")
            text_to_add += line_to_add + "\n"

    filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)

    tmp_file_path = os.path.join(file_dir, "create_appli.py")
    ff = open(tmp_file_path, "w")
    ff.write(filled_text)
    ff.close()

    # change the rights in order to make the file executable for everybody
    os.chmod(tmp_file_path,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)

    return tmp_file_path
+ +
def binary_package(config, logger, options, tmp_working_dir):
    """
    Prepare a dictionary that stores all the needed directories and files
    to add in a binary package.

    :param config: (Config) The global configuration.
    :param logger: (Logger) the logging instance
    :param options: (OptResult) the options of the launched command
    :param tmp_working_dir: (str)
      The temporary local directory containing some specific directories
      or files needed in the binary package
    :return: (dict or None)
      The dictionary that stores all the needed directories and files
      to add in a binary package, or None when some products are missing
      and --force_creation was not given.
      {label : (path_on_local_machine, path_in_archive)}
    """

    # Get the list of product installation to add to the archive
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
                                                    config)
    l_install_dir = []
    l_source_dir = []
    l_not_installed = []
    l_sources_not_present = []
    for prod_name, prod_info in l_product_info:

        # Add the sources of the products that have the property
        # sources_in_package : "yes"
        if src.get_property_in_product_cfg(prod_info,
                                           "sources_in_package") == "yes":
            if os.path.exists(prod_info.source_dir):
                l_source_dir.append((prod_name, prod_info.source_dir))
            else:
                l_sources_not_present.append(prod_name)

        # ignore the native and fixed products for install directories
        if (src.product.product_is_native(prod_info)
                or src.product.product_is_fixed(prod_info)
                or not src.product.product_compiles(prod_info)):
            continue
        if src.product.check_installation(prod_info):
            l_install_dir.append((prod_name, prod_info.install_dir))
        else:
            l_not_installed.append(prod_name)

        # Add also the cpp generated modules (if any)
        if src.product.product_is_cpp(prod_info):
            # cpp module
            for name_cpp in src.product.get_product_components(prod_info):
                install_dir = os.path.join(config.APPLICATION.workdir,
                                           "INSTALL", name_cpp)
                if os.path.exists(install_dir):
                    l_install_dir.append((name_cpp, install_dir))
                else:
                    l_not_installed.append(name_cpp)

    # Print warning or error if there are some missing products
    if len(l_not_installed) > 0:
        text_missing_prods = ""
        for p_name in l_not_installed:
            text_missing_prods += "-" + p_name + "\n"

        msg = _("There are missing products installations:\n")
        logger.warning(msg + text_missing_prods)
        if not options.force_creation:
            return None

    # Do the same for sources
    if len(l_sources_not_present) > 0:
        text_missing_prods = ""
        for p_name in l_sources_not_present:
            text_missing_prods += "-" + p_name + "\n"

        msg = _("There are missing products sources:\n")
        logger.warning(msg + text_missing_prods)
        if not options.force_creation:
            return None

    # construct the name of the directory that will contain the binaries
    binaries_dir_name = "BINARIES-" + config.VARS.dist

    # construct the correlation table between the product names, there
    # actual install directories and there install directory in archive
    d_products = {}
    for prod_name, install_dir in l_install_dir:
        path_in_archive = os.path.join(binaries_dir_name, prod_name)
        d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)

    for prod_name, source_dir in l_source_dir:
        path_in_archive = os.path.join("SOURCES", prod_name)
        d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)

    # for packages of SALOME applications including KERNEL,
    # we produce a salome launcher or a virtual application (depending on salome version)
    if 'KERNEL' in config.APPLICATION.products:
        VersionSalome = src.get_salome_version(config)
        # Case where SALOME has the launcher that uses the SalomeContext API
        if VersionSalome >= 730:
            # create the relative launcher and add it to the files to add
            launcher_name = src.get_launcher_name(config)
            launcher_package = produce_relative_launcher(config,
                                                         logger,
                                                         tmp_working_dir,
                                                         launcher_name,
                                                         binaries_dir_name,
                                                         not(options.without_commercial))

            d_products["launcher"] = (launcher_package, launcher_name)
            if options.sources:
                # if we mix binaries and sources, we add a copy of the launcher,
                # prefixed with "bin",in order to avoid clashes
                d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
        else:
            # Provide a script for the creation of an application EDF style
            appli_script = product_appli_creation_script(config,
                                                         logger,
                                                         tmp_working_dir,
                                                         binaries_dir_name)

            d_products["appli script"] = (appli_script, "create_appli.py")

    # Put also the environment file
    env_file = produce_relative_env_files(config,
                                          logger,
                                          tmp_working_dir,
                                          binaries_dir_name)

    d_products["environment file"] = (env_file, "env_launch.sh")

    return d_products
+ +
def source_package(sat, config, logger, options, tmp_working_dir):
    """
    Prepare a dictionary that stores all the needed directories and files
    to add in a source package.

    :param sat: (Sat)
      The Sat instance used to run the 'clean' and 'source' commands
      when archives have to be built for vcs products.
    :param config: (Config) The global configuration.
    :param logger: (Logger) the logging instance
    :param options: (OptResult) the options of the launched command
    :param tmp_working_dir: (str)
      The temporary local directory containing some specific directories
      or files needed in the binary package
    :return: (dict)
      the dictionary that stores all the needed directories and files
      to add in a source package.
      {label : (path_on_local_machine, path_in_archive)}
    """

    # Get all the products that are prepared using an archive
    logger.info("Find archive products ... ")
    d_archives, l_pinfo_vcs = get_archives(config, logger)
    logger.info("Done\n")
    d_archives_vcs = {}
    if not options.with_vcs and len(l_pinfo_vcs) > 0:
        # Make archives with the products that are not prepared using an archive
        # (git, cvs, svn, etc)
        logger.info("Construct archives for vcs products ... ")
        d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
                                          sat,
                                          config,
                                          logger,
                                          tmp_working_dir)
        logger.info("Done\n")

    # Create a project
    logger.info("Create the project ... ")
    d_project = create_project_for_src_package(config, tmp_working_dir, options.with_vcs)
    logger.info("Done\n")

    # Add salomeTools
    tmp_sat = add_salomeTools(config, tmp_working_dir)
    d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}

    # Add a sat symbolic link if not win
    if not src.architecture.is_windows():
        tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
        try:
            t = os.getcwd()
        except:
            # In the jobs, os.getcwd() can fail
            t = config.LOCAL.workdir
        os.chdir(tmp_working_dir)
        if os.path.lexists(tmp_satlink_path):
            os.remove(tmp_satlink_path)
        os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
        os.chdir(t)

        d_sat["sat link"] = (tmp_satlink_path, "sat")

    d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
    return d_source
+ +
def get_archives(config, logger):
    """
    Find all the products from an archive and all the products
    from a VCS (git, cvs, svn) repository.

    :param config: (Config) The global configuration.
    :param logger: (Logger) The logging instance
    :return: (Dict, List)
      The dictionary
      {name_product : (local path of its archive, path in the package of its archive )}
      and the list of specific configuration corresponding to the vcs products
    """
    # Get the list of product informations
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
                                                    config)
    d_archives = {}
    l_pinfo_vcs = []
    for p_name, p_info in l_product_info:
        # ignore the native and fixed products
        if (src.product.product_is_native(p_info)
                or src.product.product_is_fixed(p_info)):
            continue
        if p_info.get_source == "archive":
            archive_path = p_info.archive_info.archive_name
            archive_name = os.path.basename(archive_path)
            # Fixed: register the archive only for products that actually
            # come from an archive. Previously this assignment also ran for
            # vcs products, recording the archive path of the PREVIOUS
            # product (or raising NameError when the first product was vcs);
            # vcs products are handled separately by get_archives_vcs.
            d_archives[p_name] = (archive_path,
                                  os.path.join(ARCHIVE_DIR, archive_name))
        else:
            l_pinfo_vcs.append((p_name, p_info))
    return d_archives, l_pinfo_vcs
+ +
def add_salomeTools(config, tmp_working_dir):
    """
    Prepare a version of salomeTools that has a specific local.pyconf file
    configured for a source package.

    :param config: (Config) The global configuration.
    :param tmp_working_dir: (str)
      The temporary local directory containing some specific directories
      or files needed in the source package
    :return: (str)
      The path to the local salomeTools directory to add in the package
    """
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)

    # Update the local.pyconf file that contains the path to the project
    local_pyconf_name = "local.pyconf"
    local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
    # Remove the .pyconf file in the root directory of salomeTools if there is
    # any. (For example when launching jobs, a pyconf file describing the jobs
    # can be here and is not useful)
    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
    for file_or_dir in files_or_dir_SAT:
        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
            file_path = os.path.join(tmp_working_dir,
                                     "salomeTools",
                                     file_or_dir)
            os.remove(file_path)

    # overwrite the copied local.pyconf with the package-specific template
    ff = open(local_pyconf_file, "w")
    ff.write(LOCAL_TEMPLATE)
    ff.close()

    return sat_tmp_path.path
+ +
def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
    """
    For a sources package that requires that all products come from an
    archive, create the missing archives for the vcs products: clean their
    source directories, fetch pristine sources with the 'source' command
    (so no patch ends up in the archive), then tar each source tree.

    :param l_pinfo_vcs: (list)
      The list of specific configurations corresponding to each vcs product
    :param sat: (Sat)
      The Sat instance that can be called to clean and source the products
    :param config: (Config) The global configuration.
    :param logger: (Logger) The logging instance
    :param tmp_working_dir: (str)
      The temporary local directory where the archives are created
    :return: (dict)
      The dictionary that stores all the archives to add in the source
      package: {label : (path_on_local_machine, path_in_archive)}
    """
    product_names = [name for name, __ in l_pinfo_vcs]
    csv_names = ",".join(product_names)
    application = config.VARS.application

    # clean the source directories so the archives contain pristine sources
    logger.info(_("clean sources\n"))
    sat.clean(application + " --sources --products " + csv_names,
              batch=True, verbose=0, logger_add_link=logger)

    # fetch the sources again (patches are applied by a separate command,
    # so they are not included)
    logger.info(_("get sources"))
    sat.source(application + " --products " + csv_names,
               batch=True, verbose=0, logger_add_link=logger)

    # make one new archive per vcs product
    d_archives_vcs = {}
    for name, pinfo in l_pinfo_vcs:
        local_archive_path = make_archive(name, pinfo, tmp_working_dir)
        d_archives_vcs[name] = (local_archive_path,
                                os.path.join(ARCHIVE_DIR, name + ".tgz"))
    return d_archives_vcs
+ +
def make_archive(prod_name, prod_info, where):
    """Create an archive of a product by searching its source directory.

    :param prod_name: (str) The name of the product.
    :param prod_info: (Config)
      The specific configuration corresponding to the product
    :param where: (str)
      The path of the repository where to put the resulting archive
    :return: (str) The path of the resulting archive
    """
    path_targz_prod = os.path.join(where, prod_name + ".tgz")
    # context manager guarantees the archive is finalized and closed
    # even if add() raises (the original leaked the handle on error)
    with tarfile.open(path_targz_prod, mode='w:gz') as tar_prod:
        # NOTE(review): the 'exclude' keyword of TarFile.add is deprecated
        # in Python 3 in favour of 'filter' - confirm target interpreter
        tar_prod.add(prod_info.source_dir,
                     arcname=prod_name,
                     exclude=exclude_VCS_and_extensions)
    return path_targz_prod
+ +
def create_project_for_src_package(config, tmp_working_dir, with_vcs):
    """Create a specific project for a source package.

    :param config: (Config) The global configuration.
    :param tmp_working_dir: (str)
      The temporary local directory containing some specific directories
      or files needed in the source package
    :param with_vcs: (bool)
      True if the package is with vcs products
      (not transformed into archive products)
    :return: (dict)
      The dictionary
      {"project" : (produced project, project path in the archive)}
    """
    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir, "products")
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
                                          "products", "compil_scripts")
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
                                       "products", "env_scripts")
    patches_tmp_dir = os.path.join(project_tmp_dir,
                                   "products", "patches")
    application_tmp_dir = os.path.join(project_tmp_dir, "applications")
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      env_scripts_tmp_dir,
                      patches_tmp_dir,
                      application_tmp_dir]:
        UTS.ensure_path_exists(directory)

    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    # 'with' guarantees the file is closed even if the write raises
    with open(project_pyconf_file, "w") as ff:
        ff.write(PROJECT_TEMPLATE)

    # Loop over the products to get their pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        find_product_scripts_and_pyconf(p_name,
                                        p_info,
                                        config,
                                        with_vcs,
                                        compil_scripts_tmp_dir,
                                        env_scripts_tmp_dir,
                                        patches_tmp_dir,
                                        products_pyconf_tmp_dir)

    find_application_pyconf(config, application_tmp_dir)

    d_project = {"project": (project_tmp_dir, PROJECT_DIR)}
    return d_project
+ +
def find_product_scripts_and_pyconf(p_name,
                                    p_info,
                                    config,
                                    with_vcs,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    patches_tmp_dir,
                                    products_pyconf_tmp_dir):
    """
    Create a specific pyconf file for a given product.
    Get its environment script, its compilation script
    and patches and put it in the temporary working directory.
    This method is used in the source package in order to
    construct the specific project.

    :param p_name: (str) The name of the product.
    :param p_info: (Config) The specific configuration corresponding to the product
    :param config: (Config) The global configuration.
    :param with_vcs: (bool)
      True if the package is with vcs products
      (not transformed into archive products)
    :param compil_scripts_tmp_dir: (str)
      The path to the temporary compilation scripts directory of the project.
    :param env_scripts_tmp_dir: (str)
      The path to the temporary environment script directory of the project.
    :param patches_tmp_dir: (str)
      The path to the temporary patch scripts directory of the project.
    :param products_pyconf_tmp_dir: (str)
      The path to the temporary product scripts directory of the project.
    """
    # read the pyconf of the product
    product_pyconf_path = UTS.find_file_in_lpath(p_name + ".pyconf",
                                                 config.PATHS.PRODUCTPATH)
    product_pyconf_cfg = PYCONF.Config(product_pyconf_path)

    # find the compilation script if any; copy it and rewrite the config
    # entry to the bare file name (the script now lives in the project)
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)
        product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
                                                        p_info.compil_script)
    # find the environment script if any (same copy + rename-to-basename)
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)
        product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
                                                    p_info.environ.env_script)
    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = PYCONF.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")

        product_pyconf_cfg[p_info.section].patches = patches

    if with_vcs:
        # put in the pyconf file the resolved values of the vcs sections
        for info in ["git_info", "cvs_info", "svn_info"]:
            if info in p_info:
                for key in p_info[info]:
                    product_pyconf_cfg[p_info.section][info][key] = p_info[
                                                                    info][key]
    else:
        # if the product is not archive, then make it become archive:
        # the vcs sources have been turned into a .tgz by the caller
        if src.product.product_is_vcs(p_info):
            product_pyconf_cfg[p_info.section].get_source = "archive"
            if not "archive_info" in product_pyconf_cfg[p_info.section]:
                product_pyconf_cfg[p_info.section].addMapping("archive_info",
                                        PYCONF.Mapping(product_pyconf_cfg),
                                        "")
            product_pyconf_cfg[p_info.section
                              ].archive_info.archive_name = p_info.name + ".tgz"

    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
                                           p_name + ".pyconf")
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
    ff.close()
+ +
def find_application_pyconf(config, application_tmp_dir):
    """
    Find the application pyconf file and put it in the specific temporary
    directory containing the specific project of a source package.

    :param config: (Config) The global configuration.
    :param application_tmp_dir: (str)
      The path to the temporary application scripts directory of the project.
    """
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = UTS.find_file_in_lpath(
                                        application_name + ".pyconf",
                                        config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = PYCONF.Config(application_pyconf_path)

    # Change the workdir so the unpacked package is self-contained
    application_pyconf_cfg.APPLICATION.workdir = PYCONF.Reference(
                                 application_pyconf_cfg,
                                 PYCONF.DOLLAR,
                                 'VARS.salometoolsway + $VARS.sep + ".."')

    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"

    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    # 'with' guarantees the handle is closed even if __save__ raises
    with open(application_tmp_pyconf_path, 'w') as ff:
        ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        application_pyconf_cfg.__save__(ff, 1)
+ +
def project_package(project_file_path, tmp_working_dir):
    """
    Prepare a dictionary that stores all the needed directories and files
    to add in a project package.

    :param project_file_path: (str) The path to the local project.
    :param tmp_working_dir: (str)
      The temporary local directory containing some specific directories
      or files needed in the project package
    :return: (dict)
      The dictionary that stores all the needed directories and files
      to add in a project package.
      {label : (path_on_local_machine, path_in_archive)}
    """
    d_project = {}
    # Read the project file and get the directories to add to the package
    project_pyconf_cfg = PYCONF.Config(project_file_path)
    paths = {"ARCHIVEPATH": "archives",
             "APPLICATIONPATH": "applications",
             "PRODUCTPATH": "products",
             "JOBPATH": "jobs",
             "MACHINEPATH": "machines"}
    # Loop over the project paths and add them
    for path in paths:
        if path not in project_pyconf_cfg:
            continue
        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], paths[path])
        # Modify the value of the path in the package: make it relative
        # to the (future) project_path
        project_pyconf_cfg[path] = PYCONF.Reference(
                                       project_pyconf_cfg,
                                       PYCONF.DOLLAR,
                                       'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      PYCONF.Mapping(project_pyconf_cfg),
                                      "")
    project_pyconf_cfg.project_path = PYCONF.Reference(project_pyconf_cfg,
                                                       PYCONF.DOLLAR,
                                                       'PWD')

    # Write the project pyconf file
    project_file_name = os.path.basename(project_file_path)
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    # 'with' guarantees the handle is closed even if __save__ raises
    with open(project_pyconf_tmp_path, 'w') as ff:
        ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        project_pyconf_cfg.__save__(ff, 1)
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)

    return d_project
+ +
def add_readme(config, options, where):
    """Generate the README file of a package and return its path.

    The README is assembled from a generic header plus the sections
    matching the package options (binaries, sources, project, sat).

    :param config: (Config) The global configuration.
    :param options: the package command options (binaries, sources,
      project, sat flags are read here)
    :param where: (str) The directory in which to write the README
    :return: (str) The path of the written README file
    """
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
        readme_header = """
# This package was generated with sat $version
# Date: $date
# User: $user
# Distribution : $dist

In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).

"""
        readme_compilation_with_binaries = """

compilation based on the binaries used as prerequisites
=======================================================

If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
with local paths).
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 >> cd $ROOT
 >> ./install_bin.sh
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)

"""
        readme_header_tpl = string.Template(readme_header)
        # one template file per optional README section
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        d = dict()
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = config.INTERNAL.sat_version
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d))  # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                else:
                    d['virtual_app'] = 'runAppli'  # this info is not used now

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
            if "launcher" in d:
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))

        if options.sources:
            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources:
            f.write(readme_compilation_with_binaries)

        if options.project:
            f.write(src.template.substitute(readme_template_path_pro, d))

        if options.sat:
            f.write(src.template.substitute(readme_template_path_sat, d))

        return readme_path
+ +
def update_config(config, prop, value):
    """
    Remove from config.APPLICATION.products the products
    that have the property given as input.

    :param config: (Config) The global config.
    :param prop: (str) The property to filter
    :param value: (str) The value of the property to filter
    """
    src.check_config_has_application(config)
    # Collect first, delete after: do not mutate the mapping while
    # iterating over its keys
    l_product_to_remove = []
    for product_name in config.APPLICATION.products.keys():
        prod_cfg = src.product.get_product_config(config, product_name)
        if src.get_property_in_product_cfg(prod_cfg, prop) == value:
            l_product_to_remove.append(product_name)
    for product_name in l_product_to_remove:
        # idiomatic 'del' instead of calling __delitem__ directly
        del config.APPLICATION.products[product_name]
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/patch.html b/doc/build/html/_modules/commands/patch.html new file mode 100644 index 0000000..c265a07 --- /dev/null +++ b/doc/build/html/_modules/commands/patch.html @@ -0,0 +1,300 @@ + + + + + + + + commands.patch — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.patch

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import os
+import subprocess
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+import commands.prepare
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The patch command apply the patches on the sources of the application products
    if there is any.

    examples:
      >> sat patch SALOME --products qt,boost
    """

    name = "patch"

    def getParser(self):
        """Define all options for command 'sat patch <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option('p', 'products', 'list2', 'products',
            _('Optional: products to get the sources. This option can be'
              ' passed several time to get the sources of several products.'))
        return parser

    def run(self, cmd_arguments):
        """method called for command 'sat patch <options>'

        Applies the declared patches on the sources of the selected
        products and returns a global OK/KO status.
        """
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # check that the command has been called with an application
        # NOTE(review): 'src' is not bound by the module imports
        # ('import src.debug as DBG' binds only DBG) - confirm
        src.check_config_has_application( config )

        # Print some informations
        logger.info("Patching sources of the application %s\n" % \
                    UTS.blue(config.VARS.application))

        # BUGFIX: the original called
        #   logger.info(' workdir = %s\n\n"', UTS.blue(...))
        # with a stray quote in the literal and the value passed as a
        # second positional argument; use %-formatting like the call above
        logger.info(' workdir = %s\n\n' % UTS.blue(config.APPLICATION.workdir))

        # Get the products list with products informations regarding the options
        products_infos = commands.prepare.get_products_list(options, config, logger)

        # Get the maximum name length in order to format the terminal display
        # NOTE(review): products_infos[0] is a single (name, info) pair;
        # presumably the longest name over ALL products was intended - confirm
        max_product_name_len = 1
        if len(products_infos) > 0:
            max_product_name_len = max(map(lambda l: len(l), products_infos[0])) + 4

        # The loop on all the products on which to apply the patches
        good_result = 0
        for __, product_info in products_infos:
            # Apply the patch
            # assumes ReturnCode unpacks to (ok_flag, message) - TODO confirm
            return_code, patch_res = apply_patch(config,
                                                 product_info,
                                                 max_product_name_len,
                                                 logger)
            logger.info(patch_res)
            if return_code:
                good_result += 1

        # Display the results (how much passed, how much failed, etc...)
        logger.info("\n")
        if good_result == len(products_infos):
            status = "OK"
        else:
            status = "KO"

        # write results
        msg = ("\nPatching sources of the application: <%s> (%d/%d)\n") % \
              (status, good_result, len(products_infos))
        logger.info(msg)

        return RCO.ReturnCode(status, msg)
+ + +
def apply_patch(config, product_info, max_product_name_len, logger):
    """The method called to apply patches on a product

    :param config: (Config) The global configuration
    :param product_info: (Config)
      The configuration specific to the product to be patched
    :param max_product_name_len: (int)
      Width used to pad the product name in the terminal display
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :return: (RCO.ReturnCode)
    """
    # if the product is native, do not apply patch
    if src.product.product_is_native(product_info):
        # display and log
        logger.info('%s: ' % UTS.label(product_info.name))
        logger.info(' ' * (max_product_name_len - len(product_info.name)))
        logger.info("\n")
        msg = _("The %s product is native. Do not apply any patch") % product_info.name
        logger.info(msg + "\n")
        return RCO.ReturnCode("OK", msg)

    if not "patches" in product_info or len(product_info.patches) == 0:
        # display and log
        logger.info('%s: ' % UTS.label(product_info.name))
        logger.info(' ' * (max_product_name_len - len(product_info.name)))
        logger.info("\n")
        msg = _("No patch for the %s product") % product_info.name
        logger.info(msg + "\n")
        return RCO.ReturnCode("OK", msg)
    else:
        # display and log
        logger.info('%s: ' % UTS.label(product_info.name))
        logger.info(' ' * (max_product_name_len - len(product_info.name)))
        logger.info("\n")

    if not os.path.exists(product_info.source_dir):
        msg = _("No sources found for the %s product") % product_info.name
        logger.error(UTS.red(msg))
        return RCO.ReturnCode("KO", msg)

    # At this point, there is one or more patches and the source directory exists
    retcode = []
    res = []
    # Loop on all the patches of the product
    for patch in product_info.patches:
        details = []

        # Check the existence and apply the patch
        if os.path.isfile(patch):
            patch_cmd = "patch -p1 < %s" % patch

            # Write the command in the terminal if verbose level is at 5
            logger.info(" >%s\n" % patch_cmd)

            # Write the command in the log file (can be seen using 'sat log')
            logger.logTxtFile.write("\n >%s\n" % patch_cmd)
            logger.logTxtFile.flush()

            # Call the command; shell=True is needed for the '<' redirection,
            # patch paths come from the product pyconf (trusted input)
            res_cmd = subprocess.call(
                          patch_cmd,
                          shell=True,
                          cwd=product_info.source_dir,
                          stdout=logger.logTxtFile,
                          stderr=subprocess.STDOUT )

            # subprocess.call returns the exit code: 0 means success
            res_cmd = (res_cmd == 0)
        else:
            res_cmd = False
            details.append(" " + UTS.red(_("Not a valid patch: %s\n")) % patch)

        res.append(res_cmd)

        if res_cmd:
            message = _("Apply patch %s") % UTS.blue(patch)
        else:
            message = _("Failed to apply patch %s") % UTS.red(patch)

        if config.USER.output_verbose_level >= 3:
            retcode.append(" %s" % message)
        else:
            retcode.append("%s: %s" % (product_info.name, message))

        if len(details) > 0:
            retcode.extend(details)

    # global status: KO as soon as one patch failed
    if False in res:
        rc = "KO"
    else:
        rc = "OK"

    return RCO.ReturnCode(rc, "\n".join(retcode))
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/prepare.html b/doc/build/html/_modules/commands/prepare.html new file mode 100644 index 0000000..0a65cd5 --- /dev/null +++ b/doc/build/html/_modules/commands/prepare.html @@ -0,0 +1,299 @@ + + + + + + + + commands.prepare — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.prepare

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import os
+import re
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The prepare command gets the sources of the application products
    and apply the patches if there is any.

    examples:
      >> sat prepare SALOME --products KERNEL,GUI
    """

    name = "prepare"

    def getParser(self):
        """Define all options for command 'sat prepare <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option(
            'p', 'products', 'list2', 'products',
            _('Optional: products to prepare. This option can be'
              ' passed several time to prepare several products.'))
        parser.add_option(
            'f', 'force', 'boolean', 'force',
            _("Optional: force to prepare the products in development mode."))
        parser.add_option(
            '', 'force_patch', 'boolean', 'force_patch',
            _("Optional: force to apply patch to the products in development mode."))
        return parser

    def run(self, cmd_arguments):
        """method called for command 'sat prepare <options>'

        Chains the 'clean', 'source' and 'patch' commands on the selected
        products, skipping development-mode products unless forced.
        """
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # check that the command has been called with an application
        # NOTE(review): 'src', 'UTS' and 'src.product' are referenced below
        # but not bound by this module's imports - confirm
        src.check_config_has_application( config )

        products_infos = self.get_products_list(options, config, logger)

        # Construct the arguments to pass to the clean, source and patch commands
        args_appli = config.VARS.application + ' '
        args_product_opt = '--products '
        if options.products:
            for p_name in options.products:
                args_product_opt += ',' + p_name
        else:
            for p_name, __ in products_infos:
                args_product_opt += ',' + p_name

        # products currently in development mode
        ldev_products = [p for p in products_infos if src.product.product_is_dev(p[1])]
        args_product_opt_clean = args_product_opt
        # do not clean dev products whose sources already exist, unless --force
        if not options.force and len(ldev_products) > 0:
            l_products_not_getted = find_products_already_getted(ldev_products)
            if len(l_products_not_getted) > 0:
                msg = _("""\
Do not get the source of the following products in development mode.
Use the --force option to overwrite it.
""")
                logger.error(UTS.red(msg))
                args_product_opt_clean = remove_products(args_product_opt_clean,
                                                         l_products_not_getted,
                                                         logger)

        args_product_opt_patch = args_product_opt
        # do not re-patch dev products that declare patches, unless --force_patch
        if not options.force_patch and len(ldev_products) > 0:
            l_products_with_patchs = find_products_with_patchs(ldev_products)
            if len(l_products_with_patchs) > 0:
                msg = _("""
Do not patch the following products in development mode.
Use the --force_patch option to overwrite it.
""")
                logger.error(UTS.red(msg))
                args_product_opt_patch = remove_products(args_product_opt_patch,
                                                         l_products_with_patchs,
                                                         logger)

        # Construct the final commands arguments
        args_clean = args_appli + args_product_opt_clean + " --sources"
        args_source = args_appli + args_product_opt
        args_patch = args_appli + args_product_opt_patch

        # If there is no more any product in the command arguments,
        # do not call the concerned command
        oExpr = re.compile("^--products *$")
        do_clean = not(oExpr.search(args_product_opt_clean))
        do_source = not(oExpr.search(args_product_opt))
        do_patch = not(oExpr.search(args_product_opt_patch))

        # Initialize the results to Ok but nothing done status
        res_clean = RCO.ReturnCode("OK", "nothing done")
        res_source = RCO.ReturnCode("OK", "nothing done")
        res_patch = RCO.ReturnCode("OK", "nothing done")

        # return res_clean + res_source + res_patch

        # Call the commands using the API
        if do_clean:
            msg = _("Clean the source directories ...")
            logger.info(msg)
            DBG.tofix("args_clean and TODO remove returns", args_clean, True)
            res_clean = runner.getCommand("clean").run(args_clean)
            # NOTE(review): deliberate debug short-circuit flagged by the
            # DBG.tofix above - the source/patch steps below are skipped
            return res_clean + res_source + res_patch
        if do_source:
            msg = _("Get the sources of the products ...")
            logger.debug(msg)
            res_source = runner.getCommand("source").run(args_source)
        if do_patch:
            msg = _("Patch the product sources (if any) ...")
            logger.debug(msg)
            res_patch = runner.getCommand("patch").run(args_patch)

        return res_clean + res_source + res_patch
+ + +
def remove_products(arguments, l_products_info, logger):
    """Removes the products in l_products_info from arguments list.

    The comma-separated product list is rebuilt token by token instead of
    using str.replace: replace(',' + name, '') also corrupts any other
    product whose name starts with 'name' (e.g. removing 'GUI' would
    mangle ',GUIQT').

    :param arguments: (str) The arguments from which to remove products
    :param l_products_info: (list)
      List of (str, Config) => (product_name, product_info)
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :return: (str) The updated arguments.
    """
    names_to_remove = set(name for name, __ in l_products_info)
    # split off the '--products ' head at the first comma and filter tokens
    head, sep, tail = arguments.partition(',')
    if sep:
        kept = [tok for tok in tail.split(',') if tok not in names_to_remove]
        args = head + ''.join(',' + tok for tok in kept)
    else:
        # no product list present: nothing to remove
        args = arguments
    # display the removed names: ', ' separated, newline after the last
    last_index = len(l_products_info) - 1
    for i, (product_name, __) in enumerate(l_products_info):
        end_text = '\n' if i == last_index else ', '
        logger.info(product_name + end_text)
    return args
+ +
def find_products_already_getted(l_products):
    """Return the products whose source directory already exists on disk.

    :param l_products: (list) The list of (name, config) products to check
    :return: (list)
      The (name, config) pairs that have an existing source directory.
    """
    return [pair for pair in l_products
            if os.path.exists(pair[1].source_dir)]
+ +
def find_products_with_patchs(l_products):
    """Return the products that declare one or more patches.

    :param l_products: (list) The list of (name, config) products to check
    :return: (list)
      The (name, config) pairs that have one or more patches.
    """
    result = []
    for pair in l_products:
        # an empty list is falsy: equivalent to len(...) > 0
        if src.get_cfg_param(pair[1], "patches", []):
            result.append(pair)
    return result
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/profile.html b/doc/build/html/_modules/commands/profile.html new file mode 100644 index 0000000..9ff5faa --- /dev/null +++ b/doc/build/html/_modules/commands/profile.html @@ -0,0 +1,363 @@ + + + + + + + + commands.profile — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.profile

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import shutil
+import subprocess
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.pyconf as PYCONF
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """\
    The profile command creates default profile.

    examples:
      >> sat profile [PRODUCT]
      >> sat profile --prefix (string)
      >> sat profile --name (string)
      >> sat profile --force
      >> sat profile --version (string)
      >> sat profile --slogan (string)
    """

    name = "profile"

    def getParser(self):
        """Define all options for command 'sat profile <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option(
            'p', 'prefix', 'string', 'prefix',
            _("Where the profile's sources will be generated.") )
        parser.add_option(
            'n', 'name', 'string', 'name',
            _("Name of the profile's sources. [Default: '${config.PRODUCT.name}_PROFILE]") )
        parser.add_option(
            'f', 'force', 'boolean', 'force',
            _("Overwrites existing sources.") )
        parser.add_option(
            'u', 'no_update', 'boolean', 'no_update',
            _("Does not update pyconf file.") )
        parser.add_option(
            'v', 'version', 'string', 'version',
            _("Version of the application. [Default: 1.0]"), '1.0' )
        parser.add_option(
            's', 'slogan', 'string', 'slogan',
            _("Slogan of the application.") )
        return parser

    def run(self, cmd_arguments):
        """method called for command 'sat profile <options>'

        Generates the default profile sources (--prefix is mandatory) and,
        unless --no_update is given, updates the application pyconf file.
        """
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # check that the command has been called with an application
        src.check_config_has_application(config)

        # --prefix is mandatory: it is where the sources are generated
        if options.prefix is None:
            msg = _("The --%s argument is required\n") % "prefix"
            logger.error(msg)
            return RCO.ReturnCode("KO", msg)

        retcode = generate_profile_sources(config, options, logger)

        if not options.no_update:
            update_pyconf(config, options)

        return retcode
class profileReference( PYCONF.Reference ):
    """Reference subclass that renders '@'-prefixed references back to text.

    Overrides common.Reference.__str__ so that fields starting with '@'
    keep their sigil when the configuration is written back out.
    """
    def __str__(self):
        # Rebuild the dotted/indexed access path from the parsed elements:
        # DOT tokens become '.name', anything else an '[index]' subscript.
        s = self.elements[0]
        for tt, tv in self.elements[1:]:
            if tt == PYCONF.DOT:
                s += '.%s' % tv
            else:
                s += '[%r]' % tv
        # Re-attach the original sigil: backticks wrap, '@' and '$' prefix.
        if self.type == PYCONF.BACKTICK:
            return PYCONF.BACKTICK + s + PYCONF.BACKTICK
        elif self.type == PYCONF.AT:
            return PYCONF.AT + s
        else:
            return PYCONF.DOLLAR + s
+ +## +# Class that overrides how fields starting with '@' are read. +
class profileConfigReader( PYCONF.ConfigReader ) :
    """ConfigReader subclass that accepts mapping values starting with '@'."""

    def parseMapping(self, parent, suffix):
        # Standard case: a '{ ... }' mapping body, parsed as usual.
        if self.token[0] == PYCONF.LCURLY:
            self.match(PYCONF.LCURLY)
            rv = PYCONF.Mapping(parent)
            rv.setPath(
               PYCONF.makePath(object.__getattribute__(parent, 'path'),
                               suffix))
            self.parseMappingBody(rv)
            self.match(PYCONF.RCURLY)
        else:
            # '@"filename"' form: produce a profileReference so the '@' sigil
            # survives a round-trip instead of building a Mapping.
            self.match(PYCONF.AT)
            __, fn = self.match('"')
            rv = profileReference(self, PYCONF.AT, fn)
        return rv
+ + + +## +# Gets the profile name +
def get_profile_name(options, config):
    """Return the name of the profile sources directory.

    The --name option wins when given; otherwise the application name
    suffixed with '_PROFILE' is used.

    :param options: (Options) The parsed command line options
    :param config: (Config) The global configuration
    :return: (str) the profile name
    """
    return options.name if options.name else config.APPLICATION.name + "_PROFILE"
+ +
def generate_profile_sources( config, options, logger ):
    """
    Generates the sources of the profile by driving KERNEL's
    app-quickstart.py script.

    :param config: (Config) The global configuration
    :param options: (Options) The parsed command line options
    :param logger: (Logger) The logger instance to use for display and logging
    :return: (int) return code of app-quickstart.py (0 on success)
    :raise Exception: when KERNEL or GUI is not installed, the target path
      already exists without --force, or the script fails
    """
    import os  # BUGFIX: 'os' is not imported at the top of this module

    # Check script app-quickstart.py exists
    # NOTE(review): plain 'src' (src.product) is not imported at module level
    # here — verify against the real commands/profile.py.
    kernel_cfg = src.product.get_product_config(config, "KERNEL")
    kernel_root_dir = kernel_cfg.install_dir
    if not src.product.check_installation(kernel_cfg):
        raise Exception(_("KERNEL is not installed"))
    script = os.path.join(kernel_root_dir, "bin", "salome", "app-quickstart.py")
    if not os.path.exists( script ):
        raise Exception( _("KERNEL's install has not the script app-quickstart.py") )

    # Check that GUI is installed
    gui_cfg = src.product.get_product_config(config, "GUI")
    gui_root_dir = gui_cfg.install_dir
    if not src.product.check_installation(gui_cfg):
        raise Exception(_("GUI is not installed"))

    # Set prefix option passed to app-quickstart.py
    name = get_profile_name ( options, config )
    prefix = os.path.join( options.prefix, name )
    if os.path.exists( prefix ) :
        if not options.force :
            raise Exception(
               _("The path %s already exists, use option --force to remove it.") % prefix )
        else :
            shutil.rmtree( prefix )

    # Set name option passed to app-quickstart.py (the script expects the
    # bare application name, without the '_PROFILE' suffix)
    if name.upper().endswith("_PROFILE"):
        name = name[:-8]

    # Write command line that calls app-quickstart.py
    command = "python %s --prefix=%s --name=%s --modules=_NO_ --version=%s" % \
              ( script, prefix, name, options.version )
    if options.force :
        command += " --force"
    if options.slogan :
        command += " --slogan=%s" % options.slogan
    logger.debug("\n>" + command + "\n")

    # Run command with the module roots exported for the script
    os.environ["KERNEL_ROOT_DIR"] = kernel_root_dir
    os.environ["GUI_ROOT_DIR"] = gui_root_dir
    res = subprocess.call(command,
                          shell=True,
                          env=os.environ,
                          stdout=logger.logTxtFile,
                          stderr=subprocess.STDOUT)
    # Check result of command
    if res != 0:
        raise Exception(_("Cannot create application, code = %d\n") % res)
    else:
        # BUGFIX: interpolate AFTER translation so the i18n catalog can match
        # the message template (the original formatted inside _()).
        logger.info( _("Profile sources were generated in directory %s.\n") % prefix )
    return res
+ + +
def update_pyconf( config, options, logger ):
    """
    Updates the application pyconf file: backs up the current
    '<product>.pyconf', then registers the generated profile product in
    APPLICATION.products, APPLICATION.profile and PRODUCTS.

    :param config: (Config) The global configuration
    :param options: (Options) The parsed command line options
    :param logger: (Logger) The logger instance to use for display and logging
    """
    import os  # BUGFIX: 'os' is not imported at the top of this module

    # Save previous version
    pyconf = config.VARS.product + '.pyconf'
    pyconfBackup = config.VARS.product + '-backup.pyconf'
    # BUGFIX: closed the parenthesis in the user message (was "saved as %s.")
    logger.info( _("Updating %s (previous version saved as %s).") % (pyconf, pyconfBackup) )
    path = config.getPath( pyconf )
    shutil.copyfile( os.path.join( path, pyconf ),
                     os.path.join( path, pyconfBackup ) )

    # Load config with the '@'-aware reader so profile references survive
    cfg = PYCONF.Config( )
    object.__setattr__( cfg, 'reader', profileConfigReader( cfg ) )
    cfg.load( PYCONF.defaultStreamOpener( os.path.join( path, pyconf ) ) )

    # Check if profile is in APPLICATION.products
    profile = get_profile_name ( options, config )
    if not profile in cfg.APPLICATION.products:
        cfg.APPLICATION.products.append( profile, None )

    # Check if profile is in APPLICATION
    if not 'profile' in cfg.APPLICATION:
        cfg.APPLICATION.addMapping( 'profile', PYCONF.Mapping(), None )
        cfg.APPLICATION.profile.addMapping( 'module', profile, None )
        cfg.APPLICATION.profile.addMapping( 'launcher_name',
                                            config.VARS.product.lower(), None )

    # Check if profile info is in PRODUCTS
    if not 'PRODUCTS' in cfg:
        cfg.addMapping( 'PRODUCTS', PYCONF.Mapping(), None )

    if not profile in cfg.PRODUCTS:
        cfg.PRODUCTS.addMapping( profile, PYCONF.Mapping(), None )
        cfg.PRODUCTS[profile].addMapping( 'default', PYCONF.Mapping(),
                                          None )
        # NOTE(review): reads back through TOOLS.common.module_info rather than
        # cfg.PRODUCTS[profile].default — confirm this aliasing is intended.
        prf = cfg.TOOLS.common.module_info[profile].default
        prf.addMapping( 'name', profile, None )
        prf.addMapping( 'get_source', 'archive', None )
        prf.addMapping( 'build_source', 'cmake', None )
        prf.addMapping( 'archive_info', PYCONF.Mapping(), None )
        prf.archive_info.addMapping(
            'name', os.path.join(os.path.abspath(options.prefix), profile), None )
        tmp = "APPLICATION.workdir + $VARS.sep + 'SOURCES' + $VARS.sep + $name"
        prf.addMapping( 'source_dir',
                        PYCONF.Reference(cfg, PYCONF.DOLLAR, tmp ),
                        None )
        tmp = "APPLICATION.workdir + $VARS.sep + 'BUILD' + $VARS.sep + $name"
        prf.addMapping( 'build_dir',
                        PYCONF.Reference(cfg, PYCONF.DOLLAR, tmp ),
                        None )
        prf.addMapping( 'depend', PYCONF.Sequence(), None )
        prf.depend.append( 'KERNEL', None )
        prf.depend.append( 'GUI', None )
        prf.depend.append( 'Python', None )
        prf.depend.append( 'Sphinx', None )
        prf.depend.append( 'qt', None )
        prf.addMapping( 'opt_depend', PYCONF.Sequence(), None )

    # Save config
    # BUGFIX: the Python-2 builtin file() does not exist in Python 3; use
    # open() in a with-block so the handle is always closed.
    with open( os.path.join( path, pyconf ), 'w') as f:
        cfg.__save__(f)
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/run.html b/doc/build/html/_modules/commands/run.html new file mode 100644 index 0000000..6104bf3 --- /dev/null +++ b/doc/build/html/_modules/commands/run.html @@ -0,0 +1,199 @@ + + + + + + + + commands.run — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.run

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import os
+import subprocess
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The run command runs the application launcher with the given arguments.

    examples:
      >> sat run SALOME
    """

    name = "run"

    def getParser(self):
        """Define all options for command 'sat run <options>'"""
        parser = self.getParserWithHelp()  # no options yet
        return parser

    def run(self, cmd_arguments):
        """method called for command 'sat run <options>'

        :param cmd_arguments: (str or list) the command line arguments
        :return: (ReturnCode) OK status with the 'how to see the logs' message
        :raise Exception: when the launcher file cannot be found
        """
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # check for product
        # NOTE(review): plain 'src' is not imported at the top of this module —
        # verify against the real commands/run.py.
        src.check_config_has_application(config)

        # Determine launcher path
        launcher_name = src.get_launcher_name(config)
        launcher_dir = config.APPLICATION.workdir

        # Check the launcher existence
        if launcher_name not in os.listdir(launcher_dir):
            message = _("""\
The launcher %(1)s was not found in directory '%(2)s'.
Did you run the command 'sat launcher' ?\n""") % {"1": launcher_name, "2": launcher_dir}
            raise Exception(message)

        launcher_path = os.path.join(launcher_dir, launcher_name)

        if not os.path.exists(launcher_path):
            message = _("""\
The launcher at path '%s' is missing.
Did you run the command 'sat launcher' ?\n""") % launcher_path
            raise Exception(message)

        # Determine the command to launch (add the additional arguments)
        # BUGFIX: the original used the undefined name 'args'; the extra
        # arguments are the remainders of the parsed command line.
        command = launcher_path + " " + " ".join(remaindersArgs)

        # Print the command
        logger.info(_("Executed command <blue>%s<reset> Launching ...\n") % command)

        # Run the launcher
        subprocess.call(command,
                        shell=True,
                        stdout=logger.logTxtFile,
                        stderr=subprocess.STDOUT)

        # Display information: how to get the logs
        msg1 = _("End of 'sat run'. To see traces, type:")
        msg2 = UTS.label("sat log " + config.VARS.application)
        msg = "%s\n%s\n" % (msg1, msg2)
        logger.info(msg)

        return RCO.ReturnCode("OK", msg)
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/script.html b/doc/build/html/_modules/commands/script.html new file mode 100644 index 0000000..8cc0183 --- /dev/null +++ b/doc/build/html/_modules/commands/script.html @@ -0,0 +1,328 @@ + + + + + + + + commands.script — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.script

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The script command executes the script(s) of the the given products in the build directory.
    This is done only for the products that are constructed using a script (build_source : 'script').
    Otherwise, nothing is done.

    examples:
      >> sat script SALOME --products Python,numpy
    """

    name = "script"

    def getParser(self):
        """Define all options for the command 'sat script <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option('p', 'products', 'list2', 'products',
            _('Optional: products to configure. This option can be'
              ' passed several time to configure several products.'))
        parser.add_option('', 'nb_proc', 'int', 'nb_proc',
            _('Optional: The number of processors to use in the script if the make '
              'command is used in it.\n\tWarning: the script has to be correctly written '
              'if you want this option to work.\n\tThe $MAKE_OPTIONS has to be '
              'used.'), 0)
        return parser

    def run(self, cmd_arguments):
        """method called for command 'sat script <options>'

        :param cmd_arguments: (str or list) the command line arguments
        :return: (int) the number of failing products (0 means all OK)
        """
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # check that the command has been called with an application
        # NOTE(review): 'src', 'os' and 'UTS' are used below but not imported
        # at the top of this module — verify against the real commands/script.py.
        src.check_config_has_application( config )

        # Get the list of products to treat
        products_infos = get_products_list(options, config, logger)

        # Print some informations
        msg = ('Executing the script in the build directories of the application %s\n') % \
              UTS.label(config.VARS.application)
        logger.info(msg)

        info = [(_("BUILD directory"), os.path.join(config.APPLICATION.workdir, 'BUILD'))]
        UTS.logger_info_tuples(logger, info)

        # Call the function that will loop over all the products and execute
        # the right command(s)
        if options.nb_proc is None:
            options.nb_proc = 0
        res = run_script_all_products(config,
                                      products_infos,
                                      options.nb_proc,
                                      logger)

        # Print the final state
        nb_products = len(products_infos)
        if res == 0:
            final_status = "<OK>"
        else:
            final_status = "<KO>"

        # BUGFIX: the format string was malformed ('%(s' instead of '%s') and
        # raised ValueError at run time.
        logger.info( _("\nScript: %s (%d/%d)\n") % \
                     (final_status, nb_products - res, nb_products) )

        return res
+ + +
def get_products_list(options, cfg, logger):
    """
    Gives the product list with their informations from
    configuration regarding the passed options.

    :param options: (Options)
      The Options instance that stores the commands arguments
    :param cfg: (Config) The global configuration
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :return: (list) The list of (product name, product_informations).
    """
    if options.products is None:
        # No --products option: take every product of the application
        wanted = cfg.APPLICATION.products
    else:
        # --products given: every requested product must belong to the application
        wanted = options.products
        for candidate in wanted:
            if candidate not in cfg.APPLICATION.products:
                raise Exception(_("Product %(product)s "
                                  "not defined in application %(application)s") % \
                    { 'product': candidate, 'application': cfg.VARS.application} )

    # Build the (product name, product definition) tuples, then drop the
    # native and fixed products: there is nothing to script for them.
    infos = src.product.get_products_infos(wanted, cfg)
    return [item for item in infos
            if not (src.product.product_is_native(item[1]) or
                    src.product.product_is_fixed(item[1]))]
+ +
def log_step(logger, header, step):
    """Rewrite the current progress line with *step* and trace it at debug level."""
    # Blank out the previous step text, then print the new one on the same line.
    for tail in (" " * 20, step):
        logger.info("\r%s%s" % (header, tail))
    # NOTE(review): UTS is not imported at the top of this module — verify
    # against the real commands/script.py.
    logger.debug("\n==== %s \n" % UTS.info(step))
+ +
def log_res_step(logger, res):
    """Trace the step outcome at debug level: <OK> when res == 0, else <KO>."""
    logger.debug("<OK>\n" if res == 0 else "<KO>\n")
+ +
def run_script_all_products(config, products_infos, nb_proc, logger):
    """Execute the script in each product build directory.

    :param config: (Config) The global configuration
    :param products_infos: (list)
      List of (str, Config) => (product_name, product_info)
    :param nb_proc: (int) The number of processors to use
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :return: (int) The number of failing commands.
    """
    failures = 0
    for name_info in products_infos:
        # Any non-zero per-product return code counts as one failure.
        if run_script_of_product(name_info, nb_proc, config, logger) != 0:
            failures += 1
    return failures
+ +
def run_script_of_product(p_name_info, nb_proc, config, logger):
    """
    Execute the product's compilation script in its build directory.

    :param p_name_info: (tuple)
      (str, Config) => (product_name, product_info)
    :param nb_proc: (int) The number of processors to use
    :param config: (Config) The global configuration
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :return: (int) 0 on success or when the product is skipped,
      else the non-zero code of the failing build step.
    """

    p_name, p_info = p_name_info

    # Logging: one progress line per product, padded with dots
    # NOTE(review): UTS and src are used below but not imported at the top of
    # this module — verify against the real commands/script.py.
    header = _("Running script of %s") % UTS.label(p_name)
    header += " %s " % ("." * (20 - len(p_name)))
    logger.info("\n" + header)

    # Do nothing if the product is not compilable or has no compilation script
    test1 = "properties" in p_info and \
            "compilation" in p_info.properties and \
            p_info.properties.compilation == "no"
    if ( test1 or (not src.product.product_has_script(p_info)) ):
        log_step(logger, header, "ignored")
        logger.info("\n")
        return 0

    # Instantiate the class that manages all the construction commands
    # like cmake, make, make install, make test, environment management, etc...
    builder = src.compilation.Builder(config, logger, p_info)

    # Prepare the environment
    log_step(logger, header, "PREPARE ENV")
    res_prepare = builder.prepare()
    log_res_step(logger, res_prepare)

    # Execute the script; len_end_line tracks how much of the progress line
    # must be blanked out afterwards
    len_end_line = 20
    script_path_display = UTS.label(p_info.compil_script)
    log_step(logger, header, "SCRIPT " + script_path_display)
    len_end_line += len(script_path_display)
    res = builder.do_script_build(p_info.compil_script, number_of_proc=nb_proc)
    log_res_step(logger, res)

    # Log the result: rewrite the progress line with the final <OK>/<KO> tag
    if res > 0:
        logger.info("\r%s%s" % (header, " " * len_end_line))
        logger.info("\r" + header + "<KO>")
        logger.debug("==== <KO> in script execution of %s\n" % p_name)
    else:
        logger.info("\r%s%s" % (header, " " * len_end_line))
        logger.info("\r" + header + "<OK>")
        logger.debug("==== <OK> in script execution of %s\n" % p_name)
    logger.info("\n")

    return res
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/shell.html b/doc/build/html/_modules/commands/shell.html new file mode 100644 index 0000000..a9007c8 --- /dev/null +++ b/doc/build/html/_modules/commands/shell.html @@ -0,0 +1,183 @@ + + + + + + + + commands.shell — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.shell

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+
+import subprocess
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+
+########################################################################
+# Command class
+########################################################################
+
[docs]class Command(_BaseCommand): + """\ + The shell command executes the shell command passed as argument. + + examples: + >> sat shell --command 'ls -lt /tmp' + """ + + name = "shell" + +
[docs] def getParser(self): + """Define all options for the command 'sat shell <options>'""" + parser = self.getParserWithHelp() + parser.add_option('c', 'command', 'string', 'command', + _('Mandatory: The shell command to execute.'), "") + return parser
+ +
[docs] def run(self, cmd_arguments): + """method called for command 'sat shell <options>'""" + argList = self.assumeAsList(cmd_arguments) + + # print general help and returns + if len(argList) == 0: + self.print_help() + return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name) + + self._options, remaindersArgs = self.parseArguments(argList) + + if self._options.help: + self.print_help() + return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name) + + # shortcuts + runner = self.getRunner() + config = self.getConfig() + logger = self.getLogger() + options = self.getOptions() + + # Make sure the command option has been called + if not options.command: + msg = _("The option --command is required\n") + logger.error(msg) + return 1 + + # Print the input command + msg = _("Command to execute:\n%s\nExecution ... ") % options.command + logger.info(msg) + + # Call the input command + res = subprocess.call(options.command, + shell=True, + stdout=logger.logTxtFile, + stderr=subprocess.STDOUT) + + # Format the result to be 0 (success) or 1 (fail) + if res != 0: + status = "KO" + else: + status = "OK" + + logger.info("<%s>\n" % status) + return RCO.ReturnCode(status, "shell command done")
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/source.html b/doc/build/html/_modules/commands/source.html new file mode 100644 index 0000000..064a397 --- /dev/null +++ b/doc/build/html/_modules/commands/source.html @@ -0,0 +1,630 @@ + + + + + + + + commands.source — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.source

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import os
+import shutil
+
+import src.debug as DBG
+import src.returnCode as RCO
+from src.salomeTools import _BaseCommand
+import src.system as SYSS
+
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The source command gets the sources of the application products
    from cvs, git or an archive.

    examples:
      >> sat source SALOME --products KERNEL,GUI
    """

    name = "source"

    def getParser(self):
        """Define all options for command 'sat source <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option('p', 'products', 'list2', 'products',
            _('Optional: products from which to get the sources. This option can be'
              ' passed several time to get the sources of several products.'))
        return parser

    def run(self, cmd_arguments):
        """method called for command 'sat source <options>'

        :param cmd_arguments: (str or list) the command line arguments
        :return: (ReturnCode) OK when every product's sources were retrieved
        """
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # check that the command has been called with an application
        # NOTE(review): 'src' and 'UTS' are used below but not imported at the
        # top of this module — verify against the real commands/source.py.
        src.check_config_has_application( config )

        # Print some informations
        # BUGFIX: dropped the stray trailing ', 1' argument, which does not
        # match this logger's info() signature (leftover of an older API).
        logger.info(_('Getting sources of the application %s\n') % \
                    UTS.label(config.VARS.application))
        logger.info(" workdir = %s\n" % config.APPLICATION.workdir)

        # Get the products list with products informations regarding the options
        products_infos = self.get_products_list(options, config, logger)

        # Call to the function that gets all the sources
        good_result, results = get_all_product_sources(config,
                                                       products_infos,
                                                       logger)

        # Display the results (how much passed, how much failed, etc...)
        details = []
        nbExpected = len(products_infos)
        msgCount = "(%d/%d)" % (good_result, nbExpected)
        if good_result == nbExpected:
            status = "OK"
            msg = _("Getting sources of the application")
            logger.info("\n%s %s: <%s>.\n" % (msg, msgCount, status))
        else:
            status = "KO"
            msg = _("Some sources haven't been get")
            # BUGFIX: the original indexed 'results[product]' with an undefined
            # name; use the comprehension variable 'p'.
            details = [p for p in results if (results[p] == 0 or results[p] is None)]
            details = " ".join(details)
            logger.info("\n%s %s: <%s>.\n%s\n" % (msg, msgCount, status, details))

        # BUGFIX: '"%s %s" % msg, msgCount' passed a tuple and raised
        # 'not enough arguments for format string'; format both values.
        return RCO.ReturnCode(status, "%s %s" % (msg, msgCount))
+ + +
def get_source_for_dev(config, product_info, source_dir, logger, pad):
    """
    Called if the product is in development mode

    :param config: (Config) The global configuration
    :param product_info: (Config)
      The configuration specific to the product to be prepared
    :param source_dir: (Path)
      The Path instance corresponding to the directory where to put the sources
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :param pad: (int) The gap to apply for the terminal display
    :return: (bool) True if it succeed, else False
    """
    # Delegate to the generic extraction, forcing checkout mode on
    retcode = get_product_sources(config, product_info, True,
                                  source_dir, logger, pad,
                                  checkout=True)
    # +2 because product name is followed by ': '
    logger.info("\n" + " " * (pad + 2))
    logger.info('dev: %s ... ' % UTS.info(product_info.source_dir))
    return retcode
+ +
def get_source_from_git(product_info,
                        source_dir,
                        logger,
                        pad,
                        is_dev=False,
                        environ = None):
    """
    Called if the product is to be get in git mode

    :param product_info: (Config)
      The configuration specific to the product to be prepared
    :param source_dir: (Path)
      The Path instance corresponding to the
      directory where to put the sources
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :param pad: (int) The gap to apply for the terminal display
    :param is_dev: (bool) True if the product is in development mode
    :param environ: (src.environment.Environ)
      The environment to source when extracting.
    :return: (bool) True if it succeed, else False
    """
    # The str to display
    coflag = 'git'

    # Get the repository address (from the repo_dev key if the product is
    # in dev mode).
    if is_dev and 'repo_dev' in product_info.git_info:
        coflag = coflag.upper()
        repo_git = product_info.git_info.repo_dev
    else:
        repo_git = product_info.git_info.repo

    # Display informations
    msg = "'%s:%s" % (coflag, repo_git)
    msg += " " * (pad + 50 - len(repo_git))
    msg += " tag:%s" % product_info.git_info.tag
    # BUGFIX: '%' and '*' have equal precedence (left-to-right), so the
    # original '"%s. " % "." * (10 - len(tag))' repeated '".. "' instead of
    # padding with dots; parenthesize as in get_source_from_cvs.
    msg += "%s. " % ("." * (10 - len(product_info.git_info.tag)))
    logger.info("\n" + msg)

    # Call the system function that do the extraction in git mode
    retcode = SYSS.git_extract(repo_git,
                               product_info.git_info.tag,
                               source_dir, logger, environ)
    return retcode
+ +
def get_source_from_archive(product_info, source_dir, logger):
    """The method called if the product is to be get in archive mode

    :param product_info: (Config)
      The configuration specific to the product to be prepared
    :param source_dir: (Path)
      The Path instance corresponding to the directory
      where to put the sources
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :return: (bool) True if it succeed, else False
    """
    archive = product_info.archive_info.archive_name

    # check archive exists
    if not os.path.exists(archive):
        raise Exception(_("Archive not found: '%s'") % archive)

    logger.info('arc:%s ... ' % UTS.info(archive))
    # Call the system function that do the extraction in archive mode
    retcode, extracted_name = SYSS.archive_extract(archive,
                                                   source_dir.dir(), logger)

    # When the archive's top-level directory does not match the expected
    # product source directory name, move it into place.
    if extracted_name.replace('/', '') != os.path.basename(product_info.source_dir):
        shutil.move(os.path.join(os.path.dirname(product_info.source_dir),
                                 extracted_name),
                    product_info.source_dir)

    return retcode
+ +
def get_source_from_dir(product_info, source_dir, logger):
    """Copy the product sources from the plain directory given in dir_info.dir.

    :param product_info: (Config)
      The configuration specific to the product to be prepared
    :param source_dir: (Path) where to put the sources
    :param logger: (Logger) the logger instance for display and logging
    :return: (bool) True if it succeed, else False
    """
    # Both the dir_info section and its 'dir' key are mandatory
    if "dir_info" not in product_info:
        logger.error(_("You must put a dir_info section in the file %s.pyconf") % \
                     product_info.name)
        return False

    if "dir" not in product_info.dir_info:
        logger.error(_("Error: you must put a dir in the dir_info section in the file %s.pyconf") % \
                     product_info.name)
        return False

    # check that source exists
    src_dir = product_info.dir_info.dir
    if not os.path.exists(src_dir):
        logger.error(_("The dir %s defined in the file %s.pyconf does not exists") % \
                     (src_dir, product_info.name))
        return False

    logger.info('DIR: %s ... ' % UTS.info(src_dir))
    return src.Path(src_dir).copy(source_dir)
+ +
def get_source_from_cvs(user,
                        product_info,
                        source_dir,
                        checkout,
                        logger,
                        pad,
                        environ = None):
    """
    The method called if the product is to be get in cvs mode

    :param user: (str) The user to use in for the cvs command
    :param product_info: (Config)
      The configuration specific to the product to be prepared
    :param source_dir: (Path)
      The Path instance corresponding to the directory
      where to put the sources
    :param checkout: (bool) If True, get the source in checkout mode
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :param pad: (int) The gap to apply for the terminal display
    :param environ: (src.environment.Environ)
      The environment to source when extracting.
    :return: (bool) True if it succeed, else False
    """
    # Get the protocol to use in the command
    if "protocol" in product_info.cvs_info:
        protocol = product_info.cvs_info.protocol
    else:
        protocol = "pserver"

    # Construct the line to display
    # NOTE(review): this repeats the "protocol" membership test just done
    # above, so the else branch never shows the default "pserver" — confirm
    # this duplication is intended.
    if "protocol" in product_info.cvs_info:
        cvs_line = "%s:%s@%s:%s" % \
            (protocol, user, product_info.cvs_info.server,
             product_info.cvs_info.product_base)
    else:
        cvs_line = "%s / %s" % (product_info.cvs_info.server,
                                product_info.cvs_info.product_base)

    # Uppercase flag marks checkout mode in the display
    coflag = 'cvs'
    if checkout: coflag = coflag.upper()

    msg = '%s:%s' % (coflag, cvs_line)
    msg += " " * (pad + 50 - len(cvs_line))
    msg += " src:%s" % product_info.cvs_info.source
    msg += " " * (pad + 1 - len(product_info.cvs_info.source))
    msg += " tag:%s" % product_info.cvs_info.tag

    # at least one '.' is visible
    msg += " %s. " % ("." * (10 - len(product_info.cvs_info.tag)))

    logger.info(msg)

    # Call the system function that do the extraction in cvs mode
    retcode = SYSS.cvs_extract(protocol, user,
                               product_info.cvs_info.server,
                               product_info.cvs_info.product_base,
                               product_info.cvs_info.tag,
                               product_info.cvs_info.source,
                               source_dir, logger, checkout, environ)
    return retcode
+ +
def get_source_from_svn(user,
                        product_info,
                        source_dir,
                        checkout,
                        logger,
                        environ = None):
    """The method called if the product is to be get in svn mode

    :param user: (str) The user to use in for the svn command
    :param product_info: (Config)
      The configuration specific to the product to be prepared
    :param source_dir: (Path)
      The Path instance corresponding to the directory
      where to put the sources
    :param checkout: (boolean)
      If True, get the source in checkout mode
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :param environ: (src.environment.Environ)
      The environment to source when extracting.
    :return: (bool) True if it succeed, else False
    """
    # upper-case flag marks checkout mode in the terminal display
    mode_flag = 'SVN' if checkout else 'svn'
    logger.info('%s:%s ... ' % (mode_flag, product_info.svn_info.repo))

    # Delegate the actual extraction to the system helper
    return SYSS.svn_extract(user,
                            product_info.svn_info.repo,
                            product_info.svn_info.tag,
                            source_dir,
                            logger,
                            checkout,
                            environ)
+ +
def get_product_sources(config,
                        product_info,
                        is_dev,
                        source_dir,
                        logger,
                        pad,
                        checkout=False):
    """Get the product sources.

    Dispatches to the mode-specific getter (dev, git, archive, dir, cvs,
    svn) according to product_info.get_source; 'native' and 'fixed'
    products require no retrieval and succeed immediately.

    :param config: (Config) The global configuration
    :param product_info: (Config)
      The configuration specific to the product to be prepared
    :param is_dev: (bool) True if the product is in development mode
    :param source_dir: (Path)
      The Path instance corresponding to the directory
      where to put the sources
    :param logger: (Logger)
      The logger instance to use for the display and logging
    :param pad: (int) The gap to apply for the terminal display
    :param checkout: (bool) If True, get the source in checkout mode
    :return: (bool) True if it succeed, else False
    """

    # Get the application environment
    logger.info(_("Set the application environment\n"))
    env_appli = src.environment.SalomeEnviron(config,
                                src.environment.Environ(dict(os.environ)))
    env_appli.set_application_env(logger)

    # Call the right function to get sources regarding the product settings
    # Development mode takes precedence over get_source (unless checkout)
    if not checkout and is_dev:
        return get_source_for_dev(config,
                                  product_info,
                                  source_dir,
                                  logger,
                                  pad)

    if product_info.get_source == "git":
        return get_source_from_git(product_info, source_dir, logger, pad,
                                   is_dev,env_appli)

    if product_info.get_source == "archive":
        return get_source_from_archive(product_info, source_dir, logger)

    if product_info.get_source == "dir":
        return get_source_from_dir(product_info, source_dir, logger)

    if product_info.get_source == "cvs":
        cvs_user = config.USER.cvs_user
        return get_source_from_cvs(cvs_user,
                                   product_info,
                                   source_dir,
                                   checkout,
                                   logger,
                                   pad,
                                   env_appli)

    if product_info.get_source == "svn":
        svn_user = config.USER.svn_user
        return get_source_from_svn(svn_user, product_info, source_dir,
                                   checkout,
                                   logger,
                                   env_appli)

    if product_info.get_source == "native":
        # skip: nothing to fetch for a system-provided product
        msg = "<OK>" + _("\ndo nothing because the product is of type 'native'.\n")
        logger.info(msg)
        return True

    if product_info.get_source == "fixed":
        # skip: the product points to a pre-installed directory
        msg = "<OK>" + _("\ndo nothing because the product is of type 'fixed'.\n")
        logger.info(msg)
        return True

    # if the get_source is not in [git, archive, cvs, svn, fixed, native]
    msg = _("Unknown get source method '%s' for product %s") % \
        ( product_info.get_source, product_info.name)
    logger.info("%s ... " % msg)
    return False
+ +
def get_all_product_sources(config, products, logger):
    """Get all the product sources.

    :param config: (Config) The global configuration
    :param products: (list)
      The list of tuples (product name, product informations)
    :param logger: (Logger)
      The logger instance to be used for the logging
    :return: (int,dict)
      The tuple (number of success, dictionary product_name/success_fail)
    """

    # Initialize the variables that will count the fails and success
    results = dict()
    good_result = 0

    # Get the maximum product-name length to format the terminal display.
    # BUGFIX: the original computed max(map(len, products[0])), i.e. the
    # max over the elements of the FIRST (name, info) tuple only; alignment
    # must use the longest product name over the whole list.
    max_product_name_len = 1
    if len(products) > 0:
        max_product_name_len = max(len(name) for name, _info in products) + 4

    # The loop on all the products from which to get the sources
    for product_name, product_info in products:
        # the directory where to put the sources (empty path for products
        # that are fixed or native, which have nothing to fetch)
        if (not (src.product.product_is_fixed(product_info) or
                 src.product.product_is_native(product_info))):
            source_dir = src.Path(product_info.source_dir)
        else:
            source_dir = src.Path('')

        # display and log
        logger.info('%s: ' % UTS.label(product_name))
        logger.info(' ' * (max_product_name_len - len(product_name)))

        # Sources already present: do nothing and go to the next product
        is_dev = src.product.product_is_dev(product_info)
        if source_dir.exists():
            logger.info("<OK>\n")
            msg = _("Nothing done because source directory existing yet.\n")
            logger.info(msg)
            good_result = good_result + 1
            continue

        # Call to the function that get the sources for one product
        retcode = get_product_sources(config,
                                      product_info,
                                      is_dev,
                                      source_dir,
                                      logger,
                                      max_product_name_len,
                                      checkout=False)

        # NOTE(review): legacy disabled call kept for reference:
        #   if 'no_rpath' in product_info.keys():
        #       if product_info.no_rpath:
        #           hack_no_rpath(config, product_info, logger)

        # Check that the sources are correctly retrieved, using the files
        # to be tested declared in the product information
        if retcode:
            check_OK, wrong_path = check_sources(product_info, logger)
            if not check_OK:
                # Print the missing file path
                msg = _("The required file %s does not exists.\n") % wrong_path
                logger.error(msg)
                retcode = False

        # record the per-product outcome
        results[product_name] = retcode
        if retcode:
            res = "<OK>"
            good_result = good_result + 1
        else:
            res = "<KO>"

        # print the result (fixed/native products printed their own status)
        if not(src.product.product_is_fixed(product_info) or
               src.product.product_is_native(product_info)):
            logger.info('%s\n' % res)

    return good_result, results
+ +
def check_sources(product_info, logger):
    """
    Check that the sources are correctly retrieved,
    using the files to be tested listed in the product information.

    :param product_info: (Config)
      The configuration specific to the product to be prepared
    :param logger: (Logger)
      The logger instance to be used for the logging
    :return: (bool, str)
      Tuple (all_ok, missing) where missing is a newline-separated list of
      the files not found (empty string when everything is present or when
      there is nothing to test).
    """
    # BUGFIX: when the product declares no present_files.source, the
    # original fell off the end and returned None, which broke the caller's
    # tuple unpacking. Report success explicitly in that case.
    if not ("present_files" in product_info and
            "source" in product_info.present_files):
        return True, ""

    l_files_to_be_tested = product_info.present_files.source
    res = True      # all ok a priori
    filesKo = ""    # accumulated missing paths
    for file_path in l_files_to_be_tested:
        # add source directory of the product
        path_to_test = os.path.join(product_info.source_dir, file_path)
        # BUGFIX(gettext): format AFTER translation, not inside _()
        msg = _("File %s testing existence:") % path_to_test
        if not os.path.exists(path_to_test):
            logger.debug("%s <KO>\n" % msg)
            res = False
            filesKo += path_to_test + "\n"  # check all files, not just first
        else:
            logger.debug("%s <OK>\n" % msg)
    return res, filesKo
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/template.html b/doc/build/html/_modules/commands/template.html new file mode 100644 index 0000000..9ff6542 --- /dev/null +++ b/doc/build/html/_modules/commands/template.html @@ -0,0 +1,615 @@ + + + + + + + + commands.template — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.template

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import os
+import string
+import shutil
+import subprocess
+import fnmatch
+import re
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+import src.system as SYSS
+
+# Compatibility python 2/3 for input function
+# input stays input for python 3 and input = raw_input for python 2
+try: 
+    input = raw_input
+except NameError: 
+    pass
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The template command creates the sources for a SALOME module from a template.

    examples:
      >> sat template --name my_product_name --template PythonComponent --target /tmp
    """

    name = "template"

    def getParser(self):
        """Define all options for command 'sat template <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option('n', 'name', 'string', 'name',
            _("""\
REQUIRED: the name of the module to create.
  The name must be a single word in upper case with only alphanumeric characters.
  When generating a c++ component the module's name must be suffixed with 'CPP'."""))
        parser.add_option('t', 'template', 'string', 'template',
            _('REQUIRED: the template to use.'))
        parser.add_option('', 'target', 'string', 'target',
            _('REQUIRED: where to create the module.'))
        parser.add_option('', 'param', 'string', 'param',
            _("""\
Optional: dictionary to generate the configuration for salomeTools.
  Format is: --param param1=value1,param2=value2... (without spaces).
  Note that when using this option you must supply all the values,
  otherwise an error will be raised.""") )
        parser.add_option('', 'info', 'boolean', 'info',
            _('Optional: Get information on the template.'), False)
        return parser

    def run(self, cmd_arguments):
        """method called for command 'sat template <options>'"""
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        # validate the required options
        msg_miss = _("The --%s argument is required\n")
        if options.template is None:
            logger.error(msg_miss % "template")
            return 1

        if options.target is None and options.info is None:
            logger.error(msg_miss % "target")
            return 1

        if "APPLICATION" in config:
            msg = _("This command does not use a product.\n")
            logger.error(msg)
            return 1

        if options.info:
            return get_template_info(config, options.template, logger)

        if options.name is None:
            logger.error(msg_miss % "name")
            return 1

        if not options.name.replace('_', '').isalnum():
            msg = _("""\
Component name must contains only alphanumeric characters and no spaces\n""")
            logger.error(msg)
            return 1

        if options.target is None:
            logger.error(msg_miss % "target")
            return 1

        target_dir = os.path.join(options.target, options.name)
        if os.path.exists(target_dir):
            msg = _("The target already exists: %s\n") % target_dir
            logger.error(msg)
            return 1

        msg = ""
        msg += _('Create sources from template\n')
        msg += ' destination = %s\n' % target_dir
        # BUGFIX: was "' name = %\ns'" — a broken format string that printed
        # a literal '%' and misplaced the newline instead of the name.
        msg += ' name = %s\n' % options.name
        msg += ' template = %s\n' % options.template
        logger.info(msg)

        # parse --param param1=value1,param2=value2 into a dict
        conf_values = None
        if options.param is not None:
            conf_values = {}
            for elt in options.param.split(","):
                param_def = elt.strip().split('=')
                if len(param_def) != 2:
                    msg = _("Bad parameter definition: '%s'\n") % elt
                    logger.error(msg)
                    return 1
                conf_values[param_def[0].strip()] = param_def[1].strip()

        retcode = prepare_from_template(config, options.name, options.template,
                                        target_dir, conf_values, logger)

        if retcode == 0:
            logger.info(_("The sources were created in %s\n") % UTS.info(target_dir))
            msg = _("Do not forget to put them in your version control system.\n")
            logger.info("\n" + UTS.red(msg))
        else:
            logger.info("\n")

        return retcode
class TParam:
    """
    One template parameter, built from its definition in template.info.

    A definition is either a plain name (str) or a tuple
    (name[, default[, prompt[, check_method]]]).
    """
    def __init__(self, param_def, compo_name, dico=None):
        # compo_name is accepted for signature parity with callers; it is
        # not used in this constructor
        self.default = ""
        self.prompt = ""
        self.check_method = None

        if isinstance(param_def, str):
            # simple form: only the parameter name
            self.name = param_def
        elif isinstance(param_def, tuple):
            # extended form: (name, default, prompt, check_method)
            self.name = param_def[0]
            if len(param_def) > 1:
                # the default may contain %(key)s patterns resolved from dico
                if dico is not None: self.default = param_def[1] % dico
                else: self.default = param_def[1]
            if len(param_def) > 2: self.prompt = param_def[2]
            if len(param_def) > 3: self.check_method = param_def[3]
        else:
            raise Exception(_("ERROR in template parameter definition"))

        # keep the raw prompt for display; build the interactive prompt
        # with a trailing "? " and the default value in brackets
        self.raw_prompt = self.prompt
        if len(self.prompt) == 0:
            self.prompt = _("value for '%s'") % self.name
        self.prompt += "? "
        if len(self.default) > 0:
            self.prompt += "[%s] " % self.default

    def check_value(self, val):
        """Return True if val is a non-empty value accepted by check_method."""
        if self.check_method is None:
            return len(val) > 0
        return len(val) > 0 and self.check_method(val)
+ +
def get_dico_param(dico, key, default):
    """Return dico[key] if the key exists, else the given default.

    :param dico: (dict) the dictionary to search
    :param key: the key to look up
    :param default: the value to return when the key is missing
    :return: dico[key] or default
    """
    # BUGFIX: dict.has_key() was removed in Python 3; the 'in' operator
    # works on both Python 2 and 3.
    if key in dico:
        return dico[key]
    return default
+ +
class TemplateSettings:
    """
    Reads and holds the settings of a template (its template.info file):
    substitution key, parameter list, optional pyconf definition, optional
    post command and ignore filters.
    """
    def __init__(self, compo_name, settings_file, target):
        """
        :param compo_name: (str) the component name used for substitutions
        :param settings_file: (str) path of the template.info file to read
        :param target: (str) the target directory of the generation
        """
        self.compo_name = compo_name
        self.dico = None
        self.target = target

        # read the settings.
        # BUGFIX: execfile() does not exist in Python 3; exec the compiled
        # file content instead (equivalent behavior on Python 2).
        gdic, ldic = {}, {}
        with open(settings_file) as f:
            exec(compile(f.read(), settings_file, 'exec'), gdic, ldic)

        # check required parameters in template.info
        # (has_key() removed in Python 3 — use 'in')
        missing = []
        for pp in ["file_subst", "parameters"]:
            if pp not in ldic: missing.append("'%s'" % pp)
        if len(missing) > 0:
            raise Exception(
                _("Bad format in settings file! %s not defined.") % \
                ", ".join(missing) )

        self.file_subst = ldic["file_subst"]
        self.parameters = ldic['parameters']
        self.info = get_dico_param(ldic, "info", "").strip()
        self.pyconf = get_dico_param(ldic, "pyconf", "")
        self.post_command = get_dico_param(ldic, "post_command", "")

        # get the delimiter for the template
        self.delimiter_char = get_dico_param(ldic, "delimiter", ":sat:")

        # get the ignore filter (list comprehension: on Python 3 map() is
        # lazy and would not behave as a list)
        self.ignore_filters = [l.strip()
                               for l in ldic["ignore_filters"].split(',')]

    def has_pyconf(self):
        """Return True if the template provides a pyconf definition."""
        return len(self.pyconf) > 0

    def get_pyconf_parameters(self):
        """Return the parameter names referenced in the pyconf template."""
        if len(self.pyconf) == 0:
            return []
        return re.findall("%\((?P<name>\S[^\)]*)", self.pyconf)

    def check_file_for_substitution(self, file_):
        """Return True if the file must be parsed (no ignore filter matches)."""
        for filter_ in self.ignore_filters:
            if fnmatch.fnmatchcase(file_, filter_):
                return False
        return True

    def check_user_values(self, values):
        """Raise if user-provided values leave some parameters undefined."""
        if values is None:
            return

        # create a list of all parameters (pyconf + list)
        pnames = self.get_pyconf_parameters()
        for p in self.parameters:
            tp = TParam(p, self.compo_name)
            pnames.append(tp.name)

        # reduce the list
        pnames = list(set(pnames))  # remove duplicates

        known_values = ["name", "Name", "NAME", "target", self.file_subst]
        known_values.extend(list(values.keys()))
        missing = [p for p in pnames if p not in known_values]

        if len(missing) > 0:
            raise Exception(
                _("Missing parameters: %s") % ", ".join(missing) )

    def get_parameters(self, conf_values=None):
        """Build (once) and return the substitution dictionary, prompting
        the user interactively for any missing value."""
        if self.dico is not None:
            return self.dico

        self.check_user_values(conf_values)

        # create dictionary with default values
        dico = {}
        dico["name"] = self.compo_name.lower()
        dico["Name"] = self.compo_name.capitalize()
        dico["NAME"] = self.compo_name
        dico["target"] = self.target
        dico[self.file_subst] = self.compo_name
        # add user values if any
        if conf_values is not None:
            for p in conf_values.keys():
                dico[p] = conf_values[p]

        # ask user for values ('input' is aliased to raw_input on Python 2
        # by the module-level compatibility shim; the original called
        # raw_input directly, which fails on Python 3)
        for p in self.parameters:
            tp = TParam(p, self.compo_name, dico)
            if tp.name in dico:
                continue

            val = ""
            while not tp.check_value(val):
                val = input(tp.prompt)
                if len(val) == 0 and len(tp.default) > 0:
                    val = tp.default
            dico[tp.name] = val

        # ask for missing value for pyconf
        pyconfparam = self.get_pyconf_parameters()
        for p in [l for l in pyconfparam if l not in dico]:
            rep = ""
            while len(rep) == 0:
                rep = input("%s? " % p)
            dico[p] = rep

        self.dico = dico
        return self.dico
+ +
def search_template(config, template):
    """Locate a template and return its path.

    An absolute path is accepted as-is when it exists; otherwise the
    template is searched in the 'templates' data directory.

    :param config: (Config) The global configuration
    :param template: (str) template name or absolute path
    :return: (str) the template path
    :raise Exception: when the template cannot be found
    """
    found = ""
    if os.path.isabs(template):
        if os.path.exists(template):
            found = template
    else:
        # look in template directory
        for tdir in [os.path.join(config.VARS.datadir, "templates")]:
            candidate = os.path.join(tdir, template)
            if os.path.exists(candidate):
                found = candidate
                break

    if not found:
        raise Exception(_("Template not found: %s") % template)

    return found
+ + +
def prepare_from_template(config,
                          name,
                          template,
                          target_dir,
                          conf_values,
                          logger):
    """Prepares a module from a template.

    :param config: (Config) The global configuration
    :param name: (str) The name of the module to create
    :param template: (str) The template name or path to use
    :param target_dir: (str) Where to create the module sources
    :param conf_values: (dict) Parameter values from the command line, or None
    :param logger: (Logger) The logger instance to use
    :return: (int) 0 on success, else the post command return code
    """
    template_src_dir = search_template(config, template)
    res = 0

    # copy the template (archive file or plain directory)
    if os.path.isfile(template_src_dir):
        logger.info(_("Extract template %s\n") % UTS.info(template))
        SYSS.archive_extract(template_src_dir, target_dir)
    else:
        logger.info(_("Copy template %s\n") % UTS.info(template))
        shutil.copytree(template_src_dir, target_dir)

    # a 'CPP' suffix on the module name is not part of the component name
    compo_name = name
    if name.endswith("CPP"):
        compo_name = name[:-3]

    # read settings
    settings_file = os.path.join(target_dir, "template.info")
    if not os.path.exists(settings_file):
        raise Exception(_("Settings file not found"))
    tsettings = TemplateSettings(compo_name, settings_file, target_dir)

    # first rename the files
    logger.debug(UTS.label(_("Rename files\n")))
    for root, dirs, files in os.walk(target_dir):
        for fic in files:
            ff = fic.replace(tsettings.file_subst, compo_name)
            if ff != fic:
                if os.path.exists(os.path.join(root, ff)):
                    raise Exception(
                        _("Destination file already exists: %s") % \
                        os.path.join(root, ff) )
                logger.debug("  %s -> %s\n" % (fic, ff))
                os.rename(os.path.join(root, fic), os.path.join(root, ff))

    # rename the directories (bottom-up so children are renamed first)
    logger.debug(UTS.label(_("Rename directories\n")))
    for root, dirs, files in os.walk(target_dir, topdown=False):
        for rep in dirs:
            dd = rep.replace(tsettings.file_subst, compo_name)
            if dd != rep:
                if os.path.exists(os.path.join(root, dd)):
                    raise Exception(
                        _("Destination directory already exists: %s") % \
                        os.path.join(root, dd) )
                logger.debug("  %s -> %s\n" % (rep, dd))
                os.rename(os.path.join(root, rep), os.path.join(root, dd))

    # ask for missing parameters
    logger.debug(UTS.label(_("Make substitution in files\n")))
    logger.debug(_("Delimiter =") + " %s\n" % tsettings.delimiter_char)
    logger.debug(_("Ignore Filters =") + " %s\n" % ', '.join(tsettings.ignore_filters))
    dico = tsettings.get_parameters(conf_values)

    # override standard string.Template class to use the desired delimiter
    class CompoTemplate(string.Template):
        delimiter = tsettings.delimiter_char

    # do substitution in every non-ignored file
    pathlen = len(target_dir) + 1
    for root, dirs, files in os.walk(target_dir):
        for fic in files:
            fpath = os.path.join(root, fic)
            if not tsettings.check_file_for_substitution(fpath[pathlen:]):
                logger.debug("  - %s\n" % fpath[pathlen:])
                continue
            # read the file (open() replaces the Python-2-only file();
            # the local was also renamed so it no longer shadows the
            # 'template' parameter)
            with open(fpath, 'r') as f:
                content = f.read()
            # make the substitution
            compo_template = CompoTemplate(content)
            substituted = compo_template.safe_substitute(dico)
            # overwrite the file with substituted content, if changed
            changed = " "
            if substituted != content:
                changed = "*"
                with open(fpath, 'w') as f:
                    f.write(substituted)
            logger.debug("  %s %s\n" % (changed, fpath[pathlen:]))

    # BUGFIX: has_pyconf is a method; the original tested the bound method
    # object ("not tsettings.has_pyconf"), which is always False, so the
    # error branch was unreachable. Call the method.
    if not tsettings.has_pyconf():
        logger.error(_("Definition for sat not found in settings file."))
    else:
        definition = tsettings.pyconf % dico
        pyconf_file = os.path.join(target_dir, name + '.pyconf')
        # BUGFIX: the original wrote "f.close" without parentheses, so the
        # file was never explicitly closed; use a context manager.
        with open(pyconf_file, 'w') as f:
            f.write(definition)
        logger.info(_("Create configuration file: ") + pyconf_file)

    # run the optional post command in the target directory
    if len(tsettings.post_command) > 0:
        cmd = tsettings.post_command % dico
        logger.info(_("Run post command: ") + cmd)

        p = subprocess.Popen(cmd, shell=True, cwd=target_dir)
        p.wait()
        res = p.returncode

    return res
+ +
def get_template_info(config, template_name, logger):
    """Display information about a template and verify its consistency.

    :param config: (Config) The global configuration
    :param template_name: (str) The template name or path
    :param logger: (Logger) The logger instance to use
    :return: (int) 0 if the template is consistent, 1 otherwise
    """
    sources = search_template(config, template_name)
    logger.info("  Template = %s\n" % sources)

    # copy/extract the template into a temporary directory to inspect it
    tmpdir = os.path.join(config.VARS.tmp_root, "tmp_template")
    settings_file = os.path.join(tmpdir, "template.info")
    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
    if os.path.isdir(sources):
        shutil.copytree(sources, tmpdir)
    else:
        SYSS.archive_extract(sources, tmpdir)
        settings_file = os.path.join(tmpdir, "template.info")

    if not os.path.exists(settings_file):
        raise Exception(_("Settings file not found"))
    tsettings = TemplateSettings("NAME", settings_file, "target")

    # template description
    skip = "\n"*3
    msg = skip
    if len(tsettings.info) == 0:
        msg += UTS.red("No information for this template.")
    else:
        msg += tsettings.info

    msg += "\n= Configuration\n"
    msg += "  file substitution key = %s\n" % tsettings.file_subst
    msg += "  substitution key = '%s'\n" % tsettings.delimiter_char
    if len(tsettings.ignore_filters) > 0:
        msg += "  Ignore Filter = %s\n" % ', '.join(tsettings.ignore_filters)

    logger.info(msg)

    # parameter list
    msg = skip
    msg += "= Parameters\n"
    pnames = []
    for pp in tsettings.parameters:
        tt = TParam(pp, "NAME")
        pnames.append(tt.name)
        msg += "  Name = %s\n" % tt.name
        msg += "  Prompt = %s\n" % tt.raw_prompt
        msg += "  Default value = %s\n" % tt.default

    logger.info(msg)

    retcode = 0

    # the substitution key itself must be a declared parameter
    msg = skip
    msg += "= Verification\n"
    if tsettings.file_subst not in pnames:
        msg += "file substitution key not defined as a parameter: %s\n" % \
               tsettings.file_subst
        retcode = 1

    logger.info(msg)

    # scan every non-ignored file for ${...} references without a matching
    # parameter definition
    msg = ""
    reexp = tsettings.delimiter_char.replace("$", "\$") + "{(?P<name>\S[^}]*)"
    pathlen = len(tmpdir) + 1
    for root, __, files in os.walk(tmpdir):
        for fic in files:
            fpath = os.path.join(root, fic)
            if not tsettings.check_file_for_substitution(fpath[pathlen:]):
                continue
            # read the file (open() replaces the Python-2-only file())
            with open(fpath, 'r') as f:
                content = f.read()
            found = list(set(re.findall(reexp, content)))  # remove duplicates
            # BUGFIX: the original kept a filter() object and called len()
            # on it, which fails on Python 3; use a list comprehension.
            unknown = [l for l in found if l not in pnames]
            if len(unknown) > 0:
                msg += "Missing definition in %s: %s\n" % \
                       ( fpath[pathlen:], ", ".join(unknown) )
                retcode = 1

    logger.info(msg)

    if retcode == 0:
        logger.info("<OK>" + skip)
    else:
        logger.info("<KO>" + skip)

    # clean up tmp file
    shutil.rmtree(tmpdir)

    return retcode
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/commands/test.html b/doc/build/html/_modules/commands/test.html new file mode 100644 index 0000000..eae4ada --- /dev/null +++ b/doc/build/html/_modules/commands/test.html @@ -0,0 +1,831 @@ + + + + + + + + commands.test — salomeTools 5.0.0dev documentation + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Source code for commands.test

+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+#  Copyright (C) 2010-2012  CEA/DEN
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+import os
+import sys
+import shutil
+import subprocess
+import datetime
+import gzip
+
+import src.debug as DBG
+import src.returnCode as RCO
+import src.utilsSat as UTS
+from src.salomeTools import _BaseCommand
+import src.ElementTree as etree
+import src.xmlManager as XMLMGR
+
+try:
+    from hashlib import sha1
+except ImportError:
+    from sha import sha as sha1
+
+
+########################################################################
+# Command class
+########################################################################
+
class Command(_BaseCommand):
    """
    The test command runs a test base on a SALOME installation.

    examples:
      >> sat test SALOME --grid GEOM --session light
    """

    name = "test"

    def getParser(self):
        """Define all options for command 'sat test <options>'"""
        parser = self.getParserWithHelp()
        parser.add_option('b', 'base', 'string', 'base',
            _("""\
Optional: Indicate the name of the test base to use.
  This name has to be registered in your application and in a project.
  A path to a test base can also be used."""))
        parser.add_option('l', 'launcher', 'string', 'launcher',
            _("Optional: Use this option to specify the path to a SALOME launcher to "
              "use to launch the test scripts of the test base."))
        parser.add_option('g', 'grid', 'list', 'grids',
            _('Optional: Indicate which grid(s) to test (subdirectory of the test base).'))
        parser.add_option('s', 'session', 'list', 'sessions',
            _('Optional: indicate which session(s) to test (subdirectory of the grid).'))
        parser.add_option('', 'display', 'string', 'display',
            _("""\
Optional: set the display where to launch SALOME.
  If value is NO then option --show-desktop=0 will be used to launch SALOME."""))
        return parser

    def check_option(self, options):
        """Check the options

        :param options: (Options) The options
        :return: None
        """
        # BUGFIX: 'config' was referenced without being defined in this
        # method (NameError at runtime); fetch it from the command context.
        config = self.getConfig()
        if not options.launcher:
            options.launcher = ""
        elif not os.path.isabs(options.launcher):
            if not src.config_has_application(config):
                raise Exception(
                    _("An application is required to use a relative path with option --appli") )
            options.launcher = os.path.join(config.APPLICATION.workdir, options.launcher)
            # NOTE(review): the existence check is kept inside this branch;
            # an empty launcher ("") must not be checked for existence
            if not os.path.exists(options.launcher):
                raise Exception(
                    _("Launcher not found: %s") % options.launcher )
        return

    def run(self, cmd_arguments):
        """method called for command 'sat test <options>'"""
        argList = self.assumeAsList(cmd_arguments)

        # print general help and returns
        if len(argList) == 0:
            self.print_help()
            return RCO.ReturnCode("OK", "No arguments, as 'sat %s --help'" % self.name)

        self._options, remaindersArgs = self.parseArguments(argList)

        if self._options.help:
            self.print_help()
            return RCO.ReturnCode("OK", "Done 'sat %s --help'" % self.name)

        # shortcuts
        runner = self.getRunner()
        config = self.getConfig()
        logger = self.getLogger()
        options = self.getOptions()

        self.check_option(options)

        # the test base is specified either by the application, or by the --base option
        with_application = False
        if config.VARS.application != 'None':
            logger.info(_('Running tests on application %s\n') %
                        UTS.label(config.VARS.application))
            with_application = True
        elif not options.base:
            raise Exception(
                _('A test base is required. Use the --base option') )

        # the launcher is specified either by the application, or by the --launcher option
        if with_application:
            # check if environment is loaded
            if 'KERNEL_ROOT_DIR' in os.environ:
                logger.warning(_("SALOME environment already sourced"))

        elif options.launcher:
            logger.info(_("Running SALOME application."))
        else:
            msg = _("""\
Impossible to find any launcher.
Please specify an application or a launcher
""")
            logger.error(msg)
            return 1

        # set the display
        show_desktop = (options.display and options.display.upper() == "NO")
        if options.display and options.display != "NO":
            remote_name = options.display.split(':')[0]
            if remote_name != "":
                check_remote_machine(remote_name, logger)
            # if explicitly set use user choice
            os.environ['DISPLAY'] = options.display
        elif 'DISPLAY' not in os.environ:
            # if no display set
            if ('test' in config.LOCAL and
                    'display' in config.LOCAL.test and
                    len(config.LOCAL.test.display) > 0):
                # use default value for test tool
                os.environ['DISPLAY'] = config.LOCAL.test.display
            else:
                os.environ['DISPLAY'] = "localhost:0.0"

        # initialization
        #################
        if with_application:
            tmp_dir = os.path.join(config.VARS.tmp_root,
                                   config.APPLICATION.name,
                                   "test")
        else:
            tmp_dir = os.path.join(config.VARS.tmp_root,
                                   "test")

        # remove previous tmp dir
        if os.access(tmp_dir, os.F_OK):
            try:
                shutil.rmtree(tmp_dir)
            except:
                logger.error(
                    _("error removing TT_TMP_RESULT %s\n") % tmp_dir)

        # collect context information used to name the result directory
        lines = []
        lines.append("date = '%s'" % config.VARS.date)
        lines.append("hour = '%s'" % config.VARS.hour)
        lines.append("node = '%s'" % config.VARS.node)
        lines.append("arch = '%s'" % config.VARS.dist)

        if 'APPLICATION' in config:
            lines.append("application_info = {}")
            lines.append("application_info['name'] = '%s'" %
                         config.APPLICATION.name)
            lines.append("application_info['tag'] = '%s'" %
                         config.APPLICATION.tag)
            lines.append("application_info['products'] = %s" %
                         str(config.APPLICATION.products))

        content = "\n".join(lines)

        # create hash from context information
        dirname = sha1(content.encode()).hexdigest()
        base_dir = os.path.join(tmp_dir, dirname)
        os.makedirs(base_dir)
        os.environ['TT_TMP_RESULT'] = base_dir

        # create env_info file
        f = open(os.path.join(base_dir, 'env_info.py'), "w")
        f.write(content)
        f.close()

        # create working dir and bases dir
        working_dir = os.path.join(base_dir, 'WORK')
        os.makedirs(working_dir)
        os.makedirs(os.path.join(base_dir, 'BASES'))
        os.chdir(working_dir)

        if 'PYTHONPATH' not in os.environ:
            os.environ['PYTHONPATH'] = ''
        else:
            for var in os.environ['PYTHONPATH'].split(':'):
                if var not in sys.path:
                    sys.path.append(var)

        # launch of the tests
        #####################
        test_base = ""
        if options.base:
            test_base = options.base
        elif with_application and "test_base" in config.APPLICATION:
            test_base = config.APPLICATION.test_base.name

        fmt = "  %s = %s\n"
        msg = fmt % (_('Display'), os.environ['DISPLAY'])
        msg += fmt % (_('Timeout'), src.test_module.DEFAULT_TIMEOUT)
        msg += fmt % (_("Working dir"), base_dir)
        logger.info(msg)

        # create the test object
        test_runner = src.test_module.Test(config,
                                           logger,
                                           base_dir,
                                           testbase=test_base,
                                           grids=options.grids,
                                           sessions=options.sessions,
                                           launcher=options.launcher,
                                           show_desktop=show_desktop)

        if not test_runner.test_base_found:
            # Fail
            return 1

        # run the test
        logger.allowPrintLevel = False
        retcode = test_runner.run_all_tests()
        logger.allowPrintLevel = True

        logger.info(_("Tests finished\n"))

        logger.debug(_("Generate the specific test log\n"))
        log_dir = UTS.get_log_path(config)
        out_dir = os.path.join(log_dir, "TEST")
        UTS.ensure_path_exists(out_dir)
        name_xml_board = logger.logFileName.split(".")[0] + "board" + ".xml"
        historic_xml_path = generate_history_xml_path(config, test_base)

        create_test_report(config,
                           historic_xml_path,
                           out_dir,
                           retcode,
                           xmlname = name_xml_board)
        xml_board_path = os.path.join(out_dir, name_xml_board)

        # OP 14/11/2017 debug traces for the test-log reporting problem.
        # BUGFIX: the original used Python-2-only print statements, which
        # are a SyntaxError on Python 3; print() works on both versions.
        print("TRACES OP - test.py/run() : historic_xml_path = '#%s#'" % historic_xml_path)
        print("TRACES OP - test.py/run() : log_dir = '#%s#'" % log_dir)
        print("TRACES OP - test.py/run() : name_xml_board = '#%s#'" % name_xml_board)

        logger.l_logFiles.append(xml_board_path)
        logger.add_link(os.path.join("TEST", name_xml_board),
                        "board",
                        retcode,
                        "Click on the link to get the detailed test results")

        # Add the historic files into the log files list of the command
        logger.l_logFiles.append(historic_xml_path)

        logger.debug(_("Removing the temporary directory: %s") % test_runner.tmp_working_dir)
        if os.path.exists(test_runner.tmp_working_dir):
            shutil.rmtree(test_runner.tmp_working_dir)

        return retcode
[docs]def ask_a_path(): + """ + interactive as using 'raw_input' + """ + path = raw_input("enter a path where to save the result: ") + if path == "": + result = raw_input("the result will be not save. Are you sure to " + "continue ? [y/n] ") + if result == "y": + return path + else: + return ask_a_path() + + elif os.path.exists(path): + result = raw_input("WARNING: the content of %s will be deleted. Are you" + " sure to continue ? [y/n] " % path) + if result == "y": + return path + else: + return ask_a_path() + else: + return path
+ +
[docs]def save_file(filename, base): + f = open(filename, 'r') + content = f.read() + f.close() + + objectname = sha1(content).hexdigest() + + f = gzip.open(os.path.join(base, '.objects', objectname), 'w') + f.write(content) + f.close() + return objectname
+ +
[docs]def move_test_results(in_dir, what, out_dir, logger): + if out_dir == in_dir: + return + + finalPath = out_dir + pathIsOk = False + while not pathIsOk: + try: + # create test results directory if necessary + #logger.debug("FINAL = %s\n" % finalPath) + if not os.access(finalPath, os.F_OK): + #shutil.rmtree(finalPath) + os.makedirs(finalPath) + pathIsOk = True + except: + logger.error(_("%s cannot be created.") % finalPath) + finalPath = ask_a_path() + + if finalPath != "": + os.makedirs(os.path.join(finalPath, what, 'BASES')) + + # check if .objects directory exists + if not os.access(os.path.join(finalPath, '.objects'), os.F_OK): + os.makedirs(os.path.join(finalPath, '.objects')) + + logger.info(_('copy tests results to %s ... ') % finalPath) + + # copy env_info.py + shutil.copy2(os.path.join(in_dir, what, 'env_info.py'), + os.path.join(finalPath, what, 'env_info.py')) + + # for all sub directory (ie testbase) in the BASES directory + for testbase in os.listdir(os.path.join(in_dir, what, 'BASES')): + outtestbase = os.path.join(finalPath, what, 'BASES', testbase) + intestbase = os.path.join(in_dir, what, 'BASES', testbase) + + # ignore files in root dir + if not os.path.isdir(intestbase): + continue + + os.makedirs(outtestbase) + #logger.debug("copy testbase %s\n" % testbase) + + for grid_ in [m for m in os.listdir(intestbase) if os.path.isdir( + os.path.join(intestbase, m))]: + # ignore source configuration directories + if grid_[:4] == '.git' or grid_ == 'CVS': + continue + + outgrid = os.path.join(outtestbase, grid_) + ingrid = os.path.join(intestbase, grid_) + os.makedirs(outgrid) + #logger.debug("copy grid %s" % grid_) + + if grid_ == 'RESSOURCES': + for file_name in os.listdir(ingrid): + if not os.path.isfile(os.path.join(ingrid, + file_name)): + continue + f = open(os.path.join(outgrid, file_name), "w") + f.write(save_file(os.path.join(ingrid, file_name), + finalPath)) + f.close() + else: + for session_name in [t for t in os.listdir(ingrid) if + 
os.path.isdir(os.path.join(ingrid, t))]: + outsession = os.path.join(outgrid, session_name) + insession = os.path.join(ingrid, session_name) + os.makedirs(outsession) + + for file_name in os.listdir(insession): + if not os.path.isfile(os.path.join(insession, + file_name)): + continue + if file_name.endswith('result.py'): + shutil.copy2(os.path.join(insession, file_name), + os.path.join(outsession, file_name)) + else: + f = open(os.path.join(outsession, file_name), "w") + f.write(save_file(os.path.join(insession, + file_name), + finalPath)) + f.close() + + logger.info("<OK>\n")
+ +
[docs]def check_remote_machine(machine_name, logger): + logger.debug(_("Check the display on %s\n") % machine_name) + ssh_cmd = 'ssh -o "StrictHostKeyChecking no" %s "ls"' % machine_name + logger.debug(_("Executing the command : %s\n") % ssh_cmd) + p = subprocess.Popen(ssh_cmd, + shell=True, + stdin =subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + p.wait() + if p.returncode != 0: + msg = "<KO> on '%s'" % ssh_cmd + logger.critical(msg) + logger.error(UTS.red(p.stderr.read())) + logger.error(UTS.red(_("No ssh access to the display machine %s.") % machine_name)) + else: + logger.debug("<OK>\n")
+ + +
[docs]def create_test_report(config, + xml_history_path, + dest_path, + retcode, + xmlname=""): + """ + Creates the XML report for a product. + """ + # get the date and hour of the launching of the command, in order to keep + # history + date_hour = config.VARS.datehour + + # Get some information to put in the xml file + application_name = config.VARS.application + withappli = src.config_has_application(config) + + first_time = False + if not os.path.exists(xml_history_path): + first_time = True + root = etree.Element("salome") + prod_node = etree.Element("product", name=application_name, build=xmlname) + root.append(prod_node) + else: + root = etree.parse(xml_history_path).getroot() + prod_node = root.find("product") + + prod_node.attrib["history_file"] = os.path.basename(xml_history_path) + prod_node.attrib["global_res"] = retcode + + ASNODE = XMLMGR.add_simple_node # shortcut + + if withappli: + if not first_time: + for node in (prod_node.findall("version_to_download") + + prod_node.findall("out_dir")): + prod_node.remove(node) + + ASNODE(prod_node, "version_to_download", config.APPLICATION.name) + + ASNODE(prod_node, "out_dir", config.APPLICATION.workdir) + + # add environment + if not first_time: + for node in prod_node.findall("exec"): + prod_node.remove(node) + + exec_node = ASNODE(prod_node, "exec") + exec_node.append(etree.Element("env", name="Host", value=config.VARS.node)) + exec_node.append(etree.Element("env", name="Architecture", + value=config.VARS.dist)) + exec_node.append(etree.Element("env", name="Number of processors", + value=str(config.VARS.nb_proc))) + exec_node.append(etree.Element("env", name="Begin date", + value=src.parse_date(date_hour))) + exec_node.append(etree.Element("env", name="Command", + value=config.VARS.command)) + exec_node.append(etree.Element("env", name="sat version", + value=config.INTERNAL.sat_version)) + + if 'TESTS' in config: + if first_time: + tests = ASNODE(prod_node, "tests") + known_errors = ASNODE(prod_node, 
"known_errors") + new_errors = ASNODE(prod_node, "new_errors") + amend = ASNODE(prod_node, "amend") + else: + tests = prod_node.find("tests") + known_errors = prod_node.find("known_errors") + new_errors = prod_node.find("new_errors") + amend = prod_node.find("amend") + + tt = {} + for test in config.TESTS: + if not tt.has_key(test.testbase): + tt[test.testbase] = [test] + else: + tt[test.testbase].append(test) + + for testbase in tt.keys(): + if first_time: + gn = ASNODE(tests, "testbase") + else: + gn = tests.find("testbase") + # initialize all grids and session to "not executed" + for mn in gn.findall("grid"): + mn.attrib["executed_last_time"] = "no" + for tyn in mn.findall("session"): + tyn.attrib["executed_last_time"] = "no" + for test_node in tyn.findall('test'): + for node in test_node.getchildren(): + if node.tag != "history": + test_node.remove(node) + + attribs_to_pop = [] + for attribute in test_node.attrib: + if (attribute != "script" and + attribute != "res"): + attribs_to_pop.append(attribute) + for attribute in attribs_to_pop: + test_node.attrib.pop(attribute) + + gn.attrib['name'] = testbase + nb, nb_pass, nb_failed, nb_timeout, nb_not_run = 0, 0, 0, 0, 0 + grids = {} + sessions = {} + for test in tt[testbase]: + if not grids.has_key(test.grid): + if first_time: + mn = ASNODE(gn, "grid") + mn.attrib['name'] = test.grid + else: + l_mn = gn.findall("grid") + mn = None + for grid_node in l_mn: + if grid_node.attrib['name'] == test.grid: + mn = grid_node + break + if mn == None: + mn = ASNODE(gn, "grid") + mn.attrib['name'] = test.grid + + grids[test.grid] = mn + + mn.attrib["executed_last_time"] = "yes" + + if not sessions.has_key("%s/%s" % (test.grid, test.session)): + if first_time: + tyn = ASNODE(mn, "session") + tyn.attrib['name'] = test.session + else: + l_tyn = mn.findall("session") + tyn = None + for session_node in l_tyn: + if session_node.attrib['name'] == test.session: + tyn = session_node + break + if tyn == None: + tyn = ASNODE(mn, 
"session") + tyn.attrib['name'] = test.session + + sessions["%s/%s" % (test.grid, test.session)] = tyn + + tyn.attrib["executed_last_time"] = "yes" + + for script in test.script: + if first_time: + tn = ASNODE(sessions[ + "%s/%s" % (test.grid, test.session)], + "test") + tn.attrib['session'] = test.session + tn.attrib['script'] = script.name + hn = ASNODE(tn, "history") + else: + l_tn = sessions["%s/%s" % (test.grid, test.session)].findall( + "test") + tn = None + for test_node in l_tn: + if test_node.attrib['script'] == script['name']: + tn = test_node + break + + if tn == None: + tn = ASNODE(sessions[ + "%s/%s" % (test.grid, test.session)], + "test") + tn.attrib['session'] = test.session + tn.attrib['script'] = script.name + hn = ASNODE(tn, "history") + else: + # Get or create the history node for the current test + if len(tn.findall("history")) == 0: + hn = ASNODE(tn, "history") + else: + hn = tn.find("history") + # Put the last test data into the history + if 'res' in tn.attrib: + attributes = {"date_hour" : date_hour, + "res" : tn.attrib['res'] } + ASNODE(hn, + "previous_test", + attrib=attributes) + for node in tn: + if node.tag != "history": + tn.remove(node) + + if 'callback' in script: + try: + cnode = ASNODE(tn, "callback") + if src.architecture.is_windows(): + import string + cnode.text = filter( + lambda x: x in string.printable, + script.callback) + else: + cnode.text = script.callback.decode( + 'string_escape') + except UnicodeDecodeError as exc: + zz = (script.callback[:exc.start] + + '?' + + script.callback[exc.end-2:]) + cnode = ASNODE(tn, "callback") + cnode.text = zz.decode("UTF-8") + + # Add the script content + cnode = ASNODE(tn, "content") + cnode.text = script.content + + # Add the script execution log + cnode = ASNODE(tn, "out") + cnode.text = script.out + + if 'amend' in script: + cnode = ASNODE(tn, "amend") + cnode.text = script.amend.decode("UTF-8") + + if script.time < 0: + tn.attrib['exec_time'] = "?" 
+ else: + tn.attrib['exec_time'] = "%.3f" % script.time + tn.attrib['res'] = script.res + + if "amend" in script: + amend_test = ASNODE(amend, "atest") + amend_test.attrib['name'] = os.path.join(test.grid, + test.session, + script.name) + amend_test.attrib['reason'] = script.amend.decode( + "UTF-8") + + # calculate status + nb += 1 + if script.res == src.OK_STATUS: nb_pass += 1 + elif script.res == src.TIMEOUT_STATUS: nb_timeout += 1 + elif script.res == src.KO_STATUS: nb_failed += 1 + else: nb_not_run += 1 + + if "known_error" in script: + kf_script = ASNODE(known_errors, "error") + kf_script.attrib['name'] = os.path.join(test.grid, + test.session, + script.name) + kf_script.attrib['date'] = script.known_error.date + kf_script.attrib[ + 'expected'] = script.known_error.expected + kf_script.attrib[ + 'comment'] = script.known_error.comment.decode("UTF-8") + kf_script.attrib['fixed'] = str( + script.known_error.fixed) + overdue = datetime.datetime.today().strftime("%Y-%m-" + "%d") > script.known_error.expected + if overdue: + kf_script.attrib['overdue'] = str(overdue) + + elif script.res == src.KO_STATUS: + new_err = ASNODE(new_errors, "new_error") + script_path = os.path.join(test.grid, + test.session, script.name) + new_err.attrib['name'] = script_path + new_err.attrib['cmd'] = ("sat testerror %s -s %s -c 'my" + " comment' -p %s" % \ + (application_name, script_path, config.VARS.dist)) + + + gn.attrib['total'] = str(nb) + gn.attrib['pass'] = str(nb_pass) + gn.attrib['failed'] = str(nb_failed) + gn.attrib['timeout'] = str(nb_timeout) + gn.attrib['not_run'] = str(nb_not_run) + + # Remove the res attribute of all tests that were not launched + # this time + for mn in gn.findall("grid"): + if mn.attrib["executed_last_time"] == "no": + for tyn in mn.findall("session"): + if tyn.attrib["executed_last_time"] == "no": + for test_node in tyn.findall('test'): + if "res" in test_node.attrib: + test_node.attrib.pop("res") + + if len(xmlname) == 0: + xmlname = application_name 
+ if not xmlname.endswith(".xml"): + xmlname += ".xml" + + XMLMGR.write_report(os.path.join(dest_path, xmlname), root, "test.xsl") + XMLMGR.write_report(xml_history_path, root, "test_history.xsl") + return src.OK_STATUS
+ +
[docs]def generate_history_xml_path(config, test_base): + """ + Generate the name of the xml file that contain the history of the tests + on the machine with the current APPLICATION and the current test base. + + :param config: (Config) The global configuration + :param test_base: (str) The test base name (or path) + :return: (str) the full path of the history xml file + """ + history_xml_name = "" + if "APPLICATION" in config: + history_xml_name += config.APPLICATION.name + history_xml_name += "-" + history_xml_name += config.VARS.dist + history_xml_name += "-" + test_base_name = test_base + if os.path.exists(test_base): + test_base_name = os.path.basename(test_base) + history_xml_name += test_base_name + history_xml_name += ".xml" + log_dir = UTS.get_log_path(config) + return os.path.join(log_dir, "TEST", history_xml_name)
+
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/doc/build/html/_modules/index.html b/doc/build/html/_modules/index.html index d373b6c..2f61f88 100644 --- a/doc/build/html/_modules/index.html +++ b/doc/build/html/_modules/index.html @@ -43,7 +43,33 @@

All modules for which code is available

-
  • src.ElementTree
  • +
    • commands.application
    • +
    • commands.check
    • +
    • commands.clean
    • +
    • commands.compile
    • +
    • commands.config
    • +
    • commands.configure
    • +
    • commands.environ
    • +
    • commands.find_duplicates
    • +
    • commands.generate
    • +
    • commands.init
    • +
    • commands.job
    • +
    • commands.jobs
    • +
    • commands.launcher
    • +
    • commands.log
    • +
    • commands.make
    • +
    • commands.makeinstall
    • +
    • commands.package
    • +
    • commands.patch
    • +
    • commands.prepare
    • +
    • commands.profile
    • +
    • commands.run
    • +
    • commands.script
    • +
    • commands.shell
    • +
    • commands.source
    • +
    • commands.template
    • +
    • commands.test
    • +
    • src.ElementTree
    • src.architecture
    • src.catchAll
    • src.colorama.ansi
    • diff --git a/doc/build/html/_modules/src/ElementTree.html b/doc/build/html/_modules/src/ElementTree.html index 8eeb90a..ecc849f 100644 --- a/doc/build/html/_modules/src/ElementTree.html +++ b/doc/build/html/_modules/src/ElementTree.html @@ -501,7 +501,7 @@ # @return An element instance. # @defreturn Element -
      [docs]def Element(tag, attrib={}, **extra): +
      [docs]def Element(tag, attrib={}, **extra): attrib = attrib.copy() attrib.update(extra) return _ElementInterface(tag, attrib)
      @@ -520,7 +520,7 @@ # @return An element instance. # @defreturn Element -
      [docs]def SubElement(parent, tag, attrib={}, **extra): +
      [docs]def SubElement(parent, tag, attrib={}, **extra): attrib = attrib.copy() attrib.update(extra) element = parent.makeelement(tag, attrib) @@ -538,7 +538,7 @@ # @return An element instance, representing a comment. # @defreturn Element -
      [docs]def Comment(text=None): +
      [docs]def Comment(text=None): element = Element(Comment) element.text = text return element
      @@ -552,7 +552,7 @@ # @return An element instance, representing a PI. # @defreturn Element -
      [docs]def ProcessingInstruction(target, text=None): +
      [docs]def ProcessingInstruction(target, text=None): element = Element(ProcessingInstruction) element.text = target if text: @@ -571,7 +571,7 @@ # an URI, and this argument is interpreted as a local name. # @return An opaque object, representing the QName. -
      [docs]class QName: +
      [docs]class QName: def __init__(self, text_or_uri, tag=None): if tag: text_or_uri = "{%s}%s" % (text_or_uri, tag) @@ -594,7 +594,7 @@ # @keyparam file Optional file handle or name. If given, the # tree is initialized with the contents of this XML file. -
      [docs]class ElementTree: +
      [docs]class ElementTree: def __init__(self, element=None, file=None): assert element is None or iselement(element) @@ -608,7 +608,7 @@ # @return An element instance. # @defreturn Element -
      [docs] def getroot(self): +
      [docs] def getroot(self): return self._root
      ## @@ -631,7 +631,7 @@ # @return The document root element. # @defreturn Element -
      [docs] def parse(self, source, parser=None): +
      [docs] def parse(self, source, parser=None): if not hasattr(source, "read"): source = open(source, "rb") if not parser: @@ -652,7 +652,7 @@ # @return An iterator. # @defreturn iterator -
      [docs] def getiterator(self, tag=None): +
      [docs] def getiterator(self, tag=None): assert self._root is not None return self._root.getiterator(tag)
      @@ -664,7 +664,7 @@ # @return The first matching element, or None if no element was found. # @defreturn Element or None -
      [docs] def find(self, path): +
      [docs] def find(self, path): assert self._root is not None if path[:1] == "/": path = "." + path @@ -682,7 +682,7 @@ # empty string. # @defreturn string -
      [docs] def findtext(self, path, default=None): +
      [docs] def findtext(self, path, default=None): assert self._root is not None if path[:1] == "/": path = "." + path @@ -697,7 +697,7 @@ # in document order. # @defreturn list of Element instances -
      [docs] def findall(self, path): +
      [docs] def findall(self, path): assert self._root is not None if path[:1] == "/": path = "." + path @@ -709,7 +709,7 @@ # @param file A file name, or a file object opened for writing. # @param encoding Optional output encoding (default is US-ASCII). -
      [docs] def write(self, file, encoding="us-ascii"): +
      [docs] def write(self, file, encoding="us-ascii"): assert self._root is not None if not hasattr(file, "write"): file = open(file, "wb") @@ -781,7 +781,7 @@ # @return A true value if this is an element object. # @defreturn flag -
      [docs]def iselement(element): +
      [docs]def iselement(element): # FIXME: not sure about this; might be a better idea to look # for tag/attrib/text attributes return isinstance(element, _ElementInterface) or hasattr(element, "tag")
      @@ -795,7 +795,7 @@ # # @param elem An element tree or an individual element. -
      [docs]def dump(elem): +
      [docs]def dump(elem): # debugging if not isinstance(elem, ElementTree): elem = ElementTree(elem) @@ -923,7 +923,7 @@ # standard {@link XMLTreeBuilder} parser is used. # @return An ElementTree instance -
      [docs]def parse(source, parser=None): +
      [docs]def parse(source, parser=None): tree = ElementTree() tree.parse(source, parser) return tree
      @@ -937,7 +937,7 @@ # events are reported. # @return A (event, elem) iterator. -
      [docs]class iterparse: +
      [docs]class iterparse: def __init__(self, source, events=None): if not hasattr(source, "read"): @@ -986,7 +986,7 @@ append((event, None)) parser.EndNamespaceDeclHandler = handler -
      [docs] def next(self): +
      [docs] def next(self): while 1: try: item = self._events[self._index] @@ -1026,7 +1026,7 @@ # @return An Element instance. # @defreturn Element -
      [docs]def XML(text): +
      [docs]def XML(text): parser = XMLTreeBuilder() parser.feed(text) return parser.close()
      @@ -1068,7 +1068,7 @@ # @return An encoded string containing the XML data. # @defreturn string -
      [docs]def tostring(element, encoding=None): +
      [docs]def tostring(element, encoding=None): class dummy: pass data = [] @@ -1093,7 +1093,7 @@ # @param element_factory Optional element factory. This factory # is called to create new Element instances, as necessary. -
      [docs]class TreeBuilder: +
      [docs]class TreeBuilder: def __init__(self, element_factory=None): self._data = [] # data collector @@ -1111,7 +1111,7 @@ # @return An Element instance. # @defreturn Element -
      [docs] def close(self): +
      [docs] def close(self): assert len(self._elem) == 0, "missing end tags" assert self._last != None, "missing toplevel element" return self._last
      @@ -1139,7 +1139,7 @@ # @param data A string. This should be either an 8-bit string # containing ASCII text, or a Unicode string. -
      [docs] def data(self, data): +
      [docs] def data(self, data): self._data.append(data)
      ## @@ -1150,7 +1150,7 @@ # @return The opened element. # @defreturn Element -
      [docs] def start(self, tag, attrs): +
      [docs] def start(self, tag, attrs): self._flush() self._last = elem = self._factory(tag, attrs) if self._elem: @@ -1166,7 +1166,7 @@ # @return The closed element. # @defreturn Element -
      [docs] def end(self, tag): +
      [docs] def end(self, tag): self._flush() self._last = self._elem.pop() assert self._last.tag == tag,\ @@ -1186,7 +1186,7 @@ # @see #ElementTree # @see #TreeBuilder -
      [docs]class XMLTreeBuilder: +
      [docs]class XMLTreeBuilder: def __init__(self, html=0, target=None): try: @@ -1311,7 +1311,7 @@ # @param pubid Public identifier. # @param system System identifier. -
      [docs] def doctype(self, name, pubid, system): +
      [docs] def doctype(self, name, pubid, system): pass
      ## @@ -1319,7 +1319,7 @@ # # @param data Encoded data. -
      [docs] def feed(self, data): +
      [docs] def feed(self, data): self._parser.Parse(data, 0)
      ## @@ -1328,7 +1328,7 @@ # @return An element structure. # @defreturn Element -
      [docs] def close(self): +
      [docs] def close(self): self._parser.Parse("", 1) # end of data tree = self._target.close() del self._target, self._parser # get rid of circular references diff --git a/doc/build/html/_modules/src/architecture.html b/doc/build/html/_modules/src/architecture.html index 6862466..e486727 100644 --- a/doc/build/html/_modules/src/architecture.html +++ b/doc/build/html/_modules/src/architecture.html @@ -62,27 +62,27 @@ # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -''' -In this file : all the stuff that can change with the architecture +""" +Contains all the stuff that can change with the architecture on which SAT is running -''' +""" import os import sys import platform -
      [docs]def is_windows(): - '''method that checks windows OS +
      [docs]def is_windows(): + """Checks windows OS - :rtype: boolean - ''' + :return: (bool) True if system is Windows + """ return platform.system() == 'Windows'
      -
      [docs]def get_user(): - '''method that gets the username that launched sat +
      [docs]def get_user(): + """Gets the username that launched sat - :rtype: str - ''' + :return: (str) environ var USERNAME + """ # In windows case, the USERNAME environment variable has to be set if is_windows(): if not os.environ.has_key('USERNAME'): @@ -93,12 +93,11 @@ return pwd.getpwuid(os.getuid())[0]
      def _lsb_release(args): - '''Get system information with lsb_release. + """Get system information with lsb_release. - :param args str: The arguments to give to lsb_release. - :return: The distribution. - :rtype: str - ''' + :param args: (str) The CLI arguments to give to lsb_release. + :return: (str) The distribution. + """ try: path = '/usr/local/bin:/usr/bin:/bin' lsb_path = os.getenv("LSB_PATH") @@ -118,14 +117,15 @@ " the path to lsb_release\n")) sys.exit(-1) -
      [docs]def get_distribution(codes): - '''Gets the code for the distribution +
      [docs]def get_distribution(codes): + """Gets the code for the distribution - :param codes L{Mapping}: The map containing distribution correlation table. - :return: The distribution on which salomeTools is running, regarding the - distribution correlation table contained in codes variable. - :rtype: str - ''' + :param codes: (L{Mapping}) + The map containing distribution correlation table. + :return: (str) + The distribution on which salomeTools is running, regarding the + distribution correlation table contained in codes variable. + """ if is_windows(): return "Win" @@ -142,16 +142,18 @@ return distrib
      -
      [docs]def get_distrib_version(distrib, codes): - '''Gets the version of the distribution +
      [docs]def get_distrib_version(distrib, codes): + """Gets the version of the distribution - :param distrib str: The distribution on which the version will be found. - :param codes L{Mapping}: The map containing distribution correlation table. - :return: The version of the distribution on which salomeTools is running, - regarding the distribution correlation table contained in codes - variable. - :rtype: str - ''' + :param distrib: (str) + The distribution on which the version will be found. + :param codes: (L{Mapping}) + The map containing distribution correlation table. + :return: (str) + The version of the distribution on which + salomeTools is running, regarding the distribution + correlation table contained in codes variable. + """ if is_windows(): return platform.release() @@ -166,23 +168,22 @@ version=version[0] #for centos, we only care for major version return version
      -
      [docs]def get_python_version(): - '''Gets the version of the running python. +
      [docs]def get_python_version(): + """Gets the version of the running python. - :return: the version of the running python. - :rtype: str - ''' + :return: (str) The version of the running python. + """ # The platform python module gives the answer return platform.python_version()
      -
      [docs]def get_nb_proc(): - '''Gets the number of processors of the machine - on which salomeTools is running. +
      [docs]def get_nb_proc(): + """ + Gets the number of processors of the machine + on which salomeTools is running. - :return: the number of processors. - :rtype: str - ''' + :return: (str) The number of processors. + """ try : import multiprocessing diff --git a/doc/build/html/_modules/src/catchAll.html b/doc/build/html/_modules/src/catchAll.html index 2062196..1475128 100644 --- a/doc/build/html/_modules/src/catchAll.html +++ b/doc/build/html/_modules/src/catchAll.html @@ -83,11 +83,11 @@ ######################################################################################## # json utils ######################################################################################## -
      [docs]def dumper(obj): +
      [docs]def dumper(obj): """to json explore subclass object as dict""" return obj.__dict__
      -
      [docs]def dumperType(obj): +
      [docs]def dumperType(obj): """\ to get a "_type" to trace json subclass object, but ignore all attributes begining with '_' @@ -97,13 +97,13 @@ if not aDict.has_key(typeatt): aDict[typeatt] = obj.__class__.__name__ return aDict
      -
      [docs]def jsonDumps(obj): +
      [docs]def jsonDumps(obj): """to get direct default jsonDumps method""" return json.dumps(obj, default=dumperType, sort_keys=True, indent=2)
      ######################################################################################## -
      [docs]class CatchAll(object): +
      [docs]class CatchAll(object): """\ class as simple dynamic dictionary with predefined keys as properties in @@ -164,7 +164,7 @@ return txt -
      [docs] def jsonDumps(self): +
      [docs] def jsonDumps(self): return jsonDumps(self)
      diff --git a/doc/build/html/_modules/src/colorama/ansi.html b/doc/build/html/_modules/src/colorama/ansi.html index 0515de7..5d8e975 100644 --- a/doc/build/html/_modules/src/colorama/ansi.html +++ b/doc/build/html/_modules/src/colorama/ansi.html @@ -55,20 +55,20 @@ BEL = '\007' -
      [docs]def code_to_chars(code): +
      [docs]def code_to_chars(code): return CSI + str(code) + 'm'
      -
      [docs]def set_title(title): +
      [docs]def set_title(title): return OSC + '2;' + title + BEL
      -
      [docs]def clear_screen(mode=2): +
      [docs]def clear_screen(mode=2): return CSI + str(mode) + 'J'
      -
      [docs]def clear_line(mode=2): +
      [docs]def clear_line(mode=2): return CSI + str(mode) + 'K'
      -
      [docs]class AnsiCodes(object): +
      [docs]class AnsiCodes(object): def __init__(self): # the subclasses declare class attributes which are numbers. # Upon instantiation we define instance attributes, which are the same @@ -79,20 +79,20 @@ setattr(self, name, code_to_chars(value))
      -
      [docs]class AnsiCursor(object): -
      [docs] def UP(self, n=1): +
      [docs]class AnsiCursor(object): +
      [docs] def UP(self, n=1): return CSI + str(n) + 'A'
      -
      [docs] def DOWN(self, n=1): +
      [docs] def DOWN(self, n=1): return CSI + str(n) + 'B'
      -
      [docs] def FORWARD(self, n=1): +
      [docs] def FORWARD(self, n=1): return CSI + str(n) + 'C'
      -
      [docs] def BACK(self, n=1): +
      [docs] def BACK(self, n=1): return CSI + str(n) + 'D'
      -
      [docs] def POS(self, x=1, y=1): +
      [docs] def POS(self, x=1, y=1): return CSI + str(y) + ';' + str(x) + 'H'
      -
      [docs]class AnsiFore(AnsiCodes): +
      [docs]class AnsiFore(AnsiCodes): BLACK = 30 RED = 31 GREEN = 32 @@ -114,7 +114,7 @@ LIGHTWHITE_EX = 97
      -
      [docs]class AnsiBack(AnsiCodes): +
      [docs]class AnsiBack(AnsiCodes): BLACK = 40 RED = 41 GREEN = 42 @@ -136,7 +136,7 @@ LIGHTWHITE_EX = 107
      -
      [docs]class AnsiStyle(AnsiCodes): +
      [docs]class AnsiStyle(AnsiCodes): BRIGHT = 1 DIM = 2 NORMAL = 22 diff --git a/doc/build/html/_modules/src/colorama/ansitowin32.html b/doc/build/html/_modules/src/colorama/ansitowin32.html index 94dbe46..5fb649b 100644 --- a/doc/build/html/_modules/src/colorama/ansitowin32.html +++ b/doc/build/html/_modules/src/colorama/ansitowin32.html @@ -59,15 +59,15 @@ winterm = WinTerm() -
      [docs]def is_stream_closed(stream): +
      [docs]def is_stream_closed(stream): return not hasattr(stream, 'closed') or stream.closed
      -
      [docs]def is_a_tty(stream): +
      [docs]def is_a_tty(stream): return hasattr(stream, 'isatty') and stream.isatty()
      -
      [docs]class StreamWrapper(object): +
      [docs]class StreamWrapper(object): ''' Wraps a stream (such as stdout), acting as a transparent proxy for all attribute access apart from method 'write()', which is delegated to our @@ -82,11 +82,11 @@ def __getattr__(self, name): return getattr(self.__wrapped, name) -
      [docs] def write(self, text): +
      [docs] def write(self, text): self.__convertor.write(text)
      -
      [docs]class AnsiToWin32(object): +
      [docs]class AnsiToWin32(object): ''' Implements a 'write()' method which, on Windows, will strip ANSI character sequences from the text, and if outputting to a tty, will convert them into @@ -128,7 +128,7 @@ # are we wrapping stderr? self.on_stderr = self.wrapped is sys.stderr -
      [docs] def should_wrap(self): +
      [docs] def should_wrap(self): ''' True if this class is actually needed. If false, then the output stream will not be affected, nor will win32 calls be issued, so @@ -138,7 +138,7 @@ ''' return self.convert or self.strip or self.autoreset
      -
      [docs] def get_win32_calls(self): +
      [docs] def get_win32_calls(self): if self.convert and winterm: return { AnsiStyle.RESET_ALL: (winterm.reset_all, ), @@ -182,7 +182,7 @@ } return dict()
      -
      [docs] def write(self, text): +
      [docs] def write(self, text): if self.strip or self.convert: self.write_and_convert(text) else: @@ -192,14 +192,14 @@ self.reset_all()
      -
      [docs] def reset_all(self): +
      [docs] def reset_all(self): if self.convert: self.call_win32('m', (0,)) elif not self.strip and not is_stream_closed(self.wrapped): self.wrapped.write(Style.RESET_ALL)
      -
      [docs] def write_and_convert(self, text): +
      [docs] def write_and_convert(self, text): ''' Write the given text to our wrapped stream, stripping any ANSI sequences from the text, and optionally converting them into win32 @@ -215,19 +215,19 @@ self.write_plain_text(text, cursor, len(text))
      -
      [docs] def write_plain_text(self, text, start, end): +
      [docs] def write_plain_text(self, text, start, end): if start < end: self.wrapped.write(text[start:end]) self.wrapped.flush()
      -
      [docs] def convert_ansi(self, paramstring, command): +
      [docs] def convert_ansi(self, paramstring, command): if self.convert: params = self.extract_params(command, paramstring) self.call_win32(command, params)
      -
      [docs] def extract_params(self, command, paramstring): +
      [docs] def extract_params(self, command, paramstring): if command in 'Hf': params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) while len(params) < 2: @@ -245,7 +245,7 @@ return params
      -
      [docs] def call_win32(self, command, params): +
      [docs] def call_win32(self, command, params): if command == 'm': for param in params: if param in self.win32_calls: @@ -267,7 +267,7 @@ winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
      -
      [docs] def convert_osc(self, text): +
      [docs] def convert_osc(self, text): for match in self.ANSI_OSC_RE.finditer(text): start, end = match.span() text = text[:start] + text[end:] diff --git a/doc/build/html/_modules/src/colorama/initialise.html b/doc/build/html/_modules/src/colorama/initialise.html index 347da84..1062ef2 100644 --- a/doc/build/html/_modules/src/colorama/initialise.html +++ b/doc/build/html/_modules/src/colorama/initialise.html @@ -61,12 +61,12 @@ atexit_done = False -
      [docs]def reset_all(): +
      [docs]def reset_all(): if AnsiToWin32 is not None: # Issue #74: objects might become None at exit AnsiToWin32(orig_stdout).reset_all()
      -
      [docs]def init(autoreset=False, convert=None, strip=None, wrap=True): +
      [docs]def init(autoreset=False, convert=None, strip=None, wrap=True): if not wrap and any([autoreset, convert, strip]): raise ValueError('wrap=False conflicts with any other arg=True') @@ -94,7 +94,7 @@ atexit_done = True
      -
      [docs]def deinit(): +
      [docs]def deinit(): if orig_stdout is not None: sys.stdout = orig_stdout if orig_stderr is not None: @@ -102,7 +102,7 @@ @contextlib.contextmanager -
      [docs]def colorama_text(*args, **kwargs): +
      [docs]def colorama_text(*args, **kwargs): init(*args, **kwargs) try: yield @@ -110,14 +110,14 @@ deinit()
      -
      [docs]def reinit(): +
      [docs]def reinit(): if wrapped_stdout is not None: sys.stdout = wrapped_stdout if wrapped_stderr is not None: sys.stderr = wrapped_stderr
      -
      [docs]def wrap_stream(stream, convert, strip, autoreset, wrap): +
      [docs]def wrap_stream(stream, convert, strip, autoreset, wrap): if wrap: wrapper = AnsiToWin32(stream, convert=convert, strip=strip, autoreset=autoreset) diff --git a/doc/build/html/_modules/src/colorama/win32.html b/doc/build/html/_modules/src/colorama/win32.html index 50b3a53..f86854c 100644 --- a/doc/build/html/_modules/src/colorama/win32.html +++ b/doc/build/html/_modules/src/colorama/win32.html @@ -140,7 +140,7 @@ STDERR: _GetStdHandle(STDERR), } -
      [docs] def winapi_test(): +
      [docs] def winapi_test(): handle = handles[STDOUT] csbi = CONSOLE_SCREEN_BUFFER_INFO() success = _GetConsoleScreenBufferInfo( @@ -154,7 +154,7 @@ handle, byref(csbi)) return csbi -
      [docs] def SetConsoleTextAttribute(stream_id, attrs): +
      [docs] def SetConsoleTextAttribute(stream_id, attrs): handle = handles[stream_id] return _SetConsoleTextAttribute(handle, attrs)
      diff --git a/doc/build/html/_modules/src/colorama/winterm.html b/doc/build/html/_modules/src/colorama/winterm.html index ff1d0d4..ba78903 100644 --- a/doc/build/html/_modules/src/colorama/winterm.html +++ b/doc/build/html/_modules/src/colorama/winterm.html @@ -49,7 +49,7 @@ # from wincon.h -
      [docs]class WinColor(object): +
      [docs]class WinColor(object): BLACK = 0 BLUE = 1 GREEN = 2 @@ -60,12 +60,12 @@ GREY = 7
      # from wincon.h -
      [docs]class WinStyle(object): +
      [docs]class WinStyle(object): NORMAL = 0x00 # dim text, dim background BRIGHT = 0x08 # bright text, dim background BRIGHT_BACKGROUND = 0x80 # dim text, bright background
      -
      [docs]class WinTerm(object): +
      [docs]class WinTerm(object): def __init__(self): self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes @@ -79,19 +79,19 @@ # and BRIGHT is overwritten by Style codes. self._light = 0 -
      [docs] def get_attrs(self): +
      [docs] def get_attrs(self): return self._fore + self._back * 16 + (self._style | self._light)
      -
      [docs] def set_attrs(self, value): +
      [docs] def set_attrs(self, value): self._fore = value & 7 self._back = (value >> 4) & 7 self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
      -
      [docs] def reset_all(self, on_stderr=None): +
      [docs] def reset_all(self, on_stderr=None): self.set_attrs(self._default) self.set_console(attrs=self._default)
      -
      [docs] def fore(self, fore=None, light=False, on_stderr=False): +
      [docs] def fore(self, fore=None, light=False, on_stderr=False): if fore is None: fore = self._default_fore self._fore = fore @@ -102,7 +102,7 @@ self._light &= ~WinStyle.BRIGHT self.set_console(on_stderr=on_stderr)
      -
      [docs] def back(self, back=None, light=False, on_stderr=False): +
      [docs] def back(self, back=None, light=False, on_stderr=False): if back is None: back = self._default_back self._back = back @@ -113,13 +113,13 @@ self._light &= ~WinStyle.BRIGHT_BACKGROUND self.set_console(on_stderr=on_stderr)
      -
      [docs] def style(self, style=None, on_stderr=False): +
      [docs] def style(self, style=None, on_stderr=False): if style is None: style = self._default_style self._style = style self.set_console(on_stderr=on_stderr)
      -
      [docs] def set_console(self, attrs=None, on_stderr=False): +
      [docs] def set_console(self, attrs=None, on_stderr=False): if attrs is None: attrs = self.get_attrs() handle = win32.STDOUT @@ -127,7 +127,7 @@ handle = win32.STDERR win32.SetConsoleTextAttribute(handle, attrs)
      -
      [docs] def get_position(self, handle): +
      [docs] def get_position(self, handle): position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition # Because Windows coordinates are 0-based, # and win32.SetConsoleCursorPosition expects 1-based. @@ -135,7 +135,7 @@ position.Y += 1 return position
      -
      [docs] def set_cursor_position(self, position=None, on_stderr=False): +
      [docs] def set_cursor_position(self, position=None, on_stderr=False): if position is None: # I'm not currently tracking the position, so there is no default. # position = self.get_position() @@ -145,7 +145,7 @@ handle = win32.STDERR win32.SetConsoleCursorPosition(handle, position)
      -
      [docs] def cursor_adjust(self, x, y, on_stderr=False): +
      [docs] def cursor_adjust(self, x, y, on_stderr=False): handle = win32.STDOUT if on_stderr: handle = win32.STDERR @@ -153,7 +153,7 @@ adjusted_position = (position.Y + y, position.X + x) win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
      -
      [docs] def erase_screen(self, mode=0, on_stderr=False): +
      [docs] def erase_screen(self, mode=0, on_stderr=False): # 0 should clear from the cursor to the end of the screen. # 1 should clear from the cursor to the beginning of the screen. # 2 should clear the entire screen, and move cursor to (1,1) @@ -182,7 +182,7 @@ # put the cursor where needed win32.SetConsoleCursorPosition(handle, (1, 1))
      -
      [docs] def erase_line(self, mode=0, on_stderr=False): +
      [docs] def erase_line(self, mode=0, on_stderr=False): # 0 should clear from the cursor to the end of the line. # 1 should clear from the cursor to the beginning of the line. # 2 should clear the entire line. @@ -204,7 +204,7 @@ # now set the buffer's attributes accordingly win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
      -
      [docs] def set_title(self, title): +
      [docs] def set_title(self, title): win32.SetConsoleTitle(title)
      diff --git a/doc/build/html/_modules/src/coloringSat.html b/doc/build/html/_modules/src/coloringSat.html index b84094d..86b5dc8 100644 --- a/doc/build/html/_modules/src/coloringSat.html +++ b/doc/build/html/_modules/src/coloringSat.html @@ -162,13 +162,13 @@ ("<KO>", "KO"), ) ) -
      [docs]def indent(msg, nb, car=" "): +
      [docs]def indent(msg, nb, car=" "): """indent nb car (spaces) multi lines message except first one""" s = msg.split("\n") res = ("\n"+car*nb).join(s) return res
      -
      [docs]def log(msg): +
      [docs]def log(msg): """elementary log stdout for debug if _verbose""" prefix = "%s.log: " % _name nb = len(prefix) @@ -179,7 +179,7 @@ res = res + toColor("<reset>") print(res)
      -
      [docs]class ColoringStream(object): +
      [docs]class ColoringStream(object): """ write my stream class only write and flush are used for the streaming @@ -189,17 +189,17 @@ def __init__(self): self.logs = '' -
      [docs] def write(self, astr): +
      [docs] def write(self, astr): # log("UnittestStream.write('%s')" % astr) self.logs += astr
      -
      [docs] def flush(self): +
      [docs] def flush(self): pass
      def __str__(self): return self.logs
      -
      [docs]def toColor(msg): +
      [docs]def toColor(msg): """ automatically clean the message of color tags '<red> ... if the terminal output stdout is redirected by user @@ -214,11 +214,11 @@ else: return replace(msg, _tags)
      -
      [docs]def cleanColors(msg): +
      [docs]def cleanColors(msg): """clean the message of color tags '<red> ... """ return replace(msg, _tagsNone)
      -
      [docs]def toColor_AnsiToWin32(msg): +
      [docs]def toColor_AnsiToWin32(msg): """for test debug no wrapping""" if not ('isatty' in dir(sys.stdout) and sys.stdout.isatty()): # clean the message color if the terminal is redirected by user @@ -235,7 +235,7 @@ # print "streamOut",str(streamOut) return str(streamOut)
      -
      [docs]def replace(msg, tags): +
      [docs]def replace(msg, tags): s = msg for r in tags: s = s.replace(*r) diff --git a/doc/build/html/_modules/src/compilation.html b/doc/build/html/_modules/src/compilation.html index eb96ae8..c7f6e70 100644 --- a/doc/build/html/_modules/src/compilation.html +++ b/doc/build/html/_modules/src/compilation.html @@ -79,8 +79,9 @@ "LIBS", "LDFLAGS"] -
      [docs]class Builder: - """Class to handle all construction steps, like cmake, configure, make, ... +
      [docs]class Builder: + """ + Class to handle all construction steps, like cmake, configure, make, ... """ def __init__(self, config, @@ -100,17 +101,17 @@ if "debug" in self.product_info and self.product_info.debug == "yes": self.debug_mode = True -
      [docs] def log(self, text, level, showInfo=True): +
      [docs] def log(self, text, level, showInfo=True): """Shortcut method to log in log file.""" self.logger.info(text) self.logger.logTxtFile.write(UTS.cleancolor(text))
      -
      [docs] def log_command(self, command): +
      [docs] def log_command(self, command): """Shortcut method to log a command.""" self.log("> %s\n" % command, 5)
      -
      [docs] def prepare(self): - """\ +
      [docs] def prepare(self): + """ Prepares the environment. Build two environment: one for building and one for testing (launch). """ @@ -151,7 +152,7 @@ ## # Runs cmake with the given options. -
      [docs] def cmake(self, options=""): +
      [docs] def cmake(self, options=""): cmake_option = options # cmake_option +=' -DCMAKE_VERBOSE_MAKEFILE=ON -DSALOME_CMAKE_DEBUG=ON' @@ -192,7 +193,7 @@ ## # Runs build_configure with the given options. -
      [docs] def build_configure(self, options=""): +
      [docs] def build_configure(self, options=""): if 'buildconfigure_options' in self.product_info: options += " %s " % self.product_info.buildconfigure_options @@ -215,7 +216,7 @@ ## # Runs configure with the given options. -
      [docs] def configure(self, options=""): +
      [docs] def configure(self, options=""): if 'configure_options' in self.product_info: options += " %s " % self.product_info.configure_options @@ -239,7 +240,7 @@ else: return 1
      -
      [docs] def hack_libtool(self): +
      [docs] def hack_libtool(self): if not os.path.exists(str(self.build_dir + 'libtool')): return @@ -271,7 +272,7 @@ ## # Runs make to build the module. -
      [docs] def make(self, nb_proc, make_opt=""): +
      [docs] def make(self, nb_proc, make_opt=""): # make command = 'make' @@ -292,7 +293,7 @@ ## # Runs msbuild to build the module. -
      [docs] def wmake(self,nb_proc, opt_nb_proc = None): +
      [docs] def wmake(self,nb_proc, opt_nb_proc = None): hh = 'MSBUILD /m:%s' % str(nb_proc) if self.debug_mode: @@ -322,7 +323,7 @@ ## # Runs 'make install'. -
      [docs] def install(self): +
      [docs] def install(self): if self.config.VARS.dist_name=="Win": command = 'msbuild INSTALL.vcxproj' if self.debug_mode: @@ -349,7 +350,7 @@ ## # Runs 'make_check'. -
      [docs] def check(self, command=""): +
      [docs] def check(self, command=""): if src.architecture.is_windows(): cmd = 'msbuild RUN_TESTS.vcxproj' else : @@ -377,7 +378,7 @@ ## # Performs a default build for this module. -
      [docs] def do_default_build(self, +
      [docs] def do_default_build(self, build_conf_options="", configure_options="", show_warning=True): @@ -436,7 +437,7 @@ return self.get_result()
      -
      [docs] def do_python_script_build(self, script, nb_proc): +
      [docs] def do_python_script_build(self, script, nb_proc): """Performs a build with a script.""" # script found self.logger.info(_("Compile %s using script %s\n") % \ @@ -458,7 +459,7 @@ self.put_txt_log_in_appli_log_dir("script") return retcode
      -
      [docs] def complete_environment(self, make_options): +
      [docs] def complete_environment(self, make_options): assert self.build_environ is not None # pass additional variables to environment # (may be used by the build script) @@ -473,7 +474,7 @@ self.build_environ.set("DIST", self.config.VARS.dist) self.build_environ.set("VERSION", self.product_info.version)
      -
      [docs] def do_batch_script_build(self, script, nb_proc): +
      [docs] def do_batch_script_build(self, script, nb_proc): if src.architecture.is_windows(): make_options = "/maxcpucount:%s" % nb_proc @@ -496,7 +497,7 @@ else: return 1
      -
      [docs] def do_script_build(self, script, number_of_proc=0): +
      [docs] def do_script_build(self, script, number_of_proc=0): # define make options (may not be used by the script) if number_of_proc==0: nb_proc = src.get_cfg_param(self.product_info,"nb_proc", 0) @@ -514,17 +515,16 @@ msg = _("The script %s must have .sh, .bat or .py extension.") % script raise Exception(msg)
      -
      [docs] def put_txt_log_in_appli_log_dir(self, file_name): - '''Put the txt log (that contain the system logs, like make command - output) in the directory <APPLICATION DIR>/LOGS/<product_name>/ +
      [docs] def put_txt_log_in_appli_log_dir(self, file_name): + """ + Put the txt log (that contain the system logs, like make command output) + in the directory <APPLICATION DIR>/LOGS/<product_name>/ - :param file_name Str: the name of the file to write - ''' + :param file_name: (str) The name of the file to write + """ if self.logger.logTxtFile == sys.__stdout__: return - dir_where_to_put = os.path.join(self.config.APPLICATION.workdir, - "LOGS", - self.product_info.name) + dir_where_to_put = os.path.join(self.config.APPLICATION.workdir, "LOGS", self.product_info.name) file_path = os.path.join(dir_where_to_put, file_name) UTS.ensure_path_exists(dir_where_to_put) # write the logTxtFile copy it to the destination, and then recreate diff --git a/doc/build/html/_modules/src/configManager.html b/doc/build/html/_modules/src/configManager.html index be2cf3c..e789a1b 100644 --- a/doc/build/html/_modules/src/configManager.html +++ b/doc/build/html/_modules/src/configManager.html @@ -75,16 +75,17 @@ import src.utilsSat as UTS import src.pyconf as PYCONF -
      [docs]class ConfigOpener: +
      [docs]class ConfigOpener: """ Class that helps to find an application pyconf in all the possible directories (pathList) """ def __init__(self, pathList): - '''Initialization + """Initialization - :param pathList list: The list of paths where to search a pyconf. - ''' + :param pathList: (list) + The list of paths where to search a pyconf. + """ self.pathList = pathList def __call__(self, name): @@ -95,17 +96,18 @@ open(os.path.join( self.get_path(name), name ), 'rb') ) raise IOError(_("Configuration file '%s' not found") % name) -
      [docs] def get_path( self, name ): - '''The method that returns the entire path of the pyconf searched - :param name str: The name of the searched pyconf. - ''' +
      [docs] def get_path( self, name): + """The method that returns the entire path of the pyconf searched + + :param name: (str) The name of the searched pyconf. + """ for path in self.pathList: if os.path.exists(os.path.join(path, name)): return path raise IOError(_("Configuration file '%s' not found") % name)
      -
      [docs]class ConfigManager: - """\ +
      [docs]class ConfigManager: + """ Class that manages the read of all the config .pyconf files of salomeTools """ def __init__(self, runner): @@ -114,15 +116,16 @@ self.datadir = None def _create_vars(self, application=None, command=None, datadir=None): - """Create a dictionary that stores all information about machine, - user, date, repositories, etc... + """ + Create a dictionary that stores all information about machine, + user, date, repositories, etc... - :param application str: The application for which salomeTools is called. - :param command str: The command that is called. - :param datadir str: The repository that contain external data - for salomeTools. - :return: The dictionary that stores all information. - :rtype: dict + :param application: (str) + The application for which salomeTools is called. + :param command: (str) The command that is called. + :param datadir: (str) + The repository that contain external data for salomeTools. + :return: (dict) The dictionary that stores all information. """ var = {} var['user'] = ARCH.get_user() @@ -199,15 +202,15 @@ return var -
      [docs] def get_command_line_overrides(self, options, sections): - '''get all the overwrites that are in the command line +
      [docs] def get_command_line_overrides(self, options, sections): + """get all the overwrites that are in the command line - :param options: the options from salomeTools class - initialization (like -l5 or --overwrite) - :param sections str: The config section to overwrite. - :return: The list of all the overwrites to apply. - :rtype: list - ''' + :param options: + The options from salomeTools class initialization + (as '-l5' or '--overwrite') + :param sections: (str) The config section to overwrite. + :return: (list) The list of all the overwrites to apply. + """ # when there are no options or not the overwrite option, # return an empty list if options is None or options.overwrite is None: @@ -220,20 +223,21 @@ options.overwrite)) return over
      -
      [docs] def get_config(self, application=None, options=None, command=None, +
      [docs] def get_config(self, application=None, options=None, command=None, datadir=None): - '''get the config from all the configuration files. + """get the config from all the configuration files. - :param application str: The application for which salomeTools is called. - :param options class Options: The general salomeTools - options (--overwrite or -v5, for example) - :param command str: The command that is called. - :param datadir str: The repository that contain - external data for salomeTools. - :return: The final config. - :rtype: class 'PYCONF.Config' - ''' - + :param application: (str) + The application for which salomeTools is called. + :param options: (Options) + The general salomeTools options + (as '--overwrite' or '-v5') + :param command: (str) The command that is called. + :param datadir: (str) + The repository that contain external data for salomeTools. + :return: (Config) The final config. + """ + msgPb = _("Problem in configuration file: <red>%s\n<yellow>%s<reset>\n") # % (filename, exception) # create a ConfigMerger to handle merge merger = PYCONF.ConfigMerger() #MergeHandler()) @@ -256,15 +260,14 @@ # ===================================================================== # Load INTERNAL config # read src/internal_config/salomeTools.pyconf - PYCONF.streamOpener = ConfigOpener([ - os.path.join(cfg.VARS.srcDir, 'internal_config')]) + intDir = os.path.join(cfg.VARS.srcDir, 'internal_config') + PYCONF.streamOpener = ConfigOpener([intDir]) try: - internal_cfg = PYCONF.Config(open(os.path.join(cfg.VARS.srcDir, - 'internal_config', 'salomeTools.pyconf'))) - except PYCONF.ConfigError as e: - raise Exception(_("Error in configuration file:" - " salomeTools.pyconf\n %(error)s") % \ - {'error': str(e) }) + afile = os.path.join(intDir, 'salomeTools.pyconf') + with open(afile) as f: + internal_cfg = PYCONF.Config(f) + except Exception as e: + raise Exception(msgPb % (afile, str(e))) merger.merge(cfg, internal_cfg) @@ -277,39 +280,24 @@ # search only in 
the data directory PYCONF.streamOpener = ConfigOpener([cfg.VARS.datadir]) try: - local_cfg = PYCONF.Config(open(os.path.join(cfg.VARS.datadir, - 'local.pyconf')), - PWD = ('LOCAL', cfg.VARS.datadir) ) - except PYCONF.ConfigError as e: - raise Exception(_("Error in configuration file: " - "local.pyconf\n %(error)s") % \ - {'error': str(e) }) - except IOError as error: - e = str(error) - raise Exception( e ); + aFile = os.path.join(cfg.VARS.datadir, 'local.pyconf') + with open(aFile) as f: + local_cfg = PYCONF.Config(f, PWD = ('LOCAL', cfg.VARS.datadir) ) + except Exception as e: + raise Exception(msgPb % (aFile, str(e))) + merger.merge(cfg, local_cfg) # When the key is "default", put the default value + sWay = cfg.VARS.salometoolsway if cfg.LOCAL.base == "default": - cfg.LOCAL.base = os.path.abspath( - os.path.join(cfg.VARS.salometoolsway, - "..", - "BASE")) + cfg.LOCAL.base = os.path.abspath(os.path.join(sWay, "..", "BASE")) if cfg.LOCAL.workdir == "default": - cfg.LOCAL.workdir = os.path.abspath( - os.path.join(cfg.VARS.salometoolsway, - "..")) + cfg.LOCAL.workdir = os.path.abspath(os.path.join(sWay, "..")) if cfg.LOCAL.log_dir == "default": - cfg.LOCAL.log_dir = os.path.abspath( - os.path.join(cfg.VARS.salometoolsway, - "..", - "LOGS")) - + cfg.LOCAL.log_dir = os.path.abspath(os.path.join(sWay, "..", "LOGS")) if cfg.LOCAL.archive_dir == "default": - cfg.LOCAL.archive_dir = os.path.abspath( - os.path.join(cfg.VARS.salometoolsway, - "..", - "ARCHIVES")) + cfg.LOCAL.archive_dir = os.path.abspath(os.path.join(sWay, "..", "ARCHIVES")) # apply overwrite from command line if needed for rule in self.get_command_line_overrides(options, ["LOCAL"]): @@ -318,34 +306,28 @@ # ===================================================================== # Load the PROJECTS projects_cfg = PYCONF.Config() - projects_cfg.addMapping("PROJECTS", - PYCONF.Mapping(projects_cfg), - "The projects\n") - projects_cfg.PROJECTS.addMapping("projects", - PYCONF.Mapping(cfg.PROJECTS), - "The projects 
definition\n") + projects_cfg.addMapping("PROJECTS", PYCONF.Mapping(projects_cfg), "The projects\n") + projects_cfg.PROJECTS.addMapping("projects", PYCONF.Mapping(cfg.PROJECTS), "The projects definition\n") for project_pyconf_path in cfg.PROJECTS.project_file_paths: if not os.path.exists(project_pyconf_path): - msg = _("Cannot find project file <red>%s<reset>, Ignored.") % project_pyconf_path + msg = _("Cannot find project file %s, Ignored.") % UTS.red(project_pyconf_path) self.logger.warning(msg) continue project_name = os.path.basename(project_pyconf_path)[:-len(".pyconf")] try: - project_pyconf_dir = os.path.dirname(project_pyconf_path) - project_cfg = PYCONF.Config(open(project_pyconf_path), - PWD=("", project_pyconf_dir)) + project_pyconf_dir = os.path.dirname(project_pyconf_path) + with open(project_pyconf_path) as f: + project_cfg = PYCONF.Config(f, PWD=("", project_pyconf_dir)) except Exception as e: - msg = _("ERROR: Error in configuration file: %(file_path)s\n %(error)s\n") % \ - {'file_path' : project_pyconf_path, 'error': str(e) } - sys.stdout.write(msg) - continue - projects_cfg.PROJECTS.projects.addMapping(project_name, - PYCONF.Mapping(projects_cfg.PROJECTS.projects), - "The %s project\n" % project_name) - projects_cfg.PROJECTS.projects[project_name]=project_cfg - projects_cfg.PROJECTS.projects[project_name]["file_path"] = \ - project_pyconf_path + self.logger.warning(msgPb % (project_pyconf_path, str(e))) + continue + PROJECTS = projects_cfg.PROJECTS + PROJECTS.projects.addMapping(project_name, + PYCONF.Mapping(PROJECTS.projects), + "The %s project\n" % project_name) + PROJECTS.projects[project_name] = project_cfg + PROJECTS.projects[project_name]["file_path"] = project_pyconf_path merger.merge(cfg, projects_cfg) @@ -395,36 +377,32 @@ cp = cfg.PATHS.APPLICATIONPATH PYCONF.streamOpener = ConfigOpener(cp) do_merge = True + aFile = application + '.pyconf' try: - application_cfg = PYCONF.Config(application + '.pyconf') + application_cfg = 
PYCONF.Config(aFile) except IOError as e: - raise Exception(_("%s\n(use 'config --list' to get the" - " list of available applications)") % e) + msg = msgPb % (aFile, str(e)) + msg += "\n" + _("(use 'sat config --list' to get the list of available applications)") + raise Exception(msg) except PYCONF.ConfigError as e: - if (not ('-e' in parser.parse_args()[1]) - or ('--edit' in parser.parse_args()[1]) - and command == 'config'): - raise Exception( - _("Error in configuration file: (1)s.pyconf\n %(2)s") % \ - { 'application': application, 'error': str(e) } ) - else: - sys.stdout.write(UTS.red( - "There is an error in the file %s.pyconf.\n" % \ - cfg.VARS.application)) - do_merge = False + msg = msgPb % (aFile, str(e)) + if (not ('-e' in parser.parse_args()[1]) + or ('--edit' in parser.parse_args()[1]) + and command == 'config'): + raise Exception(msg) + else: + self.warning(msg) + do_merge = False except Exception as e: - if ( not('-e' in parser.parse_args()[1]) or - ('--edit' in parser.parse_args()[1]) and - command == 'config' ): - sys.stdout.write(UTS.red("%s\n" % str(e))) - raise Exception( - _("Error in configuration file: %s.pyconf\n") % application ) - else: - sys.stdout.write(UTS.red( - "ERROR: in file %s.pyconf. 
Opening the file with the default viewer\n" % \ - cfg.VARS.application)) - sys.stdout.write("\n%s\n" % UTS.red(str(e))) - do_merge = False + msg = msgPb % (aFile, str(e)) + if (not('-e' in parser.parse_args()[1]) + or ('--edit' in parser.parse_args()[1]) + and command == 'config' ): + self.warning(msg) + raise Exception(msg) + else: + self.warning("Opening the file %s with the default viewer" % aFile) + do_merge = False else: cfg['open_application'] = 'yes' @@ -432,9 +410,7 @@ # ===================================================================== # Load product config files in PRODUCTS section products_cfg = PYCONF.Config() - products_cfg.addMapping("PRODUCTS", - PYCONF.Mapping(products_cfg), - "The products\n") + products_cfg.addMapping("PRODUCTS", PYCONF.Mapping(products_cfg), "The products\n") if application is not None: PYCONF.streamOpener = ConfigOpener(cfg.PATHS.PRODUCTPATH) for product_name in application_cfg.APPLICATION.products.keys(): @@ -445,15 +421,12 @@ if product_file_path: products_dir = os.path.dirname(product_file_path) try: - prod_cfg = PYCONF.Config(open(product_file_path), - PWD=("", products_dir)) + prod_cfg = PYCONF.Config(open(product_file_path), PWD=("", products_dir)) prod_cfg.from_file = product_file_path products_cfg.PRODUCTS[product_name] = prod_cfg except Exception as e: - msg = _( - "WARNING: Error in configuration file: %(prod)s\n %(error)s" % \ - {'prod' : product_name, 'error': str(e) }) - sys.stdout.write(msg) + msg = msgPb % (product_name, str(e)) + self.logger.warning(msg) merger.merge(cfg, products_cfg) @@ -488,13 +461,14 @@ return cfg
      -
      [docs] def set_user_config_file(self, config): - '''Set the user config file name and path. +
      [docs] def set_user_config_file(self, config): + """ + Set the user config file name and path. If necessary, build it from another one or create it from scratch. - :param config class 'PYCONF.Config': The global config - (containing all pyconf). - ''' + :param config: (Config) + The global config (containing all pyconf). + """ # get the expected name and path of the file self.config_file_name = 'SAT.pyconf' self.user_config_file_path = os.path.join(config.VARS.personalDir, @@ -504,86 +478,57 @@ if not os.path.isfile(self.user_config_file_path): self.create_config_file(config)
      -
      [docs] def create_config_file(self, config): - '''This method is called when there are no user config file. - It build it from scratch. +
      [docs] def create_config_file(self, config): + """ + This method is called when there are no user config file. + It build it from scratch. - :param config class 'PYCONF.Config': The global config. - :return: the config corresponding to the file created. - :rtype: config class 'PYCONF.Config' - ''' + :param config: (Config) The global config. + :return: (Config) + The config corresponding to the file created. + """ cfg_name = self.get_user_config_file() - - user_cfg = PYCONF.Config() - # - user_cfg.addMapping('USER', PYCONF.Mapping(user_cfg), "") - - user_cfg.USER.addMapping('cvs_user', config.VARS.user, - "This is the user name used to access salome cvs base.\n") - user_cfg.USER.addMapping('svn_user', config.VARS.user, - "This is the user name used to access salome svn base.\n") - user_cfg.USER.addMapping('output_verbose_level', 3, - "This is the default output_verbose_level you want." - " 0=>no output, 5=>debug.\n") - user_cfg.USER.addMapping('publish_dir', - os.path.join(os.path.expanduser('~'), - 'websupport', - 'satreport'), - "") - user_cfg.USER.addMapping('editor', - 'vi', - "This is the editor used to " - "modify configuration files\n") - user_cfg.USER.addMapping('browser', - 'firefox', - "This is the browser used to " - "read html documentation\n") - user_cfg.USER.addMapping('pdf_viewer', - 'evince', - "This is the pdf_viewer used " - "to read pdf documentation\n") -# CNC 25/10/17 : plus nécessaire a priori -# user_cfg.USER.addMapping("base", -# PYCONF.Reference( -# user_cfg, -# PYCONF.DOLLAR, -# 'workdir + $VARS.sep + "BASE"'), -# "The products installation base (could be " -# "ignored if this key exists in the local.pyconf" -# " file of salomTools).\n") + cfg = PYCONF.Config() + cfg.addMapping('USER', PYCONF.Mapping(user_cfg), "") + USER = cfg.USER + + USER.addMapping('cvs_user', config.VARS.user, + "This is the user name used to access salome cvs base.\n") + USER.addMapping('svn_user', config.VARS.user, + "This is the user name used to access 
salome svn base.\n") + USER.addMapping('output_verbose_level', 3, + "This is the default output_verbose_level you want. 0=>no output, 5=>debug.\n") + USER.addMapping('publish_dir', os.path.join(os.path.expanduser('~'), 'websupport', 'satreport'), + "") + USER.addMapping('editor', 'vi', "This is the editor used to modify configuration files\n") + USER.addMapping('browser', 'firefox', "This is the browser used to read html documentation\n") + USER.addMapping('pdf_viewer', 'evince', "This is the pdf_viewer used to read pdf documentation\n") - # UTS.ensure_path_exists(config.VARS.personalDir) - UTS.ensure_path_exists(os.path.join(config.VARS.personalDir, - 'Applications')) + UTS.ensure_path_exists(os.path.join(config.VARS.personalDir, 'Applications')) - f = open(cfg_name, 'w') - user_cfg.__save__(f) - f.close() + with open(cfg_name, 'w') as f: + cfg.__save__(f) + return cfg
      - return user_cfg
      - -
      [docs] def get_user_config_file(self): - '''Get the user config file - :return: path to the user config file. - :rtype: str - ''' +
      [docs] def get_user_config_file(self): + """Get the user config file + + :return: (str) path to the user config file. + """ if not self.user_config_file_path: - raise Exception( - _("Error in get_user_config_file: missing user config file path") ) + raise Exception(_("get_user_config_file: missing user config file path")) return self.user_config_file_path
      -
      [docs]def check_path(path, ext=[]): - '''Construct a text with the input path and "not found" if it does not - exist. +
      [docs]def check_path(path, ext=[]): + """Construct a text with the input path and "not found" if it does not exist. - :param path Str: the path to check. - :param ext List: An extension. Verify that the path extension - is in the list - :return: The string of the path with information - :rtype: Str - ''' + :param path: (str) The path to check. + :param ext: (list) + An extension. Verify that the path extension is in the list + :return: (str) The string of the path with information + """ # check if file exists if not os.path.exists(path): return "path '%s' ** not found" % path @@ -596,13 +541,13 @@ return path
      -
      [docs]def show_product_info(config, name, logger): - '''Display on the terminal and logger information about a product. +
      [docs]def show_product_info(config, name, logger): + """Display on the terminal and logger information about a product. - :param config Config: the global configuration. - :param name Str: The name of the product - :param logger Logger: The logger instance to use for the display - ''' + :param config: (Config) the global configuration. + :param name: (str) The name of the product + :param logger: (Logger) The logger instance to use for the display + """ def msgAdd(label, value): """ @@ -698,12 +643,12 @@ zz.set_a_product(name, logger) return
      -
      [docs]def show_patchs(config, logger): - """ - Prints all the used patchs in the application. +
      [docs]def show_patchs(config, logger): + """Prints all the used patchs in the application. - :param config Config: the global configuration. - :param logger Logger: The logger instance to use for the display + :param config: (Config) the global configuration. + :param logger: (Logger) + The logger instance to use for the display """ len_max = max([len(p) for p in config.APPLICATION.products]) + 2 msg = "" @@ -720,17 +665,19 @@ logger.info(msg) return
      -
      [docs]def getConfigColored(config, path, stream, show_label=False, level=0, show_full_path=False): - """\ +
      [docs]def getConfigColored(config, path, stream, show_label=False, level=0, show_full_path=False): + """ Get a colored representation value from a config pyconf instance. used recursively from the initial path. - :param config class 'PYCONF.Config': The configuration from which the value is displayed. - :param path str: the path in the configuration of the value to print. - :param show_label boolean: if True, do a basic display. (useful for bash completion) - :param stream: the output stream used - :param level int: The number of spaces to add before display. - :param show_full_path: display full path, else relative + :param config: (Config) + The configuration from which the value is displayed. + :param path: (str) The path in the configuration of the value to print. + :param show_label: (bool) + If True, do a basic display. (useful for bash completion) + :param stream: The output stream used + :param level: (int) The number of spaces to add before display. + :param show_full_path: (bool) Display full path, else relative """ # Make sure that the path does not ends with a point @@ -774,12 +721,12 @@ else: # case where val is just a str stream.write("%s\n" % val)
      -