import os
import getpass
-import subprocess
+import subprocess as SP
import src.ElementTree as ETREE
import src.debug as DBG
elif options.gencat:
# generate catalog for given list of computers
catalog_src = options.gencat
- catalog = generate_catalog(options.gencat.split(","), config,logger)
+ catalog = UTS.generate_catalog(options.gencat.split(","), config,logger)
elif 'catalog' in virtual_app:
# use catalog specified in the product
if virtual_app.catalog.endswith(".xml"):
catalog_src = virtual_app.catalog
mlist = filter(lambda l: len(l.strip()) > 0, virtual_app.catalog.split(","))
if len(mlist) > 0:
- catalog = generate_catalog(virtual_app.catalog.split(","), config, logger)
+ catalog = UTS.generate_catalog(virtual_app.catalog.split(","), config, logger)
# display which catalog is used
if len(catalog) > 0:
shutil.rmtree(appli_dir)
rres = "<OK>"
finally:
- logger.info(rres + "\n")
+ logger.info(rres)
# generate the application
try:
"""Generates the application with the config_file."""
target_dir = os.path.dirname(appli_dir)
- install_KERNEL_dir = PROD.get_product_config(config,
- 'KERNEL').install_dir
+ install_KERNEL_dir = PROD.get_product_config(config, 'KERNEL').install_dir
script = os.path.join(install_KERNEL_dir, "bin", "salome", "appli_gen.py")
if not os.path.exists(script):
raise Exception(_("KERNEL is not installed"))
envi = ENVI.SalomeEnviron(config, ENVI.Environ(dict(os.environ)), True)
envi.set_a_product('Python', logger)
- command = "python %s --prefix=%s --config=%s" % (script,
- appli_dir,
- config_file)
- logger.debug("\n>" + command + "\n")
- res = subprocess.call(command,
- shell=True,
- cwd=target_dir,
- env=envi.environ.environ,
- stdout=logger.logTxtFile,
- stderr=subprocess.STDOUT)
-
- if res != 0:
- raise Exception(_("Cannot create application, code = %d\n") % res)
-
+ command = "python %s --prefix=%s --config=%s" % (script, appli_dir, config_file)
+ res = UTS.Popen(command, shell=True, cwd=target_dir, env=envi.environ.environ, logger=logger)
+ res.raiseIfKo()
return res
def get_step(logger, message, pad=50):
    """returns 'message ........ ' with pad 50 by default
avoid colors '<color>' for now in message
"""
- return "%s %s " % (message, '.' * (pad - len(message.decode("UTF-8"))))
+ return "%s %s " % (message, '.'*(pad - len(message.decode("UTF-8"))))
def create_application(config, appli_dir, catalog, logger, display=True):
"""reates a SALOME application."""
return retcode
-def generate_catalog(machines, config, logger):
- """Generates the catalog from a list of machines."""
- # remove empty machines
- machines = map(lambda l: l.strip(), machines)
- machines = filter(lambda l: len(l) > 0, machines)
- logger.debug(" %s = %s" % _("Generate Resources Catalog"), ", ".join(machines))
-
- cmd = '"cat /proc/cpuinfo | grep MHz ; cat /proc/meminfo | grep MemTotal"'
- user = getpass.getuser()
-
- catfile = UTS.get_tmp_filename(config, "CatalogResources.xml")
- catalog = file(catfile, "w")
- catalog.write("""\
-<!DOCTYPE ResourcesCatalog>
-<resources>
-""")
-
- for k in machines:
- logger.info(" ssh %s " % (k + " ").ljust(20, '.'), 4)
-
- ssh_cmd = 'ssh -o "StrictHostKeyChecking no" %s %s' % (k, cmd)
- p = subprocess.Popen(ssh_cmd, shell=True,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- p.wait()
-
- if p.returncode != 0:
- logger.error("<KO>\n%s\n" % UTS.red(p.stderr.read()))
- else:
- logger.info("<OK>\n")
- lines = p.stdout.readlines()
- freq = lines[0][:-1].split(':')[-1].split('.')[0].strip()
- nb_proc = len(lines) -1
- memory = lines[-1].split(':')[-1].split()[0].strip()
- memory = int(memory) / 1000
-
- msg = """\
- <machine
- protocol="ssh"
- nbOfNodes="1"
- mode="interactif"
- OS="LINUX"
- CPUFreqMHz="%s"
- nbOfProcPerNode="%s"
- memInMB="%s"
- userName="%s"
- name="%s"
- hostname="%s"
- >
- </machine>
-"""
- msg = msg % (freq, nb_proc, memory, user, k, k)
- catalog.write(msg)
-
- catalog.write("</resources>\n")
- catalog.close()
- return catfile
# Suppress the list of paths
suppress_directories(l_dir_to_suppress, logger)
- return RCO.ReturnCode("OK", "clean done")
+ return RCO.ReturnCode("OK", "Command clean done")
def get_source_directories(products_infos, without_dev):
else:
logger.info(_("Removing %s ...") % strpath )
path.rm()
- logger.info('<OK>\n')
+ logger.info('<OK>')
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+import subprocess as SP
import src.debug as DBG
import src.returnCode as RCO
return res
def generate_component(config, compo, product_info, context, header, logger):
-# get from config include file name and librairy name, or take default value
+    """Get from config the include file name and library name, or take default values"""
if "hxxfile" in product_info:
hxxfile = product_info.hxxfile
else:
# inline class to override bootstrap method
import module_generator
+
class sat_generator(module_generator.Generator):
# old bootstrap for automake (used if salome version <= 7.4)
def bootstrap(self, source_dir, log_file):
# replace call to default bootstrap() by using subprocess call (cleaner)
command = "sh autogen.sh"
- ier = subprocess.call(command, shell=True, cwd=source_dir,
- stdout=log_file, stderr=subprocess.STDOUT)
+ ier = SP.call(command, shell=True, cwd=source_dir, stdout=log_file, stderr=SP.STDOUT)
if ier != 0:
raise Exception("bootstrap has ended in error")
:param name: (str) the board name
"""
xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
- self.d_xml_board_files[name] = XMLMGR.XmlLogFile(xml_board_path,"JobsReport")
- self.d_xml_board_files[name].add_simple_node("distributions")
- self.d_xml_board_files[name].add_simple_node("applications")
- self.d_xml_board_files[name].add_simple_node("board", text=name)
+ aXml = XMLMGR.XmlLogFile(xml_board_path,"JobsReport")
+ aXml.add_simple_node_root("distributions")
+ aXml.add_simple_node_root("applications")
+ aXml.add_simple_node_root("board", text=name)
+ self.d_xml_board_files[name] = aXml
def initialize_boards(self, l_jobs, l_jobs_not_today):
"""
l_hosts_ports = []
- ASNODE = XMLMGR.add_simple_node # shortcut
+ ASNODE = XMLMGR.add_simple_node # shortcut to add a child node to another node
for job in l_jobs + l_jobs_not_today:
"application", attrib={"name" : appli} )
# Initialize the hosts_ports node for the global file
- self.xmlhosts_ports = self.xml_global_file.add_simple_node( "hosts_ports")
+ self.xmlhosts_ports = self.xml_global_file.add_simple_node_root("hosts_ports")
for host, port in l_hosts_ports:
host_port = "%s:%i" % (host, port)
ASNODE(self.xmlhosts_ports, "host_port", attrib={"name" : host_port})
# Initialize the jobs node in all files
for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
- xml_jobs = xml_file.add_simple_node("jobs")
+ xml_jobs = xml_file.add_simple_node_root("jobs")
# Get the jobs present in the config file but
# that will not be launched today
self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
# add also the infos node
- xml_file.add_simple_node(
+ xml_file.add_simple_node_root(
"infos", attrib={"name" : "last update", "JobsCommandStatus" : "running"} )
# and put the history node
- history_node = xml_file.add_simple_node("history")
+ history_node = xml_file.add_simple_node_root("history")
name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
        # search for board files
expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
import platform
import shutil
import getpass
-import subprocess
import stat
import src.debug as DBG
# Generate a catalog of resources if the corresponding option was called
if options.gencat:
- catalog_path = generate_catalog(options.gencat.split(","), config, logger)
+ catalog_path = UTS.generate_catalog(options.gencat.split(","), config, logger)
additional_environ = copy_catalog(config, catalog_path)
# Generate the launcher
stat.S_IXOTH)
return filepath
-
-def generate_catalog(machines, config, logger):
- """Generates an xml catalog file from a list of machines.
-
- :param machines: (list) The list of machines to add in the catalog
- :param config: (Config) The global configuration
- :param logger: (Logger)
- The logger instance to use for the display and logging
- :return: (str) The catalog file path.
- """
- # remove empty machines
- machines = map(lambda l: l.strip(), machines)
- machines = filter(lambda l: len(l) > 0, machines)
-
- # log something
- logger.debug(" %s = %s\n" % \
- (_("Generate Resources Catalog"), ", ".join(machines)) )
-
- # The command to execute on each machine in order to get some information
- cmd = '"cat /proc/cpuinfo | grep MHz ; cat /proc/meminfo | grep MemTotal"'
- user = getpass.getuser()
-
- # Create the catalog path
- catfile = UTS.get_tmp_filename(config, "CatalogResources.xml")
- catalog = file(catfile, "w")
-
- # Write into it
- catalog.write("<!DOCTYPE ResourcesCatalog>\n<resources>\n")
- for k in machines:
- logger.debug(" ssh %s " % (k + " ").ljust(20, '.'))
-
- # Verify that the machine is accessible
- ssh_cmd = 'ssh -o "StrictHostKeyChecking no" %s %s' % (k, cmd)
- p = subprocess.Popen(ssh_cmd, shell=True,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- p.wait()
-
- if p.returncode != 0: # The machine is not accessible
- logger.error("<KO>: The machine %s is not accessible:\n%s\n" % k +
- UTS.red(p.stderr.read()))
- else:
- # The machine is accessible, write the corresponding section on
- # the xml file
- logger.debug("<OK>: The machine %s is accessible:\n" % k)
- lines = p.stdout.readlines()
- freq = lines[0][:-1].split(':')[-1].split('.')[0].strip()
- nb_proc = len(lines) -1
- memory = lines[-1].split(':')[-1].split()[0].strip()
- memory = int(memory) / 1000
-
- catalog.write(" <machine\n")
- catalog.write(" protocol=\"ssh\"\n")
- catalog.write(" nbOfNodes=\"1\"\n")
- catalog.write(" mode=\"interactif\"\n")
- catalog.write(" OS=\"LINUX\"\n")
- catalog.write(" CPUFreqMHz=\"%s\"\n" % freq)
- catalog.write(" nbOfProcPerNode=\"%s\"\n" % nb_proc)
- catalog.write(" memInMB=\"%s\"\n" % memory)
- catalog.write(" userName=\"%s\"\n" % user)
- catalog.write(" name=\"%s\"\n" % k)
- catalog.write(" hostname=\"%s\"\n" % k)
- catalog.write(" >\n")
- catalog.write(" </machine>\n")
-
- catalog.write("</resources>\n")
- catalog.close()
- return catfile
-
def copy_catalog(config, catalog_path):
"""Copy the xml catalog file into the right location
:param logger: (Logger) the logger instance to use for the print
"""
if os.path.exists(filePath):
- logger.debug(UTS.red("Removing %s\n" % filePath))
+ logger.debug(UTS.red("Removing %s" % filePath))
os.remove(filePath)
def print_log_command_in_terminal(filePath, logger):
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
-import subprocess
+import subprocess as sP
import src.debug as DBG
import src.returnCode as RCO
UTS.check_config_has_application(config).raiseIfKo()
# Print some informations
- logger.info("Patching sources of the application %s\n" % \
+ logger.info("Patching sources of the application %s" % \
UTS.blue(config.VARS.application))
- logger.info(' workdir = %s\n\n"', UTS.blue(config.APPLICATION.workdir))
+ logger.info(" workdir = %s" % UTS.blue(config.APPLICATION.workdir))
# Get the products list with products informations regarding the options
products_infos = self.get_products_list(options, config)
# The loop on all the products on which to apply the patches
good_result = 0
- for __, product_info in products_infos:
+ for tmp, product_info in products_infos:
# Apply the patch
- return_code, patch_res = apply_patch(config,
- product_info,
- max_product_name_len,
- logger)
- logger.info(patch_res)
- if return_code:
+ rc = apply_patch(config, product_info, max_product_name_len, logger)
+ logger.info(str(rc))
+ if rc.isOk():
good_result += 1
# Display the results (how much passed, how much failed, etc...)
- logger.info("\n")
if good_result == len(products_infos):
status = "OK"
else:
status = "KO"
# write results
- msg = ("\nPatching sources of the application: <%s> (%d/%d)\n") % \
+ msg = ("Patching sources of the application: <%s> (%d/%d)") % \
(status, good_result, len(products_infos))
logger.info(msg)
logger.logTxtFile.flush()
# Call the command
- res_cmd = subprocess.call(
- patch_cmd,
- shell=True,
- cwd=product_info.source_dir,
- stdout=logger.logTxtFile,
- stderr=subprocess.STDOUT )
+ res_cmd = SP.call(patch_cmd, shell=True, cwd=product_info.source_dir,
+ stdout=logger.logTxtFile, stderr=SP.STDOUT )
res_cmd = (res_cmd == 0)
else:
logger.info(msg + "(%s)" % args_clean)
mCmd = self.getMicroCommand("clean", args_appli)
res_clean = mCmd.run(args_clean)
- logger.warning(str(res_clean))
- return res_clean # TODO debug remove that
+ logger.step(str(res_clean))
+ logger.closeFileHandlerForCommand(mCmd)
if do_source:
msg = _("Get the sources of the products ...")
logger.info(msg + "(%s)" % args_source)
mCmd = self.getMicroCommand("source", args_appli)
res_source = mCmd.run(args_source)
- logger.warning(str(res_source))
+ logger.step(str(res_source))
+ logger.closeFileHandlerForCommand(mCmd)
if do_patch:
msg = _("Patch the product sources (if any) ...")
logger.info(msg + "(%s)" % args_patch)
mCmd = self.getMicroCommand("patch", args_appli)
res_patch = mCmd.run(args_patch)
- logger.warning(str(res_patch))
+ logger.step(str(res_patch))
+ logger.closeFileHandlerForCommand(mCmd)
return res_clean + res_source + res_patch
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import shutil
-import subprocess
+import subprocess as SP
import src.debug as DBG
import src.returnCode as RCO
#Run command
os.environ["KERNEL_ROOT_DIR"] = kernel_root_dir
os.environ["GUI_ROOT_DIR"] = gui_root_dir
- res = subprocess.call(command,
- shell=True,
- env=os.environ,
- stdout=logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ res = SP.call(command, shell=True, env=os.environ,
+ stdout=logger.logTxtFile, stderr=SP.STDOUT)
#Check result of command
if res != 0:
raise Exception(_("Cannot create application, code = %d\n") % res)
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
-import subprocess
+import subprocess as SP
import src.debug as DBG
import src.returnCode as RCO
logger.info(_("Executed command <blue>%s<reset> Launching ...\n") % command)
# Run the launcher
- subprocess.call(command,
- shell=True,
- stdout=logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ SP.call(command, shell=True, stdout=logger.logTxtFile, stderr=SP.STDOUT)
# Display information: how to get the logs
msg1 = _("End of 'sat run'. To see traces, type:")
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-import subprocess
+import subprocess as SP
import src.debug as DBG
import src.returnCode as RCO
logger.info(msg)
# Call the input command
- res = subprocess.call(options.command,
- shell=True,
- stdout=logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ res = SP.call(options.command, shell=True, stdout=logger.logTxtFile, stderr=SP.STDOUT)
# Format the result to be 0 (success) or 1 (fail)
if res != 0:
UTS.check_config_has_application(config).raiseIfKo()
# Print some informations
- logger.info(_('Getting sources of the application %s\n') % \
- UTS.label(config.VARS.application), 1)
- logger.info(" workdir = %s\n" % config.APPLICATION.workdir)
+ logger.info(_('Getting sources of the application %s') % UTS.label(config.VARS.application))
+ logger.info(" workdir = %s" % UTS.blue(config.APPLICATION.workdir))
# Get the products list with products informations regarding the options
products_infos = self.get_products_list(options, config)
# Call to the function that gets all the sources
- good_result, results = get_all_product_sources(config,
- products_infos,
- logger)
+ good_result, results = get_all_product_sources(config, products_infos, logger)
# Display the results (how much passed, how much failed, etc...)
details = []
details = " ".join(details)
logger.info("\n%s %s: <%s>.\n%s\n" % (msg, msgCount, status, details))
- return RCO.ReturnCode(status, "%s %s" % msg, msgCount)
+ return RCO.ReturnCode(status, "%s %s" % (msg, msgCount))
def get_source_for_dev(config, product_info, source_dir, logger, pad):
repo_git = product_info.git_info.repo
# Display informations
- msg = "'%s:%s" % (coflag, repo_git)
+ msg = "%s:%s" % (coflag, repo_git)
msg += " " * (pad + 50 - len(repo_git))
msg += " tag:%s" % product_info.git_info.tag
msg += "%s. " % "." * (10 - len(product_info.git_info.tag))
logger.info("\n" + msg)
# Call the system function that do the extraction in git mode
- retcode = SYSS.git_extract(repo_git,
- product_info.git_info.tag,
- source_dir, logger, environ)
+ retcode = SYSS.git_extract(repo_git, product_info.git_info.tag, source_dir, logger, environ)
return retcode
def get_source_from_archive(product_info, source_dir, logger):
"""
# Get the application environment
- logger.info(_("Set the application environment\n"))
+ logger.info(_("Set the application environment"))
env_appli = ENVI.SalomeEnviron(config, ENVI.Environ(dict(os.environ)))
env_appli.set_application_env(logger)
# Call the right function to get sources regarding the product settings
if not checkout and is_dev:
- return get_source_for_dev(config,
- product_info,
- source_dir,
- logger,
- pad)
+ return get_source_for_dev(config, product_info, source_dir, logger, pad)
if product_info.get_source == "git":
- return get_source_from_git(product_info, source_dir, logger, pad,
- is_dev,env_appli)
+ return get_source_from_git(product_info, source_dir, logger, pad, is_dev, env_appli)
if product_info.get_source == "archive":
return get_source_from_archive(product_info, source_dir, logger)
if product_info.get_source == "cvs":
cvs_user = config.USER.cvs_user
- return get_source_from_cvs(cvs_user,
- product_info,
- source_dir,
- checkout,
- logger,
- pad,
- env_appli)
+ return get_source_from_cvs(cvs_user, product_info, source_dir, checkout, logger, pad, env_appli)
if product_info.get_source == "svn":
svn_user = config.USER.svn_user
- return get_source_from_svn(svn_user, product_info, source_dir,
- checkout,
- logger,
- env_appli)
+ return get_source_from_svn(svn_user, product_info, source_dir, checkout, logger, env_appli)
if product_info.get_source == "native":
# skip
# Check that the sources are correctly get using the files to be tested
# in product information
if retcode:
- check_OK, wrong_path = check_sources(product_info, logger)
- if not check_OK:
+ rc = check_sources(product_info, logger)
+ if not rc.isOk():
# Print the missing file path
- msg = _("The required file %s does not exists.\n") % wrong_path
+            msg = _("These required files do not exist:\n%s") % \
+                  ("\n  ".join(rc.getValue()))
logger.error(msg)
- retcode = False
+ retcode = rc
# show results
results[product_name] = retcode
res = "<KO>"
# print the result
- if not(PROD.product_is_fixed(product_info) or
- PROD.product_is_native(product_info)):
+ if not(PROD.product_is_fixed(product_info) or PROD.product_is_native(product_info)):
logger.info('%s\n' % res)
return good_result, results
The configuration specific to the product to be prepared
:param logger: (Logger)
The logger instance to be used for the logging
- :return: (bool)
- True if the files exists (or no files to test is provided).
+ :return: (RCO.ReturnCode)
+ OK if the files exists (or no files to test is provided).
"""
# Get the files to test if there is any
- if ("present_files" in product_info and
- "source" in product_info.present_files):
- l_files_to_be_tested = product_info.present_files.source
- res = True # all ok a priori
- filesKo = "" # None
- for file_path in l_files_to_be_tested:
- # add source directory of the product
- path_to_test = os.path.join(product_info.source_dir, file_path)
- msg = _("File %s testing existence:" % path_to_test)
- if not os.path.exists(path_to_test):
- logger.debug("%s <KO>\n" % msg)
- res = False
- # return False, path_to_test #break at first
- filesKo += path_to_test + "\n" # check all
- else:
- logger.debug("%s <OK>\n" % msg)
- return res, filesKo
+ if not ("present_files" in product_info and "source" in product_info.present_files):
+ return RCO.ReturnCode("OK", "check_sources, nothing to check")
+
+    l_files_to_be_tested = product_info.present_files.source
+    filesKo = [] # collect every missing file so they can all be reported
+    for file_path in l_files_to_be_tested:
+ # add source directory of the product
+ path_to_test = os.path.join(product_info.source_dir, file_path)
+ msg = _("File %s testing existence:" % path_to_test)
+ if not os.path.exists(path_to_test):
+ logger.debug("%s <KO>" % msg)
+ filesKo.append(path_to_test) # check all
+ else:
+ logger.debug("%s <OK>" % msg)
+    if len(filesKo) != 0:
+        return RCO.ReturnCode("KO", "check_sources, missing files", value=filesKo)
+ else:
+ return RCO.ReturnCode("OK", "check_sources, no missing file")
+
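A minimal sketch of how a caller is expected to consume the ReturnCode produced by this reworked check_sources (product_info and logger assumed to be in scope, as in get_all_product_sources above):

    rc = check_sources(product_info, logger)
    if not rc.isOk():
        for missing in rc.getValue():  # the value carries the list of missing file paths
            logger.error("missing required file: %s" % missing)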
conf_values,
logger):
"""Prepares a module from a template."""
+ res = RCO.ReturnCode("OK", "prepare_from_template has no raise")
+
template_src_dir = search_template(config, template)
- res = 0
# copy the template
if os.path.isfile(template_src_dir):
else:
definition = tsettings.pyconf % dico
pyconf_file = os.path.join(target_dir, name + '.pyconf')
- f = open(pyconf_file, 'w')
- f.write(definition)
- f.close
+ with open(pyconf_file, 'w') as f:
+ f.write(definition)
logger.info(_("Create configuration file: ") + pyconf_file)
if len(tsettings.post_command) > 0:
cmd = tsettings.post_command % dico
- logger.info(_("Run post command: ") + cmd)
+ res = UTS.Popen(cmd, shell=True, cwd=target_dir, logger=logger)
- p = subprocess.Popen(cmd, shell=True, cwd=target_dir)
- p.wait()
- res = p.returncode
-
return res
+
def get_template_info(config, template_name, logger):
sources = search_template(config, template_name)
logger.info(" Template = %s\n" % sources)
def check_remote_machine(machine_name, logger):
logger.debug(_("Check the display on %s\n") % machine_name)
ssh_cmd = 'ssh -o "StrictHostKeyChecking no" %s "ls"' % machine_name
- logger.debug(_("Executing the command : %s\n") % ssh_cmd)
- p = subprocess.Popen(ssh_cmd,
- shell=True,
- stdin =subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- p.wait()
- if p.returncode != 0:
- msg = "<KO> on '%s'" % ssh_cmd
- logger.critical(msg)
- logger.error(UTS.red(p.stderr.read()))
- logger.error(UTS.red(_("No ssh access to the display machine %s.") % machine_name))
- else:
- logger.debug("<OK>\n")
-
+ res = UTS.Popen(ssh_cmd, shell=True, logger=logger)
def create_test_report(config,
xml_history_path,
# First, it copies the content of the sources directory to the install directory.
# Then it runs 'lrelease' to build the resources.
-import subprocess
-
-import src
+import src.utilsSat as UTS
def compil(config, builder, logger):
builder.prepare()
raise Exception(_("Error when copying %s sources to install dir") % builder.product_info.name)
# test lrelease #.pyconf needs in ..._APPLI pre_depend : ['qt']
+ env = builder.build_environ.environ.environ
command = "which lrelease"
- res = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,env=builder.build_environ.environ.environ).communicate()
- if res[1] != "": #an error occured
- logger.error(res[1])
- builder.log(res[1]+"\n")
- return 1
+    res = UTS.Popen(command, shell=True, env=env, logger=logger)
+ if not res.isOk():
+ return res
# run lrelease
command = "lrelease *.ts"
- res = subprocess.call(command,
- shell=True,
- cwd=str(builder.install_dir + "resources"),
- env=builder.build_environ.environ.environ,
- stdout=logger.logTxtFile,
- stderr=subprocess.STDOUT)
- if res != 0:
- res = 1
-
+ cwd = str(builder.install_dir + "resources")
+ res = UTS.Popen(command, shell=True, cwd=cwd, env=env)
return res
"""
import os
-import subprocess
import sys
import shutil
+import subprocess as SP
from src.options import OptResult
import src.utilsSat as UTS
self.log_command(command)
# for key in sorted(self.build_environ.environ.environ.keys()):
# print key, " ", self.build_environ.environ.environ[key]
- res = subprocess.call(command,
- shell=True,
- cwd=str(self.build_dir),
+ res = SP.call(command, shell=True, cwd=str(self.build_dir),
env=self.build_environ.environ.environ,
stdout=self.logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ stderr=SP.STDOUT)
self.put_txt_log_in_appli_log_dir("cmake")
if res == 0:
command = command + " " + options
self.log_command(command)
- res = subprocess.call(command,
+ res = SP.call(command,
shell=True,
cwd=str(self.build_dir),
env=self.build_environ.environ.environ,
stdout=self.logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ stderr=SP.STDOUT)
self.put_txt_log_in_appli_log_dir("build_configure")
if res == 0:
return res
command = command + " " + options
self.log_command(command)
- res = subprocess.call(command,
+ res = SP.call(command,
shell=True,
cwd=str(self.build_dir),
env=self.build_environ.environ.environ,
stdout=self.logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ stderr=SP.STDOUT)
self.put_txt_log_in_appli_log_dir("configure")
if res == 0:
CC=\\"hack_libtool\\"%g" libtool'''
self.log_command(hack_command)
- subprocess.call(hack_command,
+ SP.call(hack_command,
shell=True,
cwd=str(self.build_dir),
env=self.build_environ.environ.environ,
stdout=self.logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ stderr=SP.STDOUT)
##
command = command + " -j" + str(nb_proc)
command = command + " " + make_opt
self.log_command(command)
- res = subprocess.call(command,
+ res = SP.call(command,
shell=True,
cwd=str(self.build_dir),
env=self.build_environ.environ.environ,
stdout=self.logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ stderr=SP.STDOUT)
self.put_txt_log_in_appli_log_dir("make")
if res == 0:
return res
command = command + " ALL_BUILD.vcxproj"
self.log_command(command)
- res = subprocess.call(command,
+ res = SP.call(command,
shell=True,
cwd=str(self.build_dir),
env=self.build_environ.environ.environ,
stdout=self.logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ stderr=SP.STDOUT)
self.put_txt_log_in_appli_log_dir("make")
if res == 0:
self.log_command(command)
- res = subprocess.call(command,
+ res = SP.call(command,
shell=True,
cwd=str(self.build_dir),
env=self.build_environ.environ.environ,
stdout=self.logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ stderr=SP.STDOUT)
self.put_txt_log_in_appli_log_dir("makeinstall")
if res == 0:
self.log_command(cmd)
- res = subprocess.call(cmd,
+ res = SP.call(cmd,
shell=True,
cwd=str(self.build_dir),
env=self.launch_environ.environ.environ,
stdout=self.logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ stderr=SP.STDOUT)
if res == 0:
return res
self.log_command(" " + _("Run build script %s\n") % script)
self.complete_environment(make_options)
- res = subprocess.call(script,
+ res = SP.call(script,
shell=True,
stdout=self.logger.logTxtFile,
- stderr=subprocess.STDOUT,
+ stderr=SP.STDOUT,
cwd=str(self.build_dir),
env=self.build_environ.environ.environ)
"""
import os
-import subprocess
+import subprocess as SP
import string
import sys
:param key: (str) the environment variable
:param command: (str) the command to execute
"""
- value = subprocess.Popen(command,
- shell=True,
- stdout=subprocess.PIPE,
- env=self.environ).communicate()[0]
+ p = SP.Popen(command, shell=True, stdout=SP.PIPE, env=self.environ)
+ value = p.communicate()[0]
self.environ[key] = value
"""
# check if value needs to be evaluated
if value is not None and value.startswith("`") and value.endswith("`"):
- res = subprocess.Popen("echo %s" % value,
- shell=True,
- stdout=subprocess.PIPE).communicate()
- value = res[0].strip()
+ p = SP.Popen("echo %s" % value, shell=True, stdout=SP.PIPE)
+ res = p.communicate()[0]
+ value = res.strip()
return self.environ.set(key, value)
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
+import shlex
+import subprocess as SP
import src.utilsSat as UTS
bat_header="""\
"""
self.output.write(self.indent+'#`%s`\n' % command)
- import shlex, subprocess
args = shlex.split(command)
- res=subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, __ = res.communicate()
- self.output.write(self.begin+
- self.setVarEnv+
- '(r"%s", r"%s", overwrite=True)\n' % (key, out))
+ p = SP.Popen(args, stdout=SP.PIPE, stderr=SP.PIPE)
+ res = p.communicate()[0]
+ msg = self.begin + self.setVarEnv + '(r"%s", r"%s", overwrite=True)\n' % (key, res)
+ self.output.write(msg)
def add_comment(self, comment):
# Special comment in case of the distène licence
import sys
import time
import pickle
-import subprocess
+import subprocess as SP
def show_progress(logger, top, delai, ss=""):
def launch_command(cmd, logger, cwd, args=[], log=None):
- """Launch command"""
+ """Launch command with subprocess.Popen"""
if log:
log = file(log, "a")
logger.info("launch: %s\n" % cmd)
for arg in args:
cmd += " " + arg
- prs = subprocess.Popen(cmd,
- shell=True,
- stdout=log,
- stderr=subprocess.STDOUT,
- cwd=cwd,
- executable='/bin/bash')
+ prs = SP.Popen(cmd, shell=True, stdout=log, stderr=SP.STDOUT, cwd=cwd, executable='/bin/bash')
return prs
import os
import stat
-import subprocess
+import subprocess as SP
import shutil
ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
stat.S_IXGRP |
stat.S_IXOTH)
-command = "python " + os.path.join(ROOT_DIR, dir_bin_name, "KERNEL", "bin", "salome", "appli_gen.py") + " --prefix=APPLI --config=" + appli_config_name
-subprocess.call(command, shell=True)
\ No newline at end of file
+appgen = os.path.join(ROOT_DIR, dir_bin_name, "KERNEL", "bin", "salome", "appli_gen.py")
+command = "python %s --prefix=APPLI --config=%s" % (appgen, appli_config_name)
+SP.call(command, shell=True)
\ No newline at end of file
import os
import sys
+import time
import logging as LOGI
from logging.handlers import BufferingHandler
import pprint as PP
LOGI.STEP = _STEP # only for coherency,
LOGI.TRACE = _TRACE # only for coherency,
+
#################################################################
# utilities methods
#################################################################
print(prefix + indent(msg, nb))
-log("import logging on %s" % LOGI.__file__)
+log("import logging on %s" % LOGI.__file__, True)
def getStrDirLogger(logger):
#################################################################
# salometools logger classes
+#################################################################
+
+try:
+ unicode
+ _unicode = True
+except NameError:
+ _unicode = False
+
+def getMessage(self):
+ """
+ modified from logging.__init__.LogRecord.getMessage
+ Return the message for this LogRecord.
+
+ Return the message for this LogRecord after merging any user-supplied
+ arguments with the message.
+ """
+ if not _unicode: #if no unicode support...
+ msg = str(self.msg)
+ else:
+ msg = self.msg
+ if not isinstance(msg, basestring):
+ try:
+ msg = str(self.msg)
+ except UnicodeError:
+ msg = self.msg #Defer encoding till later
+ if self.args:
+ try:
+ msg = msg % self.args
+ except Exception as e:
+ msg = "ERROR: %s with args %s" % (msg, PP.pformat(self.args))
+ print(msg)
+ return msg
+
+LOGI.LogRecord.getMessage = getMessage # better message if error
+
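A minimal illustrative sketch (logger name hypothetical) of the failure mode this getMessage override addresses: a log call whose arguments do not match its format string is rendered as an explicit "ERROR: ..." message instead of going through logging's internal error handling.

    import logging
    lg = logging.getLogger("sat.example")
    lg.addHandler(logging.StreamHandler())
    lg.setLevel(logging.INFO)
    # two placeholders but only one argument: with the override above, the record
    # formats to "ERROR: workdir %s on host %s with args ('/tmp',)"
    lg.info("workdir %s on host %s", "/tmp")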
#################################################################
class LoggerSat(LOGI.Logger):
"""
LOGI.addLevelName(_STEP, "STEP")
LOGI.addLevelName(_TRACE, "TRACE")
self.dateLogger = "NoDateLogger"
- self.closed = False
+ self.isClosed = False
+    self.idCommandHandlers = 0 # incremented; 0 for the main command, 1, 2, etc. for micro commands
self.STEP = _STEP
self.TRACE = _TRACE
final stuff for logger, done at end salomeTools
flushed and closed xml files have to be not overriden/appended
"""
- if self.closed:
+ if self.isClosed:
raise Exception("logger closed yet: %s" % self)
log("close stuff logger %s" % self) # getStrDirLogger(self)
for handl in self.handlers:
log("close stuff handler %s" % getStrHandler(handl))
handl.close() # Tidy up any resources used by the handler.
# todo etc
- self.closed = True # done at end sat, flushed closed xml files.
+ self.isClosed = True # done at end sat, flushed closed xml files.
return
def __repr__(self):
return 0
return level >= self.getEffectiveLevel()
- def setFileHandler(self, cmdInstance):
+ def setFileHandlerForCommand(self, cmdInstance):
"""
add file handler to logger to set log files
- for salometools command.
+ for a salometools command.
when command is known from pyconf/config instance
| Example:
cmd = config.VARS.command
fullNameCmd = cmdInstance.getFullNameStr()
hostname = config.VARS.hostname
- nameFileXml = "%s_%s_%s.xml" % (datehour, cmd, hostname)
- nameFileTxt = "%s_%s_%s.txt" % (datehour, cmd, hostname)
+ nameFileXml = "%s_%02i_%s_%s.xml" % (datehour, self.idCommandHandlers, cmd, hostname)
+ nameFileTxt = "%s_%02i_%s_%s.txt" % (datehour, self.idCommandHandlers, cmd, hostname)
fileXml = os.path.join(log_dir, nameFileXml)
fileTxt = os.path.join(log_dir_out, nameFileTxt)
msg = "setFileHandler '%s' command name incoherency in config '%s'" % (fullNameCmd, cmd)
logger.critical(msg)
- nbhandl = len(logger.handlers) # number of current handlers
- if nbhandl == 1: # first main command
- log("setFileHandler '%s' main command" % fullNameCmd, True)
- # Logging vers file xml
+ nbhandl = len(logger.handlers) # number of active current handlers
+
+ if self.idCommandHandlers == 0: # first main command
+ log("setFileHandler '%s' main command (id=%i)" % (fullNameCmd, self.idCommandHandlers), True)
+ ################################
+      # Logging to xml file
handler = XmlHandler(3000) # no many log outputs in memory
handler.setLevel(LOGI.STEP)
handler.set_name(nameFileXml)
handler.set_target_file(fileXml)
handler.set_config(config)
+ handler.idCommandHandlers = self.idCommandHandlers
fmt = '%(asctime)s :: %(levelname)s :: %(message)s'
formatter = FileXmlFormatter(fmt, "%y-%m-%d %H:%M:%S")
handler.setFormatter(formatter)
logger.addHandler(handler)
+ ################################
# Logging vers file txt
handler = LOGI.FileHandler(fileTxt)
handler.setLevel(LOGI.TRACE)
handler.set_name(nameFileTxt)
+ handler.idCommandHandlers = self.idCommandHandlers
fmt = '%(asctime)s :: %(levelname)s :: %(message)s'
formatter = FileTxtFormatter(fmt, "%y-%m-%d %H:%M:%S")
handler.setFormatter(formatter)
logger.addHandler(handler)
+
- elif nbhandl > 1: # secondary micro command
- log("TODO setFileHandler '%s' micro command" % fullNameCmd, True)
-
+ elif self.idCommandHandlers > 0: # secondary micro command
+ log("TODO setFileHandler '%s' micro command (id=%i)" % (fullNameCmd, self.idCommandHandlers), True)
+
+ ################################
+      # Logging to xml file
+      handler = XmlHandler(3000) # keep a limited number of log records in memory
+ handler.setLevel(LOGI.STEP)
+ handler.set_name(nameFileXml)
+ handler.set_target_file(fileXml)
+ handler.set_config(config)
+ handler.idCommandHandlers = self.idCommandHandlers
+
+ fmt = '%(asctime)s :: %(levelname)s :: %(message)s'
+ formatter = FileXmlFormatter(fmt, "%y-%m-%d %H:%M:%S")
+
+ handler.setFormatter(formatter)
+ logger.addHandler(handler)
+
+ ################################
+      # Logging to txt file
+ handler = LOGI.FileHandler(fileTxt)
+ handler.setLevel(LOGI.TRACE)
+ handler.set_name(nameFileTxt)
+ handler.idCommandHandlers = self.idCommandHandlers
+
+ fmt = '%(asctime)s :: %(levelname)s :: %(message)s'
+ formatter = FileTxtFormatter(fmt, "%y-%m-%d %H:%M:%S")
+
+ handler.setFormatter(formatter)
+ logger.addHandler(handler)
+
+ cmdInstance.setIdCommandHandlers(self.idCommandHandlers)
+ self.idCommandHandlers += 1
log("setFileHandler %s" % logger)
+ return self.idCommandHandlers
+
+ def closeFileHandlerForCommand(self, cmdInstance):
+ for handl in self.handlers:
+ try: # may be foreign handlers without idCommandHandlers attribute
+ if handl.idCommandHandlers == cmdInstance._idCommandHandlers:
+ log("=== begin len(logger.handlers)=%i" % len(self.handlers))
+ log("close stuff handler %s" % getStrHandler(handl))
+ handl.close() # Tidy up any resources used by the handler.
+ log("=== end len(logger.handlers)=%i" % len(self.handlers))
+ except:
+ self.warning("existing logger handler without idCommandHandlers attribute %s" % str(handl))
#################################################################
Write ElementTree in file and flush are done once
when method close is called, to generate xml file.
- see: https://docs.python.org/2/library/logging.handlers.html
+ | atts = {
+ | "fileName": xml file name of micro command
+ | "command": cmd, # 'compile' or 'prepare' etc.
+    | "passed": res, # '0' or '1'
+ | "launchedCommand" : fullcmd, # 'compile TOTO -etc'
+ | }
+ |
+ | see: https://docs.python.org/2/library/logging.handlers.html
"""
def __init__(self, capacity):
super(XmlHandler, self).__init__(capacity)
self._target_file = None
self._config = None
- self._log_field = "Uninitiate log"
+ self._log_field = "Uninitialized log"
self._links_fields = [] # list of (log_file_name, cmd_name, cmd_res, full_launched_cmd)
self._final_fields = {} # node attributes
+ self.isClosed = False # precaution as write file done yet
def set_target_file(self, filename):
"""
targetFile = self._target_file
config = self._config
- # TODO for debug
- log("XmlHandler to xml file\n%s" % PP.pformat(getListOfStrLogRecord(self.buffer)), True)
- self._log_field = self.createLogField()
+ # log("dir(XmlHandler)\n" + PP.pformat(dir(self)), True)
+ if self.isClosed:
+ msg = "XmlHandler target file %s closed yet" % targetFile
+ log(msg, True) #avoid sat logging message in logger close phase
+ return # avoid overwrite
+
if os.path.exists(targetFile):
msg = "XmlHandler target file %s existing yet" % targetFile
log(msg, True) #avoid sat logging message in logger close phase
- return # avoid overwrite
-
- else: # TOFIX for debug
- msg = "XmlHandler target file NOT %s existing yet" % targetFile
+ return # avoid overwrite
+ """
+ else: # for debug
+ msg = "XmlHandler target file %s NOT existing yet" % targetFile
log(msg, True) #avoid sat logging message in logger close phase
+ """
+
+ # TODO for debug
+ log("XmlHandler to xml file\n%s" % PP.pformat(getListOfStrLogRecord(self.buffer)), True)
+
+ self._log_field = self.createLogField()
xmlFile = XMLMGR.XmlLogFile(targetFile, "SATcommand")
xmlFile.put_initial_fields(config)
xmlFile.put_log_field(self._log_field)
xmlFile.put_links_fields(self._links_fields)
xmlFile.put_final_fields(self._final_fields)
- xmlFile.write_tree(stylesheet = "command.xsl")
+ xmlFile.write_tree(stylesheet = "command.xsl") # xml complete closed file
xmlFile.dump_config(config) # create pyconf file in the log directory
+ self.isClosed = True # precaution to not override xml closed file
# zaps the buffer to empty as parent class
- super(XmlHandler, self).close()
+ super(XmlHandler, self).close() # n.b. extract handler from logger
def createLogFieldFromScrath(self):
"""
"""
res = ""
for lr in self.buffer:
- fmt = "%s :: %s\n"
- levelName = COLS.cleanColors(lr.levelname).replace(" ", "")
- if levelName != "INFO":
- msg = COLS.cleanColors(lr.msg)
- res += fmt % (levelName, msg)
+ fmt = "%s :: %s\n"
+ if lr.levelno != LOGI.INFO:
+ levelName = COLS.cleanColors(lr.levelname).replace(" ", "")
+ msg = COLS.cleanColors(lr.msg)
+ res += fmt % (levelName, msg)
if res == "":
res = "Empty log"
return res
fmtr = self.formatter
res = ""
for lr in self.buffer:
- if not "INFO" in lr.levelname: #skip info level
- res += fmtr.format(lr) + "\n"
+ if lr.levelno != LOGI.INFO: #skip info level and supposed no debug present
+ res += fmtr.format(lr) + "\n"
if res == "":
res = "Empty log"
- print res
return COLS.cleanColors(res)
-
-
#################################################################
# methods to define two LoggerSat instances in salomeTools,
# no more need
# formatter = LOGI.Formatter(fmt, "%Y-%m-%d %H:%M:%S")
formatter = DefaultFormatter(fmt, "%y-%m-%d %H:%M:%S")
handler.setFormatter(formatter)
+ handler.idCommandHandlers = 0
logger.addHandler(handler)
if level is not None:
logger.setLevel(logger.STEP)
# formatter = LOGI.Formatter(fmt, "%Y-%m-%d %H:%M:%S")
formatter = UnittestFormatter(fmt, "%Y-%m-%d %H:%M:%S")
handler.setFormatter(formatter)
+ handler.idCommandHandlers = 0
logger.addHandler(handler)
logger.stream = stream
logger.getLogs = stream.getLogs
self._value = self._DEFAULT_VALUE
def isOk(self):
- """return True if ok"""
+ """
+    Return True if the status is OK.
+    There is deliberately no isKo() method; 'if not res.isOk()' is more explicit and readable.
+ """
return (self._status == self.OK_STATUS)
def raiseIfKo(self):
import src # for __version__
import src.debug as DBG # Easy print stderr (for DEBUG only)
import src.returnCode as RCO # Easy (ok/ko, why) return methods code
+import src.utilsSat as UTS
from src.options import Options
import configManager as CFGMGR
env["PATH"] = rootdir + ":" + env["PATH"]
# TODO setLocale not 'fr' on subprocesses, why not?
# env["LANG"] == ''
- res = SP.Popen(command, shell=True, env=env, stdout=SP.PIPE, stderr=SP.PIPE).communicate()
+ p = SP.Popen(command, shell=True, env=env, stdout=SP.PIPE, stderr=SP.PIPE)
+  res = p.communicate()
return res
def setNotLocale():
self._logger = runner.logger
self._options = None
self._fullName = [] # example '[prepare','clean'] when micro command 'clean' of 'prepare'
+ self._idCommandHandlers = None # as logger.idCommandHandlers for logger handlers of current command
def initFullName(self, parentFullName=[]):
"""
cmdInstance.setConfig(config) # micro command config
cmdInstance.setOptions(options)
- logger.setFileHandler(cmdInstance)
-
+ logger.setFileHandlerForCommand(cmdInstance)
return cmdInstance
def run(self, cmd_arguments):
"""
    return RCO.ReturnCode("KO", "_BaseCommand.run() is abstract and must not be called directly")
+ def setIdCommandHandlers(self, idCommandHandlers):
+    """set the logger handlers id (an int > 0) for the current command instance"""
+ if self._idCommandHandlers is not None:
+      self._logger.error("idCommandHandlers already set for %s, overriding it" % self.getFullNameStr())
+ self._idCommandHandlers = idCommandHandlers
+
def setLogger(self, logger):
"""set logger for run command"""
if self._logger is not None:
cmdInstance.setConfig(config)
logger = self.getLogger()
- logger.setFileHandler(cmdInstance)
+ logger.setFileHandlerForCommand(cmdInstance)
# Run the main command using the remainders command arguments
strArgs = " ".join(commandArguments)
msg = "END main launch command %s on (%s)\n%s" % (self.nameCommandToLoad, strArgs, str(returnCode))
logger.step(msg)
+ logger.closeFileHandlerForCommand(cmdInstance)
+
return returnCode
def getCommandAndAppli(self, arguments):
| >> import src.system as SYSS
"""
-import subprocess
import os
import tarfile
+import subprocess as SP
+
+import utilsSat as UTS
import src.returnCode as RCO
def show_in_editor(editor, filePath, logger):
cmd = editor % filePath
msg = "show_in_editor command: '%s'" % cmd
logger.debug(msg)
- p = subprocess.Popen(cmd, shell=True)
+ p = SP.Popen(cmd, shell=True)
p.communicate()
return RCO.ReturnCode("OK", msg)
except:
:param logger: (Logger) The logger instance to use.
:param environment: (Environ)
The environment to source when extracting.
- :return: (bool) True if the extraction is successful
+    :return: (RCO.ReturnCode) OK if the extraction is successful
"""
if not where.exists():
where.make()
+ whe = str(where)
if tag == "master" or tag == "HEAD":
- command = "git clone %(remote)s %(where)s" % \
- { 'remote': from_what, 'tag': tag, 'where': str(where) }
+ command = "git clone %(rem)s %(whe)s" % {'rem': from_what, 'whe': whe}
else:
# NOTICE: this command only works with recent version of git
# because --work-tree does not work with an absolute path
- where_git = os.path.join( str(where), ".git" )
- command = "rmdir %(where)s && git clone %(remote)s %(where)s && " + \
- "git --git-dir=%(where_git)s --work-tree=%(where)s checkout %(tag)s"
- command = command % {'remote': from_what,
- 'tag': tag,
- 'where': str(where),
- 'where_git': where_git }
-
- logger.debug("git_extract \n" + command)
+ where_git = os.path.join(whe, ".git" )
+ command = r"""\
+rmdir %(whe)s && \
+git clone %(rem)s %(whe)s && \
+git --git-dir=%(whe_git)s --work-tree=%(whe)s checkout %(tag)s"""
+ command = command % {'rem': from_what, 'tag': tag, 'whe': whe, 'whe_git': where_git }
- logger.logTxtFile.write("\n" + command + "\n")
- logger.logTxtFile.flush()
- res = subprocess.call(command,
- cwd=str(where.dir()),
- env=environment.environ.environ,
- shell=True,
- stdout=logger.logTxtFile,
- stderr=subprocess.STDOUT)
- return (res == 0)
+ env = environment.environ.environ
+ res = UTS.Popen(command, cwd=str(where.dir()), env=env, shell=True, logger=logger)
+ return res
def archive_extract(from_what, where, logger):
"""Extracts sources from an archive.
logger.logTxtFile.write("\n" + command + "\n")
logger.logTxtFile.flush()
- res = subprocess.call(command,
+ res = SP.call(command,
cwd=str(where.dir()),
env=environment.environ.environ,
shell=True,
stdout=logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ stderr=SP.STDOUT)
return (res == 0)
def svn_extract(user,
logger.debug(command)
logger.logTxtFile.write("\n" + command + "\n")
logger.logTxtFile.flush()
- res = subprocess.call(command,
+ res = SP.call(command,
cwd=str(where.dir()),
env=environment.environ.environ,
shell=True,
stdout=logger.logTxtFile,
- stderr=subprocess.STDOUT)
+ stderr=SP.STDOUT)
return (res == 0)
import shutil
import string
import imp
-import subprocess
+import subprocess as SP
import src.pyconf as PYCONF
import src.returnCode as RCO
self.logger.debug("> %s" % cmd)
if ARCH.is_windows():
# preexec_fn not supported on windows platform
- res = subprocess.call(cmd,
+ res = SP.call(cmd,
cwd=os.path.join(self.tmp_working_dir, 'BASES'),
shell=True,
stdout=self.logger.logTxtFile,
- stderr=subprocess.PIPE)
+ stderr=SP.PIPE)
else:
- res = subprocess.call(cmd,
+ res = SP.call(cmd,
cwd=os.path.join(self.tmp_working_dir, 'BASES'),
shell=True,
preexec_fn=set_signal,
stdout=self.logger.logTxtFile,
- stderr=subprocess.PIPE)
+ stderr=SP.PIPE)
if res != 0:
msg = _("Unable to get test base '%s' from git '%s'.") % \
(testbase_name, testbase_base)
self.logger.debug("> %s" % cmd)
if ARCH.is_windows():
# preexec_fn not supported on windows platform
- res = subprocess.call(cmd,
+ res = SP.call(cmd,
cwd=os.path.join(self.tmp_working_dir, 'BASES'),
shell=True,
stdout=self.logger.logTxtFile,
- stderr=subprocess.PIPE)
+ stderr=SP.PIPE)
else:
- res = subprocess.call(cmd,
+ res = SP.call(cmd,
cwd=os.path.join(self.tmp_working_dir, 'BASES'),
shell=True,
preexec_fn=set_signal,
stdout=self.logger.logTxtFile,
- stderr=subprocess.PIPE,
+ stderr=SP.PIPE,
env=env_appli.environ.environ,)
if res != 0:
launcherDir = os.path.dirname(self.launcher)
if launcherName == 'runAppli':
# Old application
- cmd = ("for i in " + launcherDir + "/env.d/*.sh; do source ${i};"
- " done ; echo $KERNEL_ROOT_DIR")
+ cmd = "for i in %s/env.d/*.sh; do source ${i}; done ; echo $KERNEL_ROOT_DIR"
+ cmd = cmd % launcherDir
else:
- # New application
- cmd = ("echo -e 'import os\nprint os.environ[\"KERNEL_" +
- "ROOT_DIR\"]' > tmpscript.py; %s shell" +
- " tmpscript.py") % self.launcher
-
- subproc_res = subprocess.Popen(cmd,
- stdout=subprocess.PIPE,
- shell=True,
- executable='/bin/bash').communicate()
+      # New application TODO fix this horror
+ cmd = "echo -e 'import os\nprint os.environ[\"KERNEL_ROOT_DIR\"]' > tmpscript.py; %s shell tmpscript.py"
+ cmd = cmd % self.launcher
+
+ p = SP.Popen(cmd, stdout=SP.PIPE, shell=True, executable='/bin/bash')
+ subproc_res = p.communicate()
for resLine in subproc_res:
print "- '#%s#'" % resLine
# import grid salome_utils from KERNEL that gives
# the right getTmpDir function
- (file_, pathname, description) = imp.find_module("salome_utils",
- [os.path.join(root_dir,
- 'bin',
- 'salome')])
+ aDir = os.path.join(root_dir, 'bin', 'salome')
+ (file_, pathname, description) = imp.find_module("salome_utils", [aDir])
try:
- grid = imp.load_module("salome_utils",
- file_,
- pathname,
- description)
+ grid = imp.load_module("salome_utils", file_, pathname, description)
return grid.getLogDir
except:
- grid = imp.load_module("salome_utils",
- file_,
- pathname,
- description)
+ grid = imp.load_module("salome_utils", file_, pathname, description)
return grid.getTmpDir
finally:
if file_:
self.run_grid_tests()
def run_script(self, script_name):
- if ('APPLICATION' in self.config and
- script_name in self.config.APPLICATION):
+ if ('APPLICATION' in self.config and script_name in self.config.APPLICATION):
script = self.config.APPLICATION[script_name]
if len(script) == 0:
return
else:
self.logger.info("----------- start %s\n" % script_name)
self.logger.info("Run script: %s\n" % script)
- subprocess.Popen(script, shell=True).wait()
+ SP.Popen(script, shell=True).wait()
self.logger.info("----------- end %s\n" % script_name)
def run_all_tests(self):
import os
import string
-import subprocess
+import subprocess as SP
class SatTestError(Exception):
assert isinstance(options, list), "Bad options for mdump: %s" % options
assert len(options) == 3, "Bad options for mdump: %s" % options
cmd = "mdump %s %s" % (med_file, " ".join(options))
- #print cmd
-
- df = open(dump_file, "w")
- pdump = subprocess.Popen(cmd, shell=True, stdout=df)
- st = pdump.wait()
- df.close()
+ with open(dump_file, "w") as df:
+ pdump = SP.Popen(cmd, shell=True, stdout=df)
+ st = pdump.wait()
return st
def compMED(file1, file2, tol=0, diff_flags=""):
diff_cmd = "diff %s %s %s" % (diff_flags, dump1, dump2)
print " >" + diff_cmd
- pdiff = subprocess.Popen(diff_cmd, shell=True, stdout=subprocess.PIPE)
+ pdiff = SP.Popen(diff_cmd, shell=True, stdout=SP.PIPE)
status = pdiff.wait()
print " Diff =", status
if status != 0:
import re
import tempfile
+import subprocess as SP
import src.returnCode as RCO
import src.debug as DBG # Easy print stderr (for DEBUG only)
#if cmd not in notShownCommands:
if showLog:
# add a node to the hat.xml file
- xmlHat.add_simple_node("LogCommand",
- text=os.path.basename(filePath),
- attrib = {"date" : date,
- "hour" : hour,
- "cmd" : cmd,
- "application" : cmdAppli,
- "full_command" : full_cmd})
+ atts = {"date" : date, "hour" : hour, "cmd" : cmd, "application" : cmdAppli, "full_command" : full_cmd}
+ txt = os.path.basename(filePath)
+ xmlHat.add_simple_node_root("LogCommand", text=txt, attrib=atts)
# Write the file on the hard drive
xmlHat.write_tree('hat.xsl')
+
+
+##############################################################################
+# subprocess utilities, with logger functionalities (trace etc.)
+##############################################################################
+
+def Popen(command, shell=True, cwd=None, env=None, stdout=SP.PIPE, stderr=SP.PIPE, logger=None):
+  """Run subprocess.Popen(cmd), tracing the command with logger.trace and reporting problems with logger.warning/error"""
+ if logger is not None:
+ logger.trace("launch command cwd=%s:\n%s" % (cwd, command))
+
+ try:
+ proc = SP.Popen(command, shell=shell, cwd=cwd, env=env, stdout=stdout, stderr=stderr)
+ res_out, res_err = proc.communicate()
+
+ if logger is not None:
+ logger.trace("result command stdout:\n%s" % res_out)
+
+ if res_err == "":
+ return RCO.ReturnCode("OK", "command done", value=res_out)
+ else:
+ if logger is not None:
+ logger.warning("result command stderr:\n%s" % res_err)
+        return RCO.ReturnCode("KO", "command problem", value=res_err)
+ except Exception as e:
+    if logger is not None:
+      logger.error("launch command problem:\n%s" % str(e))
+ return RCO.ReturnCode("KO", "command problem")
+
+
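A minimal usage sketch of the Popen helper above (command, cwd and logger are hypothetical; logger is assumed to be a LoggerSat instance in scope):

    res = Popen("make install", shell=True, cwd="/tmp/build", logger=logger)
    if res.isOk():
        out = res.getValue()   # captured stdout of the command
    else:
        res.raiseIfKo()        # or return the ReturnCode to the caller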
+def generate_catalog(machines, config, logger):
+ """Generates the catalog from a list of machines."""
+ # remove empty machines
+ machines = map(lambda l: l.strip(), machines)
+ machines = filter(lambda l: len(l) > 0, machines)
+
+  logger.debug(" %s = %s" % (_("Generate Resources Catalog"), ", ".join(machines)))
+
+ cmd = '"cat /proc/cpuinfo | grep MHz ; cat /proc/meminfo | grep MemTotal"'
+ user = getpass.getuser()
+
+ msg = ""
+ machine = """\
+ <machine
+ protocol="ssh"
+ nbOfNodes="1"
+ mode="interactif"
+ OS="LINUX"
+ CPUFreqMHz="%s"
+ nbOfProcPerNode="%s"
+ memInMB="%s"
+ userName="%s"
+ name="%s"
+ hostname="%s"/>
+"""
+ for k in machines:
+    logger.info(" ssh %s " % (k + " ").ljust(20, '.'))
+
+ ssh_cmd = 'ssh -o "StrictHostKeyChecking no" %s %s' % (k, cmd)
+    res = UTS.Popen(ssh_cmd, shell=True, logger=logger)
+    if res.isOk():
+      lines = res.getValue().splitlines(True) # stdout of the ssh command, returned in the ReturnCode value
+ freq = lines[0][:-1].split(':')[-1].split('.')[0].strip()
+ nb_proc = len(lines) -1
+ memory = lines[-1].split(':')[-1].split()[0].strip()
+ memory = int(memory) / 1000
+ msg += machine % (freq, nb_proc, memory, user, k, k)
+
+ catfile = UTS.get_tmp_filename(config, "CatalogResources.xml")
+ with open(catfile, "w") as f:
+ f.write("""\
+<!DOCTYPE ResourcesCatalog>
+<resources>
+%s
+</resources>
+""" % msg)
+ return catfile
+
except Exception:
raise Exception("problem writing Xml log file: %s" % log_file_path)
- def add_simple_node(self, node_name, text=None, attrib={}):
- """Add a node with some attibutes and text to the root node.
+ def add_simple_node_root(self, node_name, text=None, attrib={}):
+ """Add a node with some attributes and text to the main root node.
:param node_name: (str) the name of the node to add
:param text: (str) the text of the node
"beginTime" : self.datehourToXml(cfg.VARS.datehour), #when command was launched
"application" : cfg.VARS.application, # The application if any
}
- self.add_simple_node("Site", attrib=atts)
+ self.add_simple_node_root("Site", attrib=atts)
# The initialization of the node Log
- self.add_simple_node("Log", text="Empty trace")
+ self.add_simple_node_root("Log", text="Empty trace")
# The system commands logs
- self.add_simple_node("OutLog", text=self.relPath(self.txtFile))
+ self.add_simple_node_root("OutLog", text=self.relPath(self.txtFile))
# The initialization of the node Links
# where to put the links to the other sat commands (micro commands)
# called by any first main command
- self.add_simple_node("Links")
+ self.add_simple_node_root("Links", text="No links")
def put_log_field(self, text):
"""
"""
Put all fields corresponding to the links context (micro commands)
- :param log_file_name: (str) The file name of the link.
- :param command_name: (str) The name of the command linked.
- :param command_res: (str) The result of the command linked. "0" or "1"
- :param full_launched_command: (str) The full lanch command ("sat command ...")
+ :param links: (list) The links as list of dict
+ {fileName, command, passed, launchedCommand}
+ :param fileName: (str) The file name of the link.
+ :param command: (str) The name of the command linked.
+ :param passed: (str) The result of the command linked. "0" or "1"
+ :param launchedCommand: (str) The full launch command ("sat command ...")
"""
xmlLinks = self.xmlroot.find("Links")
- for li in links:
- log_file_name, cmd_name, cmd_res, full_launched_cmd = li
- atts = {
- "command": cmd_name,
- "passed": cmd_res,
- "launchedCommand" : full_launched_cmd,
- }
- self.add_simple_node(xmlLinks, "link", text=log_file_name, attrib=atts)
+ if len(links) != 0:
+      xmlLinks.text = "" # erase the "No links" placeholder
+    for atts in links: # order matters (chronological)
+ # DBG.write("put_links_fields", atts)
+ add_simple_node(xmlLinks, "link", text=atts["fileName"], attrib=atts)
def put_final_fields(self, attribute):
"""
# utilities method
##############################################################################
def add_simple_node(root_node, node_name, text=None, attrib={}):
- """Add a node with some attibutes and text to the root node.
+ """Add a node with some attributes and text to the root node.
:param root_node: (ETREE.Element)
the Etree element where to add the new node