parser = src.options.Options()
parser.add_option('p', 'products', 'list2', 'products',
_('Optional: products to compile. This option accepts a comma separated list.'))
+parser.add_option('f', 'force', 'boolean', 'force',
+ 'Optional: force the compilation of product, even if it is already installed. The BUILD directory is cleaned before compilation.')
+parser.add_option('u', 'update', 'boolean', 'update',
+ 'Optional: update mode, compile only products which sources has changed, including the dependencies.')
parser.add_option('', 'with_fathers', 'boolean', 'fathers',
_("Optional: build all necessary products to the given product (KERNEL is "
"build before building GUI)."), False)
# from sat product infos, represent the product dependencies in a simple python graph
# keys are nodes, the list of dependencies are values
-def get_dependencies_graph(p_infos):
+def get_dependencies_graph(p_infos, compile_time=True):
graph={}
for (p_name,p_info) in p_infos:
- graph[p_name]=p_info.depend
+ depprod=[]
+ for d in p_info.depend:
+ depprod.append(d)
+ if compile_time and "build_depend" in p_info:
+ for d in p_info.build_depend:
+ depprod.append(d)
+ graph[p_name]=depprod
return graph
# this recursive function calculates all the dependencies of node start
path = path + [start]
if start in end:
return path
- if not graph.has_key(start):
+ if start not in graph:
return None
for node in graph[start]:
if node not in path:
# return in sorted_nodes the list of sorted nodes
def depth_first_topo_graph(graph, start, visited=[], sorted_nodes=[]):
visited = visited + [start]
+ if start not in graph:
+ raise src.SatException('Error in product dependencies : %s product is referenced in products dependencies but is not present in the application !' % start)
for node in graph[start]:
if node not in visited:
visited,sorted_nodes=depth_first_topo_graph(graph, node, visited,sorted_nodes)
else:
- assert node in sorted_nodes, 'Error : cycle detection for node %s and %s !' % (start,node)
+ if node not in sorted_nodes:
+ raise src.SatException('Error in product dependencies : cycle detection for node %s and %s !' % (start,node))
sorted_nodes = sorted_nodes + [start]
return visited,sorted_nodes
for prod in p_name_p_info[1]["depend_all"]:
# for each dependency, check the install
prod_name, prod_info=all_products_dict[prod]
- if not(src.product.check_installation(prod_info)):
+ if not(src.product.check_installation(config, prod_info)):
l_depends_not_installed.append(prod_name)
return l_depends_not_installed # non installed deps
logger.write("%s \n" % src.printcolors.printcError("KO"), 4)
logger.flush()
-def compile_all_products(sat, config, options, products_infos, all_products_dict, logger):
+def compile_all_products(sat, config, options, products_infos, all_products_dict, all_products_graph, logger):
'''Execute the proper configuration commands
in each product build directory.
:param products_info list: List of
(str, Config) => (product_name, product_info)
:param all_products_dict: Dict of all products
+ :param all_products_graph: graph of all products
:param logger Logger: The logger instance to use for the display and logging
:return: the number of failing commands.
:rtype: int
'''
+ # first loop for the cleaning
+ check_salome_configuration=False
+ updated_products=[]
+ for p_name_info in products_infos:
+
+ p_name, p_info = p_name_info
+ if src.product.product_is_salome(p_info):
+ check_salome_configuration=True
+
+ # nothing to clean for native or fixed products
+ if (not src.product.product_compiles(p_info)) or\
+ src.product.product_is_native(p_info) or\
+ src.product.product_is_fixed(p_info):
+ continue
+
+ # Clean the build and the install directories
+        # if the corresponding option was called
+ if options.clean_all:
+ sat.clean(config.VARS.application +
+ " --products " + p_name +
+ " --build --install",
+ batch=True,
+ verbose=0,
+ logger_add_link = logger)
+
+ else:
+            # Clean the install directory
+ # if the corresponding option was called
+ if options.clean_install:
+ sat.clean(config.VARS.application +
+ " --products " + p_name +
+ " --install",
+ batch=True,
+ verbose=0,
+ logger_add_link = logger)
+
+            # Clean the build directory
+            # if the force option was called
+ if options.force:
+ sat.clean(config.VARS.application +
+ " --products " + p_name +
+ " --build",
+ batch=True,
+ verbose=0,
+ logger_add_link = logger)
+
+ if options.update and src.product.product_is_vcs(p_info):
+ # only VCS products are concerned by update option
+ try:
+ do_update=False
+ if len(updated_products)>0:
+                # if other products were updated, check that the current product is a child
+ # in this case it will be also updated
+ if find_path_graph(all_products_graph, p_name, updated_products):
+ logger.write("\nUpdate product %s (child)" % p_name, 5)
+ do_update=True
+ if (not do_update) and os.path.isdir(p_info.source_dir) \
+ and os.path.isdir(p_info.install_dir):
+ source_time=os.path.getmtime(p_info.source_dir)
+ install_time=os.path.getmtime(p_info.install_dir)
+ if install_time<source_time:
+ logger.write("\nupdate product %s" % p_name, 5)
+ do_update=True
+ if do_update:
+ updated_products.append(p_name)
+ sat.clean(config.VARS.application +
+ " --products " + p_name +
+ " --build --install",
+ batch=True,
+ verbose=0,
+ logger_add_link = logger)
+ except:
+ pass
+
+ if check_salome_configuration:
+ # For salome applications, we check if the sources of configuration modules are present
+ # configuration modules have the property "configure_dependency"
+ # they are implicit prerequisites of the compilation.
+ res=0
+
+ # get the list of all modules in application
+ all_products_infos = src.product.get_products_infos(config.APPLICATION.products,
+ config)
+ check_source = True
+ # for configuration modules, check if sources are present
+ for prod in all_products_dict:
+ product_name, product_info = all_products_dict[prod]
+ if ("properties" in product_info and
+ "configure_dependency" in product_info.properties and
+ product_info.properties.configure_dependency == "yes"):
+ check_source = check_source and src.product.check_source(product_info)
+ if not check_source:
+ logger.write(_("\nERROR : SOURCES of %s not found! It is required for"
+ " the configuration\n" % product_name))
+ logger.write(_(" Get it with the command : sat prepare %s -p %s \n" %
+ (config.APPLICATION.name, product_name)))
+ res += 1
+ if res>0:
+ return res # error configure dependency : we stop the compilation
+
+ # second loop to compile
res = 0
for p_name_info in products_infos:
logger.write("\n", 3, False)
continue
- # Clean the build and the install directories
- # if the corresponding options was called
- if options.clean_all:
- log_step(logger, header, "CLEAN BUILD AND INSTALL ")
- sat.clean(config.VARS.application +
- " --products " + p_name +
- " --build --install",
- batch=True,
- verbose=0,
- logger_add_link = logger)
-
- # Clean the the install directory
- # if the corresponding option was called
- if options.clean_install and not options.clean_all:
- log_step(logger, header, "CLEAN INSTALL ")
- sat.clean(config.VARS.application +
- " --products " + p_name +
- " --install",
- batch=True,
- verbose=0,
- logger_add_link = logger)
-
# Recompute the product information to get the right install_dir
# (it could change if there is a clean of the install directory)
p_info = src.product.get_product_config(config, p_name)
res += 1 # one more error
continue
- if src.product.product_is_salome(p_info):
- # For salome modules, we check if the sources of configuration modules are present
- # configuration modules have the property "configure_dependency"
-
- # get the list of all modules in application
- all_products_infos = src.product.get_products_infos(config.APPLICATION.products,
- config)
- check_source = True
- # for configuration modules, check if sources are present
- for prod in all_products_dict:
- product_name, product_info = all_products_dict[prod]
- if ("properties" in product_info and
- "configure_dependency" in product_info.properties and
- product_info.properties.configure_dependency == "yes"):
- check_source = check_source and src.product.check_source(product_info)
- if not check_source:
- logger.write(_("\nERROR : SOURCES of %s not found! It is required for"
- " the configuration\n" % product_name))
- logger.write(_(" Get it with the command : sat prepare %s -p %s \n" %
- (config.APPLICATION.name, product_name)))
- if not check_source:
- # if at least one configuration module is not present, we stop compilation
- res += 1
- continue
-
- # Check if it was already successfully installed
- if src.product.check_installation(p_info):
+        # if we don't force compilation, check if the product was already successfully installed.
+ # we don't compile in this case.
+ if (not options.force) and src.product.check_installation(config, p_info):
logger.write(_("Already installed"))
logger.write(_(" in %s" % p_info.install_dir), 4)
logger.write(_("\n"))
if res_prod != 0:
res += 1
-
- if error_step != "CHECK":
+ # there was an error, we clean install dir, unless :
+ # - the error step is "check", or
+ # - the product is managed by pip and installed in python dir
+ do_not_clean_install=False
+ is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
+ src.product.product_test_property(p_info,"single_install_dir", "yes"))
+
+ if (error_step == "CHECK") or (is_pip and src.appli_test_property(config,"pip_install_dir", "python")) or is_single_dir :
+ # cases for which we do not want to remove install dir
+ # for is_single_dir and is_pip, the test to determine if the product is already
+ # compiled is based on configuration file, not the directory
+ do_not_clean_install=True
+
+ if not do_not_clean_install:
# Clean the install directory if there is any
logger.write(_(
"Cleaning the install directory if there is any\n"),
:return: 1 if it fails, else 0.
:rtype: int
'''
+ # pip needs openssl-dev. If openssl is declared in the application, we check it!
+ if "openssl" in config.APPLICATION.products:
+ openssl_cfg = src.product.get_product_config(config, "openssl")
+ if not src.product.check_installation(config, openssl_cfg):
+ raise src.SatException(_("please install system openssl development package, it is required for products managed by pip."))
+ # a) initialisation
p_name, p_info = p_name_info
-
- # Execute "sat configure", "sat make" and "sat install"
res = 0
error_step = ""
pip_install_in_python=False
pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
+ pip_install_cmd=config.INTERNAL.command.pip_install # parametrized in src/internal
+
+ # b) get the build environment (useful to get the installed python & pip3)
+ build_environ = src.environment.SalomeEnviron(config,
+ src.environment.Environ(dict(os.environ)),
+ True)
+ environ_info = src.product.get_product_dependencies(config,
+ p_name,
+ p_info)
+ build_environ.silent = (config.USER.output_verbose_level < 5)
+ build_environ.set_full_environ(logger, environ_info)
+
+    # c) download : check/get pip wheel in pip_wheels_dir
+ pip_download_cmd=config.INTERNAL.command.pip_download +\
+ " --destination-directory %s --no-deps %s==%s " %\
+ (pip_wheels_dir, p_info.name, p_info.version)
+ logger.write("\n"+pip_download_cmd+"\n", 4, False)
+ res_pip_dwl = (subprocess.call(pip_download_cmd,
+ shell=True,
+ cwd=config.LOCAL.workdir,
+ env=build_environ.environ.environ,
+ stdout=logger.logTxtFile,
+ stderr=subprocess.STDOUT) == 0)
+    # errors are not managed at this stage. They will be handled by pip install;
+    # here we just print a message
+ if not res_pip_dwl:
+ logger.write("Error in pip download\n", 4, False)
+
+
+    # d) install (in python or in separate product directory)
if src.appli_test_property(config,"pip_install_dir", "python"):
# pip will install product in python directory"
- pip_install_cmd="pip3 install --disable-pip-version-check --no-index --find-links=%s --build %s %s==%s" %\
+ pip_install_cmd+=" --find-links=%s --build %s %s==%s" %\
(pip_wheels_dir, p_info.build_dir, p_info.name, p_info.version)
pip_install_in_python=True
else:
# pip will install product in product install_dir
pip_install_dir=os.path.join(p_info.install_dir, "lib", "python${PYTHON_VERSION:0:3}", "site-packages")
- pip_install_cmd="pip3 install --disable-pip-version-check --no-index --find-links=%s --build %s --target %s %s==%s" %\
+ pip_install_cmd+=" --find-links=%s --build %s --target %s %s==%s" %\
(pip_wheels_dir, p_info.build_dir, pip_install_dir, p_info.name, p_info.version)
log_step(logger, header, "PIP")
logger.write("\n"+pip_install_cmd+"\n", 4)
len_end_line = len_end + 3
error_step = ""
- build_environ = src.environment.SalomeEnviron(config,
- src.environment.Environ(dict(os.environ)),
- True)
- environ_info = src.product.get_product_dependencies(config,
- p_info)
- build_environ.silent = (config.USER.output_verbose_level < 5)
- build_environ.set_full_environ(logger, environ_info)
-
- # useless - pip uninstall himself when wheel is alredy installed
- #if pip_install_in_python and (options.clean_install or options.clean_all):
- # # for products installed by pip inside python install dir
- # # finish the clean by uninstalling the product from python install dir
- # pip_clean_cmd="pip3 uninstall -y %s==%s" % (p_name, p_info.version)
- # res_pipclean = (subprocess.call(pip_clean_cmd,
- # shell=True,
- # cwd=config.LOCAL.workdir,
- # env=build_environ.environ.environ,
- # stdout=logger.logTxtFile,
- # stderr=subprocess.STDOUT) == 0)
- # if not res_pipclean:
- # logger.write("\n",1)
- # logger.warning("pip3 uninstall failed!")
res_pip = (subprocess.call(pip_install_cmd,
shell=True,
stderr=subprocess.STDOUT) == 0)
if res_pip:
res=0
- if pip_install_in_python:
- # when product is installed in python, create install_dir
- # (to put inside product info and mark the installation success)
- os.mkdir(p_info.install_dir)
else:
#log_res_step(logger, res)
res=1
error_step = "PIP"
+ logger.write("\nError in pip command, please consult details with sat log command's internal traces\n", 3)
return res, len_end_line, error_step
def run(args, runner, logger):
'''method that is called when salomeTools is called with compile parameter.
'''
- # DBG.write("compile runner.cfg", runner.cfg, True)
# Parse the options
(options, args) = parser.parse_args(args)
if rep.upper() != _("YES").upper():
return 0
+ if options.update and (options.clean_all or options.force or options.clean_install):
+ options.update=False # update is useless in this case
+
# check that the command has been called with an application
src.check_config_has_application( runner.cfg )
all_products_infos = src.product.get_products_infos(runner.cfg.APPLICATION.products,
runner.cfg)
all_products_graph=get_dependencies_graph(all_products_infos)
- logger.write("Dependency graph of all application products : %s\n" % all_products_graph, 6)
+ #logger.write("Dependency graph of all application products : %s\n" % all_products_graph, 6)
+ DBG.write("Dependency graph of all application products : ", all_products_graph)
# Get the list of products we have to compile
products_infos = src.product.get_products_list(options, runner.cfg, logger)
for n in all_products_graph:
if n not in visited_nodes:
visited_nodes,sorted_nodes=depth_first_topo_graph(all_products_graph, n, visited_nodes,sorted_nodes)
- logger.write("Complete depndency graph topological search (sorting): %s\n" % sorted_nodes, 6)
+ logger.write("Complete dependency graph topological search (sorting): %s\n" % sorted_nodes, 6)
+
+ # Create a dict of all products to facilitate products_infos sorting
+ all_products_dict={}
+ for (pname,pinfo) in all_products_infos:
+ all_products_dict[pname]=(pname,pinfo)
-# use the sorted list of all products to sort the list of products we have to compile
+ # Use the sorted list of all products to sort the list of products we have to compile
sorted_product_list=[]
+ product_list_runtime=[]
+ product_list_compiletime=[]
+
+ # store at beginning compile time products, we need to compile them before!
for n in sorted_nodes:
if n in products_list:
sorted_product_list.append(n)
logger.write("Sorted list of products to compile : %s\n" % sorted_product_list, 5)
-
# from the sorted list of products to compile, build a sorted list of products infos
- # a- create a dict to facilitate products_infos sorting
- all_products_dict={}
- for (pname,pinfo) in all_products_infos:
- all_products_dict[pname]=(pname,pinfo)
- # b- build a sorted list of products infos in products_infos
products_infos=[]
for product in sorted_product_list:
products_infos.append(all_products_dict[product])
# for all products to compile, store in "depend_all" field the complete dependencies (recursive)
- # (will be used by check_dependencies funvtion)
+ # (will be used by check_dependencies function)
for pi in products_infos:
dep_prod=[]
dep_prod=depth_search_graph(all_products_graph,pi[0], dep_prod)
# Call the function that will loop over all the products and execute
# the right command(s)
- res = compile_all_products(runner, runner.cfg, options, products_infos, all_products_dict, logger)
+ res = compile_all_products(runner, runner.cfg, options, products_infos, all_products_dict, all_products_graph, logger)
# Print the final state
nb_products = len(products_infos)