_('Optional: products to compile. This option accepts a comma separated list.'))
parser.add_option('f', 'force', 'boolean', 'force',
'Optional: force the compilation of product, even if it is already installed. The BUILD directory is cleaned before compilation.')
+parser.add_option('u', 'update', 'boolean', 'update',
+ 'Optional: update mode, compile only products which sources has changed, including the dependencies.')
parser.add_option('', 'with_fathers', 'boolean', 'fathers',
_("Optional: build all necessary products to the given product (KERNEL is "
"build before building GUI)."), False)
# from sat product infos, represent the product dependencies in a simple python graph
# keys are nodes, the list of dependencies are values
-def get_dependencies_graph(p_infos):
+def get_dependencies_graph(p_infos, compile_time=True):
graph={}
for (p_name,p_info) in p_infos:
- graph[p_name]=p_info.depend
+ depprod=[]
+ for d in p_info.depend:
+ depprod.append(d)
+ if compile_time and "build_depend" in p_info:
+ for d in p_info.build_depend:
+ depprod.append(d)
+ graph[p_name]=depprod
return graph
# this recursive function calculates all the dependencies of node start
logger.write("%s \n" % src.printcolors.printcError("KO"), 4)
logger.flush()
-def compile_all_products(sat, config, options, products_infos, all_products_dict, logger):
+def compile_all_products(sat, config, options, products_infos, all_products_dict, all_products_graph, logger):
'''Execute the proper configuration commands
in each product build directory.
:param products_info list: List of
(str, Config) => (product_name, product_info)
:param all_products_dict: Dict of all products
+ :param all_products_graph: graph of all products
:param logger Logger: The logger instance to use for the display and logging
:return: the number of failing commands.
:rtype: int
'''
# first loop for the cleaning
check_salome_configuration=False
+ updated_products=[]
for p_name_info in products_infos:
p_name, p_info = p_name_info
+ if src.product.product_is_salome(p_info):
+ check_salome_configuration=True
# nothing to clean for native or fixed products
if (not src.product.product_compiles(p_info)) or\
verbose=0,
logger_add_link = logger)
+ else:
+            # Clean the install directory
+ # if the corresponding option was called
+ if options.clean_install:
+ sat.clean(config.VARS.application +
+ " --products " + p_name +
+ " --install",
+ batch=True,
+ verbose=0,
+ logger_add_link = logger)
+
+            # Clean the build directory
+ # if the corresponding option was called
+ if options.force:
+ sat.clean(config.VARS.application +
+ " --products " + p_name +
+ " --build",
+ batch=True,
+ verbose=0,
+ logger_add_link = logger)
- # Clean the the install directory
- # if the corresponding option was called
- if options.clean_install and not options.clean_all:
- sat.clean(config.VARS.application +
- " --products " + p_name +
- " --install",
- batch=True,
- verbose=0,
- logger_add_link = logger)
-
- # Clean the the install directory
- # if the corresponding option was called
- if options.force and not options.clean_all:
- sat.clean(config.VARS.application +
- " --products " + p_name +
- " --build",
- batch=True,
- verbose=0,
- logger_add_link = logger)
- if src.product.product_is_salome(p_info):
- check_salome_configuration=True
+ if options.update and src.product.product_is_vcs(p_info):
+ # only VCS products are concerned by update option
+ try:
+ do_update=False
+ if len(updated_products)>0:
+                    # if other products were updated, check whether the current product is a child
+                    # in this case it will also be updated
+ if find_path_graph(all_products_graph, p_name, updated_products):
+ logger.write("\nUpdate product %s (child)" % p_name, 5)
+ do_update=True
+ if (not do_update) and os.path.isdir(p_info.source_dir) \
+ and os.path.isdir(p_info.install_dir):
+ source_time=os.path.getmtime(p_info.source_dir)
+ install_time=os.path.getmtime(p_info.install_dir)
+ if install_time<source_time:
+ logger.write("\nupdate product %s" % p_name, 5)
+ do_update=True
+ if do_update:
+ updated_products.append(p_name)
+ sat.clean(config.VARS.application +
+ " --products " + p_name +
+ " --build --install",
+ batch=True,
+ verbose=0,
+ logger_add_link = logger)
+ except:
+ pass
if check_salome_configuration:
# For salome applications, we check if the sources of configuration modules are present
if rep.upper() != _("YES").upper():
return 0
+ if options.update and (options.clean_all or options.force or options.clean_install):
+ options.update=False # update is useless in this case
+
# check that the command has been called with an application
src.check_config_has_application( runner.cfg )
# store at beginning compile time products, we need to compile them before!
for n in sorted_nodes:
if n in products_list:
- if src.product.product_is_compile_time(all_products_dict[n][1]) or\
- src.product.product_is_compile_and_runtime(all_products_dict[n][1]):
- product_list_compiletime.append(n)
- else:
- product_list_runtime.append(n)
- sorted_product_list = product_list_compiletime + product_list_runtime
+ sorted_product_list.append(n)
logger.write("Sorted list of products to compile : %s\n" % sorted_product_list, 5)
# from the sorted list of products to compile, build a sorted list of products infos
products_infos.append(all_products_dict[product])
# for all products to compile, store in "depend_all" field the complete dependencies (recursive)
- # (will be used by check_dependencies funvtion)
+ # (will be used by check_dependencies function)
for pi in products_infos:
dep_prod=[]
dep_prod=depth_search_graph(all_products_graph,pi[0], dep_prod)
# Call the function that will loop over all the products and execute
# the right command(s)
- res = compile_all_products(runner, runner.cfg, options, products_infos, all_products_dict, logger)
+ res = compile_all_products(runner, runner.cfg, options, products_infos, all_products_dict, all_products_graph, logger)
# Print the final state
nb_products = len(products_infos)
if "opt_depend" in pinfo:
src.printcolors.print_value(logger, "optional", sorted(pinfo.opt_depend), 2)
+ if "build_depend" in pinfo:
+ src.printcolors.print_value(logger, "build depend on", sorted(pinfo.build_depend), 2)
+
+
# information on pyconf
logger.write("\n", 2)
logger.write(src.printcolors.printcLabel("configuration:") + "\n", 2)
try:
key=local_path+"->"+in_archive
if key not in already_added:
- tar.add(local_path, arcname=in_archive, exclude=f_exclude)
+ tar.add(local_path, arcname=in_archive, filter=f_exclude)
already_added.add(key)
logger.write(src.printcolors.printcSuccess(_("OK")), 3)
except Exception as e:
logger.write("\n", 3)
return success
-def exclude_VCS_and_extensions(filename):
+def exclude_VCS_and_extensions(tarinfo):
''' The function that is used to exclude from package the link to the
VCS repositories (like .git)
:param filename Str: The filname to exclude (or not).
- :return: True if the file has to be exclude
- :rtype: Boolean
+    :return: None if the file has to be excluded
+ :rtype: tarinfo or None
'''
+ filename = tarinfo.name
for dir_name in IGNORED_DIRS:
if dir_name in filename:
- return True
+ return None
for extension in IGNORED_EXTENSIONS:
if filename.endswith(extension):
- return True
- return False
+ return None
+ return tarinfo
def produce_relative_launcher(config,
logger,
local_path = prod_info.source_dir
tar_prod.add(local_path,
arcname=prod_name,
- exclude=exclude_VCS_and_extensions)
+ filter=exclude_VCS_and_extensions)
tar_prod.close()
return path_targz_prod
sat compile <application> --products med --force
+* Update mode, compile only git products whose sources have changed, including the dependencies.
+  The option is not implemented for svn and cvs, only for git.
+  One has to call sat prepare before, to check if git sources were modified.
+  The mechanism is based upon the git log -1 command, and the modification of the source directory date accordingly: ::
+
+    # update SALOME sources
+    ./sat prepare <application> --properties is_SALOME_module:yes
+
+    # only compile modules that have to be recompiled.
+    sat compile <application> --update
+
* Clean the build and install directories before starting compilation: ::
sat compile <application> --products GEOM --clean_all
sat prepare <application> --products <product1>,<product2> ...
+* Prepare only some modules with a given property: ::
+
+ # prepare only SALOME modules, not prerequisites
+ ./sat prepare <application> --properties is_SALOME_module:yes
+
* Use --force to force to prepare the products in development mode
(this will remove the sources and do a new clone/checkout): ::
self.cfg.APPLICATION.products,
self.cfg)
- all_products_graph=get_dependencies_graph(all_products_infos)
+ all_products_graph=get_dependencies_graph(all_products_infos, self.forBuild)
visited_nodes=[]
sorted_nodes=[]
for n in all_products_graph:
sorted_nodes)
self.sorted_product_list=sorted_nodes
- # store the list of compile time products
- # they should be added in build env
- compile_time_products=[]
- for (pname,pinfo) in all_products_infos:
- if src.product.product_is_compile_time(pinfo) or\
- src.product.product_is_compile_and_runtime(pinfo) :
- compile_time_products.append(pname)
- self.compile_time_products=compile_time_products
-
def append(self, key, value, sep=os.pathsep):
"""\
else:
self.cfg.APPLICATION.environ.PRODUCT_ROOT_DIR = src.pyconf.Reference(self.cfg, src.pyconf.DOLLAR, "workdir")
- # these sensitive variables are reset to avoid bad environment interactions
- self.add_line(1)
- self.add_comment("reset these sensitive variables to avoid bad environment interactions")
- self.add_comment("comment these to lines if you wish a different behaviour")
- if not src.architecture.is_windows():
- self.set("LD_LIBRARY_PATH", "")
- self.set("PYTHONPATH", "")
- self.add_line(1)
# Set the variables defined in the "environ" section
if 'environ' in self.cfg.APPLICATION:
self.set_a_product("Python", logger)
self.set_python_libdirs()
- # for a build environment, add compile time products (like cmake)
- if self.forBuild :
- for product in self.compile_time_products:
- if product == "Python":
- continue
- self.set_a_product(product, logger)
-
# The loop on the products
for product in self.sorted_product_list:
if product == "Python":
continue
- if self.forBuild and product in self.compile_time_products:
- continue
self.set_a_product(product, logger)
def set_full_environ(self, logger, env_info):
self.set_a_product("Python", logger)
self.set_python_libdirs()
- # for a build environment, add compile time products (like cmake)
- if self.forBuild :
- for product in self.compile_time_products:
- if product == "Python":
- continue
- self.set_a_product(product, logger)
-
# set products
for product in sorted_product_list:
if product == "Python":
continue
- if self.forBuild and product in self.compile_time_products:
- continue
self.set_a_product(product, logger)
class FileEnvWriter:
"mageia": "MG"
"mandriva": "MD"
"redhat": "CO"
+ "red hat": "CO"
"ubuntu": "UB"
"opensuse":"OS"
"Windows" : "W"
res.addMapping(p_info.name, src.pyconf.Mapping(res), "")
res[p_info.name]= p_info.version
- for prod_name in p_info.depend:
+ depprod=[]
+ for d in p_info.depend:
+ depprod.append(d)
+ if "build_depend" in p_info:
+ for d in p_info.build_depend:
+ depprod.append(d)
+ for prod_name in depprod:
if prod_name not in res:
res.addMapping(prod_name, src.pyconf.Mapping(res), "")
prod_dep_info = src.product.get_product_config(config, prod_name, False)
DBG.write("check_config_exists 000", (prod_dir, l_dir_and_files), verbose)
DBG.write("check_config_exists 111", prod_info, verbose)
+ depend_all=[]
+ if "depend" in prod_info:
+ for d in prod_info.depend:
+ depend_all.append(d)
+ if "build_depend" in prod_info:
+ for d in prod_info.build_depend:
+ depend_all.append(d)
for dir_or_file in l_dir_and_files:
oExpr = re.compile(config_expression)
if not(oExpr.search(dir_or_file)):
# dependencies of the product
config_corresponds = True
compile_cfg = src.pyconf.Config(config_file)
- for prod_dep in prod_info.depend:
+ for prod_dep in depend_all:
# if the dependency is not in the config,
# the config does not correspond
if prod_dep not in compile_cfg:
break
else:
# as old compatibility without prod_name sat-config.pyconf files
- if prod_name not in prod_info.depend:
+ if prod_name not in depend_all:
# here there is an unexpected depend in an old compilation
config_corresponds = False
break
:return: the list of products in dependence
:rtype: list
"""
- if "depend" not in product_info or product_info.depend == []:
+ depend_all=[]
+ if "depend" in product_info:
+ for d in product_info.depend:
+ depend_all.append(d)
+ if "build_depend" in product_info:
+ for d in product_info.build_depend:
+ depend_all.append(d)
+
+ if len(depend_all) == 0:
return []
+
res = []
- for prod in product_info.depend:
+ for prod in depend_all:
if prod == product_info.name:
continue
if prod not in res:
return True
install_dir = product_info.install_dir
- if ( (src.appli_test_property(config,"single_install_dir", "yes") and
- src.product.product_test_property(product_info,"single_install_dir", "yes")) or
- (src.appli_test_property(config,"pip", "yes") and
- src.product.product_test_property(product_info,"pip", "yes") and
- src.appli_test_property(config,"pip_install_dir", "python") ) ):
- # if the product is installed in the single install dir, or in python (for pip managed products)
- # we check the product file in state of the install directory.
- filename = CONFIG_FILENAME + product_info.name + ".pyconf"
- if not os.path.exists(os.path.join(install_dir, filename)):
+ if src.product.product_is_fixed(product_info):
+ # we check directly the install dir only for fixed products
+ # (there is no pyconf file in that case)
+ if not os.path.exists(install_dir):
return False
else:
- if not os.path.exists(install_dir):
+ filename = CONFIG_FILENAME + product_info.name + ".pyconf"
+ if not os.path.exists(os.path.join(install_dir, filename)):
return False
# check extra files if specified in present_files.install section
DBG.write("git_extract", [from_what, tag, str(where)])
if not where.exists():
where.make()
+ where_git = os.path.join(str(where), ".git")
if tag == "master" or tag == "HEAD":
if src.architecture.is_windows():
cmd = "git clone %(remote)s %(where)s"
cmd = r"""
set -x
git clone %(remote)s %(where)s
+touch -d "$(git --git-dir=%(where_git)s log -1 --format=date_format)" %(where)s
"""
- cmd = cmd % {'remote': from_what, 'tag': tag, 'where': str(where)}
+#git --git-dir=%(where_git)s log -1 --format=date_format > %(where)s/last_commit_date.txt
+ cmd = cmd % {'remote': from_what, 'tag': tag, 'where': str(where), 'where_git': where_git}
else:
# NOTICE: this command only works with recent version of git
# because --work-tree does not work with an absolute path
- where_git = os.path.join(str(where), ".git")
if src.architecture.is_windows():
cmd = "rmdir %(where)s && git clone %(remote)s %(where)s && git --git-dir=%(where_git)s --work-tree=%(where)s checkout %(tag)s"
else:
rmdir %(where)s
git clone %(remote)s %(where)s && \
git --git-dir=%(where_git)s --work-tree=%(where)s checkout %(tag)s
+touch -d "$(git --git-dir=%(where_git)s log -1 --format=date_format)" %(where)s
"""
cmd = cmd % {'remote': from_what,
'tag': tag,
'where_git': where_git}
+ cmd=cmd.replace('date_format','"%ai"')
logger.logTxtFile.write("\n" + cmd + "\n")
logger.logTxtFile.flush()