sat #18867 : for git base URLs: substitution of references by their value...
[tools/sat.git] / commands / compile.py
index 20afeb9ee29cdf6f139e0df92ac3adacf4dcf586..984363c0db1e9c51683e2a84a866b15b83807cb7 100644 (file)
 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
 
 import os
-
+import re
+import subprocess
 import src
+import src.debug as DBG
 
 # Compatibility python 2/3 for input function
 # input stays input for python 3 and input = raw_input for python 2
@@ -27,11 +29,15 @@ try:
 except NameError: 
     pass
 
+
 # Define all possible option for the compile command :  sat compile <options>
 parser = src.options.Options()
 parser.add_option('p', 'products', 'list2', 'products',
-    _('Optional: products to configure. This option can be'
-    ' passed several time to configure several products.'))
+    _('Optional: products to compile. This option accepts a comma-separated list.'))
+parser.add_option('f', 'force', 'boolean', 'force',
+    'Optional: force the compilation of the product, even if it is already installed. The BUILD directory is cleaned before compilation.')
+parser.add_option('u', 'update', 'boolean', 'update',
+    'Optional: update mode: compile only the products whose sources have changed, including their dependencies.')
 parser.add_option('', 'with_fathers', 'boolean', 'fathers',
     _("Optional: build all necessary products to the given product (KERNEL is "
       "build before building GUI)."), False)
@@ -51,211 +57,78 @@ parser.add_option('', 'show', 'boolean', 'no_compile',
 parser.add_option('', 'stop_first_fail', 'boolean', 'stop_first_fail', _(
                   "Optional: Stops the command at first product compilation"
                   " fail."), False)
-
-def get_products_list(options, cfg, logger):
-    '''method that gives the product list with their informations from 
-       configuration regarding the passed options.
-    
-    :param options Options: The Options instance that stores the commands 
-                            arguments
-    :param cfg Config: The global configuration
-    :param logger Logger: The logger instance to use for the display and 
-                          logging
-    :return: The list of (product name, product_informations).
-    :rtype: List
-    '''
-    # Get the products to be prepared, regarding the options
-    if options.products is None:
-        # No options, get all products sources
-        products = cfg.APPLICATION.products
-    else:
-        # if option --products, check that all products of the command line
-        # are present in the application.
-        products = options.products
-        for p in products:
-            if p not in cfg.APPLICATION.products:
-                raise src.SatException(_("Product %(product)s "
-                            "not defined in application %(application)s") %
-                        { 'product': p, 'application': cfg.VARS.application} )
-    
-    # Construct the list of tuple containing 
-    # the products name and their definition
-    products_infos = src.product.get_products_infos(products, cfg)
-    
-    products_infos = [pi for pi in products_infos if not(
-                                     src.product.product_is_fixed(pi[1]))]
-    
-    return products_infos
-
-def get_children(config, p_name_p_info):
-    l_res = []
-    p_name, __ = p_name_p_info
-    # Get all products of the application
-    products = config.APPLICATION.products
-    products_infos = src.product.get_products_infos(products, config)
-    for p_name_potential_child, p_info_potential_child in products_infos:
-        if ("depend" in p_info_potential_child and 
-                p_name in p_info_potential_child.depend):
-            l_res.append(p_name_potential_child)
-    return l_res
-
-def get_recursive_children(config, p_name_p_info, without_native_fixed=False):
-    """ Get the recursive list of the product that depend on 
-        the product defined by prod_info
-    
-    :param config Config: The global configuration
-    :param prod_info Config: The specific config of the product
-    :param without_native_fixed boolean: If true, do not include the fixed
-                                         or native products in the result
-    :return: The list of product_informations.
-    :rtype: List
-    """
-    p_name, __ = p_name_p_info
-    # Initialization of the resulting list
-    l_children = []
-    
-    # Get the direct children (not recursive)
-    l_direct_children = get_children(config, p_name_p_info)
-    # Minimal case : no child
-    if l_direct_children == []:
-        return []
-    # Add the children and call the function to get the children of the
-    # children
-    for child_name in l_direct_children:
-        l_children_name = [pn_pi[0] for pn_pi in l_children]
-        if child_name not in l_children_name:
-            if child_name not in config.APPLICATION.products:
-                msg = _("The product %(child_name)s that is in %(product_nam"
-                        "e)s children is not present in application "
-                        "%(appli_name)s" % {"child_name" : child_name, 
-                                    "product_name" : p_name.name, 
-                                    "appli_name" : config.VARS.application})
-                raise src.SatException(msg)
-            prod_info_child = src.product.get_product_config(config,
-                                                              child_name)
-            pname_pinfo_child = (prod_info_child.name, prod_info_child)
-            # Do not append the child if it is native or fixed and 
-            # the corresponding parameter is called
-            if without_native_fixed:
-                if not(src.product.product_is_native(prod_info_child) or 
-                       src.product.product_is_fixed(prod_info_child)):
-                    l_children.append(pname_pinfo_child)
-            else:
-                l_children.append(pname_pinfo_child)
-            # Get the children of the children
-            l_grand_children = get_recursive_children(config,
-                                pname_pinfo_child,
-                                without_native_fixed = without_native_fixed)
-            l_children += l_grand_children
-    return l_children
-
-def get_recursive_fathers(config, p_name_p_info, without_native_fixed=False):
-    """ Get the recursive list of the dependencies of the product defined by
-        prod_info
-    
-    :param config Config: The global configuration
-    :param prod_info Config: The specific config of the product
-    :param without_native_fixed boolean: If true, do not include the fixed
-                                         or native products in the result
-    :return: The list of product_informations.
-    :rtype: List
-    """
-    p_name, p_info = p_name_p_info
-    # Initialization of the resulting list
-    l_fathers = []
-    # Minimal case : no dependencies
-    if "depend" not in p_info or p_info.depend == []:
-        return []
-    # Add the dependencies and call the function to get the dependencies of the
-    # dependencies
-    for father_name in p_info.depend:
-        l_fathers_name = [pn_pi[0] for pn_pi in l_fathers]
-        if father_name not in l_fathers_name:
-            if father_name not in config.APPLICATION.products:
-                msg = _("The product %(father_name)s that is in %(product_nam"
-                        "e)s dependencies is not present in application "
-                        "%(appli_name)s" % {"father_name" : father_name, 
-                                    "product_name" : p_name, 
-                                    "appli_name" : config.VARS.application})
-                raise src.SatException(msg)
-            prod_info_father = src.product.get_product_config(config,
-                                                              father_name)
-            pname_pinfo_father = (prod_info_father.name, prod_info_father)
-            # Do not append the father if it is native or fixed and 
-            # the corresponding parameter is called
-            if without_native_fixed:
-                if not(src.product.product_is_native(prod_info_father) or 
-                       src.product.product_is_fixed(prod_info_father)):
-                    l_fathers.append(pname_pinfo_father)
-            else:
-                l_fathers.append(pname_pinfo_father)
-            # Get the dependencies of the dependency
-            l_grand_fathers = get_recursive_fathers(config,
-                                pname_pinfo_father,
-                                without_native_fixed = without_native_fixed)
-            for item in l_grand_fathers:
-                if item not in l_fathers:
-                    l_fathers.append(item)
-    return l_fathers
-
-def sort_products(config, p_infos):
-    """ Sort the p_infos regarding the dependencies between the products
+parser.add_option('', 'check', 'boolean', 'check', _(
+                  "Optional: execute the unit tests after compilation"), False)
+
+parser.add_option('', 'clean_build_after', 'boolean', 'clean_build_after', 
+                  _('Optional: remove the build directory after successful compilation'), False)
+
+
+# From sat product infos, represent the product dependencies as a simple Python graph:
+# keys are product names (nodes), values are the lists of their dependencies
+def get_dependencies_graph(p_infos, compile_time=True):
+    graph={}
+    for (p_name,p_info) in p_infos:
+        depprod=[]
+        for d in p_info.depend:
+            depprod.append(d)
+        if compile_time and "build_depend" in p_info:
+            for d in p_info.build_depend:
+                depprod.append(d)
+        graph[p_name]=depprod
+    return graph
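+
+# Hypothetical illustration (example product names, not from the source): if GUI
+# depends on KERNEL and KERNEL has no dependency, the returned graph is
+#   {"GUI": ["KERNEL"], "KERNEL": []}
+# (build_depend entries are appended as well when compile_time is True).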
+
+# This recursive function calculates all the recursive dependencies of node start
+def depth_search_graph(graph, start, visited=[]):
+    visited= visited+ [start]
+    for node in graph[start]:  # for all nodes in start dependencies
+        if node not in visited:
+            visited=depth_search_graph(graph, node, visited)
+    return visited
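+
+# Hypothetical illustration (example names): with graph
+#   {"GUI": ["KERNEL"], "KERNEL": []}
+# depth_search_graph(graph, "GUI") returns ["GUI", "KERNEL"], i.e. the start node
+# followed by all its recursive dependencies.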
+
+# Find a path from the start node to any node of end (a group of nodes)
+def find_path_graph(graph, start, end, path=[]):
+    path = path + [start]
+    if start in end:
+        return path
+    if start not in graph:
+        return None
+    for node in graph[start]:
+        if node not in path:
+            newpath = find_path_graph(graph, node, end, path)
+            if newpath: return newpath
+    return None
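+
+# Hypothetical illustration (example names): with graph
+#   {"GUI": ["KERNEL"], "KERNEL": []}
+# find_path_graph(graph, "GUI", ["KERNEL"]) returns ["GUI", "KERNEL"], while
+# find_path_graph(graph, "KERNEL", ["GUI"]) returns None (KERNEL does not depend on GUI).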
+
+# Topological sorting algorithm (depth first)
+# returns in sorted_nodes the sorted list of nodes, dependencies first
+def depth_first_topo_graph(graph, start, visited=[], sorted_nodes=[]):
+    visited = visited + [start]
+    if start not in graph:
+        raise src.SatException('Error in product dependencies: product %s is referenced in the product dependencies but is not present in the application!' % start)
+    for node in graph[start]:
+        if node not in visited:
+            visited,sorted_nodes=depth_first_topo_graph(graph, node, visited,sorted_nodes)
+        else:
+            if node not in sorted_nodes:
+                raise src.SatException('Error in product dependencies: cycle detected between nodes %s and %s!' % (start,node))
     
-    :param config Config: The global configuration
-    :param p_infos list: List of (str, Config) => (product_name, product_info)
-    """
-    l_prod_sorted = src.deepcopy_list(p_infos)
-    for prod in p_infos:
-        l_fathers = get_recursive_fathers(config,
-                                          prod,
-                                          without_native_fixed=True)
-        l_fathers = [father for father in l_fathers if father in p_infos]
-        if l_fathers == []:
-            continue
-        for p_sorted in l_prod_sorted:
-            if p_sorted in l_fathers:
-                l_fathers.remove(p_sorted)
-            if l_fathers==[]:
-                l_prod_sorted.remove(prod)
-                l_prod_sorted.insert(l_prod_sorted.index(p_sorted)+1, prod)
-                break
-        
-    return l_prod_sorted
-
-def extend_with_fathers(config, p_infos):
-    p_infos_res = src.deepcopy_list(p_infos)
-    for p_name_p_info in p_infos:
-        fathers = get_recursive_fathers(config,
-                                        p_name_p_info,
-                                        without_native_fixed=True)
-        for p_name_p_info_father in fathers:
-            if p_name_p_info_father not in p_infos_res:
-                p_infos_res.append(p_name_p_info_father)
-    return p_infos_res
-
-def extend_with_children(config, p_infos):
-    p_infos_res = src.deepcopy_list(p_infos)
-    for p_name_p_info in p_infos:
-        children = get_recursive_children(config,
-                                        p_name_p_info,
-                                        without_native_fixed=True)
-        for p_name_p_info_child in children:
-            if p_name_p_info_child not in p_infos_res:
-                p_infos_res.append(p_name_p_info_child)
-    return p_infos_res    
-
-def check_dependencies(config, p_name_p_info):
+    sorted_nodes = sorted_nodes + [start]
+    return visited,sorted_nodes
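+
+# Hypothetical illustration (example names): with graph
+#   {"GUI": ["KERNEL"], "KERNEL": []}
+# depth_first_topo_graph(graph, "GUI", [], []) returns
+#   (["GUI", "KERNEL"], ["KERNEL", "GUI"])
+# in sorted_nodes, dependencies appear before the products that need them.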
+
+
+# Check that all the dependencies of product p_name are installed
+def check_dependencies(config, p_name_p_info, all_products_dict):
     l_depends_not_installed = []
-    fathers = get_recursive_fathers(config, p_name_p_info, without_native_fixed=True)
-    for p_name_father, p_info_father in fathers:
-        if not(src.product.check_installation(p_info_father)):
-            l_depends_not_installed.append(p_name_father)
-    return l_depends_not_installed
+    for prod in p_name_p_info[1]["depend_all"]:
+        # for each dependency, check the install
+        prod_name, prod_info=all_products_dict[prod]
+        if not(src.product.check_installation(config, prod_info)):
+            l_depends_not_installed.append(prod_name)
+    return l_depends_not_installed   # not installed dependencies
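+
+# Note: the "depend_all" field read above is filled in run() with the complete
+# recursive dependencies computed by depth_search_graph.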
 
 def log_step(logger, header, step):
     logger.write("\r%s%s" % (header, " " * 30), 3)
     logger.write("\r%s%s" % (header, step), 3)
-    logger.write("\n==== %s \n" % src.printcolors.printcInfo(step), 4)
     logger.flush()
 
 def log_res_step(logger, res):
@@ -266,17 +139,120 @@ def log_res_step(logger, res):
         logger.write("%s \n" % src.printcolors.printcError("KO"), 4)
         logger.flush()
 
-def compile_all_products(sat, config, options, products_infos, logger):
+def compile_all_products(sat, config, options, products_infos, all_products_dict, all_products_graph, logger):
     '''Execute the proper configuration commands 
        in each product build directory.
 
     :param config Config: The global configuration
     :param products_info list: List of 
                                  (str, Config) => (product_name, product_info)
+    :param all_products_dict: Dict of all products 
+    :param all_products_graph: graph of all products 
     :param logger Logger: The logger instance to use for the display and logging
     :return: the number of failing commands.
     :rtype: int
     '''
+    # first loop for the cleaning 
+    check_salome_configuration=False
+    updated_products=[]
+    for p_name_info in products_infos:
+        
+        p_name, p_info = p_name_info
+        if src.product.product_is_salome(p_info):
+            check_salome_configuration=True
+        
+        # nothing to clean for products that do not compile, nor for native or fixed products
+        if (not src.product.product_compiles(p_info)) or\
+           src.product.product_is_native(p_info) or\
+           src.product.product_is_fixed(p_info):
+            continue
+
+        # Clean the build and the install directories
+        # if the corresponding option was called
+        if options.clean_all:
+            sat.clean(config.VARS.application + 
+                      " --products " + p_name + 
+                      " --build --install",
+                      batch=True,
+                      verbose=0,
+                      logger_add_link = logger)
+
+        else:
+            # Clean the install directory
+            # if the corresponding option was called
+            if options.clean_install:
+                sat.clean(config.VARS.application + 
+                          " --products " + p_name + 
+                          " --install",
+                          batch=True,
+                          verbose=0,
+                          logger_add_link = logger)
+            
+            # Clean the build directory
+            # if the --force option was called
+            if options.force:
+                sat.clean(config.VARS.application + 
+                          " --products " + p_name + 
+                          " --build",
+                          batch=True,
+                          verbose=0,
+                          logger_add_link = logger)
+
+            # only VCS products are concerned by the update option
+            if options.update and src.product.product_is_vcs(p_info):
+                try: 
+                    do_update=False
+                    if len(updated_products)>0:
+                        # if other products were updated, check whether the current product is a child;
+                        # in this case it will also be updated
+                        if find_path_graph(all_products_graph, p_name, updated_products):
+                            logger.write("\nUpdate product %s (child)" % p_name, 5)
+                            do_update=True
+                    if (not do_update) and os.path.isdir(p_info.source_dir) \
+                                       and os.path.isdir(p_info.install_dir):
+                        source_time=os.path.getmtime(p_info.source_dir)
+                        install_time=os.path.getmtime(p_info.install_dir)
+                        if install_time<source_time:
+                            logger.write("\nupdate product %s" % p_name, 5)
+                            do_update=True
+                    if do_update:
+                        updated_products.append(p_name) 
+                        sat.clean(config.VARS.application + 
+                                  " --products " + p_name + 
+                                  " --build --install",
+                                  batch=True,
+                                  verbose=0,
+                                  logger_add_link = logger)
+                except:
+                    pass
+
+    if check_salome_configuration:
+        # For salome applications, we check if the sources of configuration modules are present
+        # configuration modules have the property "configure_dependency"
+        # they are implicit prerequisites of the compilation.
+        res=0
+
+        # get the list of all modules in application 
+        all_products_infos = src.product.get_products_infos(config.APPLICATION.products,
+                                                            config)
+        check_source = True
+        # for configuration modules, check if sources are present
+        for prod in all_products_dict:
+            product_name, product_info = all_products_dict[prod]
+            if ("properties" in product_info and
+                "configure_dependency" in product_info.properties and
+                product_info.properties.configure_dependency == "yes"):
+                check_source = check_source and src.product.check_source(product_info)
+                if not check_source:
+                    logger.write(_("\nERROR : SOURCES of %s not found! It is required for" 
+                                   " the configuration\n" % product_name))
+                    logger.write(_("        Get it with the command : sat prepare %s -p %s \n" % 
+                                  (config.APPLICATION.name, product_name)))
+                    res += 1
+        if res>0:
+            return res  # configure dependency error: we stop the compilation
+
+    # second loop to compile
     res = 0
     for p_name_info in products_infos:
         
@@ -284,17 +260,13 @@ def compile_all_products(sat, config, options, products_infos, logger):
         
         # Logging
         len_end_line = 30
-        logger.write("\n", 4, False)
-        logger.write("################ ", 4)
         header = _("Compilation of %s") % src.printcolors.printcLabel(p_name)
         header += " %s " % ("." * (len_end_line - len(p_name)))
         logger.write(header, 3)
-        logger.write("\n", 4, False)
         logger.flush()
 
         # Do nothing if the product is not compilable
-        if ("properties" in p_info and "compilation" in p_info.properties and 
-                                            p_info.properties.compilation == "no"):
+        if not src.product.product_compiles(p_info):
             log_step(logger, header, "ignored")
             logger.write("\n", 3, False)
             continue
@@ -305,76 +277,102 @@ def compile_all_products(sat, config, options, products_infos, logger):
             logger.write("\n", 3, False)
             continue
 
-        # Clean the build and the install directories 
-        # if the corresponding options was called
-        if options.clean_all:
-            log_step(logger, header, "CLEAN BUILD AND INSTALL")
-            sat.clean(config.VARS.application + 
-                      " --products " + p_name + 
-                      " --build --install",
-                      batch=True,
-                      verbose=0,
-                      logger_add_link = logger)
+        # Do nothing if the product is fixed (already compiled by third party)
+        if src.product.product_is_fixed(p_info):
+            log_step(logger, header, "native")
+            logger.write("\n", 3, False)
+            continue
+
+
+        # Recompute the product information to get the right install_dir
+        # (it could change if there is a clean of the install directory)
+        p_info = src.product.get_product_config(config, p_name)
         
-        # Clean the the install directory 
-        # if the corresponding option was called
-        if options.clean_install and not options.clean_all:
-            log_step(logger, header, "CLEAN INSTALL")
-            sat.clean(config.VARS.application + 
-                      " --products " + p_name + 
-                      " --install",
-                      batch=True,
-                      verbose=0,
-                      logger_add_link = logger)
+        # Check if the sources were already successfully retrieved
+        check_source = src.product.check_source(p_info)
+        is_pip= (src.appli_test_property(config,"pip", "yes") and src.product.product_test_property(p_info,"pip", "yes"))
+        # don't check sources with option --show,
+        # or for products managed by pip (their sources are in wheels stored in LOCAL.ARCHIVE)
+        if not (options.no_compile or is_pip): 
+            if not check_source:
+                logger.write(_("Sources of product not found (try 'sat -h prepare') \n"))
+                res += 1 # one more error
+                continue
         
-        # Check if it was already successfully installed
-        if src.product.check_installation(p_info):
-            logger.write(_("Already installed\n"))
+        # if we don't force compilation, check whether the product was already successfully installed;
+        # we don't compile in this case
+        if (not options.force) and src.product.check_installation(config, p_info):
+            logger.write(_("Already installed"))
+            logger.write(_(" in %s" % p_info.install_dir), 4)
+            logger.write(_("\n"))
             continue
         
         # If the show option was called, do not launch the compilation
         if options.no_compile:
-            logger.write(_("Not installed\n"))
+            logger.write(_("Not installed in %s\n" % p_info.install_dir))
             continue
         
         # Check if the dependencies are installed
-        l_depends_not_installed = check_dependencies(config, p_name_info)
+        l_depends_not_installed = check_dependencies(config, p_name_info, all_products_dict)
         if len(l_depends_not_installed) > 0:
             log_step(logger, header, "")
             logger.write(src.printcolors.printcError(
-                    _("ERROR : the following product(s) is(are) mandatory: ")))
+                    _("ERROR : the following mandatory product(s) is(are) not installed: ")))
             for prod_name in l_depends_not_installed:
                 logger.write(src.printcolors.printcError(prod_name + " "))
             logger.write("\n")
             continue
         
         # Call the function to compile the product
-        res_prod, len_end_line, error_step = compile_product(sat,
-                                                             p_name_info,
-                                                             config,
-                                                             options,
-                                                             logger,
-                                                             header,
-                                                             len_end_line)
+        res_prod, len_end_line, error_step = compile_product(
+             sat, p_name_info, config, options, logger, header, len_end_line)
         
         if res_prod != 0:
-            # Clean the install directory if there is any
-            logger.write(_("Cleaning the install directory if there is any\n"),
-                         5)
-            sat.clean(config.VARS.application + 
-                      " --products " + p_name + 
-                      " --install",
-                      batch=True,
-                      verbose=0,
-                      logger_add_link = logger)
             res += 1
-            
+            # there was an error; we clean the install dir, unless:
+            #  - the error step is "CHECK", or
+            #  - the product is managed by pip and installed in the python dir, or
+            #  - the product is installed in a single common install dir (single_install_dir)
+            do_not_clean_install=False
+            is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
+                           src.product.product_test_property(p_info,"single_install_dir", "yes"))
+              
+            if (error_step == "CHECK") or (is_pip and src.appli_test_property(config,"pip_install_dir", "python")) or is_single_dir  :
+                # cases for which we do not want to remove the install dir:
+                #   for is_single_dir and is_pip, the test that determines whether the product is already
+                #   compiled is based on a configuration file, not on the directory
+                do_not_clean_install=True 
+
+            if not do_not_clean_install:
+                # Clean the install directory if there is any
+                logger.write(_(
+                            "Cleaning the install directory if there is any\n"),
+                             5)
+                sat.clean(config.VARS.application + 
+                          " --products " + p_name + 
+                          " --install",
+                          batch=True,
+                          verbose=0,
+                          logger_add_link = logger)
+        else:
+            # Clean the build directory if the compilation and tests succeed
+            if options.clean_build_after:
+                log_step(logger, header, "CLEAN BUILD")
+                sat.clean(config.VARS.application + 
+                          " --products " + p_name + 
+                          " --build",
+                          batch=True,
+                          verbose=0,
+                          logger_add_link = logger)
+
         # Log the result
         if res_prod > 0:
             logger.write("\r%s%s" % (header, " " * len_end_line), 3)
             logger.write("\r" + header + src.printcolors.printcError("KO ") + error_step)
             logger.write("\n==== %(KO)s in compile of %(name)s \n" %
                 { "name" : p_name , "KO" : src.printcolors.printcInfo("ERROR")}, 4)
+            if error_step == "CHECK":
+                logger.write(_("\nINSTALL directory = %s" % 
+                           src.printcolors.printcInfo(p_info.install_dir)), 3)
             logger.flush()
         else:
             logger.write("\r%s%s" % (header, " " * len_end_line), 3)
@@ -415,23 +413,36 @@ def compile_product(sat, p_name_info, config, options, logger, header, len_end):
     # build_sources : cmake     -> cmake, make, make install
     # build_sources : script    -> script executions
     res = 0
-    if (src.product.product_is_autotools(p_info) or 
-                                          src.product.product_is_cmake(p_info)):
-        res, len_end_line, error_step = compile_product_cmake_autotools(sat,
-                                                                  p_name_info,
-                                                                  config,
-                                                                  options,
-                                                                  logger,
-                                                                  header,
-                                                                  len_end)
-    if src.product.product_has_script(p_info):
-        res, len_end_line, error_step = compile_product_script(sat,
-                                                                  p_name_info,
-                                                                  config,
-                                                                  options,
-                                                                  logger,
-                                                                  header,
-                                                                  len_end)
+
+    
+    # check if pip should be used : the application and product have pip property
+    if (src.appli_test_property(config,"pip", "yes") and 
+       src.product.product_test_property(p_info,"pip", "yes")):
+            res, len_end_line, error_step = compile_product_pip(sat,
+                                                                p_name_info,
+                                                                config,
+                                                                options,
+                                                                logger,
+                                                                header,
+                                                                len_end)
+    else:
+        if (src.product.product_is_autotools(p_info) or 
+                                              src.product.product_is_cmake(p_info)):
+            res, len_end_line, error_step = compile_product_cmake_autotools(sat,
+                                                                      p_name_info,
+                                                                      config,
+                                                                      options,
+                                                                      logger,
+                                                                      header,
+                                                                      len_end)
+        if src.product.product_has_script(p_info):
+            res, len_end_line, error_step = compile_product_script(sat,
+                                                                   p_name_info,
+                                                                   config,
+                                                                   options,
+                                                                   logger,
+                                                                   header,
+                                                                   len_end)
 
     # Check that the install directory exists
     if res==0 and not(os.path.exists(p_info.install_dir)):
@@ -441,22 +452,121 @@ def compile_product(sat, p_name_info, config, options, logger, header, len_end):
                 " no install directory was found !")
         logger.write(src.printcolors.printcError(msg), 4)
         logger.write("\n", 4)
-        return res, len_end_line, error_step
+        return res, len_end, error_step
     
     # Add the config file corresponding to the dependencies/versions of the 
     # product that have been successfully compiled
     if res==0:       
         logger.write(_("Add the config file in installation directory\n"), 5)
-        add_compile_config_file(p_info, config)
-
-    # Do the unit tests (call the check command)
-    log_step(logger, header, "CHECK")
-    res += sat.check(config.VARS.application + " --products " + p_name,
-                             verbose = 0,
-                             logger_add_link = logger)
+        src.product.add_compile_config_file(p_info, config)
+        
+        if options.check:
+            # Do the unit tests (call the check command)
+            log_step(logger, header, "CHECK")
+            res_check = sat.check(
+                              config.VARS.application + " --products " + p_name,
+                              verbose = 0,
+                              logger_add_link = logger)
+            if res_check != 0:
+                error_step = "CHECK"
+                
+            res += res_check
     
     return res, len_end_line, error_step
 
+
+def compile_product_pip(sat,
+                        p_name_info,
+                        config,
+                        options,
+                        logger,
+                        header,
+                        len_end):
+    '''Execute the proper build procedure for pip products
+    :param p_name_info tuple: (str, Config) => (product_name, product_info)
+    :param config Config: The global configuration
+    :param logger Logger: The logger instance to use for the display 
+                          and logging
+    :param header Str: the header to display when logging
+    :param len_end Int: the length of the end of line (used in display)
+    :return: 1 if it fails, else 0.
+    :rtype: int
+    '''
+    # pip needs openssl-dev. If openssl is declared in the application, we check it!
+    if "openssl" in config.APPLICATION.products:
+        openssl_cfg = src.product.get_product_config(config, "openssl")
+        if not src.product.check_installation(config, openssl_cfg):
+            raise src.SatException(_("please install system openssl development package, it is required for products managed by pip."))
+    # a) initialisation
+    p_name, p_info = p_name_info
+    res = 0
+    error_step = ""
+    pip_install_in_python=False
+    pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
+    pip_install_cmd=config.INTERNAL.command.pip_install # parametrized in src/internal
+
+    # b) get the build environment (useful to get the installed python & pip3)
+    build_environ = src.environment.SalomeEnviron(config,
+                             src.environment.Environ(dict(os.environ)),
+                             True)
+    environ_info = src.product.get_product_dependencies(config,
+                                                        p_name,
+                                                        p_info)
+    build_environ.silent = (config.USER.output_verbose_level < 5)
+    build_environ.set_full_environ(logger, environ_info)
+
+    # c) download: check/get the pip wheel in pip_wheels_dir
+    pip_download_cmd=config.INTERNAL.command.pip_download +\
+                     " --destination-directory %s --no-deps %s==%s " %\
+                     (pip_wheels_dir, p_info.name, p_info.version)
+    logger.write("\n"+pip_download_cmd+"\n", 4, False) 
+    res_pip_dwl = (subprocess.call(pip_download_cmd, 
+                                   shell=True, 
+                                   cwd=config.LOCAL.workdir,
+                                   env=build_environ.environ.environ,
+                                   stdout=logger.logTxtFile, 
+                                   stderr=subprocess.STDOUT) == 0)
+    # the error is not handled at this stage; it will be handled by pip install.
+    # here we just print a message
+    if not res_pip_dwl:
+        logger.write("Error in pip download\n", 4, False)
+
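+    # Hypothetical illustration (assuming config.INTERNAL.command.pip_download
+    # expands to something like "pip download"): the command built above looks like
+    #   pip download --destination-directory <LOCAL.archive_dir>/wheels \
+    #                --no-deps <product_name>==<product_version>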
+
+    # d) install (in the python directory or in a separate product directory)
+    if src.appli_test_property(config,"pip_install_dir", "python"):
+        # pip will install product in python directory"
+        pip_install_cmd+=" --find-links=%s --build %s %s==%s" %\
+        (pip_wheels_dir, p_info.build_dir, p_info.name, p_info.version)
+        pip_install_in_python=True
+        
+    else: 
+        # pip will install the product in the product install_dir
+        pip_install_dir=os.path.join(p_info.install_dir, "lib", "python${PYTHON_VERSION:0:3}", "site-packages")
+        pip_install_cmd+=" --find-links=%s --build %s --target %s %s==%s" %\
+        (pip_wheels_dir, p_info.build_dir, pip_install_dir, p_info.name, p_info.version)
+    log_step(logger, header, "PIP")
+    logger.write("\n"+pip_install_cmd+"\n", 4)
+    len_end_line = len_end + 3
+    error_step = ""
+
+    res_pip = (subprocess.call(pip_install_cmd, 
+                               shell=True, 
+                               cwd=config.LOCAL.workdir,
+                               env=build_environ.environ.environ,
+                               stdout=logger.logTxtFile, 
+                               stderr=subprocess.STDOUT) == 0)        
+    if res_pip:
+        res=0
+    else:
+        #log_res_step(logger, res)
+        res=1
+        error_step = "PIP"
+        logger.write("\nError in pip command, please consult details with sat log command's internal traces\n", 3)
+
+    return res, len_end_line, error_step 
+
+
+
 def compile_product_cmake_autotools(sat,
                                     p_name_info,
                                     config,
@@ -569,27 +679,6 @@ def compile_product_script(sat,
               
     return res, len_end_line, error_step 
 
-def add_compile_config_file(p_info, config):
-    '''Execute the proper configuration command(s) 
-       in the product build directory.
-    
-    :param p_info Config: The specific config of the product
-    :param config Config: The global configuration
-    '''
-    # Create the compile config
-    compile_cfg = src.pyconf.Config()
-    for prod_name in p_info.depend:
-        if prod_name not in compile_cfg:
-            compile_cfg.addMapping(prod_name,
-                                   src.pyconf.Mapping(compile_cfg),
-                                   "")
-        prod_dep_info = src.product.get_product_config(config, prod_name, False)
-        compile_cfg[prod_name] = prod_dep_info.version
-    # Write it in the install directory of the product
-    compile_cfg_path = os.path.join(p_info.install_dir, src.CONFIG_FILENAME)
-    f = open(compile_cfg_path, 'w')
-    compile_cfg.__save__(f)
-    f.close()
     
 def description():
     '''method that is called when salomeTools is called with --help option.
@@ -604,7 +693,6 @@ def description():
 def run(args, runner, logger):
     '''method that is called when salomeTools is called with compile parameter.
     '''
-    
     # Parse the options
     (options, args) = parser.parse_args(args)
 
@@ -618,6 +706,9 @@ def run(args, runner, logger):
         if rep.upper() != _("YES").upper():
             return 0
         
+    if options.update and (options.clean_all or options.force or options.clean_install):
+        options.update=False  # update is useless in this case
+
     # check that the command has been called with an application
     src.check_config_has_application( runner.cfg )
 
@@ -635,24 +726,81 @@ def run(args, runner, logger):
             ]
     src.print_info(logger, info)
 
-    # Get the list of products to treat
-    products_infos = get_products_list(options, runner.cfg, logger)
+    # Get the list of all application products, and create its dependency graph
+    all_products_infos = src.product.get_products_infos(runner.cfg.APPLICATION.products,
+                                                        runner.cfg)
+    all_products_graph=get_dependencies_graph(all_products_infos)
+    #logger.write("Dependency graph of all application products : %s\n" % all_products_graph, 6)
+    DBG.write("Dependency graph of all application products : ", all_products_graph)
 
+    # Get the list of products we have to compile
+    products_infos = src.product.get_products_list(options, runner.cfg, logger)
+    products_list = [pi[0] for pi in products_infos]
+
+    logger.write("Product we have to compile (as specified by user) : %s\n" % products_list, 5)
     if options.fathers:
         # Extend the list with all recursive dependencies of the given products
-        products_infos = extend_with_fathers(runner.cfg, products_infos)
+        visited=[]
+        for p_name in products_list:
+            visited=depth_search_graph(all_products_graph, p_name, visited)
+        products_list = visited
 
+    logger.write("Product list to compile with fathers : %s\n" % products_list, 5)
     if options.children:
-        # Extend the list with all products that use the given products
-        products_infos = extend_with_children(runner.cfg, products_infos)
-
-    # Sort the list regarding the dependencies of the products
-    products_infos = sort_products(runner.cfg, products_infos)
-
+        # Extend the list with all products that depend upon the given products
+        children=[]
+        for n in all_products_graph:
+            # for all products (that are not in products_list):
+            # if we find a path from the product to the product list,
+            # then the product is a child and we add it to the children list
+            if (n not in children) and (n not in products_list):
+                if find_path_graph(all_products_graph, n, products_list):
+                    children = children + [n]
+        # complete products_list (the products we have to compile) with the list of children
+        products_list = products_list + children
+        logger.write("Product list to compile with children : %s\n" % products_list, 5)
+
+    # Sort the list of all products (topological sort).
+    # the products listed first do not depend upon products listed after
+    visited_nodes=[]
+    sorted_nodes=[]
+    for n in all_products_graph:
+        if n not in visited_nodes:
+            visited_nodes,sorted_nodes=depth_first_topo_graph(all_products_graph, n, visited_nodes,sorted_nodes)
+    logger.write("Complete dependency graph topological search (sorting): %s\n" % sorted_nodes, 6)
+
+    #  Create a dict of all products to facilitate products_infos sorting
+    all_products_dict={}
+    for (pname,pinfo) in all_products_infos:
+        all_products_dict[pname]=(pname,pinfo)
+
+    # Use the sorted list of all products to sort the list of products we have to compile
+    sorted_product_list=[]
+    product_list_runtime=[]
+    product_list_compiletime=[]
+
+    # keep, in topological order, only the products we have to compile (compile-time dependencies first)
+    for n in sorted_nodes:
+        if n in products_list:
+            sorted_product_list.append(n)
+    logger.write("Sorted list of products to compile : %s\n" % sorted_product_list, 5)
     
+    # from the sorted list of products to compile, build a sorted list of products infos
+    products_infos=[]
+    for product in sorted_product_list:
+        products_infos.append(all_products_dict[product])
+
+    # for all products to compile, store in "depend_all" field the complete dependencies (recursive) 
+    # (will be used by check_dependencies function)
+    for pi in products_infos:
+        dep_prod=[]
+        dep_prod=depth_search_graph(all_products_graph,pi[0], dep_prod)
+        pi[1]["depend_all"]=dep_prod[1:]
+        
+
     # Call the function that will loop over all the products and execute
     # the right command(s)
-    res = compile_all_products(runner, runner.cfg, options, products_infos, logger)
+    res = compile_all_products(runner, runner.cfg, options, products_infos, all_products_dict, all_products_graph, logger)
     
     # Print the final state
     nb_products = len(products_infos)
@@ -669,4 +817,4 @@ def run(args, runner, logger):
     code = res
     if code != 0:
         code = 1
-    return code
\ No newline at end of file
+    return code