]> SALOME platform Git repositories - tools/sat.git/commitdiff
Salome HOME
Merge branch 'nct/july20' 5.6.1
authorcrouzet <nicolas.crouzet@cea.fr>
Fri, 25 Sep 2020 08:25:00 +0000 (10:25 +0200)
committercrouzet <nicolas.crouzet@cea.fr>
Fri, 25 Sep 2020 08:25:00 +0000 (10:25 +0200)
commands/compile.py
commands/config.py
commands/package.py
doc/src/commands/compile.rst
doc/src/commands/prepare.rst
src/environment.py
src/internal_config/distrib.pyconf
src/product.py
src/system.py

index 4d3a35c7b17238a461d92e989ca19432b52c9463..cd5b6e633d38fd6b36ec22f724ba0497fefd1106 100644 (file)
@@ -36,6 +36,8 @@ parser.add_option('p', 'products', 'list2', 'products',
     _('Optional: products to compile. This option accepts a comma separated list.'))
 parser.add_option('f', 'force', 'boolean', 'force',
     'Optional: force the compilation of product, even if it is already installed. The BUILD directory is cleaned before compilation.')
+parser.add_option('u', 'update', 'boolean', 'update',
+    'Optional: update mode, compile only products whose sources have changed, including the dependencies.')
 parser.add_option('', 'with_fathers', 'boolean', 'fathers',
     _("Optional: build all necessary products to the given product (KERNEL is "
       "build before building GUI)."), False)
@@ -64,10 +66,16 @@ parser.add_option('', 'clean_build_after', 'boolean', 'clean_build_after',
 
 # from sat product infos, represent the product dependencies in a simple python graph
 # keys are nodes, the list of dependencies are values
-def get_dependencies_graph(p_infos):
+def get_dependencies_graph(p_infos, compile_time=True):
     graph={}
     for (p_name,p_info) in p_infos:
-        graph[p_name]=p_info.depend
+        depprod=[]
+        for d in p_info.depend:
+            depprod.append(d)
+        if compile_time and "build_depend" in p_info:
+            for d in p_info.build_depend:
+                depprod.append(d)
+        graph[p_name]=depprod
     return graph
 
 # this recursive function calculates all the dependencies of node start
@@ -131,7 +139,7 @@ def log_res_step(logger, res):
         logger.write("%s \n" % src.printcolors.printcError("KO"), 4)
         logger.flush()
 
-def compile_all_products(sat, config, options, products_infos, all_products_dict, logger):
+def compile_all_products(sat, config, options, products_infos, all_products_dict, all_products_graph, logger):
     '''Execute the proper configuration commands 
        in each product build directory.
 
@@ -139,15 +147,19 @@ def compile_all_products(sat, config, options, products_infos, all_products_dict
     :param products_info list: List of 
                                  (str, Config) => (product_name, product_info)
     :param all_products_dict: Dict of all products 
+    :param all_products_graph: graph of all products 
     :param logger Logger: The logger instance to use for the display and logging
     :return: the number of failing commands.
     :rtype: int
     '''
     # first loop for the cleaning 
     check_salome_configuration=False
+    updated_products=[]
     for p_name_info in products_infos:
         
         p_name, p_info = p_name_info
+        if src.product.product_is_salome(p_info):
+            check_salome_configuration=True
         
         # nothing to clean for native or fixed products
         if (not src.product.product_compiles(p_info)) or\
@@ -165,28 +177,54 @@ def compile_all_products(sat, config, options, products_infos, all_products_dict
                       verbose=0,
                       logger_add_link = logger)
 
+        else:
+            # Clean the install directory 
+            # if the corresponding option was called
+            if options.clean_install:
+                sat.clean(config.VARS.application + 
+                          " --products " + p_name + 
+                          " --install",
+                          batch=True,
+                          verbose=0,
+                          logger_add_link = logger)
+            
+            # Clean the build directory 
+            # if the corresponding option was called
+            if options.force:
+                sat.clean(config.VARS.application + 
+                          " --products " + p_name + 
+                          " --build",
+                          batch=True,
+                          verbose=0,
+                          logger_add_link = logger)
 
-        # Clean the the install directory 
-        # if the corresponding option was called
-        if options.clean_install and not options.clean_all:
-            sat.clean(config.VARS.application + 
-                      " --products " + p_name + 
-                      " --install",
-                      batch=True,
-                      verbose=0,
-                      logger_add_link = logger)
-        
-        # Clean the the install directory 
-        # if the corresponding option was called
-        if options.force and not options.clean_all:
-            sat.clean(config.VARS.application + 
-                      " --products " + p_name + 
-                      " --build",
-                      batch=True,
-                      verbose=0,
-                      logger_add_link = logger)
-        if src.product.product_is_salome(p_info):
-            check_salome_configuration=True
+            if options.update and src.product.product_is_vcs(p_info):
+            # only VCS products are concerned by update option
+                try: 
+                    do_update=False
+                    if len(updated_products)>0:
+                        # if other products were updated, check that the current product is a child 
+                        # in this case it will be also updated
+                        if find_path_graph(all_products_graph, p_name, updated_products):
+                            logger.write("\nUpdate product %s (child)" % p_name, 5)
+                            do_update=True
+                    if (not do_update) and os.path.isdir(p_info.source_dir) \
+                                       and os.path.isdir(p_info.install_dir):
+                        source_time=os.path.getmtime(p_info.source_dir)
+                        install_time=os.path.getmtime(p_info.install_dir)
+                        if install_time<source_time:
+                            logger.write("\nupdate product %s" % p_name, 5)
+                            do_update=True
+                    if do_update:
+                        updated_products.append(p_name) 
+                        sat.clean(config.VARS.application + 
+                                  " --products " + p_name + 
+                                  " --build --install",
+                                  batch=True,
+                                  verbose=0,
+                                  logger_add_link = logger)
+                except:
+                    pass
 
     if check_salome_configuration:
         # For salome applications, we check if the sources of configuration modules are present
@@ -667,6 +705,9 @@ def run(args, runner, logger):
         if rep.upper() != _("YES").upper():
             return 0
         
+    if options.update and (options.clean_all or options.force or options.clean_install):
+        options.update=False  # update is useless in this case
+
     # check that the command has been called with an application
     src.check_config_has_application( runner.cfg )
 
@@ -740,12 +781,7 @@ def run(args, runner, logger):
     # store at beginning compile time products, we need to compile them before!
     for n in sorted_nodes:
         if n in products_list:
-            if src.product.product_is_compile_time(all_products_dict[n][1]) or\
-               src.product.product_is_compile_and_runtime(all_products_dict[n][1]):
-                product_list_compiletime.append(n)
-            else:
-                product_list_runtime.append(n)
-    sorted_product_list = product_list_compiletime + product_list_runtime
+            sorted_product_list.append(n)
     logger.write("Sorted list of products to compile : %s\n" % sorted_product_list, 5)
     
     # from the sorted list of products to compile, build a sorted list of products infos
@@ -754,7 +790,7 @@ def run(args, runner, logger):
         products_infos.append(all_products_dict[product])
 
     # for all products to compile, store in "depend_all" field the complete dependencies (recursive) 
-    # (will be used by check_dependencies funvtion)
+    # (will be used by check_dependencies function)
     for pi in products_infos:
         dep_prod=[]
         dep_prod=depth_search_graph(all_products_graph,pi[0], dep_prod)
@@ -763,7 +799,7 @@ def run(args, runner, logger):
 
     # Call the function that will loop over all the products and execute
     # the right command(s)
-    res = compile_all_products(runner, runner.cfg, options, products_infos, all_products_dict, logger)
+    res = compile_all_products(runner, runner.cfg, options, products_infos, all_products_dict, all_products_graph, logger)
     
     # Print the final state
     nb_products = len(products_infos)
index f5e539c2a0223af6fbb13a034335087a372d4643..abba1a8a14c610d8745373080d82a24f28e86711 100644 (file)
@@ -649,6 +649,10 @@ def show_product_info(config, name, logger):
     if "opt_depend" in pinfo:
         src.printcolors.print_value(logger, "optional", sorted(pinfo.opt_depend), 2)
 
+    if "build_depend" in pinfo:
+        src.printcolors.print_value(logger, "build depend on", sorted(pinfo.build_depend), 2)
+
+
     # information on pyconf
     logger.write("\n", 2)
     logger.write(src.printcolors.printcLabel("configuration:") + "\n", 2)
index ff289b173900f05d927b5861c45fef71dd887b77..3446730e4e0bb45d4b917945bee630a73b78ba73 100644 (file)
@@ -152,7 +152,7 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
         try:
             key=local_path+"->"+in_archive
             if key not in already_added:
-                tar.add(local_path, arcname=in_archive, exclude=f_exclude)
+                tar.add(local_path, arcname=in_archive, filter=f_exclude)
                 already_added.add(key)
             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
         except Exception as e:
@@ -162,21 +162,22 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
         logger.write("\n", 3)
     return success
 
-def exclude_VCS_and_extensions(filename):
+def exclude_VCS_and_extensions(tarinfo):
     ''' The function that is used to exclude from package the link to the 
         VCS repositories (like .git)
 
     :param filename Str: The filname to exclude (or not).
-    :return: True if the file has to be exclude
-    :rtype: Boolean
+    :return: None if the file has to be excluded
+    :rtype: tarinfo or None
     '''
+    filename = tarinfo.name
     for dir_name in IGNORED_DIRS:
         if dir_name in filename:
-            return True
+            return None
     for extension in IGNORED_EXTENSIONS:
         if filename.endswith(extension):
-            return True
-    return False
+            return None
+    return tarinfo
 
 def produce_relative_launcher(config,
                               logger,
@@ -945,7 +946,7 @@ def make_archive(prod_name, prod_info, where):
     local_path = prod_info.source_dir
     tar_prod.add(local_path,
                  arcname=prod_name,
-                 exclude=exclude_VCS_and_extensions)
+                 filter=exclude_VCS_and_extensions)
     tar_prod.close()
     return path_targz_prod       
 
index b646fe01eb659a7d9f4212795151e36c5cd9bced..cd434759a51525458a4cb666902da2527644e8ba 100644 (file)
@@ -35,6 +35,17 @@ Usage
   
     sat compile <application> --products med --force
 
+* Update mode, compile only git products whose source has changed, including the dependencies.
+  The option is not implemented for svn and cvs, only for git.
+  One has to call sat prepare before, to check if git sources were modified.
+  The mechanism is based upon the git log -1 command, and the modification of the source directory date accordingly: ::
+  
+    # update SALOME sources
+    ./sat prepare <application> --properties  is_SALOME_module:yes
+
+    # only compile modules that have to be recompiled.
+    sat compile <application> --update
+
 * Clean the build and install directories before starting compilation: ::
 
     sat compile <application> --products GEOM  --clean_all
index ad2c871a5da0d8d0d5807683ec695527bbc7da29..5ea8eba528b1264d08524d18ab2c3b6adbb1602a 100644 (file)
@@ -69,6 +69,11 @@ Usage
 
     sat prepare <application>  --products <product1>,<product2> ...
 
+* Prepare only some modules with a given property: ::
+
+    # prepare only SALOME modules, not prerequisites
+    ./sat prepare <application> --properties  is_SALOME_module:yes
+
 * Use --force to force to prepare the products in development mode 
   (this will remove the sources and do a new clone/checkout): ::
 
index 62943389339565b15113bf4f9259be26f1db232f..875c4a5035d95bc6133488da3ec3ec4ed61b5144 100644 (file)
@@ -234,7 +234,7 @@ class SalomeEnviron:
                                  self.cfg.APPLICATION.products,
                                  self.cfg)
         
-        all_products_graph=get_dependencies_graph(all_products_infos)
+        all_products_graph=get_dependencies_graph(all_products_infos, self.forBuild)
         visited_nodes=[]
         sorted_nodes=[]
         for n in all_products_graph:
@@ -246,15 +246,6 @@ class SalomeEnviron:
                                                sorted_nodes)
         self.sorted_product_list=sorted_nodes
 
-        # store the list of compile time products
-        # they should be added in build env
-        compile_time_products=[]
-        for (pname,pinfo) in all_products_infos:
-           if src.product.product_is_compile_time(pinfo) or\
-              src.product.product_is_compile_and_runtime(pinfo) :
-               compile_time_products.append(pname)
-        self.compile_time_products=compile_time_products
-
 
     def append(self, key, value, sep=os.pathsep):
         """\
@@ -397,14 +388,6 @@ class SalomeEnviron:
         else:
            self.cfg.APPLICATION.environ.PRODUCT_ROOT_DIR = src.pyconf.Reference(self.cfg, src.pyconf.DOLLAR, "workdir")
 
-        # these sensitive variables are reset to avoid bad environment interactions
-        self.add_line(1)
-        self.add_comment("reset these sensitive variables to avoid bad environment interactions")
-        self.add_comment("comment these to lines if you wish a different behaviour")
-        if not src.architecture.is_windows():
-           self.set("LD_LIBRARY_PATH", "")
-        self.set("PYTHONPATH", "")
-        self.add_line(1)
 
         # Set the variables defined in the "environ" section
         if 'environ' in self.cfg.APPLICATION:
@@ -763,19 +746,10 @@ class SalomeEnviron:
             self.set_a_product("Python", logger)
             self.set_python_libdirs()
 
-        # for a build environment, add compile time products (like cmake)
-        if self.forBuild :
-            for product in self.compile_time_products:
-                if product == "Python":
-                    continue
-                self.set_a_product(product, logger)
-
         # The loop on the products
         for product in self.sorted_product_list:
             if product == "Python":
                 continue
-            if self.forBuild and product in self.compile_time_products:
-                continue
             self.set_a_product(product, logger)
  
     def set_full_environ(self, logger, env_info):
@@ -802,19 +776,10 @@ class SalomeEnviron:
             self.set_a_product("Python", logger)
             self.set_python_libdirs()
 
-        # for a build environment, add compile time products (like cmake)
-        if self.forBuild :
-            for product in self.compile_time_products:
-                if product == "Python":
-                    continue
-                self.set_a_product(product, logger)
-
         # set products
         for product in sorted_product_list:
             if product == "Python":
                 continue
-            if self.forBuild and product in self.compile_time_products:
-                continue
             self.set_a_product(product, logger)
 
 class FileEnvWriter:
index f3a01a0df3018f369d7328653b95cee915c560f9..ba9ae2d6af348c474401c4c74f6a76b2241ad0b5 100644 (file)
@@ -13,6 +13,7 @@ DISTRIBUTIONS :
   "mageia": "MG"
   "mandriva": "MD"
   "redhat": "CO"
+  "red hat": "CO"
   "ubuntu": "UB"
   "opensuse":"OS"
   "Windows" : "W"
index e0bc0871409854b1c79292d54252600fd7f06b82..7d73a505bd31fe727895c647cc6a1ad210372fd6 100644 (file)
@@ -608,7 +608,13 @@ def add_compile_config_file(p_info, config):
     res.addMapping(p_info.name, src.pyconf.Mapping(res), "")
     res[p_info.name]= p_info.version
 
-    for prod_name in p_info.depend:
+    depprod=[]
+    for d in p_info.depend:
+        depprod.append(d)
+    if "build_depend" in p_info:
+        for d in p_info.build_depend:
+            depprod.append(d)
+    for prod_name in depprod:
       if prod_name not in res:
         res.addMapping(prod_name, src.pyconf.Mapping(res), "")
       prod_dep_info = src.product.get_product_config(config, prod_name, False)
@@ -658,6 +664,13 @@ def check_config_exists(config, prod_dir, prod_info, verbose=False):
     DBG.write("check_config_exists 000",  (prod_dir, l_dir_and_files), verbose)
     DBG.write("check_config_exists 111",  prod_info, verbose)
 
+    depend_all=[]
+    if "depend" in prod_info:
+        for d in prod_info.depend:
+            depend_all.append(d)
+    if "build_depend" in prod_info:
+        for d in prod_info.build_depend:
+            depend_all.append(d)
     for dir_or_file in l_dir_and_files:
         oExpr = re.compile(config_expression)
         if not(oExpr.search(dir_or_file)):
@@ -676,7 +689,7 @@ def check_config_exists(config, prod_dir, prod_info, verbose=False):
         # dependencies of the product
         config_corresponds = True    
         compile_cfg = src.pyconf.Config(config_file)
-        for prod_dep in prod_info.depend:
+        for prod_dep in depend_all:
             # if the dependency is not in the config, 
             # the config does not correspond
             if prod_dep not in compile_cfg:
@@ -702,7 +715,7 @@ def check_config_exists(config, prod_dir, prod_info, verbose=False):
                 break
             else:
               # as old compatibility without prod_name sat-config.pyconf files
-              if prod_name not in prod_info.depend:
+              if prod_name not in depend_all:
                 # here there is an unexpected depend in an old compilation
                 config_corresponds = False
                 break
@@ -808,10 +821,19 @@ def get_product_dependencies(config, product_info):
     :return: the list of products in dependence
     :rtype: list
     """
-    if "depend" not in product_info or product_info.depend == []:
+    depend_all=[]
+    if "depend" in product_info:
+        for d in product_info.depend:
+            depend_all.append(d)
+    if "build_depend" in product_info:
+        for d in product_info.build_depend:
+            depend_all.append(d)
+
+    if len(depend_all) == 0:
         return []
+
     res = []
-    for prod in product_info.depend:
+    for prod in depend_all:
         if prod == product_info.name:
             continue
         if prod not in res:
@@ -857,18 +879,14 @@ def check_installation(config, product_info):
             return True    
 
     install_dir = product_info.install_dir
-    if ( (src.appli_test_property(config,"single_install_dir", "yes") and 
-          src.product.product_test_property(product_info,"single_install_dir", "yes")) or
-         (src.appli_test_property(config,"pip", "yes") and 
-          src.product.product_test_property(product_info,"pip", "yes") and
-          src.appli_test_property(config,"pip_install_dir", "python") ) ):
-        # if the product is installed in the single install dir, or in python (for pip managed products)
-        # we check the product file in state of the install directory.
-        filename = CONFIG_FILENAME + product_info.name + ".pyconf"
-        if not os.path.exists(os.path.join(install_dir, filename)): 
+    if src.product.product_is_fixed(product_info):
+        # we check directly the install dir only for fixed products
+        # (there is no pyconf file in that case)
+        if not os.path.exists(install_dir):
             return False
     else:
-        if not os.path.exists(install_dir):
+        filename = CONFIG_FILENAME + product_info.name + ".pyconf"
+        if not os.path.exists(os.path.join(install_dir, filename)): 
             return False
 
     # check extra files if specified in present_files.install section
index 56141bcee5a6202fc4c3f51eb617ccb8669e2f0f..672149d12cd50ae3986d55d9798e26b7dae6b94d 100644 (file)
@@ -89,6 +89,7 @@ def git_extract(from_what, tag, where, logger, environment=None):
   DBG.write("git_extract", [from_what, tag, str(where)])
   if not where.exists():
     where.make()
+  where_git = os.path.join(str(where), ".git")
   if tag == "master" or tag == "HEAD":
     if src.architecture.is_windows():
       cmd = "git clone %(remote)s %(where)s"
@@ -96,12 +97,13 @@ def git_extract(from_what, tag, where, logger, environment=None):
       cmd = r"""
 set -x
 git clone %(remote)s %(where)s
+touch -d "$(git --git-dir=%(where_git)s  log -1 --format=date_format)" %(where)s
 """
-    cmd = cmd % {'remote': from_what, 'tag': tag, 'where': str(where)}
+#git --git-dir=%(where_git)s  log -1 --format=date_format > %(where)s/last_commit_date.txt
+    cmd = cmd % {'remote': from_what, 'tag': tag, 'where': str(where), 'where_git': where_git}
   else:
     # NOTICE: this command only works with recent version of git
     #         because --work-tree does not work with an absolute path
-    where_git = os.path.join(str(where), ".git")
     if src.architecture.is_windows():
       cmd = "rmdir %(where)s && git clone %(remote)s %(where)s && git --git-dir=%(where_git)s --work-tree=%(where)s checkout %(tag)s"
     else:
@@ -110,6 +112,7 @@ set -x
 rmdir %(where)s
 git clone %(remote)s %(where)s && \
 git --git-dir=%(where_git)s --work-tree=%(where)s checkout %(tag)s
+touch -d "$(git --git-dir=%(where_git)s  log -1 --format=date_format)" %(where)s
 """
     cmd = cmd % {'remote': from_what,
                  'tag': tag,
@@ -117,6 +120,7 @@ git --git-dir=%(where_git)s --work-tree=%(where)s checkout %(tag)s
                  'where_git': where_git}
 
 
+  cmd=cmd.replace('date_format','"%ai"')
   logger.logTxtFile.write("\n" + cmd + "\n")
   logger.logTxtFile.flush()