sat #18501 bug fix: for an archive, write the application pyconf directly...
[tools/sat.git] / commands / package.py
index a41c6a4bd9667648c4c8d4e8ee7ba19a4e14ac33..a5b448fce18e6dc47120c0ea10f4ba99a1b01965 100644 (file)
@@ -23,6 +23,8 @@ import datetime
 import tarfile
 import codecs
 import string
+import glob
+import pprint as PP
 
 import src
 
@@ -40,6 +42,8 @@ PROJECT_DIR = "PROJECT"
 IGNORED_DIRS = [".git", ".svn"]
 IGNORED_EXTENSIONS = []
 
+PACKAGE_EXT=".tar.gz" # the extension we use for the packages
+
 PROJECT_TEMPLATE = """#!/usr/bin/env python
 #-*- coding:utf-8 -*-
 
@@ -91,7 +95,12 @@ parser.add_option('s', 'sources', 'boolean', 'sources',
     _('Optional: Produce a compilable archive of the sources of the '
       'application.'), False)
 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
-    _('Optional: Only source package: do not make archive of vcs products.'),
+    _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
+      'Sat prepare will use VCS mode instead to retrieve them.'),
+    False)
+parser.add_option('', 'ftp', 'boolean', 'ftp',
+    _('Optional: Do not embed archives for products in archive mode. '
+      'Sat prepare will use ftp instead to retrieve them.'),
     False)
 parser.add_option('p', 'project', 'string', 'project',
     _('Optional: Produce an archive that contains a project.'), "")
@@ -101,11 +110,9 @@ parser.add_option('n', 'name', 'string', 'name',
     _('Optional: The name or full path of the archive.'), None)
 parser.add_option('', 'add_files', 'list2', 'add_files',
     _('Optional: The list of additional files to add to the archive.'), [])
-parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
-    _('Optional: do not add commercial licence.'), False)
-parser.add_option('', 'without_property', 'string', 'without_property',
+parser.add_option('', 'without_properties', 'properties', 'without_properties',
     _('Optional: Filter the products by their properties.\n\tSyntax: '
-      '--without_property <property>:<value>'))
+      '--without_properties <property>:<value>'))
 
 
 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
@@ -128,9 +135,14 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
     
     success = 0
     # loop over each directory or file stored in the d_content dictionary
-    for name in sorted(d_content.keys()):
+    names = sorted(d_content.keys())
+    DBG.write("add tar names", names)
+
+    # used to avoid duplications (for pip install in python, or single_install_dir cases)
+    already_added=set() 
+    for name in names:
         # display information
-        len_points = max_len - len(name)
+        len_points = max_len - len(name) + 3
         local_path, archive_path = d_content[name]
         in_archive = os.path.join(name_archive, archive_path)
         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
@@ -138,7 +150,10 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
         # of the directory or file to add
         # Add it in the archive
         try:
-            tar.add(local_path, arcname=in_archive, exclude=f_exclude)
+            key=local_path+"->"+in_archive
+            if key not in already_added:
+                tar.add(local_path, arcname=in_archive, exclude=f_exclude)
+                already_added.add(key)
             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
         except Exception as e:
             logger.write(src.printcolors.printcError(_("KO ")), 3)
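
A minimal standalone sketch of the deduplication idea above, using only the standard tarfile module (the add_once helper and the example paths are hypothetical, not part of sat):

import tarfile

def add_once(tar, entries):
    # entries: iterable of (local_path, path_in_archive) pairs; identical pairs
    # (pip-installed python or single_install_dir cases) are added only once
    already_added = set()
    for local_path, in_archive in entries:
        key = local_path + "->" + in_archive
        if key in already_added:
            continue
        tar.add(local_path, arcname=in_archive)
        already_added.add(key)

# usage sketch:
# tar = tarfile.open("package.tar.gz", mode="w:gz")
# add_once(tar, [("INSTALL/KERNEL", "APP/BINARIES/KERNEL"),
#                ("INSTALL/KERNEL", "APP/BINARIES/KERNEL")])   # second entry is skipped
# tar.close()
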
@@ -167,8 +182,7 @@ def produce_relative_launcher(config,
                               logger,
                               file_dir,
                               file_name,
-                              binaries_dir_name,
-                              with_commercial=True):
+                              binaries_dir_name):
     '''Create a specific SALOME launcher for the binary package. This launcher 
        uses relative paths.
     
@@ -183,7 +197,13 @@ def produce_relative_launcher(config,
     '''
     
     # get KERNEL installation path 
-    kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
+    kernel_info = src.product.get_product_config(config, "KERNEL")
+    kernel_base_name=os.path.basename(kernel_info.install_dir)
+    if kernel_base_name.startswith("config"):
+        # case of kernel installed in base. We remove "config-i"
+        kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
+    
+    kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
 
     # set kernel bin dir (considering fhs property)
     kernel_cfg = src.product.get_product_config(config, "KERNEL")
@@ -193,14 +213,19 @@ def produce_relative_launcher(config,
         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
 
     # check if the application contains an application module
-    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(),
-                                                    config)
+    # also check whether the application has a distene product;
+    # in this case, get its licence file name
+    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
     salome_application_name="Not defined" 
+    distene_licence_file_name=False
     for prod_name, prod_info in l_product_info:
-        # look for a salome application
+        # look for a "salome application" and a distene product
+        if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
+            distene_licence_file_name = src.product.product_has_licence(prod_info, 
+                                            config.PATHS.LICENCEPATH) 
         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
             salome_application_name=prod_info.name
-            continue
+
     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
     if salome_application_name == "Not defined":
@@ -208,42 +233,41 @@ def produce_relative_launcher(config,
     else:
         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
 
-    # Get the launcher template and do substitutions
-    withProfile = src.fileEnviron.withProfile
-
-    withProfile = withProfile.replace(
-        "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
-        "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
-    withProfile = withProfile.replace(
-        " 'BIN_KERNEL_INSTALL_DIR'",
-        " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
+    additional_env={}
+    additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
+                                                   config.VARS.sep + bin_kernel_install_dir
+    if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
+        additional_env['sat_python_version'] = 3
+    else:
+        additional_env['sat_python_version'] = 2
 
-    before, after = withProfile.split(
-                                "# here your local standalone environment\n")
+    additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
 
     # create an environment file writer
     writer = src.environment.FileEnvWriter(config,
                                            logger,
                                            file_dir,
-                                           src_root=None)
+                                           src_root=None,
+                                           env_info=None)
     
     filepath = os.path.join(file_dir, file_name)
-    # open the file and write into it
-    launch_file = open(filepath, "w")
-    launch_file.write(before)
     # Write
-    writer.write_cfgForPy_file(launch_file,
-                               for_package = binaries_dir_name,
-                               with_commercial=with_commercial)
-    launch_file.write(after)
-    launch_file.close()
+    writer.write_env_file(filepath,
+                          False,  # for launch
+                          "cfgForPy",
+                          additional_env=additional_env,
+                          no_path_init="False",
+                          for_package = binaries_dir_name)
     
     # Little hack to put out_dir_Path outside the strings
     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
+    src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
     
     # A hack to put a call to a file for distene licence.
     # It does nothing to an application that has no distene product
-    hack_for_distene_licence(filepath)
+    if distene_licence_file_name:
+        logger.write("Application has a distene licence file! We use it in package launcher", 5)
+        hack_for_distene_licence(filepath, distene_licence_file_name)
        
     # change the rights in order to make the file executable for everybody
     os.chmod(filepath,
@@ -257,7 +281,7 @@ def produce_relative_launcher(config,
 
     return filepath
 
-def hack_for_distene_licence(filepath):
+def hack_for_distene_licence(filepath, licence_file):
     '''Replace the distene licence env variable by a call to a file.
     
     :param filepath Str: The path to the launcher to modify.
@@ -283,12 +307,19 @@ def hack_for_distene_licence(filepath):
         return
     del text[num_line +1]
     del text[num_line +1]
-    text_to_insert ="""    import imp
-    try:
-        distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py')
+    text_to_insert ="""    try:
+        distene_licence_file=r"%s"
+        if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
+            import importlib.util
+            spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
+            distene=importlib.util.module_from_spec(spec_dist)
+            spec_dist.loader.exec_module(distene)
+        else:
+            import imp
+            distene = imp.load_source('distene_licence', distene_licence_file)
         distene.set_distene_variables(context)
     except:
-        pass\n"""
+        pass\n"""  % licence_file
     text.insert(num_line + 1, text_to_insert)
     for line in text:
         fout.write(line)
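
The inserted launcher snippet loads the licence file as a Python module; a standalone sketch of that version-dependent loading (load_licence_module is a hypothetical helper name, not part of sat) could be:

import sys

def load_licence_module(licence_file):
    # load the licence file as a module named "distene_licence",
    # via importlib on python >= 3.5 and imp on older interpreters
    if sys.version_info[:2] >= (3, 5):
        import importlib.util
        spec = importlib.util.spec_from_file_location("distene_licence", licence_file)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
    else:
        import imp
        module = imp.load_source("distene_licence", licence_file)
    return module

# the generated launcher then calls module.set_distene_variables(context)
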
@@ -317,14 +348,25 @@ def produce_relative_env_files(config,
                                            file_dir,
                                            src_root=None)
     
+    if src.architecture.is_windows():
+      shell = "bat"
+      filename  = "env_launch.bat"
+    else:
+      shell = "bash"
+      filename  = "env_launch.sh"
+
     # Write
-    filepath = writer.write_env_file("env_launch.sh",
+    filepath = writer.write_env_file(filename,
                           False, # for launch
-                          "bash",
+                          shell,
                           for_package = binaries_dir_name)
 
     # Little hack to put out_dir_Path as environment variable
-    src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
+    if src.architecture.is_windows() :
+      src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
+      src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
+    else:
+      src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
 
     # change the rights in order to make the file executable for everybody
     os.chmod(filepath,
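
Assuming src.replace_in_file performs a plain textual substitution, the out_dir_Path hack above amounts to something like this sketch (the helper below is hypothetical, not sat's implementation):

def replace_in_file(filepath, old, new):
    # rewrite the file in place, replacing every occurrence of 'old' by 'new'
    with open(filepath, "r") as f:
        content = f.read()
    with open(filepath, "w") as f:
        f.write(content.replace(old, new))

# on Windows the generated .bat uses %out_dir_Path%, on Linux the .sh uses ${out_dir_Path}:
# replace_in_file("env_launch.bat", '"out_dir_Path', '"%out_dir_Path%')
# replace_in_file("env_launch.sh",  '"out_dir_Path', '"${out_dir_Path}')
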
@@ -363,12 +405,13 @@ def produce_install_bin_file(config,
                                         "INSTALL_BIN.template")
         
         # build the name of the directory that will contain the binaries
-        binaries_dir_name = "BINARIES-" + config.VARS.dist
+        binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
         # build the substitution loop
         loop_cmd = "for f in $(grep -RIl"
         for key in d_sub:
             loop_cmd += " -e "+ key
-        loop_cmd += ' INSTALL); do\n     sed -i "\n'
+        loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
+                    '); do\n     sed -i "\n'
         for key in d_sub:
             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
         loop_cmd += '            " $f\ndone'
@@ -376,6 +419,7 @@ def produce_install_bin_file(config,
         d={}
         d["BINARIES_DIR"] = binaries_dir_name
         d["SUBSTITUTION_LOOP"]=loop_cmd
+        d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
         
         # substitute the template and write it in file
         content=src.template.substitute(installbin_template_path, d)
@@ -482,7 +526,16 @@ def binary_package(config, logger, options, tmp_working_dir):
     l_not_installed = []
     l_sources_not_present = []
     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
+    if ("APPLICATION" in config  and
+        "properties"  in config.APPLICATION  and
+        "mesa_launcher_in_package"    in config.APPLICATION.properties  and
+        config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
+            generate_mesa_launcher=True
+
     for prod_name, prod_info in l_product_info:
+        # skip product with property not_in_package set to yes
+        if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
+            continue  
 
         # Add the sources of the products that have the property 
         # sources_in_package : "yes"
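
A hedged sketch of the nested property lookup above, wrapped in a small hypothetical helper (sat already provides similar helpers such as src.appli_test_property; this is only an illustration):

def application_property_is(config, prop, value):
    # True only if config.APPLICATION.properties.<prop> exists and equals value
    return ("APPLICATION" in config
            and "properties" in config.APPLICATION
            and prop in config.APPLICATION.properties
            and config.APPLICATION.properties[prop] == value)

# generate_mesa_launcher = application_property_is(config, "mesa_launcher_in_package", "yes")
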
@@ -493,16 +546,12 @@ def binary_package(config, logger, options, tmp_working_dir):
             else:
                 l_sources_not_present.append(prod_name)
 
-        # if at least one of the application products has the "is_mesa" property
-        if src.get_property_in_product_cfg(prod_info, "is_mesa") == "yes":
-            generate_mesa_launcher = True  # we will generate a mesa launcher
-
         # ignore the native and fixed products for install directories
         if (src.product.product_is_native(prod_info) 
                 or src.product.product_is_fixed(prod_info)
                 or not src.product.product_compiles(prod_info)):
             continue
-        if src.product.check_installation(prod_info):
+        if src.product.check_installation(config, prod_info):
             l_install_dir.append((prod_name, prod_info.install_dir))
         else:
             l_not_installed.append(prod_name)
@@ -512,7 +561,8 @@ def binary_package(config, logger, options, tmp_working_dir):
             # cpp module
             for name_cpp in src.product.get_product_components(prod_info):
                 install_dir = os.path.join(config.APPLICATION.workdir,
-                                           "INSTALL", name_cpp) 
+                                           config.INTERNAL.config.install_dir,
+                                           name_cpp) 
                 if os.path.exists(install_dir):
                     l_install_dir.append((name_cpp, install_dir))
                 else:
@@ -520,7 +570,9 @@ def binary_package(config, logger, options, tmp_working_dir):
         
     # check the name of the directory that (could) contains the binaries 
     # from previous detar
-    binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
+    binaries_from_detar = os.path.join(
+                              config.APPLICATION.workdir,
+                              config.INTERNAL.config.binary_dir + config.VARS.dist)
     if os.path.exists(binaries_from_detar):
          logger.write("""
 WARNING: existing binaries directory from previous detar installation:
@@ -537,13 +589,13 @@ WARNING: existing binaries directory from previous detar installation:
     if len(l_not_installed) > 0:
         text_missing_prods = ""
         for p_name in l_not_installed:
-            text_missing_prods += "-" + p_name + "\n"
+            text_missing_prods += " - " + p_name + "\n"
         if not options.force_creation:
-            msg = _("ERROR: there are missing products installations:")
+            msg = _("ERROR: there are missing product installations:")
             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
                                      text_missing_prods),
                          1)
-            return None
+            raise src.SatException(msg)
         else:
             msg = _("WARNING: there are missing products installations:")
             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
@@ -556,11 +608,11 @@ WARNING: existing binaries directory from previous detar installation:
         for p_name in l_sources_not_present:
             text_missing_prods += "-" + p_name + "\n"
         if not options.force_creation:
-            msg = _("ERROR: there are missing products sources:")
+            msg = _("ERROR: there are missing product sources:")
             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
                                      text_missing_prods),
                          1)
-            return None
+            raise src.SatException(msg)
         else:
             msg = _("WARNING: there are missing products sources:")
             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
@@ -568,13 +620,19 @@ WARNING: existing binaries directory from previous detar installation:
                          1)
  
     # construct the name of the directory that will contain the binaries
-    binaries_dir_name = "BINARIES-" + config.VARS.dist
-    
+    if src.architecture.is_windows():
+        binaries_dir_name = config.INTERNAL.config.binary_dir
+    else:
+        binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
     # construct the correlation table between the product names, their
     # actual install directories and their install directory in archive
     d_products = {}
     for prod_name, install_dir in l_install_dir:
-        path_in_archive = os.path.join(binaries_dir_name, prod_name)
+        prod_base_name=os.path.basename(install_dir)
+        if prod_base_name.startswith("config"):
+            # case of a product installed in base. We remove "config-i"
+            prod_base_name=os.path.basename(os.path.dirname(install_dir))
+        path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
         
     for prod_name, source_dir in l_source_dir:
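
A small standalone sketch of the install-in-base directory naming used above (archive_dir_name is a hypothetical helper name):

import os

def archive_dir_name(install_dir):
    # products installed "in base" live in .../<PRODUCT>/config-<i>;
    # the archive must use <PRODUCT>, not the config-<i> sub-directory
    base = os.path.basename(install_dir)
    if base.startswith("config"):
        base = os.path.basename(os.path.dirname(install_dir))
    return base

# archive_dir_name("/base/KERNEL/config-2")  -> "KERNEL"
# archive_dir_name("/work/INSTALL/KERNEL")   -> "KERNEL"
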
@@ -593,8 +651,7 @@ WARNING: existing binaries directory from previous detar installation:
                                                  logger,
                                                  tmp_working_dir,
                                                  launcher_name,
-                                                 binaries_dir_name,
-                                                 not(options.without_commercial))
+                                                 binaries_dir_name)
             d_products["launcher"] = (launcher_package, launcher_name)
 
             # if the application contains mesa products, we generate in addition to the 
@@ -614,8 +671,7 @@ WARNING: existing binaries directory from previous detar installation:
                                                      logger,
                                                      tmp_working_dir,
                                                      launcher_mesa_name,
-                                                     binaries_dir_name,
-                                                     not(options.without_commercial))
+                                                     binaries_dir_name)
                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
 
                 # if there was a use_mesa value, we restore it
@@ -644,8 +700,11 @@ WARNING: existing binaries directory from previous detar installation:
                                            tmp_working_dir,
                                            binaries_dir_name)
 
-    d_products["environment file"] = (env_file, "env_launch.sh")
-      
+    if src.architecture.is_windows():
+      filename  = "env_launch.bat"
+    else:
+      filename  = "env_launch.sh"
+    d_products["environment file"] = (env_file, filename)      
     return d_products
 
 def source_package(sat, config, logger, options, tmp_working_dir):
@@ -664,10 +723,15 @@ def source_package(sat, config, logger, options, tmp_working_dir):
     :rtype: dict
     '''
     
+    d_archives={}
     # Get all the products that are prepared using an archive
-    logger.write("Find archive products ... ")
-    d_archives, l_pinfo_vcs = get_archives(config, logger)
-    logger.write("Done\n")
+    # unless ftp mode is specified (in this case the user of the
+    # archive will get the sources through the ftp mode of sat prepare)
+    if not options.ftp:
+        logger.write("Find archive products ... ")
+        d_archives, l_pinfo_vcs = get_archives(config, logger)
+        logger.write("Done\n")
+
     d_archives_vcs = {}
     if not options.with_vcs and len(l_pinfo_vcs) > 0:
         # Make archives with the products that are not prepared using an archive
@@ -683,8 +747,9 @@ def source_package(sat, config, logger, options, tmp_working_dir):
     # Create a project
     logger.write("Create the project ... ")
     d_project = create_project_for_src_package(config,
-                                                tmp_working_dir,
-                                                options.with_vcs)
+                                               tmp_working_dir,
+                                               options.with_vcs,
+                                               options.ftp)
     logger.write("Done\n")
     
     # Add salomeTools
@@ -729,6 +794,9 @@ def get_archives(config, logger):
     d_archives = {}
     l_pinfo_vcs = []
     for p_name, p_info in l_product_info:
+        # skip product with property not_in_package set to yes
+        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
+            continue  
         # ignore the native and fixed products
         if (src.product.product_is_native(p_info) 
                 or src.product.product_is_fixed(p_info)):
@@ -736,11 +804,34 @@ def get_archives(config, logger):
         if p_info.get_source == "archive":
             archive_path = p_info.archive_info.archive_name
             archive_name = os.path.basename(archive_path)
+            d_archives[p_name] = (archive_path,
+                                  os.path.join(ARCHIVE_DIR, archive_name))
+            if (src.appli_test_property(config,"pip", "yes") and 
+                src.product.product_test_property(p_info,"pip", "yes")):
+                # if pip mode is activated, and product is managed by pip
+                pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
+                pip_wheel_pattern=os.path.join(pip_wheels_dir, 
+                    "%s-%s*" % (p_info.name, p_info.version))
+                pip_wheel_path=glob.glob(pip_wheel_pattern)
+                msg_pip_not_found="Error in get_archive, pip wheel for "\
+                                  "product %s-%s was not found in %s directory"
+                msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
+                                  "product %s-%s were found in %s directory"
+                if len(pip_wheel_path)==0:
+                    raise src.SatException(msg_pip_not_found %\
+                        (p_info.name, p_info.version, pip_wheels_dir))
+                if len(pip_wheel_path)>1:
+                    raise src.SatException(msg_pip_two_or_more %\
+                        (p_info.name, p_info.version, pip_wheels_dir))
+
+                pip_wheel_name=os.path.basename(pip_wheel_path[0])
+                d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
+                    os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
         else:
-            l_pinfo_vcs.append((p_name, p_info))
+            # this product is not managed in archive mode;
+            # an archive of the vcs directory will be created by get_archive_vcs
+            l_pinfo_vcs.append((p_name, p_info)) 
             
-        d_archives[p_name] = (archive_path,
-                              os.path.join(ARCHIVE_DIR, archive_name))
     return d_archives, l_pinfo_vcs
 
 def add_salomeTools(config, tmp_working_dir):
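
The pip-wheel lookup above can be read as the following standalone sketch (find_wheel is a hypothetical helper; the error texts are paraphrased):

import glob
import os

def find_wheel(wheels_dir, name, version):
    # return the single wheel matching <name>-<version>* in wheels_dir,
    # failing loudly when none or several are found
    pattern = os.path.join(wheels_dir, "%s-%s*" % (name, version))
    candidates = glob.glob(pattern)
    if len(candidates) == 0:
        raise RuntimeError("pip wheel for %s-%s not found in %s" % (name, version, wheels_dir))
    if len(candidates) > 1:
        raise RuntimeError("several pip wheels for %s-%s found in %s" % (name, version, wheels_dir))
    return candidates[0]

# wheel_path = find_wheel("/data/archives/wheels", "numpy", "1.15.1")
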
@@ -847,7 +938,7 @@ def make_archive(prod_name, prod_info, where):
     :return: The path of the resulting archive
     :rtype: str
     '''
-    path_targz_prod = os.path.join(where, prod_name + ".tgz")
+    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
     local_path = prod_info.source_dir
     tar_prod.add(local_path,
@@ -856,7 +947,7 @@ def make_archive(prod_name, prod_info, where):
     tar_prod.close()
     return path_targz_prod       
 
-def create_project_for_src_package(config, tmp_working_dir, with_vcs):
+def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
     '''Create a specific project for a source package.
 
     :param config Config: The global configuration.
@@ -865,6 +956,7 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs):
                                 source package
     :param with_vcs boolean: True if the package is with vcs products (not 
                              transformed into archive products)
+    :param with_ftp boolean: True if the package uses ftp servers to get archives
     :return: The dictionary 
              {"project" : (produced project, project path in the archive)}
     :rtype: Dict
@@ -897,6 +989,23 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs):
     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
     ff = open(project_pyconf_file, "w")
     ff.write(PROJECT_TEMPLATE)
+    if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
+        ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
+        for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
+            ftp_path=ftp_path+":"+ftpserver
+        ftp_path+='"'
+        ff.write("# ftp servers where to search for prerequisite archives\n")
+        ff.write(ftp_path)
+    # add licence paths if any
+    if len(config.PATHS.LICENCEPATH) > 0:  
+        licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
+        for path in config.PATHS.LICENCEPATH[1:]:
+            licence_path=licence_path+":"+path
+        licence_path+='"'
+        ff.write("\n# Where to search for licences\n")
+        ff.write(licence_path)
+        
+
     ff.close()
     
     # Loop over the products to get their pyconf and all the scripts
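
The colon-separated ARCHIVEFTP and LICENCEPATH lines written above could equivalently be built with str.join; a sketch with placeholder values:

servers = ["ftp://ftp.example.com/pub", "ftp://mirror.example.com/pub"]   # placeholders
ftp_line = 'ARCHIVEFTP : "%s"' % ":".join(servers)

licence_dirs = ["/opt/licences", "/shared/licences"]                      # placeholders
licence_line = 'LICENCEPATH : "%s"' % ":".join(licence_dirs)
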
@@ -905,6 +1014,9 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs):
     lproducts_name = config.APPLICATION.products.keys()
     l_products = src.product.get_products_infos(lproducts_name, config)
     for p_name, p_info in l_products:
+        # skip product with property not_in_package set to yes
+        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
+            continue  
         find_product_scripts_and_pyconf(p_name,
                                         p_info,
                                         config,
@@ -914,7 +1026,10 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs):
                                         patches_tmp_dir,
                                         products_pyconf_tmp_dir)
     
-    find_application_pyconf(config, application_tmp_dir)
+    # for the application pyconf, we write the config directly,
+    # without searching for the original pyconf file,
+    # to avoid problems with overwritten sections and the rm_products key
+    write_application_pyconf(config, application_tmp_dir)
     
     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
     return d_project
@@ -949,22 +1064,18 @@ def find_product_scripts_and_pyconf(p_name,
     '''
     
     # read the pyconf of the product
-    product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
-                                           config.PATHS.PRODUCTPATH)
-    product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
+    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
 
     # find the compilation script if any
     if src.product.product_has_script(p_info):
         compil_script_path = src.Path(p_info.compil_script)
         compil_script_path.copy(compil_scripts_tmp_dir)
-        product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
-                                                    p_info.compil_script)
+
     # find the environment script if any
     if src.product.product_has_env_script(p_info):
         env_script_path = src.Path(p_info.environ.env_script)
         env_script_path.copy(env_scripts_tmp_dir)
-        product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
-                                                p_info.environ.env_script)
+
     # find the patches if any
     if src.product.product_has_patches(p_info):
         patches = src.pyconf.Sequence()
@@ -973,25 +1084,26 @@ def find_product_scripts_and_pyconf(p_name,
             p_path.copy(patches_tmp_dir)
             patches.append(os.path.basename(patch_path), "")
 
-        product_pyconf_cfg[p_info.section].patches = patches
-    
-    if with_vcs:
-        # put in the pyconf file the resolved values
-        for info in ["git_info", "cvs_info", "svn_info"]:
-            if info in p_info:
-                for key in p_info[info]:
-                    product_pyconf_cfg[p_info.section][info][key] = p_info[
-                                                                      info][key]
-    else:
-        # if the product is not archive, then make it become archive.
-        if src.product.product_is_vcs(p_info):
-            product_pyconf_cfg[p_info.section].get_source = "archive"
-            if not "archive_info" in product_pyconf_cfg[p_info.section]:
-                product_pyconf_cfg[p_info.section].addMapping("archive_info",
+    if (not with_vcs) and src.product.product_is_vcs(p_info):
+        # in non-vcs mode, if the product is not in archive mode, switch it to archive mode.
+
+        # depending upon the incremental mode, select impacted sections
+        if "properties" in p_info and "incremental" in p_info.properties and\
+            p_info.properties.incremental == "yes":
+            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
+        else:
+            sections = [p_info.section]
+        for section in sections:
+            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
+                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
+                          (p_name,section))
+                product_pyconf_cfg[section].get_source = "archive"
+                if not "archive_info" in product_pyconf_cfg[section]:
+                    product_pyconf_cfg[section].addMapping("archive_info",
                                         src.pyconf.Mapping(product_pyconf_cfg),
                                         "")
-            product_pyconf_cfg[p_info.section
-                              ].archive_info.archive_name = p_info.name + ".tgz"
+                    product_pyconf_cfg[section].archive_info.archive_name =\
+                        p_info.name + ".tgz"
     
     # write the pyconf file to the temporary project location
     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
@@ -1001,46 +1113,86 @@ def find_product_scripts_and_pyconf(p_name,
     product_pyconf_cfg.__save__(ff, 1)
     ff.close()
 
-def find_application_pyconf(config, application_tmp_dir):
-    '''Find the application pyconf file and put it in the specific temporary 
+
+def write_application_pyconf(config, application_tmp_dir):
+    '''Write the application pyconf file in the specific temporary 
        directory containing the specific project of a source package.
 
     :param config Config: The global configuration.
     :param application_tmp_dir str: The path to the temporary application 
-                                       scripts directory of the project.
+                                    scripts directory of the project.
     '''
-    # read the pyconf of the application
     application_name = config.VARS.application
-    application_pyconf_path = src.find_file_in_lpath(
-                                            application_name + ".pyconf",
-                                            config.PATHS.APPLICATIONPATH)
-    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
-    
-    # Change the workdir
-    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
-                                    application_pyconf_cfg,
-                                    src.pyconf.DOLLAR,
-                                    'VARS.salometoolsway + $VARS.sep + ".."')
-
-    # Prevent from compilation in base
-    application_pyconf_cfg.APPLICATION.no_base = "yes"
-    
     # write the pyconf file to the temporary application location
     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                                application_name + ".pyconf")
-    ff = open(application_tmp_pyconf_path, 'w')
-    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
-    application_pyconf_cfg.__save__(ff, 1)
+    with open(application_tmp_pyconf_path, 'w') as f:
+        f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
+        res = src.pyconf.Config()
+        app = src.pyconf.deepCopyMapping(config.APPLICATION)
+        # no base in packages
+        if "base" in app:
+            app.base = "no" 
+        # Change the workdir
+        app.workdir = src.pyconf.Reference(
+                                 app,
+                                 src.pyconf.DOLLAR,
+                                 'VARS.salometoolsway + $VARS.sep + ".."')
+        res.addMapping("APPLICATION", app, "")
+        res.__save__(f, evaluated=False)
+    
+
+def sat_package(config, tmp_working_dir, options, logger):
+    '''Prepare a dictionary that stores all the needed directories and files to
+       add in a salomeTools package.
+    
+    :param tmp_working_dir str: The temporary local working directory 
+    :param options OptResult: the options of the launched command
+    :return: the dictionary that stores all the needed directories and files to
+             add in a salomeTools package.
+             {label : (path_on_local_machine, path_in_archive)}
+    :rtype: dict
+    '''
+    d_project = {}
+
+    # we include sat itself
+    d_project["all_sat"]=(config.VARS.salometoolsway, "")
+
+    # and we overwrite local.pyconf with a clean version.
+    local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
+    local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
+    local_cfg = src.pyconf.Config(local_file_path)
+    local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
+    local_cfg.LOCAL["base"] = "default"
+    local_cfg.LOCAL["workdir"] = "default"
+    local_cfg.LOCAL["log_dir"] = "default"
+    local_cfg.LOCAL["archive_dir"] = "default"
+    local_cfg.LOCAL["VCS"] = "None"
+    local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
+
+    # if the archive contains a project, we write its relative path in local.pyconf
+    if options.project:
+        project_arch_path = os.path.join("projects", options.project, 
+                                         os.path.basename(options.project_file_path))
+        local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
+
+    ff = open(local_pyconf_tmp_path, 'w')
+    local_cfg.__save__(ff, 1)
     ff.close()
+    d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
+    return d_project
+    
 
-def project_package(config, name_project, project_file_path, tmp_working_dir, logger):
+def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
     '''Prepare a dictionary that stores all the needed directories and files to
        add in a project package.
     
     :param project_file_path str: The path to the local project.
+    :param ftp_mode boolean: True to not embed archives; the package will rely on ftp mode to retrieve them.
     :param tmp_working_dir str: The temporary local directory containing some 
                                 specific directories or files needed in the 
                                 project package
+    :param embedded_in_sat boolean: True if the project package is embedded in a sat package
     :return: the dictionary that stores all the needed directories and files to
              add in a project package.
              {label : (path_on_local_machine, path_in_archive)}
@@ -1057,17 +1209,28 @@ WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n"""
       project_pyconf_cfg = src.pyconf.Config(project_file_path)
       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
     
-    paths = {"ARCHIVEPATH" : "archives",
-             "APPLICATIONPATH" : "applications",
+    paths = {"APPLICATIONPATH" : "applications",
              "PRODUCTPATH" : "products",
              "JOBPATH" : "jobs",
              "MACHINEPATH" : "machines"}
+    if not ftp_mode:
+        paths["ARCHIVEPATH"] = "archives"
+
     # Loop over the project paths and add it
+    project_file_name = os.path.basename(project_file_path)
     for path in paths:
         if path not in project_pyconf_cfg:
             continue
+        if embedded_in_sat:
+            dest_path = os.path.join("projects", name_project, paths[path])
+            project_file_dest = os.path.join("projects", name_project, project_file_name)
+        else:
+            dest_path = paths[path]
+            project_file_dest = project_file_name
+
         # Add the directory to the files to add in the package
-        d_project[path] = (project_pyconf_cfg[path], paths[path])
+        d_project[path] = (project_pyconf_cfg[path], dest_path)
+
         # Modify the value of the path in the package
         project_pyconf_cfg[path] = src.pyconf.Reference(
                                     project_pyconf_cfg,
@@ -1082,15 +1245,19 @@ WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n"""
     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
                                                            src.pyconf.DOLLAR,
                                                            'PWD')
+    # we don't want to export these two fields
+    project_pyconf_cfg.__delitem__("file_path")
+    project_pyconf_cfg.__delitem__("PWD")
+    if ftp_mode:
+        project_pyconf_cfg.__delitem__("ARCHIVEPATH")
     
     # Write the project pyconf file
-    project_file_name = os.path.basename(project_file_path)
     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
     ff = open(project_pyconf_tmp_path, 'w')
     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
     project_pyconf_cfg.__save__(ff, 1)
     ff.close()
-    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)
+    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
     
     return d_project
 
@@ -1109,6 +1276,8 @@ In the following, $$ROOT represents the directory where you have installed
 SALOME (the directory where this file is located).
 
 """
+        if src.architecture.is_windows():
+            readme_header = readme_header.replace('$$ROOT','%ROOT%')
         readme_compilation_with_binaries="""
 
 compilation based on the binaries used as prerequisites
@@ -1147,12 +1316,22 @@ The procedure to do it is:
         d = dict()
         d['user'] = config.VARS.user
         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
-        d['version'] = config.INTERNAL.sat_version
+        d['version'] = src.get_salometool_version(config)
         d['dist'] = config.VARS.dist
         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
 
         if options.binaries or options.sources:
             d['application'] = config.VARS.application
+            d['BINARIES']    = config.INTERNAL.config.install_dir
+            d['SEPARATOR'] = config.VARS.sep
+            if src.architecture.is_windows():
+                d['operatingSystem'] = 'Windows'
+                d['PYTHON3'] = 'python3'
+                d['ROOT']    = '%ROOT%'
+            else:
+                d['operatingSystem'] = 'Linux'
+                d['PYTHON3'] = ''
+                d['ROOT']    = '$ROOT'
             f.write("# Application: " + d['application'] + "\n")
             if 'KERNEL' in config.APPLICATION.products:
                 VersionSalome = src.get_salome_version(config)
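
Assuming readme_header_tpl is a string.Template built from the header text above, the substitution works as in this abbreviated sketch (keys reduced to a few placeholders):

import string
import datetime

readme_header_tpl = string.Template(
    "Date: $date\nVersion: $version\nPlatform: $operatingSystem\n")
d = {"date": datetime.datetime.now().strftime("%Y-%m-%d %H:%M"),
     "version": "5.5.0",                 # placeholder for src.get_salometool_version(config)
     "operatingSystem": "Linux"}
print(readme_header_tpl.substitute(d))
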
@@ -1173,7 +1352,7 @@ The procedure to do it is:
         if options.sources:
             f.write(src.template.substitute(readme_template_path_src, d))
 
-        if options.binaries and options.sources:
+        if options.binaries and options.sources and not src.architecture.is_windows():
             f.write(readme_compilation_with_binaries)
 
         if options.project:
@@ -1191,14 +1370,15 @@ def update_config(config, prop, value):
     :param prop str: The property to filter
     :param value str: The value of the property to filter
     '''
-    src.check_config_has_application(config)
-    l_product_to_remove = []
-    for product_name in config.APPLICATION.products.keys():
-        prod_cfg = src.product.get_product_config(config, product_name)
-        if src.get_property_in_product_cfg(prod_cfg, prop) == value:
-            l_product_to_remove.append(product_name)
-    for product_name in l_product_to_remove:
-        config.APPLICATION.products.__delitem__(product_name)
+    # if there is no APPLICATION (e.g. sat package -t): nothing to do
+    if "APPLICATION" in config:
+        l_product_to_remove = []
+        for product_name in config.APPLICATION.products.keys():
+            prod_cfg = src.product.get_product_config(config, product_name)
+            if src.get_property_in_product_cfg(prod_cfg, prop) == value:
+                l_product_to_remove.append(product_name)
+        for product_name in l_product_to_remove:
+            config.APPLICATION.products.__delitem__(product_name)
 
 def description():
     '''method that is called when salomeTools is called with --help option.
@@ -1257,8 +1437,7 @@ def run(args, runner, logger):
                                                     runner.cfg.VARS.application), 1)
         
         # Get the default directory where to put the packages
-        package_default_path = os.path.join(runner.cfg.APPLICATION.workdir,
-                                            "PACKAGE")
+        package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
         src.ensure_path_exists(package_default_path)
         
     # if the package contains a project:
@@ -1273,9 +1452,7 @@ def run(args, runner, logger):
                 break
 
         if foundProject is None:
-            local_path = os.path.join(runner.cfg.VARS.salometoolsway,
-                                     "data",
-                                     "local.pyconf")
+            local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
             msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
 known projects are:
 %(2)s
@@ -1290,10 +1467,16 @@ Please add it in file:
             options.project_file_path = foundProject
             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
     
-    # Remove the products that are filtered by the --without_property option
-    if options.without_property:
-        [prop, value] = options.without_property.split(":")
+    # Remove the products that are filtered by the --without_properties option
+    if options.without_properties:
+        app = runner.cfg.APPLICATION
+        logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
+        prop, value = options.without_properties
         update_config(runner.cfg, prop, value)
+        logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
+
+    # Remove from config the products that have the not_in_package property
+    update_config(runner.cfg, "not_in_package", "yes")
     
     # get the name of the archive or build it
     if options.name:
@@ -1325,12 +1508,14 @@ Please add it in file:
             if options.with_vcs:
                 archive_name += "-VCS"
 
+        if options.sat:
+            archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
+
         if options.project:
-            project_name = options.project
-            archive_name += ("PROJECT-" + project_name)
+            if options.sat:
+                archive_name += "_" 
+            archive_name += ("satproject_" + options.project)
  
-        if options.sat:
-            archive_name += ("salomeTools_" + runner.cfg.INTERNAL.sat_version)
         if len(archive_name)==0: # no option worked 
             msg = _("Error: Cannot name the archive\n"
                     " check if at least one of the following options was "
@@ -1340,7 +1525,7 @@ Please add it in file:
             logger.write("\n", 1)
             return 1
  
-    path_targz = os.path.join(dir_name, archive_name + ".tgz")
+    path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
     
     src.printcolors.print_value(logger, "Package path", path_targz, 2)
 
@@ -1372,7 +1557,9 @@ Please add it in file:
         for key in d_bin_files_to_add:
             if key.endswith("(bin)"):
                 source_dir = d_bin_files_to_add[key][0]
-                path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
+                path_in_archive = d_bin_files_to_add[key][1].replace(
+                   runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
+                   runner.cfg.INTERNAL.config.install_dir)
                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
                     # if basename is the same we will just substitute the dirname 
                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
@@ -1381,7 +1568,6 @@ Please add it in file:
                     d_paths_to_substitute[source_dir]=path_in_archive
 
         d_files_to_add.update(d_bin_files_to_add)
-
     if options.sources:
         d_files_to_add.update(source_package(runner,
                                         runner.cfg,
@@ -1403,11 +1589,12 @@ Please add it in file:
         # --salomeTool option is not considered when --sources is selected, as this option
         # already brings salomeTool!
         if options.sat:
-            d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
+            d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
+                                  options, logger))
         
     if options.project:
-        DBG.write("config for package %s" % project_name, runner.cfg)
-        d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, tmp_working_dir, logger))
+        DBG.write("config for package %s" % options.project, runner.cfg)
+        d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
 
     if not(d_files_to_add):
         msg = _("Error: Empty dictionnary to build the archive!\n")
@@ -1429,10 +1616,11 @@ Please add it in file:
             d_files_to_add[file_name] = (file_path, file_name)
 
     logger.write("\n", 2)
-
     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
     logger.write("\n", 2)
-    
+    logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
+
+    res = 0
     try:
         # Creating the object tarfile
         tar = tarfile.open(path_targz, mode='w:gz')
@@ -1452,13 +1640,21 @@ Please add it in file:
         logger.write(_("\n"), 1)
         return 1
     
+    # case with no application: only package sat, as with 'sat package -t'
+    try:
+        app = runner.cfg.APPLICATION
+    except:
+        app = None
+
     # unconditionally remove the tmp_local_working_dir
-    tmp_local_working_dir = os.path.join(runner.cfg.APPLICATION.workdir, "tmp_package")
-    if os.path.isdir(tmp_local_working_dir):
-      shutil.rmtree(tmp_local_working_dir)
+    if app is not None:
+        tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
+        if os.path.isdir(tmp_local_working_dir):
+            shutil.rmtree(tmp_local_working_dir)
 
-    # to decide...
-    DBG.tofix("make shutil.rmtree(%s) effective" % tmp_working_dir, "", True)   
+    # remove the tmp directory, unless the user is registered as a developer
+    if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
+        shutil.rmtree(tmp_working_dir)
     
     # Print again the path of the package
     logger.write("\n", 2)