Salome HOME
documentation, option de sat package --with_vcs en combinaison avec --bin_products
[tools/sat.git] / commands / package.py
index 5b18d13e3a9b75064cdc767d44246b30c1b8df65..bbb20bed7f62b1bbd22c5b62f9d4f5d9deb14ee2 100644 (file)
@@ -23,10 +23,15 @@ import datetime
 import tarfile
 import codecs
 import string
-
+import glob
+import pprint as PP
+import sys
 import src
 
 from application import get_SALOME_modules
+import src.debug as DBG
+
+old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
 
 BINARY = "binary"
 SOURCE = "Source"
@@ -39,7 +44,10 @@ PROJECT_DIR = "PROJECT"
 IGNORED_DIRS = [".git", ".svn"]
 IGNORED_EXTENSIONS = []
 
-PROJECT_TEMPLATE = """#!/usr/bin/env python
+PACKAGE_EXT=".tar.gz" # the extension we use for the packages
+
+if src.architecture.is_windows():
+    PROJECT_TEMPLATE = """#!/usr/bin/env python
 #-*- coding:utf-8 -*-
 
 # The path to the archive root directory
@@ -58,23 +66,44 @@ JOBPATH : $project_path + "jobs/"
 # Where to search the pyconf of the machines of the project
 MACHINEPATH : $project_path + "machines/"
 """
+else:
+    PROJECT_TEMPLATE = """#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+# path to the PROJECT
+project_path : $PWD + "/"
+
+# Where to search the archives of the products
+ARCHIVEPATH : $project_path + "ARCHIVES"
+# Where to search the pyconf of the applications
+APPLICATIONPATH : $project_path + "applications/"
+# Where to search the pyconf of the products
+PRODUCTPATH : $project_path + "products/"
+# Where to search the pyconf of the jobs of the project
+JOBPATH : $project_path + "jobs/"
+# Where to search the pyconf of the machines of the project
+MACHINEPATH : $project_path + "machines/"
+"""
+
 
 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
 #-*- coding:utf-8 -*-
 
   LOCAL :
   {
-    base : 'unknown'
-    workdir : 'unknown'
-    log_dir : 'unknown'
-    VCS : None
-    tag : None
+    base : 'default'
+    workdir : 'default'
+    log_dir : 'default'
+    archive_dir : 'default'
+    VCS : 'unknown'
+    tag : 'unknown'
   }
 
 PROJECTS :
 {
-project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
-""" + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
+  project_file_paths : 
+  [
+  ]
 }
 """)
 
@@ -88,9 +117,19 @@ parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
 parser.add_option('s', 'sources', 'boolean', 'sources',
     _('Optional: Produce a compilable archive of the sources of the '
       'application.'), False)
+parser.add_option('', 'bin_products', 'boolean', 'bin_products',
+    _('Optional: Create binary archives for all products.'), False)
 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
-    _('Optional: Only source package: do not make archive of vcs products.'),
+    _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). ' 
+      'Sat prepare will use VCS mode instead to retrieve them.'
+      '\n          Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'),
     False)
+parser.add_option('', 'ftp', 'boolean', 'ftp',
+    _('Optional: Do not embed archives for products in archive mode.' 
+    'Sat prepare will use ftp instead to retrieve them'),
+    False)
+parser.add_option('e', 'exe', 'string', 'exe',
+    _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
 parser.add_option('p', 'project', 'string', 'project',
     _('Optional: Produce an archive that contains a project.'), "")
 parser.add_option('t', 'salometools', 'boolean', 'sat',
@@ -99,11 +138,9 @@ parser.add_option('n', 'name', 'string', 'name',
     _('Optional: The name or full path of the archive.'), None)
 parser.add_option('', 'add_files', 'list2', 'add_files',
     _('Optional: The list of additional files to add to the archive.'), [])
-parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
-    _('Optional: do not add commercial licence.'), False)
-parser.add_option('', 'without_property', 'string', 'without_property',
+parser.add_option('', 'without_properties', 'properties', 'without_properties',
     _('Optional: Filter the products by their properties.\n\tSyntax: '
-      '--without_property <property>:<value>'))
+      '--without_properties <property>:<value>'))
 
 
 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
@@ -126,17 +163,32 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
     
     success = 0
     # loop over each directory or file stored in the d_content dictionary
-    for name in d_content.keys():
+    names = sorted(d_content.keys())
+    DBG.write("add tar names", names)
+
+    # used to avoid duplications (for pip install in python, or single_install_dir cases)
+    already_added=set() 
+    for name in names:
         # display information
-        len_points = max_len - len(name)
-        logger.write(name + " " + len_points * "." + " ", 3)
-        # Get the local path and the path in archive 
-        # of the directory or file to add
+        len_points = max_len - len(name) + 3
         local_path, archive_path = d_content[name]
         in_archive = os.path.join(name_archive, archive_path)
+        logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
+        # Get the local path and the path in archive 
+        # of the directory or file to add
         # Add it in the archive
         try:
-            tar.add(local_path, arcname=in_archive, exclude=f_exclude)
+            key=local_path+"->"+in_archive
+            if key not in already_added:
+                if old_python:
+                    tar.add(local_path,
+                                 arcname=in_archive,
+                                 exclude=exclude_VCS_and_extensions_26)
+                else:
+                    tar.add(local_path,
+                                 arcname=in_archive,
+                                 filter=exclude_VCS_and_extensions)
+                already_added.add(key)
             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
         except Exception as e:
             logger.write(src.printcolors.printcError(_("KO ")), 3)
@@ -145,9 +197,10 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
         logger.write("\n", 3)
     return success
 
-def exclude_VCS_and_extensions(filename):
+
+def exclude_VCS_and_extensions_26(filename):
     ''' The function that is used to exclude from package the link to the 
-        VCS repositories (like .git)
+        VCS repositories (like .git) (only for python 2.6)
 
     :param filename Str: The filname to exclude (or not).
     :return: True if the file has to be exclude
@@ -161,12 +214,28 @@ def exclude_VCS_and_extensions(filename):
             return True
     return False
 
+def exclude_VCS_and_extensions(tarinfo):
+    ''' The function that is used to exclude from package the link to the 
+        VCS repositories (like .git)
+
+    :param tarinfo TarInfo: The tarfile member to examine (its .name is tested).
+    :return: None if the file has to be excluded, else the unchanged tarinfo
+    :rtype: TarInfo or None
+    '''
+    filename = tarinfo.name
+    for dir_name in IGNORED_DIRS:
+        if dir_name in filename:
+            return None
+    for extension in IGNORED_EXTENSIONS:
+        if filename.endswith(extension):
+            return None
+    return tarinfo
+
 def produce_relative_launcher(config,
                               logger,
                               file_dir,
                               file_name,
-                              binaries_dir_name,
-                              with_commercial=True):
+                              binaries_dir_name):
     '''Create a specific SALOME launcher for the binary package. This launcher 
        uses relative paths.
     
@@ -180,43 +249,87 @@ def produce_relative_launcher(config,
     :rtype: str
     '''
     
-    # Get the launcher template
-    profile_install_dir = os.path.join(binaries_dir_name,
-                                       config.APPLICATION.profile.product)
-    withProfile = src.fileEnviron.withProfile
-    withProfile = withProfile.replace(
-        "ABSOLUTE_APPLI_PATH'] = 'PROFILE_INSTALL_DIR'",
-        "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + profile_install_dir + "'")
-    withProfile = withProfile.replace(
-        "os.path.join( 'PROFILE_INSTALL_DIR'",
-        "os.path.join( out_dir_Path, '" + profile_install_dir + "'")
-
-    before, after = withProfile.split(
-                                "# here your local standalone environment\n")
+    # set base mode to "no" for the archive - save current mode to restore it at the end
+    if "base" in config.APPLICATION:
+        base_setting=config.APPLICATION.base 
+    else:
+        base_setting="maybe"
+    config.APPLICATION.base="no"
+
+    # get KERNEL installation path 
+    kernel_info = src.product.get_product_config(config, "KERNEL")
+    kernel_base_name=os.path.basename(kernel_info.install_dir)
+    if kernel_info.install_mode == "base":
+        # case of kernel installed in base. the kernel install dir name is different in the archive
+        kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
+    
+    kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
+
+    # set kernel bin dir (considering fhs property)
+    kernel_cfg = src.product.get_product_config(config, "KERNEL")
+    if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
+        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
+    else:
+        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
+
+    # check if the application contains an application module
+    # check also if the application has a distene product, 
+    # in this case get its licence file name
+    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
+    salome_application_name="Not defined" 
+    distene_licence_file_name=False
+    for prod_name, prod_info in l_product_info:
+        # look for a "salome application" and a distene product
+        if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
+            distene_licence_file_name = src.product.product_has_licence(prod_info, 
+                                            config.PATHS.LICENCEPATH) 
+        if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
+            salome_application_name=prod_info.name
+
+    # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
+    # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
+    if salome_application_name == "Not defined":
+        app_root_dir=kernel_root_dir
+    else:
+        app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
+
+    additional_env={}
+    additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
+                                                   config.VARS.sep + bin_kernel_install_dir
+    if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
+        additional_env['sat_python_version'] = 3
+    else:
+        additional_env['sat_python_version'] = 2
+
+    additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
+    launcher_name = src.get_launcher_name(config)
+    additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
 
     # create an environment file writer
     writer = src.environment.FileEnvWriter(config,
                                            logger,
                                            file_dir,
-                                           src_root=None)
+                                           src_root=None,
+                                           env_info=None)
     
     filepath = os.path.join(file_dir, file_name)
-    # open the file and write into it
-    launch_file = open(filepath, "w")
-    launch_file.write(before)
     # Write
-    writer.write_cfgForPy_file(launch_file,
-                               for_package = binaries_dir_name,
-                               with_commercial=with_commercial)
-    launch_file.write(after)
-    launch_file.close()
+    writer.write_env_file(filepath,
+                          False,  # for launch
+                          "cfgForPy",
+                          additional_env=additional_env,
+                          no_path_init="False",
+                          for_package = binaries_dir_name)
     
     # Little hack to put out_dir_Path outside the strings
     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
+    src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
     
     # A hack to put a call to a file for distene licence.
     # It does nothing to an application that has no distene product
-    hack_for_distene_licence(filepath)
+    if distene_licence_file_name:
+        logger.write("Application has a distene licence file! We use it in package launcher", 5)
+        hack_for_distene_licence(filepath, distene_licence_file_name)
        
     # change the rights in order to make the file executable for everybody
     os.chmod(filepath,
@@ -228,9 +341,12 @@ def produce_relative_launcher(config,
              stat.S_IXGRP |
              stat.S_IXOTH)
 
+    # restore modified setting by its initial value
+    config.APPLICATION.base=base_setting
+
     return filepath
 
-def hack_for_distene_licence(filepath):
+def hack_for_distene_licence(filepath, licence_file):
     '''Replace the distene licence env variable by a call to a file.
     
     :param filepath Str: The path to the launcher to modify.
@@ -256,12 +372,19 @@ def hack_for_distene_licence(filepath):
         return
     del text[num_line +1]
     del text[num_line +1]
-    text_to_insert ="""    import imp
-    try:
-        distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py')
+    text_to_insert ="""    try:
+        distene_licence_file=r"%s"
+        if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
+            import importlib.util
+            spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
+            distene=importlib.util.module_from_spec(spec_dist)
+            spec_dist.loader.exec_module(distene)
+        else:
+            import imp
+            distene = imp.load_source('distene_licence', distene_licence_file)
         distene.set_distene_variables(context)
     except:
-        pass\n"""
+        pass\n"""  % licence_file
     text.insert(num_line + 1, text_to_insert)
     for line in text:
         fout.write(line)
@@ -272,7 +395,8 @@ def hack_for_distene_licence(filepath):
 def produce_relative_env_files(config,
                               logger,
                               file_dir,
-                              binaries_dir_name):
+                              binaries_dir_name,
+                              exe_name=None):
     '''Create some specific environment files for the binary package. These 
        files use relative paths.
     
@@ -281,23 +405,56 @@ def produce_relative_env_files(config,
     :param file_dir str: the directory where to put the files
     :param binaries_dir_name str: the name of the repository where the binaries
                                   are, in the archive.
+    :param exe_name str: if given generate a launcher executing exe_name
     :return: the list of path of the produced environment files
     :rtype: List
     '''  
+
+    # set base mode to "no" for the archive - save current mode to restore it at the end
+    if "base" in config.APPLICATION:
+        base_setting=config.APPLICATION.base 
+    else:
+        base_setting="maybe"
+    config.APPLICATION.base="no"
+
     # create an environment file writer
     writer = src.environment.FileEnvWriter(config,
                                            logger,
                                            file_dir,
                                            src_root=None)
     
+    if src.architecture.is_windows():
+      shell = "bat"
+      filename  = "env_launch.bat"
+    else:
+      shell = "bash"
+      filename  = "env_launch.sh"
+
+    if exe_name:
+        filename=os.path.basename(exe_name)
+
     # Write
-    filepath = writer.write_env_file("env_launch.sh",
+    filepath = writer.write_env_file(filename,
                           False, # for launch
-                          "bash",
+                          shell,
                           for_package = binaries_dir_name)
 
     # Little hack to put out_dir_Path as environment variable
-    src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
+    if src.architecture.is_windows() :
+      src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
+      src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
+      src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
+    else:
+      src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
+      src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
+
+    if exe_name:
+        if src.architecture.is_windows():
+            cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
+        else:
+            cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
+        with open(filepath, "a") as exe_launcher:
+            exe_launcher.write(cmd)
 
     # change the rights in order to make the file executable for everybody
     os.chmod(filepath,
@@ -309,6 +466,9 @@ def produce_relative_env_files(config,
              stat.S_IXGRP |
              stat.S_IXOTH)
     
+    # restore modified setting by its initial value
+    config.APPLICATION.base=base_setting
+
     return filepath
 
 def produce_install_bin_file(config,
@@ -336,12 +496,13 @@ def produce_install_bin_file(config,
                                         "INSTALL_BIN.template")
         
         # build the name of the directory that will contain the binaries
-        binaries_dir_name = "BINARIES-" + config.VARS.dist
+        binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
         # build the substitution loop
         loop_cmd = "for f in $(grep -RIl"
         for key in d_sub:
             loop_cmd += " -e "+ key
-        loop_cmd += ' INSTALL); do\n     sed -i "\n'
+        loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
+                    '); do\n     sed -i "\n'
         for key in d_sub:
             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
         loop_cmd += '            " $f\ndone'
@@ -349,6 +510,7 @@ def produce_install_bin_file(config,
         d={}
         d["BINARIES_DIR"] = binaries_dir_name
         d["SUBSTITUTION_LOOP"]=loop_cmd
+        d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
         
         # substitute the template and write it in file
         content=src.template.substitute(installbin_template_path, d)
@@ -430,6 +592,53 @@ def product_appli_creation_script(config,
     
     return tmp_file_path
 
+def bin_products_archives(config, logger, only_vcs):
+    '''Prepare binary packages for all products
+    :param config Config: The global configuration.
+    :return: the error status
+    :rtype: bool
+    '''
+
+    logger.write("Make %s binary archives\n" % config.VARS.dist)
+    # Get the default directory where to put the packages
+    binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
+    src.ensure_path_exists(binpackage_path)
+    # Get the list of product installation to add to the archive
+    l_products_name = sorted(config.APPLICATION.products.keys())
+    l_product_info = src.product.get_products_infos(l_products_name,
+                                                    config)
+    # first loop on products : filter products, analyse properties,
+    # and store the information that will be used to create the archive in the second loop 
+    l_not_installed=[] # store not installed products for warning at the end
+    for prod_name, prod_info in l_product_info:
+        # ignore the native and fixed products for install directories
+        if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
+                or src.product.product_is_native(prod_info) 
+                or src.product.product_is_fixed(prod_info)
+                or not src.product.product_compiles(prod_info)):
+            continue
+        if only_vcs and not src.product.product_is_vcs(prod_info):
+            continue
+        if not src.product.check_installation(config, prod_info):
+            l_not_installed.append(prod_name)
+            continue  # product is not installed, we skip it
+        # prepare call to make_bin_archive
+        path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version + "-" + config.VARS.dist + PACKAGE_EXT) 
+        targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
+        bin_path = prod_info.install_dir
+        targz_prod.add(bin_path)
+        targz_prod.close()
+        # Python program to find MD5 hash value of a file
+        import hashlib
+        with open(path_targz_prod,"rb") as f:
+            bytes = f.read() # read file as bytes
+            readable_hash = hashlib.md5(bytes).hexdigest();
+            with open(path_targz_prod+".md5", "w") as md5sum:
+               md5sum.write("%s  %s" % (readable_hash, os.path.basename(path_targz_prod))) 
+            logger.write("   archive : %s   (md5sum = %s)\n" % (path_targz_prod, readable_hash))
+
+    return 0
+
 def binary_package(config, logger, options, tmp_working_dir):
     '''Prepare a dictionary that stores all the needed directories and files to
        add in a binary package.
@@ -447,14 +656,31 @@ def binary_package(config, logger, options, tmp_working_dir):
     '''
 
     # Get the list of product installation to add to the archive
-    l_products_name = config.APPLICATION.products.keys()
+    l_products_name = sorted(config.APPLICATION.products.keys())
     l_product_info = src.product.get_products_infos(l_products_name,
                                                     config)
+
+    # suppress compile time products for binaries-only archives
+    if not options.sources:
+        update_config(config, logger, "compile_time", "yes")
+
     l_install_dir = []
     l_source_dir = []
     l_not_installed = []
     l_sources_not_present = []
+    generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
+    if ("APPLICATION" in config  and
+        "properties"  in config.APPLICATION  and
+        "mesa_launcher_in_package"    in config.APPLICATION.properties  and
+        config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
+            generate_mesa_launcher=True
+
+    # first loop on products : filter products, analyse properties,
+    # and store the information that will be used to create the archive in the second loop 
     for prod_name, prod_info in l_product_info:
+        # skip product with property not_in_package set to yes
+        if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
+            continue  
 
         # Add the sources of the products that have the property 
         # sources_in_package : "yes"
@@ -470,8 +696,13 @@ def binary_package(config, logger, options, tmp_working_dir):
                 or src.product.product_is_fixed(prod_info)
                 or not src.product.product_compiles(prod_info)):
             continue
-        if src.product.check_installation(prod_info):
-            l_install_dir.append((prod_name, prod_info.install_dir))
+        # 
+        # products with single_dir property will be installed in the PRODUCTS directory of the archive
+        is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
+                       src.product.product_test_property(prod_info,"single_install_dir", "yes"))
+        if src.product.check_installation(config, prod_info):
+            l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
+                                  is_single_dir, prod_info.install_mode))
         else:
             l_not_installed.append(prod_name)
         
@@ -480,23 +711,41 @@ def binary_package(config, logger, options, tmp_working_dir):
             # cpp module
             for name_cpp in src.product.get_product_components(prod_info):
                 install_dir = os.path.join(config.APPLICATION.workdir,
-                                           "INSTALL", name_cpp) 
+                                           config.INTERNAL.config.install_dir,
+                                           name_cpp) 
                 if os.path.exists(install_dir):
-                    l_install_dir.append((name_cpp, install_dir))
+                    l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
                 else:
                     l_not_installed.append(name_cpp)
         
+    # check the name of the directory that (could) contains the binaries 
+    # from previous detar
+    binaries_from_detar = os.path.join(
+                              config.APPLICATION.workdir,
+                              config.INTERNAL.config.binary_dir + config.VARS.dist)
+    if os.path.exists(binaries_from_detar):
+         logger.write("""
+WARNING: existing binaries directory from previous detar installation:
+         %s
+         To make new package from this, you have to: 
+         1) install binaries in INSTALL directory with the script "install_bin.sh" 
+            see README file for more details
+         2) or recompile everything in INSTALL with "sat compile" command 
+            this step is long, and requires some linux packages to be installed 
+            on your system\n
+""" % binaries_from_detar)
+    
     # Print warning or error if there are some missing products
     if len(l_not_installed) > 0:
         text_missing_prods = ""
         for p_name in l_not_installed:
-            text_missing_prods += "-" + p_name + "\n"
+            text_missing_prods += " - " + p_name + "\n"
         if not options.force_creation:
-            msg = _("ERROR: there are missing products installations:")
+            msg = _("ERROR: there are missing product installations:")
             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
                                      text_missing_prods),
                          1)
-            return None
+            raise src.SatException(msg)
         else:
             msg = _("WARNING: there are missing products installations:")
             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
@@ -509,11 +758,11 @@ def binary_package(config, logger, options, tmp_working_dir):
         for p_name in l_sources_not_present:
             text_missing_prods += "-" + p_name + "\n"
         if not options.force_creation:
-            msg = _("ERROR: there are missing products sources:")
+            msg = _("ERROR: there are missing product sources:")
             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
                                      text_missing_prods),
                          1)
-            return None
+            raise src.SatException(msg)
         else:
             msg = _("WARNING: there are missing products sources:")
             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
@@ -521,43 +770,98 @@ def binary_package(config, logger, options, tmp_working_dir):
                          1)
  
     # construct the name of the directory that will contain the binaries
-    binaries_dir_name = "BINARIES-" + config.VARS.dist
-    
+    if src.architecture.is_windows():
+        binaries_dir_name = config.INTERNAL.config.binary_dir
+    else:
+        binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
     # construct the correlation table between the product names, there 
     # actual install directories and there install directory in archive
     d_products = {}
-    for prod_name, install_dir in l_install_dir:
-        path_in_archive = os.path.join(binaries_dir_name, prod_name)
+    for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
+        prod_base_name=os.path.basename(install_dir)
+        if install_mode == "base":
+            # case of a products installed in base. 
+            # because the archive is in base:no mode, the name of the install dir is different inside archive
+            # we set it to the product name or by PRODUCTS if single-dir
+            if is_single_dir:
+                prod_base_name=config.INTERNAL.config.single_install_dir
+            else:
+                prod_base_name=prod_info_name
+        path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
         
     for prod_name, source_dir in l_source_dir:
         path_in_archive = os.path.join("SOURCES", prod_name)
         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
 
-    # create the relative launcher and add it to the files to add
-    if ("profile" in config.APPLICATION and 
-                                       "product" in config.APPLICATION.profile):
-        launcher_name = config.APPLICATION.profile.launcher_name
-        launcher_package = produce_relative_launcher(config,
-                                             logger,
-                                             tmp_working_dir,
-                                             launcher_name,
-                                             binaries_dir_name,
-                                             not(options.without_commercial))
-    
-        d_products["launcher"] = (launcher_package, launcher_name)
-        if options.sources:
-            # if we mix binaries and sources, we add a copy of the launcher, 
-            # prefixed  with "bin",in order to avoid clashes
-            d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
-    else:
-        # Provide a script for the creation of an application EDF style
-        appli_script = product_appli_creation_script(config,
-                                                    logger,
-                                                    tmp_working_dir,
-                                                    binaries_dir_name)
-        
-        d_products["appli script"] = (appli_script, "create_appli.py")
+    # create an archives of compilation logs, and insert it into the tarball
+    logpath=os.path.join(config.APPLICATION.workdir, "LOGS")
+    path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
+    tar_log = tarfile.open(path_targz_logs, mode='w:gz')
+    tar_log.add(logpath, arcname="LOGS")
+    tar_log.close()
+    d_products["LOGS"] = (path_targz_logs, "logs.tgz")
+    # for packages of SALOME applications including KERNEL, 
+    # we produce a salome launcher or a virtual application (depending on salome version)
+    if 'KERNEL' in config.APPLICATION.products:
+        VersionSalome = src.get_salome_version(config)
+        # Case where SALOME has the launcher that uses the SalomeContext API
+        if VersionSalome >= 730:
+            # create the relative launcher and add it to the files to add
+            launcher_name = src.get_launcher_name(config)
+            launcher_package = produce_relative_launcher(config,
+                                                 logger,
+                                                 tmp_working_dir,
+                                                 launcher_name,
+                                                 binaries_dir_name)
+            d_products["launcher"] = (launcher_package, launcher_name)
+
+            # if the application contains mesa products, we generate in addition to the 
+            # classical salome launcher a launcher using mesa and called mesa_salome 
+            # (the mesa launcher will be used for remote usage through ssh).
+            if generate_mesa_launcher:
+                #if there is one : store the use_mesa property
+                restore_use_mesa_option=None
+                if ('properties' in config.APPLICATION and 
+                    'use_mesa' in config.APPLICATION.properties):
+                    restore_use_mesa_option = config.APPLICATION.properties.use_mesa
+
+                # activate mesa property, and generate a mesa launcher
+                src.activate_mesa_property(config)  #activate use_mesa property
+                launcher_mesa_name="mesa_"+launcher_name
+                launcher_package_mesa = produce_relative_launcher(config,
+                                                     logger,
+                                                     tmp_working_dir,
+                                                     launcher_mesa_name,
+                                                     binaries_dir_name)
+                d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
+
+                # if there was a use_mesa value, we restore it
+                # else we set it to the default value "no"
+                if restore_use_mesa_option != None:
+                    config.APPLICATION.properties.use_mesa=restore_use_mesa_option
+                else:
+                    config.APPLICATION.properties.use_mesa="no"
+
+            if options.sources:
+                # if we mix binaries and sources, we add a copy of the launcher, 
+                # prefixed  with "bin",in order to avoid clashes
+                launcher_copy_name="bin"+launcher_name
+                launcher_package_copy = produce_relative_launcher(config,
+                                                     logger,
+                                                     tmp_working_dir,
+                                                     launcher_copy_name,
+                                                     binaries_dir_name)
+                d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
+        else:
+            # Provide a script for the creation of an application EDF style
+            appli_script = product_appli_creation_script(config,
+                                                        logger,
+                                                        tmp_working_dir,
+                                                        binaries_dir_name)
+            
+            d_products["appli script"] = (appli_script, "create_appli.py")
 
     # Put also the environment file
     env_file = produce_relative_env_files(config,
@@ -565,8 +869,27 @@ def binary_package(config, logger, options, tmp_working_dir):
                                            tmp_working_dir,
                                            binaries_dir_name)
 
-    d_products["environment file"] = (env_file, "env_launch.sh")
-      
+    if src.architecture.is_windows():
+      filename  = "env_launch.bat"
+    else:
+      filename  = "env_launch.sh"
+    d_products["environment file"] = (env_file, filename)      
+
+    # If option exe, produce an extra launcher based on specified exe
+    if options.exe:
+        exe_file = produce_relative_env_files(config,
+                                              logger,
+                                              tmp_working_dir,
+                                              binaries_dir_name,
+                                              options.exe)
+            
+        if src.architecture.is_windows():
+          filename  = os.path.basename(options.exe) + ".bat"
+        else:
+          filename  = os.path.basename(options.exe) + ".sh"
+        d_products["exe file"] = (exe_file, filename)      
+    
+
     return d_products
 
 def source_package(sat, config, logger, options, tmp_working_dir):
@@ -585,10 +908,15 @@ def source_package(sat, config, logger, options, tmp_working_dir):
     :rtype: dict
     '''
     
+    d_archives={}
     # Get all the products that are prepared using an archive
-    logger.write("Find archive products ... ")
-    d_archives, l_pinfo_vcs = get_archives(config, logger)
-    logger.write("Done\n")
+    # unless ftp mode is specified (in this case the user of the
+    # archive will get the sources through the ftp mode of sat prepare)
+    if not options.ftp:
+        logger.write("Find archive products ... ")
+        d_archives, l_pinfo_vcs = get_archives(config, logger)
+        logger.write("Done\n")
+
     d_archives_vcs = {}
     if not options.with_vcs and len(l_pinfo_vcs) > 0:
         # Make archives with the products that are not prepared using an archive
@@ -604,29 +932,33 @@ def source_package(sat, config, logger, options, tmp_working_dir):
     # Create a project
     logger.write("Create the project ... ")
     d_project = create_project_for_src_package(config,
-                                                tmp_working_dir,
-                                                options.with_vcs)
+                                               tmp_working_dir,
+                                               options.with_vcs,
+                                               options.ftp)
     logger.write("Done\n")
     
     # Add salomeTools
     tmp_sat = add_salomeTools(config, tmp_working_dir)
-    d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
+    d_sat = {"salomeTools" : (tmp_sat, "sat")}
     
     # Add a sat symbolic link if not win
     if not src.architecture.is_windows():
-        tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
         try:
             t = os.getcwd()
         except:
             # In the jobs, os.getcwd() can fail
-            t = config.USER.workdir
+            t = config.LOCAL.workdir
         os.chdir(tmp_working_dir)
-        if os.path.lexists(tmp_satlink_path):
-            os.remove(tmp_satlink_path)
-        os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
+
+        # create a symlink, to avoid reference with "salomeTool/.."
+        os.chdir("PROJECT")
+        if os.path.lexists("ARCHIVES"):
+            os.remove("ARCHIVES")
+        os.symlink("../ARCHIVES", "ARCHIVES")
         os.chdir(t)
         
-        d_sat["sat link"] = (tmp_satlink_path, "sat")
+        d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"), 
+                                     os.path.join("PROJECT", "ARCHIVES"))
     
     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
     return d_source
@@ -650,6 +982,9 @@ def get_archives(config, logger):
     d_archives = {}
     l_pinfo_vcs = []
     for p_name, p_info in l_product_info:
+        # skip product with property not_in_package set to yes
+        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
+            continue  
         # ignore the native and fixed products
         if (src.product.product_is_native(p_info) 
                 or src.product.product_is_fixed(p_info)):
@@ -657,11 +992,34 @@ def get_archives(config, logger):
         if p_info.get_source == "archive":
             archive_path = p_info.archive_info.archive_name
             archive_name = os.path.basename(archive_path)
+            d_archives[p_name] = (archive_path,
+                                  os.path.join(ARCHIVE_DIR, archive_name))
+            if (src.appli_test_property(config,"pip", "yes") and 
+                src.product.product_test_property(p_info,"pip", "yes")):
+                # if pip mode is activated, and product is managed by pip
+                pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
+                pip_wheel_pattern=os.path.join(pip_wheels_dir, 
+                    "%s-%s*" % (p_info.name, p_info.version))
+                pip_wheel_path=glob.glob(pip_wheel_pattern)
+                msg_pip_not_found="Error in get_archive, pip wheel for "\
+                                  "product %s-%s was not found in %s directory"
+                msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
+                                  "product %s-%s were found in %s directory"
+                if len(pip_wheel_path)==0:
+                    raise src.SatException(msg_pip_not_found %\
+                        (p_info.name, p_info.version, pip_wheels_dir))
+                if len(pip_wheel_path)>1:
+                    raise src.SatException(msg_pip_two_or_more %\
+                        (p_info.name, p_info.version, pip_wheels_dir))
+
+                pip_wheel_name=os.path.basename(pip_wheel_path[0])
+                d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
+                    os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
         else:
-            l_pinfo_vcs.append((p_name, p_info))
+            # this product is not managed by archive, 
+            # an archive of the vcs directory will be created by get_archive_vcs
+            l_pinfo_vcs.append((p_name, p_info)) 
             
-        d_archives[p_name] = (archive_path,
-                              os.path.join(ARCHIVE_DIR, archive_name))
     return d_archives, l_pinfo_vcs
 
 def add_salomeTools(config, tmp_working_dir):
@@ -684,6 +1042,17 @@ def add_salomeTools(config, tmp_working_dir):
     local_pyconf_name = "local.pyconf"
     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
+    # Remove the .pyconf file in the root directory of salomeTools if there is
+    # any. (For example when launching jobs, a pyconf file describing the jobs 
+    # can be here and is not useful) 
+    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
+    for file_or_dir in files_or_dir_SAT:
+        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
+            file_path = os.path.join(tmp_working_dir,
+                                     "salomeTools",
+                                     file_or_dir)
+            os.remove(file_path)
+    
     ff = open(local_pyconf_file, "w")
     ff.write(LOCAL_TEMPLATE)
     ff.close()
@@ -712,27 +1081,58 @@ def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
     # clean the source directory of all the vcs products, then use the source 
     # command and thus construct an archive that will not contain the patches
     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
-    # clean
-    logger.write(_("clean sources\n"))
-    args_clean = config.VARS.application
-    args_clean += " --sources --products "
-    args_clean += ",".join(l_prod_names)
-    sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
-    # source
-    logger.write(_("get sources"))
-    args_source = config.VARS.application
-    args_source += " --products "
-    args_source += ",".join(l_prod_names)
-    sat.source(args_source, batch=True, verbose=0, logger_add_link = logger)
-
-    # make the new archives
-    d_archives_vcs = {}
-    for pn, pinfo in l_pinfo_vcs:
-        path_archive = make_archive(pn, pinfo, tmp_working_dir)
-        d_archives_vcs[pn] = (path_archive,
-                              os.path.join(ARCHIVE_DIR, pn + ".tgz"))
+    if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
+      logger.write(_("\nclean sources\n"))
+      args_clean = config.VARS.application
+      args_clean += " --sources --products "
+      args_clean += ",".join(l_prod_names)
+      logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
+      sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
+    if True:
+      # source
+      logger.write(_("get sources\n"))
+      args_source = config.VARS.application
+      args_source += " --products "
+      args_source += ",".join(l_prod_names)
+      svgDir = sat.cfg.APPLICATION.workdir
+      tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
+      sat.cfg.APPLICATION.workdir = tmp_local_working_dir
+      # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
+      # DBG.write("sat config id", id(sat.cfg), True)
+      # NOTE: this config object does not have the same id() as the one seen by sat.source()
+      # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
+      import source
+      source.run(args_source, sat, logger) #use this mode as runner.cfg reference
+      
+      # make the new archives
+      d_archives_vcs = {}
+      for pn, pinfo in l_pinfo_vcs:
+          path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
+          logger.write("make archive vcs '%s'\n" % path_archive)
+          d_archives_vcs[pn] = (path_archive,
+                                os.path.join(ARCHIVE_DIR, pn + ".tgz"))
+      sat.cfg.APPLICATION.workdir = svgDir
+      # DBG.write("END sat config", sat.cfg.APPLICATION, True)
     return d_archives_vcs
 
+def make_bin_archive(prod_name, prod_info, where):
+    '''Create an archive of a product by searching its source directory.
+
+    :param prod_name str: The name of the product.
+    :param prod_info Config: The specific configuration corresponding to the 
+                             product
+    :param where str: The path of the repository where to put the resulting 
+                      archive
+    :return: The path of the resulting archive
+    :rtype: str
+    '''
+    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
+    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
+    bin_path = prod_info.install_dir
+    tar_prod.add(bin_path, arcname=path_targz_prod)
+    tar_prod.close()
+    return path_targz_prod       
+
 def make_archive(prod_name, prod_info, where):
     '''Create an archive of a product by searching its source directory.
 
@@ -744,14 +1144,21 @@ def make_archive(prod_name, prod_info, where):
     :return: The path of the resulting archive
     :rtype: str
     '''
-    path_targz_prod = os.path.join(where, prod_name + ".tgz")
+    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
     local_path = prod_info.source_dir
-    tar_prod.add(local_path, arcname=prod_name)
+    if old_python:
+        tar_prod.add(local_path,
+                     arcname=prod_name,
+                     exclude=exclude_VCS_and_extensions_26)
+    else:
+        tar_prod.add(local_path,
+                     arcname=prod_name,
+                     filter=exclude_VCS_and_extensions)
     tar_prod.close()
     return path_targz_prod       
 
-def create_project_for_src_package(config, tmp_working_dir, with_vcs):
+def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
     '''Create a specific project for a source package.
 
     :param config Config: The global configuration.
@@ -760,6 +1167,7 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs):
                                 source package
     :param with_vcs boolean: True if the package is with vcs products (not 
                              transformed into archive products)
+    :param with_ftp boolean: True if the package use ftp servers to get archives
     :return: The dictionary 
              {"project" : (produced project, project path in the archive)}
     :rtype: Dict
@@ -792,6 +1200,23 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs):
     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
     ff = open(project_pyconf_file, "w")
     ff.write(PROJECT_TEMPLATE)
+    if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
+        ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
+        for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
+            ftp_path=ftp_path+":"+ftpserver
+        ftp_path+='"'
+        ff.write("# ftp servers where to search for prerequisite archives\n")
+        ff.write(ftp_path)
+    # add licence paths if any
+    if len(config.PATHS.LICENCEPATH) > 0:  
+        licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
+        for path in config.PATHS.LICENCEPATH[1:]:
+            licence_path=licence_path+":"+path
+        licence_path+='"'
+        ff.write("\n# Where to search for licences\n")
+        ff.write(licence_path)
+        
+
     ff.close()
     
     # Loop over the products to get there pyconf and all the scripts 
@@ -800,6 +1225,9 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs):
     lproducts_name = config.APPLICATION.products.keys()
     l_products = src.product.get_products_infos(lproducts_name, config)
     for p_name, p_info in l_products:
+        # skip product with property not_in_package set to yes
+        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
+            continue  
         find_product_scripts_and_pyconf(p_name,
                                         p_info,
                                         config,
@@ -809,7 +1237,10 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs):
                                         patches_tmp_dir,
                                         products_pyconf_tmp_dir)
     
-    find_application_pyconf(config, application_tmp_dir)
+    # for the application pyconf, we write directly the config
+    # don't search for the original pyconf file
+    # to avoid problems with overwrite sections and rm_products key
+    write_application_pyconf(config, application_tmp_dir)
     
     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
     return d_project
@@ -844,22 +1275,18 @@ def find_product_scripts_and_pyconf(p_name,
     '''
     
     # read the pyconf of the product
-    product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
-                                           config.PATHS.PRODUCTPATH)
-    product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
+    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
 
     # find the compilation script if any
     if src.product.product_has_script(p_info):
         compil_script_path = src.Path(p_info.compil_script)
         compil_script_path.copy(compil_scripts_tmp_dir)
-        product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
-                                                    p_info.compil_script)
+
     # find the environment script if any
     if src.product.product_has_env_script(p_info):
         env_script_path = src.Path(p_info.environ.env_script)
         env_script_path.copy(env_scripts_tmp_dir)
-        product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
-                                                p_info.environ.env_script)
+
     # find the patches if any
     if src.product.product_has_patches(p_info):
         patches = src.pyconf.Sequence()
@@ -868,26 +1295,37 @@ def find_product_scripts_and_pyconf(p_name,
             p_path.copy(patches_tmp_dir)
             patches.append(os.path.basename(patch_path), "")
 
-        product_pyconf_cfg[p_info.section].patches = patches
-    
-    if with_vcs:
-        # put in the pyconf file the resolved values
-        for info in ["git_info", "cvs_info", "svn_info"]:
-            if info in p_info:
-                for key in p_info[info]:
-                    product_pyconf_cfg[p_info.section][info][key] = p_info[
-                                                                      info][key]
-    else:
-        # if the product is not archive, then make it become archive.
-        if src.product.product_is_vcs(p_info):
-            product_pyconf_cfg[p_info.section].get_source = "archive"
-            if not "archive_info" in product_pyconf_cfg[p_info.section]:
-                product_pyconf_cfg[p_info.section].addMapping("archive_info",
+    if (not with_vcs) and src.product.product_is_vcs(p_info):
+        # in non vcs mode, if the product is not archive, then make it become archive.
+
+        # depending upon the incremental mode, select impacted sections
+        if "properties" in p_info and "incremental" in p_info.properties and\
+            p_info.properties.incremental == "yes":
+            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
+        else:
+            sections = [p_info.section]
+        for section in sections:
+            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
+                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
+                          (p_name,section))
+                product_pyconf_cfg[section].get_source = "archive"
+                if not "archive_info" in product_pyconf_cfg[section]:
+                    product_pyconf_cfg[section].addMapping("archive_info",
                                         src.pyconf.Mapping(product_pyconf_cfg),
                                         "")
-            product_pyconf_cfg[p_info.section
-                              ].archive_info.archive_name = p_info.name + ".tgz"
+                    product_pyconf_cfg[section].archive_info.archive_name =\
+                        p_info.name + ".tgz"
     
+    if (with_vcs) and src.product.product_is_vcs(p_info):
+        # in vcs mode we must replace explicitly the git server url
+        # (or it will not be found later because project files are not exported in archives)
+        for section in product_pyconf_cfg:
+            # replace in all sections of the product pyconf the git repo definition by its substituted value (found in p_info)
+            if "git_info" in product_pyconf_cfg[section]:
+                for repo in product_pyconf_cfg[section].git_info:
+                    if repo in p_info.git_info:
+                        product_pyconf_cfg[section].git_info[repo] =  p_info.git_info[repo]
+
     # write the pyconf file to the temporary project location
     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
                                            p_name + ".pyconf")
@@ -896,46 +1334,87 @@ def find_product_scripts_and_pyconf(p_name,
     product_pyconf_cfg.__save__(ff, 1)
     ff.close()
 
-def find_application_pyconf(config, application_tmp_dir):
-    '''Find the application pyconf file and put it in the specific temporary 
+
+def write_application_pyconf(config, application_tmp_dir):
+    '''Write the application pyconf file in the specific temporary 
        directory containing the specific project of a source package.
 
     :param config Config: The global configuration.
     :param application_tmp_dir str: The path to the temporary application 
-                                       scripts directory of the project.
+                                    scripts directory of the project.
     '''
-    # read the pyconf of the application
     application_name = config.VARS.application
-    application_pyconf_path = src.find_file_in_lpath(
-                                            application_name + ".pyconf",
-                                            config.PATHS.APPLICATIONPATH)
-    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
-    
-    # Change the workdir
-    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
-                                    application_pyconf_cfg,
-                                    src.pyconf.DOLLAR,
-                                    'VARS.salometoolsway + $VARS.sep + ".."')
-
-    # Prevent from compilation in base
-    application_pyconf_cfg.APPLICATION.no_base = "yes"
-    
     # write the pyconf file to the temporary application location
     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                                application_name + ".pyconf")
-    ff = open(application_tmp_pyconf_path, 'w')
-    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
-    application_pyconf_cfg.__save__(ff, 1)
+    with open(application_tmp_pyconf_path, 'w') as f:
+        f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
+        res = src.pyconf.Config()
+        app = src.pyconf.deepCopyMapping(config.APPLICATION)
+
+        # set base mode to "no" for the archive
+        app.base = "no"
+
+        # Change the workdir
+        app.workdir = src.pyconf.Reference(
+                                 app,
+                                 src.pyconf.DOLLAR,
+                                 'VARS.salometoolsway + $VARS.sep + ".."')
+        res.addMapping("APPLICATION", app, "")
+        res.__save__(f, evaluated=False)
+    
+
+def sat_package(config, tmp_working_dir, options, logger):
+    '''Prepare a dictionary that stores all the needed directories and files to
+       add in a salomeTool package.
+    
+    :param tmp_working_dir str: The temporary local working directory 
+    :param options OptResult: the options of the launched command
+    :return: the dictionary that stores all the needed directories and files to
+             add in a salomeTool package.
+             {label : (path_on_local_machine, path_in_archive)}
+    :rtype: dict
+    '''
+    d_project = {}
+
+    # we include sat itself
+    d_project["all_sat"]=(config.VARS.salometoolsway, "")
+
+    # and we overwrite local.pyconf with a clean version.
+    local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
+    local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
+    local_cfg = src.pyconf.Config(local_file_path)
+    local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
+    local_cfg.LOCAL["base"] = "default"
+    local_cfg.LOCAL["workdir"] = "default"
+    local_cfg.LOCAL["log_dir"] = "default"
+    local_cfg.LOCAL["archive_dir"] = "default"
+    local_cfg.LOCAL["VCS"] = "None"
+    local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
+
+    # if the archive contains a project, we write its relative path in local.pyconf
+    if options.project:
+        project_arch_path = os.path.join("projects", options.project, 
+                                         os.path.basename(options.project_file_path))
+        local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
+
+    ff = open(local_pyconf_tmp_path, 'w')
+    local_cfg.__save__(ff, 1)
     ff.close()
+    d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
+    return d_project
+    
 
-def project_package(project_file_path, tmp_working_dir):
+def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
     '''Prepare a dictionary that stores all the needed directories and files to
        add in a project package.
     
     :param project_file_path str: The path to the local project.
+    :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
     :param tmp_working_dir str: The temporary local directory containing some 
                                 specific directories or files needed in the 
                                 project package
+    :param embedded_in_sat boolean : the project package is embedded in a sat package
     :return: the dictionary that stores all the needed directories and files to
              add in a project package.
              {label : (path_on_local_machine, path_in_archive)}
@@ -943,18 +1422,37 @@ def project_package(project_file_path, tmp_working_dir):
     '''
     d_project = {}
     # Read the project file and get the directories to add to the package
-    project_pyconf_cfg = src.pyconf.Config(project_file_path)
-    paths = {"ARCHIVEPATH" : "archives",
-             "APPLICATIONPATH" : "applications",
+    
+    try: 
+      project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
+    except:
+      logger.write("""
+WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
+      project_pyconf_cfg = src.pyconf.Config(project_file_path)
+      project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
+    
+    paths = {"APPLICATIONPATH" : "applications",
              "PRODUCTPATH" : "products",
              "JOBPATH" : "jobs",
              "MACHINEPATH" : "machines"}
+    if not ftp_mode:
+        paths["ARCHIVEPATH"] = "archives"
+
     # Loop over the project paths and add it
+    project_file_name = os.path.basename(project_file_path)
     for path in paths:
         if path not in project_pyconf_cfg:
             continue
+        if embedded_in_sat:
+            dest_path = os.path.join("projects", name_project, paths[path])
+            project_file_dest = os.path.join("projects", name_project, project_file_name)
+        else:
+            dest_path = paths[path]
+            project_file_dest = project_file_name
+
         # Add the directory to the files to add in the package
-        d_project[path] = (project_pyconf_cfg[path], paths[path])
+        d_project[path] = (project_pyconf_cfg[path], dest_path)
+
         # Modify the value of the path in the package
         project_pyconf_cfg[path] = src.pyconf.Reference(
                                     project_pyconf_cfg,
@@ -969,15 +1467,19 @@ def project_package(project_file_path, tmp_working_dir):
     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
                                                            src.pyconf.DOLLAR,
                                                            'PWD')
+    # we don't want to export these two fields
+    project_pyconf_cfg.__delitem__("file_path")
+    project_pyconf_cfg.__delitem__("PWD")
+    if ftp_mode:
+        project_pyconf_cfg.__delitem__("ARCHIVEPATH")
     
     # Write the project pyconf file
-    project_file_name = os.path.basename(project_file_path)
     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
     ff = open(project_pyconf_tmp_path, 'w')
     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
     project_pyconf_cfg.__save__(ff, 1)
     ff.close()
-    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)
+    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
     
     return d_project
 
@@ -996,12 +1498,14 @@ In the following, $$ROOT represents the directory where you have installed
 SALOME (the directory where this file is located).
 
 """
+        if src.architecture.is_windows():
+            readme_header = readme_header.replace('$$ROOT','%ROOT%')
         readme_compilation_with_binaries="""
 
 compilation based on the binaries used as prerequisites
 =======================================================
 
-If you fail to compile the the complete application (for example because
+If you fail to compile the complete application (for example because
 you are not root on your system and cannot install missing packages), you
 may try a partial compilation based on the binaries.
 For that it is necessary to copy the binaries from BINARIES to INSTALL,
@@ -1017,10 +1521,12 @@ The procedure to do it is:
 
 """
         readme_header_tpl=string.Template(readme_header)
-        readme_template_path_bin_prof = os.path.join(config.VARS.internal_dir,
+        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                 "README_BIN.template")
-        readme_template_path_bin_noprof = os.path.join(config.VARS.internal_dir,
-                "README_BIN_NO_PROFILE.template")
+        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
+                "README_LAUNCHER.template")
+        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
+                "README_BIN_VIRTUAL_APP.template")
         readme_template_path_src = os.path.join(config.VARS.internal_dir,
                 "README_SRC.template")
         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
@@ -1032,30 +1538,43 @@ The procedure to do it is:
         d = dict()
         d['user'] = config.VARS.user
         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
-        d['version'] = config.INTERNAL.sat_version
+        d['version'] = src.get_salometool_version(config)
         d['dist'] = config.VARS.dist
         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
 
         if options.binaries or options.sources:
             d['application'] = config.VARS.application
-            f.write("# Application: " + d['application'])
-            if 'profile' in config.APPLICATION:
-                d['launcher'] = config.APPLICATION.profile.launcher_name
-                d['launcher'] = config.APPLICATION.profile.launcher_name
+            d['BINARIES']    = config.INTERNAL.config.binary_dir
+            d['SEPARATOR'] = config.VARS.sep
+            if src.architecture.is_windows():
+                d['operatingSystem'] = 'Windows'
+                d['PYTHON3'] = 'python3'
+                d['ROOT']    = '%ROOT%'
             else:
-                d['env_file'] = 'env_launch.sh'
+                d['operatingSystem'] = 'Linux'
+                d['PYTHON3'] = ''
+                d['ROOT']    = '$ROOT'
+            f.write("# Application: " + d['application'] + "\n")
+            if 'KERNEL' in config.APPLICATION.products:
+                VersionSalome = src.get_salome_version(config)
+                # Case where SALOME has the launcher that uses the SalomeContext API
+                if VersionSalome >= 730:
+                    d['launcher'] = config.APPLICATION.profile.launcher_name
+                else:
+                    d['virtual_app'] = 'runAppli' # this info is not used for now
 
         # write the specific sections
         if options.binaries:
-            if "env_file" in d:
-                f.write(src.template.substitute(readme_template_path_bin_noprof, d))
-            else:
-                f.write(src.template.substitute(readme_template_path_bin_prof, d))
+            f.write(src.template.substitute(readme_template_path_bin, d))
+            if "virtual_app" in d:
+                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
+            if "launcher" in d:
+                f.write(src.template.substitute(readme_template_path_bin_launcher, d))
 
         if options.sources:
             f.write(src.template.substitute(readme_template_path_src, d))
 
-        if options.binaries and options.sources:
+        if options.binaries and options.sources and not src.architecture.is_windows():
             f.write(readme_compilation_with_binaries)
 
         if options.project:
@@ -1066,21 +1585,23 @@ The procedure to do it is:
     
     return readme_path
 
-def update_config(config, prop, value):
+def update_config(config, logger,  prop, value):
     '''Remove from config.APPLICATION.products the products that have the property given as input.
     
     :param config Config: The global config.
     :param prop str: The property to filter
     :param value str: The value of the property to filter
     '''
-    src.check_config_has_application(config)
-    l_product_to_remove = []
-    for product_name in config.APPLICATION.products.keys():
-        prod_cfg = src.product.get_product_config(config, product_name)
-        if src.get_property_in_product_cfg(prod_cfg, prop) == value:
-            l_product_to_remove.append(product_name)
-    for product_name in l_product_to_remove:
-        config.APPLICATION.products.__delitem__(product_name)
+    # if there is no APPLICATION (ex sat package -t) : nothing to do
+    if "APPLICATION" in config:
+        l_product_to_remove = []
+        for product_name in config.APPLICATION.products.keys():
+            prod_cfg = src.product.get_product_config(config, product_name)
+            if src.get_property_in_product_cfg(prod_cfg, prop) == value:
+                l_product_to_remove.append(product_name)
+        for product_name in l_product_to_remove:
+            config.APPLICATION.products.__delitem__(product_name)
+            logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
 
 def description():
     '''method that is called when salomeTools is called with --help option.
@@ -1088,14 +1609,21 @@ def description():
     :return: The text to display for the package command description.
     :rtype: str
     '''
-    return _("The package command creates an archive.\nThere are 4 kinds of "
-             "archive, which can be mixed:\n  1- The binary archive. It contains all the product "
-             "installation directories and a launcher,\n  2- The sources archive."
-             " It contains the products archives, a project corresponding to "
-             "the application and salomeTools,\n  3- The project archive. It "
-             "contains a project (give the project file path as argument),\n  4-"
-             " The salomeTools archive. It contains salomeTools.\n\nexample:"
-             "\nsat package SALOME-master --bineries --sources")
+    return _("""
+The package command creates a tar file archive of a product.
+There are four kinds of archive, which can be mixed:
+
+ 1 - The binary archive. 
+     It contains the product installation directories plus a launcher.
+ 2 - The sources archive. 
+     It contains the product archives, a project (the application plus salomeTools).
+ 3 - The project archive. 
+     It contains a project (give the project file path as argument).
+ 4 - The salomeTools archive. 
+     It contains code utility salomeTools.
+
+example:
+ >> sat package SALOME-master --binaries --sources""")
   
 def run(args, runner, logger):
     '''method that is called when salomeTools is called with package parameter.
@@ -1103,27 +1631,38 @@ def run(args, runner, logger):
     
     # Parse the options
     (options, args) = parser.parse_args(args)
-       
+
+    
     # Check that a type of package is called, and only one
     all_option_types = (options.binaries,
                         options.sources,
                         options.project not in ["", None],
-                        options.sat)
+                        options.sat,
+                        options.bin_products)
 
     # Check if no option for package type
     if all_option_types.count(True) == 0:
         msg = _("Error: Precise a type for the package\nUse one of the "
                 "following options: --binaries, --sources, --project or"
-                " --salometools")
+                " --salometools, --bin_products")
         logger.write(src.printcolors.printcError(msg), 1)
         logger.write("\n", 1)
         return 1
-    
+    do_create_package = options.binaries or options.sources or options.project or options.sat 
+
+    if options.bin_products:
+        ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
+        if ret!=0:
+            return ret
+    if not do_create_package:
+        return 0
+
+    # continue to create a tar.gz package 
+
     # The repository where to put the package if not Binary or Source
-    package_default_path = runner.cfg.USER.workdir
-    
+    package_default_path = runner.cfg.LOCAL.workdir
     # if the package contains binaries or sources:
-    if options.binaries or options.sources:
+    if options.binaries or options.sources or options.bin_products:
         # Check that the command has been called with an application
         src.check_config_has_application(runner.cfg)
 
@@ -1132,29 +1671,44 @@ def run(args, runner, logger):
                                                     runner.cfg.VARS.application), 1)
         
         # Get the default directory where to put the packages
-        package_default_path = os.path.join(runner.cfg.APPLICATION.workdir,
-                                            "PACKAGE")
+        package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
         src.ensure_path_exists(package_default_path)
         
     # if the package contains a project:
     if options.project:
         # check that the project is visible by SAT
-        if options.project not in runner.cfg.PROJECTS.project_file_paths:
-            local_path = os.path.join(runner.cfg.VARS.salometoolsway,
-                                     "data",
-                                     "local.pyconf")
-            msg = _("ERROR: the project %(proj)s is not visible by salomeTools."
-                    "\nPlease add it in the %(local)s file." % {
-                                  "proj" : options.project, "local" : local_path})
+        projectNameFile = options.project + ".pyconf"
+        foundProject = None
+        for i in runner.cfg.PROJECTS.project_file_paths:
+            baseName = os.path.basename(i)
+            if baseName == projectNameFile:
+                foundProject = i
+                break
+
+        if foundProject is None:
+            local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
+            msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
+known projects are:
+%(2)s
+
+Please add it in file:
+%(3)s""" % \
+                    {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
             logger.write(src.printcolors.printcError(msg), 1)
             logger.write("\n", 1)
             return 1
+        else:
+            options.project_file_path = foundProject
+            src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
     
-    # Remove the products that are filtered by the --without_property option
-    if options.without_property:
-        [prop, value] = options.without_property.split(":")
-        update_config(runner.cfg, prop, value)
-    
+    # Remove the products that are filtered by the --without_properties option
+    if options.without_properties:
+        prop, value = options.without_properties
+        update_config(runner.cfg, logger, prop, value)
+
+    # Remove from config the products that have the not_in_package property
+    update_config(runner.cfg, logger, "not_in_package", "yes")
+
     # get the name of the archive or build it
     if options.name:
         if os.path.basename(options.name) == options.name:
@@ -1178,20 +1732,21 @@ def run(args, runner, logger):
             archive_name = runner.cfg.APPLICATION.name
 
         if options.binaries:
-            archive_name += "_"+runner.cfg.VARS.dist
+            archive_name += "-"+runner.cfg.VARS.dist
             
         if options.sources:
-            archive_name += "_SRC"
+            archive_name += "-SRC"
             if options.with_vcs:
-                archive_name += "_VCS"
+                archive_name += "-VCS"
+
+        if options.sat:
+            archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
 
         if options.project:
-            project_name, __ = os.path.splitext(
-                                            os.path.basename(options.project))
-            archive_name += ("PROJECT_" + project_name)
+            if options.sat:
+                archive_name += "_" 
+            archive_name += ("satproject_" + options.project)
  
-        if options.sat:
-            archive_name += ("salomeTools_" + runner.cfg.INTERNAL.sat_version)
         if len(archive_name)==0: # no option worked 
             msg = _("Error: Cannot name the archive\n"
                     " check if at least one of the following options was "
@@ -1201,14 +1756,13 @@ def run(args, runner, logger):
             logger.write("\n", 1)
             return 1
  
-    path_targz = os.path.join(dir_name, archive_name + ".tgz")
+    path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
     
     src.printcolors.print_value(logger, "Package path", path_targz, 2)
 
     # Create a working directory for all files that are produced during the
     # package creation and that will be removed at the end of the command
-    tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root,
-                                   runner.cfg.VARS.datehour)
+    tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
     src.ensure_path_exists(tmp_working_dir)
     logger.write("\n", 5)
     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
@@ -1218,7 +1772,7 @@ def run(args, runner, logger):
     msg = _("Preparation of files to add to the archive")
     logger.write(src.printcolors.printcLabel(msg), 2)
     logger.write("\n", 2)
-
+    
     d_files_to_add={}  # content of the archive
 
     # a dict to hold paths that will need to be substitute for users recompilations
@@ -1234,7 +1788,9 @@ def run(args, runner, logger):
         for key in d_bin_files_to_add:
             if key.endswith("(bin)"):
                 source_dir = d_bin_files_to_add[key][0]
-                path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
+                path_in_archive = d_bin_files_to_add[key][1].replace(
+                   runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
+                   runner.cfg.INTERNAL.config.install_dir)
                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
                     # if basename is the same we will just substitute the dirname 
                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
@@ -1243,7 +1799,6 @@ def run(args, runner, logger):
                     d_paths_to_substitute[source_dir]=path_in_archive
 
         d_files_to_add.update(d_bin_files_to_add)
-
     if options.sources:
         d_files_to_add.update(source_package(runner,
                                         runner.cfg,
@@ -1265,11 +1820,12 @@ def run(args, runner, logger):
         # --salomeTool option is not considered when --sources is selected, as this option
         # already brings salomeTool!
         if options.sat:
-            d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
+            d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
+                                  options, logger))
         
-    
     if options.project:
-        d_files_to_add.update(project_package(options.project, tmp_working_dir))
+        DBG.write("config for package %s" % options.project, runner.cfg)
+        d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
 
     if not(d_files_to_add):
         msg = _("Error: Empty dictionnary to build the archive!\n")
@@ -1278,9 +1834,7 @@ def run(args, runner, logger):
         return 1
 
     # Add the README file in the package
-    local_readme_tmp_path = add_readme(runner.cfg,
-                                       options,
-                                       tmp_working_dir)
+    local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
     d_files_to_add["README"] = (local_readme_tmp_path, "README")
 
     # Add the additional files of option add_files
@@ -1293,32 +1847,48 @@ def run(args, runner, logger):
             d_files_to_add[file_name] = (file_path, file_name)
 
     logger.write("\n", 2)
-
     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
     logger.write("\n", 2)
-    
+    logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
+
+    res = 0
     try:
         # Creating the object tarfile
         tar = tarfile.open(path_targz, mode='w:gz')
         
         # get the filtering function if needed
-        filter_function = None
-        filter_function = exclude_VCS_and_extensions
+        if old_python:
+            filter_function = exclude_VCS_and_extensions_26
+        else:
+            filter_function = exclude_VCS_and_extensions
 
         # Add the files to the tarfile object
         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
         tar.close()
     except KeyboardInterrupt:
         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
-        logger.write(_("Removing the temporary working directory ... "), 1)
+        logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
         # remove the working directory
         shutil.rmtree(tmp_working_dir)
         logger.write(_("OK"), 1)
         logger.write(_("\n"), 1)
         return 1
     
-    # remove the working directory    
-    shutil.rmtree(tmp_working_dir)
+    # handle the case where there is no application, e.g. packaging only sat ('sat package -t')
+    try:
+        app = runner.cfg.APPLICATION
+    except:
+        app = None
+
+    # remove the application's tmp_package directory if it exists
+    if app is not None:
+        tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
+        if os.path.isdir(tmp_local_working_dir):
+            shutil.rmtree(tmp_local_working_dir)
+
+    # remove the tmp directory, unless user has registered as developer
+    if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
+        shutil.rmtree(tmp_working_dir)
     
     # Print again the path of the package
     logger.write("\n", 2)