Salome HOME
style: black format
[tools/sat.git] / commands / package.py
index f91476e2d6b6b87c7027b2a4ba37431b0af6de62..383232ae2c30e32b30a5f185e76f7b696f6ef064 100644 (file)
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-#-*- coding:utf-8 -*-
+# -*- coding:utf-8 -*-
 #  Copyright (C) 2010-2012  CEA/DEN
 #
 #  This library is free software; you can redistribute it and/or
@@ -29,7 +29,7 @@ import sys
 import src
 
 from application import get_SALOME_modules
-from  src.versionMinorMajorPatch import MinorMajorPatch as MMP
+from src.versionMinorMajorPatch import MinorMajorPatch as MMP
 import src.debug as DBG
 
 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
@@ -45,7 +45,7 @@ PROJECT_DIR = "PROJECT"
 IGNORED_DIRS = [".git", ".svn"]
 IGNORED_EXTENSIONS = []
 
-PACKAGE_EXT=".tar.gz" # the extension we use for the packages
+PACKAGE_EXT = ".tar.gz"  # the extension we use for the packages
 
 if src.architecture.is_windows():
     PROJECT_TEMPLATE = """#!/usr/bin/env python
@@ -87,7 +87,8 @@ MACHINEPATH : $project_path + "machines/"
 """
 
 
-LOCAL_TEMPLATE = ("""#!/usr/bin/env python
+LOCAL_TEMPLATE = (
+    """#!/usr/bin/env python
 #-*- coding:utf-8 -*-
 
   LOCAL :
@@ -104,49 +105,128 @@ PROJECTS :
 {
   project_file_paths :
   [
-$LOCAL.workdir + $VARS.sep + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"
+$LOCAL.workdir + $VARS.sep + \""""
+    + PROJECT_DIR
+    + """\" + $VARS.sep + "project.pyconf"
   ]
 }
-""")
+"""
+)
 
 # Define all possible option for the package command :  sat package <options>
 parser = src.options.Options()
-parser.add_option('b', 'binaries', 'boolean', 'binaries',
-    _('Optional: Produce a binary package.'), False)
-parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
-    _('Optional: Only binary package: produce the archive even if '
-      'there are some missing products.'), False)
-parser.add_option('s', 'sources', 'boolean', 'sources',
-    _('Optional: Produce a compilable archive of the sources of the '
-      'application.'), False)
-parser.add_option('', 'bin_products', 'boolean', 'bin_products',
-    _('Optional: Create binary archives for all products.'), False)
-parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
-    _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
-      'Sat prepare will use VCS mode instead to retrieve them.'
-      '\n          Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'),
-    False)
-parser.add_option('', 'ftp', 'boolean', 'ftp',
-    _('Optional: Do not embed archives for products in archive mode.'
-    'Sat prepare will use ftp instead to retrieve them'),
-    False)
-parser.add_option('e', 'exe', 'string', 'exe',
-    _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
-parser.add_option('p', 'project', 'string', 'project',
-    _('Optional: Produce an archive that contains a project.'), "")
-parser.add_option('t', 'salometools', 'boolean', 'sat',
-    _('Optional: Produce an archive that contains salomeTools.'), False)
-parser.add_option('n', 'name', 'string', 'name',
-    _('Optional: The name or full path of the archive.'), None)
-parser.add_option('', 'add_files', 'list2', 'add_files',
-    _('Optional: The list of additional files to add to the archive.'), [])
-parser.add_option('', 'without_properties', 'properties', 'without_properties',
-    _('Optional: Filter the products by their properties.\n\tSyntax: '
-      '--without_properties <property>:<value>'))
+parser.add_option(
+    "b",
+    "binaries",
+    "boolean",
+    "binaries",
+    _("Optional: Produce a binary package."),
+    False,
+)
+parser.add_option(
+    "f",
+    "force_creation",
+    "boolean",
+    "force_creation",
+    _(
+        "Optional: Only binary package: produce the archive even if "
+        "there are some missing products."
+    ),
+    False,
+)
+parser.add_option(
+    "s",
+    "sources",
+    "boolean",
+    "sources",
+    _("Optional: Produce a compilable archive of the sources of the " "application."),
+    False,
+)
+parser.add_option(
+    "",
+    "bin_products",
+    "boolean",
+    "bin_products",
+    _("Optional: Create binary archives for all products."),
+    False,
+)
+parser.add_option(
+    "",
+    "with_vcs",
+    "boolean",
+    "with_vcs",
+    _(
+        "Optional: Do not make archive for products in VCS mode (git, cvs, svn). "
+        "Sat prepare will use VCS mode instead to retrieve them."
+        '\n          Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'
+    ),
+    False,
+)
+parser.add_option(
+    "",
+    "ftp",
+    "boolean",
+    "ftp",
+    _(
+        "Optional: Do not embed archives for products in archive mode."
+        "Sat prepare will use ftp instead to retrieve them"
+    ),
+    False,
+)
+parser.add_option(
+    "e",
+    "exe",
+    "string",
+    "exe",
+    _("Optional: Produce an extra launcher based upon the exe given as argument."),
+    "",
+)
+parser.add_option(
+    "p",
+    "project",
+    "string",
+    "project",
+    _("Optional: Produce an archive that contains a project."),
+    "",
+)
+parser.add_option(
+    "t",
+    "salometools",
+    "boolean",
+    "sat",
+    _("Optional: Produce an archive that contains salomeTools."),
+    False,
+)
+parser.add_option(
+    "n",
+    "name",
+    "string",
+    "name",
+    _("Optional: The name or full path of the archive."),
+    None,
+)
+parser.add_option(
+    "",
+    "add_files",
+    "list2",
+    "add_files",
+    _("Optional: The list of additional files to add to the archive."),
+    [],
+)
+parser.add_option(
+    "",
+    "without_properties",
+    "properties",
+    "without_properties",
+    _(
+        "Optional: Filter the products by their properties.\n\tSyntax: "
+        "--without_properties <property>:<value>"
+    ),
+)
 
 
 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
-    '''Create an archive containing all directories and files that are given in
+    """Create an archive containing all directories and files that are given in
        the d_content argument.
 
     :param tar tarfile: The tarfile instance used to make the archive.
@@ -159,7 +239,7 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
     :param f_exclude Function: the function that filters
     :return: 0 if success, 1 if not.
     :rtype: int
-    '''
+    """
     # get the max length of the messages in order to make the display
     max_len = len(max(d_content.keys(), key=len))
 
@@ -169,27 +249,31 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
     DBG.write("add tar names", names)
 
     # used to avoid duplications (for pip install in python, or single_install_dir cases)
-    already_added=set()
+    already_added = set()
     for name in names:
         # display information
         len_points = max_len - len(name) + 3
         local_path, archive_path = d_content[name]
         in_archive = os.path.join(name_archive, archive_path)
-        logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
+        logger.write(name + " " + len_points * "." + " " + in_archive + " ", 3)
         # Get the local path and the path in archive
         # of the directory or file to add
         # Add it in the archive
         try:
-            key=local_path+"->"+in_archive
+            key = local_path + "->" + in_archive
             if key not in already_added:
                 if old_python:
-                    tar.add(local_path,
-                                 arcname=in_archive,
-                                 exclude=exclude_VCS_and_extensions_26)
+                    tar.add(
+                        local_path,
+                        arcname=in_archive,
+                        exclude=exclude_VCS_and_extensions_26,
+                    )
                 else:
-                    tar.add(local_path,
-                                 arcname=in_archive,
-                                 filter=exclude_VCS_and_extensions)
+                    tar.add(
+                        local_path,
+                        arcname=in_archive,
+                        filter=exclude_VCS_and_extensions,
+                    )
                 already_added.add(key)
             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
         except Exception as e:
@@ -201,13 +285,13 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
 
 
 def exclude_VCS_and_extensions_26(filename):
-    ''' The function that is used to exclude from package the link to the
+    """The function that is used to exclude from package the link to the
         VCS repositories (like .git) (only for python 2.6)
 
     :param filename Str: The filname to exclude (or not).
     :return: True if the file has to be exclude
     :rtype: Boolean
-    '''
+    """
     for dir_name in IGNORED_DIRS:
         if dir_name in filename:
             return True
@@ -216,14 +300,15 @@ def exclude_VCS_and_extensions_26(filename):
             return True
     return False
 
+
 def exclude_VCS_and_extensions(tarinfo):
-    ''' The function that is used to exclude from package the link to the
+    """The function that is used to exclude from package the link to the
         VCS repositories (like .git)
 
     :param filename Str: The filname to exclude (or not).
     :return: None if the file has to be exclude
     :rtype: tarinfo or None
-    '''
+    """
     filename = tarinfo.name
     for dir_name in IGNORED_DIRS:
         if dir_name in filename:
@@ -233,12 +318,9 @@ def exclude_VCS_and_extensions(tarinfo):
             return None
     return tarinfo
 
-def produce_relative_launcher(config,
-                              logger,
-                              file_dir,
-                              file_name,
-                              binaries_dir_name):
-    '''Create a specific SALOME launcher for the binary package. This launcher
+
+def produce_relative_launcher(config, logger, file_dir, file_name, binaries_dir_name):
+    """Create a specific SALOME launcher for the binary package. This launcher
        uses relative paths.
 
     :param config Config: The global configuration.
@@ -249,119 +331,130 @@ def produce_relative_launcher(config,
                                   are, in the archive.
     :return: the path of the produced launcher
     :rtype: str
-    '''
+    """
 
     # set base mode to "no" for the archive - save current mode to restore it at the end
     if "base" in config.APPLICATION:
-        base_setting=config.APPLICATION.base
+        base_setting = config.APPLICATION.base
     else:
-        base_setting="maybe"
-    config.APPLICATION.base="no"
+        base_setting = "maybe"
+    config.APPLICATION.base = "no"
 
     # get KERNEL installation path
     kernel_info = src.product.get_product_config(config, "KERNEL")
-    kernel_base_name=os.path.basename(kernel_info.install_dir)
+    kernel_base_name = os.path.basename(kernel_info.install_dir)
     if kernel_info.install_mode == "base":
         # case of kernel installed in base. the kernel install dir name is different in the archive
-        kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
+        kernel_base_name = os.path.basename(os.path.dirname(kernel_info.install_dir))
 
     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
 
     # set kernel bin dir (considering fhs property)
     kernel_cfg = src.product.get_product_config(config, "KERNEL")
     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
-        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
+        bin_kernel_install_dir = os.path.join(kernel_root_dir, "bin")
     else:
-        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
+        bin_kernel_install_dir = os.path.join(kernel_root_dir, "bin", "salome")
 
     # check if the application contains an application module
     # check also if the application has a distene product,
     # in this case get its licence file name
-    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
-    salome_application_name="Not defined"
-    distene_licence_file_name=False
+    l_product_info = src.product.get_products_infos(
+        config.APPLICATION.products.keys(), config
+    )
+    salome_application_name = "Not defined"
+    distene_licence_file_name = False
     for prod_name, prod_info in l_product_info:
         # look for a "salome application" and a distene product
         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
-            distene_licence_file_name = src.product.product_has_licence(prod_info,
-                                            config.PATHS.LICENCEPATH)
+            distene_licence_file_name = src.product.product_has_licence(
+                prod_info, config.PATHS.LICENCEPATH
+            )
         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
-            salome_application_name=prod_info.name
+            salome_application_name = prod_info.name
 
     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
     if salome_application_name == "Not defined":
-        app_root_dir=kernel_root_dir
+        app_root_dir = kernel_root_dir
     else:
-        app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
+        app_root_dir = os.path.join(binaries_dir_name, salome_application_name)
 
-    additional_env={}
-    additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
-                                                   config.VARS.sep + bin_kernel_install_dir
+    additional_env = {}
+    additional_env["sat_bin_kernel_install_dir"] = (
+        "out_dir_Path + " + config.VARS.sep + bin_kernel_install_dir
+    )
     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
-        additional_env['sat_python_version'] = 3
+        additional_env["sat_python_version"] = 3
     else:
-        additional_env['sat_python_version'] = 2
+        additional_env["sat_python_version"] = 2
 
-    additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
+    additional_env["ABSOLUTE_APPLI_PATH"] = (
+        "out_dir_Path" + config.VARS.sep + app_root_dir
+    )
     launcher_name = src.get_launcher_name(config)
-    additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
+    additional_env["APPLI"] = "out_dir_Path" + config.VARS.sep + file_name
 
     # create an environment file writer
-    writer = src.environment.FileEnvWriter(config,
-                                           logger,
-                                           file_dir,
-                                           src_root=None,
-                                           env_info=None)
+    writer = src.environment.FileEnvWriter(
+        config, logger, file_dir, src_root=None, env_info=None
+    )
 
     filepath = os.path.join(file_dir, file_name)
     # Write
-    writer.write_env_file(filepath,
-                          False,  # for launch
-                          "cfgForPy",
-                          additional_env=additional_env,
-                          no_path_init=False,
-                          for_package = binaries_dir_name)
+    writer.write_env_file(
+        filepath,
+        False,  # for launch
+        "cfgForPy",
+        additional_env=additional_env,
+        no_path_init=False,
+        for_package=binaries_dir_name,
+    )
 
     # Little hack to put out_dir_Path outside the strings
-    src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
-    src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
+    src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"')
+    src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'")
 
     # A hack to put a call to a file for distene licence.
     # It does nothing to an application that has no distene product
     if distene_licence_file_name:
-        logger.write("Application has a distene licence file! We use it in package launcher", 5)
+        logger.write(
+            "Application has a distene licence file! We use it in package launcher", 5
+        )
         hack_for_distene_licence(filepath, distene_licence_file_name)
 
     # change the rights in order to make the file executable for everybody
-    os.chmod(filepath,
-             stat.S_IRUSR |
-             stat.S_IRGRP |
-             stat.S_IROTH |
-             stat.S_IWUSR |
-             stat.S_IXUSR |
-             stat.S_IXGRP |
-             stat.S_IXOTH)
+    os.chmod(
+        filepath,
+        stat.S_IRUSR
+        | stat.S_IRGRP
+        | stat.S_IROTH
+        | stat.S_IWUSR
+        | stat.S_IXUSR
+        | stat.S_IXGRP
+        | stat.S_IXOTH,
+    )
 
     # restore modified setting by its initial value
-    config.APPLICATION.base=base_setting
+    config.APPLICATION.base = base_setting
 
     return filepath
 
+
 def hack_for_distene_licence(filepath, licence_file):
-    '''Replace the distene licence env variable by a call to a file.
+    """Replace the distene licence env variable by a call to a file.
 
     :param filepath Str: The path to the launcher to modify.
-    '''
+    """
     shutil.move(filepath, filepath + "_old")
-    fileout= filepath
+    fileout = filepath
     filein = filepath + "_old"
     fin = open(filein, "r")
     fout = open(fileout, "w")
     text = fin.readlines()
     # Find the Distene section
     num_line = -1
-    for i,line in enumerate(text):
+    for i, line in enumerate(text):
         if "# Set DISTENE License" in line:
             num_line = i
             break
@@ -372,9 +465,10 @@ def hack_for_distene_licence(filepath, licence_file):
             fout.write(line)
         fout.close()
         return
-    del text[num_line +1]
-    del text[num_line +1]
-    text_to_insert ="""    try:
+    del text[num_line + 1]
+    del text[num_line + 1]
+    text_to_insert = (
+        """    try:
         distene_licence_file=r"%s"
         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
             import importlib.util
@@ -386,7 +480,9 @@ def hack_for_distene_licence(filepath, licence_file):
             distene = imp.load_source('distene_licence', distene_licence_file)
         distene.set_distene_variables(context)
     except:
-        pass\n"""  % licence_file
+        pass\n"""
+        % licence_file
+    )
     text.insert(num_line + 1, text_to_insert)
     for line in text:
         fout.write(line)
@@ -394,12 +490,11 @@ def hack_for_distene_licence(filepath, licence_file):
     fout.close()
     return
 
-def produce_relative_env_files(config,
-                              logger,
-                              file_dir,
-                              binaries_dir_name,
-                              exe_name=None):
-    '''Create some specific environment files for the binary package. These
+
+def produce_relative_env_files(
+    config, logger, file_dir, binaries_dir_name, exe_name=None
+):
+    """Create some specific environment files for the binary package. These
        files use relative paths.
 
     :param config Config: The global configuration.
@@ -410,76 +505,71 @@ def produce_relative_env_files(config,
     :param exe_name str: if given generate a launcher executing exe_name
     :return: the list of path of the produced environment files
     :rtype: List
-    '''
+    """
 
     # set base mode to "no" for the archive - save current mode to restore it at the end
     if "base" in config.APPLICATION:
-        base_setting=config.APPLICATION.base
+        base_setting = config.APPLICATION.base
     else:
-        base_setting="maybe"
-    config.APPLICATION.base="no"
+        base_setting = "maybe"
+    config.APPLICATION.base = "no"
 
     # create an environment file writer
-    writer = src.environment.FileEnvWriter(config,
-                                           logger,
-                                           file_dir,
-                                           src_root=None)
+    writer = src.environment.FileEnvWriter(config, logger, file_dir, src_root=None)
 
     if src.architecture.is_windows():
-      shell = "bat"
-      filename  = "env_launch.bat"
+        shell = "bat"
+        filename = "env_launch.bat"
     else:
-      shell = "bash"
-      filename  = "env_launch.sh"
+        shell = "bash"
+        filename = "env_launch.sh"
 
     if exe_name:
-        filename=os.path.basename(exe_name)
+        filename = os.path.basename(exe_name)
 
     # Write
-    filepath = writer.write_env_file(filename,
-                          False, # for launch
-                          shell,
-                          for_package = binaries_dir_name)
+    filepath = writer.write_env_file(
+        filename, False, shell, for_package=binaries_dir_name  # for launch
+    )
 
     # Little hack to put out_dir_Path as environment variable
-    if src.architecture.is_windows() :
-      src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
-      src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
-      src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
+    if src.architecture.is_windows():
+        src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%')
+        src.replace_in_file(filepath, "=out_dir_Path", "=%out_dir_Path%")
+        src.replace_in_file(filepath, ";out_dir_Path", ";%out_dir_Path%")
     else:
-      src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
-      src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
-      src.replace_in_file(filepath, ';out_dir_Path', ';${out_dir_Path}' )
+        src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}')
+        src.replace_in_file(filepath, ":out_dir_Path", ":${out_dir_Path}")
+        src.replace_in_file(filepath, ";out_dir_Path", ";${out_dir_Path}")
 
     if exe_name:
         if src.architecture.is_windows():
-            cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
+            cmd = "\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
         else:
-            cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
+            cmd = '\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
         with open(filepath, "a") as exe_launcher:
             exe_launcher.write(cmd)
 
     # change the rights in order to make the file executable for everybody
-    os.chmod(filepath,
-             stat.S_IRUSR |
-             stat.S_IRGRP |
-             stat.S_IROTH |
-             stat.S_IWUSR |
-             stat.S_IXUSR |
-             stat.S_IXGRP |
-             stat.S_IXOTH)
+    os.chmod(
+        filepath,
+        stat.S_IRUSR
+        | stat.S_IRGRP
+        | stat.S_IROTH
+        | stat.S_IWUSR
+        | stat.S_IXUSR
+        | stat.S_IXGRP
+        | stat.S_IXOTH,
+    )
 
     # restore modified setting by its initial value
-    config.APPLICATION.base=base_setting
+    config.APPLICATION.base = base_setting
 
     return filepath
 
-def produce_install_bin_file(config,
-                             logger,
-                             file_dir,
-                             d_sub,
-                             file_name):
-    '''Create a bash shell script which do substitutions in BIRARIES dir
+
+def produce_install_bin_file(config, logger, file_dir, d_sub, file_name):
+    """Create a bash shell script which do substitutions in BIRARIES dir
        in order to use it for extra compilations.
 
     :param config Config: The global configuration.
@@ -489,52 +579,52 @@ def produce_install_bin_file(config,
     :param file_name str: the name of the install script file
     :return: the produced file
     :rtype: str
-    '''
+    """
     # Write
     filepath = os.path.join(file_dir, file_name)
     # open the file and write into it
     # use codec utf-8 as sat variables are in unicode
-    with codecs.open(filepath, "w", 'utf-8') as installbin_file:
-        installbin_template_path = os.path.join(config.VARS.internal_dir,
-                                        "INSTALL_BIN.template")
+    with codecs.open(filepath, "w", "utf-8") as installbin_file:
+        installbin_template_path = os.path.join(
+            config.VARS.internal_dir, "INSTALL_BIN.template"
+        )
 
         # build the name of the directory that will contain the binaries
         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
         # build the substitution loop
         loop_cmd = "for f in $(grep -RIl"
         for key in d_sub:
-            loop_cmd += " -e "+ key
-        loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
-                    '); do\n     sed -i "\n'
+            loop_cmd += " -e " + key
+        loop_cmd += " " + config.INTERNAL.config.install_dir + '); do\n     sed -i "\n'
         for key in d_sub:
             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
         loop_cmd += '            " $f\ndone'
 
-        d={}
+        d = {}
         d["BINARIES_DIR"] = binaries_dir_name
-        d["SUBSTITUTION_LOOP"]=loop_cmd
-        d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
+        d["SUBSTITUTION_LOOP"] = loop_cmd
+        d["INSTALL_DIR"] = config.INTERNAL.config.install_dir
 
         # substitute the template and write it in file
-        content=src.template.substitute(installbin_template_path, d)
+        content = src.template.substitute(installbin_template_path, d)
         installbin_file.write(content)
         # change the rights in order to make the file executable for everybody
-        os.chmod(filepath,
-                 stat.S_IRUSR |
-                 stat.S_IRGRP |
-                 stat.S_IROTH |
-                 stat.S_IWUSR |
-                 stat.S_IXUSR |
-                 stat.S_IXGRP |
-                 stat.S_IXOTH)
+        os.chmod(
+            filepath,
+            stat.S_IRUSR
+            | stat.S_IRGRP
+            | stat.S_IROTH
+            | stat.S_IWUSR
+            | stat.S_IXUSR
+            | stat.S_IXGRP
+            | stat.S_IXOTH,
+        )
 
     return filepath
 
-def product_appli_creation_script(config,
-                                  logger,
-                                  file_dir,
-                                  binaries_dir_name):
-    '''Create a script that can produce an application (EDF style) in the binary
+
+def product_appli_creation_script(config, logger, file_dir, binaries_dir_name):
+    """Create a script that can produce an application (EDF style) in the binary
        package.
 
     :param config Config: The global configuration.
@@ -544,12 +634,11 @@ def product_appli_creation_script(config,
                                   are, in the archive.
     :return: the path of the produced script file
     :rtype: Str
-    '''
+    """
     template_name = "create_appli.py.for_bin_packages.template"
     template_path = os.path.join(config.VARS.internal_dir, template_name)
     text_to_fill = open(template_path, "r").read()
-    text_to_fill = text_to_fill.replace("TO BE FILLED 1",
-                                        '"' + binaries_dir_name + '"')
+    text_to_fill = text_to_fill.replace("TO BE FILLED 1", '"' + binaries_dir_name + '"')
 
     text_to_add = ""
     for product_name in get_SALOME_modules(config):
@@ -558,22 +647,20 @@ def product_appli_creation_script(config,
         if src.product.product_is_smesh_plugin(product_info):
             continue
 
-        if 'install_dir' in product_info and bool(product_info.install_dir):
+        if "install_dir" in product_info and bool(product_info.install_dir):
             if src.product.product_is_cpp(product_info):
                 # cpp module
                 for cpp_name in src.product.get_product_components(product_info):
-                    line_to_add = ("<module name=\"" +
-                                   cpp_name +
-                                   "\" gui=\"yes\" path=\"''' + "
-                                   "os.path.join(dir_bin_name, \"" +
-                                   cpp_name + "\") + '''\"/>")
+                    line_to_add = (
+                        '<module name="' + cpp_name + '" gui="yes" path="\'\'\' + '
+                        'os.path.join(dir_bin_name, "' + cpp_name + "\") + '''\"/>"
+                    )
             else:
                 # regular module
-                line_to_add = ("<module name=\"" +
-                               product_name +
-                               "\" gui=\"yes\" path=\"''' + "
-                               "os.path.join(dir_bin_name, \"" +
-                               product_name + "\") + '''\"/>")
+                line_to_add = (
+                    '<module name="' + product_name + '" gui="yes" path="\'\'\' + '
+                    'os.path.join(dir_bin_name, "' + product_name + "\") + '''\"/>"
+                )
             text_to_add += line_to_add + "\n"
 
     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
@@ -584,23 +671,26 @@ def product_appli_creation_script(config,
     ff.close()
 
     # change the rights in order to make the file executable for everybody
-    os.chmod(tmp_file_path,
-             stat.S_IRUSR |
-             stat.S_IRGRP |
-             stat.S_IROTH |
-             stat.S_IWUSR |
-             stat.S_IXUSR |
-             stat.S_IXGRP |
-             stat.S_IXOTH)
+    os.chmod(
+        tmp_file_path,
+        stat.S_IRUSR
+        | stat.S_IRGRP
+        | stat.S_IROTH
+        | stat.S_IWUSR
+        | stat.S_IXUSR
+        | stat.S_IXGRP
+        | stat.S_IXOTH,
+    )
 
     return tmp_file_path
 
+
 def bin_products_archives(config, logger, only_vcs):
-    '''Prepare binary packages for all products
+    """Prepare binary packages for all products
     :param config Config: The global configuration.
     :return: the error status
     :rtype: bool
-    '''
+    """
 
     logger.write("Make %s binary archives\n" % config.VARS.dist)
     # Get the default directory where to put the packages
@@ -608,17 +698,18 @@ def bin_products_archives(config, logger, only_vcs):
     src.ensure_path_exists(binpackage_path)
     # Get the list of product installation to add to the archive
     l_products_name = sorted(config.APPLICATION.products.keys())
-    l_product_info = src.product.get_products_infos(l_products_name,
-                                                    config)
+    l_product_info = src.product.get_products_infos(l_products_name, config)
     # first loop on products : filter products, analyse properties,
     # and store the information that will be used to create the archive in the second loop
-    l_not_installed=[] # store not installed products for warning at the end
+    l_not_installed = []  # store not installed products for warning at the end
     for prod_name, prod_info in l_product_info:
         # ignore the native and fixed products for install directories
-        if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
-                or src.product.product_is_native(prod_info)
-                or src.product.product_is_fixed(prod_info)
-                or not src.product.product_compiles(prod_info)):
+        if (
+            src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
+            or src.product.product_is_native(prod_info)
+            or src.product.product_is_fixed(prod_info)
+            or not src.product.product_compiles(prod_info)
+        ):
             continue
         if only_vcs and not src.product.product_is_vcs(prod_info):
             continue
@@ -626,24 +717,38 @@ def bin_products_archives(config, logger, only_vcs):
             l_not_installed.append(prod_name)
             continue  # product is not installed, we skip it
         # prepare call to make_bin_archive
-        path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version.replace("/", "_") + "-" + config.VARS.dist + PACKAGE_EXT)
-        targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
+        path_targz_prod = os.path.join(
+            binpackage_path,
+            prod_name
+            + "-"
+            + prod_info.version.replace("/", "_")
+            + "-"
+            + config.VARS.dist
+            + PACKAGE_EXT,
+        )
+        targz_prod = tarfile.open(path_targz_prod, mode="w:gz")
         bin_path = prod_info.install_dir
         targz_prod.add(bin_path)
         targz_prod.close()
         # Python program to find MD5 hash value of a file
         import hashlib
-        with open(path_targz_prod,"rb") as f:
-            bytes = f.read() # read file as bytes
-            readable_hash = hashlib.md5(bytes).hexdigest();
-            with open(path_targz_prod+".md5", "w") as md5sum:
-               md5sum.write("%s  %s" % (readable_hash, os.path.basename(path_targz_prod)))
-            logger.write("   archive : %s   (md5sum = %s)\n" % (path_targz_prod, readable_hash))
+
+        with open(path_targz_prod, "rb") as f:
+            bytes = f.read()  # read file as bytes
+            readable_hash = hashlib.md5(bytes).hexdigest()
+            with open(path_targz_prod + ".md5", "w") as md5sum:
+                md5sum.write(
+                    "%s  %s" % (readable_hash, os.path.basename(path_targz_prod))
+                )
+            logger.write(
+                "   archive : %s   (md5sum = %s)\n" % (path_targz_prod, readable_hash)
+            )
 
     return 0
 
+
 def binary_package(config, logger, options, tmp_working_dir):
-    '''Prepare a dictionary that stores all the needed directories and files to
+    """Prepare a dictionary that stores all the needed directories and files to
        add in a binary package.
 
     :param config Config: The global configuration.
@@ -656,12 +761,11 @@ def binary_package(config, logger, options, tmp_working_dir):
              add in a binary package.
              {label : (path_on_local_machine, path_in_archive)}
     :rtype: dict
-    '''
+    """
 
     # Get the list of product installation to add to the archive
     l_products_name = sorted(config.APPLICATION.products.keys())
-    l_product_info = src.product.get_products_infos(l_products_name,
-                                                    config)
+    l_product_info = src.product.get_products_infos(l_products_name, config)
 
     # suppress compile time products for binaries-only archives
     if not options.sources:
@@ -672,11 +776,13 @@ def binary_package(config, logger, options, tmp_working_dir):
     l_not_installed = []
     l_sources_not_present = []
     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
-    if ("APPLICATION" in config  and
-        "properties"  in config.APPLICATION  and
-        "mesa_launcher_in_package"    in config.APPLICATION.properties  and
-        config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
-            generate_mesa_launcher=True
+    if (
+        "APPLICATION" in config
+        and "properties" in config.APPLICATION
+        and "mesa_launcher_in_package" in config.APPLICATION.properties
+        and config.APPLICATION.properties.mesa_launcher_in_package == "yes"
+    ):
+        generate_mesa_launcher = True
 
     # first loop on products : filter products, analyse properties,
     # and store the information that will be used to create the archive in the second loop
@@ -687,25 +793,34 @@ def binary_package(config, logger, options, tmp_working_dir):
 
         # Add the sources of the products that have the property
         # sources_in_package : "yes"
-        if src.get_property_in_product_cfg(prod_info,
-                                           "sources_in_package") == "yes":
+        if src.get_property_in_product_cfg(prod_info, "sources_in_package") == "yes":
             if os.path.exists(prod_info.source_dir):
                 l_source_dir.append((prod_name, prod_info.source_dir))
             else:
                 l_sources_not_present.append(prod_name)
 
         # ignore the native and fixed products for install directories
-        if (src.product.product_is_native(prod_info)
-                or src.product.product_is_fixed(prod_info)
-                or not src.product.product_compiles(prod_info)):
+        if (
+            src.product.product_is_native(prod_info)
+            or src.product.product_is_fixed(prod_info)
+            or not src.product.product_compiles(prod_info)
+        ):
             continue
         #
         # products with single_dir property will be installed in the PRODUCTS directory of the archive
-        is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
-                       src.product.product_test_property(prod_info,"single_install_dir", "yes"))
+        is_single_dir = src.appli_test_property(
+            config, "single_install_dir", "yes"
+        ) and src.product.product_test_property(prod_info, "single_install_dir", "yes")
         if src.product.check_installation(config, prod_info):
-            l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
-                                  is_single_dir, prod_info.install_mode))
+            l_install_dir.append(
+                (
+                    prod_name,
+                    prod_info.name,
+                    prod_info.install_dir,
+                    is_single_dir,
+                    prod_info.install_mode,
+                )
+            )
         else:
             l_not_installed.append(prod_name)
 
@@ -713,21 +828,26 @@ def binary_package(config, logger, options, tmp_working_dir):
         if src.product.product_is_cpp(prod_info):
             # cpp module
             for name_cpp in src.product.get_product_components(prod_info):
-                install_dir = os.path.join(config.APPLICATION.workdir,
-                                           config.INTERNAL.config.install_dir,
-                                           name_cpp)
+                install_dir = os.path.join(
+                    config.APPLICATION.workdir,
+                    config.INTERNAL.config.install_dir,
+                    name_cpp,
+                )
                 if os.path.exists(install_dir):
-                    l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
+                    l_install_dir.append(
+                        (name_cpp, name_cpp, install_dir, False, "value")
+                    )
                 else:
                     l_not_installed.append(name_cpp)
 
     # check the name of the directory that (could) contains the binaries
     # from previous detar
     binaries_from_detar = os.path.join(
-                              config.APPLICATION.workdir,
-                              config.INTERNAL.config.binary_dir + config.VARS.dist)
+        config.APPLICATION.workdir, config.INTERNAL.config.binary_dir + config.VARS.dist
+    )
     if os.path.exists(binaries_from_detar):
-         logger.write("""
+        logger.write(
+            """
 WARNING: existing binaries directory from previous detar installation:
          %s
          To make new package from this, you have to:
@@ -736,7 +856,9 @@ WARNING: existing binaries directory from previous detar installation:
          2) or recompile everything in INSTALL with "sat compile" command
             this step is long, and requires some linux packages to be installed
             on your system\n
-""" % binaries_from_detar)
+"""
+            % binaries_from_detar
+        )
 
     # Print warning or error if there are some missing products
     if len(l_not_installed) > 0:
@@ -745,15 +867,15 @@ WARNING: existing binaries directory from previous detar installation:
             text_missing_prods += " - " + p_name + "\n"
         if not options.force_creation:
             msg = _("ERROR: there are missing product installations:")
-            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
-                                     text_missing_prods),
-                         1)
+            logger.write(
+                "%s\n%s" % (src.printcolors.printcError(msg), text_missing_prods), 1
+            )
             raise src.SatException(msg)
         else:
             msg = _("WARNING: there are missing products installations:")
-            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
-                                     text_missing_prods),
-                         1)
+            logger.write(
+                "%s\n%s" % (src.printcolors.printcWarning(msg), text_missing_prods), 1
+            )
 
     # Do the same for sources
     if len(l_sources_not_present) > 0:
@@ -762,15 +884,15 @@ WARNING: existing binaries directory from previous detar installation:
             text_missing_prods += "-" + p_name + "\n"
         if not options.force_creation:
             msg = _("ERROR: there are missing product sources:")
-            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
-                                     text_missing_prods),
-                         1)
+            logger.write(
+                "%s\n%s" % (src.printcolors.printcError(msg), text_missing_prods), 1
+            )
             raise src.SatException(msg)
         else:
             msg = _("WARNING: there are missing products sources:")
-            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
-                                     text_missing_prods),
-                         1)
+            logger.write(
+                "%s\n%s" % (src.printcolors.printcWarning(msg), text_missing_prods), 1
+            )
 
     # construct the name of the directory that will contain the binaries
     if src.architecture.is_windows():
@@ -780,16 +902,22 @@ WARNING: existing binaries directory from previous detar installation:
     # construct the correlation table between the product names, there
     # actual install directories and there install directory in archive
     d_products = {}
-    for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
-        prod_base_name=os.path.basename(install_dir)
+    for (
+        prod_name,
+        prod_info_name,
+        install_dir,
+        is_single_dir,
+        install_mode,
+    ) in l_install_dir:
+        prod_base_name = os.path.basename(install_dir)
         if install_mode == "base":
             # case of a products installed in base.
             # because the archive is in base:no mode, the name of the install dir is different inside archive
             # we set it to the product name or by PRODUCTS if single-dir
             if is_single_dir:
-                prod_base_name=config.INTERNAL.config.single_install_dir
+                prod_base_name = config.INTERNAL.config.single_install_dir
             else:
-                prod_base_name=prod_info_name
+                prod_base_name = prod_info_name
         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
 
@@ -797,106 +925,112 @@ WARNING: existing binaries directory from previous detar installation:
         path_in_archive = os.path.join("SOURCES", prod_name)
         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
 
-    # create an archives of compilation logs, and insert it into the tarball
-    logpath=os.path.join(config.APPLICATION.workdir, "LOGS")
+    # create an archive of compilation logs, and insert it into the tarball
+    logpath = os.path.join(config.APPLICATION.workdir, "LOGS")
     path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
-    tar_log = tarfile.open(path_targz_logs, mode='w:gz')
+    tar_log = tarfile.open(path_targz_logs, mode="w:gz")
     tar_log.add(logpath, arcname="LOGS")
     tar_log.close()
     d_products["LOGS"] = (path_targz_logs, "logs.tgz")
 
     # for packages of SALOME applications including KERNEL,
     # we produce a salome launcher or a virtual application (depending on salome version)
-    if 'KERNEL' in config.APPLICATION.products:
+    if "KERNEL" in config.APPLICATION.products:
         VersionSalome = src.get_salome_version(config)
         # Case where SALOME has the launcher that uses the SalomeContext API
-        if VersionSalome >= MMP([7,3,0]):
+        if VersionSalome >= MMP([7, 3, 0]):
             # create the relative launcher and add it to the files to add
             launcher_name = src.get_launcher_name(config)
-            launcher_package = produce_relative_launcher(config,
-                                                 logger,
-                                                 tmp_working_dir,
-                                                 launcher_name,
-                                                 binaries_dir_name)
+            launcher_package = produce_relative_launcher(
+                config, logger, tmp_working_dir, launcher_name, binaries_dir_name
+            )
             d_products["launcher"] = (launcher_package, launcher_name)
 
             # if the application contains mesa products, we generate in addition to the
             # classical salome launcher a launcher using mesa and called mesa_salome
             # (the mesa launcher will be used for remote usage through ssh).
             if generate_mesa_launcher:
-                #if there is one : store the use_mesa property
-                restore_use_mesa_option=None
-                if ('properties' in config.APPLICATION and
-                    'use_mesa' in config.APPLICATION.properties):
+                # if there is one : store the use_mesa property
+                restore_use_mesa_option = None
+                if (
+                    "properties" in config.APPLICATION
+                    and "use_mesa" in config.APPLICATION.properties
+                ):
                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
 
                 # activate mesa property, and generate a mesa launcher
-                src.activate_mesa_property(config)  #activate use_mesa property
-                launcher_mesa_name="mesa_"+launcher_name
-                launcher_package_mesa = produce_relative_launcher(config,
-                                                     logger,
-                                                     tmp_working_dir,
-                                                     launcher_mesa_name,
-                                                     binaries_dir_name)
-                d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
+                src.activate_mesa_property(config)  # activate use_mesa property
+                launcher_mesa_name = "mesa_" + launcher_name
+                launcher_package_mesa = produce_relative_launcher(
+                    config,
+                    logger,
+                    tmp_working_dir,
+                    launcher_mesa_name,
+                    binaries_dir_name,
+                )
+                d_products["launcher (mesa)"] = (
+                    launcher_package_mesa,
+                    launcher_mesa_name,
+                )
 
                 # if there was a use_mesa value, we restore it
                 # else we set it to the default value "no"
                 if restore_use_mesa_option != None:
-                    config.APPLICATION.properties.use_mesa=restore_use_mesa_option
+                    config.APPLICATION.properties.use_mesa = restore_use_mesa_option
                 else:
-                    config.APPLICATION.properties.use_mesa="no"
+                    config.APPLICATION.properties.use_mesa = "no"
 
             if options.sources:
                 # if we mix binaries and sources, we add a copy of the launcher,
                 # prefixed  with "bin",in order to avoid clashes
-                launcher_copy_name="bin"+launcher_name
-                launcher_package_copy = produce_relative_launcher(config,
-                                                     logger,
-                                                     tmp_working_dir,
-                                                     launcher_copy_name,
-                                                     binaries_dir_name)
-                d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
+                launcher_copy_name = "bin" + launcher_name
+                launcher_package_copy = produce_relative_launcher(
+                    config,
+                    logger,
+                    tmp_working_dir,
+                    launcher_copy_name,
+                    binaries_dir_name,
+                )
+                d_products["launcher (copy)"] = (
+                    launcher_package_copy,
+                    launcher_copy_name,
+                )
         else:
             # Provide a script for the creation of an application EDF style
-            appli_script = product_appli_creation_script(config,
-                                                        logger,
-                                                        tmp_working_dir,
-                                                        binaries_dir_name)
+            appli_script = product_appli_creation_script(
+                config, logger, tmp_working_dir, binaries_dir_name
+            )
 
             d_products["appli script"] = (appli_script, "create_appli.py")
 
     # Put also the environment file
-    env_file = produce_relative_env_files(config,
-                                           logger,
-                                           tmp_working_dir,
-                                           binaries_dir_name)
+    env_file = produce_relative_env_files(
+        config, logger, tmp_working_dir, binaries_dir_name
+    )
 
     if src.architecture.is_windows():
-      filename  = "env_launch.bat"
+        filename = "env_launch.bat"
     else:
-      filename  = "env_launch.sh"
+        filename = "env_launch.sh"
     d_products["environment file"] = (env_file, filename)
 
     # If option exe, produce an extra launcher based on specified exe
     if options.exe:
-        exe_file = produce_relative_env_files(config,
-                                              logger,
-                                              tmp_working_dir,
-                                              binaries_dir_name,
-                                              options.exe)
+        exe_file = produce_relative_env_files(
+            config, logger, tmp_working_dir, binaries_dir_name, options.exe
+        )
 
         if src.architecture.is_windows():
-          filename  = os.path.basename(options.exe) + ".bat"
+            filename = os.path.basename(options.exe) + ".bat"
         else:
-          filename  = os.path.basename(options.exe) + ".sh"
+            filename = os.path.basename(options.exe) + ".sh"
         d_products["exe file"] = (exe_file, filename)
 
-
     return d_products
 
+
 def source_package(sat, config, logger, options, tmp_working_dir):
-    '''Prepare a dictionary that stores all the needed directories and files to
+    """Prepare a dictionary that stores all the needed directories and files to
        add in a source package.
 
     :param config Config: The global configuration.
@@ -909,9 +1043,9 @@ def source_package(sat, config, logger, options, tmp_working_dir):
              add in a source package.
              {label : (path_on_local_machine, path_in_archive)}
     :rtype: dict
-    '''
+    """
 
-    d_archives={}
+    d_archives = {}
     # Get all the products that are prepared using an archive
     # unless ftp mode is specified (in this case the user of the
     # archive will get the sources through the ftp mode of sat prepare
@@ -925,24 +1059,21 @@ def source_package(sat, config, logger, options, tmp_working_dir):
         # Make archives with the products that are not prepared using an archive
         # (git, cvs, svn, etc)
         logger.write("Construct archives for vcs products ... ")
-        d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
-                                          sat,
-                                          config,
-                                          logger,
-                                          tmp_working_dir)
+        d_archives_vcs = get_archives_vcs(
+            l_pinfo_vcs, sat, config, logger, tmp_working_dir
+        )
         logger.write("Done\n")
 
     # Create a project
     logger.write("Create the project ... ")
-    d_project = create_project_for_src_package(config,
-                                               tmp_working_dir,
-                                               options.with_vcs,
-                                               options.ftp)
+    d_project = create_project_for_src_package(
+        config, tmp_working_dir, options.with_vcs, options.ftp
+    )
     logger.write("Done\n")
 
     # Add salomeTools
     tmp_sat = add_salomeTools(config, tmp_working_dir)
-    d_sat = {"salomeTools" : (tmp_sat, "sat")}
+    d_sat = {"salomeTools": (tmp_sat, "sat")}
 
     # Add a sat symbolic link if not win
     if not src.architecture.is_windows():
@@ -960,14 +1091,17 @@ def source_package(sat, config, logger, options, tmp_working_dir):
         os.symlink("../ARCHIVES", "ARCHIVES")
         os.chdir(t)
 
-        d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"),
-                                     os.path.join("PROJECT", "ARCHIVES"))
+        d_sat["sat archive link"] = (
+            os.path.join(tmp_working_dir, "PROJECT", "ARCHIVES"),
+            os.path.join("PROJECT", "ARCHIVES"),
+        )
 
     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
     return d_source
 
+
 def get_archives(config, logger):
-    '''Find all the products that are get using an archive and all the products
+    """Find all the products that are get using an archive and all the products
        that are get using a vcs (git, cvs, svn) repository.
 
     :param config Config: The global configuration.
@@ -977,11 +1111,10 @@ def get_archives(config, logger):
              and the list of specific configuration corresponding to the vcs
              products
     :rtype: (Dict, List)
-    '''
+    """
     # Get the list of product informations
     l_products_name = config.APPLICATION.products.keys()
-    l_product_info = src.product.get_products_infos(l_products_name,
-                                                    config)
+    l_product_info = src.product.get_products_infos(l_products_name, config)
     d_archives = {}
     l_pinfo_vcs = []
     for p_name, p_info in l_product_info:
@@ -989,39 +1122,56 @@ def get_archives(config, logger):
         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
             continue
         # ignore the native and fixed products
-        if (src.product.product_is_native(p_info)
-                or src.product.product_is_fixed(p_info)):
+        if src.product.product_is_native(p_info) or src.product.product_is_fixed(
+            p_info
+        ):
             continue
         if p_info.get_source == "archive":
             archive_path = p_info.archive_info.archive_name
             archive_name = os.path.basename(archive_path)
-            d_archives[p_name] = (archive_path,
-                                  os.path.join(ARCHIVE_DIR, archive_name))
-            if (src.appli_test_property(config,"pip", "yes") and
-                src.product.product_test_property(p_info,"pip", "yes")):
+            d_archives[p_name] = (archive_path, os.path.join(ARCHIVE_DIR, archive_name))
+            if src.appli_test_property(
+                config, "pip", "yes"
+            ) and src.product.product_test_property(p_info, "pip", "yes"):
                 # if pip mode is activated, and product is managed by pip
-                pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
-                if "archive_prefix" in p_info.archive_info and p_info.archive_info.archive_prefix:
-                    pip_wheel_pattern=os.path.join(pip_wheels_dir,
-                                                   "%s-%s*" % (p_info.archive_info.archive_prefix, p_info.version))
+                pip_wheels_dir = os.path.join(config.LOCAL.archive_dir, "wheels")
+                if (
+                    "archive_prefix" in p_info.archive_info
+                    and p_info.archive_info.archive_prefix
+                ):
+                    pip_wheel_pattern = os.path.join(
+                        pip_wheels_dir,
+                        "%s-%s*" % (p_info.archive_info.archive_prefix, p_info.version),
+                    )
                 else:
-                    pip_wheel_pattern=os.path.join(pip_wheels_dir,
-                                                   "%s-%s*" % (p_info.name, p_info.version))
-                pip_wheel_path=glob.glob(pip_wheel_pattern)
-                msg_pip_not_found="Error in get_archive, pip wheel for "\
-                                  "product %s-%s was not found in %s directory"
-                msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
-                                  "product %s-%s were found in %s directory"
-                if len(pip_wheel_path)==0:
-                    raise src.SatException(msg_pip_not_found %\
-                        (p_info.name, p_info.version, pip_wheels_dir))
-                if len(pip_wheel_path)>1:
-                    raise src.SatException(msg_pip_two_or_more %\
-                        (p_info.name, p_info.version, pip_wheels_dir))
-
-                pip_wheel_name=os.path.basename(pip_wheel_path[0])
-                d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
-                    os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
+                    pip_wheel_pattern = os.path.join(
+                        pip_wheels_dir, "%s-%s*" % (p_info.name, p_info.version)
+                    )
+                pip_wheel_path = glob.glob(pip_wheel_pattern)
+                msg_pip_not_found = (
+                    "Error in get_archive, pip wheel for "
+                    "product %s-%s was not found in %s directory"
+                )
+                msg_pip_two_or_more = (
+                    "Error in get_archive, several pip wheels for "
+                    "product %s-%s were found in %s directory"
+                )
+                if len(pip_wheel_path) == 0:
+                    raise src.SatException(
+                        msg_pip_not_found
+                        % (p_info.name, p_info.version, pip_wheels_dir)
+                    )
+                if len(pip_wheel_path) > 1:
+                    raise src.SatException(
+                        msg_pip_two_or_more
+                        % (p_info.name, p_info.version, pip_wheels_dir)
+                    )
+
+                pip_wheel_name = os.path.basename(pip_wheel_path[0])
+                d_archives[p_name + " (pip wheel)"] = (
+                    pip_wheel_path[0],
+                    os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name),
+                )
         else:
             # this product is not managed by archive,
             # an archive of the vcs directory will be created by get_archive_vcs
@@ -1029,8 +1179,9 @@ def get_archives(config, logger):
 
     return d_archives, l_pinfo_vcs
 
+
 def add_salomeTools(config, tmp_working_dir):
-    '''Prepare a version of salomeTools that has a specific local.pyconf file
+    """Prepare a version of salomeTools that has a specific local.pyconf file
        configured for a source package.
 
     :param config Config: The global configuration.
@@ -1039,7 +1190,7 @@ def add_salomeTools(config, tmp_working_dir):
                                 source package
     :return: The path to the local salomeTools directory to add in the package
     :rtype: str
-    '''
+    """
     # Copy sat in the temporary working directory
     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
     sat_running_path = src.Path(config.VARS.salometoolsway)
@@ -1055,9 +1206,7 @@ def add_salomeTools(config, tmp_working_dir):
     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
     for file_or_dir in files_or_dir_SAT:
         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
-            file_path = os.path.join(tmp_working_dir,
-                                     "salomeTools",
-                                     file_or_dir)
+            file_path = os.path.join(tmp_working_dir, "salomeTools", file_or_dir)
             os.remove(file_path)
 
     ff = open(local_pyconf_file, "w")
@@ -1066,8 +1215,9 @@ def add_salomeTools(config, tmp_working_dir):
 
     return sat_tmp_path.path
 
+
 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
-    '''For sources package that require that all products are get using an
+    """For sources package that require that all products are get using an
        archive, one has to create some archive for the vcs products.
        So this method calls the clean and source command of sat and then create
        the archives.
@@ -1084,46 +1234,49 @@ def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
     :return: the dictionary that stores all the archives to add in the source
              package. {label : (path_on_local_machine, path_in_archive)}
     :rtype: dict
-    '''
+    """
     # clean the source directory of all the vcs products, then use the source
     # command and thus construct an archive that will not contain the patches
     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
-    if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
-      logger.write(_("\nclean sources\n"))
-      args_clean = config.VARS.application
-      args_clean += " --sources --products "
-      args_clean += ",".join(l_prod_names)
-      logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
-      sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
+    if False:  # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
+        logger.write(_("\nclean sources\n"))
+        args_clean = config.VARS.application
+        args_clean += " --sources --products "
+        args_clean += ",".join(l_prod_names)
+        logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
+        sat.clean(args_clean, batch=True, verbose=0, logger_add_link=logger)
     if True:
-      # source
-      logger.write(_("get sources\n"))
-      args_source = config.VARS.application
-      args_source += " --products "
-      args_source += ",".join(l_prod_names)
-      svgDir = sat.cfg.APPLICATION.workdir
-      tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too much big files in /tmp
-      sat.cfg.APPLICATION.workdir = tmp_local_working_dir
-      # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
-      # DBG.write("sat config id", id(sat.cfg), True)
-      # shit as config is not same id() as for sat.source()
-      # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
-      import source
-      source.run(args_source, sat, logger) #use this mode as runner.cfg reference
-
-      # make the new archives
-      d_archives_vcs = {}
-      for pn, pinfo in l_pinfo_vcs:
-          path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
-          logger.write("make archive vcs '%s'\n" % path_archive)
-          d_archives_vcs[pn] = (path_archive,
-                                os.path.join(ARCHIVE_DIR, pn + ".tgz"))
-      sat.cfg.APPLICATION.workdir = svgDir
-      # DBG.write("END sat config", sat.cfg.APPLICATION, True)
+        # source
+        logger.write(_("get sources\n"))
+        args_source = config.VARS.application
+        args_source += " --products "
+        args_source += ",".join(l_prod_names)
+        svgDir = sat.cfg.APPLICATION.workdir
+        tmp_local_working_dir = os.path.join(
+            sat.cfg.APPLICATION.workdir, "tmp_package"
+        )  # to avoid too many big files in /tmp
+        sat.cfg.APPLICATION.workdir = tmp_local_working_dir
+        # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
+        # DBG.write("sat config id", id(sat.cfg), True)
+        # NOTE: config does not have the same id() as the one used by sat.source()
+        # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
+        import source
+
+        source.run(args_source, sat, logger)  # use this mode as runner.cfg reference
+
+        # make the new archives
+        d_archives_vcs = {}
+        for pn, pinfo in l_pinfo_vcs:
+            path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
+            logger.write("make archive vcs '%s'\n" % path_archive)
+            d_archives_vcs[pn] = (path_archive, os.path.join(ARCHIVE_DIR, pn + ".tgz"))
+        sat.cfg.APPLICATION.workdir = svgDir
+        # DBG.write("END sat config", sat.cfg.APPLICATION, True)
     return d_archives_vcs
 
+
 def make_bin_archive(prod_name, prod_info, where):
-    '''Create an archive of a product by searching its source directory.
+    """Create an archive of a product by searching its source directory.
 
     :param prod_name str: The name of the product.
     :param prod_info Config: The specific configuration corresponding to the
@@ -1132,16 +1285,17 @@ def make_bin_archive(prod_name, prod_info, where):
                       archive
     :return: The path of the resulting archive
     :rtype: str
-    '''
+    """
     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
-    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
+    tar_prod = tarfile.open(path_targz_prod, mode="w:gz")
     bin_path = prod_info.install_dir
     tar_prod.add(bin_path, arcname=path_targz_prod)
     tar_prod.close()
     return path_targz_prod
 
+
 def make_archive(prod_name, prod_info, where):
-    '''Create an archive of a product by searching its source directory.
+    """Create an archive of a product by searching its source directory.
 
     :param prod_name str: The name of the product.
     :param prod_info Config: The specific configuration corresponding to the
@@ -1150,23 +1304,22 @@ def make_archive(prod_name, prod_info, where):
                       archive
     :return: The path of the resulting archive
     :rtype: str
-    '''
+    """
     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
-    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
+    tar_prod = tarfile.open(path_targz_prod, mode="w:gz")
     local_path = prod_info.source_dir
     if old_python:
-        tar_prod.add(local_path,
-                     arcname=prod_name,
-                     exclude=exclude_VCS_and_extensions_26)
+        tar_prod.add(
+            local_path, arcname=prod_name, exclude=exclude_VCS_and_extensions_26
+        )
     else:
-        tar_prod.add(local_path,
-                     arcname=prod_name,
-                     filter=exclude_VCS_and_extensions)
+        tar_prod.add(local_path, arcname=prod_name, filter=exclude_VCS_and_extensions)
     tar_prod.close()
     return path_targz_prod
 
+
 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
-    '''Create a specific project for a source package.
+    """Create a specific project for a source package.
 
     :param config Config: The global configuration.
     :param tmp_working_dir str: The temporary local directory containing some
@@ -1178,32 +1331,24 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
     :return: The dictionary
              {"project" : (produced project, project path in the archive)}
     :rtype: Dict
-    '''
+    """
 
     # Create in the working temporary directory the full project tree
     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
-    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
-                                         "products")
-    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
-                                         "products",
-                                         "compil_scripts")
-    post_scripts_tmp_dir = os.path.join(project_tmp_dir,
-                                         "products",
-                                         "post_scripts")
-    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
-                                         "products",
-                                         "env_scripts")
-    patches_tmp_dir = os.path.join(project_tmp_dir,
-                                         "products",
-                                         "patches")
-    application_tmp_dir = os.path.join(project_tmp_dir,
-                                         "applications")
-    for directory in [project_tmp_dir,
-                      compil_scripts_tmp_dir,
-                      env_scripts_tmp_dir,
-                      post_scripts_tmp_dir,
-                      patches_tmp_dir,
-                      application_tmp_dir]:
+    products_pyconf_tmp_dir = os.path.join(project_tmp_dir, "products")
+    compil_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "compil_scripts")
+    post_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "post_scripts")
+    env_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "env_scripts")
+    patches_tmp_dir = os.path.join(project_tmp_dir, "products", "patches")
+    application_tmp_dir = os.path.join(project_tmp_dir, "applications")
+    for directory in [
+        project_tmp_dir,
+        compil_scripts_tmp_dir,
+        env_scripts_tmp_dir,
+        post_scripts_tmp_dir,
+        patches_tmp_dir,
+        application_tmp_dir,
+    ]:
         src.ensure_path_exists(directory)
 
     # Create the pyconf that contains the information of the project
@@ -1212,22 +1357,21 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
     ff = open(project_pyconf_file, "w")
     ff.write(PROJECT_TEMPLATE)
     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
-        ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
+        ftp_path = 'ARCHIVEFTP : "' + config.PATHS.ARCHIVEFTP[0]
         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
-            ftp_path=ftp_path+":"+ftpserver
-        ftp_path+='"'
+            ftp_path = ftp_path + ":" + ftpserver
+        ftp_path += '"'
         ff.write("# ftp servers where to search for prerequisite archives\n")
         ff.write(ftp_path)
     # add licence paths if any
     if len(config.PATHS.LICENCEPATH) > 0:
-        licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
+        licence_path = 'LICENCEPATH : "' + config.PATHS.LICENCEPATH[0]
         for path in config.PATHS.LICENCEPATH[1:]:
-            licence_path=licence_path+":"+path
-        licence_path+='"'
+            licence_path = licence_path + ":" + path
+        licence_path += '"'
         ff.write("\n# Where to search for licences\n")
         ff.write(licence_path)
 
-
     ff.close()
 
     # Loop over the products to get their pyconf and all the scripts
@@ -1239,34 +1383,39 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
         # skip product with property not_in_package set to yes
         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
             continue
-        find_product_scripts_and_pyconf(p_name,
-                                        p_info,
-                                        config,
-                                        with_vcs,
-                                        compil_scripts_tmp_dir,
-                                        env_scripts_tmp_dir,
-                                        post_scripts_tmp_dir,
-                                        patches_tmp_dir,
-                                        products_pyconf_tmp_dir)
+        find_product_scripts_and_pyconf(
+            p_name,
+            p_info,
+            config,
+            with_vcs,
+            compil_scripts_tmp_dir,
+            env_scripts_tmp_dir,
+            post_scripts_tmp_dir,
+            patches_tmp_dir,
+            products_pyconf_tmp_dir,
+        )
 
     # for the application pyconf, we write directly the config
     # don't search for the original pyconf file
     # to avoid problems with overwrite sections and rm_products key
     write_application_pyconf(config, application_tmp_dir)
 
-    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
+    d_project = {"project": (project_tmp_dir, PROJECT_DIR)}
     return d_project
 
-def find_product_scripts_and_pyconf(p_name,
-                                    p_info,
-                                    config,
-                                    with_vcs,
-                                    compil_scripts_tmp_dir,
-                                    env_scripts_tmp_dir,
-                                    post_scripts_tmp_dir,
-                                    patches_tmp_dir,
-                                    products_pyconf_tmp_dir):
-    '''Create a specific pyconf file for a given product. Get its environment
+
+def find_product_scripts_and_pyconf(
+    p_name,
+    p_info,
+    config,
+    with_vcs,
+    compil_scripts_tmp_dir,
+    env_scripts_tmp_dir,
+    post_scripts_tmp_dir,
+    patches_tmp_dir,
+    products_pyconf_tmp_dir,
+):
+    """Create a specific pyconf file for a given product. Get its environment
        script, its compilation script and patches and put it in the temporary
        working directory. This method is used in the source package in order to
        construct the specific project.
@@ -1287,7 +1436,7 @@ def find_product_scripts_and_pyconf(p_name,
                                 directory of the project.
     :param products_pyconf_tmp_dir str: The path to the temporary product
                                         scripts directory of the project.
-    '''
+    """
 
     # read the pyconf of the product
     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
@@ -1319,22 +1468,36 @@ def find_product_scripts_and_pyconf(p_name,
         # in non vcs mode, if the product is not archive, then make it become archive.
 
         # depending upon the incremental mode, select impacted sections
-        if "properties" in p_info and "incremental" in p_info.properties and\
-            p_info.properties.incremental == "yes":
-            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
+        if (
+            "properties" in p_info
+            and "incremental" in p_info.properties
+            and p_info.properties.incremental == "yes"
+        ):
+            sections = [
+                "default",
+                "default_win",
+                p_info.section,
+                p_info.section + "_win",
+            ]
         else:
             sections = [p_info.section]
         for section in sections:
-            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
-                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
-                          (p_name,section))
+            if (
+                section in product_pyconf_cfg
+                and "get_source" in product_pyconf_cfg[section]
+            ):
+                DBG.write(
+                    "sat package set archive mode to archive for product %s and section %s"
+                    % (p_name, section)
+                )
                 product_pyconf_cfg[section].get_source = "archive"
                 if not "archive_info" in product_pyconf_cfg[section]:
-                    product_pyconf_cfg[section].addMapping("archive_info",
-                                        src.pyconf.Mapping(product_pyconf_cfg),
-                                        "")
-                    product_pyconf_cfg[section].archive_info.archive_name =\
+                    product_pyconf_cfg[section].addMapping(
+                        "archive_info", src.pyconf.Mapping(product_pyconf_cfg), ""
+                    )
+                    product_pyconf_cfg[section].archive_info.archive_name = (
                         p_info.name + ".tgz"
+                    )
 
     # save git repositories for vcs products, even if archive is not in VCS mode
     # in this case the user will be able to change get_source flag and work with git
@@ -1346,30 +1509,32 @@ def find_product_scripts_and_pyconf(p_name,
             if "git_info" in product_pyconf_cfg[section]:
                 for repo in product_pyconf_cfg[section].git_info:
                     if repo in p_info.git_info:
-                        product_pyconf_cfg[section].git_info[repo] =  p_info.git_info[repo]
+                        product_pyconf_cfg[section].git_info[repo] = p_info.git_info[
+                            repo
+                        ]
 
     # write the pyconf file to the temporary project location
-    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
-                                           p_name + ".pyconf")
-    ff = open(product_tmp_pyconf_path, 'w')
+    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir, p_name + ".pyconf")
+    ff = open(product_tmp_pyconf_path, "w")
     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
     product_pyconf_cfg.__save__(ff, 1)
     ff.close()
 
 
 def write_application_pyconf(config, application_tmp_dir):
-    '''Write the application pyconf file in the specific temporary
+    """Write the application pyconf file in the specific temporary
        directory containing the specific project of a source package.
 
     :param config Config: The global configuration.
     :param application_tmp_dir str: The path to the temporary application
                                     scripts directory of the project.
-    '''
+    """
     application_name = config.VARS.application
     # write the pyconf file to the temporary application location
-    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
-                                               application_name + ".pyconf")
-    with open(application_tmp_pyconf_path, 'w') as f:
+    application_tmp_pyconf_path = os.path.join(
+        application_tmp_dir, application_name + ".pyconf"
+    )
+    with open(application_tmp_pyconf_path, "w") as f:
         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
         res = src.pyconf.Config()
         app = src.pyconf.deepCopyMapping(config.APPLICATION)
@@ -1378,16 +1543,13 @@ def write_application_pyconf(config, application_tmp_dir):
         app.base = "no"
 
         # Change the workdir
-        app.workdir = src.pyconf.Reference(
-                                 app,
-                                 src.pyconf.DOLLAR,
-                                 'LOCAL.workdir')
+        app.workdir = src.pyconf.Reference(app, src.pyconf.DOLLAR, "LOCAL.workdir")
         res.addMapping("APPLICATION", app, "")
         res.__save__(f, evaluated=False)
 
 
 def sat_package(config, tmp_working_dir, options, logger):
-    '''Prepare a dictionary that stores all the needed directories and files to
+    """Prepare a dictionary that stores all the needed directories and files to
        add in a salomeTool package.
 
     :param tmp_working_dir str: The temporary local working directory
@@ -1396,17 +1558,17 @@ def sat_package(config, tmp_working_dir, options, logger):
              add in a salomeTool package.
              {label : (path_on_local_machine, path_in_archive)}
     :rtype: dict
-    '''
+    """
     d_project = {}
 
     # we include sat himself
-    d_project["all_sat"]=(config.VARS.salometoolsway, "")
+    d_project["all_sat"] = (config.VARS.salometoolsway, "")
 
     # and we overwrite local.pyconf with a clean version.
     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
     local_cfg = src.pyconf.Config(local_file_path)
-    local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
+    local_cfg.PROJECTS.project_file_paths = src.pyconf.Sequence(local_cfg.PROJECTS)
     local_cfg.LOCAL["base"] = "default"
     local_cfg.LOCAL["workdir"] = "default"
     local_cfg.LOCAL["log_dir"] = "default"
@@ -1416,19 +1578,28 @@ def sat_package(config, tmp_working_dir, options, logger):
 
     # if the archive contains a project, we write its relative path in local.pyconf
     if options.project:
-        project_arch_path = os.path.join("projects", options.project,
-                                         os.path.basename(options.project_file_path))
+        project_arch_path = os.path.join(
+            "projects", options.project, os.path.basename(options.project_file_path)
+        )
         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
 
-    ff = open(local_pyconf_tmp_path, 'w')
+    ff = open(local_pyconf_tmp_path, "w")
     local_cfg.__save__(ff, 1)
     ff.close()
-    d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
+    d_project["local.pyconf"] = (local_pyconf_tmp_path, "data/local.pyconf")
     return d_project
 
 
-def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
-    '''Prepare a dictionary that stores all the needed directories and files to
+def project_package(
+    config,
+    name_project,
+    project_file_path,
+    ftp_mode,
+    tmp_working_dir,
+    embedded_in_sat,
+    logger,
+):
+    """Prepare a dictionary that stores all the needed directories and files to
        add in a project package.
 
     :param project_file_path str: The path to the local project.
@@ -1441,22 +1612,27 @@ def project_package(config, name_project, project_file_path, ftp_mode, tmp_worki
              add in a project package.
              {label : (path_on_local_machine, path_in_archive)}
     :rtype: dict
-    '''
+    """
     d_project = {}
     # Read the project file and get the directories to add to the package
 
     try:
-      project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
+        project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
     except:
-      logger.write("""
-WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
-      project_pyconf_cfg = src.pyconf.Config(project_file_path)
-      project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
-
-    paths = {"APPLICATIONPATH" : "applications",
-             "PRODUCTPATH" : "products",
-             "JOBPATH" : "jobs",
-             "MACHINEPATH" : "machines"}
+        logger.write(
+            """
+WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n"""
+            % (name_project, project_file_path)
+        )
+        project_pyconf_cfg = src.pyconf.Config(project_file_path)
+        project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
+
+    paths = {
+        "APPLICATIONPATH": "applications",
+        "PRODUCTPATH": "products",
+        "JOBPATH": "jobs",
+        "MACHINEPATH": "machines",
+    }
     if not ftp_mode:
         paths["ARCHIVEPATH"] = "archives"
 
@@ -1467,7 +1643,9 @@ WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n"""
             continue
         if embedded_in_sat:
             dest_path = os.path.join("projects", name_project, paths[path])
-            project_file_dest = os.path.join("projects", name_project, project_file_name)
+            project_file_dest = os.path.join(
+                "projects", name_project, project_file_name
+            )
         else:
             dest_path = paths[path]
             project_file_dest = project_file_name
@@ -1477,18 +1655,19 @@ WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n"""
 
         # Modify the value of the path in the package
         project_pyconf_cfg[path] = src.pyconf.Reference(
-                                    project_pyconf_cfg,
-                                    src.pyconf.DOLLAR,
-                                    'project_path + "/' + paths[path] + '"')
+            project_pyconf_cfg,
+            src.pyconf.DOLLAR,
+            'project_path + "/' + paths[path] + '"',
+        )
 
     # Modify some values
     if "project_path" not in project_pyconf_cfg:
-        project_pyconf_cfg.addMapping("project_path",
-                                      src.pyconf.Mapping(project_pyconf_cfg),
-                                      "")
-    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
-                                                           src.pyconf.DOLLAR,
-                                                           'PWD')
+        project_pyconf_cfg.addMapping(
+            "project_path", src.pyconf.Mapping(project_pyconf_cfg), ""
+        )
+    project_pyconf_cfg.project_path = src.pyconf.Reference(
+        project_pyconf_cfg, src.pyconf.DOLLAR, "PWD"
+    )
     # we don't want to export these two fields
     project_pyconf_cfg.__delitem__("file_path")
     project_pyconf_cfg.__delitem__("PWD")
@@ -1497,7 +1676,7 @@ WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n"""
 
     # Write the project pyconf file
     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
-    ff = open(project_pyconf_tmp_path, 'w')
+    ff = open(project_pyconf_tmp_path, "w")
     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
     project_pyconf_cfg.__save__(ff, 1)
     ff.close()
@@ -1505,12 +1684,13 @@ WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n"""
 
     return d_project
 
+
 def add_readme(config, options, where):
     readme_path = os.path.join(where, "README")
-    with codecs.open(readme_path, "w", 'utf-8') as f:
+    with codecs.open(readme_path, "w", "utf-8") as f:
 
-    # templates for building the header
-        readme_header="""
+        # templates for building the header
+        readme_header = """
 # This package was generated with sat $version
 # Date: $date
 # User: $user
@@ -1521,8 +1701,8 @@ SALOME (the directory where this file is located).
 
 """
         if src.architecture.is_windows():
-            readme_header = readme_header.replace('$$ROOT','%ROOT%')
-        readme_compilation_with_binaries="""
+            readme_header = readme_header.replace("$$ROOT", "%ROOT%")
+        readme_compilation_with_binaries = """
 
 compilation based on the binaries used as prerequisites
 =======================================================
@@ -1542,48 +1722,54 @@ The procedure to do it is:
     modules you need to (with -p option)
 
 """
-        readme_header_tpl=string.Template(readme_header)
-        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
-                "README_BIN.template")
-        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
-                "README_LAUNCHER.template")
-        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
-                "README_BIN_VIRTUAL_APP.template")
-        readme_template_path_src = os.path.join(config.VARS.internal_dir,
-                "README_SRC.template")
-        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
-                "README_PROJECT.template")
-        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
-                "README_SAT.template")
+        readme_header_tpl = string.Template(readme_header)
+        readme_template_path_bin = os.path.join(
+            config.VARS.internal_dir, "README_BIN.template"
+        )
+        readme_template_path_bin_launcher = os.path.join(
+            config.VARS.internal_dir, "README_LAUNCHER.template"
+        )
+        readme_template_path_bin_virtapp = os.path.join(
+            config.VARS.internal_dir, "README_BIN_VIRTUAL_APP.template"
+        )
+        readme_template_path_src = os.path.join(
+            config.VARS.internal_dir, "README_SRC.template"
+        )
+        readme_template_path_pro = os.path.join(
+            config.VARS.internal_dir, "README_PROJECT.template"
+        )
+        readme_template_path_sat = os.path.join(
+            config.VARS.internal_dir, "README_SAT.template"
+        )
 
         # prepare substitution dictionary
         d = dict()
-        d['user'] = config.VARS.user
-        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
-        d['version'] = src.get_salometool_version(config)
-        d['dist'] = config.VARS.dist
-        f.write(readme_header_tpl.substitute(d)) # write the general header (common)
+        d["user"] = config.VARS.user
+        d["date"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
+        d["version"] = src.get_salometool_version(config)
+        d["dist"] = config.VARS.dist
+        f.write(readme_header_tpl.substitute(d))  # write the general header (common)
 
         if options.binaries or options.sources:
-            d['application'] = config.VARS.application
-            d['BINARIES']    = config.INTERNAL.config.binary_dir
-            d['SEPARATOR'] = config.VARS.sep
+            d["application"] = config.VARS.application
+            d["BINARIES"] = config.INTERNAL.config.binary_dir
+            d["SEPARATOR"] = config.VARS.sep
             if src.architecture.is_windows():
-                d['operatingSystem'] = 'Windows'
-                d['PYTHON3'] = 'python3'
-                d['ROOT']    = '%ROOT%'
+                d["operatingSystem"] = "Windows"
+                d["PYTHON3"] = "python3"
+                d["ROOT"] = "%ROOT%"
             else:
-                d['operatingSystem'] = 'Linux'
-                d['PYTHON3'] = ''
-                d['ROOT']    = '$ROOT'
-            f.write("# Application: " + d['application'] + "\n")
-            if 'KERNEL' in config.APPLICATION.products:
+                d["operatingSystem"] = "Linux"
+                d["PYTHON3"] = ""
+                d["ROOT"] = "$ROOT"
+            f.write("# Application: " + d["application"] + "\n")
+            if "KERNEL" in config.APPLICATION.products:
                 VersionSalome = src.get_salome_version(config)
                 # Case where SALOME has the launcher that uses the SalomeContext API
-                if VersionSalome >= MMP([7,3,0]):
-                    d['launcher'] = config.APPLICATION.profile.launcher_name
+                if VersionSalome >= MMP([7, 3, 0]):
+                    d["launcher"] = config.APPLICATION.profile.launcher_name
                 else:
-                    d['virtual_app'] = 'runAppli' # this info is not used now)
+                    d["virtual_app"] = "runAppli"  # this info is not used now
 
         # write the specific sections
         if options.binaries:
@@ -1607,13 +1793,14 @@ The procedure to do it is:
 
     return readme_path
 
-def update_config(config, logger,  prop, value):
-    '''Remove from config.APPLICATION.products the products that have the property given as input.
+
+def update_config(config, logger, prop, value):
+    """Remove from config.APPLICATION.products the products that have the property given as input.
 
     :param config Config: The global config.
     :param prop str: The property to filter
     :param value str: The value of the property to filter
-    '''
+    """
     # if there is no APPLICATION (ex sat package -t) : nothing to do
     if "APPLICATION" in config:
         l_product_to_remove = []
@@ -1623,15 +1810,19 @@ def update_config(config, logger,  prop, value):
                 l_product_to_remove.append(product_name)
         for product_name in l_product_to_remove:
             config.APPLICATION.products.__delitem__(product_name)
-            logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
+            logger.write(
+                "Remove product %s with property %s\n" % (product_name, prop), 5
+            )
+
 
 def description():
-    '''method that is called when salomeTools is called with --help option.
+    """method that is called when salomeTools is called with --help option.
 
     :return: The text to display for the package command description.
     :rtype: str
-    '''
-    return _("""
+    """
+    return _(
+        """
 The package command creates a tar file archive of a product.
 There are four kinds of archive, which can be mixed:
 
@@ -1645,36 +1836,42 @@ There are four kinds of archive, which can be mixed:
      It contains code utility salomeTools.
 
 example:
- >> sat package SALOME-master --binaries --sources""")
+ >> sat package SALOME-master --binaries --sources"""
+    )
+
 
 def run(args, runner, logger):
-    '''method that is called when salomeTools is called with package parameter.
-    '''
+    """method that is called when salomeTools is called with package parameter."""
 
     # Parse the options
     (options, args) = parser.parse_args(args)
 
-
     # Check that a type of package is called, and only one
-    all_option_types = (options.binaries,
-                        options.sources,
-                        options.project not in ["", None],
-                        options.sat,
-                        options.bin_products)
+    all_option_types = (
+        options.binaries,
+        options.sources,
+        options.project not in ["", None],
+        options.sat,
+        options.bin_products,
+    )
 
     # Check if no option for package type
     if all_option_types.count(True) == 0:
-        msg = _("Error: Precise a type for the package\nUse one of the "
-                "following options: --binaries, --sources, --project or"
-                " --salometools, --bin_products")
+        msg = _(
+            "Error: Precise a type for the package\nUse one of the "
+            "following options: --binaries, --sources, --project or"
+            " --salometools, --bin_products"
+        )
         logger.write(src.printcolors.printcError(msg), 1)
         logger.write("\n", 1)
         return 1
-    do_create_package = options.binaries or options.sources or options.project or options.sat
+    do_create_package = (
+        options.binaries or options.sources or options.project or options.sat
+    )
 
     if options.bin_products:
         ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
-        if ret!=0:
+        if ret != 0:
             return ret
     if not do_create_package:
         return 0
@@ -1689,8 +1886,11 @@ def run(args, runner, logger):
         src.check_config_has_application(runner.cfg)
 
         # Display information
-        logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
-                                                    runner.cfg.VARS.application), 1)
+        logger.write(
+            _("Packaging application %s\n")
+            % src.printcolors.printcLabel(runner.cfg.VARS.application),
+            1,
+        )
 
         # Get the default directory where to put the packages
         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
@@ -1708,20 +1908,30 @@ def run(args, runner, logger):
                 break
 
         if foundProject is None:
-            local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
-            msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
+            local_path = os.path.join(
+                runner.cfg.VARS.salometoolsway, "data", "local.pyconf"
+            )
+            msg = _(
+                """ERROR: the project %(1)s is not visible by salomeTools.
 known projects are:
 %(2)s
 
 Please add it in file:
-%(3)s""" % \
-                    {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
+%(3)s"""
+                % {
+                    "1": options.project,
+                    "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths),
+                    "3": local_path,
+                }
+            )
             logger.write(src.printcolors.printcError(msg), 1)
             logger.write("\n", 1)
             return 1
         else:
             options.project_file_path = foundProject
-            src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
+            src.printcolors.print_value(
+                logger, "Project path", options.project_file_path, 2
+            )
 
     # Remove the products that are filtered by the --without_properties option
     if options.without_properties:
@@ -1742,19 +1952,19 @@ Please add it in file:
             dir_name = os.path.dirname(options.name)
 
         # suppress extension
-        if archive_name[-len(".tgz"):] == ".tgz":
-            archive_name = archive_name[:-len(".tgz")]
-        if archive_name[-len(".tar.gz"):] == ".tar.gz":
-            archive_name = archive_name[:-len(".tar.gz")]
+        if archive_name[-len(".tgz") :] == ".tgz":
+            archive_name = archive_name[: -len(".tgz")]
+        if archive_name[-len(".tar.gz") :] == ".tar.gz":
+            archive_name = archive_name[: -len(".tar.gz")]
 
     else:
-        archive_name=""
+        archive_name = ""
         dir_name = package_default_path
         if options.binaries or options.sources:
             archive_name = runner.cfg.APPLICATION.name
 
         if options.binaries:
-            archive_name += "-"+runner.cfg.VARS.dist
+            archive_name += "-" + runner.cfg.VARS.dist
 
         if options.sources:
             archive_name += "-SRC"
@@ -1762,18 +1972,20 @@ Please add it in file:
                 archive_name += "-VCS"
 
         if options.sat:
-            archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
+            archive_name += "salomeTools_" + src.get_salometool_version(runner.cfg)
 
         if options.project:
             if options.sat:
                 archive_name += "_"
-            archive_name += ("satproject_" + options.project)
-
-        if len(archive_name)==0: # no option worked
-            msg = _("Error: Cannot name the archive\n"
-                    " check if at least one of the following options was "
-                    "selected : --binaries, --sources, --project or"
-                    " --salometools")
+            archive_name += "satproject_" + options.project
+
+        if len(archive_name) == 0:  # no option worked
+            msg = _(
+                "Error: Cannot name the archive\n"
+                " check if at least one of the following options was "
+                "selected : --binaries, --sources, --project or"
+                " --salometools"
+            )
             logger.write(src.printcolors.printcError(msg), 1)
             logger.write("\n", 1)
             return 1
@@ -1787,7 +1999,7 @@ Please add it in file:
     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
     src.ensure_path_exists(tmp_working_dir)
     logger.write("\n", 5)
-    logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
+    logger.write(_("The temporary working directory: %s\n" % tmp_working_dir), 5)
 
     logger.write("\n", 3)
 
@@ -1795,46 +2007,48 @@ Please add it in file:
     logger.write(src.printcolors.printcLabel(msg), 2)
     logger.write("\n", 2)
 
-    d_files_to_add={}  # content of the archive
+    d_files_to_add = {}  # content of the archive
 
     # a dict to hold paths that will need to be substitute for users recompilations
-    d_paths_to_substitute={}
+    d_paths_to_substitute = {}
 
     if options.binaries:
-        d_bin_files_to_add = binary_package(runner.cfg,
-                                            logger,
-                                            options,
-                                            tmp_working_dir)
+        d_bin_files_to_add = binary_package(
+            runner.cfg, logger, options, tmp_working_dir
+        )
         # for all binaries dir, store the substitution that will be required
         # for extra compilations
         for key in d_bin_files_to_add:
             if key.endswith("(bin)"):
                 source_dir = d_bin_files_to_add[key][0]
                 path_in_archive = d_bin_files_to_add[key][1].replace(
-                   runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
-                   runner.cfg.INTERNAL.config.install_dir)
-                if os.path.basename(source_dir)==os.path.basename(path_in_archive):
+                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
+                    runner.cfg.INTERNAL.config.install_dir,
+                )
+                if os.path.basename(source_dir) == os.path.basename(path_in_archive):
                     # if basename is the same we will just substitute the dirname
-                    d_paths_to_substitute[os.path.dirname(source_dir)]=\
-                        os.path.dirname(path_in_archive)
+                    d_paths_to_substitute[
+                        os.path.dirname(source_dir)
+                    ] = os.path.dirname(path_in_archive)
                 else:
-                    d_paths_to_substitute[source_dir]=path_in_archive
+                    d_paths_to_substitute[source_dir] = path_in_archive
 
         d_files_to_add.update(d_bin_files_to_add)
     if options.sources:
-        d_files_to_add.update(source_package(runner,
-                                        runner.cfg,
-                                        logger,
-                                        options,
-                                        tmp_working_dir))
+        d_files_to_add.update(
+            source_package(runner, runner.cfg, logger, options, tmp_working_dir)
+        )
         if options.binaries:
             # for archives with bin and sources we provide a shell script able to
             # install binaries for compilation
-            file_install_bin=produce_install_bin_file(runner.cfg,logger,
-                                                      tmp_working_dir,
-                                                      d_paths_to_substitute,
-                                                      "install_bin.sh")
-            d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
+            file_install_bin = produce_install_bin_file(
+                runner.cfg,
+                logger,
+                tmp_working_dir,
+                d_paths_to_substitute,
+                "install_bin.sh",
+            )
+            d_files_to_add.update({"install_bin": (file_install_bin, "install_bin.sh")})
             logger.write("substitutions that need to be done later : \n", 5)
             logger.write(str(d_paths_to_substitute), 5)
             logger.write("\n", 5)
@@ -1842,14 +2056,25 @@ Please add it in file:
         # --salomeTool option is not considered when --sources is selected, as this option
         # already brings salomeTool!
         if options.sat:
-            d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
-                                  options, logger))
+            d_files_to_add.update(
+                sat_package(runner.cfg, tmp_working_dir, options, logger)
+            )
 
     if options.project:
         DBG.write("config for package %s" % options.project, runner.cfg)
-        d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
-
-    if not(d_files_to_add):
+        d_files_to_add.update(
+            project_package(
+                runner.cfg,
+                options.project,
+                options.project_file_path,
+                options.ftp,
+                tmp_working_dir,
+                options.sat,
+                logger,
+            )
+        )
+
+    if not (d_files_to_add):
         msg = _("Error: Empty dictionnary to build the archive!\n")
         logger.write(src.printcolors.printcError(msg), 1)
         logger.write("\n", 1)
@@ -1859,7 +2084,7 @@ Please add it in file:
     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
     d_files_to_add["README"] = (local_readme_tmp_path, "README")
 
-    # Add the additional files of option add_files
+    # Add the additional files of option add_files
     if options.add_files:
         for file_path in options.add_files:
             if not os.path.exists(file_path):
@@ -1871,12 +2096,14 @@ Please add it in file:
     logger.write("\n", 2)
     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
     logger.write("\n", 2)
-    logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
+    logger.write(
+        "\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5
+    )
 
     res = 0
     try:
         # Creating the object tarfile
-        tar = tarfile.open(path_targz, mode='w:gz')
+        tar = tarfile.open(path_targz, mode="w:gz")
 
         # get the filtering function if needed
         if old_python:
@@ -1885,11 +2112,15 @@ Please add it in file:
             filter_function = exclude_VCS_and_extensions
 
         # Add the files to the tarfile object
-        res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
+        res = add_files(
+            tar, archive_name, d_files_to_add, logger, f_exclude=filter_function
+        )
         tar.close()
     except KeyboardInterrupt:
         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
-        logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
+        logger.write(
+            _("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1
+        )
         # remove the working directory
         shutil.rmtree(tmp_working_dir)
         logger.write(_("OK"), 1)