# SALOME MODULES :
'CONFIGURATION'
- 'MEDCOUPLING' : {tag : 'V9_5_0', section: 'default_MPI'}
+ 'MEDCOUPLING' : {tag : 'V9_5_0', section: 'version_V9_5_0_MPI'}
}
test_base :
{
# SALOME MODULES :
'CONFIGURATION'
- 'MEDCOUPLING' : {tag : 'V9_6_0', section: 'default_MPI'}
+ 'MEDCOUPLING' : {tag : 'V9_6_0', section: 'version_V9_6_0_MPI'}
}
test_base :
{
# SALOME MODULES :
'CONFIGURATION'
- 'MEDCOUPLING' : {section : 'default_MPI'}
+ 'MEDCOUPLING' : {section : 'version_V9_7_0_MPI'}
}
test_base :
{
# SALOME MODULES :
'CONFIGURATION'
- 'MEDCOUPLING' : {section: 'default_MPI'}
+ 'MEDCOUPLING' : {section: 'version_V9_8_0_MPI'}
}
test_base :
{
# SALOME MODULES :
'CONFIGURATION'
- 'MEDCOUPLING' : {tag : 'abn_akr/int', section: 'default_MPI'}
+ 'MEDCOUPLING' : {tag : 'master', section: 'default_MPI'}
}
test_base :
{
# SALOME MODULES :
'CONFIGURATION'
- 'MEDCOUPLING' : {tag : 'abn_akr/int'}
+ 'MEDCOUPLING'
}
test_base :
{
'RESTRICTED'
'LIBBATCH' : {tag :'V2_4_3'}
'KERNEL' : {tag: 'V9_5_0', section : 'default_MPI', verbose : 'yes'}
- 'MEDCOUPLING' : {tag: 'V9_5_0', section : 'default_MPI', verbose : 'yes'}
+ 'MEDCOUPLING' : {tag: 'V9_5_0', section : 'version_V9_5_0_MPI', verbose : 'yes'}
'GUI' : {verbose : 'yes'}
'GEOM'
'SMESH'
'RESTRICTED'
'LIBBATCH' : {tag :'V2_4_4'}
'KERNEL' : {section : 'default_MPI', verbose : 'yes'}
- 'MEDCOUPLING' : {section : 'default_MPI', verbose : 'yes'}
+ 'MEDCOUPLING' : {section : 'version_V9_6_0_MPI', verbose : 'yes'}
'GUI' : {verbose : 'yes'}
'GEOM'
'SMESH'
'RESTRICTED'
'LIBBATCH' : {tag :'V2_4_5'}
'KERNEL' : {section : 'default_MPI', verbose : 'yes'}
- 'MEDCOUPLING' : {section : 'default_MPI', verbose : 'yes'}
+ 'MEDCOUPLING' : {section : 'version_V9_7_0_MPI', verbose : 'yes'}
'GUI' : {verbose : 'yes'}
'GEOM'
'SMESH'
}
__overwrite__ :
[
- {
- # with cmake 3.17.5 issue with Python2 detection - remove it since not required
- 'PRODUCTS.opencv.version_3_2_0.patches' : ['opencv-3.2.0-ccache.patch','opencv-3.2.0-python2-cmake-3.17.5.patch']
- }
]
'RESTRICTED'
'LIBBATCH' : {tag : 'V2_4_5'}
'KERNEL' : {section : 'default_MPI', verbose : 'yes'}
- 'MEDCOUPLING' : {section : 'default_MPI', verbose : 'yes'}
+ 'MEDCOUPLING' : {section : 'version_V9_8_0_MPI', verbose : 'yes'}
'GUI' : {verbose : 'yes'}
'GEOM'
'SMESH'
RESTRICTED_ROOT_DIR : $workdir + $VARS.sep + "SOURCES" + $VARS.sep + "RESTRICTED"
SALOME_USE_64BIT_IDS : '1'
VTK_SMP_IMPLEMENTATION_TYPE : OpenMP # OpenMP # choose among: sequential / OpenMP / TBB switches
+ SALOME_GMSH_HEADERS_STD : '1'
}
launch : {PYTHONIOENCODING:"UTF_8"} # alternative is to encode every accentued string with .encode('utf-8')
SALOME_trace : "local" # local/file:.../with_logger
SALOME_MODULES : "SHAPER,SHAPERSTUDY,GEOM,SMESH,PARAVIS,YACS,JOBMANAGER" # specify the first modules to display in gui
SALOME_ACTOR_DELEGATE_TO_VTK : '1'
- SALOME_GMSH_HEADERS_STD : '1'
- }
+ SCOTCH_HPC : '1'
+ }
products :
{
# PREREQUISITES :
rkCommon : '1.5.1'
root: '6.22.02'
scipy : 'native'
- scotch : 'native'
+ scotch : {tag: 'native', section: 'version_6_0_4_MPI', hpc: 'yes', base: 'no'}
setuptools : 'native'
sip : 'native'
six : 'native'
'RESTRICTED'
'LIBBATCH' : {tag : 'V2_4_5'}
'KERNEL'
- 'MEDCOUPLING' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
+ 'MEDCOUPLING' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
'GUI'
'GEOM'
'SMESH'
'HEXABLOCKPLUGIN'
'HOMARD'
'FIELDS'
- 'PARAVIS': {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
+ 'PARAVIS': {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
'OPENTURNS_SALOME': '9.8.0'
'JOBMANAGER'
'YACS'
__condition__ : "VARS.dist in ['UB20.04']"
'APPLICATION.products.opencv' : '3.2.0'
'APPLICATION.products.cminpack': 'native'
- 'APPLICATION.products.PyFMI' : {tag: '2.5', base: 'no', section: 'version_2_5_no_pip' }
+ 'APPLICATION.products.PyFMI' : {tag: '2.5', base: 'no', section: 'version_2_5_no_pip' }
'APPLICATION.products.netcdf' : '4.6.2'
}
{
- # On DB10, ParaView fails to find xmlpatterns executable : ParaViewClient.cmake try to find it
- # from Qt5_DIR, going back from it in filesystem (hardcoded).
- # The standard patch is also needed to be able to build PARAVIS (same kind of issue).
- # What is more, ParaView 5.9 CMake procedure requires Qt 5.12 as minimum version (5.11 here).
- # As this version is compliant too, let's force it as the new minimum needed version.
+ # DB10:
+ # - Qt minimal version 5.12
+ # - xmlpatterns executable
__condition__ : "VARS.dist in ['DB10']"
'APPLICATION.products.cminpack': 'native'
- 'APPLICATION.products.PyFMI' : {tag:'2.5', base: 'no', section: 'version_2_5_no_pip'}
- 'APPLICATION.products.ParaView' : {tag: '5.9.0', base : 'no', section: 'version_5_9_0_DB10'}
+ 'APPLICATION.products.PyFMI' : {tag:'2.5', base: 'no', section: 'version_2_5_no_pip' }
+ 'APPLICATION.products.ParaView' : {tag: '5.9.0', base: 'no', section: 'version_5_9_0_DB10', hpc: 'yes'}
}
{
__condition__ : "VARS.dist in ['DB11']"
'APPLICATION.products.opencv' : '3.2.0'
'APPLICATION.products.cminpack': 'native'
'APPLICATION.products.PyFMI' : {tag:'2.5', base: 'no', section: 'version_2_5_no_pip' }
- 'APPLICATION.products.ParaView' : {tag: '5.9.0', base : 'no', section: 'version_5_9_0_DB11'}
+ 'APPLICATION.products.ParaView' : {tag: '5.9.0', base: 'no', section: 'version_5_9_0_DB11', hpc: 'yes'}
}
{
# CentOS 8 repositories don't include sphinxintl package which must be installed through pip.
# To avoid its missing (system_info pyconf key doesn't handle this use case), we embed it.
__condition__ : "VARS.dist in ['CO8']"
- 'APPLICATION.products.sphinxintl' : {tag: '0.9.10', base: 'no', section: 'version_0_9_10_no_pip' }
+ 'APPLICATION.products.sphinxintl' : {tag: '0.9.10', base: 'no', section: 'version_0_9_10_no_pip' }
'APPLICATION.products.cminpack' : '1.3.6'
- 'APPLICATION.products.PyFMI' : {tag: '2.5', base: 'no', section: 'version_2_5_no_pip' }
- 'APPLICATION.products.statsmodels' : {tag: '0.6.1', base: 'no', section: 'version_0_6_1_no_pip' }
+ 'APPLICATION.products.PyFMI' : {tag: '2.5', base: 'no', section: 'version_2_5_no_pip' }
+ 'APPLICATION.products.statsmodels' : {tag: '0.6.1', base: 'no', section: 'version_0_6_1_no_pip' }
}
{
__condition__ : "VARS.dist in ['FD32']"
SALOME_trace : "local" # local/file:.../with_logger
SALOME_MODULES : "SHAPER,SHAPERSTUDY,GEOM,SMESH,PARAVIS,YACS,JOBMANAGER" # specify the first modules to display in gui
SALOME_ACTOR_DELEGATE_TO_VTK : '1'
+ SCOTCH_HPC : '1'
}
products :
{
packaging : '17.1'
pandas : '0.25.2'
patsy : '0.5.2'
- ParaView : '5.9.0'
+ ParaView : {tag:'5.9.0', base: 'no', section: 'version_5_9_0_MPI', hpc: 'yes'}
PERSALYS: 'v11.0'
petsc : {tag : '3.16.0', section: 'version_3_16_0'}
Pillow : '7.1.1'
Pygments : '2.0.2'
pyparsing : '2.0.3'
PyQt : '5.15.3'
- #PyQtChart : '5.9'
pyreadline : '2.0'
Python : '3.6.5'
pytz : '2017.2'
root: '6.22.02'
salome_system : 'native'
scipy : '1.4.1'
- scotch : '6.0.4'
+ scotch : {tag: '6.0.4', section: 'version_6_0_4_MPI', hpc: 'yes', base: 'no'}
setuptools : '38.4.0'
sip : '5.5.0'
six : '1.10.0'
'RESTRICTED'
'LIBBATCH' : {tag : 'V2_4_5'}
'KERNEL'
- 'MEDCOUPLING'
+ 'MEDCOUPLING' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
'GUI'
'GEOM'
'SMESH'
'HOMARD'
'FIELDS'
'OPENTURNS_SALOME' : '9.8.0'
- 'PARAVIS'
+ 'PARAVIS' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
'JOBMANAGER'
'YACS'
'YACSGEN'
{
__condition__ : "VARS.dist in ['FD30']"
'APPLICATION.products.gcc' : '9.3.0'
- 'APPLICATION.products.ParaView' : {tag:'5.9.0', base: 'no', section: 'version_5_9_0_MPI', hpc: 'yes'}
- 'APPLICATION.products.PARAVIS' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
- 'APPLICATION.products.MEDCOUPLING' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
}
{
__condition__ : "VARS.dist in ['FD32']"
# https://github.com/scipy/scipy/issues/11611
'APPLICATION.products.scipy' : '1.5.2'
'APPLICATION.rm_products' : ['gcc', 'gmp', 'mpc', 'mpfr']
- 'APPLICATION.products.ParaView' : {tag:'5.9.0', base: 'no', section: 'version_5_9_0_MPI', hpc: 'yes'}
- 'APPLICATION.products.PARAVIS' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
- 'APPLICATION.products.MEDCOUPLING' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
}
{
__condition__ : "VARS.dist in ['CO7']"
'APPLICATION.rm_products' : ['gcc', 'gmp', 'mpc', 'mpfr']
- 'APPLICATION.products.ParaView' : {tag:'5.9.0', base: 'no', section: 'version_5_9_0_MPI', hpc: 'yes'}
}
{
__condition__ : "VARS.dist in ['CO8']"
'APPLICATION.rm_products' : ['gcc', 'gmp', 'mpc', 'mpfr']
- 'APPLICATION.products.ParaView' : {tag:'5.9.0', base: 'no', section: 'version_5_9_0_MPI', hpc: 'yes'}
- 'APPLICATION.products.PARAVIS' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
- 'APPLICATION.products.MEDCOUPLING' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
- }
- {
- __condition__ : "VARS.dist in ['DB09']"
- 'APPLICATION.products.ParaView' : {tag:'5.9.0', base: 'no', section: 'version_5_9_0_MPI', hpc: 'yes'}
- 'APPLICATION.products.PARAVIS' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
- 'APPLICATION.products.MEDCOUPLING' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
}
{
__condition__ : "VARS.dist in ['DB10']"
'APPLICATION.rm_products' : ['gcc', 'gmp', 'mpc', 'mpfr']
- 'APPLICATION.products.ParaView' : {tag:'5.9.0', base: 'no', section: 'version_5_9_0_MPI', hpc: 'yes'}
- 'APPLICATION.products.PARAVIS' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
- 'APPLICATION.products.MEDCOUPLING' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
}
{
__condition__ : "VARS.dist in ['UB18.04']"
'APPLICATION.rm_products' : ['gcc', 'gmp', 'mpc', 'mpfr']
- 'APPLICATION.products.ParaView' : {tag:'5.9.0', base: 'no', section: 'version_5_9_0_MPI', hpc: 'yes'}
- 'APPLICATION.products.PARAVIS' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
- 'APPLICATION.products.MEDCOUPLING' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
}
{
__condition__ : "VARS.dist in ['UB20.04']"
'APPLICATION.rm_products' : ['gcc', 'gmp', 'mpc', 'mpfr']
- 'APPLICATION.products.ParaView' : {tag:'5.9.0', base: 'no', section: 'version_5_9_0_MPI', hpc: 'yes'}
- 'APPLICATION.products.PARAVIS' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
- 'APPLICATION.products.MEDCOUPLING' : {tag:'master', base: 'no', section: 'default_MPI', hpc: 'yes'}
}
]
# SALOME MODULES :
'CONFIGURATION'
- 'MEDCOUPLING' : {section : 'default_MPI', verbose : 'yes'}
+ 'MEDCOUPLING' : {section : 'version_V9_6_0_MPI', verbose : 'yes'}
'SOLVERLAB' : {section : 'default_MPI', hpc: 'yes'}
}
profile :
'LIBBATCH' : {tag :'V2_4_5'}
'KERNEL' : {section : 'default_MPI', verbose : 'yes'}
'GUI' : {verbose : 'yes'}
- 'MEDCOUPLING' : {section : 'default_MPI', verbose : 'yes'}
+ 'MEDCOUPLING' : {section : 'version_V9_7_0_MPI', verbose : 'yes'}
'SOLVERLAB' : {tag: 'master', section : 'default_MPI', hpc: 'yes'}
}
profile :
CONFIGURATION_ROOT_DIR : $workdir + $VARS.sep + "SOURCES" + $VARS.sep + "CONFIGURATION"
RESTRICTED_ROOT_DIR : $workdir + $VARS.sep + "SOURCES" + $VARS.sep + "RESTRICTED"
SALOME_USE_64BIT_IDS : '1'
+ SALOME_GMSH_HEADERS_STD : '1'
}
launch : {PYTHONIOENCODING:"UTF_8", SALOME_MODULES_ORDER:""}
SALOME_trace : "local" # local/file:.../with_logger
SALOME_MODULES : "SOLVERLAB" # specify the first modules to display in gui
+ SCOTCH_HPC : '1'
}
products :
{
qwt : '6.1.2'
requests : '2.19.1'
scipy : '1.4.1'
- scotch : '6.0.4'
+ scotch : {tag: '6.0.4', section: 'version_6_0_4_MPI', hpc: 'yes', base: 'no'}
setuptools : '38.4.0'
sip : '5.5.0'
six : '1.10.0'
default_MPI :
{
- cmake_options : "-DMEDCOUPLING_ENABLE_PYTHON=ON -DMEDCOUPLING_ENABLE_PARTITIONER=OFF -DMEDCOUPLING_ENABLE_RENUMBER=ON -DMEDCOUPLING_PARTITIONER_METIS=OFF -DMEDCOUPLING_PARTITIONER_SCOTCH=ON -DMEDCOUPLING_PARTITIONER_PARMETIS=ON -DMEDCOUPLING_MICROMED=OFF -DMEDCOUPLING_USE_MPI=ON -DSALOME_USE_MPI=ON -DMEDCOUPLING_USE_64BIT_IDS=ON -DCMAKE_CXX_COMPILER:STRING=${MPI_CXX_COMPILER} -DCMAKE_C_COMPILER:STRING=${MPI_C_COMPILER}"
+ cmake_options : "-DMEDCOUPLING_ENABLE_PYTHON=ON -DMEDCOUPLING_ENABLE_PARTITIONER=ON -DMEDCOUPLING_ENABLE_RENUMBER=ON -DMEDCOUPLING_PARTITIONER_METIS=ON -DMEDCOUPLING_PARTITIONER_PARMETIS=OFF -DMEDCOUPLING_PARTITIONER_SCOTCH=OFF -DMEDCOUPLING_PARTITIONER_PTSCOTCH=ON -DMEDCOUPLING_MICROMED=OFF -DMEDCOUPLING_USE_MPI=ON -DSALOME_USE_MPI=ON -DMEDCOUPLING_USE_64BIT_IDS=ON -DCMAKE_CXX_COMPILER:STRING=${MPI_CXX_COMPILER} -DCMAKE_C_COMPILER:STRING=${MPI_C_COMPILER} -DSCOTCH_ROOT_DIR=${SCOTCH_ROOT_DIR} -DPTSCOTCH_ROOT_DIR=${PTSCOTCH_ROOT_DIR} -DPTSCOTCH_INCLUDE_DIRS=${PTSCOTCH_INCLUDE_DIR}"
depend : [
"boost",
"cppunit",
"CONFIGURATION",
"openmpi"
]
- opt_depend : ["ParMetis"]
+ opt_depend : ["ParMetis", "metis"]
}
default_32BIT_IDS:
]
}
+version_V9_8_0_MPI:
+{
+ cmake_options : "-DMEDCOUPLING_ENABLE_PYTHON=ON -DMEDCOUPLING_ENABLE_PARTITIONER=OFF -DMEDCOUPLING_ENABLE_RENUMBER=ON -DMEDCOUPLING_PARTITIONER_METIS=OFF -DMEDCOUPLING_PARTITIONER_SCOTCH=ON -DMEDCOUPLING_PARTITIONER_PARMETIS=ON -DMEDCOUPLING_MICROMED=OFF -DMEDCOUPLING_USE_MPI=ON -DSALOME_USE_MPI=ON -DMEDCOUPLING_USE_64BIT_IDS=ON -DCMAKE_CXX_COMPILER:STRING=${MPI_CXX_COMPILER} -DCMAKE_C_COMPILER:STRING=${MPI_C_COMPILER}"
+ depend : [
+ "boost",
+ "cppunit",
+ "Python",
+ "hdf5",
+ "medfile",
+ "scotch",
+ "docutils",
+ "libxml2",
+ "Sphinx",
+ "sphinxintl",
+ "setuptools",
+ "six",
+ "pytz",
+ "numpy",
+ "scipy",
+ "lapack",
+ "CONFIGURATION",
+ "openmpi"
+ ]
+ opt_depend : ["ParMetis"]
+}
+
+version_V9_7_0_MPI:
+{
+ cmake_options : "-DMEDCOUPLING_ENABLE_PYTHON=ON -DMEDCOUPLING_ENABLE_PARTITIONER=OFF -DMEDCOUPLING_ENABLE_RENUMBER=ON -DMEDCOUPLING_PARTITIONER_METIS=OFF -DMEDCOUPLING_PARTITIONER_SCOTCH=ON -DMEDCOUPLING_PARTITIONER_PARMETIS=ON -DMEDCOUPLING_MICROMED=OFF -DMEDCOUPLING_USE_MPI=ON -DSALOME_USE_MPI=ON -DMEDCOUPLING_USE_64BIT_IDS=ON -DCMAKE_CXX_COMPILER:STRING=${MPI_CXX_COMPILER} -DCMAKE_C_COMPILER:STRING=${MPI_C_COMPILER}"
+ depend : [
+ "boost",
+ "cppunit",
+ "Python",
+ "hdf5",
+ "medfile",
+ "scotch",
+ "docutils",
+ "libxml2",
+ "Sphinx",
+ "sphinxintl",
+ "setuptools",
+ "six",
+ "pytz",
+ "numpy",
+ "scipy",
+ "lapack",
+ "CONFIGURATION",
+ "openmpi"
+ ]
+ opt_depend : ["ParMetis"]
+}
+
+version_V9_6_0_MPI:
+{
+ cmake_options : "-DMEDCOUPLING_ENABLE_PYTHON=ON -DMEDCOUPLING_ENABLE_PARTITIONER=OFF -DMEDCOUPLING_ENABLE_RENUMBER=ON -DMEDCOUPLING_PARTITIONER_METIS=OFF -DMEDCOUPLING_PARTITIONER_SCOTCH=ON -DMEDCOUPLING_PARTITIONER_PARMETIS=ON -DMEDCOUPLING_MICROMED=OFF -DMEDCOUPLING_USE_MPI=ON -DSALOME_USE_MPI=ON -DMEDCOUPLING_USE_64BIT_IDS=ON -DCMAKE_CXX_COMPILER:STRING=${MPI_CXX_COMPILER} -DCMAKE_C_COMPILER:STRING=${MPI_C_COMPILER}"
+ depend : [
+ "boost",
+ "cppunit",
+ "Python",
+ "hdf5",
+ "medfile",
+ "scotch",
+ "docutils",
+ "libxml2",
+ "Sphinx",
+ "sphinxintl",
+ "setuptools",
+ "six",
+ "pytz",
+ "numpy",
+ "scipy",
+ "lapack",
+ "CONFIGURATION",
+ "openmpi"
+ ]
+ opt_depend : ["ParMetis"]
+}
+
+version_V9_5_0_MPI:
+{
+ cmake_options : "-DMEDCOUPLING_ENABLE_PYTHON=ON -DMEDCOUPLING_ENABLE_PARTITIONER=OFF -DMEDCOUPLING_ENABLE_RENUMBER=ON -DMEDCOUPLING_PARTITIONER_METIS=OFF -DMEDCOUPLING_PARTITIONER_SCOTCH=ON -DMEDCOUPLING_PARTITIONER_PARMETIS=ON -DMEDCOUPLING_MICROMED=OFF -DMEDCOUPLING_USE_MPI=ON -DSALOME_USE_MPI=ON -DMEDCOUPLING_USE_64BIT_IDS=ON -DCMAKE_CXX_COMPILER:STRING=${MPI_CXX_COMPILER} -DCMAKE_C_COMPILER:STRING=${MPI_C_COMPILER}"
+ depend : [
+ "boost",
+ "cppunit",
+ "Python",
+ "hdf5",
+ "medfile",
+ "scotch",
+ "docutils",
+ "libxml2",
+ "Sphinx",
+ "sphinxintl",
+ "setuptools",
+ "six",
+ "pytz",
+ "numpy",
+ "scipy",
+ "lapack",
+ "CONFIGURATION",
+ "openmpi"
+ ]
+ opt_depend : ["ParMetis"]
+}
+
+version_V9_6_0_to_V9_8_0:
+{
+ cmake_options : "-DMEDCOUPLING_ENABLE_PYTHON=ON -DMEDCOUPLING_ENABLE_PARTITIONER=ON -DMEDCOUPLING_ENABLE_RENUMBER=ON -DMEDCOUPLING_PARTITIONER_METIS=ON -DMEDCOUPLING_PARTITIONER_SCOTCH=ON -DMEDCOUPLING_PARTITIONER_PARMETIS=OFF -DMEDCOUPLING_MICROMED=OFF -DMEDCOUPLING_USE_MPI=OFF -DMEDCOUPLING_USE_64BIT_IDS=ON"
+}
+
version_V9_5_0 :
{
cmake_options : "-DMEDCOUPLING_ENABLE_PYTHON=ON -DMEDCOUPLING_ENABLE_PARTITIONER=ON -DMEDCOUPLING_ENABLE_RENUMBER=ON -DMEDCOUPLING_PARTITIONER_METIS=ON -DMEDCOUPLING_PARTITIONER_SCOTCH=ON -DMEDCOUPLING_PARTITIONER_PARMETIS=OFF -DMEDCOUPLING_MICROMED=OFF -DMEDCOUPLING_USE_MPI=OFF"
--- /dev/null
+#!/bin/bash
+
+echo "##########################################################################"
+echo "ptscotch" $VERSION
+echo "##########################################################################"
+
+echo
+echo "*** mkdir" $PRODUCT_INSTALL
+mkdir -p $PRODUCT_INSTALL
+if [ $? -ne 0 ]
+then
+    echo "ERROR on mkdir"
+    exit 1
+fi
+cp -ar $SOURCE_DIR/* ${BUILD_DIR}/
+cd ${BUILD_DIR}/src
+
+echo
+echo "*** create Makefile"
+# HPC flavour compiles against MPI headers (ptscotch); both flavours need
+# -fPIC (shared libs), 64-bit indices (INTSIZE64) and pthread support.
+if [ -n "$SAT_HPC" ]; then
+    sed -e "s%CFLAGS\([[:space:]]*\)=\([[:space:]]*\)\(.*\)%CFLAGS\1=\2-fPIC -DPIC -DINTSIZE64 -DSCOTCH_PTHREAD -I${MPI_INCLUDE_DIR} \3%g" Make.inc/Makefile.inc.x86-64_pc_linux2 > Makefile.inc
+else
+    sed -e "s%CFLAGS\([[:space:]]*\)=\([[:space:]]*\)\(.*\)%CFLAGS\1=\2-fPIC -DPIC -DINTSIZE64 -DSCOTCH_PTHREAD \3%g" Make.inc/Makefile.inc.x86-64_pc_linux2 > Makefile.inc
+fi
+sed -e "s%LDFLAGS\([[:space:]]*\)=\([[:space:]]*\)\(.*\)%LDFLAGS\1=\2 \3 -lpthread%g" Makefile.inc > Makefile.in_new
+mv Makefile.in_new Makefile.inc
+
+echo
+echo "*** make" $MAKE_OPTIONS
+make $MAKE_OPTIONS
+if [ $? -ne 0 ]
+then
+    echo "ERROR on make"
+    exit 2
+fi
+
+echo
+echo "*** Check if node is a virtual machine"
+# MPI tests launch 4 ranks; on VMs with fewer cores, oversubscription is needed.
+ISVM=$(hostnamectl status|grep -i chassis:|grep vm)
+if [ ! -z "$ISVM" ]; then
+    echo "*** oversubscribe..."
+    sed -i 's/mpirun -n 4/mpirun -n 4 --oversubscribe/g' $BUILD_DIR/src/check/Makefile
+else
+    echo "*** hostnamectl says that $HOSTNAME is *NOT* a virtual machine"
+fi
+
+echo
+# Select the test target once so banner and error message match what really ran:
+# ptcheck (MPI tests) for HPC builds, check (serial tests) otherwise.
+cd $BUILD_DIR/src
+if [ -n "$SAT_HPC" ]; then
+    CHECK_TARGET=ptcheck
+else
+    CHECK_TARGET=check
+fi
+echo "*** make $CHECK_TARGET"
+make $CHECK_TARGET
+if [ $? -ne 0 ]
+then
+    echo "ERROR on make $CHECK_TARGET"
+    exit 3
+fi
+
+echo
+echo "*** Install"
+cd $BUILD_DIR
+for d in include lib bin; do
+    cp -r $d $PRODUCT_INSTALL/$d
+    if [ $? -ne 0 ]; then
+        echo "FATAL: failed to deploy: $d"
+        # distinct exit code: install failure, not a test failure
+        exit 4
+    fi
+done
+
+echo
+echo "########## END"
env.set('OPAL_PREFIX', prereq_dir) # be able to move openmpi install (packages)
env.set('MPI_ROOT_DIR', prereq_dir) # update for cmake
env.set('MPI_ROOT', prereq_dir)
+ env.set('MPI_INCLUDE_DIR', os.path.join(prereq_dir, 'include'))
env.set('MPI_C_COMPILER', os.path.join(prereq_dir, 'bin', 'mpicc'))
env.set('MPI_CXX_COMPILER', os.path.join(prereq_dir, 'bin', 'mpicxx'))
env.set('MPI_C_FOUND', os.path.join(prereq_dir,'lib','libmpi.so'))
def set_nativ_env(env):
prereq_dir='/usr'
- mpibin_dir='/usr/bin'
+ prereq_bin='/usr/bin'
+ prereq_inc='/usr/include/openmpi'
try:
import distro
if any(distribution in distro.name().lower() for distribution in ["centos", "fedora"]) :
prereq_dir='/usr/lib64/openmpi'
- mpibin_dir='/usr/lib64/openmpi/bin'
+ prereq_bin='/usr/lib64/openmpi/bin'
+ prereq_inc='/usr/include/openmpi-x86_64'
elif any(distribution in distro.name().lower() for distribution in ["debian", "ubuntu"]) :
prereq_dir='/usr/lib/x86_64-linux-gnu/openmpi'
+ prereq_inc= '/usr/lib/x86_64-linux-gnu/openmpi/include'
except:
import platform
if any(distribution in platform.linux_distribution()[0].lower() for distribution in ["centos", "fedora"]) :
prereq_dir='/usr/lib64/openmpi'
- mpibin_dir='/usr/lib64/openmpi/bin'
+ prereq_bin='/usr/lib64/openmpi/bin'
+ prereq_inc='/usr/include/openmpi-x86_64'
env.set('MPI_ROOT_DIR', prereq_dir)
env.set('OPENMPIDIR', prereq_dir)
env.set('MPI_ROOT', prereq_dir)
- env.set('MPI_C_COMPILER', os.path.join(mpibin_dir,'mpicc'))
- env.set('MPI_CXX_COMPILER', os.path.join(mpibin_dir,'mpicxx'))
+ env.set('MPI_C_COMPILER', os.path.join(prereq_bin,'mpicc'))
+ env.set('MPI_CXX_COMPILER', os.path.join(prereq_bin,'mpicxx'))
env.set('MPI_C_FOUND', os.path.join(prereq_dir,'lib','libmpi.so'))
- env.prepend('PATH', mpibin_dir)
+ env.set('MPI_INCLUDE_DIR', prereq_inc)
+ env.prepend('PATH', prereq_bin)
env.prepend('LD_LIBRARY_PATH', os.path.join(prereq_dir,'lib'))
#!/usr/bin/env python
#-*- coding:utf-8 -*-
+import os.path
def set_env(env, prereq_dir, version):
+ if env.get('SCOTCH_HPC') == '1':
+ env.set('SCOTCH_ROOT_DIR', prereq_dir)
+ env.set('PTSCOTCH_ROOT_DIR', prereq_dir)
+ env.set('PTSCOTCHDIR', prereq_dir)
env.set('SCOTCHDIR', prereq_dir)
- env.set('SCOTCH_ROOT_DIR', prereq_dir) # update for cmake
+ env.set('PTSCOTCH_INCLUDE_DIR',os.path.join(prereq_dir,'include'))
+ else:
+ env.set('SCOTCHDIR', prereq_dir)
+ env.set('SCOTCH_ROOT_DIR', prereq_dir)
def set_nativ_env(env):
-    env.set('SCOTCH_ROOT_DIR', '/usr')     # update for cmake
-    env.set('SCOTCHDIR', '/usr')
+    # Native (system-package) scotch. SCOTCH_HPC=1 selects the MPI-enabled
+    # ptscotch packages, whose headers/libs live in distro-specific locations.
+    if env.get('SCOTCH_HPC') != '1':
+        prereq_dir='/usr'
+        env.set('SCOTCH_ROOT_DIR', prereq_dir)
+        # keep legacy variable for consistency with the source-build set_env
+        env.set('SCOTCHDIR', prereq_dir)
+    else:
+        prereq_dir='/usr'
+        prereq_inc='/usr/include'
+        prereq_lib= None
+        try:
+            import distro
+            if any(distribution in distro.name().lower() for distribution in ["centos", "fedora"]) :
+                prereq_dir='/usr'
+                prereq_inc= '/usr/include/openmpi-x86_64'
+                prereq_lib='/usr/lib64/openmpi/lib'
+            elif any(distribution in distro.name().lower() for distribution in ["debian", "ubuntu"]) :
+                prereq_dir='/usr'
+                prereq_inc='/usr/include/scotch-long'
+                prereq_lib='/usr/lib/x86_64-linux-gnu/scotch-long'
+            else:
+                # distro.name is a function: it must be called before .lower()
+                print("Unimplemented distribution (1): {}".format(distro.name().lower()))
+        except:
+            # distro module unavailable: fall back to (deprecated) platform API
+            import platform
+            if any(distribution in platform.linux_distribution()[0].lower() for distribution in ["centos", "fedora"]) :
+                prereq_dir='/usr'
+                prereq_inc= '/usr/include/openmpi-x86_64'
+                prereq_lib='/usr/lib64/openmpi/lib'
+            else:
+                print("Unimplemented distribution (2): {}".format(platform.linux_distribution()[0].lower()))
+        env.set('SCOTCH_ROOT_DIR', prereq_dir)
+        env.set('PTSCOTCH_ROOT_DIR', prereq_dir)
+        env.set('PTSCOTCHDIR', prereq_dir)
+        env.set('PTSCOTCH_INCLUDE_DIR', prereq_inc)
+        if prereq_lib is not None:
+            env.prepend('LD_LIBRARY_PATH', prereq_lib)
env_script : $name + ".py"
}
depend : []
+ patches: []
source_dir : $APPLICATION.workdir + $VARS.sep + 'SOURCES' + $VARS.sep + $name
build_dir : $APPLICATION.workdir + $VARS.sep + 'BUILD' + $VARS.sep + $name
install_dir : 'base'
+ properties :
+ {
+ incremental : "yes"
+ }
}
version_5_1_12b :
install_dir : 'base'
}
+version_6_0_4 :
+{
+ compil_script: "scotch-6.0.4.sh"
+}
-
+version_6_0_4_MPI :
+{
+ system_info :
+ {
+ rpm : ["ptscotch-openmpi"]
+ rpm_dev : ["ptscotch-openmpi-devel"]
+ apt : ["libptscotch"]
+ apt_dev : ["libptscotch-dev"]
+ }
+ environ :
+ {
+ env_script : "scotch.py"
+ }
+ compil_script: "scotch-6.0.4.sh"
+ depend: ['openmpi']
+}