changes for mpi compilation (branch bsr/medmpi)
author     Bernard Sécher <bernard.secher@cea.fr>
           Mon, 9 Feb 2015 09:13:15 +0000 (10:13 +0100)
committer  Bernard Sécher <bernard.secher@cea.fr>
           Mon, 9 Feb 2015 09:37:38 +0000 (10:37 +0100)
15 files changed:
salome_adm/cmake_files/FindSalomeMPI.cmake
src/Communication/CMakeLists.txt
src/Communication_SWIG/CMakeLists.txt
src/Container/CMakeLists.txt
src/Container/SALOME_ContainerManager.cxx
src/DSC/ParallelDSC/CMakeLists.txt
src/Launcher/CMakeLists.txt
src/MPIContainer/CMakeLists.txt
src/MPIContainer/MPIObject_i.cxx
src/MPIContainer/getMPIImplementation.cxx [new file with mode: 0644]
src/MPIContainer/launch_testMPI2.csh
src/MPIContainer/testMPI2.cxx
src/ParallelContainer/CMakeLists.txt
src/TestMPIContainer/CMakeLists.txt
src/TestMPIContainer/TestMPIContainer.cxx

diff --git a/salome_adm/cmake_files/FindSalomeMPI.cmake b/salome_adm/cmake_files/FindSalomeMPI.cmake
index d2538862cde0ef4160491d5ed3308c114d0e7460..9eee1cf0b0e85e846d0e9fb70f2693ceca6e7353 100644 (file)
@@ -31,6 +31,17 @@ SET(MPI_INCLUDE_DIRS ${MPI_C_INCLUDE_PATH} ${MPI_CXX_INCLUDE_PATH})
 SET(MPI_LIBRARIES ${MPI_C_LIBRARIES} ${MPI_CXX_LIBRARIES})
 
 IF(MPI_FOUND) 
+  # Detect if function MPI_Publish_name is provided by the external MPI library 
+  # otherwise take ours.
+  include(CheckSymbolExists)
+  SET(CMAKE_REQUIRED_LIBRARIES ${MPI_LIBRARIES})
+  CHECK_SYMBOL_EXISTS(MPI_Publish_name ${MPI_C_INCLUDE_PATH}/mpi.h MPI2_IS_OK)
+  SET(MPI_DEFINITIONS "${MPI_CXX_COMPILE_FLAGS}")
+  IF(MPI2_IS_OK)
+    MESSAGE(STATUS "Your mpi implementation is compatible with mpi2 ... adding -DHAVE_MPI2")
+    SET(MPI_DEFINITIONS "${MPI_CXX_COMPILE_FLAGS} -DHAVE_MPI2")
+  ENDIF(MPI2_IS_OK)
+
   SALOME_ACCUMULATE_HEADERS(MPI_INCLUDE_DIRS)
   SALOME_ACCUMULATE_ENVIRONMENT(LD_LIBRARY_PATH ${MPI_LIBRARIES})
 ENDIF()
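
A note on the detection above: CHECK_SYMBOL_EXISTS compiles and links a small probe against ${MPI_LIBRARIES} (hence the CMAKE_REQUIRED_LIBRARIES setting) to see whether the MPI implementation declares MPI_Publish_name, the MPI-2 name-publishing routine this change set depends on. A minimal sketch of the kind of program the probe must be able to build (the service name is illustrative, not used by SALOME):

    #include <mpi.h>

    int main(int argc, char** argv)
    {
      MPI_Init(&argc, &argv);
      char service[] = "probe_service";   // illustrative service name
      char port_name[MPI_MAX_PORT_NAME];
      MPI_Open_port(MPI_INFO_NULL, port_name);
      // The symbol probed for: only MPI-2 compliant implementations provide
      // it, and at run time it needs a name server (ompi-server or
      // hydra_nameserver) to succeed.
      MPI_Publish_name(service, MPI_INFO_NULL, port_name);
      MPI_Unpublish_name(service, MPI_INFO_NULL, port_name);
      MPI_Close_port(port_name);
      MPI_Finalize();
      return 0;
    }

When the symbol is found, -DHAVE_MPI2 is folded into the new MPI_DEFINITIONS variable, which the CMakeLists.txt changes below substitute for the bare MPI_CXX_COMPILE_FLAGS.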
diff --git a/src/Communication/CMakeLists.txt b/src/Communication/CMakeLists.txt
index d6e51e3f4d31e0d7bb344334b99249719ec77280..d2c3ceb212dd90de0f56c633d915eba4b9322a93 100755 (executable)
@@ -47,7 +47,7 @@ SET(SalomeCommunication_SOURCES
   MatrixClient.cxx
 )
 
-ADD_DEFINITIONS(${OMNIORB_DEFINITIONS} ${MPI_CXX_COMPILE_FLAGS})
+ADD_DEFINITIONS(${OMNIORB_DEFINITIONS} ${MPI_DEFINITIONS})
 
 ADD_LIBRARY(SalomeCommunication ${SalomeCommunication_SOURCES})
 TARGET_LINK_LIBRARIES(SalomeCommunication ${COMMON_LIBS})
diff --git a/src/Communication_SWIG/CMakeLists.txt b/src/Communication_SWIG/CMakeLists.txt
index 9b97c9d86d7463c3444163380dca017a5adf1cee..f1c7a2be0a9a841cf4c7302dde89cf31ebc5e2a4 100755 (executable)
@@ -43,7 +43,7 @@ SET(_libSALOME_Comm_LIBS
 IF(SALOME_USE_MPI)
   INCLUDE_DIRECTORIES(${MPI_CXX_INCLUDE_PATH})
 
-  ADD_DEFINITIONS(${MPI_CXX_COMPILE_FLAGS}) 
+  ADD_DEFINITIONS(${MPI_DEFINITIONS}) 
   SET(_libSALOME_Comm_LIBS
     ${_libSALOME_Comm_LIBS}
     ${MPI_CXX_LIBRARIES}
diff --git a/src/Container/CMakeLists.txt b/src/Container/CMakeLists.txt
index d312628809d6f274c8c14354367282ba027e80f2..38aecf6bc1d9c652a3eb05d37e3ddcbf8e61cde4 100755 (executable)
@@ -61,7 +61,7 @@ SET(COMMON_LIBS
 )
 
 IF(WITH_MPI_SEQ_CONTAINER)
-  ADD_DEFINITIONS(${MPI_CXX_COMPILE_FLAGS})
+  ADD_DEFINITIONS(${MPI_DEFINITIONS})
   SET(COMMON_LIBS
     ${MPI_CXX_LIBRARIES}
   )
diff --git a/src/Container/SALOME_ContainerManager.cxx b/src/Container/SALOME_ContainerManager.cxx
index 28ad81cd717c32ca063f0411fe38c4a57bc7867c..6ec2f9cd6bc5ecef7f836e2459dbdf5bd598f65f 100644 (file)
 #include <SALOMEconfig.h>
 #include CORBA_CLIENT_HEADER(SALOME_Session)
 
+#ifdef HAVE_MPI2
+#include <mpi.h>
+#endif
+
 #ifdef WIN32
 #include <process.h>
 #define getpid _getpid
@@ -96,7 +100,7 @@ SALOME_ContainerManager::SALOME_ContainerManager(CORBA::ORB_ptr orb, PortableSer
   _isAppliSalomeDefined = (GetenvThreadSafe("APPLI") != 0);
 
 #ifdef HAVE_MPI2
-#ifdef WITHOPENMPI
+#ifdef OPEN_MPI
   _pid_mpiServer = -1;
   // the urifile name depends on pid of the process
   std::stringstream urifile;
@@ -122,7 +126,7 @@ SALOME_ContainerManager::SALOME_ContainerManager(CORBA::ORB_ptr orb, PortableSer
     if(_pid_mpiServer < 0)
       throw SALOME_Exception("Error when getting ompi-server id");
   }
-#elif defined(WITHMPICH)
+#elif defined(MPICH)
   _pid_mpiServer = -1;
   // get the pid of all hydra_nameserver
   std::set<pid_t> thepids1 = getpidofprogram("hydra_nameserver");
@@ -154,7 +158,7 @@ SALOME_ContainerManager::~SALOME_ContainerManager()
   MESSAGE("destructor");
   delete _resManager;
 #ifdef HAVE_MPI2
-#ifdef WITHOPENMPI
+#ifdef OPEN_MPI
   if( GetenvThreadSafe("OMPI_URI_FILE") != NULL ){
     // kill my ompi-server
     if( kill(_pid_mpiServer,SIGTERM) != 0 )
@@ -164,7 +168,7 @@ SALOME_ContainerManager::~SALOME_ContainerManager()
     if(status!=0)
       throw SALOME_Exception("Error when removing urifile");
   }
-#elif defined(WITHMPICH)
+#elif defined(MPICH)
   // kill my hydra_nameserver
   if(_pid_mpiServer > -1)
     if( kill(_pid_mpiServer,SIGTERM) != 0 )
@@ -788,16 +792,16 @@ SALOME_ContainerManager::BuildCommandToLaunchRemoteContainer(const std::string&
       std::ostringstream o;
       o << nbproc << " ";
       command += o.str();
-#ifdef WITHLAM
+#ifdef LAM_MPI
       command += "-x PATH,LD_LIBRARY_PATH,OMNIORB_CONFIG,SALOME_trace ";
-#elif defined(WITHOPENMPI)
+#elif defined(OPEN_MPI)
       if( GetenvThreadSafe("OMPI_URI_FILE") == NULL )
         command += "-x PATH -x LD_LIBRARY_PATH -x OMNIORB_CONFIG -x SALOME_trace";
       else{
         command += "-x PATH -x LD_LIBRARY_PATH -x OMNIORB_CONFIG -x SALOME_trace -ompi-server file:";
         command += GetenvThreadSafe("OMPI_URI_FILE");
       }
-#elif defined(WITHMPICH)
+#elif defined(MPICH)
       command += "-nameserver " + Kernel_Utils::GetHostname();
 #endif        
       command += " SALOME_MPIContainer ";
@@ -842,9 +846,9 @@ std::string SALOME_ContainerManager::BuildCommandToLaunchLocalContainer(const En
       if( GetenvThreadSafe("LIBBATCH_NODEFILE") != NULL )
         o << "-machinefile " << machinesFile << " ";
 
-#ifdef WITHLAM
+#ifdef LAM_MPI
       o << "-x PATH,LD_LIBRARY_PATH,OMNIORB_CONFIG,SALOME_trace ";
-#elif defined(WITHOPENMPI)
+#elif defined(OPEN_MPI)
       if( GetenvThreadSafe("OMPI_URI_FILE") == NULL )
         o << "-x PATH -x LD_LIBRARY_PATH -x OMNIORB_CONFIG -x SALOME_trace";
       else
@@ -852,7 +856,7 @@ std::string SALOME_ContainerManager::BuildCommandToLaunchLocalContainer(const En
           o << "-x PATH -x LD_LIBRARY_PATH -x OMNIORB_CONFIG -x SALOME_trace -ompi-server file:";
           o << GetenvThreadSafe("OMPI_URI_FILE");
         }
-#elif defined(WITHMPICH)
+#elif defined(MPICH)
       o << "-nameserver " + Kernel_Utils::GetHostname();
 #endif
 
@@ -1086,16 +1090,16 @@ std::string SALOME_ContainerManager::BuildTempFileToLaunchRemoteContainer (const
       std::ostringstream o;
 
       tempOutputFile << nbproc << " ";
-#ifdef WITHLAM
+#ifdef LAM_MPI
       tempOutputFile << "-x PATH,LD_LIBRARY_PATH,OMNIORB_CONFIG,SALOME_trace ";
-#elif defined(WITHOPENMPI)
+#elif defined(OPEN_MPI)
       if( GetenvThreadSafe("OMPI_URI_FILE") == NULL )
         tempOutputFile << "-x PATH -x LD_LIBRARY_PATH -x OMNIORB_CONFIG -x SALOME_trace";
       else{
         tempOutputFile << "-x PATH -x LD_LIBRARY_PATH -x OMNIORB_CONFIG -x SALOME_trace -ompi-server file:";
         tempOutputFile << GetenvThreadSafe("OMPI_URI_FILE");
       }
-#elif defined(WITHMPICH)
+#elif defined(MPICH)
       tempOutputFile << "-nameserver " + Kernel_Utils::GetHostname();
 #endif
     }
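
The guard renames in this file (WITHLAM -> LAM_MPI, WITHOPENMPI -> OPEN_MPI, WITHMPICH -> MPICH) replace SALOME-specific configure flags with the macros each implementation's own <mpi.h> defines, so the MPI flavor is detected at compile time. The three Build*Container methods then assemble different mpirun options per flavor; a condensed, hypothetical sketch of that logic (buildMPIRunPrefix is illustrative, not a SALOME function, and std::getenv stands in for GetenvThreadSafe):

    #include <cstdlib>
    #include <sstream>
    #include <string>

    std::string buildMPIRunPrefix(int nbproc, const std::string& hostname)
    {
      std::ostringstream o;
      o << "mpirun -np " << nbproc << " ";
    #ifdef LAM_MPI
      o << "-x PATH,LD_LIBRARY_PATH,OMNIORB_CONFIG,SALOME_trace ";
    #elif defined(OPEN_MPI)
      o << "-x PATH -x LD_LIBRARY_PATH -x OMNIORB_CONFIG -x SALOME_trace";
      if (const char* uri = std::getenv("OMPI_URI_FILE"))
        o << " -ompi-server file:" << uri;    // rendezvous through ompi-server
      o << " ";
    #elif defined(MPICH)
      o << "-nameserver " << hostname << " "; // rendezvous through hydra_nameserver
    #endif
      return o.str();
    }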
diff --git a/src/DSC/ParallelDSC/CMakeLists.txt b/src/DSC/ParallelDSC/CMakeLists.txt
index aaab6b3724bf32999098defa3f092305e4338ce6..8f40f1ca6c83f28780737e44c56806c3e419b0f6 100755 (executable)
@@ -38,7 +38,7 @@ INCLUDE_DIRECTORIES(
 
 SET(COMMON_FLAGS
   ${OMNIORB_DEFINITIONS}
-  ${MPI_CXX_COMPILE_FLAGS}
+  ${MPI_DEFINITIONS}
 )
 SET(SalomeParallelDSCContainer_SOURCES
     ParallelDSC_i.cxx
diff --git a/src/Launcher/CMakeLists.txt b/src/Launcher/CMakeLists.txt
index 775cc8ccb37c395a69280c97b87ab94df57b858d..056d51441b0bb95fe5a28010aad1d121bfaaa070 100755 (executable)
@@ -37,7 +37,7 @@ INCLUDE_DIRECTORIES(
   ${PROJECT_BINARY_DIR}/idl
 )
 
-ADD_DEFINITIONS(${MPI_CXX_COMPILE_FLAGS} ${LIBXML2_DEFINITIONS} ${OMNIORB_DEFINITIONS})
+ADD_DEFINITIONS(${MPI_DEFINITIONS} ${LIBXML2_DEFINITIONS} ${OMNIORB_DEFINITIONS})
 IF(SALOME_USE_LIBBATCH)
   ADD_DEFINITIONS(-DWITH_LIBBATCH)
 ENDIF(SALOME_USE_LIBBATCH)
diff --git a/src/MPIContainer/CMakeLists.txt b/src/MPIContainer/CMakeLists.txt
index a19d954413f217278fdd6a4c325138db756ccd0d..9a6356f31a5161c5805c0b1d53d98352ceeaae64 100755 (executable)
@@ -49,7 +49,7 @@ SET(COMMON_LIBS
   ${OMNIORB_LIBRARIES}
 )
 
-ADD_DEFINITIONS(${MPI_CXX_COMPILE_FLAGS} ${OMNIORB_DEFINITIONS})
+ADD_DEFINITIONS(${MPI_DEFINITIONS} ${OMNIORB_DEFINITIONS})
 
 ADD_LIBRARY(SalomeMPIContainer MPIObject_i.cxx MPIContainer_i.cxx)
 TARGET_LINK_LIBRARIES(SalomeMPIContainer ${COMMON_LIBS})
@@ -61,10 +61,14 @@ TARGET_LINK_LIBRARIES(SALOME_MPIContainer SalomeMPIContainer ${COMMON_LIBS} ${PY
 ADD_EXECUTABLE(testMPI2 testMPI2.cxx)
 TARGET_LINK_LIBRARIES(testMPI2 ${MPI_CXX_LIBRARIES})
 
+ADD_EXECUTABLE(getMPIImplementation getMPIImplementation.cxx)
+TARGET_LINK_LIBRARIES(getMPIImplementation ${MPI_CXX_LIBRARIES})
+
 INSTALL(TARGETS SALOME_MPIContainer EXPORT ${PROJECT_NAME}TargetGroup DESTINATION ${SALOME_INSTALL_BINS})
 INSTALL(TARGETS testMPI2 DESTINATION ${SALOME_INSTALL_BINS}) 
+INSTALL(TARGETS getMPIImplementation DESTINATION ${SALOME_INSTALL_BINS})
 
-INSTALL(FILES launch_testMPI2.csh DESTINATION ${SALOME_INSTALL_SCRIPT_SCRIPTS})
+INSTALL(FILES launch_testMPI2.csh PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE DESTINATION ${SALOME_INSTALL_SCRIPT_SCRIPTS})
 
 FILE(GLOB COMMON_HEADERS_HXX "${CMAKE_CURRENT_SOURCE_DIR}/*.hxx")
 INSTALL(FILES ${COMMON_HEADERS_HXX} DESTINATION ${SALOME_INSTALL_HEADERS})
diff --git a/src/MPIContainer/MPIObject_i.cxx b/src/MPIContainer/MPIObject_i.cxx
index 433300512dc9779da8247ada1a760864e65e3b64..f38926f68dd3a2e7e6cb6c00a58ed920e624120d 100644 (file)
@@ -139,6 +139,7 @@ void MPIObject_i::remoteMPI2Connect(std::string service)
   int i;
   char port_name[MPI_MAX_PORT_NAME];
   char port_name_clt[MPI_MAX_PORT_NAME];
+  MPI_Info info;
   std::ostringstream msg;
 
   if( service.size() == 0 )
@@ -158,6 +159,8 @@ void MPIObject_i::remoteMPI2Connect(std::string service)
   MPI_Barrier(MPI_COMM_WORLD);
 
   MPI_Errhandler_set(MPI_COMM_WORLD, MPI_ERRORS_RETURN);
+  MPI_Info_create(&info);
+  MPI_Info_set(info, "ompi_unique", "true");
   if( _numproc == 0 )
     { 
      /* rank 0 try to be a server. If service is already published, try to be a client */
@@ -167,7 +170,7 @@ void MPIObject_i::remoteMPI2Connect(std::string service)
           MESSAGE("[" << _numproc << "] I get the connection with " << service << " at " << port_name_clt << std::endl);
           MPI_Close_port( port_name );
         }
-      else if ( MPI_Publish_name((char*)service.c_str(), MPI_INFO_NULL, port_name) == MPI_SUCCESS )
+      else if ( MPI_Publish_name((char*)service.c_str(), info, port_name) == MPI_SUCCESS )
         {
           _srv[service] = true;
           _port_name[service] = port_name;
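
"ompi_unique" is an Open MPI-specific MPI_Info key: set to "true", MPI_Publish_name fails when the service name is already registered instead of overwriting it, which is what lets rank 0 above lose the race cleanly and fall through to the client branch; implementations that do not recognize the key simply ignore it. A self-contained sketch of the same publish-or-lookup handshake (error handling trimmed, and with the MPI_Info_free cleanup added):

    #include <mpi.h>

    int main(int argc, char** argv)
    {
      MPI_Init(&argc, &argv);
      char service[] = "SERVICE";
      char port_name[MPI_MAX_PORT_NAME];
      char port_name_clt[MPI_MAX_PORT_NAME];

      MPI_Info info;
      MPI_Info_create(&info);
      MPI_Info_set(info, "ompi_unique", "true"); // Open MPI: publishing must not clash

      MPI_Errhandler_set(MPI_COMM_WORLD, MPI_ERRORS_RETURN);
      MPI_Open_port(MPI_INFO_NULL, port_name);
      if (MPI_Lookup_name(service, MPI_INFO_NULL, port_name_clt) == MPI_SUCCESS) {
        // Service already published: take the client role.
        MPI_Close_port(port_name);
      } else if (MPI_Publish_name(service, info, port_name) == MPI_SUCCESS) {
        // We published first: take the server role.
        MPI_Unpublish_name(service, MPI_INFO_NULL, port_name);
        MPI_Close_port(port_name);
      }
      MPI_Info_free(&info);
      MPI_Finalize();
      return 0;
    }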
diff --git a/src/MPIContainer/getMPIImplementation.cxx b/src/MPIContainer/getMPIImplementation.cxx
new file mode 100644 (file)
index 0000000..ddf663e
--- /dev/null
@@ -0,0 +1,17 @@
+// Report the MPI implementation through the exit status: 1 = Open MPI,
+// 2 = MPICH, 3 = LAM/MPI, 0 = unknown. The macros tested below are
+// defined by each implementation's own <mpi.h>, so no extra flags are needed.
+#include <mpi.h>
+
+int main(int argc, char** argv)
+{
+#ifdef OPEN_MPI
+  return 1;
+#elif defined(MPICH)
+  return 2;
+#elif defined(LAM_MPI)
+  return 3;
+#else
+  return 0;
+#endif
+}
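
launch_testMPI2.csh, updated in the next diff, runs this helper and branches on its exit status (1 for Open MPI, 2 for MPICH) to decide whether ompi-server or hydra_nameserver must be started before the test.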
diff --git a/src/MPIContainer/launch_testMPI2.csh b/src/MPIContainer/launch_testMPI2.csh
index 2f2da90d6b260a92c7193230e07854c971292fa6..af8c563e8364945c9c222be60081ec981d83a675 100755 (executable)
@@ -25,11 +25,33 @@ if $1 == "--debug" then
 else
   set debug=""
 endif
-# get pid of ompi-server
-setenv OMPI_URI_FILE ${HOME}/.urifile_$$
-set lpid1=`pidof ompi-server`
-ompi-server -r ${OMPI_URI_FILE}
-set lpid2=`pidof ompi-server`
+# get mpi implementation
+${KERNEL_ROOT_DIR}/bin/salome/getMPIImplementation
+set res = $?
+if $res == 1 then
+  set mpi="openmpi"
+else if $res == 2 then
+  set mpi="mpich"
+else
+  echo "unknown MPI implementation, cannot run test"
+  exit 1
+endif
+if $mpi == "openmpi" then
+# launch ompi-server
+  setenv OMPI_URI_FILE ${HOME}/.urifile_$$
+  set lpid1=`pidof ompi-server`
+  ompi-server -r ${OMPI_URI_FILE}
+  set lpid2=`pidof ompi-server`
+else if $mpi == "mpich" then
+# launch hydra_nameserver
+  set lpid1=`pidof hydra_nameserver`
+  if "$lpid1" == "" then
+    hydra_nameserver &
+  endif
+  set lpid2=`pidof hydra_nameserver`
+endif
+# get pid of mpi server
+set pid=0
 foreach i ($lpid2)
   set flag=0
   foreach j ($lpid1)
@@ -43,14 +65,23 @@ foreach i ($lpid2)
 end
 sleep 2
 # launch two instances of executable to create communication between both
-mpirun -np 2 -ompi-server file:${OMPI_URI_FILE} ${KERNEL_ROOT_DIR}/bin/salome/testMPI2 -vsize 32 $debug &
-mpirun -np 3 -ompi-server file:${OMPI_URI_FILE} ${KERNEL_ROOT_DIR}/bin/salome/testMPI2 -vsize 32 $debug
+if $mpi == "openmpi" then
+  mpirun -np 2 -ompi-server file:${OMPI_URI_FILE} ${KERNEL_ROOT_DIR}/bin/salome/testMPI2 -vsize 32 $debug &
+  mpirun -np 3 -ompi-server file:${OMPI_URI_FILE} ${KERNEL_ROOT_DIR}/bin/salome/testMPI2 -vsize 32 $debug
+else if $mpi == "mpich" then
+  mpirun -np 2 -nameserver $HOSTNAME ${KERNEL_ROOT_DIR}/bin/salome/testMPI2 -vsize 32 $debug &
+  mpirun -np 3 -nameserver $HOSTNAME ${KERNEL_ROOT_DIR}/bin/salome/testMPI2 -vsize 32 $debug
+endif
 set res=$status
 sleep 1
-# kill ompi-server
-kill -9 $pid
+# kill mpi server
+if $pid != 0 then
+  kill -9 $pid
+endif
+if $mpi == "openmpi" then
 # delete uri file
-rm -f  ${OMPI_URI_FILE}
+  rm -f  ${OMPI_URI_FILE}
+endif
 # give result of test
 if $res == 0 then
   echo "OK"
diff --git a/src/MPIContainer/testMPI2.cxx b/src/MPIContainer/testMPI2.cxx
index df04076a1e146c74d690fc01adabc08154f7dd10..9239e105f6a0d25aa4fe8bf2721ceca8016f1f22 100644 (file)
 int main(int argc, char**argv)
 {
   int *indg;
-  double *vector, sum=0., norm, etalon;
+  double *vector, sum=0., norm=1., etalon=0.;
   int rank, size, grank, gsize, rsize;
   int vsize=20, lvsize, rlvsize;
   int i, k1, k2, imin, imax, nb;
   int srv=0;
   MPI_Comm com, icom;
-  MPI_Status status; 
+  MPI_Status status;
+  MPI_Info info;
   char   port_name     [MPI_MAX_PORT_NAME]; 
   char   port_name_clt [MPI_MAX_PORT_NAME]; 
   std::string service = "SERVICE";
   bool debug=false;
 
-#ifndef WITHOPENMPI
-  std::cout << "This test only works with openmpi implementation" << std::endl;
-  exit(1);
-#endif
-
   for(i=1;i<argc;i++){
     std::string sargv = argv[i];
     if(sargv.find("-debug")!=std::string::npos)
@@ -64,6 +60,9 @@ int main(int argc, char**argv)
   MPI_Barrier(MPI_COMM_WORLD);
 
   MPI_Errhandler_set(MPI_COMM_WORLD, MPI_ERRORS_RETURN);
+#ifdef HAVE_MPI2
+  MPI_Info_create(&info);
+  MPI_Info_set(info, "ompi_unique", "true");
   if(rank==0){
     MPI_Open_port(MPI_INFO_NULL, port_name); 
     if ( MPI_Lookup_name((char*)service.c_str(), MPI_INFO_NULL, port_name_clt) == MPI_SUCCESS )  {
@@ -71,7 +70,7 @@ int main(int argc, char**argv)
         std::cout << "[" << rank << "] I am client: I get the service " << service << " !" << std::endl;
       MPI_Close_port( port_name );
     } 
-    else if ( MPI_Publish_name((char*)service.c_str(), MPI_INFO_NULL, port_name) == MPI_SUCCESS )  {
+    else if ( MPI_Publish_name((char*)service.c_str(), info, port_name) == MPI_SUCCESS )  {
       if(debug)
         std::cout << "[" << rank << "] I am server: I've managed to publish the service " << service << " !" << std::endl;
       srv = 1;
@@ -206,6 +205,7 @@ int main(int argc, char**argv)
 
   free(indg);
   free(vector);
+#endif
   MPI_Finalize();
 
   if(rank==0){
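
For context: once the publish/lookup race settles, the server side calls MPI_Comm_accept on its opened port while the client side calls MPI_Comm_connect on the looked-up one, producing the intercommunicator over which testMPI2 exchanges its vector. A condensed sketch of that rendezvous (connectOrAccept is an illustrative helper; srv, port_name and port_name_clt follow the naming used in testMPI2.cxx):

    #include <mpi.h>

    MPI_Comm connectOrAccept(int srv, char* port_name, char* port_name_clt)
    {
      MPI_Comm icom;  // intercommunicator bridging the two mpirun jobs
      if (srv)
        MPI_Comm_accept(port_name, MPI_INFO_NULL, 0, MPI_COMM_WORLD, &icom);
      else
        MPI_Comm_connect(port_name_clt, MPI_INFO_NULL, 0, MPI_COMM_WORLD, &icom);
      return icom;
    }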
diff --git a/src/ParallelContainer/CMakeLists.txt b/src/ParallelContainer/CMakeLists.txt
index bb1454c4cfc1e01c0e3494f2b9afe8f87cbc68e0..c758263bd8d220a467bb25f3cad65302907481f5 100755 (executable)
@@ -58,7 +58,7 @@ SET(SalomeParallelContainer_SOURCES
   SALOME_ParallelGlobalProcessVar_i.cxx
 )
 
-ADD_DEFINITIONS(${ONMIORB_DEFINITIONS} ${MPI_CXX_COMPILE_FLAGS})
+ADD_DEFINITIONS(${OMNIORB_DEFINITIONS} ${MPI_DEFINITIONS})
 
 ADD_LIBRARY(SalomeParallelContainer ${SalomeParallelContainer_SOURCES})
 TARGET_LINK_LIBRARIES(SalomeParallelContainer ${COMMON_LIBS} ${MPI_CXX_LIBRARIES})
diff --git a/src/TestMPIContainer/CMakeLists.txt b/src/TestMPIContainer/CMakeLists.txt
index 8c4ce705093f1ce77146ebfb609a2868265cb7eb..d749f5cf8f3d24dfa735b528355c2ebad19f8af9 100755 (executable)
@@ -34,7 +34,7 @@ INCLUDE_DIRECTORIES(
   ${PROJECT_BINARY_DIR}/idl
 )
 
-ADD_DEFINITIONS(${OMNIORB_DEFINITIONS} ${MPI_CXX_COMPILE_FLAGS})
+ADD_DEFINITIONS(${OMNIORB_DEFINITIONS} ${MPI_DEFINITIONS})
 
 SET(COMMON_LIBS
   Registry
diff --git a/src/TestMPIContainer/TestMPIContainer.cxx b/src/TestMPIContainer/TestMPIContainer.cxx
index 0a251c5803a96fe8215173c2ed507b3b5e3e55e6..91392066f7740772f79a73aa000082a62771f3a5 100644 (file)
@@ -126,6 +126,17 @@ int main (int argc, char * argv[])
     }
     else{ 
       m1->Coucou(1L);
+// Display MPI component in naming service
+      std::string cmdn = "nameclt list Containers.dir/";
+      cmdn += hostName;
+      cmdn += ".dir";
+      system(cmdn.c_str());
+      cmdn = "nameclt list Containers.dir/";
+      cmdn += hostName;
+      cmdn += ".dir/MPIFactoryServer_";
+      cmdn += argv[2];
+      cmdn += ".dir";
+      system(cmdn.c_str());
 // //   sleep(5);
       INFOS("Unload MPI Component");
       iGenFact->remove_impl(m1) ;
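
nameclt is omniORB's command-line client for the CORBA naming service; the two list calls above print the Containers.dir/<host>.dir context and the new MPIFactoryServer_<n>.dir entry, making the registered MPI container visible in the test output.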