dnl
dnl ---------------------------------------------
-dnl testing MPICH
+dnl testing MPI
dnl ---------------------------------------------
dnl
-CHECK_MPICH
-
-dnl
-dnl ---------------------------------------------
-dnl testing LAM
-dnl ---------------------------------------------
-dnl
-
-CHECK_LAM
+CHECK_MPI
dnl
dnl ---------------------------------------------
echo
echo Configure
-variables="cc_ok boost_ok lex_yacc_ok python_ok swig_ok threads_ok OpenGL_ok qt_ok vtk_ok hdf5_ok med2_ok omniORB_ok occ_ok sip_ok pyqt_ok qwt_ok doxygen_ok graphviz_ok"
+variables="cc_ok boost_ok lex_yacc_ok mpi_ok python_ok swig_ok threads_ok OpenGL_ok qt_ok vtk_ok hdf5_ok med2_ok omniORB_ok occ_ok sip_ok pyqt_ok qwt_ok doxygen_ok graphviz_ok"
for var in $variables
do
--with-lam=DIR root directory path of LAM installation,
WITHLAM="yes",WITHLAM="no")
-LAM_INCLUDES=""
-LAM_LIBS=""
+MPI_INCLUDES=""
+MPI_LIBS=""
if test "$WITHLAM" = yes; then
echo
LAM_HOME=$withval
if test "$LAM_HOME"; then
- LAM_INCLUDES="-I$LAM_HOME/include"
- LAM_LIBS="-L$LAM_HOME/lib"
+ MPI_INCLUDES="-I$LAM_HOME/include"
+ MPI_LIBS="-L$LAM_HOME/lib"
fi
CPPFLAGS_old="$CPPFLAGS"
- CPPFLAGS="$LAM_INCLUDES $CPPFLAGS"
+ CPPFLAGS="$MPI_INCLUDES $CPPFLAGS"
AC_CHECK_HEADER(mpi.h,WITHLAM="yes",WITHLAM="no")
CPPFLAGS="$CPPFLAGS_old"
AC_CHECK_LIB(util,openpty,,WITHLAM="no")
LIBS_old="$LIBS"
LDFLAGS_old="$LDFLAGS"
- LDFLAGS="$LAM_LIBS $LDFLAGS"
+ LDFLAGS="$MPI_LIBS $LDFLAGS"
AC_CHECK_LIB(lam,lam_mp_init,,WITHLAM="no")
AC_CHECK_LIB(mpi,MPI_Init,WITHLAM="yes",WITHLAM="no")
AC_CHECK_LIB(mpi,MPI_Publish_name,WITHMPI2="yes",WITHMPI2="no")
fi
if test "$WITHLAM" = "yes";then
- lam_ok=yes
- LAM_LIBS="$LAM_LIBS -lmpi -llam"
- fi
-
- if test "$WITHMPI2" = "yes";then
- CPPFLAGS="-DHAVE_MPI2 $CPPFLAGS"
- CORBA_IDLCXXFLAGS="-DHAVE_MPI2 $CORBA_IDLCXXFLAGS"
- CORBA_IDLPYFLAGS="-DHAVE_MPI2 $CORBA_IDLPYFLAGS"
+ mpi_ok=yes
+ MPI_LIBS="$MPI_LIBS -lmpi -llam"
fi
fi
-AC_SUBST(LAM_INCLUDES)
-AC_SUBST(LAM_LIBS)
-AC_SUBST(WITHLAM)
-AC_SUBST(WITHMPI2)
+
])dnl
CPPFLAGS="$CPPFLAGS_old"
if test "$WITHMPI" = "yes";then
+ LIBS_old="$LIBS"
LDFLAGS_old="$LDFLAGS"
LDFLAGS="$MPI_LIBS $LDFLAGS"
AC_CHECK_LIB(elan,elan_init,MPI_LIBS="$MPI_LIBS -lelan")
AC_CHECK_LIB(mpi,MPI_Init,WITHMPI="yes",WITHMPI="no")
+ AC_CHECK_LIB(mpi,MPI_Publish_name,WITHMPI2="yes",WITHMPI2="no")
LDFLAGS="$LDFLAGS_old"
+ LIBS="$LIBS_old"
fi
if test "$WITHMPI" = "yes";then
fi
fi
+
+if test "$WITHMPI" = no; then
+dnl
+dnl ---------------------------------------------
+dnl testing MPICH
+dnl ---------------------------------------------
+dnl
+
+ CHECK_MPICH
+
+ if test "$WITHMPICH" = no; then
+dnl
+dnl ---------------------------------------------
+dnl testing LAM
+dnl ---------------------------------------------
+dnl
+
+ CHECK_LAM
+
+ fi
+
+fi
+
+if test "$WITHMPI2" = "yes";then
+ CPPFLAGS="-DHAVE_MPI2 $CPPFLAGS"
+ CORBA_IDLCXXFLAGS="-DHAVE_MPI2 $CORBA_IDLCXXFLAGS"
+ CORBA_IDLPYFLAGS="-DHAVE_MPI2 $CORBA_IDLPYFLAGS"
+fi
+
AC_SUBST(MPI_INCLUDES)
AC_SUBST(MPI_LIBS)
-AC_SUBST(WITHMPI)
-
+AC_SUBST(mpi_ok)
])dnl
--with-mpich=DIR root directory path of MPICH installation,
WITHMPICH="yes",WITHMPICH="no")
-MPICH_INCLUDES=""
-MPICH_LIBS=""
+MPI_INCLUDES=""
+MPI_LIBS=""
if test "$WITHMPICH" = yes; then
echo
MPICH_HOME=$withval
if test "$MPICH_HOME"; then
- MPICH_INCLUDES="-I$MPICH_HOME/include"
- MPICH_LIBS="-L$MPICH_HOME/lib"
+ MPI_INCLUDES="-I$MPICH_HOME/include"
+ MPI_LIBS="-L$MPICH_HOME/lib"
fi
CPPFLAGS_old="$CPPFLAGS"
- CPPFLAGS="$MPICH_INCLUDES $CPPFLAGS"
+ CPPFLAGS="$MPI_INCLUDES $CPPFLAGS"
AC_CHECK_HEADER(mpi.h,WITHMPICH="yes",WITHMPICH="no")
CPPFLAGS="$CPPFLAGS_old"
if test "$WITHMPICH" = "yes";then
LDFLAGS_old="$LDFLAGS"
- LDFLAGS="$MPICH_LIBS $LDFLAGS"
+ LDFLAGS="$MPI_LIBS $LDFLAGS"
AC_CHECK_LIB(mpich,MPI_Init,
AC_CHECK_LIB(pmpich, PMPI_Init,WITHMPICH="yes",WITHMPICH="no"),
WITHMPICH="no")
+ AC_CHECK_LIB(mpich,MPI_Publish_name,WITHMPI2="yes",WITHMPI2="no")
LDFLAGS="$LDFLAGS_old"
fi
- MPICH_LIBS="$MPICH_LIBS -lpmpich -lmpich"
+ if test "$WITHMPICH" = "yes";then
+ mpi_ok=yes
+ MPI_LIBS="$MPI_LIBS -lpmpich -lmpich"
+ fi
fi
-AC_SUBST(MPICH_INCLUDES)
-AC_SUBST(MPICH_LIBS)
-AC_SUBST(WITHMPICH)
])dnl
CAS_DATAEXCHANGE=@CAS_DATAEXCHANGE@
CAS_LDPATH=@CAS_LDPATH@
-# MPICH
+# MPI
-MPICH_INCLUDES=@MPICH_INCLUDES@
-MPICH_LIBS=@MPICH_LIBS@
-
-# LAM
-
-LAM_INCLUDES=@LAM_INCLUDES@
-LAM_LIBS=@LAM_LIBS@
+MPI_INCLUDES=@MPI_INCLUDES@
+MPI_LIBS=@MPI_LIBS@
# Swig C++ Python
BIN_SRC =
BIN_SERVER_IDL =
-CPPFLAGS+= $(PYTHON_INCLUDES) $(LAM_INCLUDES)
+CPPFLAGS+= $(PYTHON_INCLUDES) $(MPI_INCLUDES)
LDFLAGS+= -lOpUtil -lSALOMELocalTrace
-LIBS += -Xlinker -export-dynamic $(PYTHON_LIBS) $(LAM_LIBS)
+LIBS += -Xlinker -export-dynamic $(PYTHON_LIBS) $(MPI_LIBS)
@CONCLUDE@
catch(MultiCommException&)
{
SALOME::Sender_ptr newSender=sender->buildOtherWithProtocol(SALOME::CORBA_);
+ MESSAGE("PROTOCOL CHANGED TO CORBA");
sender->release();
CORBA::release(sender);
ret=getValueOneShot(newSender,size);
#include "SenderFactory.hxx"
+#include "utilities.h"
#include "SALOMEMultiComm.hxx"
#include "SALOME_Comm_i.hxx"
default:
{
multiCommunicator.setProtocol(SALOME::CORBA_);
+ MESSAGE("PROTOCOL CHANGED TO CORBA");
SALOME_CorbaDoubleSender * retc=new SALOME_CorbaDoubleSender(tab,lgr);
return retc->_this();
}
BIN_SRC =
BIN_SERVER_IDL = SALOME_Component.idl
-CPPFLAGS+= $(PYTHON_INCLUDES) $(LAM_INCLUDES)
+CPPFLAGS+= $(PYTHON_INCLUDES) $(MPI_INCLUDES)
LDFLAGS+= -lSalomeNS -lRegistry -lOpUtil -lSalomeNotification -lSALOMELocalTrace
-LIBS += -Xlinker -export-dynamic $(PYTHON_LIBS) $(LAM_LIBS)
+LIBS += -Xlinker -export-dynamic $(PYTHON_LIBS) $(MPI_LIBS)
@CONCLUDE@
CORBA::ORB_ptr orb,
PortableServer::POA_ptr poa,
char * containerName)
- : Engines_Container_i(orb,poa,containerName,0), MPIObject_i(nbproc,numproc)
+ : Engines_Container_i(orb,poa,containerName,0,0), MPIObject_i(nbproc,numproc)
{
_id = _poa->activate_object(this);
// Process 0 recupere les ior de l'object sur les autres process
for(ip=1;ip<_nbproc;ip++){
- err = MPI_Recv(&n,1,MPI_INTEGER,ip,ip,MPI_COMM_WORLD,&status);
+ err = MPI_Recv(&n,1,MPI_INT,ip,ip,MPI_COMM_WORLD,&status);
if(err){
MESSAGE("[" << _numproc << "] MPI_RECV error");
exit(1);
}
// Allocation de la chaine de longueur n
ior = (char*)calloc(n,sizeof(char));
- err = MPI_Recv(ior,n,MPI_CHARACTER,ip,2*ip,MPI_COMM_WORLD,&status);
+ err = MPI_Recv(ior,n,MPI_CHAR,ip,2*ip,MPI_COMM_WORLD,&status);
if(err){
MESSAGE("[" << _numproc << "] MPI_RECV error");
exit(1);
else{
// On envoie l'IOR au process 0
n = strlen((char*)sior);
- err = MPI_Send(&n,1,MPI_INTEGER,0,_numproc,MPI_COMM_WORLD);
+ err = MPI_Send(&n,1,MPI_INT,0,_numproc,MPI_COMM_WORLD);
if(err){
MESSAGE("[" << _numproc << "] MPI_SEND error");
exit(1);
}
- err = MPI_Send((char*)sior,n,MPI_CHARACTER,0,2*_numproc,MPI_COMM_WORLD);
+ err = MPI_Send((char*)sior,n,MPI_CHAR,0,2*_numproc,MPI_COMM_WORLD);
if(err){
MESSAGE("[" << _numproc << "] MPI_SEND error");
exit(1);
BIN_SRC =
BIN_SERVER_IDL = TypeData.idl MPIObject.idl MPIContainer.idl
-CXXFLAGS+=${MPICH_INCLUDES}
-CXX_DEPEND_FLAG+=${MPICH_INCLUDES}
-LDFLAGS+= -lSalomeContainer -lSalomeNS -lRegistry -lOpUtil ${MPICH_LIBS}
+CXXFLAGS+=${MPI_INCLUDES}
+CXX_DEPEND_FLAG+=${MPI_INCLUDES}
+LDFLAGS+= -lSalomeContainer -lSalomeNS -lRegistry -lOpUtil ${MPI_LIBS}
@CONCLUDE@
Session SALOME_SWIG TOOLSGUI SALOME_PY \
RegistryDisplay ModuleGenerator SALOME_PYQT Loader Communication
-ifeq (@WITHMPICH@,yes)
+ifeq (@mpi_ok@,yes)
SUBDIRS+= MPIContainer
endif