#include <mpi.h>
#endif
-#ifdef ENABLE_METIS
+#if defined(MED_ENABLE_PARMETIS) || defined(MED_ENABLE_METIS)
#include "MEDPARTITIONER_MetisGraph.hxx"
#endif
-#ifdef ENABLE_SCOTCH
+#ifdef MED_ENABLE_SCOTCH
#include "MEDPARTITIONER_ScotchGraph.hxx"
#endif
switch (split)
{
case Graph::METIS:
-#ifdef ENABLE_METIS
+#if defined(MED_ENABLE_PARMETIS) || defined(MED_ENABLE_METIS)
if (MyGlobals::_Verbose>10) cout<<"METISGraph"<<endl;
cellGraph=(Graph*)(new METISGraph(array,edgeweights));
#else
#endif
break;
case Graph::SCOTCH:
-#ifdef ENABLE_SCOTCH
+#ifdef MED_ENABLE_SCOTCH
if (MyGlobals::_Verbose>10) cout<<"SCOTCHGraph"<<endl;
cellGraph=(Graph*)(new SCOTCHGraph(array,edgeweights));
#else
#include <iostream>
-#ifdef ENABLE_PARMETIS
-#include <parmetis.h>
+#ifdef MED_ENABLE_PARMETIS
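+// parmetis.h relies on MPI types (e.g. MPI_Comm), so mpi.h has to be included first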
+#include <mpi.h>
+#include "parmetis.h"
#endif
-extern "C" {
-#include <metis.h>
+
+#ifdef MED_ENABLE_METIS
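+// the METIS API is plain C; the extern "C" block keeps its symbols from being C++-mangled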
+extern "C"
+{
+#include "metis.h"
}
+#endif
-using namespace std;
using namespace ParaMEDMEM;
using namespace MEDPARTITIONER;
void METISGraph::partGraph(int ndomain,
const std::string& options_string,
- ParaDomainSelector* parallelizer)
+ ParaDomainSelector *parallelizer)
{
using std::vector;
vector<int> ran,vx,va; //for randomize
- if (MyGlobals::_Verbose>10) cout<<"proc "<<MyGlobals::_Rank<<" : METISGraph::partGraph"<<endl;
+ if (MyGlobals::_Verbose>10)
+ std::cout << "proc " << MyGlobals::_Rank << " : METISGraph::partGraph" << std::endl;
// number of graph vertices
int n=_graph->getNumberOf();
int edgecut;
int* partition=new int[n];
- //if (MyGlobals::_Verbose>10) cout<<"proc "<<MyGlobals::_Rank<<" : METISGraph::partGraph n="<<n<<endl;
- if (nparts >1)
+ if(nparts >1)
{
- if ( parallelizer )
+ if(parallelizer)
{
-#ifdef ENABLE_PARMETIS
+#ifdef MED_ENABLE_PARMETIS
// distribution of vertices of the graph among the processors
if (MyGlobals::_Verbose>100)
- cout<<"proc "<<MyGlobals::_Rank
- <<" : METISGraph::partGraph ParMETIS_PartKway"<<endl;
+ std::cout << "proc " << MyGlobals::_Rank << " : METISGraph::partGraph ParMETIS_PartKway" << std::endl;
int * vtxdist=parallelizer->getProcVtxdist();
MPI_Comm comm=MPI_COMM_WORLD;
try
{
if (MyGlobals::_Verbose>200)
{
- cout<<"proc "<<MyGlobals::_Rank<<" : vtxdist :";
- for (int i=0; i<MyGlobals::_World_Size+1; ++i) cout<<vtxdist[i]<<" ";
- cout<<endl;
+ std::cout << "proc " << MyGlobals::_Rank << " : vtxdist :";
+ for (int i=0; i<MyGlobals::_World_Size+1; ++i)
+ std::cout << vtxdist[i] <<" ";
+ std::cout << std::endl;
int lgxadj=vtxdist[MyGlobals::_Rank+1]-vtxdist[MyGlobals::_Rank];
- //cout<<"lgxadj "<<lgxadj<<" "<<n<<endl;
if (lgxadj>0)
{
- cout<<"\nproc "<<MyGlobals::_Rank<<" : lgxadj "<<lgxadj<<" lgadj "<<xadj[lgxadj+1]<<endl;
- for (int i=0; i<10; ++i) cout<<xadj[i]<<" ";
- cout<<"... "<<xadj[lgxadj]<<endl;
- for (int i=0; i<15; ++i) cout<<adjncy[i]<<" ";
+ std::cout<< "\nproc " << MyGlobals::_Rank << " : lgxadj " << lgxadj << " lgadj " << xadj[lgxadj+1] << std::endl;
+ for (int i=0; i<10; ++i)
+ std::cout << xadj[i] << " ";
+ std::cout << "... " << xadj[lgxadj] << std::endl;
+ for (int i=0; i<15; ++i)
+ std::cout << adjncy[i] << " ";
int ll=xadj[lgxadj]-1;
- cout<<"... ["<<ll<<"] "<<adjncy[ll-1]<<" "<<adjncy[ll]<<endl;
- /*for (int i=0; i<=ll; ++i) {
- if (adjncy[i]<0) cout<<"***cvw00 error: adjncy[i]<0 "<<i<<endl;
- }*/
+ std::cout << "... [" << ll << "] " << adjncy[ll-1] << " " << adjncy[ll] << std::endl;
int imaxx=0;
- //for (int ilgxadj=0; ilgxadj<lgxadj; ilgxadj++)
for (int ilgxadj=0; ilgxadj<lgxadj; ilgxadj++)
{
int ilg=xadj[ilgxadj+1]-xadj[ilgxadj];
- /*if (ilg<0) cout<<"***cvw01 error: ilg<0 in xadj "<<ilgxadj<<endl;
- if (MyGlobals::_Is0verbose>1000)
- {
- cout<<"\n -cell "<<ilgxadj<<" "<<ilg<<" :";
- for (int i=0; i<ilg; i++) cout<<" "<<adjncy[xadj[ilgxadj]+i];
- }*/
- if (ilg>imaxx) imaxx=ilg;
+ if(ilg>imaxx)
+ imaxx=ilg;
}
- cout<<"\nproc "<<MyGlobals::_Rank
- <<" : on "<<lgxadj<<" cells, max neighbourg number (...for one cell) is "<<imaxx<<endl;
+ std::cout<< "\nproc " << MyGlobals::_Rank << " : on " << lgxadj << " cells, max neighbourg number (...for one cell) is " << imaxx << std::endl;
}
-
}
if ((MyGlobals::_Randomize!=0 || MyGlobals::_Atomize!=0) && MyGlobals::_World_Size==1)
{
else
{
//MPI_Barrier(MPI_COMM_WORLD);
- //cout<<"proc "<<MyGlobals::_Rank<<" : barrier ParMETIS_PartKway done"<<endl;
ParMETIS_PartKway( //cvwat11
vtxdist, xadj, adjncy, vwgt,
adjwgt, &wgtflag, &base, &nparts, options,
}
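  // special case for tiny graphs: with fewer than 8 cells and exactly 3 domains, cells are simply assigned round-robin (i%3)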
if (n<8 && nparts==3)
{
- for (int i=0; i<n; i++) partition[i]=i%3;
+ for (int i=0; i<n; i++)
+ partition[i]=i%3;
}
#else
throw INTERP_KERNEL::Exception(LOCALIZED("ParMETIS is not available. Check your products, please."));
#endif
- //throw INTERP_KERNEL::Exception(LOCALIZED("ParMETIS is not available. Check your products, please."));
}
else
{
+#ifdef MED_ENABLE_METIS
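+// sequential fallback: plain METIS, recursive bisection unless the "k" (k-way) option was requested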
if (MyGlobals::_Verbose>10)
- cout<<"proc "<<MyGlobals::_Rank
- <<" : METISGraph::partGraph METIS_PartGraph Recursive or Kway"<<endl;
+ std::cout << "proc " << MyGlobals::_Rank << " : METISGraph::partGraph METIS_PartGraph Recursive or Kway" << std::endl;
if (options_string != "k")
METIS_PartGraphRecursive(&n, xadj, adjncy, vwgt, adjwgt, &wgtflag,
&base, &nparts, options, &edgecut, partition);
else
METIS_PartGraphKway(&n, xadj, adjncy, vwgt, adjwgt, &wgtflag,
&base, &nparts, options, &edgecut, partition);
+#else
+ throw INTERP_KERNEL::Exception(LOCALIZED("METIS is not available. Check your products, please."));
+#endif
}
}
else
{
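  // a single domain was requested (nparts<=1): every cell trivially goes to partition 0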
- for (int i=0; i<n; i++) partition[i]=0;
+ for (int i=0; i<n; i++)
+ partition[i]=0;
}
vector<int> index(n+1);
index[0]=0;
if (ran.size()>0 && MyGlobals::_Atomize==0) //there is randomize
{
- if (MyGlobals::_Is0verbose>100) cout<<"randomize"<<endl;
+ if (MyGlobals::_Is0verbose>100)
+ std::cout << "randomize" << std::endl;
for (int i=0; i<n; i++)
{
index[i+1]=index[i]+1;
value[i]=partition[i];
}
}
- delete[]partition;
+ delete [] partition;
//creating a skylinearray with no copy of the index and partition array
//the fifth argument true specifies that only the pointers are passed
#include <string>
-namespace MEDPARTITIONER {
- class SkyLineArray;
+namespace MEDPARTITIONER
+{
class MEDPARTITIONER_EXPORT METISGraph : public Graph
{
public:
METISGraph();
- METISGraph(MEDPARTITIONER::SkyLineArray*, int* edgeweight=0);
+ METISGraph(MEDPARTITIONER::SkyLineArray*, int *edgeweight=0);
virtual ~METISGraph();
- void partGraph(int ndomain, const std::string& options_string="", ParaDomainSelector* sel=0);
+ void partGraph(int ndomain, const std::string& options_string="", ParaDomainSelector *sel=0);
};
}
+
#endif
#include <cstdio>
-extern "C" {
+#ifdef MED_ENABLE_SCOTCH
+extern "C"
+{
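+// scotch.h prototypes use the C99 'restrict' qualifier, which is not a C++ keyword, so it is defined away here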
#define restrict
#include "scotch.h"
}
+#endif
using namespace MEDPARTITIONER;
void SCOTCHGraph::partGraph(int ndomain, const std::string& options_string, ParaDomainSelector* sel)
{
+#ifdef MED_ENABLE_SCOTCH
// number of graph vertices
int n = _graph->getNumberOf();
//to the object
_partition = new MEDPARTITIONER::SkyLineArray(index,value);
-
+#else
+ throw INTERP_KERNEL::Exception(LOCALIZED("SCOTCH is not available. Check your products, please."));
+#endif
}
MEDPARTITIONER_ConnectZone.hxx \
MEDPARTITIONER_SkyLineArray.hxx
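+# the METIS graph wrapper header is installed for both the ParMETIS (MPI) and the sequential METIS configurations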
+if MED_ENABLE_PARMETIS
+ salomeinclude_HEADERS += MEDPARTITIONER_MetisGraph.hxx
+endif
if MED_ENABLE_METIS
- salomeinclude_HEADERS+= MEDPARTITIONER_MetisGraph.hxx
+ salomeinclude_HEADERS += MEDPARTITIONER_MetisGraph.hxx
endif
if MED_ENABLE_SCOTCH
- salomeinclude_HEADERS+= MEDPARTITIONER_ScotchGraph.hxx
+ salomeinclude_HEADERS += MEDPARTITIONER_ScotchGraph.hxx
endif
-dist_libmedpartitioner_la_SOURCES= \
-MEDPARTITIONER_Utils.cxx \
+dist_libmedpartitioner_la_SOURCES = \
MEDPARTITIONER_MeshCollection.cxx \
MEDPARTITIONER_MeshCollectionDriver.cxx \
MEDPARTITIONER_MeshCollectionMedXmlDriver.cxx \
MEDPARTITIONER_MeshCollectionMedAsciiDriver.cxx \
-MEDPARTITIONER_ParallelTopology.cxx \
MEDPARTITIONER_Graph.cxx\
MEDPARTITIONER_UserGraph.cxx\
-MEDPARTITIONER_ParaDomainSelector.cxx \
MEDPARTITIONER_JointFinder.cxx \
MEDPARTITIONER_SkyLineArray.cxx \
MEDPARTITIONER_ConnectZone.cxx
-if MED_ENABLE_METIS
- dist_libmedpartitioner_la_SOURCES+= MEDPARTITIONER_MetisGraph.cxx
-endif
-if MED_ENABLE_SCOTCH
- dist_libmedpartitioner_la_SOURCES+= MEDPARTITIONER_ScotchGraph.cxx
-endif
-
-libmedpartitioner_la_CPPFLAGS= $(MPI_INCLUDES) $(MED3_INCLUDES) $(HDF5_INCLUDES) @CXXTMPDPTHFLAGS@ \
- $(BOOST_CPPFLAGS) $(LIBXML_INCLUDES) \
- -I$(srcdir)/../INTERP_KERNEL/Bases -I$(srcdir)/../MEDCoupling \
- -I$(srcdir)/../MEDLoader -I$(srcdir)/../INTERP_KERNEL
+libmedpartitioner_la_CPPFLAGS = $(MPI_INCLUDES) $(MED3_INCLUDES) $(HDF5_INCLUDES) \
+ $(LIBXML_INCLUDES) -I$(srcdir)/../INTERP_KERNEL/Bases -I$(srcdir)/../MEDCoupling \
+ -I$(srcdir)/../MEDLoader -I$(srcdir)/../INTERP_KERNEL
-libmedpartitioner_la_LDFLAGS=
+libmedpartitioner_la_LDFLAGS =
-if MED_ENABLE_PARMETIS
- libmedpartitioner_la_CPPFLAGS+= $(PARMETIS_CPPFLAGS)
- libmedpartitioner_la_LDFLAGS+= $(PARMETIS_LIBS)
-endif
-if MED_ENABLE_METIS
- libmedpartitioner_la_CPPFLAGS+= $(METIS_CPPFLAGS)
- libmedpartitioner_la_LDFLAGS+= $(METIS_LIBS)
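+# with MPI the parallel sources are built and ParMETIS is used; without MPI the build falls back to sequential METIS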
+if MPI_IS_OK
+ dist_libmedpartitioner_la_SOURCES += MEDPARTITIONER_ParaDomainSelector.cxx \
+ MEDPARTITIONER_Utils.cxx \
+ MEDPARTITIONER_ParallelTopology.cxx
+
+ if MED_ENABLE_PARMETIS
+ dist_libmedpartitioner_la_SOURCES += MEDPARTITIONER_MetisGraph.cxx
+ libmedpartitioner_la_CPPFLAGS += $(PARMETIS_CPPFLAGS)
+ libmedpartitioner_la_LDFLAGS += $(PARMETIS_LIBS)
+ endif
+else !MPI_IS_OK
+ if MED_ENABLE_METIS
+ dist_libmedpartitioner_la_SOURCES += MEDPARTITIONER_MetisGraph.cxx
+ libmedpartitioner_la_CPPFLAGS += $(METIS_CPPFLAGS)
+ libmedpartitioner_la_LDFLAGS += $(METIS_LIBS)
+ endif
endif
+
if MED_ENABLE_SCOTCH
- libmedpartitioner_la_CPPFLAGS+= $(SCOTCH_CPPFLAGS)
- libmedpartitioner_la_LDFLAGS+= $(SCOTCH_LIBS)
+ dist_libmedpartitioner_la_SOURCES += MEDPARTITIONER_ScotchGraph.cxx
+ libmedpartitioner_la_CPPFLAGS += $(SCOTCH_CPPFLAGS)
+ libmedpartitioner_la_LDFLAGS += $(SCOTCH_LIBS)
endif
-if MED_ENABLE_KERNEL
- libmedpartitioner_la_CPPFLAGS+= ${KERNEL_CXXFLAGS}
- libmedpartitioner_la_LDFLAGS+= ${KERNEL_LDFLAGS} -lSALOMELocalTrace
-endif
-
-libmedpartitioner_la_LDFLAGS+= $(MED2_LIBS) $(HDF5_LIBS) $(STDLIB) $(LIBXML_LIBS) $(MPI_LIBS) \
- ../INTERP_KERNEL/libinterpkernel.la ../MEDCoupling/libmedcoupling.la ../MEDLoader/libmedloader.la
-medpartitioner_CPPFLAGS= $(libmedpartitioner_la_CPPFLAGS)
-medpartitioner_LDADD= $(libmedpartitioner_la_LDFLAGS) -lm $(BOOST_LIBS) libmedpartitioner.la
-
-if MED_ENABLE_KERNEL
- medpartitioner_LDADD+= -lSALOMEBasics
-endif
+libmedpartitioner_la_LDFLAGS += $(MED3_LIBS_C_ONLY) $(HDF5_LIBS) $(STDLIB) $(LIBXML_LIBS) $(MPI_LIBS) \
+ ../INTERP_KERNEL/libinterpkernel.la ../MEDCoupling/libmedcoupling.la ../MEDLoader/libmedloader.la
# Executables targets
if MPI_IS_OK
- bin_PROGRAMS= medpartitioner_para
- dist_medpartitioner_para_SOURCES= medpartitioner_para.cxx
- medpartitioner_para_CPPFLAGS= $(medpartitioner_CPPFLAGS)
- medpartitioner_para_LDADD= $(medpartitioner_LDADD)
+ bin_PROGRAMS = medpartitioner_para
+ dist_medpartitioner_para_SOURCES = medpartitioner_para.cxx
+ medpartitioner_para_CPPFLAGS = $(MPI_INCLUDES) $(PARMETIS_CPPFLAGS) $(SCOTCH_CPPFLAGS)
+ medpartitioner_para_LDADD = libmedpartitioner.la
endif
OBSOLETE_FILES =
int main(int argc, char** argv)
{
-#ifndef ENABLE_PARMETIS
-#ifndef ENABLE_PTSCOTCH
+#ifndef MED_ENABLE_PARMETIS
+#ifndef MED_ENABLE_SCOTCH
cout << "Sorry, no one split method is available. Please, compile with ParMETIS or PT-SCOTCH."<<endl;
return 1;
#endif
"\t--input-file=<string> : name of the input .med file or .xml master file\n"
"\t--output-file=<string> : name of the resulting file (without exension)\n"
"\t--ndomains=<number> : number of subdomains in the output file, default is 1\n"
-#ifdef ENABLE_PARMETIS
-#ifdef ENABLE_PTSCOTCH
+#ifdef MED_ENABLE_PARMETIS
+#ifdef MED_ENABLE_SCOTCH
"\t--split-method=<string> : name of the splitting library (metis/scotch), default is metis\n"
#endif
#endif
MyGlobals::_Randomize=0;
}
-#ifdef ENABLE_PARMETIS
-#ifndef ENABLE_PTSCOTCH
+#ifdef MED_ENABLE_PARMETIS
+#ifndef MED_ENABLE_SCOTCH
library = "metis";
#endif
#else