class MEDPARTITIONER_EXPORT Graph
{
public:
- typedef enum {METIS,SCOTCH} splitter_type;
+ typedef enum {METIS,SCOTCH,PTSCOTCH} splitter_type;
Graph();
//Creates a graph from a SkyLineArray - WARNING! Graph takes ownership of the array.
{
MEDPARTITIONER::Graph* cellGraph=0;
// will be destroyed by XXXGraph class:
- MEDCoupling::MEDCouplingSkyLineArray* arr = MEDCoupling::MEDCouplingSkyLineArray::New(graph->getIndexArray(), graph->getValuesArray());
+ MEDCoupling::MCAuto<MEDCoupling::MEDCouplingSkyLineArray> arr(MEDCoupling::MEDCouplingSkyLineArray::New(graph->getIndexArray(), graph->getValuesArray()));
switch (split)
{
case Graph::METIS:
if ( !cellGraph )
{
#ifdef MED_ENABLE_METIS
- cellGraph=new METISGraph(arr,edgeweight);
+ cellGraph=new METISGraph(arr.retn(),edgeweight);
#endif
}
if ( !cellGraph )
break;
case Graph::SCOTCH:
#ifdef MED_ENABLE_SCOTCH
- cellGraph=new SCOTCHGraph(arr,edgeweight);
+ cellGraph=new SCOTCHGraph(arr.retn(),edgeweight);
#else
+ throw INTERP_KERNEL::Exception("MEDPartitioner::Graph : SCOTCH is not available. Check your products, please.");
+#endif
+ break;
+ case Graph::PTSCOTCH:
+ {
#ifdef MED_ENABLE_PTSCOTCH
- cellGraph=new PTSCOTCHGraph(arr,edgeweight,vlbloctab);
+ cellGraph=new PTSCOTCHGraph(arr.retn(),edgeweight,vlbloctab);
#else
- throw INTERP_KERNEL::Exception("MEDPartitioner::Graph : PTSCOTCH/SCOTCH is not available. Check your products, please.");
+ throw INTERP_KERNEL::Exception("MEDPartitioner::Graph : PTSCOTCH is not available. Check your products, please.");
#endif
-#endif
- break;
+ break;
+ }
+ default:
+ throw INTERP_KERNEL::Exception("MEDPartitioner::Graph : Not managed split type engine !");
}
return cellGraph;
}
class Graph
{
public:
- typedef enum {METIS,SCOTCH} splitter_type;
+ typedef enum {METIS,SCOTCH,PTSCOTCH} splitter_type;
public:
virtual void partGraph(int ndomain, const std::string& options_string="", ParaDomainSelector *sel=0) throw(INTERP_KERNEL::Exception);
MEDCoupling::MEDCouplingSkyLineArray *getGraph() const
@unittest.skipUnless(HasPartitionerExt(),"Requires Partitioner activation")
def test3(self):
+ algoSelected=eval("Graph.%s"%MEDPartitioner.AvailableAlgorithms()[0].upper())
arr=DataArrayDouble(10) ; arr.iota()
m=MEDCouplingCMesh() ; m.setCoords(arr,arr)
m=m.buildUnstructured() ; m.setName("mesh")
- m.write("all.med")
a,b=m.computeNeighborsOfCells()
sk=MEDCouplingSkyLineArray(b,a)
- g=MEDPartitioner.Graph(sk)
+ g=MEDPartitioner.Graph(sk,algoSelected)
g.partGraph(4)
procIdOnCells=g.getPartition().getValuesArray()
m0=m[procIdOnCells.findIdsEqual(0)] ; m0.setName("m0")
- m0.write("part0.med")
pass
@unittest.skipUnless(HasParallelInterpolatorExt(),"Requires // interpolator activated")
pass
if __name__ == "__main__":
+ if HasParallelInterpolatorExt():
+ try:
+            from mpi4py import MPI # if not imported, test3 may fail due to MPI calls made by the partitioner algorithms.
+ except:
+ pass
+ pass
unittest.main()
+ pass
+