From: cvw
Date: Tue, 27 Mar 2012 13:19:44 +0000 (+0000)
Subject: *** empty log message ***
X-Git-Tag: V6_main_FINAL~713
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=6335065755377d864786eebb110a7958d40c5749;p=tools%2Fmedcoupling.git

*** empty log message ***
---

diff --git a/src/MEDPartitioner/MEDPARTITIONER_MeshCollection.cxx b/src/MEDPartitioner/MEDPARTITIONER_MeshCollection.cxx
index ef6f8c323..aeca87181 100644
--- a/src/MEDPartitioner/MEDPARTITIONER_MeshCollection.cxx
+++ b/src/MEDPartitioner/MEDPARTITIONER_MeshCollection.cxx
@@ -112,7 +112,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection
   /////////////////
 #ifdef HAVE_MPI2
-  if (MyGlobals::_Verbose>0)
+  if (MyGlobals::_Verbose>0 && MyGlobals::_World_Size>1)
     MPI_Barrier(MPI_COMM_WORLD); //synchronize verbose messages
 #endif
   if (MyGlobals::_Is0verbose)
@@ -128,7 +128,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection
   //treating families
   ////////////////////
 #ifdef HAVE_MPI2
-  if (MyGlobals::_Verbose>0)
+  if (MyGlobals::_Verbose>0 && MyGlobals::_World_Size>1)
     MPI_Barrier(MPI_COMM_WORLD); //synchronize verbose messages
 #endif
   if (MyGlobals::_Is0verbose)
@@ -152,7 +152,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection
 
   //treating groups
 #ifdef HAVE_MPI2
-  if (MyGlobals::_Verbose>0)
+  if (MyGlobals::_Verbose>0 && MyGlobals::_World_Size>1)
     MPI_Barrier(MPI_COMM_WORLD); //synchronize verbose messages
 #endif
   if (MyGlobals::_Is0verbose)
@@ -161,7 +161,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection
   _group_info=initialCollection.getGroupInfo();
 
 #ifdef HAVE_MPI2
-  if (MyGlobals::_Verbose>0)
+  if (MyGlobals::_Verbose>0 && MyGlobals::_World_Size>1)
     MPI_Barrier(MPI_COMM_WORLD); //synchronize verbose messages
 #endif
   if (MyGlobals::_Is0verbose)
@@ -1015,7 +1015,8 @@ MEDPARTITIONER::MeshCollection::MeshCollection(const std::string& filename, Para
       f.close();
     }
 #ifdef HAVE_MPI2
-  MPI_Barrier(MPI_COMM_WORLD); //wait for creation of nameFileXml
+  if (MyGlobals::_World_Size>1)
+    MPI_Barrier(MPI_COMM_WORLD); //wait for creation of nameFileXml
 #endif
   try
     {
diff --git a/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedAsciiDriver.hxx b/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedAsciiDriver.hxx
index 67d7f547e..13b72dd5a 100644
--- a/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedAsciiDriver.hxx
+++ b/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedAsciiDriver.hxx
@@ -25,7 +25,7 @@
 namespace MEDPARTITIONER
 {
   class MeshCollection;
-  class MEDPARTITIONER_EXPORT MeshCollectionMedAsciiDriver : public MeshCollectionDriver
+  class MeshCollectionMedAsciiDriver : public MeshCollectionDriver
   {
   public:
     MeshCollectionMedAsciiDriver(MeshCollection*);
diff --git a/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedXmlDriver.cxx b/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedXmlDriver.cxx
index b401c1295..2406505c9 100644
--- a/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedXmlDriver.cxx
+++ b/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedXmlDriver.cxx
@@ -42,11 +42,6 @@
 #include
 #include
 
-#ifdef WIN32
-#include <time.h>
-#include <windows.h>
-#endif
-
 using namespace MEDPARTITIONER;
 
 /*!\class MeshCollectionMedXmlDriver
@@ -199,22 +194,13 @@ void MeshCollectionMedXmlDriver::write(const char* filename, ParaDomainSelector*
   xmlNewProp(node, BAD_CAST "ver", BAD_CAST "1");
 
   //Description tag
-  char date[6];
-#ifndef WIN32
   time_t present;
   time( &present);
   struct tm *time_asc = localtime(&present);
+  char date[6];
   sprintf(date,"%02d%02d%02d",
           time_asc->tm_year
           ,time_asc->tm_mon+1
           ,time_asc->tm_mday);
-#else
-  SYSTEMTIME st;
-  GetLocalTime ( &st );
-  sprintf(date,"%02d%02d%02d",
-          st.wYear
-          ,st.wMonth
-          ,st.wDay);
-#endif
   node = xmlNewChild(root_node,0, BAD_CAST "description",0);
diff --git a/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedXmlDriver.hxx b/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedXmlDriver.hxx
index 33dde1a71..2b269960e 100644
--- a/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedXmlDriver.hxx
+++ b/src/MEDPartitioner/MEDPARTITIONER_MeshCollectionMedXmlDriver.hxx
@@ -25,7 +25,7 @@
 namespace MEDPARTITIONER
 {
   class MeshCollection;
-  class MEDPARTITIONER_EXPORT MeshCollectionMedXmlDriver : public MeshCollectionDriver
+  class MeshCollectionMedXmlDriver : public MeshCollectionDriver
   {
   public:
     MeshCollectionMedXmlDriver(MeshCollection*);
diff --git a/src/MEDPartitioner/MEDPARTITIONER_MetisGraph.cxx b/src/MEDPartitioner/MEDPARTITIONER_MetisGraph.cxx
index 54e336b6f..11e8841fc 100644
--- a/src/MEDPartitioner/MEDPARTITIONER_MetisGraph.cxx
+++ b/src/MEDPartitioner/MEDPARTITIONER_MetisGraph.cxx
@@ -25,11 +25,6 @@
 #include
 
-#ifdef MED_ENABLE_PARMETIS
-#include <mpi.h>
-#include "parmetis.h"
-#endif
-
 #ifdef MED_ENABLE_METIS
 extern "C"
 {
@@ -57,8 +52,6 @@ void METISGraph::partGraph(int ndomain,
                            ParaDomainSelector *parallelizer)
 {
   using std::vector;
-  vector<int> ran,vx,va; //for randomize
-
   if (MyGlobals::_Verbose>10)
     std::cout << "proc " << MyGlobals::_Rank << " : METISGraph::partGraph" << std::endl;
@@ -89,150 +82,39 @@
   int edgecut;
   int* partition=new int[n];
 
+#if !defined(MED_ENABLE_METIS)
+  throw INTERP_KERNEL::Exception("METISGraph::partGraph : METIS is not available. Check your products, please.");
+#else
   if(nparts >1)
     {
-      if(parallelizer)
-        {
-#ifdef MED_ENABLE_PARMETIS
-          // distribution of vertices of the graph among the processors
-          if (MyGlobals::_Verbose>100)
-            std::cout << "proc " << MyGlobals::_Rank << " : METISGraph::partGraph ParMETIS_PartKway" << std::endl;
-          int * vtxdist=parallelizer->getProcVtxdist();
-          MPI_Comm comm=MPI_COMM_WORLD;
-          try
-            {
-              if (MyGlobals::_Verbose>200)
-                {
-                  std::cout << "proc " << MyGlobals::_Rank << " : vtxdist :";
-                  for (int i=0; i<=MyGlobals::_World_Size; ++i)
-                    std::cout << vtxdist[i] << " ";
-                  std::cout << std::endl;
-                  int lgxadj=vtxdist[MyGlobals::_Rank+1]-vtxdist[MyGlobals::_Rank];
-                  if (lgxadj>0)
-                    {
-                      std::cout<< "\nproc " << MyGlobals::_Rank << " : lgxadj " << lgxadj << " lgadj " << xadj[lgxadj] << std::endl;
-                      for (int i=0; i<10; ++i)
-                        std::cout << xadj[i] << " ";
-                      std::cout << "... " << xadj[lgxadj] << std::endl;
-                      for (int i=0; i<15; ++i)
-                        std::cout << adjncy[i] << " ";
-                      int ll=xadj[lgxadj]-1;
-                      std::cout << "... [" << ll << "] " << adjncy[ll-1] << " " << adjncy[ll] << std::endl;
[" << ll << "] " << adjncy[ll-1] << " " << adjncy[ll] << std::endl; - int imaxx=0; - for (int ilgxadj=0; ilgxadjimaxx) - imaxx=ilg; - } - std::cout<< "\nproc " << MyGlobals::_Rank << " : on " << lgxadj << " cells, max neighbourg number (...for one cell) is " << imaxx << std::endl; - } - } - if ((MyGlobals::_Randomize!=0 || MyGlobals::_Atomize!=0) && MyGlobals::_World_Size==1) - { - //randomize initially was for test on ParMETIS error (sometimes) - //due to : seems no changes int options[4]={1,0,33,0}; //test for a random seed of 33 - //it was keeped - ran=CreateRandomSize(n); - RandomizeAdj(&xadj[0],&adjncy[0],ran,vx,va); - ParMETIS_PartKway(vtxdist, &vx[0], &va[0], vwgt, - adjwgt, &wgtflag, &base, &nparts, options, - &edgecut, partition, &comm ); - } - else - { - ParMETIS_PartKway(vtxdist, xadj, adjncy, vwgt, - adjwgt, &wgtflag, &base, &nparts, options, - &edgecut, partition, &comm ); - } - - /*doc from parmetis.h - void __cdecl ParMETIS_PartKway( - idxtype *vtxdist, idxtype *xadj, idxtype *adjncy, idxtype *vwgt, - idxtype *adjwgt, int *wgtflag, int *numflag, int *nparts, int *options, - int *edgecut, idxtype *part, MPI_Comm *comm); - - void __cdecl ParMETIS_V3_PartKway( - idxtype *vtxdist, idxtype *xadj, idxtype *adjncy, idxtype *vwgt, - idxtype *adjwgt, int *wgtflag, int *numflag, int *ncon, int *nparts, - float *tpwgts, float *ubvec, int *options, int *edgecut, idxtype *part, - MPI_Comm *comm); - */ - - } - catch(...) - { - //shit ParMETIS "Error! Key -2 not found!" not catched... - throw INTERP_KERNEL::Exception("Problem in ParMETIS_PartKway"); - } -#else - -#ifdef MED_ENABLE_METIS - if (MyGlobals::_Verbose>10) - std::cout << "proc " << MyGlobals::_Rank << " : METISGraph::partGraph METIS_PartGraph Recursive/Kway" << std::endl; - if (options_string != "k") - METIS_PartGraphRecursive(&n, xadj, adjncy, vwgt, adjwgt, &wgtflag, - &base, &nparts, options, &edgecut, partition); - else - METIS_PartGraphKway(&n, xadj, adjncy, vwgt, adjwgt, &wgtflag, - &base, &nparts, options, &edgecut, partition); -#else - throw INTERP_KERNEL::Exception("ParMETIS or METIS is not available. Check your products, please."); -#endif - -#endif - } + if (MyGlobals::_Verbose>10) + std::cout << "METISGraph::partGraph METIS_PartGraph METIS_PartGraph(RecursiveOrKway) newww" << std::endl; + if (options_string != "k") + METIS_PartGraphRecursive(&n, xadj, adjncy, vwgt, adjwgt, &wgtflag, + &base, &nparts, options, &edgecut, partition); else - { -#ifdef MED_ENABLE_METIS - if (MyGlobals::_Verbose>10) - std::cout << "proc " << MyGlobals::_Rank << " : METISGraph::partGraph METIS_PartGraph Recursive or Kway" << std::endl; - if (options_string != "k") - METIS_PartGraphRecursive(&n, xadj, adjncy, vwgt, adjwgt, &wgtflag, - &base, &nparts, options, &edgecut, partition); - else - METIS_PartGraphKway(&n, xadj, adjncy, vwgt, adjwgt, &wgtflag, - &base, &nparts, options, &edgecut, partition); -#else - throw INTERP_KERNEL::Exception("METIS is not available. 
-#endif
-        }
+        METIS_PartGraphKway(&n, xadj, adjncy, vwgt, adjwgt, &wgtflag,
+                            &base, &nparts, options, &edgecut, partition);
     }
-  else
+  else //force this case because METIS send all 1 in value
     {
       for (int i=0; i<n; i++)
         partition[i]=0;
     }
   vector<int> index(n+1);
   vector<int> value(n);
   index[0]=0;
-  if (ran.size()>0 && MyGlobals::_Atomize==0) //there is randomize
+  for (int i=0; i<n; i++)
     {
-      if (MyGlobals::_Is0verbose>100)
-        std::cout << "randomize" << std::endl;
-      for (int i=0; i<n; i++)
-        {
-          index[i+1]=index[i]+1;
-          value[ran[i]]=partition[i];
-        }
-    }
-  else
-    {
-      for (int i=0; i<n; i++)
-        {
-          index[i+1]=index[i]+1;
-          value[i]=partition[i];
-        }
+      index[i+1]=index[i]+1;
+      value[i]=partition[i];
     }
   delete [] partition;
+#endif
diff --git a/src/MEDPartitioner/MEDPARTITIONER_ParaDomainSelector.cxx b/src/MEDPartitioner/MEDPARTITIONER_ParaDomainSelector.cxx
--- a/src/MEDPartitioner/MEDPARTITIONER_ParaDomainSelector.cxx
+++ b/src/MEDPartitioner/MEDPARTITIONER_ParaDomainSelector.cxx
@@ ... @@ MEDPARTITIONER::ParaDomainSelector::ParaDomainSelector
 #else
+  if (MyGlobals::_Verbose>10)
+    std::cout << "WARNING : ParaDomainSelector contructor without parallel_mode World_Size=1 by default" << std::endl;
 #endif
+  MyGlobals::_World_Size=_world_size;
+  MyGlobals::_Rank=_rank;
+
+  if (MyGlobals::_Verbose>200) std::cout << "proc " << MyGlobals::_Rank << " of " << MyGlobals::_World_Size << std::endl;
   evaluateMemory();
 }
@@ -125,13 +144,20 @@ void MEDPARTITIONER::ParaDomainSelector::gatherNbOf(const std::vector
         nb_elems[i*2+1] = domain_meshes[i]->getNumberOfNodes();
       }
   // receive nb of elems from other procs
+  std::vector<int> all_nb_elems;
+  if (MyGlobals::_World_Size==1)
+    {
+      all_nb_elems=nb_elems;
+    }
+  else
+    {
 #ifdef HAVE_MPI2
-  std::vector<int> all_nb_elems( nb_domains*2 );
-  MPI_Allreduce((void*)&nb_elems[0], (void*)&all_nb_elems[0], nb_domains*2,
-                MPI_INT, MPI_SUM, MPI_COMM_WORLD);
+      all_nb_elems.resize( nb_domains*2 );
+      MPI_Allreduce((void*)&nb_elems[0], (void*)&all_nb_elems[0], nb_domains*2, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
 #else
-  std::vector<int> all_nb_elems=nb_elems;
+      throw INTERP_KERNEL::Exception("not(HAVE_MPI2) incompatible with MPI_World_Size>1");
 #endif
+    }
   int total_nb_cells=0, total_nb_nodes=0;
   for (int i=0; i<nb_domains; ++i)
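
The recurring edit in MEDPARTITIONER_MeshCollection.cxx guards each collective MPI call with MyGlobals::_World_Size>1, so a run on a single process never reaches MPI_Barrier. A minimal standalone sketch of that pattern, assuming an MPI installation; the helper name barrierIfParallel and the main driver are illustrative and not part of this commit:

// Sketch (not from the commit): the world-size guard applied as a helper.
#include <mpi.h>

static void barrierIfParallel(int world_size)
{
  // A barrier is only meaningful when several processes run; a serial run
  // (world size 1, possibly with MPI never initialized) skips it entirely.
  if (world_size > 1)
    MPI_Barrier(MPI_COMM_WORLD);
}

int main(int argc, char* argv[])
{
  MPI_Init(&argc, &argv);
  int world_size = 1;
  MPI_Comm_size(MPI_COMM_WORLD, &world_size);
  barrierIfParallel(world_size); // no-op for a serial run
  MPI_Finalize();
  return 0;
}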
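The gatherNbOf hunk applies the same idea to a reduction: with one process the local per-domain counts are already the global ones, so the MPI_Allreduce can be replaced by a plain copy, and a build without MPI2 is only an error when more than one process is requested. A self-contained sketch of that bypass, where the function and variable names are illustrative rather than the MEDPARTITIONER API:

// Sketch (not from the commit): serial bypass around MPI_Allreduce.
#include <mpi.h>
#include <vector>

std::vector<int> sumOverRanks(const std::vector<int>& local, int world_size)
{
  if (world_size == 1)
    return local; // one process: local counts are already global
  std::vector<int> global(local.size(), 0);
  // Older MPI-2 signatures take non-const void*, hence the cast on the send buffer.
  MPI_Allreduce((void*)&local[0], (void*)&global[0],
                (int)local.size(), MPI_INT, MPI_SUM, MPI_COMM_WORLD);
  return global;
}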