X-Git-Url: http://git.salome-platform.org/gitweb/?a=blobdiff_plain;f=src%2FMEDPartitioner%2FMEDPARTITIONER_MeshCollection.cxx;h=d0ad9d4cbbce204e9d10d8d6f646b9096100eb74;hb=5f6d7861933f9c7596059d7a847e58624e177216;hp=4fc48e9edf45b71587eec9ff45d3836118a94fb4;hpb=887d0e1efce4f46f68d2596dcd801f02f5c1f99e;p=tools%2Fmedcoupling.git diff --git a/src/MEDPartitioner/MEDPARTITIONER_MeshCollection.cxx b/src/MEDPartitioner/MEDPARTITIONER_MeshCollection.cxx index 4fc48e9ed..d0ad9d4cb 100644 --- a/src/MEDPartitioner/MEDPARTITIONER_MeshCollection.cxx +++ b/src/MEDPartitioner/MEDPARTITIONER_MeshCollection.cxx @@ -1,9 +1,9 @@ -// Copyright (C) 2007-2012 CEA/DEN, EDF R&D +// Copyright (C) 2007-2015 CEA/DEN, EDF R&D // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either -// version 2.1 of the License. +// version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of @@ -18,32 +18,32 @@ // #include "MEDPARTITIONER_MeshCollection.hxx" + +#include "MEDPARTITIONER_ConnectZone.hxx" +#include "MEDPARTITIONER_Graph.hxx" #include "MEDPARTITIONER_MeshCollectionDriver.hxx" -#include "MEDPARTITIONER_MeshCollectionMedXmlDriver.hxx" #include "MEDPARTITIONER_MeshCollectionMedAsciiDriver.hxx" +#include "MEDPARTITIONER_MeshCollectionMedXmlDriver.hxx" #include "MEDPARTITIONER_ParaDomainSelector.hxx" -#include "MEDPARTITIONER_Topology.hxx" #include "MEDPARTITIONER_ParallelTopology.hxx" +#include "MEDPARTITIONER_Topology.hxx" +#include "MEDPARTITIONER_UserGraph.hxx" +#include "MEDPARTITIONER_Utils.hxx" -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI #include "MEDPARTITIONER_JointFinder.hxx" #endif -#include "MEDPARTITIONER_Graph.hxx" -#include "MEDPARTITIONER_UserGraph.hxx" -#include "MEDPARTITIONER_Utils.hxx" - -#include "MEDLoaderBase.hxx" -#include "MEDLoader.hxx" +#include "MEDCouplingAutoRefCountObjectPtr.hxx" +#include "MEDCouplingFieldDouble.hxx" #include "MEDCouplingMemArray.hxx" -#include "MEDCouplingUMesh.hxx" #include "MEDCouplingNormalizedUnstructuredMesh.hxx" -#include "MEDCouplingFieldDouble.hxx" -#include "PointLocator3DIntersectorP0P0.hxx" - -#include "MEDCouplingAutoRefCountObjectPtr.hxx" +#include "MEDCouplingSkyLineArray.hxx" +#include "MEDCouplingUMesh.hxx" +#include "MEDLoader.hxx" +#include "MEDLoaderBase.hxx" -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI #include #endif @@ -106,7 +106,9 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection _joint_finder(0) { std::vector > > new2oldIds(initialCollection.getTopology()->nbDomain()); - castCellMeshes(initialCollection, new2oldIds); + std::vector o2nRenumber; + + castCellMeshes(initialCollection, new2oldIds, o2nRenumber ); //defining the name for the collection and the underlying meshes setName(initialCollection.getName()); @@ -115,7 +117,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection //treating faces ///////////////// -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI if (MyGlobals::_Verbose>0 && MyGlobals::_World_Size>1) MPI_Barrier(MPI_COMM_WORLD); //synchronize verbose messages #endif @@ -131,7 +133,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection //////////////////// //treating families //////////////////// -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI if (MyGlobals::_Verbose>0 && MyGlobals::_World_Size>1) 
MPI_Barrier(MPI_COMM_WORLD); //synchronize verbose messages #endif @@ -155,7 +157,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection "faceFamily"); //treating groups -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI if (MyGlobals::_Verbose>0 && MyGlobals::_World_Size>1) MPI_Barrier(MPI_COMM_WORLD); //synchronize verbose messages #endif @@ -163,8 +165,8 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection std::cout << "treating groups" << std::endl; _family_info=initialCollection.getFamilyInfo(); _group_info=initialCollection.getGroupInfo(); - -#ifdef HAVE_MPI2 + +#ifdef HAVE_MPI if (MyGlobals::_Verbose>0 && MyGlobals::_World_Size>1) MPI_Barrier(MPI_COMM_WORLD); //synchronize verbose messages #endif @@ -173,7 +175,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection castAllFields(initialCollection,"cellFieldDouble"); if (_i_non_empty_mesh<0) { - for (int i=0; i<_mesh.size(); i++) + for (size_t i=0; i<_mesh.size(); i++) { if (_mesh[i]) { @@ -183,16 +185,28 @@ MEDPARTITIONER::MeshCollection::MeshCollection(MeshCollection& initialCollection } } + // find faces common with neighbor domains and put them in groups + buildBoundaryFaces(); + + //building the connect zones necessary for writing joints + buildConnectZones( nodeMapping, o2nRenumber, initialCollection.getTopology()->nbDomain() ); + + // delete o2nRenumber + for ( size_t i = 0; i < o2nRenumber.size(); ++i ) + if ( o2nRenumber[i] ) + o2nRenumber[i]->decrRef(); } /*! - Creates the meshes using the topology underlying he mesh collection and the mesh data + Creates the meshes using the topology underlying he mesh collection and the mesh data coming from the ancient collection \param initialCollection collection from which the data is extracted to create the new meshes + \param [out] o2nRenumber returns for each new domain a permutation array returned by sortCellsInMEDFileFrmt() */ void MEDPARTITIONER::MeshCollection::castCellMeshes(MeshCollection& initialCollection, - std::vector > >& new2oldIds) + std::vector > >& new2oldIds, + std::vector & o2nRenumber) { if (MyGlobals::_Verbose>10) std::cout << "proc " << MyGlobals::_Rank << " : castCellMeshes" << std::endl; @@ -203,6 +217,7 @@ void MEDPARTITIONER::MeshCollection::castCellMeshes(MeshCollection& initialColle int nbOldDomain=initialCollection.getTopology()->nbDomain(); _mesh.resize(nbNewDomain); + o2nRenumber.resize(nbNewDomain,0); int rank=MyGlobals::_Rank; //splitting the initial domains into smaller bits std::vector > splitMeshes; @@ -240,7 +255,7 @@ void MEDPARTITIONER::MeshCollection::castCellMeshes(MeshCollection& initialColle } } } -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI if (isParallelMode()) { //if (MyGlobals::_Verbose>300) std::cout<<"proc "< meshes; - + for (int i=0; i<(int)splitMeshes[inew].size(); i++) - if (splitMeshes[inew][i]!=0) + if (splitMeshes[inew][i]!=0) if (splitMeshes[inew][i]->getNumberOfCells()>0) meshes.push_back(splitMeshes[inew][i]); if (!isParallelMode()||_domain_selector->isMyDomain(inew)) { - if (meshes.size()==0) + if (meshes.size()==0) { _mesh[inew]=CreateEmptyMEDCouplingUMesh(); std::cout << "WARNING : castCellMeshes fusing : no meshes try another number of processors" << std::endl; @@ -282,6 +297,7 @@ void MEDPARTITIONER::MeshCollection::castCellMeshes(MeshCollection& initialColle else { _mesh[inew]=ParaMEDMEM::MEDCouplingUMesh::MergeUMeshes(meshes); + o2nRenumber[inew]=_mesh[inew]->sortCellsInMEDFileFrmt(); bool areNodesMerged; int nbNodesMerged; if (meshes.size()>1) @@ -290,7 
+306,6 @@ void MEDPARTITIONER::MeshCollection::castCellMeshes(MeshCollection& initialColle array->decrRef(); // array is not used in this case } _mesh[inew]->zipCoords(); - } } for (int i=0;i<(int)splitMeshes[inew].size();i++) @@ -302,7 +317,7 @@ void MEDPARTITIONER::MeshCollection::castCellMeshes(MeshCollection& initialColle } /*! - \param initialCollection source mesh collection + \param initialCollection source mesh collection \param nodeMapping structure containing the correspondency between nodes in the initial collection and the node(s) in the new collection */ void MEDPARTITIONER::MeshCollection::createNodeMapping( MeshCollection& initialCollection, NodeMapping& nodeMapping) @@ -335,7 +350,7 @@ void MEDPARTITIONER::MeshCollection::createNodeMapping( MeshCollection& initialC for (int inew=0; inew<_topology->nbDomain(); inew++) { -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI //sending meshes for parallel computation if (isParallelMode() && _domain_selector->isMyDomain(inew) && !_domain_selector->isMyDomain(iold)) _domain_selector->sendMesh(*(getMesh(inew)), _domain_selector->getProcessorID(iold)); @@ -538,7 +553,7 @@ void MEDPARTITIONER::MeshCollection::castFaceMeshes(MeshCollection& initialColle } } -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI //send/receive stuff if (isParallelMode()) { @@ -561,9 +576,9 @@ void MEDPARTITIONER::MeshCollection::castFaceMeshes(MeshCollection& initialColle } if (!initialCollection._domain_selector->isMyDomain(iold) && _domain_selector->isMyDomain(inew)) _domain_selector->recvMesh(splitMeshes[inew][iold], _domain_selector->getProcessorID(iold)); - int nb=0; - if (splitMeshes[inew][iold]) - nb=splitMeshes[inew][iold]->getNumberOfCells(); + //int nb=0; + //if (splitMeshes[inew][iold]) + // nb=splitMeshes[inew][iold]->getNumberOfCells(); //std::cout << "proc " << MyGlobals::_Rank << " : castFaceMeshes recv "<getNumberOfCells()<decrRef(); @@ -599,6 +614,7 @@ void MEDPARTITIONER::MeshCollection::castFaceMeshes(MeshCollection& initialColle if (myMeshes.size()>0) { meshesCastTo[inew]=ParaMEDMEM::MEDCouplingUMesh::MergeUMeshes(myMeshes); + meshesCastTo[inew]->sortCellsInMEDFileFrmt()->decrRef(); } else { @@ -642,7 +658,7 @@ void MEDPARTITIONER::MeshCollection::castIntField(std::vectordecrRef(); } // send-recv operations -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI for (int inew=0; inew renumN2O = ids1->buildPermArrPerLevel(); + ids1->renumberInPlaceR( renumN2O->begin() ); + ids2->renumberInPlaceR( renumN2O->begin() ); + + if ( removeEqual ) + { + ids1 = ids1->buildUnique(); + ids2 = ids2->buildUnique(); + } + if ( delta != 0 ) + { + int * id = ids1->getPointer(); + for ( ; id < ids1->end(); ++id ) + ++(*id); + id = ids2->getPointer(); + for ( ; id < ids2->end(); ++id ) + ++(*id); + } + + // join + DataArrayInt* ids12 = DataArrayInt::Meld( ids1, ids2 ); // two components + ids12->rearrange( 1 ); // make one component + return ids12; + } + + //================================================================================ + /*! 
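The sortCorrespondences() helper introduced above interlaces two cell/node id arrays into (id1, id2) pairs ordered by ascending first id, optionally dropping duplicates and shifting ids by a delta (for 1-based numbering). The standalone sketch below reproduces the general idea with plain STL containers; it is only a reading aid, as the real helper works on ParaMEDMEM::DataArrayInt (buildPermArrPerLevel, buildUnique, Meld) and differs in detail.

#include <algorithm>
#include <utility>
#include <vector>

// Approximate, STL-only illustration of what sortCorrespondences() returns:
// an interlaced [id1,id2, id1,id2, ...] list sorted by the first id.
std::vector<int> sortCorrespondencesSketch(const std::vector<int>& ids1,
                                           const std::vector<int>& ids2,
                                           int delta, bool removeEqual = false)
{
  std::vector<std::pair<int,int> > pairs;
  for (std::size_t i = 0; i < ids1.size() && i < ids2.size(); ++i)
    pairs.push_back(std::make_pair(ids1[i] + delta, ids2[i] + delta));

  std::sort(pairs.begin(), pairs.end());                   // ascending order
  if (removeEqual)                                         // drop duplicate pairs
    pairs.erase(std::unique(pairs.begin(), pairs.end()), pairs.end());

  std::vector<int> interlaced;                             // one component, like rearrange(1)
  for (std::size_t i = 0; i < pairs.size(); ++i)
  {
    interlaced.push_back(pairs[i].first);
    interlaced.push_back(pairs[i].second);
  }
  return interlaced;
}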
+ * \brief Renumber ids according to mesh->sortCellsInMEDFileFrmt() + * \param [in,out] ids - cell ids to renumber + * \param [in] o2nRenumber - renumbering array in "Old to New" mode + */ + //================================================================================ + + void renumber( DataArrayInt* ids, const DataArrayInt* o2nRenumber ) + { + if ( !ids || !o2nRenumber ) + return; + int * id = ids->getPointer(); + const int * o2n = o2nRenumber->getConstPointer(); + for ( ; id < ids->end(); ++id ) + { + *id = o2n[ *id ]; + } + } +} + +//================================================================================ +/*! + * \brief Fill up ConnectZone's stored in _topology with nodal correspondences + * \param [in] nodeMapping - mapping between old nodes and new nodes + * (iolddomain,ioldnode)->(inewdomain,inewnode) + * \param [in] o2nRenumber - renumbering array returned by mesh->sortCellsInMEDFileFrmt() + * per a new domain + * \param [in] nbInitialDomains - nb of old domains + */ +//================================================================================ + +void MEDPARTITIONER::MeshCollection::buildConnectZones( const NodeMapping& nodeMapping, + const std::vector & o2nRenumber, + int nbInitialDomains) +{ + if ( !MyGlobals::_Create_Joints || _topology->nbDomain() < 2 ) + return; + + if ( MyGlobals::_World_Size > 1 ) + { + _topology->getCZ().clear(); + return; // not implemented for parallel mode + } + + // at construction, _topology creates cell correspondences basing on Graph information, + // and here we + // 1) add node correspondences, + // 2) split cell correspondences by cell geometry types + // 3) sort ids to be in ascending order + + const int nb_domains = _topology->nbDomain(); + + // ================================== + // 1) add node correspondences + // ================================== + + std::vector< std::vector< std::vector< int > > > nodeCorresp( nb_domains ); + for ( int idomain = 0; idomain < nb_domains; ++idomain ) + { + nodeCorresp[ idomain ].resize( nb_domains ); + } + + NodeMapping::const_iterator nmIt1, nmIt2 = nodeMapping.begin(); + for ( nmIt1 = nmIt2; nmIt1 != nodeMapping.end(); nmIt1 = nmIt2 ) + { + // look for an "old" node mapped into several "new" nodes in different domains + int nbSameOld = 0; + while ( ++nmIt2 != nodeMapping.end() && nmIt2->first == nmIt1->first ) + nbSameOld += ( nmIt2->second != nmIt1->second ); + + if ( nbSameOld > 0 ) + { + NodeMapping::const_iterator nmEnd = nmIt2; + for ( ; true; ++nmIt1 ) + { + nmIt2 = nmIt1; + if ( ++nmIt2 == nmEnd ) + break; + int dom1 = nmIt1->second.first; + int node1 = nmIt1->second.second; + for ( ; nmIt2 != nmEnd; ++nmIt2 ) + { + int dom2 = nmIt2->second.first; + int node2 = nmIt2->second.second; + if ( dom1 != dom2 ) + { + nodeCorresp[ dom1 ][ dom2 ].push_back( node1 ); + nodeCorresp[ dom1 ][ dom2 ].push_back( node2 ); + nodeCorresp[ dom2 ][ dom1 ].push_back( node2 ); + nodeCorresp[ dom2 ][ dom1 ].push_back( node1 ); + } + } + } + } + } + + // add nodeCorresp to czVec + + std::vector& czVec = _topology->getCZ(); + + for ( int idomain = 0; idomain < nb_domains; ++idomain ) + { + for ( int idomainNear = 0; idomainNear < nb_domains; ++idomainNear ) + { + std::vector< int > & corresp = nodeCorresp[ idomain ][ idomainNear ]; + if ( corresp.empty() ) + continue; + + MEDPARTITIONER::ConnectZone* cz = 0; + for ( size_t i = 0; i < czVec.size() && !cz; ++i ) + if ( czVec[i] && + czVec[i]->getLocalDomainNumber () == idomain && + czVec[i]->getDistantDomainNumber() == idomainNear ) + cz = czVec[i]; + + if 
( !cz ) + { + cz = new MEDPARTITIONER::ConnectZone(); + cz->setName( "Nodal Connect Zone defined by MEDPARTITIONER" ); + cz->setLocalDomainNumber ( idomain ); + cz->setDistantDomainNumber( idomainNear ); + czVec.push_back(cz); + } + + cz->setNodeCorresp( &corresp[0], corresp.size()/2 ); + } + } + + // ========================================================== + // 2) split cell correspondences by cell geometry types + // ========================================================== + + for ( size_t i = 0; i < czVec.size(); ++i ) + { + MEDPARTITIONER::ConnectZone* cz = czVec[i]; + if ( !cz || + cz->getEntityCorrespNumber( 0,0 ) == 0 || + cz->getLocalDomainNumber () > (int)_mesh.size() || + cz->getDistantDomainNumber() > (int)_mesh.size() ) + continue; + ParaMEDMEM::MEDCouplingUMesh* mesh1 = _mesh[ cz->getLocalDomainNumber () ]; + ParaMEDMEM::MEDCouplingUMesh* mesh2 = _mesh[ cz->getDistantDomainNumber() ]; + + // separate ids of two domains + const ParaMEDMEM::MEDCouplingSkyLineArray *corrArray = cz->getEntityCorresp( 0, 0 ); + const DataArrayInt* ids12 = corrArray->getValueArray(); + MEDCouplingAutoRefCountObjectPtr ids1, ids2, ids12Sorted; + ids1 = ids12->selectByTupleId2( 0, corrArray->getLength(), 2 ); + ids2 = ids12->selectByTupleId2( 1, corrArray->getLength(), 2 ); + + // renumber cells according to mesh->sortCellsInMEDFileFrmt() + renumber( ids1, o2nRenumber[ cz->getLocalDomainNumber() ]); + renumber( ids2, o2nRenumber[ cz->getDistantDomainNumber() ]); + + // check nb cell types + std::set types1, types2; + types1 = mesh1->getTypesOfPart( ids1->begin(), ids1->end() ); + types2 = mesh2->getTypesOfPart( ids2->begin(), ids2->end() ); + if ( types1.size() < 1 || types2.size() < 1 ) + continue; // parallel mode? + + MEDPARTITIONER::ConnectZone* cz21 = 0; // zone 2 -> 1 + for ( size_t j = 0; j < czVec.size() && !cz21; ++j ) + if ( czVec[j] && + czVec[j]->getLocalDomainNumber () == cz->getDistantDomainNumber() && + czVec[j]->getDistantDomainNumber() == cz->getLocalDomainNumber() ) + cz21 = czVec[j]; + + if ( types1.size() == 1 && types2.size() == 1 ) // split not needed, only sort + { + ids12Sorted = sortCorrespondences( ids1, ids2, /*delta=*/1 ); + cz->setEntityCorresp( *types1.begin(), *types2.begin(), + ids12Sorted->begin(), ids12Sorted->getNbOfElems() / 2 ); + + if ( cz21 )// set 2->1 correspondence + { + ids12Sorted = sortCorrespondences( ids2, ids1, /*delta=*/0 ); + cz21->setEntityCorresp( *types2.begin(), *types1.begin(), + ids12Sorted->begin(), ids12Sorted->getNbOfElems() / 2 ); + } + } + else // split and sort + { + typedef std::pair< std::vector< int >, std::vector< int > > T2Vecs; + T2Vecs idsByType[ INTERP_KERNEL::NORM_MAXTYPE ][ INTERP_KERNEL::NORM_MAXTYPE ]; + int t1, t2; + + const int nbIds = ids1->getNbOfElems(); + const int * p1 = ids1->begin(), * p2 = ids2->begin(); + for ( int i = 0; i < nbIds; ++i ) + { + t1 = mesh1->getTypeOfCell( p1[ i ]); + t2 = mesh2->getTypeOfCell( p2[ i ]); + T2Vecs & ids = idsByType[ t1 ][ t2 ]; + ids.first .push_back( p1[ i ]); + ids.second.push_back( p1[ i ]); + } + + const int maxType = int( INTERP_KERNEL::NORM_MAXTYPE ); + for ( t1 = 0; t1 < maxType; ++t1 ) + for ( t2 = 0; t2 < maxType; ++t2 ) + { + T2Vecs & ids = idsByType[ t1 ][ t2 ]; + if ( ids.first.empty() ) continue; + p1 = & ids.first[0]; + p2 = & ids.second[0]; + ids1->desallocate(); + ids1->pushBackValsSilent( p1, p1+ids.first.size() ); + ids2->desallocate(); + ids2->pushBackValsSilent( p2, p2+ids.first.size() ); + ids12Sorted = sortCorrespondences( ids1, ids2, /*delta=*/1 ); + + 
cz->setEntityCorresp( t1, t2, + ids12Sorted->begin(), ids12Sorted->getNbOfElems() / 2 ); + + if ( cz21 )// set 2->1 correspondence + { + ids12Sorted = sortCorrespondences( ids2, ids1, /*delta=*/0 ); + cz21->setEntityCorresp( t2, t1, + ids12Sorted->begin(), ids12Sorted->getNbOfElems() / 2 ); + break; + } + } + }// split and sort + + cz->setEntityCorresp( 0, 0, 0, 0 ); // erase ids computed by _topology + if ( cz21 ) + cz21->setEntityCorresp( 0, 0, 0, 0 ); + + } // loop on czVec + + + // ========================================== + // 3) sort node ids to be in ascending order + // ========================================== + + const bool removeEqual = ( nbInitialDomains > 1 ); + + for ( size_t i = 0; i < czVec.size(); ++i ) + { + MEDPARTITIONER::ConnectZone* cz = czVec[i]; + if ( !cz || cz->getNodeNumber() < 1 ) + continue; + if ( cz->getDistantDomainNumber() < cz->getLocalDomainNumber() ) + continue; // treat a pair of domains once + + MEDPARTITIONER::ConnectZone* cz21 = 0; // zone 2 -> 1 + for ( size_t j = 0; j < czVec.size() && !cz21; ++j ) + if ( czVec[j] && + czVec[j]->getLocalDomainNumber () == cz->getDistantDomainNumber() && + czVec[j]->getDistantDomainNumber() == cz->getLocalDomainNumber() ) + cz21 = czVec[j]; + + // separate ids of two domains + const ParaMEDMEM::MEDCouplingSkyLineArray *corrArray = cz->getNodeCorresp(); + const DataArrayInt *ids12 = corrArray->getValueArray(); + MEDCouplingAutoRefCountObjectPtr ids1, ids2, ids12Sorted; + ids1 = ids12->selectByTupleId2( 0, corrArray->getLength(), 2 ); + ids2 = ids12->selectByTupleId2( 1, corrArray->getLength(), 2 ); + + ids12Sorted = sortCorrespondences( ids1, ids2, /*delta=*/0, removeEqual ); + cz->setNodeCorresp( ids12Sorted->begin(), ids12Sorted->getNbOfElems() / 2 ); + + if ( cz21 )// set 2->1 correspondence + { + ids12Sorted = sortCorrespondences( ids2, ids1, /*delta=*/0, false ); + cz->setNodeCorresp( ids12Sorted->begin(), ids12Sorted->getNbOfElems() / 2 ); + } + } +} + //================================================================================ /*! 
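Step 2 of buildConnectZones() above takes the flat cell correspondence stored under entity couple (0,0) and redistributes it into one list per couple of cell geometric types before calling setEntityCorresp(). The STL-only sketch below shows that grouping idea; the cellType1/cellType2 lookup tables stand in for mesh->getTypeOfCell() and are assumptions of this illustration.

#include <map>
#include <utility>
#include <vector>

// Group an interlaced correspondence [c1,c2, c1,c2, ...] by the couple
// (geometric type of c1 in mesh 1, geometric type of c2 in mesh 2).
typedef std::pair<int,int> TypeCouple;

std::map<TypeCouple, std::vector<int> >
splitCorrespondenceByTypes(const std::vector<int>& pairs,
                           const std::vector<int>& cellType1, // type per cell id of mesh 1
                           const std::vector<int>& cellType2) // type per cell id of mesh 2
{
  std::map<TypeCouple, std::vector<int> > byType;
  for (std::size_t i = 0; i + 1 < pairs.size(); i += 2)
  {
    const int c1 = pairs[i], c2 = pairs[i + 1];
    std::vector<int>& ids = byType[TypeCouple(cellType1[c1], cellType2[c2])];
    ids.push_back(c1);                    // each type couple keeps its own
    ids.push_back(c2);                    // interlaced correspondence list
  }
  return byType;
}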
* \brief Find faces common with neighbor domains and put them in "JOINT_n_p_Faces" @@ -942,8 +1275,11 @@ void MEDPARTITIONER::MeshCollection::remapDoubleField(int inew, int iold, */ //================================================================================ -void MEDPARTITIONER::MeshCollection::buildConnectZones() +void MEDPARTITIONER::MeshCollection::buildBoundaryFaces() { + if (_topology->nbDomain() < 2 || !_subdomain_boundary_creates ) + return; + if ( getMeshDimension() < 2 ) return; @@ -979,7 +1315,7 @@ void MEDPARTITIONER::MeshCollection::buildConnectZones() bool mesh1Here = true, mesh2Here = true; if (isParallelMode()) { -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI mesh1Here = _domain_selector->isMyDomain(inew1); mesh2Here = _domain_selector->isMyDomain(inew2); if ( !mesh1Here && mesh2Here ) @@ -1035,7 +1371,7 @@ void MEDPARTITIONER::MeshCollection::buildConnectZones() if ( isParallelMode()) { -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI if ( mesh1Here && !mesh2Here ) { //send faces2 to domain of recvMesh @@ -1128,7 +1464,7 @@ void MEDPARTITIONER::MeshCollection::createJointGroup( const std::vector< int >& // remove faces from the familyID-the family if ( familyID != 0 && famIDs ) - for ( size_t i = 0; i < totalNbFaces; ++i ) + for ( int i = 0; i < totalNbFaces; ++i ) if ( famIDs[i] == familyID ) famIDs[i] = 0; @@ -1269,7 +1605,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(const std::string& filename, Para \n \ \n \ \n"; - std::vector meshNames=MEDLoader::GetMeshNames(myfile.c_str()); + std::vector meshNames=MEDLoader::GetMeshNames(myfile); xml.replace(xml.find("$fileName"),9,myfile); xml.replace(xml.find("$meshName"),9,meshNames[0]); xml.replace(xml.find("$meshName"),9,meshNames[0]); @@ -1285,7 +1621,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(const std::string& filename, Para f<1) MPI_Barrier(MPI_COMM_WORLD); //wait for creation of nameFileXml #endif @@ -1325,7 +1661,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(const std::string& filename, Para try { //check for all proc/file compatibility of _field_descriptions -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI _field_descriptions=AllgathervVectorOfString(MyGlobals::_Field_Descriptions); #else _field_descriptions=MyGlobals::_Field_Descriptions; @@ -1336,7 +1672,7 @@ MEDPARTITIONER::MeshCollection::MeshCollection(const std::string& filename, Para std::cerr << "proc " << MyGlobals::_Rank << " : INTERP_KERNEL_Exception : " << e.what() << std::endl; throw INTERP_KERNEL::Exception("Something wrong verifying coherency of files med ands fields"); } -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI try { //check for all proc/file compatibility of _family_info @@ -1423,7 +1759,7 @@ MEDPARTITIONER::MeshCollection::~MeshCollection() delete _driver; if (_topology!=0 && _owns_topology) delete _topology; -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI delete _joint_finder; #endif } @@ -1440,9 +1776,6 @@ MEDPARTITIONER::MeshCollection::~MeshCollection() */ void MEDPARTITIONER::MeshCollection::write(const std::string& filename) { - //building the connect zones necessary for writing joints - if (_topology->nbDomain()>1 && _subdomain_boundary_creates ) - buildConnectZones(); //suppresses link with driver so that it can be changed for writing delete _driver; _driver=0; @@ -1490,7 +1823,7 @@ int MEDPARTITIONER::MeshCollection::getMeshDimension() const int MEDPARTITIONER::MeshCollection::getNbOfLocalMeshes() const { int nb=0; - for (int i=0; i<_mesh.size(); i++) + for (size_t i=0; i<_mesh.size(); i++) { if (_mesh[i]) nb++; } @@ -1500,7 +1833,7 @@ int 
MEDPARTITIONER::MeshCollection::getNbOfLocalMeshes() const int MEDPARTITIONER::MeshCollection::getNbOfLocalCells() const { int nb=0; - for (int i=0; i<_mesh.size(); i++) + for (size_t i=0; i<_mesh.size(); i++) { if (_mesh[i]) nb=nb+_mesh[i]->getNumberOfCells(); } @@ -1510,7 +1843,7 @@ int MEDPARTITIONER::MeshCollection::getNbOfLocalCells() const int MEDPARTITIONER::MeshCollection::getNbOfLocalFaces() const { int nb=0; - for (int i=0; i<_face_mesh.size(); i++) + for (size_t i=0; i<_face_mesh.size(); i++) { if (_face_mesh[i]) nb=nb+_face_mesh[i]->getNumberOfCells(); } @@ -1539,7 +1872,11 @@ ParaMEDMEM::MEDCouplingUMesh* MEDPARTITIONER::MeshCollection::getFaceMesh(int id std::vector& MEDPARTITIONER::MeshCollection::getCZ() { - return _connect_zones; + if ( _topology ) + return _topology->getCZ(); + + static std::vector noCZ; + return noCZ; } MEDPARTITIONER::Topology* MEDPARTITIONER::MeshCollection::getTopology() const @@ -1547,23 +1884,56 @@ MEDPARTITIONER::Topology* MEDPARTITIONER::MeshCollection::getTopology() const return _topology; } -void MEDPARTITIONER::MeshCollection::setTopology(Topology* topo) +void MEDPARTITIONER::MeshCollection::setTopology(Topology* topo, bool takeOwneship) { if (_topology!=0) { throw INTERP_KERNEL::Exception("topology is already set"); } else - _topology = topo; + { + _topology = topo; + _owns_topology = takeOwneship; + } } -/*! Method creating the cell graph +/*! Method creating the cell graph in serial mode + * + * \param array returns the pointer to the structure that contains the graph + * \param edgeweight returns the pointer to the table that contains the edgeweights + * (only used if indivisible regions are required) + */ +void MEDPARTITIONER::MeshCollection::buildCellGraph(ParaMEDMEM::MEDCouplingSkyLineArray* & array, int *& edgeweights ) +{ + + using std::map; + using std::vector; + using std::make_pair; + using std::pair; + + if (_topology->nbDomain()>1) throw INTERP_KERNEL::Exception("buildCellGraph should be used for one domain only"); + const ParaMEDMEM::MEDCouplingUMesh* mesh=_mesh[0]; + if (MyGlobals::_Verbose>50) + std::cout<<"getting nodal connectivity"<isMyDomain(0)) + { + vector value; + vector index(1,0); + + array=new ParaMEDMEM::MEDCouplingSkyLineArray(index,value); + return; + } + array=mesh->generateGraph(); +} +/*! 
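The serial buildCellGraph() above ends by returning mesh->generateGraph(), and buildParallelCellGraph() later builds the same structure by hand from an index/value pair passed to the ParaMEDMEM::MEDCouplingSkyLineArray(index, value) constructor. The short sketch below spells out that CSR-style ("skyline") layout for a hypothetical 3-cell mesh, so the index/value vectors appearing in the following hunks are easier to read.

#include <vector>

int main()
{
  // Cell adjacency graph in skyline (CSR) form for three cells in a row:
  //   cell 0 -- cell 1 -- cell 2
  std::vector<int> index;   // size = nbCells + 1, offsets into 'value'
  std::vector<int> value;   // concatenated neighbour lists

  index.push_back(0);
  value.push_back(1);                              // neighbours of cell 0
  index.push_back((int)value.size());
  value.push_back(0); value.push_back(2);          // neighbours of cell 1
  index.push_back((int)value.size());
  value.push_back(1);                              // neighbours of cell 2
  index.push_back((int)value.size());

  // index = {0,1,3,4}, value = {1, 0,2, 1};
  // the neighbours of cell i are value[index[i]] .. value[index[i+1]-1],
  // which is what MEDCouplingSkyLineArray(index, value) stores.
  return 0;
}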
Method creating the cell graph in multidomain mode * * \param array returns the pointer to the structure that contains the graph * \param edgeweight returns the pointer to the table that contains the edgeweights * (only used if indivisible regions are required) */ -void MEDPARTITIONER::MeshCollection::buildCellGraph(MEDPARTITIONER::SkyLineArray* & array, int *& edgeweights ) +void MEDPARTITIONER::MeshCollection::buildParallelCellGraph(ParaMEDMEM::MEDCouplingSkyLineArray* & array, int *& edgeweights ) { using std::multimap; using std::vector; @@ -1576,7 +1946,7 @@ void MEDPARTITIONER::MeshCollection::buildCellGraph(MEDPARTITIONER::SkyLineArray std::vector > > commonDistantNodes; int nbdomain=_topology->nbDomain(); -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI if (isParallelMode()) { _joint_finder=new JointFinder(*this); @@ -1618,7 +1988,7 @@ void MEDPARTITIONER::MeshCollection::buildCellGraph(MEDPARTITIONER::SkyLineArray } revConn->decrRef(); index->decrRef(); -#ifdef HAVE_MPI2 +#ifdef HAVE_MPI for (int iother=0; iother::iterator it; @@ -1731,7 +2101,7 @@ void MEDPARTITIONER::MeshCollection::buildCellGraph(MEDPARTITIONER::SkyLineArray } } - array=new MEDPARTITIONER::SkyLineArray(index,value); + array=new ParaMEDMEM::MEDCouplingSkyLineArray(index,value); if (MyGlobals::_Verbose>100) { @@ -1760,40 +2130,44 @@ void MEDPARTITIONER::MeshCollection::buildCellGraph(MEDPARTITIONER::SkyLineArray * returns a topology based on the new graph */ MEDPARTITIONER::Topology* MEDPARTITIONER::MeshCollection::createPartition(int nbdomain, - Graph::splitter_type split, + Graph::splitter_type split, const std::string& options_string, int *user_edge_weights, int *user_vertices_weights) { if (MyGlobals::_Verbose>10) std::cout << "proc " << MyGlobals::_Rank << " : MeshCollection::createPartition : Building cell graph" << std::endl; - + if (nbdomain <1) throw INTERP_KERNEL::Exception("Number of subdomains must be > 0"); - MEDPARTITIONER::SkyLineArray* array=0; + ParaMEDMEM::MEDCouplingSkyLineArray* array=0; int* edgeweights=0; - buildCellGraph(array,edgeweights); - + + if (_topology->nbDomain()>1 || isParallelMode()) + buildParallelCellGraph(array,edgeweights); + else + buildCellGraph(array,edgeweights); + Graph* cellGraph = 0; switch (split) { case Graph::METIS: if ( isParallelMode() && MyGlobals::_World_Size > 1 ) - { + { #ifdef MED_ENABLE_PARMETIS - if (MyGlobals::_Verbose>10) - std::cout << "ParMETISGraph" << std::endl; - cellGraph=new ParMETISGraph(array,edgeweights); + if (MyGlobals::_Verbose>10) + std::cout << "ParMETISGraph" << std::endl; + cellGraph=new ParMETISGraph(array,edgeweights); #endif - } + } if ( !cellGraph ) - { + { #ifdef MED_ENABLE_METIS - if (MyGlobals::_Verbose>10) - std::cout << "METISGraph" << std::endl; - cellGraph=new METISGraph(array,edgeweights); + if (MyGlobals::_Verbose>10) + std::cout << "METISGraph" << std::endl; + cellGraph=new METISGraph(array,edgeweights); #endif - } + } if ( !cellGraph ) throw INTERP_KERNEL::Exception("MeshCollection::createPartition : PARMETIS/METIS is not available. 
Check your products, please."); break; @@ -1840,10 +2214,14 @@ MEDPARTITIONER::Topology* MEDPARTITIONER::MeshCollection::createPartition(int nb */ MEDPARTITIONER::Topology* MEDPARTITIONER::MeshCollection::createPartition(const int* partition) { - MEDPARTITIONER::SkyLineArray* array=0; + ParaMEDMEM::MEDCouplingSkyLineArray* array=0; int* edgeweights=0; - buildCellGraph(array,edgeweights); + if ( _topology->nbDomain()>1) + buildParallelCellGraph(array,edgeweights); + else + buildCellGraph(array,edgeweights); + Graph* cellGraph; std::set domains; for (int i=0; i<_topology->nbCells(); i++) @@ -1868,13 +2246,13 @@ void MEDPARTITIONER::MeshCollection::setDomainNames(const std::string& name) std::ostringstream oss; oss<isMyDomain(i)) - _mesh[i]->setName(oss.str().c_str()); + _mesh[i]->setName(oss.str()); } } ParaMEDMEM::DataArrayDouble *MEDPARTITIONER::MeshCollection::getField(std::string descriptionField, int iold) //getField look for and read it if not done, and assume decrRef() in ~MeshCollection; -//something like MEDCouplingFieldDouble *f2=MEDLoader::ReadFieldCell(name.c_str(),f1->getMesh()->getName(),0,f1->getName(),0,1); +//something like MEDCouplingFieldDouble *f2=MEDLoader::ReadFieldCell(name,f1->getMesh()->getName(),0,f1->getName(),0,1); { int rank=MyGlobals::_Rank; std::string tag="ioldFieldDouble="+IntToStr(iold); @@ -1897,7 +2275,7 @@ ParaMEDMEM::DataArrayDouble *MEDPARTITIONER::MeshCollection::getField(std::strin meshName=MyGlobals::_Mesh_Names[iold]; ParaMEDMEM::MEDCouplingFieldDouble* f2=MEDLoader::ReadField((ParaMEDMEM::TypeOfField) typeField, - fileName.c_str(), meshName.c_str(), 0, fieldName.c_str(), DT, IT); + fileName, meshName, 0, fieldName, DT, IT); ParaMEDMEM::DataArrayDouble* res=f2->getArray(); //to know names of components @@ -1917,6 +2295,10 @@ void MEDPARTITIONER::MeshCollection::prepareFieldDescriptions() //filter _field_descriptions to be in all procs compliant and equal { int nbfiles=MyGlobals::_File_Names.size(); //nb domains + if (nbfiles==0) + { + nbfiles=_topology->nbDomain(); + } std::vector r2; //from allgatherv then vector(procs) of serialised vector(fields) of vector(description) data for (int i=0; i<(int)_field_descriptions.size(); i++)
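For orientation, the reworked createPartition() above now dispatches to the serial buildCellGraph() or to buildParallelCellGraph() before handing the graph to ParMETIS/METIS. Below is a rough end-to-end usage sketch of the MeshCollection API touched by this patch; the ParaDomainSelector setup, the constructor that rebuilds a collection on the new Topology, and createPartition's trailing arguments are assumptions of the illustration, not taken verbatim from this file.

#include <string>

#include "MEDPARTITIONER_Graph.hxx"
#include "MEDPARTITIONER_MeshCollection.hxx"
#include "MEDPARTITIONER_ParaDomainSelector.hxx"
#include "MEDPARTITIONER_Topology.hxx"

// Hypothetical driver showing how the pieces of this file fit together.
void partitionExample(const std::string& inputMed, const std::string& outputMed)
{
  MEDPARTITIONER::ParaDomainSelector selector;           // assumed default-constructible
  MEDPARTITIONER::MeshCollection source(inputMed, selector);

  // Ask for 4 domains; a single-domain serial run goes through
  // buildCellGraph(), otherwise buildParallelCellGraph() is used.
  MEDPARTITIONER::Topology* topo =
    source.createPartition(4, MEDPARTITIONER::Graph::METIS,
                           /*options=*/"", /*edge weights=*/0, /*vertex weights=*/0);

  {
    MEDPARTITIONER::MeshCollection target(source, topo); // assumed constructor form
    target.setDomainNames("domain");
    target.write(outputMed);                             // write the partitioned collection
  } // target released before the topology it references

  delete topo;
}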