* one component.
* \return double - the maximal value among all values of \a this array.
* \throw If \a this is not allocated.
+ * \throw If \a this is empty.
* \sa getMaxAbsValueInArray, getMinValueInArray
*/
template<class T>
T DataArrayTemplate<T>::getMaxValueInArray() const
{
checkAllocated();
+ if( empty() )
+ THROW_IK_EXCEPTION("getMaxValueInArray : this is empty !");
+ // guard above is required : std::max_element on an empty range returns end(), which must not be dereferenced
const T *loc(std::max_element(begin(),end()));
return *loc;
}
* The caller is to delete this array using decrRef() as it is no more needed.
* \throw If \a this is not allocated.
* \throw If \a this->getNumberOfComponents() != 1.
- * \throw If \a this->getNumberOfTuples() < 2.
+ * \throw If \a this->getNumberOfTuples() < 1.
*
* \b Example: <br>
* - this contains [1,3,6,7,7,9,15]
if(this->getNumberOfComponents()!=1)
throw INTERP_KERNEL::Exception("DataArrayInt::deltaShiftIndex : only single component allowed !");
std::size_t nbOfElements=this->getNumberOfTuples();
- if(nbOfElements<2)
+ if(nbOfElements<1)
throw INTERP_KERNEL::Exception("DataArrayInt::deltaShiftIndex : 2 tuples at least must be present in 'this' !");
const T *ptr=this->getConstPointer();
DataArrayType *ret=DataArrayType::New();
mcIdType returnedSpaceSz( 0 );// store size of reducted returned size
//
MCAuto<DataArrayIdType> sizeOfPacks( commonEntitiesIndex->deltaShiftIndex() );
- mcIdType maxSizeOfPacks = sizeOfPacks->getMaxValueInArray();// store the max size of common entities
- T newValueInThis = this->getMaxValueInArray() + 1;
//
MCAuto<DataArrayIdType> o2n( DataArrayIdType::ConvertIndexArrayToO2N(initialSpaceSz,commonEntities->begin(),commonEntitiesIndex->begin(),commonEntitiesIndex->end(),returnedSpaceSz) );
MCAuto< DataArrayType > ret( DataArrayDiscrete<T>::New() );
//
partitionsToBeModified = DataArrayType::New(); partitionsToBeModified->alloc(0,1);
partitionsToBeModifiedIndex = DataArrayIdType::New(); partitionsToBeModifiedIndex->alloc(1,1); partitionsToBeModifiedIndex->setIJSilent(0,0,0);
- const T *ptOfThisData( this->begin() );
- const mcIdType *ceBg( commonEntities->begin() ), *ceiBg( commonEntitiesIndex->begin() );
- for(mcIdType szOfPack = 2 ; szOfPack <= maxSizeOfPacks ; ++szOfPack)
- {
- MCAuto<DataArrayIdType> idsInThisWithSamePackSz = findIdsEqual( FromIdType<T>( szOfPack ) );
- std::set< PartitionCfg<T> > partitionCfgHolder;
- for( const mcIdType *idsInThisWithSamePackSzIt = idsInThisWithSamePackSz->begin() ; idsInThisWithSamePackSzIt != idsInThisWithSamePackSz->end() ; ++idsInThisWithSamePackSzIt )
- {
- PartitionCfg<T> partitionCfg;
- std::transform(ceBg + ceiBg[*idsInThisWithSamePackSzIt],ceBg + ceiBg[*idsInThisWithSamePackSzIt + 1], std::inserter(partitionCfg,partitionCfg.end()),[ptOfThisData](mcIdType elt) { return ptOfThisData[elt]; });
- auto existCfg = partitionCfgHolder.find( partitionCfg );
- if( existCfg != partitionCfgHolder.end() )
- {//partition already exist by a previous pack -> reuse it !
- T newPartitionID = existCfg->getID();
- retPt[ o2nPt[ ceBg [ ceiBg[*idsInThisWithSamePackSzIt] ] ] ] = newPartitionID;// hypothesis that o2n is so that all o2n[ceBg + ceiBg[*idsInThisWithSamePackSzIt],ceBg + ceiBg[*idsInThisWithSamePackSzIt + 1]) points to the same point in new renumbering
- }
- else
- {//partition does not exist yet -> create it !
- partitionCfg.setID( newValueInThis++ );
- partitionCfgHolder.insert( partitionCfg );
- retPt[ o2nPt[ ceBg [ ceiBg[*idsInThisWithSamePackSzIt] ] ] ] = partitionCfg.getID();
- partitionsToBeModified->pushBackSilent( partitionCfg.getID() );
- partitionsToBeModified->pushBackValsSilent(partitionCfg.begin(),partitionCfg.end());
- partitionsToBeModifiedIndex->pushBackSilent( partitionsToBeModifiedIndex->back() + szOfPack + 1 );
+ if( !sizeOfPacks->empty() )// if empty -> no fusion -> ret is already ready at this point -> nothing to do.
+ {// ready to work
+ // getMaxValueInArray now throws on empty arrays (EDF30179), hence the guard above
+ mcIdType maxSizeOfPacks = sizeOfPacks->getMaxValueInArray();// store the max size of common entities
+ T newValueInThis = this->getMaxValueInArray() + 1;
+ const T *ptOfThisData( this->begin() );
+ const mcIdType *ceBg( commonEntities->begin() ), *ceiBg( commonEntitiesIndex->begin() );
+ // packs of size 1 are not fusions, so scan starts at szOfPack == 2
+ for(mcIdType szOfPack = 2 ; szOfPack <= maxSizeOfPacks ; ++szOfPack)
+ {
+ // NOTE(review): findIdsEqual is now explicitly applied on sizeOfPacks (the removed code resolved it on *this)
+ MCAuto<DataArrayIdType> idsInThisWithSamePackSz = sizeOfPacks->findIdsEqual( FromIdType<T>( szOfPack ) );
+ std::set< PartitionCfg<T> > partitionCfgHolder;
+ for( const mcIdType *idsInThisWithSamePackSzIt = idsInThisWithSamePackSz->begin() ; idsInThisWithSamePackSzIt != idsInThisWithSamePackSz->end() ; ++idsInThisWithSamePackSzIt )
+ {
+ PartitionCfg<T> partitionCfg;
+ std::transform(ceBg + ceiBg[*idsInThisWithSamePackSzIt],ceBg + ceiBg[*idsInThisWithSamePackSzIt + 1], std::inserter(partitionCfg,partitionCfg.end()),[ptOfThisData](mcIdType elt) { return ptOfThisData[elt]; });
+ auto existCfg = partitionCfgHolder.find( partitionCfg );
+ if( existCfg != partitionCfgHolder.end() )
+ {//partition already exist by a previous pack -> reuse it !
+ T newPartitionID = existCfg->getID();
+ retPt[ o2nPt[ ceBg [ ceiBg[*idsInThisWithSamePackSzIt] ] ] ] = newPartitionID;// hypothesis that o2n is so that all o2n[ceBg + ceiBg[*idsInThisWithSamePackSzIt],ceBg + ceiBg[*idsInThisWithSamePackSzIt + 1]) points to the same point in new renumbering
+ }
+ else
+ {//partition does not exist yet -> create it !
+ partitionCfg.setID( newValueInThis++ );
+ partitionCfgHolder.insert( partitionCfg );
+ retPt[ o2nPt[ ceBg [ ceiBg[*idsInThisWithSamePackSzIt] ] ] ] = partitionCfg.getID();
+ partitionsToBeModified->pushBackSilent( partitionCfg.getID() );
+ partitionsToBeModified->pushBackValsSilent(partitionCfg.begin(),partitionCfg.end());
+ // NOTE(review): partitionCfg.size() replaces the old szOfPack here — duplicated ids inside a pack
+ // presumably collapse in partitionCfg (set-like insertion via std::inserter), so size() <= szOfPack; TODO confirm
+ partitionsToBeModifiedIndex->pushBackSilent( partitionsToBeModifiedIndex->back() + partitionCfg.size() + 1 );
+ }
}
}
}
PyTuple_SetItem(pyRet,1,SWIG_NewPointerObj(SWIG_as_voidptr(ret1),SWIGTITraits<INT>::TI, SWIG_POINTER_OWN | 0 ));
return pyRet;
}
+
+ // Python binding of forThisAsPartitionBuildReduction : returns a 3-tuple
+ // (reduced array, packed description of partitions to be modified, index array of that pack).
+ PyObject *forThisAsPartitionBuildReduction(DataArrayIdType *commonEntities, DataArrayIdType *commonEntitiesIndex) const
+ {
+ // NOTE(review): TakeRef is assumed to share ownership of the borrowed Python-side arrays
+ // (no steal of the caller's reference) — confirm against MCAuto semantics.
+ MCAuto<DataArrayIdType> ceCpp( MCAuto<DataArrayIdType>::TakeRef(commonEntities) );
+ MCAuto<DataArrayIdType> ceiCpp( MCAuto<DataArrayIdType>::TakeRef(commonEntitiesIndex) );
+ MCAuto<ARRAY> ret1;
+ MCAuto<DataArrayIdType> ret2;
+ MCAuto<ARRAY> ret0 = self->forThisAsPartitionBuildReduction(ceCpp,ceiCpp,ret1,ret2);
+ PyObject *pyRet( PyTuple_New(3) );
+ // retn() releases ownership from the MCAuto holders ; SWIG_POINTER_OWN hands it to Python,
+ // and PyTuple_SetItem steals the wrapper reference — no manual decRef needed.
+ PyTuple_SetItem(pyRet,0,SWIG_NewPointerObj(SWIG_as_voidptr(ret0.retn()),SWIGTITraits<INT>::TI, SWIG_POINTER_OWN | 0 ));
+ PyTuple_SetItem(pyRet,1,SWIG_NewPointerObj(SWIG_as_voidptr(ret1.retn()),SWIGTITraits<INT>::TI, SWIG_POINTER_OWN | 0 ));
+ PyTuple_SetItem(pyRet,2,SWIG_NewPointerObj(SWIG_as_voidptr(ret2.retn()),SWIGTITraits<mcIdType>::TI, SWIG_POINTER_OWN | 0 ));
+ return pyRet;
+ }
PyObject *isRange() const
{
toTest = c-a
self.assertTrue( toTest < 10*ref )
-
-
+ def testFuseOfFamilyField0(self):
+ """
+ EDF30179 : Core algo for family field linked to fusion of entities
+ """
+ d = DataArrayInt([2,2,2,2,2,3,3,3,3,1,1,1,1,1,1]) # 5 x 2 , 4 x 3, 6 x 1
+
+ # c holds packs of entity ids to be fused ; ci is the index array delimiting the packs (here : no pack at all)
+ c = DataArrayInt([]) ; ci = DataArrayInt([0])
+ #### Case 0 : simplest
+ assert( ci.deltaShiftIndex().empty() ) # EDF30179 : extension of deltaShiftIndex to single elt
+ a,b,f = d.forThisAsPartitionBuildReduction(c,ci)
+ assert( a.isEqual( d ) ) # no fusion -> d returned unchanged
+ assert( b.empty() )
+ assert( f.isEqual(DataArrayInt([0])) )
+ #### Case 1 : single fusion
+ # fuse entities 3 and 6 (resp. family ids 2 and 3) -> new family id 4 ( = max(d)+1 )
+ c = DataArrayInt([3,6]) ; ci = DataArrayInt([0,2])
+ a,b,f = d.forThisAsPartitionBuildReduction(c,ci)
+ assert( a.isEqual( DataArrayInt([2,2,2,4,2,3,3,3,1,1,1,1,1,1]) ) )
+ assert( b.isEqual( DataArrayInt([4,2,3]) ) )
+ assert( f.isEqual( DataArrayInt([0,3]) ) )
+ #### Case 2 : single fusion - same partition id
+ # entities 6 and 7 share the same family id (3) -> new id 4 described by the single id 3 in b
+ c = DataArrayInt([6,7]) ; ci = DataArrayInt([0,2])
+ a,b,f = d.forThisAsPartitionBuildReduction(c,ci)
+ assert( a.isEqual( DataArrayInt([2,2,2,2,2,3,4,3,1,1,1,1,1,1]) ) )
+ assert( b.isEqual( DataArrayInt([4,3]) ) )
+ assert( f.isEqual( DataArrayInt([0,2]) ) )
+ #### Case 3 : multi fusion single tuple
+ c = DataArrayInt([2,7,3,6]) ; ci = DataArrayInt([0,2,4]) # elts (2,7) and (3,6) to merge. These 2 couples refers to partitionIDs (2,3)
+ a,b,f = d.forThisAsPartitionBuildReduction(c,ci)
+ # both packs map to the same id configuration {2,3} -> a single new id 4 is created and reused
+ assert( a.isEqual( DataArrayInt([2,2,4,4,2,3,3,1,1,1,1,1,1]) ) )
+ assert( b.isEqual( DataArrayInt([4,2,3]) ) ) # Fuse element can be located with ID 4
+ assert( f.isEqual( DataArrayInt([0,3]) ) )
+
+ #### Case 4 : multi fusion
+ # two packs with distinct id configurations -> two new ids : 4 = {2,3} and 5 = {1,2}
+ c = DataArrayInt([2,7,3,10]) ; ci = DataArrayInt([0,2,4])
+ a,b,f = d.forThisAsPartitionBuildReduction(c,ci)
+ assert( a.isEqual( DataArrayInt([2,2,4,5,2,3,3,3,1,1,1,1,1]) ) )
+ assert( b.isEqual( DataArrayInt([4,2,3,5,1,2]) ) )
+ assert( f.isEqual( DataArrayInt([0,3,6]) ) )
if __name__ == '__main__':
unittest.main()