+ INFOS( "SMESH_Gen_i::Load" );
+
+ if ( myCurrentStudy->_is_nil() ||
+ theComponent->GetStudy()->StudyId() != myCurrentStudy->StudyId() )
+ SetCurrentStudy( theComponent->GetStudy() );
+
+ /* if( !theComponent->_is_nil() )
+ {
+ //SALOMEDS::Study_var aStudy = SALOMEDS::Study::_narrow( theComponent->GetStudy() );
+ if( !myCurrentStudy->FindComponent( "GEOM" )->_is_nil() )
+ loadGeomData( myCurrentStudy->FindComponent( "GEOM" ) );
+ }*/
+
+ StudyContext* myStudyContext = GetCurrentStudyContext();
+
+ // Get temporary files location
+ TCollection_AsciiString tmpDir =
+ isMultiFile ? TCollection_AsciiString( ( char* )theURL ) : ( char* )SALOMEDS_Tool::GetTmpDir().c_str();
+
+ INFOS( "THE URL++++++++++++++" )
+ INFOS( theURL );
+ INFOS( "THE TMP PATH+++++++++" );
+ INFOS( tmpDir );
+
+ // Convert the stream into sequence of files to process
+ SALOMEDS::ListOfFileNames_var aFileSeq = SALOMEDS_Tool::PutStreamToFiles( theStream,
+ tmpDir.ToCString(),
+ isMultiFile );
+ TCollection_AsciiString aStudyName( "" );
+ if ( isMultiFile )
+ aStudyName = ( (char*)SALOMEDS_Tool::GetNameFromPath( myCurrentStudy->URL() ).c_str() );
+
+ // Set names of temporary files
+ TCollection_AsciiString filename = tmpDir + aStudyName + TCollection_AsciiString( "_SMESH.hdf" );
+ TCollection_AsciiString meshfile = tmpDir + aStudyName + TCollection_AsciiString( "_SMESH_Mesh.med" );
+
+ int size;
+ HDFfile* aFile;
+ HDFdataset* aDataset;
+ HDFgroup* aTopGroup;
+ HDFgroup* aGroup;
+ HDFgroup* aSubGroup;
+ HDFgroup* aSubSubGroup;
+
+ // Read data
+ // ---> open HDF file
+ aFile = new HDFfile( (char*) filename.ToCString() );
+ try {
+ aFile->OpenOnDisk( HDF_RDONLY );
+ }
+ catch ( HDFexception ) {
+ INFOS( "Load(): " << filename << " not found!" );
+ return false;
+ }
+
+ DriverMED_R_SMESHDS_Mesh myReader;
+ myReader.SetFile( meshfile.ToCString() );
+
+ // For PAL13473 ("Repetitive mesh") implementation.
+ // New dependencies between SMESH objects are established:
+ // now hypotheses can refer to meshes, shapes and other hypotheses.
+ // To keep data consistent, the following order of data restoration
+ // is imposed:
+ // 1. Create hypotheses
+ // 2. Create all meshes
+ // 3. Load hypotheses' data
+ // 4. All the rest
+
+ list< pair< SMESH_Hypothesis_i*, string > > hypDataList;
+ list< pair< SMESH_Mesh_i*, HDFgroup* > > meshGroupList;
+
+ // get total number of top-level groups
+ int aNbGroups = aFile->nInternalObjects();
+ if ( aNbGroups > 0 ) {
+ // --> in first turn we should read&create hypotheses
+ if ( aFile->ExistInternalObject( "Hypotheses" ) ) {
+ // open hypotheses root HDF group
+ aTopGroup = new HDFgroup( "Hypotheses", aFile );
+ aTopGroup->OpenOnDisk();
+
+ // get number of hypotheses
+ int aNbObjects = aTopGroup->nInternalObjects();
+ for ( int j = 0; j < aNbObjects; j++ ) {
+ // try to identify hypothesis
+ char hypGrpName[ HDF_NAME_MAX_LEN+1 ];
+ aTopGroup->InternalObjectIndentify( j, hypGrpName );
+
+ if ( string( hypGrpName ).substr( 0, 10 ) == string( "Hypothesis" ) ) {
+ // open hypothesis group
+ aGroup = new HDFgroup( hypGrpName, aTopGroup );
+ aGroup->OpenOnDisk();
+
+ // --> get hypothesis id
+ int id = atoi( string( hypGrpName ).substr( 10 ).c_str() );
+ string hypname;
+ string libname;
+ string hypdata;
+
+ // get number of datasets
+ int aNbSubObjects = aGroup->nInternalObjects();
+ for ( int k = 0; k < aNbSubObjects; k++ ) {
+ // identify dataset
+ char name_of_subgroup[ HDF_NAME_MAX_LEN+1 ];
+ aGroup->InternalObjectIndentify( k, name_of_subgroup );
+ // --> get hypothesis name
+ if ( strcmp( name_of_subgroup, "Name" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* hypname_str = new char[ size ];
+ aDataset->ReadFromDisk( hypname_str );
+ hypname = string( hypname_str );
+ delete [] hypname_str;
+ aDataset->CloseOnDisk();
+ }
+ // --> get hypothesis plugin library name
+ if ( strcmp( name_of_subgroup, "LibName" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* libname_str = new char[ size ];
+ aDataset->ReadFromDisk( libname_str );
+ if(MYDEBUG) SCRUTE( libname_str );
+ libname = string( libname_str );
+ delete [] libname_str;
+ aDataset->CloseOnDisk();
+ }
+ // --> get hypothesis data
+ if ( strcmp( name_of_subgroup, "Data" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* hypdata_str = new char[ size ];
+ aDataset->ReadFromDisk( hypdata_str );
+ hypdata = string( hypdata_str );
+ delete [] hypdata_str;
+ aDataset->CloseOnDisk();
+ }
+ }
+ // close hypothesis HDF group
+ aGroup->CloseOnDisk();
+
+ // --> restore hypothesis from data
+ if ( id > 0 && !hypname.empty()/* && !hypdata.empty()*/ ) { // VSR : persistent data can be empty
+ if(MYDEBUG) MESSAGE("VSR - load hypothesis : id = " << id <<
+ ", name = " << hypname.c_str() << ", persistent string = " << hypdata.c_str());
+ SMESH::SMESH_Hypothesis_var myHyp;
+
+ try { // protect persistence mechanism against exceptions
+ myHyp = this->createHypothesis( hypname.c_str(), libname.c_str() );
+ }
+ catch (...) {
+ INFOS( "Exception during hypothesis creation" );
+ }
+
+ SMESH_Hypothesis_i* myImpl = dynamic_cast<SMESH_Hypothesis_i*>( GetServant( myHyp ).in() );
+ if ( myImpl ) {
+ // myImpl->LoadFrom( hypdata.c_str() );
+ hypDataList.push_back( make_pair( myImpl, hypdata ));
+ string iorString = GetORB()->object_to_string( myHyp );
+ int newId = myStudyContext->findId( iorString );
+ myStudyContext->mapOldToNew( id, newId );
+ }
+ else
+ if(MYDEBUG) MESSAGE( "VSR - SMESH_Gen::Load - can't get servant" );
+ }
+ }
+ }
+ // close hypotheses root HDF group
+ aTopGroup->CloseOnDisk();
+ aTopGroup = 0;
+ }
+
+ // --> then we should read&create algorithms
+ if ( aFile->ExistInternalObject( "Algorithms" ) ) {
+ // open algorithms root HDF group
+ aTopGroup = new HDFgroup( "Algorithms", aFile );
+ aTopGroup->OpenOnDisk();
+
+ // get number of algorithms
+ int aNbObjects = aTopGroup->nInternalObjects();
+ for ( int j = 0; j < aNbObjects; j++ ) {
+ // try to identify algorithm
+ char hypGrpName[ HDF_NAME_MAX_LEN+1 ];
+ aTopGroup->InternalObjectIndentify( j, hypGrpName );
+
+ if ( string( hypGrpName ).substr( 0, 9 ) == string( "Algorithm" ) ) {
+ // open algorithm group
+ aGroup = new HDFgroup( hypGrpName, aTopGroup );
+ aGroup->OpenOnDisk();
+
+ // --> get algorithm id
+ int id = atoi( string( hypGrpName ).substr( 9 ).c_str() );
+ string hypname;
+ string libname;
+ string hypdata;
+
+ // get number of datasets
+ int aNbSubObjects = aGroup->nInternalObjects();
+ for ( int k = 0; k < aNbSubObjects; k++ ) {
+ // identify dataset
+ char name_of_subgroup[ HDF_NAME_MAX_LEN+1 ];
+ aGroup->InternalObjectIndentify( k, name_of_subgroup );
+ // --> get algorithm name
+ if ( strcmp( name_of_subgroup, "Name" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* hypname_str = new char[ size ];
+ aDataset->ReadFromDisk( hypname_str );
+ hypname = string( hypname_str );
+ delete [] hypname_str;
+ aDataset->CloseOnDisk();
+ }
+ // --> get algorithm plugin library name
+ if ( strcmp( name_of_subgroup, "LibName" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* libname_str = new char[ size ];
+ aDataset->ReadFromDisk( libname_str );
+ if(MYDEBUG) SCRUTE( libname_str );
+ libname = string( libname_str );
+ delete [] libname_str;
+ aDataset->CloseOnDisk();
+ }
+ // --> get algorithm data
+ if ( strcmp( name_of_subgroup, "Data" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* hypdata_str = new char[ size ];
+ aDataset->ReadFromDisk( hypdata_str );
+ if(MYDEBUG) SCRUTE( hypdata_str );
+ hypdata = string( hypdata_str );
+ delete [] hypdata_str;
+ aDataset->CloseOnDisk();
+ }
+ }
+ // close algorithm HDF group
+ aGroup->CloseOnDisk();
+
+ // --> restore algorithm from data
+ if ( id > 0 && !hypname.empty()/* && !hypdata.empty()*/ ) { // VSR : persistent data can be empty
+ if(MYDEBUG) MESSAGE("VSR - load algo : id = " << id <<
+ ", name = " << hypname.c_str() << ", persistent string = " << hypdata.c_str());
+ SMESH::SMESH_Hypothesis_var myHyp;
+
+ try { // protect persistence mechanism against exceptions
+ myHyp = this->createHypothesis( hypname.c_str(), libname.c_str() );
+ }
+ catch (...) {
+ INFOS( "Exception during hypothesis creation" );
+ }
+
+ SMESH_Hypothesis_i* myImpl = dynamic_cast<SMESH_Hypothesis_i*>( GetServant( myHyp ).in() );
+ if ( myImpl ) {
+ //myImpl->LoadFrom( hypdata.c_str() );
+ hypDataList.push_back( make_pair( myImpl, hypdata ));
+ string iorString = GetORB()->object_to_string( myHyp );
+ int newId = myStudyContext->findId( iorString );
+ myStudyContext->mapOldToNew( id, newId );
+ }
+ else
+ if(MYDEBUG) MESSAGE( "VSR - SMESH_Gen::Load - can't get servant" );
+ }
+ }
+ }
+ // close algorithms root HDF group
+ aTopGroup->CloseOnDisk();
+ aTopGroup = 0;
+ }
+
+ // --> the rest groups should be meshes
+ for ( int i = 0; i < aNbGroups; i++ ) {
+ // identify next group
+ char meshName[ HDF_NAME_MAX_LEN+1 ];
+ aFile->InternalObjectIndentify( i, meshName );
+
+ if ( string( meshName ).substr( 0, 4 ) == string( "Mesh" ) ) {
+ // --> get mesh id
+ int id = atoi( string( meshName ).substr( 4 ).c_str() );
+ if ( id <= 0 )
+ continue;
+
+ // open mesh HDF group
+ aTopGroup = new HDFgroup( meshName, aFile );
+ aTopGroup->OpenOnDisk();
+
+ // get number of child HDF objects
+ int aNbObjects = aTopGroup->nInternalObjects();
+ if ( aNbObjects > 0 ) {
+ // create mesh
+ if(MYDEBUG) MESSAGE( "VSR - load mesh : id = " << id );
+ SMESH::SMESH_Mesh_var myNewMesh = this->createMesh();
+ SMESH_Mesh_i* myNewMeshImpl = dynamic_cast<SMESH_Mesh_i*>( GetServant( myNewMesh ).in() );
+ if ( !myNewMeshImpl )
+ continue;
+ meshGroupList.push_back( make_pair( myNewMeshImpl, aTopGroup ));
+
+ string iorString = GetORB()->object_to_string( myNewMesh );
+ int newId = myStudyContext->findId( iorString );
+ myStudyContext->mapOldToNew( id, newId );
+
+ // ouv : NPAL12872
+ // try to read and set auto color flag
+ char aMeshAutoColorName[ 30 ];
+ sprintf( aMeshAutoColorName, "AutoColorMesh %d", id);
+ if( aTopGroup->ExistInternalObject( aMeshAutoColorName ) )
+ {
+ aDataset = new HDFdataset( aMeshAutoColorName, aTopGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ int* anAutoColor = new int[ size ];
+ aDataset->ReadFromDisk( anAutoColor );
+ aDataset->CloseOnDisk();
+ myNewMeshImpl->GetImpl().SetAutoColor( (bool)anAutoColor[0] );
+ }
+
+ // try to read and set reference to shape
+ GEOM::GEOM_Object_var aShapeObject;
+ if ( aTopGroup->ExistInternalObject( "Ref on shape" ) ) {
+ // load mesh "Ref on shape" - it's an entry to SObject
+ aDataset = new HDFdataset( "Ref on shape", aTopGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* refFromFile = new char[ size ];
+ aDataset->ReadFromDisk( refFromFile );
+ aDataset->CloseOnDisk();
+ if ( strlen( refFromFile ) > 0 ) {
+ SALOMEDS::SObject_var shapeSO = myCurrentStudy->FindObjectID( refFromFile );
+
+ // Make sure GEOM data are loaded first
+ //loadGeomData( shapeSO->GetFatherComponent() );
+
+ CORBA::Object_var shapeObject = SObjectToObject( shapeSO );
+ if ( !CORBA::is_nil( shapeObject ) ) {
+ aShapeObject = GEOM::GEOM_Object::_narrow( shapeObject );
+ if ( !aShapeObject->_is_nil() )
+ myNewMeshImpl->SetShape( aShapeObject );
+ }
+ }
+ }
+
+ // issue 0020693. Restore _isModified flag
+ if( aTopGroup->ExistInternalObject( "_isModified" ) )
+ {
+ aDataset = new HDFdataset( "_isModified", aTopGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ int* isModified = new int[ size ];
+ aDataset->ReadFromDisk( isModified );
+ aDataset->CloseOnDisk();
+ myNewMeshImpl->GetImpl().SetIsModified( bool(*isModified));
+ }
+
+ // issue 20918. Restore Persistent Id of SMESHDS_Mesh
+ if( aTopGroup->ExistInternalObject( "meshPersistentId" ) )
+ {
+ aDataset = new HDFdataset( "meshPersistentId", aTopGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ int* meshPersistentId = new int[ size ];
+ aDataset->ReadFromDisk( meshPersistentId );
+ aDataset->CloseOnDisk();
+ myNewMeshImpl->GetImpl().GetMeshDS()->SetPersistentId( *meshPersistentId );
+ }
+ }
+ }
+ }
+
+ // As all object that can be referred by hypothesis are created,
+ // we can restore hypothesis data
+
+ list< pair< SMESH_Hypothesis_i*, string > >::iterator hyp_data;
+ for ( hyp_data = hypDataList.begin(); hyp_data != hypDataList.end(); ++hyp_data )
+ {
+ SMESH_Hypothesis_i* hyp = hyp_data->first;
+ string & data = hyp_data->second;
+ hyp->LoadFrom( data.c_str() );
+ }
+
+ // Restore the rest mesh data
+
+ list< pair< SMESH_Mesh_i*, HDFgroup* > >::iterator meshi_group;
+ for ( meshi_group = meshGroupList.begin(); meshi_group != meshGroupList.end(); ++meshi_group )
+ {
+ aTopGroup = meshi_group->second;
+ SMESH_Mesh_i* myNewMeshImpl = meshi_group->first;
+ ::SMESH_Mesh& myLocMesh = myNewMeshImpl->GetImpl();
+ SMESHDS_Mesh* mySMESHDSMesh = myLocMesh.GetMeshDS();
+
+ GEOM::GEOM_Object_var aShapeObject = myNewMeshImpl->GetShapeToMesh();
+ bool hasData = false;
+
+ // get mesh old id
+ string iorString = GetORB()->object_to_string( myNewMeshImpl->_this() );
+ int newId = myStudyContext->findId( iorString );
+ int id = myStudyContext->getOldId( newId );
+
+ // try to find mesh data dataset
+ if ( aTopGroup->ExistInternalObject( "Has data" ) ) {
+ // load mesh "has data" flag
+ aDataset = new HDFdataset( "Has data", aTopGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* strHasData = new char[ size ];
+ aDataset->ReadFromDisk( strHasData );
+ aDataset->CloseOnDisk();
+ if ( strcmp( strHasData, "1") == 0 ) {
+ // read mesh data from MED file
+ myReader.SetMesh( mySMESHDSMesh );
+ myReader.SetMeshId( id );
+ myReader.Perform();
+ hasData = true;
+ }
+ }
+
+ // Try to get applied ALGORITHMS (mesh is not cleared by algo addition because
+ // nodes and elements are not yet put into sub-meshes)
+ if ( aTopGroup->ExistInternalObject( "Applied Algorithms" ) ) {
+ aGroup = new HDFgroup( "Applied Algorithms", aTopGroup );
+ aGroup->OpenOnDisk();
+ // get number of applied algorithms
+ int aNbSubObjects = aGroup->nInternalObjects();
+ if(MYDEBUG) MESSAGE( "VSR - number of applied algos " << aNbSubObjects );
+ for ( int j = 0; j < aNbSubObjects; j++ ) {
+ char name_dataset[ HDF_NAME_MAX_LEN+1 ];
+ aGroup->InternalObjectIndentify( j, name_dataset );
+ // check if it is an algorithm
+ if ( string( name_dataset ).substr( 0, 4 ) == string( "Algo" ) ) {
+ aDataset = new HDFdataset( name_dataset, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* refFromFile = new char[ size ];
+ aDataset->ReadFromDisk( refFromFile );
+ aDataset->CloseOnDisk();
+
+ // san - it is impossible to recover applied algorithms using their entries within Load() method
+
+ //SALOMEDS::SObject_var hypSO = myCurrentStudy->FindObjectID( refFromFile );
+ //CORBA::Object_var hypObject = SObjectToObject( hypSO );
+ int id = atoi( refFromFile );
+ string anIOR = myStudyContext->getIORbyOldId( id );
+ if ( !anIOR.empty() ) {
+ CORBA::Object_var hypObject = GetORB()->string_to_object( anIOR.c_str() );
+ if ( !CORBA::is_nil( hypObject ) ) {
+ SMESH::SMESH_Hypothesis_var anHyp = SMESH::SMESH_Hypothesis::_narrow( hypObject );
+ if ( !anHyp->_is_nil() && (!aShapeObject->_is_nil()
+ || !myNewMeshImpl->HasShapeToMesh()) )
+ myNewMeshImpl->addHypothesis( aShapeObject, anHyp );
+ }
+ }
+ }
+ }
+ aGroup->CloseOnDisk();
+ }
+
+ // try to get applied hypotheses
+ if ( aTopGroup->ExistInternalObject( "Applied Hypotheses" ) ) {
+ aGroup = new HDFgroup( "Applied Hypotheses", aTopGroup );
+ aGroup->OpenOnDisk();
+ // get number of applied hypotheses
+ int aNbSubObjects = aGroup->nInternalObjects();
+ for ( int j = 0; j < aNbSubObjects; j++ ) {
+ char name_dataset[ HDF_NAME_MAX_LEN+1 ];
+ aGroup->InternalObjectIndentify( j, name_dataset );
+ // check if it is a hypothesis
+ if ( string( name_dataset ).substr( 0, 3 ) == string( "Hyp" ) ) {
+ aDataset = new HDFdataset( name_dataset, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* refFromFile = new char[ size ];
+ aDataset->ReadFromDisk( refFromFile );
+ aDataset->CloseOnDisk();
+
+ // san - it is impossible to recover applied hypotheses using their entries within Load() method
+
+ //SALOMEDS::SObject_var hypSO = myCurrentStudy->FindObjectID( refFromFile );
+ //CORBA::Object_var hypObject = SObjectToObject( hypSO );
+ int id = atoi( refFromFile );
+ string anIOR = myStudyContext->getIORbyOldId( id );
+ if ( !anIOR.empty() ) {
+ CORBA::Object_var hypObject = GetORB()->string_to_object( anIOR.c_str() );
+ if ( !CORBA::is_nil( hypObject ) ) {
+ SMESH::SMESH_Hypothesis_var anHyp = SMESH::SMESH_Hypothesis::_narrow( hypObject );
+ if ( !anHyp->_is_nil() && (!aShapeObject->_is_nil()
+ || !myNewMeshImpl->HasShapeToMesh()) )
+ myNewMeshImpl->addHypothesis( aShapeObject, anHyp );
+ }
+ }
+ }
+ }
+ aGroup->CloseOnDisk();
+ }
+
+ // --> try to find submeshes containers for each type of submesh
+ for ( int j = GetSubMeshOnVertexTag(); j <= GetSubMeshOnCompoundTag(); j++ ) {
+ char name_meshgroup[ 30 ];
+ if ( j == GetSubMeshOnVertexTag() )
+ strcpy( name_meshgroup, "SubMeshes On Vertex" );
+ else if ( j == GetSubMeshOnEdgeTag() )
+ strcpy( name_meshgroup, "SubMeshes On Edge" );
+ else if ( j == GetSubMeshOnWireTag() )
+ strcpy( name_meshgroup, "SubMeshes On Wire" );
+ else if ( j == GetSubMeshOnFaceTag() )
+ strcpy( name_meshgroup, "SubMeshes On Face" );
+ else if ( j == GetSubMeshOnShellTag() )
+ strcpy( name_meshgroup, "SubMeshes On Shell" );
+ else if ( j == GetSubMeshOnSolidTag() )
+ strcpy( name_meshgroup, "SubMeshes On Solid" );
+ else if ( j == GetSubMeshOnCompoundTag() )
+ strcpy( name_meshgroup, "SubMeshes On Compound" );
+
+ // try to get submeshes container HDF group
+ if ( aTopGroup->ExistInternalObject( name_meshgroup ) ) {
+ // open submeshes containers HDF group
+ aGroup = new HDFgroup( name_meshgroup, aTopGroup );
+ aGroup->OpenOnDisk();
+
+ // get number of submeshes
+ int aNbSubMeshes = aGroup->nInternalObjects();
+ for ( int k = 0; k < aNbSubMeshes; k++ ) {
+ // identify submesh
+ char name_submeshgroup[ HDF_NAME_MAX_LEN+1 ];
+ aGroup->InternalObjectIndentify( k, name_submeshgroup );
+ if ( string( name_submeshgroup ).substr( 0, 7 ) == string( "SubMesh" ) ) {
+ // --> get submesh id
+ int subid = atoi( string( name_submeshgroup ).substr( 7 ).c_str() );
+ if ( subid <= 0 )
+ continue;
+ // open submesh HDF group
+ aSubGroup = new HDFgroup( name_submeshgroup, aGroup );
+ aSubGroup->OpenOnDisk();
+
+ // try to read and set reference to subshape
+ GEOM::GEOM_Object_var aSubShapeObject;
+ SMESH::SMESH_subMesh_var aSubMesh;
+
+ if ( aSubGroup->ExistInternalObject( "Ref on shape" ) ) {
+ // load submesh "Ref on shape" - it's an entry to SObject
+ aDataset = new HDFdataset( "Ref on shape", aSubGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* refFromFile = new char[ size ];
+ aDataset->ReadFromDisk( refFromFile );
+ aDataset->CloseOnDisk();
+ if ( strlen( refFromFile ) > 0 ) {
+ SALOMEDS::SObject_var subShapeSO = myCurrentStudy->FindObjectID( refFromFile );
+ CORBA::Object_var subShapeObject = SObjectToObject( subShapeSO );
+ if ( !CORBA::is_nil( subShapeObject ) ) {
+ aSubShapeObject = GEOM::GEOM_Object::_narrow( subShapeObject );
+ if ( !aSubShapeObject->_is_nil() )
+ aSubMesh = SMESH::SMESH_subMesh::_duplicate
+ ( myNewMeshImpl->createSubMesh( aSubShapeObject ) );
+ if ( aSubMesh->_is_nil() )
+ continue;
+ string iorSubString = GetORB()->object_to_string( aSubMesh );
+ int newSubId = myStudyContext->findId( iorSubString );
+ myStudyContext->mapOldToNew( subid, newSubId );
+ }
+ }
+ }
+
+ if ( aSubMesh->_is_nil() )
+ continue;
+
+ // try to get applied algorithms
+ if ( aSubGroup->ExistInternalObject( "Applied Algorithms" ) ) {
+ // open "applied algorithms" HDF group
+ aSubSubGroup = new HDFgroup( "Applied Algorithms", aSubGroup );
+ aSubSubGroup->OpenOnDisk();
+ // get number of applied algorithms
+ int aNbSubObjects = aSubSubGroup->nInternalObjects();
+ for ( int l = 0; l < aNbSubObjects; l++ ) {
+ char name_dataset[ HDF_NAME_MAX_LEN+1 ];
+ aSubSubGroup->InternalObjectIndentify( l, name_dataset );
+ // check if it is an algorithm
+ if ( string( name_dataset ).substr( 0, 4 ) == string( "Algo" ) ) {
+ aDataset = new HDFdataset( name_dataset, aSubSubGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* refFromFile = new char[ size ];
+ aDataset->ReadFromDisk( refFromFile );
+ aDataset->CloseOnDisk();
+
+ int id = atoi( refFromFile );
+ string anIOR = myStudyContext->getIORbyOldId( id );
+ if ( !anIOR.empty() ) {
+ CORBA::Object_var hypObject = GetORB()->string_to_object( anIOR.c_str() );
+ if ( !CORBA::is_nil( hypObject ) ) {
+ SMESH::SMESH_Hypothesis_var anHyp = SMESH::SMESH_Hypothesis::_narrow( hypObject );
+ if ( !anHyp->_is_nil() && !aShapeObject->_is_nil() )
+ myNewMeshImpl->addHypothesis( aSubShapeObject, anHyp );
+ }
+ }
+ }
+ }
+ // close "applied algorithms" HDF group
+ aSubSubGroup->CloseOnDisk();
+ }
+
+ // try to get applied hypotheses
+ if ( aSubGroup->ExistInternalObject( "Applied Hypotheses" ) ) {
+ // open "applied hypotheses" HDF group
+ aSubSubGroup = new HDFgroup( "Applied Hypotheses", aSubGroup );
+ aSubSubGroup->OpenOnDisk();
+ // get number of applied hypotheses
+ int aNbSubObjects = aSubSubGroup->nInternalObjects();
+ for ( int l = 0; l < aNbSubObjects; l++ ) {
+ char name_dataset[ HDF_NAME_MAX_LEN+1 ];
+ aSubSubGroup->InternalObjectIndentify( l, name_dataset );
+ // check if it is a hypothesis
+ if ( string( name_dataset ).substr( 0, 3 ) == string( "Hyp" ) ) {
+ aDataset = new HDFdataset( name_dataset, aSubSubGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* refFromFile = new char[ size ];
+ aDataset->ReadFromDisk( refFromFile );
+ aDataset->CloseOnDisk();
+
+ int id = atoi( refFromFile );
+ string anIOR = myStudyContext->getIORbyOldId( id );
+ if ( !anIOR.empty() ) {
+ CORBA::Object_var hypObject = GetORB()->string_to_object( anIOR.c_str() );
+ if ( !CORBA::is_nil( hypObject ) ) {
+ SMESH::SMESH_Hypothesis_var anHyp = SMESH::SMESH_Hypothesis::_narrow( hypObject );
+ if ( !anHyp->_is_nil() && !aShapeObject->_is_nil() )
+ myNewMeshImpl->addHypothesis( aSubShapeObject, anHyp );
+ }
+ }
+ }
+ }
+ // close "applied hypotheses" HDF group
+ aSubSubGroup->CloseOnDisk();
+ }
+
+ // close submesh HDF group
+ aSubGroup->CloseOnDisk();
+ }
+ }
+ // close submeshes containers HDF group
+ aGroup->CloseOnDisk();
+ }
+ }
+
+ if(hasData) {
+
+ // Read sub-meshes
+ // ----------------
+ if(MYDEBUG) MESSAGE("Create all sub-meshes");
+ bool submeshesInFamilies = ( ! aTopGroup->ExistInternalObject( "Submeshes" ));
+ if ( submeshesInFamilies ) // from MED
+ {
+ // old way working before fix of PAL 12992
+ myReader.CreateAllSubMeshes();
+ }
+ else
+ {
+ // open a group
+ aGroup = new HDFgroup( "Submeshes", aTopGroup );
+ aGroup->OpenOnDisk();
+
+ int maxID = Max( mySMESHDSMesh->MaxSubMeshIndex(), mySMESHDSMesh->MaxShapeIndex() );
+ vector< SMESHDS_SubMesh * > subMeshes( maxID + 1, (SMESHDS_SubMesh*) 0 );
+ vector< TopAbs_ShapeEnum > smType ( maxID + 1, TopAbs_SHAPE );
+
+ PositionCreator aPositionCreator;
+
+ SMDS_NodeIteratorPtr nIt = mySMESHDSMesh->nodesIterator();
+ SMDS_ElemIteratorPtr eIt = mySMESHDSMesh->elementsIterator();
+ for ( int isNode = 0; isNode < 2; ++isNode )
+ {
+ string aDSName( isNode ? "Node Submeshes" : "Element Submeshes");
+ if ( aGroup->ExistInternalObject( (char*) aDSName.c_str() ))
+ {
+ aDataset = new HDFdataset( (char*) aDSName.c_str(), aGroup );
+ aDataset->OpenOnDisk();
+ // read submesh IDs for all elements sorted by ID
+ int nbElems = aDataset->GetSize();
+ int* smIDs = new int [ nbElems ];
+ aDataset->ReadFromDisk( smIDs );
+ aDataset->CloseOnDisk();
+
+ // get elements sorted by ID
+ TIDSortedElemSet elemSet;
+ if ( isNode )
+ while ( nIt->more() ) elemSet.insert( nIt->next() );
+ else
+ while ( eIt->more() ) elemSet.insert( eIt->next() );
+ //ASSERT( elemSet.size() == nbElems ); -- issue 20182
+ // -- Most probably a bad study was saved when there were
+ // not fixed bugs in SMDS_MeshInfo
+ if ( elemSet.size() < nbElems ) {
+#ifdef _DEBUG_
+ cout << "SMESH_Gen_i::Load(), warning: Node position data is invalid" << endl;
+#endif
+ nbElems = elemSet.size();
+ }
+ // add elements to submeshes
+ TIDSortedElemSet::iterator iE = elemSet.begin();
+ for ( int i = 0; i < nbElems; ++i, ++iE )
+ {
+ int smID = smIDs[ i ];
+ if ( smID == 0 ) continue;
+ const SMDS_MeshElement* elem = *iE;
+ if( smID > maxID ) {
+ // corresponding subshape no longer exists: maybe geom group has been edited
+ if ( myNewMeshImpl->HasShapeToMesh() )
+ mySMESHDSMesh->RemoveElement( elem );
+ continue;
+ }
+ // get or create submesh
+ SMESHDS_SubMesh* & sm = subMeshes[ smID ];
+ if ( ! sm ) {
+ sm = mySMESHDSMesh->NewSubMesh( smID );
+ smType[ smID ] = mySMESHDSMesh->IndexToShape( smID ).ShapeType();
+ }
+ // add
+ if ( isNode ) {
+ SMDS_PositionPtr pos = aPositionCreator.MakePosition( smType[ smID ]);
+ SMDS_MeshNode* node = const_cast<SMDS_MeshNode*>( static_cast<const SMDS_MeshNode*>( elem ));
+ node->SetPosition( pos );
+ sm->AddNode( node );
+ } else {
+ sm->AddElement( elem );
+ }
+ }
+ delete [] smIDs;
+ }
+ }
+ } // end reading submeshes
+
+ // Read node positions on sub-shapes (SMDS_Position)
+
+ if ( aTopGroup->ExistInternalObject( "Node Positions" ))
+ {
+ // There are 5 datasets to read:
+ // "Nodes on Edges" - ID of node on edge
+ // "Edge positions" - U parameter on node on edge
+ // "Nodes on Faces" - ID of node on face
+ // "Face U positions" - U parameter of node on face
+ // "Face V positions" - V parameter of node on face
+ const char* aEid_DSName = "Nodes on Edges";
+ const char* aEu_DSName = "Edge positions";
+ const char* aFu_DSName = "Face U positions";
+ //char* aFid_DSName = "Nodes on Faces";
+ //char* aFv_DSName = "Face V positions";
+
+ // data to retrieve
+ int nbEids = 0, nbFids = 0;
+ int *aEids = 0, *aFids = 0;
+ double *aEpos = 0, *aFupos = 0, *aFvpos = 0;
+
+ // open a group
+ aGroup = new HDFgroup( "Node Positions", aTopGroup );
+ aGroup->OpenOnDisk();
+
+ // loop on 5 data sets
+ int aNbObjects = aGroup->nInternalObjects();
+ for ( int i = 0; i < aNbObjects; i++ )
+ {
+ // identify dataset
+ char aDSName[ HDF_NAME_MAX_LEN+1 ];
+ aGroup->InternalObjectIndentify( i, aDSName );
+ // read data
+ aDataset = new HDFdataset( aDSName, aGroup );
+ aDataset->OpenOnDisk();
+ if ( aDataset->GetType() == HDF_FLOAT64 ) // Positions
+ {
+ double* pos = new double [ aDataset->GetSize() ];
+ aDataset->ReadFromDisk( pos );
+ // which one?
+ if ( strncmp( aDSName, aEu_DSName, strlen( aEu_DSName )) == 0 )
+ aEpos = pos;
+ else if ( strncmp( aDSName, aFu_DSName, strlen( aFu_DSName )) == 0 )
+ aFupos = pos;
+ else
+ aFvpos = pos;
+ }
+ else // NODE IDS
+ {
+ int aSize = aDataset->GetSize();
+
+ // for reading files, created from 18.07.2005 till 10.10.2005
+ if (aDataset->GetType() == HDF_STRING)
+ aSize /= sizeof(int);
+
+ int* ids = new int [aSize];
+ aDataset->ReadFromDisk( ids );
+ // on face or nodes?
+ if ( strncmp( aDSName, aEid_DSName, strlen( aEid_DSName )) == 0 ) {
+ aEids = ids;
+ nbEids = aSize;
+ }
+ else {
+ aFids = ids;
+ nbFids = aSize;
+ }
+ }
+ aDataset->CloseOnDisk();
+ } // loop on 5 datasets
+
+ // Set node positions on edges or faces
+ for ( int onFace = 0; onFace < 2; onFace++ )
+ {
+ int nbNodes = ( onFace ? nbFids : nbEids );
+ if ( nbNodes == 0 ) continue;
+ int* aNodeIDs = ( onFace ? aFids : aEids );
+ double* aUPos = ( onFace ? aFupos : aEpos );
+ double* aVPos = ( onFace ? aFvpos : 0 );
+ // loop on node IDs
+ for ( int iNode = 0; iNode < nbNodes; iNode++ )
+ {
+ const SMDS_MeshNode* node = mySMESHDSMesh->FindNode( aNodeIDs[ iNode ]);
+ if ( !node ) continue; // maybe removed while Loading() if geometry changed
+ SMDS_PositionPtr aPos = node->GetPosition();
+ ASSERT( aPos );
+ if ( onFace ) {
+ // ASSERT( aPos->GetTypeOfPosition() == SMDS_TOP_FACE );-- issue 20182
+ // -- Most probably a bad study was saved when there were
+ // not fixed bugs in SMDS_MeshInfo
+ if ( aPos->GetTypeOfPosition() == SMDS_TOP_FACE ) {
+ SMDS_FacePosition* fPos = const_cast<SMDS_FacePosition*>
+ ( static_cast<const SMDS_FacePosition*>( aPos ));
+ fPos->SetUParameter( aUPos[ iNode ]);
+ fPos->SetVParameter( aVPos[ iNode ]);
+ }
+ }
+ else {
+ // ASSERT( aPos->GetTypeOfPosition() == SMDS_TOP_EDGE );-- issue 20182
+ if ( aPos->GetTypeOfPosition() == SMDS_TOP_EDGE ) {
+ SMDS_EdgePosition* fPos = const_cast<SMDS_EdgePosition*>
+ ( static_cast<const SMDS_EdgePosition*>( aPos ));
+ fPos->SetUParameter( aUPos[ iNode ]);
+ }
+ }
+ }
+ }
+ if ( aEids ) delete [] aEids;
+ if ( aFids ) delete [] aFids;
+ if ( aEpos ) delete [] aEpos;
+ if ( aFupos ) delete [] aFupos;
+ if ( aFvpos ) delete [] aFvpos;
+
+ aGroup->CloseOnDisk();
+
+ } // if ( aTopGroup->ExistInternalObject( "Node Positions" ) )
+ } // if ( hasData )
+
+ // try to get groups
+ for ( int ii = GetNodeGroupsTag(); ii <= Get0DElementsGroupsTag(); ii++ ) {
+ char name_group[ 30 ];
+ if ( ii == GetNodeGroupsTag() )
+ strcpy( name_group, "Groups of Nodes" );
+ else if ( ii == GetEdgeGroupsTag() )
+ strcpy( name_group, "Groups of Edges" );
+ else if ( ii == GetFaceGroupsTag() )
+ strcpy( name_group, "Groups of Faces" );
+ else if ( ii == GetVolumeGroupsTag() )
+ strcpy( name_group, "Groups of Volumes" );
+ else if ( ii == Get0DElementsGroupsTag() )
+ strcpy( name_group, "Groups of 0D Elements" );
+
+ if ( aTopGroup->ExistInternalObject( name_group ) ) {
+ aGroup = new HDFgroup( name_group, aTopGroup );
+ aGroup->OpenOnDisk();
+ // get number of groups
+ int aNbSubObjects = aGroup->nInternalObjects();
+ for ( int j = 0; j < aNbSubObjects; j++ ) {
+ char name_dataset[ HDF_NAME_MAX_LEN+1 ];
+ aGroup->InternalObjectIndentify( j, name_dataset );
+ // check if it is a group
+ if ( string( name_dataset ).substr( 0, 5 ) == string( "Group" ) ) {
+ // --> get group id
+ int subid = atoi( string( name_dataset ).substr( 5 ).c_str() );
+ if ( subid <= 0 )
+ continue;
+ aDataset = new HDFdataset( name_dataset, aGroup );
+ aDataset->OpenOnDisk();
+
+ // Retrieve actual group name
+ size = aDataset->GetSize();
+ char* nameFromFile = new char[ size ];
+ aDataset->ReadFromDisk( nameFromFile );
+ aDataset->CloseOnDisk();
+
+ // Try to find a shape reference
+ TopoDS_Shape aShape;
+ char aRefName[ 30 ];
+ sprintf( aRefName, "Ref on shape %d", subid);
+ if ( aGroup->ExistInternalObject( aRefName ) ) {
+ // load mesh "Ref on shape" - it's an entry to SObject
+ aDataset = new HDFdataset( aRefName, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* refFromFile = new char[ size ];
+ aDataset->ReadFromDisk( refFromFile );
+ aDataset->CloseOnDisk();
+ if ( strlen( refFromFile ) > 0 ) {
+ SALOMEDS::SObject_var shapeSO = myCurrentStudy->FindObjectID( refFromFile );
+ CORBA::Object_var shapeObject = SObjectToObject( shapeSO );
+ if ( !CORBA::is_nil( shapeObject ) ) {
+ aShapeObject = GEOM::GEOM_Object::_narrow( shapeObject );
+ if ( !aShapeObject->_is_nil() )
+ aShape = GeomObjectToShape( aShapeObject );
+ }
+ }
+ }
+ // Try to read a filter of SMESH_GroupOnFilter
+ SMESH::Filter_var filter;
+ SMESH_PredicatePtr predicate;
+ std::string hdfGrpName = "Filter " + SMESH_Comment(subid);
+ if ( aGroup->ExistInternalObject( hdfGrpName.c_str() ))
+ {
+ aDataset = new HDFdataset( hdfGrpName.c_str(), aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* persistStr = new char[ size ];
+ aDataset->ReadFromDisk( persistStr );
+ aDataset->CloseOnDisk();
+ if ( strlen( persistStr ) > 0 ) {
+ filter = SMESH_GroupOnFilter_i::StringToFilter( persistStr );
+ predicate = SMESH_GroupOnFilter_i::GetPredicate( filter );
+ }
+ }
+
+ // Create group servant
+ SMESH::ElementType type = (SMESH::ElementType)(ii - GetNodeGroupsTag() + 1);
+ SMESH::SMESH_GroupBase_var aNewGroup = SMESH::SMESH_GroupBase::_duplicate
+ ( myNewMeshImpl->createGroup( type, nameFromFile, aShape, predicate ) );
+ // Obtain a SMESHDS_Group object
+ if ( aNewGroup->_is_nil() )
+ continue;
+
+ string iorSubString = GetORB()->object_to_string( aNewGroup );
+ int newSubId = myStudyContext->findId( iorSubString );
+ myStudyContext->mapOldToNew( subid, newSubId );
+
+ SMESH_GroupBase_i* aGroupImpl = SMESH::DownCast< SMESH_GroupBase_i*>( aNewGroup );
+ if ( !aGroupImpl )
+ continue;
+
+ if ( SMESH_GroupOnFilter_i* aFilterGroup =
+ dynamic_cast< SMESH_GroupOnFilter_i*>( aGroupImpl ))
+ aFilterGroup->SetFilter( filter );
+
+ SMESHDS_GroupBase* aGroupBaseDS = aGroupImpl->GetGroupDS();
+ if ( !aGroupBaseDS )
+ continue;
+
+ aGroupBaseDS->SetStoreName( name_dataset );
+
+ // ouv : NPAL12872
+ // Read color of the group
+ char aGroupColorName[ 30 ];
+ sprintf( aGroupColorName, "ColorGroup %d", subid);
+ if ( aGroup->ExistInternalObject( aGroupColorName ) )
+ {
+ aDataset = new HDFdataset( aGroupColorName, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ double* anRGB = new double[ size ];
+ aDataset->ReadFromDisk( anRGB );
+ aDataset->CloseOnDisk();
+ Quantity_Color aColor( anRGB[0], anRGB[1], anRGB[2], Quantity_TOC_RGB );
+ aGroupBaseDS->SetColor( aColor );
+ }
+
+ // Fill group with contents from MED file
+ SMESHDS_Group* aGrp = dynamic_cast<SMESHDS_Group*>( aGroupBaseDS );
+ if ( aGrp )
+ myReader.GetGroup( aGrp );
+ }
+ }
+ aGroup->CloseOnDisk();
+ }
+ }
+
+ // read submeh order if any
+ if( aTopGroup->ExistInternalObject( "Mesh Order" ) ) {
+ aDataset = new HDFdataset( "Mesh Order", aTopGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ int* smIDs = new int[ size ];
+ aDataset->ReadFromDisk( smIDs );
+ aDataset->CloseOnDisk();
+ TListOfListOfInt anOrderIds;
+ anOrderIds.push_back( TListOfInt() );
+ for ( int i = 0; i < size; i++ )
+ if ( smIDs[ i ] < 0 ) // is separator
+ anOrderIds.push_back( TListOfInt() );
+ else
+ anOrderIds.back().push_back(smIDs[ i ]);
+
+ myNewMeshImpl->GetImpl().SetMeshOrder( anOrderIds );
+ }
+ } // loop on meshes
+
+ // notify algos on completed restoration
+ for ( meshi_group = meshGroupList.begin(); meshi_group != meshGroupList.end(); ++meshi_group )
+ {
+ SMESH_Mesh_i* myNewMeshImpl = meshi_group->first;
+ ::SMESH_Mesh& myLocMesh = myNewMeshImpl->GetImpl();
+
+ TopoDS_Shape myLocShape;
+ if(myLocMesh.HasShapeToMesh())
+ myLocShape = myLocMesh.GetShapeToMesh();
+ else
+ myLocShape = SMESH_Mesh::PseudoShape();
+
+ myLocMesh.GetSubMesh(myLocShape)->
+ ComputeStateEngine (SMESH_subMesh::SUBMESH_RESTORED);
+ }
+
+ for ( hyp_data = hypDataList.begin(); hyp_data != hypDataList.end(); ++hyp_data )
+ {
+ SMESH_Hypothesis_i* hyp = hyp_data->first;
+ hyp->UpdateAsMeshesRestored(); // for hyps needing full mesh data restored (issue 20918)
+ }
+
+ // close mesh group
+ if(aTopGroup)
+ aTopGroup->CloseOnDisk();
+ }
+ // close HDF file
+ aFile->CloseOnDisk();
+ delete aFile;
+
+ // Remove temporary files created from the stream
+ if ( !isMultiFile )
+ SALOMEDS_Tool::RemoveTemporaryFiles( tmpDir.ToCString(), aFileSeq.in(), true );
+
+ INFOS( "SMESH_Gen_i::Load completed" );
+ return true;
+}
+
+//=============================================================================
+/*!
+ * SMESH_Gen_i::LoadASCII
+ *
+ * Load SMESH module's data in ASCII format
+ */
+//=============================================================================
+
+bool SMESH_Gen_i::LoadASCII( SALOMEDS::SComponent_ptr theComponent,
+ const SALOMEDS::TMPFile& theStream,
+ const char* theURL,
+ bool isMultiFile ) {
+ if(MYDEBUG) MESSAGE( "SMESH_Gen_i::LoadASCII" );
+ // Delegates directly to the binary Load() -- see the note below.
+ return Load( theComponent, theStream, theURL, isMultiFile );
+
+ // NOTE(review): the unconditional return above makes everything from here
+ // on unreachable. The "|xx" -> byte deciphering below looks intentionally
+ // disabled (the stream is presumably no longer ASCII-encoded); confirm
+ // before deleting the dead code.
+ //before call main ::Load method it's need for decipher text format to
+ //binary ( "|xx" => x' )
+ int size = theStream.length();
+ if ( int((size / 3 )*3) != size ) //error size of buffer
+ return false;
+
+ int real_size = int(size / 3);
+
+ _CORBA_Octet* buffer = new _CORBA_Octet[real_size];
+ char tmp[3];
+ tmp[2]='\0';
+ int c = -1;
+ // each input triple is "|xy": skip the '|', parse "xy" as one hex byte
+ for ( int i = 0; i < real_size; i++ )
+ {
+ memcpy( &(tmp[0]), &(theStream[i*3+1]), 2 );
+ sscanf( tmp, "%x", &c );
+ sprintf( (char*)&(buffer[i]), "%c", (char)c );
+ }
+
+ // TMPFile takes ownership of 'buffer' (last ctor argument = 1)
+ SALOMEDS::TMPFile_var aRealStreamFile = new SALOMEDS::TMPFile(real_size, real_size, buffer, 1);
+
+ return Load( theComponent, *(aRealStreamFile._retn()), theURL, isMultiFile );