+ // Persistence restore: locate the study's temporary HDF/MED files and open
+ // them for reading; the hypotheses/algorithms/meshes are re-created below.
+ INFOS( "SMESH_Gen_i::Load" );
+
+ // Make the component's study current so StudyContext lookups below resolve in it.
+ if ( myCurrentStudy->_is_nil() ||
+ theComponent->GetStudy()->StudyId() != myCurrentStudy->StudyId() )
+ SetCurrentStudy( theComponent->GetStudy() );
+
+ StudyContext* myStudyContext = GetCurrentStudyContext();
+
+ // Get temporary files location
+ // NOTE(review): the (char*) casts strip const from c_str() results; the
+ // pointed-to temporaries live to the end of the full expression and
+ // TCollection_AsciiString copies the bytes, so this is safe but fragile.
+ TCollection_AsciiString tmpDir =
+ isMultiFile ? TCollection_AsciiString( ( char* )theURL ) : ( char* )SALOMEDS_Tool::GetTmpDir().c_str();
+
+ // Convert the stream into sequence of files to process
+ SALOMEDS::ListOfFileNames_var aFileSeq = SALOMEDS_Tool::PutStreamToFiles( theStream,
+ tmpDir.ToCString(),
+ isMultiFile );
+ TCollection_AsciiString aStudyName( "" );
+ if ( isMultiFile )
+ aStudyName = ( (char*)SALOMEDS_Tool::GetNameFromPath( myCurrentStudy->URL() ).c_str() );
+
+ // Set names of temporary files
+ TCollection_AsciiString filename = tmpDir + aStudyName + TCollection_AsciiString( "_SMESH.hdf" );
+ TCollection_AsciiString meshfile = tmpDir + aStudyName + TCollection_AsciiString( "_SMESH_Mesh.med" );
+
+ // Scratch variables reused by all restore sections below.
+ int size;
+ HDFfile* aFile;
+ HDFdataset* aDataset;
+ HDFgroup* aTopGroup;
+ HDFgroup* aGroup;
+ HDFgroup* aSubGroup;
+ HDFgroup* aSubSubGroup;
+
+ // Read data
+ // ---> open HDF file
+ aFile = new HDFfile( filename.ToCString() );
+ try {
+ aFile->OpenOnDisk( HDF_RDONLY );
+ }
+ // NOTE(review): HDFexception is caught by value (slicing risk; prefer
+ // catch by const reference), and 'aFile' is leaked on this early return.
+ catch ( HDFexception ) {
+ INFOS( "Load(): " << filename << " not found!" );
+ return false;
+ }
+
+ // MED reader used later to fill each restored mesh's data structure.
+ DriverMED_R_SMESHDS_Mesh myReader;
+ myReader.SetFile( meshfile.ToCString() );
+
+ // get total number of top-level groups
+ int aNbGroups = aFile->nInternalObjects();
+ if ( aNbGroups > 0 ) {
+ // --> in first turn we should read&create hypotheses
+ // (they must exist before meshes re-attach them by old id further below)
+ if ( aFile->ExistInternalObject( "Hypotheses" ) ) {
+ // open hypotheses root HDF group
+ // NOTE(review): aTopGroup/aGroup/aDataset objects new'd in this section
+ // are only CloseOnDisk()'d, never delete'd — TODO confirm whether the
+ // HDF wrapper library takes ownership; otherwise these leak.
+ aTopGroup = new HDFgroup( "Hypotheses", aFile );
+ aTopGroup->OpenOnDisk();
+
+ // get number of hypotheses
+ int aNbObjects = aTopGroup->nInternalObjects();
+ for ( int j = 0; j < aNbObjects; j++ ) {
+ // try to identify hypothesis
+ char hypGrpName[ HDF_NAME_MAX_LEN+1 ];
+ aTopGroup->InternalObjectIndentify( j, hypGrpName );
+
+ // only groups named "Hypothesis<id>" are hypotheses
+ if ( string( hypGrpName ).substr( 0, 10 ) == string( "Hypothesis" ) ) {
+ // open hypothesis group
+ aGroup = new HDFgroup( hypGrpName, aTopGroup );
+ aGroup->OpenOnDisk();
+
+ // --> get hypothesis id (numeric suffix of the group name)
+ int id = atoi( string( hypGrpName ).substr( 10 ).c_str() );
+ string hypname;
+ string libname;
+ string hypdata;
+
+ // get number of datasets
+ int aNbSubObjects = aGroup->nInternalObjects();
+ for ( int k = 0; k < aNbSubObjects; k++ ) {
+ // identify dataset
+ char name_of_subgroup[ HDF_NAME_MAX_LEN+1 ];
+ aGroup->InternalObjectIndentify( k, name_of_subgroup );
+ // --> get hypothesis name
+ // NOTE(review): the string() conversions below assume the dataset
+ // bytes include a trailing NUL — confirm the writer stores strlen()+1.
+ if ( strcmp( name_of_subgroup, "Name" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* hypname_str = new char[ size ];
+ aDataset->ReadFromDisk( hypname_str );
+ hypname = string( hypname_str );
+ delete [] hypname_str;
+ aDataset->CloseOnDisk();
+ }
+ // --> get hypothesis plugin library name
+ if ( strcmp( name_of_subgroup, "LibName" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* libname_str = new char[ size ];
+ aDataset->ReadFromDisk( libname_str );
+ if(MYDEBUG) SCRUTE( libname_str );
+ libname = string( libname_str );
+ delete [] libname_str;
+ aDataset->CloseOnDisk();
+ }
+ // --> get hypothesis data (opaque persistent string fed to LoadFrom)
+ if ( strcmp( name_of_subgroup, "Data" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* hypdata_str = new char[ size ];
+ aDataset->ReadFromDisk( hypdata_str );
+ hypdata = string( hypdata_str );
+ delete [] hypdata_str;
+ aDataset->CloseOnDisk();
+ }
+ }
+ // close hypothesis HDF group
+ aGroup->CloseOnDisk();
+
+ // --> restore hypothesis from data
+ if ( id > 0 && !hypname.empty()/* && !hypdata.empty()*/ ) { // VSR : persistent data can be empty
+ if(MYDEBUG) MESSAGE("VSR - load hypothesis : id = " << id <<
+ ", name = " << hypname.c_str() << ", persistent string = " << hypdata.c_str());
+ SMESH::SMESH_Hypothesis_var myHyp;
+
+ try { // protect persistence mechanism against exceptions
+ myHyp = this->createHypothesis( hypname.c_str(), libname.c_str() );
+ }
+ catch (...) {
+ INFOS( "Exception during hypothesis creation" );
+ }
+
+ // If createHypothesis threw, myHyp stays nil and the dynamic_cast
+ // presumably yields no servant, so only the debug message below runs.
+ SMESH_Hypothesis_i* myImpl = dynamic_cast<SMESH_Hypothesis_i*>( GetServant( myHyp ).in() );
+ if ( myImpl ) {
+ myImpl->LoadFrom( hypdata.c_str() );
+ // Record old-id -> new-id so meshes restored below can re-attach
+ // this hypothesis via getIORbyOldId().
+ string iorString = GetORB()->object_to_string( myHyp );
+ int newId = myStudyContext->findId( iorString );
+ myStudyContext->mapOldToNew( id, newId );
+ }
+ else
+ if(MYDEBUG) MESSAGE( "VSR - SMESH_Gen::Load - can't get servant" );
+ }
+ }
+ }
+ // close hypotheses root HDF group
+ aTopGroup->CloseOnDisk();
+ }
+
+ // --> then we should read&create algorithms
+ // NOTE(review): this section is a near-verbatim duplicate of the
+ // hypotheses section above (prefix "Algorithm"/length 9 instead of
+ // "Hypothesis"/length 10) — a candidate for extraction into a helper.
+ if ( aFile->ExistInternalObject( "Algorithms" ) ) {
+ // open algorithms root HDF group
+ aTopGroup = new HDFgroup( "Algorithms", aFile );
+ aTopGroup->OpenOnDisk();
+
+ // get number of algorithms
+ int aNbObjects = aTopGroup->nInternalObjects();
+ for ( int j = 0; j < aNbObjects; j++ ) {
+ // try to identify algorithm
+ char hypGrpName[ HDF_NAME_MAX_LEN+1 ];
+ aTopGroup->InternalObjectIndentify( j, hypGrpName );
+
+ // only groups named "Algorithm<id>" are algorithms
+ if ( string( hypGrpName ).substr( 0, 9 ) == string( "Algorithm" ) ) {
+ // open algorithm group
+ aGroup = new HDFgroup( hypGrpName, aTopGroup );
+ aGroup->OpenOnDisk();
+
+ // --> get algorithm id (numeric suffix of the group name)
+ int id = atoi( string( hypGrpName ).substr( 9 ).c_str() );
+ string hypname;
+ string libname;
+ string hypdata;
+
+ // get number of datasets
+ int aNbSubObjects = aGroup->nInternalObjects();
+ for ( int k = 0; k < aNbSubObjects; k++ ) {
+ // identify dataset
+ char name_of_subgroup[ HDF_NAME_MAX_LEN+1 ];
+ aGroup->InternalObjectIndentify( k, name_of_subgroup );
+ // --> get algorithm name
+ // NOTE(review): as above, assumes stored bytes are NUL-terminated.
+ if ( strcmp( name_of_subgroup, "Name" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* hypname_str = new char[ size ];
+ aDataset->ReadFromDisk( hypname_str );
+ hypname = string( hypname_str );
+ delete [] hypname_str;
+ aDataset->CloseOnDisk();
+ }
+ // --> get algorithm plugin library name
+ if ( strcmp( name_of_subgroup, "LibName" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* libname_str = new char[ size ];
+ aDataset->ReadFromDisk( libname_str );
+ if(MYDEBUG) SCRUTE( libname_str );
+ libname = string( libname_str );
+ delete [] libname_str;
+ aDataset->CloseOnDisk();
+ }
+ // --> get algorithm data (opaque persistent string fed to LoadFrom)
+ if ( strcmp( name_of_subgroup, "Data" ) == 0 ) {
+ aDataset = new HDFdataset( name_of_subgroup, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* hypdata_str = new char[ size ];
+ aDataset->ReadFromDisk( hypdata_str );
+ if(MYDEBUG) SCRUTE( hypdata_str );
+ hypdata = string( hypdata_str );
+ delete [] hypdata_str;
+ aDataset->CloseOnDisk();
+ }
+ }
+ // close algorithm HDF group
+ aGroup->CloseOnDisk();
+
+ // --> restore algorithm from data
+ if ( id > 0 && !hypname.empty()/* && !hypdata.empty()*/ ) { // VSR : persistent data can be empty
+ if(MYDEBUG) MESSAGE("VSR - load algo : id = " << id <<
+ ", name = " << hypname.c_str() << ", persistent string = " << hypdata.c_str());
+ SMESH::SMESH_Hypothesis_var myHyp;
+
+ try { // protect persistence mechanism against exceptions
+ myHyp = this->createHypothesis( hypname.c_str(), libname.c_str() );
+ }
+ catch (...) {
+ INFOS( "Exception during hypothesis creation" );
+ }
+
+ // Nil myHyp (creation failure) presumably yields no servant here.
+ SMESH_Hypothesis_i* myImpl = dynamic_cast<SMESH_Hypothesis_i*>( GetServant( myHyp ).in() );
+ if ( myImpl ) {
+ myImpl->LoadFrom( hypdata.c_str() );
+ // Record old-id -> new-id so meshes can re-attach this algorithm.
+ string iorString = GetORB()->object_to_string( myHyp );
+ int newId = myStudyContext->findId( iorString );
+ myStudyContext->mapOldToNew( id, newId );
+ }
+ else
+ if(MYDEBUG) MESSAGE( "VSR - SMESH_Gen::Load - can't get servant" );
+ }
+ }
+ }
+ // close algorithms root HDF group
+ aTopGroup->CloseOnDisk();
+ }
+
+ // --> the rest groups should be meshes
+ // (iterates ALL top-level groups; non-"Mesh<id>" ones are filtered out)
+ for ( int i = 0; i < aNbGroups; i++ ) {
+ // identify next group
+ char meshName[ HDF_NAME_MAX_LEN+1 ];
+ aFile->InternalObjectIndentify( i, meshName );
+
+ if ( string( meshName ).substr( 0, 4 ) == string( "Mesh" ) ) {
+ // --> get mesh id (numeric suffix of the group name)
+ int id = atoi( string( meshName ).substr( 4 ).c_str() );
+ if ( id <= 0 )
+ continue;
+
+ bool hasData = false;
+
+ // open mesh HDF group
+ aTopGroup = new HDFgroup( meshName, aFile );
+ aTopGroup->OpenOnDisk();
+
+ // get number of child HDF objects
+ int aNbObjects = aTopGroup->nInternalObjects();
+ if ( aNbObjects > 0 ) {
+ // create mesh
+ if(MYDEBUG) MESSAGE( "VSR - load mesh : id = " << id );
+ SMESH::SMESH_Mesh_var myNewMesh = this->createMesh();
+ SMESH_Mesh_i* myNewMeshImpl = dynamic_cast<SMESH_Mesh_i*>( GetServant( myNewMesh ).in() );
+ if ( !myNewMeshImpl )
+ continue;
+ // Record old-id -> new-id for this mesh, as done for hypotheses.
+ string iorString = GetORB()->object_to_string( myNewMesh );
+ int newId = myStudyContext->findId( iorString );
+ myStudyContext->mapOldToNew( id, newId );
+
+ ::SMESH_Mesh& myLocMesh = myNewMeshImpl->GetImpl();
+ SMESHDS_Mesh* mySMESHDSMesh = myLocMesh.GetMeshDS();
+
+ // try to find mesh data dataset
+ if ( aTopGroup->ExistInternalObject( "Has data" ) ) {
+ // load mesh "has data" flag
+ aDataset = new HDFdataset( "Has data", aTopGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* strHasData = new char[ size ];
+ aDataset->ReadFromDisk( strHasData );
+ aDataset->CloseOnDisk();
+ // NOTE(review): 'strHasData' is never delete[]'d — memory leak.
+ if ( strcmp( strHasData, "1") == 0 ) {
+ // read mesh data from MED file
+ myReader.SetMesh( mySMESHDSMesh );
+ myReader.SetMeshId( id );
+ myReader.Perform();
+ hasData = true;
+ }
+ }
+
+ // try to read and set reference to shape
+ GEOM::GEOM_Object_var aShapeObject;
+ if ( aTopGroup->ExistInternalObject( "Ref on shape" ) ) {
+ // load mesh "Ref on shape" - it's an entry to SObject
+ aDataset = new HDFdataset( "Ref on shape", aTopGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* refFromFile = new char[ size ];
+ aDataset->ReadFromDisk( refFromFile );
+ aDataset->CloseOnDisk();
+ // NOTE(review): 'refFromFile' is never delete[]'d — memory leak.
+ if ( strlen( refFromFile ) > 0 ) {
+ SALOMEDS::SObject_var shapeSO = myCurrentStudy->FindObjectID( refFromFile );
+
+ // Make sure GEOM data are loaded first
+ loadGeomData( shapeSO->GetFatherComponent() );
+
+ CORBA::Object_var shapeObject = SObjectToObject( shapeSO );
+ if ( !CORBA::is_nil( shapeObject ) ) {
+ aShapeObject = GEOM::GEOM_Object::_narrow( shapeObject );
+ if ( !aShapeObject->_is_nil() )
+ myNewMeshImpl->SetShape( aShapeObject );
+ }
+ }
+ }
+
+ // try to get applied algorithms
+ if ( aTopGroup->ExistInternalObject( "Applied Algorithms" ) ) {
+ aGroup = new HDFgroup( "Applied Algorithms", aTopGroup );
+ aGroup->OpenOnDisk();
+ // get number of applied algorithms
+ int aNbSubObjects = aGroup->nInternalObjects();
+ if(MYDEBUG) MESSAGE( "VSR - number of applied algos " << aNbSubObjects );
+ for ( int j = 0; j < aNbSubObjects; j++ ) {
+ char name_dataset[ HDF_NAME_MAX_LEN+1 ];
+ aGroup->InternalObjectIndentify( j, name_dataset );
+ // check if it is an algorithm
+ if ( string( name_dataset ).substr( 0, 4 ) == string( "Algo" ) ) {
+ aDataset = new HDFdataset( name_dataset, aGroup );
+ aDataset->OpenOnDisk();
+ size = aDataset->GetSize();
+ char* refFromFile = new char[ size ];
+ aDataset->ReadFromDisk( refFromFile );
+ aDataset->CloseOnDisk();
+ // NOTE(review): this 'refFromFile' is also never delete[]'d — leak.
+
+ // san - it is impossible to recover applied algorithms using their entries within Load() method
+
+ //SALOMEDS::SObject_var hypSO = myCurrentStudy->FindObjectID( refFromFile );
+ //CORBA::Object_var hypObject = SObjectToObject( hypSO );
+ // NOTE(review): this inner 'id' shadows the mesh id declared above.
+ int id = atoi( refFromFile );
+ // Resolve the persisted algorithm id via the old->new mapping
+ // recorded in the algorithms section.
+ string anIOR = myStudyContext->getIORbyOldId( id );
+ if ( !anIOR.empty() ) {
+ CORBA::Object_var hypObject = GetORB()->string_to_object( anIOR.c_str() );
+ if ( !CORBA::is_nil( hypObject ) ) {
+ SMESH::SMESH_Hypothesis_var anHyp = SMESH::SMESH_Hypothesis::_narrow( hypObject );
+ // NOTE(review): 'aShapeObject->_is_nil()' is inconsistent with the
+ // CORBA::is_nil() style used above; if no shape ref was restored,
+ // aShapeObject is nil and calling through it is ORB-dependent —
+ // prefer !CORBA::is_nil( aShapeObject ). TODO confirm.
+ if ( !anHyp->_is_nil() && !aShapeObject->_is_nil() )
+ myNewMeshImpl->addHypothesis( aShapeObject, anHyp );
+ }