+//=============================================================================
+/*!
+ * \brief SALOMEDS persistence entry point: store the SMESH component state.
+ *
+ * Walks the study tree under \a theComponent and writes hypotheses,
+ * algorithms, meshes, sub-meshes and groups into a temporary HDF file
+ * ("<study>_SMESH.hdf"), while mesh data itself is written to a MED file
+ * ("<study>_SMESH_Mesh.med") via DriverMED_W_SMESHDS_Mesh.  Both files are
+ * then packed into a single TMPFile byte stream that is returned to the
+ * caller; in single-file mode the temporary files are removed afterwards.
+ *
+ * \param theComponent - SMESH component SObject whose sub-tree is saved
+ * \param theURL       - target directory used when \a isMultiFile is true
+ * \param isMultiFile  - if true, files are created in \a theURL and kept;
+ *                       otherwise a temporary directory is used and cleaned
+ * \retval SALOMEDS::TMPFile* - stream holding the packed file contents
+ */
+//=============================================================================
+SALOMEDS::TMPFile* SMESH_Gen_i::Save( SALOMEDS::SComponent_ptr theComponent,
+ const char* theURL,
+ bool isMultiFile )
+{
+ INFOS( "SMESH_Gen_i::Save" );
+
+ // ASSERT( theComponent->GetStudy()->StudyId() == myCurrentStudy->StudyId() )
+ // san -- in case <myCurrentStudy> differs from theComponent's study,
+ // use that of the component
+ // NOTE(review): myCurrentStudy->_is_nil() invokes a method through the _var
+ // even when it holds a nil reference; CORBA::is_nil( myCurrentStudy ) would
+ // be the safe spelling -- confirm against the ORB's nil-call behavior.
+ if ( myCurrentStudy->_is_nil() ||
+ theComponent->GetStudy()->StudyId() != myCurrentStudy->StudyId() )
+ SetCurrentStudy( theComponent->GetStudy() );
+
+ // Store study contents as a set of python commands
+ SavePython(myCurrentStudy);
+
+ StudyContext* myStudyContext = GetCurrentStudyContext();
+
+ // Declare a byte stream
+ SALOMEDS::TMPFile_var aStreamFile;
+
+ // Obtain a temporary dir
+ TCollection_AsciiString tmpDir =
+ ( isMultiFile ) ? TCollection_AsciiString( ( char* )theURL ) : ( char* )SALOMEDS_Tool::GetTmpDir().c_str();
+
+ // Create a sequence of files processed
+ SALOMEDS::ListOfFileNames_var aFileSeq = new SALOMEDS::ListOfFileNames;
+ aFileSeq->length( NUM_TMP_FILES );
+
+ TCollection_AsciiString aStudyName( "" );
+ if ( isMultiFile )
+ aStudyName = ( (char*)SALOMEDS_Tool::GetNameFromPath( myCurrentStudy->URL() ).c_str() );
+
+ // Set names of temporary files
+ TCollection_AsciiString filename =
+ aStudyName + TCollection_AsciiString( "_SMESH.hdf" ); // for SMESH data itself
+ TCollection_AsciiString meshfile =
+ aStudyName + TCollection_AsciiString( "_SMESH_Mesh.med" ); // for mesh data to be stored in MED file
+ aFileSeq[ 0 ] = CORBA::string_dup( filename.ToCString() );
+ aFileSeq[ 1 ] = CORBA::string_dup( meshfile.ToCString() );
+ filename = tmpDir + filename;
+ meshfile = tmpDir + meshfile;
+
+ // Raw pointers to HDF wrapper objects reused throughout the save loop.
+ // NOTE(review): aTopGroup/aGroup/aSubGroup/aSubSubGroup/aDataset are
+ // allocated with 'new' below and only CloseOnDisk()'d, never deleted --
+ // possible leak unless the HDF wrappers self-destroy on close; verify.
+ HDFfile* aFile;
+ HDFdataset* aDataset;
+ HDFgroup* aTopGroup;
+ HDFgroup* aGroup;
+ HDFgroup* aSubGroup;
+ HDFgroup* aSubSubGroup;
+ hdf_size aSize[ 1 ]; // 1-D dataset dimension, reused for every dataset
+
+
+ //Remove the files if they exist: BugID: 11225
+#ifndef WNT /* unix functionality */
+ TCollection_AsciiString cmd("rm -f \"");
+#else /* windows */
+ TCollection_AsciiString cmd("del /F \"");
+#endif
+
+ cmd+=filename;
+ cmd+="\" \"";
+ cmd+=meshfile;
+ cmd+="\"";
+ // NOTE(review): file names are interpolated into a shell command; this is
+ // only safe while study names cannot contain quotes/shell metacharacters.
+ system(cmd.ToCString());
+
+ // MED writer to be used by storage process
+ DriverMED_W_SMESHDS_Mesh myWriter;
+ myWriter.SetFile( meshfile.ToCString() );
+
+ // IMP issue 20918
+ // SetStoreName() to groups before storing hypotheses to let them refer to
+ // groups using "store name", which is "Group <group_persistent_id>"
+ {
+ SALOMEDS::ChildIterator_wrap itBig = myCurrentStudy->NewChildIterator( theComponent );
+ for ( ; itBig->More(); itBig->Next() ) {
+ SALOMEDS::SObject_wrap gotBranch = itBig->Value();
+ if ( gotBranch->Tag() > GetAlgorithmsRootTag() ) {
+ CORBA::Object_var anObject = SObjectToObject( gotBranch );
+ if ( !CORBA::is_nil( anObject ) ) {
+ SMESH::SMESH_Mesh_var myMesh = SMESH::SMESH_Mesh::_narrow( anObject ) ;
+ if ( !myMesh->_is_nil() ) {
+ myMesh->Load(); // load from study file if not yet done
+ TPythonDump pd; // not to dump GetGroups()
+ SMESH::ListOfGroups_var groups = myMesh->GetGroups();
+ pd << ""; // to avoid optimizing pd out
+ for ( int i = 0; i < groups->length(); ++i )
+ {
+ SMESH_GroupBase_i* grImpl = SMESH::DownCast<SMESH_GroupBase_i*>( groups[i]);
+ if ( grImpl )
+ {
+ // persistent id of the group servant becomes part of its store name
+ CORBA::String_var objStr = GetORB()->object_to_string( grImpl->_this() );
+ int anId = myStudyContext->findId( string( objStr.in() ) );
+ char grpName[ 30 ];
+ sprintf( grpName, "Group %d", anId );
+ SMESHDS_GroupBase* aGrpBaseDS = grImpl->GetGroupDS();
+ aGrpBaseDS->SetStoreName( grpName );
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Write data
+ // ---> create HDF file
+ aFile = new HDFfile( (char*) filename.ToCString() );
+ aFile->CreateOnDisk();
+
+ // --> iterator for top-level objects
+ SALOMEDS::ChildIterator_wrap itBig = myCurrentStudy->NewChildIterator( theComponent );
+ for ( ; itBig->More(); itBig->Next() ) {
+ SALOMEDS::SObject_wrap gotBranch = itBig->Value();
+
+ // --> hypotheses root branch (only one for the study)
+ if ( gotBranch->Tag() == GetHypothesisRootTag() ) {
+ // create hypotheses root HDF group
+ aTopGroup = new HDFgroup( "Hypotheses", aFile );
+ aTopGroup->CreateOnDisk();
+
+ // iterator for all hypotheses
+ SALOMEDS::ChildIterator_wrap it = myCurrentStudy->NewChildIterator( gotBranch );
+ for ( ; it->More(); it->Next() ) {
+ SALOMEDS::SObject_wrap mySObject = it->Value();
+ CORBA::Object_var anObject = SObjectToObject( mySObject );
+ if ( !CORBA::is_nil( anObject ) ) {
+ SMESH::SMESH_Hypothesis_var myHyp = SMESH::SMESH_Hypothesis::_narrow( anObject );
+ if ( !myHyp->_is_nil() ) {
+ SMESH_Hypothesis_i* myImpl = dynamic_cast<SMESH_Hypothesis_i*>( GetServant( myHyp ).in() );
+ if ( myImpl ) {
+ string hypname = string( myHyp->GetName() );
+ string libname = string( myHyp->GetLibName() );
+ // BUG SWP13062
+ // Needs for save crossplatform libname, i.e. parth of name ( ".dll" for
+ // WNT and ".so" for X-system) must be deleted
+ int libname_len = libname.length();
+#ifdef WNT
+ if( libname_len > 4 )
+ libname.resize( libname_len - 4 );
+#else
+ // PAL17753 (Regresion: missing hypothesis in restored study)
+ // "lib" also should be removed from the beginning
+ //if( libname_len > 3 )
+ //libname.resize( libname_len - 3 );
+ if( libname_len > 6 )
+ libname = libname.substr( 3, libname_len - 3 - 3 );
+#endif
+ CORBA::String_var objStr = GetORB()->object_to_string( anObject );
+ int id = myStudyContext->findId( string( objStr.in() ) );
+ string hypdata = string( myImpl->SaveTo() );
+
+ // for each hypothesis create HDF group basing on its id
+ char hypGrpName[30];
+ sprintf( hypGrpName, "Hypothesis %d", id );
+ aGroup = new HDFgroup( hypGrpName, aTopGroup );
+ aGroup->CreateOnDisk();
+ // --> type name of hypothesis
+ aSize[ 0 ] = hypname.length() + 1;
+ aDataset = new HDFdataset( "Name", aGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( hypname.c_str() ) );
+ aDataset->CloseOnDisk();
+ // --> server plugin library name of hypothesis
+ aSize[ 0 ] = libname.length() + 1;
+ aDataset = new HDFdataset( "LibName", aGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( libname.c_str() ) );
+ aDataset->CloseOnDisk();
+ // --> persistent data of hypothesis
+ aSize[ 0 ] = hypdata.length() + 1;
+ aDataset = new HDFdataset( "Data", aGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( hypdata.c_str() ) );
+ aDataset->CloseOnDisk();
+ // close hypothesis HDF group
+ aGroup->CloseOnDisk();
+ }
+ }
+ }
+ }
+ // close hypotheses root HDF group
+ aTopGroup->CloseOnDisk();
+ }
+ // --> algorithms root branch (only one for the study)
+ // NOTE(review): this branch mirrors the hypotheses branch above almost
+ // line-for-line (only group names differ) -- a candidate for a shared
+ // helper, kept duplicated here to preserve the original byte layout.
+ else if ( gotBranch->Tag() == GetAlgorithmsRootTag() ) {
+ // create algorithms root HDF group
+ aTopGroup = new HDFgroup( "Algorithms", aFile );
+ aTopGroup->CreateOnDisk();
+
+ // iterator for all algorithms
+ SALOMEDS::ChildIterator_wrap it = myCurrentStudy->NewChildIterator( gotBranch );
+ for ( ; it->More(); it->Next() ) {
+ SALOMEDS::SObject_wrap mySObject = it->Value();
+ CORBA::Object_var anObject = SObjectToObject( mySObject );
+ if ( !CORBA::is_nil( anObject ) ) {
+ SMESH::SMESH_Hypothesis_var myHyp = SMESH::SMESH_Hypothesis::_narrow( anObject );
+ if ( !myHyp->_is_nil() ) {
+ SMESH_Hypothesis_i* myImpl = dynamic_cast<SMESH_Hypothesis_i*>( GetServant( myHyp ).in() );
+ if ( myImpl ) {
+ string hypname = string( myHyp->GetName() );
+ string libname = string( myHyp->GetLibName() );
+ // BUG SWP13062
+ // Needs for save crossplatform libname, i.e. parth of name ( ".dll" for
+ // WNT and ".so" for X-system) must be deleted
+ int libname_len = libname.length();
+#ifdef WNT
+ if( libname_len > 4 )
+ libname.resize( libname_len - 4 );
+#else
+ // PAL17753 (Regresion: missing hypothesis in restored study)
+ // "lib" also should be removed from the beginning
+ //if( libname_len > 3 )
+ //libname.resize( libname_len - 3 );
+ if( libname_len > 6 )
+ libname = libname.substr( 3, libname_len - 3 - 3 );
+#endif
+ CORBA::String_var objStr = GetORB()->object_to_string( anObject );
+ int id = myStudyContext->findId( string( objStr.in() ) );
+ string hypdata = string( myImpl->SaveTo() );
+
+ // for each algorithm create HDF group basing on its id
+ char hypGrpName[30];
+ sprintf( hypGrpName, "Algorithm %d", id );
+ aGroup = new HDFgroup( hypGrpName, aTopGroup );
+ aGroup->CreateOnDisk();
+ // --> type name of algorithm
+ aSize[0] = hypname.length() + 1;
+ aDataset = new HDFdataset( "Name", aGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( hypname.c_str() ) );
+ aDataset->CloseOnDisk();
+ // --> server plugin library name of hypothesis
+ aSize[0] = libname.length() + 1;
+ aDataset = new HDFdataset( "LibName", aGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( libname.c_str() ) );
+ aDataset->CloseOnDisk();
+ // --> persistent data of algorithm
+ aSize[0] = hypdata.length() + 1;
+ aDataset = new HDFdataset( "Data", aGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( hypdata.c_str() ) );
+ aDataset->CloseOnDisk();
+ // close algorithm HDF group
+ aGroup->CloseOnDisk();
+ }
+ }
+ }
+ }
+ // close algorithms root HDF group
+ aTopGroup->CloseOnDisk();
+ }
+ // --> mesh objects roots branches
+ else if ( gotBranch->Tag() > GetAlgorithmsRootTag() ) {
+ CORBA::Object_var anObject = SObjectToObject( gotBranch );
+ if ( !CORBA::is_nil( anObject ) ) {
+ SMESH::SMESH_Mesh_var myMesh = SMESH::SMESH_Mesh::_narrow( anObject ) ;
+ if ( !myMesh->_is_nil() ) {
+ SMESH_Mesh_i* myImpl = dynamic_cast<SMESH_Mesh_i*>( GetServant( myMesh ).in() );
+ if ( myImpl ) {
+ CORBA::String_var objStr = GetORB()->object_to_string( anObject );
+ int id = myStudyContext->findId( string( objStr.in() ) );
+ ::SMESH_Mesh& myLocMesh = myImpl->GetImpl();
+ SMESHDS_Mesh* mySMESHDSMesh = myLocMesh.GetMeshDS();
+ bool hasShape = myLocMesh.HasShapeToMesh();
+
+ // for each mesh open the HDF group basing on its id
+ char meshGrpName[ 30 ];
+ sprintf( meshGrpName, "Mesh %d", id );
+ aTopGroup = new HDFgroup( meshGrpName, aFile );
+ aTopGroup->CreateOnDisk();
+
+ // --> put dataset to hdf file which is a flag that mesh has data
+ string strHasData = "0";
+ // check if the mesh is not empty
+ if ( mySMESHDSMesh->NbNodes() > 0 ) {
+ // write mesh data to med file
+ myWriter.SetMesh( mySMESHDSMesh );
+ myWriter.SetMeshId( id );
+ strHasData = "1";
+ }
+ aSize[ 0 ] = strHasData.length() + 1;
+ aDataset = new HDFdataset( "Has data", aTopGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( strHasData.c_str() ) );
+ aDataset->CloseOnDisk();
+
+ // ouv : NPAL12872
+ // for each mesh open the HDF group basing on its auto color parameter
+ char meshAutoColorName[ 30 ];
+ sprintf( meshAutoColorName, "AutoColorMesh %d", id );
+ int anAutoColor[1];
+ anAutoColor[0] = myImpl->GetAutoColor();
+ aSize[ 0 ] = 1;
+ aDataset = new HDFdataset( meshAutoColorName, aTopGroup, HDF_INT32, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( anAutoColor );
+ aDataset->CloseOnDisk();
+
+ // issue 0020693. Store _isModified flag
+ int isModified = myLocMesh.GetIsModified();
+ aSize[ 0 ] = 1;
+ aDataset = new HDFdataset( "_isModified", aTopGroup, HDF_INT32, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( &isModified );
+ aDataset->CloseOnDisk();
+
+ // issue 20918. Store Persistent Id of SMESHDS_Mesh
+ int meshPersistentId = mySMESHDSMesh->GetPersistentId();
+ aSize[ 0 ] = 1;
+ aDataset = new HDFdataset( "meshPersistentId", aTopGroup, HDF_INT32, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( &meshPersistentId );
+ aDataset->CloseOnDisk();
+
+ // write reference on a shape if exists
+ SALOMEDS::SObject_wrap myRef;
+ bool shapeRefFound = false;
+ bool found = gotBranch->FindSubObject( GetRefOnShapeTag(), myRef.inout() );
+ if ( found ) {
+ SALOMEDS::SObject_wrap myShape;
+ bool ok = myRef->ReferencedObject( myShape.inout() );
+ if ( ok ) {
+ shapeRefFound = (! CORBA::is_nil( myShape->GetObject() ));
+ string myRefOnObject = myShape->GetID();
+ if ( shapeRefFound && myRefOnObject.length() > 0 ) {
+ aSize[ 0 ] = myRefOnObject.length() + 1;
+ aDataset = new HDFdataset( "Ref on shape", aTopGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( myRefOnObject.c_str() ) );
+ aDataset->CloseOnDisk();
+ }
+ }
+ }
+
+ // write applied hypotheses if exist
+ SALOMEDS::SObject_wrap myHypBranch;
+ found = gotBranch->FindSubObject( GetRefOnAppliedHypothesisTag(), myHypBranch.inout() );
+ if ( found && !shapeRefFound && hasShape) { // remove applied hyps
+ myCurrentStudy->NewBuilder()->RemoveObjectWithChildren( myHypBranch );
+ }
+ if ( found && (shapeRefFound || !hasShape) ) {
+ aGroup = new HDFgroup( "Applied Hypotheses", aTopGroup );
+ aGroup->CreateOnDisk();
+
+ SALOMEDS::ChildIterator_wrap it = myCurrentStudy->NewChildIterator( myHypBranch );
+ int hypNb = 0;
+ for ( ; it->More(); it->Next() ) {
+ SALOMEDS::SObject_wrap mySObject = it->Value();
+ SALOMEDS::SObject_wrap myRefOnHyp;
+ bool ok = mySObject->ReferencedObject( myRefOnHyp.inout() );
+ if ( ok ) {
+ // san - it is impossible to recover applied hypotheses
+ // using their entries within Load() method,
+ // for there are no AttributeIORs in the study when Load() is working.
+ // Hence, it is better to store persistent IDs of hypotheses as references to them
+
+ //string myRefOnObject = myRefOnHyp->GetID();
+ CORBA::Object_var anObject = SObjectToObject( myRefOnHyp );
+ CORBA::String_var objStr = GetORB()->object_to_string( anObject );
+ int id = myStudyContext->findId( string( objStr.in() ) );
+ //if ( myRefOnObject.length() > 0 ) {
+ //aSize[ 0 ] = myRefOnObject.length() + 1;
+ char hypName[ 30 ], hypId[ 30 ];
+ sprintf( hypName, "Hyp %d", ++hypNb );
+ sprintf( hypId, "%d", id );
+ aSize[ 0 ] = strlen( hypId ) + 1;
+ aDataset = new HDFdataset( hypName, aGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ //aDataset->WriteOnDisk( ( char* )( myRefOnObject.c_str() ) );
+ aDataset->WriteOnDisk( hypId );
+ aDataset->CloseOnDisk();
+ //}
+ }
+ }
+ aGroup->CloseOnDisk();
+ }
+
+ // write applied algorithms if exist
+ SALOMEDS::SObject_wrap myAlgoBranch;
+ found = gotBranch->FindSubObject( GetRefOnAppliedAlgorithmsTag(),
+ myAlgoBranch.inout() );
+ if ( found && !shapeRefFound && hasShape) { // remove applied algos
+ myCurrentStudy->NewBuilder()->RemoveObjectWithChildren( myAlgoBranch );
+ }
+ if ( found && (shapeRefFound || !hasShape)) {
+ aGroup = new HDFgroup( "Applied Algorithms", aTopGroup );
+ aGroup->CreateOnDisk();
+
+ SALOMEDS::ChildIterator_wrap it = myCurrentStudy->NewChildIterator( myAlgoBranch );
+ int algoNb = 0;
+ for ( ; it->More(); it->Next() ) {
+ SALOMEDS::SObject_wrap mySObject = it->Value();
+ SALOMEDS::SObject_wrap myRefOnAlgo;
+ bool ok = mySObject->ReferencedObject( myRefOnAlgo.inout() );
+ if ( ok ) {
+ // san - it is impossible to recover applied algorithms
+ // using their entries within Load() method,
+ // for there are no AttributeIORs in the study when Load() is working.
+ // Hence, it is better to store persistent IDs of algorithms as references to them
+
+ //string myRefOnObject = myRefOnAlgo->GetID();
+ CORBA::Object_var anObject = SObjectToObject( myRefOnAlgo );
+ CORBA::String_var objStr = GetORB()->object_to_string( anObject );
+ int id = myStudyContext->findId( string( objStr.in() ) );
+ //if ( myRefOnObject.length() > 0 ) {
+ //aSize[ 0 ] = myRefOnObject.length() + 1;
+ char algoName[ 30 ], algoId[ 30 ];
+ sprintf( algoName, "Algo %d", ++algoNb );
+ sprintf( algoId, "%d", id );
+ aSize[ 0 ] = strlen( algoId ) + 1;
+ aDataset = new HDFdataset( algoName, aGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ //aDataset->WriteOnDisk( ( char* )( myRefOnObject.c_str() ) );
+ aDataset->WriteOnDisk( algoId );
+ aDataset->CloseOnDisk();
+ //}
+ }
+ }
+ aGroup->CloseOnDisk();
+ }
+
+ // --> submesh objects sub-branches
+
+ for ( int i = GetSubMeshOnVertexTag(); i <= GetSubMeshOnCompoundTag(); i++ ) {
+ SALOMEDS::SObject_wrap mySubmeshBranch;
+ found = gotBranch->FindSubObject( i, mySubmeshBranch.inout() );
+
+ if ( found ) // check if there is shape reference in submeshes
+ {
+ bool hasShapeRef = false;
+ SALOMEDS::ChildIterator_wrap itSM =
+ myCurrentStudy->NewChildIterator( mySubmeshBranch );
+ for ( ; itSM->More(); itSM->Next() ) {
+ SALOMEDS::SObject_wrap mySubRef, myShape, mySObject = itSM->Value();
+ if ( mySObject->FindSubObject( GetRefOnShapeTag(), mySubRef.inout() ))
+ mySubRef->ReferencedObject( myShape.inout() );
+ if ( !CORBA::is_nil( myShape ) && !CORBA::is_nil( myShape->GetObject() ))
+ hasShapeRef = true;
+ else
+ { // remove one submesh
+ if ( shapeRefFound )
+ { // unassign hypothesis
+ SMESH::SMESH_subMesh_var mySubMesh =
+ SMESH::SMESH_subMesh::_narrow( SObjectToObject( mySObject ));
+ if ( !mySubMesh->_is_nil() ) {
+ int shapeID = mySubMesh->GetId();
+ TopoDS_Shape S = mySMESHDSMesh->IndexToShape( shapeID );
+ const list<const SMESHDS_Hypothesis*>& hypList =
+ mySMESHDSMesh->GetHypothesis( S );
+ list<const SMESHDS_Hypothesis*>::const_iterator hyp = hypList.begin();
+ while ( hyp != hypList.end() ) {
+ int hypID = (*hyp++)->GetID(); // goto next hyp here because
+ myLocMesh.RemoveHypothesis( S, hypID ); // hypList changes here
+ }
+ }
+ }
+ myCurrentStudy->NewBuilder()->RemoveObjectWithChildren( mySObject );
+ }
+ } // loop on submeshes of a type
+ if ( !shapeRefFound || !hasShapeRef ) { // remove the whole submeshes branch
+ myCurrentStudy->NewBuilder()->RemoveObjectWithChildren( mySubmeshBranch );
+ found = false;
+ }
+ } // end check if there is shape reference in submeshes
+ if ( found ) {
+ char name_meshgroup[ 30 ];
+ if ( i == GetSubMeshOnVertexTag() )
+ strcpy( name_meshgroup, "SubMeshes On Vertex" );
+ else if ( i == GetSubMeshOnEdgeTag() )
+ strcpy( name_meshgroup, "SubMeshes On Edge" );
+ else if ( i == GetSubMeshOnWireTag() )
+ strcpy( name_meshgroup, "SubMeshes On Wire" );
+ else if ( i == GetSubMeshOnFaceTag() )
+ strcpy( name_meshgroup, "SubMeshes On Face" );
+ else if ( i == GetSubMeshOnShellTag() )
+ strcpy( name_meshgroup, "SubMeshes On Shell" );
+ else if ( i == GetSubMeshOnSolidTag() )
+ strcpy( name_meshgroup, "SubMeshes On Solid" );
+ else if ( i == GetSubMeshOnCompoundTag() )
+ strcpy( name_meshgroup, "SubMeshes On Compound" );
+
+ // for each type of submeshes create container HDF group
+ aGroup = new HDFgroup( name_meshgroup, aTopGroup );
+ aGroup->CreateOnDisk();
+
+ // iterator for all submeshes of given type
+ SALOMEDS::ChildIterator_wrap itSM = myCurrentStudy->NewChildIterator( mySubmeshBranch );
+ for ( ; itSM->More(); itSM->Next() ) {
+ SALOMEDS::SObject_wrap mySObject = itSM->Value();
+ CORBA::Object_var anSubObject = SObjectToObject( mySObject );
+ if ( !CORBA::is_nil( anSubObject ))
+ {
+ SMESH::SMESH_subMesh_var mySubMesh = SMESH::SMESH_subMesh::_narrow( anSubObject ) ;
+ CORBA::String_var objStr = GetORB()->object_to_string( anSubObject );
+ int subid = myStudyContext->findId( string( objStr.in() ) );
+
+ // for each mesh open the HDF group basing on its id
+ char submeshGrpName[ 30 ];
+ sprintf( submeshGrpName, "SubMesh %d", subid );
+ aSubGroup = new HDFgroup( submeshGrpName, aGroup );
+ aSubGroup->CreateOnDisk();
+
+ // write reference on a shape, already checked if it exists
+ SALOMEDS::SObject_wrap mySubRef, myShape;
+ if ( mySObject->FindSubObject( GetRefOnShapeTag(), mySubRef.inout() ))
+ mySubRef->ReferencedObject( myShape.inout() );
+ string myRefOnObject = myShape->GetID();
+ if ( myRefOnObject.length() > 0 ) {
+ aSize[ 0 ] = myRefOnObject.length() + 1;
+ aDataset = new HDFdataset( "Ref on shape", aSubGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( myRefOnObject.c_str() ) );
+ aDataset->CloseOnDisk();
+ }
+
+ // write applied hypotheses if exist
+ SALOMEDS::SObject_wrap mySubHypBranch;
+ found = mySObject->FindSubObject( GetRefOnAppliedHypothesisTag(),
+ mySubHypBranch.inout() );
+ if ( found ) {
+ aSubSubGroup = new HDFgroup( "Applied Hypotheses", aSubGroup );
+ aSubSubGroup->CreateOnDisk();
+
+ SALOMEDS::ChildIterator_wrap it = myCurrentStudy->NewChildIterator( mySubHypBranch );
+ int hypNb = 0;
+ for ( ; it->More(); it->Next() ) {
+ SALOMEDS::SObject_wrap mySubSObject = it->Value();
+ SALOMEDS::SObject_wrap myRefOnHyp;
+ bool ok = mySubSObject->ReferencedObject( myRefOnHyp.inout() );
+ if ( ok ) {
+ //string myRefOnObject = myRefOnHyp->GetID();
+ CORBA::Object_var anObject = SObjectToObject( myRefOnHyp );
+ CORBA::String_var objStr = GetORB()->object_to_string( anObject );
+ int id = myStudyContext->findId( string( objStr.in() ) );
+ //if ( myRefOnObject.length() > 0 ) {
+ //aSize[ 0 ] = myRefOnObject.length() + 1;
+ char hypName[ 30 ], hypId[ 30 ];
+ sprintf( hypName, "Hyp %d", ++hypNb );
+ sprintf( hypId, "%d", id );
+ aSize[ 0 ] = strlen( hypId ) + 1;
+ aDataset = new HDFdataset( hypName, aSubSubGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ //aDataset->WriteOnDisk( ( char* )( myRefOnObject.c_str() ) );
+ aDataset->WriteOnDisk( hypId );
+ aDataset->CloseOnDisk();
+ //}
+ }
+ }
+ aSubSubGroup->CloseOnDisk();
+ }
+
+ // write applied algorithms if exist
+ SALOMEDS::SObject_wrap mySubAlgoBranch;
+ found = mySObject->FindSubObject( GetRefOnAppliedAlgorithmsTag(),
+ mySubAlgoBranch.inout() );
+ if ( found ) {
+ aSubSubGroup = new HDFgroup( "Applied Algorithms", aSubGroup );
+ aSubSubGroup->CreateOnDisk();
+
+ SALOMEDS::ChildIterator_wrap it =
+ myCurrentStudy->NewChildIterator( mySubAlgoBranch );
+ int algoNb = 0;
+ for ( ; it->More(); it->Next() ) {
+ SALOMEDS::SObject_wrap mySubSObject = it->Value();
+ SALOMEDS::SObject_wrap myRefOnAlgo;
+ bool ok = mySubSObject->ReferencedObject( myRefOnAlgo.inout() );
+ if ( ok ) {
+ //string myRefOnObject = myRefOnAlgo->GetID();
+ CORBA::Object_var anObject = SObjectToObject( myRefOnAlgo );
+ CORBA::String_var objStr = GetORB()->object_to_string( anObject );
+ int id = myStudyContext->findId( string( objStr.in() ) );
+ //if ( myRefOnObject.length() > 0 ) {
+ //aSize[ 0 ] = myRefOnObject.length() + 1;
+ char algoName[ 30 ], algoId[ 30 ];
+ sprintf( algoName, "Algo %d", ++algoNb );
+ sprintf( algoId, "%d", id );
+ aSize[ 0 ] = strlen( algoId ) + 1;
+ aDataset = new HDFdataset( algoName, aSubSubGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ //aDataset->WriteOnDisk( ( char* )( myRefOnObject.c_str() ) );
+ aDataset->WriteOnDisk( algoId );
+ aDataset->CloseOnDisk();
+ //}
+ }
+ }
+ aSubSubGroup->CloseOnDisk();
+ }
+ // close submesh HDF group
+ aSubGroup->CloseOnDisk();
+ }
+ }
+ // close container of submeshes by type HDF group
+ aGroup->CloseOnDisk();
+ }
+ }
+ // All sub-meshes will be stored in MED file
+ // .. will NOT (PAL 12992)
+ //if ( shapeRefFound )
+ //myWriter.AddAllSubMeshes();
+
+ // store submesh order if any
+ const TListOfListOfInt& theOrderIds = myLocMesh.GetMeshOrder();
+ if ( theOrderIds.size() ) {
+ char order_list[ 30 ];
+ strcpy( order_list, "Mesh Order" );
+ // count number of submesh ids
+ int nbIDs = 0;
+ TListOfListOfInt::const_iterator idIt = theOrderIds.begin();
+ for ( ; idIt != theOrderIds.end(); idIt++ )
+ nbIDs += (*idIt).size();
+ // number of values = number of IDs +
+ // number of lists (for separators) - 1
+ int* smIDs = new int [ nbIDs + theOrderIds.size() - 1 ];
+ idIt = theOrderIds.begin();
+ for ( int i = 0; idIt != theOrderIds.end(); idIt++ ) {
+ const TListOfInt& idList = *idIt;
+ if (idIt != theOrderIds.begin()) // not first list
+ smIDs[ i++ ] = -1/* *idList.size()*/; // separator between lists
+ // dump submesh ids from current list
+ TListOfInt::const_iterator id_smId = idList.begin();
+ for( ; id_smId != idList.end(); id_smId++ )
+ smIDs[ i++ ] = *id_smId;
+ }
+ // write HDF group
+ aSize[ 0 ] = nbIDs + theOrderIds.size() - 1;
+
+ aDataset = new HDFdataset( order_list, aTopGroup, HDF_INT32, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( smIDs );
+ aDataset->CloseOnDisk();
+ //
+ delete[] smIDs;
+ }
+
+ // groups root sub-branch
+ SALOMEDS::SObject_wrap myGroupsBranch;
+ for ( int i = GetNodeGroupsTag(); i <= GetBallElementsGroupsTag(); i++ ) {
+ found = gotBranch->FindSubObject( i, myGroupsBranch.inout() );
+ if ( found ) {
+ char name_group[ 30 ];
+ if ( i == GetNodeGroupsTag() )
+ strcpy( name_group, "Groups of Nodes" );
+ else if ( i == GetEdgeGroupsTag() )
+ strcpy( name_group, "Groups of Edges" );
+ else if ( i == GetFaceGroupsTag() )
+ strcpy( name_group, "Groups of Faces" );
+ else if ( i == GetVolumeGroupsTag() )
+ strcpy( name_group, "Groups of Volumes" );
+ else if ( i == Get0DElementsGroupsTag() )
+ strcpy( name_group, "Groups of 0D Elements" );
+ else if ( i == GetBallElementsGroupsTag() )
+ strcpy( name_group, "Groups of Balls" );
+
+ aGroup = new HDFgroup( name_group, aTopGroup );
+ aGroup->CreateOnDisk();
+
+ SALOMEDS::ChildIterator_wrap it = myCurrentStudy->NewChildIterator( myGroupsBranch );
+ for ( ; it->More(); it->Next() ) {
+ SALOMEDS::SObject_wrap mySObject = it->Value();
+ CORBA::Object_var aSubObject = SObjectToObject( mySObject );
+ if ( !CORBA::is_nil( aSubObject ) ) {
+ SMESH_GroupBase_i* myGroupImpl =
+ dynamic_cast<SMESH_GroupBase_i*>( GetServant( aSubObject ).in() );
+ if ( !myGroupImpl )
+ continue;
+ SMESHDS_GroupBase* aGrpBaseDS = myGroupImpl->GetGroupDS();
+ if ( !aGrpBaseDS )
+ continue;
+
+ CORBA::String_var objStr = GetORB()->object_to_string( aSubObject );
+ int anId = myStudyContext->findId( string( objStr.in() ) );
+
+ // For each group, create a dataset named "Group <group_persistent_id>"
+ // and store the group's user name into it
+ const char* grpName = aGrpBaseDS->GetStoreName();
+ char* aUserName = myGroupImpl->GetName();
+ aSize[ 0 ] = strlen( aUserName ) + 1;
+
+ aDataset = new HDFdataset( grpName, aGroup, HDF_STRING, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( aUserName );
+ aDataset->CloseOnDisk();
+
+ // ouv : NPAL12872
+ // For each group, create a dataset named "Group <group_persistent_id> Color"
+ // and store the group's color into it
+ char grpColorName[ 30 ];
+ sprintf( grpColorName, "ColorGroup %d", anId );
+ SALOMEDS::Color aColor = myGroupImpl->GetColor();
+ double anRGB[3];
+ anRGB[ 0 ] = aColor.R;
+ anRGB[ 1 ] = aColor.G;
+ anRGB[ 2 ] = aColor.B;
+ aSize[ 0 ] = 3;
+ aDataset = new HDFdataset( grpColorName, aGroup, HDF_FLOAT64, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( anRGB );
+ aDataset->CloseOnDisk();
+
+ // Pass SMESHDS_Group to MED writer
+ SMESHDS_Group* aGrpDS = dynamic_cast<SMESHDS_Group*>( aGrpBaseDS );
+ if ( aGrpDS )
+ myWriter.AddGroup( aGrpDS );
+
+ // write reference on a shape if exists
+ SMESHDS_GroupOnGeom* aGeomGrp =
+ dynamic_cast<SMESHDS_GroupOnGeom*>( aGrpBaseDS );
+ if ( aGeomGrp ) {
+ SALOMEDS::SObject_wrap mySubRef, myShape;
+ if (mySObject->FindSubObject( GetRefOnShapeTag(), mySubRef.inout() ) &&
+ mySubRef->ReferencedObject( myShape.inout() ) &&
+ !CORBA::is_nil( myShape->GetObject() ))
+ {
+ string myRefOnObject = myShape->GetID();
+ if ( myRefOnObject.length() > 0 ) {
+ char aRefName[ 30 ];
+ sprintf( aRefName, "Ref on shape %d", anId);
+ aSize[ 0 ] = myRefOnObject.length() + 1;
+ aDataset = new HDFdataset(aRefName, aGroup, HDF_STRING, aSize, 1);
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( myRefOnObject.c_str() ) );
+ aDataset->CloseOnDisk();
+ }
+ }
+ else // shape ref is invalid:
+ {
+ // save a group on geometry as ordinary group
+ myWriter.AddGroup( aGeomGrp );
+ }
+ }
+ else if ( SMESH_GroupOnFilter_i* aFilterGrp_i =
+ dynamic_cast<SMESH_GroupOnFilter_i*>( myGroupImpl ))
+ {
+ std::string str = aFilterGrp_i->FilterToString();
+ std::string hdfGrpName = "Filter " + SMESH_Comment(anId);
+ aSize[ 0 ] = str.length() + 1;
+ aDataset = new HDFdataset( hdfGrpName.c_str(), aGroup, HDF_STRING, aSize, 1);
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( ( char* )( str.c_str() ) );
+ aDataset->CloseOnDisk();
+ }
+ }
+ }
+ aGroup->CloseOnDisk();
+ }
+ } // loop on groups
+
+ if ( strcmp( strHasData.c_str(), "1" ) == 0 )
+ {
+ // Flush current mesh information into MED file
+ myWriter.Perform();
+
+ // save info on nb of elements
+ SMESH_PreMeshInfo::SaveToFile( myImpl, id, aFile );
+
+ // maybe a shape was deleted in the study
+ if ( !shapeRefFound && !mySMESHDSMesh->ShapeToMesh().IsNull() && hasShape) {
+ TopoDS_Shape nullShape;
+ myLocMesh.ShapeToMesh( nullShape ); // remove shape referring data
+ }
+
+ if ( !mySMESHDSMesh->SubMeshes().empty() )
+ {
+ // Store submeshes
+ // ----------------
+ aGroup = new HDFgroup( "Submeshes", aTopGroup );
+ aGroup->CreateOnDisk();
+
+ // each element belongs to one or none submesh,
+ // so for each node/element, we store a submesh ID
+
+ // Make maps of submesh IDs of elements sorted by element IDs
+ typedef int TElemID;
+ typedef int TSubMID;
+ map< TElemID, TSubMID > eId2smId, nId2smId;
+ map< TElemID, TSubMID >::iterator hint; // insertion to map is done before hint
+ const map<int,SMESHDS_SubMesh*>& aSubMeshes = mySMESHDSMesh->SubMeshes();
+ map<int,SMESHDS_SubMesh*>::const_iterator itSubM ( aSubMeshes.begin() );
+ SMDS_NodeIteratorPtr itNode;
+ SMDS_ElemIteratorPtr itElem;
+ for ( itSubM = aSubMeshes.begin(); itSubM != aSubMeshes.end() ; itSubM++ )
+ {
+ TSubMID aSubMeID = itSubM->first;
+ SMESHDS_SubMesh* aSubMesh = itSubM->second;
+ if ( aSubMesh->IsComplexSubmesh() )
+ continue; // submesh containing other submeshs
+ // nodes
+ hint = nId2smId.begin(); // optimize insertion basing on increasing order of elem Ids in submesh
+ for ( itNode = aSubMesh->GetNodes(); itNode->more(); ++hint)
+ hint = nId2smId.insert( hint, make_pair( itNode->next()->GetID(), aSubMeID ));
+ // elements
+ hint = eId2smId.begin();
+ for ( itElem = aSubMesh->GetElements(); itElem->more(); ++hint)
+ hint = eId2smId.insert( hint, make_pair( itElem->next()->GetID(), aSubMeID ));
+ }
+
+ // Care of elements that are not on submeshes
+ if ( mySMESHDSMesh->NbNodes() != nId2smId.size() ) {
+ for ( itNode = mySMESHDSMesh->nodesIterator(); itNode->more(); )
+ /* --- stl_map.h says : */
+ /* A %map relies on unique keys and thus a %pair is only inserted if its */
+ /* first element (the key) is not already present in the %map. */
+ nId2smId.insert( make_pair( itNode->next()->GetID(), 0 ));
+ }
+ int nbElems = mySMESHDSMesh->NbEdges() + mySMESHDSMesh->NbFaces() + mySMESHDSMesh->NbVolumes();
+ if ( nbElems != eId2smId.size() ) {
+ for ( itElem = mySMESHDSMesh->elementsIterator(); itElem->more(); )
+ eId2smId.insert( make_pair( itElem->next()->GetID(), 0 ));
+ }
+
+ // Store submesh IDs
+ for ( int isNode = 0; isNode < 2; ++isNode )
+ {
+ map< TElemID, TSubMID >& id2smId = isNode ? nId2smId : eId2smId;
+ if ( id2smId.empty() ) continue;
+ map< TElemID, TSubMID >::const_iterator id_smId = id2smId.begin();
+ // make and fill array of submesh IDs
+ int* smIDs = new int [ id2smId.size() ];
+ for ( int i = 0; id_smId != id2smId.end(); ++id_smId, ++i )
+ smIDs[ i ] = id_smId->second;
+ // write HDF group
+ aSize[ 0 ] = id2smId.size();
+ string aDSName( isNode ? "Node Submeshes" : "Element Submeshes");
+ aDataset = new HDFdataset( (char*)aDSName.c_str(), aGroup, HDF_INT32, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( smIDs );
+ aDataset->CloseOnDisk();
+ //
+ delete[] smIDs;
+ }
+
+ aGroup->CloseOnDisk();
+
+ // Store node positions on sub-shapes (SMDS_Position):
+ // ----------------------------------------------------
+
+ aGroup = new HDFgroup( "Node Positions", aTopGroup );
+ aGroup->CreateOnDisk();
+
+ // in aGroup, create 5 datasets to contain:
+ // "Nodes on Edges" - ID of node on edge
+ // "Edge positions" - U parameter on node on edge
+ // "Nodes on Faces" - ID of node on face
+ // "Face U positions" - U parameter of node on face
+ // "Face V positions" - V parameter of node on face
+
+ // Find out nb of nodes on edges and faces
+ // Collect corresponing sub-meshes
+ int nbEdgeNodes = 0, nbFaceNodes = 0;
+ list<SMESHDS_SubMesh*> aEdgeSM, aFaceSM;
+ // loop on SMESHDS_SubMesh'es
+ for ( itSubM = aSubMeshes.begin(); itSubM != aSubMeshes.end() ; itSubM++ )
+ {
+ SMESHDS_SubMesh* aSubMesh = (*itSubM).second;
+ if ( aSubMesh->IsComplexSubmesh() )
+ continue; // submesh containing other submeshs
+ int nbNodes = aSubMesh->NbNodes();
+ if ( nbNodes == 0 ) continue;
+
+ int aShapeID = (*itSubM).first;
+ if ( aShapeID < 1 || aShapeID > mySMESHDSMesh->MaxShapeIndex() )
+ continue;
+ int aShapeType = mySMESHDSMesh->IndexToShape( aShapeID ).ShapeType();
+ // write only SMDS_FacePosition and SMDS_EdgePosition
+ switch ( aShapeType ) {
+ case TopAbs_FACE:
+ nbFaceNodes += nbNodes;
+ aFaceSM.push_back( aSubMesh );
+ break;
+ case TopAbs_EDGE:
+ nbEdgeNodes += nbNodes;
+ aEdgeSM.push_back( aSubMesh );
+ break;
+ default:
+ continue;
+ }
+ }
+ // Treat positions on edges or faces
+ for ( int onFace = 0; onFace < 2; onFace++ )
+ {
+ // Create arrays to store in datasets
+ int iNode = 0, nbNodes = ( onFace ? nbFaceNodes : nbEdgeNodes );
+ if (!nbNodes) continue;
+ int* aNodeIDs = new int [ nbNodes ];
+ double* aUPos = new double [ nbNodes ];
+ double* aVPos = ( onFace ? new double[ nbNodes ] : 0 );
+
+ // Fill arrays
+ // loop on sub-meshes
+ list<SMESHDS_SubMesh*> * pListSM = ( onFace ? &aFaceSM : &aEdgeSM );
+ list<SMESHDS_SubMesh*>::iterator itSM = pListSM->begin();
+ for ( ; itSM != pListSM->end(); itSM++ )
+ {
+ SMESHDS_SubMesh* aSubMesh = (*itSM);
+
+ SMDS_NodeIteratorPtr itNode = aSubMesh->GetNodes();
+ // loop on nodes in aSubMesh
+ while ( itNode->more() )
+ {
+ //node ID
+ const SMDS_MeshNode* node = itNode->next();
+ aNodeIDs [ iNode ] = node->GetID();
+
+ // Position
+ const SMDS_PositionPtr pos = node->GetPosition();
+ if ( onFace ) { // on FACE
+ const SMDS_FacePosition* fPos =
+ dynamic_cast<const SMDS_FacePosition*>( pos );
+ if ( fPos ) {
+ aUPos[ iNode ] = fPos->GetUParameter();
+ aVPos[ iNode ] = fPos->GetVParameter();
+ iNode++;
+ }
+ else
+ nbNodes--;
+ }
+ else { // on EDGE
+ const SMDS_EdgePosition* ePos =
+ dynamic_cast<const SMDS_EdgePosition*>( pos );
+ if ( ePos ) {
+ aUPos[ iNode ] = ePos->GetUParameter();
+ iNode++;
+ }
+ else
+ nbNodes--;
+ }
+ } // loop on nodes in aSubMesh
+ } // loop on sub-meshes
+
+ // Write datasets
+ if ( nbNodes )
+ {
+ aSize[ 0 ] = nbNodes;
+ // IDS
+ string aDSName( onFace ? "Nodes on Faces" : "Nodes on Edges");
+ aDataset = new HDFdataset( (char*)aDSName.c_str(), aGroup, HDF_INT32, aSize, 1 );
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( aNodeIDs );
+ aDataset->CloseOnDisk();
+
+ // U Positions
+ aDSName = ( onFace ? "Face U positions" : "Edge positions");
+ aDataset = new HDFdataset( (char*)aDSName.c_str(), aGroup, HDF_FLOAT64, aSize, 1);
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( aUPos );
+ aDataset->CloseOnDisk();
+ // V Positions
+ if ( onFace ) {
+ aDataset = new HDFdataset( "Face V positions", aGroup, HDF_FLOAT64, aSize, 1);
+ aDataset->CreateOnDisk();
+ aDataset->WriteOnDisk( aVPos );
+ aDataset->CloseOnDisk();
+ }
+ }
+ delete [] aNodeIDs;
+ delete [] aUPos;
+ if ( aVPos ) delete [] aVPos;
+
+ } // treat positions on edges or faces
+
+ // close "Node Positions" group
+ aGroup->CloseOnDisk();
+
+ } // if ( there are submeshes in SMESHDS_Mesh )
+ } // if ( hasData )
+
+ // close mesh HDF group
+ aTopGroup->CloseOnDisk();
+ }
+ }
+ }
+ }
+ }
+
+ // close HDF file
+ aFile->CloseOnDisk();
+ delete aFile;
+
+ // Convert temporary files to stream
+ aStreamFile = SALOMEDS_Tool::PutFilesToStream( tmpDir.ToCString(), aFileSeq.in(), isMultiFile );
+
+ // Remove temporary files and directory
+ if ( !isMultiFile )
+ SALOMEDS_Tool::RemoveTemporaryFiles( tmpDir.ToCString(), aFileSeq.in(), true );
+
+ INFOS( "SMESH_Gen_i::Save() completed" );
+ return aStreamFile._retn();
+}