#include "SMESH_Hypothesis_i.hxx"
#include "SMESH_Algo_i.hxx"
#include "SMESH_Group_i.hxx"
+#include "SMESH_PythonDump.hxx"
#include "SMESHDS_Document.hxx"
#include "SMESHDS_Group.hxx"
#include <boost/filesystem/path.hpp>
using namespace std;
+using SMESH::TPythonDump;
#define NUM_TMP_FILES 2
#ifdef _DEBUG_
-static int MYDEBUG = 0;
+static int MYDEBUG = 1;
#else
static int MYDEBUG = 0;
#endif
+// Make theStudy the engine's current study: keep a duplicated CORBA
+// reference, lazily create a per-study StudyContext, and force-load the
+// GEOM component so that shape references can be resolved later.
void SMESH_Gen_i::SetCurrentStudy( SALOMEDS::Study_ptr theStudy )
{
- if(MYDEBUG) MESSAGE( "SMESH_Gen_i::SetCurrentStudy" );
+ //if(MYDEBUG)
+ //MESSAGE( "SMESH_Gen_i::SetCurrentStudy" );
 myCurrentStudy = SALOMEDS::Study::_duplicate( theStudy );
 // create study context, if it doesn't exist and set current study
 int studyId = GetCurrentStudyID();
 if ( myStudyContextMap.find( studyId ) == myStudyContextMap.end() ) {
 myStudyContextMap[ studyId ] = new StudyContext;
 }
+
+ // Load GEOM data into the study (if the GEOM component is present) so
+ // that geometry SObjects referenced by meshes can be resolved.
+ // NOTE(review): FindComponent("GEOM") is called twice here - the result
+ // could be cached in a local SComponent_var; also confirm GetGeomEngine()
+ // is non-nil at this point, since LoadWith() receives it unchecked.
+ SALOMEDS::StudyBuilder_var aStudyBuilder = myCurrentStudy->NewBuilder();
+ if( !myCurrentStudy->FindComponent( "GEOM" )->_is_nil() )
+ aStudyBuilder->LoadWith( myCurrentStudy->FindComponent( "GEOM" ), GetGeomEngine() );
+
 // set current study for geom engine
- /*
- if ( !CORBA::is_nil( GetGeomEngine() ) )
- GetGeomEngine()->GetCurrentStudy( myCurrentStudy->StudyId() );
- */
+ //if ( !CORBA::is_nil( GetGeomEngine() ) )
+ // GetGeomEngine()->GetCurrentStudy( myCurrentStudy->StudyId() );
}
//=============================================================================
SMESH::SMESH_Hypothesis_var hyp = this->createHypothesis( theHypName, theLibName );
// Publish hypothesis/algorithm in the study
- if ( CanPublishInStudy( hyp ) )
- PublishHypothesis( myCurrentStudy, hyp );
+ if ( CanPublishInStudy( hyp ) ) {
+ SALOMEDS::SObject_var aSO = PublishHypothesis( myCurrentStudy, hyp );
+ if ( !aSO->_is_nil() ) {
+ // Update Python script
+ TPythonDump() << aSO << " = " << this << ".CreateHypothesis('"
+ << theHypName << "', '" << theLibName << "')";
+ }
+ }
return hyp._retn();
}
-
+
//=============================================================================
/*!
* SMESH_Gen_i::CreateMesh
SMESH_Mesh_i* meshServant = dynamic_cast<SMESH_Mesh_i*>( GetServant( mesh ).in() );
ASSERT( meshServant );
meshServant->SetShape( theShapeObject );
+
// publish mesh in the study
- if ( CanPublishInStudy( mesh ) )
- PublishMesh( myCurrentStudy, mesh.in() );
+ if ( CanPublishInStudy( mesh ) ) {
+ SALOMEDS::StudyBuilder_var aStudyBuilder = myCurrentStudy->NewBuilder();
+ aStudyBuilder->NewCommand(); // There is a transaction
+ SALOMEDS::SObject_var aSO = PublishMesh( myCurrentStudy, mesh.in() );
+ aStudyBuilder->CommitCommand();
+ if ( !aSO->_is_nil() ) {
+ // Update Python script
+ TPythonDump() << aSO << " = " << this << ".CreateMesh(" << theShapeObject << ")";
+ }
+ }
+
return mesh._retn();
}
SMESH::SMESH_Mesh_var aMesh = createMesh();
string aFileName; // = boost::filesystem::path(theFileName).leaf();
// publish mesh in the study
- if ( CanPublishInStudy( aMesh ) )
- PublishMesh( myCurrentStudy, aMesh.in(), aFileName.c_str() );
+ if ( CanPublishInStudy( aMesh ) ) {
+ SALOMEDS::StudyBuilder_var aStudyBuilder = myCurrentStudy->NewBuilder();
+ aStudyBuilder->NewCommand(); // There is a transaction
+ SALOMEDS::SObject_var aSO = PublishMesh( myCurrentStudy, aMesh.in(), aFileName.c_str() );
+ aStudyBuilder->CommitCommand();
+ if ( !aSO->_is_nil() ) {
+ // Update Python script
+ TPythonDump() << aSO << " = smeshgen.CreateMeshesFromUNV('" << theFileName << "')";
+ }
+ }
SMESH_Mesh_i* aServant = dynamic_cast<SMESH_Mesh_i*>( GetServant( aMesh ).in() );
ASSERT( aServant );
Unexpect aCatch(SALOME_SalomeException);
if(MYDEBUG) MESSAGE( "SMESH_Gen_i::CreateMeshFromMED" );
+ // Python Dump
+ TPythonDump aPythonDump;
+ aPythonDump << "([";
+ //TCollection_AsciiString aStr ("([");
+
// Retrieve mesh names from the file
DriverMED_R_SMESHDS_Mesh myReader;
myReader.SetFile( theFileName );
list<string> aNames = myReader.GetMeshNames(aStatus);
SMESH::mesh_array_var aResult = new SMESH::mesh_array();
theStatus = (SMESH::DriverMED_ReadStatus)aStatus;
- if(theStatus == SMESH::DRS_OK){
+ if (theStatus == SMESH::DRS_OK) {
+ SALOMEDS::StudyBuilder_var aStudyBuilder = myCurrentStudy->NewBuilder();
+ aStudyBuilder->NewCommand(); // There is a transaction
aResult->length( aNames.size() );
int i = 0;
// Iterate through all meshes and create mesh objects
for ( list<string>::iterator it = aNames.begin(); it != aNames.end(); it++ ) {
+ // Python Dump
+ //if (i > 0) aStr += ", ";
+ if (i > 0) aPythonDump << ", ";
+
// create mesh
SMESH::SMESH_Mesh_var mesh = createMesh();
// publish mesh in the study
+ SALOMEDS::SObject_var aSO;
if ( CanPublishInStudy( mesh ) )
- PublishMesh( myCurrentStudy, mesh.in(), (*it).c_str() );
-
+ aSO = PublishMesh( myCurrentStudy, mesh.in(), (*it).c_str() );
+ if ( !aSO->_is_nil() ) {
+ // Python Dump
+ aPythonDump << aSO;
+ //aStr += aSO->GetID();
+ } else {
+ // Python Dump
+ aPythonDump << "mesh_" << i;
+// aStr += "mesh_";
+// aStr += TCollection_AsciiString(i);
+ }
+
// Read mesh data (groups are published automatically by ImportMEDFile())
SMESH_Mesh_i* meshServant = dynamic_cast<SMESH_Mesh_i*>( GetServant( mesh ).in() );
ASSERT( meshServant );
aResult[i++] = SMESH::SMESH_Mesh::_duplicate( mesh );
}
+ aStudyBuilder->CommitCommand();
}
+
+ // Update Python script
+ aPythonDump << "], status) = " << this << ".CreateMeshesFromMED('" << theFileName << "')";
+
return aResult._retn();
}
SMESH::SMESH_Mesh_var aMesh = createMesh();
string aFileName; // = boost::filesystem::path(theFileName).leaf();
// publish mesh in the study
- if ( CanPublishInStudy( aMesh ) )
- PublishInStudy( myCurrentStudy, SALOMEDS::SObject::_nil(), aMesh.in(), aFileName.c_str() );
+ if ( CanPublishInStudy( aMesh ) ) {
+ SALOMEDS::StudyBuilder_var aStudyBuilder = myCurrentStudy->NewBuilder();
+ aStudyBuilder->NewCommand(); // There is a transaction
+ SALOMEDS::SObject_var aSO = PublishInStudy
+ ( myCurrentStudy, SALOMEDS::SObject::_nil(), aMesh.in(), aFileName.c_str() );
+ aStudyBuilder->CommitCommand();
+ if ( !aSO->_is_nil() ) {
+ // Update Python script
+ TPythonDump() << aSO << " = " << this << ".CreateMeshesFromSTL('" << theFileName << "')";
+ }
+ }
SMESH_Mesh_i* aServant = dynamic_cast<SMESH_Mesh_i*>( GetServant( aMesh ).in() );
ASSERT( aServant );
return false;
}
+//================================================================================
+/*!
+ * \brief Returns errors of hypotheses definition
+ * \param theMesh - the mesh
+ * \param theSubObject - the main or sub- shape
+ * \retval SMESH::algo_error_array* - sequence of errors
+ */
+//================================================================================
+
+// Collects hypothesis-definition errors for theMesh on theSubObject by
+// delegating to ::SMESH_Gen::GetAlgoState() and converting each native
+// TAlgoStateError into a CORBA SMESH::AlgoStateError. For each failing
+// algorithm, the user-visible name is looked up among the SObjects under
+// the "Algorithms" root of the current study; the algorithm's type name
+// is used as a fallback. Throws SALOME::BAD_PARAM on nil arguments or on
+// an unrecognized native error code.
+SMESH::algo_error_array* SMESH_Gen_i::GetAlgoState( SMESH::SMESH_Mesh_ptr theMesh,
+                                                    GEOM::GEOM_Object_ptr theSubObject )
+  throw ( SALOME::SALOME_Exception )
+{
+  Unexpect aCatch(SALOME_SalomeException);
+  if(MYDEBUG) MESSAGE( "SMESH_Gen_i::GetAlgoState()" );
+
+  if ( CORBA::is_nil( theSubObject ) )
+    THROW_SALOME_CORBA_EXCEPTION( "bad shape object reference", SALOME::BAD_PARAM );
+
+  if ( CORBA::is_nil( theMesh ) )
+    THROW_SALOME_CORBA_EXCEPTION( "bad Mesh reference",SALOME::BAD_PARAM );
+
+  SMESH::algo_error_array_var error_array = new SMESH::algo_error_array;
+  try {
+    SMESH_Mesh_i* meshServant = SMESH::DownCast<SMESH_Mesh_i*>( theMesh );
+    ASSERT( meshServant );
+    if ( meshServant ) {
+      TopoDS_Shape myLocShape = GeomObjectToShape( theSubObject );
+      ::SMESH_Mesh& myLocMesh = meshServant->GetImpl();
+      list< ::SMESH_Gen::TAlgoStateError > error_list;
+      list< ::SMESH_Gen::TAlgoStateError >::iterator error;
+      // call ::SMESH_Gen::GetAlgoState()
+      myGen.GetAlgoState( myLocMesh, myLocShape, error_list );
+      error_array->length( error_list.size() );
+      int i = 0;
+      for ( error = error_list.begin(); error != error_list.end(); ++error )
+      {
+        // error name: map the native enum onto the IDL one; any value not
+        // listed here is a programming error and is reported as BAD_PARAM
+        SMESH::AlgoStateErrorName errName;
+        switch ( error->_name ) {
+        case ::SMESH_Gen::MISSING_ALGO:     errName = SMESH::MISSING_ALGO; break;
+        case ::SMESH_Gen::MISSING_HYPO:     errName = SMESH::MISSING_HYPO; break;
+        case ::SMESH_Gen::NOT_CONFORM_MESH: errName = SMESH::NOT_CONFORM_MESH; break;
+        default:
+          THROW_SALOME_CORBA_EXCEPTION( "bad error name",SALOME::BAD_PARAM );
+        }
+        // algo name: stays a null String_var unless resolved below
+        CORBA::String_var algoName;
+        if ( error->_algo ) {
+          if ( !myCurrentStudy->_is_nil() ) {
+            // find algo in the study
+            SALOMEDS::SComponent_var father = SALOMEDS::SComponent::_narrow
+              ( myCurrentStudy->FindComponent( ComponentDataType() ) );
+            if ( !father->_is_nil() ) {
+              SALOMEDS::ChildIterator_var itBig = myCurrentStudy->NewChildIterator( father );
+              for ( ; itBig->More(); itBig->Next() ) {
+                SALOMEDS::SObject_var gotBranch = itBig->Value();
+                if ( gotBranch->Tag() == GetAlgorithmsRootTag() ) {
+                  SALOMEDS::ChildIterator_var algoIt = myCurrentStudy->NewChildIterator( gotBranch );
+                  for ( ; algoIt->More(); algoIt->Next() ) {
+                    SALOMEDS::SObject_var algoSO = algoIt->Value();
+                    CORBA::Object_var algoIOR = SObjectToObject( algoSO );
+                    if ( !CORBA::is_nil( algoIOR )) {
+                      SMESH_Hypothesis_i* myImpl = SMESH::DownCast<SMESH_Hypothesis_i*>( algoIOR );
+                      // match by comparing the servant's implementation
+                      // pointer with the one reported in the error
+                      if ( myImpl && myImpl->GetImpl() == error->_algo ) {
+                        algoName = algoSO->GetName();
+                        break;
+                      }
+                    }
+                  } // loop on algo SO's
+                  break;
+                } // if algo tag
+              } // SMESH component iterator
+            }
+          }
+        }
+          if ( algoName.in() == 0 )
+            // use algo type name
+            algoName = CORBA::string_dup( error->_algo->GetName() );
+        }
+        // fill AlgoStateError structure
+        SMESH::AlgoStateError & errStruct = error_array[ i++ ];
+        errStruct.name         = errName;
+        errStruct.algoName     = algoName;
+        errStruct.algoDim      = error->_algoDim;
+        errStruct.isGlobalAlgo = error->_isGlobalAlgo;
+      }
+    }
+  }
+  catch ( SALOME_Exception& S_ex ) {
+    // best-effort: log and return whatever was filled so far
+    INFOS( "catch exception "<< S_ex.what() );
+  }
+  return error_array._retn();
+}
+
//=============================================================================
/*!
* SMESH_Gen_i::GetSubShapesId
THROW_SALOME_CORBA_EXCEPTION( "bad Mesh reference",
SALOME::BAD_PARAM );
+ // Update Python script
+ TPythonDump() << "isDone = " << this << ".Compute( "
+ << theMesh << ", " << theShapeObject << ")";
+ TPythonDump() << "if not isDone: print 'Mesh " << theMesh << " : computation failed'";
+
try {
// get mesh servant
SMESH_Mesh_i* meshServant = dynamic_cast<SMESH_Mesh_i*>( GetServant( theMesh ).in() );
theComponent->GetStudy()->StudyId() != myCurrentStudy->StudyId() )
SetCurrentStudy( theComponent->GetStudy() );
+ // Store study contents as a set of python commands
+ SavePython(myCurrentStudy);
+
StudyContext* myStudyContext = GetCurrentStudyContext();
// Declare a byte stream
}
}
}
- // maybe a shape was deleted in the study
- if ( !shapeRefFound && !mySMESHDSMesh->ShapeToMesh().IsNull() ) {
- TopoDS_Shape nullShape;
- myLocMesh.ShapeToMesh( nullShape ); // remove shape referring data
- }
// write applied hypotheses if exist
SALOMEDS::SObject_var myHypBranch;
// write applied algorithms if exist
SALOMEDS::SObject_var myAlgoBranch;
found = gotBranch->FindSubObject( GetRefOnAppliedAlgorithmsTag(), myAlgoBranch );
- if ( found && !shapeRefFound ) { // remove applied hyps
+ if ( found && !shapeRefFound ) { // remove applied algos
myCurrentStudy->NewBuilder()->RemoveObjectWithChildren( myAlgoBranch );
}
if ( found && shapeRefFound ) {
mySMESHDSMesh->GetHypothesis( S );
list<const SMESHDS_Hypothesis*>::const_iterator hyp = hypList.begin();
while ( hyp != hypList.end() ) {
- int hypID = (*hyp++)->GetID(); // goto next here because
+ int hypID = (*hyp++)->GetID(); // goto next hyp here because
myLocMesh.RemoveHypothesis( S, hypID ); // hypList changes here
}
}
}
}
// All sub-meshes will be stored in MED file
- myWriter.AddAllSubMeshes();
+ if ( shapeRefFound )
+ myWriter.AddAllSubMeshes();
// groups root sub-branch
SALOMEDS::SObject_var myGroupsBranch;
// Flush current mesh information into MED file
myWriter.Perform();
+ // maybe a shape was deleted in the study
+ if ( !shapeRefFound && !mySMESHDSMesh->ShapeToMesh().IsNull() ) {
+ TopoDS_Shape nullShape;
+ myLocMesh.ShapeToMesh( nullShape ); // remove shape referring data
+ }
// Store node positions on sub-shapes (SMDS_Position):
theComponent->GetStudy()->StudyId() != myCurrentStudy->StudyId() )
SetCurrentStudy( theComponent->GetStudy() );
+/* if( !theComponent->_is_nil() )
+ {
+ //SALOMEDS::Study_var aStudy = SALOMEDS::Study::_narrow( theComponent->GetStudy() );
+ if( !myCurrentStudy->FindComponent( "GEOM" )->_is_nil() )
+ loadGeomData( myCurrentStudy->FindComponent( "GEOM" ) );
+ }*/
+
StudyContext* myStudyContext = GetCurrentStudyContext();
// Get temporary files location
char* hypname_str = new char[ size ];
aDataset->ReadFromDisk( hypname_str );
hypname = string( hypname_str );
- delete hypname_str;
+ delete [] hypname_str;
aDataset->CloseOnDisk();
}
// --> get hypothesis plugin library name
aDataset->ReadFromDisk( libname_str );
if(MYDEBUG) SCRUTE( libname_str );
libname = string( libname_str );
- delete libname_str;
+ delete [] libname_str;
aDataset->CloseOnDisk();
}
// --> get hypothesis data
char* hypdata_str = new char[ size ];
aDataset->ReadFromDisk( hypdata_str );
hypdata = string( hypdata_str );
- delete hypdata_str;
+ delete [] hypdata_str;
aDataset->CloseOnDisk();
}
}
char* hypname_str = new char[ size ];
aDataset->ReadFromDisk( hypname_str );
hypname = string( hypname_str );
- delete hypname_str;
+ delete [] hypname_str;
aDataset->CloseOnDisk();
}
// --> get algorithm plugin library name
aDataset->ReadFromDisk( libname_str );
if(MYDEBUG) SCRUTE( libname_str );
libname = string( libname_str );
- delete libname_str;
+ delete [] libname_str;
aDataset->CloseOnDisk();
}
// --> get algorithm data
aDataset->ReadFromDisk( hypdata_str );
if(MYDEBUG) SCRUTE( hypdata_str );
hypdata = string( hypdata_str );
- delete hypdata_str;
+ delete [] hypdata_str;
aDataset->CloseOnDisk();
}
}
SALOMEDS::SObject_var shapeSO = myCurrentStudy->FindObjectID( refFromFile );
// Make sure GEOM data are loaded first
- loadGeomData( shapeSO->GetFatherComponent() );
+ //loadGeomData( shapeSO->GetFatherComponent() );
CORBA::Object_var shapeObject = SObjectToObject( shapeSO );
if ( !CORBA::is_nil( shapeObject ) ) {
}
}
- // try to get applied hypotheses
- if ( aTopGroup->ExistInternalObject( "Applied Hypotheses" ) ) {
- aGroup = new HDFgroup( "Applied Hypotheses", aTopGroup );
+ // try to get applied algorithms
+ if ( aTopGroup->ExistInternalObject( "Applied Algorithms" ) ) {
+ aGroup = new HDFgroup( "Applied Algorithms", aTopGroup );
aGroup->OpenOnDisk();
- // get number of applied hypotheses
+ // get number of applied algorithms
int aNbSubObjects = aGroup->nInternalObjects();
+ if(MYDEBUG) MESSAGE( "VSR - number of applied algos " << aNbSubObjects );
for ( int j = 0; j < aNbSubObjects; j++ ) {
char name_dataset[ HDF_NAME_MAX_LEN+1 ];
aGroup->InternalObjectIndentify( j, name_dataset );
- // check if it is a hypothesis
- if ( string( name_dataset ).substr( 0, 3 ) == string( "Hyp" ) ) {
+ // check if it is an algorithm
+ if ( string( name_dataset ).substr( 0, 4 ) == string( "Algo" ) ) {
aDataset = new HDFdataset( name_dataset, aGroup );
aDataset->OpenOnDisk();
size = aDataset->GetSize();
aDataset->ReadFromDisk( refFromFile );
aDataset->CloseOnDisk();
- // san - it is impossible to recover applied hypotheses using their entries within Load() method
+ // san - it is impossible to recover applied algorithms using their entries within Load() method
//SALOMEDS::SObject_var hypSO = myCurrentStudy->FindObjectID( refFromFile );
//CORBA::Object_var hypObject = SObjectToObject( hypSO );
aGroup->CloseOnDisk();
}
- // try to get applied algorithms
- if ( aTopGroup->ExistInternalObject( "Applied Algorithms" ) ) {
- aGroup = new HDFgroup( "Applied Algorithms", aTopGroup );
+ // try to get applied hypotheses
+ if ( aTopGroup->ExistInternalObject( "Applied Hypotheses" ) ) {
+ aGroup = new HDFgroup( "Applied Hypotheses", aTopGroup );
aGroup->OpenOnDisk();
- // get number of applied algorithms
+ // get number of applied hypotheses
int aNbSubObjects = aGroup->nInternalObjects();
- if(MYDEBUG) MESSAGE( "VSR - number of applied algos " << aNbSubObjects );
for ( int j = 0; j < aNbSubObjects; j++ ) {
char name_dataset[ HDF_NAME_MAX_LEN+1 ];
aGroup->InternalObjectIndentify( j, name_dataset );
- // check if it is an algorithm
- if ( string( name_dataset ).substr( 0, 4 ) == string( "Algo" ) ) {
+ // check if it is a hypothesis
+ if ( string( name_dataset ).substr( 0, 3 ) == string( "Hyp" ) ) {
aDataset = new HDFdataset( name_dataset, aGroup );
aDataset->OpenOnDisk();
size = aDataset->GetSize();
aDataset->ReadFromDisk( refFromFile );
aDataset->CloseOnDisk();
- // san - it is impossible to recover applied algorithms using their entries within Load() method
+ // san - it is impossible to recover applied hypotheses using their entries within Load() method
//SALOMEDS::SObject_var hypSO = myCurrentStudy->FindObjectID( refFromFile );
//CORBA::Object_var hypObject = SObjectToObject( hypSO );
// myReader.GetSubMesh( aSubMeshDS, subid );
// }
- // try to get applied hypotheses
- if ( aSubGroup->ExistInternalObject( "Applied Hypotheses" ) ) {
- // open "applied hypotheses" HDF group
- aSubSubGroup = new HDFgroup( "Applied Hypotheses", aSubGroup );
+ // try to get applied algorithms
+ if ( aSubGroup->ExistInternalObject( "Applied Algorithms" ) ) {
+ // open "applied algorithms" HDF group
+ aSubSubGroup = new HDFgroup( "Applied Algorithms", aSubGroup );
aSubSubGroup->OpenOnDisk();
- // get number of applied hypotheses
+ // get number of applied algorithms
int aNbSubObjects = aSubSubGroup->nInternalObjects();
for ( int l = 0; l < aNbSubObjects; l++ ) {
char name_dataset[ HDF_NAME_MAX_LEN+1 ];
aSubSubGroup->InternalObjectIndentify( l, name_dataset );
- // check if it is a hypothesis
- if ( string( name_dataset ).substr( 0, 3 ) == string( "Hyp" ) ) {
+ // check if it is an algorithm
+ if ( string( name_dataset ).substr( 0, 4 ) == string( "Algo" ) ) {
aDataset = new HDFdataset( name_dataset, aSubSubGroup );
aDataset->OpenOnDisk();
size = aDataset->GetSize();
char* refFromFile = new char[ size ];
aDataset->ReadFromDisk( refFromFile );
aDataset->CloseOnDisk();
-
+
//SALOMEDS::SObject_var hypSO = myCurrentStudy->FindObjectID( refFromFile );
//CORBA::Object_var hypObject = SObjectToObject( hypSO );
int id = atoi( refFromFile );
}
}
}
- // close "applied hypotheses" HDF group
+ // close "applied algorithms" HDF group
aSubSubGroup->CloseOnDisk();
}
-
- // try to get applied algorithms
- if ( aSubGroup->ExistInternalObject( "Applied Algorithms" ) ) {
- // open "applied algorithms" HDF group
- aSubSubGroup = new HDFgroup( "Applied Algorithms", aSubGroup );
+
+ // try to get applied hypotheses
+ if ( aSubGroup->ExistInternalObject( "Applied Hypotheses" ) ) {
+ // open "applied hypotheses" HDF group
+ aSubSubGroup = new HDFgroup( "Applied Hypotheses", aSubGroup );
aSubSubGroup->OpenOnDisk();
- // get number of applied algorithms
+ // get number of applied hypotheses
int aNbSubObjects = aSubSubGroup->nInternalObjects();
for ( int l = 0; l < aNbSubObjects; l++ ) {
char name_dataset[ HDF_NAME_MAX_LEN+1 ];
aSubSubGroup->InternalObjectIndentify( l, name_dataset );
- // check if it is an algorithm
- if ( string( name_dataset ).substr( 0, 4 ) == string( "Algo" ) ) {
+ // check if it is a hypothesis
+ if ( string( name_dataset ).substr( 0, 3 ) == string( "Hyp" ) ) {
aDataset = new HDFdataset( name_dataset, aSubSubGroup );
aDataset->OpenOnDisk();
size = aDataset->GetSize();
char* refFromFile = new char[ size ];
aDataset->ReadFromDisk( refFromFile );
aDataset->CloseOnDisk();
-
+
//SALOMEDS::SObject_var hypSO = myCurrentStudy->FindObjectID( refFromFile );
//CORBA::Object_var hypObject = SObjectToObject( hypSO );
int id = atoi( refFromFile );
}
}
}
- // close "applied algorithms" HDF group
+ // close "applied hypotheses" HDF group
aSubSubGroup->CloseOnDisk();
}
-
+
// close submesh HDF group
aSubGroup->CloseOnDisk();
}
if(hasData) {
// Read sub-meshes from MED
- if(MYDEBUG) MESSAGE("JFA - Create all sub-meshes");
+ if(MYDEBUG) MESSAGE("Create all sub-meshes");
myReader.CreateAllSubMeshes();
}
else // NODE IDS
{
- int* ids = new int [ aDataset->GetSize() ];
+ int aSize = aDataset->GetSize();
+
+ // for reading files, created from 18.07.2005 till 10.10.2005
+ if (aDataset->GetType() == HDF_STRING)
+ aSize /= sizeof(int);
+
+ int* ids = new int [aSize];
aDataset->ReadFromDisk( ids );
// on face or nodes?
if ( strncmp( aDSName, aEid_DSName, strlen( aEid_DSName )) == 0 ) {
aEids = ids;
- nbEids = aDataset->GetSize();
+ nbEids = aSize;
}
else {
aFids = ids;
- nbFids = aDataset->GetSize();
+ nbFids = aSize;
}
}
} // loop on 5 datasets
// Recompute State (as computed sub-meshes are restored from MED)
if ( !aShapeObject->_is_nil() ) {
- MESSAGE("JFA - Compute State Engine ...");
+ MESSAGE("Compute State Engine ...");
TopoDS_Shape myLocShape = GeomObjectToShape( aShapeObject );
myNewMeshImpl->GetImpl().GetSubMesh(myLocShape)->ComputeStateEngine
(SMESH_subMesh::SUBMESH_RESTORED);
- MESSAGE("JFA - Compute State Engine finished");
+ MESSAGE("Compute State Engine finished");
}
// try to get groups