From a9a46d67d1b5c4ae681ddbdb28228bced9127e3c Mon Sep 17 00:00:00 2001 From: ribes Date: Thu, 5 Jul 2007 06:42:51 +0000 Subject: [PATCH] - Salome_file can now be saved into an HDF file - Makefiles are changed to add the HDF persist dependency --- idl/SALOME_Component.idl | 11 + src/Container/Makefile.am | 2 + src/Container/Salome_file_i.cxx | 400 +++++++++++++++++++++++++++++- src/Container/Salome_file_i.hxx | 1 + src/Container/TestSalome_file.cxx | 32 ++- src/ParallelContainer/Makefile.am | 3 + 6 files changed, 437 insertions(+), 12 deletions(-) diff --git a/idl/SALOME_Component.idl b/idl/SALOME_Component.idl index 788db4bb4..5c8746f32 100644 --- a/idl/SALOME_Component.idl +++ b/idl/SALOME_Component.idl @@ -471,6 +471,17 @@ module Engines */ void save(in string hdf5_file) raises (SALOME::SALOME_Exception); + /*! + Save a Salome_file into an hdf5_file. All files that are managed + are saved into the hdf5_file. + + \param hdf5_file name (with path) of the hdf5_file. + + \exception contains information about errors if the save doesn't succeed. + + */ + void save_all(in string hdf5_file) raises (SALOME::SALOME_Exception); + /**************/ /*! diff --git a/src/Container/Makefile.am b/src/Container/Makefile.am index 8bcfdd45c..2ccd098c9 100644 --- a/src/Container/Makefile.am +++ b/src/Container/Makefile.am @@ -73,6 +73,7 @@ COMMON_CPPFLAGS=\ -I$(srcdir)/../Registry \ -I$(srcdir)/../Notification \ -I$(srcdir)/../ResourcesManager \ + -I$(srcdir)/../HDFPersist \ -I$(top_builddir)/salome_adm/unix \ -I$(top_builddir)/idl \ @CORBA_CXXFLAGS@ @CORBA_INCLUDES@ @@ -86,6 +87,7 @@ COMMON_LIBS =\ ../Utils/libOpUtil.la \ ../SALOMELocalTrace/libSALOMELocalTrace.la \ ../Basics/libSALOMEBasics.la \ + ../HDFPersist/libSalomeHDFPersist.la \ $(top_builddir)/idl/libSalomeIDLKernel.la\ @MPI_LIBS@ \ @CORBA_LIBS@ diff --git a/src/Container/Salome_file_i.cxx b/src/Container/Salome_file_i.cxx index 2636f3729..43fe8b1b5 100644 --- a/src/Container/Salome_file_i.cxx +++ b/src/Container/Salome_file_i.cxx @@ -27,6 +27,8 @@ #include "utilities.h" #include #include +#include "HDFOI.hxx" +#include //============================================================================= /*!
@@ -56,14 +58,390 @@ Salome_file_i::~Salome_file_i() void Salome_file_i::load(const char* hdf5_file) { - MESSAGE("Salome_file_i::load : NOT YET IMPLEMENTED"); _state.hdf5_file_name = CORBA::string_dup(hdf5_file); + try + { + HDFfile *hdf_file; + HDFgroup *hdf_group; + HDFdataset *hdf_dataset; + int size; + int fd; + char * value; + char * buffer; + + hdf_file = new HDFfile((char*) hdf5_file); + hdf_file->OpenOnDisk(HDF_RDONLY); + + hdf_group = new HDFgroup("CONFIG",hdf_file); + hdf_group->OpenOnDisk(); + hdf_dataset = new HDFdataset("MODE",hdf_group); + hdf_dataset->OpenOnDisk(); + size = hdf_dataset->GetSize(); + value = new char[size]; + hdf_dataset->ReadFromDisk(value); + hdf_dataset->CloseOnDisk(); + std::string mode(value); + delete value; + + hdf_group = new HDFgroup("GROUP_FILES",hdf_file); + hdf_group->OpenOnDisk(); + hdf_dataset = new HDFdataset("LIST_OF_FILES",hdf_group); + hdf_dataset->OpenOnDisk(); + size = hdf_dataset->GetSize(); + value = new char[size]; + hdf_dataset->ReadFromDisk(value); + hdf_dataset->CloseOnDisk(); + std::string list_of_files(value); + delete value; + + std::istringstream iss(list_of_files); + std::string file_name; + while (std::getline(iss, file_name, ' ')) + { + std::string dataset_group_name("DATASET"); + dataset_group_name += file_name; + + hdf_group = new HDFgroup((char *) dataset_group_name.c_str(), hdf_file); + hdf_group->OpenOnDisk(); + + hdf_dataset = new HDFdataset("NAME",hdf_group); + hdf_dataset->OpenOnDisk(); + size = hdf_dataset->GetSize(); + value = new char[size]; + hdf_dataset->ReadFromDisk(value); + hdf_dataset->CloseOnDisk(); + std::string name(value); + + hdf_dataset = new HDFdataset("PATH",hdf_group); + hdf_dataset->OpenOnDisk(); + size = hdf_dataset->GetSize(); + value = new char[size]; + hdf_dataset->ReadFromDisk(value); + hdf_dataset->CloseOnDisk(); + std::string path(value); + + hdf_dataset = new HDFdataset("TYPE",hdf_group); + hdf_dataset->OpenOnDisk(); + size = hdf_dataset->GetSize(); + value = new char[size]; + hdf_dataset->ReadFromDisk(value); + hdf_dataset->CloseOnDisk(); + std::string type(value); + + hdf_dataset = new HDFdataset("SOURCE_FILE_NAME",hdf_group); + hdf_dataset->OpenOnDisk(); + size = hdf_dataset->GetSize(); + value = new char[size]; + hdf_dataset->ReadFromDisk(value); + hdf_dataset->CloseOnDisk(); + std::string source_file_name(value); + + hdf_dataset = new HDFdataset("STATUS",hdf_group); + hdf_dataset->OpenOnDisk(); + size = hdf_dataset->GetSize(); + value = new char[size]; + hdf_dataset->ReadFromDisk(value); + hdf_dataset->CloseOnDisk(); + std::string status(value); + + if (mode == "all") { + + // Changing path, is now current directory + char CurrentPath[_path_max]; + getcwd(CurrentPath, _path_max); + path = CurrentPath; + + std::string group_name("GROUP"); + group_name += file_name; + hdf_group = new HDFgroup((char *) group_name.c_str(),hdf_file); + hdf_group->OpenOnDisk(); + hdf_dataset = new HDFdataset("FILE DATASET",hdf_group); + hdf_dataset->OpenOnDisk(); + size = hdf_dataset->GetSize(); + buffer = new char[size]; + + if ( (fd = ::open(file_name.c_str(),O_RDWR|O_CREAT,00666)) <0) { + SALOME::ExceptionStruct es; + es.type = SALOME::INTERNAL_ERROR; + std::string text = "open failed"; + es.text = CORBA::string_dup(text.c_str()); + throw SALOME::SALOME_Exception(es); + }; + hdf_dataset->ReadFromDisk(buffer); + if ( write(fd,buffer,size) <0) { + SALOME::ExceptionStruct es; + es.type = SALOME::INTERNAL_ERROR; + std::string text = "write failed"; + es.text = CORBA::string_dup(text.c_str()); + throw 
SALOME::SALOME_Exception(es); + }; + // Close the target file + ::close(fd); + + Engines::file infos; + infos.file_name = CORBA::string_dup(file_name.c_str()); + infos.path = CORBA::string_dup(path.c_str()); + infos.type = CORBA::string_dup(type.c_str()); + infos.source_file_name = CORBA::string_dup(source_file_name.c_str()); + infos.status = CORBA::string_dup(status.c_str()); + + _fileManaged[file_name] = infos; + + // Update Salome_file state + _state.number_of_files++; + _state.files_ok = true; + } + else { + Engines::file infos; + infos.file_name = CORBA::string_dup(file_name.c_str()); + infos.path = CORBA::string_dup(path.c_str()); + infos.type = CORBA::string_dup(type.c_str()); + infos.source_file_name = CORBA::string_dup(source_file_name.c_str()); + infos.status = CORBA::string_dup(status.c_str()); + + _fileManaged[file_name] = infos; + + // Update Salome_file state + _state.number_of_files++; + if (status != "ok") + _state.files_ok = false; + } + } + } + catch (HDFexception) + { + SALOME::ExceptionStruct es; + es.type = SALOME::INTERNAL_ERROR; + std::string text = "!!!! HDFexception"; + es.text = CORBA::string_dup(text.c_str()); + throw SALOME::SALOME_Exception(es); + } } void Salome_file_i::save(const char* hdf5_file) { - MESSAGE("Salome_file_i::save : NOT YET IMPLEMENTED"); _state.hdf5_file_name = CORBA::string_dup(hdf5_file); + try + { + HDFfile *hdf_file; + HDFgroup *hdf_group; + HDFdataset *hdf_dataset; + hdf_size size[1]; + _t_fileManaged::iterator begin = _fileManaged.begin(); + _t_fileManaged::iterator end = _fileManaged.end(); + + hdf_file = new HDFfile((char*) _state.hdf5_file_name.in()); + hdf_file->CreateOnDisk(); + + // Save mode information + hdf_group = new HDFgroup("CONFIG", hdf_file); + hdf_group->CreateOnDisk(); + std::string mode("infos"); + size[0] = strlen(mode.c_str()) + 1; + hdf_dataset = new HDFdataset("MODE", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) mode.c_str()); + hdf_dataset->CloseOnDisk(); + hdf_group->CloseOnDisk(); + + // List of files that are managed + std::string list_of_files; + for(;begin!=end;begin++) + { + Engines::file file_infos = begin->second; + std::string file_name(file_infos.file_name.in()); + + list_of_files = list_of_files + file_name + std::string(" "); + } + hdf_group = new HDFgroup("GROUP_FILES", hdf_file); + hdf_group->CreateOnDisk(); + size[0] = strlen(list_of_files.c_str()) + 1; + hdf_dataset = new HDFdataset("LIST_OF_FILES", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) list_of_files.c_str()); + hdf_dataset->CloseOnDisk(); + hdf_group->CloseOnDisk(); + + // Insert Files into the hdf5_file + begin = _fileManaged.begin(); + for(;begin!=end;begin++) + { + Engines::file file_infos = begin->second; + std::string file_name(file_infos.file_name.in()); + std::string comp_file_name(_fileManaged[file_name].path.in()); + comp_file_name.append(_fileManaged[file_name].file_name.in()); + std::string dataset_group_name("DATASET"); + dataset_group_name += std::string(_fileManaged[file_name].file_name.in()); + + hdf_group = new HDFgroup((char *) dataset_group_name.c_str(), hdf_file); + hdf_group->CreateOnDisk(); + size[0] = strlen(file_infos.file_name.in()) + 1; + hdf_dataset = new HDFdataset("NAME", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) file_infos.file_name.in()); + hdf_dataset->CloseOnDisk(); + size[0] = strlen(file_infos.path.in()) + 1; + hdf_dataset = new 
HDFdataset("PATH", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) file_infos.path.in()); + hdf_dataset->CloseOnDisk(); + size[0] = strlen(file_infos.type.in()) + 1; + hdf_dataset = new HDFdataset("TYPE", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) file_infos.type.in()); + hdf_dataset->CloseOnDisk(); + size[0] = strlen(file_infos.source_file_name.in()) + 1; + hdf_dataset = new HDFdataset("SOURCE_FILE_NAME", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) file_infos.source_file_name.in()); + hdf_dataset->CloseOnDisk(); + size[0] = strlen(file_infos.status.in()) + 1; + hdf_dataset = new HDFdataset("STATUS", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) file_infos.status.in()); + hdf_dataset->CloseOnDisk(); + hdf_group->CloseOnDisk(); + } + + hdf_file->CloseOnDisk(); + + // delete hdf_dataset; + // delete hdf_group; ----> SEGFAULT !!! + // delete hdf_file; ----> SEGFAULT !!! + } + catch (HDFexception) + { + SALOME::ExceptionStruct es; + es.type = SALOME::INTERNAL_ERROR; + std::string text = "!!!! HDFexception"; + es.text = CORBA::string_dup(text.c_str()); + throw SALOME::SALOME_Exception(es); + } +} + +void +Salome_file_i::save_all(const char* hdf5_file) { + + _state.hdf5_file_name = CORBA::string_dup(hdf5_file); + // Test Salome_file status + if (_state.files_ok == false) { + SALOME::ExceptionStruct es; + es.type = SALOME::INTERNAL_ERROR; + std::string text = "File Not Ok !"; + es.text = CORBA::string_dup(text.c_str()); + throw SALOME::SALOME_Exception(es); + } + + // For each file we create two groups + // First group contains file's informations + // Second group contains the file + // At the end we create a group and a dataset containing the names + // of all the files. 
+ try + { + HDFfile *hdf_file; + HDFgroup *hdf_group; + HDFdataset *hdf_dataset; + hdf_size size[1]; + _t_fileManaged::iterator begin = _fileManaged.begin(); + _t_fileManaged::iterator end = _fileManaged.end(); + + hdf_file = new HDFfile((char*) _state.hdf5_file_name.in()); + hdf_file->CreateOnDisk(); + + // Save mode information + hdf_group = new HDFgroup("CONFIG", hdf_file); + hdf_group->CreateOnDisk(); + std::string mode("all"); + size[0] = strlen(mode.c_str()) + 1; + hdf_dataset = new HDFdataset("MODE", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) mode.c_str()); + hdf_dataset->CloseOnDisk(); + hdf_group->CloseOnDisk(); + + + // List of files that will be inserted + std::string list_of_files; + for(;begin!=end;begin++) + { + Engines::file file_infos = begin->second; + std::string file_name(file_infos.file_name.in()); + + list_of_files = list_of_files + file_name + std::string(" "); + } + hdf_group = new HDFgroup("GROUP_FILES", hdf_file); + hdf_group->CreateOnDisk(); + size[0] = strlen(list_of_files.c_str()) + 1; + hdf_dataset = new HDFdataset("LIST_OF_FILES", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) list_of_files.c_str()); + hdf_dataset->CloseOnDisk(); + hdf_group->CloseOnDisk(); + + // Insert Files into the hdf5_file + begin = _fileManaged.begin(); + for(;begin!=end;begin++) + { + Engines::file file_infos = begin->second; + std::string file_name(file_infos.file_name.in()); + std::string comp_file_name(_fileManaged[file_name].path.in()); + comp_file_name.append(_fileManaged[file_name].file_name.in()); + std::string group_name("GROUP"); + group_name += std::string(_fileManaged[file_name].file_name.in()); + std::string dataset_group_name("DATASET"); + dataset_group_name += std::string(_fileManaged[file_name].file_name.in()); + + hdf_group = new HDFgroup((char *) group_name.c_str(), hdf_file); + hdf_group->CreateOnDisk(); + HDFConvert::FromAscii(comp_file_name.c_str(), *hdf_group, "FILE DATASET"); + hdf_group->CloseOnDisk(); + + hdf_group = new HDFgroup((char *) dataset_group_name.c_str(), hdf_file); + hdf_group->CreateOnDisk(); + size[0] = strlen(file_infos.file_name.in()) + 1; + hdf_dataset = new HDFdataset("NAME", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) file_infos.file_name.in()); + hdf_dataset->CloseOnDisk(); + size[0] = strlen(file_infos.path.in()) + 1; + hdf_dataset = new HDFdataset("PATH", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) file_infos.path.in()); + hdf_dataset->CloseOnDisk(); + size[0] = strlen(file_infos.type.in()) + 1; + hdf_dataset = new HDFdataset("TYPE", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) file_infos.type.in()); + hdf_dataset->CloseOnDisk(); + size[0] = strlen(file_infos.source_file_name.in()) + 1; + hdf_dataset = new HDFdataset("SOURCE_FILE_NAME", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) file_infos.source_file_name.in()); + hdf_dataset->CloseOnDisk(); + size[0] = strlen(file_infos.status.in()) + 1; + hdf_dataset = new HDFdataset("STATUS", hdf_group, HDF_STRING, size, 1); + hdf_dataset->CreateOnDisk(); + hdf_dataset->WriteOnDisk((void *) file_infos.status.in()); + hdf_dataset->CloseOnDisk(); + hdf_group->CloseOnDisk(); + + } + + hdf_file->CloseOnDisk(); + + // delete hdf_dataset; + // delete hdf_group; ----> SEGFAULT !!! 
+ // delete hdf_file; ----> SEGFAULT !!! + } + catch (HDFexception) + { + SALOME::ExceptionStruct es; + es.type = SALOME::INTERNAL_ERROR; + std::string text = "!!!! HDFexception"; + es.text = CORBA::string_dup(text.c_str()); + throw SALOME::SALOME_Exception(es); + } } void @@ -147,15 +525,15 @@ Salome_file_i::setDistributedFile(const char* comp_file_name, } // Test if this file is already added - _t_fileManaged::iterator it = _fileManaged.find(file_name); - if (it != _fileManaged.end()) - { - SALOME::ExceptionStruct es; - es.type = SALOME::INTERNAL_ERROR; - std::string text = "file already added"; - es.text = CORBA::string_dup(text.c_str()); - throw SALOME::SALOME_Exception(es); - } + //_t_fileManaged::iterator it = _fileManaged.find(file_name); + //if (it != _fileManaged.end()) + //{ + // SALOME::ExceptionStruct es; + // es.type = SALOME::INTERNAL_ERROR; + // std::string text = "file already added"; + // es.text = CORBA::string_dup(text.c_str()); + // throw SALOME::SALOME_Exception(es); + //} // Adding file with is informations Engines::file infos; diff --git a/src/Container/Salome_file_i.hxx b/src/Container/Salome_file_i.hxx index 51652a2de..bcdd6d17b 100644 --- a/src/Container/Salome_file_i.hxx +++ b/src/Container/Salome_file_i.hxx @@ -45,6 +45,7 @@ class CONTAINER_EXPORT Salome_file_i: // Import and export methods virtual void load(const char* hdf5_file); virtual void save(const char* hdf5_file); + virtual void save_all(const char* hdf5_file); // Adding files virtual void setLocalFile(const char* comp_file_name); diff --git a/src/Container/TestSalome_file.cxx b/src/Container/TestSalome_file.cxx index 69a6d363e..f89558b06 100644 --- a/src/Container/TestSalome_file.cxx +++ b/src/Container/TestSalome_file.cxx @@ -19,7 +19,7 @@ void print_state(Engines::SfState * state) { cerr << "-------------------------------------------------------------------" << endl; cerr << "name = " << state->name << endl; - cerr << "hdf5_file_name = " << state->name << endl; + cerr << "hdf5_file_name = " << state->hdf5_file_name << endl; cerr << "number_of_files = " << state->number_of_files << endl; cerr << "files_ok = " << state->files_ok << endl; } @@ -27,8 +27,13 @@ void print_state(Engines::SfState * state) int main (int argc, char * argv[]) { + system("rm toto cat test.hdf test2.hdf"); + Salome_file_i file; Salome_file_i file2; + Salome_file_i file3; + Salome_file_i file4; + Salome_file_i file5; Engines::file * infos; Engines::SfState * state; Engines::files * all_infos; @@ -109,4 +114,29 @@ int main (int argc, char * argv[]) print_state(state); orb->destroy(); + + file3.setLocalFile("/tmp/toto"); + file3.setLocalFile("/bin/cat"); + state = file3.getSalome_fileState(); + print_state(state); + file3.save_all("test.hdf"); + file3.setLocalFile("/bin/tutu"); + file3.save("test2.hdf"); + + file4.load("test.hdf"); + all_infos = file4.getFilesInfos(); + for (int i = 0; i < all_infos->length(); i++) + { + print_infos(&((*all_infos)[i])); + } + state = file4.getSalome_fileState(); + print_state(state); + file5.load("test2.hdf"); + all_infos = file5.getFilesInfos(); + for (int i = 0; i < all_infos->length(); i++) + { + print_infos(&((*all_infos)[i])); + } + state = file5.getSalome_fileState(); + print_state(state); } diff --git a/src/ParallelContainer/Makefile.am b/src/ParallelContainer/Makefile.am index 3f8655e9b..a3cff1e38 100644 --- a/src/ParallelContainer/Makefile.am +++ b/src/ParallelContainer/Makefile.am @@ -49,15 +49,18 @@ COMMON_CPPFLAGS= -I$(top_srcdir)/src/Container \ -I$(top_srcdir)/src/NamingService \ 
-I$(top_srcdir)/src/Registry \ -I$(top_srcdir)/src/Utils \ + -I$(srcdir)/../HDFPersist \ -I$(top_builddir)/salome_adm/unix \ -I$(top_builddir)/idl \ -I$(top_srcdir)/src/SALOMETraceCollector \ + @CAS_CPPFLAGS@ @CAS_CXXFLAGS@ \ @CORBA_CXXFLAGS@ \ @CORBA_INCLUDES@ \ @PACO_INCLUDES@ # This local variable defines the list of dependant libraries common to all target in this package. COMMON_LIBS = $(top_builddir)/src/Container/libSalomeContainer.la \ + $(top_builddir)/src/HDFPersist/libSalomeHDFPersist.la \ $(top_builddir)/idl/libSalomeParallelIDLKernel.la \ $(top_builddir)/idl/libSalomeIDLKernel.la \ @CORBA_LIBS@ \ -- 2.39.2
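
The sketches that follow are illustrative only and are not part of the patch. First, a CORBA-client view of the new operation: save() stores only the bookkeeping information of the managed files (MODE = "infos"), while save_all() also embeds their contents (MODE = "all") so the Salome_file can be rebuilt elsewhere with load(). The object reference, the generated stub header name, the target paths and the usual SALOME::SALOME_Exception layout (an ExceptionStruct member named details) are assumptions.

// Hypothetical client-side sketch; assumes `sf` is an already narrowed
// Engines::Salome_file reference and that the stubs generated from
// SALOME_Component.idl are available as SALOME_Component.hh.
#include <iostream>
#include "SALOME_Component.hh"

void archive_salome_file(Engines::Salome_file_ptr sf)
{
  try
  {
    sf->save("/tmp/salome_file_infos.hdf");      // metadata only ("infos" mode)
    sf->save_all("/tmp/salome_file_full.hdf");   // metadata + file contents ("all" mode)
  }
  catch (const SALOME::SALOME_Exception & ex)
  {
    // The error text is the one filled in by Salome_file_i before throwing.
    std::cerr << "save failed: " << ex.details.text.in() << std::endl;
  }
}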
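Next, a sketch of the round trip that the additions to TestSalome_file.cxx exercise: register local files on one servant, archive them with save_all(), then rebuild them from the archive with load() on a fresh servant. The file paths are placeholders and are assumed to exist; in "all" mode load() recreates the managed files in the current working directory.

// Sketch patterned on the new TestSalome_file.cxx code; paths are hypothetical.
#include <iostream>
#include "Salome_file_i.hxx"

int main()
{
  Salome_file_i source;
  source.setLocalFile("/tmp/input.dat");     // assumed to exist on this host
  source.setLocalFile("/tmp/mesh.dat");
  source.save_all("archive.hdf");            // writes MODE = "all" plus file contents

  Salome_file_i restored;
  restored.load("archive.hdf");              // recreates the files in the current directory
  Engines::SfState * state = restored.getSalome_fileState();
  std::cout << "files restored: " << state->number_of_files << std::endl;
  return 0;
}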
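The archive layout written by save() and save_all() can also be read back directly with the HDFPersist classes used in the patch: a CONFIG group whose MODE dataset holds "infos" or "all", a GROUP_FILES group whose LIST_OF_FILES dataset holds a space-separated list of file names, one DATASET<file_name> group per file with its NAME, PATH, TYPE, SOURCE_FILE_NAME and STATUS strings, and, in "all" mode, one GROUP<file_name> group holding the file content under "FILE DATASET". The sketch below walks that layout; it assumes the HDFPersist API behaves exactly as it is used in Salome_file_i::load().

// Sketch: inspecting an archive produced by save() or save_all(), reusing the
// read pattern of Salome_file_i::load(). Assumes HDFOI.hxx is on the include path.
#include <iostream>
#include <sstream>
#include <string>
#include "HDFOI.hxx"

// Read one string dataset of a group, as load() does for MODE, NAME, PATH, ...
static std::string read_string(HDFgroup * group, const char * dataset_name)
{
  HDFdataset * dataset = new HDFdataset((char *) dataset_name, group);
  dataset->OpenOnDisk();
  int size = dataset->GetSize();
  char * value = new char[size];
  dataset->ReadFromDisk(value);
  dataset->CloseOnDisk();
  std::string result(value);
  delete [] value;                 // array delete matches new[]
  return result;
}

void inspect_archive(const char * hdf5_file)
{
  HDFfile * file = new HDFfile((char *) hdf5_file);
  file->OpenOnDisk(HDF_RDONLY);

  HDFgroup * config = new HDFgroup("CONFIG", file);
  config->OpenOnDisk();
  std::cout << "mode: " << read_string(config, "MODE") << std::endl;
  config->CloseOnDisk();

  HDFgroup * files = new HDFgroup("GROUP_FILES", file);
  files->OpenOnDisk();
  std::istringstream iss(read_string(files, "LIST_OF_FILES"));
  files->CloseOnDisk();

  std::string name;
  while (std::getline(iss, name, ' '))
  {
    std::string group_name("DATASET");
    group_name += name;            // per-file metadata group
    HDFgroup * meta = new HDFgroup((char *) group_name.c_str(), file);
    meta->OpenOnDisk();
    std::cout << name << " status: " << read_string(meta, "STATUS") << std::endl;
    meta->CloseOnDisk();
  }

  file->CloseOnDisk();
}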
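Finally, on the build side the patch only adds the HDFPersist include directory and library to the existing flag and library lists of src/Container and src/ParallelContainer. A module that links against libSalomeContainer and uses Salome_file persistence would follow the same pattern; the fragment below is a hypothetical example (variable names are made up, the two added entries mirror those in src/Container/Makefile.am).

# Hypothetical Makefile.am fragment for a module using Salome_file persistence
MY_MODULE_CPPFLAGS = \
	-I$(srcdir)/../HDFPersist \
	@CORBA_CXXFLAGS@ @CORBA_INCLUDES@

MY_MODULE_LIBS = \
	../HDFPersist/libSalomeHDFPersist.la \
	@CORBA_LIBS@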