Salome HOME
Merge branch 'agy/ParallelContainerLaunch'
authorAnthony Geay <anthony.geay@edf.fr>
Tue, 7 Oct 2014 09:59:17 +0000 (11:59 +0200)
committerAnthony Geay <anthony.geay@edf.fr>
Tue, 7 Oct 2014 09:59:17 +0000 (11:59 +0200)
84 files changed:
CMakeLists.txt
adm/cmake/FindSalomeSphinx.cmake
adm/cmake/FindSphinx.cmake
doc/operations.rst
doc/optimizer.rst
doc/representation.rst
doc/schemapy.rst
doc/yacsgen.rst
src/engine/Pool.hxx
src/genericgui/CatalogWidget.cxx
src/genericgui/GenericGui.cxx
src/genericgui/GenericGui.hxx
src/genericgui/GuiEditor.cxx
src/genericgui/GuiEditor.hxx
src/genericgui/Menus.cxx
src/genericgui/SceneComposedNodeItem.cxx
src/genericgui/SceneComposedNodeItem.hxx
src/genericgui/SceneElementaryNodeItem.cxx
src/genericgui/SceneElementaryNodeItem.hxx
src/genericgui/SceneItem.cxx
src/genericgui/SceneItem.hxx
src/genericgui/SceneNodeItem.cxx
src/genericgui/SceneNodeItem.hxx
src/genericgui/SceneProcItem.cxx
src/genericgui/SceneProcItem.hxx
src/pmml/CMakeLists.txt
src/pmml/Test/CMakeLists.txt
src/pmml/Test/PMMLBasicsTest1.cxx
src/pmml/Test/samples/CMakeLists.txt [new file with mode: 0755]
src/pmml/Test/samples/ann_model.pmml [new file with mode: 0755]
src/pmml/Test/samples/ann_model_2.pmml [new file with mode: 0755]
src/pmml/Test/samples/lr_model.pmml [new file with mode: 0755]
src/pmml/Test/samples/lr_model_2.pmml [new file with mode: 0755]
src/pmml/Test/samples/no_model.pmml [new file with mode: 0755]
src/pmml/Test/samples/two_models_ann_lr.pmml [new file with mode: 0755]
src/pmml/Test/samples/unittest_ref_ann_model.cpp [new file with mode: 0755]
src/pmml/Test/samples/unittest_ref_ann_model.f [new file with mode: 0755]
src/pmml/Test/samples/unittest_ref_ann_model.py [new file with mode: 0755]
src/pmml/Test/samples/unittest_ref_lr_model.cpp [new file with mode: 0755]
src/pmml/Test/samples/unittest_ref_lr_model.f [new file with mode: 0755]
src/pmml/Test/samples/unittest_ref_lr_model.py [new file with mode: 0755]
src/pmml/Test/samples/win32_ann_model.pmml [new file with mode: 0755]
src/pmml/Test/samples/win32_lr_model.pmml [new file with mode: 0755]
src/pmml/pmml_swig/PMMLBasicsTest.py
src/pmml/resources/CMakeLists.txt [deleted file]
src/pmml/resources/ann_model.pmml [deleted file]
src/pmml/resources/ann_model_2.pmml [deleted file]
src/pmml/resources/lr_model.pmml [deleted file]
src/pmml/resources/lr_model_2.pmml [deleted file]
src/pmml/resources/no_model.pmml [deleted file]
src/pmml/resources/two_models_ann_lr.pmml [deleted file]
src/pmml/resources/unittest_ref_ann_model.cpp [deleted file]
src/pmml/resources/unittest_ref_ann_model.f [deleted file]
src/pmml/resources/unittest_ref_ann_model.py [deleted file]
src/pmml/resources/unittest_ref_lr_model.cpp [deleted file]
src/pmml/resources/unittest_ref_lr_model.f [deleted file]
src/pmml/resources/unittest_ref_lr_model.py [deleted file]
src/pmml/resources/win32_ann_model.pmml [deleted file]
src/pmml/resources/win32_lr_model.pmml [deleted file]
src/salomegui/resources/SalomeApp.xml.in
src/salomegui/resources/YACS_msg_fr.ts
src/salomegui/resources/YACS_msg_ja.ts
src/salomewrap/SalomeWrap_Module.cxx
src/salomewrap/SalomeWrap_Module.hxx
src/salomewrap/SuitWrapper.cxx
src/salomewrap/SuitWrapper.hxx
src/yacsloader/CMakeLists.txt
src/yacsloader/Test/CMakeLists.txt
src/yacsloader/Test/OptimizerAlgASyncExample.cxx
src/yacsloader/Test/OptimizerAlgSyncExample.cxx
src/yacsloader/Test/algoasyncexample.py
src/yacsloader/Test/algosyncexample.py
src/yacsloader/loopParsers.hxx
src/yacsloader/pmml/CMakeLists.txt
src/yacsloader/pmml/PmmlExeTest.sh.in [new file with mode: 0755]
src/yacsloader/pmml/PmmlInSessionTest.sh.in [new file with mode: 0755]
src/yacsloader/pmml/YACSPMMLBasicsTest1.cxx
src/yacsloader/pmml/config_appli.xml.in [new file with mode: 0644]
src/yacsloader/samples/optimizer_async_cpp.xml
src/yacsloader/samples/optimizer_async_py.xml
src/yacsloader/samples/optimizer_sync_cpp.xml
src/yacsloader/samples/optimizer_sync_py.xml
src/yacsloader_swig/Test/CMakeLists.txt
tmp_dump.py [new file with mode: 0644]

index 62e4672b7d7c9708bacdb54eb4d49fcc5a4aecba..1327b21afda3a69d8aa6d42efa7572e77ff5e4ea 100644 (file)
@@ -31,10 +31,10 @@ STRING(TOUPPER ${PROJECT_NAME} PROJECT_NAME_UC)
 
 SET(${PROJECT_NAME_UC}_MAJOR_VERSION 7)
 SET(${PROJECT_NAME_UC}_MINOR_VERSION 4)
-SET(${PROJECT_NAME_UC}_PATCH_VERSION 0)
+SET(${PROJECT_NAME_UC}_PATCH_VERSION 1)
 SET(${PROJECT_NAME_UC}_VERSION
   ${${PROJECT_NAME_UC}_MAJOR_VERSION}.${${PROJECT_NAME_UC}_MINOR_VERSION}.${${PROJECT_NAME_UC}_PATCH_VERSION})
-SET(${PROJECT_NAME_UC}_VERSION_DEV 0)
+SET(${PROJECT_NAME_UC}_VERSION_DEV 1)
 
 # Find KERNEL (optional)
 # ==============
@@ -67,7 +67,8 @@ IF(SALOME_BUILD_GUI)
   IF(EXISTS ${GUI_ROOT_DIR})
     LIST(APPEND CMAKE_MODULE_PATH "${GUI_ROOT_DIR}/adm_local/cmake_files")
     FIND_PACKAGE(SalomeGUI)
-    FULL_GUI(TRUE) # check whether GUI builded in full mode and with CORBA
+    SALOME_GUI_WITH_CORBA() # check whether GUI built with CORBA
+    SALOME_GUI_MODE(SALOME_USE_QXGRAPHVIEWER) # check whether GUI is built with the QxGraphViewer
   ELSE(EXISTS ${GUI_ROOT_DIR})
     MESSAGE(FATAL_ERROR "We absolutely need a Salome GUI, please define GUI_ROOT_DIR or turn option SALOME_BUILD_GUI to OFF !")
   ENDIF(EXISTS ${GUI_ROOT_DIR})
index 78378d6084b378ebc55925e08b9b813cee7e267e..4ef6c0f07c636895c7e90470c5dba73aed6dec96 100644 (file)
@@ -31,13 +31,16 @@ SET(DOCUTILS_ROOT_DIR "$ENV{DOCUTILS_ROOT_DIR}" CACHE PATH "Path to the Docutils
 # Ensure the command is run with the given PYTHONPATH
 IF(WIN32 AND NOT CYGWIN)
    SET(SPHINX_EXECUTABLE ${SPHINX_EXECUTABLE})
+   SET(SPHINX_APIDOC_EXECUTABLE ${SPHINX_APIDOC_EXECUTABLE})
 ELSE()
    SET(SPHINX_EXECUTABLE /usr/bin/env PYTHONPATH="${SPHINX_PYTHONPATH}:$$PYTHONPATH" ${SPHINX_EXECUTABLE})
+   SET(SPHINX_APIDOC_EXECUTABLE /usr/bin/env PYTHONPATH="${SPHINX_PYTHONPATH}:$$PYTHONPATH" ${SPHINX_APIDOC_EXECUTABLE})
 ENDIF()
 
 MARK_AS_ADVANCED(SPHINX_EXECUTABLE)
 
 IF(SPHINX_FOUND)
   SALOME_ACCUMULATE_ENVIRONMENT(PATH ${SPHINX_EXECUTABLE})
+  SALOME_ACCUMULATE_ENVIRONMENT(PATH ${SPHINX_APIDOC_EXECUTABLE})
   SALOME_ACCUMULATE_ENVIRONMENT(PYTHONPATH ${SPHINX_PYTHONPATH})
 ENDIF()
index c3fd775b126999c8cbaabfb024d285ac2ec9a6ef..01c3a3d3e8099fa2f758b348bf45d59ac0dd317e 100644 (file)
@@ -28,6 +28,7 @@
 #
 
 FIND_PROGRAM(SPHINX_EXECUTABLE sphinx-build PATH_SUFFIXES Scripts)
+FIND_PROGRAM(SPHINX_APIDOC_EXECUTABLE sphinx-apidoc PATH_SUFFIXES Scripts)
 
 # Get root dir locally, going up two levels from the exec:
 GET_FILENAME_COMPONENT(_tmp_ROOT_DIR "${SPHINX_EXECUTABLE}" PATH)
@@ -39,5 +40,5 @@ ELSE()
 ENDIF()
 # Handle the standard arguments of the find_package() command:
 INCLUDE(FindPackageHandleStandardArgs)
-FIND_PACKAGE_HANDLE_STANDARD_ARGS(Sphinx REQUIRED_VARS SPHINX_EXECUTABLE)
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(Sphinx REQUIRED_VARS SPHINX_EXECUTABLE SPHINX_APIDOC_EXECUTABLE)
 
index 86dd773f1802de3c897210f7155193c1f34b19fa..9b5ba35ededff5d211bc547d9ce9955c6373e532 100644 (file)
@@ -274,6 +274,14 @@ Specific operations available in the **edition mode** for each type of objects s
 |                                |             |                                                                                     |
 |                                |             |**Center on Node** - Center the 2D view on selected Node, without resizing.          |
 |                                |             |                                                                                     |
+|                                |             |**Shrink/Expand** - Fold/Unfold the selected Node. See :ref:`shrink_expand_nodes`.   |
+|                                |             |                                                                                     |
+|                                |             |**Shrink/Expand Children** - Fold/Unfold all direct Children Nodes of selected Node. |
+|                                |             |See :ref:`shrink_expand_nodes`.                                                      |
+|                                |             |                                                                                     |
+|                                |             |**Shrink/Expand Elementary** - Fold/Unfold all Elementary Nodes of selected Node     |
+|                                |             |recursively. See :ref:`shrink_expand_nodes`.                                         |
+|                                |             |                                                                                     |
 |                                |             |**Compute Links** - Recompute links, useful for large schemas, when automatic link   |
 |                                |             |calculation have been deactivated, see :ref:`edition_toolbar`.                       |
 |                                |             |                                                                                     |
@@ -324,6 +332,14 @@ Node objects.
 |                                |             |                                                                                     |
 |                                |             |**Center on Node** - Center the 2D view on selected Node, without resizing.          |
 |                                |             |                                                                                     |
+|                                |             |**Shrink/Expand** - Fold/Unfold the selected Node. See :ref:`shrink_expand_nodes`.   |
+|                                |             |                                                                                     |
+|                                |             |**Shrink/Expand Children** - Fold/Unfold all direct Children Nodes of selected Node. |
+|                                |             |See :ref:`shrink_expand_nodes`.                                                      |
+|                                |             |                                                                                     |
+|                                |             |**Shrink/Expand Elementary** - Fold/Unfold all Elementary Nodes of selected Node     |
+|                                |             |recursively. See :ref:`shrink_expand_nodes`.                                         |
+|                                |             |                                                                                     |
 |                                |             |**Compute Links** - Recompute links, useful for large schemas, when automatic link   |
 |                                |             |calculation have been deactivated, see :ref:`edition_toolbar`.                       |
 |                                |             |                                                                                     |
@@ -355,6 +371,9 @@ Node objects.
 |                                |             |the screen                                                                           |
 |                                |             |                                                                                     |
 |                                |             |**Center on Node** - Center the 2D view on selected Node, without resizing.          |
+|                                |             |                                                                                     |
+|                                |             |**Shrink/Expand** - Fold/Unfold the selected Node. See :ref:`shrink_expand_nodes`.   |
+|                                |             |                                                                                     |
 +--------------------------------+-------------+-------------------------------------------------------------------------------------+
 
 Port and link objects.
@@ -404,6 +423,14 @@ The popup menus available on the objects selected in the Tree View or 2D View of
 |                                |             |                                                                                     |
 |                                |             |**Center on Node** - Center the 2D view on selected Node, without resizing.          |
 |                                |             |                                                                                     |
+|                                |             |**Shrink/Expand** - Fold/Unfold the selected Node. See :ref:`shrink_expand_nodes`.   |
+|                                |             |                                                                                     |
+|                                |             |**Shrink/Expand Children** - Fold/Unfold all direct Children Nodes of selected Node. |
+|                                |             |See :ref:`shrink_expand_nodes`.                                                      |
+|                                |             |                                                                                     |
+|                                |             |**Shrink/Expand Elementary** - Fold/Unfold all Elementary Nodes of selected Node     |
+|                                |             |recursively. See :ref:`shrink_expand_nodes`.                                         |
+|                                |             |                                                                                     |
 |                                |             |**Compute Links** - Recompute links, useful for large schemas, when automatic link   |
 |                                |             |calculation have been deactivated, see :ref:`edition_toolbar`.                       |
 |                                |             |                                                                                     |
@@ -425,6 +452,14 @@ The popup menus available on the objects selected in the Tree View or 2D View of
 |                                |             |                                                                                     |
 |                                |             |**Center on Node** - Center the 2D view on selected Node, without resizing.          |
 |                                |             |                                                                                     |
+|                                |             |**Shrink/Expand** - Fold/Unfold the selected Node. See :ref:`shrink_expand_nodes`.   |
+|                                |             |                                                                                     |
+|                                |             |**Shrink/Expand Children** - Fold/Unfold all direct Children Nodes of selected Node. |
+|                                |             |See :ref:`shrink_expand_nodes`.                                                      |
+|                                |             |                                                                                     |
+|                                |             |**Shrink/Expand Elementary** - Fold/Unfold all Elementary Nodes of selected Node     |
+|                                |             |recursively. See :ref:`shrink_expand_nodes`.                                         |
+|                                |             |                                                                                     |
 |                                |             |**Compute Links** - Recompute links, useful for large schemas, when automatic link   |
 |                                |             |calculation have been deactivated, see :ref:`edition_toolbar`.                       |
 |                                |             |                                                                                     |
@@ -442,6 +477,8 @@ The popup menus available on the objects selected in the Tree View or 2D View of
 |                                |             |                                                                                     |
 |                                |             |**Center on Node** - Center the 2D view on selected Node, without resizing.          |
 |                                |             |                                                                                     |
+|                                |             |**Shrink/Expand** - Fold/Unfold the selected Node. See :ref:`shrink_expand_nodes`.   |
+|                                |             |                                                                                     |
 +--------------------------------+-------------+-------------------------------------------------------------------------------------+
 
 .. centered::
index 9fda369578756e93fe7fd1afef53e991997b080c..9dd3dcf2f46c11d543271969668a6917942e89c6 100644 (file)
@@ -7,6 +7,9 @@ The definition of the optimization algorithm is done by way of plugin.
 The plugin can be a C++ plugin implemented in a dynamic library (.so file) or a Python plugin implemented in a Python module (.py).
 It is possible to implement two kinds of algorithm : synchronous or asynchronous.
 
+The algorithm uses a pool of samples to be evaluated.
+When all the samples of the pool are evaluated, the algorithm stops.
+
 Synchronous algorithm
 --------------------------------------------------
 In synchronous mode, the OptimizerLoop calls the algorithm to know what are the types of the input port (sample sent to the internal node), 
@@ -19,10 +22,13 @@ must be implemented and some optional methods (in C++ and in Python):
 
 - **getTCForIn**, this method must return the YACS type of the input port of the internal node
 - **getTCForOut**, this method must return the YACS type of the output port of the internal node
+- **getTCForAlgoInit** (optional), this method returns the type of the "algoInit" port, string if undefined
+- **getTCForAlgoResult** (optional), this method returns the type of the "algoResult" port, string if undefined
 - **initialize** (optional), this method is called during the algorithm initialization
 - **start**, this method is called at the beginning of iterations
 - **takeDecision**, this method is called at each iteration
 - **finish** (optional), this method is called to finish the algorithm at the end of the iteration process
+- **getAlgoResult** (optional), this method returns the value of the "algoResult" port, "NULL" if undefined
 
 In Python you need to implement another method:
 
@@ -32,191 +38,17 @@ C++ plugin example
 ''''''''''''''''''''
 Here is a small example of a C++ synchronous algorithm:
 
-.. code-block:: cpp
-
-  #include <cmath>
-  
-  #include "OptimizerAlg.hxx"
-  
-  using namespace YACS::ENGINE;
-  
-  extern "C"
-  {
-    OptimizerAlgBase * createOptimizerAlgSyncExample(Pool * pool);
-  }
-  
-  class OptimizerAlgSyncExample : public OptimizerAlgSync
-    {
-    private:
-      int _idTest;
-      TypeCode *_tcIn;
-      TypeCode *_tcOut;
-    public:
-      OptimizerAlgSyncExample(Pool *pool);
-      virtual ~OptimizerAlgSyncExample();
-      TypeCode *getTCForIn() const;
-      TypeCode *getTCForOut() const;
-      void start();
-      void takeDecision();
-      void initialize(const Any *input) throw(YACS::Exception);
-      void finish();
-    };
-  
-  OptimizerAlgSyncExample::OptimizerAlgSyncExample(Pool *pool)
-    : OptimizerAlgSync(pool), _tcIn(0), _tcOut(0), _idTest(0)
-  {
-    _tcIn=new TypeCode(Double);
-    _tcOut=new TypeCode(Int);
-  }
-  
-  OptimizerAlgSyncExample::~OptimizerAlgSyncExample()
-  {
-    _tcIn->decrRef();
-    _tcOut->decrRef();
-  }
-  
-  //! Return the typecode of the expected input type
-  TypeCode * OptimizerAlgSyncExample::getTCForIn() const
-  {
-    return _tcIn;
-  }
-  
-  //! Return the typecode of the expected output type
-  TypeCode * OptimizerAlgSyncExample::getTCForOut() const
-  {
-    return _tcOut;
-  }
-  
-  //! Start to fill the pool with samples to evaluate
-  void OptimizerAlgSyncExample::start()
-  {
-    _idTest=0;
-    Any *val=AtomAny::New(1.2);
-    _pool->pushInSample(4,val);
-    val=AtomAny::New(3.4);
-    _pool->pushInSample(9,val);
-  }
-  
-  //! This method is called each time a sample has been evaluated.
-  /*!
-   *  It can either add new samples to evaluate in the pool, do nothing (wait
-   *  for more samples), or empty the pool to finish the evaluation.
-   */
-  void OptimizerAlgSyncExample::takeDecision()
-  {
-    if(_idTest==1)
-      {
-        Any *val=AtomAny::New(5.6);
-        _pool->pushInSample(16,val);
-        val=AtomAny::New(7.8);
-        _pool->pushInSample(25,val);
-        val=AtomAny::New(9. );
-        _pool->pushInSample(36,val);
-        val=AtomAny::New(12.3);
-        _pool->pushInSample(49,val);
-      }
-    else if(_idTest==4)
-      {
-        Any *val=AtomAny::New(45.6);
-        _pool->pushInSample(64,val);
-        val=AtomAny::New(78.9);
-        _pool->pushInSample(81,val);
-      }
-    else
-      {
-        Any *tmp= _pool->getCurrentInSample();
-        if(fabs(tmp->getDoubleValue()-45.6)<1.e-12)
-          _pool->destroyAll();
-      }
-    _idTest++;
-  }
-  
-  //! Optional method to initialize the algorithm.
-  /*!
-   *  For now, the parameter input is always NULL. It might be used in the
-   *  future to initialize an algorithm with custom data.
-   */
-  void OptimizerAlgSyncExample::initialize(const Any *input)
-    throw (YACS::Exception)
-  {
-  }
-  
-  /*!
-   *  Optional method called when the algorithm has finished, successfully or
-   *  not, to perform any necessary clean up.
-   */
-  void OptimizerAlgSyncExample::finish()
-  {
-  }
-  
-  //! Factory method to create the algorithm.
-  OptimizerAlgBase * createOptimizerAlgSyncExample(Pool *pool)
-  {
-    return new OptimizerAlgSyncExample(pool);
-  }
-
+.. literalinclude:: ../src/yacsloader/Test/OptimizerAlgSyncExample.cxx
+    :language: cpp
 
 Here, the entry point in the dynamic library is the name of the factory function : createOptimizerAlgSyncExample
 that returns an instance of the OptimizerAlgSyncExample class that implements the algorithm.
 
 Python plugin example
 ''''''''''''''''''''''
-Here, the same example of a synchronous algorithm in Python::
-
-  import SALOMERuntime
-  
-  class myalgosync(SALOMERuntime.OptimizerAlgSync):
-    def __init__(self):
-      SALOMERuntime.OptimizerAlgSync.__init__(self, None)
-      r=SALOMERuntime.getSALOMERuntime()
-      self.tin=r.getTypeCode("double")
-      self.tout=r.getTypeCode("int")
-  
-    def setPool(self,pool):
-      """Must be implemented to set the pool"""
-      self.pool=pool
-  
-    def getTCForIn(self):
-      """returns typecode of type expected as Input"""
-      return self.tin
-  
-    def getTCForOut(self):
-      """returns typecode of type expected as Output"""
-      return self.tout
-  
-    def initialize(self,input):
-      """Optional method called on initialization. Do nothing here"""
-  
-    def start(self):
-      """Start to fill the pool with samples to evaluate."""
-      self.iter=0
-      self.pool.pushInSample(4,1.2)
-      self.pool.pushInSample(9,3.4)
-  
-    def takeDecision(self):
-      """ This method is called each time a sample has been evaluated. It can
-          either add new samples to evaluate in the pool, do nothing (wait for
-          more samples), or empty the pool to finish the evaluation.
-      """
-      currentId=self.pool.getCurrentId()
-  
-      if self.iter==1:
-        self.pool.pushInSample(16,5.6)
-        self.pool.pushInSample(25,7.8)
-        self.pool.pushInSample(36,9.)
-        self.pool.pushInSample(49,12.3)
-      elif self.iter==4:
-        self.pool.pushInSample(64,45.6)
-        self.pool.pushInSample(81,78.9)
-      else:
-        val=self.pool.getCurrentInSample()
-        if abs(val.getDoubleValue()-45.6) < 1.e-12:
-          self.pool.destroyAll()
-      self.iter=self.iter+1
-  
-    def finish(self):
-      """Optional method called when the algorithm has finished, successfully
-         or not, to perform any necessary clean up. Do nothing here"""
+Here, the same example of a synchronous algorithm in Python:
+
+.. literalinclude:: ../src/yacsloader/Test/algosyncexample.py
 
 Here, the entry point in the Python module is directly the name of the class that implements the algorithm : myalgosync.
 
@@ -231,9 +63,12 @@ must be implemented and some optional methods (in C++ and in Python):
 
 - **getTCForIn**, this method must return the YACS type of the input port of the internal node
 - **getTCForOut**, this method must return the YACS type of the output port of the internal node
+- **getTCForAlgoInit** (optional), this method returns the type of the "algoInit" port, string if undefined
+- **getTCForAlgoResult** (optional), this method returns the type of the "algoResult" port, string if undefined
 - **initialize** (optional), this method is called during the algorithm initialization
 - **startToTakeDecision**, this method is called to start the iteration process in a separate thread. It is the body of the algorithm.
 - **finish** (optional), this method is called to finish the algorithm at the end of the iteration process
+- **getAlgoResult** (optional), this method returns the value of the "algoResult" port, "NULL" if undefined
 
 In Python you need to implement another method:
 
@@ -243,94 +78,8 @@ C++ plugin example
 ''''''''''''''''''''
 Here is a small example of a C++ asynchronous algorithm:
 
-.. code-block:: cpp
-
-  #include "OptimizerAlg.hxx"
-  
-  using namespace YACS::ENGINE;
-  
-  extern "C"
-  {
-    OptimizerAlgBase * createOptimizerAlgASyncExample(Pool * pool);
-  }
-  
-  class OptimizerAlgASyncExample : public OptimizerAlgASync
-    {
-    private:
-      TypeCode * _tcIn;
-      TypeCode * _tcOut;
-    public:
-      OptimizerAlgASyncExample(Pool * pool);
-      virtual ~OptimizerAlgASyncExample();
-      TypeCode * getTCForIn() const;
-      TypeCode * getTCForOut() const;
-      void startToTakeDecision();
-    };
-  
-  OptimizerAlgASyncExample::OptimizerAlgASyncExample(Pool * pool)
-    : OptimizerAlgASync(pool), _tcIn(0), _tcOut(0)
-  {
-    _tcIn = new TypeCode(Double);
-    _tcOut = new TypeCode(Int);
-  }
-  
-  OptimizerAlgASyncExample::~OptimizerAlgASyncExample()
-  {
-    _tcIn->decrRef();
-    _tcOut->decrRef();
-  }
-  
-  //! Return the typecode of the expected input type
-  TypeCode *OptimizerAlgASyncExample::getTCForIn() const
-  {
-    return _tcIn;
-  }
-  
-  //! Return the typecode of the expected output type
-  TypeCode *OptimizerAlgASyncExample::getTCForOut() const
-  {
-    return _tcOut;
-  }
-  
-  //! This method is called only once to launch the algorithm.
-  /*!
-   *  It must first fill the pool with samples to evaluate and call
-   *  signalMasterAndWait() to block until a sample has been evaluated. When
-   *  returning from this method, it MUST check for an eventual termination
-   *  request (with the method isTerminationRequested()). If the termination
-   *  is requested, the method must perform any necessary cleanup and return
-   *  as soon as possible. Otherwise it can either add new samples to evaluate
-   *  in the pool, do nothing (wait for more samples), or empty the pool and
-   *  return to finish the evaluation.
-   */
-  void OptimizerAlgASyncExample::startToTakeDecision()
-  {
-    double val = 1.2;
-    for (int i=0 ; i<5 ; i++) {
-      // push a sample in the input of the slave node
-      _pool->pushInSample(i, AtomAny::New(val));
-      // wait until next sample is ready
-      signalMasterAndWait();
-      // check error notification
-      if (isTerminationRequested()) {
-        _pool->destroyAll();
-        return;
-      }
-  
-      // get a sample from the output of the slave node
-      Any * v = _pool->getCurrentOutSample();
-      val += v->getIntValue();
-    }
-  
-    // in the end destroy the pool content
-    _pool->destroyAll();
-  }
-  
-  //! Factory method to create the algorithm.
-  OptimizerAlgBase * createOptimizerAlgASyncExample(Pool * pool)
-  {
-    return new OptimizerAlgASyncExample(pool);
-  }
+.. literalinclude:: ../src/yacsloader/Test/OptimizerAlgASyncExample.cxx
+    :language: cpp
 
 
 Here, the entry point in the dynamic library is the name of the factory function : createOptimizerAlgASyncExample
@@ -338,62 +87,59 @@ that returns an instance of the OptimizerAlgASyncExample class that implements t
 
 Python plugin example
 ''''''''''''''''''''''''
-Here is an example of an asynchronous algorithm implemented in Python::
-
-  import SALOMERuntime
-  
-  class myalgoasync(SALOMERuntime.OptimizerAlgASync):
-    def __init__(self):
-      SALOMERuntime.OptimizerAlgASync.__init__(self, None)
-      r=SALOMERuntime.getSALOMERuntime()
-      self.tin=r.getTypeCode("double")
-      self.tout=r.getTypeCode("int")
-  
-    def setPool(self,pool):
-      """Must be implemented to set the pool"""
-      self.pool=pool
-  
-    def getTCForIn(self):
-      """returns typecode of type expected as Input"""
-      return self.tin
-  
-    def getTCForOut(self):
-      """returns typecode of type expected as Output"""
-      return self.tout
-  
-    def startToTakeDecision(self):
-      """This method is called only once to launch the algorithm. It must
-         first fill the pool with samples to evaluate and call
-         self.signalMasterAndWait() to block until a sample has been
-         evaluated. When returning from this method, it MUST check for an
-         eventual termination request (with the method
-         self.isTerminationRequested()). If the termination is requested, the
-         method must perform any necessary cleanup and return as soon as
-         possible. Otherwise it can either add new samples to evaluate in the
-         pool, do nothing (wait for more samples), or empty the pool and
-         return to finish the evaluation.
-      """
-      val=1.2
-      for iter in xrange(5):
-        #push a sample in the input of the slave node
-        self.pool.pushInSample(iter,val)
-        #wait until next sample is ready
-        self.signalMasterAndWait()
-        #check error notification
-        if self.isTerminationRequested():
-          self.pool.destroyAll()
-          return
-  
-        #get a sample from the output of the slave node
-        currentId=self.pool.getCurrentId()
-        v=self.pool.getCurrentOutSample()
-        val=val+v.getIntValue()
-  
-      #in the end destroy the pool content
-      self.pool.destroyAll()
+Here is an example of an asynchronous algorithm implemented in Python:
+
+.. literalinclude:: ../src/yacsloader/Test/algoasyncexample.py
 
 Here, the entry point in the Python module is directly the name of the class that implements the algorithm : myalgoasync.
 
+Managing the pool of samples
+---------------------------------
+
+Samples can be added to the pool at the initialization of the algorithm or
+every time a sample is evaluated (while "taking decision").
+The algorithm stops taking decisions once every sample has been evaluated.
+
+A sample has:
+
+- an identifier - *Id*
+- a priority - it is used to choose the order of evaluation
+- a value - *In*
+- an evaluated or computed value - *Out*
+
+The current sample is the sample used by the latest terminated evaluation.
+
+These are the methods needed to manage the pool of samples:
+
+.. code-block:: cpp
+
+  class Pool
+  {
+      //...
+    public:
+      //For algorithm use
+      int getCurrentId() const ;
+      Any *getCurrentInSample() const ;
+      Any *getCurrentOutSample() const ;
+      Any *getOutSample(int id);
+      void pushInSample(int id, Any *inSample, unsigned char priority = 0);
+      void destroyAll();
+      //...
+  }
+
+In C++, the samples are of type ``YACS::ENGINE::Any``, in order to support any
+type supported by YACS. For conversion to standard types, use:
+
+- ``getIntValue``
+- ``getBoolValue``
+- ``getDoubleValue``
+- ``getStringValue``
+
+It is possible to create a pointer to a new object with:
+
+- ``YACS::ENGINE::AtomAny::New``
+
+For further information, see `include/salome/Any.hxx <file:../../../../../include/salome/Any.hxx>`_.
 
 C++ algorithm calling Python code
 --------------------------------------------------
index c7a2226ec2714f0541384b50fbb862a4ae50a149..2fa6b257d62ffdfb428080791b6b154aca49d7a2 100644 (file)
@@ -68,3 +68,18 @@ in the toolbar :ref:`edition_toolbar`. To force links computation, use the **com
 The user can change the links representation with the options **simplify links** which tries to make the links as direct as possible with
 a slight CPU cost and **separate links** which tries to avoid links superposition with again a CPU cost.
 
+.. _shrink_expand_nodes:
+
+Shrink/Expand nodes
+-------------------
+This functionality allows folding/unfolding any node(s) in order to decrease the schema size.
+It can be useful if the user deals with a large schema that contains a large number of nodes.
+
+There are 3 possible ways to make the schema more compact:
+
++ call **shrink/expand** context menu or **double mouse click** on any node to fold/unfold this node;
+
++ call **shrink/expand children** context menu or  **Ctrl + double mouse click** on any composed node to fold/unfold all direct children nodes of selected node;
+
++ call **shrink/expand elementary** context menu or  **Ctrl + Shift + double mouse click** on any composed node to fold/unfold all elementary nodes of selected node recursively.
+
index d9fb63bca7c80526c13ce6d4f66c0f7b26b6eed0..3880dcf16000fdf1aab176a53bb9b2078e9dc6c6 100644 (file)
@@ -26,6 +26,20 @@ Before YACS modules can be imported, the environment must be correctly configure
 SALOME application is used.  Otherwise, the PYTHONPATH environment variable has to be set to 
 <YACS_ROOT_DIR>/lib/pythonX.Y/site-packages/salome.
 
+When you build your own Salome application and use your own modules and components (using YACSGEN for example), you may need to load
+the module catalog::
+
+    import SALOMERuntime
+    SALOMERuntime.RuntimeSALOME_setRuntime()
+    salome_runtime = SALOMERuntime.getSALOMERuntime()
+    import salome
+    salome.salome_init()
+    mc = salome.naming_service.Resolve('/Kernel/ModulCatalog')
+    ior = salome.orb.object_to_string(mc)
+    session_catalog = salome_runtime.loadCatalog("session", ior)
+    salome_runtime.addCatalog(session_catalog)
+
+
 .. _loadxml:
 
 Create a calculation scheme by loading an XML file
@@ -332,22 +346,21 @@ Node n3 will be executed before node n4.
 
 Dataflow link
 ++++++++++++++++++++++++++++
-The first step in defining a dataflow link is to obtain port objects using one of the methods described above.  
-The edAddDFLink method for the context node is then used, transferring the two ports to be connected to it.
-The following gives an example of a dataflow link between the output port p1 of node n3 and the input port of node n4::
+The first step in defining a dataflow link is to obtain port objects using one of the methods described above.
+Then, the edAddLink method links an output port to an input port::
 
   pout=n3.getOutputPort("p1")
   pin=n4.getInputPort("p1")
-  p.edAddDFLink(pout,pin)
+  p.edAddLink(pout,pin)
 
-Data link
-++++++++++++++++++++++++++++
-A data link is defined as being a dataflow link using the edAddLink method instead of edAddDFLink.  
-The same example as above with a data link::
+Most of the time, when you need a dataflow link between two ports, you also need a control link between the nodes
+of the ports. In this case you can use the method edAddDFLink::
 
   pout=n3.getOutputPort("p1")
   pin=n4.getInputPort("p1")
-  p.edAddLink(pout,pin)
+  p.edAddDFLink(pout,pin)
+
+edAddDFLink is equivalent to edAddCFLink followed by edAddLink.
 
 Initialising an input data port
 '''''''''''''''''''''''''''''''''''''''''''''''
@@ -383,6 +396,7 @@ will appear as follows::
   r = pilot.getRuntime()
   p=r.createProc("pr")
   ti=p.getTypeCode("int")
+  td=p.getTypeCode("double")
   #node1
   n1=r.createScriptNode("","node1")
   p.edAddChild(n1)
@@ -406,11 +420,8 @@ will appear as follows::
   p.edAddCFLink(n1,n2)
   p.edAddCFLink(n1,n4)
   #dataflow links
-  pout=n3.getOutputPort("p1")
-  pin=n4.getInputPort("p1")
-  #dataflow links
-  p.edAddDFLink(n1.getOutputPort("p1"),n2.getInputPort("p1"))
-  p.edAddDFLink(n1.getOutputPort("p1"),n4.getInputPort("p1"))
+  p.edAddLink(n1.getOutputPort("p1"),n2.getInputPort("p1"))
+  p.edAddLink(n1.getOutputPort("p1"),n4.getInputPort("p1"))
   #initialisation ports
   n1.getInputPort("p1").edInitPy(5)
 
@@ -440,7 +451,6 @@ Repeating a part of the example above, we will get::
   n2.setScript("p1=2*p1")
   n2.edAddInputPort("p1",ti)
   n2.edAddOutputPort("p1",ti)
-  b.edAddCFLink(n1,n2)
   b.edAddDFLink(n1.getOutputPort("p1"),n2.getInputPort("p1"))
 
 .. _py_forloop:
index cda4c320581bff3d41d9e53543e19166e1131887..662e723c03571205eff85a30d562677b77aadd27 100644 (file)
@@ -37,6 +37,7 @@ The first action to be done is to import these definitions::
 
      from module_generator import Generator,Module,PYComponent
      from module_generator import CPPComponent,Service,F77Component
+     from module_generator import Library
 
 If you want to import all definitions, you can do that::
 
@@ -198,10 +199,13 @@ For example, we can have::
                                         body="outputport=myfunc(inputport);",
                                        ),
                                ],
-                      libs="-L/usr/local/mysoft -lmybib",
-                      rlibs="-Wl,--rpath -Wl,/usr/local/mysoft"
+                      libs=[Library(name="mybib", path="/usr/local/mysoft")],
+                      rlibs="/usr/local/mysoft"
                       )
 
+**libs** contains a list of **Library** objects. On linux, if the name of the file is "libmybib.so",
+the **name** of the library will be "mybib". The **path** shows where the library is installed.
+
 The **rlibs** attribute is not compulsory but it can be used to indicate a search path for dynamic libraries in execution.  
 **libs** is used during the link phase.  **rlibs** is only used during execution, it avoids the need to set the LD_LIBRARY_PATH 
 environment variable to find the dynamic library.
@@ -212,7 +216,7 @@ Includes will be added using the **defs** attribute.  For example::
 
    defs="""#include "myinclude.h" """
 
-The includes path will be specified in the **includes** attribute of the component in the following form::
+The include paths will be specified in the **includes** attribute of the component in the following form::
 
 
    defs="""#include "myinclude.h"
@@ -227,11 +231,13 @@ The includes path will be specified in the **includes** attribute of the compone
                                      body="outputport=myfunc(inputport);",
                                     ),
                             ],
-                   libs="-L/usr/local/mysoft -lmybib",
-                   rlibs="-Wl,--rpath -Wl,/usr/local/mysoft",
-                   includes="-I/usr/local/mysoft/include",
+                   libs=[Library(name="mybib", path="/usr/local/mysoft")],
+                   rlibs="/usr/local/mysoft",
+                   includes="/usr/local/mysoft/include",
                   )
 
+Multiple include paths should be separated by spaces or end of line character (\\n).
+
 Adding sources
 """"""""""""""""""""""""""""""""""""""""""""""""""""
 It is possible to add some source files with the **sources** attribute (a list of source files will be given).
@@ -252,7 +258,7 @@ named myfunc.cpp. The description will be::
                                     ),
                             ],
                    sources=["myfunc.cpp"],
-                   includes="-I/usr/local/mysoft/include",
+                   includes="/usr/local/mysoft/include",
                   )
 
 
@@ -379,8 +385,8 @@ The following example will be used to specify these final concepts::
                                        body="chdir(c);"
                                       ),
                               ],
-                     libs="-L/usr/local/fcompo -lfcompo",
-                     rlibs="-Wl,--rpath -Wl,/usr/local/fcompo"
+                     libs=[Library(name="fcompo", path="/usr/local/fcompo")],
+                     rlibs="/usr/local/fcompo"
                     )
 
 The Fortran “compo3” component has dataflow and datastream ports like the C++ component.  The Fortran dynamic library 
@@ -671,8 +677,7 @@ Example creation of generator::
 Once this generator has been created, simply call its commands to perform the necessary operations.
 
 - SALOME module generation:  ``g.generate()``
-- initialise automake:  ``g.bootstrap()``
-- execute the configure script:  ``g.configure()``
+- build configuration:  ``g.configure()``
 - compilation:  ``g.make()``
 - installation in the directory <prefix>:  ``g.install()``
 - create a SALOME application in the directory **appli_dir**::
@@ -724,13 +729,12 @@ This gives something like the following for a module with a single Fortran compo
                                     body="chdir(c);"
                                    ),
                            ],
-                  libs="-L/local/chris/modulegen/YACSGEN/fcompo -lfcompo",
-                  rlibs="-Wl,--rpath -Wl,/local/chris/modulegen/YACSGEN/fcompo")
+                  libs=[Library(name="fcompo", path="/local/chris/modulegen/YACSGEN/fcompo")],
+                  rlibs="/local/chris/modulegen/YACSGEN/fcompo")
 
   m=Module("mymodule",components=[c1],prefix="Install")
   g=Generator(m,context)
   g.generate()
-  g.bootstrap()
   g.configure()
   g.make()
   g.install()
@@ -1228,7 +1232,6 @@ written as follows::
 
   g=Generator(Module("astmod",components=[c1,c2],prefix=install_prefix),context)
   g.generate()
-  g.bootstrap()
   g.configure()
   g.make()
   g.install()
@@ -1508,7 +1511,7 @@ The module provides the following classes:
 .. autoclass:: Module
 
 .. autoclass:: Generator
-    :members: generate, bootstrap, configure, make, install, make_appli
+    :members: generate, configure, make, install, make_appli
 
 .. autofunction:: add_type
 
index 3fd53c97c17f1911796af207c57db801643a0fa6..efd2937779fc936a9136bdd2bde66b72d84543f4 100644 (file)
@@ -32,6 +32,12 @@ namespace YACS
     class Any;
     class OptimizerLoop;
 
+    /*! \brief Pool used to manage the samples of the optimizer loop plugin.
+     *  
+     *  Every sample has an identifier (Id), a priority, an initial value (In)
+     *  and an evaluation value (Out).
+     *  The current sample is the sample used by the latest terminated evaluation.
+     */
     class YACSLIBENGINE_EXPORT Pool
     {
       friend class OptimizerLoop;
index a206af2fc58b011a67424decb2e2c635b55c4b37..63016deb7a22d78c84a39ab03eec28c85254ed5a 100644 (file)
@@ -59,6 +59,7 @@ CatalogWidget::CatalogWidget(QWidget *parent,
   _dragModifier=false;
 
   setColumnCount(1);
+  setHeaderHidden( true );
 
   addCatalog(_builtinCatalog, "Built In");
   addCatalog(_sessionCatalog, "Current Session");
index dfda19af3fb3110492f3e369ec3263e6c53ec6ce..845ee4b77be3f2b3ea3c46cde71820624e55d5e1 100644 (file)
@@ -153,18 +153,18 @@ GenericGui::GenericGui(YACS::HMI::SuitWrapper* wrapper, QMainWindow *parent)
   _dwTree = new QDockWidget(_parent);
   _dwTree->setVisible(false);
   _dwTree->setWindowTitle("Tree View: edition mode");
-  _dwTree->setObjectName("Tree View");
+  _dwTree->setObjectName("yacsTreeViewDock");
   _parent->addDockWidget(Qt::LeftDockWidgetArea, _dwTree);
   _dwStacked = new QDockWidget(_parent);
   _dwStacked->setVisible(false);
   _dwStacked->setWindowTitle("Input Panel");
-  _dwStacked->setObjectName("Input Panel");
+  _dwStacked->setObjectName("yacsInputPanelDock");
   _dwStacked->setMinimumWidth(270); // --- force a minimum until display
   _parent->addDockWidget(Qt::RightDockWidgetArea, _dwStacked);
   _dwCatalogs = new QDockWidget(_parent);
   _dwCatalogs->setVisible(false);
   _dwCatalogs->setWindowTitle("Catalogs");
-  _dwCatalogs->setObjectName("Catalogs");
+  _dwCatalogs->setObjectName("yacsCatalogsDock");
   _parent->addDockWidget(Qt::RightDockWidgetArea, _dwCatalogs);
   _catalogsWidget = new CatalogWidget(_dwCatalogs,
                                       _builtinCatalog,
@@ -464,6 +464,14 @@ void GenericGui::createActions()
                                             tr("shrink/expand"), tr("shrink or expand the selected node"),
                                             0, _parent, false, this,  SLOT(onShrinkExpand()));
 
+  _shrinkExpandChildren = _wrapper->createAction(getMenuId(), tr("shrink or expand direct children of the selected node"), QIcon("icons:shrinkExpand.png"),
+                                            tr("shrink/expand children"), tr("shrink or expand direct children of the selected node"),
+                                            0, _parent, false, this,  SLOT(onShrinkExpandChildren()));
+
+  _shrinkExpandElementaryRecursively = _wrapper->createAction(getMenuId(), tr("shrink or expand elementary nodes of the selected node recursively"), QIcon("icons:shrinkExpand.png"),
+                                            tr("shrink/expand elementary"), tr("shrink or expand elementary nodes of the selected node recursively"),
+                                            0, _parent, false, this,  SLOT(onShrinkExpandElementaryRecursively()));
+
   _toggleStraightLinksAct = _wrapper->createAction(getMenuId(), tr("draw straight or orthogonal links"), QIcon("icons:straightLink.png"),
                                                    tr("straight/orthogonal"), tr("draw straight or orthogonal links"),
                                                    0, _parent, true, this,  SLOT(onToggleStraightLinks(bool)));
@@ -674,7 +682,7 @@ void GenericGui::createMenus()
 
 void GenericGui::createTools()
 {
-  int aToolId = _wrapper->createTool ( tr( "YACS Toolbar" ) );
+  int aToolId = _wrapper->createTool ( tr( "YACS Toolbar" ), QString( "YACSToolbar" ) );
   _wrapper->createTool( _newSchemaAct, aToolId );
   _wrapper->createTool( _importSchemaAct, aToolId );
   _wrapper->createTool( _wrapper->separator(), aToolId );
@@ -2192,6 +2200,16 @@ void GenericGui::onShrinkExpand() {
   _guiEditor->shrinkExpand();
 }
 
+void GenericGui::onShrinkExpandChildren() {
+  DEBTRACE("GenericGui::onShrinkExpandChildren");
+  _guiEditor->shrinkExpand(Qt::ControlModifier|Qt::ShiftModifier);
+}
+
+void GenericGui::onShrinkExpandElementaryRecursively() {
+  DEBTRACE("GenericGui::onShrinkExpandElementaryRecursively");
+  _guiEditor->shrinkExpand(Qt::ControlModifier);
+}
+
 void GenericGui::onToggleStraightLinks(bool checked)
 {
   Scene::_straightLinks = checked;
index c56fd29d22c21a0bee5fbb5d8076b86e3c199937..2c9c6179cd9a1bdeda8fdad4bd38951beb2ca632 100644 (file)
@@ -162,6 +162,8 @@ namespace YACS
       QAction *_zoomToBlocAct;
       QAction *_centerOnNodeAct;
       QAction *_shrinkExpand;
+      QAction *_shrinkExpandChildren;
+      QAction *_shrinkExpandElementaryRecursively;
 
       QAction *_toggleStraightLinksAct;
       QAction *_toggleAutomaticComputeLinkAct;
@@ -302,6 +304,8 @@ namespace YACS
       void onZoomToBloc();
       void onCenterOnNode();
       void onShrinkExpand();
+      void onShrinkExpandChildren();
+      void onShrinkExpandElementaryRecursively();
       void onToggleStraightLinks(bool checked);
       void onToggleAutomaticComputeLinks(bool checked);
       void onToggleSimplifyLinks(bool checked);
index 57d307f702e29ea585b3fa098bcc3c74f2fbbda5..da2237e89ceccaa4ae3984c7be246b0743c26a04 100644 (file)
@@ -389,7 +389,7 @@ SubjectDataPort*  GuiEditor::CreateOutputPort(SubjectElementaryNode* seNode,
 /*!
  * Subject shrink or expand, command from popup menu: needs a valid selection
  */
-void GuiEditor::shrinkExpand() {
+void GuiEditor::shrinkExpand(Qt::KeyboardModifiers kbModifiers) {
   DEBTRACE("GuiEditor::shrinkExpand");
 
   Subject* sub = QtGuiContext::getQtCurrent()->getSelectedSubject();
@@ -410,12 +410,14 @@ void GuiEditor::shrinkExpand() {
     return;
   };
 
-  if (sni->isExpanded()) {
-    sni->setExpanded(false);
-  } else {
-    sni->setExpanded(true);
-  };
-  sni->reorganizeShrinkExpand();
+  ShrinkMode aShrinkMode = CurrentNode;
+  if (kbModifiers == Qt::ControlModifier) {
+    aShrinkMode = ElementaryNodes;
+  } else if (kbModifiers == (Qt::ShiftModifier|Qt::ControlModifier)) {
+    aShrinkMode = ChildrenNodes;
+  }
+
+  sni->reorganizeShrinkExpand(aShrinkMode);
   sni->showOutScopeLinks();
   sni->updateLinks();
 }
index dc2926cb99ae0182c8f9d64c5f4b39363d2273e0..0e791a57c0093178acd8c2d589e72fe8f5e2b5af 100644 (file)
@@ -83,7 +83,7 @@ namespace YACS
       void PutSubjectInBloc();
       std::string PutGraphInBloc();
       void PutGraphInNode(std::string typeNode);
-      void shrinkExpand();
+      void shrinkExpand(Qt::KeyboardModifiers kbModifiers = Qt::NoModifier);
       void rebuildLinks();
       void arrangeNodes(bool isRecursive);
       void arrangeProc();
index 65bdc6b0d5d5fa0dd88ca440ba9a5529357a6e0a..1a43e83103aa154cc45705b7b1188f55f308b709 100644 (file)
@@ -170,6 +170,8 @@ void ComposedNodeMenu::popupMenu(QWidget *caller, const QPoint &globalPos, const
   menu.addAction(gmain->_zoomToBlocAct);
   menu.addAction(gmain->_centerOnNodeAct);
   menu.addAction(gmain->_shrinkExpand);
+  menu.addAction(gmain->_shrinkExpandChildren);
+  menu.addAction(gmain->_shrinkExpandElementaryRecursively);
   menu.addAction(gmain->_computeLinkAct);
 //   menu.addAction(gmain->_toggleAutomaticComputeLinkAct);
 //   menu.addAction(gmain->_toggleSimplifyLinkAct);
@@ -273,6 +275,8 @@ void ProcMenu::popupMenu(QWidget *caller, const QPoint &globalPos, const QString
   menu.addAction(gmain->_zoomToBlocAct);
   menu.addAction(gmain->_centerOnNodeAct);
   menu.addAction(gmain->_shrinkExpand);
+  menu.addAction(gmain->_shrinkExpandChildren);
+  menu.addAction(gmain->_shrinkExpandElementaryRecursively);
   menu.addAction(gmain->_computeLinkAct);
 //   menu.addAction(gmain->_toggleAutomaticComputeLinkAct);
 //   menu.addAction(gmain->_toggleSimplifyLinkAct);
index ddd78059592241b8441ff0e709d0b39d84f918f1..6c18f9fe4f6f0e1861cc8935d38a1b3353758bcc 100644 (file)
@@ -391,12 +391,10 @@ void SceneComposedNodeItem::removeChildFromList(AbstractSceneItem* child)
   _children.remove(child);
 }
 
-void SceneComposedNodeItem::reorganizeShrinkExpand() {
-  DEBTRACE("SceneComposedNodeItem::reorganizeShrinkExpand " << _expanded << " " << _label.toStdString());
-  bool isExpanding = isExpanded();
-
+void SceneComposedNodeItem::updateControlLinks(bool toExpand)
+{
   //update control links
-  std::list<SubjectControlLink*> lscl=dynamic_cast<SubjectNode*>(_subject)->getSubjectControlLinks();
+  std::list<SubjectControlLink*> lscl=dynamic_cast<SubjectNode*>(getSubject())->getSubjectControlLinks();
   for (std::list<SubjectControlLink*>::const_iterator it = lscl.begin(); it != lscl.end(); ++it) {
     SceneLinkItem* lk = dynamic_cast<SceneLinkItem*>(QtGuiContext::getQtCurrent()->_mapOfSceneItem[*it]);
 
@@ -419,15 +417,44 @@ void SceneComposedNodeItem::reorganizeShrinkExpand() {
     };
 
     if (b1 && b2) {
-      if (isExpanding) {
+      if (toExpand) {
         lk->show();
       } else {
         lk->hide();
       };
     };
   };
+}
+
+void SceneComposedNodeItem::reorganizeShrinkExpand(ShrinkMode theShrinkMode) {
+  DEBTRACE("SceneComposedNodeItem::reorganizeShrinkExpand " << _expanded << " " << _label.toStdString());
+
+  bool toExpand = true;
+  if (theShrinkMode == CurrentNode) {
+    // shrink/expand current node only
+    toExpand = !isExpanded();
+
+    updateControlLinks(toExpand);
+    shrinkExpandRecursive(toExpand, true, theShrinkMode);
+
+  } else {
+    if (!isExpanded())
+      return;
+    // shrink/expand child nodes
+    toExpand = !hasExpandedChildren(theShrinkMode == ElementaryNodes);
+    for (list<AbstractSceneItem*>::const_iterator it=_children.begin(); it!=_children.end(); ++it) {
+      SceneItem* item = dynamic_cast<SceneItem*>(*it);
+      SceneNodeItem *sni = dynamic_cast<SceneNodeItem*>(item);
+      item->shrinkExpandRecursive(toExpand, true, theShrinkMode);
+    }
+    _ancestorShrinked = !toExpand;
+    _width = _expandedWidth;
+    _height = _expandedHeight;
+    _shownState = expandShown;
+    adjustHeader();
+    rebuildLinks();
+  }
 
-  shrinkExpandRecursive(isExpanding, true);
   if (Scene::_autoComputeLinks)
     {
       SubjectProc* subproc = QtGuiContext::getQtCurrent()->getSubjectProc();
@@ -437,70 +464,116 @@ void SceneComposedNodeItem::reorganizeShrinkExpand() {
     }
 }
 
-void SceneComposedNodeItem::shrinkExpandRecursive(bool isExpanding, bool fromHere)
+bool SceneComposedNodeItem::hasExpandedChildren(bool recursively)
+{
+  bool res = false;
+  for (list<AbstractSceneItem*>::const_iterator it=_children.begin(); it!=_children.end() && !res; ++it) {
+    SceneItem* item = dynamic_cast<SceneItem*>(*it);
+    SceneNodeItem *sni = dynamic_cast<SceneNodeItem*>(item);
+    if (sni->isExpanded()) {
+      res = true;
+      if (recursively)
+        if (SceneComposedNodeItem *scni = dynamic_cast<SceneComposedNodeItem*>(sni))
+          res = scni->hasExpandedChildren(recursively);
+    }
+  }
+  return res;
+}
+
+void SceneComposedNodeItem::shrinkExpandRecursive(bool toExpand, bool fromHere, ShrinkMode theShrinkMode)
 {
   DEBTRACE("SceneComposedNodeItem::shrinkExpandRecursive " << isExpanding << " " << fromHere << " " << isExpanded() << " " << _label.toStdString());
   
-  if (!isExpanding)
+  bool toChangeShrinkState = false;
+  switch (theShrinkMode) {
+  case CurrentNode:
+    if (fromHere)
+      toChangeShrinkState = true;
+    break;
+  case ChildrenNodes:
+    if (fromHere)
+      toChangeShrinkState = true;
+    break;
+  case ElementaryNodes:
+    toChangeShrinkState = false;
+    break;
+  }
+  if (toChangeShrinkState) {
+    if (toExpand != isExpanded())
+      setExpanded(toExpand);
+  } else if (!isExpanded() && theShrinkMode == ElementaryNodes) {
+    return;
+  }
+
+  updateControlLinks(toExpand);
+
+  if (!toExpand)
     { // ---collapsing: hide first children , then resize
       for (list<AbstractSceneItem*>::const_iterator it=_children.begin(); it!=_children.end(); ++it)
         {
           SceneItem* item = dynamic_cast<SceneItem*>(*it);
-          item->shrinkExpandRecursive(false, false);
-          item->hide();  
-          DEBTRACE("------------------------------- Hide " << item->getLabel().toStdString());
-          item->shrinkExpandLink(false);  
+          item->shrinkExpandRecursive(toExpand, false, theShrinkMode);
+          if (theShrinkMode != ElementaryNodes) {
+            item->hide();  
+            DEBTRACE("------------------------------- Hide " << item->getLabel().toStdString());
+            item->shrinkExpandLink(false);  
+          }
         }
 
-      if (_shownState == expandShown)
-        {
-           _expandedWidth = _width;
-           _expandedHeight = _height;
-        }
+      if (toChangeShrinkState || theShrinkMode != ElementaryNodes) {
+        if (_shownState == expandShown)
+          {
+             _expandedWidth = _width;
+             _expandedHeight = _height;
+          }
 
-      if (fromHere)
-        {
-          _shownState = shrinkShown;
-        }
-      else
-        {
-          _ancestorShrinked = true;
-          _shownState = shrinkHidden;
-        }
+        if (fromHere)
+          {
+            _shownState = shrinkShown;
+          }
+        else
+          {
+            _ancestorShrinked = true;
+            _shownState = shrinkHidden;
+          }
 
-      _width  = 2*Resource::Corner_Margin + 2*Resource::DataPort_Width + Resource::Space_Margin;
-      if (_shownState == shrinkShown)
-        _height = getHeaderBottom() + Resource::Corner_Margin;
-      else
-        _height = Resource::Header_Height + Resource::Corner_Margin;
+        _width  = 2*Resource::Corner_Margin + 2*Resource::DataPort_Width + Resource::Space_Margin;
+        if (_shownState == shrinkShown)
+          _height = getHeaderBottom() + Resource::Corner_Margin;
+        else
+          _height = Resource::Header_Height + Resource::Corner_Margin;
       
-      if (_shownState == shrinkHidden) // shrink of ancestor
-        setPos(0 ,0);
-      else
-        setPos(_expandedPos);
-      adjustHeader();
-      if (_progressItem)
-        _progressItem->adjustGeometry();
+        if (_shownState == shrinkHidden) // shrink of ancestor
+          setPos(0 ,0);
+        else
+          setPos(_expandedPos);
+        adjustHeader();
+        if (_progressItem)
+          _progressItem->adjustGeometry();
+      }
     }
   else
     { // --- expanding: resize, then show children
-      _ancestorShrinked = false;
+      if (toChangeShrinkState)
+        _ancestorShrinked = false;
 
       for (list<AbstractSceneItem*>::const_iterator it=_children.begin(); it!=_children.end(); ++it)
         {
           SceneItem* item = dynamic_cast<SceneItem*>(*it);
-          item->shrinkExpandRecursive(isExpanded(), false); 
-          if (isExpanded())
-            {
-              item->show();  
-              DEBTRACE("------------------------------- Show " << item->getLabel().toStdString());
-            }
-          else
-            {
-              item->hide();  
-              DEBTRACE("------------------------------- Hide " << item->getLabel().toStdString());
-            }
-          item->shrinkExpandLink(fromHere);  
+          item->shrinkExpandRecursive(isExpanded(), false, theShrinkMode); 
+          if (theShrinkMode != ElementaryNodes) {
+            if (isExpanded())
+              {
+                item->show();  
+                DEBTRACE("------------------------------- Show " << item->getLabel().toStdString());
+              }
+            else
+              {
+                item->hide();  
+                DEBTRACE("------------------------------- Hide " << item->getLabel().toStdString());
+              }
+            item->shrinkExpandLink(fromHere);  
+          }
         }
 
       if (isExpanded())
index 37e55d264fa3e534738321291d8a051a6c5bdd86..639cf0c86d65d13d08f28ab2be6f5d9b852799e7 100644 (file)
@@ -45,8 +45,8 @@ namespace YACS
       virtual std::list<AbstractSceneItem*> getChildren();
       virtual void removeChildFromList(AbstractSceneItem* child);
       virtual void reorganize();
-      virtual void reorganizeShrinkExpand();
-      virtual void shrinkExpandRecursive(bool isExpanding, bool fromHere);
+      virtual void reorganizeShrinkExpand(ShrinkMode theShrinkMode);
+      virtual void shrinkExpandRecursive(bool isExpanding, bool fromHere, ShrinkMode theShrinkMode);
       virtual void shrinkExpandLink(bool se);
       virtual void collisionResolv(SceneItem* child, QPointF oldPos);
       virtual void rebuildLinks();
@@ -55,10 +55,12 @@ namespace YACS
       virtual void arrangeChildNodes();
       virtual void adjustColors();
       virtual void setShownState(shownState ss);
+      virtual bool hasExpandedChildren(bool recursively);
     protected:
       void dragEnterEvent(QGraphicsSceneDragDropEvent *event);
       void dragLeaveEvent(QGraphicsSceneDragDropEvent *event);
       void dropEvent(QGraphicsSceneDragDropEvent *event);
+      void updateControlLinks(bool toExpand);
       virtual QColor getPenColor();
       virtual QColor getBrushColor();
 
index 40d4b4d339f1672db1b019c909eb0db04e4cb317..9396fe049b75bca9cf49047b5b8997949fefb85e 100644 (file)
@@ -187,10 +187,12 @@ void SceneElementaryNodeItem::popupMenu(QWidget *caller, const QPoint &globalPos
   m.popupMenu(caller, globalPos);
 }
 
-void SceneElementaryNodeItem::reorganizeShrinkExpand()
+void SceneElementaryNodeItem::reorganizeShrinkExpand(ShrinkMode theShrinkMode)
 {
   DEBTRACE("SceneElementaryNodeItem::reorganizeShrinkExpand " << isExpanded() << " "  << _label.toStdString());
-  shrinkExpandRecursive(isExpanded(), true);
+  if (theShrinkMode != CurrentNode)
+    return;
+  shrinkExpandRecursive(!isExpanded(), true, theShrinkMode);
   if (Scene::_autoComputeLinks)
     {
       SubjectProc* subproc = QtGuiContext::getQtCurrent()->getSubjectProc();
@@ -200,30 +202,48 @@ void SceneElementaryNodeItem::reorganizeShrinkExpand()
     }
 }
 
-void SceneElementaryNodeItem::shrinkExpandRecursive(bool isExpanding, bool fromHere)
+void SceneElementaryNodeItem::shrinkExpandRecursive(bool toExpand, bool fromHere, ShrinkMode theShrinkMode)
 {
-  DEBTRACE("SceneElementaryNodeItem::shrinkExpandRecursive " << isExpanding << " " << fromHere << " "  << isExpanded() << " " << _label.toStdString());
-  if (isExpanding)
-    {
+  DEBTRACE("SceneElementaryNodeItem::shrinkExpandRecursive " << toExpand << " " << fromHere << " "  << isExpanded() << " " << _label.toStdString());
+  
+  bool toChangeShrinkState = false;
+  switch (theShrinkMode) {
+  case CurrentNode:
+    if (fromHere)
+      toChangeShrinkState = true;
+    break;
+  case ChildrenNodes:
+    if (fromHere)
+      toChangeShrinkState = true;
+    break;
+  case ElementaryNodes:
+    toChangeShrinkState = true;
+    break;
+  }
+  if (toChangeShrinkState && toExpand != isExpanded())
+    setExpanded(toExpand);
+
+  if (toExpand) {
+    if (toChangeShrinkState) {
       _ancestorShrinked = false;
+      _shownState = expandShown;
+    } else {
       if (isExpanded())
         _shownState = expandShown;
       else
         _shownState = shrinkShown;
     }
-  else
-    {
-      if (fromHere)
-        _shownState = shrinkShown;
-      else
-        {
-          _ancestorShrinked = true;
-          _shownState = shrinkHidden;
-        }
+  } else {
+    if (fromHere || theShrinkMode==ElementaryNodes) {
+      _shownState = shrinkShown;
+    } else {
+      _ancestorShrinked = true;
+      _shownState = shrinkHidden;
     }
+  }
 
   if (_shownState == shrinkHidden) // shrink of ancestor
-    setPos(0 ,0);
+    setPos(0, 0);
   else
     setPos(_expandedPos);
 
index 66ecbe387225135e72f4aaf33b334dc85cee5cff..fdc2f16ae151014a9941cc2e0b8a778230aa688f 100644 (file)
@@ -42,8 +42,8 @@ namespace YACS
       virtual void popupMenu(QWidget *caller, const QPoint &globalPos);
       virtual void autoPosNewPort(AbstractSceneItem *item, int nbPorts);
       virtual void reorganize();
-      virtual void reorganizeShrinkExpand();
-      virtual void shrinkExpandRecursive(bool isExpanding, bool fromHere);
+      virtual void reorganizeShrinkExpand(ShrinkMode theShrinkMode);
+      virtual void shrinkExpandRecursive(bool isExpanding, bool fromHere, ShrinkMode theShrinkMode);
       virtual void setShownState(shownState ss);
     protected:
       int _maxPorts;
index 35fb5f077f4520e01c61cbedb5cfbb0bcd0704f7..d097bb19b80c62a057d998c2bc8cbd7a7cf99739 100644 (file)
@@ -436,7 +436,7 @@ void SceneItem::shrinkExpandLink(bool se)
 }
 
 
-void SceneItem::shrinkExpandRecursive(bool isExpanding, bool fromHere)
+void SceneItem::shrinkExpandRecursive(bool isExpanding, bool fromHere, ShrinkMode theShrinkMode)
 {
 }
 
index 06cf276a351886d44a6a24b3948d1ce840289511..a04e6dba1eaa3899a0fc3465956042f1c332b64e 100644 (file)
@@ -33,6 +33,13 @@ namespace YACS
 {
   namespace HMI
   {
+    typedef enum
+      {
+        CurrentNode,
+        ChildrenNodes,
+        ElementaryNodes
+      } ShrinkMode;
+    
     class Scene;
 
     class RootSceneItem: public GuiObserver
@@ -131,7 +138,7 @@ namespace YACS
       virtual void updateChildItems();
       virtual void updateLinks();
       virtual void shrinkExpandLink(bool se);
-      virtual void shrinkExpandRecursive(bool isExpanding, bool fromHere);
+      virtual void shrinkExpandRecursive(bool isExpanding, bool fromHere, ShrinkMode theShrinkMode);
       bool isAncestorShrinked() { return _ancestorShrinked; };
       bool _blocX;
       bool _blocY;
index fdff648d0804d06460e83d42bc914ed71ea4db4a..8c7ee43ab177c80850df64de01474eb43679ca36 100644 (file)
@@ -179,7 +179,7 @@ void SceneNodeItem::arrangeChildNodes()
 {
 }
 
-void SceneNodeItem::reorganizeShrinkExpand()
+void SceneNodeItem::reorganizeShrinkExpand(ShrinkMode theShrinkMode)
 {
 }
 
@@ -310,7 +310,7 @@ void SceneNodeItem::mouseMoveEvent(QGraphicsSceneMouseEvent * event)
 void SceneNodeItem::mouseDoubleClickEvent(QGraphicsSceneMouseEvent *event)
 {
   DEBTRACE("SceneNodeItem::mouseDoubleClickEvent");
-  QtGuiContext::getQtCurrent()->getGMain()->_guiEditor->shrinkExpand();
+  QtGuiContext::getQtCurrent()->getGMain()->_guiEditor->shrinkExpand(QApplication::keyboardModifiers());
 }
 
 void SceneNodeItem::setTopLeft(QPointF topLeft)
index 5205a49383a0627377f08009762e28de7ba465cd..292c31d775179494d34eb81d8703cb02f9d988a6 100644 (file)
@@ -72,7 +72,7 @@ namespace YACS
       virtual void updateName();
       virtual void arrangeNodes(bool isRecursive);
       virtual void arrangeChildNodes();
-      virtual void reorganizeShrinkExpand();
+      virtual void reorganizeShrinkExpand(ShrinkMode theShrinkMode);
       virtual void updateChildItems();
       virtual void shrinkExpandLink(bool se);
       virtual void showOutScopeLinks();
index 14ec4befe06571e9f7a043611676e8ea93cbae0c..919d9510d044ee8f22ac57a206e1d832d0733c58 100644 (file)
@@ -52,41 +52,3 @@ void SceneProcItem::popupMenu(QWidget *caller, const QPoint &globalPos)
   m.popupMenu(caller, globalPos);
 }
 
-void SceneProcItem::reorganizeShrinkExpand() {
-  if (_children.size() == 0)
-    return;
-  bool isExpanding = true;
-  DEBTRACE("SceneProcItem::reorganizeShrinkExpand " << _expanded << " " << _label.toStdString());
-  for (list<AbstractSceneItem*>::const_iterator it=_children.begin(); it!=_children.end(); ++it)
-    {
-      SceneItem* item = dynamic_cast<SceneItem*>(*it);
-      SceneNodeItem *sni = dynamic_cast<SceneNodeItem*>(item);
-      if (sni->isExpanded()) {
-         isExpanding = false;
-         break;
-      }
-    }
-  for (list<AbstractSceneItem*>::const_iterator it=_children.begin(); it!=_children.end(); ++it)
-    {
-      SceneItem* item = dynamic_cast<SceneItem*>(*it);
-      SceneNodeItem *sni = dynamic_cast<SceneNodeItem*>(item);
-      if (!isExpanding && sni->isExpanded()) {
-       sni->setExpanded(false);
-       item->shrinkExpandRecursive(false, true);
-       DEBTRACE("------------------------------- Hide " << item->getLabel().toStdString());
-      }
-      if (isExpanding && !sni->isExpanded()) {
-       sni->setExpanded(true);
-       item->shrinkExpandRecursive(true, false);
-       DEBTRACE("------------------------------- Show " << item->getLabel().toStdString());
-      }
-      item->shrinkExpandLink(isExpanding);
-    }
-  _ancestorShrinked = !isExpanding;
-  _width = _expandedWidth;
-  _height = _expandedHeight;
-  _shownState = expandShown;
-  adjustHeader();
-  rebuildLinks();
-}
-
index 8e4ce68f00677753f2ea9a3562aa4a27f4210f8f..725accab3fd56dce23d61563c44874ab631c050b 100644 (file)
@@ -33,7 +33,6 @@ namespace YACS
                     QString label, Subject *subject);
       virtual ~SceneProcItem();
       virtual void popupMenu(QWidget *caller, const QPoint &globalPos);
-      virtual void reorganizeShrinkExpand();
     };
   }
 }
index a6709da84afe66339d8a8a9c94ab2f582b4a1edb..b9d5d923dcd477772a6833467cd8669b155467a4 100755 (executable)
@@ -66,7 +66,6 @@ IF(SALOME_YACS_USE_SWIG)
     "Install path: SALOME Python shared modules")
 ENDIF(SALOME_YACS_USE_SWIG)
 
-SET(SALOME_INSTALL_RES share/salome/resources CACHE PATH "Install path: SALOME resources")
 SET(SALOME_PMML_INSTALL_RES_DATA "${SALOME_INSTALL_RES}/pmml" CACHE PATH "Install path: SALOME PMML specific data")
 
 # Sources 
@@ -92,8 +91,6 @@ SET(pmml_SOURCES
   PMMLlib.cxx
   )
 
-ADD_SUBDIRECTORY(resources)
-
 ADD_LIBRARY(pmmlLib SHARED ${pmml_SOURCES})
 TARGET_LINK_LIBRARIES(pmmlLib  ${LIBXML2_LIBRARIES} ) 
 INSTALL(TARGETS pmmlLib EXPORT ${PROJECT_NAME}TargetGroup DESTINATION ${SALOME_INSTALL_LIBS})
index 01984504de7b5d85274a76518b9aefd8e24b9621..b079a9050cf8d40c2c06f23916283609377b0f21 100755 (executable)
@@ -95,4 +95,5 @@ ENDIF(URANIE AND WIN32)
 
 ADD_TEST(TestPMML TestPMML)
 
+FILE(COPY ${PROJECT_SOURCE_DIR}/src/pmml/Test/samples  DESTINATION . )
 INSTALL(TARGETS TestPMML DESTINATION ${SALOME_INSTALL_BINS})
index f99a70d07e8337a78fc81c17cc19f1de5a2c4d11..c30b90039c288516c731170c454d9e674581f4ca 100755 (executable)
@@ -27,20 +27,13 @@ using namespace std;
 
 void PMMLBasicsTest1::setUp()
 {
+    resourcesDir = "samples/";
 #ifdef WIN32
-    const char* p = std::getenv("YACS_ROOT_DIR");
-    std::string strP("");
-    if (p) 
-        strP = std::string(p);
-    else 
-        throw std::string("unable to get YACS_ROOT_DIR");
-    resourcesDir = strP;
-    resourcesDir += "/share/salome/resources/pmml/";
     const char* user = std::getenv("USERPROFILE");
     std::string strUser("");
-    if (user) 
+    if (user)
         strUser = std::string(user);
-    else 
+    else
         throw std::string("unable to get USERPROFILE");
     tmpDir = strUser;
     tmpDir += "\\tmp";
@@ -48,14 +41,12 @@ void PMMLBasicsTest1::setUp()
     std::string cmd = "mkdir " + tmpDir; 
     system( cmd.c_str() );  
 #else
-    resourcesDir =  getenv("YACS_ROOT_DIR");
-    resourcesDir += "/share/salome/resources/pmml/";
     tmpDir = "/tmp/";
     tmpDir += getenv("USER");
     tmpDir += "/PmmlUnitTest/";
     std::string cmd = "mkdir -p " + tmpDir; 
     system( cmd.c_str() );  
-#endif    
+#endif
 }
 
 void PMMLBasicsTest1::tearDown()
diff --git a/src/pmml/Test/samples/CMakeLists.txt b/src/pmml/Test/samples/CMakeLists.txt
new file mode 100755 (executable)
index 0000000..68c41e5
--- /dev/null
@@ -0,0 +1,45 @@
+# Copyright (C) 2012-2014  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+SET(PMML_RESOURCES_FILES
+  # ici les noms des fichiers ressources 
+  ann_model.pmml
+  ann_model_2.pmml
+  lr_model.pmml
+  lr_model_2.pmml
+  no_model.pmml
+  two_models_ann_lr.pmml
+  unittest_ref_ann_model.cpp
+  unittest_ref_ann_model.f
+  unittest_ref_ann_model.py
+  unittest_ref_lr_model.cpp
+  unittest_ref_lr_model.f
+  unittest_ref_lr_model.py
+  win32_ann_model.pmml
+  win32_lr_model.pmml 
+  )
+
+INSTALL(FILES ${PMML_RESOURCES_FILES} DESTINATION ${SALOME_PMML_INSTALL_RES_DATA})
+
+# MESSAGE(STATUS "Creation of ${CMAKE_CURRENT_BINARY_DIR}/PMMLCatalog.xml")
+# CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/PMMLCatalog.xml.in ${CMAKE_CURRENT_BINARY_DIR}/PMMLCatalog.xml @ONLY)
+# MESSAGE(STATUS "Creation of ${CMAKE_CURRENT_BINARY_DIR}/SalomeApp.xml")
+# CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/SalomeApp.xml.in ${CMAKE_CURRENT_BINARY_DIR}/SalomeApp.xml @ONLY)
+
+# INSTALL(FILES ${CMAKE_CURRENT_BINARY_DIR}/PMMLCatalog.xml ${CMAKE_CURRENT_BINARY_DIR}/SalomeApp.xml DESTINATION ${SALOME_PMML_INSTALL_RES_DATA})
diff --git a/src/pmml/Test/samples/ann_model.pmml b/src/pmml/Test/samples/ann_model.pmml
new file mode 100755 (executable)
index 0000000..bd4d4e8
--- /dev/null
@@ -0,0 +1,124 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="sANNName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LinearNorm orig="0.000000e+00" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
+            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+00">
+        <Con from="0" weight="7.536629e-01"/>
+        <Con from="1" weight="1.653660e-03"/>
+        <Con from="2" weight="4.725001e-03"/>
+        <Con from="3" weight="9.969786e-03"/>
+        <Con from="4" weight="1.787976e-01"/>
+        <Con from="5" weight="-1.809809e-01"/>
+        <Con from="6" weight="-1.735688e-01"/>
+        <Con from="7" weight="8.559675e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+00">
+        <Con from="8" weight="6.965512e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+</PMML>
diff --git a/src/pmml/Test/samples/ann_model_2.pmml b/src/pmml/Test/samples/ann_model_2.pmml
new file mode 100755 (executable)
index 0000000..c64b4fb
--- /dev/null
@@ -0,0 +1,132 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-3_0" version="3.0">
+  <Header copyright="texte copyright" description="texte description">
+    <Application name="Uranie" version="2.3/1"/>
+    <Annotation>date Fri Oct 07, 2011</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="sANNName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LinearNorm orig="0" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="2" norm="1."/>
+            <LinearNorm orig="1.400018e+03" norm="-1."/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+00">
+        <Con from="0" weight="7.536629e-01"/>
+        <Con from="1" weight="1.653660e-03"/>
+        <Con from="2" weight="4.725001e-03"/>
+        <Con from="3" weight="9.969786e-03"/>
+        <Con from="4" weight="1.787976e-01"/>
+        <Con from="5" weight="-1.809809e-01"/>
+        <Con from="6" weight="-1.735688e-01"/>
+        <Con from="7" weight="8.559675e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+00">
+        <Con from="8" weight="6.965512e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+      <NeuralOutput outputNeuron="-1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="2." norm="-1"/>
+            <LinearNorm orig="5.781171e+01" norm="1"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+</PMML>
diff --git a/src/pmml/Test/samples/lr_model.pmml b/src/pmml/Test/samples/lr_model.pmml
new file mode 100755 (executable)
index 0000000..fae9d26
--- /dev/null
@@ -0,0 +1,38 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="x6" displayName=" x_{6}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="1.100000e+01" rightMargin="2.300000e+01"/>
+    </DataField>
+    <DataField name="x8" displayName=" x_{8}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="2.810000e+01" rightMargin="7.670000e+01"/>
+    </DataField>
+    <DataField name="x1" displayName=" x_{1}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="6.360000e+00" rightMargin="1.251000e+01"/>
+    </DataField>
+  </DataDictionary>
+  <RegressionModel functionName="regression" modelName="Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="3.837365e+00">
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
+      <PredictorTerm coefficient="-2.201903e-02">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-04">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
diff --git a/src/pmml/Test/samples/lr_model_2.pmml b/src/pmml/Test/samples/lr_model_2.pmml
new file mode 100755 (executable)
index 0000000..2f1ef6b
--- /dev/null
@@ -0,0 +1,38 @@
+<?xml version="1.0"?>
+<PMML version="4.1" xmlns="http://www.dmg.org/PMML-4_1">
+  <Header copyright="myCopyright" description="Text Description">
+    <Application name="Uranie" version="2013.7/18"/>
+    <Annotation>Compilation date : Wed Jul 17, 2013</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="x6" displayName=" x_{6}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="1.100000e+01" rightMargin="2.300000e+01"/>
+    </DataField>
+    <DataField name="x8" displayName=" x_{8}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="2.810000e+01" rightMargin="7.670000e+01"/>
+    </DataField>
+    <DataField name="x1" displayName=" x_{1}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="6.360000e+00" rightMargin="1.251000e+01"/>
+    </DataField>
+  </DataDictionary>
+  <RegressionModel functionName="regression" modelName="Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable>
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
+      <PredictorTerm coefficient="-2.201903e-02">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-04">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
diff --git a/src/pmml/Test/samples/no_model.pmml b/src/pmml/Test/samples/no_model.pmml
new file mode 100755 (executable)
index 0000000..3951518
--- /dev/null
@@ -0,0 +1,144 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="modelName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LineakLRAndkANNrNorm orig="0.000000e+00" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
+            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+00">
+        <Con from="0" weight="7.536629e-01"/>
+        <Con from="1" weight="1.653660e-03"/>
+        <Con from="2" weight="4.725001e-03"/>
+        <Con from="3" weight="9.969786e-03"/>
+        <Con from="4" weight="1.787976e-01"/>
+        <Con from="5" weight="-1.809809e-01"/>
+        <Con from="6" weight="-1.735688e-01"/>
+        <Con from="7" weight="8.559675e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+00">
+        <Con from="8" weight="6.965512e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+  <RegressionModel functionName="regression" modelName="modelName" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="3.837365e+00">
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
+      <PredictorTerm coefficient="-2.201903e-02">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-04">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
diff --git a/src/pmml/Test/samples/two_models_ann_lr.pmml b/src/pmml/Test/samples/two_models_ann_lr.pmml
new file mode 100755 (executable)
index 0000000..3951518
--- /dev/null
@@ -0,0 +1,144 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="modelName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LinearNorm orig="0.000000e+00" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
+            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+00">
+        <Con from="0" weight="7.536629e-01"/>
+        <Con from="1" weight="1.653660e-03"/>
+        <Con from="2" weight="4.725001e-03"/>
+        <Con from="3" weight="9.969786e-03"/>
+        <Con from="4" weight="1.787976e-01"/>
+        <Con from="5" weight="-1.809809e-01"/>
+        <Con from="6" weight="-1.735688e-01"/>
+        <Con from="7" weight="8.559675e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+00">
+        <Con from="8" weight="6.965512e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+  <RegressionModel functionName="regression" modelName="modelName" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="3.837365e+00">
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
+      <PredictorTerm coefficient="-2.201903e-02">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-04">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
diff --git a/src/pmml/Test/samples/unittest_ref_ann_model.cpp b/src/pmml/Test/samples/unittest_ref_ann_model.cpp
new file mode 100755 (executable)
index 0000000..643308c
--- /dev/null
@@ -0,0 +1,67 @@
+#define ActivationFunction(sum) ( 1.0 / ( 1.0 + exp( -1.0 * sum )) )
+void myTestFunc(double *param, double *res)
+{
+  ////////////////////////////// 
+  //
+  // File used by unit test
+  // PMMLBasicsTest1::testExportNeuralNetworkCpp
+  //
+  ////////////////////////////// 
+
+  int nInput   = 8;
+  int nOutput   = 1;
+  int nHidden  = 1;
+  const int nNeurones  = 10;
+  double myTestFunc_act[nNeurones];
+
+  // --- Preprocessing of the inputs and outputs
+  double myTestFunc_minInput[] = {
+  0.099999, 25048.9, 89334.9, 89.5523, 1050, 
+  760.001, 1400.02, 10950, 
+  };
+  double myTestFunc_minOutput[] = {
+  77.8117,   };
+  double myTestFunc_maxInput[] = {
+  0.028899, 14419.8, 15180.8, 15.2866, 34.6793, 
+  34.6718, 161.826, 632.913, 
+  };
+  double myTestFunc_maxOutput[] = {
+  45.7061,   };
+
+  // --- Values of the weights
+  double myTestFunc_valW[] = {
+  -1.74548, 6.96551, -1.26357, 0.753663, 0.00165366, 
+  0.004725, 0.00996979, 0.178798, -0.180981, -0.173569, 
+  0.0855967, 
+  };
+  // --- Constants
+  int indNeurone = 0;
+  int CrtW;
+  double sum;
+
+  // --- Input Layers
+  for(int i = 0; i < nInput; i++) {
+     myTestFunc_act[indNeurone++] = ( param[i] - myTestFunc_minInput[i] ) / myTestFunc_maxInput[i];
+  }
+
+  // --- Hidden Layers
+  for (int member = 0; member < nHidden; member++) {
+     int CrtW = member * ( nInput + 2) + 2;
+     sum = myTestFunc_valW[CrtW++];
+     for (int source = 0; source < nInput; source++) {
+         sum += myTestFunc_act[source] * myTestFunc_valW[CrtW++];
+       }
+       myTestFunc_act[indNeurone++] = ActivationFunction(sum);
+  }
+
+  // --- Output
+  for (int member = 0; member < nOutput; member++) {
+    sum = myTestFunc_valW[0];
+    for (int source = 0; source < nHidden; source++) {
+      CrtW = source * ( nInput + 2) + 1;
+      sum += myTestFunc_act[nInput+source] * myTestFunc_valW[CrtW];
+    }
+    myTestFunc_act[indNeurone++] = sum;
+    res[member] = myTestFunc_minOutput[member] + myTestFunc_maxOutput[member] * sum;
+  }
+}
diff --git a/src/pmml/Test/samples/unittest_ref_ann_model.f b/src/pmml/Test/samples/unittest_ref_ann_model.f
new file mode 100755 (executable)
index 0000000..7996d31
--- /dev/null
@@ -0,0 +1,64 @@
+      SUBROUTINE myTestFunc(rw,r,tu,tl,hu,hl,l,kw,yhat)
+C --- *********************************************
+C --- 
+C ---  File used by unit test
+C ---  PMMLBasicsTest1::testExportNeuralNetworkFortran
+C --- 
+C --- *********************************************
+      IMPLICIT DOUBLE PRECISION (V)
+      DOUBLE PRECISION rw
+      DOUBLE PRECISION r
+      DOUBLE PRECISION tu
+      DOUBLE PRECISION tl
+      DOUBLE PRECISION hu
+      DOUBLE PRECISION hl
+      DOUBLE PRECISION l
+      DOUBLE PRECISION kw
+      DOUBLE PRECISION yhat
+
+C --- Preprocessing of the inputs
+      VXNrw = ( rw - 0.099999D0 ) / 0.028899D0
+      VXNr = ( r - 25048.9D0 ) / 14419.8D0
+      VXNtu = ( tu - 89334.9D0 ) / 15180.8D0
+      VXNtl = ( tl - 89.5523D0 ) / 15.2866D0
+      VXNhu = ( hu - 1050D0 ) / 34.6793D0
+      VXNhl = ( hl - 760.001D0 ) / 34.6718D0
+      VXNl = ( l - 1400.02D0 ) / 161.826D0
+      VXNkw = ( kw - 10950D0 ) / 632.913D0
+
+C --- Values of the weights
+      VW1 = -1.74548
+      VW2 = 6.96551
+      VW3 = -1.26357
+      VW4 = 0.753663
+      VW5 = 0.00165366
+      VW6 = 0.004725
+      VW7 = 0.00996979
+      VW8 = 0.178798
+      VW9 = -0.180981
+      VW10 = -0.173569
+      VW11 = 0.0855967
+
+C --- hidden neural number 1
+      VAct1 = VW3
+     1      + VW4 * VXNrw
+     1      + VW5 * VXNr
+     1      + VW6 * VXNtu
+     1      + VW7 * VXNtl
+     1      + VW8 * VXNhu
+     1      + VW9 * VXNhl
+     1      + VW10 * VXNl
+     1      + VW11 * VXNkw
+
+      VPot1 = 1.D0 / (1.D0 + DEXP(-1.D0 * VAct1))
+
+C --- Output
+      VOut = VW1
+     1    + VW2 * VPot1
+
+C --- Pretraitment of the output
+      yhat = 77.8117D0 + 45.7061D0 * VOut;
+
+C --- 
+      RETURN
+      END
diff --git a/src/pmml/Test/samples/unittest_ref_ann_model.py b/src/pmml/Test/samples/unittest_ref_ann_model.py
new file mode 100755 (executable)
index 0000000..2a1f5e5
--- /dev/null
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from math import tanh, exp
+
+def ActivationFunction(sum): 
+    return ( 1.0 / ( 1.0 + exp( -1.0 * sum ) ) ); 
+
+def myTestFunc(param):
+
+    ############################## 
+    #
+    # File used by unit test
+    # PMMLBasicsTest1::testExportNeuralNetworkPython
+    #
+    ############################## 
+
+    nInput = 8;
+    nOutput = 1;
+    nHidden = 1;
+    nNeurones = 10;
+    myTestFunc_act = [];
+    res = [];
+
+    # --- Preprocessing of the inputs and outputs
+    myTestFunc_minInput = [
+      0.099999, 25048.9, 89334.9, 89.5523, 1050, 
+    760.001, 1400.02, 10950, 
+    ];
+    myTestFunc_minOutput = [
+        77.8117
+    ];
+    myTestFunc_maxInput = [
+    0.028899, 14419.8, 15180.8, 15.2866, 34.6793, 
+    34.6718, 161.826, 632.913, 
+    ];
+    myTestFunc_maxOutput = [
+        45.7061
+    ];
+    # --- Values of the weights
+    myTestFunc_valW = [
+    -1.74548, 6.96551, -1.26357, 0.753663, 0.00165366, 
+    0.004725, 0.00996979, 0.178798, -0.180981, -0.173569, 
+    0.0855967, 
+    ];
+    # --- Constants
+    indNeurone = 0;
+
+    # --- Input Layers
+    for i in range(nInput) :
+        myTestFunc_act.append( ( param[i] - myTestFunc_minInput[i] ) / myTestFunc_maxInput[i] ) ;
+        indNeurone += 1 ;
+        pass
+
+    # --- Hidden Layers
+    for member in range(nHidden):
+        CrtW = member * ( nInput + 2) + 2;
+        sum = myTestFunc_valW[CrtW];
+        CrtW += 1 ;
+        for source in range(nInput) :
+            sum += myTestFunc_act[source] * myTestFunc_valW[CrtW];
+            CrtW += 1 ;
+            pass
+        myTestFunc_act.append( ActivationFunction(sum) ) ;
+        indNeurone += 1 ;
+        pass
+
+    # --- Output
+    for member in range(nOutput):
+        sum = myTestFunc_valW[0];
+        for source in range(nHidden):
+            CrtW = source * ( nInput + 2) + 1;
+            sum += myTestFunc_act[nInput+source] * myTestFunc_valW[CrtW];
+            pass
+        myTestFunc_act.append( sum );
+        indNeurone += 1 ;
+        res.append( myTestFunc_minOutput[member] + myTestFunc_maxOutput[member] * sum );
+        pass
+
+    return res;
+
+
diff --git a/src/pmml/Test/samples/unittest_ref_lr_model.cpp b/src/pmml/Test/samples/unittest_ref_lr_model.cpp
new file mode 100755 (executable)
index 0000000..1072f46
--- /dev/null
@@ -0,0 +1,27 @@
+void myTestFunc(double *param, double *res)
+{
+  ////////////////////////////// 
+  //
+  // File used by unit test
+  // PMMLBasicsTest1::testExportLinearRegressionCpp
+  //
+  ////////////////////////////// 
+
+  // Intercept
+  double y = 3.83737;
+
+  // Attribute : x6
+  y += param[0]*0.475913;
+
+  // Attribute : x8
+  y += param[1]*0.142884;
+
+  // Attribute : x6x8
+  y += param[2]*-0.022019;
+
+  // Attribute : x6x6x8
+  y += param[3]*0.000536256;
+
+  // Return the value
+  res[0] = y;
+}
diff --git a/src/pmml/Test/samples/unittest_ref_lr_model.f b/src/pmml/Test/samples/unittest_ref_lr_model.f
new file mode 100755 (executable)
index 0000000..7e60a97
--- /dev/null
@@ -0,0 +1,31 @@
+      SUBROUTINE myTestFunc(P0, P1, P2, P3, RES)
+C --- *********************************************
+C --- 
+C ---  File used by unit test
+C ---  PMMLBasicsTest1::testExportLinearRegressionFortran
+C --- 
+C --- *********************************************
+
+      IMPLICIT DOUBLE PRECISION (P)
+      DOUBLE PRECISION RES
+      DOUBLE PRECISION Y
+
+C --- Intercept
+      Y = 3.83737;
+
+C --- Attribute : x6
+      Y += P[0]*0.475913;
+
+C --- Attribute : x8
+      Y += P[1]*0.142884;
+
+C --- Attribute : x6x8
+      Y += P[2]*-0.022019;
+
+C --- Attribute : x6x6x8
+      Y += P[3]*0.000536256;
+
+C --- Return the value
+      RES = Y 
+      RETURN
+      END
diff --git a/src/pmml/Test/samples/unittest_ref_lr_model.py b/src/pmml/Test/samples/unittest_ref_lr_model.py
new file mode 100755 (executable)
index 0000000..5dbea2c
--- /dev/null
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+def myTestFunc(param):
+
+    ############################## 
+    # 
+    # File used by unit test
+    # PMMLBasicsTest1::testExportLinearRegressionPython
+    # 
+    ############################## 
+
+    #  Intercept
+    y = 3.83737;
+
+    #  Attribute : x6
+    y += param[0]*0.475913;
+
+    #  Attribute : x8
+    y += param[1]*0.142884;
+
+    #  Attribute : x6x8
+    y += param[2]*-0.022019;
+
+    #  Attribute : x6x6x8
+    y += param[3]*0.000536256;
+
+    #  Return the value
+    return [y];
diff --git a/src/pmml/Test/samples/win32_ann_model.pmml b/src/pmml/Test/samples/win32_ann_model.pmml
new file mode 100755 (executable)
index 0000000..ae32e3a
--- /dev/null
@@ -0,0 +1,124 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="sANNName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+000" norm="-2.889932e-001"/>
+            <LinearNorm orig="9.999901e-002" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+000" norm="-5.756638e-001"/>
+            <LinearNorm orig="2.504894e+004" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+000" norm="-1.699313e-001"/>
+            <LinearNorm orig="8.933486e+004" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+000" norm="-1.707007e-001"/>
+            <LinearNorm orig="8.955232e+001" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+000" norm="-3.302777e-002"/>
+            <LinearNorm orig="1.050003e+003" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LinearNorm orig="0.000000e+000" norm="-4.562070e-002"/>
+            <LinearNorm orig="7.600007e+002" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+000" norm="-1.155882e-001"/>
+            <LinearNorm orig="1.400018e+003" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+000" norm="-5.780019e-002"/>
+            <LinearNorm orig="1.095001e+004" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+000">
+        <Con from="0" weight="7.536629e-001"/>
+        <Con from="1" weight="1.653660e-003"/>
+        <Con from="2" weight="4.725001e-003"/>
+        <Con from="3" weight="9.969786e-003"/>
+        <Con from="4" weight="1.787976e-001"/>
+        <Con from="5" weight="-1.809809e-001"/>
+        <Con from="6" weight="-1.735688e-001"/>
+        <Con from="7" weight="8.559675e-002"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+000">
+        <Con from="8" weight="6.965512e+000"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+000" norm="-5.873935e-001"/>
+            <LinearNorm orig="7.781171e+001" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+</PMML>
diff --git a/src/pmml/Test/samples/win32_lr_model.pmml b/src/pmml/Test/samples/win32_lr_model.pmml
new file mode 100755 (executable)
index 0000000..afc0b14
--- /dev/null
@@ -0,0 +1,38 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="x6" displayName=" x_{6}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="1.100000e+001" rightMargin="2.300000e+001"/>
+    </DataField>
+    <DataField name="x8" displayName=" x_{8}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="2.810000e+001" rightMargin="7.670000e+001"/>
+    </DataField>
+    <DataField name="x1" displayName=" x_{1}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="6.360000e+000" rightMargin="1.251000e+001"/>
+    </DataField>
+  </DataDictionary>
+  <RegressionModel functionName="regression" modelName="Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="3.837365e+000">
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-001"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-001"/>
+      <PredictorTerm coefficient="-2.201903e-002">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-004">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
index 59a4a180b653a5ed9706ac88b39ce0150b0f4b78..a7c17a36f28ee870bf84a28640bd184122436abf 100755 (executable)
@@ -13,12 +13,10 @@ import shutil
 class PMMLBasicsTest(unittest.TestCase):
 
     def setUp(self):
-        pmmlRootDir = os.getenv("YACS_ROOT_DIR");
-        self.resourcesDir = os.path.join(pmmlRootDir,"share","salome","resources","pmml");
-        self.resourcesDir += os.sep ;
-        self.tmpDir = "/tmp/";
-        self.tmpDir += os.environ['LOGNAME']; # ("USER");
-        self.tmpDir += "/PmmlUnitTest/";
+        self.resourcesDir = ".." + os.sep + "Test" + os.sep + "samples" + os.sep ;
+        self.tmpDir = os.sep + "tmp" + os.sep + os.environ['LOGNAME'] + os.sep ;
+        self.tmpDir += "PmmlUnitTest";
+        self.tmpDir += os.sep ;
         if ( not os.path.exists(self.tmpDir) ):
             os.mkdir(self.tmpDir);
             pass
diff --git a/src/pmml/resources/CMakeLists.txt b/src/pmml/resources/CMakeLists.txt
deleted file mode 100755 (executable)
index 68c41e5..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (C) 2012-2014  CEA/DEN, EDF R&D
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
-#
-# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
-#
-
-SET(PMML_RESOURCES_FILES
-  # ici les noms des fichiers ressources 
-  ann_model.pmml
-  ann_model_2.pmml
-  lr_model.pmml
-  lr_model_2.pmml
-  no_model.pmml
-  two_models_ann_lr.pmml
-  unittest_ref_ann_model.cpp
-  unittest_ref_ann_model.f
-  unittest_ref_ann_model.py
-  unittest_ref_lr_model.cpp
-  unittest_ref_lr_model.f
-  unittest_ref_lr_model.py
-  win32_ann_model.pmml
-  win32_lr_model.pmml 
-  )
-
-INSTALL(FILES ${PMML_RESOURCES_FILES} DESTINATION ${SALOME_PMML_INSTALL_RES_DATA})
-
-# MESSAGE(STATUS "Creation of ${CMAKE_CURRENT_BINARY_DIR}/PMMLCatalog.xml")
-# CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/PMMLCatalog.xml.in ${CMAKE_CURRENT_BINARY_DIR}/PMMLCatalog.xml @ONLY)
-# MESSAGE(STATUS "Creation of ${CMAKE_CURRENT_BINARY_DIR}/SalomeApp.xml")
-# CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/SalomeApp.xml.in ${CMAKE_CURRENT_BINARY_DIR}/SalomeApp.xml @ONLY)
-
-# INSTALL(FILES ${CMAKE_CURRENT_BINARY_DIR}/PMMLCatalog.xml ${CMAKE_CURRENT_BINARY_DIR}/SalomeApp.xml DESTINATION ${SALOME_PMML_INSTALL_RES_DATA})
diff --git a/src/pmml/resources/ann_model.pmml b/src/pmml/resources/ann_model.pmml
deleted file mode 100755 (executable)
index bd4d4e8..0000000
+++ /dev/null
@@ -1,124 +0,0 @@
-<?xml version="1.0"?>
-<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
-  <Header copyright="myCopyright" description="Tests unitaires">
-    <Application name="PMMLlib" version="myVersion"/>
-    <Annotation>Tests unitaires PMMLlib</Annotation>
-  </Header>
-  <DataDictionary>
-    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
-    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
-    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
-    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
-    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
-    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
-    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
-    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
-    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
-  </DataDictionary>
-  <NeuralNetwork modelName="sANNName" functionName="regression" numberOfLayers="2">
-    <MiningSchema>
-      <MiningField name="rw" usageType="active"/>
-      <MiningField name="r" usageType="active"/>
-      <MiningField name="tu" usageType="active"/>
-      <MiningField name="tl" usageType="active"/>
-      <MiningField name="hu" usageType="active"/>
-      <MiningField name="hl" usageType="active"/>
-      <MiningField name="l" usageType="active"/>
-      <MiningField name="kw" usageType="active"/>
-      <MiningField name="yhat" usageType="predicted"/>
-    </MiningSchema>
-    <NeuralInputs numberOfInputs="8">
-      <NeuralInput id="0">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="rw">
-            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
-            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="1">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="r">
-            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
-            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="2">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="tu">
-            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
-            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="3">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="tl">
-            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
-            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="4">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="hu">
-            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
-            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="5">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="hl">
-            <LinearNorm orig="0.000000e+00" norm="-4.562070e-02"/>
-            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="6">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="l">
-            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
-            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="7">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="kw">
-            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
-            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-    </NeuralInputs>
-    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
-      <Neuron id="8" bias="-1.263572e+00">
-        <Con from="0" weight="7.536629e-01"/>
-        <Con from="1" weight="1.653660e-03"/>
-        <Con from="2" weight="4.725001e-03"/>
-        <Con from="3" weight="9.969786e-03"/>
-        <Con from="4" weight="1.787976e-01"/>
-        <Con from="5" weight="-1.809809e-01"/>
-        <Con from="6" weight="-1.735688e-01"/>
-        <Con from="7" weight="8.559675e-02"/>
-      </Neuron>
-    </NeuralLayer>
-    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
-      <Neuron id="9" bias="-1.745483e+00">
-        <Con from="8" weight="6.965512e+00"/>
-      </Neuron>
-    </NeuralLayer>
-    <NeuralOutputs numberOfOutputs="1">
-      <NeuralOutput outputNeuron="9">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="yhat">
-            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
-            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralOutput>
-    </NeuralOutputs>
-  </NeuralNetwork>
-</PMML>
diff --git a/src/pmml/resources/ann_model_2.pmml b/src/pmml/resources/ann_model_2.pmml
deleted file mode 100755 (executable)
index c64b4fb..0000000
+++ /dev/null
@@ -1,132 +0,0 @@
-<?xml version="1.0"?>
-<PMML xmlns="http://www.dmg.org/PMML-3_0" version="3.0">
-  <Header copyright="texte copyright" description="texte description">
-    <Application name="Uranie" version="2.3/1"/>
-    <Annotation>date Fri Oct 07, 2011</Annotation>
-  </Header>
-  <DataDictionary>
-    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
-    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
-    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
-    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
-    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
-    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
-    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
-    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
-    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
-  </DataDictionary>
-  <NeuralNetwork modelName="sANNName" functionName="regression" numberOfLayers="2">
-    <MiningSchema>
-      <MiningField name="rw" usageType="active"/>
-      <MiningField name="r" usageType="active"/>
-      <MiningField name="tu" usageType="active"/>
-      <MiningField name="tl" usageType="active"/>
-      <MiningField name="hu" usageType="active"/>
-      <MiningField name="hl" usageType="active"/>
-      <MiningField name="l" usageType="active"/>
-      <MiningField name="kw" usageType="active"/>
-      <MiningField name="yhat" usageType="predicted"/>
-    </MiningSchema>
-    <NeuralInputs numberOfInputs="8">
-      <NeuralInput id="0">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="rw">
-            <LinearNorm orig="0" norm="-2.889932e-01"/>
-            <LinearNorm orig="9.999901e-02" norm="0"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="1">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="r">
-            <LinearNorm orig="0" norm="-5.756638e-01"/>
-            <LinearNorm orig="2.504894e+04" norm="0"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="2">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="tu">
-            <LinearNorm orig="0" norm="-1.699313e-01"/>
-            <LinearNorm orig="8.933486e+04" norm="0"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="3">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="tl">
-            <LinearNorm orig="0" norm="-1.707007e-01"/>
-            <LinearNorm orig="8.955232e+01" norm="0"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="4">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="hu">
-            <LinearNorm orig="0" norm="-3.302777e-02"/>
-            <LinearNorm orig="1.050003e+03" norm="0"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="5">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="hl">
-            <LinearNorm orig="0" norm="-4.562070e-02"/>
-            <LinearNorm orig="7.600007e+02" norm="0"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="6">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="l">
-            <LinearNorm orig="2" norm="1."/>
-            <LinearNorm orig="1.400018e+03" norm="-1."/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="7">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="kw">
-            <LinearNorm orig="0" norm="-5.780019e-02"/>
-            <LinearNorm orig="1.095001e+04" norm="0"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-    </NeuralInputs>
-    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
-      <Neuron id="8" bias="-1.263572e+00">
-        <Con from="0" weight="7.536629e-01"/>
-        <Con from="1" weight="1.653660e-03"/>
-        <Con from="2" weight="4.725001e-03"/>
-        <Con from="3" weight="9.969786e-03"/>
-        <Con from="4" weight="1.787976e-01"/>
-        <Con from="5" weight="-1.809809e-01"/>
-        <Con from="6" weight="-1.735688e-01"/>
-        <Con from="7" weight="8.559675e-02"/>
-      </Neuron>
-    </NeuralLayer>
-    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
-      <Neuron id="9" bias="-1.745483e+00">
-        <Con from="8" weight="6.965512e+00"/>
-      </Neuron>
-    </NeuralLayer>
-    <NeuralOutputs numberOfOutputs="1">
-      <NeuralOutput outputNeuron="9">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="yhat">
-            <LinearNorm orig="0" norm="-5.873935e-01"/>
-            <LinearNorm orig="7.781171e+01" norm="0"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralOutput>
-      <NeuralOutput outputNeuron="-1">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="yhat">
-            <LinearNorm orig="2." norm="-1"/>
-            <LinearNorm orig="5.781171e+01" norm="1"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralOutput>
-    </NeuralOutputs>
-  </NeuralNetwork>
-</PMML>
diff --git a/src/pmml/resources/lr_model.pmml b/src/pmml/resources/lr_model.pmml
deleted file mode 100755 (executable)
index fae9d26..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0"?>
-<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
-  <Header copyright="myCopyright" description="Tests unitaires">
-    <Application name="PMMLlib" version="myVersion"/>
-    <Annotation>Tests unitaires PMMLlib</Annotation>
-  </Header>
-  <DataDictionary>
-    <DataField name="x6" displayName=" x_{6}" optype="continuous" dataType="double">
-      <Interval closure="ClosedClosed" leftMargin="1.100000e+01" rightMargin="2.300000e+01"/>
-    </DataField>
-    <DataField name="x8" displayName=" x_{8}" optype="continuous" dataType="double">
-      <Interval closure="ClosedClosed" leftMargin="2.810000e+01" rightMargin="7.670000e+01"/>
-    </DataField>
-    <DataField name="x1" displayName=" x_{1}" optype="continuous" dataType="double">
-      <Interval closure="ClosedClosed" leftMargin="6.360000e+00" rightMargin="1.251000e+01"/>
-    </DataField>
-  </DataDictionary>
-  <RegressionModel functionName="regression" modelName="Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]" targetFieldName="x1">
-    <MiningSchema>
-      <MiningField name="x6" usageType="active"/>
-      <MiningField name="x8" usageType="active"/>
-      <MiningField name="x1" usageType="predicted"/>
-    </MiningSchema>
-    <RegressionTable intercept="3.837365e+00">
-      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
-      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
-      <PredictorTerm coefficient="-2.201903e-02">
-        <FieldRef field="x6"/>
-        <FieldRef field="x8"/>
-      </PredictorTerm>
-      <PredictorTerm coefficient="5.362560e-04">
-        <FieldRef field="x6"/>
-        <FieldRef field="x6"/>
-        <FieldRef field="x8"/>
-      </PredictorTerm>
-    </RegressionTable>
-  </RegressionModel>
-</PMML>
diff --git a/src/pmml/resources/lr_model_2.pmml b/src/pmml/resources/lr_model_2.pmml
deleted file mode 100755 (executable)
index 2f1ef6b..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0"?>
-<PMML version="4.1" xmlns="http://www.dmg.org/PMML-4_1">
-  <Header copyright="myCopyright" description="Text Description">
-    <Application name="Uranie" version="2013.7/18"/>
-    <Annotation>Compilation date : Wed Jul 17, 2013</Annotation>
-  </Header>
-  <DataDictionary>
-    <DataField name="x6" displayName=" x_{6}" optype="continuous" dataType="double">
-      <Interval closure="ClosedClosed" leftMargin="1.100000e+01" rightMargin="2.300000e+01"/>
-    </DataField>
-    <DataField name="x8" displayName=" x_{8}" optype="continuous" dataType="double">
-      <Interval closure="ClosedClosed" leftMargin="2.810000e+01" rightMargin="7.670000e+01"/>
-    </DataField>
-    <DataField name="x1" displayName=" x_{1}" optype="continuous" dataType="double">
-      <Interval closure="ClosedClosed" leftMargin="6.360000e+00" rightMargin="1.251000e+01"/>
-    </DataField>
-  </DataDictionary>
-  <RegressionModel functionName="regression" modelName="Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]" targetFieldName="x1">
-    <MiningSchema>
-      <MiningField name="x6" usageType="active"/>
-      <MiningField name="x8" usageType="active"/>
-      <MiningField name="x1" usageType="predicted"/>
-    </MiningSchema>
-    <RegressionTable>
-      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
-      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
-      <PredictorTerm coefficient="-2.201903e-02">
-        <FieldRef field="x6"/>
-        <FieldRef field="x8"/>
-      </PredictorTerm>
-      <PredictorTerm coefficient="5.362560e-04">
-        <FieldRef field="x6"/>
-        <FieldRef field="x6"/>
-        <FieldRef field="x8"/>
-      </PredictorTerm>
-    </RegressionTable>
-  </RegressionModel>
-</PMML>
diff --git a/src/pmml/resources/no_model.pmml b/src/pmml/resources/no_model.pmml
deleted file mode 100755 (executable)
index 3951518..0000000
+++ /dev/null
@@ -1,144 +0,0 @@
-<?xml version="1.0"?>
-<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
-  <Header copyright="myCopyright" description="Tests unitaires">
-    <Application name="PMMLlib" version="myVersion"/>
-    <Annotation>Tests unitaires PMMLlib</Annotation>
-  </Header>
-  <DataDictionary>
-    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
-    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
-    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
-    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
-    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
-    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
-    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
-    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
-    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
-  </DataDictionary>
-  <NeuralNetwork modelName="modelName" functionName="regression" numberOfLayers="2">
-    <MiningSchema>
-      <MiningField name="rw" usageType="active"/>
-      <MiningField name="r" usageType="active"/>
-      <MiningField name="tu" usageType="active"/>
-      <MiningField name="tl" usageType="active"/>
-      <MiningField name="hu" usageType="active"/>
-      <MiningField name="hl" usageType="active"/>
-      <MiningField name="l" usageType="active"/>
-      <MiningField name="kw" usageType="active"/>
-      <MiningField name="yhat" usageType="predicted"/>
-    </MiningSchema>
-    <NeuralInputs numberOfInputs="8">
-      <NeuralInput id="0">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="rw">
-            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
-            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="1">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="r">
-            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
-            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="2">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="tu">
-            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
-            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="3">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="tl">
-            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
-            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="4">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="hu">
-            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
-            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="5">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="hl">
-            <LineakLRAndkANNrNorm orig="0.000000e+00" norm="-4.562070e-02"/>
-            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="6">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="l">
-            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
-            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="7">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="kw">
-            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
-            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-    </NeuralInputs>
-    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
-      <Neuron id="8" bias="-1.263572e+00">
-        <Con from="0" weight="7.536629e-01"/>
-        <Con from="1" weight="1.653660e-03"/>
-        <Con from="2" weight="4.725001e-03"/>
-        <Con from="3" weight="9.969786e-03"/>
-        <Con from="4" weight="1.787976e-01"/>
-        <Con from="5" weight="-1.809809e-01"/>
-        <Con from="6" weight="-1.735688e-01"/>
-        <Con from="7" weight="8.559675e-02"/>
-      </Neuron>
-    </NeuralLayer>
-    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
-      <Neuron id="9" bias="-1.745483e+00">
-        <Con from="8" weight="6.965512e+00"/>
-      </Neuron>
-    </NeuralLayer>
-    <NeuralOutputs numberOfOutputs="1">
-      <NeuralOutput outputNeuron="9">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="yhat">
-            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
-            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralOutput>
-    </NeuralOutputs>
-  </NeuralNetwork>
-  <RegressionModel functionName="regression" modelName="modelName" targetFieldName="x1">
-    <MiningSchema>
-      <MiningField name="x6" usageType="active"/>
-      <MiningField name="x8" usageType="active"/>
-      <MiningField name="x1" usageType="predicted"/>
-    </MiningSchema>
-    <RegressionTable intercept="3.837365e+00">
-      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
-      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
-      <PredictorTerm coefficient="-2.201903e-02">
-        <FieldRef field="x6"/>
-        <FieldRef field="x8"/>
-      </PredictorTerm>
-      <PredictorTerm coefficient="5.362560e-04">
-        <FieldRef field="x6"/>
-        <FieldRef field="x6"/>
-        <FieldRef field="x8"/>
-      </PredictorTerm>
-    </RegressionTable>
-  </RegressionModel>
-</PMML>
diff --git a/src/pmml/resources/two_models_ann_lr.pmml b/src/pmml/resources/two_models_ann_lr.pmml
deleted file mode 100755 (executable)
index 3951518..0000000
+++ /dev/null
@@ -1,144 +0,0 @@
-<?xml version="1.0"?>
-<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
-  <Header copyright="myCopyright" description="Tests unitaires">
-    <Application name="PMMLlib" version="myVersion"/>
-    <Annotation>Tests unitaires PMMLlib</Annotation>
-  </Header>
-  <DataDictionary>
-    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
-    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
-    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
-    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
-    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
-    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
-    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
-    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
-    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
-  </DataDictionary>
-  <NeuralNetwork modelName="modelName" functionName="regression" numberOfLayers="2">
-    <MiningSchema>
-      <MiningField name="rw" usageType="active"/>
-      <MiningField name="r" usageType="active"/>
-      <MiningField name="tu" usageType="active"/>
-      <MiningField name="tl" usageType="active"/>
-      <MiningField name="hu" usageType="active"/>
-      <MiningField name="hl" usageType="active"/>
-      <MiningField name="l" usageType="active"/>
-      <MiningField name="kw" usageType="active"/>
-      <MiningField name="yhat" usageType="predicted"/>
-    </MiningSchema>
-    <NeuralInputs numberOfInputs="8">
-      <NeuralInput id="0">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="rw">
-            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
-            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="1">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="r">
-            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
-            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="2">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="tu">
-            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
-            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="3">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="tl">
-            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
-            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="4">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="hu">
-            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
-            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="5">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="hl">
-            <LineakLRAndkANNrNorm orig="0.000000e+00" norm="-4.562070e-02"/>
-            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="6">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="l">
-            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
-            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="7">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="kw">
-            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
-            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-    </NeuralInputs>
-    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
-      <Neuron id="8" bias="-1.263572e+00">
-        <Con from="0" weight="7.536629e-01"/>
-        <Con from="1" weight="1.653660e-03"/>
-        <Con from="2" weight="4.725001e-03"/>
-        <Con from="3" weight="9.969786e-03"/>
-        <Con from="4" weight="1.787976e-01"/>
-        <Con from="5" weight="-1.809809e-01"/>
-        <Con from="6" weight="-1.735688e-01"/>
-        <Con from="7" weight="8.559675e-02"/>
-      </Neuron>
-    </NeuralLayer>
-    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
-      <Neuron id="9" bias="-1.745483e+00">
-        <Con from="8" weight="6.965512e+00"/>
-      </Neuron>
-    </NeuralLayer>
-    <NeuralOutputs numberOfOutputs="1">
-      <NeuralOutput outputNeuron="9">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="yhat">
-            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
-            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralOutput>
-    </NeuralOutputs>
-  </NeuralNetwork>
-  <RegressionModel functionName="regression" modelName="modelName" targetFieldName="x1">
-    <MiningSchema>
-      <MiningField name="x6" usageType="active"/>
-      <MiningField name="x8" usageType="active"/>
-      <MiningField name="x1" usageType="predicted"/>
-    </MiningSchema>
-    <RegressionTable intercept="3.837365e+00">
-      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
-      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
-      <PredictorTerm coefficient="-2.201903e-02">
-        <FieldRef field="x6"/>
-        <FieldRef field="x8"/>
-      </PredictorTerm>
-      <PredictorTerm coefficient="5.362560e-04">
-        <FieldRef field="x6"/>
-        <FieldRef field="x6"/>
-        <FieldRef field="x8"/>
-      </PredictorTerm>
-    </RegressionTable>
-  </RegressionModel>
-</PMML>
diff --git a/src/pmml/resources/unittest_ref_ann_model.cpp b/src/pmml/resources/unittest_ref_ann_model.cpp
deleted file mode 100755 (executable)
index 643308c..0000000
+++ /dev/null
@@ -1,67 +0,0 @@
-#define ActivationFunction(sum) ( 1.0 / ( 1.0 + exp( -1.0 * sum )) )
-void myTestFunc(double *param, double *res)
-{
-  ////////////////////////////// 
-  //
-  // File used by unit test
-  // PMMLBasicsTest1::testExportNeuralNetworkCpp
-  //
-  ////////////////////////////// 
-
-  int nInput   = 8;
-  int nOutput   = 1;
-  int nHidden  = 1;
-  const int nNeurones  = 10;
-  double myTestFunc_act[nNeurones];
-
-  // --- Preprocessing of the inputs and outputs
-  double myTestFunc_minInput[] = {
-  0.099999, 25048.9, 89334.9, 89.5523, 1050, 
-  760.001, 1400.02, 10950, 
-  };
-  double myTestFunc_minOutput[] = {
-  77.8117,   };
-  double myTestFunc_maxInput[] = {
-  0.028899, 14419.8, 15180.8, 15.2866, 34.6793, 
-  34.6718, 161.826, 632.913, 
-  };
-  double myTestFunc_maxOutput[] = {
-  45.7061,   };
-
-  // --- Values of the weights
-  double myTestFunc_valW[] = {
-  -1.74548, 6.96551, -1.26357, 0.753663, 0.00165366, 
-  0.004725, 0.00996979, 0.178798, -0.180981, -0.173569, 
-  0.0855967, 
-  };
-  // --- Constants
-  int indNeurone = 0;
-  int CrtW;
-  double sum;
-
-  // --- Input Layers
-  for(int i = 0; i < nInput; i++) {
-     myTestFunc_act[indNeurone++] = ( param[i] - myTestFunc_minInput[i] ) / myTestFunc_maxInput[i];
-  }
-
-  // --- Hidden Layers
-  for (int member = 0; member < nHidden; member++) {
-     int CrtW = member * ( nInput + 2) + 2;
-     sum = myTestFunc_valW[CrtW++];
-     for (int source = 0; source < nInput; source++) {
-         sum += myTestFunc_act[source] * myTestFunc_valW[CrtW++];
-       }
-       myTestFunc_act[indNeurone++] = ActivationFunction(sum);
-  }
-
-  // --- Output
-  for (int member = 0; member < nOutput; member++) {
-    sum = myTestFunc_valW[0];
-    for (int source = 0; source < nHidden; source++) {
-      CrtW = source * ( nInput + 2) + 1;
-      sum += myTestFunc_act[nInput+source] * myTestFunc_valW[CrtW];
-    }
-    myTestFunc_act[indNeurone++] = sum;
-    res[member] = myTestFunc_minOutput[member] + myTestFunc_maxOutput[member] * sum;
-  }
-}
diff --git a/src/pmml/resources/unittest_ref_ann_model.f b/src/pmml/resources/unittest_ref_ann_model.f
deleted file mode 100755 (executable)
index 7996d31..0000000
+++ /dev/null
@@ -1,64 +0,0 @@
-      SUBROUTINE myTestFunc(rw,r,tu,tl,hu,hl,l,kw,yhat)
-C --- *********************************************
-C --- 
-C ---  File used by unit test
-C ---  PMMLBasicsTest1::testExportNeuralNetworkFortran
-C --- 
-C --- *********************************************
-      IMPLICIT DOUBLE PRECISION (V)
-      DOUBLE PRECISION rw
-      DOUBLE PRECISION r
-      DOUBLE PRECISION tu
-      DOUBLE PRECISION tl
-      DOUBLE PRECISION hu
-      DOUBLE PRECISION hl
-      DOUBLE PRECISION l
-      DOUBLE PRECISION kw
-      DOUBLE PRECISION yhat
-
-C --- Preprocessing of the inputs
-      VXNrw = ( rw - 0.099999D0 ) / 0.028899D0
-      VXNr = ( r - 25048.9D0 ) / 14419.8D0
-      VXNtu = ( tu - 89334.9D0 ) / 15180.8D0
-      VXNtl = ( tl - 89.5523D0 ) / 15.2866D0
-      VXNhu = ( hu - 1050D0 ) / 34.6793D0
-      VXNhl = ( hl - 760.001D0 ) / 34.6718D0
-      VXNl = ( l - 1400.02D0 ) / 161.826D0
-      VXNkw = ( kw - 10950D0 ) / 632.913D0
-
-C --- Values of the weights
-      VW1 = -1.74548
-      VW2 = 6.96551
-      VW3 = -1.26357
-      VW4 = 0.753663
-      VW5 = 0.00165366
-      VW6 = 0.004725
-      VW7 = 0.00996979
-      VW8 = 0.178798
-      VW9 = -0.180981
-      VW10 = -0.173569
-      VW11 = 0.0855967
-
-C --- hidden neural number 1
-      VAct1 = VW3
-     1      + VW4 * VXNrw
-     1      + VW5 * VXNr
-     1      + VW6 * VXNtu
-     1      + VW7 * VXNtl
-     1      + VW8 * VXNhu
-     1      + VW9 * VXNhl
-     1      + VW10 * VXNl
-     1      + VW11 * VXNkw
-
-      VPot1 = 1.D0 / (1.D0 + DEXP(-1.D0 * VAct1))
-
-C --- Output
-      VOut = VW1
-     1    + VW2 * VPot1
-
-C --- Pretraitment of the output
-      yhat = 77.8117D0 + 45.7061D0 * VOut;
-
-C --- 
-      RETURN
-      END
diff --git a/src/pmml/resources/unittest_ref_ann_model.py b/src/pmml/resources/unittest_ref_ann_model.py
deleted file mode 100755 (executable)
index 2a1f5e5..0000000
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-from math import tanh, exp
-
-def ActivationFunction(sum): 
-    return ( 1.0 / ( 1.0 + exp( -1.0 * sum ) ) ); 
-
-def myTestFunc(param):
-
-    ############################## 
-    #
-    # File used by unit test
-    # PMMLBasicsTest1::testExportNeuralNetworkPython
-    #
-    ############################## 
-
-    nInput = 8;
-    nOutput = 1;
-    nHidden = 1;
-    nNeurones = 10;
-    myTestFunc_act = [];
-    res = [];
-
-    # --- Preprocessing of the inputs and outputs
-    myTestFunc_minInput = [
-      0.099999, 25048.9, 89334.9, 89.5523, 1050, 
-    760.001, 1400.02, 10950, 
-    ];
-    myTestFunc_minOutput = [
-        77.8117
-    ];
-    myTestFunc_maxInput = [
-    0.028899, 14419.8, 15180.8, 15.2866, 34.6793, 
-    34.6718, 161.826, 632.913, 
-    ];
-    myTestFunc_maxOutput = [
-        45.7061
-    ];
-    # --- Values of the weights
-    myTestFunc_valW = [
-    -1.74548, 6.96551, -1.26357, 0.753663, 0.00165366, 
-    0.004725, 0.00996979, 0.178798, -0.180981, -0.173569, 
-    0.0855967, 
-    ];
-    # --- Constants
-    indNeurone = 0;
-
-    # --- Input Layers
-    for i in range(nInput) :
-        myTestFunc_act.append( ( param[i] - myTestFunc_minInput[i] ) / myTestFunc_maxInput[i] ) ;
-        indNeurone += 1 ;
-        pass
-
-    # --- Hidden Layers
-    for member in range(nHidden):
-        CrtW = member * ( nInput + 2) + 2;
-        sum = myTestFunc_valW[CrtW];
-        CrtW += 1 ;
-        for source in range(nInput) :
-            sum += myTestFunc_act[source] * myTestFunc_valW[CrtW];
-            CrtW += 1 ;
-            pass
-        myTestFunc_act.append( ActivationFunction(sum) ) ;
-        indNeurone += 1 ;
-        pass
-
-    # --- Output
-    for member in range(nOutput):
-        sum = myTestFunc_valW[0];
-        for source in range(nHidden):
-            CrtW = source * ( nInput + 2) + 1;
-            sum += myTestFunc_act[nInput+source] * myTestFunc_valW[CrtW];
-            pass
-        myTestFunc_act.append( sum );
-        indNeurone += 1 ;
-        res.append( myTestFunc_minOutput[member] + myTestFunc_maxOutput[member] * sum );
-        pass
-
-    return res;
-
-
diff --git a/src/pmml/resources/unittest_ref_lr_model.cpp b/src/pmml/resources/unittest_ref_lr_model.cpp
deleted file mode 100755 (executable)
index 1072f46..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-void myTestFunc(double *param, double *res)
-{
-  ////////////////////////////// 
-  //
-  // File used by unit test
-  // PMMLBasicsTest1::testExportLinearRegressionCpp
-  //
-  ////////////////////////////// 
-
-  // Intercept
-  double y = 3.83737;
-
-  // Attribute : x6
-  y += param[0]*0.475913;
-
-  // Attribute : x8
-  y += param[1]*0.142884;
-
-  // Attribute : x6x8
-  y += param[2]*-0.022019;
-
-  // Attribute : x6x6x8
-  y += param[3]*0.000536256;
-
-  // Return the value
-  res[0] = y;
-}
diff --git a/src/pmml/resources/unittest_ref_lr_model.f b/src/pmml/resources/unittest_ref_lr_model.f
deleted file mode 100755 (executable)
index 7e60a97..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-      SUBROUTINE myTestFunc(P0, P1, P2, P3, RES)
-C --- *********************************************
-C --- 
-C ---  File used by unit test
-C ---  PMMLBasicsTest1::testExportLinearRegressionFortran
-C --- 
-C --- *********************************************
-
-      IMPLICIT DOUBLE PRECISION (P)
-      DOUBLE PRECISION RES
-      DOUBLE PRECISION Y
-
-C --- Intercept
-      Y = 3.83737;
-
-C --- Attribute : x6
-      Y += P[0]*0.475913;
-
-C --- Attribute : x8
-      Y += P[1]*0.142884;
-
-C --- Attribute : x6x8
-      Y += P[2]*-0.022019;
-
-C --- Attribute : x6x6x8
-      Y += P[3]*0.000536256;
-
-C --- Return the value
-      RES = Y 
-      RETURN
-      END
diff --git a/src/pmml/resources/unittest_ref_lr_model.py b/src/pmml/resources/unittest_ref_lr_model.py
deleted file mode 100755 (executable)
index 5dbea2c..0000000
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-def myTestFunc(param):
-
-    ############################## 
-    # 
-    # File used by unit test
-    # PMMLBasicsTest1::testExportLinearRegressionPython
-    # 
-    ############################## 
-
-    #  Intercept
-    y = 3.83737;
-
-    #  Attribute : x6
-    y += param[0]*0.475913;
-
-    #  Attribute : x8
-    y += param[1]*0.142884;
-
-    #  Attribute : x6x8
-    y += param[2]*-0.022019;
-
-    #  Attribute : x6x6x8
-    y += param[3]*0.000536256;
-
-    #  Return the value
-    return [y];
diff --git a/src/pmml/resources/win32_ann_model.pmml b/src/pmml/resources/win32_ann_model.pmml
deleted file mode 100755 (executable)
index ae32e3a..0000000
+++ /dev/null
@@ -1,124 +0,0 @@
-<?xml version="1.0"?>
-<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
-  <Header copyright="myCopyright" description="Tests unitaires">
-    <Application name="PMMLlib" version="myVersion"/>
-    <Annotation>Tests unitaires PMMLlib</Annotation>
-  </Header>
-  <DataDictionary>
-    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
-    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
-    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
-    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
-    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
-    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
-    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
-    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
-    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
-  </DataDictionary>
-  <NeuralNetwork modelName="sANNName" functionName="regression" numberOfLayers="2">
-    <MiningSchema>
-      <MiningField name="rw" usageType="active"/>
-      <MiningField name="r" usageType="active"/>
-      <MiningField name="tu" usageType="active"/>
-      <MiningField name="tl" usageType="active"/>
-      <MiningField name="hu" usageType="active"/>
-      <MiningField name="hl" usageType="active"/>
-      <MiningField name="l" usageType="active"/>
-      <MiningField name="kw" usageType="active"/>
-      <MiningField name="yhat" usageType="predicted"/>
-    </MiningSchema>
-    <NeuralInputs numberOfInputs="8">
-      <NeuralInput id="0">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="rw">
-            <LinearNorm orig="0.000000e+000" norm="-2.889932e-001"/>
-            <LinearNorm orig="9.999901e-002" norm="0.000000e+000"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="1">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="r">
-            <LinearNorm orig="0.000000e+000" norm="-5.756638e-001"/>
-            <LinearNorm orig="2.504894e+004" norm="0.000000e+000"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="2">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="tu">
-            <LinearNorm orig="0.000000e+000" norm="-1.699313e-001"/>
-            <LinearNorm orig="8.933486e+004" norm="0.000000e+000"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="3">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="tl">
-            <LinearNorm orig="0.000000e+000" norm="-1.707007e-001"/>
-            <LinearNorm orig="8.955232e+001" norm="0.000000e+000"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="4">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="hu">
-            <LinearNorm orig="0.000000e+000" norm="-3.302777e-002"/>
-            <LinearNorm orig="1.050003e+003" norm="0.000000e+000"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="5">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="hl">
-            <LinearNorm orig="0.000000e+000" norm="-4.562070e-002"/>
-            <LinearNorm orig="7.600007e+002" norm="0.000000e+000"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="6">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="l">
-            <LinearNorm orig="0.000000e+000" norm="-1.155882e-001"/>
-            <LinearNorm orig="1.400018e+003" norm="0.000000e+000"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-      <NeuralInput id="7">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="kw">
-            <LinearNorm orig="0.000000e+000" norm="-5.780019e-002"/>
-            <LinearNorm orig="1.095001e+004" norm="0.000000e+000"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralInput>
-    </NeuralInputs>
-    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
-      <Neuron id="8" bias="-1.263572e+000">
-        <Con from="0" weight="7.536629e-001"/>
-        <Con from="1" weight="1.653660e-003"/>
-        <Con from="2" weight="4.725001e-003"/>
-        <Con from="3" weight="9.969786e-003"/>
-        <Con from="4" weight="1.787976e-001"/>
-        <Con from="5" weight="-1.809809e-001"/>
-        <Con from="6" weight="-1.735688e-001"/>
-        <Con from="7" weight="8.559675e-002"/>
-      </Neuron>
-    </NeuralLayer>
-    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
-      <Neuron id="9" bias="-1.745483e+000">
-        <Con from="8" weight="6.965512e+000"/>
-      </Neuron>
-    </NeuralLayer>
-    <NeuralOutputs numberOfOutputs="1">
-      <NeuralOutput outputNeuron="9">
-        <DerivedField optype="continuous" dataType="float">
-          <NormContinuous field="yhat">
-            <LinearNorm orig="0.000000e+000" norm="-5.873935e-001"/>
-            <LinearNorm orig="7.781171e+001" norm="0.000000e+000"/>
-          </NormContinuous>
-        </DerivedField>
-      </NeuralOutput>
-    </NeuralOutputs>
-  </NeuralNetwork>
-</PMML>
diff --git a/src/pmml/resources/win32_lr_model.pmml b/src/pmml/resources/win32_lr_model.pmml
deleted file mode 100755 (executable)
index afc0b14..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0"?>
-<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
-  <Header copyright="myCopyright" description="Tests unitaires">
-    <Application name="PMMLlib" version="myVersion"/>
-    <Annotation>Tests unitaires PMMLlib</Annotation>
-  </Header>
-  <DataDictionary>
-    <DataField name="x6" displayName=" x_{6}" optype="continuous" dataType="double">
-      <Interval closure="ClosedClosed" leftMargin="1.100000e+001" rightMargin="2.300000e+001"/>
-    </DataField>
-    <DataField name="x8" displayName=" x_{8}" optype="continuous" dataType="double">
-      <Interval closure="ClosedClosed" leftMargin="2.810000e+001" rightMargin="7.670000e+001"/>
-    </DataField>
-    <DataField name="x1" displayName=" x_{1}" optype="continuous" dataType="double">
-      <Interval closure="ClosedClosed" leftMargin="6.360000e+000" rightMargin="1.251000e+001"/>
-    </DataField>
-  </DataDictionary>
-  <RegressionModel functionName="regression" modelName="Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]" targetFieldName="x1">
-    <MiningSchema>
-      <MiningField name="x6" usageType="active"/>
-      <MiningField name="x8" usageType="active"/>
-      <MiningField name="x1" usageType="predicted"/>
-    </MiningSchema>
-    <RegressionTable intercept="3.837365e+000">
-      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-001"/>
-      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-001"/>
-      <PredictorTerm coefficient="-2.201903e-002">
-        <FieldRef field="x6"/>
-        <FieldRef field="x8"/>
-      </PredictorTerm>
-      <PredictorTerm coefficient="5.362560e-004">
-        <FieldRef field="x6"/>
-        <FieldRef field="x6"/>
-        <FieldRef field="x8"/>
-      </PredictorTerm>
-    </RegressionTable>
-  </RegressionModel>
-</PMML>
index 01b877b43e628a8101f602ef26e85676ad32964b..66b175c07105547b35e21bf12c0093968f388bdc 100644 (file)
     <parameter name="DataPort_Width"  value="100" />
     <parameter name="DataPort_Height" value="25"  />
   </section>
+ <section name="windows_geometry">
+  <parameter name="YACS" value="#00 #00 #00 #FF #00 #00 #00 #00 #FD #00 #00 #00 #03 #00 #00 #00 #00 #00 #00 #01 #00 #00 #00 #02 #6B #FC #02 #00 #00 #00 #01 #FC #00 #00 #00 #59 #00 #00 #02 #6B #00 #00 #00 #68 #01 #00 #00 #14 #FA #00 #00 #00 #01 #02 #00 #00 #00 #02 #FB #00 #00 #00 #20 #00 #79 #00 #61 #00 #63 #00 #73 #00 #54 #00 #72 #00 #65 #00 #65 #00 #56 #00 #69 #00 #65 #00 #77 #00 #44 #00 #6F #00 #63 #00 #6B #01 #00 #00 #00 #00 #FF #FF #FF #FF #00 #00 #00 #16 #00 #FF #FF #FF #FB #00 #00 #00 #22 #00 #6F #00 #62 #00 #6A #00 #65 #00 #63 #00 #74 #00 #42 #00 #72 #00 #6F #00 #77 #00 #73 #00 #65 #00 #72 #00 #44 #00 #6F #00 #63 #00 #6B #01 #00 #00 #00 #00 #FF #FF #FF #FF #00 #00 #00 #53 #00 #FF #FF #FF #00 #00 #00 #01 #00 #00 #01 #0E #00 #00 #02 #6B #FC #02 #00 #00 #00 #01 #FC #00 #00 #00 #59 #00 #00 #02 #6B #00 #00 #00 #68 #01 #00 #00 #14 #FA #00 #00 #00 #01 #02 #00 #00 #00 #02 #FB #00 #00 #00 #24 #00 #79 #00 #61 #00 #63 #00 #73 #00 #49 #00 #6E #00 #70 #00 #75 #00 #74 #00 #50 #00 #61 #00 #6E #00 #65 #00 #6C #00 #44 #00 #6F #00 #63 #00 #6B #01 #00 #00 #00 #00 #FF #FF #FF #FF #00 #00 #00 #16 #00 #FF #FF #FF #FB #00 #00 #00 #20 #00 #79 #00 #61 #00 #63 #00 #73 #00 #43 #00 #61 #00 #74 #00 #61 #00 #6C #00 #6F #00 #67 #00 #73 #00 #44 #00 #6F #00 #63 #00 #6B #01 #00 #00 #00 #00 #FF #FF #FF #FF #00 #00 #00 #53 #00 #FF #FF #FF #00 #00 #00 #03 #00 #00 #05 #40 #00 #00 #00 #53 #FC #01 #00 #00 #00 #01 #FB #00 #00 #00 #22 #00 #70 #00 #79 #00 #74 #00 #68 #00 #6F #00 #6E #00 #43 #00 #6F #00 #6E #00 #73 #00 #6F #00 #6C #00 #65 #00 #44 #00 #6F #00 #63 #00 #6B #01 #00 #00 #00 #00 #00 #00 #05 #40 #00 #00 #00 #46 #00 #FF #FF #FF #00 #00 #03 #22 #00 #00 #02 #6B #00 #00 #00 #04 #00 #00 #00 #04 #00 #00 #00 #08 #00 #00 #00 #08 #FC #00 #00 #00 #02 #00 #00 #00 #02 #00 #00 #00 #02 #00 #00 #00 #1C #00 #53 #00 #61 #00 #6C #00 #6F #00 #6D #00 #65 #00 #53 #00 #74 #00 #61 #00 #6E #00 #64 #00 #61 #00 #72 #00 #64 #01 #00 #00 #00 #00 #FF #FF #FF #FF #00 #00 #00 #00 #00 #00 #00 #00 #00 #00 #00 #1A #00 #53 
#00 #61 #00 #6C #00 #6F #00 #6D #00 #65 #00 #4D #00 #6F #00 #64 #00 #75 #00 #6C #00 #65 #00 #73 #01 #00 #00 #00 #CE #FF #FF #FF #FF #00 #00 #00 #00 #00 #00 #00 #00 #00 #00 #00 #02 #00 #00 #00 #01 #00 #00 #00 #16 #00 #59 #00 #41 #00 #43 #00 #53 #00 #54 #00 #6F #00 #6F #00 #6C #00 #62 #00 #61 #00 #72 #01 #00 #00 #00 #00 #FF #FF #FF #FF #00 #00 #00 #00 #00 #00 #00 #00"/>
+ </section>
+ <section name="windows_visibility">
+  <parameter name="YACS" value="#00 #00 #00 #00 #08 #00 #00 #00 #0E #00 #4D #00 #6F #00 #64 #00 #75 #00 #6C #00 #65 #00 #73 #01 #00 #00 #00 #2A #00 #51 #00 #78 #00 #53 #00 #63 #00 #65 #00 #6E #00 #65 #00 #56 #00 #69 #00 #65 #00 #77 #00 #4F #00 #70 #00 #65 #00 #72 #00 #61 #00 #74 #00 #69 #00 #6F #00 #6E #00 #73 #01 #00 #00 #00 #1A #00 #53 #00 #61 #00 #6C #00 #6F #00 #6D #00 #65 #00 #4D #00 #6F #00 #64 #00 #75 #00 #6C #00 #65 #00 #73 #01 #00 #00 #00 #1C #00 #53 #00 #61 #00 #6C #00 #6F #00 #6D #00 #65 #00 #53 #00 #74 #00 #61 #00 #6E #00 #64 #00 #61 #00 #72 #00 #64 #01 #00 #00 #00 #10 #00 #53 #00 #74 #00 #61 #00 #6E #00 #64 #00 #61 #00 #72 #00 #64 #01 #00 #00 #00 #1E #00 #56 #00 #69 #00 #65 #00 #77 #00 #20 #00 #4F #00 #70 #00 #65 #00 #72 #00 #61 #00 #74 #00 #69 #00 #6F #00 #6E #00 #73 #01 #00 #00 #00 #18 #00 #59 #00 #41 #00 #43 #00 #53 #00 #20 #00 #54 #00 #6F #00 #6F #00 #6C #00 #62 #00 #61 #00 #72 #01 #00 #00 #00 #16 #00 #59 #00 #41 #00 #43 #00 #53 #00 #54 #00 #6F #00 #6F #00 #6C #00 #62 #00 #61 #00 #72 #01 #01 #00 #00 #00 #05 #00 #00 #00 #22 #00 #6F #00 #62 #00 #6A #00 #65 #00 #63 #00 #74 #00 #42 #00 #72 #00 #6F #00 #77 #00 #73 #00 #65 #00 #72 #00 #44 #00 #6F #00 #63 #00 #6B #01 #00 #00 #00 #22 #00 #70 #00 #79 #00 #74 #00 #68 #00 #6F #00 #6E #00 #43 #00 #6F #00 #6E #00 #73 #00 #6F #00 #6C #00 #65 #00 #44 #00 #6F #00 #63 #00 #6B #01 #00 #00 #00 #20 #00 #79 #00 #61 #00 #63 #00 #73 #00 #43 #00 #61 #00 #74 #00 #61 #00 #6C #00 #6F #00 #67 #00 #73 #00 #44 #00 #6F #00 #63 #00 #6B #01 #00 #00 #00 #24 #00 #79 #00 #61 #00 #63 #00 #73 #00 #49 #00 #6E #00 #70 #00 #75 #00 #74 #00 #50 #00 #61 #00 #6E #00 #65 #00 #6C #00 #44 #00 #6F #00 #63 #00 #6B #01 #00 #00 #00 #20 #00 #79 #00 #61 #00 #63 #00 #73 #00 #54 #00 #72 #00 #65 #00 #65 #00 #56 #00 #69 #00 #65 #00 #77 #00 #44 #00 #6F #00 #63 #00 #6B #01"/>
+ </section>
 </document>
index acc337c8af78c7b39b5012c75ffefc43f0e00b15..ae221267b3e4b1e9cb012fc7b51ae3650c2d052d 100644 (file)
         <source>shrink or expand the selected node</source>
         <translation>contracter ou étendre le noeud choisi</translation>
     </message>
+    <message>
+        <source>shrink or expand direct children of the selected node</source>
+        <translation type="unfinished">contracter ou étendre le noeud choisi</translation>
+    </message>
+    <message>
+        <source>shrink or expand elementary nodes of the selected node recursively</source>
+        <translation type="unfinished">contracter ou étendre le noeud choisi</translation>
+    </message>
     <message>
         <source>shrink/expand</source>
         <translation>contracter/étendre</translation>
     </message>
+    <message>
+        <source>shrink/expand children</source>
+        <translation type="unfinished">contracter/étendre</translation>
+    </message>
+    <message>
+        <source>shrink/expand elementary</source>
+        <translation type="unfinished">contracter/étendre</translation>
+    </message>
     <message>
         <source>draw straight or orthogonal links</source>
         <translation>Créer les liens droits ou orthogonaux</translation>
index f30cce995b9c691f33c59978c35a732230966951..a8508990e3a372a236dd16a603c4394096e328e9 100644 (file)
       <source>shrink or expand the selected node</source>
       <translation>選択中のノードを展開または縮小</translation>
     </message>
+    <message>
+      <source>shrink or expand direct children of the selected node</source>
+      <translation type="unfinished">選択中のノードを展開または縮小</translation>
+    </message>
+    <message>
+      <source>shrink or expand elementary nodes of the selected node recursively</source>
+      <translation type="unfinished">選択中のノードを展開または縮小</translation>
+    </message>
     <message>
       <source>shrink/expand</source>
       <translation>展開/縮小</translation>
     </message>
+    <message>
+      <source>shrink/expand children</source>
+      <translation type="unfinished">展開/縮小</translation>
+    </message>
+    <message>
+      <source>shrink/expand elementary</source>
+      <translation type="unfinished">展開/縮小</translation>
+    </message>
     <message>
       <source>draw straight or orthogonal links</source>
       <translation>直線または直交リンクを描画</translation>
index ecf25ea149d943800c67c46aa273fc5f2240ec47..70a9e5af62dcee8259407de20cae763720f00a61 100644 (file)
@@ -169,9 +169,9 @@ int SalomeWrap_Module::wCreateMenu(QAction* action,
   return createMenu(action, menu, actionId, groupId, index);
 }
 
-int SalomeWrap_Module::wCreateTool(const QString& name)
+int SalomeWrap_Module::wCreateTool(const QString& title, const QString& name)
 {
-  return createTool(name);
+  return createTool(title, name);
 }
 
 int SalomeWrap_Module::wCreateTool(const int actionId,
index 66ffcbb3beba5303b76199442953f8735b8188cd..1dab9272bf8d7f0967e600174dcc94682041929d 100644 (file)
@@ -97,7 +97,8 @@ public:
                   const int groupId = -1,
                   const int index = -1);
 
-  int wCreateTool(const QString& name);
+  int wCreateTool(const QString& title,
+                 const QString& name = QString());
 
   int wCreateTool(const int actionId,
                   const int toolbarId, 
index 0ef735cfbe964908e22db663da6f504ba2c1129a..15f87eaa650b5172088945aed5338e107554293f 100644 (file)
@@ -201,10 +201,10 @@ int SuitWrapper::createMenu(QAction* action,
   return module->wCreateMenu(action, menu, actionId, groupId, index);
 }
 
-int SuitWrapper::createTool(const QString& name)
+int SuitWrapper::createTool(const QString& title, const QString& name)
 {
   SalomeWrap_Module* module = dynamic_cast<SalomeWrap_Module*>(_wrapped);
-  return module->wCreateTool(name);
+  return module->wCreateTool(title, name);
 }
 
 int SuitWrapper::createTool(const int actionId,
index 6f405657a6817b08de157851db0cbb4f81eeccb2..e74af95ecfda18d373864b349053a02ad4059a37 100644 (file)
@@ -96,7 +96,8 @@ namespace YACS
                      const int groupId = -1,
                      const int index = -1);
 
-      int createTool(const QString& name);
+      int createTool(const QString& title,
+                    const QString& name = QString());
 
       int createTool(const int actionId,
                      const int toolbarId, 
index 10c37ba19b3895b182406d57f710354ba3413c40..43c1f2f68e52a9d0b1fc0409ad9991ef7922094d 100644 (file)
@@ -19,7 +19,7 @@
 
 IF(SALOME_BUILD_TESTS)
   ADD_SUBDIRECTORY(Test)
-  ADD_SUBDIRECTORY(pmml)
+  ADD_SUBDIRECTORY(pmml)  
 ENDIF(SALOME_BUILD_TESTS)
 
 # --- options ---
index 6c4f4866c58d2250b87e5a9f69c54ee2ad9eeadd..c53c72dde30766f0c8b3f89facba9a45a3c020c5 100644 (file)
@@ -114,3 +114,4 @@ IF(NOT WIN32)
   SALOME_CONFIGURE_FILE(xmlrun_orig.sh xmlrun.sh)
   ADD_TEST(NAME YacsLoaderTest COMMAND ${SHELL} YacsLoaderTest.sh)
 ENDIF()
+
index 86bc84ecb3b8b16adca676b7d98f66ff93dc8ca1..34d4a0d4e9eda924cc3ebf85d15cd1688f82d860 100644 (file)
 
 #include "OptimizerAlg.hxx"
 
-using namespace YACS::ENGINE;
+#include <iostream>
+//using namespace YACS::ENGINE;
 
 extern "C"
 {
-  OptimizerAlgBase * createOptimizerAlgASyncExample(Pool * pool);
+  YACS::ENGINE::OptimizerAlgBase * createOptimizerAlgASyncExample(YACS::ENGINE::Pool * pool);
 }
 
-class OptimizerAlgASyncExample : public OptimizerAlgASync
-  {
+class OptimizerAlgASyncExample : public YACS::ENGINE::OptimizerAlgASync
+{
   private:
-    TypeCode * _tcIn;
-    TypeCode * _tcOut;
+    YACS::ENGINE::TypeCode *_tcInt;
+    YACS::ENGINE::TypeCode *_tcDouble;
   public:
-    OptimizerAlgASyncExample(Pool * pool);
+    OptimizerAlgASyncExample(YACS::ENGINE::Pool *pool);
     virtual ~OptimizerAlgASyncExample();
-    TypeCode * getTCForIn() const;
-    TypeCode * getTCForOut() const;
-    void startToTakeDecision();
-  };
+    
+    //! returns typecode of type expected as Input. OwnerShip of returned pointer is held by this.
+    virtual YACS::ENGINE::TypeCode *getTCForIn() const;
+    //! returns typecode of type expected as Output. OwnerShip of returned pointer is held by this.
+    virtual YACS::ENGINE::TypeCode *getTCForOut() const;
+    //! returns typecode of type expected for algo initialization. OwnerShip of returned pointer is held by this.
+    virtual YACS::ENGINE::TypeCode *getTCForAlgoInit() const;
+    //! returns typecode of type expected as algo result. OwnerShip of returned pointer is held by this.
+    virtual YACS::ENGINE::TypeCode *getTCForAlgoResult() const;
+    virtual void initialize(const YACS::ENGINE::Any *input) throw (YACS::Exception);
+    virtual void startToTakeDecision();
+    virtual void finish();//! Called when optimization has succeed.
+    virtual YACS::ENGINE::Any * getAlgoResult();
+};
 
-OptimizerAlgASyncExample::OptimizerAlgASyncExample(Pool * pool) : OptimizerAlgASync(pool),
-                                                                  _tcIn(0), _tcOut(0)
+OptimizerAlgASyncExample::OptimizerAlgASyncExample(YACS::ENGINE::Pool *pool)
+  : YACS::ENGINE::OptimizerAlgASync(pool), _tcInt(0), _tcDouble(0)
 {
-  _tcIn = new TypeCode(Double);
-  _tcOut = new TypeCode(Int);
+  _tcDouble = new YACS::ENGINE::TypeCode(YACS::ENGINE::Double);
+  _tcInt    = new YACS::ENGINE::TypeCode(YACS::ENGINE::Int);
 }
 
 OptimizerAlgASyncExample::~OptimizerAlgASyncExample()
 {
-  _tcIn->decrRef();
-  _tcOut->decrRef();
+  _tcDouble->decrRef();
+  _tcInt->decrRef();
+}
+
+//! Return the typecode of the expected input of the internal node
+YACS::ENGINE::TypeCode * OptimizerAlgASyncExample::getTCForIn() const
+{
+  return _tcDouble;
+}
+
+//! Return the typecode of the expected output of the internal node
+YACS::ENGINE::TypeCode * OptimizerAlgASyncExample::getTCForOut() const
+{
+  return _tcInt;
 }
 
-//! Return the typecode of the expected input type
-TypeCode *OptimizerAlgASyncExample::getTCForIn() const
+//! Return the typecode of the expected input of the algorithm (algoInit port)
+YACS::ENGINE::TypeCode * OptimizerAlgASyncExample::getTCForAlgoInit() const
 {
-  return _tcIn;
+  return _tcInt;
 }
 
-//! Return the typecode of the expected output type
-TypeCode *OptimizerAlgASyncExample::getTCForOut() const
+//! Return the typecode of the expected output of the algorithm (algoResult port)
+YACS::ENGINE::TypeCode * OptimizerAlgASyncExample::getTCForAlgoResult() const
 {
-  return _tcOut;
+  return _tcInt;
+}
+
+//! Optional method to initialize the algorithm.
+/*!
+ *  For now, the parameter input is always NULL. It might be used in the
+ *  future to initialize an algorithm with custom data.
+ */
+void OptimizerAlgASyncExample::initialize(const YACS::ENGINE::Any *input)
+  throw (YACS::Exception)
+{
+  std::cout << "Algo initialize, input = " << input->getIntValue() << std::endl;
 }
 
 //! This method is called only once to launch the algorithm.
 /*!
- *  It must first fill the pool with samples to evaluate and call signalMasterAndWait()
- *  to block until a sample has been evaluated. When returning from this method, it MUST
- *  check for an eventual termination request (with the method isTerminationRequested()).
- *  If the termination is requested, the method must perform any necessary cleanup and
- *  return as soon as possible. Otherwise it can either add new samples to evaluate in
- *  the pool, do nothing (wait for more samples), or empty the pool and return to finish
- *  the evaluation.
+ *  It must first fill the pool with samples to evaluate and call
+ *  signalMasterAndWait() to block until a sample has been evaluated. When
+ *  returning from this method, it MUST check for an eventual termination
+ *  request (with the method isTerminationRequested()). If the termination
+ *  is requested, the method must perform any necessary cleanup and return
+ *  as soon as possible. Otherwise it can either add new samples to evaluate
+ *  in the pool, do nothing (wait for more samples), or empty the pool and
+ *  return to finish the evaluation.
  */
 void OptimizerAlgASyncExample::startToTakeDecision()
 {
-  double val = 1.2;
-  for (int i=0 ; i<5 ; i++) {
-    // push a sample in the input of the slave node
-    _pool->pushInSample(i, AtomAny::New(val));
-    // wait until next sample is ready
-    signalMasterAndWait();
-    // check error notification
-    if (isTerminationRequested()) {
-      _pool->destroyAll();
-      return;
+  std::cout << "startToTakeDecision" << std::endl;
+  int iter = 0;
+  YACS::ENGINE::Any *val=YACS::ENGINE::AtomAny::New(0.5);
+  _pool->pushInSample(iter, val);
+  
+  signalMasterAndWait();
+  while(!isTerminationRequested())
+  {
+    int currentId = _pool->getCurrentId();
+    double valIn  = _pool->getCurrentInSample()->getDoubleValue();
+    int valOut    = _pool->getCurrentOutSample()->getIntValue();
+    
+    std::cout << "Compute currentId=" << currentId;
+    std::cout << ", valIn=" << valIn;
+    std::cout << ", valOut=" << valOut << std::endl;
+    
+    iter++;
+    if(iter < 3)
+    {
+      YACS::ENGINE::Any *val=YACS::ENGINE::AtomAny::New(valIn + 1);
+      _pool->pushInSample(iter, val);
     }
-
-    // get a sample from the output of the slave node
-    Any * v = _pool->getCurrentOutSample();
-    val += v->getIntValue();
+    signalMasterAndWait();
   }
+}
 
-  // in the end destroy the pool content
+/*!
+ *  Optional method called when the algorithm has finished, successfully or
+ *  not, to perform any necessary clean up.
+ */
+void OptimizerAlgASyncExample::finish()
+{
+  std::cout << "Algo finish" << std::endl;
   _pool->destroyAll();
 }
 
+/*!
+ *  Return the value of the algoResult port.
+ */
+YACS::ENGINE::Any * OptimizerAlgASyncExample::getAlgoResult()
+{
+  YACS::ENGINE::Any *val=YACS::ENGINE::AtomAny::New(42);
+  return val;
+}
+
 //! Factory method to create the algorithm.
-OptimizerAlgBase * createOptimizerAlgASyncExample(Pool * pool)
+YACS::ENGINE::OptimizerAlgBase * createOptimizerAlgASyncExample(YACS::ENGINE::Pool *pool)
 {
   return new OptimizerAlgASyncExample(pool);
-}
+}
\ No newline at end of file
index db7134fae5f807cf15daf7dc17316fcddb6d5d76..6161dbf9095c8f5639e6db2478843458351731f8 100644 (file)
 
 #include "OptimizerAlg.hxx"
 
-using namespace YACS::ENGINE;
+#include <iostream>
+//using namespace YACS::ENGINE;
 
 extern "C"
 {
-  OptimizerAlgBase * createOptimizerAlgSyncExample(Pool * pool);
+  YACS::ENGINE::OptimizerAlgBase * createOptimizerAlgSyncExample(YACS::ENGINE::Pool * pool);
 }
 
-class OptimizerAlgSyncExample : public OptimizerAlgSync
-  {
+class OptimizerAlgSyncExample : public YACS::ENGINE::OptimizerAlgSync
+{
   private:
-    int _idTest;
-    TypeCode *_tcIn;
-    TypeCode *_tcOut;
+    int _iter;
+    YACS::ENGINE::TypeCode *_tcInt;
+    YACS::ENGINE::TypeCode *_tcDouble;
   public:
-    OptimizerAlgSyncExample(Pool *pool);
+    OptimizerAlgSyncExample(YACS::ENGINE::Pool *pool);
     virtual ~OptimizerAlgSyncExample();
-    TypeCode *getTCForIn() const;
-    TypeCode *getTCForOut() const;
-    void start();
-    void takeDecision();
-    void initialize(const Any *input) throw(YACS::Exception);
-    void finish();
-  };
-
-OptimizerAlgSyncExample::OptimizerAlgSyncExample(Pool *pool) : OptimizerAlgSync(pool),
-                                                               _tcIn(0), _tcOut(0),
-                                                               _idTest(0)
+    
+    //! returns typecode of type expected as Input. OwnerShip of returned pointer is held by this.
+    virtual YACS::ENGINE::TypeCode *getTCForIn() const;
+    //! returns typecode of type expected as Output. OwnerShip of returned pointer is held by this.
+    virtual YACS::ENGINE::TypeCode *getTCForOut() const;
+    //! returns typecode of type expected for algo initialization. OwnerShip of returned pointer is held by this.
+    virtual YACS::ENGINE::TypeCode *getTCForAlgoInit() const;
+    //! returns typecode of type expected as algo result. OwnerShip of returned pointer is held by this.
+    virtual YACS::ENGINE::TypeCode *getTCForAlgoResult() const;
+    virtual void initialize(const YACS::ENGINE::Any *input) throw (YACS::Exception);
+    virtual void start(); //! Update _pool attribute before performing anything.
+    virtual void takeDecision();//! _pool->getCurrentId gives the \b id at the origin of this call.
+                                //! Perform the job of analysing to know what new jobs to do (_pool->pushInSample)
+                                //! or in case of convergence _pool->destroyAll
+    virtual void finish();//! Called when optimization has succeed.
+    virtual YACS::ENGINE::Any * getAlgoResult();
+};
+
+OptimizerAlgSyncExample::OptimizerAlgSyncExample(YACS::ENGINE::Pool *pool)
+  : YACS::ENGINE::OptimizerAlgSync(pool), _tcInt(0), _tcDouble(0), _iter(0)
 {
-  _tcIn=new TypeCode(Double);
-  _tcOut=new TypeCode(Int);
+  _tcDouble = new YACS::ENGINE::TypeCode(YACS::ENGINE::Double);
+  _tcInt    = new YACS::ENGINE::TypeCode(YACS::ENGINE::Int);
 }
 
 OptimizerAlgSyncExample::~OptimizerAlgSyncExample()
 {
-  std::cout << "Destroying OptimizerAlgSyncExample" << std::endl;
-  _tcIn->decrRef();
-  _tcOut->decrRef();
-  std::cout << "Destroyed OptimizerAlgSyncExample" << std::endl;
+  _tcDouble->decrRef();
+  _tcInt->decrRef();
 }
 
-//! Return the typecode of the expected input type
-TypeCode * OptimizerAlgSyncExample::getTCForIn() const
+//! Return the typecode of the expected input of the internal node
+YACS::ENGINE::TypeCode * OptimizerAlgSyncExample::getTCForIn() const
 {
-  return _tcIn;
+  return _tcDouble;
 }
 
-//! Return the typecode of the expected output type
-TypeCode * OptimizerAlgSyncExample::getTCForOut() const
+//! Return the typecode of the expected output of the internal node
+YACS::ENGINE::TypeCode * OptimizerAlgSyncExample::getTCForOut() const
 {
-  return _tcOut;
+  return _tcInt;
+}
+
+//! Return the typecode of the expected input of the algorithm (algoInit port)
+YACS::ENGINE::TypeCode * OptimizerAlgSyncExample::getTCForAlgoInit() const
+{
+  return _tcInt;
+}
+
+//! Return the typecode of the expected output of the algorithm (algoResult port)
+YACS::ENGINE::TypeCode * OptimizerAlgSyncExample::getTCForAlgoResult() const
+{
+  return _tcInt;
+}
+
+//! Optional method to initialize the algorithm.
+/*!
+ *  For now, the parameter input is always NULL. It might be used in the
+ *  future to initialize an algorithm with custom data.
+ */
+void OptimizerAlgSyncExample::initialize(const YACS::ENGINE::Any *input)
+  throw (YACS::Exception)
+{
+  std::cout << "Algo initialize, input = " << input->getIntValue() << std::endl;
 }
 
 //! Start to fill the pool with samples to evaluate
 void OptimizerAlgSyncExample::start()
 {
-  _idTest=0;
-  Any *val=AtomAny::New(1.2);
-  _pool->pushInSample(4,val);
-  val=AtomAny::New(3.4);
-  _pool->pushInSample(9,val);
+  std::cout << "Algo start " << std::endl;
+  _iter=0;
+  YACS::ENGINE::Any *val=YACS::ENGINE::AtomAny::New(0.5);
+  _pool->pushInSample(_iter,val);
 }
 
 //! This method is called each time a sample has been evaluated.
 /*!
- *  It can either add new samples to evaluate in the pool, do nothing (wait for more
- *  samples), or empty the pool to finish the evaluation.
+ *  It can either add new samples to evaluate in the pool, do nothing (wait
+ *  for more samples), or empty the pool to finish the evaluation.
  */
 void OptimizerAlgSyncExample::takeDecision()
 {
-  if(_idTest==1)
-    {
-      Any *val=AtomAny::New(5.6);
-      _pool->pushInSample(16,val);
-      val=AtomAny::New(7.8);
-      _pool->pushInSample(25,val);
-      val=AtomAny::New(9. );
-      _pool->pushInSample(36,val);
-      val=AtomAny::New(12.3);
-      _pool->pushInSample(49,val);
-    }
-  else if(_idTest==4)
-    {
-      Any *val=AtomAny::New(45.6);
-      _pool->pushInSample(64,val);
-      val=AtomAny::New(78.9);
-      _pool->pushInSample(81,val);
-    }
-  else
-    {
-      Any *tmp= _pool->getCurrentInSample();
-      if(fabs(tmp->getDoubleValue()-45.6)<1.e-12)
-        _pool->destroyAll();
-    }
-  _idTest++;
+  int currentId = _pool->getCurrentId();
+  double valIn  = _pool->getCurrentInSample()->getDoubleValue();
+  int valOut    = _pool->getCurrentOutSample()->getIntValue();
+  
+  std::cout << "Algo takeDecision currentId=" << currentId;
+  std::cout << ", valIn=" << valIn;
+  std::cout << ", valOut=" << valOut << std::endl;
+  
+  _iter++;
+  if(_iter < 3)
+  {
+    YACS::ENGINE::Any *val=YACS::ENGINE::AtomAny::New(valIn + 1);
+    _pool->pushInSample(_iter, val);
+  }
 }
 
-//! Optional method to initialize the algorithm.
 /*!
- *  For now, the parameter input is always NULL. It might be used in the future to
- *  initialize an algorithm with custom data.
+ *  Optional method called when the algorithm has finished, successfully or
+ *  not, to perform any necessary clean up.
  */
-void OptimizerAlgSyncExample::initialize(const Any *input) throw (YACS::Exception)
+void OptimizerAlgSyncExample::finish()
 {
+  std::cout << "Algo finish" << std::endl;
+  _pool->destroyAll();
 }
 
 /*!
- *  Optional method called when the algorithm has finished, successfully or not, to
- *  perform any necessary clean up.
+ *  Return the value of the algoResult port.
  */
-void OptimizerAlgSyncExample::finish()
+YACS::ENGINE::Any * OptimizerAlgSyncExample::getAlgoResult()
 {
+  YACS::ENGINE::Any *val=YACS::ENGINE::AtomAny::New(42);
+  return val;
 }
 
 //! Factory method to create the algorithm.
-OptimizerAlgBase * createOptimizerAlgSyncExample(Pool *pool)
+YACS::ENGINE::OptimizerAlgBase * createOptimizerAlgSyncExample(YACS::ENGINE::Pool *pool)
 {
   return new OptimizerAlgSyncExample(pool);
-}
+}
\ No newline at end of file
index c371890f8aa1dee6d15712406afb92ddcc1e433f..430820e5c0a3fa5fba56b87737de19c786919b99 100644 (file)
@@ -25,44 +25,77 @@ class myalgoasync(SALOMERuntime.OptimizerAlgASync):
     r=SALOMERuntime.getSALOMERuntime()
     self.tin=r.getTypeCode("double")
     self.tout=r.getTypeCode("int")
+    self.tAlgoInit=r.getTypeCode("int")
+    self.tAlgoResult=r.getTypeCode("int")
 
   def setPool(self,pool):
     """Must be implemented to set the pool"""
     self.pool=pool
 
   def getTCForIn(self):
-    """returns typecode of type expected as Input"""
+    """return typecode of type expected as Input of the internal node """
     return self.tin
 
   def getTCForOut(self):
-    """returns typecode of type expected as Output"""
+    """return typecode of type expected as Output of the internal node"""
     return self.tout
 
-  def startToTakeDecision(self):
-    """This method is called only once to launch the algorithm. It must first fill the
-       pool with samples to evaluate and call self.signalMasterAndWait() to block until a
-       sample has been evaluated. When returning from this method, it MUST check for an
-       eventual termination request (with the method self.isTerminationRequested()). If
-       the termination is requested, the method must perform any necessary cleanup and
-       return as soon as possible. Otherwise it can either add new samples to evaluate in
-       the pool, do nothing (wait for more samples), or empty the pool and return to
-       finish the evaluation.
+  def getTCForAlgoInit(self):
+    """return typecode of type expected as input for initialize """
+    return self.tAlgoInit
+
+  def getTCForAlgoResult(self):
+    """return typecode of type expected as output of the algorithm """
+    return self.tAlgoResult
+
+  def initialize(self,input):
+    """Optional method called on initialization.
+       The type of "input" is returned by "getTCForAlgoInit"
     """
-    val=1.2
-    for iter in xrange(5):
-      #push a sample in the input of the slave node
-      self.pool.pushInSample(iter,val)
-      #wait until next sample is ready
-      self.signalMasterAndWait()
-      #check error notification
-      if self.isTerminationRequested():
-        self.pool.destroyAll()
-        return
+    print "Algo initialize, input = ", input.getIntValue()
 
-      #get a sample from the output of the slave node
+  def startToTakeDecision(self):
+    """This method is called only once to launch the algorithm. It must
+       first fill the pool with samples to evaluate and call
+       self.signalMasterAndWait() to block until a sample has been
+       evaluated. When returning from this method, it MUST check for an
+       eventual termination request (with the method
+       self.isTerminationRequested()). If the termination is requested, the
+       method must perform any necessary cleanup and return as soon as
+       possible. Otherwise it can either add new samples to evaluate in the
+       pool, do nothing (wait for more samples), or empty the pool and
+       return to finish the evaluation.
+    """
+    print "startToTakeDecision"
+    # fill the pool with samples
+    iter=0
+    self.pool.pushInSample(0, 0.5)
+    
+    # 
+    self.signalMasterAndWait()
+    while not self.isTerminationRequested():
       currentId=self.pool.getCurrentId()
-      v=self.pool.getCurrentOutSample()
-      val=val+v.getIntValue()
+      valIn = self.pool.getCurrentInSample().getDoubleValue()
+      valOut = self.pool.getCurrentOutSample().getIntValue()
+      print "Compute currentId=%s, valIn=%s, valOut=%s" % (currentId, valIn, valOut)
+      iter=iter+1
+      
+      if iter < 3:
+        nextSample = valIn + 1
+        self.pool.pushInSample(iter, nextSample)
+        
+      self.signalMasterAndWait()
 
-    #in the end destroy the pool content
+  def finish(self):
+    """Optional method called when the algorithm has finished, successfully
+       or not, to perform any necessary clean up."""
+    print "Algo finish"
     self.pool.destroyAll()
+
+  def getAlgoResult(self):
+    """return the result of the algorithm.
+       The object returned is of type indicated by getTCForAlgoResult.
+    """
+    return 42
+
+
index 1ccf6bda4a4fc7fe25382c1ffbaea9c37e22a2dd..d40a937646c318ca068b1d0e8d95090ef0e3c1d9 100644 (file)
@@ -25,49 +25,68 @@ class myalgosync(SALOMERuntime.OptimizerAlgSync):
     r=SALOMERuntime.getSALOMERuntime()
     self.tin=r.getTypeCode("double")
     self.tout=r.getTypeCode("int")
+    self.tAlgoInit=r.getTypeCode("int")
+    self.tAlgoResult=r.getTypeCode("int")
 
   def setPool(self,pool):
     """Must be implemented to set the pool"""
     self.pool=pool
 
   def getTCForIn(self):
-    """returns typecode of type expected as Input"""
+    """return typecode of type expected as Input of the internal node """
     return self.tin
 
   def getTCForOut(self):
-    """returns typecode of type expected as Output"""
+    """return typecode of type expected as Output of the internal node"""
     return self.tout
 
+  def getTCForAlgoInit(self):
+    """return typecode of type expected as input for initialize """
+    return self.tAlgoInit
+
+  def getTCForAlgoResult(self):
+    """return typecode of type expected as output of the algorithm """
+    return self.tAlgoResult
+
   def initialize(self,input):
-    """Optional method called on initialization. Do nothing here"""
+    """Optional method called on initialization.
+       The type of "input" is returned by "getTCForAlgoInit"
+    """
+    print "Algo initialize, input = ", input.getIntValue()
 
   def start(self):
     """Start to fill the pool with samples to evaluate."""
+    print "Algo start "
     self.iter=0
-    self.pool.pushInSample(4,1.2)
-    self.pool.pushInSample(9,3.4)
+    # pushInSample(id, value)
+    self.pool.pushInSample(self.iter, 0.5)
 
   def takeDecision(self):
-    """ This method is called each time a sample has been evaluated. It can either add
-        new samples to evaluate in the pool, do nothing (wait for more samples), or empty
-        the pool to finish the evaluation.
+    """ This method is called each time a sample has been evaluated. It can
+        either add new samples to evaluate in the pool, do nothing (wait for
+        more samples), or empty the pool to finish the evaluation.
     """
     currentId=self.pool.getCurrentId()
+    valIn = self.pool.getCurrentInSample().getDoubleValue()
+    valOut = self.pool.getCurrentOutSample().getIntValue()
+    print "Algo takeDecision currentId=%s, valIn=%s, valOut=%s" % (currentId, valIn, valOut)
 
-    if self.iter==1:
-      self.pool.pushInSample(16,5.6)
-      self.pool.pushInSample(25,7.8)
-      self.pool.pushInSample(36,9.)
-      self.pool.pushInSample(49,12.3)
-    elif self.iter==4:
-      self.pool.pushInSample(64,45.6)
-      self.pool.pushInSample(81,78.9)
-    else:
-      val=self.pool.getCurrentInSample()
-      if abs(val.getDoubleValue()-45.6) < 1.e-12:
-        self.pool.destroyAll()
     self.iter=self.iter+1
+    if self.iter < 3:
+      # continue
+      nextSample = valIn + 1
+      self.pool.pushInSample(self.iter, nextSample)
 
   def finish(self):
-    """Optional method called when the algorithm has finished, successfully or not, to
-       perform any necessary clean up. Do nothing here"""
+    """Optional method called when the algorithm has finished, successfully
+       or not, to perform any necessary clean up."""
+    print "Algo finish"
+    self.pool.destroyAll()
+
+  def getAlgoResult(self):
+    """return the result of the algorithm.
+       The object returned is of type indicated by getTCForAlgoResult.
+    """
+    return 42
+
+
index 2d38fc841772a27b96b9016f2cafa0ad91c286c2..8ece73119f3c8082e809c561233dd301a3f64e11 100644 (file)
@@ -308,7 +308,7 @@ T whilelooptypeParser<T>::post()
       T b=this->_cnode;
       this->_cnodes.pop_back();
       currentProc->names.pop_back();
-      this->_cnode=this->_cnodes.back();
+      this->_cnode=this->_cnodes.empty() ? 0 : this->_cnodes.back();
       return b;
     }
 }
@@ -374,7 +374,7 @@ template <class T>
       T b=this->_cnode;
       this->_cnodes.pop_back();
       currentProc->names.pop_back();
-      this->_cnode=this->_cnodes.back();
+      this->_cnode=this->_cnodes.empty() ? 0 : this->_cnodes.back();
       return b;
     }
 
@@ -414,10 +414,7 @@ struct pseudocomposednodetypeParser:looptypeParser<T>
       DEBTRACE("pseudocomposednode_post" << this->_cnode->getNode()->getName())
       T b = this->_cnode;
       this->_cnodes.pop_back();
-      if(this->_cnodes.size() == 0)
-        this->_cnode = 0;
-      else
-        this->_cnode = this->_cnodes.back();
+      this->_cnode=this->_cnodes.empty() ? 0 : this->_cnodes.back();
       return b;
     }
 
@@ -558,10 +555,7 @@ struct foreachlooptypeParser:dynparalooptypeParser<T>
       T b=this->_cnode;
       this->_cnodes.pop_back();
       currentProc->names.pop_back();
-      if(this->_cnodes.size() == 0)
-        this->_cnode=0;
-      else
-        this->_cnode=this->_cnodes.back();
+      this->_cnode=this->_cnodes.empty() ? 0 : this->_cnodes.back();
       return b;
     }
   int _nbranch;
@@ -641,10 +635,7 @@ struct optimizerlooptypeParser:dynparalooptypeParser<T>
       T b=this->_cnode;
       this->_cnodes.pop_back();
       currentProc->names.pop_back();
-      if(this->_cnodes.size() == 0)
-        this->_cnode=0;
-      else
-        this->_cnode=this->_cnodes.back();
+      this->_cnode=this->_cnodes.empty() ? 0 : this->_cnodes.back();
       return b;
     }
 
index f6cd5ddb7b2406ca7aeca51f32828c079ab08836..8e355b988e4a185131db0b85846f5836612b6b3e 100755 (executable)
@@ -53,6 +53,12 @@ TARGET_LINK_LIBRARIES(TestYACSPMML pmmlLib
                       YACSBases 
                       ${CPPUNIT_LIBRARIES} ${PLATFORM_LIBS})
 
-ADD_TEST(TestYACSPMML TestYACSPMML)
-
 INSTALL(TARGETS TestYACSPMML DESTINATION ${SALOME_INSTALL_BINS})
+
+IF(NOT WIN32)
+  SET(SHELL /bin/sh)
+  SALOME_CONFIGURE_FILE(config_appli.xml.in config_appli.xml)  
+  SALOME_CONFIGURE_FILE(PmmlExeTest.sh.in PmmlExeTest.sh)  
+  SALOME_CONFIGURE_FILE(PmmlInSessionTest.sh.in PmmlInSessionTest.sh)
+  ADD_TEST(NAME PmmlExeTest COMMAND ${SHELL} PmmlExeTest.sh)
+ENDIF()
\ No newline at end of file
diff --git a/src/yacsloader/pmml/PmmlExeTest.sh.in b/src/yacsloader/pmml/PmmlExeTest.sh.in
new file mode 100755 (executable)
index 0000000..d5f13d6
--- /dev/null
@@ -0,0 +1,91 @@
+#!/bin/bash
+# Copyright (C) 2006-2014  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+if test -f @CMAKE_CURRENT_BINARY_DIR@/config_appli.xml; then
+  if test -n "${GEOM_ROOT_DIR}" && test -d ${GEOM_ROOT_DIR}; then
+    sed -i s%\"GEOM_ROOT_DIR\"%\"${GEOM_ROOT_DIR}\"% @CMAKE_CURRENT_BINARY_DIR@/config_appli.xml
+  fi
+  if test -n "${PYHELLO_ROOT_DIR}" && test -d ${PYHELLO_ROOT_DIR}; then
+    sed -i s%\"PYHELLO_ROOT_DIR\"%\"${PYHELLO_ROOT_DIR}\"% @CMAKE_CURRENT_BINARY_DIR@/config_appli.xml
+  fi
+fi
+
+if test -f @KERNEL_ROOT_DIR@/bin/salome/appli_gen.py ; then
+
+  # --- create a SALOME Application environment
+
+  @KERNEL_ROOT_DIR@/bin/salome/appli_gen.py
+  sed -i s/\"yes\"/\"no\"/ SalomeApp.xml
+  sed -i s/\,study\,cppContainer\,registry\,moduleCatalog// SalomeApp.xml
+  sed -i s/pyContainer/pyContainer\,study\,cppContainer\,registry\,moduleCatalog/ SalomeApp.xml
+
+cat > CatalogResources.xml << EOF
+<!DOCTYPE ResourcesCatalog>
+<resources>
+   <machine hostname="localhost" />
+</resources>
+EOF
+
+  # ---------------------------------------------------------------------------
+  # --- first set of test in C++
+
+  # --- launch in background a SALOME session (servers)
+
+  ln -fs @CMAKE_SOURCE_DIR@/src/yacsloader/samples .
+  ./runAppli > log1 2>&1
+
+  # --- wait a little to let the background process define
+  #     the CORBA naming service port and host
+
+  sleep 5
+
+  # --- execute the test script in SALOME session environment
+
+  chmod +x @CMAKE_CURRENT_BINARY_DIR@/PmmlInSessionTest.sh
+  ./runSession @CMAKE_CURRENT_BINARY_DIR@/PmmlInSessionTest.sh
+  ret=$?
+
+  # ---------------------------------------------------------------------------
+
+  kill -9 `cat "/tmp/YACSTEST_PidEcho"`
+  ./runSession killSalome.py
+
+  echo "exec status PmmlInSessionTest.sh "  $ret
+
+  # --- delete all the SALOME Application environment
+  
+  ./bin/salome/appli_clean.sh -f
+
+else
+
+  ln -fs @CMAKE_SOURCE_DIR@/src/yacsloader/samples .
+  chmod +x @CMAKE_CURRENT_BINARY_DIR@/PmmlInSessionTest.sh
+  @CMAKE_CURRENT_BINARY_DIR@/PmmlInSessionTest.sh
+  ret=$?
+  echo "exec status PmmlInSessionTest.sh " $ret
+
+fi
+
+if [ $ret -ne 0 ]
+then cat /tmp/${USER}/UnitTestsResult
+else echo "Results are in /tmp/${USER}/UnitTestsResult"
+fi
+
+exit $ret
diff --git a/src/yacsloader/pmml/PmmlInSessionTest.sh.in b/src/yacsloader/pmml/PmmlInSessionTest.sh.in
new file mode 100755 (executable)
index 0000000..2ef1b9d
--- /dev/null
@@ -0,0 +1,50 @@
+#!/bin/bash
+# Copyright (C) 2006-2014  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+# --- script to execute in SALOME environment (use runSession)
+
+# --- wait until SALOME containers are ready
+
+python @CMAKE_CURRENT_SOURCE_DIR@/waitContainers.py
+
+# --- launch CORBA server echoSrv for tests
+
+./echoSrv &
+pidecho=$!
+echo $pidecho > "/tmp/YACSTEST_PidEcho"
+
+# Environment variables needed for the optimizer loop tests
+export LD_LIBRARY_PATH=@CMAKE_CURRENT_BINARY_DIR@:$LD_LIBRARY_PATH
+export PYTHONPATH=@CMAKE_BINARY_DIR@/src/engine_swig:$PYTHONPATH
+export PYTHONPATH=@CMAKE_BINARY_DIR@/src/runtime_swig:$PYTHONPATH
+export PYTHONPATH=@CMAKE_CURRENT_SOURCE_DIR@:$PYTHONPATH
+
+# --- launch unit tests
+
+export ROOT_SAMPLES=@CMAKE_SOURCE_DIR@/src/yacsloader/samples
+export TESTCOMPONENT_ROOT_DIR=@CMAKE_BINARY_DIR@/src/runtime/Test
+
+./TestYACSPMML
+ret=$?
+echo "exec status TestYACSPMML " $ret
+
+# --- return unit tests status
+
+exit $ret
index f8f70b6c07ecdb8eab74543135a5e5d9c68478b0..8bcf696f05dbd5fba335a1df6ab8a79e77ceb7bb 100755 (executable)
@@ -125,19 +125,7 @@ int driverTest(Proc* &p, const char* schema)
 
 void YACSPMMLBasicsTest1::setUp()
 {
-#ifdef WIN32
-    const char* p = std::getenv("YACS_ROOT_DIR");
-    std::string strP("");
-    if (p) 
-        strP = std::string(p);
-    else 
-        throw std::string("unable to get YACS_ROOT_DIR");
-    resourcesDir = strP;
-    resourcesDir += "/share/salome/yacssamples/";
-#else
-    resourcesDir =  getenv("YACS_ROOT_DIR");
-    resourcesDir += "/share/salome/yacssamples/";
-#endif      
+    resourcesDir = "samples/";    
 }
 
 void YACSPMMLBasicsTest1::tearDown()
diff --git a/src/yacsloader/pmml/config_appli.xml.in b/src/yacsloader/pmml/config_appli.xml.in
new file mode 100644 (file)
index 0000000..2c3dafa
--- /dev/null
@@ -0,0 +1,34 @@
+<!--
+  Copyright (C) 2006-2014  CEA/DEN, EDF R&D
+
+  This library is free software; you can redistribute it and/or
+  modify it under the terms of the GNU Lesser General Public
+  License as published by the Free Software Foundation; either
+  version 2.1 of the License, or (at your option) any later version.
+
+  This library is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+  Lesser General Public License for more details.
+
+  You should have received a copy of the GNU Lesser General Public
+  License along with this library; if not, write to the Free Software
+  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+
+  See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+
+-->
+<application>
+<prerequisites path="profile@SALOMEYACS_VERSION@.sh"/>
+<modules>
+   <!-- variable name <MODULE>_ROOT_DIR is built with <MODULE> == name attribute value -->
+   <!-- <MODULE>_ROOT_DIR values is set with path attribute value -->
+   <!-- attribute gui (defaults = yes) indicates if the module has a gui interface -->
+   <module name="KERNEL"       gui="no"  path="@KERNEL_ROOT_DIR@"/>
+   <module name="GUI"          gui="no"  path="@GUI_ROOT_DIR@"/>
+   <module name="GEOM"                   path="GEOM_ROOT_DIR"/> <!--GEOM_ROOT_DIR will be substituted at starting of test-->
+   <module name="PYHELLO"                path="PYHELLO_ROOT_DIR"/> <!--PYHELLO_ROOT_DIR will be substituted at starting of test-->
+   <module name="YACS"                   path="@CMAKE_INSTALL_PREFIX@"/>
+</modules>
+</application>
+
index 223f38e062c0f92ccf81c60101a834ff34e643ba..bf69320138f3d3a6cdd476694f5acf23b488ec79 100644 (file)
    <objref name="file" id="file"/>
    <type name="int" kind="int"/>
    <sequence name="intvec" content="int"/>
+   <struct name="stringpair">
+      <member name="name" type="string"/>
+      <member name="value" type="string"/>
+   </struct>
+   <sequence name="propvec" content="stringpair"/>
    <objref name="pyobj" id="python:obj:1.0"/>
+   <sequence name="seqboolvec" content="boolvec"/>
+   <sequence name="seqdblevec" content="dblevec"/>
+   <sequence name="seqintvec" content="intvec"/>
    <sequence name="stringvec" content="string"/>
+   <sequence name="seqstringvec" content="stringvec"/>
    <container name="DefaultContainer">
       <property name="container_name" value="FactoryServer"/>
       <property name="name" value="localhost"/>
    </container>
-   <optimizer name="OptimizerLoop0" nbranch="4" lib="libTestOptLoop" entry="createOptimizerAlgASyncExample">
-      <inline name="PyFunction1">
-         <function name="myfunc">
-            <code><![CDATA[def myfunc(inValue):
-    outValue = int(3*inValue+5)
-    print "Received", inValue, ", returning", outValue
-    return outValue
-]]></code>
-         </function>
-         <inport name="inValue" type="double"/>
-         <outport name="outValue" type="int"/>
+   <optimizer name="OptimizerLoop1" nbranch="1" lib="libTestOptLoop" entry="createOptimizerAlgASyncExample">
+      <inline name="PyScript7">
+         <script><code><![CDATA[o9 = int(i8)
+print "traitement:", i8
+]]></code></script>
+         <load container="DefaultContainer"/>
+         <inport name="i8" type="double"/>
+         <outport name="o9" type="int"/>
       </inline>
    </optimizer>
+   <datanode name="DataIn3">
+      <parameter name="o4" type="int">
+         <value><int>5</int></value>
+      </parameter>
+   </datanode>
+   <outnode name="OutNode5">
+      <parameter name="i6" type="int"/>
+   </outnode>
+   <control> <fromnode>OptimizerLoop1</fromnode> <tonode>OutNode5</tonode> </control>
+   <control> <fromnode>DataIn3</fromnode> <tonode>OptimizerLoop1</tonode> </control>
+   <datalink control="false">
+      <fromnode>OptimizerLoop1</fromnode> <fromport>algoResults</fromport>
+      <tonode>OutNode5</tonode> <toport>i6</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>OptimizerLoop1</fromnode> <fromport>evalSamples</fromport>
+      <tonode>OptimizerLoop1.PyScript7</tonode> <toport>i8</toport>
+   </datalink>
    <datalink control="false">
-      <fromnode>OptimizerLoop0</fromnode> <fromport>evalSamples</fromport>
-      <tonode>OptimizerLoop0.PyFunction1</tonode> <toport>inValue</toport>
+      <fromnode>DataIn3</fromnode> <fromport>o4</fromport>
+      <tonode>OptimizerLoop1</tonode> <toport>algoInit</toport>
    </datalink>
    <datalink control="false">
-      <fromnode>OptimizerLoop0.PyFunction1</fromnode> <fromport>outValue</fromport>
-      <tonode>OptimizerLoop0</tonode> <toport>evalResults</toport>
+      <fromnode>OptimizerLoop1.PyScript7</fromnode> <fromport>o9</fromport>
+      <tonode>OptimizerLoop1</tonode> <toport>evalResults</toport>
    </datalink>
    <parameter>
-      <tonode>OptimizerLoop0</tonode><toport>nbBranches</toport>
-      <value><int>4</int></value>
+      <tonode>OptimizerLoop1</tonode><toport>nbBranches</toport>
+      <value><int>1</int></value>
    </parameter>
-   <presentation name="OptimizerLoop0" x="6" y="34" width="168" height="178.5" expanded="1" expx="6" expy="34" expWidth="168" expHeight="178.5" shownState="0"/>
-   <presentation name="OptimizerLoop0.PyFunction1" x="6" y="111.5" width="158" height="63" expanded="1" expx="6" expy="111.5" expWidth="158" expHeight="63" shownState="0"/>
-   <presentation name="__ROOT__" x="0" y="0" width="178" height="216.5" expanded="1" expx="0" expy="0" expWidth="178" expHeight="216.5" shownState="0"/>
+   <presentation name="DataIn3" x="11" y="86" width="158" height="63" expanded="1" expx="11" expy="86" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="OptimizerLoop1" x="238.5" y="83.5" width="204.5" height="216" expanded="1" expx="238.5" expy="83.5" expWidth="204.5" expHeight="216" shownState="0"/>
+   <presentation name="OutNode5" x="488.5" y="84" width="158" height="63" expanded="1" expx="488.5" expy="84" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="OptimizerLoop1.PyScript7" x="42.5" y="149" width="158" height="63" expanded="1" expx="42.5" expy="149" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="__ROOT__" x="0" y="0" width="650.5" height="303.5" expanded="1" expx="0" expy="0" expWidth="650.5" expHeight="303.5" shownState="0"/>
 </proc>
index de8ba04800febd96f42d31fdaf4d0811864551d1..115bb6824e8e9320a1bc3ed2ba0e46f00ca1b49e 100644 (file)
    <objref name="file" id="file"/>
    <type name="int" kind="int"/>
    <sequence name="intvec" content="int"/>
+   <struct name="stringpair">
+      <member name="name" type="string"/>
+      <member name="value" type="string"/>
+   </struct>
+   <sequence name="propvec" content="stringpair"/>
    <objref name="pyobj" id="python:obj:1.0"/>
+   <sequence name="seqboolvec" content="boolvec"/>
+   <sequence name="seqdblevec" content="dblevec"/>
+   <sequence name="seqintvec" content="intvec"/>
    <sequence name="stringvec" content="string"/>
+   <sequence name="seqstringvec" content="stringvec"/>
    <container name="DefaultContainer">
       <property name="container_name" value="FactoryServer"/>
       <property name="name" value="localhost"/>
    </container>
-   <optimizer name="OptimizerLoop0" nbranch="4" lib="algoasyncexample.py" entry="myalgoasync">
-      <inline name="PyFunction0">
-         <function name="myfunc">
-            <code><![CDATA[def myfunc(inputValue):
-    outputValue = int(inputValue*3+5)
-    print "Received", inputValue, ", returning", outputValue
-    return outputValue
-]]></code>
-         </function>
-         <inport name="inputValue" type="double"/>
-         <outport name="outputValue" type="int"/>
+   <optimizer name="OptimizerLoop1" nbranch="1" lib="algoasyncexample.py" entry="myalgoasync">
+      <inline name="PyScript7">
+         <script><code><![CDATA[o9 = int(i8)
+print "traitement:", i8
+]]></code></script>
+         <load container="DefaultContainer"/>
+         <inport name="i8" type="double"/>
+         <outport name="o9" type="int"/>
       </inline>
    </optimizer>
+   <datanode name="DataIn3">
+      <parameter name="o4" type="int">
+         <value><int>5</int></value>
+      </parameter>
+   </datanode>
+   <outnode name="OutNode5">
+      <parameter name="i6" type="int"/>
+   </outnode>
+   <control> <fromnode>OptimizerLoop1</fromnode> <tonode>OutNode5</tonode> </control>
+   <control> <fromnode>DataIn3</fromnode> <tonode>OptimizerLoop1</tonode> </control>
+   <datalink control="false">
+      <fromnode>OptimizerLoop1</fromnode> <fromport>algoResults</fromport>
+      <tonode>OutNode5</tonode> <toport>i6</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>OptimizerLoop1</fromnode> <fromport>evalSamples</fromport>
+      <tonode>OptimizerLoop1.PyScript7</tonode> <toport>i8</toport>
+   </datalink>
    <datalink control="false">
-      <fromnode>OptimizerLoop0</fromnode> <fromport>evalSamples</fromport>
-      <tonode>OptimizerLoop0.PyFunction0</tonode> <toport>inputValue</toport>
+      <fromnode>DataIn3</fromnode> <fromport>o4</fromport>
+      <tonode>OptimizerLoop1</tonode> <toport>algoInit</toport>
    </datalink>
    <datalink control="false">
-      <fromnode>OptimizerLoop0.PyFunction0</fromnode> <fromport>outputValue</fromport>
-      <tonode>OptimizerLoop0</tonode> <toport>evalResults</toport>
+      <fromnode>OptimizerLoop1.PyScript7</fromnode> <fromport>o9</fromport>
+      <tonode>OptimizerLoop1</tonode> <toport>evalResults</toport>
    </datalink>
    <parameter>
-      <tonode>OptimizerLoop0</tonode><toport>nbBranches</toport>
-      <value><int>4</int></value>
+      <tonode>OptimizerLoop1</tonode><toport>nbBranches</toport>
+      <value><int>1</int></value>
    </parameter>
-   <presentation name="OptimizerLoop0" x="6" y="34" width="167" height="191" expanded="1" expx="6" expy="34" expWidth="167" expHeight="191" shownState="0"/>
-   <presentation name="OptimizerLoop0.PyFunction0" x="5" y="124" width="158" height="63" expanded="1" expx="5" expy="124" expWidth="158" expHeight="63" shownState="0"/>
-   <presentation name="__ROOT__" x="0" y="0" width="177" height="229" expanded="1" expx="0" expy="0" expWidth="177" expHeight="229" shownState="0"/>
+   <presentation name="DataIn3" x="11" y="86" width="158" height="63" expanded="1" expx="11" expy="86" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="OptimizerLoop1" x="238.5" y="83.5" width="204.5" height="216" expanded="1" expx="238.5" expy="83.5" expWidth="204.5" expHeight="216" shownState="0"/>
+   <presentation name="OutNode5" x="488.5" y="84" width="158" height="63" expanded="1" expx="488.5" expy="84" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="OptimizerLoop1.PyScript7" x="42.5" y="149" width="158" height="63" expanded="1" expx="42.5" expy="149" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="__ROOT__" x="0" y="0" width="650.5" height="303.5" expanded="1" expx="0" expy="0" expWidth="650.5" expHeight="303.5" shownState="0"/>
 </proc>
index 97394e915ddf5ece77a27be8a501b07e960639e4..59b78017e189ea31efa6f4cb96d8f05a06468d6f 100644 (file)
    <objref name="file" id="file"/>
    <type name="int" kind="int"/>
    <sequence name="intvec" content="int"/>
+   <struct name="stringpair">
+      <member name="name" type="string"/>
+      <member name="value" type="string"/>
+   </struct>
+   <sequence name="propvec" content="stringpair"/>
    <objref name="pyobj" id="python:obj:1.0"/>
+   <sequence name="seqboolvec" content="boolvec"/>
+   <sequence name="seqdblevec" content="dblevec"/>
+   <sequence name="seqintvec" content="intvec"/>
    <sequence name="stringvec" content="string"/>
+   <sequence name="seqstringvec" content="stringvec"/>
    <container name="DefaultContainer">
       <property name="container_name" value="FactoryServer"/>
       <property name="name" value="localhost"/>
    </container>
-   <optimizer name="OptimizerLoop0" nbranch="4" lib="libTestOptLoop" entry="createOptimizerAlgSyncExample">
-      <inline name="PyFunction1">
-         <function name="myfunc">
-            <code><![CDATA[def myfunc(inValue):
-    outValue = int(3*inValue+5)
-    print "Received", inValue, ", returning", outValue
-    return outValue
-]]></code>
-         </function>
-         <inport name="inValue" type="double"/>
-         <outport name="outValue" type="int"/>
+   <optimizer name="OptimizerLoop1" nbranch="1" lib="libTestOptLoop" entry="createOptimizerAlgSyncExample">
+      <inline name="PyScript7">
+         <script><code><![CDATA[o9 = int(i8)
+print "traitement:", i8
+]]></code></script>
+         <load container="DefaultContainer"/>
+         <inport name="i8" type="double"/>
+         <outport name="o9" type="int"/>
       </inline>
    </optimizer>
+   <datanode name="DataIn3">
+      <parameter name="o4" type="int">
+         <value><int>5</int></value>
+      </parameter>
+   </datanode>
+   <outnode name="OutNode5">
+      <parameter name="i6" type="int"/>
+   </outnode>
+   <control> <fromnode>OptimizerLoop1</fromnode> <tonode>OutNode5</tonode> </control>
+   <control> <fromnode>DataIn3</fromnode> <tonode>OptimizerLoop1</tonode> </control>
+   <datalink control="false">
+      <fromnode>OptimizerLoop1</fromnode> <fromport>algoResults</fromport>
+      <tonode>OutNode5</tonode> <toport>i6</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>OptimizerLoop1</fromnode> <fromport>evalSamples</fromport>
+      <tonode>OptimizerLoop1.PyScript7</tonode> <toport>i8</toport>
+   </datalink>
    <datalink control="false">
-      <fromnode>OptimizerLoop0</fromnode> <fromport>evalSamples</fromport>
-      <tonode>OptimizerLoop0.PyFunction1</tonode> <toport>inValue</toport>
+      <fromnode>DataIn3</fromnode> <fromport>o4</fromport>
+      <tonode>OptimizerLoop1</tonode> <toport>algoInit</toport>
    </datalink>
    <datalink control="false">
-      <fromnode>OptimizerLoop0.PyFunction1</fromnode> <fromport>outValue</fromport>
-      <tonode>OptimizerLoop0</tonode> <toport>evalResults</toport>
+      <fromnode>OptimizerLoop1.PyScript7</fromnode> <fromport>o9</fromport>
+      <tonode>OptimizerLoop1</tonode> <toport>evalResults</toport>
    </datalink>
    <parameter>
-      <tonode>OptimizerLoop0</tonode><toport>nbBranches</toport>
-      <value><int>4</int></value>
+      <tonode>OptimizerLoop1</tonode><toport>nbBranches</toport>
+      <value><int>1</int></value>
    </parameter>
-   <presentation name="OptimizerLoop0" x="6" y="34" width="169" height="187.5" expanded="1" expx="6" expy="34" expWidth="169" expHeight="187.5" shownState="0"/>
-   <presentation name="OptimizerLoop0.PyFunction1" x="7" y="120.5" width="158" height="63" expanded="1" expx="7" expy="120.5" expWidth="158" expHeight="63" shownState="0"/>
-   <presentation name="__ROOT__" x="0" y="0" width="179" height="225.5" expanded="1" expx="0" expy="0" expWidth="179" expHeight="225.5" shownState="0"/>
+   <presentation name="DataIn3" x="11" y="86" width="158" height="63" expanded="1" expx="11" expy="86" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="OptimizerLoop1" x="238.5" y="83.5" width="204.5" height="216" expanded="1" expx="238.5" expy="83.5" expWidth="204.5" expHeight="216" shownState="0"/>
+   <presentation name="OutNode5" x="488.5" y="84" width="158" height="63" expanded="1" expx="488.5" expy="84" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="OptimizerLoop1.PyScript7" x="42.5" y="149" width="158" height="63" expanded="1" expx="42.5" expy="149" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="__ROOT__" x="0" y="0" width="650.5" height="303.5" expanded="1" expx="0" expy="0" expWidth="650.5" expHeight="303.5" shownState="0"/>
 </proc>
index 3da9d59c6589ec4708d120cdbe4588ce62a7f815..0b69be3fcbe0cf6d666e569c501f206b90ae438c 100644 (file)
    <objref name="file" id="file"/>
    <type name="int" kind="int"/>
    <sequence name="intvec" content="int"/>
+   <struct name="stringpair">
+      <member name="name" type="string"/>
+      <member name="value" type="string"/>
+   </struct>
+   <sequence name="propvec" content="stringpair"/>
    <objref name="pyobj" id="python:obj:1.0"/>
+   <sequence name="seqboolvec" content="boolvec"/>
+   <sequence name="seqdblevec" content="dblevec"/>
+   <sequence name="seqintvec" content="intvec"/>
    <sequence name="stringvec" content="string"/>
+   <sequence name="seqstringvec" content="stringvec"/>
    <container name="DefaultContainer">
       <property name="container_name" value="FactoryServer"/>
       <property name="name" value="localhost"/>
    </container>
-   <optimizer name="OptimizerLoop0" nbranch="4" lib="algosyncexample.py" entry="myalgosync">
-      <inline name="PyFunction0">
-         <function name="myfunc">
-            <code><![CDATA[def myfunc(inputValue):
-    outputValue = int(inputValue*3+5)
-    print "Received", inputValue, ", returning", outputValue
-    return outputValue
-]]></code>
-         </function>
-         <inport name="inputValue" type="double"/>
-         <outport name="outputValue" type="int"/>
+   <optimizer name="OptimizerLoop1" nbranch="1" lib="algosyncexample.py" entry="myalgosync">
+      <inline name="PyScript7">
+         <script><code><![CDATA[o9 = int(i8)
+print "traitement:", i8
+]]></code></script>
+         <load container="DefaultContainer"/>
+         <inport name="i8" type="double"/>
+         <outport name="o9" type="int"/>
       </inline>
    </optimizer>
+   <datanode name="DataIn3">
+      <parameter name="o4" type="int">
+         <value><int>5</int></value>
+      </parameter>
+   </datanode>
+   <outnode name="OutNode5">
+      <parameter name="i6" type="int"/>
+   </outnode>
+   <control> <fromnode>OptimizerLoop1</fromnode> <tonode>OutNode5</tonode> </control>
+   <control> <fromnode>DataIn3</fromnode> <tonode>OptimizerLoop1</tonode> </control>
+   <datalink control="false">
+      <fromnode>OptimizerLoop1</fromnode> <fromport>algoResults</fromport>
+      <tonode>OutNode5</tonode> <toport>i6</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>OptimizerLoop1</fromnode> <fromport>evalSamples</fromport>
+      <tonode>OptimizerLoop1.PyScript7</tonode> <toport>i8</toport>
+   </datalink>
    <datalink control="false">
-      <fromnode>OptimizerLoop0</fromnode> <fromport>evalSamples</fromport>
-      <tonode>OptimizerLoop0.PyFunction0</tonode> <toport>inputValue</toport>
+      <fromnode>DataIn3</fromnode> <fromport>o4</fromport>
+      <tonode>OptimizerLoop1</tonode> <toport>algoInit</toport>
    </datalink>
    <datalink control="false">
-      <fromnode>OptimizerLoop0.PyFunction0</fromnode> <fromport>outputValue</fromport>
-      <tonode>OptimizerLoop0</tonode> <toport>evalResults</toport>
+      <fromnode>OptimizerLoop1.PyScript7</fromnode> <fromport>o9</fromport>
+      <tonode>OptimizerLoop1</tonode> <toport>evalResults</toport>
    </datalink>
    <parameter>
-      <tonode>OptimizerLoop0</tonode><toport>nbBranches</toport>
-      <value><int>4</int></value>
+      <tonode>OptimizerLoop1</tonode><toport>nbBranches</toport>
+      <value><int>1</int></value>
    </parameter>
-   <presentation name="OptimizerLoop0.PyFunction0" x="6" y="111.5" width="158" height="63" expanded="1" expx="6" expy="111.5" expWidth="158" expHeight="63" shownState="0"/>
-   <presentation name="OptimizerLoop0" x="6" y="34" width="168" height="178.5" expanded="1" expx="6" expy="34" expWidth="168" expHeight="178.5" shownState="0"/>
-   <presentation name="__ROOT__" x="0" y="0" width="178" height="216.5" expanded="1" expx="0" expy="0" expWidth="178" expHeight="216.5" shownState="0"/>
+   <presentation name="DataIn3" x="11" y="86" width="158" height="63" expanded="1" expx="11" expy="86" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="OptimizerLoop1" x="238.5" y="83.5" width="204.5" height="216" expanded="1" expx="238.5" expy="83.5" expWidth="204.5" expHeight="216" shownState="0"/>
+   <presentation name="OutNode5" x="488.5" y="84" width="158" height="63" expanded="1" expx="488.5" expy="84" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="OptimizerLoop1.PyScript7" x="42.5" y="149" width="158" height="63" expanded="1" expx="42.5" expy="149" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="__ROOT__" x="0" y="0" width="650.5" height="303.5" expanded="1" expx="0" expy="0" expWidth="650.5" expHeight="303.5" shownState="0"/>
 </proc>
index c1d43df31ebba0882e1cf222383916b2d28fafcf..b7c39fd5e7556d2a8f048d689334683a1056b399 100644 (file)
@@ -25,5 +25,8 @@ IF(NOT WIN32)
   CONFIGURE_FILE(YacsLoaderTest.sh.in YacsLoaderTest.sh)
   CONFIGURE_FILE(YacsLoaderInSessionTest.sh.in YacsLoaderInSessionTest.sh)
   ADD_TEST(NAME YacsLoaderTest_swig COMMAND ${SHELL} ${CMAKE_CURRENT_BINARY_DIR}/YacsLoaderTest.sh)
+
+  SALOME_GENERATE_TESTS_ENVIRONMENT(tests_env)
   ADD_TEST(NAME StdAloneYacsLoaderTest1 COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/StdAloneYacsLoaderTest1.py)
+  SET_TESTS_PROPERTIES(StdAloneYacsLoaderTest1 PROPERTIES ENVIRONMENT "${tests_env}")
 ENDIF() 
diff --git a/tmp_dump.py b/tmp_dump.py
new file mode 100644 (file)
index 0000000..eb42c95
--- /dev/null
@@ -0,0 +1,19 @@
+
+# Dump generated by HEXABLOCK at 2014/08/12 17:27:07
+
+import hexablock
+
+doc = hexablock.addDocument ('default')
+rep001 = doc.countVertex ()
+rep002 = doc.countVertex ()
+rep003 = doc.countEdge ()
+rep004 = doc.countQuad ()
+rep005 = doc.countHexa ()
+rep006 = doc.countVertex ()
+law0 = doc.getLaw (0)
+rep007 = doc.countVertex ()
+rep008 = doc.countVertex ()
+rep009 = doc.countEdge ()
+rep010 = doc.countQuad ()
+rep011 = doc.countHexa ()
+rep012 = doc.countVertex ()