PMML feature
author lhelgoualch <lhelgoualch>
Thu, 19 Dec 2013 15:45:14 +0000 (15:45 +0000)
committer lhelgoualch <lhelgoualch>
Thu, 19 Dec 2013 15:45:14 +0000 (15:45 +0000)
56 files changed:
src/pmml/CMakeLists.txt [new file with mode: 0755]
src/pmml/PMMLlib.cxx [new file with mode: 0755]
src/pmml/PMMLlib.hxx [new file with mode: 0755]
src/pmml/PMMLwin.hxx [new file with mode: 0755]
src/pmml/Test/BasicMainTest.hxx [new file with mode: 0755]
src/pmml/Test/CMakeLists.txt [new file with mode: 0755]
src/pmml/Test/PMMLBasicsTest.hxx [new file with mode: 0755]
src/pmml/Test/PMMLBasicsTest1.cxx [new file with mode: 0755]
src/pmml/Test/PMMLBasicsTest1.hxx [new file with mode: 0755]
src/pmml/Test/TestPMML.cxx [new file with mode: 0755]
src/pmml/Test/tools.cxx [new file with mode: 0755]
src/pmml/Test/tools.hxx [new file with mode: 0755]
src/pmml/doc/CMakeLists.txt [new file with mode: 0755]
src/pmml/doc/doxygen/CMakeLists.txt [new file with mode: 0755]
src/pmml/doc/doxygen/Doxyfile_pmml_user.in [new file with mode: 0755]
src/pmml/doc/doxygen/doxfiles/cppexamples.dox [new file with mode: 0755]
src/pmml/doc/doxygen/doxfiles/install.dox [new file with mode: 0755]
src/pmml/doc/doxygen/doxfiles/intro.dox [new file with mode: 0755]
src/pmml/doc/doxygen/doxfiles/pmml.dox [new file with mode: 0755]
src/pmml/doc/doxygen/doxfiles/pyexamples.dox [new file with mode: 0755]
src/pmml/doc/doxygen/images/head.png [new file with mode: 0755]
src/pmml/doc/doxygen/static/footer.html [new file with mode: 0755]
src/pmml/doc/doxygen/static/header.html.in [new file with mode: 0755]
src/pmml/doc/doxygen/static/salome_extra.css [new file with mode: 0755]
src/pmml/pmml_swig/CMakeLists.txt [new file with mode: 0755]
src/pmml/pmml_swig/PMML.i [new file with mode: 0755]
src/pmml/pmml_swig/PMMLBasicsTest.py [new file with mode: 0755]
src/pmml/pmml_swig/PMMLsalome.i [new file with mode: 0755]
src/pmml/resources/CMakeLists.txt [new file with mode: 0755]
src/pmml/resources/ann_model.pmml [new file with mode: 0755]
src/pmml/resources/ann_model_2.pmml [new file with mode: 0755]
src/pmml/resources/lr_model.pmml [new file with mode: 0755]
src/pmml/resources/lr_model_2.pmml [new file with mode: 0755]
src/pmml/resources/no_model.pmml [new file with mode: 0755]
src/pmml/resources/two_models_ann_lr.pmml [new file with mode: 0755]
src/pmml/resources/unittest_ref_ann_model.cpp [new file with mode: 0755]
src/pmml/resources/unittest_ref_ann_model.f [new file with mode: 0755]
src/pmml/resources/unittest_ref_ann_model.py [new file with mode: 0755]
src/pmml/resources/unittest_ref_lr_model.cpp [new file with mode: 0755]
src/pmml/resources/unittest_ref_lr_model.f [new file with mode: 0755]
src/pmml/resources/unittest_ref_lr_model.py [new file with mode: 0755]
src/pmml/resources/win32_ann_model.pmml [new file with mode: 0755]
src/pmml/resources/win32_lr_model.pmml [new file with mode: 0755]
src/yacsloader/pmml/BasicMainTest.hxx [new file with mode: 0755]
src/yacsloader/pmml/CMakeLists.txt [new file with mode: 0755]
src/yacsloader/pmml/TestYACSPMML.cxx [new file with mode: 0755]
src/yacsloader/pmml/YACSPMMLBasicsTest.hxx [new file with mode: 0755]
src/yacsloader/pmml/YACSPMMLBasicsTest1.cxx [new file with mode: 0755]
src/yacsloader/pmml/YACSPMMLBasicsTest1.hxx [new file with mode: 0755]
src/yacsloader/samples/pmml_tann_exportFunctionPMML.pmml [new file with mode: 0644]
src/yacsloader/samples/pmml_tann_tlr_exportFunctionPMML.pmml [new file with mode: 0644]
src/yacsloader/samples/pmml_tlr_exportFunctionPMML.pmml [new file with mode: 0644]
src/yacsloader/samples/schemaANN2.xml [new file with mode: 0644]
src/yacsloader/samples/schemaANNLR2.xml [new file with mode: 0644]
src/yacsloader/samples/schemaLR2.xml [new file with mode: 0644]
src/yacsloader/samples/schemaPmmlDoesNotExist.xml [new file with mode: 0644]

diff --git a/src/pmml/CMakeLists.txt b/src/pmml/CMakeLists.txt
new file mode 100755 (executable)
index 0000000..b94cb26
--- /dev/null
@@ -0,0 +1,146 @@
+# Copyright (C) 2012-2013  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+#
+# TODO  : URANIE AND WIN32 : to compile on Windows, use uranietm
+#
+#         To be adapted when YACS becomes available on Windows
+#
+# CMake options are:
+# 
+# cmake  
+# -DURANIE=ON
+# -DSALOME_BUILD_TESTS=ON
+# -DSALOME_YACS_USE_SWIG=OFF 
+# -DCMAKE_VERBOSE_MAKEFILE=ON 
+# -DSALOME_CMAKE_DEBUG=ON 
+# -DSALOME_BUILD_DOC:BOOL=FALSE  
+# -G"NMake Makefiles JOM" 
+# -DCMAKE_INSTALL_PREFIX=<install path>
+# <source path>
+#   
+
+IF(URANIE AND WIN32) 
+  CMAKE_MINIMUM_REQUIRED(VERSION 2.8.8 FATAL_ERROR)    
+  SET(SALOME_INSTALL_CMAKE_LOCAL adm/cmake CACHE PATH 
+    "Install path: local SALOME CMake files") 
+  ENABLE_TESTING()
+ENDIF(URANIE AND WIN32) 
+
+IF(SALOME_BUILD_DOC)
+  FIND_PACKAGE(SalomeDoxygen)
+  FIND_PACKAGE(SalomeGraphviz)
+  FIND_PACKAGE(SalomeSphinx)
+  SALOME_LOG_OPTIONAL_PACKAGE(Doxygen SALOME_BUILD_DOC)
+  SALOME_LOG_OPTIONAL_PACKAGE(Graphviz SALOME_BUILD_DOC)
+  SALOME_LOG_OPTIONAL_PACKAGE(Sphinx SALOME_BUILD_DOC)
+ENDIF(SALOME_BUILD_DOC)
+
+IF(SALOME_YACS_USE_SWIG)
+       FIND_PACKAGE(SalomePython)
+       FIND_PACKAGE(SalomeSWIG)
+       SALOME_LOG_OPTIONAL_PACKAGE(Python SALOME_YACS_USE_SWIG)
+       SALOME_LOG_OPTIONAL_PACKAGE(SWIG   SALOME_YACS_USE_SWIG)
+ENDIF(SALOME_YACS_USE_SWIG)
+
+# Directories
+#
+# Directories have to be given after prerequisites (to be able to use
+# the Python version string, for example).
+# ===========
+SET(SALOME_INSTALL_BINS bin/salome CACHE PATH "Install path: SALOME binaries")
+SET(SALOME_INSTALL_LIBS lib/salome CACHE PATH "Install path: SALOME libs")
+SET(SALOME_INSTALL_HEADERS include/salome CACHE PATH "Install path: SALOME headers")
+
+IF(SALOME_YACS_USE_SWIG)
+  SET(_pydir lib/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/site-packages)
+  SET(SALOME_INSTALL_PYTHON ${_pydir}/salome CACHE PATH "Install path: SALOME Python stuff")
+  SET(SALOME_INSTALL_PYTHON_SHARED ${SALOME_INSTALL_PYTHON}/shared_modules CACHE PATH 
+    "Install path: SALOME Python shared modules")
+ENDIF(SALOME_YACS_USE_SWIG)
+
+SET(SALOME_INSTALL_RES share/salome/resources CACHE PATH "Install path: SALOME resources")
+SET(SALOME_PMML_INSTALL_RES_DATA "${SALOME_INSTALL_RES}/pmml" CACHE PATH "Install path: SALOME PMML specific data")
+
+# Sources 
+# ========
+IF(WIN32)
+  ADD_DEFINITIONS("-D_USE_MATH_DEFINES")
+ENDIF(WIN32)
+
+IF(URANIE AND WIN32) 
+    FIND_PACKAGE(LibXml2 REQUIRED) 
+    LINK_DIRECTORIES( ${LIBXML2_LIBRARIES} )
+    INCLUDE_DIRECTORIES( ${LIBXML2_INCLUDE_DIR} )
+ELSE(URANIE AND WIN32) 
+    FIND_PACKAGE(SalomeLibXml2 REQUIRED) 
+ENDIF(URANIE AND WIN32)    
+
+INCLUDE_DIRECTORIES(
+  ${CMAKE_CURRENT_BINARY_DIR}/..
+  ${LIBXML2_INCLUDE_DIR}
+  )
+
+SET(pmml_SOURCES
+  PMMLlib.cxx
+  )
+
+ADD_SUBDIRECTORY(resources)
+
+ADD_LIBRARY(pmmlLib SHARED ${pmml_SOURCES})
+TARGET_LINK_LIBRARIES(pmmlLib  ${LIBXML2_LIBRARIES} ) 
+INSTALL(TARGETS pmmlLib EXPORT ${PROJECT_NAME}TargetGroup DESTINATION ${SALOME_INSTALL_LIBS})
+
+FILE(GLOB pmml_HEADERS_HXX "${CMAKE_CURRENT_SOURCE_DIR}/*.hxx")
+FILE(GLOB pmml_HEADERS_TXX "${CMAKE_CURRENT_SOURCE_DIR}/*.txx")
+INSTALL(FILES ${pmml_HEADERS_HXX} ${pmml_HEADERS_TXX} DESTINATION ${SALOME_INSTALL_HEADERS})
+
+# To allow usage as SWIG dependencies:
+IF (NOT URANIE)
+    SET(pmml_HEADERS_HXX ${pmml_HEADERS_HXX} PARENT_SCOPE)
+    SET(pmml_HEADERS_TXX ${pmml_HEADERS_TXX} PARENT_SCOPE)
+ENDIF (NOT URANIE)
+
+
+
+IF(SALOME_BUILD_TESTS)
+    ADD_SUBDIRECTORY(Test)
+ENDIF(SALOME_BUILD_TESTS)
+
+
+IF(SALOME_YACS_USE_SWIG)
+    ADD_SUBDIRECTORY(pmml_swig)
+ENDIF(SALOME_YACS_USE_SWIG)
+
+
+IF(SALOME_BUILD_DOC)
+    ADD_SUBDIRECTORY(doc)
+ENDIF(SALOME_BUILD_DOC)
+
+# Configuration export
+# ====================
+
+#   - in the install tree:
+#       Get the relative path of the include directory so 
+#       we can register it in the generated configuration files:
+SET(CONF_INCLUDE_DIRS "${CMAKE_INSTALL_PREFIX}/${INSTALL_INCLUDE_DIR}")
+
+# Install the export set for use with the install-tree
+INSTALL(EXPORT ${PROJECT_NAME}TargetGroup DESTINATION "${SALOME_INSTALL_CMAKE_LOCAL}" 
+        FILE ${PROJECT_NAME}Targets.cmake)
diff --git a/src/pmml/PMMLlib.cxx b/src/pmml/PMMLlib.cxx
new file mode 100755 (executable)
index 0000000..591800d
--- /dev/null
@@ -0,0 +1,2916 @@
+//////////////////////////////////////////////////////////////
+// Copyright (C) 2013 CEA/DEN
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published
+// by the Free Software Foundation, either version 3 of the License, or any
+// later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//////////////////////////////////////////////////////////////
+/*!
+  \file   PMMLlib.cxx
+  \author Incka
+  \date   Wed Nov 20 11:04:17 2013
+
+  \brief  Implementation of the PMMLlib class
+
+ */
+
+// includes Salome
+#include "PMMLlib.hxx"
+
+// includes C
+#include <stdlib.h>
+
+// includes C++
+#include <cstdlib>
+#include <iostream>
+#include <fstream>
+#include <sstream>
+
+using namespace std;
+
+namespace PMMLlib
+{
+  
+//**************************************************************
+//                                                             *
+//                                                             *
+//                                                             *
+//  Methods common to all model types                          *
+//                                                             *
+//                                                             *
+//                                                             *
+//**************************************************************
+  
+/**
+ * Constructor to read a PMML file.
+ * @param file Name of the PMML file to read
+ * @param log Flag to print logs or not
+ */
+PMMLlib::PMMLlib(std::string file,bool log) : 
+            _log(log),
+            _pmmlFile(file),
+            _doc(NULL),
+            _rootNode(NULL),
+            _currentNode(NULL),
+            _nbModels(0),
+            _currentModelName(""),
+            _currentModelType(kUNDEFINED)   
+{
+    try
+    {
+        xmlKeepBlanksDefault(0);
+        xmlInitParser();
+        _doc = xmlParseFile(_pmmlFile.c_str());         
+        if ( _doc != NULL )
+        {
+            _rootNode = xmlDocGetRootElement(_doc);
+            CountModels(); 
+        }
+        else 
+            throw string("Unable to read PMML file.");
+    }
+    catch ( std::string msg )
+    {
+        std::cerr << msg;
+        xmlFreeDoc(_doc);
+        xmlCleanupParser();  
+        throw;
+    }
+}
+
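
A minimal usage sketch of this read constructor, assuming the program links against pmmlLib and that one of the sample files added under src/pmml/resources is available on disk:

    #include "PMMLlib.hxx"
    #include <iostream>

    int main()
    {
        // Parse an existing PMML file; logging disabled.
        PMMLlib::PMMLlib lib("ann_model.pmml", false);
        // Number of NeuralNetwork and RegressionModel tags found while parsing.
        std::cout << "models found: " << lib.GetModelsNb() << std::endl;
        return 0;
    }
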
+/**
+ * Constructor to create a PMML file.
+ * @brief This constructor is mandatory for SWIG because it can be used with no parameters.
+ * @param log  Flag to print logs or not
+ */
+PMMLlib::PMMLlib(bool log):
+            _log(log),
+            _pmmlFile(""),
+            _doc(NULL),
+            _rootNode(NULL),
+            _currentNode(NULL),
+            _nbModels(0),
+            _currentModelName(""),
+            _currentModelType(kUNDEFINED)           
+{
+    SetRootNode();
+}
+
+/**
+ * Destructor of the class.
+ */
+PMMLlib::~PMMLlib()
+{
+    if (_doc)
+        xmlFreeDoc(_doc);
+    xmlCleanupParser();
+    if ( _log )
+        cout << "~PMMLlib" << endl;
+}
+
+/**
+ * Set the current model and its type.
+ * @param modelName Name of the model to load (i.e. the content of the 'modelName' attribute)
+ * @param type Type of PMML to read: one of kANN or kLR
+ */
+void PMMLlib::SetCurrentModel(std::string modelName, 
+                              PMMLType type)
+{
+    _currentModelName = modelName;
+    _currentModelType = type;
+    switch(type)
+    {
+        case kANN:
+            _currentModelNode = GetNeuralNetPtr(modelName);
+            break;
+        case kLR:
+            _currentModelNode = GetRegressionPtr(modelName);
+            break;
+        default:
+            throw string("Unknown PMML type.");
+            break;
+    } 
+    if ( _currentModelNode == NULL )
+        throw string("Model not found.");
+}
+
+/**
+ * Set the current model and its type.
+ * @brief Throws an exception if there is no model, or more than one model, named "modelName" in the PMML file.
+ * @param modelName Name of the model to load (i.e. the content of the 'modelName' attribute)
+ */
+void PMMLlib::SetCurrentModel(std::string modelName)
+{
+    if (_rootNode == NULL)
+        throw string("No PMML file set.");
+    xmlNodePtr node = NULL;
+    int nC = 0;
+    node = _rootNode->children;
+    while (node)
+    {         
+        string nodeModelName = _getProp(node, string("modelName"));
+        if ( nodeModelName == modelName )
+        {
+            nC++;
+            _currentModelNode = node;
+            _currentModelName = modelName;
+            _currentModelType = GetCurrentModelType();
+        }
+        node = node->next;
+    }
+    if ( nC != 1 ) 
+    {
+        std::ostringstream oss;
+        oss << nC;
+        string msg = "SetCurrentModel(modelName) : found " + oss.str() + " model(s) in PMML file.\n";
+        msg += "Use SetCurrentModel(modelName,type).";
+        throw msg;
+    }   
+}
+
+/**
+ * Set the current model and its type.
+ * @brief Throws an exception if no model is found or if there is more than one model in the PMML file.
+ */
+void PMMLlib::SetCurrentModel()
+{
+    int nC = _nbModels;
+    if ( nC != 1 ) 
+    {
+        std::ostringstream oss;
+        oss << nC;
+        string msg = "SetCurrentModel() : found " + oss.str() + " model(s) in PMML file.\n";
+        msg += "Use SetCurrentModel(modelName) or SetCurrentModel(modelName,type).";
+        throw msg;
+    }   
+    _currentModelNode = GetChildByName(_rootNode,"NeuralNetwork"); 
+    _currentModelType = kANN;  
+    if (_currentModelNode == NULL)
+    {
+        _currentModelNode = GetChildByName(_rootNode,"RegressionModel"); 
+        _currentModelType = kLR;
+    }
+    if (_currentModelNode == NULL)
+    {
+        string msg("Couldn't get node in SetCurrentModel().");
+        throw msg;      
+    }
+    _currentModelName = _getProp(_currentModelNode, string("modelName"));
+}
+
+/**
+ * Make the string used by PMMLlib::printLog.
+ * @return The log
+ */
+std::string PMMLlib::makeLog() const
+{
+    ostringstream out;
+    out << "**\n**** Display of PMMLlib ****" << endl;
+    out << " **  _pmmlFile[" << _pmmlFile << "]" << endl;
+    out << " **  _log[" << (_log?1:0) << "]" << endl;
+    out << "**\n**** End of display of PMMLlib ****" << endl;
+    return out.str(); 
+}
+
+/**
+ * Print some information about the current PMML object.
+ */
+void PMMLlib::printLog() const
+{
+    string log = makeLog();
+    cout << log << endl;
+}
+
+/**
+ * Set the root node in the tree:
+ * @code
+ * <PMML version="4.1" xmlns="http://www.dmg.org/PMML-4_1">
+ * @endcode
+ */
+void PMMLlib::SetRootNode()
+{ 
+    xmlChar * xs = _stringToXmlChar("1.0");
+    _doc = xmlNewDoc(xs);
+    xmlFree(xs);
+    
+    xmlChar *xp = _stringToXmlChar("PMML");
+    _rootNode = xmlNewNode(0, xp);
+    xmlFree(xp);
+    
+    xmlNewProp(_rootNode, (const xmlChar*)"xmlns", (const xmlChar*)"http://www.dmg.org/PMML-4_1");
+    xmlNewProp(_rootNode, (const xmlChar*)"version", (const xmlChar*)"4.1");
+    
+    xmlDocSetRootElement(_doc, _rootNode);
+}
+
+
+/**
+ * Set the header node in the tree.
+ * @param copyright Copyright of the PMML file
+ * @param description Description of the model
+ * @param appName Name of the application that produced the file
+ * @param appVersion Version of the application that produced the file
+ * @param annotation Some annotation
+ */
+void PMMLlib::SetHeader(std::string copyright, 
+                        std::string description, 
+                        std::string appName, 
+                        std::string appVersion, 
+                        std::string annotation)
+{
+    xmlNodePtr headerNode = xmlNewChild(_rootNode, 0, (const xmlChar*)"Header", 0);
+    xmlNewProp(headerNode, (const xmlChar*)"copyright", (const xmlChar*)(copyright.c_str()));
+    xmlNewProp(headerNode, (const xmlChar*)"description", (const xmlChar*)(description.c_str()));
+
+    xmlNodePtr appNode = xmlNewChild(headerNode, 0, (const xmlChar*)"Application", 0);
+    xmlNewProp(appNode, (const xmlChar*)"name", (const xmlChar*)(appName.c_str()));
+    xmlNewProp(appNode, (const xmlChar*)"version", (const xmlChar*)(appVersion.c_str()));
+
+    xmlNewChild(headerNode, 0, (const xmlChar*)"Annotation", (const xmlChar*)(annotation.c_str()));
+}
+
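
A short sketch of the creation path (the no-file constructor installs the root node, SetHeader fills the Header tag, Write serializes the tree); all strings and the output file name are placeholders:

    #include "PMMLlib.hxx"

    int main()
    {
        // Create an empty PMML document; the constructor calls SetRootNode().
        PMMLlib::PMMLlib lib(false);
        // Header node: copyright, description, application name, application version, annotation.
        lib.SetHeader("my copyright", "example PMML file", "myapp", "1.0", "example annotation");
        // Save the DOM tree to disk.
        lib.Write("example.pmml");
        return 0;
    }
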
+/**
+ * Add the MiningSchema node. 
+ * @brief Common to all models.
+ * @param name Value of property "name".
+ * @param usageType Value of property "usageType".
+ */
+void PMMLlib::AddMiningSchema(std::string name, 
+                              std::string usageType)
+{
+    xmlNodePtr netNode = _currentModelNode; 
+
+    // if 'MiningSchema' node does not exist, create it
+    xmlNodePtr miningSchemaNode = GetChildByName(netNode, "MiningSchema");
+    if(!miningSchemaNode)
+    {
+        miningSchemaNode = xmlNewChild(netNode, 0, (const xmlChar*)"MiningSchema", 0);
+    }
+
+    // then append the node
+    xmlNodePtr miningFieldNode = xmlNewChild(miningSchemaNode, 0, (const xmlChar*)"MiningField", 0);
+    xmlNewProp(miningFieldNode, (const xmlChar*)"name", (const xmlChar*)(name.c_str()) );
+    xmlNewProp(miningFieldNode, (const xmlChar*)"usageType", (const xmlChar*)(usageType.c_str()) );
+}
+
+/**
+ * Get the child of a node by the child's name.
+ * @param node Node whose children are searched
+ * @param nodeName Name of the child node to find
+ * @return Pointer to the node found, or NULL if not found
+ */
+xmlNodePtr PMMLlib::GetChildByName(xmlNodePtr node, 
+                                   std::string nodeName)
+{
+    if ( node == NULL )
+        return node;
+    
+    xmlNodePtr childNode = node->children;
+    if ( childNode == NULL )   
+        return childNode;
+    
+    const xmlChar* name = childNode->name;
+    string strName("");  
+    if ( name != NULL )
+        strName =  _xmlCharToString(name);        
+    
+    while( (childNode != NULL) && (strName != nodeName) )
+    {
+      childNode = childNode->next;
+      if ( childNode == NULL )
+          return childNode;
+      name = childNode->name; 
+      if ( name != NULL )
+          strName =  _xmlCharToString(name);        
+    }
+    return childNode;
+}
+
+/**
+ * Count the model tags of all supported types (NeuralNetwork and RegressionModel) and store the result in _nbModels.
+ */
+void PMMLlib::CountModels()
+{
+    int nCount = 0;
+    nCount = CountNeuralNetModels() + CountRegressionModels();
+    if ( _log)
+        cout << " ** End Of Count Models nCount[" << nCount << "]" << endl;
+    _nbModels = nCount ; 
+}
+
+/**
+ * Count the NeuralNetwork model tags in the PMML file.
+ * @return Number of models
+ */
+int PMMLlib::CountNeuralNetModels()
+{
+    int nCount = 0;
+    xmlNodePtr ptr = GetChildByName(_rootNode,"NeuralNetwork");  
+    // Count the models
+    while (ptr != NULL && _xmlCharToString(ptr->name) == "NeuralNetwork")
+    {
+        nCount++;
+        if (_log)
+            cout << " ** nCount[" << nCount << "]" << endl;
+        ptr = ptr->next;
+    }   
+    if ( _log)
+        cout << " ** End Of CountNetworks nCount[" << nCount << "]" << endl;    
+    return nCount;
+}
+
+/**
+ * Count the RegressionModel model tags in the PMML file.
+ * @return Number of models
+ */
+int PMMLlib::CountRegressionModels()
+{
+    int nCount = 0;
+    xmlNodePtr ptr = GetChildByName(_rootNode,"RegressionModel");  
+    // Count the models
+    while (ptr != NULL && _xmlCharToString(ptr->name) == "RegressionModel")
+    {
+        nCount++;
+        if (_log)
+            cout << " ** nCount[" << nCount << "]" << endl;
+        ptr = ptr->next;
+    }   
+    if ( _log)
+        cout << " ** End Of CountRegressions nCount[" << nCount << "]" << endl;    
+    return nCount;
+}
+
+/**
+ * Get the number of models
+ * @return Number of models
+ */
+int PMMLlib::GetModelsNb()
+{
+    return _nbModels;
+}
+
+/**
+ * Get the name of a given model.
+ * @param node Model node
+ * @return Value of the "modelName" attribute of the model node
+ */
+std::string PMMLlib::GetModelName(xmlNodePtr node)
+{
+    string name("");
+    name = _getProp(node, string("modelName") );
+    return name;
+}
+
+/**
+ * Get a pointer to the index-th node named name
+ * @param index Index of the node to search
+ * @param name Name of the node
+ * @return Pointer to the node found
+ */
+xmlNodePtr PMMLlib::GetPtr(int index, 
+                           std::string name)
+{
+    xmlNodePtr node = NULL;
+
+    if (_doc != NULL)
+    {
+        _rootNode = xmlDocGetRootElement(_doc);
+        node = GetChildByName(_rootNode, name);
+
+        int i=0;
+        while ((i != index) && (node != NULL))
+        {
+            node = node->next;
+            i++;
+        }
+    }
+    return node;
+}
+
+/**
+ * Get a pointer to the node named nodeName whose 'modelName' attribute is myModelName
+ * @param myModelName Model name of the node to search for
+ * @param nodeName Name of the node
+ * @return Pointer to the node found
+ */
+xmlNodePtr PMMLlib::GetPtr(std::string myModelName, 
+                           std::string nodeName)
+{
+    xmlNodePtr node = NULL;
+    if (_doc != NULL)
+    {
+        node = GetChildByName(_rootNode, nodeName);
+        if( node )
+        {
+            string modelName = _getProp(node, string("modelName"));
+
+            while ( (node != NULL) && modelName != myModelName )
+            {
+                node = node->next;
+                if( node )
+                {
+                    modelName = _getProp(node, string("modelName"));
+                }
+            }
+        }
+    }
+    return node;
+}
+
+/**
+ * Get the tag of the current model.
+ * @return Current model tag
+ */
+std::string PMMLlib::GetTypeString()
+{
+    string name = "";
+    switch(_currentModelType)
+    {
+    case kANN:
+        name = "NeuralNetwork";
+        break;
+    case kLR:
+        name = "RegressionModel";
+        break;
+    default:
+        throw string("Unknown PMML type.");
+        break;
+    }
+    return name;
+}
+
+/**
+ * Get the current model type.
+ * @brief type is kUNDEFINED if no model is set or if model type is not handled
+ * @return the type
+ */
+PMMLType PMMLlib::GetCurrentModelType()
+{
+    PMMLType type = kUNDEFINED ; 
+    if ( ! _currentModelNode )
+        return type;
+    string name = _xmlCharToString(_currentModelNode->name);
+    if ( name == "NeuralNetwork" )
+        type = kANN;
+    else if ( name == "RegressionModel" )
+        type = kLR;
+    return type;
+}
+
+/**
+ * Get the current model name.
+ * @brief name is "" if no model is set 
+ * @return The model name
+ */
+std::string PMMLlib::GetCurrentModelName()
+{
+    if ( ! _currentModelNode )
+        return string("");
+    string name = _getProp(_currentModelNode, string("modelName"));
+    return name;  
+}
+
+/**
+ * Unlink and free the current model node.
+ */
+void PMMLlib::UnlinkNode()
+{
+    xmlNodePtr ptr = _currentModelNode ;
+    xmlUnlinkNode( ptr );
+    xmlFreeNode( ptr );
+}
+
+/**
+ * Make a backup of the current model node.
+ */
+void PMMLlib::BackupNode()
+{
+    // Node name depending on the PMML type
+    string name = GetTypeString();
+    // Find the last save index number
+    int nCrtIndex = 0;
+    stringstream ss;
+    ss << _currentModelName << "_" << nCrtIndex;
+    xmlNodePtr ptr = GetPtr(ss.str(), name);
+    while( ptr )
+    {
+        nCrtIndex++;
+        if (_log)
+            cout << " ** nCrtIndex[" << nCrtIndex << "]" << endl;
+
+        ss.str("");
+        ss << _currentModelName << "_" << nCrtIndex;
+        ptr = GetPtr(ss.str(), name);
+    }
+    if(_log)
+        cout << " *** Node \"" << _currentModelName << "\" found, then backup it with index [" << nCrtIndex << "]" << endl;
+    // Rename model
+    xmlUnsetProp(_currentModelNode, (const xmlChar*)"modelName");
+    xmlNewProp(_currentModelNode, (const xmlChar*)"modelName", (const xmlChar*)(ss.str().c_str()));
+}
+
+/**
+ * Save the XML tree in the PMML file
+ */
+void PMMLlib::Write()
+{
+    // Save the DOM tree in the PMML file
+    Write(_pmmlFile);
+    // Update the number of models
+    CountModels();
+}
+
+/**
+ * Save the XML tree in a given file
+ * @param file Name of the file  
+ */
+void PMMLlib::Write(std::string file)
+{
+    // Save the DOM tree as a PMML file
+    int ret = xmlSaveFormatFile( file.c_str(), _doc, 1);
+    if ( ret == -1 )
+    {
+        std::string msg("  *** Error :: unable to write the PMML file \"" + file + "\"") ; 
+        cout << msg << endl;
+        throw msg;
+    } 
+    if ( _log )
+        cout << "  *** Write the PMML file \"" << file <<"\"" << endl;
+}
+
+/**
+ * Export the current model as a function in a Cpp file.
+ * @param file Name of the file  
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ */
+void PMMLlib::ExportCpp(std::string file, 
+                        std::string functionName, 
+                        std::string header)
+{
+    if ( _currentModelType == kANN )
+        ExportNeuralNetworkCpp(file,functionName, header);
+    else if ( _currentModelType == kLR )
+    {
+        ExportLinearRegressionCpp(file, functionName, header);
+    }
+    else 
+        throw string("ExportCpp : PMML type not handled.");
+}
+
+/**
+ * Export the current model as a function in a Fortran file.
+ * @param file Name of the file  
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ */
+void PMMLlib::ExportFortran(std::string file, 
+                            std::string functionName, 
+                            std::string header)
+{
+    if ( _currentModelType == kANN )
+        ExportNeuralNetworkFortran(file,functionName, header);
+    else if ( _currentModelType == kLR )
+        ExportLinearRegressionFortran(file,functionName, header);
+    else 
+        throw string("ExportFortran : PMML type not handled.");  
+}
+
+/**
+ * Export the current model as a function in a Python file.
+ * @param file Name of the file  
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ */
+void PMMLlib::ExportPython(std::string file, 
+                           std::string functionName, 
+                           std::string header)
+{
+    if ( _currentModelType == kANN )
+        ExportNeuralNetworkPython(file,functionName, header);
+    else if ( _currentModelType == kLR )
+        ExportLinearRegressionPython(file,functionName, header);
+    else 
+        throw string("ExportPython : PMML type not handled.");    
+}
+
+/**
+ * Export the current model as a function in a Python string.
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ * @return Function as a string
+ */
+std::string PMMLlib::ExportPyStr(std::string functionName, 
+                                 std::string header)
+{
+    if ( _currentModelType == kANN )
+        return ExportNeuralNetworkPyStr(functionName, header);
+    else if ( _currentModelType == kLR )
+        return ExportLinearRegressionPyStr(functionName, header);
+    else 
+        throw string("ExportPyStr : PMML type not handled.");    
+}
+
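
A hedged sketch of driving these export entry points; the model name and file names are placeholders, and the PMMLType enumerators (kANN, kLR) are assumed to be declared in the PMMLlib namespace:

    #include "PMMLlib.hxx"

    int main()
    {
        PMMLlib::PMMLlib lib("ann_model.pmml", false);
        // Select the model to export; throws if it cannot be found.
        lib.SetCurrentModel("myModel", PMMLlib::kANN);
        // Each call dispatches on the current model type (here kANN).
        lib.ExportCpp("myFunc.cpp", "myFunc", "generated from ann_model.pmml");
        lib.ExportPython("myFunc.py", "myFunc", "generated from ann_model.pmml");
        return 0;
    }
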
+/*!
+ * Conversion from a libxml2 string (xmlChar *) to a standard C++ string.
+ *    \param xs a constant libxml string.
+ *    \return a C++ std::string (contains the same text as xs).
+ */
+std::string PMMLlib::_xmlCharToString(const xmlChar *xs) const
+{
+    size_t i, L = xmlStrlen(xs);
+    std::string s;
+    s.resize(L);
+    for (i=0; *xs; s[i++] = *xs++);
+    return s;
+}
+
+/*!
+ * Conversion from a standard C++ string to a libxml2 string (xmlChar *).
+ *    \param s Constant C++ std::string to convert
+ *    \return Constant libxml string.
+ */
+xmlChar * PMMLlib::_stringToXmlChar(const std::string &s) const
+{
+    return xmlCharStrdup(s.c_str());
+}
+
+/*!
+ * Get the value of a node property.
+ *    \param node Node to query
+ *    \param prop Property name
+ *    \return Property value as a std::string ("" if the property is missing).
+ */
+std::string PMMLlib::_getProp(const xmlNodePtr node, 
+                              std::string const & prop ) const
+{
+    std::string name("");
+    if (_doc != NULL)
+    {
+        xmlChar *xp = _stringToXmlChar(prop);
+        xmlChar * attr ;
+        attr = xmlGetProp(node, xp );
+        if ( attr ) 
+        {
+            name = _xmlCharToString(attr );
+            xmlFree(attr);   
+        }  
+        xmlFree(xp);
+    }
+    return name;
+}
+
+//**************************************************************
+//                                                             *
+//                                                             *
+//                                                             *
+//  Methods specific to NeuralNetwork                          *
+//                                                             *
+//                                                             *
+//                                                             *
+//**************************************************************
+
+/*!
+ * Check if the current model type is kANN.
+ *    \brief Called in all methods specific to the NeuralNetwork model;
+ *           throws an exception if the model type is not kANN.
+ */
+void PMMLlib::CheckNeuralNetwork()
+{
+    if ( _currentModelType != kANN )
+        throw string("Use this method with NeuralNetwork models.");
+}
+
+/**
+ * Get the XML node of a given network from the index
+ * @param index Index of the neural network
+ * @return Pointer to the XML node
+ */
+xmlNodePtr PMMLlib::GetNeuralNetPtr(int index)
+{  
+    return GetPtr(index, GetTypeString() );
+}
+
+/**
+ * Get the XML node of a given network model
+ * @param name Name of the neural network
+ * @return Pointer to the XML node
+ */
+xmlNodePtr PMMLlib::GetNeuralNetPtr(std::string name)
+{   
+    return GetPtr(name, GetTypeString() );
+}
+
+/**
+ * Read the structure of the network 
+ * @brief Specific to NeuralNetwork 
+ * @return Structure read
+ */
+std::string PMMLlib::ReadNetworkStructure()
+{   
+    CheckNeuralNetwork(); 
+    
+    string structure("");
+    // Treatment of the input
+    xmlNodePtr inputNodes = GetChildByName(_currentModelNode,"NeuralInputs");
+    if ( inputNodes != NULL )
+    {
+        xmlNodePtr inputNode = GetChildByName(inputNodes,"NeuralInput");
+        if ( inputNode != NULL ) 
+        {
+            while (inputNode != NULL)
+            {
+                xmlNodePtr child = GetChildByName(inputNode,"DerivedField");
+                if ( child != NULL )
+                {
+                    xmlNodePtr fieldName = child->children; // NormContinuous
+                    if ( fieldName != NULL )
+                    {
+                        string field = _getProp(fieldName, string("field"));
+                        structure += field;
+                        structure += ":";
+                    }
+                } 
+                inputNode = inputNode->next;
+            }
+            // Delete the trailing separator
+            structure.erase(structure.size()-1);
+        }
+    }
+    // Intermediary layers
+    xmlNodePtr node_layer = GetChildByName(_currentModelNode,"NeuralLayer");
+    if ( node_layer != NULL )
+    {
+        string name = string((const char*)(node_layer->name));
+        structure += ",";
+
+        while ( node_layer != NULL &&
+                (string((const char*)(node_layer->name)) == "NeuralLayer") &&
+                node_layer->next != NULL &&
+                (string((const char*)(node_layer->next->name)) != "NeuralOutputs") )
+        {
+            // Get the number of neurons of the current layer
+            string nbneurons = _getProp(node_layer, string("numberOfNeurons"));
+            structure += nbneurons;
+            structure += ",";
+            node_layer = node_layer->next;
+        }
+    }
+    // Output layers
+    xmlNodePtr node_outputs = GetChildByName(_currentModelNode,"NeuralOutputs");
+    if ( node_outputs != NULL )
+    {    
+        xmlNodePtr node_output = GetChildByName(node_outputs,"NeuralOutput");
+        if ( node_output != NULL )
+        {   
+            while (node_output != NULL)
+            {
+                // Get the input of the current layer
+                xmlNodePtr child = GetChildByName(node_output,"DerivedField");
+                if ( child != NULL )
+                {                
+                    xmlNodePtr fieldName = child->children; // NormContinuous
+                    if ( fieldName != NULL )
+                    {
+                        if (string((const char*)(fieldName->name)) == "NormContinuous")
+                            structure += "@";
+
+                        string field = _getProp(fieldName, string("field"));
+                        structure += field;
+                        structure += ":"; 
+                    }    
+                }
+                node_output = node_output->next;
+            }
+            // Delete the trailing separator
+            structure.erase(structure.size()-1);
+        }
+    }
+    return structure;
+}
+
+/**
+ * Get the number of inputs, i.e. the number of NeuralInput nodes.
+ * @brief Specific to NeuralNetwork 
+ * @return Number of input nodes
+ */
+int PMMLlib::GetNbInputs()
+{
+    CheckNeuralNetwork();  
+    
+    int nb=0;
+    xmlNodePtr node_inputs = GetChildByName(_currentModelNode,"NeuralInputs");
+    if ( node_inputs == NULL )
+      return nb;
+    
+    node_inputs = node_inputs->children;
+    while (node_inputs != NULL)
+    {
+        nb++;
+        node_inputs = node_inputs->next;
+    }
+
+    return nb;
+}
+
+/**
+ * Get the number of outputs, i.e. the number of NeuralOutput nodes.
+ * @brief Specific to NeuralNetwork 
+ * @return Number of outputs
+ */
+int PMMLlib::GetNbOutputs()
+{
+    CheckNeuralNetwork();   
+    
+    int nb=0;
+    xmlNodePtr node_outputs = GetChildByName(_currentModelNode,"NeuralOutputs");
+    if ( node_outputs == NULL )
+      return nb;    
+    
+    node_outputs = node_outputs->children;
+
+    while (node_outputs != NULL)
+    {
+        nb++;
+        node_outputs = node_outputs->next;
+    }
+
+    return nb;
+}
+
+/**
+ * Get the name of an input in the current model.
+ * @brief Specific to NeuralNetwork 
+ * @param index Index of the input
+ * @return Name of the input
+ */
+std::string PMMLlib::GetNameInput(int index)
+{
+    CheckNeuralNetwork();  
+    
+    string name("");
+    xmlNodePtr node_inputs = GetChildByName(_currentModelNode,"NeuralInputs");
+    if ( node_inputs == NULL )
+        return name;   
+    
+    node_inputs = node_inputs->children;
+    if ( node_inputs == NULL )
+        return name; 
+    
+    for(int i = 0;i<index;i++)
+    {
+        node_inputs = node_inputs->next;
+        if ( node_inputs == NULL )
+            return name;         
+    }
+
+    node_inputs = node_inputs->children;
+    if ( node_inputs == NULL )
+        return name; 
+    
+    node_inputs = node_inputs->children;
+    if ( node_inputs == NULL )
+        return name;     
+
+    name = _getProp(node_inputs, string("field"));
+    
+    return name;
+}
+
+/**
+ * Get the name of an output in the current model.
+ * @brief Specific to NeuralNetwork 
+ * @param index Index of the output
+ * @return Name of the output
+ */
+std::string PMMLlib::GetNameOutput(int index)
+{
+    CheckNeuralNetwork();  
+    
+    string name("");
+    xmlNodePtr node_outputs = GetChildByName(_currentModelNode,"NeuralOutputs");
+    if ( node_outputs == NULL )
+      return name;       
+    node_outputs = node_outputs->children;
+    if ( node_outputs == NULL )
+      return name;   
+    for(int i = 0;i<index;i++)
+    {
+        node_outputs = node_outputs->next;
+        if ( node_outputs == NULL )
+          return name;          
+    }
+
+    node_outputs = node_outputs->children;
+    if ( node_outputs == NULL )
+      return name;  
+    node_outputs = node_outputs->children;
+    if ( node_outputs == NULL )
+      return name;      
+
+    name = _getProp(node_outputs, string("field") );    
+
+    return name;
+}
+
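
Taken together, the four accessors above let a caller enumerate the fields of the current network; a minimal sketch, assuming a model has already been selected with SetCurrentModel:

    #include "PMMLlib.hxx"
    #include <iostream>

    void dumpFields(PMMLlib::PMMLlib &lib)
    {
        // One NeuralInput / NeuralOutput node per field.
        for (int i = 0; i < lib.GetNbInputs(); i++)
            std::cout << "input  " << i << " : " << lib.GetNameInput(i) << std::endl;
        for (int j = 0; j < lib.GetNbOutputs(); j++)
            std::cout << "output " << j << " : " << lib.GetNameOutput(j) << std::endl;
    }
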
+/**
+ * Get the normalization type of the current model
+ * @brief Specific to NeuralNetwork
+ * @return Normalization type of the neural network: 0 (kMinusOneOne) or 1 (kCR, kZeroOne)
+ */
+int PMMLlib::GetNormalizationType()
+{
+    CheckNeuralNetwork();  
+    
+    xmlNodePtr node_inputs = GetChildByName(_currentModelNode,"NeuralInputs");
+    node_inputs = GetChildByName(node_inputs,"NeuralInput");
+    xmlNodePtr nodeTmp = GetChildByName(node_inputs,"DerivedField");
+    xmlNodePtr node_field = nodeTmp->children;
+    xmlNodePtr node_linearnorm;
+    string str_tmp;
+    double dorig1, dnorm1;
+    double dorig2, dnorm2;
+    if (string((const char*)(node_field->name)) == "NormContinuous")
+    {
+        // Get mean and standard deviation
+        node_linearnorm = node_field->children;
+        str_tmp = _getProp(node_linearnorm, string("orig"));
+        dorig1 = atof(str_tmp.c_str());
+        str_tmp = _getProp(node_linearnorm, string("norm"));
+        dnorm1 = atof(str_tmp.c_str());
+        node_linearnorm = node_linearnorm->next;
+        str_tmp = _getProp(node_linearnorm, string("orig"));
+        dorig2 = atof(str_tmp.c_str());
+        str_tmp = _getProp(node_linearnorm, string("norm"));
+        dnorm2 = atof(str_tmp.c_str());
+        if ( dnorm1 * dnorm2  < -0.5 )
+        {   // case of kMinusOneOne
+            return 0;
+        }
+        else
+        {   // case of kCR, kZeroOne
+            return 1;
+        }
+    }
+    string msg("Unable to retrieve the normalization type.");
+    throw msg;
+}
+
+/**
+ * Get the normalization parameters of an input of the current model.
+ * @brief Specific to NeuralNetwork 
+ * @param index Index of the input
+ * @param[out] dnorm Array that contains the mean and the standard deviation
+ */
+void PMMLlib::GetNormalisationInput(int index, 
+                                    double *dnorm)
+{
+    CheckNeuralNetwork();     
+    dnorm[0] = 0.0;
+    dnorm[1] = 0.0;
+    xmlNodePtr node_inputs = GetChildByName(_currentModelNode,"NeuralInputs");
+    if ( node_inputs == NULL )
+        return ;       
+    node_inputs = GetChildByName(node_inputs,"NeuralInput");
+    if ( node_inputs == NULL )
+        return ;    
+    // Move to the requested input
+    for(int i=0;i<index;i++)
+    {
+        node_inputs = node_inputs->next;
+        if ( node_inputs == NULL )
+            return ;  
+    }
+    xmlNodePtr tmpNode = GetChildByName(node_inputs,"DerivedField");
+    if ( tmpNode == NULL )
+        return ; 
+    xmlNodePtr node_field = GetChildByName(tmpNode,"NormContinuous");
+    if ( node_field == NULL )
+        return ; 
+    if (string((const char*)(node_field->name)) == "NormContinuous")
+    {
+        //Get mean and standard deviation
+        string str_tmp;        
+        xmlNodePtr node_linearnorm = node_field->children;
+        str_tmp = _getProp(node_linearnorm, string("orig"));
+        double dorig1 = atof(str_tmp.c_str());
+        str_tmp = _getProp(node_linearnorm, string("norm"));
+        double dnorm1 = atof(str_tmp.c_str());
+        node_linearnorm = node_linearnorm->next;
+        str_tmp = _getProp(node_linearnorm, string("orig"));
+        double dorig2 = atof(str_tmp.c_str());
+        str_tmp = _getProp(node_linearnorm, string("norm"));
+        double dnorm2 = atof(str_tmp.c_str());
+        if ( dnorm1 * dnorm2  < -0.5 ) // <=> GetNormalizationType == 0
+        {
+            // case of kMinusOneOne
+            dnorm[0] = dorig1;
+            dnorm[1] = dorig2;
+        } 
+        else // <=> GetNormalizationType == 1
+        {
+            // case of kCR, kZeroOne
+            dnorm[0] = dorig2;
+            dnorm[1] = -1.0 * dnorm1 * dorig2; //dorig2 / dnorm1; 
+        }
+    }
+}
+
+/**
+ * Get the normalization parameters of an output of the current model.
+ * @brief Specific to NeuralNetwork 
+ * @param index Output index
+ * @param[out] dnorm Array that contains the mean and the standard deviation
+ */
+void PMMLlib::GetNormalisationOutput(int index, 
+                                     double *dnorm)
+{
+    CheckNeuralNetwork();     
+    dnorm[0] = 0.0;
+    dnorm[1] = 0.0;
+    
+    xmlNodePtr node_outputs = GetChildByName(_currentModelNode,"NeuralOutputs");
+    if ( node_outputs == NULL )
+        return ;      
+    node_outputs = GetChildByName(node_outputs,"NeuralOutput");
+    if ( node_outputs == NULL )
+        return ;   
+    // Move to the requested output
+    for(int i=0;i< index;i++)
+    {
+        node_outputs = node_outputs->next;
+        if ( node_outputs == NULL )
+            return ;        
+    }
+    xmlNodePtr tmpNode = GetChildByName(node_outputs,"DerivedField");
+    if ( tmpNode == NULL )
+        return ;   
+    xmlNodePtr node_field = GetChildByName(tmpNode,"NormContinuous"); 
+    if ( node_field == NULL )
+        return ;    
+
+    if (string((const char*)(node_field->name)) == "NormContinuous")
+    {
+        // Get the mean and the standard deviation
+        string str_tmp;
+        xmlNodePtr node_linearnorm = node_field->children;
+        str_tmp = _getProp(node_linearnorm, string("orig"));
+        double dorig1 = atof(str_tmp.c_str());
+        str_tmp = _getProp(node_linearnorm, string("norm"));
+        double dnorm1 = atof(str_tmp.c_str());
+        node_linearnorm = node_linearnorm->next;
+        str_tmp = _getProp(node_linearnorm,string("orig"));
+        double dorig2 = atof(str_tmp.c_str());
+        str_tmp = _getProp(node_linearnorm, string("norm"));
+        double dnorm2 = atof(str_tmp.c_str());
+        if ( dnorm1 * dnorm2  < -0.5 ) 
+        {
+            // case of kMinusOneOne
+            dnorm[0] = dorig1;
+            dnorm[1] = dorig2;
+        } 
+        else 
+        {
+            // case of kCR, kZeroOne
+            dnorm[0] = dorig2;
+            dnorm[1] = -1.0 * dorig2 * dnorm1; //-1.0 * dorig2 / dnorm1;
+        }
+    }
+}
+
+/**
+ * Get the number of hidden layers
+ * @brief Specific to NeuralNetwork 
+ * @return Number of hidden layers
+ */
+int PMMLlib::GetNbHiddenLayers()
+{
+    CheckNeuralNetwork();
+    
+    int nb_layers = 0;  
+    xmlNodePtr node_layers = GetChildByName(_currentModelNode,"NeuralLayer");
+    if ( node_layers == NULL )
+      return nb_layers;
+    
+    while (string((const char*)(node_layers->name)) == "NeuralLayer")
+    {
+        nb_layers++;
+        node_layers = node_layers->next;
+        if ( node_layers == NULL )
+          return nb_layers;    
+    }
+    return nb_layers;
+}
+
+/**
+ * Get the total number of layers
+ * @return Total number of layers
+ */
+int PMMLlib::GetNbLayers()
+{
+    return (GetNbHiddenLayers() + 2);
+}
+
+/**
+ * Get the number of neurons at a given layer
+ * @param index Index of the layer
+ * @return Number of neurons at given layer
+ */
+int PMMLlib::GetNbNeuronsAtLayer(int index)
+{
+    CheckNeuralNetwork();  
+    
+    int nb_neurons = 0;
+    xmlNodePtr node_layers = GetChildByName(_currentModelNode,"NeuralLayer");
+    if ( node_layers == NULL )
+        return nb_neurons;  
+
+    // Move to the requested layer
+    for(int i=0;i<index;i++)
+    {
+        node_layers = node_layers->next;
+        if ( node_layers == NULL )
+            return nb_neurons;         
+    }
+
+    xmlNodePtr node_neurons = GetChildByName(node_layers,"Neuron");
+    while(node_neurons != NULL)
+    {
+        nb_neurons++;
+        node_neurons = node_neurons->next;
+    }
+
+    return nb_neurons;
+}
+
+/**
+ * Get the bias of a neuron
+ * @brief Specific to NeuralNetwork 
+ * @param layer_index Index of the layer of the neuron
+ * @param neu_index Index of the neuron
+ * @return Bias of the specified neuron
+ */
+double PMMLlib::GetNeuronBias(int layer_index, 
+                              int neu_index)
+{
+    CheckNeuralNetwork();  
+    
+    double bias = 0.;
+    xmlNodePtr node_layers = GetChildByName(_currentModelNode,"NeuralLayer");
+    if ( node_layers == NULL )
+        return bias;  
+    // Move to the requested layer
+    for(int i=0;i<layer_index;i++)
+    {
+        node_layers = node_layers->next;
+        if ( node_layers == NULL )
+            return bias;          
+    }
+    xmlNodePtr node_neurons = GetChildByName(node_layers,"Neuron");
+    // Move to the requested neuron
+    for(int j=0;j<neu_index;j++)
+    {
+        node_neurons = node_neurons->next;
+        if ( node_neurons == NULL )
+            return bias;           
+    }
+    string str_tmp  = _getProp(node_neurons, string("bias"));
+    bias = atof(str_tmp.c_str());
+    return bias;
+}
+
+/**
+ * Get the synaptic weight
+ * @brief Specific to NeuralNetwork 
+ * @param layer_index Index of the layer of the neuron
+ * @param neu_index Index of the neuron
+ * @param prec_index Index of the synapse
+ * @return Synaptic weight
+ */
+double PMMLlib::GetPrecNeuronSynapse(int layer_index, 
+                                     int neu_index, 
+                                     int prec_index)
+{
+    CheckNeuralNetwork();
+    
+    double weight = 0.;
+    xmlNodePtr node_layers = GetChildByName(_currentModelNode,"NeuralLayer");
+    if ( node_layers == NULL )
+        return weight;     
+    // Move to the requested layer
+    for(int i=0;i<layer_index;i++)
+    {
+        node_layers = node_layers->next;
+        if ( node_layers == NULL )
+            return weight;        
+    }
+    xmlNodePtr node_neurons = GetChildByName(node_layers,"Neuron");
+    // Move to the requested neuron
+    for(int i=0;i<neu_index;i++)
+    {
+        node_neurons = node_neurons->next;
+        if ( node_neurons == NULL )
+            return weight;          
+    }
+    xmlNodePtr node_con = GetChildByName(node_neurons,"Con");
+    // Move to the requested synapse
+    for(int i=0;i<prec_index;i++)
+    {
+        node_con = node_con->next;
+        if ( node_con == NULL )
+            return weight;          
+    }
+    string str_tmp  = _getProp(node_con, string("weight"));
+    weight = atof(str_tmp.c_str());
+    return weight;
+}
+
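
A sketch of walking the whole weight structure with the accessors above, assuming a fully connected layered network (each neuron of a layer has one Con node per neuron, or per input, of the previous layer):

    #include "PMMLlib.hxx"
    #include <iostream>

    void dumpWeights(PMMLlib::PMMLlib &lib)
    {
        int nbLayers = lib.GetNbHiddenLayers();   // number of NeuralLayer tags
        for (int layer = 0; layer < nbLayers; layer++)
        {
            for (int neu = 0; neu < lib.GetNbNeuronsAtLayer(layer); neu++)
            {
                std::cout << "bias(" << layer << "," << neu << ") = "
                          << lib.GetNeuronBias(layer, neu) << std::endl;
                // Assumed number of incoming connections for a fully connected layer.
                int nbPrec = (layer == 0) ? lib.GetNbInputs() : lib.GetNbNeuronsAtLayer(layer - 1);
                for (int prec = 0; prec < nbPrec; prec++)
                    std::cout << "    w(" << prec << ") = "
                              << lib.GetPrecNeuronSynapse(layer, neu, prec) << std::endl;
            }
        }
    }
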
+/**
+ * Set the name of the neural network
+ * @brief Not tested 
+ * @param index Neural network index
+ * @param name Neural network name to set
+ */
+// LCOV_EXCL_START
+void PMMLlib::SetNeuralNetName(int index, 
+                               std::string name)
+{
+    CheckNeuralNetwork();
+    
+    int i=0;
+    if (_doc != NULL)
+    {
+        xmlNodePtr node_ann = GetChildByName(_rootNode,"NeuralNetwork");
+        while ((i != index) && (node_ann != NULL))
+        {
+            node_ann = node_ann->next;
+            i++;
+        }
+        xmlNewProp(node_ann, (const xmlChar*)"modelName", (const xmlChar*)(name.c_str()));
+    }
+    xmlSaveFormatFile( string(_pmmlFile+".pmml").c_str(), _doc, 1);
+}
+// LCOV_EXCL_STOP
+
+/**
+ * Add a DataField node to the DataDictionary node
+ * @param fieldName Value of property "name"
+ * @param displayName Value of property "displayName"
+ * @param optype Value of property "optype"
+ * @param dataType Value of property "dataType"
+ * @param closure Value of property "closure" in node Interval
+ * @param leftMargin Value of property "leftMargin" in node Interval
+ * @param rightMargin Value of property "rightMargin" in node Interval
+ * @param interval Flag to add a node Interval (if true)
+ */
+void PMMLlib::AddDataField(std::string fieldName, 
+                           std::string displayName, 
+                           std::string optype,
+                           std::string dataType, 
+                           std::string closure, 
+                           double leftMargin, 
+                           double rightMargin, 
+                           bool interval)
+{
+    // if 'DataDictionary' node does not exist, create it
+    xmlNodePtr dataDictNode = GetChildByName(_rootNode, "DataDictionary");
+    if(!dataDictNode)
+    {
+        dataDictNode = xmlNewChild(_rootNode, 0, (const xmlChar*)"DataDictionary", 0);
+    }
+
+    // then append the node
+    xmlNodePtr dataFieldNode = xmlNewChild(dataDictNode, 0, (const xmlChar*)"DataField", 0);
+    xmlNewProp(dataFieldNode, (const xmlChar*)"name", (const xmlChar*)(fieldName.c_str()) );
+    xmlNewProp(dataFieldNode, (const xmlChar*)"displayName", (const xmlChar*)(displayName.c_str()) );
+    xmlNewProp(dataFieldNode, (const xmlChar*)"optype", (const xmlChar*)(optype.c_str()) );
+    xmlNewProp(dataFieldNode, (const xmlChar*)"dataType", (const xmlChar*)(dataType.c_str()) );
+
+    if ( interval ) 
+    {
+        xmlNodePtr intervalNode = xmlNewChild(dataFieldNode, 0, (const xmlChar*)"Interval", 0);
+        xmlNewProp(intervalNode, (const xmlChar*)"closure", (const xmlChar*)(closure.c_str()) );
+        stringstream ss;
+        ss << scientific << leftMargin;
+        xmlNewProp(intervalNode, (const xmlChar*)"leftMargin", (const xmlChar*)(ss.str().c_str()) );
+        ss.str("");
+        ss << scientific << rightMargin;
+        xmlNewProp(intervalNode, (const xmlChar*)"rightMargin", (const xmlChar*)(ss.str().c_str()) );
+    }
+}
+
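
A small sketch of declaring fields with AddDataField; the field names, bounds, and closure value are illustrative only:

    #include "PMMLlib.hxx"

    void declareFields(PMMLlib::PMMLlib &lib)
    {
        // Continuous input field with an explicit Interval node.
        lib.AddDataField("x", "x", "continuous", "float", "ClosedClosed", 0.0, 1.0, true);
        // Output field without an Interval node (the margins are ignored when interval is false).
        lib.AddDataField("y", "y", "continuous", "float", "", 0.0, 0.0, false);
    }
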
+/**
+ * Add a NeuralNetwork node to the root node
+  * @brief Specific to NeuralNetwork
+ * @param modelName Model name
+ * @param functionName PMMLMiningFunction. One of : kREGRESSION.
+ */ 
+void PMMLlib::AddNeuralNetwork(std::string modelName, 
+                               PMMLMiningFunction functionName)
+{
+    _currentModelType = kANN;
+    _currentModelName = modelName;
+    
+    CheckNeuralNetwork();
+    
+    string function;
+    switch(functionName)
+    {
+    case kREGRESSION:
+        function = "regression"; 
+        break;
+    }
+
+    xmlNodePtr netNode = xmlNewChild(_rootNode, 0, (const xmlChar*)"NeuralNetwork", 0);
+    xmlNewProp(netNode, (const xmlChar*)"modelName", (const xmlChar*)(_currentModelName.c_str()) );
+    xmlNewProp(netNode, (const xmlChar*)"functionName", (const xmlChar*)(function.c_str()) );
+    xmlNewProp(netNode, (const xmlChar*)"numberOfLayers", (const xmlChar*)"0" );
+    _currentModelNode = netNode;
+}
+
+ /**
+ * Add a NeuralInput node to the current model.
+  * @brief Specific to NeuralNetwork
+ * @param id Id of the input
+ * @param inputName Name of the input
+ * @param optype Value of property "optype"
+ * @param dataType Value of property "dataType" 
+ * @param orig1 Value of the first origin
+ * @param norm1 Value of the first norm
+ * @param orig2 Value of the second origin
+ * @param norm2 Value of the second norm
+ */ 
+void PMMLlib::AddNeuralInput(int id, 
+                             std::string inputName, 
+                             std::string optype, 
+                             std::string dataType, 
+                             double orig1, double norm1, 
+                             double orig2, double norm2)
+{
+    CheckNeuralNetwork();
+
+    xmlNodePtr netNode = _currentModelNode; 
+    // if 'NeuralInputs' node does not exist, create it
+    xmlNodePtr neuralInputsNode = GetChildByName(netNode, "NeuralInputs");
+    if(!neuralInputsNode)
+    {
+        neuralInputsNode = xmlNewChild(netNode, 0, (const xmlChar*)"NeuralInputs", 0);
+        xmlNewProp(neuralInputsNode, (const xmlChar*)"numberOfInputs", (const xmlChar*)"0" );
+    }
+    // increment the number of inputs
+    string numberOfInputsStr = _getProp(neuralInputsNode, string("numberOfInputs"));
+    int numberOfInputs;
+    istringstream( numberOfInputsStr ) >> numberOfInputs;        
+    numberOfInputs++;
+    stringstream ss;
+    ss << numberOfInputs;
+    xmlSetProp(neuralInputsNode, (const xmlChar*)"numberOfInputs", (const xmlChar*)(ss.str().c_str()) );
+    // then append the node and its children
+    xmlNodePtr neuralInputNode = xmlNewChild(neuralInputsNode, 0, (const xmlChar*)"NeuralInput", 0);
+    ss.str(""); ss << id;
+    xmlNewProp(neuralInputNode, (const xmlChar*)"id", (const xmlChar*)(ss.str().c_str()) );
+
+    xmlNodePtr derivedFieldNode = xmlNewChild(neuralInputNode, 0, (const xmlChar*)"DerivedField", 0);
+    xmlNewProp(derivedFieldNode, (const xmlChar*)"optype", (const xmlChar*)(optype.c_str()) );
+    xmlNewProp(derivedFieldNode, (const xmlChar*)"dataType", (const xmlChar*)(dataType.c_str()) );
+
+    xmlNodePtr normcontNode = xmlNewChild(derivedFieldNode, 0, (const xmlChar*)"NormContinuous", 0);
+    xmlNewProp(normcontNode, (const xmlChar*)"field", (const xmlChar*)(inputName.c_str()) );
+
+    xmlNodePtr node_linearnorm1 = xmlNewChild(normcontNode, 0, (const xmlChar*)"LinearNorm", 0);
+    ss.str(""); ss << scientific << orig1;
+    xmlNewProp(node_linearnorm1, (const xmlChar*)"orig", (const xmlChar*)(ss.str().c_str()) );
+    ss.str(""); ss << scientific << norm1;
+    xmlNewProp(node_linearnorm1, (const xmlChar*)"norm", (const xmlChar*)(ss.str().c_str()) );
+    xmlNodePtr node_linearnorm2 = xmlNewChild(normcontNode, 0, (const xmlChar*)"LinearNorm", 0);
+    ss.str(""); ss << scientific << orig2;
+    xmlNewProp(node_linearnorm2, (const xmlChar*)"orig", (const xmlChar*)(ss.str().c_str()) );
+    ss.str(""); ss << scientific << norm2;
+    xmlNewProp(node_linearnorm2, (const xmlChar*)"norm", (const xmlChar*)(ss.str().c_str()) );
+}
+
+ /**
+ * Add a NeuralOutput node to the current model.
+ * @brief Specific to NeuralNetwork
+ * @param outputNeuron Id of the output
+ * @param outputName Name of the output
+ * @param optype Value of property "optype"
+ * @param dataType Value of property "dataType" 
+ * @param orig1 Value of the first origin
+ * @param norm1 Value of the first norm
+ * @param orig2 Value of the second origin
+ * @param norm2 Value of the second norm
+ */ 
+void PMMLlib::AddNeuralOutput(int outputNeuron, 
+                              std::string outputName, 
+                              std::string optype, 
+                              std::string dataType, 
+                              double orig1, double norm1, 
+                              double orig2, double norm2)
+{
+    CheckNeuralNetwork();
+
+     xmlNodePtr netNode = _currentModelNode; 
+    // if 'NeuralOutputs' node does not exist, create it
+    xmlNodePtr neuralOutputsNode = GetChildByName(netNode, "NeuralOutputs");
+    if(!neuralOutputsNode)
+    {
+        neuralOutputsNode = xmlNewChild(netNode, 0, (const xmlChar*)"NeuralOutputs", 0);
+        xmlNewProp(neuralOutputsNode, (const xmlChar*)"numberOfOutputs", (const xmlChar*)"0" );
+    }
+    // increment the number of outputs
+    string numberOfOutputsStr = _getProp(neuralOutputsNode, string("numberOfOutputs"));
+    int numberOfOutputs;
+    istringstream( numberOfOutputsStr ) >> numberOfOutputs;       
+    numberOfOutputs++;
+    stringstream ss;
+    ss << numberOfOutputs;
+    xmlSetProp(neuralOutputsNode, (const xmlChar*)"numberOfOutputs", (const xmlChar*)(ss.str().c_str()) );
+
+    // then append the node and its children
+    xmlNodePtr neuralOutputNode = xmlNewChild(neuralOutputsNode, 0, (const xmlChar*)"NeuralOutput", 0);
+    ss.str(""); ss << outputNeuron;
+    xmlNewProp(neuralOutputNode, (const xmlChar*)"outputNeuron", (const xmlChar*)(ss.str().c_str()) );
+
+    xmlNodePtr derivedFieldNode = xmlNewChild(neuralOutputNode, 0, (const xmlChar*)"DerivedField", 0);
+    xmlNewProp(derivedFieldNode, (const xmlChar*)"optype", (const xmlChar*)(optype.c_str()) );
+    xmlNewProp(derivedFieldNode, (const xmlChar*)"dataType", (const xmlChar*)(dataType.c_str()) );
+
+    xmlNodePtr normcontNode = xmlNewChild(derivedFieldNode, 0, (const xmlChar*)"NormContinuous", 0);
+    xmlNewProp(normcontNode, (const xmlChar*)"field", (const xmlChar*)(outputName.c_str()) );
+
+    xmlNodePtr node_linearnorm1 = xmlNewChild(normcontNode, 0, (const xmlChar*)"LinearNorm", 0);
+    ss.str(""); ss << scientific << orig1;
+    xmlNewProp(node_linearnorm1, (const xmlChar*)"orig", (const xmlChar*)(ss.str().c_str()) );
+    ss.str(""); ss << scientific << norm1;
+    xmlNewProp(node_linearnorm1, (const xmlChar*)"norm", (const xmlChar*)(ss.str().c_str()) );
+    xmlNodePtr node_linearnorm2 = xmlNewChild(normcontNode, 0, (const xmlChar*)"LinearNorm", 0);
+    ss.str(""); ss << scientific << orig2;
+    xmlNewProp(node_linearnorm2, (const xmlChar*)"orig", (const xmlChar*)(ss.str().c_str()) );
+    ss.str(""); ss << scientific << norm2;
+    xmlNewProp(node_linearnorm2, (const xmlChar*)"norm", (const xmlChar*)(ss.str().c_str()) );
+}
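+
+/*
+ * Illustrative sketch (not part of the library): a call such as
+ *   AddNeuralOutput(3, "y", "continuous", "float", 0., -1., 10., 1.)
+ * appends the following structure under <NeuralOutputs>, the attribute values
+ * being written in scientific notation as in the code above:
+ *
+ *   <NeuralOutput outputNeuron="3">
+ *     <DerivedField optype="continuous" dataType="float">
+ *       <NormContinuous field="y">
+ *         <LinearNorm orig="0.000000e+00" norm="-1.000000e+00"/>
+ *         <LinearNorm orig="1.000000e+01" norm="1.000000e+00"/>
+ *       </NormContinuous>
+ *     </DerivedField>
+ *   </NeuralOutput>
+ */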
+
+/**
+ * Add a NeuralLayer node to the current model.
+ * @brief Specific to NeuralNetwork
+ * @param activationFunction Activation function: one of kIDENTITY, kTANH or kLOGISTIC.
+ */
+void PMMLlib::AddNeuralLayer(PMMLActivationFunction activationFunction)
+{
+    CheckNeuralNetwork();
+    
+    string functionName;
+    switch(activationFunction)
+    {
+    case kIDENTITY:
+        functionName = "identity";
+        break;
+    case kTANH:
+        functionName = "tanh";
+        break;
+    case kLOGISTIC:
+        functionName = "logistic";
+        break;
+    }
+    xmlNodePtr netNode = _currentModelNode; 
+    // Increment the number of layers
+    string numberOfLayersStr = _getProp(_currentModelNode, string("numberOfLayers"));
+    int numberOfLayers;
+    istringstream( numberOfLayersStr ) >> numberOfLayers;       
+    numberOfLayers++;
+    stringstream ss;
+    ss << numberOfLayers;
+    xmlSetProp(netNode, (const xmlChar*)"numberOfLayers", (const xmlChar*)(ss.str().c_str()) );
+    // Add the neural layer node
+    xmlNodePtr neuralLayerNode = xmlNewChild(netNode, 0, (const xmlChar*)"NeuralLayer", 0);
+    xmlNewProp(neuralLayerNode, (const xmlChar*)"activationFunction", (const xmlChar*)(functionName.c_str()) );
+    xmlNewProp(neuralLayerNode, (const xmlChar*)"numberOfNeurons", (const xmlChar*)"0" );
+    // Save the current layer in the _currentNode attribute
+    _currentNode = neuralLayerNode;
+}
+
+/**
+ * Add a Neuron node to the current NeuralLayer.
+ * @brief Specific to NeuralNetwork
+ * @param id Id of the neuron
+ * @param bias Value of property "bias"
+ * @param conNb Number of Con nodes to create
+ * @param firstFrom Value of property "from" for the first Con
+ * @param weights Vector of weights (one per Con node)
+ */
+void PMMLlib::AddNeuron(int id, 
+                        double bias, 
+                        int conNb, 
+                        int firstFrom, 
+                        vector<double> weights)
+{
+    CheckNeuralNetwork();
+    
+    stringstream ss;
+
+    // increment the number of neurons
+    string numberOfNeuronsStr = _getProp(_currentNode, string("numberOfNeurons"));
+    int numberOfNeurons;
+    istringstream( numberOfNeuronsStr ) >> numberOfNeurons;    
+    numberOfNeurons++;
+    ss << numberOfNeurons;
+    xmlSetProp(_currentNode, (const xmlChar*)"numberOfNeurons", (const xmlChar*)(ss.str().c_str()) );
+
+    // append a neuron
+    xmlNodePtr neuronNode = xmlNewChild(_currentNode, 0, (const xmlChar*)"Neuron", 0);
+    ss.str(""); ss << id;
+    xmlNewProp(neuronNode, (const xmlChar*)"id", (const xmlChar*)(ss.str().c_str()) );
+    ss.str(""); ss << scientific << bias;
+    xmlNewProp(neuronNode, (const xmlChar*)"bias", (const xmlChar*)(ss.str().c_str()) );
+
+    // append multiple 'Con' to the neuron
+    for(int k=0 ; k<conNb ; k++)
+    {
+        xmlNodePtr conNode = xmlNewChild(neuronNode, 0, (const xmlChar*)"Con", 0);
+        ss.str(""); ss << firstFrom+k;
+        xmlNewProp(conNode, (const xmlChar*)"from", (const xmlChar*)(ss.str().c_str()) ); // the "from" id is firstFrom+k, not k
+        ss.str(""); ss << scientific << weights[k];
+        xmlNewProp(conNode, (const xmlChar*)"weight", (const xmlChar*)(ss.str().c_str()) );
+    }
+}
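+
+/*
+ * Illustrative usage sketch (assumes a PMMLlib instance 'lib' whose current
+ * model is a NeuralNetwork): build a hidden layer of two tanh neurons, each
+ * connected to three inputs whose ids start at 0.
+ *
+ *   lib.AddNeuralLayer(kTANH);
+ *   std::vector<double> w1(3, 0.5), w2(3, -0.25);
+ *   lib.AddNeuron(3, 1.0, 3, 0, w1);   // neuron id 3, bias 1.0, Con from="0","1","2"
+ *   lib.AddNeuron(4, 0.0, 3, 0, w2);   // neuron id 4, bias 0.0, same connections
+ *
+ * Each AddNeuron call increments "numberOfNeurons" on the current NeuralLayer.
+ */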
+
+/**
+ * Fill the vectors used by the ExportXXX methods.
+ * @brief Specific to NeuralNetwork
+ * @param nInput Number of network inputs
+ * @param nOutput Number of network outputs
+ * @param nHidden Number of neurons in the hidden layer
+ * @param normType Normalization type (0 for kMinusOneOne, otherwise kCR/kZeroOne)
+ * @param minInput Filled with the input normalization offsets (lower bounds for kMinusOneOne)
+ * @param maxInput Filled with the input normalization scales (upper bounds for kMinusOneOne)
+ * @param minOutput Filled with the output normalization offsets
+ * @param maxOutput Filled with the output normalization scales
+ * @param valW Filled with the network weights
+ */
+void PMMLlib::fillVectorsForExport(int nInput, 
+                                   int nOutput, 
+                                   int nHidden, 
+                                   int normType,
+                                   vector<double> &minInput,
+                                   vector<double> &maxInput,
+                                   vector<double> &minOutput,
+                                   vector<double> &maxOutput,
+                                   vector<double> &valW )
+{
+    CheckNeuralNetwork();
+    
+    xmlNodePtr netNode = _currentModelNode ; 
+    // Get the different values required
+    // Build min/max input/output vectors
+    for(int i=0 ; i<nInput ; i++)
+    {
+        xmlNodePtr node_inputs = GetChildByName(netNode,"NeuralInputs");
+        node_inputs = node_inputs->children;
+        for(int j = 0;j<i;j++)
+        {
+            node_inputs = node_inputs->next;
+        }
+        node_inputs = node_inputs->children; // DerivedField
+        node_inputs = node_inputs->children; // NormContinuous
+        node_inputs = node_inputs->children; // LinearNorm
+        string strOrig1 = _getProp(node_inputs, string("orig") );
+        double orig1 = atof( strOrig1.c_str() );
+        string strNorm1 = _getProp(node_inputs, string("norm") );        
+        double norm1 = atof( strNorm1.c_str() );
+        node_inputs = node_inputs->next;
+        string strOrig2 = _getProp(node_inputs, string("orig") );
+        double orig2 = atof( strOrig2.c_str() );
+        string strNorm2 = _getProp(node_inputs, string("norm") );    
+        if( normType==0 )
+        {   // kMinusOneOne
+            minInput[i] = orig1;
+            maxInput[i] = orig2;
+        }
+        else
+        {   //  kCR, kZeroOne
+            minInput[i] = orig2;
+            maxInput[i] = -1.0*norm1*orig2;
+        }
+    }
+    xmlNodePtr node_outputs = GetChildByName(netNode,"NeuralOutputs");
+    node_outputs = node_outputs->children;
+    node_outputs = node_outputs->children; // DerivedField
+    node_outputs = node_outputs->children; // NormContinuous
+    node_outputs = node_outputs->children; // LinearNorm  
+    string strOrig1 = _getProp(node_outputs, string("orig") );
+    double orig1 = atof( strOrig1.c_str() );
+    string strNorm1 = _getProp(node_outputs, string("norm") );        
+    double norm1 = atof( strNorm1.c_str() ); 
+    node_outputs = node_outputs->next;
+    string strOrig2 = _getProp(node_outputs, string("orig") );
+    double orig2 = atof( strOrig2.c_str() );    
+    if( normType==0 )
+    {   // kMinusOneOne
+        minOutput[0] = orig1;
+        maxOutput[0] = orig2;
+    }
+    else
+    {   //  kCR, kZeroOne
+        minOutput[0] = orig2;
+        maxOutput[0] = -1.0*norm1*orig2;
+    }
+    // Build weight vector
+    for(int j=0 ; j<nHidden ; j++) // hidden neurons
+    {
+        valW[j*(nInput+nOutput+1)+2] = GetNeuronBias( 0, j);
+        for(int i=0 ; i<nInput ; i++)
+        {
+            valW[j*(nInput+nOutput+1)+3+i] = GetPrecNeuronSynapse( 0, j, i);        
+        }
+    }  
+    for(int j=0 ; j<nOutput ; j++) // output neurons
+    {
+        valW[0] = GetNeuronBias( 1, j);                
+        for(int i=0 ; i<nHidden ; i++)
+        {
+            valW[i*(nInput+nOutput+1)+1] = GetPrecNeuronSynapse( 1, j, i);           
+        }
+    }
+}
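+
+/*
+ * Layout of valW (sketch, deduced from the indices used above) with
+ * s = nInput + nOutput + 1 and a single output:
+ *   valW[0]           : bias of the output neuron
+ *   valW[j*s + 1]     : weight from hidden neuron j to the output
+ *   valW[j*s + 2]     : bias of hidden neuron j
+ *   valW[j*s + 3 + i] : weight from input i to hidden neuron j
+ * For example nInput=2, nOutput=1, nHidden=3 gives s=4 and
+ * nWeights = nHidden*s + nOutput = 13.
+ */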
+
+/**
+ * Export the current model as a NeuralNetwork function in a Cpp file.
+ * @brief Specific to NeuralNetwork
+ * @param file Name of the file  
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ */
+void PMMLlib::ExportNeuralNetworkCpp(std::string file, 
+                                     std::string functionName, 
+                                     std::string header)
+{
+    CheckNeuralNetwork();
+    
+    // Get the different values required
+    int nInput = GetNbInputs();
+    int nOutput = GetNbOutputs();
+    int nHidden = GetNbNeuronsAtLayer(0);
+    int nNeurons = nInput+nOutput+nHidden;
+    int nWeights = nHidden*(nInput+nOutput+1)+nOutput;
+    int normType = GetNormalizationType();
+    // Build min/max input/output vectors
+    vector<double> minInput(nInput);
+    vector<double> maxInput(nInput);
+    vector<double> minOutput(nOutput);
+    vector<double> maxOutput(nOutput);
+    vector<double> valW(nWeights);
+    fillVectorsForExport(nInput,nOutput,nHidden,normType,minInput,maxInput,minOutput,maxOutput,valW);
+    // Write the file
+    ofstream sourcefile(file.c_str());
+    // ActivationFunction
+    if( normType==0 )
+    {   // kMinusOneOne
+        sourcefile << "#define ActivationFunction(sum)         ( tanh(sum) )" << endl;
+    }
+    else
+    {   //  kCR, kZeroOne
+        sourcefile << "#define ActivationFunction(sum) ( 1.0 / ( 1.0 + exp( -1.0 * sum )) )" << endl;
+    }
+    //
+    sourcefile << "void " << functionName <<"(double *param, double *res)" << endl;
+    sourcefile << "{" << endl;
+    // header
+    sourcefile << "  ////////////////////////////// " << endl;
+    sourcefile << "  //" << endl;
+    // insert comments in header
+    header = "  // " + header;
+    size_t pos = 0;
+    while ((pos = header.find("\n", pos)) != std::string::npos) 
+    {
+        header.replace(pos, 1, "\n  //");
+        pos += 5;
+    }
+    sourcefile << header << endl;
+    sourcefile << "  //" << endl;
+    sourcefile << "  ////////////////////////////// " << endl;
+    sourcefile << endl;
+    sourcefile << "  int nInput   = " <<  nInput << ";" << endl;
+    sourcefile << "  int nOutput   = " <<  nOutput << ";" << endl;
+    //  sourcefile << "  int nWeights = " <<  _nWeight << ";" << endl;
+    sourcefile << "  int nHidden  = " <<  nHidden << ";" << endl;
+    sourcefile << "  const int nNeurones  = " <<  nNeurons << ";" << endl;
+    sourcefile << "  double " << functionName << "_act[nNeurones];" << endl;
+    sourcefile << endl;
+    sourcefile << "  // --- Preprocessing of the inputs and outputs" << endl;
+    sourcefile << "  double " << functionName << "_minInput[] = {" << endl << "  ";
+    for(int i=0 ; i<nInput ; i++)
+    {
+        sourcefile << minInput[i] << ", ";
+        if( (i+1)%5==0 )
+            sourcefile << "\n  ";
+    }
+    if( nInput%5 != 0 )
+        sourcefile << endl;
+    sourcefile << "  };" << endl;
+    //
+    sourcefile << "  double " << functionName << "_minOutput[] = {" << endl << "  ";
+    sourcefile << minOutput[0] << ", ";
+    sourcefile << "  };" << endl;
+    //
+    sourcefile << "  double " << functionName << "_maxInput[] = {" << endl << "  ";
+    for(int i=0 ; i<nInput ; i++)
+    {
+        sourcefile << maxInput[i] << ", ";
+        if( (i+1)%5==0 )
+            sourcefile << "\n  ";
+    }
+    if( nInput%5 != 0 )
+        sourcefile << endl;
+    sourcefile << "  };" << endl;
+    //
+    sourcefile << "  double " << functionName << "_maxOutput[] = {" << endl << "  ";
+    sourcefile << maxOutput[0] << ", ";
+    sourcefile << "  };" << endl;
+    // Weights vector
+    sourcefile << endl;
+    sourcefile << "  // --- Values of the weights" << endl;
+    sourcefile << "  double " << functionName << "_valW[] = {" << endl << "  ";
+    for(int i=0 ; i<nWeights ; i++)
+    {
+        sourcefile << valW[i] << ", ";
+        if ( (i+1)%5 == 0 )
+            sourcefile << endl << "  ";
+    }
+    sourcefile << endl << "  };"<<endl;
+    //
+    sourcefile << "  // --- Constants";
+    sourcefile << endl;
+    sourcefile << "  int indNeurone = 0;"<<endl;
+    sourcefile << "  int CrtW;"<<endl;
+    sourcefile << "  double sum;"<<endl;
+
+    // input layer
+    sourcefile << endl;
+    sourcefile << "  // --- Input Layers"<<endl;
+    sourcefile << "  for(int i = 0; i < nInput; i++) {"<<endl;
+    if( normType==0 )
+    {   // kMinusOneOne
+        sourcefile << "     " << functionName << "_act[indNeurone++] = 2.0 * ( param[i] - "
+                   << functionName << "_minInput[i] ) / ( " << functionName << "_maxInput[i] - "
+                   << functionName << "_minInput[i] ) - 1.0;"<<endl;
+    }
+    else
+    {   //  kCR, kZeroOne
+        sourcefile << "     " << functionName << "_act[indNeurone++] = ( param[i] - "
+                   << functionName << "_minInput[i] ) / " << functionName << "_maxInput[i];"
+                   << endl;
+    }
+    sourcefile << "  }"<<endl;
+
+
+    // hidden layer
+    sourcefile << endl;
+    sourcefile << "  // --- Hidden Layers"<<endl;
+    sourcefile << "  for (int member = 0; member < nHidden; member++) {"<<endl;
+    sourcefile << "     int CrtW = member * ( nInput + 2) + 2;" << endl;
+    sourcefile << "     sum = " << functionName << "_valW[CrtW++];" << endl;
+    sourcefile << "     for (int source = 0; source < nInput; source++) {" << endl;
+    sourcefile << "         sum += " << functionName << "_act[source] * " << functionName << "_valW[CrtW++];" << endl;
+    sourcefile << "       }" << endl;
+    sourcefile << "       " << functionName << "_act[indNeurone++] = ActivationFunction(sum);" << endl;
+    sourcefile << "  }"<<endl;
+    // output layer
+    sourcefile << endl;
+    sourcefile << "  // --- Output"<<endl;
+    sourcefile << "  for (int member = 0; member < nOutput; member++) {"<<endl;
+    sourcefile << "    sum = " << functionName << "_valW[0];"<<endl;
+    sourcefile << "    for (int source = 0; source < nHidden; source++) {"<<endl;
+    sourcefile << "      CrtW = source * ( nInput + 2) + 1;"<<endl;
+    sourcefile << "      sum += " << functionName << "_act[nInput+source] * " << functionName << "_valW[CrtW];"<<endl;
+    sourcefile << "    }"<<endl;
+    sourcefile << "    " << functionName << "_act[indNeurone++] = sum;"<<endl;
+    if( normType==0 )
+    {   // kMinusOneOne
+        sourcefile << "    res[member] = " << functionName
+                   << "_minOutput[member] + 0.5 * ( " << functionName
+                   << "_maxOutput[member] - " << functionName
+                   << "_minOutput[member] ) * ( sum + 1.0);" << endl;
+    }
+    else
+    {   //  kCR, kZeroOne
+        sourcefile << "    res[member] = " << functionName
+                   << "_minOutput[member] + " << functionName
+                   << "_maxOutput[member] * sum;" << endl;
+    }
+    sourcefile << "  }"<<endl;
+    //
+    sourcefile << "}" << endl;
+    sourcefile.close();
+}
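+
+/*
+ * Illustrative usage sketch (file, model and function names are hypothetical):
+ *
+ *   PMMLlib p("ann_model.pmml");
+ *   p.SetCurrentModel("ANNModel", kANN);
+ *   p.ExportNeuralNetworkCpp("myAnn.cpp", "myAnn", "Generated from ann_model.pmml");
+ *
+ * The generated file defines "void myAnn(double *param, double *res)", which
+ * normalizes the nInput values of 'param', propagates them through the hidden
+ * layer and writes the denormalized result into 'res'.
+ */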
+
+/**
+ * Export the current model as a NeuralNetwork function in a Fortran file.
+ * @brief Specific to NeuralNetwork
+ * @param file Name of the file  
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ */
+void PMMLlib::ExportNeuralNetworkFortran(std::string file, 
+                                         std::string functionName, 
+                                         std::string header)
+{
+    CheckNeuralNetwork();  
+    
+    // Get the different values required
+    int nInput = GetNbInputs();
+    int nOutput = GetNbOutputs();
+    int nHidden = GetNbNeuronsAtLayer(0);
+    int nWeights = nHidden*(nInput+nOutput+1)+nOutput;
+    int normType = GetNormalizationType();
+    // Build min/max input/output vectors
+    vector<double> minInput(nInput);
+    vector<double> maxInput(nInput);
+    vector<double> minOutput(nOutput);
+    vector<double> maxOutput(nOutput);
+    vector<double> valW(nWeights);
+    fillVectorsForExport(nInput,nOutput,nHidden,normType,minInput,maxInput,minOutput,maxOutput,valW);
+    // Write the file
+    ofstream sourcefile(file.c_str());
+
+    sourcefile << "      SUBROUTINE " << functionName << "(";
+    for(int i=0 ; i<GetNbInputs() ; i++)
+    {
+        sourcefile << GetNameInput(i) << ",";
+    }
+    sourcefile << GetNameOutput(0) << ")" << endl;
+    // header
+    sourcefile << "C --- *********************************************" << endl;
+    sourcefile << "C --- " << endl;
+    // insert comments in header
+    header = "C ---  " + header;
+    size_t pos = 0;
+    while ((pos = header.find("\n", pos)) != std::string::npos) 
+    {
+        header.replace(pos, 1, "\nC --- ");
+        pos += 5;
+    }
+    sourcefile << header << endl;
+    sourcefile << "C --- " << endl;
+    sourcefile << "C --- *********************************************" << endl;
+
+    sourcefile << "      IMPLICIT DOUBLE PRECISION (V)" << endl;
+    for(int i=0 ; i<GetNbInputs() ; i++)
+    {
+        sourcefile << "      DOUBLE PRECISION " << GetNameInput(i) << endl;
+    }
+    sourcefile << "      DOUBLE PRECISION " << GetNameOutput(0) << endl;
+    sourcefile << endl;
+
+    sourcefile << "C --- Preprocessing of the inputs" << endl;
+    for(int i=0 ; i<GetNbInputs() ; i++)
+    {
+        sourcefile << "      VXN" << GetNameInput(i) << " = ";
+
+        if( normType==0 )
+        {   // kMinusOneOne
+            sourcefile << "2.D0 * ( " << GetNameInput(i) << " - " << minInput[i] << "D0 ) / " << maxInput[i] - minInput[i] << "D0 - 1.0" << endl;
+        }
+        else
+        {   //  kCR, kZeroOne
+            sourcefile << "( " << GetNameInput(i) << " - " << minInput[i] << "D0 ) / " << maxInput[i] << "D0" << endl;
+        }
+    }
+
+    // Weights vector
+    sourcefile << endl;
+    sourcefile << "C --- Values of the weights" << endl;
+    for(int i=0 ; i<nWeights ; i++)
+    {
+        sourcefile << "      VW" << i+1 << " = " << valW[i] << endl;
+    }
+    // Loop on hidden neurons
+    sourcefile << endl;
+    for(int member = 0; member < nHidden; member++) 
+    {
+        sourcefile << "C --- hidden neural number " << member+1 << endl;
+        int CrtW = member * ( nInput + 2) + 3;
+        sourcefile << "      VAct" << member+1 << " = VW" << CrtW++ << endl;
+        for (int source = 0; source < nInput; source++)
+        {
+            sourcefile << "     1      + VW"<< CrtW++ << " * VXN" << GetNameInput(source) << endl;
+        }
+        sourcefile << endl;
+
+
+        if( normType==0 )
+        {   // kMinusOneOne
+            sourcefile << "      VPot" << member+1 << " = 2.D0 / (1.D0 + DEXP(-2.D0 * VAct" << member+1 <<")) - 1.D0" << endl;
+        }
+        else
+        {   //  kCR, kZeroOne
+            sourcefile << "      VPot" << member+1 << " = 1.D0 / (1.D0 + DEXP(-1.D0 * VAct" << member+1 <<"))" << endl;
+        }
+        sourcefile << endl;
+    }
+
+    // Output of the model
+    sourcefile << "C --- Output" << endl;
+    sourcefile << "      VOut = VW1" << endl;
+    for(int source=0 ; source < nHidden ; source++)
+    {
+        int CrtW = source * ( nInput + 2) + 2;
+        sourcefile << "     1    + VW"<< CrtW  << " * VPot" << source+1 << endl;
+    }
+
+    // Denormalize Output
+    sourcefile << endl;
+    sourcefile << "C --- Pretraitment of the output" << endl;
+    if( normType==0 )
+    {   // kMinusOneOne
+        sourcefile << "      VDelta = " << 0.5*(maxOutput[0]-minOutput[0]) << "D0 * ( VOut + 1.0D0)" << endl;
+        sourcefile << "      " << GetNameOutput(0) << " = " << minOutput[0] << "D0 + VDelta" << endl;
+
+    }
+    else
+    {   //  kCR, kZeroOne
+        sourcefile << "      " << GetNameOutput(0) << " = "<< minOutput[0] << "D0 + " << maxOutput[0] << "D0 * VOut;" << endl;
+    }
+
+    sourcefile << endl;
+    sourcefile << "C --- " << endl;
+    sourcefile << "      RETURN" << endl;
+    sourcefile << "      END" << endl;
+
+    sourcefile.close();
+}
+
+/**
+ * Export the current model as a NeuralNetwork function in a Python file.
+ * @brief Specific to NeuralNetwork
+ * @param file Name of the file  
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ */
+void PMMLlib::ExportNeuralNetworkPython(std::string file, 
+                                        std::string functionName, 
+                                        std::string header)
+{
+    string str(ExportNeuralNetworkPyStr(functionName, header));
+    // Write the file
+    ofstream exportfile(file.c_str()); 
+    exportfile << str;
+    exportfile.close();  
+}
+
+
+/**
+ * Export the current model as a function in a Python string.
+ * @brief Specific to NeuralNetwork
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ * @return Function as a string
+ */ 
+std::string PMMLlib::ExportNeuralNetworkPyStr(std::string functionName, 
+                                              std::string header)
+{
+    CheckNeuralNetwork();
+    
+    ostringstream out;
+  
+    // Get the different values required
+    int nInput = GetNbInputs();
+    int nOutput = GetNbOutputs();
+    int nHidden = GetNbNeuronsAtLayer(0);
+    int nNeurons = nInput+nOutput+nHidden;
+    int nWeights = nHidden*(nInput+nOutput+1)+nOutput;
+    int normType = GetNormalizationType();
+    // Build min/max input/output vectors
+    vector<double> minInput(nInput);
+    vector<double> maxInput(nInput);
+    vector<double> minOutput(nOutput);
+    vector<double> maxOutput(nOutput);
+    vector<double> valW(nWeights);
+    fillVectorsForExport(nInput,nOutput,nHidden,normType,minInput,maxInput,minOutput,maxOutput,valW);
+
+    // Shebang and imports
+    out << "#!/usr/bin/env python" << endl;
+    out << "# -*- coding: utf-8 -*-" << endl;
+    out << endl;
+    out << "from math import tanh, exp" << endl;
+    out << endl;
+
+    // ActivationFunction
+    if( normType==0 )
+    {   // kMinusOneOne
+        out << "def ActivationFunction(sum): " << endl;
+        out << "    return tanh(sum); " << endl;
+    }
+    else
+    {   //  kCR, kZeroOne
+        out << "def ActivationFunction(sum): " << endl;
+        out << "    return ( 1.0 / ( 1.0 + exp( -1.0 * sum ) ) ); " << endl;        
+    }
+
+    out << endl;
+    out << "def " << functionName <<"(param):" << endl;
+    out << endl;
+
+    // header
+    out << "    ############################## " << endl;
+    out << "    #" << endl;
+    // insert comments in header
+    header = "    # " + header;
+    size_t pos = 0;
+    while ((pos = header.find("\n", pos)) != std::string::npos) 
+    {
+        header.replace(pos, 1, "\n    #");
+        pos += 5;
+    }
+    out << header << endl;
+    out << "    #" << endl;
+    out << "    ############################## " << endl;
+    out << endl;
+
+    // Initialisations
+    out << "    nInput = " << nInput << ";" << endl;
+    out << "    nOutput = " << nOutput << ";" << endl;
+    out << "    nHidden = " <<  nHidden << ";" << endl;
+    out << "    nNeurones = " <<  nNeurons << ";" << endl;
+    out << "    " << functionName << "_act = [];" << endl;
+    out << "    res = [];" << endl;    
+    out << endl;
+
+    out << "    # --- Preprocessing of the inputs and outputs" << endl;
+    out << "    " << functionName << "_minInput = [" << endl << "  ";
+    out << "    " ;
+    for(int i=0 ; i<nInput ; i++)
+    {
+        out << minInput[i] << ", ";
+        if( (i+1)%5==0 )
+        {
+            out << endl ;
+            out << "    " ;
+        }
+    }
+    out << endl <<  "    ];" << endl;
+
+    out << "    " << functionName << "_minOutput = [" << endl << "    ";
+    out << "    " << minOutput[0] ;
+    out << endl << "    ];" << endl;
+
+    out << "    " << functionName << "_maxInput = [" << endl << "    ";
+    for(int i=0 ; i<nInput ; i++)
+    {
+        out << maxInput[i] << ", ";
+        if( (i+1)%5==0 )
+        {
+            out << endl;
+            out << "    " ;
+        }
+    }
+    out << endl << "    ];" << endl;
+
+    out << "    " << functionName << "_maxOutput = [" << endl << "    ";
+    out << "    " << maxOutput[0] ;
+    out << endl << "    ];" << endl;
+
+    // Weights vector
+    out << "    # --- Values of the weights" << endl;
+    out << "    " << functionName << "_valW = [" << endl << "    ";
+    for(int i=0 ; i<nWeights ; i++)
+    {
+        out << valW[i] << ", ";
+        if ( (i+1)%5 == 0 )
+        {
+            out << endl;
+            out << "    " ;
+        }
+    }
+    out << endl << "    ];"<<endl;
+
+    out << "    # --- Constants" << endl;
+    out << "    indNeurone = 0;" << endl;
+    out << endl;
+    
+    // input layer
+    out << "    # --- Input Layers" << endl;
+    out << "    for i in range(nInput) :" << endl;
+    if( normType==0 )
+    {   // kMinusOneOne
+        out << "        " << functionName << "_act.append( 2.0 * ( param[i] - "
+                   << functionName << "_minInput[i] ) / ( " << functionName << "_maxInput[i] - "
+                   << functionName << "_minInput[i] ) - 1.0 ) ;"
+                   << endl;                  
+    }
+    else
+    {   //  kCR, kZeroOne
+        out << "        " << functionName << "_act.append( ( param[i] - "
+                   << functionName << "_minInput[i] ) / " << functionName << "_maxInput[i] ) ;"
+                   << endl;
+    }   
+    out << "        indNeurone += 1 ;" << endl;
+    out << "        pass" << endl;
+    
+    // hidden layer
+    out << endl;
+    out << "    # --- Hidden Layers" << endl;
+    out << "    for member in range(nHidden):" << endl;
+    out << "        CrtW = member * ( nInput + 2) + 2;" << endl;
+    out << "        sum = " << functionName << "_valW[CrtW];" << endl;
+    out << "        CrtW += 1 ;" << endl;     
+    out << "        for source in range(nInput) :" << endl;
+    out << "            sum += " << functionName << "_act[source] * " << functionName << "_valW[CrtW];" << endl;
+    out << "            CrtW += 1 ;" << endl; 
+    out << "            pass" << endl;
+    out << "        " << functionName << "_act.append( ActivationFunction(sum) ) ;" << endl;
+    out << "        indNeurone += 1 ;" << endl;
+    out << "        pass" << endl;
+    out << endl;
+    
+    // output layer
+    out << "    # --- Output"<<endl;
+    out << "    for member in range(nOutput):" << endl; 
+    out << "        sum = " << functionName << "_valW[0];" << endl;
+    out << "        for source in range(nHidden):" << endl;
+    out << "            CrtW = source * ( nInput + 2) + 1;"<<endl;
+    out << "            sum += " << functionName << "_act[nInput+source] * " << functionName << "_valW[CrtW];" << endl;
+    out << "            pass" << endl;
+    out << "        " << functionName << "_act.append( sum );" << endl;
+    out << "        indNeurone += 1 ;" << endl;
+    if( normType==0 )
+    {   // kMinusOneOne
+        out << "        res[member] = " << functionName
+                   << "_minOutput[member] + 0.5 * ( " << functionName
+                   << "_maxOutput[member] - " << functionName
+                   << "_minOutput[member] ) * ( sum + 1.0);" << endl;
+    }
+    else
+    {   //  kCR, kZeroOne
+        out << "        res.append( " << functionName
+                   << "_minOutput[member] + " << functionName
+                   << "_maxOutput[member] * sum );" << endl;
+    }
+    out << "        pass" << endl;
+    out << endl;
+    
+    // return result
+    out << "    return res;" << endl << endl;
+    out << endl;    
+   
+    return out.str(); 
+}
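+
+/*
+ * Sketch of how the generated Python function can be used (function name and
+ * input values are hypothetical):
+ *
+ *   # myAnn.py was produced by ExportNeuralNetworkPython("myAnn.py", "myAnn", header)
+ *   from myAnn import myAnn
+ *   res = myAnn([1.5, 2.0, 0.3])   # one value per network input
+ *
+ * The function returns the list 'res' holding the denormalized output(s).
+ */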
+
+//**************************************************************
+//                                                             *
+//                                                             *
+//                                                             *
+//  Methods specific to the RegressionModel                    *
+//                                                             *
+//                                                             *
+//                                                             *
+//**************************************************************
+
+/**
+ * Check if the current model type is kLR.
+ * @brief Called in all methods specific to the RegressionModel model.
+ * @brief Throw an exception if the model type is not kLR.
+ */
+void PMMLlib::CheckRegression()
+{
+    if ( _currentModelType != kLR )
+        throw string("Use this method with Regression models."); 
+}
+
+/**
+ * Get the pointer to the regression model node.
+ * @param name Name of the regression model 
+ * @return Pointer to the XML node
+ */
+xmlNodePtr PMMLlib::GetRegressionPtr(std::string name)
+{
+    return GetPtr(name, GetTypeString() );  
+}   
+
+/**
+ * Add a RegressionModel to the root node
+ * @brief Specific to RegressionModel
+ * @param modelName Name of the model (Value of property "modelName")
+ * @param functionName Value of property "functionName"
+ * @param targetFieldName Value of Property "targetFieldName"
+ */
+void PMMLlib::AddRegressionModel(std::string modelName, 
+                                 PMMLMiningFunction functionName, 
+                                 std::string targetFieldName)
+{
+    _currentModelType = kLR;
+    _currentModelName = modelName;
+    // Check regression after setting model type!
+    CheckRegression();
+    
+    string function;
+    switch(functionName)
+    {
+        case kREGRESSION:
+            function = "regression"; 
+            break;
+    }
+    xmlNodePtr netNode = xmlNewChild(_rootNode, 0, (const xmlChar*)"RegressionModel", 0);     
+    xmlNewProp(netNode, (const xmlChar*)"functionName", (const xmlChar*)(function.c_str()) );
+    xmlNewProp(netNode, (const xmlChar*)"modelName", (const xmlChar*)(_currentModelName.c_str()) );
+    xmlNewProp(netNode, (const xmlChar*)"targetFieldName", (const xmlChar*)(targetFieldName.c_str()) );
+    _currentModelNode = netNode ;
+}
+
+/**
+ * Add a RegressionTable to the Regression model.
+ * @brief No property "intercept" will be set.
+ * @brief Specific to RegressionModel 
+ */
+void PMMLlib::AddRegressionTable()
+{
+    CheckRegression();  
+    xmlNodePtr tableNode = xmlNewChild(_currentModelNode, 0, (const xmlChar*)"RegressionTable", 0);
+    _currentNode = tableNode;
+}
+
+/**
+ * Add a RegressionTable to the Regression model with a given value of property "intercept".
+ * @brief Specific to RegressionModel 
+ * @param intercept Value of property "intercept"
+ */
+void PMMLlib::AddRegressionTable(double intercept)
+{
+    CheckRegression();
+
+    stringstream ss;
+    xmlNodePtr tableNode = xmlNewChild(_currentModelNode, 0, (const xmlChar*)"RegressionTable", 0);
+    if(intercept!=0.0)
+    {
+        ss << scientific << intercept;
+        xmlNewProp(tableNode, (const xmlChar*)"intercept", (const xmlChar*)(ss.str().c_str()) );
+    }
+    _currentNode = tableNode;
+}
+
+/**
+ * Add a numeric predictor to the Regression model.
+ * @brief Specific to RegressionModel 
+ * @param neuronName  Value of property "name"
+ * @param exponent Value of property "exponent"
+ * @param coefficient Value of property "coefficient"
+ */
+void PMMLlib::AddNumericPredictor(std::string neuronName, 
+                                  int exponent, 
+                                  double coefficient)
+{
+    CheckRegression(); 
+    stringstream ss;
+    xmlNodePtr numPrecNode = xmlNewChild(_currentNode, 0, (const xmlChar*)"NumericPredictor", 0);
+    xmlNewProp(numPrecNode, (const xmlChar*)"name", (const xmlChar*)(neuronName.c_str()) );
+    ss.str("");  ss << exponent;
+    xmlNewProp(numPrecNode, (const xmlChar*)"exponent", (const xmlChar*)(ss.str().c_str()) );
+    ss.str("");  ss << scientific << coefficient;
+    xmlNewProp(numPrecNode, (const xmlChar*)"coefficient", (const xmlChar*)(ss.str().c_str()) );
+}
+
+/**
+ * Add a predictor term to the Regression model.
+ * @brief Specific to RegressionModel  
+ * @param coefficient Value of property "coefficient"
+ * @param fieldRef List of values for property "field", one per FieldRef to add to the PredictorTerm
+ */
+void PMMLlib::AddPredictorTerm(double coefficient,
+                               std::vector<std::string> fieldRef)
+{
+    CheckRegression();
+    stringstream ss;
+    xmlNodePtr predTermNode = xmlNewChild(_currentNode, 0, (const xmlChar*)"PredictorTerm", 0);
+    ss.str("");  ss << scientific << coefficient;
+    xmlNewProp(predTermNode, (const xmlChar*)"coefficient", (const xmlChar*)(ss.str().c_str()) );
+    vector<string>::iterator it;
+    for(it=fieldRef.begin() ; it!=fieldRef.end() ; it++)
+    {
+        xmlNodePtr fieldRefNode = xmlNewChild(predTermNode, 0, (const xmlChar*)"FieldRef", 0);
+        ss.str(""); ss << (*it);
+        xmlNewProp(fieldRefNode, (const xmlChar*)"field", (const xmlChar*)(ss.str().c_str()) );
+    }
+}
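+
+/*
+ * Illustrative usage sketch (model and field names are hypothetical): build a
+ * regression table describing y = 3.5 + 0.75*x0 + 1.25*x0*x1.
+ *
+ *   lib.AddRegressionModel("LRModel", kREGRESSION, "y");
+ *   lib.AddRegressionTable(3.5);
+ *   lib.AddNumericPredictor("x0", 1, 0.75);
+ *   std::vector<std::string> fields;
+ *   fields.push_back("x0");
+ *   fields.push_back("x1");
+ *   lib.AddPredictorTerm(1.25, fields);
+ */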
+
+/**
+ * Check if the RegressionTable has a property called "intercept".
+ * @brief Specific to RegressionModel  
+ * @return true if it has, false otherwise
+ */
+bool PMMLlib::HasIntercept()
+{
+    CheckRegression();
+    bool b = false;   
+    xmlNodePtr tableNode = GetChildByName(_currentModelNode,"RegressionTable");
+    if ( tableNode == NULL )
+      return b;         
+    xmlChar *xp = _stringToXmlChar("intercept");
+    xmlChar * attr ;
+    attr = xmlGetProp(tableNode, xp);
+    if ( attr ) 
+    {
+        xmlFree(attr);
+        xmlFree(xp);  
+        return true;
+    }  
+    xmlFree(xp);  
+    return false;
+}
+
+/**
+ * Get the value of property "intercept" in the RegressionTable.
+ * @brief Specific to RegressionModel  
+ * @return Value of property "intercept"
+ */
+double PMMLlib::GetRegressionTableIntercept()
+{
+    CheckRegression();
+    double reg = 0.;
+    xmlNodePtr tableNode = GetChildByName(_currentModelNode,"RegressionTable");
+    if ( tableNode == NULL )
+        return reg; 
+    string strValue = _getProp(tableNode, string("intercept") );
+    return atof(strValue.c_str());
+}
+
+/**
+ * Get the number of numeric predictors.
+ * @brief Specific to RegressionModel  
+ * @return Number of numeric predictors
+ */
+int PMMLlib::GetNumericPredictorNb()
+{
+    CheckRegression();
+    
+    int nb=0;   
+    xmlNodePtr tableNode = GetChildByName(_currentModelNode,"RegressionTable");
+    if ( tableNode == NULL )
+        return nb;
+    xmlNodePtr numPredNodes =  tableNode->children;
+    while (numPredNodes != NULL )
+    {
+        if ( string((const char*)(numPredNodes->name)) == "NumericPredictor" )
+            nb++;
+        numPredNodes = numPredNodes->next;
+    }    
+    return nb;
+}
+
+/**
+ * Get the number of predictor terms.
+ * @brief Specific to RegressionModel  
+ * @return Number of predictor terms
+ */
+int PMMLlib::GetPredictorTermNb()
+{
+    CheckRegression();
+    int nb=0;    
+    xmlNodePtr tableNode = GetChildByName(_currentModelNode,"RegressionTable");
+    if ( tableNode == NULL )
+      return nb;  
+    xmlNodePtr numPredNodes =  tableNode->children;
+    while ( numPredNodes != NULL )      
+    {
+        if ( string((const char*)(numPredNodes->name)) == "PredictorTerm" )
+            nb++;
+        numPredNodes = numPredNodes->next;
+    }    
+    return nb;
+}
+
+/**
+ * Get the name of the numeric predictor given by its index.
+ * @brief Specific to RegressionModel  
+ * @param num_pred_index Index of the numeric predictor
+ * @return Name of the numeric predictor
+ */
+std::string PMMLlib::GetNumericPredictorName(int num_pred_index)
+{
+    CheckRegression();
+    string strName("");   
+    xmlNodePtr numPredNodes = GetChildByName(_currentModelNode,"RegressionTable");
+    if ( numPredNodes == NULL )
+        return strName;
+    
+    numPredNodes = GetChildByName(numPredNodes,"NumericPredictor");
+    if ( numPredNodes == NULL )
+        return strName;
+    // Move to the requested NumericPredictor node
+    for(int i=0;i<num_pred_index;i++)
+    {
+        numPredNodes = numPredNodes->next;
+        if ( numPredNodes == NULL || 
+             string((const char*)(numPredNodes->name)) != "NumericPredictor" )
+            return strName;        
+    }        
+    strName = _getProp(numPredNodes, string("name"));
+    return strName;
+}
+
+/**
+ * Get the name of the predictor term given by its index.
+ * @brief Specific to RegressionModel  
+ * @param pred_term_index Index of the predictor term 
+ * @return Name of the predictor term
+ */
+std::string PMMLlib::GetPredictorTermName(int pred_term_index)
+{
+    CheckRegression();
+    string strName("");   
+    xmlNodePtr fieldRefNodes = GetChildByName(_currentModelNode,"RegressionTable");
+    if ( fieldRefNodes == NULL )
+        return strName;    
+    
+    fieldRefNodes = GetChildByName(fieldRefNodes,"PredictorTerm");
+    if ( fieldRefNodes == NULL )
+        return strName;
+    // Move to the requested PredictorTerm node
+    for(int i=0;i<pred_term_index;i++)
+    {
+        fieldRefNodes = fieldRefNodes->next;
+        if ( fieldRefNodes == NULL || 
+            string((const char*)(fieldRefNodes->name)) != "PredictorTerm" )
+            return strName;        
+    }  
+    
+    fieldRefNodes = fieldRefNodes->children;
+    while (fieldRefNodes != NULL)
+    {
+        strName += _getProp(fieldRefNodes, string("field"));
+        fieldRefNodes = fieldRefNodes->next;
+    }        
+    return strName;
+}
+
+/**
+ * Get the coefficient of the numeric predictor given by its index.
+ * @brief (The coefficient is the value of property "coefficient")
+ * @brief Specific to RegressionModel  
+ * @param num_pred_index Index of the numeric predictor
+ * @return Coefficient of the numeric predictor
+ */
+double PMMLlib::GetNumericPredictorCoefficient(int num_pred_index)
+{
+    CheckRegression();
+    
+    double coef = 0.;  
+    xmlNodePtr numPredNodes = GetChildByName(_currentModelNode,"RegressionTable");
+    if ( numPredNodes == NULL )
+        return coef;    
+    numPredNodes = GetChildByName(numPredNodes,"NumericPredictor");
+    if ( numPredNodes == NULL )
+        return coef;
+    // Move to the requested NumericPredictor node
+    for(int i=0;i<num_pred_index;i++)
+    {
+        numPredNodes = numPredNodes->next;
+        if ( numPredNodes == NULL || 
+             string((const char*)(numPredNodes->name)) != "NumericPredictor" )
+            return coef;        
+    }  
+    string strValue = _getProp(numPredNodes, string("coefficient")); 
+    coef = atof(strValue.c_str());
+    return coef;  
+}
+
+/**
+ * Get the coefficient of the predictor term given by its index.
+ * @brief (The coefficient is the value of property "coefficient")
+ * @brief Specific to RegressionModel  
+ * @param pred_term_index Index of the predictor term
+ * @return Coefficient of the predictor term
+ */
+double PMMLlib::GetPredictorTermCoefficient(int pred_term_index)
+{
+    CheckRegression();
+    
+    double coef = 0.;   
+    xmlNodePtr predTermNodes = GetChildByName(_currentModelNode,"RegressionTable");
+    if ( predTermNodes == NULL )
+        return coef;    
+    predTermNodes = GetChildByName(predTermNodes,"PredictorTerm");
+    if ( predTermNodes == NULL )
+        return coef;
+    // Move to the requested PredictorTerm node
+    for(int i=0;i<pred_term_index;i++)
+    {
+        predTermNodes = predTermNodes->next;
+        if ( predTermNodes == NULL || 
+             string((const char*)(predTermNodes->name)) != "PredictorTerm" )
+            return coef;        
+    }  
+    string strValue = _getProp(predTermNodes, string("coefficient")); 
+    coef = atof(strValue.c_str());
+    return coef;
+}
+
+/**
+ * Get the number of FieldRef for the predictor term given by its index.
+ * @brief Specific to RegressionModel   
+ * @param index Index of the predictor term
+ * @return Number of FieldRef
+ */
+int PMMLlib::GetPredictorTermFieldRefNb(int index)
+{
+    CheckRegression();
+    
+    int nb=0;
+    xmlNodePtr fieldRefNodes = GetChildByName(_currentModelNode,"RegressionTable");
+    if ( fieldRefNodes == NULL )
+      return nb;
+    fieldRefNodes = GetChildByName(fieldRefNodes,"PredictorTerm");
+    if ( fieldRefNodes == NULL )
+        return nb;
+    // Move to the requested PredictorTerm node
+    for(int i=0;i<index;i++)
+    {
+        fieldRefNodes = fieldRefNodes->next;
+        if ( fieldRefNodes == NULL || 
+             string((const char*)(fieldRefNodes->name)) != "PredictorTerm" )
+            return nb;        
+    }  
+    fieldRefNodes = fieldRefNodes->children;
+    while (fieldRefNodes != NULL)
+    {
+        nb++;
+        fieldRefNodes = fieldRefNodes->next;
+    }    
+    return nb;
+}
+
+/**
+ * Get the name of the field_index-th FieldRef for the pred_term_index-th predictor term.
+ * @brief (The name is the value of property "field")
+ * @brief Specific to RegressionModel   
+ * @param pred_term_index Index of the predictor term
+ * @param field_index Index of the FieldRef
+ * @return Name of the FieldRef
+ */
+std::string PMMLlib::GetPredictorTermFieldRefName(int pred_term_index, int field_index)   
+{
+    CheckRegression();
+    
+    string strName("");    
+    xmlNodePtr fieldRefNodes = GetChildByName(_currentModelNode,"RegressionTable");
+    if ( fieldRefNodes == NULL )
+      return strName;
+    fieldRefNodes = GetChildByName(fieldRefNodes,"PredictorTerm");
+    if ( fieldRefNodes == NULL )
+        return strName;
+    // Move to the requested PredictorTerm node
+    for(int i=0;i<pred_term_index;i++)
+    {
+        fieldRefNodes = fieldRefNodes->next;
+        if ( fieldRefNodes == NULL || 
+             string((const char*)(fieldRefNodes->name)) != "PredictorTerm" )
+            return strName;        
+    }  
+    fieldRefNodes = fieldRefNodes->children;
+    if ( fieldRefNodes == NULL )
+        return strName;
+    // Move to the requested FieldRef node
+    for(int i=0;i<field_index;i++)
+    {
+        fieldRefNodes = fieldRefNodes->next;
+        if ( fieldRefNodes == NULL )
+            return strName;        
+    }     
+    strName = _getProp(fieldRefNodes, string("field"));
+    return strName;
+}
+
+/**
+ * Export the current model as a linear regression function in a Cpp file.
+ * @brief Specific to RegressionModel  
+ * @param file Name of the file  
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ */
+void PMMLlib::ExportLinearRegressionCpp(std::string file, 
+                                        std::string functionName, 
+                                        std::string header)
+{
+    CheckRegression();
+
+    // Write the file
+    ofstream exportfile(file.c_str());
+    
+    exportfile << "void " << functionName <<"(double *param, double *res)" << endl;
+    exportfile << "{" << endl;    
+    // header
+    exportfile << "  ////////////////////////////// " << endl;
+    exportfile << "  //" << endl;
+    // insert comments in header
+    header = "  // " + header;
+    size_t pos = 0;
+    while ((pos = header.find("\n", pos)) != std::string::npos) 
+    {
+        header.replace(pos, 1, "\n  //");
+        pos += 5;
+    }
+    exportfile << header << endl;
+    exportfile << "  //" << endl;
+    exportfile << "  ////////////////////////////// " << endl << endl;  
+
+    double intercept = 0.0;    
+    if ( HasIntercept() ) 
+    {
+        exportfile << "  // Intercept"<< endl;
+        intercept = GetRegressionTableIntercept();
+    }
+    else 
+        exportfile << "  // No Intercept"<< endl;
+    exportfile << "  double y = " << intercept << ";";
+    exportfile << endl << endl;
+     
+    int nNumPred = GetNumericPredictorNb();
+    for (int i=0; i<nNumPred; i++)
+    {
+       exportfile << "  // Attribute : " << GetNumericPredictorName(i) << endl;
+       exportfile << "  y += param["<<i<<"]*" << GetNumericPredictorCoefficient(i) << ";";
+       exportfile << endl << endl;
+    }
+    int nPredTerm = GetPredictorTermNb();
+    for (int i=0; i<nPredTerm; i++)
+    {
+       exportfile << "  // Attribute : " << GetPredictorTermName(i) << endl;
+       exportfile << "  y += param["<<(i+nNumPred)<<"]*" << GetPredictorTermCoefficient(i) << ";";
+       exportfile << endl << endl;
+    }    
+    
+    exportfile << "  // Return the value"<< endl;
+    exportfile << "  res[0] = y;" << endl;
+    exportfile << "}" << endl;    
+    exportfile.close(); 
+}
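+
+/*
+ * Sketch of the generated code for a model with an intercept of 3.5 and a
+ * single numeric predictor x0 with coefficient 0.75 (names are hypothetical,
+ * the header comment block is omitted here):
+ *
+ *   void myLR(double *param, double *res)
+ *   {
+ *     // Intercept
+ *     double y = 3.5;
+ *     // Attribute : x0
+ *     y += param[0]*0.75;
+ *     // Return the value
+ *     res[0] = y;
+ *   }
+ */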
+
+/**
+ * Export the current model as a linear regression function in a Fortran file.
+ * @brief Specific to RegressionModel
+ * @param file Name of the file  
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ */
+void PMMLlib::ExportLinearRegressionFortran(std::string file, 
+                                            std::string functionName, 
+                                            std::string header)
+{
+    CheckRegression();
+
+    int nNumPred = GetNumericPredictorNb();
+    int nPredTerm = GetPredictorTermNb(); 
+    vector<string>strParam(nNumPred+nPredTerm);
+    for(int i=0; i<(nNumPred+nPredTerm); i++)
+    {
+        strParam[i] = "P" + NumberToString(i) ;
+    }    
+    
+    // Write the file
+    ofstream exportfile(file.c_str());
+    
+    exportfile << "      SUBROUTINE " << functionName <<"(";
+    for(int i=0; i<(nNumPred+nPredTerm); i++)
+    {
+        exportfile << strParam[i] << ", ";
+    }    
+    exportfile << "RES)" << endl;    
+    // header
+    exportfile << "C --- *********************************************" << endl;
+    exportfile << "C --- " << endl;
+    // insert comments in header
+    header = "C ---  " + header;
+    size_t pos = 0;
+    while ((pos = header.find("\n", pos)) != std::string::npos) 
+    {
+        header.replace(pos, 1, "\nC --- ");
+        pos += 5;
+    }
+    exportfile << header << endl;
+    exportfile << "C --- " << endl;
+    exportfile << "C --- *********************************************" << endl << endl;  
+
+    exportfile << "      IMPLICIT DOUBLE PRECISION (P)" << endl;
+    exportfile << "      DOUBLE PRECISION RES" << endl;    
+    exportfile << "      DOUBLE PRECISION Y" << endl;
+    exportfile << endl;    
+    
+    double intercept = 0.0;    
+    if ( HasIntercept() ) 
+    {
+        exportfile << "C --- Intercept"<< endl;
+        intercept = GetRegressionTableIntercept();
+    }
+    else 
+        exportfile << "C --- No Intercept"<< endl;
+    exportfile << "      Y = " << intercept << ";";
+    exportfile << endl << endl;
+     
+    for (int i=0; i<nNumPred; i++)
+    {
+       exportfile << "C --- Attribute : " << GetNumericPredictorName(i) << endl;
+       exportfile << "      Y += P["<<i<<"]*" << GetNumericPredictorCoefficient(i) << ";";
+       exportfile << endl << endl;
+    }
+
+    for (int i=0; i<nPredTerm; i++)
+    {
+       exportfile << "C --- Attribute : " << GetPredictorTermName(i) << endl;
+       exportfile << "      Y += P["<<(i+nNumPred)<<"]*" << GetPredictorTermCoefficient(i) << ";";
+       exportfile << endl << endl;
+    }    
+    
+    exportfile << "C --- Return the value"<< endl;
+    exportfile << "      RES = Y " << endl;
+    exportfile << "      RETURN" << endl;    
+    exportfile << "      END" << endl;    
+    exportfile.close(); 
+}
+
+/**
+ * Export the current model as a linear regression function in a Python file.
+ * @brief Specific to RegressionModel
+ * @param file Name of the file  
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ */
+void PMMLlib::ExportLinearRegressionPython(std::string file, 
+                                           std::string functionName, 
+                                           std::string header)
+{
+    string str(ExportLinearRegressionPyStr(functionName, header));
+    // Write the file
+    ofstream exportfile(file.c_str()); 
+    exportfile << str;
+    exportfile.close(); 
+}
+
+/**
+ * Export the current model as a linear regression function in a Python string.
+ * @brief Specific to RegressionModel
+ * @param functionName Name of the function  
+ * @param header Header of the function  
+ * @return Function as a string
+ */
+std::string PMMLlib::ExportLinearRegressionPyStr(std::string functionName, 
+                                                 std::string header)
+{
+    CheckRegression();
+
+    ostringstream out;
+    
+    // Shebang and imports
+    out << "#!/usr/bin/env python" << endl;
+    out << "# -*- coding: utf-8 -*-" << endl;
+    out << endl;    
+    
+    // Function
+    out << "def " << functionName <<"(param):" << endl;
+    out << endl; 
+    
+    // header
+    out << "    ############################## " << endl;
+    out << "    # " << endl;
+    // insert comments in header
+    header = "    # " + header;
+    size_t pos = 0;
+    while ((pos = header.find("\n", pos)) != std::string::npos) 
+    {
+        header.replace(pos, 1, "\n    #");
+        pos += 5;
+    }    
+    out << header << endl;
+    out << "    # " << endl;
+    out << "    ############################## " << endl << endl;  
+
+    double intercept = 0.0;    
+    if ( HasIntercept() ) 
+    {
+        out << "    #  Intercept"<< endl;
+        intercept = GetRegressionTableIntercept();
+    }
+    else 
+        out << "    #  No Intercept"<< endl;
+    out << "    y = " << intercept << ";";
+    out << endl << endl;
+     
+    int nNumPred = GetNumericPredictorNb();
+    for (int i=0; i<nNumPred; i++)
+    {
+       out << "    #  Attribute : " << GetNumericPredictorName(i) << endl;
+       out << "    y += param["<<i<<"]*" << GetNumericPredictorCoefficient(i) << ";";
+       out << endl << endl;
+    }
+    int nPredTerm = GetPredictorTermNb();
+    for (int i=0; i<nPredTerm; i++)
+    {
+       out << "    #  Attribute : " << GetPredictorTermName(i) << endl;
+       out << "    y += param["<<(i+nNumPred)<<"]*" << GetPredictorTermCoefficient(i) << ";";
+       out << endl << endl;
+    }    
+    }    
+    
+    out << "    #  Return the value"<< endl;
+    out << "    return [y];" << endl;
+        
+    return out.str() ;
+}
+
+/**
+ * Read the structure of the regression model
+ * @brief Specific to RegressionModel 
+ * @return Structure read
+ */
+std::string PMMLlib::ReadRegressionStructure()
+{   
+    CheckRegression(); 
+
+    string structure("");    
+    string structureActive("");
+    string structurePredicted("@");
+    int nPred = 0;
+    xmlNodePtr mNode = GetChildByName(_currentModelNode,"MiningSchema");
+    if ( mNode != NULL )
+    {
+        xmlNodePtr dNode = GetChildByName(mNode,"MiningField");     
+        while (dNode != NULL)
+        {
+            string name = _getProp(dNode, string("name"));
+            string usage = _getProp(dNode, string("usageType"));
+            if ( usage == "active" )
+            {
+                structureActive += name;
+                structureActive += ":"; 
+            }    
+            else if ( usage == "predicted" )
+            {
+                structurePredicted += name;
+                structurePredicted += ":"; 
+                nPred++;
+            }                 
+            dNode = dNode->next;             
+        }
+        // Delete the last ":"
+        if ( structureActive.length() > 0 )
+            structureActive.erase(structureActive.size()-1);        
+        structurePredicted.erase(structurePredicted.size()-1);
+    }
+    std::ostringstream oss;
+    oss << nPred;   
+    structure = structureActive + ","  + oss.str() + "," + structurePredicted;
+    return structure;
+}
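+
+/*
+ * Example of the returned structure string (field names are hypothetical): for
+ * a MiningSchema with active fields "x0" and "x1" and one predicted field "y",
+ * ReadRegressionStructure() returns "x0:x1,1,@y".
+ */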
+
+} // end of namespace
+
+
diff --git a/src/pmml/PMMLlib.hxx b/src/pmml/PMMLlib.hxx
new file mode 100755 (executable)
index 0000000..6724f66
--- /dev/null
@@ -0,0 +1,301 @@
+//////////////////////////////////////////////////////////////
+// Copyright (C) 2013 CEA/DEN
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published
+// by the Free Software Foundation, either version 3 of the License, or any
+// later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//////////////////////////////////////////////////////////////
+/*!
+  \file   PMMLlib.hxx
+  \author InckA
+  \date   Wed Nov 20 11:04:17 2013
+
+  \brief  Header of the PMMLlib class
+
+ */
+
+#ifndef __PMMLLIB_H__
+#define __PMMLLIB_H__
+
+#include "PMMLwin.hxx"
+#include <libxml/xpathInternals.h>
+#include <string>
+#include <vector>
+#include <sstream>
+
+namespace PMMLlib
+{
+
+template <typename T>
+std::string NumberToString ( T Number )
+{
+     std::ostringstream ss;
+     ss << Number;
+     return ss.str();
+}
+
+/**
+ * Enumeration to type the PMML file.
+ * UNDEFINED: not yet defined
+ * ANN: Artificial Neural Network
+ * LR:  Linear Regression
+ *
+ * @see http://www.dmg.org/v4-1/GeneralStructure.html#xsdGroup_MODEL-ELEMENT
+ */
+enum PMMLType{kUNDEFINED, kANN, kLR};
+
+
+/**
+ * @see http://www.dmg.org/v4-1/NeuralNetwork.html#xsdType_ACTIVATION-FUNCTION
+ */
+enum PMMLActivationFunction{kIDENTITY, kTANH, kLOGISTIC};
+
+
+/**
+ * @see http://www.dmg.org/v4-1/GeneralStructure.html#xsdType_MINING-FUNCTION
+ */
+enum PMMLMiningFunction{kREGRESSION};
+
+
+/**
+ * Class PMMLlib
+ */
+class PMMLlib
+{
+  
+private:
+    bool _log;                      //!< Log Printing
+    std::string _pmmlFile;          //!< Name of the associated PMML file
+    xmlDocPtr _doc;                 //!< Associated DOM document
+    xmlNodePtr _rootNode;           //!< Root node of the document
+    xmlNodePtr _currentNode;        //!< Pointer to the current node    
+    int _nbModels;                  //!< Number of models (all kinds)
+    std::string _currentModelName;  //!< Name of the current model    
+    PMMLType _currentModelType;     //!< Type of the current model 
+    xmlNodePtr _currentModelNode;   //!< Pointer to the current model node    
+
+    /** @defgroup general General methods
+     *  Common methods to all kinds of PMML files and models
+     *  @{
+     */
+public:    
+    PMMLLIB_EXPORT PMMLlib(std::string file, 
+                           bool log=false) ;     
+    PMMLLIB_EXPORT PMMLlib(bool log=false); 
+    PMMLLIB_EXPORT ~PMMLlib();
+    PMMLLIB_EXPORT void SetCurrentModel(std::string modelName, 
+                                        PMMLType type); 
+    PMMLLIB_EXPORT void SetCurrentModel(std::string modelName); 
+    PMMLLIB_EXPORT void SetCurrentModel();                                         
+    PMMLLIB_EXPORT std::string makeLog() const; 
+    PMMLLIB_EXPORT void printLog() const;   
+
+    PMMLLIB_EXPORT void AddDataField(std::string name, 
+                                     std::string displayName,
+                                     std::string optype, 
+                                     std::string dataType, 
+                                     std::string closure, 
+                                     double leftMargin, 
+                                     double rightMargin,
+                                     bool interval=false);
+    PMMLLIB_EXPORT void AddMiningSchema(std::string name, 
+                                        std::string usageType);
+    PMMLLIB_EXPORT void SetHeader(std::string copyright, 
+                                  std::string description, 
+                                  std::string appName, 
+                                  std::string appVersion, 
+                                  std::string annotation);
+    PMMLLIB_EXPORT void UnlinkNode();
+    PMMLLIB_EXPORT void BackupNode();
+    PMMLLIB_EXPORT int GetModelsNb();    
+    PMMLLIB_EXPORT void Write();
+    PMMLLIB_EXPORT void Write(std::string file);  
+    PMMLLIB_EXPORT PMMLType GetCurrentModelType(); 
+    PMMLLIB_EXPORT std::string GetCurrentModelName();    
+private: 
+    xmlNodePtr GetChildByName(xmlNodePtr node, 
+                              std::string nodename);
+    xmlNodePtr GetPtr(int ann_index, 
+                      std::string name);
+    xmlNodePtr GetPtr(std::string ann_name, 
+                      std::string name);
+    void CountModels();
+    int CountNeuralNetModels();
+    int CountRegressionModels();
+    void SetRootNode();
+    std::string GetModelName(xmlNodePtr node);
+    std::string GetTypeString();
+    
+    /** @} */ // end of group general   
+
+
+    /** @defgroup ann Methods dedicated to neural networks
+     *  Methods dedicated to neural networks
+     *  @{
+     */
+public:     
+    PMMLLIB_EXPORT void AddNeuralNetwork(std::string modelName, 
+                                         PMMLMiningFunction functionName);    
+    PMMLLIB_EXPORT void AddNeuralInput(int id, 
+                                       std::string inputName, 
+                                       std::string optype, 
+                                       std::string dataType, 
+                                       double orig1, double norm1, 
+                                       double orig2, double norm2);
+    PMMLLIB_EXPORT void AddNeuralLayer(PMMLActivationFunction activationFunction);
+    PMMLLIB_EXPORT void AddNeuron(int id, 
+                                  double bias, 
+                                  int conNb, 
+                                  int firstFrom, 
+                                  std::vector<double> weights);
+    PMMLLIB_EXPORT void AddNeuralOutput(int outputNeuron, 
+                                        std::string outputName, 
+                                        std::string optype, 
+                                        std::string dataType, 
+                                        double orig1, double norm1, 
+                                        double orig2, double norm2);
+    PMMLLIB_EXPORT int GetNbInputs();
+    PMMLLIB_EXPORT int GetNbOutputs();
+    PMMLLIB_EXPORT std::string GetNameInput(int input_index);
+    PMMLLIB_EXPORT std::string GetNameOutput(int output_index);
+    PMMLLIB_EXPORT int GetNormalizationType();
+    PMMLLIB_EXPORT void GetNormalisationInput(int input_index, 
+                                              double *dnorm);
+    PMMLLIB_EXPORT void GetNormalisationOutput(int output_index, 
+                                               double *dnorm);
+    PMMLLIB_EXPORT int GetNbHiddenLayers();
+    PMMLLIB_EXPORT int GetNbLayers();
+    PMMLLIB_EXPORT int GetNbNeuronsAtLayer(int layer_index);
+    PMMLLIB_EXPORT double GetNeuronBias(int layer_index, 
+                                        int neu_index);
+    PMMLLIB_EXPORT double GetPrecNeuronSynapse(int layer_index, 
+                                               int neu_index, 
+                                               int prec_index);
+    PMMLLIB_EXPORT void SetNeuralNetName(int ann_index, 
+                                         std::string ann_name); 
+    PMMLLIB_EXPORT std::string ReadNetworkStructure();    
+private:   
+    xmlNodePtr GetNeuralNetPtr(std::string ann_name);
+    xmlNodePtr GetNeuralNetPtr(int ann_index);
+    void CheckNeuralNetwork();
+    /** @} */ // end of group ann
+
+
+    /** @defgroup ln Methods dedicated to linear regression
+     *  Methods dedicated to linear regression
+     *  @{
+     */
+public:    
+    PMMLLIB_EXPORT void AddRegressionModel(std::string modelName, 
+                                           PMMLMiningFunction functionName, 
+                                           std::string targetFieldName); 
+    PMMLLIB_EXPORT void AddRegressionTable();
+    PMMLLIB_EXPORT void AddRegressionTable(double intercept);
+    PMMLLIB_EXPORT void AddNumericPredictor(std::string neuronName, 
+                                            int exponent, 
+                                            double coefficient);
+    PMMLLIB_EXPORT void AddPredictorTerm(double coefficient, 
+                                         std::vector<std::string> fieldRef);
+    PMMLLIB_EXPORT bool HasIntercept();    
+    PMMLLIB_EXPORT double GetRegressionTableIntercept();
+    PMMLLIB_EXPORT int GetNumericPredictorNb();
+    PMMLLIB_EXPORT int GetPredictorTermNb();    
+    PMMLLIB_EXPORT std::string GetNumericPredictorName(int num_pred_index);
+    PMMLLIB_EXPORT std::string GetPredictorTermName(int num_pred_index);
+    PMMLLIB_EXPORT double GetNumericPredictorCoefficient(int num_pred_index);
+    PMMLLIB_EXPORT double GetPredictorTermCoefficient(int pred_term_index);
+    PMMLLIB_EXPORT int GetPredictorTermFieldRefNb(int pred_term_index);
+    PMMLLIB_EXPORT std::string GetPredictorTermFieldRefName(int pred_term_index, 
+                                                            int field_index);
+    PMMLLIB_EXPORT std::string ReadRegressionStructure();                                                              
+private:    
+    xmlNodePtr GetRegressionPtr(int reg_index); 
+    xmlNodePtr GetRegressionPtr(std::string reg_name);     
+    void CheckRegression();
+    
+    /** @} */ // end of group ln
+    
+
+    /** @defgroup export Methods dedicated to file export
+     *  Methods dedicated to file export
+     *  @{
+     */
+private:
+    void fillVectorsForExport(int nInput, int nOutput, int nHidden, int normType,
+            std::vector<double> &minInput, std::vector<double> &maxInput,
+            std::vector<double> &minOutput, std::vector<double> &maxOutput,
+            std::vector<double> &valW );
+public:
+    PMMLLIB_EXPORT void ExportCpp(std::string file, 
+                                  std::string functionName, 
+                                  std::string header);
+    PMMLLIB_EXPORT void ExportFortran(std::string file, 
+                                      std::string functionName, 
+                                      std::string header);
+    PMMLLIB_EXPORT void ExportPython(std::string file, 
+                                     std::string functionName, 
+                                     std::string header);
+    PMMLLIB_EXPORT std::string ExportPyStr(std::string functionName, 
+                                           std::string header);
+private:    
+    void ExportNeuralNetworkCpp(std::string file, 
+                                std::string functionName, 
+                                std::string header);
+    void ExportNeuralNetworkFortran(std::string file, 
+                                    std::string functionName, 
+                                    std::string header);
+    void ExportNeuralNetworkPython(std::string file, 
+                                   std::string functionName, 
+                                   std::string header);
+    std::string ExportNeuralNetworkPyStr(std::string functionName, 
+                                         std::string header);
+    
+    void ExportLinearRegressionCpp(std::string, 
+                                   std::string, 
+                                   std::string);
+    void ExportLinearRegressionFortran(std::string, 
+                                       std::string, 
+                                       std::string);
+    void ExportLinearRegressionPython(std::string, 
+                                      std::string, 
+                                      std::string);
+    std::string ExportLinearRegressionPyStr(std::string functionName, 
+                                            std::string header);
+    /** @} */ // end of group export
+
+  private:
+    /*! Conversion from a libxml2 string (xmlChar *) to a standard C++ string.
+     *
+     *    \param xs a constant libxml string.
+     *    \return a C++ std::string (contains the same text as xs).
+     */
+    std::string _xmlCharToString(const xmlChar *xs) const;
+    /*!
+    * Conversion from a standard C++ string to a libxml2 string (xmlChar *)
+    *
+    *    \param s a constant C++ std::string.
+    *    \return a libxml string (contains the same text as s)
+    *
+    * The caller of this function must free the result when it's not needed
+    * anymore (using xmlFree) 
+    */
+    xmlChar * _stringToXmlChar(const std::string &s) const;
+
+    std::string _getProp(const xmlNodePtr node, 
+                         std::string const & prop ) const;
+
+};
+
+}
+
+#endif  //__PMMLLIB_H__
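
For orientation, here is a minimal usage sketch of the read/export side of the class declared above, assuming only the signatures listed in this header; the PMML file name and model name are borrowed from the unit tests further below and are otherwise placeholders, and the exported file name is arbitrary.

    #include "PMMLlib.hxx"
    #include <iostream>
    #include <string>

    int main()
    {
        // Load an existing PMML file and select its neural network model
        // (file and model names are taken from the unit tests below).
        PMMLlib::PMMLlib p("ann_model.pmml", false);
        p.SetCurrentModel("sANNName", PMMLlib::kANN);

        // Query the network structure...
        std::cout << p.GetNbInputs() << " inputs, "
                  << p.GetNbOutputs() << " outputs: "
                  << p.ReadNetworkStructure() << std::endl;

        // ...and export it as a standalone C++ evaluation function.
        p.ExportCpp("ann_model.cpp", "myFunc", "generated from ann_model.pmml");
        return 0;
    }
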
diff --git a/src/pmml/PMMLwin.hxx b/src/pmml/PMMLwin.hxx
new file mode 100755 (executable)
index 0000000..bd546b7
--- /dev/null
@@ -0,0 +1,38 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+#ifndef _PMMLWIN_HXX_
+#define _PMMLWIN_HXX_
+
+#ifdef WIN32
+#  if defined pmmlLib_EXPORTS
+#    define PMMLLIB_EXPORT __declspec( dllexport )
+#  else
+#    define PMMLLIB_EXPORT __declspec( dllimport )
+#  endif
+#else
+#  define PMMLLIB_EXPORT
+#endif
+
+#ifdef WIN32
+#pragma warning( disable : 4290 )
+#endif
+
+#endif
diff --git a/src/pmml/Test/BasicMainTest.hxx b/src/pmml/Test/BasicMainTest.hxx
new file mode 100755 (executable)
index 0000000..9bff03f
--- /dev/null
@@ -0,0 +1,96 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef _BASICMAINTEST_HXX_
+#define _BASICMAINTEST_HXX_
+
+#include <cppunit/CompilerOutputter.h>
+#include <cppunit/TestResult.h>
+#include <cppunit/TestResultCollector.h>
+#include <cppunit/TextTestProgressListener.h>
+#include <cppunit/BriefTestProgressListener.h>
+#include <cppunit/extensions/TestFactoryRegistry.h>
+#include <cppunit/TestRunner.h>
+#include <stdexcept>
+
+#include <iostream>
+#include <fstream>
+
+#ifndef WIN32
+#include <fpu_control.h>
+#endif
+
+// ============================================================================
+/*!
+ *  The main program for unit tests based on the CppUnit package does not
+ *  depend on the actual tests, so the same source is used for all partial unit tests.
+ */
+// ============================================================================
+
+int main(int argc, char* argv[])
+{
+#ifndef WIN32
+  fpu_control_t cw = _FPU_DEFAULT & ~(_FPU_MASK_IM | _FPU_MASK_ZM | _FPU_MASK_OM);
+  _FPU_SETCW(cw);
+#endif
+  // --- Create the event manager and test controller
+  CPPUNIT_NS::TestResult controller;
+
+  // ---  Add a listener that collects the test results
+  CPPUNIT_NS::TestResultCollector result;
+  controller.addListener( &result );        
+
+  // ---  Add a listener that prints dots as the tests run.
+#ifdef WIN32
+  CPPUNIT_NS::TextTestProgressListener progress;
+#else
+  CPPUNIT_NS::BriefTestProgressListener progress;
+#endif
+  controller.addListener( &progress );      
+
+  // ---  Get the top level suite from the registry
+
+  CPPUNIT_NS::Test *suite =
+    CPPUNIT_NS::TestFactoryRegistry::getRegistry().makeTest();
+
+  // ---  Add the test suite to the list of tests to run
+
+  CPPUNIT_NS::TestRunner runner;
+  runner.addTest( suite );
+  runner.run( controller);
+
+  // ---  Print the test results in a compiler-compatible format.
+
+  std::ofstream testFile;
+  testFile.open("UnitTestsResult", std::ios::out |  std::ios::trunc);
+  //CPPUNIT_NS::CompilerOutputter outputter( &result, std::cerr );
+  CPPUNIT_NS::CompilerOutputter outputter( &result, testFile );
+  outputter.write(); 
+
+  // ---  Check whether all tests succeeded.
+
+  bool wasSuccessful = result.wasSuccessful();
+  testFile.close();
+
+  // ---  Return error code 1 if one of the tests failed.
+
+  return wasSuccessful ? 0 : 1;
+}
+
+#endif
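
The main() above only discovers suites that have been registered with CppUnit's TestFactoryRegistry. A fixture opts in through the standard CppUnit macros, roughly as in the sketch below; the class and test names are illustrative and not part of this commit.

    #include <cppunit/extensions/HelperMacros.h>

    class ExampleTest : public CppUnit::TestFixture
    {
        CPPUNIT_TEST_SUITE(ExampleTest);
        CPPUNIT_TEST(testNothing);
        CPPUNIT_TEST_SUITE_END();
    public:
        void testNothing() { CPPUNIT_ASSERT(true); }
    };

    // Makes the suite visible to TestFactoryRegistry::getRegistry().makeTest()
    CPPUNIT_TEST_SUITE_REGISTRATION(ExampleTest);
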
diff --git a/src/pmml/Test/CMakeLists.txt b/src/pmml/Test/CMakeLists.txt
new file mode 100755 (executable)
index 0000000..0c45ab1
--- /dev/null
@@ -0,0 +1,98 @@
+# Copyright (C) 2012-2013  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author : InckA
+
+#
+# TODO  : URANIE AND WIN32 : to compile on Windows, use uranietm
+#
+#         To be adapted when YACS becomes available on Windows
+# 
+
+IF(URANIE AND WIN32) 
+    # Find the CppUnit includes and library
+    #
+    # This module defines
+    # CPPUNIT_INCLUDE_DIR, where to find cppunit/TestCase.h, etc.
+    # CPPUNIT_LIBRARIES, the libraries to link against to use CppUnit.
+    # CPPUNIT_FOUND, If false, do not try to use CppUnit.
+
+    # also defined, but not for general use are
+    # CPPUNIT_LIBRARY, where to find the CppUnit library.
+    # CPPUNIT_DEBUG_LIBRARY, where to find the CppUnit library in debug mode.
+
+    FIND_PATH(CPPUNIT_INCLUDE_DIR cppunit/TestCase.h)
+
+    # On Win32, it is important to find both the release and debug libraries
+
+    FIND_LIBRARY(CPPUNIT_LIBRARY cppunit
+                    ${CPPUNIT_INCLUDE_DIR}/../lib)
+    FIND_LIBRARY(CPPUNIT_DEBUG_LIBRARY cppunitd
+                    ${CPPUNIT_INCLUDE_DIR}/../lib)
+
+    IF(CPPUNIT_INCLUDE_DIR)
+      IF(CPPUNIT_LIBRARY)
+        SET(CPPUNIT_FOUND "YES")
+        SET(CPPUNIT_LIBRARIES ${CPPUNIT_LIBRARY} ${CMAKE_DL_LIBS})
+        SET(CPPUNIT_DEBUG_LIBRARIES ${CPPUNIT_DEBUG_LIBRARY} ${CMAKE_DL_LIBS})
+      ENDIF(CPPUNIT_LIBRARY)
+    ENDIF(CPPUNIT_INCLUDE_DIR)
+         
+ELSE(URANIE AND WIN32) 
+    FIND_PACKAGE(SalomeCppUnit)
+    SALOME_LOG_OPTIONAL_PACKAGE(CppUnit SALOME_BUILD_TESTS)    
+ENDIF(URANIE AND WIN32) 
+
+ADD_DEFINITIONS(${CPPUNIT_DEFINITIONS})
+
+IF(URANIE AND WIN32) 
+    INCLUDE_DIRECTORIES(
+      ${CPPUNIT_INCLUDE_DIR}  
+      ${CMAKE_CURRENT_SOURCE_DIR}/ 
+      ${CMAKE_CURRENT_SOURCE_DIR}/.. 
+      )   
+ELSE(URANIE AND WIN32) 
+    INCLUDE_DIRECTORIES(
+      ${CPPUNIT_INCLUDE_DIRS}
+      ${CMAKE_CURRENT_SOURCE_DIR}/
+      ${CMAKE_CURRENT_SOURCE_DIR}/..
+      )      
+ENDIF(URANIE AND WIN32) 
+
+SET(TestPMML_SOURCES
+  TestPMML.cxx
+  PMMLBasicsTest1.cxx
+  tools.cxx
+  )
+
+ADD_EXECUTABLE(TestPMML ${TestPMML_SOURCES})
+
+IF(URANIE AND WIN32) 
+    IF(SALOME_CMAKE_DEBUG)
+        TARGET_LINK_LIBRARIES(TestPMML pmmlLib ${CPPUNIT_DEBUG_LIBRARY} ${PLATFORM_LIBS})
+    ELSE(SALOME_CMAKE_DEBUG)
+        TARGET_LINK_LIBRARIES(TestPMML pmmlLib ${CPPUNIT_LIBRARY} ${PLATFORM_LIBS})
+    ENDIF(SALOME_CMAKE_DEBUG)
+ELSE(URANIE AND WIN32) 
+    TARGET_LINK_LIBRARIES(TestPMML pmmlLib  
+                          ${CPPUNIT_LIBRARIES} ${PLATFORM_LIBS})
+ENDIF(URANIE AND WIN32) 
+
+ADD_TEST(TestPMML TestPMML)
+
+INSTALL(TARGETS TestPMML DESTINATION ${SALOME_INSTALL_BINS})
diff --git a/src/pmml/Test/PMMLBasicsTest.hxx b/src/pmml/Test/PMMLBasicsTest.hxx
new file mode 100755 (executable)
index 0000000..f4fbc08
--- /dev/null
@@ -0,0 +1,30 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+#ifndef __PMMLBASICSTEST_HXX__
+#define __PMMLBASICSTEST_HXX__
+
+#include <cppunit/extensions/HelperMacros.h>
+
+class PMMLBasicsTest : public CppUnit::TestFixture
+{
+};
+
+#endif
diff --git a/src/pmml/Test/PMMLBasicsTest1.cxx b/src/pmml/Test/PMMLBasicsTest1.cxx
new file mode 100755 (executable)
index 0000000..837daf3
--- /dev/null
@@ -0,0 +1,1068 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+
+#include "PMMLBasicsTest1.hxx"
+#include "PMMLlib.hxx"
+#include "tools.hxx"
+
+using namespace std;
+
+void PMMLBasicsTest1::setUp()
+{
+#ifdef WIN32
+    const char* p = std::getenv("YACS_ROOT_DIR");
+    std::string strP("");
+    if (p) 
+        strP = std::string(p);
+    else 
+        throw std::string("unable to get YACS_ROOT_DIR");
+    resourcesDir = strP;
+    resourcesDir += "/share/salome/resources/pmml/";
+    const char* user = std::getenv("USERPROFILE");
+    std::string strUser("");
+    if (user) 
+        strUser = std::string(user);
+    else 
+        throw std::string("unable to get USERPROFILE");
+    tmpDir = strUser;
+    tmpDir += "\\tmp";
+    tmpDir += "\\PmmlUnitTest\\";
+    std::string cmd = "mkdir " + tmpDir; 
+    system( cmd.c_str() );  
+#else
+    resourcesDir =  getenv("YACS_ROOT_DIR");
+    resourcesDir += "/share/salome/resources/pmml/";
+    tmpDir = "/tmp/";
+    tmpDir += getenv("USER");
+    tmpDir += "/PmmlUnitTest/";
+    std::string cmd = "mkdir -p " + tmpDir; 
+    system( cmd.c_str() );  
+#endif    
+}
+
+void PMMLBasicsTest1::tearDown()
+{
+#ifdef WIN32
+    string cmd = "rmdir " + tmpDir + " /s /q "; 
+    system( cmd.c_str() );  
+#else
+    string cmd = "rm -rf " + tmpDir; 
+    system( cmd.c_str() );    
+#endif
+}
+
+//**************************************************************
+//                                                             *
+//                                                             *
+//                                                             *
+//  Methods common to all model types                          *
+//                                                             *
+//                                                             *
+//                                                             *
+//**************************************************************
+
+void PMMLBasicsTest1::testMakeLog()
+{
+    string pmmlFile = resourcesDir + "ann_model.pmml";
+    bool b = true;
+    string logRef;
+    logRef =  "**\n**** Display of PMMLlib ****\n";
+    logRef += " **  _pmmlFile[";
+    logRef += pmmlFile;
+    logRef += "]\n";
+    logRef += " **  _log[";
+    logRef += (b?"1":"0");
+    logRef += "]\n";  
+    logRef += "**\n**** End of display of PMMLlib ****\n";    
+    PMMLlib::PMMLlib p(pmmlFile, b);
+    p.SetCurrentModel("sANNName", PMMLlib::kANN);
+    CPPUNIT_ASSERT_EQUAL( p.makeLog(), logRef );
+}
+
+void PMMLBasicsTest1::testConstructorFileDoesNotExist()
+{
+    CPPUNIT_ASSERT_THROW( PMMLlib::PMMLlib p("0.mml", true) , std::string );  
+}
+  
+void PMMLBasicsTest1::testSetCurrentModelWithNameAndTypekANNUnknownModel()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    CPPUNIT_ASSERT_THROW( p.SetCurrentModel("toto", PMMLlib::kANN), std::string );
+}
+
+void PMMLBasicsTest1::testSetCurrentModelWithNameAndTypekLRUnknownModel()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    CPPUNIT_ASSERT_THROW( p.SetCurrentModel( "toto", PMMLlib::kLR), std::string );
+}
+
+void PMMLBasicsTest1::testSetCurrentModelWithNameAndTypekUNDEFINEDUnknownModel()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    CPPUNIT_ASSERT_THROW( p.SetCurrentModel("toto", PMMLlib::kUNDEFINED), std::string );
+}
+
+void PMMLBasicsTest1::testSetCurrentModelWithNameAndType()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel("sANNName", PMMLlib::kANN);
+    CPPUNIT_ASSERT_EQUAL( p.GetModelsNb(), 1 );
+    CPPUNIT_ASSERT_EQUAL( p.GetCurrentModelType(), PMMLlib::kANN );
+}
+
+void PMMLBasicsTest1::testSetCurrentModelWithNameAndTypeTwoModelsWithSameNames()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "two_models_ann_lr.pmml");
+    p.SetCurrentModel("modelName", PMMLlib::kLR);
+    CPPUNIT_ASSERT_EQUAL( p.GetModelsNb(), 2 );
+    CPPUNIT_ASSERT_EQUAL( p.GetCurrentModelType(), PMMLlib::kLR ); 
+}
+
+void PMMLBasicsTest1::testSetCurrentModelWithNameWrongName()
+{
+    PMMLlib::PMMLlib pANN(resourcesDir + "ann_model.pmml");
+    CPPUNIT_ASSERT_THROW( pANN.SetCurrentModel("toto"), std::string );      
+}
+
+void PMMLBasicsTest1::testSetCurrentModelWithNameTwoModelsWithSameNames()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "two_models_ann_lr.pmml");
+    CPPUNIT_ASSERT_THROW( p.SetCurrentModel("modelName"), std::string );
+}
+
+void PMMLBasicsTest1::testSetCurrentModelWithName()
+{
+    // kANN
+    PMMLlib::PMMLlib pANN(resourcesDir + "ann_model.pmml");
+    pANN.SetCurrentModel("sANNName");
+    CPPUNIT_ASSERT_EQUAL( pANN.GetModelsNb(), 1 );
+    CPPUNIT_ASSERT_EQUAL( pANN.GetCurrentModelType(), PMMLlib::kANN );
+    // kLR
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib pLR(resourcesDir + "lr_model.pmml"); 
+    pLR.SetCurrentModel(strModel);    
+    CPPUNIT_ASSERT_EQUAL( pLR.GetModelsNb(), 1 );
+    CPPUNIT_ASSERT_EQUAL( pLR.GetCurrentModelType(), PMMLlib::kLR );
+}
+  
+void PMMLBasicsTest1::testSetCurrentModelNoModel()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "no_model.pmml");
+    CPPUNIT_ASSERT_THROW( p.SetCurrentModel(), std::string );
+}
+
+void PMMLBasicsTest1::testSetCurrentModelMoreThanOneModel()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "two_models_ann_lr.pmml");
+    CPPUNIT_ASSERT_THROW( p.SetCurrentModel(), std::string );
+}
+
+void PMMLBasicsTest1::testSetCurrentModel()
+{
+    // kANN
+    PMMLlib::PMMLlib pANN(resourcesDir + "ann_model.pmml");
+    pANN.SetCurrentModel();
+    CPPUNIT_ASSERT_EQUAL( pANN.GetModelsNb(), 1 );
+    CPPUNIT_ASSERT_EQUAL( pANN.GetCurrentModelType(), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( pANN.GetCurrentModelName(), string("sANNName") );    
+    // kLR
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib pLR(resourcesDir + "lr_model.pmml"); 
+    pLR.SetCurrentModel();    
+    CPPUNIT_ASSERT_EQUAL( pLR.GetModelsNb(), 1 );
+    CPPUNIT_ASSERT_EQUAL( pLR.GetCurrentModelType(), PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( pLR.GetCurrentModelName(), strModel );       
+}
+
+void PMMLBasicsTest1::testGetModelsNbkANN()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    CPPUNIT_ASSERT_EQUAL( p.GetModelsNb(), 1 );    
+}
+
+void PMMLBasicsTest1::testGetModelsNbkLR()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    CPPUNIT_ASSERT_EQUAL( p.GetModelsNb(), 1 );    
+}
+
+void PMMLBasicsTest1::testWrite() 
+{
+    string refPmmlFilename = resourcesDir + "ann_model.pmml";
+    string pmmlFilename = tmpDir + "unittest_generated_ann_model.pmml";
+    
+    PMMLlib::PMMLlib p(refPmmlFilename);
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN);                  
+    p.Write(pmmlFilename);
+    CPPUNIT_ASSERT_EQUAL( areFilesEqual( refPmmlFilename, pmmlFilename), 0 ); 
+}
+
+void PMMLBasicsTest1::testWriteNotExistingFile() 
+{   
+    PMMLlib::PMMLlib p;             
+    CPPUNIT_ASSERT_THROW( p.Write(), std::string ); 
+}
+
+void PMMLBasicsTest1::testUnlinkNode()
+{
+    string pmmlFilename = tmpDir + "unittest_unlinked_ann_model.pmml";  
+    string refPmmlFilename = resourcesDir + "ann_model.pmml";
+    PMMLlib::PMMLlib refP(refPmmlFilename);
+    refP.SetCurrentModel( string("sANNName"), PMMLlib::kANN);  
+    refP.UnlinkNode();
+    refP.Write(pmmlFilename);
+    PMMLlib::PMMLlib p2(pmmlFilename);
+    CPPUNIT_ASSERT_EQUAL( p2.GetModelsNb(), 0 );
+}
+
+void PMMLBasicsTest1::testBackupNode()
+{
+    string pmmlFilename = tmpDir + "unittest_backup_ann_model.pmml";  
+    string refPmmlFilename = resourcesDir + "ann_model.pmml";
+    PMMLlib::PMMLlib refP(refPmmlFilename, true);
+    refP.SetCurrentModel( string("sANNName"), PMMLlib::kANN);  
+    refP.BackupNode(); // rename sANNName to sANNName_0
+    refP.BackupNode(); // rename sANNName_0 to sANNName_1
+    // Create the PMML test file
+    refP.Write(pmmlFilename);
+    PMMLlib::PMMLlib p(pmmlFilename, true);
+    // Still one model: 
+    CPPUNIT_ASSERT_EQUAL( p.GetModelsNb(), 1 );
+    // Its name is sANNName_1:
+    p.SetCurrentModel(string("sANNName_1"), PMMLlib::kANN);
+    // The XML tree for NeuralNetwork sANNName_1 is the same as in sANNName
+    CPPUNIT_ASSERT_EQUAL( p.GetNbOutputs(), 1 );    
+    // sANNName is not there anymore: SetCurrentModel throws an exception
+    // You'll have to build the new structure from scratch 
+    CPPUNIT_ASSERT_THROW( p.SetCurrentModel(string("sANNName"), PMMLlib::kANN), std::string );
+}
+
+//**************************************************************
+//                                                             *
+//                                                             *
+//                                                             *
+//  Methods specific to the NeuralNetwork model                *
+//                                                             *
+//                                                             *
+//                                                             *
+//**************************************************************
+
+void PMMLBasicsTest1::testGetNbInputsForbiddenModelType()
+{
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR);  
+    CPPUNIT_ASSERT_THROW( p.GetNbInputs(), std::string );     
+}
+
+void PMMLBasicsTest1::testGetNbInputs8()
+{  
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetNbInputs(), 8 );      
+}
+
+
+void PMMLBasicsTest1::testGetNbOutputsForbiddenModelType()
+{
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR);  
+    CPPUNIT_ASSERT_THROW( p.GetNbOutputs(), std::string ); 
+}
+
+void PMMLBasicsTest1::testGetNbOutputs2()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model_2.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetNbOutputs(), 2 );     
+}
+
+
+void PMMLBasicsTest1::testGetNameInputForbiddenModelType()
+{  
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR);  
+    CPPUNIT_ASSERT_THROW( p.GetNameInput(0), std::string );   
+}
+
+void PMMLBasicsTest1::testGetNameInputIndexOutOfRange()
+{  
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );  
+    CPPUNIT_ASSERT_EQUAL( p.GetNameInput(18), string("") );  
+}
+
+void PMMLBasicsTest1::testGetNameInput()
+{ 
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetNameInput(2), string("tu") ); 
+}  
+
+void PMMLBasicsTest1::testGetNameOutputForbiddenModelType()
+{  
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR);  
+    CPPUNIT_ASSERT_THROW( p.GetNameOutput(0), std::string );   
+}
+
+void PMMLBasicsTest1::testGetNameOutputIndexOutOfRange()
+{  
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );  
+    CPPUNIT_ASSERT_EQUAL( p.GetNameOutput(3), string("") );  
+}
+
+void PMMLBasicsTest1::testGetNameOutput()
+{ 
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetNameOutput(0), string("yhat") ); 
+}  
+
+
+void PMMLBasicsTest1::testGetNormalisationInputForbiddenType()
+{
+    double * dnorm = new double[2];  
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR);  
+    CPPUNIT_ASSERT_THROW( p.GetNormalisationInput(0, dnorm), std::string ); 
+    delete [] dnorm ;    
+}
+
+void PMMLBasicsTest1::testGetNormalisationInputIndexUnknown()
+{
+    double * dnorm = new double[2];
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    p.GetNormalisationInput(18, dnorm) ;
+    CPPUNIT_ASSERT_EQUAL( dnorm[0], 0.0 ); 
+    CPPUNIT_ASSERT_EQUAL( dnorm[1], 0.0 );   
+    delete [] dnorm ;
+}
+
+void PMMLBasicsTest1::testGetNormalisationInputkMinusOneOne()
+{
+    // Reference values
+    double dorig1Ref = 2;   
+    double dorig2Ref = 1.400018e+03;
+    double * dnormRef = new double[2]; 
+    dnormRef[0] = dorig1Ref ;
+    dnormRef[1] = dorig2Ref ;
+    double * dnorm = new double[2];
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model_2.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    p.GetNormalisationInput(6, dnorm) ;
+    CPPUNIT_ASSERT_EQUAL( dnorm[0], dnormRef[0] ); 
+    CPPUNIT_ASSERT_EQUAL( dnorm[1], dnormRef[1] );
+    delete [] dnormRef ;
+    delete [] dnorm ;  
+}
+
+void PMMLBasicsTest1::testGetNormalisationInputkCRkZeroOne()
+{
+    // Reference values
+    double dnorm1Ref = -5.780019e-02;   
+    double dorig2Ref = 1.095001e+04;
+    double * dnormRef = new double[2]; 
+    dnormRef[0] = dorig2Ref ;
+    dnormRef[1] = -1.0 * dorig2Ref * dnorm1Ref ;
+    double * dnorm = new double[2];
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    p.GetNormalisationInput(7, dnorm) ;
+    CPPUNIT_ASSERT_EQUAL( dnorm[0], dnormRef[0] ); 
+    CPPUNIT_ASSERT_EQUAL( dnorm[1], dnormRef[1] );
+    delete [] dnormRef ;
+    delete [] dnorm ;
+}
+
+void PMMLBasicsTest1::testGetNormalisationOutputForbiddenType()
+{
+    double * dnorm = new double[2];  
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR);  
+    CPPUNIT_ASSERT_THROW( p.GetNormalisationOutput(0, dnorm), std::string ); 
+    delete [] dnorm ;    
+}
+
+void PMMLBasicsTest1::testGetNormalisationOutputIndexUnknown()
+{
+    double * dnorm = new double[2];
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    p.GetNormalisationOutput(18, dnorm) ;
+    CPPUNIT_ASSERT_EQUAL( dnorm[0], 0.0 ); 
+    CPPUNIT_ASSERT_EQUAL( dnorm[1], 0.0 );   
+    delete [] dnorm ;
+}
+
+void PMMLBasicsTest1::testGetNormalisationOutputkMinusOneOne()
+{
+    // Reference values
+    double dorig1Ref = 2;   
+    double dorig2Ref = 5.781171e+01;
+    double * dnormRef = new double[2]; 
+    dnormRef[0] = dorig1Ref ;
+    dnormRef[1] = dorig2Ref ;
+    double * dnorm = new double[2];
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model_2.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    p.GetNormalisationOutput(1, dnorm) ;
+    CPPUNIT_ASSERT_EQUAL( dnorm[0], dnormRef[0] ); 
+    CPPUNIT_ASSERT_EQUAL( dnorm[1], dnormRef[1] );
+    delete [] dnormRef ;
+    delete [] dnorm ;  
+}
+
+void PMMLBasicsTest1::testGetNormalisationOutputkCRkZeroOne()
+{
+    // Reference values
+    double dnorm1Ref = -5.873935e-01;   
+    double dorig2Ref = 7.781171e+01;
+    double * dnormRef = new double[2]; 
+    dnormRef[0] = dorig2Ref ;
+    dnormRef[1] = -1.0 * dorig2Ref * dnorm1Ref ;
+    double * dnorm = new double[2];
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    p.GetNormalisationOutput(0, dnorm) ;
+    CPPUNIT_ASSERT_EQUAL( dnorm[0], dnormRef[0] ); 
+    CPPUNIT_ASSERT_EQUAL( dnorm[1], dnormRef[1] );
+    delete [] dnormRef ;
+    delete [] dnorm ;
+}
+
+void PMMLBasicsTest1::testGetNbLayersForbiddenType()
+{
+    double * dnorm = new double[2];  
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR);  
+    CPPUNIT_ASSERT_THROW( p.GetNbLayers(), std::string ); 
+    delete [] dnorm ;    
+}
+
+void PMMLBasicsTest1::testGetNbLayers()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetNbHiddenLayers(), 2 );     
+    CPPUNIT_ASSERT_EQUAL( p.GetNbLayers(), 4 );     
+}
+
+void PMMLBasicsTest1::testGetNbNeuronsAtLayerForbiddenType()
+{
+    double * dnorm = new double[2];  
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR);  
+    CPPUNIT_ASSERT_THROW( p.GetNbNeuronsAtLayer(0), std::string ); 
+    delete [] dnorm ;    
+}
+
+void PMMLBasicsTest1::testGetNbNeuronsAtLayerFromIndexOutOfRange()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetNbNeuronsAtLayer(18), 0 );    
+}
+
+void PMMLBasicsTest1::testGetNbNeuronsAtLayer()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetNbNeuronsAtLayer(0), 1 );    
+}
+
+void PMMLBasicsTest1::testGetNeuronBiasForbiddenType()
+{
+    double * dnorm = new double[2];  
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR);  
+    CPPUNIT_ASSERT_THROW( p.GetNeuronBias(0,0), std::string ); 
+    delete [] dnorm ;    
+}
+
+void PMMLBasicsTest1::testGetNeuronBiasFromLayerIndexOutOfRange()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetNeuronBias(10,1), 0. );    
+}
+
+void PMMLBasicsTest1::testGetNeuronBiasFromNeuronIndexOutOfRange()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetNeuronBias(0,10), 0. );    
+}
+
+void PMMLBasicsTest1::testGetNeuronBias()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetNeuronBias(0,0), -1.263572e+00 );    
+}
+
+void PMMLBasicsTest1::testGetPrecNeuronSynapseForbiddenType()
+{
+    double * dnorm = new double[2];  
+    string strModel("Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]");
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );  
+    CPPUNIT_ASSERT_THROW( p.GetPrecNeuronSynapse(0,0,0), std::string ); 
+    delete [] dnorm ;    
+}
+
+void PMMLBasicsTest1::testGetPrecNeuronSynapseFromLayerIndexOutOfRange()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetPrecNeuronSynapse(10,1,1), 0. );    
+}
+
+void PMMLBasicsTest1::testGetPrecNeuronSynapseFromNeuronIndexOutOfRange()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetPrecNeuronSynapse(1,10,1), 0. );    
+}
+
+void PMMLBasicsTest1::testGetPrecNeuronSynapseFromPrecIndexOutOfRange()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetPrecNeuronSynapse(1,0,10), 0. );    
+}
+
+void PMMLBasicsTest1::testGetPrecNeuronSynapse()
+{    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    CPPUNIT_ASSERT_EQUAL( p.GetPrecNeuronSynapse(0,0,7), 8.559675e-02 );    
+}
+
+void PMMLBasicsTest1::testReadNetworkStructure()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    string str = p.ReadNetworkStructure();
+    CPPUNIT_ASSERT_EQUAL( str, string("rw:r:tu:tl:hu:hl:l:kw,1,@yhat") );
+}
+
+void PMMLBasicsTest1::testExportNeuralNetworkCpp()
+{
+    string refCppFilename = resourcesDir + "unittest_ref_ann_model.cpp";
+    string cppFilename = tmpDir + "unittest_ann_model.cpp";
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    p.ExportCpp(cppFilename, 
+                "myTestFunc", 
+                "File used by unit test\n PMMLBasicsTest1::testExportNeuralNetworkCpp" );
+     CPPUNIT_ASSERT_EQUAL( areFilesEqual( refCppFilename, cppFilename), 0 ); 
+}
+
+void PMMLBasicsTest1::testExportNeuralNetworkFortran()
+{
+    string refFortranFilename = resourcesDir + "unittest_ref_ann_model.f";
+    string fortranFilename = tmpDir + "unittest_ann_model.f";
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    p.ExportFortran(fortranFilename, 
+                    "myTestFunc", 
+                    "File used by unit test\n PMMLBasicsTest1::testExportNeuralNetworkFortran" );
+    CPPUNIT_ASSERT_EQUAL( areFilesEqual( refFortranFilename, fortranFilename), 0 ); 
+}
+
+void PMMLBasicsTest1::testExportNeuralNetworkPython()
+{
+    string refPyFilename = resourcesDir + "unittest_ref_ann_model.py";
+    string pyFilename = tmpDir + "unittest_ann_model.py";    
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN );
+    p.ExportPython(pyFilename, 
+                   "myTestFunc", 
+                   "File used by unit test\n PMMLBasicsTest1::testExportNeuralNetworkPython" );
+    CPPUNIT_ASSERT_EQUAL( areFilesEqual( refPyFilename, pyFilename), 0 ); 
+}
+
+void PMMLBasicsTest1::testCreatePmmlNeuralNetwork() 
+{
+#ifdef WIN32
+    std::string refPmmlFilename = resourcesDir + "win32_ann_model.pmml";
+#else
+    std::string refPmmlFilename = resourcesDir + "ann_model.pmml";
+#endif
+    std::string pmmlFilename = tmpDir + "unittest_generated_ann_model.pmml";
+    
+    // Data
+    int nInput = 8;
+    int nOutput = 1; 
+    int nHidden = 1;
+    int nWeights = 9;
+    vector<string>fieldName(nInput+1);
+    fieldName[0] = "rw";
+    fieldName[1] = "r";
+    fieldName[2] = "tu";
+    fieldName[3] = "tl";
+    fieldName[4] = "hu";
+    fieldName[5] = "hl";
+    fieldName[6] = "l";
+    fieldName[7] = "kw";
+    fieldName[8] = "yhat";     
+    double orig1=0, norm2=0;
+    vector<double>norm1(nInput);
+    norm1[0] = -2.889932e-01;
+    norm1[1] = -5.756638e-01;
+    norm1[2] = -1.699313e-01;
+    norm1[3] = -1.707007e-01;
+    norm1[4] = -3.302777e-02;
+    norm1[5] = -4.562070e-02;
+    norm1[6] = -1.155882e-01;
+    norm1[7] = -5.780019e-02;
+    vector<double>orig2(nInput);
+    orig2[0] = 9.999901e-02;
+    orig2[1] = 2.504894e+04;
+    orig2[2] = 8.933486e+04;
+    orig2[3] = 8.955232e+01;
+    orig2[4] = 1.050003e+03;
+    orig2[5] = 7.600007e+02;
+    orig2[6] = 1.400018e+03;
+    orig2[7] = 1.095001e+04;
+    vector<double>weights(nWeights);
+    weights[0] = 7.536629e-01;
+    weights[1] = 1.653660e-03;
+    weights[2] = 4.725001e-03;
+    weights[3] = 9.969786e-03;
+    weights[4] = 1.787976e-01;
+    weights[5] = -1.809809e-01;
+    weights[6] = -1.735688e-01;
+    weights[7] = 8.559675e-02; 
+    weights[8] = 6.965512e+00;     
+
+    // Construction
+    PMMLlib::PMMLlib p;
+
+    // Header node--------------------------------------------------------
+    p.SetHeader("myCopyright", "Tests unitaires", "PMMLlib", "myVersion", "Tests unitaires PMMLlib");  
+    // DataDictionary node--------------------------------------------------------
+    for(int j=0 ; j<nInput+1 ; j++)
+    {
+        p.AddDataField(fieldName[j], fieldName[j], "continuous", "float", "ClosedClosed", 0., 0.);
+    }            
+    // Set Neural Network (and type)---------------------------------------------   
+    p.AddNeuralNetwork( string("sANNName"), PMMLlib::kREGRESSION);
+    // Set Mining schema  
+    for(int j=0 ; j<nInput ; j++)
+    {
+        p.AddMiningSchema(fieldName[j], "active");
+    }
+    p.AddMiningSchema(fieldName[nInput], "predicted"); 
+    // Set NeuralInput
+    for(int j=0 ; j<nInput ; j++)
+    {
+        p.AddNeuralInput(j, fieldName[j], "continuous", "float", orig1, norm1[j], orig2[j], norm2);
+    }
+    // Set neural layers
+    p.AddNeuralLayer(PMMLlib::kTANH);
+    for(int j=0 ; j<nHidden ; j++) // hidden layers
+    {
+        vector<double> tmp_weights(nInput);
+        for(int i=0 ; i<nInput ; i++) 
+        {
+            tmp_weights[i] = weights[i];
+        }        
+        p.AddNeuron(nInput+j, -1.263572, nInput, 0, tmp_weights);
+    }
+    //
+    p.AddNeuralLayer(PMMLlib::kIDENTITY);
+    for(int j=0 ; j<nOutput ; j++) 
+    {
+        vector<double> tmp_weights(nHidden);        
+        tmp_weights[0] =  weights[nWeights-1];
+        p.AddNeuron(nInput+nHidden+j, -1.745483, nHidden, nInput, tmp_weights);
+    }      
+    // Set NeuralOutput
+    p.AddNeuralOutput(nInput+nHidden, "yhat", "continuous", "float",
+                          0,-5.873935e-01 , 
+                          7.781171e+01, 0);
+    p.Write( pmmlFilename );
+    CPPUNIT_ASSERT_EQUAL( areFilesEqual( refPmmlFilename, pmmlFilename), 0 ); 
+}
+
+//**************************************************************
+//                                                             *
+//                                                             *
+//                                                             *
+//  Methods specific to the RegressionModel                    *
+//                                                             *
+//                                                             *
+//                                                             *
+//**************************************************************
+
+void PMMLBasicsTest1::testHasInterceptForbiddenType()
+{   
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml" );
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN);      
+    CPPUNIT_ASSERT_THROW( p.HasIntercept(), std::string );    
+}
+
+void PMMLBasicsTest1::testHasIntercept()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.HasIntercept(), true );  
+}
+
+void PMMLBasicsTest1::testHasInterceptNo()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model_2.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.HasIntercept(), false );  
+}
+
+void PMMLBasicsTest1::testGetRegressionTableInterceptForbiddenType()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN);  
+    CPPUNIT_ASSERT_THROW( p.GetRegressionTableIntercept(), std::string );  
+}
+
+void PMMLBasicsTest1::testGetRegressionTableIntercept()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetRegressionTableIntercept(), 3.837365 );
+}
+
+void PMMLBasicsTest1::testReadRegressionStructure()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.ReadRegressionStructure(), string("x6:x8,1,@x1") );  
+}
+
+void PMMLBasicsTest1::testGetNumericPredictorNb()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetNumericPredictorNb(), 2 );
+}
+
+void PMMLBasicsTest1::testGetNumericPredictorNbForbiddenType()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN); 
+    CPPUNIT_ASSERT_THROW( p.GetNumericPredictorNb(), std::string );  
+}
+
+void PMMLBasicsTest1::testGetPredictorTermNb()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetPredictorTermNb(), 2 );
+}
+
+void PMMLBasicsTest1::testGetPredictorTermNbForbiddenType()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN); 
+    CPPUNIT_ASSERT_THROW( p.GetNumericPredictorNb(), std::string ); 
+}
+
+void PMMLBasicsTest1::testGetNumericPredictorName()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetNumericPredictorName(1), string("x8") );
+}
+
+void PMMLBasicsTest1::testGetNumericPredictorNameForbiddenType()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN); 
+    CPPUNIT_ASSERT_THROW( p.GetNumericPredictorName(0), std::string );  
+}
+
+void PMMLBasicsTest1::testGetNumericPredictorNamePredictorOutOfRange()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetNumericPredictorName(5), string("") );  
+} 
+
+void PMMLBasicsTest1::testGetPredictorTermName()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetPredictorTermName(1), string("x6x6x8") );
+}
+
+void PMMLBasicsTest1::testGetPredictorTermNameForbiddenType()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN); 
+    CPPUNIT_ASSERT_THROW( p.GetPredictorTermName(0), std::string );
+}
+
+void PMMLBasicsTest1::testGetPredictorTermNamePredictorOutOfRange()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetPredictorTermName(5), string("") );  
+} 
+    
+void PMMLBasicsTest1::testGetNumericPredictorCoefficient()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetNumericPredictorCoefficient(1), 0.1428838 );
+}
+
+void PMMLBasicsTest1::testGetNumericPredictorCoefficientForbiddenType()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN); 
+    CPPUNIT_ASSERT_THROW( p.GetNumericPredictorCoefficient(0), std::string );
+}
+
+void PMMLBasicsTest1::testGetNumericPredictorCoefficientPredictorOutOfRange()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetNumericPredictorCoefficient(2), 0. );  
+}
+
+void PMMLBasicsTest1::testGetPredictorTermCoefficient()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetPredictorTermCoefficient(0), -0.02201903 );
+}
+
+void PMMLBasicsTest1::testGetPredictorTermCoefficientForbiddenType()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN); 
+    CPPUNIT_ASSERT_THROW( p.GetPredictorTermCoefficient(0), std::string );
+}
+
+void PMMLBasicsTest1::testGetPredictorTermCoefficientPredictorOutOfRange()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetPredictorTermCoefficient(2), 0.);  
+}
+
+void PMMLBasicsTest1::testGetPredictorTermFieldRefNb()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetPredictorTermFieldRefNb(1), 3 );
+}
+
+void PMMLBasicsTest1::testGetPredictorTermFieldRefNbForbiddenType()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN); 
+    CPPUNIT_ASSERT_THROW( p.GetPredictorTermFieldRefNb(0), std::string );  
+}
+
+void PMMLBasicsTest1::testGetPredictorTermFieldRefNbPredictorOutOfRange()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetPredictorTermFieldRefNb(12), 0 );  
+}
+
+void PMMLBasicsTest1::testGetPredictorTermFieldRefName()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetPredictorTermFieldRefName(1,1), string("x6") );
+}
+
+void PMMLBasicsTest1::testGetPredictorTermFieldRefNameForbiddenType()
+{
+    PMMLlib::PMMLlib p(resourcesDir + "ann_model.pmml");
+    p.SetCurrentModel( string("sANNName"), PMMLlib::kANN); 
+    CPPUNIT_ASSERT_THROW( p.GetPredictorTermFieldRefName(1,1), std::string );  
+}
+
+void PMMLBasicsTest1::testGetPredictorTermFieldRefNamePredictorOutOfRange()   
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetPredictorTermFieldRefName(3,1), string("") );  
+}
+
+void PMMLBasicsTest1::testGetPredictorTermFieldRefNameFieldOutOfRange()
+{
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    CPPUNIT_ASSERT_EQUAL( p.GetPredictorTermFieldRefName(1,5), string("") );  
+}
+
+void PMMLBasicsTest1::testExportLinearRegressionCpp()
+{
+    string refCppFilename = resourcesDir + "unittest_ref_lr_model.cpp";
+    string cppFilename = tmpDir + "unittest_lr_model.cpp"; 
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";   
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    p.ExportCpp(cppFilename, 
+                string("myTestFunc"), 
+                "File used by unit test\n PMMLBasicsTest1::testExportLinearRegressionCpp" );    
+    CPPUNIT_ASSERT_EQUAL( areFilesEqual( refCppFilename, cppFilename), 0 );  
+}
+
+void PMMLBasicsTest1::testExportLinearRegressionFortran()
+{
+    string refFortranFilename = resourcesDir + "unittest_ref_lr_model.f";
+    string fortranFilename = tmpDir + "unittest_lr_model.f"; 
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";   
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    p.ExportFortran(fortranFilename, 
+                    string("myTestFunc"), 
+                    "File used by unit test\n PMMLBasicsTest1::testExportLinearRegressionFortran" );    
+    CPPUNIT_ASSERT_EQUAL( areFilesEqual( refFortranFilename, fortranFilename), 0 );  
+}
+
+void PMMLBasicsTest1::testExportLinearRegressionPython()
+{
+    string refPyFilename = resourcesDir + "unittest_ref_lr_model.py";
+    string pyFilename = tmpDir + "unittest_lr_model.py"; 
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";   
+    PMMLlib::PMMLlib p(resourcesDir + "lr_model.pmml");
+    p.SetCurrentModel( strModel, PMMLlib::kLR );
+    p.ExportPython(pyFilename, 
+                   string("myTestFunc"), 
+                   "File used by unit test\n PMMLBasicsTest1::testExportLinearRegressionPython" );
+    CPPUNIT_ASSERT_EQUAL( areFilesEqual( refPyFilename, pyFilename), 0 );  
+}
+
+void PMMLBasicsTest1::testCreatePmmlRegression()
+{
+#ifdef WIN32
+    std::string refPmmlFilename = resourcesDir + "win32_lr_model.pmml";
+#else
+    std::string refPmmlFilename = resourcesDir + "lr_model.pmml";
+#endif
+    std::string pmmlFilename = tmpDir + "unittest_generated_lr_model.pmml";
+    // Data
+    string strModel = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+    int nData = 3;
+    vector<string>fieldName(nData);
+    fieldName[0] = "x6";
+    fieldName[1] = "x8";
+    fieldName[2] = "x1";
+    vector<string>displayName(nData);
+    displayName[0] = " x_{6}";
+    displayName[1] = " x_{8}";
+    displayName[2] = " x_{1}";    
+    vector<double>leftMargin(nData);
+    leftMargin[0] = 1.1e+01;
+    leftMargin[1] = 2.81e+01;
+    leftMargin[2] = 6.36e+00;
+    vector<double>rightMargin(nData);
+    rightMargin[0] = 2.3e+01;
+    rightMargin[1] = 7.67e+01;
+    rightMargin[2] = 1.251e+01;
+    vector<double>numPredCoeff(nData-1);
+    numPredCoeff[0] = 0.4759134;
+    numPredCoeff[1] = 0.1428838;    
+    vector<double>predTermCoeff(nData-1);
+    predTermCoeff[0] = -0.02201903;
+    predTermCoeff[1] = 0.000536256;      
+    vector< vector<string> > fieldRefVect(nData-1);
+    vector<string>fieldRef0(2);
+    fieldRef0[0] = fieldName[0];
+    fieldRef0[1] = fieldName[1];
+    vector<string>fieldRef1(3);
+    fieldRef1[0] = fieldName[0];
+    fieldRef1[1] = fieldName[0];
+    fieldRef1[2] = fieldName[1];
+    fieldRefVect[0] = fieldRef0;
+    fieldRefVect[nData-2] = fieldRef1; 
+
+    // Construction
+    PMMLlib::PMMLlib p;
+    // Header node--------------------------------------------------------
+    p.SetHeader("myCopyright", "Tests unitaires", "PMMLlib", "myVersion", "Tests unitaires PMMLlib");  
+    // DataDictionary node--------------------------------------------------------
+    for(int j=0; j<nData; j++)
+    {
+        p.AddDataField(fieldName[j], displayName[j], 
+                       "continuous", "double", "ClosedClosed", 
+                       leftMargin[j], rightMargin[j], true);
+    } 
+    // Set Model (and type)----------------------------------------------   
+    p.AddRegressionModel( strModel, PMMLlib::kREGRESSION,  fieldName[2]);
+    // Set Mining schema------------------------------------------------
+    for(int j=0 ; j<nData-1 ; j++)
+    {
+        p.AddMiningSchema(fieldName[j], "active");
+    }
+    p.AddMiningSchema(fieldName[nData-1], "predicted");
+    // Set Regression table
+    double intercept = 3.837365;
+    p.AddRegressionTable(intercept);
+    // Set numeric predictor
+    for(int j=0; j<nData-1; j++) 
+    {       
+        p.AddNumericPredictor(fieldName[j], 1, numPredCoeff[j]);
+    }
+    // Set Predictor term
+    for(int j=0; j<nData-1; j++) 
+    {
+        p.AddPredictorTerm(predTermCoeff[j], fieldRefVect[j]);
+    }      
+    p.Write( pmmlFilename );
+    CPPUNIT_ASSERT_EQUAL( areFilesEqual( refPmmlFilename, pmmlFilename), 0 );   
+}
+
diff --git a/src/pmml/Test/PMMLBasicsTest1.hxx b/src/pmml/Test/PMMLBasicsTest1.hxx
new file mode 100755 (executable)
index 0000000..e6f17e7
--- /dev/null
@@ -0,0 +1,284 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+#ifndef __PMMLBASICSTEST1_HXX__
+#define __PMMLBASICSTEST1_HXX__
+
+#include "PMMLBasicsTest.hxx"
+
+#include <string>
+
+// Test class for PMMLlib
+  
+class PMMLBasicsTest1 : public PMMLBasicsTest
+{
+    CPPUNIT_TEST_SUITE(PMMLBasicsTest1);
+    
+    // Tests of common methods
+    CPPUNIT_TEST(testMakeLog);
+    
+    CPPUNIT_TEST(testConstructorFileDoesNotExist);     
+     
+    CPPUNIT_TEST(testSetCurrentModelWithNameAndTypekANNUnknownModel);
+    CPPUNIT_TEST(testSetCurrentModelWithNameAndTypekLRUnknownModel);  
+    CPPUNIT_TEST(testSetCurrentModelWithNameAndTypekUNDEFINEDUnknownModel);
+    CPPUNIT_TEST(testSetCurrentModelWithNameAndType);
+    CPPUNIT_TEST(testSetCurrentModelWithNameAndTypeTwoModelsWithSameNames);
+    CPPUNIT_TEST(testSetCurrentModelWithNameWrongName);
+    CPPUNIT_TEST(testSetCurrentModelWithNameTwoModelsWithSameNames);
+    CPPUNIT_TEST(testSetCurrentModelWithName);   
+    CPPUNIT_TEST(testSetCurrentModelNoModel);
+    CPPUNIT_TEST(testSetCurrentModelMoreThanOneModel);
+    CPPUNIT_TEST(testSetCurrentModel);      
+    
+    CPPUNIT_TEST(testGetModelsNbkANN);
+    CPPUNIT_TEST(testGetModelsNbkLR);
+
+    CPPUNIT_TEST( testWrite );  
+    CPPUNIT_TEST( testWriteNotExistingFile );
+    
+    CPPUNIT_TEST(testUnlinkNode );
+    CPPUNIT_TEST(testBackupNode );
+
+    // Tests of NeuralNetwork methods
+    CPPUNIT_TEST( testGetNbInputsForbiddenModelType ) ;
+    CPPUNIT_TEST( testGetNbInputs8 ) ;
+
+    CPPUNIT_TEST( testGetNbOutputsForbiddenModelType ) ;
+    CPPUNIT_TEST( testGetNbOutputs2 ) ;
+
+    CPPUNIT_TEST( testGetNameInputForbiddenModelType ) ;
+    CPPUNIT_TEST( testGetNameInputIndexOutOfRange ) ;    
+    CPPUNIT_TEST( testGetNameInput ) ;
+
+    CPPUNIT_TEST( testGetNameOutputForbiddenModelType ) ;
+    CPPUNIT_TEST( testGetNameOutputIndexOutOfRange ) ;    
+    CPPUNIT_TEST( testGetNameOutput ) ;
+
+    CPPUNIT_TEST( testGetNormalisationInputForbiddenType ) ;
+    CPPUNIT_TEST( testGetNormalisationInputIndexUnknown ) ;
+    CPPUNIT_TEST( testGetNormalisationInputkMinusOneOne ) ;
+    CPPUNIT_TEST( testGetNormalisationInputkCRkZeroOne ) ;
+
+    CPPUNIT_TEST( testGetNormalisationOutputForbiddenType ) ; 
+    CPPUNIT_TEST( testGetNormalisationOutputIndexUnknown ) ;
+    CPPUNIT_TEST( testGetNormalisationOutputkMinusOneOne ) ;
+    CPPUNIT_TEST( testGetNormalisationOutputkCRkZeroOne ) ;
+
+    CPPUNIT_TEST( testGetNbLayersForbiddenType ) ;
+    CPPUNIT_TEST( testGetNbLayers ) ;
+
+    CPPUNIT_TEST( testGetNbNeuronsAtLayerForbiddenType );
+    CPPUNIT_TEST( testGetNbNeuronsAtLayerFromIndexOutOfRange );
+    CPPUNIT_TEST( testGetNbNeuronsAtLayer );
+
+    CPPUNIT_TEST( testGetNeuronBiasForbiddenType );
+    CPPUNIT_TEST( testGetNeuronBiasFromLayerIndexOutOfRange );
+    CPPUNIT_TEST( testGetNeuronBiasFromNeuronIndexOutOfRange );
+    CPPUNIT_TEST( testGetNeuronBias );
+       
+    CPPUNIT_TEST( testGetPrecNeuronSynapseForbiddenType );
+    CPPUNIT_TEST( testGetPrecNeuronSynapseFromLayerIndexOutOfRange );
+    CPPUNIT_TEST( testGetPrecNeuronSynapseFromNeuronIndexOutOfRange );
+    CPPUNIT_TEST( testGetPrecNeuronSynapseFromPrecIndexOutOfRange );
+    CPPUNIT_TEST( testGetPrecNeuronSynapse );
+    
+    CPPUNIT_TEST( testReadNetworkStructure );    
+
+    CPPUNIT_TEST( testExportNeuralNetworkCpp );
+    CPPUNIT_TEST( testExportNeuralNetworkFortran );
+    CPPUNIT_TEST( testExportNeuralNetworkPython );
+    CPPUNIT_TEST( testCreatePmmlNeuralNetwork );
+
+    // Tests of RegressionModel methods
+    CPPUNIT_TEST( testHasInterceptForbiddenType );
+    CPPUNIT_TEST( testHasIntercept );
+    CPPUNIT_TEST( testHasInterceptNo );
+    CPPUNIT_TEST( testGetRegressionTableInterceptForbiddenType );    
+    CPPUNIT_TEST( testGetRegressionTableIntercept );
+    
+    CPPUNIT_TEST( testReadRegressionStructure );
+   
+    CPPUNIT_TEST( testGetNumericPredictorNb );
+    CPPUNIT_TEST( testGetNumericPredictorNbForbiddenType );
+
+    CPPUNIT_TEST( testGetNumericPredictorName );
+    CPPUNIT_TEST( testGetNumericPredictorNameForbiddenType );
+    CPPUNIT_TEST( testGetNumericPredictorNamePredictorOutOfRange );
+
+    CPPUNIT_TEST( testGetPredictorTermName );
+    CPPUNIT_TEST( testGetPredictorTermNameForbiddenType ); 
+    CPPUNIT_TEST( testGetPredictorTermNamePredictorOutOfRange );
+    
+    CPPUNIT_TEST( testGetNumericPredictorCoefficient );
+    CPPUNIT_TEST( testGetNumericPredictorCoefficientForbiddenType );
+    CPPUNIT_TEST( testGetNumericPredictorCoefficientPredictorOutOfRange );
+
+    CPPUNIT_TEST( testGetPredictorTermCoefficient );
+    CPPUNIT_TEST( testGetPredictorTermCoefficientForbiddenType );
+    CPPUNIT_TEST( testGetPredictorTermCoefficientPredictorOutOfRange );
+
+    CPPUNIT_TEST( testGetPredictorTermFieldRefNb );
+    CPPUNIT_TEST( testGetPredictorTermFieldRefNbForbiddenType );    
+    CPPUNIT_TEST( testGetPredictorTermFieldRefNbPredictorOutOfRange );
+
+    CPPUNIT_TEST( testGetPredictorTermFieldRefName );
+    CPPUNIT_TEST( testGetPredictorTermFieldRefNameForbiddenType );
+    CPPUNIT_TEST( testGetPredictorTermFieldRefNamePredictorOutOfRange );
+    CPPUNIT_TEST( testGetPredictorTermFieldRefNameFieldOutOfRange );
+    CPPUNIT_TEST( testExportLinearRegressionCpp );
+    CPPUNIT_TEST( testExportLinearRegressionFortran );
+    CPPUNIT_TEST( testExportLinearRegressionPython );
+    CPPUNIT_TEST( testCreatePmmlRegression );
+
+    CPPUNIT_TEST_SUITE_END();
+  
+public:
+    
+    void setUp(); 
+    void tearDown();
+    
+    void testMakeLog();
+    
+    void testConstructorFileDoesNotExist();
+    
+    void testSetCurrentModelWithNameAndTypekANNUnknownModel();
+    void testSetCurrentModelWithNameAndTypekLRUnknownModel();  
+    void testSetCurrentModelWithNameAndTypekUNDEFINEDUnknownModel();
+    void testSetCurrentModelWithNameAndType();
+    void testSetCurrentModelWithNameAndTypeTwoModelsWithSameNames();
+    void testSetCurrentModelWithNameWrongName();
+    void testSetCurrentModelWithNameTwoModelsWithSameNames();
+    void testSetCurrentModelWithName();
+    void testSetCurrentModelNoModel();
+    void testSetCurrentModelMoreThanOneModel();
+    void testSetCurrentModel(); 
+    
+    void testGetModelsNbkANN();
+    void testGetModelsNbkLR();
+    
+    void testWrite(); 
+    void testWriteNotExistingFile();
+    
+    void testUnlinkNode();
+    void testBackupNode(); 
+    
+    void testGetNbInputsForbiddenModelType();
+    void testGetNbInputs8();  
+    
+    void testGetNbOutputsForbiddenModelType();
+    void testGetNbOutputs2();  
+    
+    void testGetNameInputForbiddenModelType();
+    void testGetNameInputIndexOutOfRange();
+    void testGetNameInput();
+    
+    void testGetNameOutputForbiddenModelType();
+    void testGetNameOutputIndexOutOfRange();
+    void testGetNameOutput();
+    
+    void testGetNormalisationInputForbiddenType();     
+    void testGetNormalisationInputIndexUnknown();
+    void testGetNormalisationInputkMinusOneOne();
+    void testGetNormalisationInputkCRkZeroOne();
+
+    void testGetNormalisationOutputForbiddenType();        
+    void testGetNormalisationOutputIndexUnknown();
+    void testGetNormalisationOutputkMinusOneOne();
+    void testGetNormalisationOutputkCRkZeroOne();
+    void testGetNbLayersForbiddenType();
+    void testGetNbLayers() ;
+  
+    void testGetNbNeuronsAtLayerForbiddenType();
+    void testGetNbNeuronsAtLayerFromIndexOutOfRange(); 
+    void testGetNbNeuronsAtLayer(); 
+     
+    void testGetNeuronBiasForbiddenType();
+    void testGetNeuronBiasFromLayerIndexOutOfRange(); 
+    void testGetNeuronBiasFromNeuronIndexOutOfRange();
+    void testGetNeuronBias();    
+    
+    void testGetPrecNeuronSynapseForbiddenType(); 
+    void testGetPrecNeuronSynapseFromForbiddenTypeName();
+    void testGetPrecNeuronSynapseFromLayerIndexOutOfRange();
+    void testGetPrecNeuronSynapseFromNeuronIndexOutOfRange();
+    void testGetPrecNeuronSynapseFromPrecIndexOutOfRange();
+    void testGetPrecNeuronSynapse();
+    
+    void testReadNetworkStructure();
+    
+    void testExportNeuralNetworkCpp();
+    void testExportNeuralNetworkFortran();
+    void testExportNeuralNetworkPython();
+    void testCreatePmmlNeuralNetwork();
+    
+    void testHasInterceptForbiddenType();
+    void testHasIntercept();
+    void testHasInterceptNo();
+    void testGetRegressionTableInterceptForbiddenType(); 
+    void testGetRegressionTableIntercept();
+
+    void testReadRegressionStructure();
+    
+    void testGetNumericPredictorNb();
+    void testGetNumericPredictorNbForbiddenType();
+
+    void testGetPredictorTermNb();
+    void testGetPredictorTermNbForbiddenType();
+    
+    void testGetNumericPredictorName();
+    void testGetNumericPredictorNameForbiddenType();    
+    void testGetNumericPredictorNamePredictorOutOfRange();  
+    
+    void testGetPredictorTermName();
+    void testGetPredictorTermNameForbiddenType();    
+    void testGetPredictorTermNamePredictorOutOfRange();      
+    
+    void testGetNumericPredictorCoefficient();
+    void testGetNumericPredictorCoefficientForbiddenType();      
+    void testGetNumericPredictorCoefficientPredictorOutOfRange();      
+
+    void testGetPredictorTermCoefficient();
+    void testGetPredictorTermCoefficientForbiddenType();  
+    void testGetPredictorTermCoefficientPredictorOutOfRange();  
+    
+    void testGetPredictorTermFieldRefNb();
+    void testGetPredictorTermFieldRefNbForbiddenType();   
+    void testGetPredictorTermFieldRefNbPredictorOutOfRange();      
+
+    void testGetPredictorTermFieldRefName();
+    void testGetPredictorTermFieldRefNameForbiddenType();  
+    void testGetPredictorTermFieldRefNamePredictorOutOfRange();     
+    void testGetPredictorTermFieldRefNameFieldOutOfRange();    
+    
+    void testExportLinearRegressionCpp(); 
+    void testExportLinearRegressionFortran();
+    void testExportLinearRegressionPython();
+    void testCreatePmmlRegression();
+
+private :
+    std::string resourcesDir;
+    std::string tmpDir;
+};
+
+
+#endif
diff --git a/src/pmml/Test/TestPMML.cxx b/src/pmml/Test/TestPMML.cxx
new file mode 100755 (executable)
index 0000000..8151600
--- /dev/null
@@ -0,0 +1,25 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+#include "PMMLBasicsTest1.hxx"
+
+CPPUNIT_TEST_SUITE_REGISTRATION( PMMLBasicsTest1 );
+
+#include "BasicMainTest.hxx"
diff --git a/src/pmml/Test/tools.cxx b/src/pmml/Test/tools.cxx
new file mode 100755 (executable)
index 0000000..86d3538
--- /dev/null
@@ -0,0 +1,47 @@
+#include <cstring>
+#include <iostream>
+#include <fstream>
+
+#include "tools.hxx"
+
+using namespace std ;
+
+/** Test if two files are identical. 
+ * \param fileName Name of the first file
+ * \param otherFileName Name of the other file
+ * \return 0 if files are equal.
+ */
+int areFilesEqual(const std::string & fileName, std::string otherFileName)
+{
+    // Get content of the files 
+    string str1("");
+    const char* f1 = fileName.c_str();
+    std::ifstream ifs1 (f1, ios::in);
+    if (ifs1) 
+    {
+        string ligne;
+        while(getline(ifs1, ligne))
+        {
+            str1 +=  ligne ;
+            str1 += "'\n'";
+        }
+        ifs1.close();
+    } 
+    //
+    string str2("");
+    const char* f2 = otherFileName.c_str();
+    std::ifstream ifs2 (f2);
+    if (ifs2) 
+    {
+        string ligne;
+        while(getline(ifs2, ligne))
+        {
+            str2 +=  ligne ;
+            str2 += "'\n'";
+        }    
+        ifs2.close();
+    }  
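+    // Both buffers were built with the same "'\n'" delimiter appended to every line,
+    // so comparing the two strings amounts to a line-by-line equality check.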
+    // Compare
+    int cmp = str1.compare(str2);
+    return cmp; 
+}
\ No newline at end of file
diff --git a/src/pmml/Test/tools.hxx b/src/pmml/Test/tools.hxx
new file mode 100755 (executable)
index 0000000..1f21323
--- /dev/null
@@ -0,0 +1,30 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+#ifndef __UNITTESTTOOLS_HXX__
+#define __UNITTESTTOOLS_HXX__
+
+#include <string>
+
+   
+int areFilesEqual(const std::string & fileName, std::string otherFileName);
+
+
+#endif
diff --git a/src/pmml/doc/CMakeLists.txt b/src/pmml/doc/CMakeLists.txt
new file mode 100755 (executable)
index 0000000..d290c02
--- /dev/null
@@ -0,0 +1,21 @@
+# Copyright (C) 2012-2013  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+ADD_SUBDIRECTORY(doxygen)
+
diff --git a/src/pmml/doc/doxygen/CMakeLists.txt b/src/pmml/doc/doxygen/CMakeLists.txt
new file mode 100755 (executable)
index 0000000..9540d83
--- /dev/null
@@ -0,0 +1,43 @@
+# Copyright (C) 2012-2013  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+SET(indoxfiles Doxyfile_pmml_user.in static/header.html.in)
+SET(builddir ${CMAKE_CURRENT_BINARY_DIR})
+SET(srcdir   ${CMAKE_CURRENT_SOURCE_DIR})
+FOREACH(indoxfile ${indoxfiles})
+  STRING(REGEX REPLACE ".in" "" baseindoxfile ${indoxfile})
+  SET(input ${CMAKE_CURRENT_SOURCE_DIR}/${indoxfile})
+  SET(output ${CMAKE_CURRENT_BINARY_DIR}/${baseindoxfile})
+  CONFIGURE_FILE(${input} ${output})
+  MESSAGE(STATUS "Creation of ${output}")
+ENDFOREACH(indoxfile ${indoxfiles})
+FILE(TO_NATIVE_PATH "${CMAKE_CURRENT_BINARY_DIR}" output)
+
+
+# :TRICKY: For ease of maintenance, the documentation for the code examples is
+# split into several files, which are spliced into a single file here before
+# running Doxygen.
+
+# Here is the "standard" procedure, as if ${input} were hand-written.
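+# The usr_docs target below runs Doxygen, then replaces the installed PMML
+# documentation (share/doc/salome/PMML) with the freshly generated HTML pages
+# and copies the head.png banner alongside them.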
+ADD_CUSTOM_TARGET(usr_docs ALL
+  COMMAND ${DOXYGEN_EXECUTABLE} Doxyfile_pmml_user
+  COMMAND ${PYTHON_EXECUTABLE} -c "import shutil, sys; shutil.rmtree(r'''${CMAKE_INSTALL_PREFIX}/share/doc/salome/PMML''', True); shutil.copytree(r'''${CMAKE_CURRENT_BINARY_DIR}/doc_ref_user/html''', r'''${CMAKE_INSTALL_PREFIX}/share/doc/salome/PMML'''); shutil.copy(r'''${CMAKE_CURRENT_SOURCE_DIR}/images/head.png''', r'''${CMAKE_INSTALL_PREFIX}/share/doc/salome/PMML''')"
+  VERBATIM
+  WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+  )
diff --git a/src/pmml/doc/doxygen/Doxyfile_pmml_user.in b/src/pmml/doc/doxygen/Doxyfile_pmml_user.in
new file mode 100755 (executable)
index 0000000..af86c1d
--- /dev/null
@@ -0,0 +1,200 @@
+# Copyright (C) 2007-2013  CEA/DEN, EDF R&D, OPEN CASCADE
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+# Doxyfile 0.1
+#---------------------------------------------------------------------------
+# General configuration options
+#---------------------------------------------------------------------------
+#
+PROJECT_NAME           = "SALOME PMML Users' Guide"
+PROJECT_NUMBER         =
+OUTPUT_DIRECTORY       = doc_ref_user
+OUTPUT_LANGUAGE        = English
+EXTRACT_ALL            = YES
+EXTRACT_PRIVATE        = NO
+EXTRACT_STATIC         = NO
+HIDE_UNDOC_MEMBERS     = YES
+HIDE_UNDOC_CLASSES     = YES
+BRIEF_MEMBER_DESC      = NO
+REPEAT_BRIEF           = YES
+ALWAYS_DETAILED_SEC    = NO
+FULL_PATH_NAMES        = NO
+STRIP_FROM_PATH        =
+INTERNAL_DOCS          = NO
+STRIP_CODE_COMMENTS    = YES
+CASE_SENSE_NAMES       = YES
+SHORT_NAMES            = NO
+HIDE_SCOPE_NAMES       = NO
+VERBATIM_HEADERS       = NO
+SHOW_INCLUDE_FILES     = NO
+JAVADOC_AUTOBRIEF      = NO
+INHERIT_DOCS           = YES
+MARKDOWN_SUPPORT       = YES
+INLINE_INFO            = NO
+SORT_MEMBER_DOCS       = NO
+DISTRIBUTE_GROUP_DOC   = NO
+TAB_SIZE               = 8
+GENERATE_TODOLIST      = YES
+GENERATE_TESTLIST      = YES
+GENERATE_BUGLIST       = YES
+ALIASES                =
+ENABLED_SECTIONS       = user PMML_ug
+MAX_INITIALIZER_LINES  = 30
+OPTIMIZE_OUTPUT_FOR_C  = NO
+SHOW_USED_FILES        = NO
+#---------------------------------------------------------------------------
+# configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+QUIET                  = NO
+WARNINGS               = YES
+WARN_IF_UNDOCUMENTED   = YES
+WARN_FORMAT            = "$file:$line: $text"
+WARN_LOGFILE           = log_user
+#---------------------------------------------------------------------------
+# configuration options related to the input files
+#---------------------------------------------------------------------------
+
+INPUT                  = @srcdir@/doxfiles/intro.dox \
+                         @srcdir@/doxfiles/cppexamples.dox \
+                         @srcdir@/doxfiles/pyexamples.dox \
+                         @srcdir@/doxfiles/pmml.dox \
+                         @srcdir@/doxfiles/install.dox \
+                         @srcdir@/../../src/PMML
+
+FILE_PATTERNS          = PMMLlib.* \
+                         *.dox
+RECURSIVE              = YES
+EXCLUDE                = CVS
+EXCLUDE_PATTERNS       = *~
+EXAMPLE_PATH           = @srcdir@/../../src/PMML \
+                         @srcdir@/../../src/PMML/Test \
+                         @srcdir@/../../src/PMML_Swig
+
+EXAMPLE_PATTERNS       = *.cxx *.py
+EXAMPLE_RECURSIVE      = NO
+IMAGE_PATH             = 
+INPUT_FILTER           =
+FILTER_SOURCE_FILES    = NO
+#---------------------------------------------------------------------------
+# configuration options related to source browsing
+#---------------------------------------------------------------------------
+SOURCE_BROWSER         = NO
+INLINE_SOURCES         = NO
+REFERENCED_BY_RELATION = YES
+REFERENCES_RELATION    = YES
+#---------------------------------------------------------------------------
+# configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+ALPHABETICAL_INDEX     = YES
+COLS_IN_ALPHA_INDEX    = 5
+IGNORE_PREFIX          =
+#---------------------------------------------------------------------------
+# configuration options related to the HTML output
+#---------------------------------------------------------------------------
+GENERATE_HTML          = YES
+HTML_OUTPUT            = html
+HTML_HEADER            = @builddir@/static/header.html
+HTML_FOOTER            = @srcdir@/static/footer.html
+HTML_EXTRA_STYLESHEET  = @srcdir@/static/salome_extra.css
+GENERATE_HTMLHELP      = NO
+GENERATE_CHI           = YES
+BINARY_TOC             = NO
+TOC_EXPAND             = YES
+DISABLE_INDEX          = NO
+ENUM_VALUES_PER_LINE   = 4
+GENERATE_TREEVIEW      = YES
+TREEVIEW_WIDTH         = 250
+
+#---------------------------------------------------------------------------
+#SORT related options
+#---------------------------------------------------------------------------
+SORT_GROUP_NAMES = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+GENERATE_LATEX         = YES
+LATEX_OUTPUT           = latex
+COMPACT_LATEX          = YES
+PAPER_TYPE             = a4wide
+EXTRA_PACKAGES         =
+LATEX_HEADER           =
+PDF_HYPERLINKS         = NO
+USE_PDFLATEX           = NO
+LATEX_BATCHMODE        = NO
+#---------------------------------------------------------------------------
+# configuration options related to the RTF output
+#---------------------------------------------------------------------------
+GENERATE_RTF           = NO
+RTF_OUTPUT             = rtf
+COMPACT_RTF            = NO
+RTF_HYPERLINKS         = NO
+RTF_STYLESHEET_FILE    =
+RTF_EXTENSIONS_FILE    =
+#---------------------------------------------------------------------------
+# configuration options related to the man page output
+#---------------------------------------------------------------------------
+GENERATE_MAN           = NO
+MAN_OUTPUT             = man
+MAN_EXTENSION          = .3
+MAN_LINKS              = NO
+#---------------------------------------------------------------------------
+# configuration options related to the XML output
+#---------------------------------------------------------------------------
+GENERATE_XML           = NO
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+ENABLE_PREPROCESSING   = YES
+MACRO_EXPANSION        = YES
+EXPAND_ONLY_PREDEF     = YES
+SEARCH_INCLUDES        = YES
+INCLUDE_PATH           =
+INCLUDE_FILE_PATTERNS  =
+PREDEFINED             =
+EXPAND_AS_DEFINED      = PMMLLIB_EXPORT
+SKIP_FUNCTION_MACROS   = YES
+#---------------------------------------------------------------------------
+# Configuration::additions related to external references
+#---------------------------------------------------------------------------
+TAGFILES               =
+GENERATE_TAGFILE       =
+ALLEXTERNALS           = NO
+PERL_PATH              = /usr/bin/perl
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+CLASS_DIAGRAMS         = YES
+HAVE_DOT               = YES
+CLASS_GRAPH            = YES
+COLLABORATION_GRAPH    = YES
+TEMPLATE_RELATIONS     = YES
+HIDE_UNDOC_RELATIONS   = YES
+INCLUDE_GRAPH          = YES
+INCLUDED_BY_GRAPH      = YES
+GRAPHICAL_HIERARCHY    = YES
+DOT_PATH               =
+DOT_FONTNAME           = Arial
+DOTFILE_DIRS           =
+GENERATE_LEGEND        = YES
+DOT_CLEANUP            = YES
+#---------------------------------------------------------------------------
+# Configuration::additions related to the search engine
+#---------------------------------------------------------------------------
+SEARCHENGINE           = NO
diff --git a/src/pmml/doc/doxygen/doxfiles/cppexamples.dox b/src/pmml/doc/doxygen/doxfiles/cppexamples.dox
new file mode 100755 (executable)
index 0000000..9b54a29
--- /dev/null
@@ -0,0 +1,367 @@
+/*!
+\page cppexamples PMMLlib C++ examples
+
+\section sectionA Create a neural network in a new PMML file :
+
+\verbatim
+    PMMLlib::PMMLlib p;
+
+    // Set Header node
+    p.SetHeader("copyright", "description", "name", "version", "annotation");  
+
+    // Set DataDictionary node and its DataField nodes
+    int nInput = 8;
+    vector<string>vfieldNameI(nInput);
+    vfieldNameI[0] = "rw";
+    vfieldNameI[1] = "r";
+    vfieldNameI[2] = "tu";
+    vfieldNameI[3] = "tl";
+    vfieldNameI[4] = "hu";
+    vfieldNameI[5] = "hl";
+    vfieldNameI[6] = "l";
+    vfieldNameI[7] = "kw";
+    string fieldNameO("yhat"); 
+    string opType = "continuous";
+    string dataType =  "float";
+    for(int j=0 ; j<nInput; j++)
+    {
+        p.AddDataField(vfieldNameI[j], vfieldNameI[j], opType, dataType, "ClosedClosed", 0., 0.);
+    }      
+    p.AddDataField(fieldNameO, fieldNameO, opType, dataType, "ClosedClosed", 0., 0.);      
+
+    // Add a model of type NeuralNetwork   
+    string modelName("sANNName");
+    p.AddNeuralNetwork(modelName, PMMLlib::kREGRESSION);
+
+    // Set MiningSchema  
+    for(int j=0 ; j<nInput ; j++)
+    {
+        p.AddMiningSchema(vfieldNameI[j], "active");
+    }
+    p.AddMiningSchema(fieldNameO, "predicted"); 
+
+    // Set NeuralInput
+    double orig1=0; 
+    vector<double>vnorm1(nInput);
+    vnorm1[0] = -2.889932e-01;
+    vnorm1[1] = -5.756638e-01;
+    vnorm1[2] = -1.699313e-01;
+    vnorm1[3] = -1.707007e-01;
+    vnorm1[4] = -3.302777e-02;
+    vnorm1[5] = -4.562070e-02;
+    vnorm1[6] = -1.155882e-01;
+    vnorm1[7] = -5.780019e-02;
+    vector<double>vorig2(nInput);
+    vorig2[0] = 9.999901e-02;
+    vorig2[1] = 2.504894e+04;
+    vorig2[2] = 8.933486e+04;
+    vorig2[3] = 8.955232e+01;
+    vorig2[4] = 1.050003e+03;
+    vorig2[5] = 7.600007e+02;
+    vorig2[6] = 1.400018e+03;
+    vorig2[7] = 1.095001e+04;
+    double norm2=0;
+    for(int j=0 ; j<nInput ; j++)
+    {
+        p.AddNeuralInput(j, vfieldNameI[j], opType, dataType, 
+                         orig1, vnorm1[j], vorig2[j], norm2);
+    }
+
+    // Add first NeuralLayer and its neuron
+    p.AddNeuralLayer(PMMLlib::kTANH);    
+    int idN1 = 8;
+    double bias1 = -1.263572;
+    int nWeights1 = 8;
+    int conNb1 = nWeights1;
+    int from1 = 0;
+    vector<double>weights1(nWeights1);
+    weights1[0] = 7.536629e-01;
+    weights1[1] = 1.653660e-03;
+    weights1[2] = 4.725001e-03;
+    weights1[3] = 9.969786e-03;
+    weights1[4] = 1.787976e-01;
+    weights1[5] = -1.809809e-01;
+    weights1[6] = -1.735688e-01;
+    weights1[7] = 8.559675e-02; 
+    p.AddNeuron(idN1, bias1, conNb1, from1, weights1);
+
+    // Add second NeuralLayer and its neuron
+    p.AddNeuralLayer(PMMLlib::kIDENTITY);
+    int idN2 = 9;
+    double bias2 = -1.745483;
+    int nWeights2 = 1;
+    int conNb2 = nWeights2;
+    int from2 = 8;
+    vector<double>weights2(nWeights2);
+    weights2[0] = 6.965512e+00;     
+    p.AddNeuron(idN2, bias2, conNb2, from2, weights2);
+
+    // Set NeuralOutput
+    int idO = 9;
+    double dorig1 = 0; 
+    double dnorm1 = -5.873935e-01;
+    double dorig2 = 7.781171e+01 ; 
+    double dnorm2 = 0;
+    p.AddNeuralOutput(idO, fieldNameO, opType, dataType,
+                                     dorig1, dnorm1, dorig2, dnorm2);
+
+    // Generate the PMML file
+    string pmmlFilename = "myPmmlFile.pmml";
+    p.Write( pmmlFilename );
+\endverbatim
+
+The matching PMML file is:
+
+\verbatim
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="version">
+  <Header copyright="copyright" description="description">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="sANNName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LinearNorm orig="0.000000e+00" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
+            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+00">
+        <Con from="0" weight="7.536629e-01"/>
+        <Con from="1" weight="1.653660e-03"/>
+        <Con from="2" weight="4.725001e-03"/>
+        <Con from="3" weight="9.969786e-03"/>
+        <Con from="4" weight="1.787976e-01"/>
+        <Con from="5" weight="-1.809809e-01"/>
+        <Con from="6" weight="-1.735688e-01"/>
+        <Con from="7" weight="8.559675e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+00">
+        <Con from="8" weight="6.965512e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+</PMML>
+\endverbatim
+
+\section sectionB Create a regression model in a new PMML file :
+
+\verbatim
+    PMMLlib::PMMLlib p;
+
+    // Header node
+    p.SetHeader("copyright", "description", "name", "version", "annotation");  
+
+    // DataDictionary node and its DataField nodes
+    int nData = 3;
+    vector<string>fieldName(nData);
+    fieldName[0] = "x6";
+    fieldName[1] = "x8";
+    fieldName[2] = "x1";
+    vector<string>displayName(nData);
+    displayName[0] = " x_{6}";
+    displayName[1] = " x_{8}";
+    displayName[2] = " x_{1}";    
+    vector<double>leftMargin(nData);
+    leftMargin[0] = 1.1e+01;
+    leftMargin[1] = 2.81e+01;
+    leftMargin[2] = 6.36e+00;
+    vector<double>rightMargin(nData);
+    rightMargin[0] = 2.3e+01;
+    rightMargin[1] = 7.67e+01;
+    rightMargin[2] = 1.251e+01;
+    for(int j=0; j<nData; j++)
+    {
+        p.AddDataField( fieldName[j], displayName[j], 
+                       "continuous", "double", "ClosedClosed", 
+                       leftMargin[j], rightMargin[j], true);
+    }      
+
+    // Add a model of type RegressionModel
+    string modelName("sREGName");
+    p.AddRegressionModel(modelName, PMMLlib::kREGRESSION, fieldName[2]);
+
+    // Set MiningSchema
+    for(int j=0 ; j<nData-1 ; j++)
+    {
+        p.AddMiningSchema(fieldName[j], "active");
+    }
+    p.AddMiningSchema(fieldName[nData-1], "predicted"); 
+
+    // Set Regression table
+    double intercept = 3.837365;
+    p.AddRegressionTable(intercept);
+
+    // Set NumericPredictor nodes
+    vector<double>numPredCoeff(nData-1);
+    numPredCoeff[0] = 0.4759134;
+    numPredCoeff[1] = 0.1428838;    
+    for(int j=0; j<nData-1; j++) 
+    {       
+        p.AddNumericPredictor(fieldName[j], 1, numPredCoeff[j]);
+    }
+    // Set PredictorTerm nodes
+    vector<double>predTermCoeff(nData-1);
+    predTermCoeff[0] = -0.02201903;
+    predTermCoeff[1] = 0.000536256;     
+    vector< vector<string> > fieldRefVect(nData-1);
+    vector<string>fieldRef0(2);
+    fieldRef0[0] = fieldName[0];
+    fieldRef0[1] = fieldName[1];
+    vector<string>fieldRef1(3);
+    fieldRef1[0] = fieldName[0];
+    fieldRef1[1] = fieldName[0];
+    fieldRef1[2] = fieldName[1];
+    fieldRefVect[0] = fieldRef0;
+    fieldRefVect[nData-2] = fieldRef1; 
+    for(int j=0; j<nData-1; j++) 
+    {
+        p.AddPredictorTerm(predTermCoeff[j], fieldRefVect[j]);
+    }  
+
+    // Generate the PMML file
+    string pmmlFilename = "myPmmlFile.pmml";
+    p.Write( pmmlFilename );
+\endverbatim
+
+
+The matching PMML file is:
+\verbatim
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="version">
+  <Header copyright="copyright" description="description">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="x6" displayName=" x_{6}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="1.100000e+01" rightMargin="2.300000e+01"/>
+    </DataField>
+    <DataField name="x8" displayName=" x_{8}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="2.810000e+01" rightMargin="7.670000e+01"/>
+    </DataField>
+    <DataField name="x1" displayName=" x_{1}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="6.360000e+00" rightMargin="1.251000e+01"/>
+    </DataField>
+  </DataDictionary>
+  <RegressionModel functionName="regression" modelName="Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="3.837365e+00">
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
+      <PredictorTerm coefficient="-2.201903e-02">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-04">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
+\endverbatim
+
+*/
diff --git a/src/pmml/doc/doxygen/doxfiles/install.dox b/src/pmml/doc/doxygen/doxfiles/install.dox
new file mode 100755 (executable)
index 0000000..5b37c32
--- /dev/null
@@ -0,0 +1,28 @@
+
+/*!
+\page pmml_install Configuring and Installing PMML from sources
+
+The libraries in SALOME PMML can be configured in several ways so that they can run inside or outside the Salome platform.
+Partitioning and parallel functionalities are also optional.
+
+The sources of the library are located in the \a PMML_SRC directory.
+The first step consists in preparing the configuration of the library :
+\verbatim
+cd ${PMML_SRC}
+./build_configure
+\endverbatim
+
+This will create the SALOME PMML libraries linked to the SALOME Kernel.
+If a standalone version of the library, usable independently of SALOME, is desired, use :
+\verbatim
+cd ${PMML_SRC}
+./build_configure --without-kernel
+\endverbatim
+
+The library can then be configured :
+\verbatim
+mkdir ../PMML_BUILD
+cd ../PMML_BUILD
+../PMML_SRC/configure --prefix=`pwd`/../PMML_INSTALL
+\endverbatim
+*/
diff --git a/src/pmml/doc/doxygen/doxfiles/intro.dox b/src/pmml/doc/doxygen/doxfiles/intro.dox
new file mode 100755 (executable)
index 0000000..5834957
--- /dev/null
@@ -0,0 +1,37 @@
+/*!
+
+\mainpage Introduction
+
+The Predictive Model Markup Language (PMML) is an XML-based file format developed by the Data Mining Group to provide a way for applications to describe and exchange models produced by data mining and machine learning algorithms.
+
+The Salome PMML module allows the handling of PMML files :
+- reading of PMML files containing one or more models,
+- creation of PMML files with one or more models,
+- support for models of types "NeuralNetwork" and "RegressionModel",
+- generation of C++, FORTRAN and Python code from a model specification.
+
+
+Furthermore, it is possible to load a model from a PMML file and generate the matching Python function through the YACS elementary node PyLoadPMML, by providing the PMML file, the model name and the model type. The output port then carries the Python function.
+
+
+<br />
+
+\section contents Contents
+This document is organized as follows:
+- Technical presentation of \ref PMMLlib
+- \ref cppexamples 
+- \ref pyexamples 
+
+
+\section install Installation
+The install procedure of the %PMMLlib SALOME module can handle a variety of configurations
+to suit the needs of its user. Instructions for configuring and
+installing the module can be found in \ref pmml_install.
+
+
+\section references References
+Here follows a list of useful references :
+
+-# Data Mining Group reference page : \c http://www.dmg.org/v4-1/GeneralStructure.html
+
+*/
diff --git a/src/pmml/doc/doxygen/doxfiles/pmml.dox b/src/pmml/doc/doxygen/doxfiles/pmml.dox
new file mode 100755 (executable)
index 0000000..7b36dc6
--- /dev/null
@@ -0,0 +1,14 @@
+/*!
+\page PMMLlib PMMLlib
+
+\section PmmlIntro Introduction
+
+PMMLlib is a library (\b libpmmlLib.so or \b pmmllib.dll) fully written in C++ and wrapped so that it can also be called from Python.
+
+The PMMLlib C++ library implements a data structure that is the result of the following tradeoff :
+
+- compliant with coupling :
+  - field definitions precise enough to perform well-defined interpolation,
+  - data exchangeable between processes, including in parallel within the SPMD paradigm.
+
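+\section PmmlSketch Minimal usage sketch
+
+As a minimal sketch of the intended C++ usage (the file names, model name and function name
+below are placeholders, not values defined by the library), a PMML file can be loaded, one of
+its models selected, and the corresponding Python function generated :
+
+\verbatim
+#include "PMMLlib.hxx"
+
+int main()
+{
+    // Load an existing PMML file and select one of its models (here a neural network)
+    PMMLlib::PMMLlib p("my_model.pmml");
+    p.SetCurrentModel("myModelName", PMMLlib::kANN);
+
+    // Generate a Python function implementing the selected model
+    p.ExportPython("my_model.py", "myFunc", "Generated from my_model.pmml");
+    return 0;
+}
+\endverbatim
+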
+*/
diff --git a/src/pmml/doc/doxygen/doxfiles/pyexamples.dox b/src/pmml/doc/doxygen/doxfiles/pyexamples.dox
new file mode 100755 (executable)
index 0000000..4a7ac61
--- /dev/null
@@ -0,0 +1,77 @@
+/*!
+\page pyexamples PMMLlib Python examples
+
+\section sectionC Update a model in an existing PMML file :
+The updating is done in two steps:
+- 1 : delete the XML node of the model with method UnlinkNode();
+- 2 : re-create the model.
+\verbatim
+p = PMMLlib( fileName, log );
+
+# Set the model
+p.SetCurrentModel( modelName, modelType );
+
+# Delete the XML node of the model
+p.UnlinkNode( );
+# Recreate the model with new parameters
+p.AddRegressionModel( "monModele", kREGRESSION, "regression" );
+p.AddDataField( … );
+
+# Save the PMML file
+p.Write( );
+\endverbatim
+
+
+
+
+\section sectionD Backup and update a model in an existing PMML file :
+It is done in two steps:
+- 1 : backup the model in an XML node with name modelName_<i> with method BackupNode();
+- 2 : re-create the model.
+\verbatim
+p = PMMLlib( fileName, log );
+
+# Set the model
+p.SetCurrentModel( "monModele", modelType );
+
+# Save the model in a new XML node
+p.BackupNode( );
+# Modify
+p.AddRegressionModel( "monModele", kREGRESSION, "regression" );
+p.AddDataField( … );
+
+# Save the PMML file
+p.Write( );
+\endverbatim
+
+
+
+\section sectionE Add a model in an existing PMML file :
+
+\verbatim
+p = PMMLlib( fileName, log );
+
+# Create the model
+p.AddRegressionModel( "monModele", kREGRESSION, "regression" );
+p.AddDataField( … );
+
+# Save the PMML file
+p.Write( );
+\endverbatim
+
+\section sectionF Read a model and execute it :
+
+\verbatim
+p = PMMLlib( fileName, log );
+p.SetCurrentModel( modelName, modelType );
+
+pyStrCode = p.ExportPyStr( "myPyFunc", "function header" ); 
+exec pyStrCode;
+
+# Eval myPyFunc which is now known as a python function
+inputs = [1.,2.,3.,4.]
+res = myPyFunc(inputs)
+\endverbatim
+
+
+*/
\ No newline at end of file
diff --git a/src/pmml/doc/doxygen/images/head.png b/src/pmml/doc/doxygen/images/head.png
new file mode 100755 (executable)
index 0000000..307d9ef
Binary files /dev/null and b/src/pmml/doc/doxygen/images/head.png differ
diff --git a/src/pmml/doc/doxygen/static/footer.html b/src/pmml/doc/doxygen/static/footer.html
new file mode 100755 (executable)
index 0000000..4137de2
--- /dev/null
@@ -0,0 +1,14 @@
+<!-- HTML footer for doxygen 1.8.3.1-->
+<!--BEGIN GENERATE_TREEVIEW-->
+<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
+  <ul>
+    $navpath
+    <li class="footer">
+      Copyright &copy; 2007-2013  CEA/DEN, EDF R&amp;D, OPEN CASCADE<br>
+      Copyright &copy; 2003-2007  OPEN CASCADE, EADS/CCR, LIP6, CEA/DEN, CEDRAT, EDF R&amp;D, LEG, PRINCIPIA R&amp;D, BUREAU VERITAS<br>
+    </li>
+  </ul>
+</div>
+<!--END GENERATE_TREEVIEW-->
+</body>
+</html>
diff --git a/src/pmml/doc/doxygen/static/header.html.in b/src/pmml/doc/doxygen/static/header.html.in
new file mode 100755 (executable)
index 0000000..cd93290
--- /dev/null
@@ -0,0 +1,23 @@
+<!-- HTML header for doxygen 1.8.3.1-->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
+<meta http-equiv="X-UA-Compatible" content="IE=9"/>
+<meta name="generator" content="Doxygen $doxygenversion"/>
+<!--BEGIN PROJECT_NAME--><title>$projectname: $title</title><!--END PROJECT_NAME-->
+<!--BEGIN !PROJECT_NAME--><title>$title</title><!--END !PROJECT_NAME-->
+<link href="$relpath^tabs.css" rel="stylesheet" type="text/css"/>
+<script type="text/javascript" src="$relpath^jquery.js"></script>
+<script type="text/javascript" src="$relpath^dynsections.js"></script>
+$treeview
+$search
+$mathjax
+<link href="$relpath^$stylesheet" rel="stylesheet" type="text/css" />
+$extrastylesheet
+</head>
+<body>
+<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
+
+<div id="titlearea"><div align="right"><div class="version">Version: @VERSION@</div></div></div>
+<!-- end header part -->
diff --git a/src/pmml/doc/doxygen/static/salome_extra.css b/src/pmml/doc/doxygen/static/salome_extra.css
new file mode 100755 (executable)
index 0000000..3e8b838
--- /dev/null
@@ -0,0 +1,29 @@
+/* The extra CSS for doxygen 1.8.3.1 */
+
+#titlearea {
+        background-image:url('head.png');
+        background-color: #175783;
+        border: 1px solid;
+        height: 80px;
+        background-repeat: no-repeat;
+       padding: 0px;
+       margin: 0px;
+       width: 99.9%;
+       border-bottom: 1px solid #5373B4;
+}
+
+div.version {
+       border:1px solid #0000FF;
+        color: #CCCCCC;
+       font-family: Arial, Helvetica, sans-serif;
+       font-size: 9pt;
+       text-align: center;
+       width:100px;
+       -moz-border-radius: 8px;
+       margin: 5px;
+}
+
+.navpath li.footer {
+       line-height:15px;
+       text-align: right;
+}
\ No newline at end of file
diff --git a/src/pmml/pmml_swig/CMakeLists.txt b/src/pmml/pmml_swig/CMakeLists.txt
new file mode 100755 (executable)
index 0000000..d243dc0
--- /dev/null
@@ -0,0 +1,68 @@
+# Copyright (C) 2012-2013  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+
+INCLUDE(${SWIG_USE_FILE})
+
+ADD_DEFINITIONS(${PYTHON_DEFINITIONS})
+
+SET_SOURCE_FILES_PROPERTIES(PMMLsalome.i PROPERTIES CPLUSPLUS ON)
+SET_SOURCE_FILES_PROPERTIES(PMMLsalome.i PROPERTIES SWIG_DEFINITIONS "-shadow")
+IF(NUMPY_FOUND)
+  SET(SWIG_MODULE_PMML_EXTRA_FLAGS -DWITH_NUMPY)
+ENDIF(NUMPY_FOUND)
+
+SET (PMML_SWIG_DPYS_FILES
+    PMMLsalome.i
+    PMML.i)
+
+INCLUDE_DIRECTORIES(
+  ${PYTHON_INCLUDE_DIRS}
+  ${PTHREAD_INCLUDE_DIR} # pthread dependency due to python2.7 library
+  ${CMAKE_CURRENT_SOURCE_DIR}
+  ${CMAKE_CURRENT_BINARY_DIR}
+  ${CMAKE_CURRENT_SOURCE_DIR}/..
+  )
+
+# _ABR_ Ensure dependency mechanism on all SWIG files and headers
+SET (SWIG_MODULE_PMML_EXTRA_DEPS ${PMML_SWIG_DPYS_FILES} 
+    ${pmml_HEADERS_HXX} ${pmml_HEADERS_TXX})
+
+SWIG_ADD_MODULE(PMML python PMMLsalome.i)
+SWIG_LINK_LIBRARIES(PMML ${PYTHON_LIBRARIES} ${PLATFORM_LIBS} pmmlLib)
+
+
+# _ABR_ Ensure dependency mechanism on all SWIG files and headers
+
+IF(WIN32)
+  SET_TARGET_PROPERTIES(_PMML PROPERTIES DEBUG_OUTPUT_NAME _PMML_d)
+ENDIF(WIN32)
+
+
+INSTALL(TARGETS ${SWIG_MODULE_PMML_REAL_NAME} DESTINATION ${SALOME_INSTALL_PYTHON})
+
+SET(PYFILES_TO_INSTALL ${CMAKE_CURRENT_BINARY_DIR}/PMML.py)
+
+INSTALL_AND_COMPILE_PYTHON_FILE("${PYFILES_TO_INSTALL}" ${SALOME_INSTALL_SCRIPT_PYTHON})
+
+INSTALL(FILES PMML.i PMMLsalome.i DESTINATION ${SALOME_INSTALL_HEADERS})
+INSTALL(FILES PMMLBasicsTest.py DESTINATION ${SALOME_INSTALL_SCRIPT_PYTHON})
+
+ADD_TEST(PMMLBasicsTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/PMMLBasicsTest.py)
+
diff --git a/src/pmml/pmml_swig/PMML.i b/src/pmml/pmml_swig/PMML.i
new file mode 100755 (executable)
index 0000000..b9dc6c4
--- /dev/null
@@ -0,0 +1,61 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+%include exception.i 
+
+%{
+#include "PMMLlib.hxx"
+%}
+
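+// Map the std::string exceptions thrown by PMMLlib onto a Python RuntimeError,
+// so that failures can be caught on the Python side (see PMMLBasicsTest.py).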
+%exception { 
+    try {
+        $action
+    } catch (std::string &e) {
+        std::string s("PMMLlib error: "), s2(e);
+        s = s + s2;
+        SWIG_exception(SWIG_RuntimeError, s.c_str());
+    } catch (...) {
+        SWIG_exception(SWIG_RuntimeError, "unknown exception");
+    }
+}
+
+namespace PMMLlib
+{
+
+enum PMMLType{kUNDEFINED, kANN, kLR};
+
+class PMMLlib
+{
+  public:
+    PMMLlib(bool log=false);
+    PMMLlib(std::string file, bool log=false);
+    ~PMMLlib();
+    void SetCurrentModel(std::string modelName, PMMLType type);
+    std::string ExportPyStr(std::string functionName, std::string header); 
+    void ExportPython(std::string file, std::string functionName, std::string header);
+    void ExportCpp(std::string file, std::string functionName, std::string header);
+    void Write();
+    void Write(std::string file);
+};
+
+}
+
+
+
diff --git a/src/pmml/pmml_swig/PMMLBasicsTest.py b/src/pmml/pmml_swig/PMMLBasicsTest.py
new file mode 100755 (executable)
index 0000000..59a4a18
--- /dev/null
@@ -0,0 +1,86 @@
+# -*- coding: utf-8 -*-
+
+# Salome imports
+from PMML import PMMLlib, kANN, kLR
+
+# Python imports
+import unittest
+import exceptions
+from exceptions import RuntimeError
+import os
+import shutil
+
+class PMMLBasicsTest(unittest.TestCase):
+
+    def setUp(self):
+        pmmlRootDir = os.getenv("YACS_ROOT_DIR");
+        self.resourcesDir = os.path.join(pmmlRootDir,"share","salome","resources","pmml");
+        self.resourcesDir += os.sep ;
+        self.tmpDir = "/tmp/";
+        self.tmpDir += os.environ['LOGNAME']; # ("USER");
+        self.tmpDir += "/PmmlUnitTest/";
+        if ( not os.path.exists(self.tmpDir) ):
+            os.mkdir(self.tmpDir);
+            pass
+        pass
+
+    def tearDown(self):
+        if ( os.path.exists(self.tmpDir) ):
+            shutil.rmtree(self.tmpDir);
+            pass 
+        pass
+
+    def testExportPythonNeuralNet(self):
+        pmmlFile = self.resourcesDir + "ann_model.pmml";
+        model = "sANNName";
+        exportPyScript = self.tmpDir + "swigTestExportPythonNeuralNet.py";
+        refPyFilename = self.resourcesDir + "unittest_ref_ann_model.py";
+        refLines = file(refPyFilename).readlines(); 
+        #
+        p = PMMLlib( pmmlFile );
+        p.SetCurrentModel( model, kANN );
+        p.ExportPython( exportPyScript, "myTestFunc", 
+                        "File used by unit test\n PMMLBasicsTest1::testExportNeuralNetworkPython" );
+        myLines = file(exportPyScript).readlines();
+        self.assertEqual( len(myLines), len(refLines) );
+        for (i,line) in enumerate(myLines):
+            self.assertEqual( line, refLines[i] );
+            pass
+        pass
+  
+    def testExportPythonRegression(self):
+        pmmlFile = self.resourcesDir + "lr_model.pmml";
+        model = "Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]";
+        exportPyScript = self.tmpDir + "swigTestExportPythonRegression.py";
+        refPyFilename = self.resourcesDir + "unittest_ref_lr_model.py";
+        refLines = file(refPyFilename).readlines(); 
+        #
+        p = PMMLlib( pmmlFile );
+        p.SetCurrentModel( model, kLR );
+        p.ExportPython( exportPyScript, "myTestFunc", 
+                               "File used by unit test\n PMMLBasicsTest1::testExportLinearRegressionPython" );
+        myLines = file(exportPyScript).readlines();
+        self.assertEqual( len(myLines), len(refLines) );
+        for (i,line) in enumerate(myLines):
+            self.assertEqual( line, refLines[i] );
+            pass
+        pass
+
+    def testPmmlFileNotReadable(self):
+        self.assertRaises( RuntimeError, PMMLlib, "0.mml" );
+        pass
+
+    def testPmmlWrongModelType(self):
+        pmmlFile = self.resourcesDir + "ann_model.pmml";
+        model = "sANNName";
+        p = PMMLlib(pmmlFile);
+        self.assertRaises( RuntimeError, p.SetCurrentModel, model, kLR );
+        pass
+
+    def testPmmlFileNotWritable(self):
+        p = PMMLlib();
+        self.assertRaises( RuntimeError, p.Write );
+        pass
+    pass
+
+unittest.main()
diff --git a/src/pmml/pmml_swig/PMMLsalome.i b/src/pmml/pmml_swig/PMMLsalome.i
new file mode 100755 (executable)
index 0000000..d2d182c
--- /dev/null
@@ -0,0 +1,40 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+%module PMML
+
+%include std_vector.i
+%include std_string.i
+
+
+%template(ivec) std::vector<int>;
+%template(dvec) std::vector<double>;
+%template(svec) std::vector<std::string>;
+
+%include "PMML.i"
+
+
+%pythoncode %{
+import os
+__filename=os.environ.get('PYTHONSTARTUP')
+if __filename and os.path.isfile(__filename):
+  execfile(__filename)
+  pass
+%}
diff --git a/src/pmml/resources/CMakeLists.txt b/src/pmml/resources/CMakeLists.txt
new file mode 100755 (executable)
index 0000000..8be747b
--- /dev/null
@@ -0,0 +1,45 @@
+# Copyright (C) 2012-2013  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+SET(PMML_RESOURCES_FILES
+  # names of the resource files
+  ann_model.pmml
+  ann_model_2.pmml
+  lr_model.pmml
+  lr_model_2.pmml
+  no_model.pmml
+  two_models_ann_lr.pmml
+  unittest_ref_ann_model.cpp
+  unittest_ref_ann_model.f
+  unittest_ref_ann_model.py
+  unittest_ref_lr_model.cpp
+  unittest_ref_lr_model.f
+  unittest_ref_lr_model.py
+  win32_ann_model.pmml
+  win32_lr_model.pmml 
+  )
+
+INSTALL(FILES ${PMML_RESOURCES_FILES} DESTINATION ${SALOME_PMML_INSTALL_RES_DATA})
+
+# MESSAGE(STATUS "Creation of ${CMAKE_CURRENT_BINARY_DIR}/PMMLCatalog.xml")
+# CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/PMMLCatalog.xml.in ${CMAKE_CURRENT_BINARY_DIR}/PMMLCatalog.xml @ONLY)
+# MESSAGE(STATUS "Creation of ${CMAKE_CURRENT_BINARY_DIR}/SalomeApp.xml")
+# CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/SalomeApp.xml.in ${CMAKE_CURRENT_BINARY_DIR}/SalomeApp.xml @ONLY)
+
+# INSTALL(FILES ${CMAKE_CURRENT_BINARY_DIR}/PMMLCatalog.xml ${CMAKE_CURRENT_BINARY_DIR}/SalomeApp.xml DESTINATION ${SALOME_PMML_INSTALL_RES_DATA})
diff --git a/src/pmml/resources/ann_model.pmml b/src/pmml/resources/ann_model.pmml
new file mode 100755 (executable)
index 0000000..bd4d4e8
--- /dev/null
@@ -0,0 +1,124 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="sANNName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LinearNorm orig="0.000000e+00" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
+            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+00">
+        <Con from="0" weight="7.536629e-01"/>
+        <Con from="1" weight="1.653660e-03"/>
+        <Con from="2" weight="4.725001e-03"/>
+        <Con from="3" weight="9.969786e-03"/>
+        <Con from="4" weight="1.787976e-01"/>
+        <Con from="5" weight="-1.809809e-01"/>
+        <Con from="6" weight="-1.735688e-01"/>
+        <Con from="7" weight="8.559675e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+00">
+        <Con from="8" weight="6.965512e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+</PMML>
diff --git a/src/pmml/resources/ann_model_2.pmml b/src/pmml/resources/ann_model_2.pmml
new file mode 100755 (executable)
index 0000000..c64b4fb
--- /dev/null
@@ -0,0 +1,132 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-3_0" version="3.0">
+  <Header copyright="texte copyright" description="texte description">
+    <Application name="Uranie" version="2.3/1"/>
+    <Annotation>date Fri Oct 07, 2011</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="sANNName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LinearNorm orig="0" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="2" norm="1."/>
+            <LinearNorm orig="1.400018e+03" norm="-1."/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+00">
+        <Con from="0" weight="7.536629e-01"/>
+        <Con from="1" weight="1.653660e-03"/>
+        <Con from="2" weight="4.725001e-03"/>
+        <Con from="3" weight="9.969786e-03"/>
+        <Con from="4" weight="1.787976e-01"/>
+        <Con from="5" weight="-1.809809e-01"/>
+        <Con from="6" weight="-1.735688e-01"/>
+        <Con from="7" weight="8.559675e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+00">
+        <Con from="8" weight="6.965512e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+      <NeuralOutput outputNeuron="-1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="2." norm="-1"/>
+            <LinearNorm orig="5.781171e+01" norm="1"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+</PMML>
diff --git a/src/pmml/resources/lr_model.pmml b/src/pmml/resources/lr_model.pmml
new file mode 100755 (executable)
index 0000000..fae9d26
--- /dev/null
@@ -0,0 +1,38 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="x6" displayName=" x_{6}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="1.100000e+01" rightMargin="2.300000e+01"/>
+    </DataField>
+    <DataField name="x8" displayName=" x_{8}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="2.810000e+01" rightMargin="7.670000e+01"/>
+    </DataField>
+    <DataField name="x1" displayName=" x_{1}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="6.360000e+00" rightMargin="1.251000e+01"/>
+    </DataField>
+  </DataDictionary>
+  <RegressionModel functionName="regression" modelName="Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="3.837365e+00">
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
+      <PredictorTerm coefficient="-2.201903e-02">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-04">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
diff --git a/src/pmml/resources/lr_model_2.pmml b/src/pmml/resources/lr_model_2.pmml
new file mode 100755 (executable)
index 0000000..2f1ef6b
--- /dev/null
@@ -0,0 +1,38 @@
+<?xml version="1.0"?>
+<PMML version="4.1" xmlns="http://www.dmg.org/PMML-4_1">
+  <Header copyright="myCopyright" description="Text Description">
+    <Application name="Uranie" version="2013.7/18"/>
+    <Annotation>Compilation date : Wed Jul 17, 2013</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="x6" displayName=" x_{6}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="1.100000e+01" rightMargin="2.300000e+01"/>
+    </DataField>
+    <DataField name="x8" displayName=" x_{8}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="2.810000e+01" rightMargin="7.670000e+01"/>
+    </DataField>
+    <DataField name="x1" displayName=" x_{1}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="6.360000e+00" rightMargin="1.251000e+01"/>
+    </DataField>
+  </DataDictionary>
+  <RegressionModel functionName="regression" modelName="Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable>
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
+      <PredictorTerm coefficient="-2.201903e-02">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-04">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
diff --git a/src/pmml/resources/no_model.pmml b/src/pmml/resources/no_model.pmml
new file mode 100755 (executable)
index 0000000..3951518
--- /dev/null
@@ -0,0 +1,144 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="modelName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LineakLRAndkANNrNorm orig="0.000000e+00" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
+            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+00">
+        <Con from="0" weight="7.536629e-01"/>
+        <Con from="1" weight="1.653660e-03"/>
+        <Con from="2" weight="4.725001e-03"/>
+        <Con from="3" weight="9.969786e-03"/>
+        <Con from="4" weight="1.787976e-01"/>
+        <Con from="5" weight="-1.809809e-01"/>
+        <Con from="6" weight="-1.735688e-01"/>
+        <Con from="7" weight="8.559675e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+00">
+        <Con from="8" weight="6.965512e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+  <RegressionModel functionName="regression" modelName="modelName" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="3.837365e+00">
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
+      <PredictorTerm coefficient="-2.201903e-02">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-04">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
diff --git a/src/pmml/resources/two_models_ann_lr.pmml b/src/pmml/resources/two_models_ann_lr.pmml
new file mode 100755 (executable)
index 0000000..3951518
--- /dev/null
@@ -0,0 +1,144 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="modelName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LineakLRAndkANNrNorm orig="0.000000e+00" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
+            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+00">
+        <Con from="0" weight="7.536629e-01"/>
+        <Con from="1" weight="1.653660e-03"/>
+        <Con from="2" weight="4.725001e-03"/>
+        <Con from="3" weight="9.969786e-03"/>
+        <Con from="4" weight="1.787976e-01"/>
+        <Con from="5" weight="-1.809809e-01"/>
+        <Con from="6" weight="-1.735688e-01"/>
+        <Con from="7" weight="8.559675e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+00">
+        <Con from="8" weight="6.965512e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+  <RegressionModel functionName="regression" modelName="modelName" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="3.837365e+00">
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-01"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-01"/>
+      <PredictorTerm coefficient="-2.201903e-02">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-04">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
diff --git a/src/pmml/resources/unittest_ref_ann_model.cpp b/src/pmml/resources/unittest_ref_ann_model.cpp
new file mode 100755 (executable)
index 0000000..643308c
--- /dev/null
@@ -0,0 +1,67 @@
+#define ActivationFunction(sum) ( 1.0 / ( 1.0 + exp( -1.0 * sum )) )
+void myTestFunc(double *param, double *res)
+{
+  ////////////////////////////// 
+  //
+  // File used by unit test
+  // PMMLBasicsTest1::testExportNeuralNetworkCpp
+  //
+  ////////////////////////////// 
+
+  int nInput   = 8;
+  int nOutput   = 1;
+  int nHidden  = 1;
+  const int nNeurones  = 10;
+  double myTestFunc_act[nNeurones];
+
+  // --- Preprocessing of the inputs and outputs
+  double myTestFunc_minInput[] = {
+  0.099999, 25048.9, 89334.9, 89.5523, 1050, 
+  760.001, 1400.02, 10950, 
+  };
+  double myTestFunc_minOutput[] = {
+  77.8117,   };
+  double myTestFunc_maxInput[] = {
+  0.028899, 14419.8, 15180.8, 15.2866, 34.6793, 
+  34.6718, 161.826, 632.913, 
+  };
+  double myTestFunc_maxOutput[] = {
+  45.7061,   };
+
+  // --- Values of the weights
+  double myTestFunc_valW[] = {
+  -1.74548, 6.96551, -1.26357, 0.753663, 0.00165366, 
+  0.004725, 0.00996979, 0.178798, -0.180981, -0.173569, 
+  0.0855967, 
+  };
+  // --- Constants
+  int indNeurone = 0;
+  int CrtW;
+  double sum;
+
+  // --- Input Layers
+  for(int i = 0; i < nInput; i++) {
+     myTestFunc_act[indNeurone++] = ( param[i] - myTestFunc_minInput[i] ) / myTestFunc_maxInput[i];
+  }
+
+  // --- Hidden Layers
+  for (int member = 0; member < nHidden; member++) {
+     int CrtW = member * ( nInput + 2) + 2;
+     sum = myTestFunc_valW[CrtW++];
+     for (int source = 0; source < nInput; source++) {
+         sum += myTestFunc_act[source] * myTestFunc_valW[CrtW++];
+       }
+       myTestFunc_act[indNeurone++] = ActivationFunction(sum);
+  }
+
+  // --- Output
+  for (int member = 0; member < nOutput; member++) {
+    sum = myTestFunc_valW[0];
+    for (int source = 0; source < nHidden; source++) {
+      CrtW = source * ( nInput + 2) + 1;
+      sum += myTestFunc_act[nInput+source] * myTestFunc_valW[CrtW];
+    }
+    myTestFunc_act[indNeurone++] = sum;
+    res[member] = myTestFunc_minOutput[member] + myTestFunc_maxOutput[member] * sum;
+  }
+}
diff --git a/src/pmml/resources/unittest_ref_ann_model.f b/src/pmml/resources/unittest_ref_ann_model.f
new file mode 100755 (executable)
index 0000000..7996d31
--- /dev/null
@@ -0,0 +1,64 @@
+      SUBROUTINE myTestFunc(rw,r,tu,tl,hu,hl,l,kw,yhat)
+C --- *********************************************
+C --- 
+C ---  File used by unit test
+C ---  PMMLBasicsTest1::testExportNeuralNetworkFortran
+C --- 
+C --- *********************************************
+      IMPLICIT DOUBLE PRECISION (V)
+      DOUBLE PRECISION rw
+      DOUBLE PRECISION r
+      DOUBLE PRECISION tu
+      DOUBLE PRECISION tl
+      DOUBLE PRECISION hu
+      DOUBLE PRECISION hl
+      DOUBLE PRECISION l
+      DOUBLE PRECISION kw
+      DOUBLE PRECISION yhat
+
+C --- Preprocessing of the inputs
+      VXNrw = ( rw - 0.099999D0 ) / 0.028899D0
+      VXNr = ( r - 25048.9D0 ) / 14419.8D0
+      VXNtu = ( tu - 89334.9D0 ) / 15180.8D0
+      VXNtl = ( tl - 89.5523D0 ) / 15.2866D0
+      VXNhu = ( hu - 1050D0 ) / 34.6793D0
+      VXNhl = ( hl - 760.001D0 ) / 34.6718D0
+      VXNl = ( l - 1400.02D0 ) / 161.826D0
+      VXNkw = ( kw - 10950D0 ) / 632.913D0
+
+C --- Values of the weights
+      VW1 = -1.74548
+      VW2 = 6.96551
+      VW3 = -1.26357
+      VW4 = 0.753663
+      VW5 = 0.00165366
+      VW6 = 0.004725
+      VW7 = 0.00996979
+      VW8 = 0.178798
+      VW9 = -0.180981
+      VW10 = -0.173569
+      VW11 = 0.0855967
+
+C --- hidden neural number 1
+      VAct1 = VW3
+     1      + VW4 * VXNrw
+     1      + VW5 * VXNr
+     1      + VW6 * VXNtu
+     1      + VW7 * VXNtl
+     1      + VW8 * VXNhu
+     1      + VW9 * VXNhl
+     1      + VW10 * VXNl
+     1      + VW11 * VXNkw
+
+      VPot1 = 1.D0 / (1.D0 + DEXP(-1.D0 * VAct1))
+
+C --- Output
+      VOut = VW1
+     1    + VW2 * VPot1
+
+C --- Post-processing of the output
+      yhat = 77.8117D0 + 45.7061D0 * VOut;
+
+C --- 
+      RETURN
+      END
diff --git a/src/pmml/resources/unittest_ref_ann_model.py b/src/pmml/resources/unittest_ref_ann_model.py
new file mode 100755 (executable)
index 0000000..2a1f5e5
--- /dev/null
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from math import tanh, exp
+
+def ActivationFunction(sum): 
+    return ( 1.0 / ( 1.0 + exp( -1.0 * sum ) ) ); 
+
+def myTestFunc(param):
+
+    ############################## 
+    #
+    # File used by unit test
+    # PMMLBasicsTest1::testExportNeuralNetworkPython
+    #
+    ############################## 
+
+    nInput = 8;
+    nOutput = 1;
+    nHidden = 1;
+    nNeurones = 10;
+    myTestFunc_act = [];
+    res = [];
+
+    # --- Preprocessing of the inputs and outputs
+    myTestFunc_minInput = [
+      0.099999, 25048.9, 89334.9, 89.5523, 1050, 
+    760.001, 1400.02, 10950, 
+    ];
+    myTestFunc_minOutput = [
+        77.8117
+    ];
+    myTestFunc_maxInput = [
+    0.028899, 14419.8, 15180.8, 15.2866, 34.6793, 
+    34.6718, 161.826, 632.913, 
+    ];
+    myTestFunc_maxOutput = [
+        45.7061
+    ];
+    # --- Values of the weights
+    myTestFunc_valW = [
+    -1.74548, 6.96551, -1.26357, 0.753663, 0.00165366, 
+    0.004725, 0.00996979, 0.178798, -0.180981, -0.173569, 
+    0.0855967, 
+    ];
+    # --- Constants
+    indNeurone = 0;
+
+    # --- Input Layers
+    for i in range(nInput) :
+        myTestFunc_act.append( ( param[i] - myTestFunc_minInput[i] ) / myTestFunc_maxInput[i] ) ;
+        indNeurone += 1 ;
+        pass
+
+    # --- Hidden Layers
+    for member in range(nHidden):
+        CrtW = member * ( nInput + 2) + 2;
+        sum = myTestFunc_valW[CrtW];
+        CrtW += 1 ;
+        for source in range(nInput) :
+            sum += myTestFunc_act[source] * myTestFunc_valW[CrtW];
+            CrtW += 1 ;
+            pass
+        myTestFunc_act.append( ActivationFunction(sum) ) ;
+        indNeurone += 1 ;
+        pass
+
+    # --- Output
+    for member in range(nOutput):
+        sum = myTestFunc_valW[0];
+        for source in range(nHidden):
+            CrtW = source * ( nInput + 2) + 1;
+            sum += myTestFunc_act[nInput+source] * myTestFunc_valW[CrtW];
+            pass
+        myTestFunc_act.append( sum );
+        indNeurone += 1 ;
+        res.append( myTestFunc_minOutput[member] + myTestFunc_maxOutput[member] * sum );
+        pass
+
+    return res;
+
+
diff --git a/src/pmml/resources/unittest_ref_lr_model.cpp b/src/pmml/resources/unittest_ref_lr_model.cpp
new file mode 100755 (executable)
index 0000000..1072f46
--- /dev/null
@@ -0,0 +1,27 @@
+void myTestFunc(double *param, double *res)
+{
+  ////////////////////////////// 
+  //
+  // File used by unit test
+  // PMMLBasicsTest1::testExportLinearRegressionCpp
+  //
+  ////////////////////////////// 
+
+  // Intercept
+  double y = 3.83737;
+
+  // Attribute : x6
+  y += param[0]*0.475913;
+
+  // Attribute : x8
+  y += param[1]*0.142884;
+
+  // Attribute : x6x8
+  y += param[2]*-0.022019;
+
+  // Attribute : x6x6x8
+  y += param[3]*0.000536256;
+
+  // Return the value
+  res[0] = y;
+}
diff --git a/src/pmml/resources/unittest_ref_lr_model.f b/src/pmml/resources/unittest_ref_lr_model.f
new file mode 100755 (executable)
index 0000000..7e60a97
--- /dev/null
@@ -0,0 +1,31 @@
+      SUBROUTINE myTestFunc(P0, P1, P2, P3, RES)
+C --- *********************************************
+C --- 
+C ---  File used by unit test
+C ---  PMMLBasicsTest1::testExportLinearRegressionFortran
+C --- 
+C --- *********************************************
+
+      IMPLICIT DOUBLE PRECISION (P)
+      DOUBLE PRECISION RES
+      DOUBLE PRECISION Y
+
+C --- Intercept
+      Y = 3.83737
+
+C --- Attribute : x6
+      Y = Y + P0*0.475913
+
+C --- Attribute : x8
+      Y = Y + P1*0.142884
+
+C --- Attribute : x6x8
+      Y = Y + P2*(-0.022019)
+
+C --- Attribute : x6x6x8
+      Y = Y + P3*0.000536256
+
+C --- Return the value
+      RES = Y 
+      RETURN
+      END
diff --git a/src/pmml/resources/unittest_ref_lr_model.py b/src/pmml/resources/unittest_ref_lr_model.py
new file mode 100755 (executable)
index 0000000..5dbea2c
--- /dev/null
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+def myTestFunc(param):
+
+    ############################## 
+    # 
+    # File used by unit test
+    # PMMLBasicsTest1::testExportLinearRegressionPython
+    # 
+    ############################## 
+
+    #  Intercept
+    y = 3.83737;
+
+    #  Attribute : x6
+    y += param[0]*0.475913;
+
+    #  Attribute : x8
+    y += param[1]*0.142884;
+
+    #  Attribute : x6x8
+    y += param[2]*-0.022019;
+
+    #  Attribute : x6x6x8
+    y += param[3]*0.000536256;
+
+    #  Return the value
+    return [y];
diff --git a/src/pmml/resources/win32_ann_model.pmml b/src/pmml/resources/win32_ann_model.pmml
new file mode 100755 (executable)
index 0000000..ae32e3a
--- /dev/null
@@ -0,0 +1,124 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="float"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="float"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="float"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="float"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="float"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="float"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="float"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="float"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="float"/>
+  </DataDictionary>
+  <NeuralNetwork modelName="sANNName" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+000" norm="-2.889932e-001"/>
+            <LinearNorm orig="9.999901e-002" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+000" norm="-5.756638e-001"/>
+            <LinearNorm orig="2.504894e+004" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+000" norm="-1.699313e-001"/>
+            <LinearNorm orig="8.933486e+004" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+000" norm="-1.707007e-001"/>
+            <LinearNorm orig="8.955232e+001" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+000" norm="-3.302777e-002"/>
+            <LinearNorm orig="1.050003e+003" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LinearNorm orig="0.000000e+000" norm="-4.562070e-002"/>
+            <LinearNorm orig="7.600007e+002" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+000" norm="-1.155882e-001"/>
+            <LinearNorm orig="1.400018e+003" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+000" norm="-5.780019e-002"/>
+            <LinearNorm orig="1.095001e+004" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="-1.263572e+000">
+        <Con from="0" weight="7.536629e-001"/>
+        <Con from="1" weight="1.653660e-003"/>
+        <Con from="2" weight="4.725001e-003"/>
+        <Con from="3" weight="9.969786e-003"/>
+        <Con from="4" weight="1.787976e-001"/>
+        <Con from="5" weight="-1.809809e-001"/>
+        <Con from="6" weight="-1.735688e-001"/>
+        <Con from="7" weight="8.559675e-002"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="-1.745483e+000">
+        <Con from="8" weight="6.965512e+000"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+000" norm="-5.873935e-001"/>
+            <LinearNorm orig="7.781171e+001" norm="0.000000e+000"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+</PMML>
diff --git a/src/pmml/resources/win32_lr_model.pmml b/src/pmml/resources/win32_lr_model.pmml
new file mode 100755 (executable)
index 0000000..afc0b14
--- /dev/null
@@ -0,0 +1,38 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Tests unitaires">
+    <Application name="PMMLlib" version="myVersion"/>
+    <Annotation>Tests unitaires PMMLlib</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="x6" displayName=" x_{6}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="1.100000e+001" rightMargin="2.300000e+001"/>
+    </DataField>
+    <DataField name="x8" displayName=" x_{8}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="2.810000e+001" rightMargin="7.670000e+001"/>
+    </DataField>
+    <DataField name="x1" displayName=" x_{1}" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="6.360000e+000" rightMargin="1.251000e+001"/>
+    </DataField>
+  </DataDictionary>
+  <RegressionModel functionName="regression" modelName="Modeler[LinearRegression]Tds[steamplant]Predictor[x6:x8:x6x8:x6x6x8]Target[x1]" targetFieldName="x1">
+    <MiningSchema>
+      <MiningField name="x6" usageType="active"/>
+      <MiningField name="x8" usageType="active"/>
+      <MiningField name="x1" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="3.837365e+000">
+      <NumericPredictor name="x6" exponent="1" coefficient="4.759134e-001"/>
+      <NumericPredictor name="x8" exponent="1" coefficient="1.428838e-001"/>
+      <PredictorTerm coefficient="-2.201903e-002">
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+      <PredictorTerm coefficient="5.362560e-004">
+        <FieldRef field="x6"/>
+        <FieldRef field="x6"/>
+        <FieldRef field="x8"/>
+      </PredictorTerm>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
diff --git a/src/yacsloader/pmml/BasicMainTest.hxx b/src/yacsloader/pmml/BasicMainTest.hxx
new file mode 100755 (executable)
index 0000000..9bff03f
--- /dev/null
@@ -0,0 +1,96 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef _BASICMAINTEST_HXX_
+#define _BASICMAINTEST_HXX_
+
+#include <cppunit/CompilerOutputter.h>
+#include <cppunit/TestResult.h>
+#include <cppunit/TestResultCollector.h>
+#include <cppunit/TextTestProgressListener.h>
+#include <cppunit/BriefTestProgressListener.h>
+#include <cppunit/extensions/TestFactoryRegistry.h>
+#include <cppunit/TestRunner.h>
+#include <stdexcept>
+
+#include <iostream>
+#include <fstream>
+
+#ifndef WIN32
+#include <fpu_control.h>
+#endif
+
+// ============================================================================
+/*!
+ *  Main program source for unit tests with the CppUnit package. It does not
+ *  depend on the actual tests, so the same main is used for all partial unit tests.
+ */
+// ============================================================================
+
+int main(int argc, char* argv[])
+{
+#ifndef WIN32
+  fpu_control_t cw = _FPU_DEFAULT & ~(_FPU_MASK_IM | _FPU_MASK_ZM | _FPU_MASK_OM);
+  _FPU_SETCW(cw);
+#endif
+  // --- Create the event manager and test controller
+  CPPUNIT_NS::TestResult controller;
+
+  // ---  Add a listener that collects test result
+  CPPUNIT_NS::TestResultCollector result;
+  controller.addListener( &result );        
+
+  // ---  Add a listener that prints dots as the tests run.
+#ifdef WIN32
+  CPPUNIT_NS::TextTestProgressListener progress;
+#else
+  CPPUNIT_NS::BriefTestProgressListener progress;
+#endif
+  controller.addListener( &progress );      
+
+  // ---  Get the top level suite from the registry
+
+  CPPUNIT_NS::Test *suite =
+    CPPUNIT_NS::TestFactoryRegistry::getRegistry().makeTest();
+
+  // ---  Add the test to the list of tests to run
+
+  CPPUNIT_NS::TestRunner runner;
+  runner.addTest( suite );
+  runner.run( controller);
+
+  // ---  Print test results in a compiler-compatible format.
+
+  std::ofstream testFile;
+  testFile.open("UnitTestsResult", std::ios::out |  std::ios::trunc);
+  //CPPUNIT_NS::CompilerOutputter outputter( &result, std::cerr );
+  CPPUNIT_NS::CompilerOutputter outputter( &result, testFile );
+  outputter.write(); 
+
+  // ---  Collect the overall result.
+
+  bool wasSuccessful = result.wasSuccessful();
+  testFile.close();
+
+  // ---  Return error code 1 if one of the tests failed.
+
+  return wasSuccessful ? 0 : 1;
+}
+
+#endif
diff --git a/src/yacsloader/pmml/CMakeLists.txt b/src/yacsloader/pmml/CMakeLists.txt
new file mode 100755 (executable)
index 0000000..ebd32ad
--- /dev/null
@@ -0,0 +1,54 @@
+# Copyright (C) 2012-2013  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author : InckA
+
+ADD_DEFINITIONS(${CPPUNIT_DEFINITIONS})
+
+
+INCLUDE_DIRECTORIES(
+  ${CPPUNIT_INCLUDE_DIRS}
+  ${PTHREADS_INCLUDE_DIRS}
+  ${CMAKE_CURRENT_SOURCE_DIR}/
+  ${CMAKE_CURRENT_SOURCE_DIR}/..
+  ${CMAKE_CURRENT_SOURCE_DIR}/../../bases
+  ${CMAKE_CURRENT_SOURCE_DIR}/../../bases/Test
+  ${CMAKE_CURRENT_SOURCE_DIR}/../../engine
+  ${CMAKE_CURRENT_SOURCE_DIR}/../../runtime
+  ${CMAKE_CURRENT_SOURCE_DIR}/../../pmml
+  )
+
+
+SET(TestYACSPMML_SOURCES
+  TestYACSPMML.cxx
+  YACSPMMLBasicsTest1.cxx
+  )
+
+ADD_EXECUTABLE(TestYACSPMML ${TestYACSPMML_SOURCES})
+
+
+TARGET_LINK_LIBRARIES(TestYACSPMML pmmlLib   
+                      YACSloader
+                      YACSRuntimeSALOME
+                      YACSlibEngine
+                      YACSBases 
+                      ${CPPUNIT_LIBRARIES} ${PLATFORM_LIBS})
+
+ADD_TEST(TestYACSPMML TestYACSPMML)
+
+INSTALL(TARGETS TestYACSPMML DESTINATION ${SALOME_INSTALL_BINS})
diff --git a/src/yacsloader/pmml/TestYACSPMML.cxx b/src/yacsloader/pmml/TestYACSPMML.cxx
new file mode 100755 (executable)
index 0000000..9275142
--- /dev/null
@@ -0,0 +1,25 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+#include "YACSPMMLBasicsTest1.hxx"
+
+CPPUNIT_TEST_SUITE_REGISTRATION( YACSPMMLBasicsTest1 );
+
+#include "BasicMainTest.hxx"
diff --git a/src/yacsloader/pmml/YACSPMMLBasicsTest.hxx b/src/yacsloader/pmml/YACSPMMLBasicsTest.hxx
new file mode 100755 (executable)
index 0000000..0324edb
--- /dev/null
@@ -0,0 +1,30 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+#ifndef __YACSPMMLBASICSTEST_HXX__
+#define __YACSPMMLBASICSTEST_HXX__
+
+#include <cppunit/extensions/HelperMacros.h>
+
+class YACSPMMLBasicsTest : public CppUnit::TestFixture
+{
+};
+
+#endif
diff --git a/src/yacsloader/pmml/YACSPMMLBasicsTest1.cxx b/src/yacsloader/pmml/YACSPMMLBasicsTest1.cxx
new file mode 100755 (executable)
index 0000000..0246e11
--- /dev/null
@@ -0,0 +1,219 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+
+#include "YACSPMMLBasicsTest1.hxx"
+#include "PMMLlib.hxx"
+
+#include "yacsconfig.h"
+#include "RuntimeSALOME.hxx"
+#include "PythonPorts.hxx"
+#include "CORBAPorts.hxx"
+#include "parsers.hxx"
+#include "Proc.hxx"
+#include "Exception.hxx"
+#include "Executor.hxx"
+#include "parsers.hxx"
+
+#include <iostream>
+#include <fstream>
+#include <stdlib.h>
+#ifdef WIN32
+#include <io.h>
+#define F_OK 0
+#define access _access
+#else
+#include <unistd.h>
+#endif
+
+//#define _DEVDEBUG_
+#include "YacsTrace.hxx"
+
+using namespace YACS::ENGINE;
+using namespace YACS;
+using namespace std;
+
+int driverTest(Proc* &p, const char* schema)
+{
+  DEBTRACE("+++++++++++++++++++ BEGIN test " << schema);
+  RuntimeSALOME::setRuntime();
+
+  YACSLoader loader;
+  Executor executor;
+  
+  try
+    {
+      p=loader.load(schema);
+      DEBTRACE("Proc *p = " << p);
+      std::ofstream f("toto");
+      p->writeDot(f);
+      f.close();
+      DEBTRACE("+++++++++++++++++++ BEGIN execution " << schema);
+      executor.RunW(p,0);
+      DEBTRACE("+++++++++++++++++++   END execution " << schema);
+      std::ofstream g("titi");
+      p->writeDot(g);
+      g.close();
+      DEBTRACE("+++++++++++++++++++ END test " << schema);
+      return 0;
+    }
+  catch (YACS::Exception& e)
+    {
+      DEBTRACE("YACS exception caught: ");
+      DEBTRACE(e.what());
+      DEBTRACE("+++++++++++++++++++ END test in error " << schema);
+      return 1;
+    }
+  catch (const std::ios_base::failure&)
+    {
+      DEBTRACE("io failure");
+      DEBTRACE("+++++++++++++++++++ END test in error " << schema);
+      return 1;
+    }
+  catch(CORBA::SystemException& ex)
+    {
+      DEBTRACE("Caught a CORBA::SystemException.");
+      CORBA::Any tmp;
+      tmp <<= ex;
+      CORBA::TypeCode_var tc = tmp.type();
+      const char *tcName = tc->name();
+      if ( *tcName != '\0' )
+        {
+          DEBTRACE(tcName);
+        }
+      else
+        {
+          DEBTRACE(tc->id());
+        }
+      DEBTRACE("+++++++++++++++++++ END test in error " << schema);
+      return 1;
+    }
+  catch(omniORB::fatalException& fe)
+    {
+      DEBTRACE("Caught omniORB::fatalException:" );
+      DEBTRACE("  file: " << fe.file());
+      DEBTRACE("  line: " << fe.line());
+      DEBTRACE("  mesg: " << fe.errmsg());
+      DEBTRACE("+++++++++++++++++++ END test in error " << schema);
+      return 1;
+    }
+  catch(...)
+    {
+      DEBTRACE("Caught unknown exception.");
+      DEBTRACE("+++++++++++++++++++ END test in error " << schema);
+      return 1;
+    }
+}
+
+
+void YACSPMMLBasicsTest1::setUp()
+{
+    // The schemas used by the tests are installed under
+    // ${YACS_ROOT_DIR}/share/salome/yacssamples.
+    const char* p = getenv("YACS_ROOT_DIR");
+    if (!p)
+        throw std::string("unable to get YACS_ROOT_DIR");
+    resourcesDir  = std::string(p);
+    resourcesDir += "/share/salome/yacssamples/";
+}
+
+void YACSPMMLBasicsTest1::tearDown()
+{
+}
+
+
+void YACSPMMLBasicsTest1::testYACSdriverLinearRegression()
+{
+    std::string xmlFilename = resourcesDir + "schemaLR2.xml";  
+    Proc *p = 0;
+    int ret = driverTest(p, xmlFilename.c_str() );
+    CPPUNIT_ASSERT(ret == 0);
+    DEBTRACE("Proc *p = " << p);
+    CPPUNIT_ASSERT(p != 0);
+    if (p)
+    {        
+        YACS::ENGINE::Node* node = p->nodeMap[string("PyGetRes")];
+        YACS::ENGINE::OutputPort* outputPort = node->getOutputPort(string("res")); 
+        string str =  outputPort->getAsString(); 
+        CPPUNIT_ASSERT_EQUAL(str, string("True")); 
+        delete p;         
+    }  
+}
+
+void YACSPMMLBasicsTest1::testYACSdriverNeuralNetwork()
+{
+    std::string xmlFilename = resourcesDir + "schemaANN2.xml";  
+    Proc *p = 0;
+    int ret = driverTest(p, xmlFilename.c_str() );
+    CPPUNIT_ASSERT(ret == 0);
+    DEBTRACE("Proc *p = " << p);
+    CPPUNIT_ASSERT(p != 0);    
+    if (p)
+    { 
+        YACS::ENGINE::Node* node = p->nodeMap[string("PyGetRes")];
+        YACS::ENGINE::OutputPort* outputPort = node->getOutputPort(string("res")); 
+        string str =  outputPort->getAsString(); 
+        CPPUNIT_ASSERT_EQUAL(str, string("True")); 
+        delete p;    
+    }
+}  
+
+void YACSPMMLBasicsTest1::testYACSdriver_LRANN()
+{
+    std::string xmlFilename = resourcesDir + "schemaANNLR2.xml";  
+    Proc *p = 0;
+    int ret = driverTest(p, xmlFilename.c_str() );
+    CPPUNIT_ASSERT(ret == 0);
+    DEBTRACE("Proc *p = " << p);
+    CPPUNIT_ASSERT(p != 0);
+    if (p) 
+    { 
+        YACS::ENGINE::Node* node = p->nodeMap[string("PyGetRes")];
+        YACS::ENGINE::OutputPort* outputPort = node->getOutputPort(string("res")); 
+        string str =  outputPort->getAsString(); 
+        CPPUNIT_ASSERT_EQUAL(str, string("True")); 
+        delete p;   
+    }
+}  
+
+void YACSPMMLBasicsTest1::testYACSdriver_PmmlDoesNotExist()
+{
+    std::string xmlFilename = resourcesDir + "schemaPmmlDoesNotExist.xml";  
+    Proc *p = 0;
+    int ret = driverTest(p, xmlFilename.c_str() );
+    CPPUNIT_ASSERT(ret == 0);
+    DEBTRACE("Proc *p = " << p);
+    CPPUNIT_ASSERT(p != 0);
+    if (p)
+    { 
+        YACS::ENGINE::Node* node = p->nodeMap[string("PyGetRes")];
+        YACS::ENGINE::OutputPort* outputPort = node->getOutputPort(string("res"));        
+        string str =  outputPort->getAsString(); 
+        CPPUNIT_ASSERT_EQUAL(str, string("None")); 
+        delete p;     
+    }
+} 
+
diff --git a/src/yacsloader/pmml/YACSPMMLBasicsTest1.hxx b/src/yacsloader/pmml/YACSPMMLBasicsTest1.hxx
new file mode 100755 (executable)
index 0000000..9821c1f
--- /dev/null
@@ -0,0 +1,56 @@
+// Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : InckA
+
+#ifndef __YACSPMMLBASICSTEST1_HXX__
+#define __YACSPMMLBASICSTEST1_HXX__
+
+#include "YACSPMMLBasicsTest.hxx"
+
+#include <string>
+
+// Test class for YACS / PMMLlib
+  
+class YACSPMMLBasicsTest1 : public YACSPMMLBasicsTest
+{
+    CPPUNIT_TEST_SUITE(YACSPMMLBasicsTest1);
+
+    CPPUNIT_TEST( testYACSdriverLinearRegression );
+    CPPUNIT_TEST( testYACSdriverNeuralNetwork );
+    CPPUNIT_TEST( testYACSdriver_LRANN );
+    CPPUNIT_TEST( testYACSdriver_PmmlDoesNotExist );
+
+    CPPUNIT_TEST_SUITE_END();
+  
+public:
+    
+    void setUp(); 
+    void tearDown();    
+    void testYACSdriverLinearRegression(); 
+    void testYACSdriverNeuralNetwork();
+    void testYACSdriver_LRANN();
+    void testYACSdriver_PmmlDoesNotExist();
+
+private :
+    std::string resourcesDir;
+    
+};
+
+
+#endif
diff --git a/src/yacsloader/samples/pmml_tann_exportFunctionPMML.pmml b/src/yacsloader/samples/pmml_tann_exportFunctionPMML.pmml
new file mode 100644 (file)
index 0000000..2437a0c
--- /dev/null
@@ -0,0 +1,142 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Text Description">
+    <Application name="Uranie" version="3.3.2"/>
+    <Annotation>Compilation date : 2000-01-01</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="5.006983e-02" rightMargin="1.499176e-01"/>
+    </DataField>
+    <DataField name="r" displayName="r" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="1.479055e+02" rightMargin="4.990631e+04"/>
+    </DataField>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="6.316370e+04" rightMargin="1.155682e+05"/>
+    </DataField>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="6.316923e+01" rightMargin="1.159015e+02"/>
+    </DataField>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="9.900098e+02" rightMargin="1.109786e+03"/>
+    </DataField>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="7.001450e+02" rightMargin="8.198111e+02"/>
+    </DataField>
+    <DataField name="l" displayName="l" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="1.120343e+03" rightMargin="1.679342e+03"/>
+    </DataField>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="9.857369e+03" rightMargin="1.204401e+04"/>
+    </DataField>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="double">
+      <Interval closure="ClosedClosed" leftMargin="1.309821e+01" rightMargin="2.082511e+02"/>
+    </DataField>
+  </DataDictionary>
+  <NeuralNetwork modelName="ANNModel" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LinearNorm orig="0.000000e+00" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
+            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="1.549906e+00">
+        <Con from="0" weight="-6.660752e-01"/>
+        <Con from="1" weight="-2.960771e-03"/>
+        <Con from="2" weight="-5.281011e-03"/>
+        <Con from="3" weight="-1.061715e-02"/>
+        <Con from="4" weight="-1.541489e-01"/>
+        <Con from="5" weight="1.577635e-01"/>
+        <Con from="6" weight="1.552007e-01"/>
+        <Con from="7" weight="-7.423497e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="7.337605e+00">
+        <Con from="8" weight="-9.156475e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+</PMML>
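
For reference, the NeuralNetwork model above is scored as follows: each input is mapped onto its normalized scale through the two LinearNorm points of its NeuralInput, fed through a single tanh neuron and then a single identity neuron, and the result is mapped back through the inverse of the yhat normalization. Below is a minimal Python sketch of that arithmetic, assuming the usual PMML NormContinuous semantics; in the tests the equivalent function is generated by PMMLlib's ExportPyStr rather than written by hand.

    import math

    def linear_norm(x, orig1, norm1, orig2, norm2):
        # Two-point LinearNorm: the linear map taking orig1 -> norm1 and orig2 -> norm2.
        return norm1 + (x - orig1) * (norm2 - norm1) / (orig2 - orig1)

    def ann_model(rw, r, tu, tl, hu, hl, l, kw):
        # (value, second orig, first norm) of each NeuralInput above; the remaining
        # coordinates of the two LinearNorm points are always 0.
        fields = [(rw, 9.999901e-02, -2.889932e-01), (r,  2.504894e+04, -5.756638e-01),
                  (tu, 8.933486e+04, -1.699313e-01), (tl, 8.955232e+01, -1.707007e-01),
                  (hu, 1.050003e+03, -3.302777e-02), (hl, 7.600007e+02, -4.562070e-02),
                  (l,  1.400018e+03, -1.155882e-01), (kw, 1.095001e+04, -5.780019e-02)]
        x = [linear_norm(v, 0.0, n1, o2, 0.0) for (v, o2, n1) in fields]
        # Hidden layer: one tanh neuron (id 8).
        w = [-6.660752e-01, -2.960771e-03, -5.281011e-03, -1.061715e-02,
             -1.541489e-01,  1.577635e-01,  1.552007e-01, -7.423497e-02]
        h = math.tanh(1.549906e+00 + sum(wi * xi for wi, xi in zip(w, x)))
        # Output layer: one identity neuron (id 9); the result is still on the normalized scale.
        y_norm = 7.337605e+00 - 9.156475e+00 * h
        # Invert the yhat NormContinuous, whose points are (0, -5.873935e-01) and (7.781171e+01, 0).
        return (y_norm + 5.873935e-01) * 7.781171e+01 / 5.873935e-01
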
diff --git a/src/yacsloader/samples/pmml_tann_tlr_exportFunctionPMML.pmml b/src/yacsloader/samples/pmml_tann_tlr_exportFunctionPMML.pmml
new file mode 100644 (file)
index 0000000..e32c50f
--- /dev/null
@@ -0,0 +1,147 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Text Description">
+    <Application name="Uranie" version="3.3.2"/>
+    <Annotation>Compilation date : 2000-01-01</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="double"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="double"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="double"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="double"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="double"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="double"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="double"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="double"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="double"/>
+  </DataDictionary>
+  <RegressionModel functionName="regression" modelName="LRModel" targetFieldName="yhat">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="-1.560299e+02">
+      <NumericPredictor name="rw" exponent="1" coefficient="1.422211e+03"/>
+      <NumericPredictor name="r" exponent="1" coefficient="-3.074119e-07"/>
+      <NumericPredictor name="tu" exponent="1" coefficient="2.152080e-06"/>
+      <NumericPredictor name="tl" exponent="1" coefficient="-4.985123e-03"/>
+      <NumericPredictor name="hu" exponent="1" coefficient="2.611037e-01"/>
+      <NumericPredictor name="hl" exponent="1" coefficient="-2.544189e-01"/>
+      <NumericPredictor name="l" exponent="1" coefficient="-5.571449e-02"/>
+      <NumericPredictor name="kw" exponent="1" coefficient="8.135523e-03"/>
+    </RegressionTable>
+  </RegressionModel>
+  <NeuralNetwork modelName="ANNModel" functionName="regression" numberOfLayers="2">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <NeuralInputs numberOfInputs="8">
+      <NeuralInput id="0">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="rw">
+            <LinearNorm orig="0.000000e+00" norm="-2.889932e-01"/>
+            <LinearNorm orig="9.999901e-02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="1">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="r">
+            <LinearNorm orig="0.000000e+00" norm="-5.756638e-01"/>
+            <LinearNorm orig="2.504894e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="2">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tu">
+            <LinearNorm orig="0.000000e+00" norm="-1.699313e-01"/>
+            <LinearNorm orig="8.933486e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="3">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="tl">
+            <LinearNorm orig="0.000000e+00" norm="-1.707007e-01"/>
+            <LinearNorm orig="8.955232e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="4">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hu">
+            <LinearNorm orig="0.000000e+00" norm="-3.302777e-02"/>
+            <LinearNorm orig="1.050003e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="5">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="hl">
+            <LinearNorm orig="0.000000e+00" norm="-4.562070e-02"/>
+            <LinearNorm orig="7.600007e+02" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="6">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="l">
+            <LinearNorm orig="0.000000e+00" norm="-1.155882e-01"/>
+            <LinearNorm orig="1.400018e+03" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+      <NeuralInput id="7">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="kw">
+            <LinearNorm orig="0.000000e+00" norm="-5.780019e-02"/>
+            <LinearNorm orig="1.095001e+04" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralInput>
+    </NeuralInputs>
+    <NeuralLayer activationFunction="tanh" numberOfNeurons="1">
+      <Neuron id="8" bias="1.234012e+00">
+        <Con from="0" weight="-7.638734e-01"/>
+        <Con from="1" weight="-7.054051e-04"/>
+        <Con from="2" weight="-5.725514e-03"/>
+        <Con from="3" weight="-7.439748e-03"/>
+        <Con from="4" weight="-1.804265e-01"/>
+        <Con from="5" weight="1.834322e-01"/>
+        <Con from="6" weight="1.775569e-01"/>
+        <Con from="7" weight="-8.738110e-02"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralLayer activationFunction="identity" numberOfNeurons="1">
+      <Neuron id="9" bias="5.092746e+00">
+        <Con from="8" weight="-6.841448e+00"/>
+      </Neuron>
+    </NeuralLayer>
+    <NeuralOutputs numberOfOutputs="1">
+      <NeuralOutput outputNeuron="9">
+        <DerivedField optype="continuous" dataType="float">
+          <NormContinuous field="yhat">
+            <LinearNorm orig="0.000000e+00" norm="-5.873935e-01"/>
+            <LinearNorm orig="7.781171e+01" norm="0.000000e+00"/>
+          </NormContinuous>
+        </DerivedField>
+      </NeuralOutput>
+    </NeuralOutputs>
+  </NeuralNetwork>
+</PMML>
diff --git a/src/yacsloader/samples/pmml_tlr_exportFunctionPMML.pmml b/src/yacsloader/samples/pmml_tlr_exportFunctionPMML.pmml
new file mode 100644 (file)
index 0000000..588134b
--- /dev/null
@@ -0,0 +1,41 @@
+<?xml version="1.0"?>
+<PMML xmlns="http://www.dmg.org/PMML-4_1" version="4.1">
+  <Header copyright="myCopyright" description="Text Description">
+    <Application name="Uranie" version="3.3.2"/>
+    <Annotation>Compilation date : 2000-01-01</Annotation>
+  </Header>
+  <DataDictionary>
+    <DataField name="rw" displayName="rw" optype="continuous" dataType="double"/>
+    <DataField name="r" displayName="r" optype="continuous" dataType="double"/>
+    <DataField name="tu" displayName="tu" optype="continuous" dataType="double"/>
+    <DataField name="tl" displayName="tl" optype="continuous" dataType="double"/>
+    <DataField name="hu" displayName="hu" optype="continuous" dataType="double"/>
+    <DataField name="hl" displayName="hl" optype="continuous" dataType="double"/>
+    <DataField name="l" displayName="l" optype="continuous" dataType="double"/>
+    <DataField name="kw" displayName="kw" optype="continuous" dataType="double"/>
+    <DataField name="yhat" displayName="yhat" optype="continuous" dataType="double"/>
+  </DataDictionary>
+  <RegressionModel functionName="regression" modelName="LRModel" targetFieldName="yhat">
+    <MiningSchema>
+      <MiningField name="rw" usageType="active"/>
+      <MiningField name="r" usageType="active"/>
+      <MiningField name="tu" usageType="active"/>
+      <MiningField name="tl" usageType="active"/>
+      <MiningField name="hu" usageType="active"/>
+      <MiningField name="hl" usageType="active"/>
+      <MiningField name="l" usageType="active"/>
+      <MiningField name="kw" usageType="active"/>
+      <MiningField name="yhat" usageType="predicted"/>
+    </MiningSchema>
+    <RegressionTable intercept="-1.560299e+02">
+      <NumericPredictor name="rw" exponent="1" coefficient="1.422211e+03"/>
+      <NumericPredictor name="r" exponent="1" coefficient="-3.074119e-07"/>
+      <NumericPredictor name="tu" exponent="1" coefficient="2.152080e-06"/>
+      <NumericPredictor name="tl" exponent="1" coefficient="-4.985123e-03"/>
+      <NumericPredictor name="hu" exponent="1" coefficient="2.611037e-01"/>
+      <NumericPredictor name="hl" exponent="1" coefficient="-2.544189e-01"/>
+      <NumericPredictor name="l" exponent="1" coefficient="-5.571449e-02"/>
+      <NumericPredictor name="kw" exponent="1" coefficient="8.135523e-03"/>
+    </RegressionTable>
+  </RegressionModel>
+</PMML>
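
The RegressionModel above scores as a plain linear combination: yhat is the intercept plus the sum of coefficient times predictor, with every exponent equal to 1. For reference only, a Python sketch of the same formula (in the tests the equivalent function is produced by PMMLlib's ExportPyStr):

    def lr_model(rw, r, tu, tl, hu, hl, l, kw):
        # Intercept and coefficients copied from the RegressionTable above.
        return (-1.560299e+02
                + 1.422211e+03 * rw - 3.074119e-07 * r
                + 2.152080e-06 * tu - 4.985123e-03 * tl
                + 2.611037e-01 * hu - 2.544189e-01 * hl
                - 5.571449e-02 * l  + 8.135523e-03 * kw)
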
diff --git a/src/yacsloader/samples/schemaANN2.xml b/src/yacsloader/samples/schemaANN2.xml
new file mode 100644 (file)
index 0000000..27551cc
--- /dev/null
@@ -0,0 +1,412 @@
+<?xml version='1.0' encoding='iso-8859-1' ?>
+<proc name="newSchema_1">
+   <property name="DefaultStudyID" value="1"/>
+   <type name="string" kind="string"/>
+   <struct name="Engines/dataref">
+      <member name="ref" type="string"/>
+   </struct>
+   <type name="bool" kind="bool"/>
+   <sequence name="boolvec" content="bool"/>
+   <type name="double" kind="double"/>
+   <sequence name="dblevec" content="double"/>
+   <objref name="file" id="file"/>
+   <type name="int" kind="int"/>
+   <sequence name="intvec" content="int"/>
+   <struct name="stringpair">
+      <member name="name" type="string"/>
+      <member name="value" type="string"/>
+   </struct>
+   <sequence name="propvec" content="stringpair"/>
+   <objref name="pyobj" id="python:obj:1.0"/>
+   <sequence name="seqboolvec" content="boolvec"/>
+   <sequence name="seqdblevec" content="dblevec"/>
+   <sequence name="seqint" content="int"/>
+   <sequence name="seqintvec" content="intvec"/>
+   <sequence name="stringvec" content="string"/>
+   <sequence name="seqstringvec" content="stringvec"/>
+   <container name="DefaultContainer">
+      <property name="container_name" value="FactoryServer"/>
+      <property name="name" value="localhost"/>
+   </container>
+   <foreach name="ForEachLoop_int6" nbranch="1" type="int">
+      <bloc name="Bloc8">
+         <inline name="PyFuncExec">
+            <script><code><![CDATA[
+# on reconstruit une liste de double
+doubleVecIn = [ float(v) for v in inputs[i] ] ;
+
+doubleVecOut = pyFunc( doubleVecIn );
+
+output = doubleVecOut[0];
+]]></code></script>
+            <inport name="i" type="int"/>
+            <inport name="pyFunc" type="pyobj"/>
+            <inport name="inputs" type="pyobj"/>
+            <outport name="output" type="double"/>
+         </inline>
+         <inline name="PyValidate">
+            <script><code><![CDATA[#-----------------------------#
+#       Validate              #
+#-----------------------------#
+
+ret = True;
+
+refOutput = float( refOutputs[i] );
+
+err = abs( refOutput - output ) ; 
+
+if err > epsilon : 
+    msg   = "KO valid with eps %f at pattern #%i: \n"%(epsilon, i) ;
+    msg += "refOutput - output : %f - %f \n"%(refOutput, output);
+    msg += "Got abs relative diff  %f .\n"%err
+    ret = False;
+    print msg;
+    pass
+
+#print "+++ OK " , i , " +++", ret
+]]></code></script>
+            <inport name="i" type="int"/>
+            <inport name="refOutputs" type="pyobj"/>
+            <inport name="output" type="double"/>
+            <inport name="epsilon" type="double"/>
+            <outport name="ret" type="bool"/>
+         </inline>
+         <control> <fromnode>PyFuncExec</fromnode> <tonode>PyValidate</tonode> </control>
+         <datalink control="false">
+            <fromnode>PyFuncExec</fromnode> <fromport>output</fromport>
+            <tonode>PyValidate</tonode> <toport>output</toport>
+         </datalink>
+      </bloc>
+   </foreach>
+   <inline name="PyInitInputs">
+      <script><code><![CDATA[#-----------------------------#
+#             Inputs              #
+#-----------------------------#
+
+inputs = [ 
+[1.456610531e-01, 4.817264758e+04, 7.398381011e+04, 1.035837913e+02, 9.968027314e+02, 7.662662390e+02, 1.606060725e+03, 1.147627487e+04], 
+[7.767860409e-02, 1.193098539e+04, 7.292686667e+04, 9.785118163e+01, 1.051369254e+03, 7.027872080e+02, 1.447244752e+03, 1.028475889e+04], 
+[6.750842479e-02, 2.882617130e+04, 9.817007636e+04, 8.488524614e+01, 1.090567120e+03, 8.122979233e+02, 1.649818202e+03, 1.149754134e+04], 
+[8.860282350e-02, 3.498171453e+03, 8.530760503e+04, 9.046022050e+01, 1.095160960e+03, 7.874820198e+02, 1.367563528e+03, 1.084392094e+04], 
+[1.365903117e-01, 2.825638843e+04, 8.889946935e+04, 1.113147418e+02, 1.019749972e+03, 7.134926632e+02, 1.507458958e+03, 1.081045588e+04], 
+[7.127271995e-02, 1.753261607e+04, 9.534506291e+04, 9.153808181e+01, 1.061650244e+03, 7.309769019e+02, 1.657218091e+03, 1.072962363e+04], 
+[1.265602027e-01, 4.219433169e+04, 9.456040368e+04, 1.056736168e+02, 1.099933893e+03, 7.471638924e+02, 1.273665410e+03, 1.138336512e+04], 
+[8.667764079e-02, 4.120117756e+04, 1.133877245e+05, 7.767636060e+01, 1.016590900e+03, 7.080602432e+02, 1.660892376e+03, 1.092560938e+04], 
+[9.976394549e-02, 6.420167443e+03, 9.830132327e+04, 8.501884776e+01, 1.097122572e+03, 7.253532648e+02, 1.307720423e+03, 9.866836214e+03], 
+[1.146758223e-01, 8.710374173e+03, 8.523215073e+04, 9.414901527e+01, 1.095537742e+03, 7.060630963e+02, 1.275130095e+03, 1.130554594e+04], 
+[7.880624877e-02, 4.311161871e+04, 7.377943028e+04, 1.061320296e+02, 1.047244176e+03, 7.477760505e+02, 1.415636178e+03, 1.040593206e+04], 
+[1.058084156e-01, 4.582563552e+04, 9.222816385e+04, 1.038847032e+02, 9.955665779e+02, 7.432779758e+02, 1.504745288e+03, 1.146224012e+04], 
+[1.253087794e-01, 4.847286153e+04, 7.518998946e+04, 1.040979939e+02, 1.105768478e+03, 7.577355747e+02, 1.201630771e+03, 1.088149400e+04], 
+[1.175039884e-01, 2.565240792e+04, 7.230018052e+04, 8.257012789e+01, 1.032484559e+03, 7.694279542e+02, 1.566647304e+03, 1.153822324e+04], 
+[6.124755615e-02, 4.507578924e+04, 6.478138490e+04, 9.090792861e+01, 1.062255915e+03, 7.497957649e+02, 1.184192526e+03, 1.170539896e+04], 
+[5.544131559e-02, 3.390925881e+04, 7.328456973e+04, 1.090337509e+02, 1.048630443e+03, 7.628191574e+02, 1.185768164e+03, 1.028085684e+04], 
+[1.183272497e-01, 4.639774146e+04, 6.984716489e+04, 7.483809181e+01, 1.071724305e+03, 8.029602783e+02, 1.301826477e+03, 1.069809527e+04], 
+[1.178550031e-01, 1.065266711e+04, 7.473813352e+04, 7.063285879e+01, 1.017164661e+03, 7.183678405e+02, 1.252220675e+03, 1.085888136e+04], 
+[1.116310036e-01, 7.057176796e+02, 8.914959172e+04, 1.143736719e+02, 1.024291508e+03, 7.893954959e+02, 1.375784904e+03, 1.189588654e+04], 
+[1.056449157e-01, 3.725247649e+04, 1.126706761e+05, 8.220356853e+01, 1.000815794e+03, 7.342054423e+02, 1.331007515e+03, 1.057815755e+04], 
+[5.585244596e-02, 1.080328986e+04, 7.647445600e+04, 1.127840680e+02, 1.101335277e+03, 7.090687232e+02, 1.571310094e+03, 1.179921032e+04], 
+[9.318997589e-02, 3.694986496e+04, 1.142850986e+05, 9.885621621e+01, 1.047818074e+03, 7.462410467e+02, 1.550908728e+03, 1.024738180e+04], 
+[9.769526026e-02, 4.898586800e+04, 8.454146334e+04, 7.390916471e+01, 1.069034353e+03, 7.761299060e+02, 1.366617089e+03, 1.045533000e+04], 
+[1.448874974e-01, 2.618871518e+04, 1.006705237e+05, 6.761931276e+01, 1.084724402e+03, 7.056825472e+02, 1.467825112e+03, 1.063120366e+04], 
+[5.748240145e-02, 4.265983570e+04, 6.922054248e+04, 7.153213366e+01, 1.029573412e+03, 7.140769415e+02, 1.638688665e+03, 1.152371724e+04], 
+[1.004554848e-01, 1.849359821e+04, 1.073035370e+05, 9.843990445e+01, 1.061773839e+03, 7.703136119e+02, 1.142717255e+03, 1.133548780e+04], 
+[6.904713159e-02, 3.114664091e+04, 1.154062790e+05, 8.699726139e+01, 9.951594968e+02, 8.013040888e+02, 1.645133282e+03, 1.101513225e+04], 
+[7.535831709e-02, 2.084271662e+04, 8.796612167e+04, 8.755547732e+01, 1.091487642e+03, 7.501847659e+02, 1.272233814e+03, 1.153502741e+04], 
+[1.316642693e-01, 4.074199552e+04, 9.791860127e+04, 1.096474308e+02, 1.103690417e+03, 7.597649884e+02, 1.149359431e+03, 1.121703132e+04], 
+[1.256804603e-01, 2.171487442e+04, 1.047171996e+05, 8.588439966e+01, 1.102320221e+03, 7.708693798e+02, 1.329214491e+03, 1.059463337e+04], 
+[5.439524620e-02, 2.259584733e+04, 8.079125672e+04, 6.476205727e+01, 1.106152417e+03, 8.113191944e+02, 1.255437236e+03, 1.201333911e+04], 
+[8.750209459e-02, 1.410249021e+04, 1.119751321e+05, 1.073490872e+02, 1.063143667e+03, 7.871042297e+02, 1.329983208e+03, 1.195606571e+04], 
+[8.699797238e-02, 4.021709531e+04, 1.117867687e+05, 9.589155856e+01, 1.073694293e+03, 7.994999584e+02, 1.627265626e+03, 1.136706401e+04], 
+[6.892920064e-02, 2.032907492e+03, 1.010396848e+05, 6.783802062e+01, 1.086461820e+03, 7.374214870e+02, 1.337611800e+03, 1.085112805e+04], 
+[1.110159183e-01, 4.417290645e+04, 7.073649093e+04, 7.518425871e+01, 1.107020884e+03, 7.795442605e+02, 1.148611472e+03, 9.964154503e+03], 
+[5.864651909e-02, 4.709372341e+04, 1.058816566e+05, 6.673370711e+01, 1.046313765e+03, 7.910806290e+02, 1.399673582e+03, 1.115277414e+04], 
+[1.024623536e-01, 3.248763678e+04, 1.123176511e+05, 1.107772688e+02, 1.012914390e+03, 7.571928585e+02, 1.189214491e+03, 1.183043699e+04], 
+[1.268376736e-01, 2.048412849e+04, 1.085318941e+05, 6.459015746e+01, 1.109786159e+03, 8.139508806e+02, 1.359762233e+03, 1.157111067e+04], 
+[5.300816813e-02, 2.943127727e+04, 1.146785278e+05, 8.394814161e+01, 1.043452404e+03, 7.603354413e+02, 1.658862011e+03, 1.163288578e+04], 
+[7.028017434e-02, 4.192750166e+04, 9.553886080e+04, 6.383743056e+01, 1.079645033e+03, 7.723588658e+02, 1.321409167e+03, 1.058567246e+04], 
+[5.991478449e-02, 4.541579388e+04, 6.812180949e+04, 9.246414937e+01, 1.019926004e+03, 8.069446852e+02, 1.239566623e+03, 1.120099431e+04], 
+[1.067481756e-01, 1.157791820e+04, 8.601870382e+04, 9.535250994e+01, 1.058079739e+03, 8.029092666e+02, 1.675663505e+03, 9.857368989e+03], 
+[1.372760225e-01, 2.899852970e+04, 8.623842631e+04, 1.131718316e+02, 1.024500700e+03, 7.638957159e+02, 1.530839326e+03, 1.082014841e+04], 
+[5.646039560e-02, 2.018348280e+04, 1.080224373e+05, 1.152372480e+02, 1.093895079e+03, 7.378488842e+02, 1.286477483e+03, 1.185925929e+04], 
+[1.112123774e-01, 2.393360775e+04, 9.292106786e+04, 7.719527530e+01, 1.067671951e+03, 7.343684587e+02, 1.483330544e+03, 1.201709952e+04], 
+[1.307541334e-01, 1.560575418e+04, 1.072113632e+05, 1.121212726e+02, 1.037577460e+03, 8.001239033e+02, 1.526339128e+03, 1.134591351e+04], 
+[6.497204223e-02, 1.402014180e+04, 6.745969376e+04, 7.862832618e+01, 1.014652310e+03, 7.785303790e+02, 1.410865140e+03, 1.050284643e+04], 
+[7.585702665e-02, 4.060724171e+04, 7.891898759e+04, 8.983721307e+01, 1.027113392e+03, 7.415777465e+02, 1.564676410e+03, 1.021290221e+04], 
+[1.236232545e-01, 3.896089552e+04, 9.928329730e+04, 7.287234301e+01, 9.987764845e+02, 8.183318132e+02, 1.174504796e+03, 1.008298210e+04], 
+[1.472884758e-01, 1.496664561e+04, 9.577485455e+04, 8.233690370e+01, 1.081566913e+03, 7.885231394e+02, 1.401539659e+03, 1.177286288e+04], 
+[1.446232028e-01, 4.176932757e+04, 6.512933971e+04, 9.704737342e+01, 1.013731965e+03, 7.047846719e+02, 1.603844751e+03, 1.068331103e+04], 
+[1.380008828e-01, 3.305905514e+04, 9.999652423e+04, 6.636073041e+01, 1.064322897e+03, 7.729923602e+02, 1.227516863e+03, 1.171354749e+04], 
+[6.344159464e-02, 4.780648795e+04, 9.365102770e+04, 8.802953531e+01, 1.029707604e+03, 7.933230876e+02, 1.374652921e+03, 1.008866356e+04], 
+[5.258122387e-02, 3.702566309e+03, 7.777773463e+04, 7.570896193e+01, 1.052339637e+03, 7.741762325e+02, 1.641636623e+03, 1.121956718e+04], 
+[1.308250614e-01, 4.040441149e+04, 8.911452953e+04, 8.104256772e+01, 1.054111352e+03, 7.773815616e+02, 1.334252555e+03, 1.104079585e+04], 
+[1.420385163e-01, 4.604351716e+04, 8.593646152e+04, 8.159247797e+01, 1.060594140e+03, 8.058854605e+02, 1.637341312e+03, 1.120759720e+04], 
+[1.358384353e-01, 3.498709280e+04, 8.406584308e+04, 8.165787651e+01, 1.022762168e+03, 8.120310170e+02, 1.474777461e+03, 1.003994300e+04], 
+[1.423898316e-01, 4.687374179e+04, 8.660185111e+04, 7.776608109e+01, 9.927602408e+02, 7.641272454e+02, 1.455954292e+03, 1.178518632e+04], 
+[1.321018940e-01, 2.866815831e+03, 9.113096058e+04, 1.007088501e+02, 1.086874972e+03, 7.587825443e+02, 1.610678155e+03, 1.146851885e+04], 
+[1.351663932e-01, 3.196774887e+04, 1.021987705e+05, 7.197885925e+01, 1.021175944e+03, 7.331080072e+02, 1.162493217e+03, 1.109738563e+04], 
+[8.648890684e-02, 4.492471938e+04, 6.438582057e+04, 1.110566727e+02, 1.090853465e+03, 7.597253981e+02, 1.467197961e+03, 1.195185409e+04], 
+[1.104557208e-01, 4.850355694e+04, 1.018022746e+05, 1.149248442e+02, 1.013058279e+03, 7.367222887e+02, 1.358941413e+03, 1.134149282e+04], 
+[1.091740527e-01, 4.724040634e+04, 1.023148364e+05, 1.026078719e+02, 1.016770836e+03, 7.167576085e+02, 1.388776487e+03, 1.018437131e+04], 
+[1.386282688e-01, 1.824670236e+04, 7.934492890e+04, 7.145704182e+01, 1.031398805e+03, 7.882243654e+02, 1.511935264e+03, 1.073294641e+04], 
+[1.072148102e-01, 2.606334586e+04, 1.045009711e+05, 1.153347286e+02, 1.013401269e+03, 7.542190230e+02, 1.392997551e+03, 1.072210903e+04], 
+[1.377281664e-01, 3.939831181e+04, 7.710345011e+04, 6.326330520e+01, 1.038617320e+03, 7.401934748e+02, 1.527993368e+03, 1.129318647e+04], 
+[1.337643862e-01, 4.712556894e+04, 9.486490032e+04, 8.660231295e+01, 1.082875874e+03, 7.991662034e+02, 1.344812635e+03, 1.034653122e+04], 
+[1.450803097e-01, 3.690122313e+04, 7.076429187e+04, 7.923525262e+01, 1.021698784e+03, 7.821386527e+02, 1.240302421e+03, 1.092916457e+04], 
+[9.160223345e-02, 3.477878623e+04, 7.860829708e+04, 7.052989639e+01, 1.045971334e+03, 7.371632922e+02, 1.377972291e+03, 1.202636052e+04], 
+[1.369383921e-01, 7.129085246e+03, 1.003532237e+05, 6.706267812e+01, 9.920945476e+02, 7.700039641e+02, 1.270854730e+03, 1.093421128e+04], 
+[9.150440403e-02, 3.191576156e+04, 1.043970415e+05, 1.121933263e+02, 9.961948536e+02, 7.035905746e+02, 1.503665860e+03, 1.175814238e+04], 
+[1.391914465e-01, 2.679929889e+04, 9.315698192e+04, 1.086281131e+02, 1.108793392e+03, 8.128614591e+02, 1.491457967e+03, 1.029088956e+04], 
+[5.336087538e-02, 1.787440457e+04, 8.274402814e+04, 9.735553791e+01, 1.052490734e+03, 7.853725287e+02, 1.505494679e+03, 1.199289056e+04], 
+[5.136318222e-02, 2.313327941e+04, 8.127627613e+04, 6.730045023e+01, 1.040984645e+03, 7.672337162e+02, 1.340467605e+03, 9.996178363e+03], 
+[5.202323461e-02, 1.418186294e+03, 9.097156505e+04, 6.493013875e+01, 1.030920220e+03, 7.896488622e+02, 1.361926266e+03, 1.152603230e+04], 
+[1.120003670e-01, 4.822621219e+04, 1.096378917e+05, 1.066666519e+02, 1.053178110e+03, 7.431009273e+02, 1.651552956e+03, 1.102461978e+04], 
+[8.156025845e-02, 3.294115069e+04, 7.846891086e+04, 1.125022952e+02, 1.075934524e+03, 8.170942340e+02, 1.251695262e+03, 1.007675874e+04], 
+[6.320769249e-02, 2.369685837e+04, 1.124336882e+05, 8.689656009e+01, 1.035765280e+03, 7.364800974e+02, 1.354682602e+03, 1.166796177e+04], 
+[6.503346261e-02, 1.730539268e+03, 8.554891831e+04, 1.052469487e+02, 1.109487127e+03, 7.156856899e+02, 1.568668470e+03, 1.144257670e+04], 
+[1.060320179e-01, 2.193967854e+04, 9.283579078e+04, 7.307445266e+01, 9.997547759e+02, 8.019001159e+02, 1.425747028e+03, 1.140852632e+04], 
+] 
+
+
+]]></code></script>
+      <outport name="inputs" type="pyobj"/>
+   </inline>
+   <inline name="PyInitRefOutputs">
+      <script><code><![CDATA[#-----------------------------#
+#      Reference outputs      #
+#-----------------------------#
+
+refOutputs = [ 
+1.165811298e+02, 
+4.800731001e+01, 
+2.552307508e+01, 
+5.805453016e+01, 
+1.304836720e+02, 
+3.285696649e+01, 
+1.557608944e+02, 
+4.374810362e+01, 
+8.929287717e+01, 
+1.425999339e+02, 
+4.182190868e+01, 
+6.457050056e+01, 
+1.510971116e+02, 
+7.956605866e+01, 
+4.240742650e+01, 
+2.764774492e+01, 
+9.347296791e+01, 
+1.084714948e+02, 
+7.856829522e+01, 
+7.146608339e+01, 
+3.781926322e+01, 
+5.088280301e+01, 
+6.350651454e+01, 
+1.802545516e+02, 
+2.390540494e+01, 
+8.926908340e+01, 
+1.621872876e+01, 
+5.930546024e+01, 
+1.739576265e+02, 
+1.292594318e+02, 
+3.137491768e+01, 
+5.933983687e+01, 
+4.125195859e+01, 
+4.595759735e+01, 
+1.057164195e+02, 
+2.249466719e+01, 
+8.322085467e+01, 
+1.234257504e+02, 
+1.808878098e+01, 
+3.877651954e+01, 
+2.258184497e+01, 
+4.698313250e+01, 
+1.116999328e+02, 
+4.375304767e+01, 
+1.024774016e+02, 
+9.658347512e+01, 
+2.155679655e+01, 
+3.049337709e+01, 
+7.818527054e+01, 
+1.667466744e+02, 
+1.374770136e+02, 
+1.600991110e+02, 
+2.088718864e+01, 
+1.619537927e+01, 
+1.218138473e+02, 
+1.120293486e+02, 
+8.676828288e+01, 
+1.238520007e+02, 
+1.264091907e+02, 
+1.520588614e+02, 
+6.410449884e+01, 
+8.751103700e+01, 
+8.081852230e+01, 
+1.055199073e+02, 
+6.999515823e+01, 
+1.295983705e+02, 
+1.228930068e+02, 
+1.431794331e+02, 
+6.947691365e+01, 
+1.160897648e+02, 
+5.875505502e+01, 
+1.265981410e+02, 
+2.087500232e+01, 
+1.828049612e+01, 
+1.883459772e+01, 
+7.806138598e+01, 
+4.174578630e+01, 
+3.565871725e+01, 
+4.505400598e+01, 
+5.370771952e+01 ] 
+]]></code></script>
+      <outport name="refOutputs" type="pyobj"/>
+   </inline>
+   <inline name="PyBuildLoopIndex">
+      <function name="make_indexes">
+         <code><![CDATA[def make_indexes(inVal, outVal) :
+    print "In make_indexes" ;
+    if ( len(inVal) != len(outVal) ) :
+        msg = "len(inVal) (%i) != len(outVal) (%i). "%( len(inVal), len(outVal) ) ;
+        raise ValueError(msg)       
+    n = len( inVal ); 
+    indexes = [ i for i in range( n ) ]  ;
+    return indexes
+
+]]></code>
+      </function>
+      <inport name="inVal" type="pyobj"/>
+      <inport name="outVal" type="pyobj"/>
+      <outport name="indexes" type="intvec"/>
+   </inline>
+   <inline name="PyEpsilon">
+      <script><code><![CDATA[epsilon = 1E-6;
+]]></code></script>
+      <outport name="epsilon" type="double"/>
+   </inline>
+   <inline name="PyLoadPMML0">
+      <script><code><![CDATA[ 
+import sys;
+from PMML import *;
+pmmlObj = PMMLlib( filename );   
+pmmlObj.SetCurrentModel( modelname , eval(pmmltype) );
+myFunc = "pyFunc";
+myHeader = "Function processed in YACCS";
+myCode = pmmlObj.ExportPyStr(myFunc, myHeader);
+exec myCode;
+]]></code></script>
+      <inport name="filename" type="string"/>
+      <inport name="modelname" type="string"/>
+      <inport name="pmmltype" type="string"/>
+      <outport name="pyFunc" type="pyobj"/>
+   </inline>
+   <inline name="PyGetPmmlFile">
+      <script><code><![CDATA[#-----------------------------#
+#       Get PMML file         #
+#-----------------------------#
+import os;
+pmmlRootDir = os.getenv("YACS_ROOT_DIR");
+resourcesDir = os.path.join(pmmlRootDir,"share","salome","yacssamples");
+resourcesDir += os.sep ;
+filepath = resourcesDir + filename;
+]]></code></script>
+      <inport name="filename" type="string"/>
+      <outport name="filepath" type="string"/>
+   </inline>
+   <inline name="PyGetRes">
+      <script><code><![CDATA[print "retVec ";
+print retVec;
+res = "True";
+for ret in retVec:
+    if not ret:
+        res = "False";
+        pass
+    pass
+
+]]></code></script>
+      <inport name="retVec" type="boolvec"/>
+      <outport name="res" type="string"/>
+   </inline>
+   <control> <fromnode>ForEachLoop_int6</fromnode> <tonode>PyGetRes</tonode> </control>
+   <control> <fromnode>PyInitInputs</fromnode> <tonode>PyBuildLoopIndex</tonode> </control>
+   <control> <fromnode>PyInitInputs</fromnode> <tonode>PyInitRefOutputs</tonode> </control>
+   <control> <fromnode>PyInitRefOutputs</fromnode> <tonode>PyBuildLoopIndex</tonode> </control>
+   <control> <fromnode>PyInitRefOutputs</fromnode> <tonode>PyEpsilon</tonode> </control>
+   <control> <fromnode>PyBuildLoopIndex</fromnode> <tonode>ForEachLoop_int6</tonode> </control>
+   <control> <fromnode>PyEpsilon</fromnode> <tonode>PyBuildLoopIndex</tonode> </control>
+   <control> <fromnode>PyLoadPMML0</fromnode> <tonode>ForEachLoop_int6</tonode> </control>
+   <control> <fromnode>PyGetPmmlFile</fromnode> <tonode>PyLoadPMML0</tonode> </control>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6</fromnode> <fromport>evalSamples</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>i</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6</fromnode> <fromport>evalSamples</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>i</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitInputs</fromnode> <fromport>inputs</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>inputs</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitInputs</fromnode> <fromport>inputs</fromport>
+      <tonode>PyBuildLoopIndex</tonode> <toport>inVal</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitRefOutputs</fromnode> <fromport>refOutputs</fromport>
+      <tonode>PyBuildLoopIndex</tonode> <toport>outVal</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitRefOutputs</fromnode> <fromport>refOutputs</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>refOutputs</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyBuildLoopIndex</fromnode> <fromport>indexes</fromport>
+      <tonode>ForEachLoop_int6</tonode> <toport>SmplsCollection</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyEpsilon</fromnode> <fromport>epsilon</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>epsilon</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyLoadPMML0</fromnode> <fromport>pyFunc</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>pyFunc</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyGetPmmlFile</fromnode> <fromport>filepath</fromport>
+      <tonode>PyLoadPMML0</tonode> <toport>filename</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6.Bloc8.PyValidate</fromnode> <fromport>ret</fromport>
+      <tonode>PyGetRes</tonode> <toport>retVec</toport>
+   </datalink>
+   <parameter>
+      <tonode>PyLoadPMML0</tonode><toport>filename</toport>
+      <value><string>pmml_tann_exportFunctionPMML.pmml</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML0</tonode><toport>modelname</toport>
+      <value><string>ANNModel</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML0</tonode><toport>pmmltype</toport>
+      <value><string>kANN</string></value>
+   </parameter>
+   <parameter>
+      <tonode>ForEachLoop_int6</tonode><toport>nbBranches</toport>
+      <value><int>1</int></value>
+   </parameter>
+   <parameter>
+      <tonode>PyGetPmmlFile</tonode><toport>filename</toport>
+      <value><string>pmml_tann_exportFunctionPMML.pmml</string></value>
+   </parameter>
+   <presentation name="ForEachLoop_int6.Bloc8.PyFuncExec" x="4.01977" y="140" width="158" height="117" expanded="1" expx="4.01977" expy="140" expWidth="158" expHeight="117" shownState="0"/>
+   <presentation name="ForEachLoop_int6.Bloc8.PyValidate" x="302.776" y="119.404" width="158" height="144" expanded="1" expx="302.776" expy="119.404" expWidth="158" expHeight="144" shownState="0"/>
+   <presentation name="ForEachLoop_int6.Bloc8" x="6" y="86" width="464.776" height="267.404" expanded="1" expx="6" expy="86" expWidth="464.776" expHeight="267.404" shownState="0"/>
+   <presentation name="PyBuildLoopIndex" x="354.895" y="275" width="158" height="90" expanded="1" expx="354.895" expy="275" expWidth="158" expHeight="90" shownState="0"/>
+   <presentation name="PyInitRefOutputs" x="568.105" y="122" width="158" height="63" expanded="1" expx="568.105" expy="122" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyInitInputs" x="264.5" y="91.5" width="158" height="63" expanded="1" expx="264.5" expy="91.5" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyLoadPMML0" x="25.1052" y="117.395" width="158" height="117" expanded="1" expx="25.1052" expy="117.395" expWidth="158" expHeight="117" shownState="0"/>
+   <presentation name="PyEpsilon" x="28.5" y="274.5" width="158" height="63" expanded="1" expx="28.5" expy="274.5" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="ForEachLoop_int6" x="7" y="414.5" width="474.776" height="357.404" expanded="1" expx="7" expy="414.5" expWidth="474.776" expHeight="357.404" shownState="0"/>
+   <presentation name="PyGetPmmlFile" x="59" y="42" width="158" height="63" expanded="1" expx="59" expy="42" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyGetRes" x="630.384" y="691.128" width="158" height="63" expanded="1" expx="630.384" expy="691.128" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="__ROOT__" x="0" y="0" width="792.384" height="775.904" expanded="1" expx="0" expy="0" expWidth="792.384" expHeight="775.904" shownState="0"/>
+</proc>
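
Outside YACS, the PyGetPmmlFile / PyLoadPMML0 chain above reduces to the standalone sketch below. It uses only the calls already present in the schema (PMMLlib, SetCurrentModel, ExportPyStr), the file and model names set in the <parameter> blocks, and the same Python 2 syntax as the schema scripts; the generated pyFunc takes the eight inputs as a list and returns a one-element list.

    import os
    from PMML import *

    resourcesDir = os.path.join(os.getenv("YACS_ROOT_DIR"),
                                "share", "salome", "yacssamples")
    filepath = os.path.join(resourcesDir, "pmml_tann_exportFunctionPMML.pmml")

    pmmlObj = PMMLlib(filepath)
    pmmlObj.SetCurrentModel("ANNModel", kANN)
    myCode = pmmlObj.ExportPyStr("pyFunc", "Function processed in YACS")
    exec myCode   # defines pyFunc in the current namespace

    out = pyFunc([1.456610531e-01, 4.817264758e+04, 7.398381011e+04, 1.035837913e+02,
                  9.968027314e+02, 7.662662390e+02, 1.606060725e+03, 1.147627487e+04])
    # The schema's PyValidate node expects out[0] to match the first reference
    # output, 1.165811298e+02, to within epsilon = 1e-6.
    print out[0]
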
diff --git a/src/yacsloader/samples/schemaANNLR2.xml b/src/yacsloader/samples/schemaANNLR2.xml
new file mode 100644 (file)
index 0000000..1fdcf33
--- /dev/null
@@ -0,0 +1,569 @@
+<?xml version='1.0' encoding='iso-8859-1' ?>
+<proc name="newSchema_1">
+   <property name="DefaultStudyID" value="1"/>
+   <type name="string" kind="string"/>
+   <struct name="Engines/dataref">
+      <member name="ref" type="string"/>
+   </struct>
+   <type name="bool" kind="bool"/>
+   <sequence name="boolvec" content="bool"/>
+   <type name="double" kind="double"/>
+   <sequence name="dblevec" content="double"/>
+   <objref name="file" id="file"/>
+   <type name="int" kind="int"/>
+   <sequence name="intvec" content="int"/>
+   <struct name="stringpair">
+      <member name="name" type="string"/>
+      <member name="value" type="string"/>
+   </struct>
+   <sequence name="propvec" content="stringpair"/>
+   <objref name="pyobj" id="python:obj:1.0"/>
+   <sequence name="seqboolvec" content="boolvec"/>
+   <sequence name="seqdblevec" content="dblevec"/>
+   <sequence name="seqint" content="int"/>
+   <sequence name="seqintvec" content="intvec"/>
+   <sequence name="stringvec" content="string"/>
+   <sequence name="seqstringvec" content="stringvec"/>
+   <container name="DefaultContainer">
+      <property name="container_name" value="FactoryServer"/>
+      <property name="name" value="localhost"/>
+   </container>
+   <foreach name="ForEachLoop_int6" nbranch="1" type="int">
+      <bloc name="Bloc8">
+         <inline name="PyFuncExec">
+            <script><code><![CDATA[
+# on reconstruit une liste de double
+doubleVecIn = [ float(v) for v in inputs[i] ] ;
+
+doubleVecOutLR = pyFuncLR( doubleVecIn );
+doubleVecOutANN = pyFuncANN( doubleVecIn );
+
+outputLR = doubleVecOutLR[0];
+outputANN = doubleVecOutANN[0];
+
+
+]]></code></script>
+            <inport name="i" type="int"/>
+            <inport name="inputs" type="pyobj"/>
+            <inport name="pyFuncANN" type="pyobj"/>
+            <inport name="pyFuncLR" type="pyobj"/>
+            <outport name="outputLR" type="double"/>
+            <outport name="outputANN" type="double"/>
+         </inline>
+         <inline name="PyValidate">
+            <script><code><![CDATA[#-----------------------------#
+#       Validate              #
+#-----------------------------#
+
+ret = True;
+refOutputLR = float( refOutputsLR[i] );
+refOutputANN = float( refOutputsANN[i] );
+
+errLR = abs( refOutputLR - outputLR ) ; 
+errANN = abs( refOutputANN - outputANN ) ; 
+
+msg   = " LR : ref = %f , out = %f, err = %f \n"%(refOutputLR, outputLR,errLR);
+msg += "ANN : ref = %f , out = %f, err = %f \n"%(refOutputANN, outputANN, errANN);
+if errLR > epsilon or errANN > epsilon: 
+    msg   += "KO valid with eps %f at pattern #%i: \n"%(epsilon, i) ;
+    ret = False;
+    print msg;
+    pass
+
+print "+++ OK " , i , " +++", ret
+]]></code></script>
+            <inport name="i" type="int"/>
+            <inport name="refOutputsLR" type="pyobj"/>
+            <inport name="refOutputsANN" type="pyobj"/>
+            <inport name="epsilon" type="double"/>
+            <inport name="outputLR" type="double"/>
+            <inport name="outputANN" type="double"/>
+            <outport name="ret" type="bool"/>
+         </inline>
+         <control> <fromnode>PyFuncExec</fromnode> <tonode>PyValidate</tonode> </control>
+         <datalink control="false">
+            <fromnode>PyFuncExec</fromnode> <fromport>outputLR</fromport>
+            <tonode>PyValidate</tonode> <toport>outputLR</toport>
+         </datalink>
+         <datalink control="false">
+            <fromnode>PyFuncExec</fromnode> <fromport>outputANN</fromport>
+            <tonode>PyValidate</tonode> <toport>outputANN</toport>
+         </datalink>
+      </bloc>
+   </foreach>
+   <inline name="PyLoadPMML_LR">
+      <script><code><![CDATA[ 
+import sys;
+from PMML import *;
+pmmlObj = PMMLlib( filename );   
+pmmlObj.SetCurrentModel( modelname , eval(pmmltype) );
+myFunc = "pyFunc";
+myHeader = "Function processed in YACCS";
+myCode = pmmlObj.ExportPyStr(myFunc, myHeader);
+exec myCode;
+]]></code></script>
+      <inport name="filename" type="string"/>
+      <inport name="modelname" type="string"/>
+      <inport name="pmmltype" type="string"/>
+      <outport name="pyFunc" type="pyobj"/>
+   </inline>
+   <inline name="PyLoadPMML_ANN">
+      <script><code><![CDATA[ 
+import sys;
+from PMML import *;
+pmmlObj = PMMLlib( filename );   
+pmmlObj.SetCurrentModel( modelname , eval(pmmltype) );
+myFunc = "pyFunc";
+myHeader = "Function processed in YACCS";
+myCode = pmmlObj.ExportPyStr(myFunc, myHeader);
+exec myCode;
+]]></code></script>
+      <inport name="filename" type="string"/>
+      <inport name="modelname" type="string"/>
+      <inport name="pmmltype" type="string"/>
+      <outport name="pyFunc" type="pyobj"/>
+   </inline>
+   <inline name="PyInitInputs0">
+      <script><code><![CDATA[#-----------------------------#
+#             Inputs              #
+#-----------------------------#
+
+inputs = [ 
+[1.456610531e-01, 4.817264758e+04, 7.398381011e+04, 1.035837913e+02, 9.968027314e+02, 7.662662390e+02, 1.606060725e+03, 1.147627487e+04], 
+[7.767860409e-02, 1.193098539e+04, 7.292686667e+04, 9.785118163e+01, 1.051369254e+03, 7.027872080e+02, 1.447244752e+03, 1.028475889e+04], 
+[6.750842479e-02, 2.882617130e+04, 9.817007636e+04, 8.488524614e+01, 1.090567120e+03, 8.122979233e+02, 1.649818202e+03, 1.149754134e+04], 
+[8.860282350e-02, 3.498171453e+03, 8.530760503e+04, 9.046022050e+01, 1.095160960e+03, 7.874820198e+02, 1.367563528e+03, 1.084392094e+04], 
+[1.365903117e-01, 2.825638843e+04, 8.889946935e+04, 1.113147418e+02, 1.019749972e+03, 7.134926632e+02, 1.507458958e+03, 1.081045588e+04], 
+[7.127271995e-02, 1.753261607e+04, 9.534506291e+04, 9.153808181e+01, 1.061650244e+03, 7.309769019e+02, 1.657218091e+03, 1.072962363e+04], 
+[1.265602027e-01, 4.219433169e+04, 9.456040368e+04, 1.056736168e+02, 1.099933893e+03, 7.471638924e+02, 1.273665410e+03, 1.138336512e+04], 
+[8.667764079e-02, 4.120117756e+04, 1.133877245e+05, 7.767636060e+01, 1.016590900e+03, 7.080602432e+02, 1.660892376e+03, 1.092560938e+04], 
+[9.976394549e-02, 6.420167443e+03, 9.830132327e+04, 8.501884776e+01, 1.097122572e+03, 7.253532648e+02, 1.307720423e+03, 9.866836214e+03], 
+[1.146758223e-01, 8.710374173e+03, 8.523215073e+04, 9.414901527e+01, 1.095537742e+03, 7.060630963e+02, 1.275130095e+03, 1.130554594e+04], 
+[7.880624877e-02, 4.311161871e+04, 7.377943028e+04, 1.061320296e+02, 1.047244176e+03, 7.477760505e+02, 1.415636178e+03, 1.040593206e+04], 
+[1.058084156e-01, 4.582563552e+04, 9.222816385e+04, 1.038847032e+02, 9.955665779e+02, 7.432779758e+02, 1.504745288e+03, 1.146224012e+04], 
+[1.253087794e-01, 4.847286153e+04, 7.518998946e+04, 1.040979939e+02, 1.105768478e+03, 7.577355747e+02, 1.201630771e+03, 1.088149400e+04], 
+[1.175039884e-01, 2.565240792e+04, 7.230018052e+04, 8.257012789e+01, 1.032484559e+03, 7.694279542e+02, 1.566647304e+03, 1.153822324e+04], 
+[6.124755615e-02, 4.507578924e+04, 6.478138490e+04, 9.090792861e+01, 1.062255915e+03, 7.497957649e+02, 1.184192526e+03, 1.170539896e+04], 
+[5.544131559e-02, 3.390925881e+04, 7.328456973e+04, 1.090337509e+02, 1.048630443e+03, 7.628191574e+02, 1.185768164e+03, 1.028085684e+04], 
+[1.183272497e-01, 4.639774146e+04, 6.984716489e+04, 7.483809181e+01, 1.071724305e+03, 8.029602783e+02, 1.301826477e+03, 1.069809527e+04], 
+[1.178550031e-01, 1.065266711e+04, 7.473813352e+04, 7.063285879e+01, 1.017164661e+03, 7.183678405e+02, 1.252220675e+03, 1.085888136e+04], 
+[1.116310036e-01, 7.057176796e+02, 8.914959172e+04, 1.143736719e+02, 1.024291508e+03, 7.893954959e+02, 1.375784904e+03, 1.189588654e+04], 
+[1.056449157e-01, 3.725247649e+04, 1.126706761e+05, 8.220356853e+01, 1.000815794e+03, 7.342054423e+02, 1.331007515e+03, 1.057815755e+04], 
+[5.585244596e-02, 1.080328986e+04, 7.647445600e+04, 1.127840680e+02, 1.101335277e+03, 7.090687232e+02, 1.571310094e+03, 1.179921032e+04], 
+[9.318997589e-02, 3.694986496e+04, 1.142850986e+05, 9.885621621e+01, 1.047818074e+03, 7.462410467e+02, 1.550908728e+03, 1.024738180e+04], 
+[9.769526026e-02, 4.898586800e+04, 8.454146334e+04, 7.390916471e+01, 1.069034353e+03, 7.761299060e+02, 1.366617089e+03, 1.045533000e+04], 
+[1.448874974e-01, 2.618871518e+04, 1.006705237e+05, 6.761931276e+01, 1.084724402e+03, 7.056825472e+02, 1.467825112e+03, 1.063120366e+04], 
+[5.748240145e-02, 4.265983570e+04, 6.922054248e+04, 7.153213366e+01, 1.029573412e+03, 7.140769415e+02, 1.638688665e+03, 1.152371724e+04], 
+[1.004554848e-01, 1.849359821e+04, 1.073035370e+05, 9.843990445e+01, 1.061773839e+03, 7.703136119e+02, 1.142717255e+03, 1.133548780e+04], 
+[6.904713159e-02, 3.114664091e+04, 1.154062790e+05, 8.699726139e+01, 9.951594968e+02, 8.013040888e+02, 1.645133282e+03, 1.101513225e+04], 
+[7.535831709e-02, 2.084271662e+04, 8.796612167e+04, 8.755547732e+01, 1.091487642e+03, 7.501847659e+02, 1.272233814e+03, 1.153502741e+04], 
+[1.316642693e-01, 4.074199552e+04, 9.791860127e+04, 1.096474308e+02, 1.103690417e+03, 7.597649884e+02, 1.149359431e+03, 1.121703132e+04], 
+[1.256804603e-01, 2.171487442e+04, 1.047171996e+05, 8.588439966e+01, 1.102320221e+03, 7.708693798e+02, 1.329214491e+03, 1.059463337e+04], 
+[5.439524620e-02, 2.259584733e+04, 8.079125672e+04, 6.476205727e+01, 1.106152417e+03, 8.113191944e+02, 1.255437236e+03, 1.201333911e+04], 
+[8.750209459e-02, 1.410249021e+04, 1.119751321e+05, 1.073490872e+02, 1.063143667e+03, 7.871042297e+02, 1.329983208e+03, 1.195606571e+04], 
+[8.699797238e-02, 4.021709531e+04, 1.117867687e+05, 9.589155856e+01, 1.073694293e+03, 7.994999584e+02, 1.627265626e+03, 1.136706401e+04], 
+[6.892920064e-02, 2.032907492e+03, 1.010396848e+05, 6.783802062e+01, 1.086461820e+03, 7.374214870e+02, 1.337611800e+03, 1.085112805e+04], 
+[1.110159183e-01, 4.417290645e+04, 7.073649093e+04, 7.518425871e+01, 1.107020884e+03, 7.795442605e+02, 1.148611472e+03, 9.964154503e+03], 
+[5.864651909e-02, 4.709372341e+04, 1.058816566e+05, 6.673370711e+01, 1.046313765e+03, 7.910806290e+02, 1.399673582e+03, 1.115277414e+04], 
+[1.024623536e-01, 3.248763678e+04, 1.123176511e+05, 1.107772688e+02, 1.012914390e+03, 7.571928585e+02, 1.189214491e+03, 1.183043699e+04], 
+[1.268376736e-01, 2.048412849e+04, 1.085318941e+05, 6.459015746e+01, 1.109786159e+03, 8.139508806e+02, 1.359762233e+03, 1.157111067e+04], 
+[5.300816813e-02, 2.943127727e+04, 1.146785278e+05, 8.394814161e+01, 1.043452404e+03, 7.603354413e+02, 1.658862011e+03, 1.163288578e+04], 
+[7.028017434e-02, 4.192750166e+04, 9.553886080e+04, 6.383743056e+01, 1.079645033e+03, 7.723588658e+02, 1.321409167e+03, 1.058567246e+04], 
+[5.991478449e-02, 4.541579388e+04, 6.812180949e+04, 9.246414937e+01, 1.019926004e+03, 8.069446852e+02, 1.239566623e+03, 1.120099431e+04], 
+[1.067481756e-01, 1.157791820e+04, 8.601870382e+04, 9.535250994e+01, 1.058079739e+03, 8.029092666e+02, 1.675663505e+03, 9.857368989e+03], 
+[1.372760225e-01, 2.899852970e+04, 8.623842631e+04, 1.131718316e+02, 1.024500700e+03, 7.638957159e+02, 1.530839326e+03, 1.082014841e+04], 
+[5.646039560e-02, 2.018348280e+04, 1.080224373e+05, 1.152372480e+02, 1.093895079e+03, 7.378488842e+02, 1.286477483e+03, 1.185925929e+04], 
+[1.112123774e-01, 2.393360775e+04, 9.292106786e+04, 7.719527530e+01, 1.067671951e+03, 7.343684587e+02, 1.483330544e+03, 1.201709952e+04], 
+[1.307541334e-01, 1.560575418e+04, 1.072113632e+05, 1.121212726e+02, 1.037577460e+03, 8.001239033e+02, 1.526339128e+03, 1.134591351e+04], 
+[6.497204223e-02, 1.402014180e+04, 6.745969376e+04, 7.862832618e+01, 1.014652310e+03, 7.785303790e+02, 1.410865140e+03, 1.050284643e+04], 
+[7.585702665e-02, 4.060724171e+04, 7.891898759e+04, 8.983721307e+01, 1.027113392e+03, 7.415777465e+02, 1.564676410e+03, 1.021290221e+04], 
+[1.236232545e-01, 3.896089552e+04, 9.928329730e+04, 7.287234301e+01, 9.987764845e+02, 8.183318132e+02, 1.174504796e+03, 1.008298210e+04], 
+[1.472884758e-01, 1.496664561e+04, 9.577485455e+04, 8.233690370e+01, 1.081566913e+03, 7.885231394e+02, 1.401539659e+03, 1.177286288e+04], 
+[1.446232028e-01, 4.176932757e+04, 6.512933971e+04, 9.704737342e+01, 1.013731965e+03, 7.047846719e+02, 1.603844751e+03, 1.068331103e+04], 
+[1.380008828e-01, 3.305905514e+04, 9.999652423e+04, 6.636073041e+01, 1.064322897e+03, 7.729923602e+02, 1.227516863e+03, 1.171354749e+04], 
+[6.344159464e-02, 4.780648795e+04, 9.365102770e+04, 8.802953531e+01, 1.029707604e+03, 7.933230876e+02, 1.374652921e+03, 1.008866356e+04], 
+[5.258122387e-02, 3.702566309e+03, 7.777773463e+04, 7.570896193e+01, 1.052339637e+03, 7.741762325e+02, 1.641636623e+03, 1.121956718e+04], 
+[1.308250614e-01, 4.040441149e+04, 8.911452953e+04, 8.104256772e+01, 1.054111352e+03, 7.773815616e+02, 1.334252555e+03, 1.104079585e+04], 
+[1.420385163e-01, 4.604351716e+04, 8.593646152e+04, 8.159247797e+01, 1.060594140e+03, 8.058854605e+02, 1.637341312e+03, 1.120759720e+04], 
+[1.358384353e-01, 3.498709280e+04, 8.406584308e+04, 8.165787651e+01, 1.022762168e+03, 8.120310170e+02, 1.474777461e+03, 1.003994300e+04], 
+[1.423898316e-01, 4.687374179e+04, 8.660185111e+04, 7.776608109e+01, 9.927602408e+02, 7.641272454e+02, 1.455954292e+03, 1.178518632e+04], 
+[1.321018940e-01, 2.866815831e+03, 9.113096058e+04, 1.007088501e+02, 1.086874972e+03, 7.587825443e+02, 1.610678155e+03, 1.146851885e+04], 
+[1.351663932e-01, 3.196774887e+04, 1.021987705e+05, 7.197885925e+01, 1.021175944e+03, 7.331080072e+02, 1.162493217e+03, 1.109738563e+04], 
+[8.648890684e-02, 4.492471938e+04, 6.438582057e+04, 1.110566727e+02, 1.090853465e+03, 7.597253981e+02, 1.467197961e+03, 1.195185409e+04], 
+[1.104557208e-01, 4.850355694e+04, 1.018022746e+05, 1.149248442e+02, 1.013058279e+03, 7.367222887e+02, 1.358941413e+03, 1.134149282e+04], 
+[1.091740527e-01, 4.724040634e+04, 1.023148364e+05, 1.026078719e+02, 1.016770836e+03, 7.167576085e+02, 1.388776487e+03, 1.018437131e+04], 
+[1.386282688e-01, 1.824670236e+04, 7.934492890e+04, 7.145704182e+01, 1.031398805e+03, 7.882243654e+02, 1.511935264e+03, 1.073294641e+04], 
+[1.072148102e-01, 2.606334586e+04, 1.045009711e+05, 1.153347286e+02, 1.013401269e+03, 7.542190230e+02, 1.392997551e+03, 1.072210903e+04], 
+[1.377281664e-01, 3.939831181e+04, 7.710345011e+04, 6.326330520e+01, 1.038617320e+03, 7.401934748e+02, 1.527993368e+03, 1.129318647e+04], 
+[1.337643862e-01, 4.712556894e+04, 9.486490032e+04, 8.660231295e+01, 1.082875874e+03, 7.991662034e+02, 1.344812635e+03, 1.034653122e+04], 
+[1.450803097e-01, 3.690122313e+04, 7.076429187e+04, 7.923525262e+01, 1.021698784e+03, 7.821386527e+02, 1.240302421e+03, 1.092916457e+04], 
+[9.160223345e-02, 3.477878623e+04, 7.860829708e+04, 7.052989639e+01, 1.045971334e+03, 7.371632922e+02, 1.377972291e+03, 1.202636052e+04], 
+[1.369383921e-01, 7.129085246e+03, 1.003532237e+05, 6.706267812e+01, 9.920945476e+02, 7.700039641e+02, 1.270854730e+03, 1.093421128e+04], 
+[9.150440403e-02, 3.191576156e+04, 1.043970415e+05, 1.121933263e+02, 9.961948536e+02, 7.035905746e+02, 1.503665860e+03, 1.175814238e+04], 
+[1.391914465e-01, 2.679929889e+04, 9.315698192e+04, 1.086281131e+02, 1.108793392e+03, 8.128614591e+02, 1.491457967e+03, 1.029088956e+04], 
+[5.336087538e-02, 1.787440457e+04, 8.274402814e+04, 9.735553791e+01, 1.052490734e+03, 7.853725287e+02, 1.505494679e+03, 1.199289056e+04], 
+[5.136318222e-02, 2.313327941e+04, 8.127627613e+04, 6.730045023e+01, 1.040984645e+03, 7.672337162e+02, 1.340467605e+03, 9.996178363e+03], 
+[5.202323461e-02, 1.418186294e+03, 9.097156505e+04, 6.493013875e+01, 1.030920220e+03, 7.896488622e+02, 1.361926266e+03, 1.152603230e+04], 
+[1.120003670e-01, 4.822621219e+04, 1.096378917e+05, 1.066666519e+02, 1.053178110e+03, 7.431009273e+02, 1.651552956e+03, 1.102461978e+04], 
+[8.156025845e-02, 3.294115069e+04, 7.846891086e+04, 1.125022952e+02, 1.075934524e+03, 8.170942340e+02, 1.251695262e+03, 1.007675874e+04], 
+[6.320769249e-02, 2.369685837e+04, 1.124336882e+05, 8.689656009e+01, 1.035765280e+03, 7.364800974e+02, 1.354682602e+03, 1.166796177e+04], 
+[6.503346261e-02, 1.730539268e+03, 8.554891831e+04, 1.052469487e+02, 1.109487127e+03, 7.156856899e+02, 1.568668470e+03, 1.144257670e+04], 
+[1.060320179e-01, 2.193967854e+04, 9.283579078e+04, 7.307445266e+01, 9.997547759e+02, 8.019001159e+02, 1.425747028e+03, 1.140852632e+04], 
+] 
+
+
+]]></code></script>
+      <outport name="inputs" type="pyobj"/>
+   </inline>
+   <inline name="PyInitRefOutputs_ANN">
+      <script><code><![CDATA[#-----------------------------#
+#      Reference outputs      #
+#-----------------------------#
+
+refOutputs_ANN = [ 
+1.168089449e+02, 
+4.781593273e+01, 
+2.496977483e+01, 
+5.842099314e+01, 
+1.305961979e+02, 
+3.233802619e+01, 
+1.546164483e+02, 
+4.336998926e+01, 
+9.087786254e+01, 
+1.434798212e+02, 
+4.099991316e+01, 
+6.431449369e+01, 
+1.501872417e+02, 
+8.053221147e+01, 
+4.197310874e+01, 
+2.671091281e+01, 
+9.454134503e+01, 
+1.102350186e+02, 
+7.907556494e+01, 
+7.172391411e+01, 
+3.754366431e+01, 
+5.038705917e+01, 
+6.375031502e+01, 
+1.771890832e+02, 
+2.343676246e+01, 
+9.027995034e+01, 
+1.569189976e+01, 
+5.982813776e+01, 
+1.705315538e+02, 
+1.303445228e+02, 
+3.113554928e+01, 
+5.939844286e+01, 
+4.063679863e+01, 
+4.624494051e+01, 
+1.070568093e+02, 
+2.191301917e+01, 
+8.354237959e+01, 
+1.253189335e+02, 
+1.767668338e+01, 
+3.842513272e+01, 
+2.175455106e+01, 
+4.647435880e+01, 
+1.120795974e+02, 
+4.340174985e+01, 
+1.045134248e+02, 
+9.720940205e+01, 
+2.096661911e+01, 
+2.966589787e+01, 
+7.830846633e+01, 
+1.651341366e+02, 
+1.374000378e+02, 
+1.592408357e+02, 
+2.009023783e+01, 
+1.597628494e+01, 
+1.227331903e+02, 
+1.130605736e+02, 
+8.720282158e+01, 
+1.245096409e+02, 
+1.277797482e+02, 
+1.515343460e+02, 
+6.430457392e+01, 
+8.767183986e+01, 
+8.096423589e+01, 
+1.069264773e+02, 
+6.979279556e+01, 
+1.308867480e+02, 
+1.234734350e+02, 
+1.428528024e+02, 
+7.048251785e+01, 
+1.174200781e+02, 
+5.850344216e+01, 
+1.269707613e+02, 
+2.034820017e+01, 
+1.785455851e+01, 
+1.853062331e+01, 
+7.838090949e+01, 
+4.078240457e+01, 
+3.516311210e+01, 
+4.512340938e+01, 
+5.357101580e+01 ] 
+]]></code></script>
+      <outport name="refOutputs_ANN" type="pyobj"/>
+   </inline>
+   <inline name="PyInitRefOutputs_LR">
+      <script><code><![CDATA[#-----------------------------#
+#      Reference outputs      #
+#-----------------------------#
+
+refOutputs_LR = [ 
+1.199597225e+02, 
+5.286440730e+01, 
+1.946749529e+01, 
+6.734171678e+01, 
+1.265538870e+02, 
+3.126495039e+01, 
+1.423813327e+02, 
+4.872963193e+01, 
+9.497332404e+01, 
+1.341211849e+02, 
+4.464289526e+01, 
+7.437583290e+01, 
+1.393299719e+02, 
+9.123466759e+01, 
+4.659905036e+01, 
+1.972448931e+01, 
+1.020662831e+02, 
+1.127851501e+02, 
+8.909165532e+01, 
+8.046407212e+01, 
+3.861336913e+01, 
+5.693876214e+01, 
+7.329772086e+01, 
+1.583000177e+02, 
+1.510501114e+01, 
+9.637844578e+01, 
+-4.096191309e+00, 
+6.798388710e+01, 
+1.529755696e+02, 
+1.263372809e+02, 
+3.137004476e+01, 
+6.862410531e+01, 
+4.620214686e+01, 
+5.170112470e+01, 
+1.094077720e+02, 
+1.194064903e+01, 
+9.119449042e+01, 
+1.253286559e+02, 
+4.007263377e-01, 
+4.169281315e+01, 
+1.192188109e+01, 
+5.432408674e+01, 
+1.147070671e+02, 
+4.662373285e+01, 
+1.090036057e+02, 
+1.042114148e+02, 
+9.820112838e+00, 
+2.698818217e+01, 
+8.880536866e+01, 
+1.527146568e+02, 
+1.322338373e+02, 
+1.482507911e+02, 
+6.458012341e+00, 
+-3.840634931e+00, 
+1.227435169e+02, 
+1.175914158e+02, 
+9.688870975e+01, 
+1.258285676e+02, 
+1.258427717e+02, 
+1.416880775e+02, 
+7.357448465e+01, 
+9.432611383e+01, 
+8.753893164e+01, 
+1.127819173e+02, 
+7.842914575e+01, 
+1.292992326e+02, 
+1.226379987e+02, 
+1.376409297e+02, 
+8.068110325e+01, 
+1.198912866e+02, 
+6.675030235e+01, 
+1.249090790e+02, 
+8.233983443e+00, 
+9.857868847e-02, 
+3.996566681e+00, 
+8.655301005e+01, 
+4.485232922e+01, 
+3.618350443e+01, 
+4.942136971e+01, 
+6.499941347e+01 ] 
+
+]]></code></script>
+      <outport name="refOutputs_LR" type="pyobj"/>
+   </inline>
+   <inline name="PyEpsilon">
+      <script><code><![CDATA[epsilon = 1E-6;
+]]></code></script>
+      <outport name="epsilon" type="double"/>
+   </inline>
+   <inline name="PyBuildLoopIndex0">
+      <function name="make_indexes">
+         <code><![CDATA[def make_indexes(inVal, outVal_LR, outVal_ANN) :
+    print "In make_indexes" ;
+
+    if ( len(inVal) != len(outVal_LR) ) :
+        msg = "len(inVal) (%i) != len(outVal_LR) (%i). "%( len(inVal), len(outVal_LR) ) ;
+        raise ValueError(msg)       
+
+    if ( len(inVal) != len(outVal_ANN) ) :
+        msg = "len(inVal) (%i) != len(outVal_ANN) (%i). "%( len(inVal), len(outVal_ANN) ) ;
+        raise ValueError(msg)          
+
+    n = len( inVal ); 
+    indexes = [ i for i in range( n ) ]  ;
+    return indexes
+
+]]></code>
+      </function>
+      <inport name="inVal" type="pyobj"/>
+      <inport name="outVal_LR" type="pyobj"/>
+      <inport name="outVal_ANN" type="pyobj"/>
+      <outport name="indexes" type="intvec"/>
+   </inline>
+   <inline name="PyGetPmmlFile">
+      <script><code><![CDATA[#-----------------------------#
+#       Get PMML file         #
+#-----------------------------#
+import os;
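+# sample PMML files are looked up under $YACS_ROOT_DIR/share/salome/yacssamples/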
+pmmlRootDir = os.getenv("YACS_ROOT_DIR");
+resourcesDir = os.path.join(pmmlRootDir,"share","salome","yacssamples");
+resourcesDir += os.sep ;
+filepath = resourcesDir + filename;
+]]></code></script>
+      <inport name="filename" type="string"/>
+      <outport name="filepath" type="string"/>
+   </inline>
+   <inline name="PyGetRes">
+      <script><code><![CDATA[print "retVec ";
+print retVec;
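+# reduce the per-sample boolean results to a single "True"/"False" string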
+res = "True";
+for ret in retVec:
+    if not ret:
+        res = "False";
+        pass
+    pass
+
+]]></code></script>
+      <inport name="retVec" type="boolvec"/>
+      <outport name="res" type="string"/>
+   </inline>
+   <control> <fromnode>ForEachLoop_int6</fromnode> <tonode>PyGetRes</tonode> </control>
+   <control> <fromnode>PyLoadPMML_LR</fromnode> <tonode>PyLoadPMML_ANN</tonode> </control>
+   <control> <fromnode>PyLoadPMML_ANN</fromnode> <tonode>PyInitInputs0</tonode> </control>
+   <control> <fromnode>PyInitInputs0</fromnode> <tonode>PyInitRefOutputs_ANN</tonode> </control>
+   <control> <fromnode>PyInitRefOutputs_ANN</fromnode> <tonode>PyInitRefOutputs_LR</tonode> </control>
+   <control> <fromnode>PyInitRefOutputs_LR</fromnode> <tonode>PyEpsilon</tonode> </control>
+   <control> <fromnode>PyEpsilon</fromnode> <tonode>PyBuildLoopIndex0</tonode> </control>
+   <control> <fromnode>PyBuildLoopIndex0</fromnode> <tonode>ForEachLoop_int6</tonode> </control>
+   <control> <fromnode>PyGetPmmlFile</fromnode> <tonode>PyLoadPMML_LR</tonode> </control>
+   <control> <fromnode>PyGetPmmlFile</fromnode> <tonode>PyLoadPMML_ANN</tonode> </control>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6</fromnode> <fromport>evalSamples</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>i</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6</fromnode> <fromport>evalSamples</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>i</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyLoadPMML_LR</fromnode> <fromport>pyFunc</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>pyFuncLR</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyLoadPMML_ANN</fromnode> <fromport>pyFunc</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>pyFuncANN</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitInputs0</fromnode> <fromport>inputs</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>inputs</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitInputs0</fromnode> <fromport>inputs</fromport>
+      <tonode>PyBuildLoopIndex0</tonode> <toport>inVal</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitRefOutputs_ANN</fromnode> <fromport>refOutputs_ANN</fromport>
+      <tonode>PyBuildLoopIndex0</tonode> <toport>outVal_ANN</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitRefOutputs_ANN</fromnode> <fromport>refOutputs_ANN</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>refOutputsANN</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitRefOutputs_LR</fromnode> <fromport>refOutputs_LR</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>refOutputsLR</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitRefOutputs_LR</fromnode> <fromport>refOutputs_LR</fromport>
+      <tonode>PyBuildLoopIndex0</tonode> <toport>outVal_LR</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyEpsilon</fromnode> <fromport>epsilon</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>epsilon</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyBuildLoopIndex0</fromnode> <fromport>indexes</fromport>
+      <tonode>ForEachLoop_int6</tonode> <toport>SmplsCollection</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyGetPmmlFile</fromnode> <fromport>filepath</fromport>
+      <tonode>PyLoadPMML_LR</tonode> <toport>filename</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyGetPmmlFile</fromnode> <fromport>filepath</fromport>
+      <tonode>PyLoadPMML_ANN</tonode> <toport>filename</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6.Bloc8.PyValidate</fromnode> <fromport>ret</fromport>
+      <tonode>PyGetRes</tonode> <toport>retVec</toport>
+   </datalink>
+   <parameter>
+      <tonode>PyGetPmmlFile</tonode><toport>filename</toport>
+      <value><string>pmml_tann_tlr_exportFunctionPMML.pmml</string></value>
+   </parameter>
+   <parameter>
+      <tonode>ForEachLoop_int6</tonode><toport>nbBranches</toport>
+      <value><int>1</int></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML_LR</tonode><toport>filename</toport>
+      <value><string>pmml_tann_tlr_exportFunctionPMML.pmml</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML_LR</tonode><toport>modelname</toport>
+      <value><string>LRModel</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML_LR</tonode><toport>pmmltype</toport>
+      <value><string>kLR</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML_ANN</tonode><toport>filename</toport>
+      <value><string>pmml_tann_tlr_exportFunctionPMML.pmml</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML_ANN</tonode><toport>modelname</toport>
+      <value><string>ANNModel</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML_ANN</tonode><toport>pmmltype</toport>
+      <value><string>kANN</string></value>
+   </parameter>
+   <presentation name="ForEachLoop_int6.Bloc8.PyValidate" x="250.158" y="108.92" width="158" height="198" expanded="1" expx="250.158" expy="108.92" expWidth="158" expHeight="198" shownState="0"/>
+   <presentation name="ForEachLoop_int6.Bloc8.PyFuncExec" x="30" y="109.105" width="158" height="144" expanded="1" expx="30" expy="109.105" expWidth="158" expHeight="144" shownState="0"/>
+   <presentation name="ForEachLoop_int6.Bloc8" x="4.7786" y="86" width="412.158" height="310.92" expanded="1" expx="4.7786" expy="86" expWidth="412.158" expHeight="310.92" shownState="0"/>
+   <presentation name="PyInitRefOutputs_LR" x="53.0257" y="338.181" width="158" height="63" expanded="1" expx="53.0257" expy="338.181" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyEpsilon" x="39.4207" y="446.791" width="158" height="63" expanded="1" expx="39.4207" expy="446.791" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyInitRefOutputs_ANN" x="289.99" y="261.595" width="158" height="63" expanded="1" expx="289.99" expy="261.595" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyGetPmmlFile" x="44.1955" y="32" width="158" height="63" expanded="1" expx="44.1955" expy="32" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="ForEachLoop_int6" x="160" y="671" width="420.937" height="400.92" expanded="1" expx="160" expy="671" expWidth="420.937" expHeight="400.92" shownState="0"/>
+   <presentation name="PyLoadPMML_LR" x="74.1757" y="105.151" width="158" height="117" expanded="1" expx="74.1757" expy="105.151" expWidth="158" expHeight="117" shownState="0"/>
+   <presentation name="PyLoadPMML_ANN" x="300.952" y="83.3513" width="158" height="117" expanded="1" expx="300.952" expy="83.3513" expWidth="158" expHeight="117" shownState="0"/>
+   <presentation name="PyInitInputs0" x="38.6513" y="245.599" width="158" height="63" expanded="1" expx="38.6513" expy="245.599" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyBuildLoopIndex0" x="539.827" y="420.002" width="158" height="117" expanded="1" expx="539.827" expy="420.002" expWidth="158" expHeight="117" shownState="0"/>
+   <presentation name="PyGetRes" x="635.783" y="930.053" width="158" height="63" expanded="1" expx="635.783" expy="930.053" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="__ROOT__" x="0" y="0" width="797.783" height="1075.92" expanded="1" expx="0" expy="0" expWidth="797.783" expHeight="1075.92" shownState="0"/>
+</proc>
diff --git a/src/yacsloader/samples/schemaLR2.xml b/src/yacsloader/samples/schemaLR2.xml
new file mode 100644 (file)
index 0000000..bf78b60
--- /dev/null
@@ -0,0 +1,414 @@
+<?xml version='1.0' encoding='iso-8859-1' ?>
+<proc name="newSchema_1">
+   <property name="DefaultStudyID" value="1"/>
+   <type name="string" kind="string"/>
+   <struct name="Engines/dataref">
+      <member name="ref" type="string"/>
+   </struct>
+   <type name="bool" kind="bool"/>
+   <sequence name="boolvec" content="bool"/>
+   <type name="double" kind="double"/>
+   <sequence name="dblevec" content="double"/>
+   <objref name="file" id="file"/>
+   <type name="int" kind="int"/>
+   <sequence name="intvec" content="int"/>
+   <struct name="stringpair">
+      <member name="name" type="string"/>
+      <member name="value" type="string"/>
+   </struct>
+   <sequence name="propvec" content="stringpair"/>
+   <objref name="pyobj" id="python:obj:1.0"/>
+   <sequence name="seqboolvec" content="boolvec"/>
+   <sequence name="seqdblevec" content="dblevec"/>
+   <sequence name="seqint" content="int"/>
+   <sequence name="seqintvec" content="intvec"/>
+   <sequence name="stringvec" content="string"/>
+   <sequence name="seqstringvec" content="stringvec"/>
+   <container name="DefaultContainer">
+      <property name="container_name" value="FactoryServer"/>
+      <property name="name" value="localhost"/>
+   </container>
+   <foreach name="ForEachLoop_int6" nbranch="1" type="int">
+      <bloc name="Bloc8">
+         <inline name="PyFuncExec">
+            <script><code><![CDATA[
+# rebuild a list of doubles
+doubleVecIn = [ float(v) for v in inputs[i] ] ;
+
+doubleVecOut = pyFunc( doubleVecIn );
+
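+# the exported model function returns a list; only its first component is kept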
+output = doubleVecOut[0];
+]]></code></script>
+            <inport name="i" type="int"/>
+            <inport name="pyFunc" type="pyobj"/>
+            <inport name="inputs" type="pyobj"/>
+            <outport name="output" type="double"/>
+         </inline>
+         <inline name="PyValidate">
+            <script><code><![CDATA[#-----------------------------#
+#       Validate              #
+#-----------------------------#
+
+ret = True;
+
+refOutput = float( refOutputs[i] );
+
+err = abs( refOutput - output ) ; 
+
+if err > epsilon : 
+    msg   = "KO valid with eps %f at pattern #%i: \n"%(epsilon, i) ;
+    msg += "refOutput - output : %f - %f \n"%(refOutput, output);
+    msg += "Got abs relative diff  %f .\n"%err
+    ret = False;
+    print msg;
+    pass
+
+#print "+++ OK " , i , " +++", ret
+]]></code></script>
+            <inport name="i" type="int"/>
+            <inport name="refOutputs" type="pyobj"/>
+            <inport name="output" type="double"/>
+            <inport name="epsilon" type="double"/>
+            <outport name="ret" type="bool"/>
+         </inline>
+         <control> <fromnode>PyFuncExec</fromnode> <tonode>PyValidate</tonode> </control>
+         <datalink control="false">
+            <fromnode>PyFuncExec</fromnode> <fromport>output</fromport>
+            <tonode>PyValidate</tonode> <toport>output</toport>
+         </datalink>
+      </bloc>
+   </foreach>
+   <inline name="PyInitInputs">
+      <script><code><![CDATA[#-----------------------------#
+#             Inputs              #
+#-----------------------------#
+
+inputs = [ 
+[1.456610531e-01, 4.817264758e+04, 7.398381011e+04, 1.035837913e+02, 9.968027314e+02, 7.662662390e+02, 1.606060725e+03, 1.147627487e+04], 
+[7.767860409e-02, 1.193098539e+04, 7.292686667e+04, 9.785118163e+01, 1.051369254e+03, 7.027872080e+02, 1.447244752e+03, 1.028475889e+04], 
+[6.750842479e-02, 2.882617130e+04, 9.817007636e+04, 8.488524614e+01, 1.090567120e+03, 8.122979233e+02, 1.649818202e+03, 1.149754134e+04], 
+[8.860282350e-02, 3.498171453e+03, 8.530760503e+04, 9.046022050e+01, 1.095160960e+03, 7.874820198e+02, 1.367563528e+03, 1.084392094e+04], 
+[1.365903117e-01, 2.825638843e+04, 8.889946935e+04, 1.113147418e+02, 1.019749972e+03, 7.134926632e+02, 1.507458958e+03, 1.081045588e+04], 
+[7.127271995e-02, 1.753261607e+04, 9.534506291e+04, 9.153808181e+01, 1.061650244e+03, 7.309769019e+02, 1.657218091e+03, 1.072962363e+04], 
+[1.265602027e-01, 4.219433169e+04, 9.456040368e+04, 1.056736168e+02, 1.099933893e+03, 7.471638924e+02, 1.273665410e+03, 1.138336512e+04], 
+[8.667764079e-02, 4.120117756e+04, 1.133877245e+05, 7.767636060e+01, 1.016590900e+03, 7.080602432e+02, 1.660892376e+03, 1.092560938e+04], 
+[9.976394549e-02, 6.420167443e+03, 9.830132327e+04, 8.501884776e+01, 1.097122572e+03, 7.253532648e+02, 1.307720423e+03, 9.866836214e+03], 
+[1.146758223e-01, 8.710374173e+03, 8.523215073e+04, 9.414901527e+01, 1.095537742e+03, 7.060630963e+02, 1.275130095e+03, 1.130554594e+04], 
+[7.880624877e-02, 4.311161871e+04, 7.377943028e+04, 1.061320296e+02, 1.047244176e+03, 7.477760505e+02, 1.415636178e+03, 1.040593206e+04], 
+[1.058084156e-01, 4.582563552e+04, 9.222816385e+04, 1.038847032e+02, 9.955665779e+02, 7.432779758e+02, 1.504745288e+03, 1.146224012e+04], 
+[1.253087794e-01, 4.847286153e+04, 7.518998946e+04, 1.040979939e+02, 1.105768478e+03, 7.577355747e+02, 1.201630771e+03, 1.088149400e+04], 
+[1.175039884e-01, 2.565240792e+04, 7.230018052e+04, 8.257012789e+01, 1.032484559e+03, 7.694279542e+02, 1.566647304e+03, 1.153822324e+04], 
+[6.124755615e-02, 4.507578924e+04, 6.478138490e+04, 9.090792861e+01, 1.062255915e+03, 7.497957649e+02, 1.184192526e+03, 1.170539896e+04], 
+[5.544131559e-02, 3.390925881e+04, 7.328456973e+04, 1.090337509e+02, 1.048630443e+03, 7.628191574e+02, 1.185768164e+03, 1.028085684e+04], 
+[1.183272497e-01, 4.639774146e+04, 6.984716489e+04, 7.483809181e+01, 1.071724305e+03, 8.029602783e+02, 1.301826477e+03, 1.069809527e+04], 
+[1.178550031e-01, 1.065266711e+04, 7.473813352e+04, 7.063285879e+01, 1.017164661e+03, 7.183678405e+02, 1.252220675e+03, 1.085888136e+04], 
+[1.116310036e-01, 7.057176796e+02, 8.914959172e+04, 1.143736719e+02, 1.024291508e+03, 7.893954959e+02, 1.375784904e+03, 1.189588654e+04], 
+[1.056449157e-01, 3.725247649e+04, 1.126706761e+05, 8.220356853e+01, 1.000815794e+03, 7.342054423e+02, 1.331007515e+03, 1.057815755e+04], 
+[5.585244596e-02, 1.080328986e+04, 7.647445600e+04, 1.127840680e+02, 1.101335277e+03, 7.090687232e+02, 1.571310094e+03, 1.179921032e+04], 
+[9.318997589e-02, 3.694986496e+04, 1.142850986e+05, 9.885621621e+01, 1.047818074e+03, 7.462410467e+02, 1.550908728e+03, 1.024738180e+04], 
+[9.769526026e-02, 4.898586800e+04, 8.454146334e+04, 7.390916471e+01, 1.069034353e+03, 7.761299060e+02, 1.366617089e+03, 1.045533000e+04], 
+[1.448874974e-01, 2.618871518e+04, 1.006705237e+05, 6.761931276e+01, 1.084724402e+03, 7.056825472e+02, 1.467825112e+03, 1.063120366e+04], 
+[5.748240145e-02, 4.265983570e+04, 6.922054248e+04, 7.153213366e+01, 1.029573412e+03, 7.140769415e+02, 1.638688665e+03, 1.152371724e+04], 
+[1.004554848e-01, 1.849359821e+04, 1.073035370e+05, 9.843990445e+01, 1.061773839e+03, 7.703136119e+02, 1.142717255e+03, 1.133548780e+04], 
+[6.904713159e-02, 3.114664091e+04, 1.154062790e+05, 8.699726139e+01, 9.951594968e+02, 8.013040888e+02, 1.645133282e+03, 1.101513225e+04], 
+[7.535831709e-02, 2.084271662e+04, 8.796612167e+04, 8.755547732e+01, 1.091487642e+03, 7.501847659e+02, 1.272233814e+03, 1.153502741e+04], 
+[1.316642693e-01, 4.074199552e+04, 9.791860127e+04, 1.096474308e+02, 1.103690417e+03, 7.597649884e+02, 1.149359431e+03, 1.121703132e+04], 
+[1.256804603e-01, 2.171487442e+04, 1.047171996e+05, 8.588439966e+01, 1.102320221e+03, 7.708693798e+02, 1.329214491e+03, 1.059463337e+04], 
+[5.439524620e-02, 2.259584733e+04, 8.079125672e+04, 6.476205727e+01, 1.106152417e+03, 8.113191944e+02, 1.255437236e+03, 1.201333911e+04], 
+[8.750209459e-02, 1.410249021e+04, 1.119751321e+05, 1.073490872e+02, 1.063143667e+03, 7.871042297e+02, 1.329983208e+03, 1.195606571e+04], 
+[8.699797238e-02, 4.021709531e+04, 1.117867687e+05, 9.589155856e+01, 1.073694293e+03, 7.994999584e+02, 1.627265626e+03, 1.136706401e+04], 
+[6.892920064e-02, 2.032907492e+03, 1.010396848e+05, 6.783802062e+01, 1.086461820e+03, 7.374214870e+02, 1.337611800e+03, 1.085112805e+04], 
+[1.110159183e-01, 4.417290645e+04, 7.073649093e+04, 7.518425871e+01, 1.107020884e+03, 7.795442605e+02, 1.148611472e+03, 9.964154503e+03], 
+[5.864651909e-02, 4.709372341e+04, 1.058816566e+05, 6.673370711e+01, 1.046313765e+03, 7.910806290e+02, 1.399673582e+03, 1.115277414e+04], 
+[1.024623536e-01, 3.248763678e+04, 1.123176511e+05, 1.107772688e+02, 1.012914390e+03, 7.571928585e+02, 1.189214491e+03, 1.183043699e+04], 
+[1.268376736e-01, 2.048412849e+04, 1.085318941e+05, 6.459015746e+01, 1.109786159e+03, 8.139508806e+02, 1.359762233e+03, 1.157111067e+04], 
+[5.300816813e-02, 2.943127727e+04, 1.146785278e+05, 8.394814161e+01, 1.043452404e+03, 7.603354413e+02, 1.658862011e+03, 1.163288578e+04], 
+[7.028017434e-02, 4.192750166e+04, 9.553886080e+04, 6.383743056e+01, 1.079645033e+03, 7.723588658e+02, 1.321409167e+03, 1.058567246e+04], 
+[5.991478449e-02, 4.541579388e+04, 6.812180949e+04, 9.246414937e+01, 1.019926004e+03, 8.069446852e+02, 1.239566623e+03, 1.120099431e+04], 
+[1.067481756e-01, 1.157791820e+04, 8.601870382e+04, 9.535250994e+01, 1.058079739e+03, 8.029092666e+02, 1.675663505e+03, 9.857368989e+03], 
+[1.372760225e-01, 2.899852970e+04, 8.623842631e+04, 1.131718316e+02, 1.024500700e+03, 7.638957159e+02, 1.530839326e+03, 1.082014841e+04], 
+[5.646039560e-02, 2.018348280e+04, 1.080224373e+05, 1.152372480e+02, 1.093895079e+03, 7.378488842e+02, 1.286477483e+03, 1.185925929e+04], 
+[1.112123774e-01, 2.393360775e+04, 9.292106786e+04, 7.719527530e+01, 1.067671951e+03, 7.343684587e+02, 1.483330544e+03, 1.201709952e+04], 
+[1.307541334e-01, 1.560575418e+04, 1.072113632e+05, 1.121212726e+02, 1.037577460e+03, 8.001239033e+02, 1.526339128e+03, 1.134591351e+04], 
+[6.497204223e-02, 1.402014180e+04, 6.745969376e+04, 7.862832618e+01, 1.014652310e+03, 7.785303790e+02, 1.410865140e+03, 1.050284643e+04], 
+[7.585702665e-02, 4.060724171e+04, 7.891898759e+04, 8.983721307e+01, 1.027113392e+03, 7.415777465e+02, 1.564676410e+03, 1.021290221e+04], 
+[1.236232545e-01, 3.896089552e+04, 9.928329730e+04, 7.287234301e+01, 9.987764845e+02, 8.183318132e+02, 1.174504796e+03, 1.008298210e+04], 
+[1.472884758e-01, 1.496664561e+04, 9.577485455e+04, 8.233690370e+01, 1.081566913e+03, 7.885231394e+02, 1.401539659e+03, 1.177286288e+04], 
+[1.446232028e-01, 4.176932757e+04, 6.512933971e+04, 9.704737342e+01, 1.013731965e+03, 7.047846719e+02, 1.603844751e+03, 1.068331103e+04], 
+[1.380008828e-01, 3.305905514e+04, 9.999652423e+04, 6.636073041e+01, 1.064322897e+03, 7.729923602e+02, 1.227516863e+03, 1.171354749e+04], 
+[6.344159464e-02, 4.780648795e+04, 9.365102770e+04, 8.802953531e+01, 1.029707604e+03, 7.933230876e+02, 1.374652921e+03, 1.008866356e+04], 
+[5.258122387e-02, 3.702566309e+03, 7.777773463e+04, 7.570896193e+01, 1.052339637e+03, 7.741762325e+02, 1.641636623e+03, 1.121956718e+04], 
+[1.308250614e-01, 4.040441149e+04, 8.911452953e+04, 8.104256772e+01, 1.054111352e+03, 7.773815616e+02, 1.334252555e+03, 1.104079585e+04], 
+[1.420385163e-01, 4.604351716e+04, 8.593646152e+04, 8.159247797e+01, 1.060594140e+03, 8.058854605e+02, 1.637341312e+03, 1.120759720e+04], 
+[1.358384353e-01, 3.498709280e+04, 8.406584308e+04, 8.165787651e+01, 1.022762168e+03, 8.120310170e+02, 1.474777461e+03, 1.003994300e+04], 
+[1.423898316e-01, 4.687374179e+04, 8.660185111e+04, 7.776608109e+01, 9.927602408e+02, 7.641272454e+02, 1.455954292e+03, 1.178518632e+04], 
+[1.321018940e-01, 2.866815831e+03, 9.113096058e+04, 1.007088501e+02, 1.086874972e+03, 7.587825443e+02, 1.610678155e+03, 1.146851885e+04], 
+[1.351663932e-01, 3.196774887e+04, 1.021987705e+05, 7.197885925e+01, 1.021175944e+03, 7.331080072e+02, 1.162493217e+03, 1.109738563e+04], 
+[8.648890684e-02, 4.492471938e+04, 6.438582057e+04, 1.110566727e+02, 1.090853465e+03, 7.597253981e+02, 1.467197961e+03, 1.195185409e+04], 
+[1.104557208e-01, 4.850355694e+04, 1.018022746e+05, 1.149248442e+02, 1.013058279e+03, 7.367222887e+02, 1.358941413e+03, 1.134149282e+04], 
+[1.091740527e-01, 4.724040634e+04, 1.023148364e+05, 1.026078719e+02, 1.016770836e+03, 7.167576085e+02, 1.388776487e+03, 1.018437131e+04], 
+[1.386282688e-01, 1.824670236e+04, 7.934492890e+04, 7.145704182e+01, 1.031398805e+03, 7.882243654e+02, 1.511935264e+03, 1.073294641e+04], 
+[1.072148102e-01, 2.606334586e+04, 1.045009711e+05, 1.153347286e+02, 1.013401269e+03, 7.542190230e+02, 1.392997551e+03, 1.072210903e+04], 
+[1.377281664e-01, 3.939831181e+04, 7.710345011e+04, 6.326330520e+01, 1.038617320e+03, 7.401934748e+02, 1.527993368e+03, 1.129318647e+04], 
+[1.337643862e-01, 4.712556894e+04, 9.486490032e+04, 8.660231295e+01, 1.082875874e+03, 7.991662034e+02, 1.344812635e+03, 1.034653122e+04], 
+[1.450803097e-01, 3.690122313e+04, 7.076429187e+04, 7.923525262e+01, 1.021698784e+03, 7.821386527e+02, 1.240302421e+03, 1.092916457e+04], 
+[9.160223345e-02, 3.477878623e+04, 7.860829708e+04, 7.052989639e+01, 1.045971334e+03, 7.371632922e+02, 1.377972291e+03, 1.202636052e+04], 
+[1.369383921e-01, 7.129085246e+03, 1.003532237e+05, 6.706267812e+01, 9.920945476e+02, 7.700039641e+02, 1.270854730e+03, 1.093421128e+04], 
+[9.150440403e-02, 3.191576156e+04, 1.043970415e+05, 1.121933263e+02, 9.961948536e+02, 7.035905746e+02, 1.503665860e+03, 1.175814238e+04], 
+[1.391914465e-01, 2.679929889e+04, 9.315698192e+04, 1.086281131e+02, 1.108793392e+03, 8.128614591e+02, 1.491457967e+03, 1.029088956e+04], 
+[5.336087538e-02, 1.787440457e+04, 8.274402814e+04, 9.735553791e+01, 1.052490734e+03, 7.853725287e+02, 1.505494679e+03, 1.199289056e+04], 
+[5.136318222e-02, 2.313327941e+04, 8.127627613e+04, 6.730045023e+01, 1.040984645e+03, 7.672337162e+02, 1.340467605e+03, 9.996178363e+03], 
+[5.202323461e-02, 1.418186294e+03, 9.097156505e+04, 6.493013875e+01, 1.030920220e+03, 7.896488622e+02, 1.361926266e+03, 1.152603230e+04], 
+[1.120003670e-01, 4.822621219e+04, 1.096378917e+05, 1.066666519e+02, 1.053178110e+03, 7.431009273e+02, 1.651552956e+03, 1.102461978e+04], 
+[8.156025845e-02, 3.294115069e+04, 7.846891086e+04, 1.125022952e+02, 1.075934524e+03, 8.170942340e+02, 1.251695262e+03, 1.007675874e+04], 
+[6.320769249e-02, 2.369685837e+04, 1.124336882e+05, 8.689656009e+01, 1.035765280e+03, 7.364800974e+02, 1.354682602e+03, 1.166796177e+04], 
+[6.503346261e-02, 1.730539268e+03, 8.554891831e+04, 1.052469487e+02, 1.109487127e+03, 7.156856899e+02, 1.568668470e+03, 1.144257670e+04], 
+[1.060320179e-01, 2.193967854e+04, 9.283579078e+04, 7.307445266e+01, 9.997547759e+02, 8.019001159e+02, 1.425747028e+03, 1.140852632e+04], 
+] 
+
+
+]]></code></script>
+      <outport name="inputs" type="pyobj"/>
+   </inline>
+   <inline name="PyInitRefOutputs">
+      <script><code><![CDATA[#-----------------------------#
+#      Reference outputs      #
+#-----------------------------#
+
+refOutputs = [ 
+1.199597225e+02, 
+5.286440730e+01, 
+1.946749529e+01, 
+6.734171678e+01, 
+1.265538870e+02, 
+3.126495039e+01, 
+1.423813327e+02, 
+4.872963193e+01, 
+9.497332404e+01, 
+1.341211849e+02, 
+4.464289526e+01, 
+7.437583290e+01, 
+1.393299719e+02, 
+9.123466759e+01, 
+4.659905036e+01, 
+1.972448931e+01, 
+1.020662831e+02, 
+1.127851501e+02, 
+8.909165532e+01, 
+8.046407212e+01, 
+3.861336913e+01, 
+5.693876214e+01, 
+7.329772086e+01, 
+1.583000177e+02, 
+1.510501114e+01, 
+9.637844578e+01, 
+-4.096191309e+00, 
+6.798388710e+01, 
+1.529755696e+02, 
+1.263372809e+02, 
+3.137004476e+01, 
+6.862410531e+01, 
+4.620214686e+01, 
+5.170112470e+01, 
+1.094077720e+02, 
+1.194064903e+01, 
+9.119449042e+01, 
+1.253286559e+02, 
+4.007263377e-01, 
+4.169281315e+01, 
+1.192188109e+01, 
+5.432408674e+01, 
+1.147070671e+02, 
+4.662373285e+01, 
+1.090036057e+02, 
+1.042114148e+02, 
+9.820112838e+00, 
+2.698818217e+01, 
+8.880536866e+01, 
+1.527146568e+02, 
+1.322338373e+02, 
+1.482507911e+02, 
+6.458012341e+00, 
+-3.840634931e+00, 
+1.227435169e+02, 
+1.175914158e+02, 
+9.688870975e+01, 
+1.258285676e+02, 
+1.258427717e+02, 
+1.416880775e+02, 
+7.357448465e+01, 
+9.432611383e+01, 
+8.753893164e+01, 
+1.127819173e+02, 
+7.842914575e+01, 
+1.292992326e+02, 
+1.226379987e+02, 
+1.376409297e+02, 
+8.068110325e+01, 
+1.198912866e+02, 
+6.675030235e+01, 
+1.249090790e+02, 
+8.233983443e+00, 
+9.857868847e-02, 
+3.996566681e+00, 
+8.655301005e+01, 
+4.485232922e+01, 
+3.618350443e+01, 
+4.942136971e+01, 
+6.499941347e+01 ] 
+
+
+]]></code></script>
+      <outport name="refOutputs" type="pyobj"/>
+   </inline>
+   <inline name="PyBuildLoopIndex">
+      <function name="make_indexes">
+         <code><![CDATA[def make_indexes(inVal, outVal) :
+    print "In make_indexes" ;
+    if ( len(inVal) != len(outVal) ) :
+        msg = "len(inVal) (%i) != len(outVal) (%i). "%( len(inVal), len(outVal) ) ;
+        raise ValueError(msg)       
+    n = len( inVal ); 
+    indexes = [ i for i in range( n ) ]  ;
+    return indexes
+
+]]></code>
+      </function>
+      <inport name="inVal" type="pyobj"/>
+      <inport name="outVal" type="pyobj"/>
+      <outport name="indexes" type="intvec"/>
+   </inline>
+   <inline name="PyEpsilon">
+      <script><code><![CDATA[epsilon = 1E-6;
+]]></code></script>
+      <outport name="epsilon" type="double"/>
+   </inline>
+   <inline name="PyLoadPMML0">
+      <script><code><![CDATA[ 
+import sys;
+from PMML import *;
+pmmlObj = PMMLlib( filename );   
+pmmlObj.SetCurrentModel( modelname , eval(pmmltype) );
+myFunc = "pyFunc";
+myHeader = "Function processed in YACCS";
+myCode = pmmlObj.ExportPyStr(myFunc, myHeader);
+exec myCode;
+]]></code></script>
+      <inport name="filename" type="string"/>
+      <inport name="modelname" type="string"/>
+      <inport name="pmmltype" type="string"/>
+      <outport name="pyFunc" type="pyobj"/>
+   </inline>
+   <inline name="PyGetPmmlFile">
+      <script><code><![CDATA[#-----------------------------#
+#       Get PMML file         #
+#-----------------------------#
+import os;
+pmmlRootDir = os.getenv("YACS_ROOT_DIR");
+resourcesDir = os.path.join(pmmlRootDir,"share","salome","yacssamples");
+resourcesDir += os.sep ;
+filepath = resourcesDir + filename;
+]]></code></script>
+      <inport name="filename" type="string"/>
+      <outport name="filepath" type="string"/>
+   </inline>
+   <inline name="PyGetRes">
+      <script><code><![CDATA[print "retVec ";
+print retVec;
+res = "True";
+for ret in retVec:
+    if not ret:
+        res = "False";
+        pass
+    pass
+
+]]></code></script>
+      <inport name="retVec" type="boolvec"/>
+      <outport name="res" type="string"/>
+   </inline>
+   <control> <fromnode>ForEachLoop_int6</fromnode> <tonode>PyGetRes</tonode> </control>
+   <control> <fromnode>PyInitInputs</fromnode> <tonode>PyBuildLoopIndex</tonode> </control>
+   <control> <fromnode>PyInitInputs</fromnode> <tonode>PyInitRefOutputs</tonode> </control>
+   <control> <fromnode>PyInitRefOutputs</fromnode> <tonode>PyBuildLoopIndex</tonode> </control>
+   <control> <fromnode>PyInitRefOutputs</fromnode> <tonode>PyEpsilon</tonode> </control>
+   <control> <fromnode>PyBuildLoopIndex</fromnode> <tonode>ForEachLoop_int6</tonode> </control>
+   <control> <fromnode>PyEpsilon</fromnode> <tonode>PyBuildLoopIndex</tonode> </control>
+   <control> <fromnode>PyLoadPMML0</fromnode> <tonode>ForEachLoop_int6</tonode> </control>
+   <control> <fromnode>PyGetPmmlFile</fromnode> <tonode>PyLoadPMML0</tonode> </control>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6</fromnode> <fromport>evalSamples</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>i</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6</fromnode> <fromport>evalSamples</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>i</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitInputs</fromnode> <fromport>inputs</fromport>
+      <tonode>PyBuildLoopIndex</tonode> <toport>inVal</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitInputs</fromnode> <fromport>inputs</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>inputs</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitRefOutputs</fromnode> <fromport>refOutputs</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>refOutputs</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitRefOutputs</fromnode> <fromport>refOutputs</fromport>
+      <tonode>PyBuildLoopIndex</tonode> <toport>outVal</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyBuildLoopIndex</fromnode> <fromport>indexes</fromport>
+      <tonode>ForEachLoop_int6</tonode> <toport>SmplsCollection</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyEpsilon</fromnode> <fromport>epsilon</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>epsilon</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyLoadPMML0</fromnode> <fromport>pyFunc</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>pyFunc</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyGetPmmlFile</fromnode> <fromport>filepath</fromport>
+      <tonode>PyLoadPMML0</tonode> <toport>filename</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6.Bloc8.PyValidate</fromnode> <fromport>ret</fromport>
+      <tonode>PyGetRes</tonode> <toport>retVec</toport>
+   </datalink>
+   <parameter>
+      <tonode>PyGetPmmlFile</tonode><toport>filename</toport>
+      <value><string>pmml_tlr_exportFunctionPMML.pmml</string></value>
+   </parameter>
+   <parameter>
+      <tonode>ForEachLoop_int6</tonode><toport>nbBranches</toport>
+      <value><int>1</int></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML0</tonode><toport>filename</toport>
+      <value><string>pmml_tann_exportFunctionPMML.pmml</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML0</tonode><toport>modelname</toport>
+      <value><string>LRModel</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML0</tonode><toport>pmmltype</toport>
+      <value><string>kLR</string></value>
+   </parameter>
+   <presentation name="ForEachLoop_int6.Bloc8.PyValidate" x="301.426" y="121.754" width="158" height="144" expanded="1" expx="301.426" expy="121.754" expWidth="158" expHeight="144" shownState="0"/>
+   <presentation name="PyInitInputs" x="264.5" y="91.5" width="158" height="63" expanded="1" expx="264.5" expy="91.5" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyBuildLoopIndex" x="356" y="275" width="158" height="90" expanded="1" expx="356" expy="275" expWidth="158" expHeight="90" shownState="0"/>
+   <presentation name="ForEachLoop_int6.Bloc8" x="6" y="86" width="463.426" height="269.754" expanded="1" expx="6" expy="86" expWidth="463.426" expHeight="269.754" shownState="0"/>
+   <presentation name="PyGetPmmlFile" x="58.6501" y="40.3003" width="158" height="63" expanded="1" expx="58.6501" expy="40.3003" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyGetRes" x="618.235" y="695.177" width="158" height="63" expanded="1" expx="618.235" expy="695.177" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="ForEachLoop_int6.Bloc8.PyFuncExec" x="4" y="141.105" width="158" height="117" expanded="1" expx="4" expy="141.105" expWidth="158" expHeight="117" shownState="0"/>
+   <presentation name="ForEachLoop_int6" x="7" y="414.5" width="473.426" height="359.754" expanded="1" expx="7" expy="414.5" expWidth="473.426" expHeight="359.754" shownState="0"/>
+   <presentation name="PyLoadPMML0" x="24" y="118.5" width="158" height="117" expanded="1" expx="24" expy="118.5" expWidth="158" expHeight="117" shownState="0"/>
+   <presentation name="PyEpsilon" x="28.5" y="274.5" width="158" height="63" expanded="1" expx="28.5" expy="274.5" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyInitRefOutputs" x="567.105" y="122" width="158" height="63" expanded="1" expx="567.105" expy="122" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="__ROOT__" x="0" y="0" width="780.235" height="778.254" expanded="1" expx="0" expy="0" expWidth="780.235" expHeight="778.254" shownState="0"/>
+</proc>
diff --git a/src/yacsloader/samples/schemaPmmlDoesNotExist.xml b/src/yacsloader/samples/schemaPmmlDoesNotExist.xml
new file mode 100644 (file)
index 0000000..33da8c7
--- /dev/null
@@ -0,0 +1,411 @@
+<?xml version='1.0' encoding='iso-8859-1' ?>
+<proc name="newSchema_1">
+   <property name="DefaultStudyID" value="1"/>
+   <type name="string" kind="string"/>
+   <struct name="Engines/dataref">
+      <member name="ref" type="string"/>
+   </struct>
+   <type name="bool" kind="bool"/>
+   <sequence name="boolvec" content="bool"/>
+   <type name="double" kind="double"/>
+   <sequence name="dblevec" content="double"/>
+   <objref name="file" id="file"/>
+   <type name="int" kind="int"/>
+   <sequence name="intvec" content="int"/>
+   <struct name="stringpair">
+      <member name="name" type="string"/>
+      <member name="value" type="string"/>
+   </struct>
+   <sequence name="propvec" content="stringpair"/>
+   <objref name="pyobj" id="python:obj:1.0"/>
+   <sequence name="seqboolvec" content="boolvec"/>
+   <sequence name="seqdblevec" content="dblevec"/>
+   <sequence name="seqint" content="int"/>
+   <sequence name="seqintvec" content="intvec"/>
+   <sequence name="stringvec" content="string"/>
+   <sequence name="seqstringvec" content="stringvec"/>
+   <container name="DefaultContainer">
+      <property name="container_name" value="FactoryServer"/>
+      <property name="name" value="localhost"/>
+   </container>
+   <foreach name="ForEachLoop_int6" nbranch="1" type="int">
+      <bloc name="Bloc8">
+         <inline name="PyFuncExec">
+            <script><code><![CDATA[
+# rebuild a list of doubles
+doubleVecIn = [ float(v) for v in inputs[i] ] ;
+
+doubleVecOut = pyFunc( doubleVecIn );
+
+output = doubleVecOut[0];
+]]></code></script>
+            <inport name="i" type="int"/>
+            <inport name="pyFunc" type="pyobj"/>
+            <inport name="inputs" type="pyobj"/>
+            <outport name="output" type="double"/>
+         </inline>
+         <inline name="PyValidate">
+            <script><code><![CDATA[#-----------------------------#
+#       Validate              #
+#-----------------------------#
+
+ret = True;
+
+refOutput = float( refOutputs[i] );
+
+err = abs( refOutput - output ) ; 
+
+if err > epsilon : 
+    msg   = "KO valid with eps %f at pattern #%i: \n"%(epsilon, i) ;
+    msg += "refOutput - output : %f - %f \n"%(refOutput, output);
+    msg += "Got abs relative diff  %f .\n"%err
+    ret = False;
+    print msg;
+    pass
+    
+print "+++ OK " , i , " +++", ret
+]]></code></script>
+            <inport name="i" type="int"/>
+            <inport name="refOutputs" type="pyobj"/>
+            <inport name="output" type="double"/>
+            <inport name="epsilon" type="double"/>
+            <outport name="ret" type="bool"/>
+         </inline>
+         <control> <fromnode>PyFuncExec</fromnode> <tonode>PyValidate</tonode> </control>
+         <datalink control="false">
+            <fromnode>PyFuncExec</fromnode> <fromport>output</fromport>
+            <tonode>PyValidate</tonode> <toport>output</toport>
+         </datalink>
+      </bloc>
+   </foreach>
+   <inline name="PyInitInputs">
+      <script><code><![CDATA[#-----------------------------#
+#             Inputs              #
+#-----------------------------#
+
+inputs = [ 
+[1.456610531e-01, 4.817264758e+04, 7.398381011e+04, 1.035837913e+02, 9.968027314e+02, 7.662662390e+02, 1.606060725e+03, 1.147627487e+04], 
+[7.767860409e-02, 1.193098539e+04, 7.292686667e+04, 9.785118163e+01, 1.051369254e+03, 7.027872080e+02, 1.447244752e+03, 1.028475889e+04], 
+[6.750842479e-02, 2.882617130e+04, 9.817007636e+04, 8.488524614e+01, 1.090567120e+03, 8.122979233e+02, 1.649818202e+03, 1.149754134e+04], 
+[8.860282350e-02, 3.498171453e+03, 8.530760503e+04, 9.046022050e+01, 1.095160960e+03, 7.874820198e+02, 1.367563528e+03, 1.084392094e+04], 
+[1.365903117e-01, 2.825638843e+04, 8.889946935e+04, 1.113147418e+02, 1.019749972e+03, 7.134926632e+02, 1.507458958e+03, 1.081045588e+04], 
+[7.127271995e-02, 1.753261607e+04, 9.534506291e+04, 9.153808181e+01, 1.061650244e+03, 7.309769019e+02, 1.657218091e+03, 1.072962363e+04], 
+[1.265602027e-01, 4.219433169e+04, 9.456040368e+04, 1.056736168e+02, 1.099933893e+03, 7.471638924e+02, 1.273665410e+03, 1.138336512e+04], 
+[8.667764079e-02, 4.120117756e+04, 1.133877245e+05, 7.767636060e+01, 1.016590900e+03, 7.080602432e+02, 1.660892376e+03, 1.092560938e+04], 
+[9.976394549e-02, 6.420167443e+03, 9.830132327e+04, 8.501884776e+01, 1.097122572e+03, 7.253532648e+02, 1.307720423e+03, 9.866836214e+03], 
+[1.146758223e-01, 8.710374173e+03, 8.523215073e+04, 9.414901527e+01, 1.095537742e+03, 7.060630963e+02, 1.275130095e+03, 1.130554594e+04], 
+[7.880624877e-02, 4.311161871e+04, 7.377943028e+04, 1.061320296e+02, 1.047244176e+03, 7.477760505e+02, 1.415636178e+03, 1.040593206e+04], 
+[1.058084156e-01, 4.582563552e+04, 9.222816385e+04, 1.038847032e+02, 9.955665779e+02, 7.432779758e+02, 1.504745288e+03, 1.146224012e+04], 
+[1.253087794e-01, 4.847286153e+04, 7.518998946e+04, 1.040979939e+02, 1.105768478e+03, 7.577355747e+02, 1.201630771e+03, 1.088149400e+04], 
+[1.175039884e-01, 2.565240792e+04, 7.230018052e+04, 8.257012789e+01, 1.032484559e+03, 7.694279542e+02, 1.566647304e+03, 1.153822324e+04], 
+[6.124755615e-02, 4.507578924e+04, 6.478138490e+04, 9.090792861e+01, 1.062255915e+03, 7.497957649e+02, 1.184192526e+03, 1.170539896e+04], 
+[5.544131559e-02, 3.390925881e+04, 7.328456973e+04, 1.090337509e+02, 1.048630443e+03, 7.628191574e+02, 1.185768164e+03, 1.028085684e+04], 
+[1.183272497e-01, 4.639774146e+04, 6.984716489e+04, 7.483809181e+01, 1.071724305e+03, 8.029602783e+02, 1.301826477e+03, 1.069809527e+04], 
+[1.178550031e-01, 1.065266711e+04, 7.473813352e+04, 7.063285879e+01, 1.017164661e+03, 7.183678405e+02, 1.252220675e+03, 1.085888136e+04], 
+[1.116310036e-01, 7.057176796e+02, 8.914959172e+04, 1.143736719e+02, 1.024291508e+03, 7.893954959e+02, 1.375784904e+03, 1.189588654e+04], 
+[1.056449157e-01, 3.725247649e+04, 1.126706761e+05, 8.220356853e+01, 1.000815794e+03, 7.342054423e+02, 1.331007515e+03, 1.057815755e+04], 
+[5.585244596e-02, 1.080328986e+04, 7.647445600e+04, 1.127840680e+02, 1.101335277e+03, 7.090687232e+02, 1.571310094e+03, 1.179921032e+04], 
+[9.318997589e-02, 3.694986496e+04, 1.142850986e+05, 9.885621621e+01, 1.047818074e+03, 7.462410467e+02, 1.550908728e+03, 1.024738180e+04], 
+[9.769526026e-02, 4.898586800e+04, 8.454146334e+04, 7.390916471e+01, 1.069034353e+03, 7.761299060e+02, 1.366617089e+03, 1.045533000e+04], 
+[1.448874974e-01, 2.618871518e+04, 1.006705237e+05, 6.761931276e+01, 1.084724402e+03, 7.056825472e+02, 1.467825112e+03, 1.063120366e+04], 
+[5.748240145e-02, 4.265983570e+04, 6.922054248e+04, 7.153213366e+01, 1.029573412e+03, 7.140769415e+02, 1.638688665e+03, 1.152371724e+04], 
+[1.004554848e-01, 1.849359821e+04, 1.073035370e+05, 9.843990445e+01, 1.061773839e+03, 7.703136119e+02, 1.142717255e+03, 1.133548780e+04], 
+[6.904713159e-02, 3.114664091e+04, 1.154062790e+05, 8.699726139e+01, 9.951594968e+02, 8.013040888e+02, 1.645133282e+03, 1.101513225e+04], 
+[7.535831709e-02, 2.084271662e+04, 8.796612167e+04, 8.755547732e+01, 1.091487642e+03, 7.501847659e+02, 1.272233814e+03, 1.153502741e+04], 
+[1.316642693e-01, 4.074199552e+04, 9.791860127e+04, 1.096474308e+02, 1.103690417e+03, 7.597649884e+02, 1.149359431e+03, 1.121703132e+04], 
+[1.256804603e-01, 2.171487442e+04, 1.047171996e+05, 8.588439966e+01, 1.102320221e+03, 7.708693798e+02, 1.329214491e+03, 1.059463337e+04], 
+[5.439524620e-02, 2.259584733e+04, 8.079125672e+04, 6.476205727e+01, 1.106152417e+03, 8.113191944e+02, 1.255437236e+03, 1.201333911e+04], 
+[8.750209459e-02, 1.410249021e+04, 1.119751321e+05, 1.073490872e+02, 1.063143667e+03, 7.871042297e+02, 1.329983208e+03, 1.195606571e+04], 
+[8.699797238e-02, 4.021709531e+04, 1.117867687e+05, 9.589155856e+01, 1.073694293e+03, 7.994999584e+02, 1.627265626e+03, 1.136706401e+04], 
+[6.892920064e-02, 2.032907492e+03, 1.010396848e+05, 6.783802062e+01, 1.086461820e+03, 7.374214870e+02, 1.337611800e+03, 1.085112805e+04], 
+[1.110159183e-01, 4.417290645e+04, 7.073649093e+04, 7.518425871e+01, 1.107020884e+03, 7.795442605e+02, 1.148611472e+03, 9.964154503e+03], 
+[5.864651909e-02, 4.709372341e+04, 1.058816566e+05, 6.673370711e+01, 1.046313765e+03, 7.910806290e+02, 1.399673582e+03, 1.115277414e+04], 
+[1.024623536e-01, 3.248763678e+04, 1.123176511e+05, 1.107772688e+02, 1.012914390e+03, 7.571928585e+02, 1.189214491e+03, 1.183043699e+04], 
+[1.268376736e-01, 2.048412849e+04, 1.085318941e+05, 6.459015746e+01, 1.109786159e+03, 8.139508806e+02, 1.359762233e+03, 1.157111067e+04], 
+[5.300816813e-02, 2.943127727e+04, 1.146785278e+05, 8.394814161e+01, 1.043452404e+03, 7.603354413e+02, 1.658862011e+03, 1.163288578e+04], 
+[7.028017434e-02, 4.192750166e+04, 9.553886080e+04, 6.383743056e+01, 1.079645033e+03, 7.723588658e+02, 1.321409167e+03, 1.058567246e+04], 
+[5.991478449e-02, 4.541579388e+04, 6.812180949e+04, 9.246414937e+01, 1.019926004e+03, 8.069446852e+02, 1.239566623e+03, 1.120099431e+04], 
+[1.067481756e-01, 1.157791820e+04, 8.601870382e+04, 9.535250994e+01, 1.058079739e+03, 8.029092666e+02, 1.675663505e+03, 9.857368989e+03], 
+[1.372760225e-01, 2.899852970e+04, 8.623842631e+04, 1.131718316e+02, 1.024500700e+03, 7.638957159e+02, 1.530839326e+03, 1.082014841e+04], 
+[5.646039560e-02, 2.018348280e+04, 1.080224373e+05, 1.152372480e+02, 1.093895079e+03, 7.378488842e+02, 1.286477483e+03, 1.185925929e+04], 
+[1.112123774e-01, 2.393360775e+04, 9.292106786e+04, 7.719527530e+01, 1.067671951e+03, 7.343684587e+02, 1.483330544e+03, 1.201709952e+04], 
+[1.307541334e-01, 1.560575418e+04, 1.072113632e+05, 1.121212726e+02, 1.037577460e+03, 8.001239033e+02, 1.526339128e+03, 1.134591351e+04], 
+[6.497204223e-02, 1.402014180e+04, 6.745969376e+04, 7.862832618e+01, 1.014652310e+03, 7.785303790e+02, 1.410865140e+03, 1.050284643e+04], 
+[7.585702665e-02, 4.060724171e+04, 7.891898759e+04, 8.983721307e+01, 1.027113392e+03, 7.415777465e+02, 1.564676410e+03, 1.021290221e+04], 
+[1.236232545e-01, 3.896089552e+04, 9.928329730e+04, 7.287234301e+01, 9.987764845e+02, 8.183318132e+02, 1.174504796e+03, 1.008298210e+04], 
+[1.472884758e-01, 1.496664561e+04, 9.577485455e+04, 8.233690370e+01, 1.081566913e+03, 7.885231394e+02, 1.401539659e+03, 1.177286288e+04], 
+[1.446232028e-01, 4.176932757e+04, 6.512933971e+04, 9.704737342e+01, 1.013731965e+03, 7.047846719e+02, 1.603844751e+03, 1.068331103e+04], 
+[1.380008828e-01, 3.305905514e+04, 9.999652423e+04, 6.636073041e+01, 1.064322897e+03, 7.729923602e+02, 1.227516863e+03, 1.171354749e+04], 
+[6.344159464e-02, 4.780648795e+04, 9.365102770e+04, 8.802953531e+01, 1.029707604e+03, 7.933230876e+02, 1.374652921e+03, 1.008866356e+04], 
+[5.258122387e-02, 3.702566309e+03, 7.777773463e+04, 7.570896193e+01, 1.052339637e+03, 7.741762325e+02, 1.641636623e+03, 1.121956718e+04], 
+[1.308250614e-01, 4.040441149e+04, 8.911452953e+04, 8.104256772e+01, 1.054111352e+03, 7.773815616e+02, 1.334252555e+03, 1.104079585e+04], 
+[1.420385163e-01, 4.604351716e+04, 8.593646152e+04, 8.159247797e+01, 1.060594140e+03, 8.058854605e+02, 1.637341312e+03, 1.120759720e+04], 
+[1.358384353e-01, 3.498709280e+04, 8.406584308e+04, 8.165787651e+01, 1.022762168e+03, 8.120310170e+02, 1.474777461e+03, 1.003994300e+04], 
+[1.423898316e-01, 4.687374179e+04, 8.660185111e+04, 7.776608109e+01, 9.927602408e+02, 7.641272454e+02, 1.455954292e+03, 1.178518632e+04], 
+[1.321018940e-01, 2.866815831e+03, 9.113096058e+04, 1.007088501e+02, 1.086874972e+03, 7.587825443e+02, 1.610678155e+03, 1.146851885e+04], 
+[1.351663932e-01, 3.196774887e+04, 1.021987705e+05, 7.197885925e+01, 1.021175944e+03, 7.331080072e+02, 1.162493217e+03, 1.109738563e+04], 
+[8.648890684e-02, 4.492471938e+04, 6.438582057e+04, 1.110566727e+02, 1.090853465e+03, 7.597253981e+02, 1.467197961e+03, 1.195185409e+04], 
+[1.104557208e-01, 4.850355694e+04, 1.018022746e+05, 1.149248442e+02, 1.013058279e+03, 7.367222887e+02, 1.358941413e+03, 1.134149282e+04], 
+[1.091740527e-01, 4.724040634e+04, 1.023148364e+05, 1.026078719e+02, 1.016770836e+03, 7.167576085e+02, 1.388776487e+03, 1.018437131e+04], 
+[1.386282688e-01, 1.824670236e+04, 7.934492890e+04, 7.145704182e+01, 1.031398805e+03, 7.882243654e+02, 1.511935264e+03, 1.073294641e+04], 
+[1.072148102e-01, 2.606334586e+04, 1.045009711e+05, 1.153347286e+02, 1.013401269e+03, 7.542190230e+02, 1.392997551e+03, 1.072210903e+04], 
+[1.377281664e-01, 3.939831181e+04, 7.710345011e+04, 6.326330520e+01, 1.038617320e+03, 7.401934748e+02, 1.527993368e+03, 1.129318647e+04], 
+[1.337643862e-01, 4.712556894e+04, 9.486490032e+04, 8.660231295e+01, 1.082875874e+03, 7.991662034e+02, 1.344812635e+03, 1.034653122e+04], 
+[1.450803097e-01, 3.690122313e+04, 7.076429187e+04, 7.923525262e+01, 1.021698784e+03, 7.821386527e+02, 1.240302421e+03, 1.092916457e+04], 
+[9.160223345e-02, 3.477878623e+04, 7.860829708e+04, 7.052989639e+01, 1.045971334e+03, 7.371632922e+02, 1.377972291e+03, 1.202636052e+04], 
+[1.369383921e-01, 7.129085246e+03, 1.003532237e+05, 6.706267812e+01, 9.920945476e+02, 7.700039641e+02, 1.270854730e+03, 1.093421128e+04], 
+[9.150440403e-02, 3.191576156e+04, 1.043970415e+05, 1.121933263e+02, 9.961948536e+02, 7.035905746e+02, 1.503665860e+03, 1.175814238e+04], 
+[1.391914465e-01, 2.679929889e+04, 9.315698192e+04, 1.086281131e+02, 1.108793392e+03, 8.128614591e+02, 1.491457967e+03, 1.029088956e+04], 
+[5.336087538e-02, 1.787440457e+04, 8.274402814e+04, 9.735553791e+01, 1.052490734e+03, 7.853725287e+02, 1.505494679e+03, 1.199289056e+04], 
+[5.136318222e-02, 2.313327941e+04, 8.127627613e+04, 6.730045023e+01, 1.040984645e+03, 7.672337162e+02, 1.340467605e+03, 9.996178363e+03], 
+[5.202323461e-02, 1.418186294e+03, 9.097156505e+04, 6.493013875e+01, 1.030920220e+03, 7.896488622e+02, 1.361926266e+03, 1.152603230e+04], 
+[1.120003670e-01, 4.822621219e+04, 1.096378917e+05, 1.066666519e+02, 1.053178110e+03, 7.431009273e+02, 1.651552956e+03, 1.102461978e+04], 
+[8.156025845e-02, 3.294115069e+04, 7.846891086e+04, 1.125022952e+02, 1.075934524e+03, 8.170942340e+02, 1.251695262e+03, 1.007675874e+04], 
+[6.320769249e-02, 2.369685837e+04, 1.124336882e+05, 8.689656009e+01, 1.035765280e+03, 7.364800974e+02, 1.354682602e+03, 1.166796177e+04], 
+[6.503346261e-02, 1.730539268e+03, 8.554891831e+04, 1.052469487e+02, 1.109487127e+03, 7.156856899e+02, 1.568668470e+03, 1.144257670e+04], 
+[1.060320179e-01, 2.193967854e+04, 9.283579078e+04, 7.307445266e+01, 9.997547759e+02, 8.019001159e+02, 1.425747028e+03, 1.140852632e+04], 
+] 
+
+
+]]></code></script>
+      <outport name="inputs" type="pyobj"/>
+   </inline>
+   <inline name="PyInitRefOutputs">
+      <script><code><![CDATA[#-----------------------------#
+# Reference outputs           #
+#-----------------------------#
+
+refOutputs = [ 
+1.165811298e+02, 
+4.800731001e+01, 
+2.552307508e+01, 
+5.805453016e+01, 
+1.304836720e+02, 
+3.285696649e+01, 
+1.557608944e+02, 
+4.374810362e+01, 
+8.929287717e+01, 
+1.425999339e+02, 
+4.182190868e+01, 
+6.457050056e+01, 
+1.510971116e+02, 
+7.956605866e+01, 
+4.240742650e+01, 
+2.764774492e+01, 
+9.347296791e+01, 
+1.084714948e+02, 
+7.856829522e+01, 
+7.146608339e+01, 
+3.781926322e+01, 
+5.088280301e+01, 
+6.350651454e+01, 
+1.802545516e+02, 
+2.390540494e+01, 
+8.926908340e+01, 
+1.621872876e+01, 
+5.930546024e+01, 
+1.739576265e+02, 
+1.292594318e+02, 
+3.137491768e+01, 
+5.933983687e+01, 
+4.125195859e+01, 
+4.595759735e+01, 
+1.057164195e+02, 
+2.249466719e+01, 
+8.322085467e+01, 
+1.234257504e+02, 
+1.808878098e+01, 
+3.877651954e+01, 
+2.258184497e+01, 
+4.698313250e+01, 
+1.116999328e+02, 
+4.375304767e+01, 
+1.024774016e+02, 
+9.658347512e+01, 
+2.155679655e+01, 
+3.049337709e+01, 
+7.818527054e+01, 
+1.667466744e+02, 
+1.374770136e+02, 
+1.600991110e+02, 
+2.088718864e+01, 
+1.619537927e+01, 
+1.218138473e+02, 
+1.120293486e+02, 
+8.676828288e+01, 
+1.238520007e+02, 
+1.264091907e+02, 
+1.520588614e+02, 
+6.410449884e+01, 
+8.751103700e+01, 
+8.081852230e+01, 
+1.055199073e+02, 
+6.999515823e+01, 
+1.295983705e+02, 
+1.228930068e+02, 
+1.431794331e+02, 
+6.947691365e+01, 
+1.160897648e+02, 
+5.875505502e+01, 
+1.265981410e+02, 
+2.087500232e+01, 
+1.828049612e+01, 
+1.883459772e+01, 
+7.806138598e+01, 
+4.174578630e+01, 
+3.565871725e+01, 
+4.505400598e+01, 
+5.370771952e+01 ] 
+]]></code></script>
+      <outport name="refOutputs" type="pyobj"/>
+   </inline>
+   <inline name="PyBuildLoopIndex">
+      <function name="make_indexes">
+         <code><![CDATA[def make_indexes(inVal, outVal) :
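+    # Inputs and reference outputs must be the same size; the loop indexes are simply 0..n-1.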
+    print "In make_indexes" ;
+    if ( len(inVal) != len(outVal) ) :
+        msg = "len(inVal) (%i) != len(outVal) (%i). "%( len(inVal), len(outVal) ) ;
+        raise ValueError(msg)       
+    n = len( inVal ); 
+    indexes = [ i for i in range( n ) ]  ;
+    return indexes
+
+]]></code>
+      </function>
+      <inport name="inVal" type="pyobj"/>
+      <inport name="outVal" type="pyobj"/>
+      <outport name="indexes" type="intvec"/>
+   </inline>
+   <inline name="PyEpsilon">
+      <script><code><![CDATA[epsilon = 1E-6;
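+# Tolerance forwarded to each PyValidate node for comparing computed and reference outputs.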
+]]></code></script>
+      <outport name="epsilon" type="double"/>
+   </inline>
+   <inline name="PyLoadPMML0">
+      <script><code><![CDATA[ 
+import sys;
+from PMML import *;
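+# Open the PMML file and select the model given by the "modelname" and "pmmltype" inports,
+# then export it as Python source defining a function named "pyFunc" and execute that source
+# so the resulting function object is available on the "pyFunc" outport.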
+pmmlObj = PMMLlib( filename );   
+pmmlObj.SetCurrentModel( modelname , eval(pmmltype) );
+myFunc = "pyFunc";
+myHeader = "Function processed in YACCS";
+myCode = pmmlObj.ExportPyStr(myFunc, myHeader);
+exec myCode;
+]]></code></script>
+      <inport name="filename" type="string"/>
+      <inport name="modelname" type="string"/>
+      <inport name="pmmltype" type="string"/>
+      <outport name="pyFunc" type="pyobj"/>
+   </inline>
+   <inline name="PyGetPmmlFile">
+      <script><code><![CDATA[#-----------------------------#
+#       Get PMML file         #
+#-----------------------------#
+import os;
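+# Resolve the directory holding the sample PMML files from the YACS installation.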
+pmmlRootDir = os.getenv("YACS_ROOT_DIR");
+resourcesDir = os.path.join(pmmlRootDir,"share","salome","resources","pmml");
+resourcesDir += os.sep ;
+filepath = resourcesDir + filename;
+]]></code></script>
+      <inport name="filename" type="string"/>
+      <outport name="filepath" type="string"/>
+   </inline>
+   <inline name="PyGetRes">
+      <script><code><![CDATA[print "retVec ";
+print retVec;
+res = "True";
+for ret in retVec:
+    if not ret:
+        res = "False";
+        pass
+    pass
+]]></code></script>
+      <inport name="retVec" type="boolvec"/>
+      <outport name="res" type="string"/>
+   </inline>
+   <control> <fromnode>ForEachLoop_int6</fromnode> <tonode>PyGetRes</tonode> </control>
+   <control> <fromnode>PyInitInputs</fromnode> <tonode>PyBuildLoopIndex</tonode> </control>
+   <control> <fromnode>PyInitInputs</fromnode> <tonode>PyInitRefOutputs</tonode> </control>
+   <control> <fromnode>PyInitRefOutputs</fromnode> <tonode>PyBuildLoopIndex</tonode> </control>
+   <control> <fromnode>PyInitRefOutputs</fromnode> <tonode>PyEpsilon</tonode> </control>
+   <control> <fromnode>PyBuildLoopIndex</fromnode> <tonode>ForEachLoop_int6</tonode> </control>
+   <control> <fromnode>PyEpsilon</fromnode> <tonode>PyBuildLoopIndex</tonode> </control>
+   <control> <fromnode>PyLoadPMML0</fromnode> <tonode>ForEachLoop_int6</tonode> </control>
+   <control> <fromnode>PyGetPmmlFile</fromnode> <tonode>PyLoadPMML0</tonode> </control>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6</fromnode> <fromport>evalSamples</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>i</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6</fromnode> <fromport>evalSamples</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>i</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitInputs</fromnode> <fromport>inputs</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>inputs</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitInputs</fromnode> <fromport>inputs</fromport>
+      <tonode>PyBuildLoopIndex</tonode> <toport>inVal</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitRefOutputs</fromnode> <fromport>refOutputs</fromport>
+      <tonode>PyBuildLoopIndex</tonode> <toport>outVal</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyInitRefOutputs</fromnode> <fromport>refOutputs</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>refOutputs</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyBuildLoopIndex</fromnode> <fromport>indexes</fromport>
+      <tonode>ForEachLoop_int6</tonode> <toport>SmplsCollection</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyEpsilon</fromnode> <fromport>epsilon</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyValidate</tonode> <toport>epsilon</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyLoadPMML0</fromnode> <fromport>pyFunc</fromport>
+      <tonode>ForEachLoop_int6.Bloc8.PyFuncExec</tonode> <toport>pyFunc</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>PyGetPmmlFile</fromnode> <fromport>filepath</fromport>
+      <tonode>PyLoadPMML0</tonode> <toport>filename</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>ForEachLoop_int6.Bloc8.PyValidate</fromnode> <fromport>ret</fromport>
+      <tonode>PyGetRes</tonode> <toport>retVec</toport>
+   </datalink>
+   <parameter>
+      <tonode>ForEachLoop_int6</tonode><toport>nbBranches</toport>
+      <value><int>1</int></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML0</tonode><toport>filename</toport>
+      <value><string>pmml_tann_exportFunctionPMML.pmml</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML0</tonode><toport>modelname</toport>
+      <value><string>ANNModel</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyLoadPMML0</tonode><toport>pmmltype</toport>
+      <value><string>kANN</string></value>
+   </parameter>
+   <parameter>
+      <tonode>PyGetPmmlFile</tonode><toport>filename</toport>
+      <value><string>pmml0.pmml</string></value>
+   </parameter>
+   <presentation name="ForEachLoop_int6.Bloc8.PyFuncExec" x="4.01977" y="140" width="158" height="117" expanded="1" expx="4.01977" expy="140" expWidth="158" expHeight="117" shownState="0"/>
+   <presentation name="ForEachLoop_int6.Bloc8.PyValidate" x="301.671" y="119.404" width="158" height="144" expanded="1" expx="301.671" expy="119.404" expWidth="158" expHeight="144" shownState="0"/>
+   <presentation name="ForEachLoop_int6" x="7" y="414.5" width="473.671" height="357.404" expanded="1" expx="7" expy="414.5" expWidth="473.671" expHeight="357.404" shownState="0"/>
+   <presentation name="PyInitInputs" x="264.5" y="91.5" width="158" height="63" expanded="1" expx="264.5" expy="91.5" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyBuildLoopIndex" x="354.895" y="275" width="158" height="90" expanded="1" expx="354.895" expy="275" expWidth="158" expHeight="90" shownState="0"/>
+   <presentation name="PyEpsilon" x="28.5" y="274.5" width="158" height="63" expanded="1" expx="28.5" expy="274.5" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyLoadPMML0" x="25.1052" y="117.395" width="158" height="117" expanded="1" expx="25.1052" expy="117.395" expWidth="158" expHeight="117" shownState="0"/>
+   <presentation name="ForEachLoop_int6.Bloc8" x="6" y="86" width="463.671" height="267.404" expanded="1" expx="6" expy="86" expWidth="463.671" expHeight="267.404" shownState="0"/>
+   <presentation name="PyInitRefOutputs" x="568.105" y="122" width="158" height="63" expanded="1" expx="568.105" expy="122" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyGetRes" x="630.384" y="690.023" width="158" height="63" expanded="1" expx="630.384" expy="690.023" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="PyGetPmmlFile" x="60" y="43" width="158" height="63" expanded="1" expx="60" expy="43" expWidth="158" expHeight="63" shownState="0"/>
+   <presentation name="__ROOT__" x="0" y="0" width="792.384" height="775.904" expanded="1" expx="0" expy="0" expWidth="792.384" expHeight="775.904" shownState="0"/>
+</proc>