SALOME platform Git repositories - modules/kernel.git/commitdiff
PR: merge from branch BR_UT_V310a3 tag mergeto_trunk_05dec05
author prascle <prascle>
Mon, 5 Dec 2005 08:03:19 +0000 (08:03 +0000)
committer prascle <prascle>
Mon, 5 Dec 2005 08:03:19 +0000 (08:03 +0000)
41 files changed:
INSTALL
Makefile.in
bin/NSparam.py [new file with mode: 0755]
bin/VERSION
bin/appliskel/README
bin/appliskel/envd
bin/appliskel/killCurrentPort [new file with mode: 0755]
bin/appliskel/runAppli
bin/appliskel/runConsole
bin/appliskel/runParam
bin/appliskel/runRemote.sh
bin/appliskel/runSession
bin/appliskel/runTests
bin/appliskel/searchFreePort.sh [new file with mode: 0755]
bin/appliskel/setAppliPath.sh [new file with mode: 0755]
bin/killSalomeWithPort.py
bin/runSalome.py
doc/SALOME_Application.txt
salome_adm/unix/config_files/check_lam.m4
salome_adm/unix/config_files/check_qt.m4
salome_adm/unix/config_files/check_qwt.m4
src/Communication/ReceiverFactory.cxx
src/Communication/Receivers.hxx
src/Communication/SALOME_Comm_i.cxx
src/Communication/SALOME_Comm_i.hxx
src/Communication/SenderFactory.cxx
src/Communication_SWIG/libSALOME_Comm.i
src/Container/SALOME_Container.cxx
src/Container/SALOME_ContainerManager.cxx
src/Container/SALOME_ContainerManager.hxx
src/MPIContainer/MPIContainer_i.cxx
src/MPIContainer/MPIObject_i.cxx
src/MPIContainer/SALOME_MPIContainer.cxx
src/NamingService/SALOME_NamingService.cxx
src/NamingService/SALOME_NamingServicePy.py
src/NamingService/Test/NamingServiceTest.cxx
src/ResourcesManager/SALOME_ResourcesManager.cxx
src/ResourcesManager/SALOME_ResourcesManager.hxx
src/SALOMEDS/SALOMEDS_StudyBuilder_i.cxx
src/SALOMEDS/SALOME_DriverPy.py
src/SALOMELocalTrace/LocalTraceBufferPool.cxx

diff --git a/INSTALL b/INSTALL
index bf7c941f0c4da08c341ed72c16b8388b70d583b3..526e28e9f9c72b2cf0532e985e4b02fee601aa7e 100644 (file)
--- a/INSTALL
+++ b/INSTALL
@@ -1,4 +1,4 @@
-This is the version 3.1.0a2 of KERNEL
+This is the version 3.1.0b1 of KERNEL
 Previous versions :
         - 3.0.0
         - 2.2.4
index 85ffaf0225aea74aa7e72c360c7b884addb4718f..42dedf9404d953d1a5de097ffaf94f8497024cf8 100644 (file)
@@ -41,7 +41,8 @@ salome.launch \
 envSalome.py \
 salomeConsole.py \
 showNS.py \
-addToKillList.py
+addToKillList.py \
+NSparam.py
 
 # copy header files in common directory
 OWN_CONFIG_H=@OWN_CONFIG_H@
diff --git a/bin/NSparam.py b/bin/NSparam.py
new file mode 100755 (executable)
index 0000000..721c6a0
--- /dev/null
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+import sys,os
+import string
+
+def getNSparams(info=""):
+    """
+    check environment for omniORB configuration file.
+    parse the file to find the line defining naming service  host and port,
+    set environment variables NSPORT and NSHOST,
+    get host and port,
+    if   info==host print host
+    elif info==port print port
+    else    print 2 strings on stdout on one line: host port
+    """
+    my_port=""
+    my_host=""
+    if os.environ.has_key("OMNIORB_CONFIG"):
+        file = open(os.environ["OMNIORB_CONFIG"], "r")
+        s = file.read()
+        while len(s):
+            l = string.split(s, ":")
+            if string.split(l[0], " ")[0] == "ORBInitRef" or \
+               string.split(l[0], " ")[0] == "InitRef" :
+                my_port = l[len(l)-1]
+                if my_port[-1] == '\n':
+                    my_port = my_port[:-1]
+                    pass
+                my_host = l[len(l)-2]
+                break;
+                pass
+            s = file.read()
+            pass
+        pass
+    if info=='host':
+        # keep print, stdout used in shell
+        print my_host
+        os.environ['NSHOST']=my_host
+        return my_host
+        pass
+    elif info=='port':
+        # keep print, stdout used in shell
+        print my_port
+        os.environ['NSPORT']=my_port
+        return my_port
+        pass
+    else:
+        # keep print, stdout used in shell
+        print  my_host, my_port
+        return my_host, my_port
+    pass
+
+# ------------------------------------------------------------------------
+
+if __name__ == "__main__":
+    if len(sys.argv) >1:        
+        if sys.argv[1]=='host':
+            getNSparams('host')
+            pass
+        elif sys.argv[1]=='port':
+            getNSparams('port')
+            pass
+        else:
+            getNSparams('')
+            pass
+        pass
+    else:
+        getNSparams('')
+        pass
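
For illustration only (not part of this commit): the parsing done by getNSparams() boils down to reading the InitRef/ORBInitRef line written by searchFreePort.sh (for example "InitRef = NameService=corbaname::myhost:2810") and taking the last two colon-separated fields. A minimal, modernized Python sketch of that logic:

    import os

    def parse_ns_host_port(cfg_path):
        """Return (host, port) parsed from an omniORB config file, or ("", "")."""
        host, port = "", ""
        with open(cfg_path) as f:
            for line in f:
                key = line.split(":")[0].split(" ")[0]
                if key in ("ORBInitRef", "InitRef"):
                    parts = line.strip().split(":")
                    host, port = parts[-2], parts[-1]
                    break
        return host, port

    if __name__ == "__main__":
        cfg = os.environ.get("OMNIORB_CONFIG")
        if cfg:
            print(parse_ns_host_port(cfg))
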
index 509227443d169d30493267a20f9501e2b6f99602..3775769376481f8d7f863efe4d8e0aff0fd2c9c0 100755 (executable)
@@ -1 +1 @@
-THIS IS SALOME - KERNEL VERSION: 3.1.0a2
+THIS IS SALOME - KERNEL VERSION: 3.1.0b1
index 264b879bc7858ba16c85e7baeb7f2813b23c95b0..f3f283cb8810a1ace8a842401179cd59866096d2 100644 (file)
+=======================================
 Set of scripts for a SALOME application
 =======================================
+*html version of this document is produced with docutils*::
+
+  rest2html < doc.txt > doc.html
+
+This document corresponds to SALOME2 3.1. (alpha version)
+
++-------------------------------------------+
+| **WORK in PROGRESS, INCOMPLETE DOCUMENT** |
++-------------------------------------------+
+
+SALOME Application concept
+--------------------------
+
+See SALOME_Application_ to define your own configuration of SALOME and run it
+on one or several computers. This is the recommended way to configure SALOME.
+
+.. _SALOME_Application: ../../doc/SALOME_Application.html
+
+
+User run scripts
+----------------
+
+The SALOME user can use the following scripts:
+
+runAppli
+   Launches a SALOME Session
+   (similar to ${KERNEL_ROOT_DIR}/bin/salome/runSalome but with a different
+   name to avoid confusion).
+
+runSession
+   Launches a shell script in the SALOME application environment, with access
+   to the current SALOME session (naming service), if any.
+   Without arguments, the script is interactive. With arguments, the script
+   executes the command in the SALOME application environment.
+
+runConsole
+   Gives a python console connected to the current SALOME Session.
+   It is also possible to use runSession, then python.
+
+runTests
+   Similar to runSession, used for unit testing. runTests defines a new 
+   configuration for naming service (new port number) to avoid interference
+   with a running SALOME session. runSession tries to use an already existing
+   naming service definition from a running session (hostname & port number).
+
+killCurrentPort
+   Kills the last SALOME session corresponding to this application and
+   initially launched from this computer.
+   Cleans associated config files.
+
+SALOME internal run scripts
+---------------------------
+
+envd
+   Sets SALOME application environment, envd is sourced by other scripts.
+
+setAppliPath.sh
+   Used by other scripts to define the Application Path.
+
+searchFreePort.sh
+   Used by other scripts to find a free port for naming service.
+
+For remote calls, SALOME uses one script.
+
+runRemote.sh
+   This script is mainly used to launch containers. The first 2 arguments
+   define the hostname and port used for naming service; the remaining
+   arguments define the command to execute.
+
+
+The following files must be adapted to your environment and SALOME Application
+------------------------------------------------------------------------------
 
-#     - A SALOME application distributed on several computers needs APPLI
-#       directories on the same path ($APPLI) relative to $HOME directory
-#       of the user, on each computer.
+- CatalogResources.xml
+- SalomeApp.xml
+- env.d/atFirst.sh
+- env.d/envProducts.sh
+- env.d/envSalome.sh
 
-user scripts:
--------------
+CatalogResources.xml
+   This file describes all the computers the application can use. The given
+   example is minimal and supposes ${APPLI} is the same relative path
+   to ${HOME} on all the computers. A different directory can be set on a
+   particular computer with a line::
 
-runAppli   : SALOME launch (idem runSalome but different name to avoid
-             confusion with ${KERNEL_ROOT_DIR}/bin/salome/runSalome
+       appliPath="my/specific/path/on/this/computer"
 
-runConsole : a python console in the current SALOME session environment
+SalomeApp.xml
+   This file is similar to the default given
+   in ${GUI_ROOT_DIR}/share/salome/resources
 
-runSession : a shell in SALOME environment, either interactive (without args)
-             or used to run a program (defined by given args)
 
-internal application scripts:
------------------------------
+Proposal for env.d scripts
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+Each user **must define** his own configuration for these scripts, following
+the above rules. **The following is only an example and will not work as is**.
 
-runRemote.sh : called from remote computer, via ssh, rsh...
-               used for instance to create container
+atFirst.sh
+    Sets the computer configuration not directly related to SALOME,
+    like useful tools, default PATH.
 
-envd         : sets SALOME application environment
-               sourced by other scripts
+envProducts.sh
+    Sets the SALOME prerequisites.
 
-The following files must be adapted to your environment and SALOME Application:
--------------------------------------------------------------------------------
+envSALOME.sh
+    Sets all the MODULE_ROOT_DIR that can be used in the SALOME application.
 
-SalomeApp.xml        - list of modules, options on server launch and resources...
-CatalogResources.xml - configuration of machines used in SALOME application
-                       (no need of modules list and path here)
+    SalomeAppConfig is also defined by::
 
-env.d directory must contain the necessary files to source, to define 
-the SALOME Application environment :
-  (  envd script source these files in alphanumeric order )
+      export SalomeAppConfig=${HOME}/${APPLI}
 
-For instance,
- atFirst.sh     - general presets
- envProducts.sh - prerequisite SALOME environment
- envSalome.sh   - list of MODULE_ROOT_DIR
+    where SalomeAppConfig designates the directory containing SalomeApp.xml. 
+    Note that ${APPLI} is already defined by the calling scripts when 
+    env.d/envSalome.sh is sourced.
index 80261ada21a8c4dfd8304bd0d08102654f0e5140..db31cbb1b39609146ad1ca75b03647e47367a8d3 100644 (file)
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-for i in $1/env.d/*;do source ${i}; done
+for i in $1/env.d/*.sh; do source ${i}; done
diff --git a/bin/appliskel/killCurrentPort b/bin/appliskel/killCurrentPort
new file mode 100755 (executable)
index 0000000..bca5060
--- /dev/null
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+# --- retrieve APPLI path, relative to $HOME, set ${APPLI}
+
+. `dirname $0`/setAppliPath.sh
+
+# --- set the SALOME environment (prerequisites, MODULES_ROOT_DIR...)
+
+. ${HOME}/${APPLI}/envd ${HOME}/${APPLI}
+
+# --- find omniORB configuration relative to current session if any
+
+myhost=`hostname`
+fileOmniConfig=${HOME}/${APPLI}/.omniORB_${myhost}_last.cfg
+
+if [ -f $fileOmniConfig ]; then
+  export OMNIORB_CONFIG=${HOME}/${APPLI}/.omniORB_${myhost}_last.cfg
+fi
+
+currentPort=`${KERNEL_ROOT_DIR}/bin/salome/NSparam.py port`
+echo $currentPort
+
+# --- kill current salome session
+
+${KERNEL_ROOT_DIR}/bin/salome/killSalomeWithPort.py $currentPort
+
+# --- delete config files
+
+if [ -s $fileOmniConfig ]; then
+  refConfig=`ls -l $fileOmniConfig | awk '{print \$NF}'`
+  if [ -f $refConfig ]; then
+    rm $refConfig
+  fi
+  rm $fileOmniConfig
+fi
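
For illustration only (not part of this commit): the clean-up step above resolves the ".omniORB_<host>_last.cfg" symlink to the per-port config file and removes both. A hedged Python equivalent, assuming the same ${HOME}/${APPLI} layout used by the shell script:

    import os, socket

    appli_dir = os.path.join(os.environ["HOME"], os.environ["APPLI"])
    last_cfg = os.path.join(appli_dir,
                            ".omniORB_%s_last.cfg" % socket.gethostname())

    if os.path.islink(last_cfg):
        # the symlink points to the .omniORB_<host>_<port>.cfg file
        ref_cfg = os.path.join(os.path.dirname(last_cfg), os.readlink(last_cfg))
        if os.path.isfile(ref_cfg):
            os.remove(ref_cfg)
    if os.path.lexists(last_cfg):
        os.remove(last_cfg)
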
index 700e669d4e5a1342548b7a24be3d469a049ae939..1d86ad3d596273903687f570c706a0a420a8bf37 100755 (executable)
@@ -1,20 +1,8 @@
 #!/bin/bash
 
-# --- retrieve APPLI path, relative to $HOME
-#     on sarge, "which" gives not allways the absolute path...
-     
-comName=`which $0`
-aa=${comName:0:1}
-if test x$aa == x\/; then
-  mycom=${comName}
-elif test x$aa == x\.; then
-  mycom=${PWD}/${comName:2}
-else
-  mycom=${PWD}/${comName}
-fi
-APPLI=`echo ${HOME} \`dirname $mycom\` | awk ' { print substr($2,length($1)+2) } '`
-#echo $APPLI
-export APPLI
+# --- retrieve APPLI path, relative to $HOME, set ${APPLI}
+
+. `dirname $0`/setAppliPath.sh
 
 # --- set the SALOME environment (prerequisites, MODULES_ROOT_DIR...)
 
@@ -22,38 +10,8 @@ export APPLI
 
 # --- define port for CORBA naming service
 
-searchFreePort() {
-    echo -n "Searching for a free port for naming service: "
-    export NSPORT=2810
-    local limit=$NSPORT
-    let limit=limit+100
-    while [ 1 ]
-    do
-        aRes=`netstat -ltn | grep -E :${NSPORT}`
-        if [ -z "$aRes" ]; then
-            echo ${NSPORT} - Ok
-           local myhost=`hostname`
-            export OMNIORB_CONFIG=${HOME}/${APPLI}/.omniORB_${myhost}_${NSPORT}.cfg
-           export NSPORT
-            export NSHOST=${myhost}
-            local initref="NameService=corbaname::"`hostname`":$NSPORT"
-            #echo "ORBInitRef $initref" > $OMNIORB_CONFIG
-            echo "InitRef = $initref" > $OMNIORB_CONFIG
-            export LAST_RUNNING_CONFIG=${HOME}/${APPLI}/.omniORB_${myhost}_last.cfg
-           rm ${LAST_RUNNING_CONFIG}
-            ln -s ${OMNIORB_CONFIG} ${LAST_RUNNING_CONFIG}
-            break
-        fi
-        echo -n "${NSPORT} "
-        if [[ $NSPORT -eq $limit ]] ; then
-            echo
-            echo "Can't find a free port to launch omniNames"
-            echo "Try to kill the running servers and then launch SALOME again."
-            exit
-        fi
-        let NSPORT=NSPORT+1
-    done
-}
+. `dirname $0`/searchFreePort.sh
+searchFreePort
 
 # --- if mpi lam, start lam (seems safe to be done several times)
 #     arret manuel avec lamhalt
@@ -66,13 +24,9 @@ fi
 #    (default arguments defined in local salome.launch could be completed
 #     by arguments to this command)
 
-searchFreePort
-
 if [ $# -ne 0 ] ; then
     ${KERNEL_ROOT_DIR}/bin/salome/envSalome.py python -i ${KERNEL_ROOT_DIR}/bin/salome/runSalome.py $*
-    # --- todo delete omniORB config files in relation to the naming service kill
-    rm ${OMNIORB_CONFIG}
-    rm ${LAST_RUNNING_CONFIG}
+
 else
     ${KERNEL_ROOT_DIR}/bin/salome/envSalome.py python ${KERNEL_ROOT_DIR}/bin/salome/runSalome.py 
 fi
index e45f586112c305e12d1da58421099a4e7dbe7252..caeabbdd61319e13363aaad5944a21d032d5e8a9 100755 (executable)
@@ -1,20 +1,8 @@
 #!/bin/bash
 
-# --- retrieve APPLI path, relative to $HOME
-#     on sarge, "which" gives not allways the absolute path...
-     
-comName=`which $0`
-aa=${comName:0:1}
-if test x$aa == x\/; then
-  mycom=${comName}
-elif test x$aa == x\.; then
-  mycom=${PWD}/${comName:2}
-else
-  mycom=${PWD}/${comName}
-fi
-APPLI=`echo ${HOME} \`dirname $mycom\` | awk ' { print substr($2,length($1)+2) } '`
-#echo $APPLI
-export APPLI
+# --- retrieve APPLI path, relative to $HOME, set ${APPLI}
+
+. `dirname $0`/setAppliPath.sh
 
 # --- set the SALOME environment (prerequisites, MODULES_ROOT_DIR...)
 
index 17bbd618b308b5f37b4c29f6edde6cbd37fae7e5..c858c670fcdea8da2a0c25c67fddc14dfae8a714 100755 (executable)
@@ -1,5 +1,6 @@
 #!/bin/bash
 
-./runAppli --killall  
+./KillCurrentPort
+
+./runAppli --logger  
 
-#./runSession killSalome.py
index d3297a6c5ce80e561dab308ffbef69694e56203c..594616a0709701485a25059c812642b114070f51 100755 (executable)
 #     $3 and following : local command to execute, with args
 #
 
-# --- retrieve APPLI path, relative to $HOME
-#     on sarge, "which" gives not allways the absolute path...
-     
-comName=`which $0`
-aa=${comName:0:1}
-if test x$aa == x\/; then
-  mycom=${comName}
-elif test x$aa == x\.; then
-  mycom=${PWD}/${comName:2}
-else
-  mycom=${PWD}/${comName}
-fi
-APPLI=`echo ${HOME} \`dirname $mycom\` | awk ' { print substr($2,length($1)+2) } '`
-#echo $APPLI
-export APPLI
+# --- retrieve APPLI path, relative to $HOME, set ${APPLI}
+
+. `dirname $0`/setAppliPath.sh
 
 # --- set the SALOME environment (prerequisites, MODULES_ROOT_DIR...)
 
index 2ab579ce6d80091da99e6d26e688f40944089969..22d3fdd0fb3307419a4e8f9ff1e3ef810454d9f0 100755 (executable)
@@ -5,28 +5,27 @@
 # Use it without args to run an interactive shell under Salome env
 #
 
-# --- retrieve APPLI path, relative to $HOME
-#     on sarge, "which" gives not allways the absolute path...
-     
-comName=`which $0`
-aa=${comName:0:1}
-if test x$aa == x\/; then
-  mycom=${comName}
-elif test x$aa == x\.; then
-  mycom=${PWD}/${comName:2}
-else
-  mycom=${PWD}/${comName}
-fi
-APPLI=`echo ${HOME} \`dirname $mycom\` | awk ' { print substr($2,length($1)+2) } '`
-echo $APPLI
-export APPLI
+# --- retrieve APPLI path, relative to $HOME, set ${APPLI}
+
+. `dirname $0`/setAppliPath.sh
 
 # --- set the SALOME environment (prerequisites, MODULES_ROOT_DIR...)
 
 . ${HOME}/${APPLI}/envd ${HOME}/${APPLI}
 
+# --- set omniORB configuration to current session if any
+
 myhost=`hostname`
-export OMNIORB_CONFIG=${HOME}/${APPLI}/.omniORB_${myhost}_last.cfg
+fileOmniConfig=${HOME}/${APPLI}/.omniORB_${myhost}_last.cfg
+
+if [ -f $fileOmniConfig ]; then
+  export OMNIORB_CONFIG=${HOME}/${APPLI}/.omniORB_${myhost}_last.cfg
+
+  # --- set environment variables for port and hostname of NamingService
+
+  export NSHOST=`${KERNEL_ROOT_DIR}/bin/salome/NSparam.py host`
+  export NSPORT=`${KERNEL_ROOT_DIR}/bin/salome/NSparam.py port`
+fi
 
 # --- invoque shell with or without args
 
index 6e625f0a0e82ecf4768f660c84c1ea1893e6fae9..755825d797a7588da6480fdbdbaa56faaec88d91 100755 (executable)
@@ -1,20 +1,8 @@
 #!/bin/bash
 
-# --- retrieve APPLI path, relative to $HOME
-#     on sarge, "which" gives not allways the absolute path...
-     
-comName=`which $0`
-aa=${comName:0:1}
-if test x$aa == x\/; then
-  mycom=${comName}
-elif test x$aa == x\.; then
-  mycom=${PWD}/${comName:2}
-else
-  mycom=${PWD}/${comName}
-fi
-APPLI=`echo ${HOME} \`dirname $mycom\` | awk ' { print substr($2,length($1)+2) } '`
-#echo $APPLI
-export APPLI
+# --- retrieve APPLI path, relative to $HOME, set ${APPLI}
+
+. `dirname $0`/setAppliPath.sh
 
 # --- set the SALOME environment (prerequisites, MODULES_ROOT_DIR...)
 
diff --git a/bin/appliskel/searchFreePort.sh b/bin/appliskel/searchFreePort.sh
new file mode 100755 (executable)
index 0000000..8108a93
--- /dev/null
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+# --- define port for CORBA naming service
+
+searchFreePort() {
+    echo -n "Searching for a free port for naming service: "
+    export NSPORT=2810
+    local limit=$NSPORT
+    let limit=limit+100
+    while [ 1 ]
+    do
+        aRes=`netstat -ltn | grep -E :${NSPORT}`
+        if [ -z "$aRes" ]; then
+            echo ${NSPORT} - Ok
+           local myhost=`hostname`
+            export OMNIORB_CONFIG=${HOME}/${APPLI}/.omniORB_${myhost}_${NSPORT}.cfg
+           export NSPORT
+            export NSHOST=${myhost}
+            local initref="NameService=corbaname::"`hostname`":$NSPORT"
+            #echo "ORBInitRef $initref" > $OMNIORB_CONFIG
+            echo "InitRef = $initref" > $OMNIORB_CONFIG
+            export LAST_RUNNING_CONFIG=${HOME}/${APPLI}/.omniORB_${myhost}_last.cfg
+           rm ${LAST_RUNNING_CONFIG}
+            ln -s ${OMNIORB_CONFIG} ${LAST_RUNNING_CONFIG}
+            break
+        fi
+        echo -n "${NSPORT} "
+        if [[ $NSPORT -eq $limit ]] ; then
+            echo
+            echo "Can't find a free port to launch omniNames"
+            echo "Try to kill the running servers and then launch SALOME again."
+            exit
+        fi
+        let NSPORT=NSPORT+1
+    done
+}
+
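
For illustration only (not part of this commit): searchFreePort() scans ports 2810..2910 and treats a port as free when netstat reports no listener on it. A hypothetical Python alternative would test each candidate by trying to bind it:

    import socket

    def find_free_port(start=2810, limit=100):
        for port in range(start, start + limit):
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                s.bind(("", port))   # succeeds only if nothing listens here
                return port
            except socket.error:
                continue
            finally:
                s.close()
        raise RuntimeError("Can't find a free port to launch omniNames")

    if __name__ == "__main__":
        print(find_free_port())
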
diff --git a/bin/appliskel/setAppliPath.sh b/bin/appliskel/setAppliPath.sh
new file mode 100755 (executable)
index 0000000..d9361ae
--- /dev/null
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+# --- retrieve APPLI path, relative to $HOME, set ${APPLI}
+#     on sarge, "which" does not always give the absolute path...
+     
+comName=`which $0`
+aa=${comName:0:1}
+if test x$aa == x\/; then
+  mycom=${comName}
+elif test x$aa == x\.; then
+  mycom=${PWD}/${comName:2}
+else
+  mycom=${PWD}/${comName}
+fi
+APPLI=`echo ${HOME} \`dirname $mycom\` | awk ' { print substr($2,length($1)+2) } '`
+#echo $APPLI
+export APPLI
index 404c9ef8f56f517c0b83f7e888c8fdfc8bb4970f..e52a25f883fec97a6273a12c1389926f73042fc2 100755 (executable)
@@ -3,16 +3,16 @@ import os, sys, pickle, signal, commands
 
 def getPiDict(port,appname='salome',full=True):
     from Utils_Identity import getShortHostName
-    
-    if os.getenv("HOSTNAME") == None:
-        if os.getenv("HOST") == None:
-            os.environ["HOSTNAME"]=getShortHostName()
-        else:
-            os.environ["HOSTNAME"]=os.getenv("HOST")
+
+    host = os.getenv("HOSTNAME")
+    if not host:
+        host = os.getenv("HOST")
+    if not host:
+        host = getShortHostName()
 
     filedict = []
     filedict.append( os.getenv('USER') )          # user name
-    filedict.append( os.getenv('HOSTNAME') )      # host name
+    filedict.append( host )                       # host name
     filedict.append( str(port) )                  # port number
     filedict.append( appname.upper() )            # application name
     filedict.append( 'pidict' )                   # constant part
index 94e17db922b19830b9a6057b945a4abdd5de520a..c4773d189c718681464821f4209195be5c7574ae 100755 (executable)
@@ -408,6 +408,11 @@ class SessionServer(Server):
             self.SCMD2+=['GUI']
         if self.args['splash']:
             self.SCMD2+=['SPLASH']
+        if self.args.has_key('modules'):
+            self.SCMD2+=['--modules (']
+            for mod in self.args['modules']:
+                self.SCMD2+=[mod + ':']
+            self.SCMD2+=[')']    
 
     def setpath(self,modules_list,modules_root_dir):
         cata_path=[]
@@ -610,6 +615,7 @@ def startSalome(args, modules_list, modules_root_dir):
     #
     # Lancement Session Server
     #
+
     mySessionServ = SessionServer(args)
     mySessionServ.setpath(modules_list,modules_root_dir)
     mySessionServ.run()
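
For illustration only (not part of this commit): with a hypothetical args = {'modules': ['GEOM', 'SMESH']}, the new SCMD2 code above contributes the following fragment to the session server command line:

    SCMD2 = []
    args = {'modules': ['GEOM', 'SMESH']}   # hypothetical input
    if 'modules' in args:
        SCMD2 += ['--modules (']
        for mod in args['modules']:
            SCMD2 += [mod + ':']
        SCMD2 += [')']
    print(' '.join(SCMD2))                  # --modules ( GEOM: SMESH: )
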
index ef4205242b306237f0832adf593d377d446fa61f..590bf2c8fe8faedba1bf665fb8a6f914d673f2ab 100644 (file)
@@ -115,7 +115,7 @@ envSALOME.sh
 
     where SalomeAppConfig designates the directory containing SalomeApp.xml. 
     Note that ${APPLI} is already defined by the calling scripts when 
-    envSALOME.sh is sourced.
+    env.d/envSalome.sh is sourced.
 
 2.2 User run scripts
 ~~~~~~~~~~~~~~~~~~~~
@@ -143,8 +143,11 @@ runTests
    and port number), runTests defines a new configuration for naming service
    (new port number).
 
-2.3 SALOME internal run script
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+2.3 SALOME internal run scripts
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+envd
+   Sets SALOME application environment, envd is sourced by other scripts.
 
 For remote calls, SALOME uses one script.
 
@@ -157,7 +160,8 @@ runRemote.sh
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 SalomeApp.xml
-   This file is similar to the default given in $GUI_ROOT_DIR
+   This file is similar to the default given
+   in ${GUI_ROOT_DIR}/share/salome/resources
 
 CatalogRessources.xml
    This files describes all the computer the application can use. The given
index bde79d7e60460356bd778f267dda9f4c89b97e95..ae21b56fe91d53eb910fcee86b9aa7ed7f65ab54 100644 (file)
@@ -59,17 +59,51 @@ if test "$WITHLAM" = yes; then
     LIBS_old="$LIBS"
     LDFLAGS_old="$LDFLAGS"
     LDFLAGS="$MPI_LIBS $LDFLAGS"
-    AC_CHECK_LIB(lam,lam_mp_init,,WITHLAM="no")
-    AC_CHECK_LIB(mpi,MPI_Init,WITHLAM="yes",WITHLAM="no")
-    AC_CHECK_LIB(mpi,MPI_Publish_name,WITHMPI2="yes",WITHMPI2="no")
-    LDFLAGS="$LDFLAGS_old"
-    LIBS="$LIBS_old"
+  fi
+
+  if test "$WITHLAM" = "yes";then
+    WITHLAM="no"
+
+    if test "$WITHLAM" = "no";then
+      CPPFLAGS="$MPI_INCLUDES $CPPFLAGS"
+      LIBS="$LIBS -lmpi++"
+      AC_TRY_LINK([
+      #include <mpi.h>
+      ], [int argc=0; char **argv=0; MPI_Init(&argc,&argv);],
+      WITHLAM="yes",WITHLAM="no")
+      if test "$WITHLAM" = "yes";then
+        MPI_LIBS="$MPI_LIBS -lmpi++"
+      fi
+      LIBS="$LIBS_old"
+      CPPFLAGS="$CPPFLAGS_old"
+
+      AC_CHECK_LIB(mpi++,MPI_Publish_name,WITHMPI2="yes",WITHMPI2="no")
+      LDFLAGS="$LDFLAGS_old"
+      LIBS="$LIBS_old"
+    fi
+
+    if test "$WITHLAM" = "no";then
+      AC_CHECK_LIB(lam,lam_mp_init,WITHLAM="yes",WITHLAM="no")
+      if test "$WITHLAM" = "yes";then
+        MPI_LIBS="$MPI_LIBS -llam"
+        LIBS="$LIBS -llam"
+      fi
+
+      AC_CHECK_LIB(mpi,MPI_Init,WITHLAM="yes",WITHLAM="no")
+      if test "$WITHLAM" = "yes";then
+        MPI_LIBS="$MPI_LIBS -lmpi"
+      fi
+
+      AC_CHECK_LIB(mpi,MPI_Publish_name,WITHMPI2="yes",WITHMPI2="no")
+      LDFLAGS="$LDFLAGS_old"
+      LIBS="$LIBS_old"
+    fi
   fi
 
   if test "$WITHLAM" = "yes";then
      WITHMPI="yes"
      mpi_ok=yes
-     MPI_LIBS="$MPI_LIBS -llammpi++"
+     CPPFLAGS="-DWITHLAM $CPPFLAGS"
   else
      mpi_ok=no
   fi
index 3faa1e990dd1076a526628175ecb53028d9c20e0..ab2869c21b1cc31c76d888a91a7d219ff72715fe 100644 (file)
@@ -104,8 +104,8 @@ then
     AC_MSG_RESULT(QTDIR environment variable may be wrong)
   else
     AC_MSG_RESULT(yes)
-    QT_INCLUDES="-I${QT_ROOT}/include${QTINC} -DQT_THREAD_SUPPORT"
-    QT_MT_INCLUDES="-I${QT_ROOT}/include${QTINC} -DQT_THREAD_SUPPORT"
+    QT_INCLUDES="-I${QT_ROOT}/include${QTINC} -DQT_THREAD_SUPPORT -DQT_CLEAN_NAMESPACE"
+    QT_MT_INCLUDES="-I${QT_ROOT}/include${QTINC} -DQT_THREAD_SUPPORT -DQT_CLEAN_NAMESPACE"
   fi
 fi
 
index f1b456ddad3d6947f3045170b994850e21a1c34a..5803c5e92dc4aff097deb2b8504a147a75996311 100644 (file)
@@ -90,7 +90,7 @@ then
   else
     LIBS="$LIBS -L$QTDIR/lib -lqt-mt"
   fi
-  if test "x$QWTHOME" = "x/usr/lib"
+  if test "x$QWTHOME" = "x/usr"
   then
     LIBS="$LIBS -lqwt"
   else
@@ -120,7 +120,7 @@ then
     AC_MSG_RESULT(QWTHOME environment variable may be wrong)
   else
     QWT_INCLUDES="-I$QWT_INCLUDES"
-    if test "x$QWTHOME" = "x/usr/lib"
+    if test "x$QWTHOME" = "x/usr"
     then
       QWT_LIBS=" -lqwt"
     else
index 1a2bd9125d61c1b49d5dc5ef1b1617ac8bc3bbb0..94e52b25695a6c819d3628b10ac64068a3c63ed8 100644 (file)
@@ -1,3 +1,6 @@
+#ifdef HAVE_MPI2
+#include "mpi.h"
+#endif
 #include "ReceiverFactory.hxx"
 #include "Receivers.hxx"
 using namespace std;
index c4309b754a3fa082c66d5df09c19a40426a726e0..c02d9fffc5d26826058125b83c2b9585432accd8 100644 (file)
@@ -1,11 +1,11 @@
 #ifndef _RECEIVERS_HXX_
 #define _RECEIVERS_HXX_
 
-#include "SALOME_Comm_i.hxx"
-#include "Receiver.hxx"
 #ifdef HAVE_MPI2
 #include "mpi.h"
 #endif
+#include "SALOME_Comm_i.hxx"
+#include "Receiver.hxx"
 
 /*!
   Receiver used for transfert with CORBA when no copy is required remotely and locally.
index 71fbdadf85813225600046620e72dc46993fdcaa..7eb8513f70cbd25d84e1afd35ef0cd28c2a13f10 100644 (file)
@@ -1,7 +1,7 @@
+#include "SALOME_Comm_i.hxx"
 #ifndef WNT
 #include <rpc/xdr.h>
 #endif
-#include "SALOME_Comm_i.hxx"
 #include "poa.h"
 #include "omnithread.h"
 #include "Utils_SINGLETON.hxx"
index f7f196c620b5bf62740c164fe1e1dc2c88486267..1a8507ab0e6576a07eac99dd2b6d0d9f6b1e9828 100644 (file)
@@ -1,12 +1,12 @@
 #ifndef _SALOME_COMM_I_HXX_
 #define _SALOME_COMM_I_HXX_
 
-#include <string>
-#include <SALOMEconfig.h>
-#include CORBA_SERVER_HEADER(SALOME_Comm)
 #ifdef HAVE_MPI2
 #include "mpi.h"
 #endif
+#include <string>
+#include <SALOMEconfig.h>
+#include CORBA_SERVER_HEADER(SALOME_Comm)
 
 #define TIMEOUT 20
 
index 04f8056d0ec928486ff5907008a5911a3a6f4c33..ef1e956e045fa14ed744b84f2c245126cdb8a4d6 100644 (file)
@@ -1,7 +1,7 @@
+#include "SALOME_Comm_i.hxx"
 #include "SenderFactory.hxx"
 #include "utilities.h"
 #include "SALOMEMultiComm.hxx"
-#include "SALOME_Comm_i.hxx"
 using namespace std;
 
 #ifdef COMP_CORBA_DOUBLE
index 42d4d5a3e43faab00b858580bb3d3d010c44b2fe..e96cc3eb04d31389cecc0b363dbd4d9d1bf4c120 100644 (file)
@@ -2,6 +2,9 @@
 
 %{
   #include "ReceiverFactory.hxx"
+  #undef SEEK_SET
+  #undef SEEK_CUR
+  #undef SEEK_END
   #include "SALOME_Comm_i.hxx"
 %}
 
index f6f1884ff70a0fa602af6ed029cbd15429fcfda4..a97bb39dc028626ebda6aaf8f79569392ac0feb8 100644 (file)
 //  Module : SALOME
 //  $Header$
 
+#ifdef HAVE_MPI2
+#include <mpi.h>
+#endif
+
 #include <iostream>
 #include <string>
 #include <stdio.h>
 #include <Utils_Timer.hxx>
 #endif
 
-#ifdef HAVE_MPI2
-#include <mpi.h>
-#endif
-
 #include "Container_init_python.hxx"
 
 using namespace std;
index b7a3c708e1c68133adde04f27734be07ee527cf7..7e19bed50d3afc2705a5e4d560bd0366104d05a8 100644 (file)
@@ -29,6 +29,7 @@ SALOME_ContainerManager::SALOME_ContainerManager(CORBA::ORB_ptr orb)
   MESSAGE("constructor");
   _NS = new SALOME_NamingService(orb);
   _ResManager = new SALOME_ResourcesManager(orb);
+  _id=0;
   PortableServer::POA_var root_poa = PortableServer::POA::_the_root_poa();
   PortableServer::POAManager_var pman = root_poa->the_POAManager();
   PortableServer::POA_var my_poa;
@@ -118,6 +119,10 @@ SALOME_ContainerManager::
 FindOrStartContainer(const Engines::MachineParameters& params,
                     const Engines::MachineList& possibleComputers)
 {
+  long id;
+  string containerNameInNS;
+  char idc[3*sizeof(long)];
+
   Engines::Container_ptr ret = FindContainer(params,possibleComputers);
   if(!CORBA::is_nil(ret))
     return ret;
@@ -128,6 +133,15 @@ FindOrStartContainer(const Engines::MachineParameters& params,
   string theMachine=_ResManager->FindBest(possibleComputers);
   MESSAGE("try to launch it on " << theMachine);
 
+  // Get Id for container: a parallel container registers in Naming Service
+  // on the machine where process 0 runs. ContainerManager doesn't know the name
+  // of this machine before the launch of the parallel container. So to get
+  // the IOR of the parallel container in Naming Service, ContainerManager
+  // gives a unique Id. The parallel container registers its name under
+  // the /ContainerManager/Id directory in NamingService
+
+  id = GetIdForContainer();
+
   string command;
   if(theMachine=="")
     {
@@ -137,11 +151,11 @@ FindOrStartContainer(const Engines::MachineParameters& params,
     }
   else if(theMachine==GetHostname())
     {
-      command=_ResManager->BuildCommandToLaunchLocalContainer(params);
+      command=_ResManager->BuildCommandToLaunchLocalContainer(params,id);
     }
   else
     command =
-      _ResManager->BuildCommandToLaunchRemoteContainer(theMachine,params);
+      _ResManager->BuildCommandToLaunchRemoteContainer(theMachine,params,id);
 
   _ResManager->RmTmpFile();
   int status=system(command.c_str());
@@ -170,8 +184,15 @@ FindOrStartContainer(const Engines::MachineParameters& params,
          count-- ;
          if ( count != 10 )
            MESSAGE( count << ". Waiting for FactoryServer on " << theMachine);
-         string containerNameInNS =
-           _NS->BuildContainerNameForNS(params,theMachine.c_str());
+         if(params.isMPI)
+           {
+             containerNameInNS = "/ContainerManager/id";
+             sprintf(idc,"%ld",id);
+             containerNameInNS += idc;
+           }
+         else
+           containerNameInNS =
+             _NS->BuildContainerNameForNS(params,theMachine.c_str());
          SCRUTE(containerNameInNS);
          CORBA::Object_var obj = _NS->Resolve(containerNameInNS.c_str());
          ret=Engines::Container::_narrow(obj);
@@ -273,3 +294,22 @@ FindContainer(const Engines::MachineParameters& params,
   MESSAGE("FindContainer: not found");
   return Engines::Container::_nil();
 }
+
+//=============================================================================
+/*! 
+ * Get Id for container: a parallel container registers in Naming Service
+ * on the machine where process 0 runs. ContainerManager doesn't know the name
+ * of this machine before the launch of the parallel container. So to get
+ * the IOR of the parallel container in Naming Service, ContainerManager
+ * gives a unique Id. The parallel container registers its name under
+ * the /ContainerManager/Id directory in NamingService
+ */
+//=============================================================================
+
+
+long SALOME_ContainerManager::GetIdForContainer(void)
+{
+  _id++;
+  return _id;
+}
+
index f8311e9f732b09bb55c1a84051272ecdca9d0055..1217db07bfb6ac18d201c53031be3df6cbe4eeff 100644 (file)
@@ -58,6 +58,9 @@ private:
   FindContainer(const Engines::MachineParameters& params,
                const char *theMachine);
 
+  long GetIdForContainer(void);
+  long _id;
+
   SALOME_ResourcesManager *_ResManager;
   SALOME_NamingService *_NS;
 };
index 3b97ad7938dd76dcec8cd55aee9c547c61c620f2..872c32d95b7cde771318ec21f64606b88ba051de 100644 (file)
@@ -45,25 +45,52 @@ Engines_MPIContainer_i::Engines_MPIContainer_i(int nbproc, int numproc,
                                               int argc, char *argv[]) 
   : Engines_Container_i(orb,poa,containerName,argc,argv,false), MPIObject_i(nbproc,numproc)
 {
+  long id=0;
+  string IdContainerinNS;
+  char idc[3*sizeof(long)];
+
   MESSAGE("[" << numproc << "] activate object");
   _id = _poa->activate_object(this);
-//   this->_add_ref();
+
+  if(argc>1)
+    {
+      for(int i=0;i<argc;i++)
+       {
+         if(strcmp(argv[i],"-id")==NULL)
+           {
+             id = atoi(argv[i+1]);
+             continue;
+           }
+       }
+    }
+  SCRUTE(id);
 
   if(numproc==0){
 
     _NS = new SALOME_NamingService();
-//     _NS = SINGLETON_<SALOME_NamingService>::Instance() ;
-//     ASSERT(SINGLETON_<SALOME_NamingService>::IsAlreadyExisting()) ;
     _NS->init_orb( CORBA::ORB::_duplicate(_orb) ) ;
 
-//     Engines::Container_ptr pCont 
-//       = Engines::Container::_narrow(POA_Engines::MPIContainer::_this());
     CORBA::Object_var obj=_poa->id_to_reference(*_id);
     Engines::Container_var pCont = Engines::Container::_narrow(obj);
+
     string hostname = GetHostname();
     _containerName = _NS->BuildContainerNameForNS(containerName,hostname.c_str());
     SCRUTE(_containerName);
     _NS->Register(pCont, _containerName.c_str());
+
+    // A parallel container registers in Naming Service
+    // on the machine where process 0 runs. ContainerManager doesn't know the name
+    // of this machine before the launch of the parallel container. So to get
+    // the IOR of the parallel container in Naming Service, ContainerManager
+    // gives a unique Id. The parallel container registers its name under
+    // the /ContainerManager/Id directory in NamingService
+
+    IdContainerinNS = "/ContainerManager/id";
+    sprintf(idc,"%ld",id);
+    IdContainerinNS += idc;
+    SCRUTE(IdContainerinNS);
+    _NS->Register(pCont, IdContainerinNS.c_str());
+
   }
 
   // Root recupere les ior des container des autre process
index 0da19e33ca5753158c78f16f0951fd48bd9b7590..ab6e1a38600695fe9de72e802c719534749b8c42 100644 (file)
@@ -24,9 +24,9 @@
 //  File   : MPIObject_i.cxx
 //  Module : SALOME
 
+#include <mpi.h>
 #include "MPIObject_i.hxx"
 #include "utilities.h"
-#include <mpi.h>
 using namespace std;
 
 MPIObject_i::MPIObject_i()
index bf2322190a7e30cf16008458e622172568d32045..3932ff3d4ef1ca1a741576580faa1bb7cbf0cbbb 100644 (file)
@@ -1,9 +1,9 @@
+#include <mpi.h>
 #include <iostream>
 #include "MPIContainer_i.hxx"
 #include "Utils_ORB_INIT.hxx"
 #include "Utils_SINGLETON.hxx"
 #include "utilities.h"
-#include <mpi.h>
 #include "SALOMETraceCollector.hxx"
 using namespace std;
 
index a13dd52d46736c30467500f2b5da3038b7d427e3..e0ad5fcd59e69d2487ca1d186878731643d8ce04 100644 (file)
@@ -543,6 +543,16 @@ SALOME_NamingService::ResolveComponent(const char* hostname,
          for (unsigned int ind = 0; ind < contList.size(); ind++)
            {
              name = contList[ind].c_str();
+
+             if ( nbproc >= 1 )
+               {
+                 char *str_nbproc = new char[8];
+                 sprintf(str_nbproc, "_%d", nbproc);
+                 if( strstr(name.c_str(),str_nbproc) == NULL)
+                   continue; // check only containers with _%d in name
+                 delete [] str_nbproc;
+               }
+
              name += "/";
              name += componentName;
              SCRUTE(name);
@@ -1458,15 +1468,17 @@ throw(ServiceUnreachable)
 void SALOME_NamingService::Destroy_FullDirectory(const char* Path)
 throw(ServiceUnreachable)
 {
-  Change_Directory(Path);
-  vector<string> contList = list_directory();
-
-  for (unsigned int ind = 0; ind < contList.size(); ind++)
-    Destroy_Name(contList[ind].c_str());
-
-  Destroy_Directory(Path);
+  if( Change_Directory(Path) )
+    {
+      vector<string> contList = list_directory();
 
-  Destroy_Name(Path);
+      for (unsigned int ind = 0; ind < contList.size(); ind++)
+       Destroy_Name(contList[ind].c_str());
+      
+      Destroy_Directory(Path);
+      
+      Destroy_Name(Path);
+    }
 }
 
 // ============================================================================
@@ -1543,7 +1555,7 @@ SALOME_NamingService::_createContextNameDir(string path,
       if (endIdx == string::npos)
        endIdx = path.length();
       int lsub = endIdx - begIdx;
-      if (lsub > 1)
+      if (lsub >= 1)
        splitPath.push_back(path.substr(begIdx, lsub));
       begIdx = path.find_first_not_of(delims, endIdx);
     }
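
For illustration only (not part of this commit): relaxing the test from "lsub > 1" to "lsub >= 1" in _createContextNameDir keeps one-character path components, which the new unit test below relies on ("/nstest2/1/2/3/4/echo_1"). The effect, rendered in Python:

    def split_path(path, keep_single_chars):
        min_len = 1 if keep_single_chars else 2
        return [p for p in path.split('/') if len(p) >= min_len]

    path = "/nstest2/1/2/3/4/echo_1"
    print(split_path(path, False))  # old: ['nstest2', 'echo_1'] - 1-char dirs dropped
    print(split_path(path, True))   # new: ['nstest2', '1', '2', '3', '4', 'echo_1']
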
index ab7f2050d1a6e8ab35a2cf71fba50aca1ceffd84..8c36cf0183b3f121b107e7e38a7328b3f0914f20 100644 (file)
@@ -50,7 +50,7 @@ class SALOME_NamingServicePy_i:
         self._orb = orb
         # initialize root context and current context
        ok = 0
-       steps = 40
+       steps = 240
        while steps > 0 and ok == 0:
          try:
             obj =self._orb.resolve_initial_references("NameService")
@@ -68,7 +68,7 @@ class SALOME_NamingServicePy_i:
            MESSAGE(" Name service not found")
          time.sleep(0.25)
          steps = steps - 1
-        if steps == 0: 
+        if steps == 0 and self._root_context is None:
           MESSAGE ( "Name Service Reference is invalid" )
           sys.exit(1)
     #-------------------------------------------------------------------------
index 2d4d5d9483d70751f5418410c36cdd12751cce37..deedb2b6141ae49ade7df9a69a365b21c9cfaf02 100644 (file)
@@ -273,6 +273,15 @@ NamingServiceTest::testRegisterResolveAbsWithPath()
   NSTEST::echo_var anEchoRef1a = NSTEST::echo::_narrow(obj);
   CPPUNIT_ASSERT(!CORBA::is_nil(anEchoRef1a));
   CPPUNIT_ASSERT(anEchoRef1->getId() == anEchoRef1a->getId());
+
+  NSTEST::echo_var anEchoRef2 = myFactory->createInstance();
+  _NS.Register(anEchoRef2,"/nstest2/1/2/3/4/echo_1");
+
+  obj = _NS.Resolve("/nstest2/1/2/3/4/echo_1");
+  CPPUNIT_ASSERT(!CORBA::is_nil(obj));
+  NSTEST::echo_var anEchoRef2a = NSTEST::echo::_narrow(obj);
+  CPPUNIT_ASSERT(!CORBA::is_nil(anEchoRef2a));
+  CPPUNIT_ASSERT(anEchoRef2->getId() == anEchoRef2a->getId());
 }
 
 // ============================================================================
index 26f9d2de74d3d1172c60d9d62799eda7afa00ef0..3e4ffc748bbd996a8c0f41d5c9b94952c49323d4 100644 (file)
@@ -357,10 +357,12 @@ bool isPythonContainer(const char* ContainerName)
 string
 SALOME_ResourcesManager::BuildCommandToLaunchRemoteContainer
 (const string& machine,
- const Engines::MachineParameters& params)
+ const Engines::MachineParameters& params, const long id)
 {
   string command;
-
+  int nbproc;
+  char idc[3*sizeof(long)];
+         
   if ( ! _isAppliSalomeDefined )
     command = BuildTempFileToLaunchRemoteContainer(machine, params);
 
@@ -370,8 +372,6 @@ SALOME_ResourcesManager::BuildCommandToLaunchRemoteContainer
 
       if (params.isMPI)
         {
-          int nbproc;
-
           if ( (params.nb_node <= 0) && (params.nb_proc_per_node <= 0) )
             nbproc = 1;
           else if ( params.nb_node == 0 )
@@ -418,9 +418,33 @@ SALOME_ResourcesManager::BuildCommandToLaunchRemoteContainer
       ASSERT(getenv("NSPORT"));
       command += getenv("NSPORT"); // port of CORBA name server
 
-      command += " SALOME_Container ";
+      if(params.isMPI)
+       {
+         command += " mpirun -np ";
+         std::ostringstream o;
+         o << nbproc << " ";
+         command += o.str();
+#ifdef WITHLAM
+         command += "-x PATH,LD_LIBRARY_PATH,OMNIORB_CONFIG,SALOME_trace ";
+#endif 
+         command += " SALOME_MPIContainer ";
+       }
+      else
+       command += " SALOME_Container ";
+
+      command += _NS->ContainerName(params);
+      command += " -id ";
+      sprintf(idc,"%ld",id);
+      command += idc;
+      command += " -";
+      AddOmninamesParams(command);
+      command += " > /tmp/";
       command += _NS->ContainerName(params);
-      command += "&";
+      command += "_";
+      command += GetHostname();
+      command += "_";
+      command += getenv( "USER" ) ;
+      command += ".log 2>&1 &" ;
 
       MESSAGE("command =" << command);
     }
@@ -437,11 +461,12 @@ SALOME_ResourcesManager::BuildCommandToLaunchRemoteContainer
 
 string
 SALOME_ResourcesManager::BuildCommandToLaunchLocalContainer
-(const Engines::MachineParameters& params)
+(const Engines::MachineParameters& params, const long id)
 {
   _TmpFileName = "";
   string command;
   int nbproc = 0;
+  char idc[3*sizeof(long)];
 
   if (params.isMPI)
     {
@@ -461,7 +486,9 @@ SALOME_ResourcesManager::BuildCommandToLaunchLocalContainer
       o << nbproc << " ";
 
       command += o.str();
+#ifdef WITHLAM
       command += "-x PATH,LD_LIBRARY_PATH,OMNIORB_CONFIG,SALOME_trace ";
+#endif
 
       if (isPythonContainer(params.container_name))
         command += "pyMPI SALOME_ContainerPy.py ";
@@ -478,6 +505,9 @@ SALOME_ResourcesManager::BuildCommandToLaunchLocalContainer
     }
 
   command += _NS->ContainerName(params);
+  command += " -id ";
+  sprintf(idc,"%ld",id);
+  command += idc;
   command += " -";
   AddOmninamesParams(command);
   command += " > /tmp/";
@@ -771,6 +801,9 @@ SALOME_ResourcesManager::BuildTempFileToLaunchRemoteContainer
       std::ostringstream o;
 
       tempOutputFile << nbproc << " ";
+#ifdef WITHLAM
+      tempOutputFile << "-x PATH,LD_LIBRARY_PATH,OMNIORB_CONFIG,SALOME_trace ";
+#endif
     }
 
   tempOutputFile << (*(resInfo.ModulesPath.find("KERNEL"))).second
index 029c9fa9ee96ae04822feb6fab6e03f15da82035..b41e0d24a0bab73dbe61b8c61be5a715ffc35f57 100644 (file)
@@ -53,10 +53,10 @@ class RESOURCESMANAGER_EXPORT SALOME_ResourcesManager
 
     std::string BuildCommandToLaunchRemoteContainer
     (const std::string& machine,
-     const Engines::MachineParameters& params);
+     const Engines::MachineParameters& params, const long id);
 
     std::string BuildCommandToLaunchLocalContainer
-    (const Engines::MachineParameters& params);
+    (const Engines::MachineParameters& params, const long id);
 
     void RmTmpFile();
 
index a020990f53e7b05b578c99ef4d6038bd74a44b99..8e916a7f5e13c08e2cc9ca416f7edda22c8f41ba 100644 (file)
@@ -206,9 +206,8 @@ SALOMEDS::GenericAttribute_ptr SALOMEDS_StudyBuilder_i::FindOrCreateAttribute(SA
 {
   SALOMEDS::Locker lock;
   Handle(SALOMEDSImpl_SObject) aSO;
-  char* anID = anObject->GetID();  
-  aSO = Handle(SALOMEDSImpl_Study)::DownCast(_impl->GetOwner())->GetSObject(anID);
-  delete [] anID;
+  CORBA::String_var anID = anObject->GetID();
+  aSO = Handle(SALOMEDSImpl_Study)::DownCast(_impl->GetOwner())->GetSObject(anID.inout());
   Handle(TDF_Attribute) anAttr;
   try {
      anAttr = _impl->FindOrCreateAttribute(aSO, TCollection_AsciiString((char*)aTypeOfAttribute));
index 8728ffae89df0e3980f14651cb732f79e4153708..d1f1c4e27b13339b45975275fb9067c59d1a44f9 100644 (file)
@@ -2,12 +2,13 @@ import SALOMEDS__POA
 
 class SALOME_DriverPy_i(SALOMEDS__POA.Driver):
     """
+    Python implementation of generic SALOMEDS driver.
+    Should be inherited by any Python module's engine
+    to provide persistence mechanism.
     """
-    _ComponentDataType = None
-
     def __init__ (self, componentDataType):
         print "SALOME_DriverPy.__init__: ",componentDataType
-        _ComponentDataType = componentDataType
+        self._ComponentDataType = componentDataType
 
     def IORToLocalPersistentID(self, theSObject, IORString, isMultiFile, isASCII):
         return theSObject.GetID()
@@ -16,7 +17,7 @@ class SALOME_DriverPy_i(SALOMEDS__POA.Driver):
         return ""
 
     def ComponentDataType(self):
-        return _ComponentDataType
+        return self._ComponentDataType
 
     def Save(self, theComponent, theURL, isMultiFile):
         return NULL
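
For illustration only (not part of this commit): the point of the SALOME_DriverPy fix is that the old __init__ bound componentDataType to a local variable, so the value was never stored on the instance and ComponentDataType() could not return it. A minimal sketch of the old and new behaviour (the component name "GEOM" is hypothetical):

    class OldDriver:
        _ComponentDataType = None
        def __init__(self, componentDataType):
            _ComponentDataType = componentDataType    # local variable, discarded
        def ComponentDataType(self):
            return _ComponentDataType                 # no self.: NameError here

    class NewDriver:
        def __init__(self, componentDataType):
            self._ComponentDataType = componentDataType
        def ComponentDataType(self):
            return self._ComponentDataType

    try:
        OldDriver("GEOM").ComponentDataType()
    except NameError as err:
        print("old code fails:", err)
    print("new code returns:", NewDriver("GEOM").ComponentDataType())
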
index 8fd4c5bf2d3bfea754094b37bdf59eddc5e72352..4523a897dfc5d02d1de9d62ef68bcba57b43d697 100644 (file)
@@ -312,8 +312,8 @@ unsigned long LocalTraceBufferPool::lockedIncrement(unsigned long& pos)
 {
   int ret;
   ret = pthread_mutex_lock(&_incrementMutex);   // lock access to counters
-  pos++;
+  unsigned long mypos = ++pos;
   ret = pthread_mutex_unlock(&_incrementMutex); // release lock
-  return pos;
+  return mypos;
 }
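
For illustration only (not part of this commit): the LocalTraceBufferPool change copies the incremented counter into a local while the mutex is still held, because returning pos after the unlock could pick up a value already changed by another thread. The same pattern sketched in Python:

    import threading

    class Counter:
        def __init__(self):
            self._lock = threading.Lock()
            self._pos = 0

        def locked_increment(self):
            with self._lock:
                self._pos += 1
                mypos = self._pos   # snapshot taken under the lock
            return mypos            # unaffected by increments after the unlock

    c = Counter()
    threads = [threading.Thread(target=c.locked_increment) for _ in range(10)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(c._pos)   # 10
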