1 # -*- coding: iso-8859-1 -*-
2 # Copyright (C) 2007-2023 CEA, EDF, OPEN CASCADE
4 # This library is free software; you can redistribute it and/or
5 # modify it under the terms of the GNU Lesser General Public
6 # License as published by the Free Software Foundation; either
7 # version 2.1 of the License, or (at your option) any later version.
9 # This library is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 # Lesser General Public License for more details.
14 # You should have received a copy of the GNU Lesser General Public
15 # License along with this library; if not, write to the Free Software
16 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
18 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # File : SALOME_PyNode.py
22 # Author : Christian CAREMOLI, EDF
35 from SALOME_ContainerHelper import ScriptExecInfo
37 MY_CONTAINER_ENTRY_IN_GLBS = "my_container"
# Lightweight Python servant implementing the SALOME GenericObj CORBA
# reference-counting interface (Register/UnRegister/Destroy).
# NOTE(review): this file is a line-numbered excerpt with gaps — the leading
# integers are original line numbers and several statements (the
# Register/UnRegister method headers, the self.cnt handling) are missing;
# restore them from the upstream sources before editing.
39 class Generic(SALOME__POA.GenericObj):
40 """A Python implementation of the GenericObj CORBA IDL"""
41 def __init__(self,poa):
# presumably stores the POA and initializes a reference counter self.cnt
# (assignment lines missing from this excerpt) — TODO confirm upstream.
46 #print("Register called : %d"%self.cnt)
50 #print("UnRegister called : %d"%self.cnt)
# Deactivate this servant in its POA once the reference count drops to zero.
53 oid=self.poa.servant_to_id(self)
54 self.poa.deactivate_object(oid)
# Destroy() is kept only for backward compatibility with old clients.
57 print("WARNING SALOME::GenericObj::Destroy() function is obsolete! Use UnRegister() instead.")
61 #print("Destructor called")
# CORBA servant executing a named Python function inside a persistent
# execution context (self.context) shared across calls.
64 class PyNode_i (Engines__POA.PyNode,Generic):
65 """The implementation of the PyNode CORBA IDL"""
66 def __init__(self, nodeName,code,poa,my_container):
67 """Initialize the node : compilation in the local context"""
68 Generic.__init__(self,poa)
69 self.nodeName=nodeName
# NOTE(review): line 70 (presumably "self.code=code") is missing here.
71 self.my_container=my_container._container
# Register the source with linecache so tracebacks can show script lines.
72 linecache.cache[nodeName]=0,None,code.split('\n'),nodeName
73 ccode=compile(code,nodeName,'exec')
# NOTE(review): the creation of self.context (~line 74) is missing here.
75 self.context[MY_CONTAINER_ENTRY_IN_GLBS] = self.my_container
76 exec(ccode, self.context)
78 def getContainer(self):
79 return self.my_container
# Inject a client-supplied variable (pickled bytes) into the context.
# SECURITY NOTE: pickle.loads on data received over CORBA — trusted peers only.
87 def defineNewCustomVar(self,varName,valueOfVar):
88 self.context[varName] = pickle.loads(valueOfVar)
91 def executeAnotherPieceOfCode(self,code):
92 """Called for initialization of container lodging self."""
# NOTE(review): the try:/except: lines (~93, 96) are missing from this excerpt.
94 ccode=compile(code,self.nodeName,'exec')
95 exec(ccode, self.context)
97 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"","PyScriptNode (%s) : code to be executed \"%s\"" %(self.nodeName,code),0))
99 def execute(self,funcName,argsin):
100 """Execute the function funcName found in local context with pickled args (argsin)"""
102 argsin,kws=pickle.loads(argsin)
103 func=self.context[funcName]
104 argsout=func(*argsin,**kws)
105 argsout=pickle.dumps(argsout,-1)
# NOTE(review): "return argsout" and the except: header (~106-107) are missing.
108 exc_typ,exc_val,exc_fr=sys.exc_info()
109 l=traceback.format_exception(exc_typ,exc_val,exc_fr)
110 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"".join(l),"PyNode: %s, function: %s" % (self.nodeName,funcName),0))
# Servant streaming a bytes payload to remote clients chunk by chunk
# (see SeqByteReceiver below for the client side).
112 class SenderByte_i(SALOME__POA.SenderByte,Generic):
113 def __init__(self,poa,bytesToSend):
114 Generic.__init__(self,poa)
115 self.bytesToSend = bytesToSend
# NOTE(review): the "def getSize(self):" header (~line 117) is missing from
# this excerpt; the return below belongs to it.
118 return len(self.bytesToSend)
120 def sendPart(self,n1,n2):
# Return the half-open slice [n1, n2) of the payload.
121 return self.bytesToSend[n1:n2]
# Environment variable naming the directory where big objects are dumped.
123 SALOME_FILE_BIG_OBJ_DIR = "SALOME_FILE_BIG_OBJ_DIR"
# Environment variable overriding the proxy-activation size threshold.
125 SALOME_BIG_OBJ_ON_DISK_THRES_VAR = "SALOME_BIG_OBJ_ON_DISK_THRES"
# Default threshold in bytes (50 MB) above which pickled payloads go to disk.
128 SALOME_BIG_OBJ_ON_DISK_THRES_DFT = 50000000
130 from ctypes import c_int
# NOTE(review): the surrounding definitions (presumably "TypeCounter = c_int"
# and the "def GetSizeOfTCnt():" header, ~lines 131-133) are missing from
# this excerpt; the return below is the body of GetSizeOfTCnt and yields the
# byte size of one TypeCounter value.
134 return len( bytes(TypeCounter(0) ) )
136 def GetSizeOfBufferedReader(f):
138 This method returns, in bytes, the size of an opened file.
142 f (io.IOBase): buffered reader returned by open
# Measure by seeking to the end, then restore the previous position (the
# save of "pos", ~line 151, is on a line missing from this excerpt).
150 f.seek(0,io.SEEK_END)
152 f.seek(pos,io.SEEK_SET)
155 def GetObjectFromFile(fname, visitor = None):
# Load a pickled object previously written by DumpInFile. The file layout is
# a TypeCounter reference count followed by the pickled payload.
156 with open(fname,"rb") as f:
157 cntb = f.read( GetSizeOfTCnt() )
158 cnt = TypeCounter.from_buffer_copy( cntb ).value
# Report disk usage to the optional visitor ("if visitor:" guard, ~line 159,
# is missing from this excerpt).
160 visitor.setHDDMem( GetSizeOfBufferedReader(f) )
161 visitor.setFileName( fname )
# NOTE(review): the pickle.load and return statements (~162-163) are missing.
165 def DumpInFile(obj,fname):
# Write the on-disk layout: a TypeCounter refcount initialized to 1 followed
# by the serialized payload (the payload write, ~line 168, is missing from
# this excerpt).
166 with open(fname,"wb") as f:
167 f.write( bytes( TypeCounter(1) ) )
170 def IncrRefInFile(fname):
# Increment the reference counter stored at the head of a big-object file.
171 with open(fname,"rb") as f:
172 cntb = f.read( GetSizeOfTCnt() )
173 cnt = TypeCounter.from_buffer_copy( cntb ).value
# Rewrite the counter in place ("rb+" leaves the pickled payload untouched).
174 with open(fname,"rb+") as f:
175 #import KernelServices ; KernelServices.EntryForDebuggerBreakPoint()
176 f.write( bytes( TypeCounter(cnt+1) ) )
178 def DecrRefInFile(fname):
# Decrement the reference counter of a big-object file. NOTE(review): the
# branch deleting the file when the counter reaches zero (~lines 183-187)
# is missing from this excerpt — confirm against upstream.
180 with open(fname,"rb") as f:
181 cntb = f.read( GetSizeOfTCnt() )
182 cnt = TypeCounter.from_buffer_copy( cntb ).value
184 #import KernelServices ; KernelServices.EntryForDebuggerBreakPoint()
188 with open(fname,"rb+") as f:
189 f.write( bytes( TypeCounter(cnt-1) ) )
191 def GetBigObjectOnDiskThreshold():
# Size threshold (bytes) above which an object is dumped to disk; the
# environment variable takes precedence over the compiled-in default.
193 if SALOME_BIG_OBJ_ON_DISK_THRES_VAR in os.environ:
194 return int( os.environ[SALOME_BIG_OBJ_ON_DISK_THRES_VAR] )
196 return SALOME_BIG_OBJ_ON_DISK_THRES_DFT
198 def ActivateProxyMecanismOrNot( sizeInByte ):
# Decide whether a pickled payload of sizeInByte bytes should be proxified
# to disk. NOTE(review): lines ~200-202 are missing from this excerpt —
# possibly special-casing of the threshold value; confirm upstream.
199 thres = GetBigObjectOnDiskThreshold()
203 return sizeInByte > thres
205 def GetBigObjectDirectory():
# Directory used to dump big objects; must be provided via the environment.
207 if SALOME_FILE_BIG_OBJ_DIR not in os.environ:
208 raise RuntimeError("An object of size higher than limit detected and no directory specified to dump it in file !")
209 return os.path.expanduser( os.path.expandvars( os.environ[SALOME_FILE_BIG_OBJ_DIR] ) )
211 def GetBigObjectFileName():
213 Return a filename in the most secure manner (see tempfile documentation)
# NamedTemporaryFile is used only to reserve a unique name; the body lines
# capturing f.name and the return (~217-218) are missing from this excerpt.
216 with tempfile.NamedTemporaryFile(dir=GetBigObjectDirectory(),prefix="mem_",suffix=".pckl") as f:
# Base class of the disk-backed proxy objects. An instance references a
# reference-counted pickle file; only the client side may own deletion
# rights (_destroy True).
220 class BigObjectOnDiskBase:
221 def __init__(self, fileName, objSerialized):
223 :param fileName: the file used to dump into.
224 :param objSerialized: the object in pickled form
225 :type objSerialized: bytes
227 self._filename = fileName
228 # attribute _destroy is here to tell client side or server side
229 # only client side can be with _destroy set to True. server side due to risk of concurrency
230 # so pickled form of self must be done with this attribute set to False.
231 self._destroy = False
232 self.__dumpIntoFile(objSerialized)
234 def getDestroyStatus(self):
# incrRef path (method header and guard lines are missing from this excerpt):
239 IncrRefInFile( self._filename )
# NOTE(review): RuntimeError is instantiated but never raised — a `raise`
# appears to be missing here; confirm intent before changing.
241 # should never happen !
242 RuntimeError("Invalid call to incrRef !")
246 DecrRefInFile( self._filename )
# NOTE(review): same here — the exception is constructed but not raised.
248 # should never happen !
249 RuntimeError("Invalid call to decrRef !")
251 def unlinkOnDestructor(self):
254 def doNotTouchFile(self):
256 Method called slave side. The life cycle management of file is client side not slave side.
258 self._destroy = False
# destructor path: drop one file reference (guard lines missing here).
262 DecrRefInFile( self._filename )
264 def getFileName(self):
265 return self._filename
267 def __dumpIntoFile(self, objSerialized):
268 DumpInFile( objSerialized, self._filename )
270 def get(self, visitor = None):
# Reload the pickled object ("return obj", ~line 272, missing from excerpt).
271 obj, _ = GetObjectFromFile( self._filename, visitor )
# numeric / string conversion helpers (their def headers are missing here):
275 return float( self.get() )
278 return int( self.get() )
282 if isinstance(obj,str):
285 raise RuntimeError("Not a string")
# Disk proxy of a scalar (non-sequence) object.
287 class BigObjectOnDisk(BigObjectOnDiskBase):
288 def __init__(self, fileName, objSerialized):
289 BigObjectOnDiskBase.__init__(self, fileName, objSerialized)
# Proxy of a single element of a disk-backed sequence: re-reads the whole
# sequence file on demand and indexes into it.
291 class BigObjectOnDiskListElement(BigObjectOnDiskBase):
292 def __init__(self, pos, length, fileName):
293 self._filename = fileName
294 self._destroy = False
# NOTE(review): the "self._pos = pos" assignment (~line 295) is missing from
# this excerpt but is required by get() below.
296 self._length = length
298 def get(self, visitor = None):
299 fullObj = BigObjectOnDiskBase.get(self, visitor)
300 return fullObj[ self._pos ]
302 def __getitem__(self, i):
# __len__ (header on a missing line) delegates to the fetched object:
306 return len(self.get())
# Common base of disk-backed list/tuple proxies; indexing returns lazy
# per-element proxies instead of fetching the whole payload at once.
308 class BigObjectOnDiskSequence(BigObjectOnDiskBase):
309 def __init__(self, length, fileName, objSerialized):
310 BigObjectOnDiskBase.__init__(self, fileName, objSerialized)
311 self._length = length
313 def __getitem__(self, i):
314 return BigObjectOnDiskListElement(i, self._length, self.getFileName())
# Disk proxy specialization for list payloads.
319 class BigObjectOnDiskList(BigObjectOnDiskSequence):
320 def __init__(self, length, fileName, objSerialized):
321 BigObjectOnDiskSequence.__init__(self, length, fileName, objSerialized)
# Disk proxy specialization for tuple payloads.
323 class BigObjectOnDiskTuple(BigObjectOnDiskSequence):
324 def __init__(self, length, fileName, objSerialized):
325 BigObjectOnDiskSequence.__init__(self, length, fileName, objSerialized)
327 def ProxyfyPickeled( obj, pickleObjInit = None, visitor = None ):
329 This method returns a proxy instance of the pickled form of the object given in input.
333 obj (picklable type) : object to be proxified
334 pickleObjInit (bytes) : Optional. Original pickled form of the object to be proxified, if already computed. If not, this method generates it
338 BigObjectOnDiskBase: proxy instance
340 pickleObj = pickleObjInit
341 if pickleObj is None:
342 pickleObj = pickle.dumps( obj , pickle.HIGHEST_PROTOCOL )
343 fileName = GetBigObjectFileName()
# the "if visitor:" guard (~line 344) is missing from this excerpt.
345 visitor.setHDDMem( len(pickleObj) )
346 visitor.setFileName(fileName)
# Choose the proxy flavor matching the container type of obj.
347 if isinstance( obj, list):
348 proxyObj = BigObjectOnDiskList( len(obj), fileName, pickleObj )
349 elif isinstance( obj, tuple):
350 proxyObj = BigObjectOnDiskTuple( len(obj), fileName , pickleObj )
# NOTE(review): the else: header and "return proxyObj" (~351, 353-354) are
# missing from this excerpt.
352 proxyObj = BigObjectOnDisk( fileName , pickleObj )
355 def SpoolPickleObject( obj, visitor = None ):
# Pickle obj and, when the payload exceeds the big-object threshold, replace
# it by a pickled disk proxy. NOTE(review): the return statements of both
# branches (~360-361, 364) are missing from this excerpt.
357 with InOutputObjVisitorCM(visitor) as v:
358 pickleObjInit = pickle.dumps( obj , pickle.HIGHEST_PROTOCOL )
359 if not ActivateProxyMecanismOrNot( len(pickleObjInit) ):
362 proxyObj = ProxyfyPickeled( obj, pickleObjInit, v.visitor() )
363 pickleProxy = pickle.dumps( proxyObj , pickle.HIGHEST_PROTOCOL )
366 from SALOME_ContainerHelper import InOutputObjVisitorCM, InOutputObjVisitor
368 def UnProxyObjectSimple( obj, visitor = None ):
370 Method to be called in Remote mode. Alters the obj _status attribute.
371 Because the slave process does not participate in the reference counting
375 visitor (InOutputObjVisitor): A visitor to keep track of amount of memory on chip and those on HDD
378 with InOutputObjVisitorCM(visitor) as v:
379 logging.debug( "UnProxyObjectSimple {}".format(type(obj)) )
380 if isinstance(obj,BigObjectOnDiskBase):
# NOTE(review): the proxy branch body (~381-382, presumably doNotTouchFile
# then get) is missing from this excerpt.
383 elif isinstance( obj, list):
# recursive un-proxification of list elements (loop header line missing).
386 retObj.append( UnProxyObjectSimple(elt,v.visitor()) )
391 def UnProxyObjectSimpleLocal( obj ):
393 Method to be called in Local mode. Does not alter the PyObj counter
395 if isinstance(obj,BigObjectOnDiskBase):
# NOTE(review): both branch bodies are partially missing from this excerpt
# (proxy fetch ~396, loop header ~398-399, returns ~401-403).
397 elif isinstance( obj, list):
400 retObj.append( UnProxyObjectSimpleLocal(elt) )
# NOTE(review): the enclosing class header (~line 405, presumably
# "class FileDeleter") is missing from this excerpt; the methods below are
# its __init__, a filename accessor, and destructor-time cleanup.
406 def __init__(self, fileName):
407 self._filename = fileName
410 return self._filename
# best-effort removal of the tracked file:
413 if os.path.exists( self._filename ):
414 os.unlink( self._filename )
# Value object bundling the monitoring script file, its output file and the
# pid of the monitoring subprocess. NOTE(review): the @property decorators
# (~422, 430) and the pid handling lines are missing from this excerpt.
416 class MonitoringInfo:
417 def __init__(self, pyFileName, outFileName, pid):
418 self._py_file_name = pyFileName
419 self._out_file_name = outFileName
# the self._pid assignment (~line 420) is missing from this excerpt.
423 def pyFileName(self):
424 return self._py_file_name
431 def outFileName(self):
432 return self._out_file_name
434 def LaunchTimeCPUMonitoring( intervalInMs ):
436 Launch a subprocess monitoring self process.
437 This monitoring subprocess is a python process launching every intervalInMs ms an evaluation of
438 CPU usage and RSS memory.
439 Communication between subprocess and self is done by file.
442 def BuildPythonFileForCPUPercent( intervalInMs ):
# Reserve a unique .py file name, derive the matching .txt output name,
# then write a small psutil-based sampling script into it.
445 with tempfile.NamedTemporaryFile(prefix="htop_",suffix=".py") as f:
447 tempOutFile = "{}.txt".format( os.path.splitext( tempPyFile )[0] )
# NOTE: the text written below is the generated monitoring script; {{}}
# escapes a literal {} for str.format — do not edit its placeholders.
449 with open(tempPyFile,"w") as f:
450 f.write("""import psutil
452 process = psutil.Process( pid )
454 with open("{}","a") as f:
456 f.write( "{{}}\\n".format( str( process.cpu_percent() ) ) )
457 f.write( "{{}}\\n".format( str( process.memory_info().rss ) ) )
459 time.sleep( {} / 1000.0 )
460 """.format(pid, tempOutFile, intervalInMs))
461 return FileDeleter(tempPyFile), FileDeleter(tempOutFile)
462 pyFileName, outFileName = BuildPythonFileForCPUPercent( intervalInMs )
463 pid = KernelBasis.LaunchMonitoring(pyFileName.filename)
464 return MonitoringInfo(pyFileName, outFileName, pid)
466 def StopMonitoring( monitoringInfo ):
468 Kill monitoring subprocess.
472 monitoringInfo (MonitoringInfo): info returned by LaunchMonitoring
475 KernelBasis.StopMonitoring(monitoringInfo.pid)
477 def ReadCPUMemInfo( monitoringInfo ):
479 Retrieve data of monitoring.
483 monitoringInfo (MonitoringInfo): info returned by LaunchMonitoring
487 list<float,str> : list of pairs. First param of pair is CPU usage. Second param of pair is rss memory usage
# The output file interleaves samples: even indices are CPU percentages,
# odd indices RSS bytes. NOTE(review): the "cpu = ret[::2]" line (~491) is
# missing from this excerpt but is required by the zip below.
490 ret = KernelBasis.ReadFloatsInFile( monitoringInfo.outFileName.filename )
492 mem_rss = [ int(elt) for elt in ret[1::2]]
493 return [(a,b) for a,b in zip(cpu,mem_rss)]
# Client-side helper pulling a bytes payload from a SenderByte servant,
# splitting transfers above CHUNK_SIZE to bound the memory peak.
495 class SeqByteReceiver:
496 # 2GB limit to trigger split into chunks
497 CHUNK_SIZE = 2000000000
498 def __init__(self,sender):
# NOTE(review): __init__'s body (~499-500) and the data() method header
# (~502-503) are missing from this excerpt; the UnRegister below releases
# the remote sender servant.
501 self._obj.UnRegister()
504 size = self._obj.getSize()
505 if size <= SeqByteReceiver.CHUNK_SIZE:
506 return self.fetchOneShot( size )
508 return self.fetchByChunks( size )
509 def fetchOneShot(self,size):
# single sendPart call covering the whole payload
510 return self._obj.sendPart(0,size)
511 def fetchByChunks(self,size):
513 To avoid memory peak parts over 2GB are sent using EFF_CHUNK_SIZE size.
515 data_for_split_case = bytes(0)
516 EFF_CHUNK_SIZE = SeqByteReceiver.CHUNK_SIZE // 8
517 iStart = 0 ; iEnd = EFF_CHUNK_SIZE
# iterate [iStart, iEnd) windows until the whole payload is assembled
518 while iStart!=iEnd and iEnd <= size:
519 part = self._obj.sendPart(iStart,iEnd)
520 data_for_split_case = bytes(0).join( [data_for_split_case,part] )
521 iStart = iEnd; iEnd = min(iStart + EFF_CHUNK_SIZE,size)
522 return data_for_split_case
# Per-execution log accumulator; pushes the collected ScriptExecInfo to the
# centralized (master) log handle when the session finishes.
524 class LogOfCurrentExecutionSession:
525 def __init__(self, handleToCentralizedInst):
526 self._remote_handle = handleToCentralizedInst
527 self._current_instance = ScriptExecInfo()
529 def addInfoOnLevel2(self, key, value):
# keys are ScriptExecInfo attribute names (e.g. "inputMem", "startInputTime")
530 setattr(self._current_instance,key,value)
532 def finalizeAndPushToMaster(self):
533 self._remote_handle.assign( pickle.dumps( self._current_instance ) )
# CORBA servant executing a pre-compiled Python script in a persistent
# context, with per-session logging and CPU/memory monitoring.
# NOTE(review): numbered excerpt — the try:/except: scaffolding around most
# method bodies is on missing lines; restore from upstream before editing.
535 class PyScriptNode_i (Engines__POA.PyScriptNode,Generic):
536 """The implementation of the PyScriptNode CORBA IDL that executes a script"""
537 def __init__(self, nodeName,code,poa,my_container,logscript):
538 """Initialize the node : compilation in the local context"""
539 Generic.__init__(self,poa)
540 self.nodeName=nodeName
# keep both the Python-side container wrapper and the raw CORBA container
542 self.my_container_py = my_container
543 self.my_container=my_container._container
# Register the script with linecache so tracebacks can show source lines.
544 linecache.cache[nodeName]=0,None,code.split('\n'),nodeName
545 self.ccode=compile(code,nodeName,'exec')
547 self.context[MY_CONTAINER_ENTRY_IN_GLBS] = self.my_container
548 self._log_script = logscript
549 self._current_execution_session = None
550 sys.stdout.flush() ; sys.stderr.flush() # flush to correctly capture log per execution session
# destructor-time cleanup (its def header is on a missing line):
553 # force removal of the variables held in self.context; unclear why it is not done by default
554 self.removeAllVarsInContext()
557 def getContainer(self):
558 return self.my_container
# Inject a client-supplied variable (pickled bytes) into the context.
# SECURITY NOTE: pickle.loads on data received over CORBA — trusted peers only.
566 def defineNewCustomVar(self,varName,valueOfVar):
567 self.context[varName] = pickle.loads(valueOfVar)
570 def executeAnotherPieceOfCode(self,code):
571 """Called for initialization of container lodging self."""
573 ccode=compile(code,self.nodeName,'exec')
574 exec(ccode, self.context)
576 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"","PyScriptNode (%s) : code to be executed \"%s\"" %(self.nodeName,code),0))
# Replace the node's compiled script with freshly compiled codeStr.
578 def assignNewCompiledCode(self,codeStr):
581 self.ccode=compile(codeStr,self.nodeName,'exec')
583 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"","PyScriptNode.assignNewCompiledCode (%s) : code to be executed \"%s\"" %(self.nodeName,codeStr),0))
585 def executeSimple(self, key, val):
587 Same as execute method except that no pickling mechanism is implied here. No output is expected
# expose (key, val) pairs to the script as the "env" variable
590 self.context.update({ "env" : [(k,v) for k,v in zip(key,val)]})
591 exec(self.ccode,self.context)
593 exc_typ,exc_val,exc_fr=sys.exc_info()
594 l=traceback.format_exception(exc_typ,exc_val,exc_fr)
595 print("".join(l)) ; sys.stdout.flush() # print error also in logs of remote container
596 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"".join(l),"PyScriptNode: %s" % (self.nodeName),0))
598 def execute(self,outargsname,argsin):
599 """Execute the script stored in attribute ccode with pickled args (argsin)"""
601 argsname,kws=pickle.loads(argsin)
602 self.context.update(kws)
603 exec(self.ccode, self.context)
# collect the requested output variables from the execution context
605 for arg in outargsname:
606 if arg not in self.context:
607 raise KeyError("There is no variable %s in context" % arg)
608 argsout.append(self.context[arg])
609 argsout=pickle.dumps(tuple(argsout),-1)
612 exc_typ,exc_val,exc_fr=sys.exc_info()
613 l=traceback.format_exception(exc_typ,exc_val,exc_fr)
614 print("".join(l)) ; sys.stdout.flush() # print error also in logs of remote container
615 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"".join(l),"PyScriptNode: %s, outargsname: %s" % (self.nodeName,outargsname),0))
617 def executeFirst(self,argsin):
618 """ Same as the first part of self.execute to reduce memory peak."""
620 self.beginOfCurrentExecutionSession()
622 self.addTimeInfoOnLevel2("startInputTime")
623 if True: # to force call of SeqByteReceiver's destructor
624 argsInPy = SeqByteReceiver( argsin )
625 data = argsInPy.data()
626 self.addInfoOnLevel2("inputMem",len(data))
627 _,kws=pickle.loads(data)
628 vis = InOutputObjVisitor()
# loop over kws (its for header is on a missing line):
630 # fetch real data if necessary
631 kws[elt] = UnProxyObjectSimple( kws[elt],vis)
632 self.addInfoOnLevel2("inputHDDMem",vis)
633 self.context.update(kws)
634 self.addTimeInfoOnLevel2("endInputTime")
636 exc_typ,exc_val,exc_fr=sys.exc_info()
637 l=traceback.format_exception(exc_typ,exc_val,exc_fr)
638 print("".join(l)) ; sys.stdout.flush() # print error also in logs of remote container
639 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"".join(l),"PyScriptNode:First %s" % (self.nodeName),0))
641 def executeSecond(self,outargsname):
642 """ Same as the second part of self.execute to reduce memory peak."""
645 self.addTimeInfoOnLevel2("startExecTime")
# sample CPU/RSS at the container's monitoring resolution while the
# script runs
647 self.addInfoOnLevel2("measureTimeResolution",self.my_container_py.monitoringtimeresms())
648 monitoringParams = LaunchTimeCPUMonitoring( self.my_container_py.monitoringtimeresms() )
649 exec(self.ccode, self.context)
650 StopMonitoring( monitoringParams )
651 cpumeminfo = ReadCPUMemInfo( monitoringParams )
653 self.addInfoOnLevel2("CPUMemDuringExec",cpumeminfo)
655 self.addTimeInfoOnLevel2("endExecTime")
656 self.addTimeInfoOnLevel2("startOutputTime")
658 for arg in outargsname:
659 if arg not in self.context:
660 raise KeyError("There is no variable %s in context" % arg)
661 argsout.append(self.context[arg])
# wrap each output in a SenderByte servant so the client can pull it
# lazily (chunked) instead of receiving one huge CORBA reply
664 vis = InOutputObjVisitor()
666 # the proxy mechanism is caught here
667 argPickle = SpoolPickleObject( arg, vis )
668 retArg = SenderByte_i( self.poa,argPickle )
669 id_o = self.poa.activate_object(retArg)
670 retObj = self.poa.id_to_reference(id_o)
671 ret.append( retObj._narrow( SALOME.SenderByte ) )
672 outputMem += len(argPickle)
673 self.addInfoOnLevel2("outputMem",outputMem)
674 self.addInfoOnLevel2("outputHDDMem",vis)
675 self.addTimeInfoOnLevel2("endOutputTime")
676 self.endOfCurrentExecutionSession()
679 exc_typ,exc_val,exc_fr=sys.exc_info()
680 l=traceback.format_exception(exc_typ,exc_val,exc_fr)
681 print("".join(l)) ; sys.stdout.flush() # print error also in logs of remote container
682 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"".join(l),"PyScriptNode:Second %s, outargsname: %s" % (self.nodeName,outargsname),0))
# List user variables in the context (dunder names and the container
# entry are excluded).
684 def listAllVarsInContext(self):
686 pat = re.compile("^__([a-z]+)__$")
687 return [elt for elt in self.context if not pat.match(elt) and elt != MY_CONTAINER_ENTRY_IN_GLBS]
689 def removeAllVarsInContext(self):
690 for elt in self.listAllVarsInContext():
691 del self.context[elt]
693 def getValueOfVarInContext(self,varName):
695 return pickle.dumps(self.context[varName],-1)
697 exc_typ,exc_val,exc_fr=sys.exc_info()
698 l=traceback.format_exception(exc_typ,exc_val,exc_fr)
699 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"".join(l),"PyScriptNode: %s" %self.nodeName,0))
# Assign into element 0 of a container-like variable already in context.
702 def assignVarInContext(self, varName, value):
704 self.context[varName][0] = pickle.loads(value)
706 exc_typ,exc_val,exc_fr=sys.exc_info()
707 l=traceback.format_exception(exc_typ,exc_val,exc_fr)
708 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"".join(l),"PyScriptNode: %s" %self.nodeName,0))
# Call methodName on element 0 of varName with pickled args; returns the
# pickled result.
711 def callMethodOnVarInContext(self, varName, methodName, args):
713 return pickle.dumps( getattr(self.context[varName][0],methodName)(*pickle.loads(args)),-1 )
715 exc_typ,exc_val,exc_fr=sys.exc_info()
716 l=traceback.format_exception(exc_typ,exc_val,exc_fr)
717 raise SALOME.SALOME_Exception(SALOME.ExceptionStruct(SALOME.BAD_PARAM,"".join(l),"PyScriptNode: %s" %self.nodeName,0))
# Session bookkeeping helpers delegating to LogOfCurrentExecutionSession:
720 def beginOfCurrentExecutionSession(self):
721 self._current_execution_session = LogOfCurrentExecutionSession( self._log_script.addExecutionSession() )
723 def endOfCurrentExecutionSession(self):
724 self._current_execution_session.finalizeAndPushToMaster()
725 self._current_execution_session = None
727 def addInfoOnLevel2(self, key, value):
728 self._current_execution_session.addInfoOnLevel2(key, value)
730 def addTimeInfoOnLevel2(self, key):
731 from datetime import datetime
732 self._current_execution_session.addInfoOnLevel2(key,datetime.now())