Merge 'agy/br810_1' branch.
[tools/medcoupling.git] src/MEDLoader/Swig/CaseReader.py
index 7dc270e575a9f001027b8785af38cf2c2fc5eddb..987869ebbd25967a1c4615d5e10eafadaedf29b7 100644
@@ -1,10 +1,10 @@
 #  -*- coding: iso-8859-1 -*-
-# Copyright (C) 2007-2013  CEA/DEN, EDF R&D
+# Copyright (C) 2007-2016  CEA/DEN, EDF R&D
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
 # License as published by the Free Software Foundation; either
-# version 2.1 of the License.
+# version 2.1 of the License, or (at your option) any later version.
 #
 # This library is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
@@ -17,7 +17,7 @@
 #
 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
 #
-# Author Anthony GEAY (CEA/DEN/DM2S/STMF/LGLS)
+# Author Anthony GEAY (CEA/DEN/DM2S/STMF/LGLS)
 
 # http://www-vis.lbl.gov/NERSC/Software/ensight/doc/OnlineHelp/UM-C11.pdf
 import numpy as np
@@ -61,7 +61,7 @@ class CaseReader(CaseIO):
             pass
         c=DataArrayInt(len(cells),nbNodesPerCell+1) ; c[:,0]=ct ; c[:,1:]=c2-1 ; c.rearrange(1)
         m.setConnectivity(c,cI,True)
-        m.checkCoherency2()
+        m.checkConsistency()
         return m
 
     def __traduceMeshForPolyhed(self,name,coords,arr0,arr1,arr2):
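
Note: the mechanical substitutions in this commit appear to track the MEDCoupling method renames from the 8.x API cleanup: checkCoherency2 -> checkConsistency, checkCoherency -> checkConsistencyLight, deepCpy -> deepCopy and computeOffsets2 -> computeOffsetsFull. A minimal compatibility sketch, assuming code that must run against both spellings (the helper name checkMeshConsistency is hypothetical and not part of this commit):

    def checkMeshConsistency(m):
        # Prefer the post-rename spelling; fall back to the legacy one on older MEDCoupling builds.
        if hasattr(m, "checkConsistency"):   # MEDCoupling >= 8.x
            m.checkConsistency()
        else:                                # pre-rename releases
            m.checkCoherency2()
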
@@ -72,13 +72,13 @@ class CaseReader(CaseIO):
         m.setCoords(coo)
         #
         arr2=arr2[:]-1
-        arr0mc0=DataArrayInt(arr0) ; arr0mc0.computeOffsets2()
-        arr0mc1=DataArrayInt(arr0).deepCpy()
-        arr0mc2=DataArrayInt(len(arr0),2) ; arr0mc2[:,0]=DataArrayInt(arr0)-1 ; arr0mc2[:,1]=1 ; arr0mc2.rearrange(1) ; arr0mc2.computeOffsets2()
+        arr0mc0=DataArrayInt(arr0) ; arr0mc0.computeOffsetsFull()
+        arr0mc1=DataArrayInt(arr0).deepCopy()
+        arr0mc2=DataArrayInt(len(arr0),2) ; arr0mc2[:,0]=DataArrayInt(arr0)-1 ; arr0mc2[:,1]=1 ; arr0mc2.rearrange(1) ; arr0mc2.computeOffsetsFull()
         arr0mc3=DataArrayInt.Range(0,2*len(arr0),2).buildExplicitArrByRanges(arr0mc2)
-        arr1mc0=DataArrayInt(arr1) ; arr1mc0.computeOffsets2()
+        arr1mc0=DataArrayInt(arr1) ; arr1mc0.computeOffsetsFull()
         arr1mc1=arr1mc0[arr0mc0] ; arr1mc1[1:]+=arr0mc0[1:] 
-        arr1mc2=DataArrayInt(arr1).deepCpy() ; arr1mc2+=1 ; arr1mc2.computeOffsets2()
+        arr1mc2=DataArrayInt(arr1).deepCopy() ; arr1mc2+=1 ; arr1mc2.computeOffsetsFull()
         arr2mc0=(arr1mc2[1:])[arr0mc3]
         #
         c=DataArrayInt(arr1.size+arr2.size)
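
Note: computeOffsetsFull (formerly computeOffsets2) is what turns the per-cell count arrays above into the 0-based index arrays MEDCoupling uses for indirect connectivity: an array of n counts is rewritten, in place, as the n+1 running offsets starting at 0. A small standalone illustration with plain numpy, using made-up variable names:

    import numpy as np

    counts = np.array([4, 4, 5])                        # e.g. number of faces per polyhedron
    offsets = np.concatenate(([0], np.cumsum(counts)))  # [0, 4, 8, 13]: start of each cell plus the total
    # DataArrayInt([4, 4, 5]).computeOffsetsFull() rewrites the array in place to the same values.
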
@@ -88,7 +88,7 @@ class CaseReader(CaseIO):
         c[a]=DataArrayInt(arr2)
         #
         m.setConnectivity(c,arr1mc1,True)
-        m.checkCoherency2()
+        m.checkConsistency()
         return m
 
     def __traduceMeshForPolygon(self,name,coords,arr0,arr1):
@@ -98,14 +98,14 @@ class CaseReader(CaseIO):
         m=MEDCouplingUMesh(name,2)
         m.setCoords(coo)
         #
-        arr0_0=DataArrayInt(arr0+1) ; arr0_0.computeOffsets2()
-        arr0_1=DataArrayInt(len(arr0),2) ; arr0_1[:,1]=DataArrayInt(arr0) ; arr0_1[:,0]=1 ; arr0_1.rearrange(1) ; arr0_1.computeOffsets2()
+        arr0_0=DataArrayInt(arr0+1) ; arr0_0.computeOffsetsFull()
+        arr0_1=DataArrayInt(len(arr0),2) ; arr0_1[:,1]=DataArrayInt(arr0) ; arr0_1[:,0]=1 ; arr0_1.rearrange(1) ; arr0_1.computeOffsetsFull()
         arr0_2=DataArrayInt.Range(1,2*len(arr0),2).buildExplicitArrByRanges(arr0_1)
         c=DataArrayInt(len(arr0)+len(arr1)) ; c[:]=0 ; c[arr0_0[:-1]]=NORM_POLYGON
         c[arr0_2]=DataArrayInt(arr1-1)
         #
         m.setConnectivity(c,arr0_0,True)
-        m.checkCoherency2()
+        m.checkConsistency()
         return m
 
     def __convertGeo2MED(self,geoFileName):
@@ -289,7 +289,7 @@ class CaseReader(CaseIO):
                 pass
             f=MEDCouplingFieldDouble(self.discSpatial2[discr],ONE_TIME) ; f.setName("%s_%s"%(fieldName,mcmeshes[meshId].getName()))
             f.setMesh(mcmeshes[meshId]) ; f.setArray(vals2) ; f.setTime(float(it),it,-1)
-            f.checkCoherency()
+            f.checkConsistencyLight()
             mlfields[locId+meshId].appendFieldNoProfileSBT(f)
             pass
 
@@ -342,7 +342,7 @@ class CaseReader(CaseIO):
                 pass
             f=MEDCouplingFieldDouble(self.discSpatial2[discr],ONE_TIME) ; f.setName("%s_%s"%(fieldName,mcmeshes[nbTurn].getName()))
             f.setMesh(mcmeshes[nbTurn]) ; f.setArray(vals2) ; f.setTime(float(it),it,-1)
-            f.checkCoherency()
+            f.checkConsistencyLight()
             mlfields[locId+nbTurn].appendFieldNoProfileSBT(f)
             nbTurn+=1
             pass
@@ -357,36 +357,38 @@ class CaseReader(CaseIO):
             raise Exception("Error with file %s"%(fname))
         geoName=re.match("model:([\W]*)([\w\.]+)",lines[ind+1]).group(2)
         m1,m2,typeOfFile=self.__convertGeo2MED(geoName)
-        fieldsInfo=[]
-        ind=lines.index("VARIABLE\n")
-        end=len(lines)-1
-        if "TIME\n" in lines:
-            end=lines.index("TIME\n")
-            pass
-        for i in xrange(ind+1,end):
-            m=re.match("^([\w]+)[\s]+\per[\s]+([\w]+)[\s]*\:[\s]*([\w]+)[\s]+([\S]+)$",lines[i])
-            if m:
-                if m.groups()[0]=="constant":
-                    continue
-                spatialDisc=m.groups()[1] ; fieldName=m.groups()[2] ; nbOfCompo=self.dictCompo2[m.groups()[0]] ; fieldFileName=m.groups()[3]
-                fieldsInfo.append((fieldName,spatialDisc,nbOfCompo,fieldFileName))
+        fieldsInfo=[] ; nbOfTimeSteps=0
+        if "VARIABLE\n" in lines:
+            ind=lines.index("VARIABLE\n")
+            end=len(lines)-1
+            if "TIME\n" in lines:
+                end=lines.index("TIME\n")
                 pass
+            for i in xrange(ind+1,end):
+                m=re.match("^([\w]+)[\s]+\per[\s]+([\w]+)[\s]*\:[\s]*([\w]+)[\s]+([\S]+)$",lines[i])
+                if m:
+                    if m.groups()[0]=="constant":
+                        continue
+                    spatialDisc=m.groups()[1] ; fieldName=m.groups()[2] ; nbOfCompo=self.dictCompo2[m.groups()[0]] ; fieldFileName=m.groups()[3]
+                    fieldsInfo.append((fieldName,spatialDisc,nbOfCompo,fieldFileName))
+                    pass
+                pass
+            
+            expr=re.compile("number[\s]+of[\s]+steps[\s]*\:[\s]*([\d]+)")
+            tmp=filter(expr.search,lines)
+            if len(tmp)!=0:
+                nbOfTimeSteps=int(expr.search(filter(expr.search,lines)[0]).group(1))
+                expr=re.compile("filename[\s]+start[\s]+number[\s]*\:[\s]*([\d]+)")
+                startIt=int(expr.search(filter(expr.search,lines)[0]).group(1))
+                expr=re.compile("filename[\s]+increment[\s]*\:[\s]*([\d]+)")
+                incrIt=int(expr.search(filter(expr.search,lines)[0]).group(1))
+            else:
+                nbOfTimeSteps=1
+                startIt=0
+                incrIt=1
+                pass
+            curIt=startIt
             pass
-        
-        expr=re.compile("number[\s]+of[\s]+steps[\s]*\:[\s]*([\d]+)")
-        tmp=filter(expr.search,lines)
-        if len(tmp)!=0:
-            nbOfTimeSteps=int(expr.search(filter(expr.search,lines)[0]).group(1))
-            expr=re.compile("filename[\s]+start[\s]+number[\s]*\:[\s]*([\d]+)")
-            startIt=int(expr.search(filter(expr.search,lines)[0]).group(1))
-            expr=re.compile("filename[\s]+increment[\s]*\:[\s]*([\d]+)")
-            incrIt=int(expr.search(filter(expr.search,lines)[0]).group(1))
-        else:
-            nbOfTimeSteps=1
-            startIt=0
-            incrIt=1
-            pass
-        curIt=startIt
         mlfields=MEDFileFields()
         mlfields.resize(len(fieldsInfo)*len(m1))
         i=0
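
Note: the reindentation in the last hunk is not cosmetic; the VARIABLE parsing is now wrapped in a membership test, so a .case file without a VARIABLE section leaves fieldsInfo empty instead of failing on lines.index("VARIABLE\n"), which raises ValueError when the marker is absent. A small standalone sketch of that guarded-lookup pattern (find_section and the sample lines are illustrative, not part of CaseReader):

    def find_section(lines, marker):
        # list.index raises ValueError when the marker is missing, so test membership first.
        return lines.index(marker) if marker in lines else None

    sample = ["FORMAT\n", "type: ensight gold\n", "GEOMETRY\n", "model: mesh.geo\n"]
    assert find_section(sample, "VARIABLE\n") is None   # no VARIABLE block: skip field parsing, no exception
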