Update to ParaView 5.4
[modules/paravis.git] / src / PV_SWIG / presentations.py
index cb2c5fd3260509cc6837bd8319695f8cfbc5a7bc..000216512bb723eaf357579d2288d3654713f2ee 100644 (file)
@@ -1,9 +1,9 @@
-# Copyright (C) 2010-2013  CEA/DEN, EDF R&D
+# Copyright (C) 2010-2016  CEA/DEN, EDF R&D
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
 # License as published by the Free Software Foundation; either
-# version 2.1 of the License.
+# version 2.1 of the License, or (at your option) any later version.
 #
 # This library is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
@@ -32,12 +32,15 @@ import warnings
 from math import sqrt, sin, cos, radians
 from string import upper
 
-import pvsimple as pv
+# Do not use pv as a short name.
+# It is a name of function from numpy and may be redefined implicitly by 'from numpy import *' call.
+# import pvsimple as pv
+import pvsimple as pvs
 #try:
 #    # TODO(MZN): to be removed (issue with Point Sprite texture)
 #    #import paravisSM as sm
 #except:
-#    import paraview.simple as pv
+#    import paraview.simple as pvs
 #    import paraview.servermanager as sm
 
 
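Note on the hunk above: the alias switch from pv to pvs avoids a clash with legacy NumPy, whose wildcard export included a present-value function named pv that a later 'from numpy import *' would silently bind over the module alias. A minimal sketch of the failure mode (pvsimple is the SALOME module imported above; the shadowing applies to NumPy releases that still shipped numpy.pv):

    import pvsimple as pv       # old alias
    from numpy import *         # legacy NumPy exports a financial function named 'pv'
    # 'pv' now refers to numpy's function, so pv.GetRenderView() raises AttributeError

    import pvsimple as pvs      # new alias, unaffected by the wildcard import
    view = pvs.GetRenderView()
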
@@ -50,6 +53,7 @@ GAP_COEFFICIENT = 0.0001
 
 # Globals
 _current_bar = None
+_med_field_sep = '@@][@@'
 
 
 # Enumerations
@@ -94,11 +98,11 @@ class EntityType:
     NODE = 0
     CELL = 1
 
-    _type2name = {NODE: 'OnPoint',
-                  CELL: 'OnCell'}
+    _type2name = {NODE: 'P1',
+                  CELL: 'P0'}
 
-    _name2type = {'OnPoint': NODE,
-                  'OnCell': CELL}
+    _name2type = {'P1': NODE,
+                  'P0': CELL}
 
     _type2pvtype = {NODE: 'POINT_DATA',
                     CELL: 'CELL_DATA'}
@@ -168,6 +172,42 @@ class GaussType:
 
 
 # Auxiliary functions
+
+def get_field_mesh_name(full_field_name):
+    """Return mesh name of the field by its full name."""
+    aList = full_field_name.split('/')
+    if len(aList) >= 2 :
+        field_name = full_field_name.split('/')[1]
+        return field_name
+
+
+def get_field_entity(full_field_name):
+    """Return entity type of the field by its full name."""
+    aList = full_field_name.split(_med_field_sep)
+    if len(aList) == 2 :
+        entity_name = full_field_name.split(_med_field_sep)[-1]
+        entity = EntityType.get_type(entity_name)
+        return entity
+
+
+def get_field_short_name(full_field_name):
+    """Return short name of the field by its full name."""
+    aList = full_field_name.split('/')
+    if len(aList) == 4 :
+        short_name_with_type = full_field_name.split('/')[-1]
+        short_name = short_name_with_type.split(_med_field_sep)[0]
+        return short_name
+
+
+def find_mesh_full_name(proxy, short_mesh_name):
+    """Return full mesh path by short mesh name, if found"""
+    proxy.UpdatePipeline()
+    all_mesh_names = get_mesh_full_names(proxy)
+    for name in all_mesh_names:
+        if short_mesh_name == get_field_short_name(name):
+            return name
+
+
 def process_prs_for_test(prs, view, picture_name, show_bar=True):
     """Show presentation and record snapshot image.
 
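Note on the helpers added above: they decode entries of the MED reader's FieldsTreeInfo property, where a full field name is a '/'-separated path whose second component is the mesh name and whose last component carries the field name with the discretization appended after the '@@][@@' separator. A sketch of how the helpers combine on a hypothetical entry (the leading tokens are illustrative, not taken from the patch):

    full_name = 'TS0/Mesh_1/ComSup0/Temperature@@][@@P1'  # hypothetical FieldsTreeInfo entry

    get_field_mesh_name(full_name)   # -> 'Mesh_1'        (second '/'-separated token)
    get_field_short_name(full_name)  # -> 'Temperature'   (last token, separator suffix stripped)
    get_field_entity(full_name)      # -> EntityType.NODE ('P1' maps to nodes, 'P0' to cells)
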
@@ -182,6 +222,7 @@ def process_prs_for_test(prs, view, picture_name, show_bar=True):
     display_only(prs, view)
 
     # Show scalar bar
+    global _current_bar
     if show_bar and _current_bar:
         _current_bar.Visibility = 1
 
@@ -195,7 +236,8 @@ def process_prs_for_test(prs, view, picture_name, show_bar=True):
         os.makedirs(pic_dir)
 
     # Save picture
-    pv.WriteImage(file_name, view=view, Magnification=1)
+    print "Write image:", file_name
+    pvs.WriteImage(file_name, view=view, Magnification=1)
 
 
 def reset_view(view=None):
@@ -206,7 +248,7 @@ def reset_view(view=None):
 
     """
     if not view:
-        view = pv.GetRenderView()
+        view = pvs.GetRenderView()
 
     # Camera preferences
     view.CameraFocalPoint = [0.0, 0.0, 0.0]
@@ -221,13 +263,13 @@ def reset_view(view=None):
     view.CameraParallelProjection = 1
 
     view.ResetCamera()
-    pv.Render(view=view)
+    pvs.Render(view=view)
 
 
 def hide_all(view, to_remove=False):
     """Hide all representations in the view."""
     if not view:
-        view = pv.GetRenderView()
+        view = pvs.GetRenderView()
 
     rep_list = view.Representations
     for rep in rep_list:
@@ -235,20 +277,24 @@ def hide_all(view, to_remove=False):
             rep.Visibility = 0
         if to_remove:
             view.Representations.remove(rep)
-    pv.Render(view=view)
+    pvs.Render(view=view)
 
 
 def display_only(prs, view=None):
     """Display only the given presentation in the view."""
-    hide_all(view)
-    if (hasattr(prs, 'Visibility') and prs.Visibility != 1):
-        prs.Visibility = 1
-    pv.Render(view=view)
+    if not view:
+        view = pvs.GetRenderView()
+
+    rep_list = view.Representations
+    for rep in rep_list:
+        if hasattr(rep, 'Visibility'):
+            rep.Visibility = (rep == prs)
+    pvs.Render(view=view)
 
 
 def set_visible_lines(xy_prs, lines):
     """Set visible only the given lines for XYChartRepresentation."""
-    sv = xy_prs.GetProperty("SeriesVisibilityInfo").GetData()
+    sv = xy_prs.GetProperty("SeriesVisibility").GetData()
     visible = '0'
 
     for i in xrange(0, len(sv)):
@@ -325,6 +371,7 @@ def get_data_range(proxy, entity, field_name, vector_mode='Magnitude',
       Data range as [min, max]
 
     """
+    proxy.UpdatePipeline()
     entity_data_info = None
     field_data = proxy.GetFieldDataInformation()
 
@@ -357,6 +404,7 @@ def get_data_range(proxy, entity, field_name, vector_mode='Magnitude',
 
 def get_bounds(proxy):
     """Get bounds of the proxy in 3D."""
+    proxy.UpdatePipeline()
     dataInfo = proxy.GetDataInformation()
     bounds_info = dataInfo.GetBounds()
     return bounds_info
@@ -364,24 +412,28 @@ def get_bounds(proxy):
 
 def get_x_range(proxy):
     """Get X range of the proxy bounds in 3D."""
+    proxy.UpdatePipeline()
     bounds_info = get_bounds(proxy)
     return bounds_info[0:2]
 
 
 def get_y_range(proxy):
     """Get Y range of the proxy bounds in 3D."""
+    proxy.UpdatePipeline()
     bounds_info = get_bounds(proxy)
     return bounds_info[2:4]
 
 
 def get_z_range(proxy):
     """Get Z range of the proxy bounds in 3D."""
+    proxy.UpdatePipeline()
     bounds_info = get_bounds(proxy)
     return bounds_info[4:6]
 
 
 def is_planar_input(proxy):
     """Check if the given input is planar."""
+    proxy.UpdatePipeline()
     bounds_info = get_bounds(proxy)
 
     if (abs(bounds_info[0] - bounds_info[1]) <= FLT_MIN or
@@ -394,6 +446,7 @@ def is_planar_input(proxy):
 
 def is_data_on_cells(proxy, field_name):
     """Check the existence of a field on cells with the given name."""
+    proxy.UpdatePipeline()
     cell_data_info = proxy.GetCellDataInformation()
     return (field_name in cell_data_info.keys())
 
@@ -406,6 +459,7 @@ def is_empty(proxy):
       False: otherwise
 
     """
+    proxy.UpdatePipeline()
     data_info = proxy.GetDataInformation()
 
     nb_cells = data_info.GetNumberOfCells()
@@ -416,6 +470,7 @@ def is_empty(proxy):
 
 def get_orientation(proxy):
     """Get the optimum cutting plane orientation for Plot 3D."""
+    proxy.UpdatePipeline()
     orientation = Orientation.XY
 
     bounds = get_bounds(proxy)
@@ -587,14 +642,17 @@ def get_contours(scalar_range, nb_contours):
 
 def get_nb_components(proxy, entity, field_name):
     """Return number of components for the field."""
+    proxy.UpdatePipeline()
     entity_data_info = None
     field_data = proxy.GetFieldDataInformation()
 
     if field_name in field_data.keys():
         entity_data_info = field_data
     elif entity == EntityType.CELL:
+        select_cells_with_data(proxy, on_cells=[field_name])
         entity_data_info = proxy.GetCellDataInformation()
     elif entity == EntityType.NODE:
+        select_cells_with_data(proxy, on_points=[field_name])
         entity_data_info = proxy.GetPointDataInformation()
 
     nb_comp = None
@@ -640,6 +698,7 @@ def get_scale_factor(proxy):
 
 def get_default_scale(prs_type, proxy, entity, field_name):
     """Get default scale factor."""
+    proxy.UpdatePipeline()
     data_range = get_data_range(proxy, entity, field_name)
 
     if prs_type == PrsTypeEnum.DEFORMEDSHAPE:
@@ -667,12 +726,13 @@ def get_calc_magnitude(proxy, array_entity, array_name):
       the calculator object.
 
     """
+    proxy.UpdatePipeline()
     calculator = None
 
     # Transform vector array to scalar array if possible
     nb_components = get_nb_components(proxy, array_entity, array_name)
     if (nb_components > 1):
-        calculator = pv.Calculator(proxy)
+        calculator = pvs.Calculator(proxy)
         attribute_mode = "Point Data"
         if array_entity != EntityType.NODE:
             attribute_mode = "Cell Data"
@@ -701,11 +761,12 @@ def get_add_component_calc(proxy, array_entity, array_name):
       the calculator object.
 
     """
+    proxy.UpdatePipeline()
     calculator = None
 
     nb_components = get_nb_components(proxy, array_entity, array_name)
     if nb_components == 2:
-        calculator = pv.Calculator(proxy)
+        calculator = pvs.Calculator(proxy)
         attribute_mode = "Point Data"
         if array_entity != EntityType.NODE:
             attribute_mode = "Cell Data"
@@ -724,14 +785,13 @@ def select_all_cells(proxy):
     Used in creation of mesh/submesh presentation.
 
     """
-    ### Old API all_cell_types = proxy.CellTypes.Available
-    all_cell_types = proxy.Entity.Available
-    ### Old API proxy.CellTypes = all_cell_types
-    proxy.Entity = all_cell_types
     proxy.UpdatePipeline()
+    extractCT = pvs.ExtractCellType()
+    extractCT.AllGeoTypes = extractCT.GetProperty("GeoTypesInfo")[::2]
+    extractCT.UpdatePipelineInformation()
 
 
-def select_cells_with_data(proxy, on_points=None, on_cells=None):
+def select_cells_with_data(proxy, on_points=[], on_cells=[], on_gauss=[]):
     """Select cell types with data.
 
     Only cell types with data for the given fields will be selected.
@@ -739,6 +799,35 @@ def select_cells_with_data(proxy, on_points=None, on_cells=None):
     types with data for even one field (from available) will be selected.
 
     """
+    if not proxy.GetProperty("FieldsTreeInfo"):
+        return
+
+    proxy.UpdatePipeline()
+    if not hasattr(proxy, 'Entity'):
+        fields_info = proxy.GetProperty("FieldsTreeInfo")[::2]
+        arr_name_with_dis=[elt.split("/")[-1] for elt in fields_info]
+
+        fields = []
+        for name in on_gauss:
+            fields.append(name+_med_field_sep+'GAUSS')
+        for name in on_cells:
+            fields.append(name+_med_field_sep+'P0')
+        for name in on_points:
+            fields.append(name+_med_field_sep+'P1')
+
+        field_list = []
+        for name in fields:
+            if arr_name_with_dis.count(name) > 0:
+                index = arr_name_with_dis.index(name)
+                field_list.append(fields_info[index])
+
+        if field_list:
+            proxy.AllArrays = field_list
+            proxy.UpdatePipeline()
+        return len(field_list) != 0
+
+    # TODO: VTN. Looks like this code is out of date.
+
     #all_cell_types = proxy.CellTypes.Available
     all_cell_types = proxy.Entity.Available
     all_arrays = list(proxy.CellArrays.GetData())
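Note on the new branch above: for MED sources (no Entity property) select_cells_with_data no longer toggles cell types; it rebuilds each requested short name with the '@@][@@' separator and the expected discretization suffix (P1 for nodes, P0 for cells, GAUSS for Gauss points), keeps the matching FieldsTreeInfo entries and assigns them to proxy.AllArrays. A hedged usage sketch (the field names are illustrative):

    # keep only the arrays that actually carry the requested data;
    # for MED sources the call returns True when at least one entry matched
    select_cells_with_data(med_reader, on_points=['Temperature'], on_cells=['Pressure'])
    # internally this looks for 'Temperature@@][@@P1' and 'Pressure@@][@@P0'
    # among the names listed by proxy.GetProperty("FieldsTreeInfo")
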
@@ -780,63 +869,9 @@ def select_cells_with_data(proxy, on_points=None, on_cells=None):
     proxy.Entity = cell_types_on
     proxy.UpdatePipeline()
 
-
-def extract_groups_for_field(proxy, field_name, field_entity, force=False):
-    """Exctract only groups which have the field.
-
-    Arguments:
-      proxy: the pipeline object, containig data
-      field_name: the field name
-      field_entity: the field entity
-      force: if True - ExtractGroup object will be created in any case
-
-    Returns:
-      ExtractGroup object: if not all groups have the field or
-      the force argument is true
-      The initial proxy: if no groups had been filtered.
-
-    """
-    source = proxy
-
-    # Remember the state
-    initial_groups = list(proxy.Groups)
-
-    # Get data information for the field entity
-    entity_data_info = None
-    field_data = proxy.GetFieldDataInformation()
-
-    if field_name in field_data.keys():
-        entity_data_info = field_data
-    elif field_entity == EntityType.CELL:
-        entity_data_info = proxy.GetCellDataInformation()
-    elif field_entity == EntityType.NODE:
-        entity_data_info = proxy.GetPointDataInformation()
-
-    # Collect groups for extraction
-    groups_to_extract = []
-
-    for group in initial_groups:
-        proxy.Groups = [group]
-        proxy.UpdatePipeline()
-        if field_name in entity_data_info.keys():
-            groups_to_extract.append(group)
-
-    # Restore state
-    proxy.Groups = initial_groups
-    proxy.UpdatePipeline()
-
-    # Extract groups if necessary
-    if force or (len(groups_to_extract) < len(initial_groups)):
-        extract_group = pv.ExtractGroup(proxy)
-        extract_group.Groups = groups_to_extract
-        extract_group.UpdatePipeline()
-        source = extract_group
-
-    return source
-
-
-def if_possible(proxy, field_name, entity, prs_type):
+def if_possible(proxy, field_name, entity, prs_type, extrGrps=None):
     """Check if the presentation creation is possible on the given field."""
+    proxy.UpdatePipeline()
     result = True
     if (prs_type == PrsTypeEnum.DEFORMEDSHAPE or
         prs_type == PrsTypeEnum.DEFORMEDSHAPESCALARMAP or
@@ -848,7 +883,7 @@ def if_possible(proxy, field_name, entity, prs_type):
         result = (entity == EntityType.CELL or
                   field_name in proxy.QuadraturePointArrays.Available)
     elif (prs_type == PrsTypeEnum.MESH):
-        result = len(get_group_names(proxy, field_name, entity)) > 0
+        result = len(get_group_names(extrGrps)) > 0
 
     return result
 
@@ -864,7 +899,7 @@ def add_scalar_bar(field_name, nb_components,
         title = "\n".join([title, vector_mode])
 
     # Create scalar bar
-    scalar_bar = pv.CreateScalarBar(Enabled=1)
+    scalar_bar = pvs.CreateScalarBar(Enabled=1)
     scalar_bar.Orientation = 'Vertical'
     scalar_bar.Title = title
     scalar_bar.LookupTable = lookup_table
@@ -887,24 +922,22 @@ def add_scalar_bar(field_name, nb_components,
     scalar_bar.LabelShadow = 1
 
     # Add the scalar bar to the view
-    pv.GetRenderView().Representations.append(scalar_bar)
+    pvs.GetRenderView().Representations.append(scalar_bar)
 
     # Reassign the current bar
     _current_bar = scalar_bar
 
-    return scalar_bar
+    return _current_bar
 
 
 def get_bar():
     """Get current scalar bar."""
-    global _current_bar
-
     return _current_bar
 
 
 def get_lookup_table(field_name, nb_components, vector_mode='Magnitude'):
     """Get lookup table for the given field."""
-    lookup_table = pv.GetLookupTableForArray(field_name, nb_components)
+    lookup_table = pvs.GetLookupTableForArray(field_name, nb_components)
 
     if vector_mode == 'Magnitude':
         lookup_table.VectorMode = vector_mode
@@ -922,7 +955,12 @@ def get_lookup_table(field_name, nb_components, vector_mode='Magnitude'):
 
     lookup_table.Discretize = 0
     lookup_table.ColorSpace = 'HSV'
-    lookup_table.LockScalarRange = 0
+    if hasattr(lookup_table,"LockDataRange"):
+        lookup_table.LockDataRange = 0
+    elif hasattr(lookup_table,"LockScalarRange"):
+        lookup_table.LockScalarRange = 0
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
 
     return lookup_table
 
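Note on the hunk above: the lookup-table flag that freezes the color range goes by different names across ParaView releases (LockDataRange in some builds, LockScalarRange in others), hence the hasattr probe. The same three-way branch recurs in every *OnField builder below; a small helper that would factor it out, shown only as an editorial sketch, could read:

    def set_range_lock(lookup_table, locked=True):
        """Set the range lock under whichever property name this ParaView build exposes."""
        value = 1 if locked else 0
        if hasattr(lookup_table, "LockDataRange"):
            lookup_table.LockDataRange = value
        elif hasattr(lookup_table, "LockScalarRange"):
            lookup_table.LockScalarRange = value
        else:
            # mirror the patch: fail loudly if neither spelling exists
            raise RuntimeError("%s has no range-lock property" % lookup_table)
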
@@ -934,7 +972,6 @@ def get_group_mesh_name(full_group_name):
         group_name = full_group_name.split('/')[1]
         return group_name
 
-
 def get_group_entity(full_group_name):
     """Return entity type of the group by its full name."""
     aList = full_group_name.split('/')
@@ -946,48 +983,48 @@ def get_group_entity(full_group_name):
 
 def get_group_short_name(full_group_name):
     """Return short name of the group by its full name."""
-    aList = full_group_name.split('/')
-    if len(aList) >= 4 :
-        short_name = full_group_name.split('/')[3]
-        return short_name
+    short_name = re.sub('^GRP_', '', full_group_name)
+    return short_name
 
 
-def get_mesh_names(proxy):
+def get_mesh_full_names(proxy):
     """Return all mesh names in the given proxy as a set."""
-    groups = proxy.Groups.Available
-    mesh_names = set([get_group_mesh_name(item) for item in groups])
-
-    return mesh_names
+    proxy.UpdatePipeline()
+    fields = proxy.GetProperty("FieldsTreeInfo")[::2]
+    mesh_full_names = set([item for item in fields if get_field_mesh_name(item) == get_field_short_name(item)])
+    return mesh_full_names
 
 
-def get_group_names(proxy, mesh_name, entity, wo_nogroups=False):
-    """Return full names of all groups of the given entity type
-    from the mesh with the given name as a list.
+def get_group_names(extrGrps):
+    """Return full names of all groups of the given 'ExtractGroup' filter object.
     """
     """
-    groups = proxy.Groups.Available
-
-    condition = lambda item: (get_group_mesh_name(item) == mesh_name and
-                              get_group_entity(item) == entity)
-    group_names = [item for item in groups if condition(item)]
-
-    if wo_nogroups:
-        # Remove "No_Group" group
-        not_no_group = lambda item: get_group_short_name(item) != "No_Group"
-        group_names = filter(not_no_group, group_names)
-
+    group_names = filter(lambda x:x[:4]=="GRP_",list(extrGrps.GetProperty("GroupsFlagsInfo")[::2]))
     return group_names
 
 
 def get_time(proxy, timestamp_nb):
     """Get time value by timestamp number."""
+    #proxy.UpdatePipeline()
     # Check timestamp number
-    timestamps = proxy.TimestepValues.GetData()
-    if ((timestamp_nb - 1) not in xrange(len(timestamps))):
-        raise ValueError("Timestamp number is out of range: " + timestamp_nb)
+    timestamps = []
 
-    # Return time value
-    return timestamps[timestamp_nb - 1]
+    if (hasattr(proxy, 'TimestepValues')):
+        timestamps = proxy.TimestepValues.GetData()
+    elif (hasattr(proxy.Input, 'TimestepValues')):
+        timestamps = proxy.Input.TimestepValues.GetData()
+
+    length = len(timestamps)
+    if (timestamp_nb > 0 and (timestamp_nb - 1) not in xrange(length) ) or (timestamp_nb < 0 and -timestamp_nb > length):
+        raise ValueError("Timestamp number is out of range: " + str(timestamp_nb))
 
+    if not timestamps:
+        return 0.0
+
+    # Return time value
+    if timestamp_nb > 0:
+        return timestamps[timestamp_nb - 1]
+    else:
+        return timestamps[timestamp_nb]
 
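Note on the reworked get_time above: positive timestamp numbers keep the historic 1-based convention, negative ones count Python-style from the end, and a source without TimestepValues now yields 0.0 instead of raising. Assuming a proxy whose TimestepValues are [0.0, 0.5, 1.0]:

    get_time(proxy, 1)   # -> 0.0, first timestamp (1-based)
    get_time(proxy, 3)   # -> 1.0, last timestamp
    get_time(proxy, -1)  # -> 1.0, negative indices count from the end
    get_time(proxy, 4)   # raises ValueError: timestamp number out of range
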
 def create_prs(prs_type, proxy, field_entity, field_name, timestamp_nb):
     """Auxiliary function.
@@ -997,6 +1034,7 @@ def create_prs(prs_type, proxy, field_entity, field_name, timestamp_nb):
     Set the presentation properties like visu.CreatePrsForResult() do.
 
     """
+    proxy.UpdatePipeline()
     prs = None
 
     if prs_type == PrsTypeEnum.SCALARMAP:
@@ -1048,6 +1086,7 @@ def ScalarMapOnField(proxy, entity, field_name, timestamp_nb,
       Scalar Map as representation object.
 
     """
+    proxy.UpdatePipeline()
     # We don't need mesh parts with no data on them
     if entity == EntityType.NODE:
         select_cells_with_data(proxy, on_points=[field_name])
@@ -1062,15 +1101,11 @@ def ScalarMapOnField(proxy, entity, field_name, timestamp_nb,
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    pv.UpdatePipeline(time_value, proxy)
-
-    # Extract only groups with data for the field
-    new_proxy = extract_groups_for_field(proxy, field_name, entity,
-                                         force=True)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
     # Get Scalar Map representation object
-    scalarmap = pv.GetRepresentation(new_proxy)
+    scalarmap = pvs.GetRepresentation(proxy)
 
     # Get lookup table
     lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
@@ -1078,11 +1113,16 @@ def ScalarMapOnField(proxy, entity, field_name, timestamp_nb,
     # Set field range if necessary
     data_range = get_data_range(proxy, entity,
                                 field_name, vector_mode)
-    lookup_table.LockScalarRange = 1
+    if hasattr(lookup_table,"LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table,"LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
     # Set properties
-    scalarmap.ColorAttributeType = EntityType.get_pvtype(entity)
-    scalarmap.ColorArrayName = field_name
+    pvs.ColorBy(scalarmap, (EntityType.get_pvtype(entity), field_name))
     scalarmap.LookupTable = lookup_table
 
     # Add scalar bar
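Note on the hunk above: newer ParaView releases replace the ColorArrayName/ColorAttributeType pair on representations with a single association-plus-name setting, which paraview.simple's ColorBy wraps; passing None as the array name (as the Vectors and Deformed Shape builders below do) switches scalar coloring off. The two idioms side by side, reusing the names from this function:

    # pre-5.x idiom removed by this patch:
    #   scalarmap.ColorAttributeType = EntityType.get_pvtype(entity)
    #   scalarmap.ColorArrayName = field_name

    # idiom used throughout the patch:
    pvs.ColorBy(scalarmap, (EntityType.get_pvtype(entity), field_name))  # color by the field
    pvs.ColorBy(scalarmap, (EntityType.get_pvtype(entity), None))        # no array: coloring off
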
@@ -1122,6 +1162,12 @@ def CutPlanesOnField(proxy, entity, field_name, timestamp_nb,
       Cut Planes as representation object.
 
     """
+    proxy.UpdatePipeline()
+    if entity == EntityType.NODE:
+        select_cells_with_data(proxy, on_points=[field_name])
+    else:
+        select_cells_with_data(proxy, on_cells=[field_name])
+
     # Check vector mode
     nb_components = get_nb_components(proxy, entity, field_name)
     check_vector_mode(vector_mode, nb_components)
@@ -1130,11 +1176,11 @@ def CutPlanesOnField(proxy, entity, field_name, timestamp_nb,
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    pv.UpdatePipeline(time_value, proxy)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
     # Create slice filter
-    slice_filter = pv.Slice(proxy)
+    slice_filter = pvs.Slice(proxy)
     slice_filter.SliceType = "Plane"
 
     # Set cut planes normal
@@ -1148,7 +1194,7 @@ def CutPlanesOnField(proxy, entity, field_name, timestamp_nb,
     slice_filter.SliceOffsetValues = positions
 
     # Get Cut Planes representation object
-    cut_planes = pv.GetRepresentation(slice_filter)
+    cut_planes = pvs.GetRepresentation(slice_filter)
 
     # Get lookup table
     lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
@@ -1156,12 +1202,18 @@ def CutPlanesOnField(proxy, entity, field_name, timestamp_nb,
     # Set field range if necessary
     data_range = get_data_range(proxy, entity,
                                 field_name, vector_mode)
-    lookup_table.LockScalarRange = 1
+
+    if hasattr(lookup_table,"LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table,"LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
 
     # Set properties
-    cut_planes.ColorAttributeType = EntityType.get_pvtype(entity)
-    cut_planes.ColorArrayName = field_name
+    pvs.ColorBy(cut_planes, (EntityType.get_pvtype(entity), field_name))
     cut_planes.LookupTable = lookup_table
 
     # Add scalar bar
@@ -1212,6 +1264,12 @@ def CutLinesOnField(proxy, entity, field_name, timestamp_nb,
       (Cut Lines as representation object, list of 'PlotOverLine') otherwise
 
     """
+    proxy.UpdatePipeline()
+    if entity == EntityType.NODE:
+        select_cells_with_data(proxy, on_points=[field_name])
+    else:
+        select_cells_with_data(proxy, on_cells=[field_name])
+
     # Check vector mode
     nb_components = get_nb_components(proxy, entity, field_name)
     check_vector_mode(vector_mode, nb_components)
@@ -1220,11 +1278,11 @@ def CutLinesOnField(proxy, entity, field_name, timestamp_nb,
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    pv.UpdatePipeline(time_value, proxy)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
     # Create base plane
-    base_plane = pv.Slice(proxy)
+    base_plane = pvs.Slice(proxy)
     base_plane.SliceType = "Plane"
 
     # Set base plane normal
@@ -1244,7 +1302,7 @@ def CutLinesOnField(proxy, entity, field_name, timestamp_nb,
         base_plane = proxy
 
     # Create cutting planes
-    cut_planes = pv.Slice(base_plane)
+    cut_planes = pvs.Slice(base_plane)
     cut_planes.SliceType = "Plane"
 
     # Set cutting planes normal and get positions
@@ -1270,9 +1328,9 @@ def CutLinesOnField(proxy, entity, field_name, timestamp_nb,
             point2 = [bounds[1], bounds[3], bounds[5]]
 
             # Create plot over line filter
-            pol = pv.PlotOverLine(cut_planes,
+            pol = pvs.PlotOverLine(cut_planes,
                                   Source="High Resolution Line Source")
-            pv.RenameSource('Y' + str(index), pol)
+            pvs.RenameSource('Y' + str(index), pol)
             pol.Source.Point1 = point1
             pol.Source.Point2 = point2
             pol.UpdatePipeline()
@@ -1284,7 +1342,7 @@ def CutLinesOnField(proxy, entity, field_name, timestamp_nb,
     cut_planes.UpdatePipeline()
 
     # Get Cut Lines representation object
-    cut_lines = pv.GetRepresentation(cut_planes)
+    cut_lines = pvs.GetRepresentation(cut_planes)
 
     # Get lookup table
     lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
@@ -1292,12 +1350,17 @@ def CutLinesOnField(proxy, entity, field_name, timestamp_nb,
     # Set field range if necessary
     data_range = get_data_range(proxy, entity,
                                 field_name, vector_mode)
-    lookup_table.LockScalarRange = 1
+    if hasattr(lookup_table,"LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table,"LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
 
     # Set properties
-    cut_lines.ColorAttributeType = EntityType.get_pvtype(entity)
-    cut_lines.ColorArrayName = field_name
+    pvs.ColorBy(cut_lines, (EntityType.get_pvtype(entity), field_name))
     cut_lines.LookupTable = lookup_table
 
     # Set wireframe represenatation mode
@@ -1334,6 +1397,12 @@ def CutSegmentOnField(proxy, entity, field_name, timestamp_nb,
       Cut Segment as 3D representation object.
 
     """
+    proxy.UpdatePipeline()
+    if entity == EntityType.NODE:
+        select_cells_with_data(proxy, on_points=[field_name])
+    else:
+        select_cells_with_data(proxy, on_cells=[field_name])
+
     # Check vector mode
     nb_components = get_nb_components(proxy, entity, field_name)
     check_vector_mode(vector_mode, nb_components)
@@ -1342,17 +1411,17 @@ def CutSegmentOnField(proxy, entity, field_name, timestamp_nb,
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    pv.UpdatePipeline(time_value, proxy)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
     # Create plot over line filter
-    pol = pv.PlotOverLine(proxy, Source="High Resolution Line Source")
+    pol = pvs.PlotOverLine(proxy, Source="High Resolution Line Source")
     pol.Source.Point1 = point1
     pol.Source.Point2 = point2
     pol.UpdatePipeline()
 
     # Get Cut Segment representation object
-    cut_segment = pv.GetRepresentation(pol)
+    cut_segment = pvs.GetRepresentation(pol)
 
     # Get lookup table
     lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
@@ -1360,12 +1429,17 @@ def CutSegmentOnField(proxy, entity, field_name, timestamp_nb,
     # Set field range if necessary
     data_range = get_data_range(proxy, entity,
                                 field_name, vector_mode)
-    lookup_table.LockScalarRange = 1
+    if hasattr(lookup_table,"LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table,"LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
 
     # Set properties
-    cut_segment.ColorAttributeType = EntityType.get_pvtype(entity)
-    cut_segment.ColorArrayName = field_name
+    pvs.ColorBy(cut_segment, (EntityType.get_pvtype(entity), field_name))
     cut_segment.LookupTable = lookup_table
 
     # Set wireframe represenatation mode
@@ -1402,6 +1476,12 @@ def VectorsOnField(proxy, entity, field_name, timestamp_nb,
       Vectors as representation object.
 
     """
+    proxy.UpdatePipeline()
+    if entity == EntityType.NODE:
+        select_cells_with_data(proxy, on_points=[field_name])
+    else:
+        select_cells_with_data(proxy, on_cells=[field_name])
+
     # Check vector mode
     nb_components = get_nb_components(proxy, entity, field_name)
     check_vector_mode(vector_mode, nb_components)
@@ -1410,16 +1490,15 @@ def VectorsOnField(proxy, entity, field_name, timestamp_nb,
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    pv.UpdatePipeline(time_value, proxy)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
     # Extract only groups with data for the field
-    new_proxy = extract_groups_for_field(proxy, field_name, entity)
-    source = new_proxy
+    source = proxy
 
     # Cell centers
     if is_data_on_cells(proxy, field_name):
-        cell_centers = pv.CellCenters(source)
+        cell_centers = pvs.CellCenters(source)
         cell_centers.VertexCells = 1
         source = cell_centers
 
@@ -1431,10 +1510,10 @@ def VectorsOnField(proxy, entity, field_name, timestamp_nb,
         source = calc
 
     # Glyph
-    glyph = pv.Glyph(source)
+    glyph = pvs.Glyph(source)
     glyph.Vectors = vector_array
     glyph.ScaleMode = 'vector'
-    glyph.MaskPoints = 0
+    #glyph.MaskPoints = 0
 
     # Set glyph type
     glyph.GlyphType = glyph_type
@@ -1455,16 +1534,16 @@ def VectorsOnField(proxy, entity, field_name, timestamp_nb,
             glyph.GlyphType.Center = [0.0, 0.0, 0.0]
 
     if scale_factor is not None:
-        glyph.SetScaleFactor = scale_factor
+        glyph.ScaleFactor = scale_factor
     else:
         def_scale = get_default_scale(PrsTypeEnum.DEFORMEDSHAPE,
-                                      new_proxy, entity, field_name)
-        glyph.SetScaleFactor = def_scale
+                                      proxy, entity, field_name)
+        glyph.ScaleFactor = def_scale
 
     glyph.UpdatePipeline()
 
     # Get Vectors representation object
-    vectors = pv.GetRepresentation(glyph)
+    vectors = pvs.GetRepresentation(glyph)
 
     # Get lookup table
     lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
@@ -1472,14 +1551,20 @@ def VectorsOnField(proxy, entity, field_name, timestamp_nb,
     # Set field range if necessary
     data_range = get_data_range(proxy, entity,
                                 field_name, vector_mode)
-    lookup_table.LockScalarRange = 1
+    if hasattr(lookup_table,"LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table,"LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
 
     # Set properties
     if (is_colored):
-        vectors.ColorArrayName = 'GlyphVector'
+        pvs.ColorBy(vectors, (EntityType.get_pvtype(entity), 'GlyphVector'))
     else:
-        vectors.ColorArrayName = ''
+        pvs.ColorBy(vectors, (EntityType.get_pvtype(entity), None))
     vectors.LookupTable = lookup_table
 
     vectors.LineWidth = 1.0
@@ -1516,6 +1601,7 @@ def DeformedShapeOnField(proxy, entity, field_name,
       Defromed Shape as representation object.
 
     """
+    proxy.UpdatePipeline()
     # We don't need mesh parts with no data on them
     if entity == EntityType.NODE:
         select_cells_with_data(proxy, on_points=[field_name])
@@ -1530,18 +1616,16 @@ def DeformedShapeOnField(proxy, entity, field_name,
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    pv.UpdatePipeline(time_value, proxy)
-
-    # Extract only groups with data for the field
-    new_proxy = extract_groups_for_field(proxy, field_name, entity)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
     # Do merge
-    source = pv.MergeBlocks(new_proxy)
+    source = pvs.MergeBlocks(proxy)
+    pvs.UpdatePipeline()
 
     # Cell data to point data
     if is_data_on_cells(proxy, field_name):
-        cell_to_point = pv.CellDatatoPointData()
+        cell_to_point = pvs.CellDatatoPointData()
         cell_to_point.PassCellData = 1
         source = cell_to_point
 
@@ -1553,7 +1637,7 @@ def DeformedShapeOnField(proxy, entity, field_name,
         source = calc
 
     # Warp by vector
-    warp_vector = pv.WarpByVector(source)
+    warp_vector = pvs.WarpByVector(source)
     warp_vector.Vectors = [vector_array]
     if scale_factor is not None:
         warp_vector.ScaleFactor = scale_factor
@@ -1563,7 +1647,7 @@ def DeformedShapeOnField(proxy, entity, field_name,
         warp_vector.ScaleFactor = def_scale
 
     # Get Deformed Shape representation object
-    defshape = pv.GetRepresentation(warp_vector)
+    defshape = pvs.GetRepresentation(warp_vector)
 
     # Get lookup table
     lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
@@ -1571,15 +1655,20 @@ def DeformedShapeOnField(proxy, entity, field_name,
     # Set field range if necessary
     data_range = get_data_range(proxy, entity,
                                 field_name, vector_mode)
-    lookup_table.LockScalarRange = 1
+    if hasattr(lookup_table,"LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table,"LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
 
     # Set properties
     if is_colored:
-        defshape.ColorAttributeType = EntityType.get_pvtype(entity)
-        defshape.ColorArrayName = field_name
+        pvs.ColorBy(defshape, (EntityType.get_pvtype(entity), field_name))
     else:
-        defshape.ColorArrayName = ''
+        pvs.ColorBy(defshape, (EntityType.get_pvtype(entity), None))
     defshape.LookupTable = lookup_table
 
     # Set wireframe represenatation mode
@@ -1616,6 +1705,7 @@ def DeformedShapeAndScalarMapOnField(proxy, entity, field_name,
       Defromed Shape And Scalar Map as representation object.
 
     """
+    proxy.UpdatePipeline()
     # We don't need mesh parts with no data on them
     on_points = []
     on_cells = []
@@ -1631,18 +1721,20 @@ def DeformedShapeAndScalarMapOnField(proxy, entity, field_name,
         else:
             on_cells.append(scalar_field_name)
 
+    nb_components = get_nb_components(proxy, entity, field_name)
+
+    # Select fields
     select_cells_with_data(proxy, on_points, on_cells)
 
     # Check vector mode
-    nb_components = get_nb_components(proxy, entity, field_name)
     check_vector_mode(vector_mode, nb_components)
 
     # Get time value
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    pv.UpdatePipeline(time_value, proxy)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
     # Set scalar field by default
     scalar_field_entity = scalar_entity
@@ -1651,15 +1743,13 @@ def DeformedShapeAndScalarMapOnField(proxy, entity, field_name,
         scalar_field_entity = entity
         scalar_field = field_name
 
-    # Extract only groups with data for the field
-    new_proxy = extract_groups_for_field(proxy, field_name, entity)
-
     # Do merge
     # Do merge
-    source = pv.MergeBlocks(new_proxy)
+    source = pvs.MergeBlocks(proxy)
+    pvs.UpdatePipeline()
 
     # Cell data to point data
     if is_data_on_cells(proxy, field_name):
-        cell_to_point = pv.CellDatatoPointData(source)
+        cell_to_point = pvs.CellDatatoPointData(source)
         cell_to_point.PassCellData = 1
         source = cell_to_point
 
@@ -1671,17 +1761,17 @@ def DeformedShapeAndScalarMapOnField(proxy, entity, field_name,
         source = calc
 
     # Warp by vector
-    warp_vector = pv.WarpByVector(source)
+    warp_vector = pvs.WarpByVector(source)
     warp_vector.Vectors = [vector_array]
     if scale_factor is not None:
         warp_vector.ScaleFactor = scale_factor
     else:
         def_scale = get_default_scale(PrsTypeEnum.DEFORMEDSHAPE,
-                                      new_proxy, entity, field_name)
+                                      proxy, entity, field_name)
         warp_vector.ScaleFactor = def_scale
 
     # Get Defromed Shape And Scalar Map representation object
-    defshapemap = pv.GetRepresentation(warp_vector)
+    defshapemap = pvs.GetRepresentation(warp_vector)
 
     # Get lookup table
     lookup_table = get_lookup_table(scalar_field, nb_components, vector_mode)
@@ -1689,13 +1779,18 @@ def DeformedShapeAndScalarMapOnField(proxy, entity, field_name,
     # Set field range if necessary
     data_range = get_data_range(proxy, scalar_field_entity,
                                 scalar_field, vector_mode)
-    lookup_table.LockScalarRange = 1
+    if hasattr(lookup_table,"LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table,"LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
 
     # Set properties
-    defshapemap.ColorArrayName = scalar_field
+    pvs.ColorBy(defshapemap, (EntityType.get_pvtype(scalar_field_entity), scalar_field))
     defshapemap.LookupTable = lookup_table
-    defshapemap.ColorAttributeType = EntityType.get_pvtype(scalar_field_entity)
 
     # Add scalar bar
     add_scalar_bar(field_name, nb_components,
@@ -1741,6 +1836,7 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb,
       Plot 3D as representation object.
 
     """
+    proxy.UpdatePipeline()
     # We don't need mesh parts with no data on them
     if entity == EntityType.NODE:
         select_cells_with_data(proxy, on_points=[field_name])
@@ -1755,14 +1851,11 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb,
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    pv.UpdatePipeline(time_value, proxy)
-
-    # Extract only groups with data for the field
-    new_proxy = extract_groups_for_field(proxy, field_name, entity)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
     # Do merge
-    merge_blocks = pv.MergeBlocks(new_proxy)
+    merge_blocks = pvs.MergeBlocks(proxy)
     merge_blocks.UpdatePipeline()
 
     poly_data = None
@@ -1782,7 +1875,7 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb,
                                            radians(angle1), radians(angle2))
 
         # Create slice filter
-        slice_filter = pv.Slice(merge_blocks)
+        slice_filter = pvs.Slice(merge_blocks)
         slice_filter.SliceType = "Plane"
 
         # Set cutting plane normal
@@ -1804,7 +1897,7 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb,
     use_normal = 0
     # Geometry filter
     if not poly_data or poly_data.GetDataInformation().GetNumberOfCells() == 0:
-        geometry_filter = pv.GeometryFilter(merge_blocks)
+        geometry_filter = pvs.GeometryFilter(merge_blocks)
         poly_data = geometry_filter
         use_normal = 1  # TODO(MZN): workaround
 
@@ -1814,7 +1907,7 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb,
 
     if is_data_on_cells(poly_data, field_name):
         # Cell data to point data
-        cell_to_point = pv.CellDatatoPointData(poly_data)
+        cell_to_point = pvs.CellDatatoPointData(poly_data)
         cell_to_point.PassCellData = 1
         source = cell_to_point
 
@@ -1827,7 +1920,7 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb,
         source = calc
 
     # Warp by scalar
-    warp_scalar = pv.WarpByScalar(source)
+    warp_scalar = pvs.WarpByScalar(source)
     warp_scalar.Scalars = scalars
     warp_scalar.Normal = normal
     warp_scalar.UseNormal = use_normal
@@ -1843,7 +1936,7 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb,
 
     if (is_contour):
         # Contours
-        contour = pv.Contour(warp_scalar)
+        contour = pvs.Contour(warp_scalar)
         contour.PointMergeMethod = "Uniform Binning"
         contour.ContourBy = ['POINTS', field_name]
         scalar_range = get_data_range(proxy, entity,
@@ -1853,7 +1946,7 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb,
         source = contour
 
     # Get Plot 3D representation object
-    plot3d = pv.GetRepresentation(source)
+    plot3d = pvs.GetRepresentation(source)
 
     # Get lookup table
     lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
@@ -1861,12 +1954,17 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb,
     # Set field range if necessary
     data_range = get_data_range(proxy, entity,
                                 field_name, vector_mode)
-    lookup_table.LockScalarRange = 1
+    if hasattr(lookup_table, "LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table, "LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!" % lookup_table)
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
 
     # Set properties
-    plot3d.ColorAttributeType = EntityType.get_pvtype(entity)
-    plot3d.ColorArrayName = field_name
+    pvs.ColorBy(plot3d, (EntityType.get_pvtype(entity), field_name))
     plot3d.LookupTable = lookup_table
 
     # Add scalar bar
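Note: the hasattr() fallback between LockDataRange (newer ParaView) and LockScalarRange (older ParaView) recurs in several presentations below; a hypothetical helper of the following shape, not part of this patch, would factor it out:

def lock_lut_range(lookup_table):
    # Lock the transfer function range on both newer and older ParaView versions.
    if hasattr(lookup_table, "LockDataRange"):
        lookup_table.LockDataRange = 1
    elif hasattr(lookup_table, "LockScalarRange"):
        lookup_table.LockScalarRange = 1
    else:
        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!" % lookup_table)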
@@ -1901,6 +1999,7 @@ def IsoSurfacesOnField(proxy, entity, field_name, timestamp_nb,
       Iso Surfaces as representation object.
 
     """
+    proxy.UpdatePipeline()
     # We don't need mesh parts with no data on them
     if entity == EntityType.NODE:
         select_cells_with_data(proxy, on_points=[field_name])
@@ -1915,18 +2014,16 @@ def IsoSurfacesOnField(proxy, entity, field_name, timestamp_nb,
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    pv.UpdatePipeline(time_value, proxy)
-
-    # Extract only groups with data for the field
-    new_proxy = extract_groups_for_field(proxy, field_name, entity)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
     # Do merge
-    source = pv.MergeBlocks(new_proxy)
+    source = pvs.MergeBlocks(proxy)
+    pvs.UpdatePipeline()
 
     # Transform cell data into point data if necessary
     if is_data_on_cells(proxy, field_name):
-        cell_to_point = pv.CellDatatoPointData(source)
+        cell_to_point = pvs.CellDatatoPointData(source)
         cell_to_point.PassCellData = 1
         source = cell_to_point
 
@@ -1939,7 +2036,7 @@ def IsoSurfacesOnField(proxy, entity, field_name, timestamp_nb,
         source = calc
 
     # Contour filter settings
-    contour = pv.Contour(source)
+    contour = pvs.Contour(source)
     contour.ComputeScalars = 1
     contour.ContourBy = contour_by
 
@@ -1956,7 +2053,7 @@ def IsoSurfacesOnField(proxy, entity, field_name, timestamp_nb,
     contour.Isosurfaces = surfaces
 
     # Get Iso Surfaces representation object
-    isosurfaces = pv.GetRepresentation(contour)
+    isosurfaces = pvs.GetRepresentation(contour)
 
     # Get lookup table
     lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
@@ -1964,15 +2061,20 @@ def IsoSurfacesOnField(proxy, entity, field_name, timestamp_nb,
     # Set field range if necessary
     data_range = get_data_range(proxy, entity,
                                 field_name, vector_mode)
-    lookup_table.LockScalarRange = 1
+    if hasattr(lookup_table, "LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table, "LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!" % lookup_table)
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
 
     # Set display properties
     if (is_colored):
-        isosurfaces.ColorAttributeType = EntityType.get_pvtype(entity)
-        isosurfaces.ColorArrayName = field_name
+        pvs.ColorBy(isosurfaces, (EntityType.get_pvtype(entity), field_name))
     else:
-        isosurfaces.ColorArrayName = ''
+        pvs.ColorBy(isosurfaces, (EntityType.get_pvtype(entity), None))
         if color:
             isosurfaces.DiffuseColor = color
     isosurfaces.LookupTable = lookup_table
@@ -2023,11 +2125,14 @@ def GaussPointsOnField(proxy, entity, field_name,
       Gauss Points as representation object.
 
     """
+    proxy.UpdatePipeline()
     # We don't need mesh parts with no data on them
-    if entity == EntityType.NODE:
-        select_cells_with_data(proxy, on_points=[field_name])
-    else:
-        select_cells_with_data(proxy, on_cells=[field_name])
+    on_gauss = select_cells_with_data(proxy, on_gauss=[field_name])
+    if not on_gauss:
+        if entity == EntityType.NODE:
+            select_cells_with_data(proxy, on_points=[field_name])
+        else:
+            select_cells_with_data(proxy, on_cells=[field_name])
 
     # Check vector mode
     nb_components = get_nb_components(proxy, entity, field_name)
@@ -2037,23 +2142,19 @@ def GaussPointsOnField(proxy, entity, field_name,
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    proxy.UpdatePipeline(time=time_value)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
 
-    # Extract only groups with data for the field
-    source = extract_groups_for_field(proxy, field_name, entity)
-
-    # Quadrature point arrays
-    qp_arrays = proxy.QuadraturePointArrays.Available
+    source = proxy
 
     # If no quadrature point array is passed, use cell centers
-    if field_name in qp_arrays:
-        generate_qp = pv.GenerateQuadraturePoints(source)
-        generate_qp.SelectSourceArray = ['CELLS', 'ELGA_Offset']
+    if on_gauss:
+        generate_qp = pvs.GenerateQuadraturePoints(source)
+        generate_qp.QuadratureSchemeDef = ['CELLS', 'ELGA@0']
         source = generate_qp
     else:
         # Cell centers
-        cell_centers = pv.CellCenters(source)
+        cell_centers = pvs.CellCenters(source)
         cell_centers.VertexCells = 1
         source = cell_centers
 
@@ -2064,13 +2165,12 @@ def GaussPointsOnField(proxy, entity, field_name,
         vector_array = field_name
         # If the given vector array has only 2 components, add the third one
         if nb_components == 2:
-            calc = get_add_component_calc(source,
-                                          EntityType.NODE, field_name)
+            calc = get_add_component_calc(source, EntityType.NODE, field_name)
             vector_array = calc.ResultArrayName
             source = calc
 
         # Warp by vector
-        warp_vector = pv.WarpByVector(source)
+        warp_vector = pvs.WarpByVector(source)
         warp_vector.Vectors = [vector_array]
         if scale_factor is not None:
             warp_vector.ScaleFactor = scale_factor
@@ -2082,7 +2182,7 @@ def GaussPointsOnField(proxy, entity, field_name,
         source = warp_vector
 
     # Get Gauss Points representation object
-    gausspnt = pv.GetRepresentation(source)
+    gausspnt = pvs.GetRepresentation(source)
 
     # Get lookup table
     lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
@@ -2090,15 +2190,20 @@ def GaussPointsOnField(proxy, entity, field_name,
     # Set field range if necessary
     data_range = get_data_range(proxy, entity,
                                 field_name, vector_mode)
-    lookup_table.LockScalarRange = 1
+    if hasattr(lookup_table, "LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table, "LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!" % lookup_table)
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
 
     # Set display properties
     if is_colored:
-        gausspnt.ColorAttributeType = EntityType.get_pvtype(entity)
-        gausspnt.ColorArrayName = field_name
+        pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), field_name))
     else:
-        gausspnt.ColorArrayName = ''
+        pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), None))
         if color:
             gausspnt.DiffuseColor = color
 
@@ -2141,7 +2246,7 @@ def GaussPointsOnField(proxy, entity, field_name,
 
     if is_proportional:
         mult = multiplier
-        if mult is None:
+        if mult is None and data_range[1] != 0:
             mult = abs(0.1 / data_range[1])
 
         gausspnt.RadiusScalarRange = data_range
@@ -2154,8 +2259,9 @@ def GaussPointsOnField(proxy, entity, field_name,
         gausspnt.RadiusTransferFunctionMode = 'Table'
         gausspnt.RadiusScalarRange = data_range
         gausspnt.RadiusUseScalarRange = 1
-        gausspnt.RadiusIsProportional = 1
-        gausspnt.RadiusProportionalFactor = mult
+        if mult is not None:
+            gausspnt.RadiusIsProportional = 1
+            gausspnt.RadiusProportionalFactor = mult
     else:
         gausspnt.RadiusTransferFunctionEnabled = 0
         gausspnt.RadiusMode = 'Constant'
@@ -2163,6 +2269,143 @@ def GaussPointsOnField(proxy, entity, field_name,
 
     return gausspnt
 
+def GaussPointsOnField1(proxy, entity, field_name,
+                        timestamp_nb,
+                        is_colored=True, color=None,
+                        primitive=GaussType.SPHERE,
+                        is_proportional=True,
+                        max_pixel_size=256,
+                        multiplier=None,
+                        vector_mode='Magnitude'):
+    """Creates Gauss Points on the given field, using the GaussPoints() ParaView interface.
+
+    Arguments:
+    proxy: the pipeline object, containing data
+    entity: the field entity type from PrsTypeEnum
+    field_name: the field name
+    timestamp_nb: the number of the time step (1, 2, ...)
+    is_colored: defines whether the Gauss Points will be multicolored,
+    using the corresponding data values
+    color: defines the presentation color as an [R, G, B] triple. Taken into
+    account only if is_colored is False.
+    primitive: primitive type from GaussType
+    is_proportional: if True, the size of the primitives will depend on
+    the Gauss point value
+    max_pixel_size: the maximum size of the Gauss Points primitive in pixels
+    multiplier: coefficient between data values and the size of primitives.
+    If not passed by the user, a default scale will be computed.
+    vector_mode: the mode of transformation of vector values into
+    scalar values, applicable only if the field contains vector values.
+    Possible modes: 'Magnitude' - vector module;
+    'X', 'Y', 'Z' - vector components.
+
+    Returns:
+      Gauss Points as representation object.
+
+    """
+    proxy.UpdatePipeline()
+    select_cells_with_data(proxy, on_gauss=[field_name])
+
+    nb_components = get_nb_components(proxy, entity, field_name)
+
+    # Get time value
+    time_value = get_time(proxy, timestamp_nb)
+
+    # Set timestamp
+    pvs.GetRenderView().ViewTime = time_value
+    proxy.UpdatePipeline(time=time_value)
+
+    # Create Gauss Points object
+    source = pvs.ELGAfieldToPointSprite(proxy)
+    source.UpdatePipeline()
+
+    # Get Gauss Points representation object
+    gausspnt = pvs.GetRepresentation(source)
+
+    # Get lookup table
+    lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
+
+    # Set field range if necessary
+    data_range = get_data_range(proxy, entity,
+                                field_name, vector_mode)
+    if hasattr(lookup_table, "LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table, "LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!" % lookup_table)
+
+    lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
+
+    # Set display properties
+    if is_colored:
+        pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), field_name))
+    else:
+        pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), None))
+        if color:
+            gausspnt.DiffuseColor = color
+
+    gausspnt.LookupTable = lookup_table
+
+    # Add scalar bar
+    add_scalar_bar(field_name, nb_components,
+                   vector_mode, lookup_table, time_value)
+
+    # Set point sprite representation
+    gausspnt.Representation = 'Point Sprite'
+
+    # Point sprite settings
+    gausspnt.InterpolateScalarsBeforeMapping = 0
+    gausspnt.MaxPixelSize = max_pixel_size
+
+    # Render mode
+    gausspnt.RenderMode = GaussType.get_mode(primitive)
+
+    #if primitive == GaussType.SPRITE:
+        # Set texture
+        # TODO(MZN): replace with pvsimple high-level interface
+    #    texture = sm.CreateProxy("textures", "SpriteTexture")
+    #    alphamprop = texture.GetProperty("AlphaMethod")
+    #    alphamprop.SetElement(0, 2)  # Clamp
+    #    alphatprop = texture.GetProperty("AlphaThreshold")
+    #    alphatprop.SetElement(0, 63)
+    #    maxprop = texture.GetProperty("Maximum")
+    #    maxprop.SetElement(0, 255)
+    #    texture.UpdateVTKObjects()
+
+    #    gausspnt.Texture = texture
+        #gausspnt.Texture.AlphaMethod = 'Clamp'
+        #gausspnt.Texture.AlphaThreshold = 63
+        #gausspnt.Texture.Maximum= 255
+
+    # Proportional radius
+    gausspnt.RadiusUseScalarRange = 0
+    gausspnt.RadiusIsProportional = 0
+
+    if is_proportional:
+        mult = multiplier
+        if mult is None and data_range[1] != 0:
+            mult = abs(0.1 / data_range[1])
+
+        gausspnt.RadiusScalarRange = data_range
+        gausspnt.RadiusTransferFunctionEnabled = 1
+        gausspnt.RadiusMode = 'Scalar'
+        gausspnt.RadiusArray = ['POINTS', field_name]
+        if nb_components > 1:
+            v_comp = get_vector_component(vector_mode)
+            gausspnt.RadiusVectorComponent = v_comp
+        gausspnt.RadiusTransferFunctionMode = 'Table'
+        gausspnt.RadiusScalarRange = data_range
+        gausspnt.RadiusUseScalarRange = 1
+        if mult is not None:
+            gausspnt.RadiusIsProportional = 1
+            gausspnt.RadiusProportionalFactor = mult
+    else:
+        gausspnt.RadiusTransferFunctionEnabled = 0
+        gausspnt.RadiusMode = 'Constant'
+        gausspnt.RadiusArray = ['POINTS', 'Constant Radius']
+
+    return gausspnt
 
 def StreamLinesOnField(proxy, entity, field_name, timestamp_nb,
                        direction='BOTH', is_colored=False, color=None,
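Note: a hypothetical call of the new GaussPointsOnField1() helper, assuming 'proxy' is a MEDReader source exposing a Gauss-point field named 'RESU_ELGA' (the field name is illustrative):

import pvsimple as pvs
prs = GaussPointsOnField1(proxy, EntityType.CELL, 'RESU_ELGA', 1,
                          is_colored=True, primitive=GaussType.SPHERE)
pvs.Render()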
@@ -2188,6 +2431,7 @@ def StreamLinesOnField(proxy, entity, field_name, timestamp_nb,
       Stream Lines as representation object.
 
     """
+    proxy.UpdatePipeline()
     # We don't need mesh parts with no data on them
     if entity == EntityType.NODE:
         select_cells_with_data(proxy, on_points=[field_name])
@@ -2202,20 +2446,18 @@ def StreamLinesOnField(proxy, entity, field_name, timestamp_nb,
     time_value = get_time(proxy, timestamp_nb)
 
     # Set timestamp
-    pv.GetRenderView().ViewTime = time_value
-    pv.UpdatePipeline(time_value, proxy)
-
-    # Extract only groups with data for the field
-    new_proxy = extract_groups_for_field(proxy, field_name, entity)
+    pvs.GetRenderView().ViewTime = time_value
+    pvs.UpdatePipeline(time_value, proxy)
 
     # Do merge
-    source = pv.MergeBlocks(new_proxy)
+    source = pvs.MergeBlocks(proxy)
+    pvs.UpdatePipeline()
 
     # Cell data to point data
     if is_data_on_cells(proxy, field_name):
-        cell_to_point = pv.CellDatatoPointData(source)
+        cell_to_point = pvs.CellDatatoPointData(source)
         cell_to_point.PassCellData = 1
-        cell_to_point.UpdatePipeline()
+        pvs.UpdatePipeline()
         source = cell_to_point
 
     vector_array = field_name
@@ -2223,38 +2465,43 @@ def StreamLinesOnField(proxy, entity, field_name, timestamp_nb,
     if nb_components == 2:
         calc = get_add_component_calc(source, EntityType.NODE, field_name)
         vector_array = calc.ResultArrayName
-        calc.UpdatePipeline()
+        pvs.UpdatePipeline()
         source = calc
 
     # Stream Tracer
-    stream = pv.StreamTracer(source)
+    stream = pvs.StreamTracer(source)
     stream.SeedType = "Point Source"
     stream.Vectors = ['POINTS', vector_array]
-    stream.SeedType = "Point Source"
     stream.IntegrationDirection = direction
     stream.IntegratorType = 'Runge-Kutta 2'
+    stream.SeedType = 'High Resolution Line Source'
     stream.UpdatePipeline()
 
     # Get Stream Lines representation object
     if is_empty(stream):
         return None
-    streamlines = pv.GetRepresentation(stream)
+    streamlines = pvs.GetRepresentation(stream)
 
     # Get lookup table
     lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
 
     # Set field range if necessary
-    data_range = get_data_range(new_proxy, entity,
+    data_range = get_data_range(proxy, entity,
                                 field_name, vector_mode)
-    lookup_table.LockScalarRange = 1
+    if hasattr(lookup_table, "LockDataRange"):
+        lookup_table.LockDataRange = 1
+    elif hasattr(lookup_table, "LockScalarRange"):
+        lookup_table.LockScalarRange = 1
+    else:
+        raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!" % lookup_table)
+
     lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
 
     # Set properties
     if is_colored:
-        streamlines.ColorAttributeType = EntityType.get_pvtype(entity)
-        streamlines.ColorArrayName = field_name
+        pvs.ColorBy(streamlines, (EntityType.get_pvtype(entity), field_name))
     else:
-        streamlines.ColorArrayName = ''
+        pvs.ColorBy(streamlines, (EntityType.get_pvtype(entity), None))
         if color:
             streamlines.DiffuseColor = color
 
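Note: a hypothetical call of StreamLinesOnField() after this change, assuming 'proxy' exposes a nodal vector field named 'VITESSE' (illustrative name); 'FORWARD', 'BACKWARD' and 'BOTH' are the integration directions accepted by the underlying StreamTracer:

prs = StreamLinesOnField(proxy, EntityType.NODE, 'VITESSE', 1,
                         direction='FORWARD', is_colored=True)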
@@ -2272,81 +2519,83 @@ def MeshOnEntity(proxy, mesh_name, entity):
 
     Arguments:
       proxy -- the pipeline object, containig data
-      mesh_name -- the mesh name
-      entity -- the entity type
+      mesh_name -- the full or short name of the mesh field
 
     Returns:
       Submesh as representation object of the given source.
 
     """
-    # Select all cell types
-    select_all_cells(proxy)
-
-    # Get subset of groups on the given entity
-    subset = get_group_names(proxy, mesh_name, entity)
-
-    # Select only groups of the given entity type
-    proxy.Groups = subset
+    proxy.UpdatePipeline()
+    mesh_full_name = None
+    aList = mesh_name.split('/')
+    if len(aList) >= 2:
+        mesh_full_name = mesh_name
+    else:
+        mesh_full_name = find_mesh_full_name(proxy, mesh_name)
+    if not mesh_full_name:
+        raise RuntimeError("The given mesh name was not found")
+    # Select only the given mesh
+    proxy.AllArrays = [mesh_full_name]
     proxy.UpdatePipeline()
 
     # Get representation object if the submesh is not empty
     prs = None
     if (proxy.GetDataInformation().GetNumberOfPoints() or
         proxy.GetDataInformation().GetNumberOfCells()):
-        prs = pv.GetRepresentation(proxy)
-        prs.ColorArrayName = ''
+        my_view = pvs.GetRenderView()
+        prs = pvs.GetRepresentation(proxy, view=my_view)
+        prs.ColorArrayName = (None, '')
 
     return prs
 
 
-def MeshOnGroup(proxy, group_name):
+def MeshOnGroup(proxy, extrGroups, group_name):
     """Creates submesh on the group.
 
     Arguments:
       proxy -- the pipeline object, containig data
       group_name -- the full group name
+      extrGroups -- the ExtractGroup filter object used to select the group
 
     Returns:
       Representation object of the given source with single group
       selected.
 
     """
-    # Select all cell types
-    select_all_cells(proxy)
-
-    # Select only the group with the given name
-    one_group = [group_name]
-    proxy.Groups = one_group
     proxy.UpdatePipeline()
+    # Deselect all groups
+    extrGroups.AllGroups = []
+    extrGroups.UpdatePipelineInformation()
+    # Select only the group with the given name
+    extrGroups.AllGroups = [group_name]
+    extrGroups.UpdatePipelineInformation()
 
     # Get representation object if the submesh is not empty
     prs = None
 
     # Check if the group was set
-    if proxy.Groups.GetData() == one_group:
-        group_entity = get_group_entity(group_name)
+    if len(extrGroups.AllGroups) == 1 and \
+       extrGroups.AllGroups[0] == group_name:
         # Check if the submesh is not empty
-        nb_items = 0
-        if group_entity == EntityType.NODE:
-            nb_items = proxy.GetDataInformation().GetNumberOfPoints()
-        elif group_entity == EntityType.CELL:
-            nb_items = proxy.GetDataInformation().GetNumberOfCells()
+        nb_points = proxy.GetDataInformation().GetNumberOfPoints()
+        nb_cells = proxy.GetDataInformation().GetNumberOfCells()
 
 
-        if nb_items:
-            prs = pv.GetRepresentation(proxy)
-            prs.ColorArrayName = ''
+        if nb_points or nb_cells:
+#            prs = pvs.GetRepresentation(proxy)
+            prs = pvs.Show()
+            prs.ColorArrayName = (None, '')
+            display_only(prs)
 
     return prs
 
 
-def CreatePrsForFile(paravis_instance, file_name, prs_types,
+def CreatePrsForFile(file_name, prs_types,
                      picture_dir, picture_ext):
     """Build presentations of the given types for the file.
 
     Build presentations for all fields on all timestamps.
 
     Arguments:
-      paravis_instance: ParaVis module instance object
       file_name: full path to the MED file
       prs_types: the list of presentation types to build
       picture_dir: the directory path for saving snapshots
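Note: hypothetical calls matching the new signatures in this hunk, with illustrative group name, file path and output directory; 'proxy' stands for the active MEDReader source:

import pvsimple as pvs
extGrp = pvs.ExtractGroup()           # applied to the active source (the MEDReader proxy)
extGrp.UpdatePipelineInformation()
prs = MeshOnGroup(proxy, extGrp, 'GRP_face_sup')

CreatePrsForFile('/tmp/sample.med', [PrsTypeEnum.MESH], '/tmp/snapshots/', 'png')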
@@ -2357,24 +2606,25 @@ def CreatePrsForFile(paravis_instance, file_name, prs_types,
     print "Import " + file_name.split(os.sep)[-1] + "..."
 
     try:
-        paravis_instance.ImportFile(file_name)
-        proxy = pv.GetActiveSource()
+        proxy = pvs.MEDReader(FileName=file_name)
         if proxy is None:
             print "FAILED"
         else:
-            proxy.UpdatePipeline()
+            #proxy.UpdatePipeline()
             print "OK"
     except:
         print "FAILED"
     else:
         # Get view
-        view = pv.GetRenderView()
+        view = pvs.GetRenderView()
+        time_value = get_time(proxy, 0)
+        view.ViewTime = time_value
+        pvs.UpdatePipeline(time=time_value, proxy=proxy)
 
         # Create required presentations for the proxy
         CreatePrsForProxy(proxy, view, prs_types,
                           picture_dir, picture_ext)
 
-
 def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext):
     """Build presentations of the given types for all fields of the proxy.
 
@@ -2389,10 +2639,9 @@ def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext):
       picture_ext: graphics files extension (determines file type)
 
     """
+    proxy.UpdatePipeline()
     # List of the field names
-    field_names = list(proxy.PointArrays.GetData())
-    nb_on_nodes = len(field_names)
-    field_names.extend(proxy.CellArrays.GetData())
+    fields_info = proxy.GetProperty("FieldsTreeInfo")[::2]
 
     # Add path separator to the end of picture path if necessery
     if not picture_dir.endswith(os.sep):
@@ -2400,77 +2649,56 @@ def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext):
 
     # Mesh Presentation
     if PrsTypeEnum.MESH in prs_types:
-        # Create Mesh presentation. Build all possible submeshes.
-
-        # Remember the current state
-        groups = list(proxy.Groups)
-
         # Iterate on meshes
-        mesh_names = get_mesh_names(proxy)
+        mesh_names = get_mesh_full_names(proxy)
         for mesh_name in mesh_names:
-            # Build mesh on nodes and cells
-            for entity in (EntityType.NODE, EntityType.CELL):
-                entity_name = EntityType.get_name(entity)
-                if if_possible(proxy, mesh_name, entity, PrsTypeEnum.MESH):
-                    print "Creating submesh on " + entity_name + " for '" + mesh_name + "' mesh... "
-                    prs = MeshOnEntity(proxy, mesh_name, entity)
-                    if prs is None:
-                        print "FAILED"
-                        continue
-                    else:
-                        print "OK"
-                    # Construct image file name
-                    pic_name = picture_dir + mesh_name + "_" + entity_name + "." + picture_ext
-
-                    # Show and dump the presentation into a graphics file
-                    process_prs_for_test(prs, view, pic_name, False)
-
-                # Build submesh on all groups of the mesh
-                mesh_groups = get_group_names(proxy, mesh_name,
-                                              entity, wo_nogroups=True)
-                for group in mesh_groups:
-                    print "Creating submesh on group " + group + "... "
-                    prs = MeshOnGroup(proxy, group)
+            # Build mesh field presentation
+            print "Creating submesh for '" + get_field_short_name(mesh_name) + "' mesh... "
+            prs = MeshOnEntity(proxy, mesh_name, None)
+            if prs is None:
+                print "FAILED"
+                continue
+            else:
+                print "OK"
+            # Construct image file name
+            pic_name = picture_dir + get_field_short_name(mesh_name) + "." + picture_ext
+
+            # Show and dump the presentation into a graphics file
+            process_prs_for_test(prs, view, pic_name, False)
+
+            # Create Mesh presentation. Build all groups.
+            extGrp = pvs.ExtractGroup()
+            extGrp.UpdatePipelineInformation()
+            if if_possible(proxy, None, None, PrsTypeEnum.MESH, extGrp):
+                for group in get_group_names(extGrp):
+                    print "Creating submesh on group " + get_group_short_name(group) + "... "
+                    prs = MeshOnGroup(proxy, extGrp, group)
                     if prs is None:
                         print "FAILED"
                         continue
                     else:
                         print "OK"
                     # Construct image file name
-                    pic_name = picture_dir + group.replace('/', '_') + "." + picture_ext
+                    pic_name = picture_dir + get_group_short_name(group) + "." + picture_ext
 
                     # Show and dump the presentation into a graphics file
                     process_prs_for_test(prs, view, pic_name, False)
 
-        # Restore the state
-        proxy.Groups = groups
-        proxy.UpdatePipeline()
-
     # Presentations on fields
-    for (i, field_name) in enumerate(field_names):
+    for field in fields_info:
+        field_name = get_field_short_name(field)
+        # Ignore mesh presentation
+        if field_name == get_field_mesh_name(field):
+            continue
+        field_entity = get_field_entity(field)
         # Select only the current field:
         # necessary for getting the right timestamps
-        cell_arrays = proxy.CellArrays.GetData()
-        point_arrays = proxy.PointArrays.GetData()
-        field_entity = None
-        if (i >= nb_on_nodes):
-            field_entity = EntityType.CELL
-            proxy.PointArrays.DeselectAll()
-            proxy.CellArrays = [field_name]
-        else:
-            field_entity = EntityType.NODE
-            proxy.CellArrays.DeselectAll()
-            proxy.PointArrays = [field_name]
+        proxy.AllArrays = [field]
+        proxy.UpdatePipeline()
 
         # Get timestamps
-        proxy.UpdatePipelineInformation()
         timestamps = proxy.TimestepValues.GetData()
 
-        # Restore fields selection state
-        proxy.CellArrays = cell_arrays
-        proxy.PointArrays = point_arrays
-        proxy.UpdatePipelineInformation()
-
         for prs_type in prs_types:
             # Ignore mesh presentation
             if prs_type == PrsTypeEnum.MESH:
@@ -2488,9 +2716,25 @@ def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext):
 
                 for timestamp_nb in xrange(1, len(timestamps) + 1):
                     time = timestamps[timestamp_nb - 1]
+                    if (time == 0.0):
+                        scalar_range = get_data_range(proxy, field_entity,
+                                                      field_name, cut_off=True)
+                        # skip timestamps whose scalar range has zero length
+                        if (scalar_range[0] == scalar_range[1]):
+                            continue
                     print "Creating " + prs_name + " on " + field_name + ", time = " + str(time) + "... "
-                    prs = create_prs(prs_type, proxy,
-                                     field_entity, field_name, timestamp_nb)
+                    try:
+                        prs = create_prs(prs_type, proxy,
+                                         field_entity, field_name, timestamp_nb)
+                    except ValueError:
+                        """ This exception comes from get_nb_components(...) function.
+                            The exception is raised because the MEDReader implementation
+                            activates the first leaf when reading a MED file (refer to the
+                            MEDFileFieldRepresentationTree::activateTheFirst() and
+                            MEDFileFieldRepresentationTree::getTheSingleActivated(...) methods).
+                        """
+                        print "ValueError exception caught"
+                        continue
                     if prs is None:
                         print "FAILED"
                         continue
@@ -2502,3 +2746,12 @@ def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext):
 
                     # Show and dump the presentation into a graphics file
                     process_prs_for_test(prs, view, pic_name)
+    return
+
+
+def delete_pv_object(obj):
+    # There is a bug when repeating CreateRenderView/Delete calls.
+    # Here is a workaround proposed by KW (#10744).
+    import gc
+    del obj
+    gc.collect()
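Note: a hypothetical teardown sequence using the helper above. Because 'del obj' inside the function only removes the function-local binding, the caller still has to drop its own reference for the render view proxy to actually be reclaimed:

import gc
import pvsimple as pvs

view = pvs.CreateRenderView()
# ... build and render presentations in the view ...
pvs.Delete(view)          # unregister the proxy on the ParaView side
delete_pv_object(view)    # workaround: force a garbage collection pass
del view                  # drop the caller's reference as well
gc.collect()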