-# Copyright (C) 2010-2012 CEA/DEN, EDF R&D
+# Copyright (C) 2010-2016 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
-# version 2.1 of the License.
+# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
from math import sqrt, sin, cos, radians
from string import upper
-import pvsimple as pv
+# Do not use pv as a short name.
+# It is a name of function from numpy and may be redefined implicitly by 'from numpy import *' call.
+# import pvsimple as pv
+import pvsimple as pvs
#try:
# # TODO(MZN): to be removed (issue with Point Sprite texture)
# #import paravisSM as sm
#except:
-# import paraview.simple as pv
+# import paraview.simple as pvs
# import paraview.servermanager as sm
# Globals
_current_bar = None
+_med_field_sep = '@@][@@'
# Enumerations
NODE = 0
CELL = 1
- _type2name = {NODE: 'OnPoint',
- CELL: 'OnCell'}
+ _type2name = {NODE: 'P1',
+ CELL: 'P0'}
- _name2type = {'OnPoint': NODE,
- 'OnCell': CELL}
+ _name2type = {'P1': NODE,
+ 'P0': CELL}
_type2pvtype = {NODE: 'POINT_DATA',
CELL: 'CELL_DATA'}
# Auxiliary functions
+
def get_field_mesh_name(full_field_name):
    """Return the mesh name of the field extracted from its full name.

    The full name is a '/'-separated path whose second component is the
    mesh name. Returns None when the name has fewer than two components.
    """
    parts = full_field_name.split('/')
    if len(parts) >= 2:
        # Second path component holds the mesh name.
        return parts[1]
    return None
+
+
def get_field_entity(full_field_name):
    """Return the entity type of the field by its full name.

    The discretization name ('P0', 'P1', ...) follows the MED separator
    at the end of the full name. Returns None when the separator is
    absent (or occurs more than once).
    """
    parts = full_field_name.split(_med_field_sep)
    if len(parts) == 2:
        # Map the discretization suffix to the EntityType enumeration.
        return EntityType.get_type(parts[-1])
    return None
+
+
def get_field_short_name(full_field_name):
    """Return the short name of the field by its full name.

    Expects a four-component '/'-separated path; the last component is
    the field name with its discretization appended after the MED
    separator. Returns None for any other component count.
    """
    parts = full_field_name.split('/')
    if len(parts) == 4:
        name_with_type = parts[-1]
        # Drop the '@@][@@<discretization>' suffix, keep the bare name.
        return name_with_type.split(_med_field_sep)[0]
    return None
+
+
def find_mesh_full_name(proxy, short_mesh_name):
    """Return the full mesh path matching *short_mesh_name*, or None."""
    proxy.UpdatePipeline()
    # Lazily scan all known meshes for one whose short name matches.
    matches = (full_name for full_name in get_mesh_full_names(proxy)
               if get_field_short_name(full_name) == short_mesh_name)
    return next(matches, None)
+
+
def process_prs_for_test(prs, view, picture_name, show_bar=True):
"""Show presentation and record snapshot image.
display_only(prs, view)
# Show scalar bar
+ global _current_bar
if show_bar and _current_bar:
_current_bar.Visibility = 1
os.makedirs(pic_dir)
# Save picture
- pv.WriteImage(file_name, view=view, Magnification=1)
+ print "Write image:", file_name
+ pvs.WriteImage(file_name, view=view, Magnification=1)
def reset_view(view=None):
"""
if not view:
- view = pv.GetRenderView()
+ view = pvs.GetRenderView()
# Camera preferences
view.CameraFocalPoint = [0.0, 0.0, 0.0]
view.CameraParallelProjection = 1
view.ResetCamera()
- pv.Render(view=view)
+ pvs.Render(view=view)
def hide_all(view, to_remove=False):
    """Hide all representations in the view.

    Arguments:
      view: the render view (the current one is taken if None)
      to_remove: if True, also detach every representation from the view

    """
    if not view:
        view = pvs.GetRenderView()
    # Iterate over a snapshot: the original iterated view.Representations
    # itself while removing from it, which skips every other element.
    for rep in list(view.Representations):
        rep.Visibility = 0
        if to_remove:
            view.Representations.remove(rep)
    pvs.Render(view=view)
def display_only(prs, view=None):
    """Make *prs* the only visible representation in the view."""
    if not view:
        view = pvs.GetRenderView()

    # Show prs, hide everything else that supports visibility.
    for representation in view.Representations:
        if hasattr(representation, 'Visibility'):
            representation.Visibility = (representation == prs)
    pvs.Render(view=view)
def set_visible_lines(xy_prs, lines):
"""Set visible only the given lines for XYChartRepresentation."""
- sv = xy_prs.GetProperty("SeriesVisibilityInfo").GetData()
+ sv = xy_prs.GetProperty("SeriesVisibility").GetData()
visible = '0'
for i in xrange(0, len(sv)):
Data range as [min, max]
"""
+ proxy.UpdatePipeline()
entity_data_info = None
field_data = proxy.GetFieldDataInformation()
def get_bounds(proxy):
    """Return the 3D bounding box of the proxy.

    The result is laid out as (xmin, xmax, ymin, ymax, zmin, zmax).
    """
    proxy.UpdatePipeline()
    return proxy.GetDataInformation().GetBounds()
def get_x_range(proxy):
    """Return the X extent (xmin, xmax) of the proxy bounds in 3D."""
    proxy.UpdatePipeline()
    # First two entries of the 6-tuple bounds are the X extent.
    return get_bounds(proxy)[:2]
def get_y_range(proxy):
    """Return the Y extent (ymin, ymax) of the proxy bounds in 3D."""
    proxy.UpdatePipeline()
    # Middle two entries of the 6-tuple bounds are the Y extent.
    return get_bounds(proxy)[2:4]
def get_z_range(proxy):
    """Return the Z extent (zmin, zmax) of the proxy bounds in 3D."""
    proxy.UpdatePipeline()
    # Last two entries of the 6-tuple bounds are the Z extent.
    return get_bounds(proxy)[4:6]
def is_planar_input(proxy):
"""Check if the given input is planar."""
+ proxy.UpdatePipeline()
bounds_info = get_bounds(proxy)
if (abs(bounds_info[0] - bounds_info[1]) <= FLT_MIN or
def is_data_on_cells(proxy, field_name):
    """Return True if *proxy* carries cell data named *field_name*."""
    proxy.UpdatePipeline()
    available = proxy.GetCellDataInformation().keys()
    return field_name in available
False: otherwise
"""
+ proxy.UpdatePipeline()
data_info = proxy.GetDataInformation()
nb_cells = data_info.GetNumberOfCells()
def get_orientation(proxy):
"""Get the optimum cutting plane orientation for Plot 3D."""
+ proxy.UpdatePipeline()
orientation = Orientation.XY
bounds = get_bounds(proxy)
def get_nb_components(proxy, entity, field_name):
"""Return number of components for the field."""
+ proxy.UpdatePipeline()
entity_data_info = None
field_data = proxy.GetFieldDataInformation()
if field_name in field_data.keys():
entity_data_info = field_data
elif entity == EntityType.CELL:
+ select_cells_with_data(proxy, on_cells=[field_name])
entity_data_info = proxy.GetCellDataInformation()
elif entity == EntityType.NODE:
+ select_cells_with_data(proxy, on_points=[field_name])
entity_data_info = proxy.GetPointDataInformation()
nb_comp = None
def get_default_scale(prs_type, proxy, entity, field_name):
"""Get default scale factor."""
+ proxy.UpdatePipeline()
data_range = get_data_range(proxy, entity, field_name)
if prs_type == PrsTypeEnum.DEFORMEDSHAPE:
the calculator object.
"""
+ proxy.UpdatePipeline()
calculator = None
# Transform vector array to scalar array if possible
nb_components = get_nb_components(proxy, array_entity, array_name)
if (nb_components > 1):
- calculator = pv.Calculator(proxy)
- attribute_mode = "point_data"
+ calculator = pvs.Calculator(proxy)
+ attribute_mode = "Point Data"
if array_entity != EntityType.NODE:
- attribute_mode = "cell_data"
+ attribute_mode = "Cell Data"
calculator.AttributeMode = attribute_mode
if (nb_components == 2):
# Workaroud: calculator unable to compute magnitude
the calculator object.
"""
+ proxy.UpdatePipeline()
calculator = None
nb_components = get_nb_components(proxy, array_entity, array_name)
if nb_components == 2:
- calculator = pv.Calculator(proxy)
+ calculator = pvs.Calculator(proxy)
attribute_mode = "Point Data"
if array_entity != EntityType.NODE:
attribute_mode = "Cell Data"
Used in creation of mesh/submesh presentation.
"""
- ### Old API all_cell_types = proxy.CellTypes.Available
- all_cell_types = proxy.Entity.Available
- ### Old API proxy.CellTypes = all_cell_types
- proxy.Entity = all_cell_types
proxy.UpdatePipeline()
+ extractCT = pvs.ExtractCellType()
+ extractCT.AllGeoTypes = extractCT.GetProperty("GeoTypesInfo")[::2]
+ extractCT.UpdatePipelineInformation()
-def select_cells_with_data(proxy, on_points=None, on_cells=None):
+def select_cells_with_data(proxy, on_points=[], on_cells=[], on_gauss=[]):
"""Select cell types with data.
Only cell types with data for the given fields will be selected.
types with data for even one field (from available) will be selected.
"""
+ if not proxy.GetProperty("FieldsTreeInfo"):
+ return
+
+ proxy.UpdatePipeline()
+ if not hasattr(proxy, 'Entity'):
+ fields_info = proxy.GetProperty("FieldsTreeInfo")[::2]
+ arr_name_with_dis=[elt.split("/")[-1] for elt in fields_info]
+
+ fields = []
+ for name in on_gauss:
+ fields.append(name+_med_field_sep+'GAUSS')
+ for name in on_cells:
+ fields.append(name+_med_field_sep+'P0')
+ for name in on_points:
+ fields.append(name+_med_field_sep+'P1')
+
+ field_list = []
+ for name in fields:
+ if arr_name_with_dis.count(name) > 0:
+ index = arr_name_with_dis.index(name)
+ field_list.append(fields_info[index])
+
+ if field_list:
+ proxy.AllArrays = field_list
+ proxy.UpdatePipeline()
+ return len(field_list) != 0
+
+ # TODO: VTN. Looks like this code is out of date.
+
#all_cell_types = proxy.CellTypes.Available
all_cell_types = proxy.Entity.Available
all_arrays = list(proxy.CellArrays.GetData())
proxy.Entity = cell_types_on
proxy.UpdatePipeline()
-
-def extract_groups_for_field(proxy, field_name, field_entity, force=False):
- """Exctract only groups which have the field.
-
- Arguments:
- proxy: the pipeline object, containig data
- field_name: the field name
- field_entity: the field entity
- force: if True - ExtractGroup object will be created in any case
-
- Returns:
- ExtractGroup object: if not all groups have the field or
- the force argument is true
- The initial proxy: if no groups had been filtered.
-
- """
- source = proxy
-
- # Remember the state
- initial_groups = list(proxy.Groups)
-
- # Get data information for the field entity
- entity_data_info = None
- field_data = proxy.GetFieldDataInformation()
-
- if field_name in field_data.keys():
- entity_data_info = field_data
- elif field_entity == EntityType.CELL:
- entity_data_info = proxy.GetCellDataInformation()
- elif field_entity == EntityType.NODE:
- entity_data_info = proxy.GetPointDataInformation()
-
- # Collect groups for extraction
- groups_to_extract = []
-
- for group in initial_groups:
- proxy.Groups = [group]
- proxy.UpdatePipeline()
- if field_name in entity_data_info.keys():
- groups_to_extract.append(group)
-
- # Restore state
- proxy.Groups = initial_groups
- proxy.UpdatePipeline()
-
- # Extract groups if necessary
- if force or (len(groups_to_extract) < len(initial_groups)):
- extract_group = pv.ExtractGroup(proxy)
- extract_group.Groups = groups_to_extract
- extract_group.UpdatePipeline()
- source = extract_group
-
- return source
-
-
-def if_possible(proxy, field_name, entity, prs_type):
+def if_possible(proxy, field_name, entity, prs_type, extrGrps=None):
"""Check if the presentation creation is possible on the given field."""
+ proxy.UpdatePipeline()
result = True
if (prs_type == PrsTypeEnum.DEFORMEDSHAPE or
prs_type == PrsTypeEnum.DEFORMEDSHAPESCALARMAP or
result = (entity == EntityType.CELL or
field_name in proxy.QuadraturePointArrays.Available)
elif (prs_type == PrsTypeEnum.MESH):
- result = len(get_group_names(proxy, field_name, entity)) > 0
+ result = len(get_group_names(extrGrps)) > 0
return result
title = "\n".join([title, vector_mode])
# Create scalar bar
- scalar_bar = pv.CreateScalarBar(Enabled=1)
+ scalar_bar = pvs.CreateScalarBar(Enabled=1)
scalar_bar.Orientation = 'Vertical'
scalar_bar.Title = title
scalar_bar.LookupTable = lookup_table
scalar_bar.LabelShadow = 1
# Add the scalar bar to the view
- pv.GetRenderView().Representations.append(scalar_bar)
+ pvs.GetRenderView().Representations.append(scalar_bar)
# Reassign the current bar
_current_bar = scalar_bar
- return scalar_bar
+ return _current_bar
def get_bar():
    """Return the scalar bar currently registered (may be None).

    Reading a module-level name needs no 'global' declaration.
    """
    return _current_bar
def get_lookup_table(field_name, nb_components, vector_mode='Magnitude'):
"""Get lookup table for the given field."""
- lookup_table = pv.GetLookupTableForArray(field_name, nb_components)
+ lookup_table = pvs.GetLookupTableForArray(field_name, nb_components)
if vector_mode == 'Magnitude':
lookup_table.VectorMode = vector_mode
lookup_table.Discretize = 0
lookup_table.ColorSpace = 'HSV'
- lookup_table.LockScalarRange = 0
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 0
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 0
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
return lookup_table
group_name = full_group_name.split('/')[1]
return group_name
-
def get_group_entity(full_group_name):
"""Return entity type of the group by its full name."""
aList = full_group_name.split('/')
def get_group_short_name(full_group_name):
    """Return the short name of the group by its full name.

    Group entries are prefixed with 'GRP_'; only a leading prefix is
    stripped, any interior occurrence is kept.
    """
    # Plain string handling instead of re.sub('^GRP_', ...): no regex
    # engine needed for an anchored literal prefix.
    if full_group_name.startswith('GRP_'):
        return full_group_name[len('GRP_'):]
    return full_group_name
def get_mesh_full_names(proxy):
    """Return all mesh names in the given proxy as a set."""
    proxy.UpdatePipeline()
    # 'FieldsTreeInfo' interleaves names and auxiliary data: take every
    # second entry. Mesh entries are those whose short name equals the
    # mesh name component of the path.
    tree_leaves = proxy.GetProperty("FieldsTreeInfo")[::2]
    return set(leaf for leaf in tree_leaves
               if get_field_mesh_name(leaf) == get_field_short_name(leaf))
def get_group_names(extrGrps):
    """Return full names of all groups of the given 'ExtractGroup' filter object.

    'GroupsFlagsInfo' interleaves names and flags, hence the [::2]
    stride; group entries are recognized by their 'GRP_' prefix.
    """
    names = extrGrps.GetProperty("GroupsFlagsInfo")[::2]
    # List comprehension instead of filter(): keeps the return type a
    # list on both Python 2 and Python 3 (filter() yields an iterator
    # on Python 3).
    return [name for name in names if name[:4] == "GRP_"]
def get_time(proxy, timestamp_nb):
    """Get time value by timestamp number.

    Positive numbers are 1-based indices into the timestamp list;
    negative numbers index from the end (Python semantics). A value of
    0 selects the first timestamp. Returns 0.0 when no timestamps are
    available.

    Raises:
      ValueError: if timestamp_nb is out of range.
    """
    # Timestamps may live on the proxy itself or on its input filter.
    timestamps = []
    if hasattr(proxy, 'TimestepValues'):
        timestamps = proxy.TimestepValues.GetData()
    elif hasattr(proxy.Input, 'TimestepValues'):
        timestamps = proxy.Input.TimestepValues.GetData()

    # Bound check with plain arithmetic; the original
    # `(timestamp_nb - 1) not in xrange(length)` is Python-2-only and
    # performs a linear membership scan for a simple comparison.
    length = len(timestamps)
    out_of_range = ((timestamp_nb > 0 and timestamp_nb > length) or
                    (timestamp_nb < 0 and -timestamp_nb > length))
    if out_of_range:
        raise ValueError("Timestamp number is out of range: " + str(timestamp_nb))

    if not timestamps:
        return 0.0

    # Return time value
    if timestamp_nb > 0:
        return timestamps[timestamp_nb - 1]
    return timestamps[timestamp_nb]
def create_prs(prs_type, proxy, field_entity, field_name, timestamp_nb):
"""Auxiliary function.
Set the presentation properties like visu.CreatePrsForResult() do.
"""
+ proxy.UpdatePipeline()
prs = None
if prs_type == PrsTypeEnum.SCALARMAP:
Scalar Map as representation object.
"""
+ proxy.UpdatePipeline()
# We don't need mesh parts with no data on them
if entity == EntityType.NODE:
select_cells_with_data(proxy, on_points=[field_name])
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- pv.UpdatePipeline(time_value, proxy)
-
- # Extract only groups with data for the field
- new_proxy = extract_groups_for_field(proxy, field_name, entity,
- force=True)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
# Get Scalar Map representation object
- scalarmap = pv.GetRepresentation(new_proxy)
+ scalarmap = pvs.GetRepresentation(proxy)
# Get lookup table
lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
# Set field range if necessary
data_range = get_data_range(proxy, entity,
field_name, vector_mode)
- lookup_table.LockScalarRange = 1
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set properties
- scalarmap.ColorAttributeType = EntityType.get_pvtype(entity)
- scalarmap.ColorArrayName = field_name
+ pvs.ColorBy(scalarmap, (EntityType.get_pvtype(entity), field_name))
scalarmap.LookupTable = lookup_table
# Add scalar bar
Cut Planes as representation object.
"""
+ proxy.UpdatePipeline()
+ if entity == EntityType.NODE:
+ select_cells_with_data(proxy, on_points=[field_name])
+ else:
+ select_cells_with_data(proxy, on_cells=[field_name])
+
# Check vector mode
nb_components = get_nb_components(proxy, entity, field_name)
check_vector_mode(vector_mode, nb_components)
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- pv.UpdatePipeline(time_value, proxy)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
# Create slice filter
- slice_filter = pv.Slice(proxy)
+ slice_filter = pvs.Slice(proxy)
slice_filter.SliceType = "Plane"
# Set cut planes normal
slice_filter.SliceOffsetValues = positions
# Get Cut Planes representation object
- cut_planes = pv.GetRepresentation(slice_filter)
+ cut_planes = pvs.GetRepresentation(slice_filter)
# Get lookup table
lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
# Set field range if necessary
data_range = get_data_range(proxy, entity,
field_name, vector_mode)
- lookup_table.LockScalarRange = 1
+
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set properties
- cut_planes.ColorAttributeType = EntityType.get_pvtype(entity)
- cut_planes.ColorArrayName = field_name
+ pvs.ColorBy(cut_planes, (EntityType.get_pvtype(entity), field_name))
cut_planes.LookupTable = lookup_table
# Add scalar bar
(Cut Lines as representation object, list of 'PlotOverLine') otherwise
"""
+ proxy.UpdatePipeline()
+ if entity == EntityType.NODE:
+ select_cells_with_data(proxy, on_points=[field_name])
+ else:
+ select_cells_with_data(proxy, on_cells=[field_name])
+
# Check vector mode
nb_components = get_nb_components(proxy, entity, field_name)
check_vector_mode(vector_mode, nb_components)
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- pv.UpdatePipeline(time_value, proxy)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
# Create base plane
- base_plane = pv.Slice(proxy)
+ base_plane = pvs.Slice(proxy)
base_plane.SliceType = "Plane"
# Set base plane normal
base_plane = proxy
# Create cutting planes
- cut_planes = pv.Slice(base_plane)
+ cut_planes = pvs.Slice(base_plane)
cut_planes.SliceType = "Plane"
# Set cutting planes normal and get positions
point2 = [bounds[1], bounds[3], bounds[5]]
# Create plot over line filter
- pol = pv.PlotOverLine(cut_planes,
+ pol = pvs.PlotOverLine(cut_planes,
Source="High Resolution Line Source")
- pv.RenameSource('Y' + str(index), pol)
+ pvs.RenameSource('Y' + str(index), pol)
pol.Source.Point1 = point1
pol.Source.Point2 = point2
pol.UpdatePipeline()
cut_planes.UpdatePipeline()
# Get Cut Lines representation object
- cut_lines = pv.GetRepresentation(cut_planes)
+ cut_lines = pvs.GetRepresentation(cut_planes)
# Get lookup table
lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
# Set field range if necessary
data_range = get_data_range(proxy, entity,
field_name, vector_mode)
- lookup_table.LockScalarRange = 1
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set properties
- cut_lines.ColorAttributeType = EntityType.get_pvtype(entity)
- cut_lines.ColorArrayName = field_name
+ pvs.ColorBy(cut_lines, (EntityType.get_pvtype(entity), field_name))
cut_lines.LookupTable = lookup_table
# Set wireframe represenatation mode
Cut Segment as 3D representation object.
"""
+ proxy.UpdatePipeline()
+ if entity == EntityType.NODE:
+ select_cells_with_data(proxy, on_points=[field_name])
+ else:
+ select_cells_with_data(proxy, on_cells=[field_name])
+
# Check vector mode
nb_components = get_nb_components(proxy, entity, field_name)
check_vector_mode(vector_mode, nb_components)
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- pv.UpdatePipeline(time_value, proxy)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
# Create plot over line filter
- pol = pv.PlotOverLine(proxy, Source="High Resolution Line Source")
+ pol = pvs.PlotOverLine(proxy, Source="High Resolution Line Source")
pol.Source.Point1 = point1
pol.Source.Point2 = point2
pol.UpdatePipeline()
# Get Cut Segment representation object
- cut_segment = pv.GetRepresentation(pol)
+ cut_segment = pvs.GetRepresentation(pol)
# Get lookup table
lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
# Set field range if necessary
data_range = get_data_range(proxy, entity,
field_name, vector_mode)
- lookup_table.LockScalarRange = 1
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set properties
- cut_segment.ColorAttributeType = EntityType.get_pvtype(entity)
- cut_segment.ColorArrayName = field_name
+ pvs.ColorBy(cut_segment, (EntityType.get_pvtype(entity), field_name))
cut_segment.LookupTable = lookup_table
# Set wireframe represenatation mode
Vectors as representation object.
"""
+ proxy.UpdatePipeline()
+ if entity == EntityType.NODE:
+ select_cells_with_data(proxy, on_points=[field_name])
+ else:
+ select_cells_with_data(proxy, on_cells=[field_name])
+
# Check vector mode
nb_components = get_nb_components(proxy, entity, field_name)
check_vector_mode(vector_mode, nb_components)
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- pv.UpdatePipeline(time_value, proxy)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
# Extract only groups with data for the field
- new_proxy = extract_groups_for_field(proxy, field_name, entity)
- source = new_proxy
+ source = proxy
# Cell centers
if is_data_on_cells(proxy, field_name):
- cell_centers = pv.CellCenters(source)
+ cell_centers = pvs.CellCenters(source)
cell_centers.VertexCells = 1
source = cell_centers
source = calc
# Glyph
- glyph = pv.Glyph(source)
+ glyph = pvs.Glyph(source)
glyph.Vectors = vector_array
glyph.ScaleMode = 'vector'
- glyph.MaskPoints = 0
+ #glyph.MaskPoints = 0
# Set glyph type
glyph.GlyphType = glyph_type
glyph.GlyphType.Center = [0.0, 0.0, 0.0]
if scale_factor is not None:
- glyph.SetScaleFactor = scale_factor
+ glyph.ScaleFactor = scale_factor
else:
def_scale = get_default_scale(PrsTypeEnum.DEFORMEDSHAPE,
- new_proxy, entity, field_name)
- glyph.SetScaleFactor = def_scale
+ proxy, entity, field_name)
+ glyph.ScaleFactor = def_scale
glyph.UpdatePipeline()
# Get Vectors representation object
- vectors = pv.GetRepresentation(glyph)
+ vectors = pvs.GetRepresentation(glyph)
# Get lookup table
lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
# Set field range if necessary
data_range = get_data_range(proxy, entity,
field_name, vector_mode)
- lookup_table.LockScalarRange = 1
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set properties
if (is_colored):
- vectors.ColorArrayName = 'GlyphVector'
+ pvs.ColorBy(vectors, (EntityType.get_pvtype(entity), 'GlyphVector'))
else:
- vectors.ColorArrayName = ''
+ pvs.ColorBy(vectors, (EntityType.get_pvtype(entity), None))
vectors.LookupTable = lookup_table
vectors.LineWidth = 1.0
Defromed Shape as representation object.
"""
+ proxy.UpdatePipeline()
# We don't need mesh parts with no data on them
if entity == EntityType.NODE:
select_cells_with_data(proxy, on_points=[field_name])
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- pv.UpdatePipeline(time_value, proxy)
-
- # Extract only groups with data for the field
- new_proxy = extract_groups_for_field(proxy, field_name, entity)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
# Do merge
- source = pv.MergeBlocks(new_proxy)
+ source = pvs.MergeBlocks(proxy)
+ pvs.UpdatePipeline()
# Cell data to point data
if is_data_on_cells(proxy, field_name):
- cell_to_point = pv.CellDatatoPointData()
+ cell_to_point = pvs.CellDatatoPointData()
cell_to_point.PassCellData = 1
source = cell_to_point
source = calc
# Warp by vector
- warp_vector = pv.WarpByVector(source)
+ warp_vector = pvs.WarpByVector(source)
warp_vector.Vectors = [vector_array]
if scale_factor is not None:
warp_vector.ScaleFactor = scale_factor
warp_vector.ScaleFactor = def_scale
# Get Deformed Shape representation object
- defshape = pv.GetRepresentation(warp_vector)
+ defshape = pvs.GetRepresentation(warp_vector)
# Get lookup table
lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
# Set field range if necessary
data_range = get_data_range(proxy, entity,
field_name, vector_mode)
- lookup_table.LockScalarRange = 1
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set properties
if is_colored:
- defshape.ColorAttributeType = EntityType.get_pvtype(entity)
- defshape.ColorArrayName = field_name
+ pvs.ColorBy(defshape, (EntityType.get_pvtype(entity), field_name))
else:
- defshape.ColorArrayName = ''
+ pvs.ColorBy(defshape, (EntityType.get_pvtype(entity), None))
defshape.LookupTable = lookup_table
# Set wireframe represenatation mode
Defromed Shape And Scalar Map as representation object.
"""
+ proxy.UpdatePipeline()
# We don't need mesh parts with no data on them
on_points = []
on_cells = []
else:
on_cells.append(scalar_field_name)
+ nb_components = get_nb_components(proxy, entity, field_name)
+
+ # Select fields
select_cells_with_data(proxy, on_points, on_cells)
# Check vector mode
- nb_components = get_nb_components(proxy, entity, field_name)
check_vector_mode(vector_mode, nb_components)
# Get time value
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- pv.UpdatePipeline(time_value, proxy)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
# Set scalar field by default
scalar_field_entity = scalar_entity
scalar_field_entity = entity
scalar_field = field_name
- # Extract only groups with data for the field
- new_proxy = extract_groups_for_field(proxy, field_name, entity)
-
# Do merge
- source = pv.MergeBlocks(new_proxy)
+ source = pvs.MergeBlocks(proxy)
+ pvs.UpdatePipeline()
# Cell data to point data
if is_data_on_cells(proxy, field_name):
- cell_to_point = pv.CellDatatoPointData(source)
+ cell_to_point = pvs.CellDatatoPointData(source)
cell_to_point.PassCellData = 1
source = cell_to_point
source = calc
# Warp by vector
- warp_vector = pv.WarpByVector(source)
+ warp_vector = pvs.WarpByVector(source)
warp_vector.Vectors = [vector_array]
if scale_factor is not None:
warp_vector.ScaleFactor = scale_factor
else:
def_scale = get_default_scale(PrsTypeEnum.DEFORMEDSHAPE,
- new_proxy, entity, field_name)
+ proxy, entity, field_name)
warp_vector.ScaleFactor = def_scale
# Get Defromed Shape And Scalar Map representation object
- defshapemap = pv.GetRepresentation(warp_vector)
+ defshapemap = pvs.GetRepresentation(warp_vector)
# Get lookup table
lookup_table = get_lookup_table(scalar_field, nb_components, vector_mode)
# Set field range if necessary
data_range = get_data_range(proxy, scalar_field_entity,
scalar_field, vector_mode)
- lookup_table.LockScalarRange = 1
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set properties
- defshapemap.ColorArrayName = scalar_field
+ pvs.ColorBy(defshapemap, (EntityType.get_pvtype(scalar_field_entity), scalar_field))
defshapemap.LookupTable = lookup_table
- defshapemap.ColorAttributeType = EntityType.get_pvtype(scalar_field_entity)
# Add scalar bar
add_scalar_bar(field_name, nb_components,
Plot 3D as representation object.
"""
+ proxy.UpdatePipeline()
# We don't need mesh parts with no data on them
if entity == EntityType.NODE:
select_cells_with_data(proxy, on_points=[field_name])
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- pv.UpdatePipeline(time_value, proxy)
-
- # Extract only groups with data for the field
- new_proxy = extract_groups_for_field(proxy, field_name, entity)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
# Do merge
- merge_blocks = pv.MergeBlocks(new_proxy)
+ merge_blocks = pvs.MergeBlocks(proxy)
merge_blocks.UpdatePipeline()
poly_data = None
radians(angle1), radians(angle2))
# Create slice filter
- slice_filter = pv.Slice(merge_blocks)
+ slice_filter = pvs.Slice(merge_blocks)
slice_filter.SliceType = "Plane"
# Set cutting plane normal
use_normal = 0
# Geometry filter
if not poly_data or poly_data.GetDataInformation().GetNumberOfCells() == 0:
- geometry_filter = pv.GeometryFilter(merge_blocks)
+ geometry_filter = pvs.GeometryFilter(merge_blocks)
poly_data = geometry_filter
use_normal = 1 # TODO(MZN): workaround
if is_data_on_cells(poly_data, field_name):
# Cell data to point data
- cell_to_point = pv.CellDatatoPointData(poly_data)
+ cell_to_point = pvs.CellDatatoPointData(poly_data)
cell_to_point.PassCellData = 1
source = cell_to_point
source = calc
# Warp by scalar
- warp_scalar = pv.WarpByScalar(source)
+ warp_scalar = pvs.WarpByScalar(source)
warp_scalar.Scalars = scalars
warp_scalar.Normal = normal
warp_scalar.UseNormal = use_normal
if (is_contour):
# Contours
- contour = pv.Contour(warp_scalar)
+ contour = pvs.Contour(warp_scalar)
contour.PointMergeMethod = "Uniform Binning"
contour.ContourBy = ['POINTS', field_name]
scalar_range = get_data_range(proxy, entity,
source = contour
# Get Plot 3D representation object
- plot3d = pv.GetRepresentation(source)
+ plot3d = pvs.GetRepresentation(source)
# Get lookup table
lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
# Set field range if necessary
data_range = get_data_range(proxy, entity,
field_name, vector_mode)
- lookup_table.LockScalarRange = 1
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set properties
- plot3d.ColorAttributeType = EntityType.get_pvtype(entity)
- plot3d.ColorArrayName = field_name
+ pvs.ColorBy(plot3d, (EntityType.get_pvtype(entity), field_name))
plot3d.LookupTable = lookup_table
# Add scalar bar
Iso Surfaces as representation object.
"""
+ proxy.UpdatePipeline()
# We don't need mesh parts with no data on them
if entity == EntityType.NODE:
select_cells_with_data(proxy, on_points=[field_name])
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- pv.UpdatePipeline(time_value, proxy)
-
- # Extract only groups with data for the field
- new_proxy = extract_groups_for_field(proxy, field_name, entity)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
# Do merge
- source = pv.MergeBlocks(new_proxy)
+ source = pvs.MergeBlocks(proxy)
+ pvs.UpdatePipeline()
# Transform cell data into point data if necessary
if is_data_on_cells(proxy, field_name):
- cell_to_point = pv.CellDatatoPointData(source)
+ cell_to_point = pvs.CellDatatoPointData(source)
cell_to_point.PassCellData = 1
source = cell_to_point
source = calc
# Contour filter settings
- contour = pv.Contour(source)
+ contour = pvs.Contour(source)
contour.ComputeScalars = 1
contour.ContourBy = contour_by
if (scalar_range is None):
scalar_range = get_data_range(proxy, entity,
field_name, cut_off=True)
-
+
# Get contour values for the range
surfaces = get_contours(scalar_range, nb_surfaces)
contour.Isosurfaces = surfaces
# Get Iso Surfaces representation object
- isosurfaces = pv.GetRepresentation(contour)
+ isosurfaces = pvs.GetRepresentation(contour)
# Get lookup table
lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
# Set field range if necessary
data_range = get_data_range(proxy, entity,
field_name, vector_mode)
- lookup_table.LockScalarRange = 1
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set display properties
if (is_colored):
- isosurfaces.ColorAttributeType = EntityType.get_pvtype(entity)
- isosurfaces.ColorArrayName = field_name
+ pvs.ColorBy(isosurfaces, (EntityType.get_pvtype(entity), field_name))
else:
- isosurfaces.ColorArrayName = ''
+ pvs.ColorBy(isosurfaces, (EntityType.get_pvtype(entity), None))
if color:
isosurfaces.DiffuseColor = color
isosurfaces.LookupTable = lookup_table
Gauss Points as representation object.
"""
+ proxy.UpdatePipeline()
# We don't need mesh parts with no data on them
- if entity == EntityType.NODE:
- select_cells_with_data(proxy, on_points=[field_name])
- else:
- select_cells_with_data(proxy, on_cells=[field_name])
+ on_gauss = select_cells_with_data(proxy, on_gauss=[field_name])
+ if not on_gauss:
+ if entity == EntityType.NODE:
+ select_cells_with_data(proxy, on_points=[field_name])
+ else:
+ select_cells_with_data(proxy, on_cells=[field_name])
# Check vector mode
nb_components = get_nb_components(proxy, entity, field_name)
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- proxy.UpdatePipeline(time=time_value)
-
- # Extract only groups with data for the field
- source = extract_groups_for_field(proxy, field_name, entity)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
- # Quadrature point arrays
- qp_arrays = proxy.QuadraturePointArrays.Available
+ source = proxy
# If no quadrature point array is passed, use cell centers
- if field_name in qp_arrays:
- generate_qp = pv.GenerateQuadraturePoints(source)
- generate_qp.SelectSourceArray = ['CELLS', 'ELGA_Offset']
+ if on_gauss:
+ generate_qp = pvs.GenerateQuadraturePoints(source)
+ generate_qp.QuadratureSchemeDef = ['CELLS', 'ELGA@0']
source = generate_qp
else:
# Cell centers
- cell_centers = pv.CellCenters(source)
+ cell_centers = pvs.CellCenters(source)
cell_centers.VertexCells = 1
source = cell_centers
vector_array = field_name
# If the given vector array has only 2 components, add the third one
if nb_components == 2:
- calc = get_add_component_calc(source,
- EntityType.NODE, field_name)
+ calc = get_add_component_calc(source, EntityType.NODE, field_name)
vector_array = calc.ResultArrayName
source = calc
# Warp by vector
- warp_vector = pv.WarpByVector(source)
+ warp_vector = pvs.WarpByVector(source)
warp_vector.Vectors = [vector_array]
if scale_factor is not None:
warp_vector.ScaleFactor = scale_factor
source = warp_vector
# Get Gauss Points representation object
- gausspnt = pv.GetRepresentation(source)
+ gausspnt = pvs.GetRepresentation(source)
# Get lookup table
lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
# Set field range if necessary
data_range = get_data_range(proxy, entity,
field_name, vector_mode)
- lookup_table.LockScalarRange = 1
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set display properties
if is_colored:
- gausspnt.ColorAttributeType = EntityType.get_pvtype(entity)
- gausspnt.ColorArrayName = field_name
+ pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), field_name))
else:
- gausspnt.ColorArrayName = ''
+ pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), None))
if color:
gausspnt.DiffuseColor = color
if is_proportional:
mult = multiplier
- if mult is None:
+ if mult is None and data_range[1] != 0:
mult = abs(0.1 / data_range[1])
gausspnt.RadiusScalarRange = data_range
gausspnt.RadiusTransferFunctionMode = 'Table'
gausspnt.RadiusScalarRange = data_range
gausspnt.RadiusUseScalarRange = 1
- gausspnt.RadiusIsProportional = 1
- gausspnt.RadiusProportionalFactor = mult
+ if mult is not None:
+ gausspnt.RadiusIsProportional = 1
+ gausspnt.RadiusProportionalFactor = mult
else:
gausspnt.RadiusTransferFunctionEnabled = 0
gausspnt.RadiusMode = 'Constant'
return gausspnt
+def GaussPointsOnField1(proxy, entity, field_name,
+ timestamp_nb,
+ is_colored=True, color=None,
+ primitive=GaussType.SPHERE,
+ is_proportional=True,
+ max_pixel_size=256,
+ multiplier=None,
+ vector_mode='Magnitude'):
+ """Creates Gauss Points on the given field. Use GaussPoints() Paraview interface.
+
+ Arguments:
+        proxy: the pipeline object, containing data
+ entity: the field entity type from PrsTypeEnum
+ field_name: the field name
+ timestamp_nb: the number of time step (1, 2, ...)
+ is_colored -- defines whether the Gauss Points will be multicolored,
+ using the corresponding data values
+ color: defines the presentation color as [R, G, B] triple. Taken into
+ account only if is_colored is False.
+ primitive: primitive type from GaussType
+        is_proportional: if True, the size of primitives will depend on
+ the gauss point value
+        max_pixel_size: the maximum size of the Gauss Points primitive in pixels
+ multiplier: coefficient between data values and the size of primitives
+ If not passed by user, default scale will be computed.
+ vector_mode: the mode of transformation of vector values into
+ scalar values, applicable only if the field contains vector values.
+ Possible modes: 'Magnitude' - vector module;
+ 'X', 'Y', 'Z' - vector components.
+
+ Returns:
+ Gauss Points as representation object.
+
+ """
+ proxy.UpdatePipeline()
+ select_cells_with_data(proxy, on_gauss=[field_name])
+
+ nb_components = get_nb_components(proxy, entity, field_name)
+
+ # Get time value
+ time_value = get_time(proxy, timestamp_nb)
+
+ # Set timestamp
+ pvs.GetRenderView().ViewTime = time_value
+ proxy.UpdatePipeline(time=time_value)
+
+ # Create Gauss Points object
+ source = pvs.GaussPoints(proxy)
+ source.UpdatePipeline()
+
+ # Get Gauss Points representation object
+ gausspnt = pvs.GetRepresentation(source)
+
+ # Get lookup table
+ lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
+
+ # Set field range if necessary
+ data_range = get_data_range(proxy, entity,
+ field_name, vector_mode)
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
+ lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
+
+ # Set display properties
+ if is_colored:
+ pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), field_name))
+ else:
+ pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), None))
+ if color:
+ gausspnt.DiffuseColor = color
+
+ gausspnt.LookupTable = lookup_table
+
+ # Add scalar bar
+ add_scalar_bar(field_name, nb_components,
+ vector_mode, lookup_table, time_value)
+
+ # Set point sprite representation
+ gausspnt.Representation = 'Point Sprite'
+
+ # Point sprite settings
+ gausspnt.InterpolateScalarsBeforeMapping = 0
+ gausspnt.MaxPixelSize = max_pixel_size
+
+ # Render mode
+ gausspnt.RenderMode = GaussType.get_mode(primitive)
+
+ #if primitive == GaussType.SPRITE:
+ # Set texture
+ # TODO(MZN): replace with pvsimple high-level interface
+ # texture = sm.CreateProxy("textures", "SpriteTexture")
+ # alphamprop = texture.GetProperty("AlphaMethod")
+ # alphamprop.SetElement(0, 2) # Clamp
+ # alphatprop = texture.GetProperty("AlphaThreshold")
+ # alphatprop.SetElement(0, 63)
+ # maxprop = texture.GetProperty("Maximum")
+ # maxprop.SetElement(0, 255)
+ # texture.UpdateVTKObjects()
+
+ # gausspnt.Texture = texture
+ #gausspnt.Texture.AlphaMethod = 'Clamp'
+ #gausspnt.Texture.AlphaThreshold = 63
+ #gausspnt.Texture.Maximum= 255
+
+ # Proportional radius
+ gausspnt.RadiusUseScalarRange = 0
+ gausspnt.RadiusIsProportional = 0
+
+ if is_proportional:
+ mult = multiplier
+ if mult is None and data_range[1] != 0:
+ mult = abs(0.1 / data_range[1])
+
+ gausspnt.RadiusScalarRange = data_range
+ gausspnt.RadiusTransferFunctionEnabled = 1
+ gausspnt.RadiusMode = 'Scalar'
+ gausspnt.RadiusArray = ['POINTS', field_name]
+ if nb_components > 1:
+ v_comp = get_vector_component(vector_mode)
+ gausspnt.RadiusVectorComponent = v_comp
+ gausspnt.RadiusTransferFunctionMode = 'Table'
+ gausspnt.RadiusScalarRange = data_range
+ gausspnt.RadiusUseScalarRange = 1
+ if mult is not None:
+ gausspnt.RadiusIsProportional = 1
+ gausspnt.RadiusProportionalFactor = mult
+ else:
+ gausspnt.RadiusTransferFunctionEnabled = 0
+ gausspnt.RadiusMode = 'Constant'
+ gausspnt.RadiusArray = ['POINTS', 'Constant Radius']
+
+ return gausspnt
def StreamLinesOnField(proxy, entity, field_name, timestamp_nb,
direction='BOTH', is_colored=False, color=None,
Stream Lines as representation object.
"""
+ proxy.UpdatePipeline()
# We don't need mesh parts with no data on them
if entity == EntityType.NODE:
select_cells_with_data(proxy, on_points=[field_name])
time_value = get_time(proxy, timestamp_nb)
# Set timestamp
- pv.GetRenderView().ViewTime = time_value
- pv.UpdatePipeline(time_value, proxy)
-
- # Extract only groups with data for the field
- new_proxy = extract_groups_for_field(proxy, field_name, entity)
+ pvs.GetRenderView().ViewTime = time_value
+ pvs.UpdatePipeline(time_value, proxy)
# Do merge
- source = pv.MergeBlocks(new_proxy)
+ source = pvs.MergeBlocks(proxy)
+ pvs.UpdatePipeline()
# Cell data to point data
if is_data_on_cells(proxy, field_name):
- cell_to_point = pv.CellDatatoPointData(source)
+ cell_to_point = pvs.CellDatatoPointData(source)
cell_to_point.PassCellData = 1
- cell_to_point.UpdatePipeline()
+ pvs.UpdatePipeline()
source = cell_to_point
vector_array = field_name
if nb_components == 2:
calc = get_add_component_calc(source, EntityType.NODE, field_name)
vector_array = calc.ResultArrayName
- calc.UpdatePipeline()
+ pvs.UpdatePipeline()
source = calc
# Stream Tracer
- stream = pv.StreamTracer(source)
+ stream = pvs.StreamTracer(source)
stream.SeedType = "Point Source"
stream.Vectors = ['POINTS', vector_array]
- stream.SeedType = "Point Source"
stream.IntegrationDirection = direction
stream.IntegratorType = 'Runge-Kutta 2'
+ stream.SeedType = 'High Resolution Line Source'
stream.UpdatePipeline()
# Get Stream Lines representation object
if is_empty(stream):
return None
- streamlines = pv.GetRepresentation(stream)
+ streamlines = pvs.GetRepresentation(stream)
# Get lookup table
lookup_table = get_lookup_table(field_name, nb_components, vector_mode)
# Set field range if necessary
- data_range = get_data_range(new_proxy, entity,
+ data_range = get_data_range(proxy, entity,
field_name, vector_mode)
- lookup_table.LockScalarRange = 1
+ if hasattr(lookup_table,"LockDataRange"):
+ lookup_table.LockDataRange = 1
+ elif hasattr(lookup_table,"LockScalarRange"):
+ lookup_table.LockScalarRange = 1
+ else:
+ raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table))
+
lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0]
# Set properties
if is_colored:
- streamlines.ColorAttributeType = EntityType.get_pvtype(entity)
- streamlines.ColorArrayName = field_name
+ pvs.ColorBy(streamlines, (EntityType.get_pvtype(entity), field_name))
else:
- streamlines.ColorArrayName = ''
+ pvs.ColorBy(streamlines, (EntityType.get_pvtype(entity), None))
if color:
streamlines.DiffuseColor = color
Arguments:
proxy -- the pipeline object, containig data
- mesh_name -- the mesh name
- entity -- the entity type
+ mesh_name -- the full or short name of mesh field
Returns:
Submesh as representation object of the given source.
"""
- # Select all cell types
- select_all_cells(proxy)
-
- # Get subset of groups on the given entity
- subset = get_group_names(proxy, mesh_name, entity)
-
- # Select only groups of the given entity type
- proxy.Groups = subset
+ proxy.UpdatePipeline()
+ mesh_full_name = None
+ aList = mesh_name.split('/')
+ if len(aList) >= 2:
+ mesh_full_name = mesh_name
+ else:
+ mesh_full_name = find_mesh_full_name(proxy, mesh_name)
+ if not mesh_full_name:
+ raise RuntimeError, "The given mesh name was not found"
+ # Select only the given mesh
+ proxy.AllArrays = [mesh_full_name]
proxy.UpdatePipeline()
# Get representation object if the submesh is not empty
prs = None
if (proxy.GetDataInformation().GetNumberOfPoints() or
proxy.GetDataInformation().GetNumberOfCells()):
- prs = pv.GetRepresentation(proxy)
- prs.ColorArrayName = ''
+ my_view = pvs.GetRenderView()
+ prs = pvs.GetRepresentation(proxy, view=my_view)
+ prs.ColorArrayName = (None, '')
return prs
-def MeshOnGroup(proxy, group_name):
+def MeshOnGroup(proxy, extrGroups, group_name):
"""Creates submesh on the group.
Arguments:
proxy -- the pipeline object, containig data
group_name -- the full group name
+ extrGroups -- all extracted groups object
Returns:
Representation object of the given source with single group
selected.
"""
- # Select all cell types
- select_all_cells(proxy)
-
- # Select only the group with the given name
- one_group = [group_name]
- proxy.Groups = one_group
proxy.UpdatePipeline()
+ # Deselect all groups
+ extrGroups.AllGroups = []
+ extrGroups.UpdatePipelineInformation()
+ # Select only the group with the given name
+ extrGroups.AllGroups = [group_name]
+ extrGroups.UpdatePipelineInformation()
# Get representation object if the submesh is not empty
prs = None
# Check if the group was set
- if proxy.Groups.GetData() == one_group:
- group_entity = get_group_entity(group_name)
+ if len(extrGroups.AllGroups) == 1 and \
+ extrGroups.AllGroups[0] == group_name:
# Check if the submesh is not empty
- nb_items = 0
- if group_entity == EntityType.NODE:
- nb_items = proxy.GetDataInformation().GetNumberOfPoints()
- elif group_entity == EntityType.CELL:
- nb_items = proxy.GetDataInformation().GetNumberOfCells()
+ nb_points = proxy.GetDataInformation().GetNumberOfPoints()
+ nb_cells = proxy.GetDataInformation().GetNumberOfCells()
- if nb_items:
- prs = pv.GetRepresentation(proxy)
- prs.ColorArrayName = ''
+ if nb_points or nb_cells:
+# prs = pvs.GetRepresentation(proxy)
+ prs = pvs.Show()
+ prs.ColorArrayName = (None, '')
+ display_only(prs)
return prs
-def CreatePrsForFile(paravis_instance, file_name, prs_types,
+def CreatePrsForFile(file_name, prs_types,
picture_dir, picture_ext):
"""Build presentations of the given types for the file.
Build presentations for all fields on all timestamps.
Arguments:
- paravis_instance: ParaVis module instance object
file_name: full path to the MED file
prs_types: the list of presentation types to build
picture_dir: the directory path for saving snapshots
print "Import " + file_name.split(os.sep)[-1] + "..."
try:
- paravis_instance.ImportFile(file_name)
- proxy = pv.GetActiveSource()
+ proxy = pvs.MEDReader(FileName=file_name)
if proxy is None:
print "FAILED"
else:
- proxy.UpdatePipeline()
+ #proxy.UpdatePipeline()
print "OK"
except:
print "FAILED"
else:
# Get view
- view = pv.GetRenderView()
+ view = pvs.GetRenderView()
+ time_value = get_time(proxy, 0)
+ view.ViewTime = time_value
+ pvs.UpdatePipeline(time=time_value, proxy=proxy)
# Create required presentations for the proxy
CreatePrsForProxy(proxy, view, prs_types,
picture_dir, picture_ext)
-
def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext):
"""Build presentations of the given types for all fields of the proxy.
picture_ext: graphics files extension (determines file type)
"""
+ proxy.UpdatePipeline()
# List of the field names
- field_names = list(proxy.PointArrays.GetData())
- nb_on_nodes = len(field_names)
- field_names.extend(proxy.CellArrays.GetData())
+ fields_info = proxy.GetProperty("FieldsTreeInfo")[::2]
# Add path separator to the end of picture path if necessery
if not picture_dir.endswith(os.sep):
# Mesh Presentation
if PrsTypeEnum.MESH in prs_types:
- # Create Mesh presentation. Build all possible submeshes.
-
- # Remember the current state
- groups = list(proxy.Groups)
-
# Iterate on meshes
- mesh_names = get_mesh_names(proxy)
+ mesh_names = get_mesh_full_names(proxy)
for mesh_name in mesh_names:
- # Build mesh on nodes and cells
- for entity in (EntityType.NODE, EntityType.CELL):
- entity_name = EntityType.get_name(entity)
- if if_possible(proxy, mesh_name, entity, PrsTypeEnum.MESH):
- print "Creating submesh on " + entity_name + " for '" + mesh_name + "' mesh... "
- prs = MeshOnEntity(proxy, mesh_name, entity)
- if prs is None:
- print "FAILED"
- continue
- else:
- print "OK"
- # Construct image file name
- pic_name = picture_dir + mesh_name + "_" + entity_name + "." + picture_ext
-
- # Show and dump the presentation into a graphics file
- process_prs_for_test(prs, view, pic_name, False)
-
- # Build submesh on all groups of the mesh
- mesh_groups = get_group_names(proxy, mesh_name,
- entity, wo_nogroups=True)
- for group in mesh_groups:
- print "Creating submesh on group " + group + "... "
- prs = MeshOnGroup(proxy, group)
+ # Build mesh field presentation
+ print "Creating submesh for '" + get_field_short_name(mesh_name) + "' mesh... "
+ prs = MeshOnEntity(proxy, mesh_name, None)
+ if prs is None:
+ print "FAILED"
+ continue
+ else:
+ print "OK"
+ # Construct image file name
+ pic_name = picture_dir + get_field_short_name(mesh_name) + "." + picture_ext
+
+ # Show and dump the presentation into a graphics file
+ process_prs_for_test(prs, view, pic_name, False)
+
+ # Create Mesh presentation. Build all groups.
+ extGrp = pvs.ExtractGroup()
+ extGrp.UpdatePipelineInformation()
+ if if_possible(proxy, None, None, PrsTypeEnum.MESH, extGrp):
+ for group in get_group_names(extGrp):
+ print "Creating submesh on group " + get_group_short_name(group) + "... "
+ prs = MeshOnGroup(proxy, extGrp, group)
if prs is None:
print "FAILED"
continue
else:
print "OK"
# Construct image file name
- pic_name = picture_dir + group.replace('/', '_') + "." + picture_ext
+ pic_name = picture_dir + get_group_short_name(group) + "." + picture_ext
# Show and dump the presentation into a graphics file
process_prs_for_test(prs, view, pic_name, False)
- # Restore the state
- proxy.Groups = groups
- proxy.UpdatePipeline()
-
# Presentations on fields
- for (i, field_name) in enumerate(field_names):
+ for field in fields_info:
+ field_name = get_field_short_name(field)
+ # Ignore mesh presentation
+ if field_name == get_field_mesh_name(field):
+ continue
+ field_entity = get_field_entity(field)
# Select only the current field:
# necessary for getting the right timestamps
- cell_arrays = proxy.CellArrays.GetData()
- point_arrays = proxy.PointArrays.GetData()
- field_entity = None
- if (i >= nb_on_nodes):
- field_entity = EntityType.CELL
- proxy.PointArrays.DeselectAll()
- proxy.CellArrays = [field_name]
- else:
- field_entity = EntityType.NODE
- proxy.CellArrays.DeselectAll()
- proxy.PointArrays = [field_name]
+ proxy.AllArrays = [field]
+ proxy.UpdatePipeline()
# Get timestamps
- proxy.UpdatePipelineInformation()
timestamps = proxy.TimestepValues.GetData()
- # Restore fields selection state
- proxy.CellArrays = cell_arrays
- proxy.PointArrays = point_arrays
- proxy.UpdatePipelineInformation()
-
for prs_type in prs_types:
# Ignore mesh presentation
if prs_type == PrsTypeEnum.MESH:
for timestamp_nb in xrange(1, len(timestamps) + 1):
time = timestamps[timestamp_nb - 1]
+ if (time == 0.0):
+ scalar_range = get_data_range(proxy, field_entity,
+ field_name, cut_off=True)
+                    # exclude time stamps with null length of scalar range
+ if (scalar_range[0] == scalar_range[1]):
+ continue
print "Creating " + prs_name + " on " + field_name + ", time = " + str(time) + "... "
- prs = create_prs(prs_type, proxy,
- field_entity, field_name, timestamp_nb)
+ try:
+ prs = create_prs(prs_type, proxy,
+ field_entity, field_name, timestamp_nb)
+ except ValueError:
+ """ This exception comes from get_nb_components(...) function.
+ The reason of exception is an implementation of MEDReader
+ activating the first leaf when reading MED file (refer to
+ MEDFileFieldRepresentationTree::activateTheFirst() and
+ MEDFileFieldRepresentationTree::getTheSingleActivated(...) methods).
+ """
+ print "ValueError exception is catched"
+ continue
if prs is None:
print "FAILED"
continue
# Show and dump the presentation into a graphics file
process_prs_for_test(prs, view, pic_name)
+ return