X-Git-Url: http://git.salome-platform.org/gitweb/?a=blobdiff_plain;f=src%2FPV_SWIG%2Fpresentations.py;h=000216512bb723eaf357579d2288d3654713f2ee;hb=808c8afc6ffb56c3c5803aa792bc336c8b89fbe7;hp=5b408786c45a1f79883852f54d47b7b29553616b;hpb=2e6a99cc337f2f99b297afc2c6c9a4708c869382;p=modules%2Fparavis.git diff --git a/src/PV_SWIG/presentations.py b/src/PV_SWIG/presentations.py index 5b408786..00021651 100644 --- a/src/PV_SWIG/presentations.py +++ b/src/PV_SWIG/presentations.py @@ -1,9 +1,9 @@ -# Copyright (C) 2010-2013 CEA/DEN, EDF R&D +# Copyright (C) 2010-2016 CEA/DEN, EDF R&D # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either -# version 2.1 of the License. +# version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of @@ -53,6 +53,7 @@ GAP_COEFFICIENT = 0.0001 # Globals _current_bar = None +_med_field_sep = '@@][@@' # Enumerations @@ -97,11 +98,11 @@ class EntityType: NODE = 0 CELL = 1 - _type2name = {NODE: 'OnPoint', - CELL: 'OnCell'} + _type2name = {NODE: 'P1', + CELL: 'P0'} - _name2type = {'OnPoint': NODE, - 'OnCell': CELL} + _name2type = {'P1': NODE, + 'P0': CELL} _type2pvtype = {NODE: 'POINT_DATA', CELL: 'CELL_DATA'} @@ -171,6 +172,42 @@ class GaussType: # Auxiliary functions + +def get_field_mesh_name(full_field_name): + """Return mesh name of the field by its full name.""" + aList = full_field_name.split('/') + if len(aList) >= 2 : + field_name = full_field_name.split('/')[1] + return field_name + + +def get_field_entity(full_field_name): + """Return entity type of the field by its full name.""" + aList = full_field_name.split(_med_field_sep) + if len(aList) == 2 : + entity_name = full_field_name.split(_med_field_sep)[-1] + entity = EntityType.get_type(entity_name) + return entity + + +def get_field_short_name(full_field_name): + """Return short name of the field by its full name.""" + aList = full_field_name.split('/') + if len(aList) == 4 : + short_name_with_type = full_field_name.split('/')[-1] + short_name = short_name_with_type.split(_med_field_sep)[0] + return short_name + + +def find_mesh_full_name(proxy, short_mesh_name): + """Return full mesh path by short mesh name, if found""" + proxy.UpdatePipeline() + all_mesh_names = get_mesh_full_names(proxy) + for name in all_mesh_names: + if short_mesh_name == get_field_short_name(name): + return name + + def process_prs_for_test(prs, view, picture_name, show_bar=True): """Show presentation and record snapshot image. 
@@ -185,6 +222,7 @@ def process_prs_for_test(prs, view, picture_name, show_bar=True): display_only(prs, view) # Show scalar bar + global _current_bar if show_bar and _current_bar: _current_bar.Visibility = 1 @@ -198,6 +236,7 @@ def process_prs_for_test(prs, view, picture_name, show_bar=True): os.makedirs(pic_dir) # Save picture + print "Write image:", file_name pvs.WriteImage(file_name, view=view, Magnification=1) @@ -243,15 +282,19 @@ def hide_all(view, to_remove=False): def display_only(prs, view=None): """Display only the given presentation in the view.""" - hide_all(view) - if (hasattr(prs, 'Visibility') and prs.Visibility != 1): - prs.Visibility = 1 + if not view: + view = pvs.GetRenderView() + + rep_list = view.Representations + for rep in rep_list: + if hasattr(rep, 'Visibility'): + rep.Visibility = (rep == prs) pvs.Render(view=view) def set_visible_lines(xy_prs, lines): """Set visible only the given lines for XYChartRepresentation.""" - sv = xy_prs.GetProperty("SeriesVisibilityInfo").GetData() + sv = xy_prs.GetProperty("SeriesVisibility").GetData() visible = '0' for i in xrange(0, len(sv)): @@ -328,6 +371,7 @@ def get_data_range(proxy, entity, field_name, vector_mode='Magnitude', Data range as [min, max] """ + proxy.UpdatePipeline() entity_data_info = None field_data = proxy.GetFieldDataInformation() @@ -360,6 +404,7 @@ def get_data_range(proxy, entity, field_name, vector_mode='Magnitude', def get_bounds(proxy): """Get bounds of the proxy in 3D.""" + proxy.UpdatePipeline() dataInfo = proxy.GetDataInformation() bounds_info = dataInfo.GetBounds() return bounds_info @@ -367,24 +412,28 @@ def get_bounds(proxy): def get_x_range(proxy): """Get X range of the proxy bounds in 3D.""" + proxy.UpdatePipeline() bounds_info = get_bounds(proxy) return bounds_info[0:2] def get_y_range(proxy): """Get Y range of the proxy bounds in 3D.""" + proxy.UpdatePipeline() bounds_info = get_bounds(proxy) return bounds_info[2:4] def get_z_range(proxy): """Get Z range of the proxy bounds in 3D.""" + proxy.UpdatePipeline() bounds_info = get_bounds(proxy) return bounds_info[4:6] def is_planar_input(proxy): """Check if the given input is planar.""" + proxy.UpdatePipeline() bounds_info = get_bounds(proxy) if (abs(bounds_info[0] - bounds_info[1]) <= FLT_MIN or @@ -397,6 +446,7 @@ def is_planar_input(proxy): def is_data_on_cells(proxy, field_name): """Check the existence of a field on cells with the given name.""" + proxy.UpdatePipeline() cell_data_info = proxy.GetCellDataInformation() return (field_name in cell_data_info.keys()) @@ -409,6 +459,7 @@ def is_empty(proxy): False: otherwise """ + proxy.UpdatePipeline() data_info = proxy.GetDataInformation() nb_cells = data_info.GetNumberOfCells() @@ -419,6 +470,7 @@ def is_empty(proxy): def get_orientation(proxy): """Get the optimum cutting plane orientation for Plot 3D.""" + proxy.UpdatePipeline() orientation = Orientation.XY bounds = get_bounds(proxy) @@ -590,14 +642,17 @@ def get_contours(scalar_range, nb_contours): def get_nb_components(proxy, entity, field_name): """Return number of components for the field.""" + proxy.UpdatePipeline() entity_data_info = None field_data = proxy.GetFieldDataInformation() if field_name in field_data.keys(): entity_data_info = field_data elif entity == EntityType.CELL: + select_cells_with_data(proxy, on_cells=[field_name]) entity_data_info = proxy.GetCellDataInformation() elif entity == EntityType.NODE: + select_cells_with_data(proxy, on_points=[field_name]) entity_data_info = proxy.GetPointDataInformation() nb_comp = None @@ -643,6 
+698,7 @@ def get_scale_factor(proxy): def get_default_scale(prs_type, proxy, entity, field_name): """Get default scale factor.""" + proxy.UpdatePipeline() data_range = get_data_range(proxy, entity, field_name) if prs_type == PrsTypeEnum.DEFORMEDSHAPE: @@ -670,6 +726,7 @@ def get_calc_magnitude(proxy, array_entity, array_name): the calculator object. """ + proxy.UpdatePipeline() calculator = None # Transform vector array to scalar array if possible @@ -704,6 +761,7 @@ def get_add_component_calc(proxy, array_entity, array_name): the calculator object. """ + proxy.UpdatePipeline() calculator = None nb_components = get_nb_components(proxy, array_entity, array_name) @@ -727,14 +785,13 @@ def select_all_cells(proxy): Used in creation of mesh/submesh presentation. """ - ### Old API all_cell_types = proxy.CellTypes.Available - all_cell_types = proxy.Entity.Available - ### Old API proxy.CellTypes = all_cell_types - proxy.Entity = all_cell_types proxy.UpdatePipeline() + extractCT = pvs.ExtractCellType() + extractCT.AllGeoTypes = extractCT.GetProperty("GeoTypesInfo")[::2] + extractCT.UpdatePipelineInformation() -def select_cells_with_data(proxy, on_points=None, on_cells=None): +def select_cells_with_data(proxy, on_points=[], on_cells=[], on_gauss=[]): """Select cell types with data. Only cell types with data for the given fields will be selected. @@ -742,9 +799,35 @@ def select_cells_with_data(proxy, on_points=None, on_cells=None): types with data for even one field (from available) will be selected. """ - if not hasattr(proxy, 'Entity'): + if not proxy.GetProperty("FieldsTreeInfo"): return - + + proxy.UpdatePipeline() + if not hasattr(proxy, 'Entity'): + fields_info = proxy.GetProperty("FieldsTreeInfo")[::2] + arr_name_with_dis=[elt.split("/")[-1] for elt in fields_info] + + fields = [] + for name in on_gauss: + fields.append(name+_med_field_sep+'GAUSS') + for name in on_cells: + fields.append(name+_med_field_sep+'P0') + for name in on_points: + fields.append(name+_med_field_sep+'P1') + + field_list = [] + for name in fields: + if arr_name_with_dis.count(name) > 0: + index = arr_name_with_dis.index(name) + field_list.append(fields_info[index]) + + if field_list: + proxy.AllArrays = field_list + proxy.UpdatePipeline() + return len(field_list) != 0 + + # TODO: VTN. Looks like this code is out of date. + #all_cell_types = proxy.CellTypes.Available all_cell_types = proxy.Entity.Available all_arrays = list(proxy.CellArrays.GetData()) @@ -786,63 +869,9 @@ def select_cells_with_data(proxy, on_points=None, on_cells=None): proxy.Entity = cell_types_on proxy.UpdatePipeline() - -def extract_groups_for_field(proxy, field_name, field_entity, force=False): - """Exctract only groups which have the field. - - Arguments: - proxy: the pipeline object, containig data - field_name: the field name - field_entity: the field entity - force: if True - ExtractGroup object will be created in any case - - Returns: - ExtractGroup object: if not all groups have the field or - the force argument is true - The initial proxy: if no groups had been filtered. 
- - """ - source = proxy - - # Remember the state - initial_groups = list(proxy.Groups) - - # Get data information for the field entity - entity_data_info = None - field_data = proxy.GetFieldDataInformation() - - if field_name in field_data.keys(): - entity_data_info = field_data - elif field_entity == EntityType.CELL: - entity_data_info = proxy.GetCellDataInformation() - elif field_entity == EntityType.NODE: - entity_data_info = proxy.GetPointDataInformation() - - # Collect groups for extraction - groups_to_extract = [] - - for group in initial_groups: - proxy.Groups = [group] - proxy.UpdatePipeline() - if field_name in entity_data_info.keys(): - groups_to_extract.append(group) - - # Restore state - proxy.Groups = initial_groups - proxy.UpdatePipeline() - - # Extract groups if necessary - if force or (len(groups_to_extract) < len(initial_groups)): - extract_group = pvs.ExtractGroup(proxy) - extract_group.Groups = groups_to_extract - extract_group.UpdatePipeline() - source = extract_group - - return source - - -def if_possible(proxy, field_name, entity, prs_type): +def if_possible(proxy, field_name, entity, prs_type, extrGrps=None): """Check if the presentation creation is possible on the given field.""" + proxy.UpdatePipeline() result = True if (prs_type == PrsTypeEnum.DEFORMEDSHAPE or prs_type == PrsTypeEnum.DEFORMEDSHAPESCALARMAP or @@ -854,7 +883,7 @@ def if_possible(proxy, field_name, entity, prs_type): result = (entity == EntityType.CELL or field_name in proxy.QuadraturePointArrays.Available) elif (prs_type == PrsTypeEnum.MESH): - result = len(get_group_names(proxy, field_name, entity)) > 0 + result = len(get_group_names(extrGrps)) > 0 return result @@ -898,13 +927,11 @@ def add_scalar_bar(field_name, nb_components, # Reassign the current bar _current_bar = scalar_bar - return scalar_bar + return _current_bar def get_bar(): """Get current scalar bar.""" - global _current_bar - return _current_bar @@ -928,7 +955,12 @@ def get_lookup_table(field_name, nb_components, vector_mode='Magnitude'): lookup_table.Discretize = 0 lookup_table.ColorSpace = 'HSV' - lookup_table.LockScalarRange = 0 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 0 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 0 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) return lookup_table @@ -940,7 +972,6 @@ def get_group_mesh_name(full_group_name): group_name = full_group_name.split('/')[1] return group_name - def get_group_entity(full_group_name): """Return entity type of the group by its full name.""" aList = full_group_name.split('/') @@ -952,54 +983,48 @@ def get_group_entity(full_group_name): def get_group_short_name(full_group_name): """Return short name of the group by its full name.""" - aList = full_group_name.split('/') - if len(aList) >= 4 : - short_name = full_group_name.split('/')[3] - return short_name + short_name = re.sub('^GRP_', '', full_group_name) + return short_name -def get_mesh_names(proxy): +def get_mesh_full_names(proxy): """Return all mesh names in the given proxy as a set.""" - groups = proxy.Groups.Available - mesh_names = set([get_group_mesh_name(item) for item in groups]) - - return mesh_names + proxy.UpdatePipeline() + fields = proxy.GetProperty("FieldsTreeInfo")[::2] + mesh_full_names = set([item for item in fields if get_field_mesh_name(item) == get_field_short_name(item)]) + return mesh_full_names -def get_group_names(proxy, mesh_name, entity, wo_nogroups=False): - 
"""Return full names of all groups of the given entity type - from the mesh with the given name as a list. +def get_group_names(extrGrps): + """Return full names of all groups of the given 'ExtractGroup' filter object. """ - groups = proxy.Groups.Available - - condition = lambda item: (get_group_mesh_name(item) == mesh_name and - get_group_entity(item) == entity) - group_names = [item for item in groups if condition(item)] - - if wo_nogroups: - # Remove "No_Group" group - not_no_group = lambda item: get_group_short_name(item) != "No_Group" - group_names = filter(not_no_group, group_names) - + group_names = filter(lambda x:x[:4]=="GRP_",list(extrGrps.GetProperty("GroupsFlagsInfo")[::2])) return group_names def get_time(proxy, timestamp_nb): """Get time value by timestamp number.""" + #proxy.UpdatePipeline() # Check timestamp number timestamps = [] - + if (hasattr(proxy, 'TimestepValues')): timestamps = proxy.TimestepValues.GetData() elif (hasattr(proxy.Input, 'TimestepValues')): timestamps = proxy.Input.TimestepValues.GetData() - if ((timestamp_nb - 1) not in xrange(len(timestamps))): + length = len(timestamps) + if (timestamp_nb > 0 and (timestamp_nb - 1) not in xrange(length) ) or (timestamp_nb < 0 and -timestamp_nb > length): raise ValueError("Timestamp number is out of range: " + str(timestamp_nb)) - # Return time value - return timestamps[timestamp_nb - 1] + if not timestamps: + return 0.0 + # Return time value + if timestamp_nb > 0: + return timestamps[timestamp_nb - 1] + else: + return timestamps[timestamp_nb] def create_prs(prs_type, proxy, field_entity, field_name, timestamp_nb): """Auxiliary function. @@ -1009,6 +1034,7 @@ def create_prs(prs_type, proxy, field_entity, field_name, timestamp_nb): Set the presentation properties like visu.CreatePrsForResult() do. """ + proxy.UpdatePipeline() prs = None if prs_type == PrsTypeEnum.SCALARMAP: @@ -1060,6 +1086,7 @@ def ScalarMapOnField(proxy, entity, field_name, timestamp_nb, Scalar Map as representation object. 
""" + proxy.UpdatePipeline() # We don't need mesh parts with no data on them if entity == EntityType.NODE: select_cells_with_data(proxy, on_points=[field_name]) @@ -1077,12 +1104,8 @@ def ScalarMapOnField(proxy, entity, field_name, timestamp_nb, pvs.GetRenderView().ViewTime = time_value pvs.UpdatePipeline(time_value, proxy) - # Extract only groups with data for the field - new_proxy = extract_groups_for_field(proxy, field_name, entity, - force=True) - # Get Scalar Map representation object - scalarmap = pvs.GetRepresentation(new_proxy) + scalarmap = pvs.GetRepresentation(proxy) # Get lookup table lookup_table = get_lookup_table(field_name, nb_components, vector_mode) @@ -1090,11 +1113,16 @@ def ScalarMapOnField(proxy, entity, field_name, timestamp_nb, # Set field range if necessary data_range = get_data_range(proxy, entity, field_name, vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set properties - scalarmap.ColorAttributeType = EntityType.get_pvtype(entity) - scalarmap.ColorArrayName = field_name + pvs.ColorBy(scalarmap, (EntityType.get_pvtype(entity), field_name)) scalarmap.LookupTable = lookup_table # Add scalar bar @@ -1134,6 +1162,12 @@ def CutPlanesOnField(proxy, entity, field_name, timestamp_nb, Cut Planes as representation object. """ + proxy.UpdatePipeline() + if entity == EntityType.NODE: + select_cells_with_data(proxy, on_points=[field_name]) + else: + select_cells_with_data(proxy, on_cells=[field_name]) + # Check vector mode nb_components = get_nb_components(proxy, entity, field_name) check_vector_mode(vector_mode, nb_components) @@ -1168,12 +1202,18 @@ def CutPlanesOnField(proxy, entity, field_name, timestamp_nb, # Set field range if necessary data_range = get_data_range(proxy, entity, field_name, vector_mode) - lookup_table.LockScalarRange = 1 + + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set properties - cut_planes.ColorAttributeType = EntityType.get_pvtype(entity) - cut_planes.ColorArrayName = field_name + pvs.ColorBy(cut_planes, (EntityType.get_pvtype(entity), field_name)) cut_planes.LookupTable = lookup_table # Add scalar bar @@ -1224,6 +1264,12 @@ def CutLinesOnField(proxy, entity, field_name, timestamp_nb, (Cut Lines as representation object, list of 'PlotOverLine') otherwise """ + proxy.UpdatePipeline() + if entity == EntityType.NODE: + select_cells_with_data(proxy, on_points=[field_name]) + else: + select_cells_with_data(proxy, on_cells=[field_name]) + # Check vector mode nb_components = get_nb_components(proxy, entity, field_name) check_vector_mode(vector_mode, nb_components) @@ -1304,12 +1350,17 @@ def CutLinesOnField(proxy, entity, field_name, timestamp_nb, # Set field range if necessary data_range = get_data_range(proxy, entity, field_name, vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + 
lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set properties - cut_lines.ColorAttributeType = EntityType.get_pvtype(entity) - cut_lines.ColorArrayName = field_name + pvs.ColorBy(cut_lines, (EntityType.get_pvtype(entity), field_name)) cut_lines.LookupTable = lookup_table # Set wireframe represenatation mode @@ -1346,6 +1397,12 @@ def CutSegmentOnField(proxy, entity, field_name, timestamp_nb, Cut Segment as 3D representation object. """ + proxy.UpdatePipeline() + if entity == EntityType.NODE: + select_cells_with_data(proxy, on_points=[field_name]) + else: + select_cells_with_data(proxy, on_cells=[field_name]) + # Check vector mode nb_components = get_nb_components(proxy, entity, field_name) check_vector_mode(vector_mode, nb_components) @@ -1372,12 +1429,17 @@ def CutSegmentOnField(proxy, entity, field_name, timestamp_nb, # Set field range if necessary data_range = get_data_range(proxy, entity, field_name, vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set properties - cut_segment.ColorAttributeType = EntityType.get_pvtype(entity) - cut_segment.ColorArrayName = field_name + pvs.ColorBy(cut_segment, (EntityType.get_pvtype(entity), field_name)) cut_segment.LookupTable = lookup_table # Set wireframe represenatation mode @@ -1414,6 +1476,12 @@ def VectorsOnField(proxy, entity, field_name, timestamp_nb, Vectors as representation object. 
""" + proxy.UpdatePipeline() + if entity == EntityType.NODE: + select_cells_with_data(proxy, on_points=[field_name]) + else: + select_cells_with_data(proxy, on_cells=[field_name]) + # Check vector mode nb_components = get_nb_components(proxy, entity, field_name) check_vector_mode(vector_mode, nb_components) @@ -1426,8 +1494,7 @@ def VectorsOnField(proxy, entity, field_name, timestamp_nb, pvs.UpdatePipeline(time_value, proxy) # Extract only groups with data for the field - new_proxy = extract_groups_for_field(proxy, field_name, entity) - source = new_proxy + source = proxy # Cell centers if is_data_on_cells(proxy, field_name): @@ -1446,7 +1513,7 @@ def VectorsOnField(proxy, entity, field_name, timestamp_nb, glyph = pvs.Glyph(source) glyph.Vectors = vector_array glyph.ScaleMode = 'vector' - glyph.MaskPoints = 0 + #glyph.MaskPoints = 0 # Set glyph type glyph.GlyphType = glyph_type @@ -1467,11 +1534,11 @@ def VectorsOnField(proxy, entity, field_name, timestamp_nb, glyph.GlyphType.Center = [0.0, 0.0, 0.0] if scale_factor is not None: - glyph.SetScaleFactor = scale_factor + glyph.ScaleFactor = scale_factor else: def_scale = get_default_scale(PrsTypeEnum.DEFORMEDSHAPE, - new_proxy, entity, field_name) - glyph.SetScaleFactor = def_scale + proxy, entity, field_name) + glyph.ScaleFactor = def_scale glyph.UpdatePipeline() @@ -1484,14 +1551,20 @@ def VectorsOnField(proxy, entity, field_name, timestamp_nb, # Set field range if necessary data_range = get_data_range(proxy, entity, field_name, vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set properties if (is_colored): - vectors.ColorArrayName = 'GlyphVector' + pvs.ColorBy(vectors, (EntityType.get_pvtype(entity), 'GlyphVector')) else: - vectors.ColorArrayName = '' + pvs.ColorBy(vectors, (EntityType.get_pvtype(entity), None)) vectors.LookupTable = lookup_table vectors.LineWidth = 1.0 @@ -1528,6 +1601,7 @@ def DeformedShapeOnField(proxy, entity, field_name, Defromed Shape as representation object. 
""" + proxy.UpdatePipeline() # We don't need mesh parts with no data on them if entity == EntityType.NODE: select_cells_with_data(proxy, on_points=[field_name]) @@ -1545,11 +1619,9 @@ def DeformedShapeOnField(proxy, entity, field_name, pvs.GetRenderView().ViewTime = time_value pvs.UpdatePipeline(time_value, proxy) - # Extract only groups with data for the field - new_proxy = extract_groups_for_field(proxy, field_name, entity) - # Do merge - source = pvs.MergeBlocks(new_proxy) + source = pvs.MergeBlocks(proxy) + pvs.UpdatePipeline() # Cell data to point data if is_data_on_cells(proxy, field_name): @@ -1583,15 +1655,20 @@ def DeformedShapeOnField(proxy, entity, field_name, # Set field range if necessary data_range = get_data_range(proxy, entity, field_name, vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set properties if is_colored: - defshape.ColorAttributeType = EntityType.get_pvtype(entity) - defshape.ColorArrayName = field_name + pvs.ColorBy(defshape, (EntityType.get_pvtype(entity), field_name)) else: - defshape.ColorArrayName = '' + pvs.ColorBy(defshape, (EntityType.get_pvtype(entity), None)) defshape.LookupTable = lookup_table # Set wireframe represenatation mode @@ -1628,6 +1705,7 @@ def DeformedShapeAndScalarMapOnField(proxy, entity, field_name, Defromed Shape And Scalar Map as representation object. """ + proxy.UpdatePipeline() # We don't need mesh parts with no data on them on_points = [] on_cells = [] @@ -1643,10 +1721,12 @@ def DeformedShapeAndScalarMapOnField(proxy, entity, field_name, else: on_cells.append(scalar_field_name) + nb_components = get_nb_components(proxy, entity, field_name) + + # Select fields select_cells_with_data(proxy, on_points, on_cells) # Check vector mode - nb_components = get_nb_components(proxy, entity, field_name) check_vector_mode(vector_mode, nb_components) # Get time value @@ -1663,11 +1743,9 @@ def DeformedShapeAndScalarMapOnField(proxy, entity, field_name, scalar_field_entity = entity scalar_field = field_name - # Extract only groups with data for the field - new_proxy = extract_groups_for_field(proxy, field_name, entity) - # Do merge - source = pvs.MergeBlocks(new_proxy) + source = pvs.MergeBlocks(proxy) + pvs.UpdatePipeline() # Cell data to point data if is_data_on_cells(proxy, field_name): @@ -1689,7 +1767,7 @@ def DeformedShapeAndScalarMapOnField(proxy, entity, field_name, warp_vector.ScaleFactor = scale_factor else: def_scale = get_default_scale(PrsTypeEnum.DEFORMEDSHAPE, - new_proxy, entity, field_name) + proxy, entity, field_name) warp_vector.ScaleFactor = def_scale # Get Defromed Shape And Scalar Map representation object @@ -1701,13 +1779,18 @@ def DeformedShapeAndScalarMapOnField(proxy, entity, field_name, # Set field range if necessary data_range = get_data_range(proxy, scalar_field_entity, scalar_field, vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # 
Set properties - defshapemap.ColorArrayName = scalar_field + pvs.ColorBy(defshapemap, (EntityType.get_pvtype(scalar_field_entity), scalar_field)) defshapemap.LookupTable = lookup_table - defshapemap.ColorAttributeType = EntityType.get_pvtype(scalar_field_entity) # Add scalar bar add_scalar_bar(field_name, nb_components, @@ -1753,6 +1836,7 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb, Plot 3D as representation object. """ + proxy.UpdatePipeline() # We don't need mesh parts with no data on them if entity == EntityType.NODE: select_cells_with_data(proxy, on_points=[field_name]) @@ -1770,11 +1854,8 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb, pvs.GetRenderView().ViewTime = time_value pvs.UpdatePipeline(time_value, proxy) - # Extract only groups with data for the field - new_proxy = extract_groups_for_field(proxy, field_name, entity) - # Do merge - merge_blocks = pvs.MergeBlocks(new_proxy) + merge_blocks = pvs.MergeBlocks(proxy) merge_blocks.UpdatePipeline() poly_data = None @@ -1873,12 +1954,17 @@ def Plot3DOnField(proxy, entity, field_name, timestamp_nb, # Set field range if necessary data_range = get_data_range(proxy, entity, field_name, vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set properties - plot3d.ColorAttributeType = EntityType.get_pvtype(entity) - plot3d.ColorArrayName = field_name + pvs.ColorBy(plot3d, (EntityType.get_pvtype(entity), field_name)) plot3d.LookupTable = lookup_table # Add scalar bar @@ -1913,6 +1999,7 @@ def IsoSurfacesOnField(proxy, entity, field_name, timestamp_nb, Iso Surfaces as representation object. 
""" + proxy.UpdatePipeline() # We don't need mesh parts with no data on them if entity == EntityType.NODE: select_cells_with_data(proxy, on_points=[field_name]) @@ -1930,11 +2017,9 @@ def IsoSurfacesOnField(proxy, entity, field_name, timestamp_nb, pvs.GetRenderView().ViewTime = time_value pvs.UpdatePipeline(time_value, proxy) - # Extract only groups with data for the field - new_proxy = extract_groups_for_field(proxy, field_name, entity) - # Do merge - source = pvs.MergeBlocks(new_proxy) + source = pvs.MergeBlocks(proxy) + pvs.UpdatePipeline() # Transform cell data into point data if necessary if is_data_on_cells(proxy, field_name): @@ -1976,15 +2061,20 @@ def IsoSurfacesOnField(proxy, entity, field_name, timestamp_nb, # Set field range if necessary data_range = get_data_range(proxy, entity, field_name, vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set display properties if (is_colored): - isosurfaces.ColorAttributeType = EntityType.get_pvtype(entity) - isosurfaces.ColorArrayName = field_name + pvs.ColorBy(isosurfaces, (EntityType.get_pvtype(entity), field_name)) else: - isosurfaces.ColorArrayName = '' + pvs.ColorBy(isosurfaces, (EntityType.get_pvtype(entity), None)) if color: isosurfaces.DiffuseColor = color isosurfaces.LookupTable = lookup_table @@ -2035,11 +2125,14 @@ def GaussPointsOnField(proxy, entity, field_name, Gauss Points as representation object. """ + proxy.UpdatePipeline() # We don't need mesh parts with no data on them - if entity == EntityType.NODE: - select_cells_with_data(proxy, on_points=[field_name]) - else: - select_cells_with_data(proxy, on_cells=[field_name]) + on_gauss = select_cells_with_data(proxy, on_gauss=[field_name]) + if not on_gauss: + if entity == EntityType.NODE: + select_cells_with_data(proxy, on_points=[field_name]) + else: + select_cells_with_data(proxy, on_cells=[field_name]) # Check vector mode nb_components = get_nb_components(proxy, entity, field_name) @@ -2050,18 +2143,14 @@ def GaussPointsOnField(proxy, entity, field_name, # Set timestamp pvs.GetRenderView().ViewTime = time_value - proxy.UpdatePipeline(time=time_value) - - # Extract only groups with data for the field - source = extract_groups_for_field(proxy, field_name, entity) + pvs.UpdatePipeline(time_value, proxy) - # Quadrature point arrays - qp_arrays = proxy.QuadraturePointArrays.Available + source = proxy # If no quadrature point array is passed, use cell centers - if field_name in qp_arrays: + if on_gauss: generate_qp = pvs.GenerateQuadraturePoints(source) - generate_qp.SelectSourceArray = ['CELLS', 'ELGA_Offset'] + generate_qp.QuadratureSchemeDef = ['CELLS', 'ELGA@0'] source = generate_qp else: # Cell centers @@ -2076,8 +2165,7 @@ def GaussPointsOnField(proxy, entity, field_name, vector_array = field_name # If the given vector array has only 2 components, add the third one if nb_components == 2: - calc = get_add_component_calc(source, - EntityType.NODE, field_name) + calc = get_add_component_calc(source, EntityType.NODE, field_name) vector_array = calc.ResultArrayName source = calc @@ -2102,15 +2190,20 @@ def GaussPointsOnField(proxy, entity, field_name, # Set field range if necessary data_range = get_data_range(proxy, entity, field_name, 
vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set display properties if is_colored: - gausspnt.ColorAttributeType = EntityType.get_pvtype(entity) - gausspnt.ColorArrayName = field_name + pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), field_name)) else: - gausspnt.ColorArrayName = '' + pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), None)) if color: gausspnt.DiffuseColor = color @@ -2153,7 +2246,7 @@ def GaussPointsOnField(proxy, entity, field_name, if is_proportional: mult = multiplier - if mult is None: + if mult is None and data_range[1] != 0: mult = abs(0.1 / data_range[1]) gausspnt.RadiusScalarRange = data_range @@ -2166,8 +2259,9 @@ def GaussPointsOnField(proxy, entity, field_name, gausspnt.RadiusTransferFunctionMode = 'Table' gausspnt.RadiusScalarRange = data_range gausspnt.RadiusUseScalarRange = 1 - gausspnt.RadiusIsProportional = 1 - gausspnt.RadiusProportionalFactor = mult + if mult is not None: + gausspnt.RadiusIsProportional = 1 + gausspnt.RadiusProportionalFactor = mult else: gausspnt.RadiusTransferFunctionEnabled = 0 gausspnt.RadiusMode = 'Constant' @@ -2209,6 +2303,11 @@ def GaussPointsOnField1(proxy, entity, field_name, Gauss Points as representation object. """ + proxy.UpdatePipeline() + select_cells_with_data(proxy, on_gauss=[field_name]) + + nb_components = get_nb_components(proxy, entity, field_name) + # Get time value time_value = get_time(proxy, timestamp_nb) @@ -2217,35 +2316,32 @@ def GaussPointsOnField1(proxy, entity, field_name, proxy.UpdatePipeline(time=time_value) # Create Gauss Points object - source = pvs.GaussPoints(proxy) + source = pvs.ELGAfieldToPointSprite(proxy) source.UpdatePipeline() - + # Get Gauss Points representation object gausspnt = pvs.GetRepresentation(source) # Get lookup table - entity_data_info = None - point_data_info = source.GetPointDataInformation() - if field_name in point_data_info.keys(): - entity_data_info = point_data_info - else: - entity_data_info = source.GetCellDataInformation() - nb_components = entity_data_info[field_name].GetNumberOfComponents() - lookup_table = get_lookup_table(field_name, nb_components, vector_mode) # Set field range if necessary data_range = get_data_range(proxy, entity, field_name, vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set display properties if is_colored: - gausspnt.ColorAttributeType = EntityType.get_pvtype(entity) - gausspnt.ColorArrayName = field_name + pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), field_name)) else: - gausspnt.ColorArrayName = '' + pvs.ColorBy(gausspnt, (EntityType.get_pvtype(entity), None)) if color: gausspnt.DiffuseColor = color @@ -2288,7 +2384,7 @@ def GaussPointsOnField1(proxy, entity, field_name, if is_proportional: mult = multiplier - if mult is None: + if mult is None and data_range[1] != 0: mult = abs(0.1 / data_range[1]) gausspnt.RadiusScalarRange = 
data_range @@ -2301,8 +2397,9 @@ def GaussPointsOnField1(proxy, entity, field_name, gausspnt.RadiusTransferFunctionMode = 'Table' gausspnt.RadiusScalarRange = data_range gausspnt.RadiusUseScalarRange = 1 - gausspnt.RadiusIsProportional = 1 - gausspnt.RadiusProportionalFactor = mult + if mult is not None: + gausspnt.RadiusIsProportional = 1 + gausspnt.RadiusProportionalFactor = mult else: gausspnt.RadiusTransferFunctionEnabled = 0 gausspnt.RadiusMode = 'Constant' @@ -2334,6 +2431,7 @@ def StreamLinesOnField(proxy, entity, field_name, timestamp_nb, Stream Lines as representation object. """ + proxy.UpdatePipeline() # We don't need mesh parts with no data on them if entity == EntityType.NODE: select_cells_with_data(proxy, on_points=[field_name]) @@ -2351,17 +2449,15 @@ def StreamLinesOnField(proxy, entity, field_name, timestamp_nb, pvs.GetRenderView().ViewTime = time_value pvs.UpdatePipeline(time_value, proxy) - # Extract only groups with data for the field - new_proxy = extract_groups_for_field(proxy, field_name, entity) - # Do merge - source = pvs.MergeBlocks(new_proxy) + source = pvs.MergeBlocks(proxy) + pvs.UpdatePipeline() # Cell data to point data if is_data_on_cells(proxy, field_name): cell_to_point = pvs.CellDatatoPointData(source) cell_to_point.PassCellData = 1 - cell_to_point.UpdatePipeline() + pvs.UpdatePipeline() source = cell_to_point vector_array = field_name @@ -2369,16 +2465,16 @@ def StreamLinesOnField(proxy, entity, field_name, timestamp_nb, if nb_components == 2: calc = get_add_component_calc(source, EntityType.NODE, field_name) vector_array = calc.ResultArrayName - calc.UpdatePipeline() + pvs.UpdatePipeline() source = calc # Stream Tracer stream = pvs.StreamTracer(source) stream.SeedType = "Point Source" stream.Vectors = ['POINTS', vector_array] - stream.SeedType = "Point Source" stream.IntegrationDirection = direction stream.IntegratorType = 'Runge-Kutta 2' + stream.SeedType = 'High Resolution Line Source' stream.UpdatePipeline() # Get Stream Lines representation object @@ -2390,17 +2486,22 @@ def StreamLinesOnField(proxy, entity, field_name, timestamp_nb, lookup_table = get_lookup_table(field_name, nb_components, vector_mode) # Set field range if necessary - data_range = get_data_range(new_proxy, entity, + data_range = get_data_range(proxy, entity, field_name, vector_mode) - lookup_table.LockScalarRange = 1 + if hasattr(lookup_table,"LockDataRange"): + lookup_table.LockDataRange = 1 + elif hasattr(lookup_table,"LockScalarRange"): + lookup_table.LockScalarRange = 1 + else: + raise RuntimeError("Object %s has no 'LockDataRange' or 'LockScalarRange' attribute!"%(lookup_table)) + lookup_table.RGBPoints = [data_range[0], 0, 0, 1, data_range[1], 1, 0, 0] # Set properties if is_colored: - streamlines.ColorAttributeType = EntityType.get_pvtype(entity) - streamlines.ColorArrayName = field_name + pvs.ColorBy(streamlines, (EntityType.get_pvtype(entity), field_name)) else: - streamlines.ColorArrayName = '' + pvs.ColorBy(streamlines, (EntityType.get_pvtype(entity), None)) if color: streamlines.DiffuseColor = color @@ -2418,81 +2519,83 @@ def MeshOnEntity(proxy, mesh_name, entity): Arguments: proxy -- the pipeline object, containig data - mesh_name -- the mesh name - entity -- the entity type + mesh_name -- the full or short name of mesh field Returns: Submesh as representation object of the given source. 
""" - # Select all cell types - select_all_cells(proxy) - - # Get subset of groups on the given entity - subset = get_group_names(proxy, mesh_name, entity) - - # Select only groups of the given entity type - proxy.Groups = subset + proxy.UpdatePipeline() + mesh_full_name = None + aList = mesh_name.split('/') + if len(aList) >= 2: + mesh_full_name = mesh_name + else: + mesh_full_name = find_mesh_full_name(proxy, mesh_name) + if not mesh_full_name: + raise RuntimeError, "The given mesh name was not found" + # Select only the given mesh + proxy.AllArrays = [mesh_full_name] proxy.UpdatePipeline() # Get representation object if the submesh is not empty prs = None if (proxy.GetDataInformation().GetNumberOfPoints() or proxy.GetDataInformation().GetNumberOfCells()): - prs = pvs.GetRepresentation(proxy) - prs.ColorArrayName = '' + my_view = pvs.GetRenderView() + prs = pvs.GetRepresentation(proxy, view=my_view) + prs.ColorArrayName = (None, '') return prs -def MeshOnGroup(proxy, group_name): +def MeshOnGroup(proxy, extrGroups, group_name): """Creates submesh on the group. Arguments: proxy -- the pipeline object, containig data group_name -- the full group name + extrGroups -- all extracted groups object Returns: Representation object of the given source with single group selected. """ - # Select all cell types - select_all_cells(proxy) - - # Select only the group with the given name - one_group = [group_name] - proxy.Groups = one_group proxy.UpdatePipeline() + # Deselect all groups + extrGroups.AllGroups = [] + extrGroups.UpdatePipelineInformation() + # Select only the group with the given name + extrGroups.AllGroups = [group_name] + extrGroups.UpdatePipelineInformation() # Get representation object if the submesh is not empty prs = None # Check if the group was set - if proxy.Groups.GetData() == one_group: - group_entity = get_group_entity(group_name) + if len(extrGroups.AllGroups) == 1 and \ + extrGroups.AllGroups[0] == group_name: # Check if the submesh is not empty - nb_items = 0 - if group_entity == EntityType.NODE: - nb_items = proxy.GetDataInformation().GetNumberOfPoints() - elif group_entity == EntityType.CELL: - nb_items = proxy.GetDataInformation().GetNumberOfCells() + nb_points = proxy.GetDataInformation().GetNumberOfPoints() + nb_cells = proxy.GetDataInformation().GetNumberOfCells() - if nb_items: - prs = pvs.GetRepresentation(proxy) - prs.ColorArrayName = '' + if nb_points or nb_cells: +# prs = pvs.GetRepresentation(proxy) + prs = pvs.Show() + prs.ColorArrayName = (None, '') + display_only(prs) return prs -def CreatePrsForFile(paravis_instance, file_name, prs_types, +def CreatePrsForFile(file_name, prs_types, picture_dir, picture_ext): """Build presentations of the given types for the file. Build presentations for all fields on all timestamps. Arguments: - paravis_instance: ParaVis module instance object file_name: full path to the MED file prs_types: the list of presentation types to build picture_dir: the directory path for saving snapshots @@ -2503,24 +2606,25 @@ def CreatePrsForFile(paravis_instance, file_name, prs_types, print "Import " + file_name.split(os.sep)[-1] + "..." 
try: - paravis_instance.ImportFile(file_name) - proxy = pvs.GetActiveSource() + proxy = pvs.MEDReader(FileName=file_name) if proxy is None: print "FAILED" else: - proxy.UpdatePipeline() + #proxy.UpdatePipeline() print "OK" except: print "FAILED" else: # Get view view = pvs.GetRenderView() + time_value = get_time(proxy, 0) + view.ViewTime = time_value + pvs.UpdatePipeline(time=time_value, proxy=proxy) # Create required presentations for the proxy CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext) - def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext): """Build presentations of the given types for all fields of the proxy. @@ -2535,10 +2639,9 @@ def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext): picture_ext: graphics files extension (determines file type) """ + proxy.UpdatePipeline() # List of the field names - field_names = list(proxy.PointArrays.GetData()) - nb_on_nodes = len(field_names) - field_names.extend(proxy.CellArrays.GetData()) + fields_info = proxy.GetProperty("FieldsTreeInfo")[::2] # Add path separator to the end of picture path if necessery if not picture_dir.endswith(os.sep): @@ -2546,77 +2649,56 @@ def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext): # Mesh Presentation if PrsTypeEnum.MESH in prs_types: - # Create Mesh presentation. Build all possible submeshes. - - # Remember the current state - groups = list(proxy.Groups) - # Iterate on meshes - mesh_names = get_mesh_names(proxy) + mesh_names = get_mesh_full_names(proxy) for mesh_name in mesh_names: - # Build mesh on nodes and cells - for entity in (EntityType.NODE, EntityType.CELL): - entity_name = EntityType.get_name(entity) - if if_possible(proxy, mesh_name, entity, PrsTypeEnum.MESH): - print "Creating submesh on " + entity_name + " for '" + mesh_name + "' mesh... " - prs = MeshOnEntity(proxy, mesh_name, entity) - if prs is None: - print "FAILED" - continue - else: - print "OK" - # Construct image file name - pic_name = picture_dir + mesh_name + "_" + entity_name + "." + picture_ext - - # Show and dump the presentation into a graphics file - process_prs_for_test(prs, view, pic_name, False) - - # Build submesh on all groups of the mesh - mesh_groups = get_group_names(proxy, mesh_name, - entity, wo_nogroups=True) - for group in mesh_groups: - print "Creating submesh on group " + group + "... " - prs = MeshOnGroup(proxy, group) + # Build mesh field presentation + print "Creating submesh for '" + get_field_short_name(mesh_name) + "' mesh... " + prs = MeshOnEntity(proxy, mesh_name, None) + if prs is None: + print "FAILED" + continue + else: + print "OK" + # Construct image file name + pic_name = picture_dir + get_field_short_name(mesh_name) + "." + picture_ext + + # Show and dump the presentation into a graphics file + process_prs_for_test(prs, view, pic_name, False) + + # Create Mesh presentation. Build all groups. + extGrp = pvs.ExtractGroup() + extGrp.UpdatePipelineInformation() + if if_possible(proxy, None, None, PrsTypeEnum.MESH, extGrp): + for group in get_group_names(extGrp): + print "Creating submesh on group " + get_group_short_name(group) + "... " + prs = MeshOnGroup(proxy, extGrp, group) if prs is None: print "FAILED" continue else: print "OK" # Construct image file name - pic_name = picture_dir + group.replace('/', '_') + "." + picture_ext + pic_name = picture_dir + get_group_short_name(group) + "." 
+ picture_ext # Show and dump the presentation into a graphics file process_prs_for_test(prs, view, pic_name, False) - # Restore the state - proxy.Groups = groups - proxy.UpdatePipeline() - # Presentations on fields - for (i, field_name) in enumerate(field_names): + for field in fields_info: + field_name = get_field_short_name(field) + # Ignore mesh presentation + if field_name == get_field_mesh_name(field): + continue + field_entity = get_field_entity(field) # Select only the current field: # necessary for getting the right timestamps - cell_arrays = proxy.CellArrays.GetData() - point_arrays = proxy.PointArrays.GetData() - field_entity = None - if (i >= nb_on_nodes): - field_entity = EntityType.CELL - proxy.PointArrays.DeselectAll() - proxy.CellArrays = [field_name] - else: - field_entity = EntityType.NODE - proxy.CellArrays.DeselectAll() - proxy.PointArrays = [field_name] + proxy.AllArrays = [field] + proxy.UpdatePipeline() # Get timestamps - proxy.UpdatePipelineInformation() timestamps = proxy.TimestepValues.GetData() - # Restore fields selection state - proxy.CellArrays = cell_arrays - proxy.PointArrays = point_arrays - proxy.UpdatePipelineInformation() - for prs_type in prs_types: # Ignore mesh presentation if prs_type == PrsTypeEnum.MESH: @@ -2634,9 +2716,25 @@ def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext): for timestamp_nb in xrange(1, len(timestamps) + 1): time = timestamps[timestamp_nb - 1] + if (time == 0.0): + scalar_range = get_data_range(proxy, field_entity, + field_name, cut_off=True) + # exclude time stamps with null lenght of scalar range + if (scalar_range[0] == scalar_range[1]): + continue print "Creating " + prs_name + " on " + field_name + ", time = " + str(time) + "... " - prs = create_prs(prs_type, proxy, - field_entity, field_name, timestamp_nb) + try: + prs = create_prs(prs_type, proxy, + field_entity, field_name, timestamp_nb) + except ValueError: + """ This exception comes from get_nb_components(...) function. + The reason of exception is an implementation of MEDReader + activating the first leaf when reading MED file (refer to + MEDFileFieldRepresentationTree::activateTheFirst() and + MEDFileFieldRepresentationTree::getTheSingleActivated(...) methods). + """ + print "ValueError exception is catched" + continue if prs is None: print "FAILED" continue @@ -2648,3 +2746,12 @@ def CreatePrsForProxy(proxy, view, prs_types, picture_dir, picture_ext): # Show and dump the presentation into a graphics file process_prs_for_test(prs, view, pic_name) + return + + +def delete_pv_object(obj): + # There is a bug when repeating CreateRenderView/Delete calls + # Here is a workaround proposed by KW (#10744) + import gc + del obj + gc.collect()