forked from pyvista/pyvista
-
Notifications
You must be signed in to change notification settings - Fork 0
/
filters.py
659 lines (552 loc) · 25.6 KB
/
filters.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
"""
These classes hold methods to apply general filters to any data type.
By inheriting these classes into the wrapped VTK data structures, a user
can easily apply common filters in an intuitive manner.
Example
-------
>>> import vtki
>>> from vtki import examples
>>> dataset = examples.load_uniform()
>>> # Threshold
>>> thresh = dataset.threshold([100, 500])
>>> # Slice
>>> slc = dataset.slice()
>>> # Clip
>>> clp = dataset.clip(invert=True)
>>> # Contour
>>> iso = dataset.contour()
"""
import collections
import collections.abc
import logging

import numpy as np
import vtk

import vtki
from vtki.utilities import get_scalar, wrap, is_inside_bounds
# Mapping of conventional axis names to unit normal vectors.  A leading '-'
# selects the opposite direction (e.g. '-x' -> (-1, 0, 0)).  Used by the
# clip/slice filters to resolve string normals.
NORMALS = {
    'x': [1, 0, 0],
    'y': [0, 1, 0],
    'z': [0, 0, 1],
    '-x': [-1, 0, 0],
    '-y': [0, -1, 0],
    '-z': [0, 0, -1],
}
def _get_output(algorithm, iport=0, iconnection=0, oport=0, active_scalar=None,
                active_scalar_field='point'):
    """Wrap an algorithm's output dataset and copy the input's vtki metadata.

    Optionally makes ``active_scalar`` the active array on the wrapped
    output, searching ``active_scalar_field`` ('point' or 'cell') first.
    """
    source = algorithm.GetInputDataObject(iport, iconnection)
    output = wrap(algorithm.GetOutputDataObject(oport))
    # Carry the vtki-specific metadata over from the input object
    output.copy_meta_from(source)
    if active_scalar is not None:
        output.set_active_scalar(active_scalar, preference=active_scalar_field)
    return output
def _generate_plane(normal, origin):
    """Build a ``vtk.vtkPlane`` from a normal vector and an origin point."""
    plane = vtk.vtkPlane()
    nx, ny, nz = normal[0], normal[1], normal[2]
    plane.SetNormal(nx, ny, nz)
    ox, oy, oz = origin[0], origin[1], origin[2]
    plane.SetOrigin(ox, oy, oz)
    return plane
class DataSetFilters(object):
    """A set of common filters that can be applied to any vtkDataSet"""

    def clip(dataset, normal='x', origin=None, invert=True):
        """
        Clip a dataset with a plane given by an origin and a normal vector.
        If no parameters are given, the clip occurs through the center of
        the dataset along the x axis.

        Parameters
        ----------
        normal : tuple(float) or str
            Length 3 tuple for the normal vector direction. Can also be
            specified as a string conventional direction such as ``'x'`` for
            ``(1,0,0)`` or ``'-x'`` for ``(-1,0,0)``, etc.

        origin : tuple(float)
            The center ``(x,y,z)`` coordinate of the plane on which the clip
            occurs

        invert : bool
            Flag on whether to flip/invert the clip

        """
        # Resolve a named direction ('x', '-z', ...) to its vector form
        if isinstance(normal, str):
            normal = NORMALS[normal.lower()]
        # Default the plane origin to the dataset center
        if origin is None:
            origin = dataset.center
        clip_plane = _generate_plane(normal, origin)
        # Configure and run the clip
        alg = vtk.vtkClipDataSet()
        alg.SetInputDataObject(dataset)   # the data we want to clip
        alg.SetClipFunction(clip_plane)   # clip against the implicit plane
        alg.SetInsideOut(invert)          # flip which side is kept if needed
        alg.Update()                      # perform the clip
        return _get_output(alg)
def clip_box(dataset, bounds=None, invert=True, factor=0.35):
"""Clips a dataset by a bounding box defined by the bounds. If no bounds
are given, a corner of the dataset bounds will be removed.
Parameters
----------
bounds : tuple(float)
Length 6 iterable of floats: (xmin, xmax, ymin, ymax, zmin, zmax)
invert : bool
Flag on whether to flip/invert the clip
factor : float, optional
If bounds are not given this is the factor along each axis to
extract the default box.
"""
if bounds is None:
def _get_quarter(dmin, dmax):
return dmax - ((dmax - dmin) * factor)
xmin, xmax, ymin, ymax, zmin, zmax = dataset.bounds
xmin = _get_quarter(xmin, xmax)
ymin = _get_quarter(ymin, ymax)
zmin = _get_quarter(zmin, zmax)
bounds = [xmin, xmax, ymin, ymax, zmin, zmax]
if not isinstance(bounds, collections.Iterable) or len(bounds) != 6:
raise AssertionError('Bounds must be a length 6 iterable of floats')
xmin, xmax, ymin, ymax, zmin, zmax = bounds
alg = vtk.vtkBoxClipDataSet()
alg.SetInputDataObject(dataset)
alg.SetBoxClip(xmin, xmax, ymin, ymax, zmin, zmax)
port = 0
if invert:
# invert the clip if needed
port = 1
alg.GenerateClippedOutputOn()
alg.Update()
return _get_output(alg, oport=port)
def slice(dataset, normal='x', origin=None, generate_triangles=False):
"""Slice a dataset by a plane at the specified origin and normal vector
orientation. If no origin is specified, the center of the input dataset will
be used.
Parameters
----------
normal : tuple(float) or str
Length 3 tuple for the normal vector direction. Can also be
specified as a string conventional direction such as ``'x'`` for
``(1,0,0)`` or ``'-x'`` for ``(-1,0,0)```, etc.
origin : tuple(float)
The center (x,y,z) coordinate of the plane on which the slice occurs
generate_triangles: bool, optional
If this is enabled (``False`` by default), the output will be
triangles otherwise, the output will be the intersection polygons.
"""
if isinstance(normal, str):
normal = NORMALS[normal.lower()]
# find center of data if origin not specified
if origin is None:
origin = dataset.center
if not is_inside_bounds(origin, dataset.bounds):
raise AssertionError('Slice is outside data bounds.')
# create the plane for clipping
plane = _generate_plane(normal, origin)
# create slice
alg = vtk.vtkCutter() # Construct the cutter object
alg.SetInputDataObject(dataset) # Use the grid as the data we desire to cut
alg.SetCutFunction(plane) # the the cutter to use the plane we made
if not generate_triangles:
alg.GenerateTrianglesOff()
alg.Update() # Perfrom the Cut
return _get_output(alg)
def slice_orthogonal(dataset, x=None, y=None, z=None, generate_triangles=False):
"""Creates three orthogonal slices through the dataset on the three
caresian planes. Yields a MutliBlock dataset of the three slices
Parameters
----------
x : float
The X location of the YZ slice
y : float
The Y location of the XZ slice
z : float
The Z location of the XY slice
generate_triangles: bool, optional
If this is enabled (``False`` by default), the output will be
triangles otherwise, the output will be the intersection polygons.
"""
output = vtki.MultiBlock()
# Create the three slices
if x is None:
x = dataset.center[0]
if y is None:
y = dataset.center[1]
if z is None:
z = dataset.center[2]
output[0, 'YZ'] = dataset.slice(normal='x', origin=[x,y,z], generate_triangles=generate_triangles)
output[1, 'XZ'] = dataset.slice(normal='y', origin=[x,y,z], generate_triangles=generate_triangles)
output[2, 'XY'] = dataset.slice(normal='z', origin=[x,y,z], generate_triangles=generate_triangles)
return output
def slice_along_axis(dataset, n=5, axis='x', tolerance=None, generate_triangles=False):
"""Create many slices of the input dataset along a specified axis.
Parameters
----------
n : int
The number of slices to create
axis : str or int
The axis to generate the slices along. Perpendicular to the slices.
Can be string name (``'x'``, ``'y'``, or ``'z'``) or axis index
(``0``, ``1``, or ``2``).
tolerance : float, optional
The toleranceerance to the edge of the dataset bounds to create the slices
generate_triangles: bool, optional
If this is enabled (``False`` by default), the output will be
triangles otherwise, the output will be the intersection polygons.
"""
output = vtki.MultiBlock()
if isinstance(axis, str):
axes = {'x':0, 'y':1, 'z':2}
try:
ax = axes[axis]
except KeyError:
raise RuntimeError('Axis ({}) not understood'.format(axis))
else:
ax = axis
# get the locations along that axis
if tolerance is None:
tolerance = (dataset.bounds[ax*2+1] - dataset.bounds[ax*2]) * 0.01
rng = np.linspace(dataset.bounds[ax*2]+tolerance, dataset.bounds[ax*2+1]-tolerance, n)
center = list(dataset.center)
# Make each of the slices
for i in range(n):
center[ax] = rng[i]
slc = DataSetFilters.slice(dataset, normal=axis, origin=center, generate_triangles=generate_triangles)
output[i, 'slice%.2d'%i] = slc
return output
def threshold(dataset, value=None, scalars=None, invert=False, continuous=False,
preference='cell'):
"""
This filter will apply a ``vtkThreshold`` filter to the input dataset and
return the resulting object. This extracts cells where scalar value in each
cell satisfies threshold criterion. If scalars is None, the inputs
active_scalar is used.
Parameters
----------
value : float or iterable, optional
Single value or (min, max) to be used for the data threshold. If
iterable, then length must be 2. If no value is specified, the
non-NaN data range will be used to remove any NaN values.
scalars : str, optional
Name of scalars to threshold on. Defaults to currently active scalars.
invert : bool, optional
If value is a single value, when invert is True cells are kept when
their values are below parameter "value". When invert is False
cells are kept when their value is above the threshold "value".
Default is False: yielding above the threshold "value".
continuous : bool, optional
When True, the continuous interval [minimum cell scalar,
maxmimum cell scalar] will be used to intersect the threshold bound,
rather than the set of discrete scalar values from the vertices.
preference : str, optional
When scalars is specified, this is the perfered scalar type to search
for in the dataset. Must be either 'point' or 'cell'.
"""
# set the scalaras to threshold on
if scalars is None:
field, scalars = dataset.active_scalar_info
arr, field = get_scalar(dataset, scalars, preference=preference, info=True)
if arr is None:
raise AssertionError('No arrays present to threshold.')
# If using an inverted range, merge the result of two fitlers:
if isinstance(value, collections.Iterable) and invert:
valid_range = [np.nanmin(arr), np.nanmax(arr)]
# Create two thresholds
t1 = dataset.threshold([valid_range[0], value[0]], scalars=scalars,
continuous=continuous, preference=preference, invert=False)
t2 = dataset.threshold([value[1], valid_range[1]], scalars=scalars,
continuous=continuous, preference=preference, invert=False)
# Use an AppendFilter to merge the two results
appender = vtk.vtkAppendFilter()
appender.AddInputData(t1)
appender.AddInputData(t2)
appender.Update()
return _get_output(appender)
# Run a standard threshold algorithm
alg = vtk.vtkThreshold()
alg.SetInputDataObject(dataset)
alg.SetInputArrayToProcess(0, 0, 0, field, scalars) # args: (idx, port, connection, field, name)
# set thresholding parameters
alg.SetUseContinuousCellRange(continuous)
# use valid range if no value given
if value is None:
value = dataset.get_data_range(scalars)
# check if value is iterable (if so threshold by min max range like ParaView)
if isinstance(value, collections.Iterable):
if len(value) != 2:
raise AssertionError('Value range must be length one for a float value or two for min/max; not ({}).'.format(value))
alg.ThresholdBetween(value[0], value[1])
else:
# just a single value
if invert:
alg.ThresholdByLower(value)
else:
alg.ThresholdByUpper(value)
# Run the threshold
alg.Update()
return _get_output(alg)
def threshold_percent(dataset, percent=0.50, scalars=None, invert=False,
continuous=False, preference='cell'):
"""Thresholds the dataset by a percentage of its range on the active
scalar array or as specified
Parameters
----------
percent : float or tuple(float), optional
The percentage (0,1) to threshold. If value is out of 0 to 1 range,
then it will be divided by 100 and checked to be in that range.
scalars : str, optional
Name of scalars to threshold on. Defaults to currently active scalars.
invert : bool, optional
When invert is True cells are kept when their values are below the
percentage of the range. When invert is False, cells are kept when
their value is above the percentage of the range.
Default is False: yielding above the threshold "value".
continuous : bool, optional
When True, the continuous interval [minimum cell scalar,
maxmimum cell scalar] will be used to intersect the threshold bound,
rather than the set of discrete scalar values from the vertices.
preference : str, optional
When scalars is specified, this is the perfered scalar type to search
for in the dataset. Must be either 'point' or 'cell'.
"""
if scalars is None:
field, tscalars = dataset.active_scalar_info
else:
tscalars = scalars
dmin, dmax = dataset.get_data_range(arr=tscalars, preference=preference)
def _check_percent(percent):
"""Make sure percent is between 0 and 1 or fix if between 0 and 100."""
if percent >= 1:
percent = float(percent) / 100.0
if percent > 1:
raise RuntimeError('Percentage ({}) is out of range (0, 1).'.format(percent))
if percent < 1e-10:
raise RuntimeError('Percentage ({}) is too close to zero or negative.'.format(percent))
return percent
def _get_val(percent, dmin, dmax):
"""Gets the value from a percentage of a range"""
percent = _check_percent(percent)
return dmin + float(percent) * (dmax - dmin)
# Compute the values
if isinstance(percent, collections.Iterable):
# Get two values
value = [_get_val(percent[0], dmin, dmax), _get_val(percent[1], dmin, dmax)]
else:
# Compute one value to threshold
value = _get_val(percent, dmin, dmax)
# Use the normal thresholding function on these values
return DataSetFilters.threshold(dataset, value=value, scalars=scalars,
invert=invert, continuous=continuous, preference=preference)
def outline(dataset, generate_faces=False):
"""Produces an outline of the full extent for the input dataset.
Parameters
----------
generate_faces : bool, optional
Generate solid faces for the box. This is off by default
"""
alg = vtk.vtkOutlineFilter()
alg.SetInputDataObject(dataset)
alg.SetGenerateFaces(generate_faces)
alg.Update()
return wrap(alg.GetOutputDataObject(0))
def outline_corners(dataset, factor=0.2):
"""Produces an outline of the corners for the input dataset.
Parameters
----------
factor : float, optional
controls the relative size of the corners to the length of the
corresponding bounds
"""
alg = vtk.vtkOutlineCornerFilter()
alg.SetInputDataObject(dataset)
alg.SetCornerFactor(factor)
alg.Update()
return wrap(alg.GetOutputDataObject(0))
def extract_geometry(dataset):
"""Extract the outer surface of a volume or structured grid dataset as
PolyData. This will extract all 0D, 1D, and 2D cells producing the
boundary faces of the dataset.
"""
alg = vtk.vtkGeometryFilter()
alg.SetInputDataObject(dataset)
alg.Update()
return _get_output(alg)
def wireframe(dataset):
"""Extract all the internal/external edges of the dataset as PolyData.
This produces a full wireframe representation of the input dataset.
"""
alg = vtk.vtkExtractEdges()
alg.SetInputDataObject(dataset)
alg.Update()
return _get_output(alg)
def elevation(dataset, low_point=None, high_point=None, scalar_range=None,
preference='point', set_active=True):
"""Generate scalar values on a dataset. The scalar values lie within a
user specified range, and are generated by computing a projection of
each dataset point onto a line.
The line can be oriented arbitrarily.
A typical example is to generate scalars based on elevation or height
above a plane.
Parameters
----------
low_point : tuple(float), optional
The low point of the projection line in 3D space. Default is bottom
center of the dataset. Otherwise pass a length 3 tuple(float).
high_point : tuple(float), optional
The high point of the projection line in 3D space. Default is top
center of the dataset. Otherwise pass a length 3 tuple(float).
scalar_range : str or tuple(float), optional
The scalar range to project to the low and high points on the line
that will be mapped to the dataset. If None given, the values will
be computed from the elevation (Z component) range between the
high and low points. Min and max of a range can be given as a length
2 tuple(float). If ``str`` name of scalara array present in the
dataset given, the valid range of that array will be used.
preference : str, optional
When a scalar name is specified for ``scalar_range``, this is the
perfered scalar type to search for in the dataset.
Must be either 'point' or 'cell'.
set_active : bool, optional
A boolean flag on whethter or not to set the new `Elevation` scalar
as the active scalar array on the output dataset.
Warning
-------
This will create a scalar array named `Elevation` on the point data of
the input dataset and overasdf write an array named `Elevation` if present.
"""
# Fix the projection line:
if low_point is None:
low_point = list(dataset.center)
low_point[2] = dataset.bounds[4]
if high_point is None:
high_point = list(dataset.center)
high_point[2] = dataset.bounds[5]
# Fix scalar_range:
if scalar_range is None:
scalar_range = (low_point[2], high_point[2])
elif isinstance(scalar_range, str):
scalar_range = dataset.get_data_range(arr=scalar_range, preference=preference)
elif isinstance(scalar_range, collections.Iterable):
assert len(scalar_range) == 2, 'scalar_range must have a length of two defining the min and max'
else:
raise RuntimeError('scalar_range argument ({}) not understood.'.format(type(scalar_range)))
# Construct the filter
alg = vtk.vtkElevationFilter()
alg.SetInputDataObject(dataset)
# Set the parameters
alg.SetScalarRange(scalar_range)
alg.SetLowPoint(low_point)
alg.SetHighPoint(high_point)
alg.Update()
# Decide on updating active scalar array
name = 'Elevation' # Note that this is added to the PointData
if not set_active:
name = None
return _get_output(alg, active_scalar=name, active_scalar_field='point')
def contour(dataset, isosurfaces=10, scalars=None, compute_normals=False,
compute_gradients=False, compute_scalars=True, preference='point'):
"""Contours an input dataset by an array. ``isosurfaces`` can be an integer
specifying the number of isosurfaces in the data range or an iterable set of
values for explicitly setting the isosurfaces.
Parameters
----------
isosurfaces : int or iterable
Number of isosurfaces to compute across valid data range or an
iterable of float values to explicitly use as the isosurfaces.
scalars : str, optional
Name of scalars to threshold on. Defaults to currently active scalars.
compute_normals : bool, optional
compute_gradients : bool, optional
Desc
compute_scalars : bool, optional
Preserves the scalar values that are being contoured
preference : str, optional
When scalars is specified, this is the perfered scalar type to search
for in the dataset. Must be either 'point' or 'cell'.
"""
# Make sure the input has scalars to contour on
if dataset.n_scalars < 1:
raise AssertionError('Input dataset for the contour filter must have scalar data.')
alg = vtk.vtkContourFilter()
alg.SetInputDataObject(dataset)
alg.SetComputeNormals(compute_normals)
alg.SetComputeGradients(compute_gradients)
alg.SetComputeScalars(compute_scalars)
# set the array to contour on
if scalars is None:
field, scalars = dataset.active_scalar_info
else:
_, field = get_scalar(dataset, scalars, preference=preference, info=True)
# NOTE: only point data is allowed? well cells works but seems buggy?
if field != 0:
raise AssertionError('Contour filter only works on Point data. Array ({}) is in the Cell data.'.format(scalars))
alg.SetInputArrayToProcess(0, 0, 0, field, scalars) # args: (idx, port, connection, field, name)
# set the isosurfaces
if isinstance(isosurfaces, int):
# generate values
alg.GenerateValues(isosurfaces, dataset.get_data_range(scalars))
elif isinstance(isosurfaces, collections.Iterable):
alg.SetNumberOfContours(len(isosurfaces))
for i, val in enumerate(isosurfaces):
alg.SetValue(i, val)
else:
raise RuntimeError('isosurfaces not understood.')
alg.Update()
return _get_output(alg)
    def texture_map_to_plane(dataset, origin=None, point_u=None, point_v=None,
                             inplace=False, name='Texture Coordinates'):
        """Texture map this dataset to a user defined plane. This is often used
        to define a plane to texture map an image to this dataset. The plane
        defines the spatial reference and extent of that image.

        Parameters
        ----------
        origin : tuple(float)
            Length 3 iterable of floats defining the XYZ coordinates of the
            BOTTOM LEFT CORNER of the plane

        point_u : tuple(float)
            Length 3 iterable of floats defining the XYZ coordinates of the
            BOTTOM RIGHT CORNER of the plane

        point_v : tuple(float)
            Length 3 iterable of floats defining the XYZ coordinates of the
            TOP LEFT CORNER of the plane

        inplace : bool, optional
            If True, the new texture coordinates will be added to the dataset
            inplace. If False (default), a new dataset is returned with the
            textures coordinates

        name : str, optional
            The string name to give the new texture coordinates if applying
            the filter inplace.

        """
        alg = vtk.vtkTextureMapToPlane()
        # If any corner is unspecified, let VTK fit the plane automatically
        if origin is None or point_u is None or point_v is None:
            alg.SetAutomaticPlaneGeneration(True)
        else:
            alg.SetOrigin(origin) # BOTTOM LEFT CORNER
            alg.SetPoint1(point_u) # BOTTOM RIGHT CORNER
            alg.SetPoint2(point_v) # TOP LEFT CORNER
        alg.SetInputDataObject(dataset)
        alg.Update()
        output = _get_output(alg)
        if not inplace:
            return output
        # In-place: copy the generated texture coordinates back onto the
        # INPUT dataset under ``name``
        t_coords = output.GetPointData().GetTCoords()
        t_coords.SetName(name)
        # Preserve a reference to any pre-existing texture coordinates
        otc = dataset.GetPointData().GetTCoords()
        dataset.GetPointData().SetTCoords(t_coords)
        dataset.GetPointData().AddArray(t_coords)
        # CRITICAL: re-adding the old array must happen AFTER the new
        # TCoords are set, or the new coordinates get clobbered
        dataset.GetPointData().AddArray(otc) # Add old ones back at the end
        return # No return type because it is inplace
def compute_cell_sizes(dataset, length=False, area=True, volume=True):
"""This filter computes sizes for 1D (length), 2D (area) and 3D (volume)
cells.
Parameters
----------
length : bool
Specify whether or not to compute the length of 1D cells.
area : bool
Specify whether or not to compute the area of 2D cells.
volume : bool
Specify whether or not to compute the volume of 3D cells.
"""
alg = vtk.vtkCellSizeFilter()
alg.SetInputDataObject(dataset)
alg.SetComputeArea(area)
alg.SetComputeVolume(volume)
alg.SetComputeLength(length)
alg.SetComputeVertexCount(False)
alg.Update()
return _get_output(alg)