-
Notifications
You must be signed in to change notification settings - Fork 563
/
graph.py
996 lines (839 loc) · 29.5 KB
/
graph.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
"""
graph.py
-------------
Deal with graph operations. Primarily deal with graphs in (n, 2)
edge list form, and abstract the backend graph library being used.
Currently uses networkx or scipy.sparse.csgraph backend.
"""
import numpy as np
import collections
from . import util
from . import grouping
from . import exceptions
from .constants import log, tol
from .geometry import faces_to_edges
try:
from scipy.sparse import csgraph, coo_matrix
except BaseException as E:
# re-raise exception when used
csgraph = exceptions.ExceptionModule(E)
coo_matrix = exceptions.closure(E)
try:
import networkx as nx
except BaseException as E:
# create a dummy module which will raise the ImportError
# or other exception only when someone tries to use networkx
nx = exceptions.ExceptionModule(E)
def face_adjacency(faces=None,
                   mesh=None,
                   return_edges=False):
    """
    Returns an (n, 2) list of face indices.
    Each pair of faces in the list shares an edge, making them adjacent.

    Parameters
    -----------
    faces : (n, 3) int, or None
      Vertex indices representing triangles
    mesh : Trimesh object
      If passed will use cached edges
      instead of generating from faces
    return_edges : bool
      Return the edges shared by adjacent faces

    Returns
    ----------
    adjacency : (m, 2) int
      Indexes of faces that are adjacent
    edges : (m, 2) int
      Only returned if return_edges is True
      Indexes of vertices which make up the
      edges shared by the adjacent faces

    Examples
    ----------
    This is useful for lots of things such as finding
    face-connected components:
    ```python
    >>> graph = nx.Graph()
    >>> graph.add_edges_from(mesh.face_adjacency)
    >>> groups = nx.connected_components(graph)
    ```
    """
    if mesh is None:
        # first generate the list of edges for the current faces
        # also return the index for which face the edge is from
        edges, edges_face = faces_to_edges(faces, return_index=True)
        # make sure edge rows are sorted
        edges.sort(axis=1)
    else:
        # if passed a mesh, used the cached values
        edges = mesh.edges_sorted
        edges_face = mesh.edges_face

    # this will return the indices for duplicate edges
    # every edge appears twice in a well constructed mesh
    # so for every row in edge_idx:
    # edges[edge_idx[*][0]] == edges[edge_idx[*][1]]
    # in this call to group rows we discard edges which
    # don't occur twice
    edge_groups = grouping.group_rows(edges, require_count=2)

    if len(edge_groups) == 0:
        log.debug('No adjacent faces detected! Did you merge vertices?')

    # the pairs of all adjacent faces
    # so for every row in face_idx, self.faces[face_idx[*][0]] and
    # self.faces[face_idx[*][1]] will share an edge
    adjacency = edges_face[edge_groups]

    # degenerate faces may appear in adjacency as the same value
    nondegenerate = adjacency[:, 0] != adjacency[:, 1]
    adjacency = adjacency[nondegenerate]

    # sort pairs in-place so we can search for indexes with ordered pairs
    adjacency.sort(axis=1)

    if return_edges:
        # the vertex pair making up the shared edge for
        # each adjacent pair of faces
        adjacency_edges = edges[edge_groups[:, 0][nondegenerate]]
        assert len(adjacency_edges) == len(adjacency)
        return adjacency, adjacency_edges
    return adjacency
def face_adjacency_unshared(mesh):
    """
    Return the vertex index of the two vertices not in the shared
    edge between two adjacent faces.

    Parameters
    ----------
    mesh : Trimesh object
      Input mesh

    Returns
    -----------
    vid_unshared : (len(mesh.face_adjacency), 2) int
      Indexes of mesh.vertices;
      for degenerate faces without exactly
      one unshared vertex per face it will be -1
    """
    # the non-shared vertex index is the same shape
    # as face_adjacency holding vertex indices vs face indices
    # initialized to -1 so degenerate rows stay flagged
    vid_unshared = np.zeros_like(mesh.face_adjacency,
                                 dtype=np.int64) - 1
    # get the shared edges between adjacent faces
    edges = mesh.face_adjacency_edges

    # loop through the two columns of face adjacency
    for i, fid in enumerate(mesh.face_adjacency.T):
        # faces from the current column of face adjacency
        faces = mesh.faces[fid]
        # should have one True per row of (3,)
        # index of vertex not included in shared edge
        unshared = np.logical_not(np.logical_or(
            faces == edges[:, 0].reshape((-1, 1)),
            faces == edges[:, 1].reshape((-1, 1))))
        # each row should have exactly one uncontained vertex
        row_ok = unshared.sum(axis=1) == 1
        # any degenerate row should be ignored
        unshared[~row_ok, :] = False
        # set the unshared vertex index for the valid rows
        # of the current column of face adjacency
        vid_unshared[row_ok, i] = faces[unshared]

    return vid_unshared
def face_adjacency_radius(mesh):
    """
    Compute an approximate radius between adjacent faces.

    Parameters
    --------------
    mesh : trimesh.Trimesh

    Returns
    -------------
    radii : (len(self.face_adjacency),) float
      Approximate radius between faces
      Parallel faces will have a value of np.inf
    span : (len(self.face_adjacency),) float
      Perpendicular projection distance of two
      unshared vertices onto the shared edge
    """
    # solve for the radius of the adjacent faces
    #      distance
    # R = ---------------
    #      2 * sin(theta)
    # faces with nearly-zero angle are treated as parallel
    nonzero = mesh.face_adjacency_angles > np.radians(.01)
    denominator = np.abs(
        2.0 * np.sin(mesh.face_adjacency_angles[nonzero]))

    # consider the distance between the non-shared vertices of the
    # face adjacency pair as the key distance
    point_pairs = mesh.vertices[mesh.face_adjacency_unshared]
    vectors = np.diff(point_pairs,
                      axis=1).reshape((-1, 3))

    # the vertex indices of the shared edge for the adjacency pair
    edges = mesh.face_adjacency_edges
    # unit vector along the shared edge
    edges_vec = util.unitize(np.diff(mesh.vertices[edges],
                                     axis=1).reshape((-1, 3)))

    # the vector of the perpendicular projection to the shared edge
    perp = np.subtract(
        vectors, (util.diagonal_dot(
            vectors, edges_vec).reshape(
                (-1, 1)) * edges_vec))
    # the length of the perpendicular projection
    span = util.row_norm(perp)

    # complete the values for non-infinite radii
    radii = np.ones(len(mesh.face_adjacency)) * np.inf
    radii[nonzero] = span[nonzero] / denominator

    return radii, span
def vertex_adjacency_graph(mesh):
    """
    Returns a networkx graph representing the vertices and
    their connections in the mesh.

    Parameters
    ----------
    mesh : Trimesh object

    Returns
    ---------
    graph : networkx.Graph
      Graph representing vertices and edges between
      them where vertices are nodes and edges are edges

    Examples
    ----------
    This is useful for getting nearby vertices for a given vertex,
    potentially for some simple smoothing techniques.
    >>> graph = mesh.vertex_adjacency_graph
    >>> graph.neighbors(0)
    > [1, 3, 4]
    """
    # construct an undirected graph directly from
    # the deduplicated vertex-index edge list
    return nx.from_edgelist(mesh.edges_unique)
def shared_edges(faces_a, faces_b):
    """
    Given two sets of faces, find the edges which are in both sets.

    Parameters
    ---------
    faces_a : (n, 3) int
      Array of faces
    faces_b : (m, 3) int
      Array of faces

    Returns
    ---------
    shared : (p, 2) int
      Edges shared between faces
    """
    # sort each edge row so identical edges compare equal
    # regardless of the direction they were wound in
    sorted_a = np.sort(faces_to_edges(faces_a), axis=1)
    sorted_b = np.sort(faces_to_edges(faces_b), axis=1)
    # row-wise set intersection of the two edge arrays
    return grouping.boolean_rows(
        sorted_a, sorted_b, operation=np.intersect1d)
def facets(mesh, engine=None):
    """
    Find the list of parallel adjacent faces.

    Parameters
    -----------
    mesh : trimesh.Trimesh
    engine : str
      Which graph engine to use:
      ('scipy', 'networkx')

    Returns
    ---------
    facets : sequence of (n,) int
      Groups of face indexes of
      parallel adjacent faces.
    """
    # what is the radius of a circle that passes through the perpendicular
    # projection of the vector between the two non-shared vertices
    # onto the shared edge, with the face normal from the two adjacent faces
    radii = mesh.face_adjacency_radius
    # what is the span perpendicular to the shared edge
    span = mesh.face_adjacency_span

    # a very arbitrary formula for declaring two adjacent faces
    # parallel in a way that is hopefully (and anecdotally) robust
    # to numeric error
    # a common failure mode is two faces that are very narrow with a slight
    # angle between them, so here we divide by the perpendicular span
    # to penalize very narrow faces, and then square it just for fun
    parallel = np.ones(len(radii), dtype=bool)
    # if span is zero we know faces are small/parallel
    nonzero = np.abs(span) > tol.zero
    # faces with a radii/span ratio larger than a threshold pass
    parallel[nonzero] = (radii[nonzero] /
                         span[nonzero]) ** 2 > tol.facet_threshold

    # run connected components on the parallel faces to group
    # them, discarding any group with fewer than two faces
    components = connected_components(
        mesh.face_adjacency[parallel],
        nodes=np.arange(len(mesh.faces)),
        min_len=2,
        engine=engine)

    return components
def split(mesh, only_watertight=True, adjacency=None, engine=None, **kwargs):
    """
    Split a mesh into multiple meshes from face
    connectivity.

    If only_watertight is true it will only return
    watertight meshes and will attempt to repair
    single triangle or quad holes.

    Parameters
    ----------
    mesh : trimesh.Trimesh
    only_watertight : bool
      Only return watertight components
    adjacency : (n, 2) int
      Face adjacency to override full mesh
    engine : str or None
      Which graph engine to use
    **kwargs
      Passed through to mesh.submesh

    Returns
    ----------
    meshes : (m,) trimesh.Trimesh
      Results of splitting
    """
    if adjacency is None:
        adjacency = mesh.face_adjacency

    # a watertight component requires at least four faces
    # so discard smaller groups when only_watertight is set
    min_len = 4 if only_watertight else 1

    # group face indices into connected components
    components = connected_components(
        edges=adjacency,
        nodes=np.arange(len(mesh.faces)),
        min_len=min_len,
        engine=engine)

    # turn each group of face indices into its own mesh
    return mesh.submesh(
        components, only_watertight=only_watertight, **kwargs)
def connected_components(edges,
                         min_len=1,
                         nodes=None,
                         engine=None):
    """
    Find groups of connected nodes from an edge list.

    Parameters
    -----------
    edges : (n, 2) int
      Edges between nodes
    nodes : (m, ) int or None
      List of nodes that exist
    min_len : int
      Minimum length of a component group to return
    engine : str or None
      Which graph engine to use (None for automatic):
      (None, 'networkx', 'scipy')

    Returns
    -----------
    components : (n,) sequence of (*,) int
      Nodes which are connected

    Raises
    -----------
    ValueError
      If edges are not (n, 2)
    ImportError
      If no graph engine is available
    """
    def components_networkx():
        """
        Find connected components using networkx.
        """
        graph = nx.from_edgelist(edges)
        # make sure every face has a node, so single triangles
        # aren't discarded (as they aren't adjacent to anything)
        if min_len <= 1:
            graph.add_nodes_from(nodes)
        return [list(i) for i in nx.connected_components(graph)]

    def components_csgraph():
        """
        Find connected components using scipy.sparse.csgraph.
        """
        # label each node
        labels = connected_component_labels(edges,
                                            node_count=node_count)
        # we have to remove results that contain nodes outside
        # of the specified node set and reindex
        contained = np.zeros(node_count, dtype=bool)
        contained[nodes] = True
        index = np.arange(node_count, dtype=np.int64)[contained]
        # group labels of contained nodes then remap the group
        # contents back into original node indexes
        # note: a stray `return components` statement which
        # followed this return was dead code and has been removed
        components = grouping.group(labels[contained], min_len=min_len)
        return [index[c] for c in components]

    # check input edges
    edges = np.asanyarray(edges, dtype=np.int64)
    # if no nodes were specified just use unique
    if nodes is None:
        nodes = np.unique(edges)

    # exit early if we have no nodes
    if len(nodes) == 0:
        return []
    elif len(edges) == 0:
        if min_len <= 1:
            # with no edges every node is its own component
            return np.reshape(nodes, (-1, 1)).tolist()
        else:
            return []

    if not util.is_shape(edges, (-1, 2)):
        raise ValueError('edges must be (n, 2)!')

    # find the maximum index referenced in either nodes or edges
    counts = [0]
    if len(edges) > 0:
        counts.append(edges.max())
    if len(nodes) > 0:
        counts.append(nodes.max())
    node_count = np.max(counts) + 1

    # remove edges that don't have both nodes in the node set
    mask = np.zeros(node_count, dtype=bool)
    mask[nodes] = True
    edges_ok = mask[edges].all(axis=1)
    edges = edges[edges_ok]

    # networkx is pure python and is usually 5-10x slower than scipy
    engines = collections.OrderedDict((
        ('scipy', components_csgraph),
        ('networkx', components_networkx)))

    # if a graph engine has explicitly been requested use it
    if engine in engines:
        return engines[engine]()

    # otherwise, go through our ordered list of graph engines
    # until we get to one that has actually been installed
    for function in engines.values():
        try:
            return function()
        # will be raised if the library didn't import correctly above
        except BaseException:
            continue
    raise ImportError('no graph engines available!')
def connected_component_labels(edges, node_count=None):
    """
    Label graph nodes from an edge list, using scipy.sparse.csgraph.

    Parameters
    -----------
    edges : (n, 2) int
      Edges of a graph
    node_count : int, or None
      The largest node in the graph.

    Returns
    ----------
    labels : (node_count,) int
      Component labels for each node
    """
    # build a sparse adjacency matrix from the edge list
    graph = edges_to_coo(edges, node_count)
    # scipy returns (component count, per-node labels);
    # only the labels are needed here
    _, labels = csgraph.connected_components(
        graph, directed=False)
    # sanity check: exactly one label per requested node
    if node_count is not None:
        assert len(labels) == node_count
    return labels
def split_traversal(traversal,
                    edges,
                    edges_hash=None):
    """
    Given a traversal as a list of nodes, split the traversal
    if a sequential index pair is not in the given edges.

    Parameters
    --------------
    traversal : (m,) int
      Traversal through edges
    edges : (n, 2) int
      Graph edge indexes
    edges_hash : (n,) int or None
      Edges sorted on axis=1 and
      passed to grouping.hashable_rows

    Returns
    ---------------
    split : sequence of (p,) int
      Traversals where every sequential
      node pair is a row of `edges`
    """
    traversal = np.asanyarray(traversal,
                              dtype=np.int64)

    # hash edge rows for contains checks
    if edges_hash is None:
        edges_hash = grouping.hashable_rows(
            np.sort(edges, axis=1))

    # turn the (n,) traversal into (n-1, 2) edges
    trav_edge = np.column_stack((traversal[:-1],
                                 traversal[1:]))
    # hash each edge so we can compare to edge set
    trav_hash = grouping.hashable_rows(
        np.sort(trav_edge, axis=1))
    # check if each edge is contained in edge set
    # np.isin replaces np.in1d which is deprecated in NumPy 2.0
    contained = np.isin(trav_hash, edges_hash)

    # exit early if every edge of traversal exists
    if contained.all():
        # just reshape one traversal
        split = [traversal]
    else:
        # find contiguous groups of contained edges
        blocks = grouping.blocks(contained,
                                 min_len=1,
                                 only_nonzero=True)
        # turn edges back in to sequence of traversals
        split = [np.append(trav_edge[b][:, 0],
                           trav_edge[b[-1]][1])
                 for b in blocks]

    # close traversals if necessary
    for i, t in enumerate(split):
        # make sure elements of sequence are numpy arrays
        split[i] = np.asanyarray(split[i], dtype=np.int64)
        # don't close if its a single edge
        if len(t) <= 2:
            continue
        # make sure it's not already closed
        edge = np.sort([t[0], t[-1]])
        # compare the endpoints directly rather than using
        # `ndarray.ptp` which was removed in NumPy 2.0
        if edge[0] == edge[1]:
            continue
        close = grouping.hashable_rows(edge.reshape((1, 2)))[0]
        # if we need the edge add it
        if close in edges_hash:
            split[i] = np.append(t, t[0]).astype(np.int64)

    return split
def fill_traversals(traversals, edges, edges_hash=None):
    """
    Convert a traversal of a list of edges into a sequence of
    traversals where every pair of consecutive node indexes
    is an edge in a passed edge list.

    Parameters
    -------------
    traversals : sequence of (m,) int
      Node indexes of traversals of a graph
    edges : (n, 2) int
      Pairs of connected node indexes
    edges_hash : None, or (n,) int
      Edges sorted along axis 1 then hashed
      using grouping.hashable_rows

    Returns
    --------------
    splits : sequence of (p,) int
      Node indexes of connected traversals
      NOTE(review): when `traversals` is empty or no edges were
      covered by the split traversals, this returns a copy of
      `edges` as an (n, 2) array rather than a list -- confirm
      callers handle both shapes
    """
    # make sure edges are correct type
    edges = np.asanyarray(edges, dtype=np.int64)
    # make sure edges are sorted
    edges.sort(axis=1)

    # if there are no traversals just return edges
    if len(traversals) == 0:
        return edges.copy()

    # hash edges for contains checks
    if edges_hash is None:
        edges_hash = grouping.hashable_rows(edges)

    splits = []
    for nodes in traversals:
        # split traversals to remove edges
        # that don't actually exist
        splits.extend(split_traversal(
            traversal=nodes,
            edges=edges,
            edges_hash=edges_hash))
    # turn the split traversals back into (n, 2) edges
    included = util.vstack_empty([np.column_stack((i[:-1], i[1:]))
                                  for i in splits])
    if len(included) > 0:
        # sort included edges in place
        included.sort(axis=1)
        # make sure any edges not included in split traversals
        # are just added as a length 2 traversal
        splits.extend(grouping.boolean_rows(
            edges,
            included,
            operation=np.setdiff1d))
    else:
        # no edges were included, so our filled traversal
        # is just the original edges copied over
        splits = edges.copy()

    return splits
def traversals(edges, mode='bfs'):
    """
    Given an edge list generate a sequence of ordered breadth-
    or depth-first search traversals using scipy.csgraph routines.

    Parameters
    ------------
    edges : (n, 2) int
      Undirected edges of a graph
    mode : str
      Traversal type, 'bfs' or 'dfs'

    Returns
    -----------
    traversals : (m,) sequence of (p,) int
      Ordered DFS or BFS traversals of the graph.

    Raises
    -----------
    ValueError
      If edges are not (n, 2) or mode is unknown
    """
    edges = np.array(edges, dtype=np.int64)
    if len(edges) == 0:
        return []
    elif not util.is_shape(edges, (-1, 2)):
        raise ValueError('edges are not (n, 2)!')

    # pick the traversal method
    mode = str(mode).lower().strip()
    if mode == 'bfs':
        func = csgraph.breadth_first_order
    elif mode == 'dfs':
        func = csgraph.depth_first_order
    else:
        raise ValueError('traversal mode must be either dfs or bfs')

    # make sure edges are sorted so we can query
    # an ordered pair later
    edges.sort(axis=1)
    # set of nodes to make sure we get every node
    nodes = set(edges.reshape(-1))
    # coo_matrix for csgraph routines
    graph = edges_to_coo(edges)

    # we're going to make a sequence of traversals
    traversals = []

    # keep traversing until every node has been
    # included in exactly one traversal
    while len(nodes) > 0:
        # starting at any node
        start = nodes.pop()
        # get an (n,) ordered traversal
        ordered = func(graph,
                       i_start=start,
                       return_predecessors=False,
                       directed=False).astype(np.int64)
        traversals.append(ordered)
        # remove the nodes we've consumed
        nodes.difference_update(ordered)

    return traversals
def edges_to_coo(edges, count=None, data=None):
    """
    Given an edge list, return a boolean scipy.sparse.coo_matrix
    representing the edges in matrix form.

    Parameters
    ------------
    edges : (n, 2) int
      Edges of a graph
    count : int
      The total number of nodes in the graph
      if None: count = edges.max() + 1
    data : (n,) any
      Assign data to each edge, if None will
      be bool True for each specified edge

    Returns
    ------------
    matrix : (count, count) scipy.sparse.coo_matrix
      Sparse COO
    """
    edges = np.asanyarray(edges, dtype=np.int64)
    # an empty edge list is allowed, anything
    # else must be a (n, 2) array of indexes
    if len(edges) != 0 and not util.is_shape(edges, (-1, 2)):
        raise ValueError('edges must be (n, 2)!')

    # default the node count to the largest index
    # referenced in the edge list
    if count is None:
        count = edges.max() + 1
    count = int(count)

    # with no data passed mark every specified edge True
    if data is None:
        data = np.ones(len(edges), dtype=bool)

    return coo_matrix((data, edges.T),
                      dtype=data.dtype,
                      shape=(count, count))
def neighbors(edges, max_index=None, directed=False):
    """
    Find the neighbors for each node in an edgelist graph.

    TODO : re-write this with sparse matrix operations

    Parameters
    ------------
    edges : (n, 2) int
      Connected nodes
    max_index : int or None
      Length of the returned sequence;
      if None uses edges.max() + 1
    directed : bool
      If True, only connect edges in one direction

    Returns
    ---------
    neighbors : sequence
      Vertex index corresponds to set of other vertex indices
    """
    neighbors = collections.defaultdict(set)
    # build the adjacency with plain loops rather than
    # side-effecting list comprehensions
    if directed:
        for a, b in edges:
            neighbors[a].add(b)
    else:
        for a, b in edges:
            neighbors[a].add(b)
            neighbors[b].add(a)

    if max_index is None:
        # `edges.max()` raises on an empty array so
        # short-circuit to an empty result here
        if len(edges) == 0:
            return []
        max_index = edges.max() + 1

    return [list(neighbors[i]) for i in range(max_index)]
def smoothed(mesh, angle=None, facet_minarea=10):
    """
    Return a non-watertight version of the mesh which
    will render nicely with smooth shading by
    disconnecting faces at sharp angles to each other.

    Parameters
    -----------
    mesh : trimesh.Trimesh
      Source geometry
    angle : float or None
      Angle in radians: face pairs with angles
      smaller than this will appear smoothed
    facet_minarea : float or None
      Minimum area fraction to consider,
      IE for `facet_minarea=25` only facets larger
      than `mesh.area / 25` will be considered.

    Returns
    ---------
    smooth : trimesh.Trimesh
      Geometry with disconnected face patches
    """
    if angle is None:
        angle = np.radians(20)

    # if the mesh has no adjacent faces return a copy
    if len(mesh.face_adjacency) == 0:
        return mesh.copy()

    # face pairs below angle threshold
    angle_ok = mesh.face_adjacency_angles < angle
    # subset of face adjacency
    adjacency = mesh.face_adjacency[angle_ok]

    # coplanar groups of faces
    facets = []
    nodes = None
    # collect coplanar regions for smoothing
    if facet_minarea is not None:
        areas = mesh.area_faces
        min_area = mesh.area / facet_minarea
        try:
            # we can survive not knowing facets
            # exclude facets with few faces
            facets = [f for f in mesh.facets
                      if areas[f].sum() > min_area]
            if len(facets) > 0:
                # mask for removing adjacency pairs where
                # one of the faces is contained in a facet
                mask = np.ones(len(mesh.faces),
                               dtype=bool)
                mask[np.hstack(facets)] = False
                # apply the mask to adjacency
                adjacency = adjacency[mask[adjacency].all(axis=1)]
                # nodes are no longer every face
                nodes = np.unique(adjacency)
        except BaseException:
            # facets are a best-effort optimization so
            # log the failure rather than raising
            log.warning('failed to calculate facets',
                        exc_info=True)

    # run connected components on facet adjacency
    components = connected_components(
        adjacency,
        min_len=2,
        nodes=nodes)

    # add back coplanar groups if any exist
    if len(facets) > 0:
        components.extend(facets)

    if len(components) == 0:
        # if no components for some reason
        # just return a copy of the original mesh
        return mesh.copy()

    # add back any faces that were missed
    unique = np.unique(np.hstack(components))
    if len(unique) != len(mesh.faces):
        # things like single loose faces
        # or groups below facet_minlen
        broke = np.setdiff1d(
            np.arange(len(mesh.faces)), unique)
        components.extend(broke.reshape((-1, 1)))

    # get a submesh as a single appended Trimesh
    smooth = mesh.submesh(components,
                          only_watertight=False,
                          append=True)
    # store face indices from original mesh
    smooth.metadata['original_components'] = components
    # smoothed should have exactly the same number of faces
    if len(smooth.faces) != len(mesh.faces):
        log.warning('face count in smooth wrong!')
    return smooth
def is_watertight(edges, edges_sorted=None):
    """
    Check whether every edge occurs in exactly one reversed
    pair, i.e. whether the edge list closes a surface.

    Parameters
    -----------
    edges : (n, 2) int
      List of vertex indices
    edges_sorted : (n, 2) int
      Pass vertex indices sorted on axis 1 as a speedup

    Returns
    ---------
    watertight : boolean
      Whether every edge is shared by an even
      number of faces
    winding : boolean
      Whether every shared edge is reversed
    """
    # row-sorting is only a speedup so compute it
    # here if the caller didn't provide it
    if edges_sorted is None:
        edges_sorted = np.sort(edges, axis=1)

    # index pairs of edges which occur exactly twice
    paired = grouping.group_rows(
        edges_sorted, require_count=2)

    # watertight means every edge belongs to one pair
    watertight = bool(2 * len(paired) == len(edges))

    # stack each edge pair into a (4,) row and pull the two
    # interior vertices: opposing winding means they're equal
    interior = edges[paired].reshape((-1, 4))[:, 1:3].T
    winding = bool(np.equal(*interior).all())

    return watertight, winding
def graph_to_svg(graph):
    """
    Turn a networkx graph into an SVG string
    using graphviz `dot`.

    Parameters
    ----------
    graph : networkx graph
      Graph to lay out and render

    Returns
    ---------
    svg : string
      Pictoral layout in SVG format
    """
    import tempfile
    import subprocess
    with tempfile.NamedTemporaryFile() as dot_file:
        # write the graph in DOT format to the temporary file
        # NOTE(review): re-opening a NamedTemporaryFile by name
        # fails on Windows -- confirm this is POSIX-only usage
        nx.drawing.nx_agraph.write_dot(graph, dot_file.name)
        # shell out to graphviz to produce the SVG layout
        svg = subprocess.check_output(['dot', dot_file.name, '-Tsvg'])
    return svg
def multigraph_paths(G, source, cutoff=None):
    """
    For a networkx MultiDiGraph, find all paths from a source node
    to leaf nodes. This function returns edge instance numbers
    in addition to nodes, unlike networkx.all_simple_paths.

    Parameters
    ---------------
    G : networkx.MultiDiGraph
      Graph to evaluate
    source : hashable
      Node to start traversal at
    cutoff : int
      Number of nodes to visit
      If None will visit all nodes

    Returns
    ----------
    traversals : (n,) list of [(node, edge instance index), ] paths
      Traversals of the multigraph
    """
    if cutoff is None:
        # an upper bound on the number of iterations needed
        cutoff = (len(G.edges()) * len(G.nodes())) + 1

    # the path starts at the node specified
    current = [(source, 0)]
    # traversals we need to go back and do
    queue = []
    # completed paths
    traversals = []

    for i in range(cutoff):
        # paths are stored as (node, instance) so
        # get the node of the last place visited
        current_node = current[-1][0]
        # get all the children of the current node
        child = G[current_node]

        if len(child) == 0:
            # we have no children, so we are at the end of this path
            # save the path as a completed traversal
            traversals.append(current)
            # if there is nothing on the queue, we are done
            if len(queue) == 0:
                break
            # otherwise continue traversing with the next path
            # on the queue
            current = queue.pop()
        else:
            # there are children: keep the first (node, instance)
            # on the current path and queue the rest for later
            start = True
            # iterate through child nodes and edge instances
            for node in child.keys():
                for instance in child[node].keys():
                    if start:
                        # if this is the first edge, keep it on the
                        # current traversal and save the others for later
                        current.append((node, instance))
                        start = False
                    else:
                        # this child has multiple instances
                        # so we will need to traverse them multiple times
                        # we appended a node to current, so only take the
                        # first n-1 visits
                        queue.append(current[:-1] + [(node, instance)])
    return traversals
def multigraph_collect(G, traversal, attrib=None):
    """
    Given a MultiDiGraph traversal, collect attributes along it.

    Parameters
    -------------
    G : networkx.MultiDiGraph
      Graph to collect attributes from
    traversal : (n,) list
      Tuples of (node, edge instance index)
    attrib : hashable or None
      Dict key to collect; if None collect the
      whole attribute dict for each edge

    Returns
    -------------
    collected : (len(traversal) - 1,) list
      Attribute (or attribute dict) per traversed edge
    """
    collected = []
    # walk consecutive (node, instance) pairs along the path
    for (node_a, _), (node_b, instance) in zip(traversal, traversal[1:]):
        # the attribute dict of this specific edge instance
        attributes = G[node_a][node_b][instance]
        if attrib is None:
            collected.append(attributes)
        else:
            collected.append(attributes[attrib])
    return collected