Skip to content

Commit

Permalink
handle degenerate paths better
Browse files Browse the repository at this point in the history
  • Loading branch information
mikedh committed Jan 23, 2019
1 parent ab21358 commit a6b31e6
Show file tree
Hide file tree
Showing 18 changed files with 5,319 additions and 84 deletions.
2,478 changes: 2,478 additions & 0 deletions models/2D/broken_loop.dxf

Large diffs are not rendered by default.

2,478 changes: 2,478 additions & 0 deletions models/2D/broken_pair.dxf

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion tests/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@


try:
from shapely.geometry import Point, Polygon
from shapely.geometry import Point, Polygon, LineString
has_path = True
except ImportError:
has_path = False
Expand Down
2 changes: 0 additions & 2 deletions tests/test_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,8 +167,6 @@ def test_obj(self):
# the loader may reorder vertices, so we shouldn't check direct
# equality
assert m.vertex_normals.shape == reconstructed.vertex_normals.shape
assert m.metadata['vertex_texture'].shape == reconstructed.metadata[
'vertex_texture'].shape

def test_obj_order(self):
"""
Expand Down
36 changes: 36 additions & 0 deletions tests/test_fill.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
try:
from . import generic as g
except BaseException:
import generic as g


class FillTest(g.unittest.TestCase):
    """
    Check Path2D.fill_gaps on drawings whose loops are
    broken by bowties or small gaps.
    """

    def test_fill(self):
        """
        fill_gaps should repair broken topology only when the
        requested connection distance is large enough.
        """
        # a path closed with a bowtie, so the topology is wrong
        a = g.get_mesh('2D/broken_loop.dxf')
        assert len(a.paths) == 0
        # bowtie shouldn't require any connection distance
        a.fill_gaps(0.0)
        # topology should be good now
        assert len(a.paths) == 1
        # it is a rectangle
        # note: np.prod, as np.product was deprecated and
        # removed in NumPy 2.0
        assert g.np.isclose(a.area, g.np.prod(a.extents))

        # a path with a bowtie and a .05 gap
        b = g.get_mesh('2D/broken_pair.dxf')
        assert len(b.paths) == 0
        # should be too small to fill gap
        b.fill_gaps(.01)
        assert len(b.paths) == 0
        # should be large enough to fill gap
        b.fill_gaps(.06)
        assert len(b.paths) == 1
        # it is a rectangle
        assert g.np.isclose(b.area, g.np.prod(b.extents))


# allow the tests to run directly as a script as well as
# under a test runner; attach trimesh's log handler first
# so failures are easier to debug
if __name__ == '__main__':
    g.trimesh.util.attach_to_log()
    g.unittest.main()
6 changes: 3 additions & 3 deletions tests/test_normals.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,10 +45,10 @@ def test_face_normals(self):
mesh.face_normals = None
assert mesh.face_normals.shape == mesh.faces.shape

# we should be able to assign stupid wrong values of the
# right shape
# we shouldn't be able to assign stupid wrong values
# even with nonzero and the right shape
mesh.face_normals = g.np.ones_like(mesh.faces) * [0.0, 0.0, 1.0]
assert g.np.allclose(mesh.face_normals, [0.0, 0.0, 1.0])
assert not g.np.allclose(mesh.face_normals, [0.0, 0.0, 1.0])

# setting normals to None should force recompute
mesh.face_normals = None
Expand Down
13 changes: 13 additions & 0 deletions tests/test_paths.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,9 @@ def test_discrete(self):
m = d.medial_axis()
assert len(m.entities) > 0

# shouldn't crash
d.fill_gaps()

# transform to first quadrant
d.rezero()
# run process manually
Expand Down Expand Up @@ -210,6 +213,16 @@ def test_sample(self):
assert s.shape[1] == 2


class UpstreamTests(g.unittest.TestCase):
    """
    Sanity checks for upstream dependencies.
    """

    def test_shapely(self):
        """
        conda installs of shapely started returning NaN on
        valid input so make sure our builds fail in that case
        """
        # `line` instead of `l`: avoid the ambiguous
        # single-letter name (flake8 E741)
        line = g.LineString([[0, 0], [1, 0]])
        assert g.np.isclose(line.length, 1.0)


class ArcTests(g.unittest.TestCase):

def test_center(self):
Expand Down
3 changes: 2 additions & 1 deletion tests/test_texture.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ def check_fuze(fuze):
# ASCII PLY should have textures defined
m = g.get_mesh('fuze_ascii.ply', process=False)
check_fuze(m)

# load without doing the vertex separation
# will look like garbage but represents original
# and skips "disconnect vertices with different UV"
Expand All @@ -98,6 +98,7 @@ def check_fuze(fuze):
assert len(b.vertices) == 502
assert len(b.visual.uv) == 502


if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
10 changes: 10 additions & 0 deletions trimesh/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -372,6 +372,16 @@ def face_normals(self, values):
log.warning('face_normals all zero, ignoring!')
return

# make sure the first few normals match the first few triangles
check, valid = triangles.normals(
self.vertices.view(np.ndarray)[self.faces[:20]])
compare = np.zeros((len(valid), 3))
compare[valid] = check

if not np.allclose(compare, values[:20]):
log.warning('face_normals didn\'t match triangles, ignoring!')
return

self._cache['face_normals'] = values

@property
Expand Down
21 changes: 6 additions & 15 deletions trimesh/exchange/wavefront.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,20 +141,6 @@ def append_mesh():
dtype=np.float64).reshape((-1, 3))
loaded['vertex_normals'] = normals[vert_order]

# handle vertex texture
if len(current['vt']) > 0:
texture = np.full((len(current['vt_ok']), 3),
np.nan,
dtype=np.float64)
# make sure mask is numpy array for older numpy
vt_ok = np.asanyarray(current['vt_ok'], dtype=np.bool)

texture[vt_ok] = current['vt']
texture = texture[vert_order]
texture = texture[~np.any(np.isnan(texture), axis=1)]
# save vertex texture with correct ordering
loaded['metadata']['vertex_texture'] = texture

# build face groups information
# faces didn't move around so we don't have to reindex
if len(current['g']) > 0:
Expand Down Expand Up @@ -193,7 +179,12 @@ def append_mesh():

# apply the vertex order to the visual object
if 'visual' in loaded:
loaded['visual'].update_vertices(vert_order)
try:
loaded['visual'].update_vertices(vert_order)
except BaseException:
log.error('failed to update vertices',
exc_info=True)
loaded.pop('visual')

# this mesh is done so append the loaded mesh kwarg dict
meshes.append(loaded)
Expand Down
6 changes: 3 additions & 3 deletions trimesh/path/entities.py
Original file line number Diff line number Diff line change
Expand Up @@ -362,7 +362,7 @@ def length(self, vertices):
return 0.0

def discrete(self, *args, **kwargs):
return []
return np.array([])

@property
def closed(self):
Expand All @@ -374,11 +374,11 @@ def is_valid(self):

@property
def nodes(self):
return []
return np.array([])

@property
def end_points(self):
return []
return np.array([])

def _bytes(self):
data = b''.join([b'Text',
Expand Down
25 changes: 19 additions & 6 deletions trimesh/path/exchange/dxf.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,8 @@ def convert_polyline(e):

# 70 is the closed flag for polylines
# if the closed flag is set make sure to close
if ('70' in e and int(e['70'][0]) & 1):
is_closed = '70' in e and int(e['70'][0]) & 1
if is_closed:
lines = np.vstack((lines, lines[:1]))

# 42 is the vertex bulge flag for LWPOLYLINE entities
Expand All @@ -171,19 +172,30 @@ def convert_polyline(e):
# A bulge of 0 indicates a straight segment, and a
# bulge of 1 is a semicircle.
log.debug('polyline bulge: {}'.format(e['42']))
# the actual bulge float values
bulge = np.array(e['42'], dtype=np.float64)

# what position were vertices stored at
vid = np.nonzero(chunk[:, 0] == '10')[0]
# what position were bulges stored at in the chunk
bid = np.nonzero(chunk[:, 0] == '42')[0]

if not is_closed:
bid_ok = bid < vid.max()
bid = bid[bid_ok]
bulge = bulge[bid_ok]

# which vertex index is bulge value associated with
bulge_idx = np.searchsorted(vid, bid)

# the actual bulge float values
bulge = np.array(e['42'], dtype=np.float64)
# use bulge to calculate included angle of the arc
angle = np.arctan(bulge) * 4.0

# the indexes making up a bulged segment
tid = np.column_stack((bulge_idx, bulge_idx - 1))

# if it's a closed segment modulus to start vertex
if is_closed:
tid %= len(lines)
# the vector connecting the two ends of the arc
vector = lines[tid[:, 0]] - lines[tid[:, 1]]
# the length of the connector segment
Expand Down Expand Up @@ -214,7 +226,7 @@ def convert_polyline(e):

# if we're in strict mode make sure our arcs
# have the same magnitude as the input data
if tol.strict:
if True or tol.strict:
from .. import arc as arcmod
check_angle = [arcmod.arc_center(i)['span']
for i in three]
Expand Down Expand Up @@ -443,7 +455,8 @@ def convert_text(e):
polyline = None

# loop through chunks of entity information
for index in np.array_split(np.arange(len(entity_blob)), inflection):
for index in np.array_split(np.arange(len(entity_blob)),
inflection):

# if there is only a header continue
if len(index) < 1:
Expand Down
67 changes: 18 additions & 49 deletions trimesh/path/path.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from .. import transformations

from . import raster
from . import repair
from . import simplify
from . import polygons
from . import segments
Expand Down Expand Up @@ -75,7 +76,7 @@ def __init__(self,
Run simple cleanup or not
"""

self.entities = np.array(entities)
self.entities = entities
self.vertices = vertices
self.metadata = dict()

Expand All @@ -85,8 +86,8 @@ def __init__(self,
self._cache = caching.Cache(id_function=self.crc)

if process:
# literally nothing will work if vertices aren't
# merged properly
# literally nothing will work if vertices
# aren't merged properly
self.merge_vertices()

def process(self):
Expand All @@ -107,6 +108,14 @@ def vertices(self):
def vertices(self, values):
self._vertices = caching.tracked_array(values)

@property
def entities(self):
return self._entities

@entities.setter
def entities(self, values):
self._entities = np.asanyarray(values)

@property
def layers(self):
"""
Expand Down Expand Up @@ -286,57 +295,17 @@ def explode(self):
new_entities.extend(entity.explode())
self.entities = np.array(new_entities)

def fill_gaps(self, max_distance=None):
def fill_gaps(self, distance=0.025):
"""
Find vertices with degree 1 and try to connect them to
other vertices of degree 1, in place.
Find vertices without degree 2 and try to connect to
other vertices. Operations are done in-place.
Parameters
----------
max_distance : float
Connect vertices up to this distance.
Default is path.scale / 1000.0
distance : float
Connect vertices up to this distance
"""

# which vertices are only connected to one entity
broken = np.array(
[k for k, v in
dict(self.vertex_graph.degree()).items() if v == 1])

# if there is only one broken end we can't do anything
if len(broken) < 2:
return

# find pairs of close vertices
distance, node = KDTree(self.vertices[broken]).query(
self.vertices[broken], k=2)

# set a scale- relative max distance
if max_distance is None:
max_distance = self.scale / 1000.0

# change edges into a (n, 2) int
# that references self.vertices
edges = np.sort(broken[node], axis=1)
# remove duplicate edges
unique = grouping.unique_rows(edges)[0]
# apply the unique mask
edges = edges[unique]
distance = distance[unique]

# make sure edge doesn't exist and distance between
# vertices is the maximum allowable
ok = np.logical_and(distance[:, 1] < max_distance,
[not self.vertex_graph.has_edge(*i) for i in edges])

# the vertices we want to merge
merge = edges[ok]
# do the merge with a mask
mask = np.arange(len(self.vertices))
mask[merge[:, 0]] = merge[:, 1]

# apply the mask to the
self.replace_vertex_references(mask)
repair.fill_gaps(self, distance=distance)

@property
def is_closed(self):
Expand Down
Loading

0 comments on commit a6b31e6

Please sign in to comment.