Skip to content

Commit

Permalink
add test failing any function returning NaN or inf
Browse files Browse the repository at this point in the history
  • Loading branch information
mikedh committed Dec 13, 2018
1 parent 283df23 commit 0f30d5d
Show file tree
Hide file tree
Showing 5 changed files with 48 additions and 6 deletions.
7 changes: 6 additions & 1 deletion docker/config/xvfb.supervisord.conf
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
[program:Xvfb]
command=bash -c "/usr/bin/Xvfb $DISPLAY -screen 0 $XVFB_WHD"

# remove the lockfile before starting XVFB
# lockfile is at "/tmp/.X99-lock"
# $DISPLAY is ":99" so cut off the first character
# if lockfile doesn't exist don't exit
command=bash -c "rm /tmp/.X${DISPLAY:1}.lock || true; /usr/bin/Xvfb $DISPLAY -screen 0 $XVFB_WHD"
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
Expand Down
38 changes: 36 additions & 2 deletions tests/test_mesh.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,13 @@ def test_meshes(self):
formats = g.trimesh.available_formats()
assert all(isinstance(i, str) for i in formats)
assert all(len(i) > 0 for i in formats)
assert all(i in formats for i in ['stl', 'ply', 'off', 'obj'])
assert all(i in formats
for i in ['stl', 'ply', 'off', 'obj'])

for mesh in g.get_meshes(raise_error=True):
g.log.info('Testing %s', mesh.metadata['file_name'])
# log file name for debugging
file_name = mesh.metadata['file_name']
g.log.info('Testing %s', file_name)

start = {mesh.md5(), mesh.crc()}
assert len(mesh.faces) > 0
Expand Down Expand Up @@ -75,6 +78,37 @@ def test_meshes(self):
assert hasattr(r, 'intersection')
g.log.info('Triangles tree ok')

# face angles should have the same shape as faces
assert mesh.face_angles.shape == mesh.faces.shape
assert len(mesh.vertices) == len(mesh.vertex_defects)
assert len(mesh.principal_inertia_components) == 3

# we should have built up a bunch of stuff into
# our cache, so make sure all numpy arrays cached are
# finite
for name, cached in mesh._cache.cache.items():
    # only check numpy arrays
    if not isinstance(cached, g.np.ndarray):
        continue

    # only check int, float, and bool dtypes;
    # object/string arrays can't be NaN-checked
    if cached.dtype.kind not in 'ibf':
        continue

    # there should never be NaN values in any cached array
    # NOTE: use %-interpolation eagerly — passing the values as
    # extra constructor arguments (logging style) would leave the
    # exception message unformatted
    if g.np.isnan(cached).any():
        raise ValueError(
            'NaN values in %s/%s' % (file_name, name))

    # fields allowed to have infinite values
    if name in ['face_adjacency_radius']:
        continue

    # everything else must be strictly finite
    if not g.np.isfinite(cached).all():
        raise ValueError(
            'inf values in %s/%s' % (file_name, name))

# some memory issues only show up when you copy the mesh a bunch
# specifically, if you cache c- objects then deepcopy the mesh this
# generally segfaults randomly
Expand Down
4 changes: 3 additions & 1 deletion trimesh/creation.py
Original file line number Diff line number Diff line change
Expand Up @@ -985,7 +985,9 @@ def camera_marker(camera, marker_height=0.4, origin_size=None):

# create line segments for the FOV visualization
# a segment from the origin to each bound of the FOV
segments = np.column_stack((np.zeros_like(points), points)).reshape((-1, 3))
segments = np.column_stack(
(np.zeros_like(points), points)).reshape(
(-1, 3))

# add a loop for the outside of the FOV then reshape
# the whole thing into multiple line segments
Expand Down
3 changes: 2 additions & 1 deletion trimesh/io/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -459,7 +459,8 @@ def parse_file_args(file_obj,
elif 'https://' in file_obj or 'http://' in file_obj:
# we've been passed a URL, warn to use explicit function
# and don't do network calls via magical pipeline
raise ValueError('use load_remote to load URL: {}'.format(file_obj))
raise ValueError(
'use load_remote to load URL: {}'.format(file_obj))
elif file_type is None:
raise ValueError('string is not a file: {}'.format(file_obj))

Expand Down
2 changes: 1 addition & 1 deletion trimesh/version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '2.35.47'
__version__ = '2.35.48'

0 comments on commit 0f30d5d

Please sign in to comment.