Skip to content

Commit

Permalink
Merge pull request #1236 from mikedh/fix/vn
Browse files Browse the repository at this point in the history
Release: VN + Nested Scenes
  • Loading branch information
mikedh committed May 9, 2021
2 parents 87bf85e + 0251302 commit b0eb6a9
Show file tree
Hide file tree
Showing 21 changed files with 212 additions and 130 deletions.
Binary file added models/chair.zip
Binary file not shown.
Binary file added models/nested.glb
Binary file not shown.
2 changes: 1 addition & 1 deletion tests/test_dxf.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def test_dxf(self):
# compare reloaded with original
for r in rc:
assert g.np.isclose(r.area, d.area)
assert g.np.isclose(r.length, d.length)
assert g.np.isclose(r.length, d.length, rtol=1e-4)
assert len(r.entities) == len(d.entities)

single = g.np.hstack(splits)
Expand Down
9 changes: 9 additions & 0 deletions tests/test_gltf.py
Original file line number Diff line number Diff line change
Expand Up @@ -340,6 +340,15 @@ def test_node_name(self):
assert (set(s.graph.nodes_geometry) ==
set(r.graph.nodes_geometry))

def test_nested_scale(self):
    """
    A GLTF scene with nested transforms that include scale
    should load with the correct node count and bounds.
    """
    scene = g.get_mesh('nested.glb')
    # the nested file is known to contain three geometry nodes
    assert len(scene.graph.nodes_geometry) == 3
    # reference AABB computed from the known-good file
    expected = [[-1.16701, -2.3366, -0.26938],
                [0.26938, 1., 0.26938]]
    assert g.np.allclose(expected, scene.bounds, atol=1e-4)

def test_schema(self):
# get a copy of the GLTF schema and do simple checks
s = g.trimesh.exchange.gltf.get_schema()
Expand Down
19 changes: 19 additions & 0 deletions tests/test_obj.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,6 +260,25 @@ def test_no_uv(self):
mesh.export(file_type='obj'), file_type='obj')
assert g.np.isclose(mesh.area, rec.area)

def test_chair(self):
    """
    Check that loader-provided vertex normals survive in the cache
    through a scaling transform, and are dropped on a cache clear.
    """
    # take the first geometry from the zipped OBJ scene
    mesh = next(iter(g.get_mesh('chair.zip').geometry.values()))

    # this model comes with vertex normals
    assert 'vertex_normals' in mesh._cache
    # loader-provided normals should already be unit vectors
    assert g.np.allclose(
        1.0, g.np.linalg.norm(mesh.vertex_normals, axis=1))
    # uniform scale is a rotation-free similarity transform:
    # cached normals should be preserved, not recomputed
    mesh.apply_scale(0.46377314288075433)
    assert 'vertex_normals' in mesh._cache
    # normals should still be unit length after the scale
    assert g.np.allclose(
        1.0, g.np.linalg.norm(mesh.vertex_normals, axis=1))
    # accessing the property above must not have evicted them
    assert 'vertex_normals' in mesh._cache
    # an explicit clear should drop the cached normals
    mesh._cache.clear()
    assert 'vertex_normals' not in mesh._cache
    # if we recomputed now, the degenerate faces
    # would lead some of these vertex normals to be zero
    # assert g.np.allclose(
    #     1.0, g.np.linalg.norm(mesh.vertex_normals, axis=1))


def simple_load(text):
# we're going to load faces in a basic text way
Expand Down
20 changes: 7 additions & 13 deletions trimesh/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,8 +237,8 @@ def process(self, validate=False, **kwargs):
# we can keep face and vertex normals in the cache without recomputing
# if faces or vertices have been removed, normals are validated before
# being returned so there is no danger of inconsistent dimensions
self._cache.clear(exclude=['face_normals',
'vertex_normals'])
self._cache.clear(exclude={'face_normals',
'vertex_normals'})
self.metadata['processed'] = True
return self

Expand Down Expand Up @@ -2235,19 +2235,17 @@ def apply_transform(self, matrix):
matrix)[0]

# preserve face normals if we have them stored
new_face_normals = None
if has_rotation and 'face_normals' in self._cache:
# transform face normals by rotation component
new_face_normals = util.unitize(
self._cache.cache['face_normals'] = util.unitize(
transformations.transform_points(
self.face_normals,
matrix=matrix,
translate=False))

# preserve vertex normals if we have them stored
new_vertex_normals = None
if has_rotation and 'vertex_normals' in self._cache:
new_vertex_normals = util.unitize(
self._cache.cache['vertex_normals'] = util.unitize(
transformations.transform_points(
self.vertex_normals,
matrix=matrix,
Expand All @@ -2264,15 +2262,12 @@ def apply_transform(self, matrix):

# assign the new values
self.vertices = new_vertices
# may be None if we didn't have them previously
self.face_normals = new_face_normals
self.vertex_normals = new_vertex_normals

# preserve normals and topology in cache
# while dumping everything else
self._cache.clear(exclude=[
self._cache.clear(exclude={
'face_normals', # transformed by us
'vertex_normals' # also transformed by us
'vertex_normals', # also transformed by us
'face_adjacency', # topological
'face_adjacency_edges',
'face_adjacency_unshared',
Expand All @@ -2285,10 +2280,9 @@ def apply_transform(self, matrix):
'edges_sparse',
'body_count',
'faces_unique_edges',
'euler_number', ])
'euler_number'})
# set the cache ID with the current hash value
self._cache.id_set()

log.debug('mesh transformed by matrix')
return self

Expand Down
7 changes: 6 additions & 1 deletion trimesh/caching.py
Original file line number Diff line number Diff line change
Expand Up @@ -423,7 +423,12 @@ def verify(self):

def clear(self, exclude=None):
"""
Remove all elements in the cache.
Remove elements in the cache.
Parameters
-----------
exclude : list
List of keys in cache to not clear.
"""
if exclude is None:
self.cache = {}
Expand Down
4 changes: 2 additions & 2 deletions trimesh/comparison.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
Provide methods for quickly hashing and comparing meshes.
"""

import hashlib
import numpy as np

from . import util
Expand Down Expand Up @@ -153,8 +154,7 @@ def identifier_hash(identifier, sigfig=None):
if (multiplier < 0).any():
multiplier += np.abs(multiplier.min())
hashable = (as_int * (10 ** multiplier)).astype(np.int64)
md5 = util.md5_object(hashable)
return md5
return hashlib.md5(hashable).hexdigest()


def face_ordering(mesh):
Expand Down
60 changes: 36 additions & 24 deletions trimesh/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,20 +59,27 @@ class TolerancePath(object):
still be considered to be on the plane
tol.seg_frac : float
When simplifying line segments what percentage of the drawing
scale can a segment be and have a curve fitted
tol.seg_angle: when simplifying line segments to arcs, what angle
can a segment span to be acceptable.
tol.aspect_frac: when simplifying line segments to closed arcs (circles)
what percentage can the aspect ratio differ from 1:1
before escaping the fit early
tol.radius_frac: when simplifying line segments to arcs, what percentage
of the fit radius can vertices deviate to be acceptable
tol.radius_min: when simplifying line segments to arcs, what is the minimum
radius multiplied by document scale for an acceptable fit
tol.radius_max: when simplifying line segments to arcs, what is the maximum
radius multiplied by document scale for an acceptable fit
tol.tangent: when simplifying line segments to curves, what is the maximum
angle the end sections can deviate from tangent that is acceptable.
scale can a segment be and have a curve fitted
tol.seg_angle : float
When simplifying line segments to arcs, what angle
can a segment span to be acceptable.
tol.aspect_frac : float
When simplifying line segments to closed arcs (circles)
what percentage can the aspect ratio differ from 1:1
before escaping the fit early
tol.radius_frac : float
When simplifying line segments to arcs, what percentage
of the fit radius can vertices deviate to be acceptable
tol.radius_min :
When simplifying line segments to arcs, what is the minimum
radius multiplied by document scale for an acceptable fit
tol.radius_max :
When simplifying line segments to arcs, what is the maximum
radius multiplied by document scale for an acceptable fit
tol.tangent :
When simplifying line segments to curves, what is the maximum
angle the end sections can deviate from tangent that is
acceptable.
"""

def __init__(self, **kwargs):
Expand All @@ -97,21 +104,26 @@ def __init__(self, **kwargs):

class ResolutionPath(object):
"""
res.seg_frac: when discretizing curves, what percentage of the drawing
scale should we aim to make a single segment
res.seg_angle: when discretizing curves, what angle should a section span
res.max_sections: when discretizing splines, what is the maximum number
of segments per control point
res.min_sections: when discretizing splines, what is the minimum number
of segments per control point
res.export: format string to use when exporting floating point vertices
res.seg_frac : float
When discretizing curves, what percentage of the drawing
scale should we aim to make a single segment
res.seg_angle : float
When discretizing curves, what angle should a section span
res.max_sections : int
When discretizing splines, what is the maximum number
of segments per control point
res.min_sections : int
When discretizing splines, what is the minimum number
of segments per control point
res.export : str
Format string to use when exporting floating point vertices
"""

def __init__(self, **kwargs):
self.seg_frac = .05
self.seg_angle = .08
self.max_sections = 10
self.min_sections = 5
self.max_sections = 500
self.min_sections = 20
self.export = '0.10f'


Expand Down
1 change: 1 addition & 0 deletions trimesh/exchange/gltf.py
Original file line number Diff line number Diff line change
Expand Up @@ -1427,6 +1427,7 @@ def _read_buffers(header, buffers, mesh_kwargs, merge_primitives=False, resolver

if "mesh" in child:
geometries = mesh_prim[child["mesh"]]

# if the node has a mesh associated with it
if len(geometries) > 1:
# append root node
Expand Down
1 change: 0 additions & 1 deletion trimesh/exchange/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,6 @@ def load_mesh(file_obj,
file_type=file_type,
resolver=resolver,
**kwargs)

if not isinstance(results, list):
results = [results]

Expand Down
19 changes: 14 additions & 5 deletions trimesh/exchange/obj.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,13 +229,22 @@ def load_obj(file_obj,
# start with vertices and faces
mesh.update({'faces': new_faces,
'vertices': v[mask_v].copy()})
# if vertex colors are OK save them

# if colors and normals are OK save them
if vc is not None:
mesh['vertex_colors'] = vc[mask_v]
# if vertex normals are OK save them
try:
# may fail on a malformed color mask
mesh['vertex_colors'] = vc[mask_v]
except BaseException:
log.warning('failed to load vertex_colors',
exc_info=True)
if mask_vn is not None:
mesh['vertex_normals'] = vn[mask_vn]

try:
# may fail on a malformed mask
mesh['vertex_normals'] = vn[mask_vn]
except BaseException:
log.warning('failed to load vertex_normals',
exc_info=True)
visual = None
if material in materials:
# use the material with the UV coordinates
Expand Down
2 changes: 1 addition & 1 deletion trimesh/exchange/ply.py
Original file line number Diff line number Diff line change
Expand Up @@ -255,7 +255,7 @@ def export_ply(mesh,
# if vertex normals aren't specifically asked for
# only export them if they are stored in cache
if vertex_normal is None:
vertex_normal = 'vertex_normal' in mesh._cache
vertex_normal = 'vertex_normals' in mesh._cache

# if we want to include mesh attributes in the export
if include_attributes:
Expand Down
21 changes: 12 additions & 9 deletions trimesh/intersections.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,10 +189,11 @@ def handle_basic(signs, faces, vertices):
return lines


def mesh_multiplane(mesh,
plane_origin,
plane_normal,
heights):
def mesh_multiplane(
mesh,
plane_origin,
plane_normal,
heights):
"""
A utility function for slicing a mesh by multiple
parallel planes which caches the dot product operation.
Expand Down Expand Up @@ -225,12 +226,14 @@ def mesh_multiplane(mesh,
heights = np.asanyarray(heights, dtype=np.float64)

# dot product of every vertex with plane
vertex_dots = np.dot(plane_normal,
(mesh.vertices - plane_origin).T)
vertex_dots = np.dot(
plane_normal,
(mesh.vertices - plane_origin).T)

# reconstruct transforms for each 2D section
base_transform = geometry.plane_transform(origin=plane_origin,
normal=plane_normal)
base_transform = geometry.plane_transform(
origin=plane_origin,
normal=plane_normal)
base_transform = np.linalg.inv(base_transform)

# alter translation Z inside loop
Expand Down Expand Up @@ -267,7 +270,7 @@ def mesh_multiplane(mesh,

# if we didn't screw up the transform all
# of the Z values should be zero
assert np.allclose(lines_2D[:, 2], 0.0)
# assert np.allclose(lines_2D[:, 2], 0.0)

# reshape back in to lines and discard Z
lines_2D = lines_2D[:, :2].reshape((-1, 2, 2))
Expand Down
41 changes: 33 additions & 8 deletions trimesh/path/entities.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@ def explode(self):
Returns
------------
explode : list of Entity
Current entity split into multiple entities if necessary
Current entity split into multiple entities.
"""
return [self.copy()]

Expand Down Expand Up @@ -558,10 +558,34 @@ def is_valid(self):

def _bytes(self):
# give consistent ordering of points for hash
if self.points[0] > self.points[-1]:
return b'Arc' + bytes(self.closed) + self.points.tobytes()
else:
return b'Arc' + bytes(self.closed) + self.points[::-1].tobytes()
order = int(self.points[0] > self.points[-1]) * 2 - 1
return b'Arc' + bytes(self.closed) + self.points[::order].tobytes()

def length(self, vertices):
    """
    Return the arc length of the 3-point arc.

    Parameters
    ----------
    vertices : (n, d) float
        Vertices for overall drawing.

    Returns
    -----------
    length : float
        Length of arc.
    """
    if self.closed:
        # a closed circle needs no angular span from the fit
        radius = self.center(
            vertices,
            return_normal=False,
            return_angle=False)['radius']
        return 4 * np.pi * radius
    # open arc: scale the fit radius by the angular span
    fit = self.center(
        vertices,
        return_normal=False,
        return_angle=True)
    return 2 * fit['radius'] * fit['span']

def discrete(self, vertices, scale=1.0):
"""
Expand All @@ -579,9 +603,10 @@ def discrete(self, vertices, scale=1.0):
discrete : (m, dimension) float
Path in space made up of line segments
"""
discrete = discretize_arc(vertices[self.points],
close=self.closed,
scale=scale)
discrete = discretize_arc(
vertices[self.points],
close=self.closed,
scale=scale)
return self._orient(discrete)

def center(self, vertices, **kwargs):
Expand Down
Loading

0 comments on commit b0eb6a9

Please sign in to comment.