diff --git a/models/chair.zip b/models/chair.zip
new file mode 100644
index 000000000..18bc8fc33
Binary files /dev/null and b/models/chair.zip differ
diff --git a/models/nested.glb b/models/nested.glb
new file mode 100644
index 000000000..104e3fe54
Binary files /dev/null and b/models/nested.glb differ
diff --git a/tests/test_dxf.py b/tests/test_dxf.py
index daa104f9a..679b38f22 100644
--- a/tests/test_dxf.py
+++ b/tests/test_dxf.py
@@ -40,7 +40,7 @@ def test_dxf(self):
             # compare reloaded with original
             for r in rc:
                 assert g.np.isclose(r.area, d.area)
-                assert g.np.isclose(r.length, d.length)
+                assert g.np.isclose(r.length, d.length, rtol=1e-4)
                 assert len(r.entities) == len(d.entities)
 
             single = g.np.hstack(splits)
diff --git a/tests/test_gltf.py b/tests/test_gltf.py
index c42d0a861..1f1de70fa 100644
--- a/tests/test_gltf.py
+++ b/tests/test_gltf.py
@@ -340,6 +340,15 @@ def test_node_name(self):
         assert (set(s.graph.nodes_geometry) ==
                 set(r.graph.nodes_geometry))
 
+    def test_nested_scale(self):
+        # nested transforms with scale
+        s = g.get_mesh('nested.glb')
+        assert len(s.graph.nodes_geometry) == 3
+        assert g.np.allclose(
+            [[-1.16701, -2.3366, -0.26938],
+             [0.26938, 1., 0.26938]],
+            s.bounds, atol=1e-4)
+
     def test_schema(self):
         # get a copy of the GLTF schema and do simple checks
         s = g.trimesh.exchange.gltf.get_schema()
diff --git a/tests/test_obj.py b/tests/test_obj.py
index 23287d611..9e074328e 100644
--- a/tests/test_obj.py
+++ b/tests/test_obj.py
@@ -260,6 +260,25 @@ def test_no_uv(self):
             mesh.export(file_type='obj'),
             file_type='obj')
         assert g.np.isclose(mesh.area, rec.area)
 
+    def test_chair(self):
+        mesh = next(iter(g.get_mesh('chair.zip').geometry.values()))
+
+        # this model comes with vertex normals
+        assert 'vertex_normals' in mesh._cache
+        assert g.np.allclose(
+            1.0, g.np.linalg.norm(mesh.vertex_normals, axis=1))
+        mesh.apply_scale(0.46377314288075433)
+        assert 'vertex_normals' in mesh._cache
+        assert g.np.allclose(
+            1.0, g.np.linalg.norm(mesh.vertex_normals, axis=1))
+        assert 'vertex_normals' in mesh._cache
+        mesh._cache.clear()
+        assert 'vertex_normals' not in mesh._cache
+        # if we recomputed now, the degenerate faces
+        # would cause some of these vertex normals to be zero
+        # assert g.np.allclose(
+        #     1.0, g.np.linalg.norm(mesh.vertex_normals, axis=1))
+
 
 def simple_load(text):
     # we're going to load faces in a basic text way
diff --git a/trimesh/base.py b/trimesh/base.py
index 559dda9cc..26794ee39 100644
--- a/trimesh/base.py
+++ b/trimesh/base.py
@@ -237,8 +237,8 @@ def process(self, validate=False, **kwargs):
         # we can keep face and vertex normals in the cache without recomputing
         # if faces or vertices have been removed, normals are validated before
        # being returned so there is no danger of inconsistent dimensions
-        self._cache.clear(exclude=['face_normals',
-                                   'vertex_normals'])
+        self._cache.clear(exclude={'face_normals',
+                                   'vertex_normals'})
         self.metadata['processed'] = True
 
         return self
@@ -2235,19 +2235,17 @@ def apply_transform(self, matrix):
             matrix)[0]
 
         # preserve face normals if we have them stored
-        new_face_normals = None
         if has_rotation and 'face_normals' in self._cache:
             # transform face normals by rotation component
-            new_face_normals = util.unitize(
+            self._cache.cache['face_normals'] = util.unitize(
                 transformations.transform_points(
                     self.face_normals,
                     matrix=matrix,
                     translate=False))
 
         # preserve vertex normals if we have them stored
-        new_vertex_normals = None
         if has_rotation and 'vertex_normals' in self._cache:
-            new_vertex_normals = util.unitize(
+            self._cache.cache['vertex_normals'] = util.unitize(
                 transformations.transform_points(
                     self.vertex_normals,
                     matrix=matrix,
@@ -2264,15 +2262,12 @@ def apply_transform(self, matrix):
 
         # assign the new values
         self.vertices = new_vertices
-        # may be None if we didn't have them previously
-        self.face_normals = new_face_normals
-        self.vertex_normals = new_vertex_normals
 
         # preserve normals and topology in cache
         # while dumping everything else
-        self._cache.clear(exclude=[
+        self._cache.clear(exclude={
             'face_normals',     # transformed by us
-            'vertex_normals'    # also transformed by us
+            'vertex_normals',   # also transformed by us
             'face_adjacency',   # topological
             'face_adjacency_edges',
             'face_adjacency_unshared',
@@ -2285,10 +2280,9 @@ def apply_transform(self, matrix):
             'edges_sparse',
             'body_count',
             'faces_unique_edges',
-            'euler_number', ])
+            'euler_number'})
         # set the cache ID with the current hash value
         self._cache.id_set()
-        log.debug('mesh transformed by matrix')
 
         return self
diff --git a/trimesh/caching.py b/trimesh/caching.py
index 139486579..90247edd7 100644
--- a/trimesh/caching.py
+++ b/trimesh/caching.py
@@ -423,7 +423,12 @@ def verify(self):
 
     def clear(self, exclude=None):
         """
-        Remove all elements in the cache.
+        Remove elements in the cache.
+
+        Parameters
+        -----------
+        exclude : list
+          List of keys in cache to not clear.
         """
         if exclude is None:
             self.cache = {}
diff --git a/trimesh/comparison.py b/trimesh/comparison.py
index 5019132f6..b58c4ee42 100644
--- a/trimesh/comparison.py
+++ b/trimesh/comparison.py
@@ -5,6 +5,7 @@
 Provide methods for quickly hashing and comparing meshes.
 """
 
+import hashlib
 import numpy as np
 
 from . import util
@@ -153,8 +154,7 @@ def identifier_hash(identifier, sigfig=None):
     if (multiplier < 0).any():
         multiplier += np.abs(multiplier.min())
     hashable = (as_int * (10 ** multiplier)).astype(np.int64)
-    md5 = util.md5_object(hashable)
-    return md5
+    return hashlib.md5(hashable).hexdigest()
 
 
 def face_ordering(mesh):
diff --git a/trimesh/constants.py b/trimesh/constants.py
index 969d1e56a..0ed0d4017 100644
--- a/trimesh/constants.py
+++ b/trimesh/constants.py
@@ -59,20 +59,27 @@ class TolerancePath(object):
       still be considered to be on the plane
     tol.seg_frac : float
       When simplifying line segments what percentage of the drawing
-          scale can a segment be and have a curve fitted
-    tol.seg_angle: when simplifying line segments to arcs, what angle
-          can a segment span to be acceptable.
-    tol.aspect_frac: when simplifying line segments to closed arcs (circles)
-          what percentage can the aspect ratio differfrom 1:1
-          before escaping the fit early
-    tol.radius_frac: when simplifying line segments to arcs, what percentage
-          of the fit radius can vertices deviate to be acceptable
-    tol.radius_min: when simplifying line segments to arcs, what is the minimum
-          radius multiplied by document scale for an acceptable fit
-    tol.radius_max: when simplifying line segments to arcs, what is the maximum
-          radius multiplied by document scale for an acceptable fit
-    tol.tangent: when simplifying line segments to curves, what is the maximum
-          angle the end sections can deviate from tangent that is acceptable.
+      scale can a segment be and have a curve fitted
+    tol.seg_angle : float
+      When simplifying line segments to arcs, what angle
+      can a segment span to be acceptable.
+    tol.aspect_frac : float
+      When simplifying line segments to closed arcs (circles)
+      what percentage can the aspect ratio differ from 1:1
+      before escaping the fit early
+    tol.radius_frac : float
+      When simplifying line segments to arcs, what percentage
+      of the fit radius can vertices deviate to be acceptable
+    tol.radius_min : float
+      When simplifying line segments to arcs, what is the minimum
+      radius multiplied by document scale for an acceptable fit
+    tol.radius_max : float
+      When simplifying line segments to arcs, what is the maximum
+      radius multiplied by document scale for an acceptable fit
+    tol.tangent : float
+      When simplifying line segments to curves, what is the maximum
+      angle the end sections can deviate from tangent that is
+      acceptable.
     """
 
     def __init__(self, **kwargs):
@@ -97,21 +104,26 @@ def __init__(self, **kwargs):
 
 class ResolutionPath(object):
     """
-    res.seg_frac: when discretizing curves, what percentage of the drawing
-         scale should we aim to make a single segment
-    res.seg_angle: when discretizing curves, what angle should a section span
-    res.max_sections: when discretizing splines, what is the maximum number
-         of segments per control point
-    res.min_sections: when discretizing splines, what is the minimum number
-         of segments per control point
-    res.export: format string to use when exporting floating point vertices
+    res.seg_frac : float
+      When discretizing curves, what percentage of the drawing
+      scale should we aim to make a single segment
+    res.seg_angle : float
+      When discretizing curves, what angle should a section span
+    res.max_sections : int
+      When discretizing splines, what is the maximum number
+      of segments per control point
+    res.min_sections : int
+      When discretizing splines, what is the minimum number
+      of segments per control point
+    res.export : str
+      Format string to use when exporting floating point vertices
     """
 
     def __init__(self, **kwargs):
         self.seg_frac = .05
         self.seg_angle = .08
-        self.max_sections = 10
-        self.min_sections = 5
+        self.max_sections = 500
+        self.min_sections = 20
         self.export = '0.10f'
diff --git a/trimesh/exchange/gltf.py b/trimesh/exchange/gltf.py
index 17ab97a4e..5c2d8df34 100644
--- a/trimesh/exchange/gltf.py
+++ b/trimesh/exchange/gltf.py
@@ -1427,6 +1427,7 @@ def _read_buffers(header, buffers, mesh_kwargs, merge_primitives=False, resolver
         if "mesh" in child:
             geometries = mesh_prim[child["mesh"]]
 
+            # if the node has a mesh associated with it
             if len(geometries) > 1:
                 # append root node
diff --git a/trimesh/exchange/load.py b/trimesh/exchange/load.py
index 07aacb2f1..47b45a156 100644
--- a/trimesh/exchange/load.py
+++ b/trimesh/exchange/load.py
@@ -209,7 +209,6 @@ def load_mesh(file_obj,
                           file_type=file_type,
                           resolver=resolver,
                           **kwargs)
-
         if not isinstance(results, list):
             results = [results]
diff --git a/trimesh/exchange/obj.py b/trimesh/exchange/obj.py
index a6427d0a9..ba41a776b 100644
--- a/trimesh/exchange/obj.py
+++ b/trimesh/exchange/obj.py
@@ -229,13 +229,22 @@ def load_obj(file_obj,
             # start with vertices and faces
             mesh.update({'faces': new_faces,
                          'vertices': v[mask_v].copy()})
-            # if vertex colors are OK save them
+
+            # if colors and normals are OK save them
             if vc is not None:
-                mesh['vertex_colors'] = vc[mask_v]
-            # if vertex normals are OK save them
+                try:
+                    # may fail on a malformed color mask
+                    mesh['vertex_colors'] = vc[mask_v]
+                except BaseException:
+                    log.warning('failed to load vertex_colors',
+                                exc_info=True)
             if mask_vn is not None:
-                mesh['vertex_normals'] = vn[mask_vn]
-
+                try:
+                    # may fail on a malformed mask
+                    mesh['vertex_normals'] = vn[mask_vn]
+                except BaseException:
+                    log.warning('failed to load vertex_normals',
+                                exc_info=True)
             visual = None
             if material in materials:
                 # use the material with the UV coordinates
diff --git a/trimesh/exchange/ply.py b/trimesh/exchange/ply.py
index fc4d1f7ba..eb4f26bde 100644
--- a/trimesh/exchange/ply.py
+++ b/trimesh/exchange/ply.py
@@ -255,7 +255,7 @@ def export_ply(mesh,
     # if vertex normals aren't specifically asked for
     # only export them if they are stored in cache
     if vertex_normal is None:
-        vertex_normal = 'vertex_normal' in mesh._cache
+        vertex_normal = 'vertex_normals' in mesh._cache
 
     # if we want to include mesh attributes in the export
     if include_attributes:
diff --git a/trimesh/intersections.py b/trimesh/intersections.py
index 00be08327..15c5e9138 100644
--- a/trimesh/intersections.py
+++ b/trimesh/intersections.py
@@ -189,10 +189,11 @@ def handle_basic(signs, faces, vertices):
     return lines
 
 
-def mesh_multiplane(mesh,
-                    plane_origin,
-                    plane_normal,
-                    heights):
+def mesh_multiplane(
+        mesh,
+        plane_origin,
+        plane_normal,
+        heights):
     """
     A utility function for slicing a mesh by multiple parallel
     planes which caches the dot product operation.
@@ -225,12 +226,14 @@ def mesh_multiplane(mesh,
     heights = np.asanyarray(heights, dtype=np.float64)
 
     # dot product of every vertex with plane
-    vertex_dots = np.dot(plane_normal,
-                         (mesh.vertices - plane_origin).T)
+    vertex_dots = np.dot(
+        plane_normal,
+        (mesh.vertices - plane_origin).T)
 
     # reconstruct transforms for each 2D section
-    base_transform = geometry.plane_transform(origin=plane_origin,
-                                              normal=plane_normal)
+    base_transform = geometry.plane_transform(
+        origin=plane_origin,
+        normal=plane_normal)
     base_transform = np.linalg.inv(base_transform)
 
     # alter translation Z inside loop
@@ -267,7 +270,7 @@ def mesh_multiplane(mesh,
 
         # if we didn't screw up the transform all
         # of the Z values should be zero
-        assert np.allclose(lines_2D[:, 2], 0.0)
+        # assert np.allclose(lines_2D[:, 2], 0.0)
 
         # reshape back in to lines and discard Z
         lines_2D = lines_2D[:, :2].reshape((-1, 2, 2))
diff --git a/trimesh/path/entities.py b/trimesh/path/entities.py
index 4abcbf43a..94ccbc646 100644
--- a/trimesh/path/entities.py
+++ b/trimesh/path/entities.py
@@ -194,7 +194,7 @@ def explode(self):
         Returns
         ------------
         explode : list of Entity
-          Current entity split into multiple entities if necessary
+          Current entity split into multiple entities.
         """
         return [self.copy()]
 
@@ -558,10 +558,34 @@ def is_valid(self):
 
     def _bytes(self):
         # give consistent ordering of points for hash
-        if self.points[0] > self.points[-1]:
-            return b'Arc' + bytes(self.closed) + self.points.tobytes()
-        else:
-            return b'Arc' + bytes(self.closed) + self.points[::-1].tobytes()
+        order = int(self.points[0] > self.points[-1]) * 2 - 1
+        return b'Arc' + bytes(self.closed) + self.points[::order].tobytes()
+
+    def length(self, vertices):
+        """
+        Return the arc length of the 3-point arc.
+
+        Parameters
+        ----------
+        vertices : (n, d) float
+          Vertices for overall drawing.
+
+        Returns
+        -----------
+        length : float
+          Length of arc.
+        """
+        # find the actual radius and angle span
+        if self.closed:
+            # we don't need the angular span as
+            # it's indicated as a closed circle
+            fit = self.center(
+                vertices, return_normal=False, return_angle=False)
+            return np.pi * fit['radius'] * 4
+        # get the angular span of the circular arc
+        fit = self.center(
+            vertices, return_normal=False, return_angle=True)
+        return fit['span'] * fit['radius'] * 2
 
     def discrete(self, vertices, scale=1.0):
         """
@@ -579,9 +603,10 @@ def discrete(self, vertices, scale=1.0):
         discrete : (m, dimension) float
           Path in space made up of line segments
         """
-        discrete = discretize_arc(vertices[self.points],
-                                  close=self.closed,
-                                  scale=scale)
+        discrete = discretize_arc(
+            vertices[self.points],
+            close=self.closed,
+            scale=scale)
 
         return self._orient(discrete)
 
     def center(self, vertices, **kwargs):
diff --git a/trimesh/path/exchange/svg_io.py b/trimesh/path/exchange/svg_io.py
index 24bc134f1..e44b7484f 100644
--- a/trimesh/path/exchange/svg_io.py
+++ b/trimesh/path/exchange/svg_io.py
@@ -255,7 +255,8 @@ def __init__(self, lines):
             # get parsed entities from svg.path
             raw = np.array(list(parse_path(path_string)))
             # check to see if each entity is "line-like"
-            is_line = np.array([type(i).__name__ in ('Line', 'Close')
+            is_line = np.array([type(i).__name__ in
+                                ('Line', 'Close')
                                 for i in raw])
             # find groups of consecutive lines so we can combine them
             blocks = grouping.blocks(
@@ -341,15 +342,17 @@ def svg_arc(arc, reverse):
     arc_idx = arc.points[::((reverse * -2) + 1)]
     vertices = points[arc_idx]
     vertex_start, vertex_mid, vertex_end = vertices
-    center_info = arc_center(vertices, return_normal=False, return_angle=True)
+    center_info = arc_center(
+        vertices, return_normal=False, return_angle=True)
     C, R, angle = (center_info['center'],
                    center_info['radius'],
                    center_info['span'])
     if arc.closed:
         return circle_to_svgpath(C, R, reverse)
     large_flag = str(int(angle > np.pi))
-    sweep_flag = str(int(np.cross(vertex_mid - vertex_start,
-                                  vertex_end - vertex_start) > 0.0))
+    sweep_flag = str(int(np.cross(
+        vertex_mid - vertex_start,
+        vertex_end - vertex_start) > 0.0))
     return (move_to(arc_idx[0]) +
             'A {R},{R} 0 {}, {} {},{}'.format(
                 large_flag,
diff --git a/trimesh/path/path.py b/trimesh/path/path.py
index 64353cce5..a1dacb3a1 100644
--- a/trimesh/path/path.py
+++ b/trimesh/path/path.py
@@ -8,6 +8,7 @@
 import numpy as np
 
 import copy
+import hashlib
 import collections
 
 from ..points import plane_fit
@@ -241,8 +242,8 @@ def md5(self):
         """
         # first MD5 the points in every entity
         target = '{}{}'.format(
-            util.md5_object(bytes().join(
-                e._bytes() for e in self.entities)),
+            hashlib.md5(bytes().join(
+                e._bytes() for e in self.entities)).hexdigest(),
            self.vertices.md5())
 
         return target
@@ -314,7 +315,8 @@ def length(self):
 
         Returns
         --------
-        length: float, summed length of every entity
+        length : float
+          Summed length of every entity
         """
         length = float(sum(i.length(self.vertices)
                            for i in self.entities))
@@ -1044,15 +1046,21 @@ def rasterize(self,
 
        Parameters
        ------------
-        pitch: float or (2,) float, length(s) in model space of pixel edges
-        origin: (2,) float, origin position in model space
-        resolution: (2,) int, resolution in pixel space
-        fill: bool, if True will return closed regions as filled
-        width: int, if not None will draw outline this wide (pixels)
+        pitch : float or (2,) float
+          Length(s) in model space of pixel edges
+        origin : (2,) float
+          Origin position in model space
+        resolution : (2,) int
+          Resolution in pixel space
+        fill : bool
+          If True will return closed regions as filled
+        width : int
+          If not None will draw outline this wide (pixels)
 
         Returns
         ------------
-        raster: PIL.Image object, mode 1
+        raster : PIL.Image object, mode 1
+          Rasterized version of closed regions.
         """
         image = raster.rasterize(self,
                                  pitch=pitch,
@@ -1069,22 +1077,22 @@ def sample(self, count, **kwargs):
 
         Parameters
         -----------
-        count : int
-            Number of points to return
-            If there are multiple bodies, there will
-            be up to count * bodies points returned
-        factor : float
-            How many points to test per loop
-            IE, count * factor
+        count : int
+          Number of points to return
+          If there are multiple bodies, there will
+          be up to count * bodies points returned
+        factor : float
+          How many points to test per loop
+          IE, count * factor
         max_iter : int,
-            Maximum number of intersection loops
-            to run, total points sampled is
-            count * factor * max_iter
+          Maximum number of intersection loops
+          to run, total points sampled is
+          count * factor * max_iter
 
         Returns
         -----------
         hit : (n, 2) float
-            Random points inside polygon
+          Random points inside polygon
         """
 
         poly = self.polygons_full
@@ -1116,7 +1124,8 @@ def to_3D(self, transform=None):
 
         Returns
         -----------
-        path_3D: Path3D version of current path
+        path_3D : Path3D
+          3D version of current path
         """
         # if there is a stored 'to_3D' transform in metadata use it
         if transform is None and 'to_3D' in self.metadata:
@@ -1195,7 +1204,8 @@ def area(self):
 
         Returns
         ---------
-        area: float, total area of polygons minus interiors
+        area : float
+          Total area of polygons minus interiors
         """
         area = float(sum(i.area for i in self.polygons_full))
         return area
@@ -1434,7 +1444,8 @@ def identifier(self):
 
         Returns
         ---------
-        identifier: (5,) float, unique identifier
+        identifier : (5,) float
+          Unique identifier
         """
         if len(self.polygons_full) != 1:
             raise TypeError('Identifier only valid for single body')
@@ -1443,10 +1454,15 @@
     @caching.cache_decorator
     def identifier_md5(self):
         """
-        Return an MD5 of the identifier
+        Return an MD5 of the identifier.
+
+        Returns
+        ----------
+        hashed : str
+          Hashed identifier.
         """
         as_int = (self.identifier * 1e4).astype(np.int64)
-        hashed = util.md5_object(as_int.tobytes(order='C'))
+        hashed = hashlib.md5(as_int.tobytes(order='C')).hexdigest()
         return hashed
 
     @property
     def path_valid(self):
         """
         Returns
         ----------
-        path_valid: (n,) bool, indexes of self.paths self.polygons_closed
-            which are valid polygons
+        path_valid : (n,) bool
+          Indexes of self.paths self.polygons_closed
+          which are valid polygons.
         """
-        valid = [i is not None for i in self.polygons_closed]
-        valid = np.array(valid, dtype=bool)
+        valid = np.array(
+            [i is not None for i in self.polygons_closed],
+            dtype=bool)
         return valid
 
     @caching.cache_decorator
     def root(self):
         """
-        Which indexes of self.paths/self.polygons_closed are root curves.
-        Also known as 'shell' or 'exterior.
+        Which indexes of self.paths/self.polygons_closed
+        are root curves, also known as 'shell' or 'exterior'.
 
         Returns
         ---------
-        root: (n,) int, list of indexes
+        root : (n,) int
+          List of indexes
         """
         populate = self.enclosure_directed  # NOQA
         return self._cache['root']
@@ -1500,7 +1519,8 @@ def enclosure_shell(self):
 
         Returns
         ----------
-        corresponding: dict, {index of self.paths of shell : [indexes of holes]}
+        corresponding : dict
+          {index of self.paths of shell : [indexes of holes]}
         """
         pairs = [(r, self.connected_paths(r, include_self=False))
                  for r in self.root]
diff --git a/trimesh/scene/scene.py b/trimesh/scene/scene.py
index d8b546c1c..00e86500a 100644
--- a/trimesh/scene/scene.py
+++ b/trimesh/scene/scene.py
@@ -1,3 +1,4 @@
+import hashlib
 import numpy as np
 
 import collections
@@ -230,7 +231,7 @@ def md5(self):
           MD5 hash of scene
         """
         # start with transforms hash
-        return util.md5_object(self._hashable())
+        return hashlib.md5(self._hashable()).hexdigest()
 
     def crc(self):
         """
diff --git a/trimesh/scene/transforms.py b/trimesh/scene/transforms.py
index 0df2b1e85..14a7c3f65 100644
--- a/trimesh/scene/transforms.py
+++ b/trimesh/scene/transforms.py
@@ -453,9 +453,9 @@ def __init__(self, **kwargs):
 
         # store data for a particular edge keyed by tuple
         # {(u, v) : data }
-        self.edge_data = {}
+        self.edge_data = collections.defaultdict(dict)
         # {u: data}
-        self.node_data = {}
+        self.node_data = collections.defaultdict(dict)
 
         # if multiple calls are made for the same path
         # but the connectivity hasn't changed return cached
@@ -496,7 +496,12 @@ def add_edge(self, u, v, **kwargs):
         # store kwargs for edge data keyed with tuple
         self.edge_data[(u, v)] = kwargs
         # set empty node data
-        self.node_data.update({u: {}, v: {}})
+        self.node_data[u].update({})
+        if 'geometry' in kwargs:
+            self.node_data[v].update(
+                {'geometry': kwargs['geometry']})
+        else:
+            self.node_data[v].update({})
 
         return True
 
diff --git a/trimesh/util.py b/trimesh/util.py
index 27842ce1c..5371bc060 100644
--- a/trimesh/util.py
+++ b/trimesh/util.py
@@ -889,29 +889,6 @@ def hash_file(file_obj,
     return hashed
 
 
-def md5_object(obj):
-    """
-    If an object is hashable, return the string of the MD5.
-
-    Parameters
-    ------------
-    obj: object
-
-    Returns
-    ----------
-    md5: str, MD5 hash
-    """
-    hasher = hashlib.md5()
-    if isinstance(obj, basestring) and PY3:
-        # in python3 convert strings to bytes before hashing
-        hasher.update(obj.encode('utf-8'))
-    else:
-        hasher.update(obj)
-
-    md5 = hasher.hexdigest()
-    return md5
-
-
 def attach_to_log(level=logging.DEBUG,
                   handler=None,
                   loggers=None,
diff --git a/trimesh/version.py b/trimesh/version.py
index 5fadf310d..007aa1eb9 100644
--- a/trimesh/version.py
+++ b/trimesh/version.py
@@ -1 +1 @@
-__version__ = '3.9.15'
+__version__ = '3.9.16'
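
Below is a short illustrative sketch (not part of the patch itself) of the behavior the new tests above exercise: with this change, unit-length vertex normals stay in the mesh cache across a scaling transform instead of being discarded and recomputed, and the nested-transform GLB resolves to the expected scene graph. It assumes trimesh is installed from this branch and is run from the repository root so the bundled models resolve:

    import numpy as np
    import trimesh

    # chair.zip loads as a scene wrapping a single OBJ mesh that ships vertex normals
    mesh = next(iter(trimesh.load('models/chair.zip').geometry.values()))
    assert 'vertex_normals' in mesh._cache
    assert np.allclose(1.0, np.linalg.norm(mesh.vertex_normals, axis=1))

    # apply_scale goes through apply_transform; the cached normals are now
    # transformed and re-unitized in place rather than invalidated
    mesh.apply_scale(0.5)
    assert 'vertex_normals' in mesh._cache
    assert np.allclose(1.0, np.linalg.norm(mesh.vertex_normals, axis=1))

    # nested.glb exercises nested node transforms that include scale
    scene = trimesh.load('models/nested.glb')
    assert len(scene.graph.nodes_geometry) == 3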