Skip to content

Commit

Permalink
Merge pull request #746 from mikedh/merge/plyfix
Browse files Browse the repository at this point in the history
PLY fixes and cones
  • Loading branch information
mikedh committed Mar 19, 2020
2 parents 0a6f298 + b2a3dcc commit 0102123
Show file tree
Hide file tree
Showing 7 changed files with 267 additions and 34 deletions.
25 changes: 24 additions & 1 deletion .github/workflows/release.yml
Expand Up @@ -162,4 +162,27 @@ jobs:
if: always()
run: |
docker logout ${{ steps.login-ecr.outputs.registry }}
docker logout
docker logout
release:
name: Create Release
needs: tests
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@master
- name: Tag Version
id: set_tag
run: |
export VER=$(python -c "exec(open('trimesh/version.py','r').read());print(__version__)")
echo "::set-output name=tag_name::${VER}"
- name: Create Release
id: create_release
uses: actions/create-release@latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ steps.set_tag.outputs.tag_name }}
release_name: Release ${{ steps.set_tag.outputs.tag_name }}
draft: false
prerelease: false
18 changes: 18 additions & 0 deletions models/points_ascii_with_lists.ply
@@ -0,0 +1,18 @@
ply
format ascii 1.0
element vertex 5
property float x
property float y
property float z
element point_list 2
property list uchar uint point_indices1
property list uchar uint point_indices2
property float some_float
end_header
-0.06325 0.0359793 0.0420873
-0.06275 0.0360343 0.0425949
-0.0645 0.0365101 0.0404362
-0.064 0.0366195 0.0414512
-0.0635 0.0367289 0.0424662
3 10 11 12 2 13 14 1.1
2 10 11 3 12 13 14 2.2
13 changes: 13 additions & 0 deletions tests/test_creation.py
Expand Up @@ -25,6 +25,19 @@ def setUp(self):

self.engines = engines

def test_cone(self):
    # acceptable tolerance on the cone's bounding extents
    atol = 0.03

    # default arguments should produce a watertight single body
    default = g.trimesh.creation.cone()
    assert default.is_volume
    assert default.body_count == 1

    # a cone of radius 0.5 and height 1.0 should fit a unit cube
    unit = g.trimesh.creation.cone(radius=0.5, height=1.0)
    assert unit.is_volume
    assert unit.body_count == 1
    assert g.np.allclose(unit.extents, 1.0, atol=atol)

def test_cylinder(self):
# tolerance for cylinders
atol = 0.03
Expand Down
21 changes: 21 additions & 0 deletions tests/test_ply.py
Expand Up @@ -57,6 +57,27 @@ def test_points(self):
assert m.vertices.shape == (1024, 3)
assert isinstance(m, g.trimesh.PointCloud)

def test_list_properties(self):
    """
    Test reading point clouds with the following metadata:
    - lists of differing length
    - multiple list properties
    - single-element properties that come after list properties
    """
    mesh = g.get_mesh('points_ascii_with_lists.ply')
    data = mesh.metadata['ply_raw']['point_list']['data']

    # expected list values keyed by (property name, row index)
    expected = {
        ('point_indices1', 0): g.np.array([10, 11, 12], dtype=g.np.uint32),
        ('point_indices1', 1): g.np.array([10, 11], dtype=g.np.uint32),
        ('point_indices2', 0): g.np.array([13, 14], dtype=g.np.uint32),
        ('point_indices2', 1): g.np.array([12, 13, 14], dtype=g.np.uint32)}
    for (name, row), value in expected.items():
        assert g.np.array_equal(data[name][row], value)

    # the scalar property trailing the lists should parse as float32
    assert g.np.array_equal(
        data['some_float'],
        g.np.array([1.1, 2.2], dtype=g.np.float32))


if __name__ == '__main__':
g.trimesh.util.attach_to_log()
Expand Down
92 changes: 92 additions & 0 deletions trimesh/creation.py
Expand Up @@ -735,6 +735,98 @@ def capsule(height=1.0,
return capsule


def cone(radius=1.0,
         height=1.0,
         sections=32,
         transform=None,
         **kwargs):
    """
    Create a mesh of a cone along Z centered at the origin.

    Parameters
    ----------
    radius : float
      The radius of the cone base
    height : float
      The height of the cone
    sections : int
      How many pie wedges should the cone base have
    transform : (4, 4) float
      Transform to apply after creation
    **kwargs : dict
      Passed to the Trimesh constructor

    Returns
    ----------
    cone : trimesh.Trimesh
      Resulting mesh of a cone
    """
    # create a 2D pie out of wedges
    theta = np.linspace(0, np.pi * 2, sections)
    vertices = np.column_stack((np.sin(theta),
                                np.cos(theta))) * radius
    # the single vertex at the center of the circle
    # we're overwriting the duplicated start/end vertex
    vertices[0] = [0, 0]

    # wrangle indexes into a fan triangulation of the pie wedges
    index = np.arange(1, len(vertices) + 1).reshape((-1, 1))
    index[-1] = 1
    faces = np.tile(index, (1, 2)).reshape(-1)[1:-1].reshape((-1, 2))
    # use np.int64 explicitly: the bare `np.int` alias is deprecated
    # and removed in numpy >= 1.24
    faces = np.column_stack((np.zeros(len(faces), dtype=np.int64), faces))

    vertices = np.asanyarray(vertices, dtype=np.float64)
    height = float(height)
    faces = np.asanyarray(faces, dtype=np.int64)

    # make sure triangulation winding is pointing up
    normal_test = normals(
        [util.stack_3D(vertices[faces[0]])])[0]

    normal_dot = np.dot(normal_test,
                        [0.0, 0.0, np.sign(height)])[0]

    # make sure the triangulation is aligned with the sign of
    # the height we've been passed
    if normal_dot < 0.0:
        faces = np.fliplr(faces)

    # stack the (n,3) faces into (3*n, 2) edges
    edges = faces_to_edges(faces)
    edges_sorted = np.sort(edges, axis=1)
    # edges which only occur once are on the boundary of the polygon
    # since the triangulation may have subdivided the boundary of the
    # shapely polygon, we need to find it again
    edges_unique = grouping.group_rows(
        edges_sorted, require_count=1)

    # (n, 2, 2) set of line segments (positions, not references)
    boundary = vertices[edges[edges_unique]]

    # build the side triangles: each boundary segment plus the apex
    vertical = np.concatenate(
        [boundary, np.zeros((sections - 1, 1, 2))], axis=1)
    vertical = vertical.reshape(-1, 2)
    vertical = np.column_stack(
        (vertical, np.tile([0, 0, height], len(boundary))))
    vertical_faces = np.arange(len(vertical)).reshape(-1, 3)

    vertices_3D = util.stack_3D(vertices)

    # a sequence of zero- indexed faces, which will then be appended
    # with offsets to create the final mesh
    faces_seq = [faces[:, ::-1], vertical_faces]
    vertices_seq = [vertices_3D, vertical]

    # append sequences into flat nicely indexed arrays
    vertices, faces = util.append_faces(vertices_seq, faces_seq)
    cone = Trimesh(vertices=vertices, faces=faces, **kwargs)

    # shift along Z so the cone is centered at the origin
    cone.vertices[:, 2] -= height / 4.0
    if transform is not None:
        cone.apply_transform(transform)

    return cone


def cylinder(radius=1.0,
height=1.0,
sections=32,
Expand Down
130 changes: 98 additions & 32 deletions trimesh/exchange/ply.py
Expand Up @@ -477,6 +477,75 @@ def element_colors(element):
return None, 0.0


def load_element_different(properties, data):
    """
    Load elements which include lists of different lengths
    based on the element's property-definitions.

    Parameters
    ------------
    properties : dict
      Property definitions encoded in a dict where the property name is the key
      and the property data type the value.
    data : array
      Data rows for this element.

    Returns
    ------------
    element_data : dict
      Maps property name to a numpy array of parsed values; ragged
      list-properties are stored as object arrays of per-row arrays.
    """
    element_data = {k: [] for k in properties.keys()}
    for row in data:
        start = 0
        for name, dt in properties.items():
            length = 1
            if '$LIST' in dt:
                dt = dt.split('($LIST,)')[-1]
                # the first entry in a list-property is the
                # number of elements in the list
                length = int(row[start])
                # skip the first entry (the length), when reading the data
                start += 1
            end = start + length
            element_data[name].append(row[start:end].astype(dt))
            # start next property at the end of this one
            start = end

    # convert all property lists to numpy arrays
    for name in element_data.keys():
        try:
            # rows of equal length stack into a homogeneous array
            element_data[name] = np.array(element_data[name]).squeeze()
        except ValueError:
            # rows have differing lengths: numpy >= 1.24 refuses to
            # build an inhomogeneous array implicitly, so store the
            # ragged rows in an explicit object array instead
            ragged = np.empty(len(element_data[name]), dtype=object)
            ragged[:] = element_data[name]
            element_data[name] = ragged

    return element_data


def load_element_single(properties, data):
    """
    Load element data with lists of a single length
    based on the element's property-definitions.

    Parameters
    ------------
    properties : dict
      Property definitions encoded in a dict where the property name is the key
      and the property data type the value.
    data : array
      Data rows for this element. If the data contains list-properties,
      all lists belonging to one property must have the same length.
    """
    # determine the column span of each property from the first row,
    # since every row is assumed to have identical layout
    first = data[0]
    columns = {}
    current = 0
    for name, dt in properties.items():
        # a property spans a single column unless it is a list
        span = 1
        if '$LIST' in dt:
            # the first entry of a list-property holds its element count
            span = int(first[current])
            # the count column itself is not part of the payload
            current += 1
        columns[name] = (current, current + span)
        current += span

    # slice out each property's columns and cast to its declared type
    return {name: data[:, col[0]:col[1]].astype(
        properties[name].split('($LIST,)')[-1])
        for name, col in columns.items()}


def ply_ascii(elements, file_obj):
"""
Load data from an ASCII PLY file into an existing elements data structure.
Expand All @@ -501,44 +570,41 @@ def ply_ascii(elements, file_obj):
for i in lines])

# store the line position in the file
position = 0
row_pos = 0

# loop through data we need
for key, values in elements.items():
# if the element is empty ignore it
if 'length' not in values or values['length'] == 0:
continue
# will store (start, end) column index of data
columns = collections.deque()
# will store the total number of rows
rows = 0

for name, dtype in values['properties'].items():
# we need to know how many elements are in this dtype
if '$LIST' in dtype:
# if an element contains a list property handle it here
row = array[position]
list_count = int(row[rows])
# ignore the count and take the data
columns.append([rows + 1,
rows + 1 + list_count])
rows += list_count + 1
# change the datatype to just the dtype for data
values['properties'][name] = dtype.split('($LIST,)')[-1]
else:
# a single column data field
columns.append([rows, rows + 1])
rows += 1
# get the lines as a 2D numpy array
data = np.vstack(array[position:position + values['length']])
# offset position in file
position += values['length']
# store columns we care about by name and convert to data type
elements[key]['data'] = {n: data[:, c[0]:c[1]].astype(dt)
for n, dt, c in zip(
values['properties'].keys(), # field name
values['properties'].values(), # data type of field
columns)} # list of (start, end) column indexes

data = array[row_pos:row_pos + values['length']]
row_pos += values['length']

# try stacking the data, which simplifies column-wise access. this is only
# possible, if all rows have the same length.
try:
data = np.vstack(data)
col_count_equal = True
except ValueError:
col_count_equal = False

# number of list properties in this element
list_count = sum(1 for dt in values['properties'].values() if '$LIST' in dt)
if col_count_equal and list_count <= 1:
# all rows have the same length and we only have at most one list
# property where all entries have the same length. this means we can
# use the quick numpy-based loading.
element_data = load_element_single(
values['properties'], data)
else:
# there are lists of differing lengths. we need to fall back to loading
# the data by iterating all rows and checking for list-lengths. this is
# slower than the variant above.
element_data = load_element_different(
values['properties'], data)

elements[key]['data'] = element_data


def ply_binary(elements, file_obj):
Expand Down
2 changes: 1 addition & 1 deletion trimesh/version.py
@@ -1 +1 @@
__version__ = '3.6.10'
__version__ = '3.6.11'

0 comments on commit 0102123

Please sign in to comment.