Skip to content

Commit

Permalink
Merge branch 'release/v1.7.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
wolph committed Oct 10, 2015
2 parents fa39f50 + 798ca51 commit 96cf737
Show file tree
Hide file tree
Showing 7 changed files with 147 additions and 38 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
Expand Up @@ -7,7 +7,7 @@ python:
install:
- pip install .
- pip install -r test_requirements.txt
- pip install coveralls
- pip install coveralls flake8

# command to run tests
script:
Expand Down
1 change: 1 addition & 0 deletions setup.py
Expand Up @@ -46,6 +46,7 @@ def run_tests(self):
install_requires=[
'numpy',
'nine',
'enum34',
'python-utils>=1.6.2',
],
)
Expand Down
80 changes: 64 additions & 16 deletions stl/base.py
@@ -1,5 +1,6 @@
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import enum
import math
import numpy
import collections
Expand All @@ -14,12 +15,41 @@
VECTORS = 3
#: Dimensions used in a vector
DIMENSIONS = 3
#: X index (for example, `mesh.v0[0][X]`)
X = 0
#: Y index (for example, `mesh.v0[0][Y]`)
Y = 1
#: Z index (for example, `mesh.v0[0][Z]`)
Z = 2


class Dimension(enum.IntEnum):
    '''Axis index of a coordinate within a vector.

    As an :py:class:`enum.IntEnum` the members double as plain array
    indices, e.g. ``mesh.v0[0][Dimension.X]``.
    '''
    X = 0
    Y = 1
    Z = 2

# For backwards compatibility, leave the original references
# (these module-level aliases predate the Dimension enum)
X = Dimension.X
Y = Dimension.Y
Z = Dimension.Z


class RemoveDuplicates(enum.Enum):
    '''
    Choose whether to remove no duplicates, leave only a single of the
    duplicates or remove all duplicates (leaving holes).
    '''
    NONE = 0
    SINGLE = 1
    ALL = 2

    @classmethod
    def map(cls, value):
        '''Coerce `value` to a RemoveDuplicates member.

        Members pass through unchanged; any other truthy value maps to
        SINGLE and any falsy value (``False``, ``None``, ``0``) maps to
        NONE, so callers may pass plain booleans.
        '''
        # `isinstance` is used instead of `value in cls` because Enum
        # membership tests vary by Python version: they raise TypeError
        # for non-members before 3.8 and, from 3.12, match raw values --
        # which would let e.g. `True` leak through un-coerced and break
        # the `.value` access at the call site.
        if isinstance(value, cls):
            return value
        return cls.SINGLE if value else cls.NONE


class BaseMesh(logger.Logged, collections.Mapping):
Expand Down Expand Up @@ -95,23 +125,25 @@ class BaseMesh(logger.Logged, collections.Mapping):
])

def __init__(self, data, calculate_normals=True,
remove_empty_areas=False, remove_duplicate_polygons=False,
remove_empty_areas=False,
remove_duplicate_polygons=RemoveDuplicates.NONE,
name='', **kwargs):
super(BaseMesh, self).__init__(**kwargs)
if remove_empty_areas:
data = self.remove_empty_areas(data)

if remove_duplicate_polygons:
data = self.remove_duplicate_polygons(data)
if RemoveDuplicates.map(remove_duplicate_polygons).value:
data = self.remove_duplicate_polygons(data,
remove_duplicate_polygons)

self.name = name
self.data = data

points = self.points = data['vectors']
self.points.shape = data.size, 9
self.x = points[:, X::3]
self.y = points[:, Y::3]
self.z = points[:, Z::3]
self.x = points[:, Dimension.X::3]
self.y = points[:, Dimension.Y::3]
self.z = points[:, Dimension.Z::3]
self.v0 = data['vectors'][:, 0]
self.v1 = data['vectors'][:, 1]
self.v2 = data['vectors'][:, 2]
Expand All @@ -123,15 +155,31 @@ def __init__(self, data, calculate_normals=True,
self.update_normals()

@classmethod
def remove_duplicate_polygons(cls, data):
def remove_duplicate_polygons(cls, data, value=RemoveDuplicates.SINGLE):
    '''Return a copy of `data` with duplicate polygons filtered out.

    :param data: structured array with a ``vectors`` field holding the
        polygons
    :param value: a :py:class:`RemoveDuplicates` member (or any value
        :py:meth:`RemoveDuplicates.map` can coerce). SINGLE keeps one
        copy of each duplicated polygon, ALL removes every copy
        (leaving holes), NONE returns `data` unchanged.
    '''
    value = RemoveDuplicates.map(value)
    # NOTE(review): polygons are compared through the per-dimension sum
    # of their vertices, so distinct polygons whose vertices happen to
    # sum to the same values would be treated as duplicates -- confirm
    # this is acceptable.
    polygons = data['vectors'].sum(axis=1)
    # Get a sorted list of indices
    idx = numpy.lexsort(polygons.T)
    # Get the indices of all different indices
    # (diff[i] is True when sorted entry i+1 differs from entry i)
    diff = numpy.any(polygons[idx[1:]] != polygons[idx[:-1]], axis=1)

    if value is RemoveDuplicates.SINGLE:
        # Only return the unique data, the True is so we always get at
        # least the originals
        return data[numpy.sort(idx[numpy.concatenate(([True], diff))])]
    elif value is RemoveDuplicates.ALL:
        # We need to return both items of the shifted diff
        diff_a = numpy.concatenate(([True], diff))
        diff_b = numpy.concatenate((diff, [True]))

        # Combine both unique lists
        filtered_data = data[numpy.sort(idx[diff_a & diff_b])]
        # NOTE(review): when removing every copy would discard at least
        # half of the polygons, this falls back to keeping the first
        # copy of each duplicate (diff_a) instead of leaving large
        # holes; the else branch indexes `idx` (length n) with the
        # shorter `diff` mask (length n-1) -- presumably intentional,
        # but verify against modern numpy's stricter boolean indexing.
        if len(filtered_data) <= len(data) / 2:
            return data[numpy.sort(idx[diff_a])]
        else:
            return data[numpy.sort(idx[diff])]
    else:
        return data

@classmethod
def remove_empty_areas(cls, data):
Expand Down
2 changes: 1 addition & 1 deletion stl/metadata.py
@@ -1,6 +1,6 @@
__package_name__ = 'numpy-stl'
__import_name__ = 'stl'
__version__ = '1.6.1'
__version__ = '1.7.0'
__author__ = 'Rick van Hattem'
__author_email__ = 'Wolph@Wol.ph'
__description__ = '''
Expand Down
3 changes: 0 additions & 3 deletions stl/stl.py
Expand Up @@ -213,9 +213,6 @@ def save(self, filename, fh=None, mode=AUTOMATIC, update_normals=True):
pass

def _write_ascii(self, fh, name):
print(fh)
print(type(fh))

def p(s, file):
file.write(b('%s\n' % s))

Expand Down
30 changes: 15 additions & 15 deletions test_requirements.txt
@@ -1,15 +1,15 @@
cov-core==1.14.0
coverage==3.7.1
docutils==0.12
execnet==1.2.0
numpy==1.9.0
pep8==1.5.7
py==1.4.25
pyflakes==0.8.1
pytest==2.6.3
pytest-cache==1.0
pytest-cov==1.8.0
pytest-flakes==0.2
pytest-pep8==1.0.6
python-utils==1.6.2
Sphinx==1.2.3
cov-core
coverage
docutils
execnet
numpy
pep8
py
pyflakes
pytest
pytest-cache
pytest-cov
pytest-flakes
pytest-pep8
python-utils
Sphinx
67 changes: 65 additions & 2 deletions tests/test_mesh.py
@@ -1,6 +1,7 @@
import numpy

from stl.mesh import Mesh
from stl.base import RemoveDuplicates


def test_units_1d():
Expand Down Expand Up @@ -77,14 +78,27 @@ def test_duplicate_polygons():
[0, 0, 0],
[0, 0, 0]])

mesh = Mesh(data)
assert mesh.data.size == 6

mesh = Mesh(data, remove_duplicate_polygons=0)
assert mesh.data.size == 6

mesh = Mesh(data, remove_duplicate_polygons=False)
assert mesh.data.size == 6

mesh = Mesh(data, remove_duplicate_polygons=None)
assert mesh.data.size == 6

mesh = Mesh(data, remove_duplicate_polygons=RemoveDuplicates.NONE)
assert mesh.data.size == 6

mesh = Mesh(data, remove_duplicate_polygons=RemoveDuplicates.SINGLE)
assert mesh.data.size == 3

mesh = Mesh(data, remove_duplicate_polygons=True)
assert mesh.data.size == 3

print('vectors')
print(mesh.vectors)
assert (mesh.vectors[0] == numpy.array([[1, 0, 0],
[0, 0, 0],
[0, 0, 0]])).all()
Expand All @@ -95,6 +109,55 @@ def test_duplicate_polygons():
[0, 0, 0],
[0, 0, 0]])).all()

mesh = Mesh(data, remove_duplicate_polygons=RemoveDuplicates.ALL)
assert mesh.data.size == 3

assert (mesh.vectors[0] == numpy.array([[1, 0, 0],
[0, 0, 0],
[0, 0, 0]])).all()
assert (mesh.vectors[1] == numpy.array([[2, 0, 0],
[0, 0, 0],
[0, 0, 0]])).all()
assert (mesh.vectors[2] == numpy.array([[0, 0, 0],
[0, 0, 0],
[0, 0, 0]])).all()


def test_remove_all_duplicate_polygons():
    # Five polygons distinguished only by the x coordinate of their
    # first vertex; the last two (x == 3) are identical duplicates.
    data = numpy.zeros(5, dtype=Mesh.dtype)
    for index, x in enumerate((0, 1, 2, 3, 3)):
        data['vectors'][index][0][0] = x

    # Without duplicate removal every polygon must survive.
    mesh = Mesh(data, remove_duplicate_polygons=False)
    assert mesh.data.size == 5
    Mesh.remove_duplicate_polygons(mesh.data, RemoveDuplicates.NONE)

    # RemoveDuplicates.ALL drops both copies of the duplicated polygon,
    # keeping only the three unique ones.
    mesh = Mesh(data, remove_duplicate_polygons=RemoveDuplicates.ALL)
    assert mesh.data.size == 3

    for index in range(3):
        expected = numpy.zeros((3, 3))
        expected[0][0] = index
        assert (mesh.vectors[index] == expected).all()


def test_empty_areas():
data = numpy.zeros(3, dtype=Mesh.dtype)
Expand Down

0 comments on commit 96cf737

Please sign in to comment.