Skip to content

Commit

Permalink
feat: add reading of .xyz file format
Browse files Browse the repository at this point in the history
  • Loading branch information
peppsac committed Feb 22, 2019
1 parent f55669b commit 1347272
Show file tree
Hide file tree
Showing 2 changed files with 174 additions and 17 deletions.
40 changes: 23 additions & 17 deletions py3dtiles/convert.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
from py3dtiles import TileReader
from py3dtiles.points.shared_node_store import SharedNodeStore
import py3dtiles.points.task.las_reader as las_reader
import py3dtiles.points.task.xyz_reader as xyz_reader
import py3dtiles.points.task.node_process as node_process
import py3dtiles.points.task.pnts_writer as pnts_writer

Expand Down Expand Up @@ -129,7 +130,9 @@ def zmq_process(activity_graph, projection, node_store, octree_metadata, folder,
# ack
break

las_reader.run(
_, ext = os.path.splitext(command['filename'])
fn = las_reader.run if ext == '.las' else xyz_reader.run
fn(
command['id'],
command['filename'],
command['offset_scale'],
Expand Down Expand Up @@ -200,23 +203,23 @@ def can_pnts_be_written(name, finished_node, input_nodes, active_nodes):
not is_ancestor_in_list(ln, name, input_nodes))


LasReader = namedtuple('LasReader', ['input', 'active'])
Reader = namedtuple('Reader', ['input', 'active'])
NodeProcess = namedtuple('NodeProcess', ['input', 'active', 'inactive'])
ToPnts = namedtuple('ToPnts', ['input', 'active'])


class State():
def __init__(self, pointcloud_file_portions):
self.las_reader = LasReader(input=pointcloud_file_portions, active=[])
self.reader = Reader(input=pointcloud_file_portions, active=[])
self.node_process = NodeProcess(input={}, active={}, inactive=[])
self.to_pnts = ToPnts(input=[], active=[])

def print_debug(self):
print('{:^16}|{:^8}|{:^8}|{:^8}'.format('Step', 'Input', 'Active', 'Inactive'))
print('{:^16}|{:^8}|{:^8}|{:^8}'.format(
'LAS reader',
len(self.las_reader.input),
len(self.las_reader.active),
len(self.reader.input),
len(self.reader.active),
''))
print('{:^16}|{:^8}|{:^8}|{:^8}'.format(
'Node process',
Expand Down Expand Up @@ -305,7 +308,9 @@ def main(args):
node_store = SharedNodeStore(working_dir)

# read all input files headers and determine the aabb/spacing
infos = las_reader.init(args)
_, ext = os.path.splitext(args.files[0])
fn = las_reader.init if ext == '.las' else xyz_reader.init
infos = fn(args)

avg_min = infos['avg_min']
rotation_matrix = None
Expand Down Expand Up @@ -367,7 +372,7 @@ def main(args):
elif base_spacing > 1:
root_scale = np.array([0.1, 0.1, 0.1])
else:
root_scale = np.array([1.0, 1.0, 1.0])
root_scale = np.array([1, 1, 1])

root_aabb = root_aabb * root_scale
root_spacing = compute_spacing(root_aabb)
Expand All @@ -381,6 +386,7 @@ def main(args):
print(' - root spacing: {}'.format(root_spacing / root_scale[0]))
print(' - root aabb: {}'.format(root_aabb))
print(' - original aabb: {}'.format(original_aabb))
print(' - scale: {}'.format(root_scale))

startup = time.time()

Expand Down Expand Up @@ -464,14 +470,14 @@ def add_tasks_to_process(state, name, task, point_count):
node_store.put(result['name'], result['save'])

if result['name'][0:4] == b'root':
state.las_reader.active.remove(result['name'])
state.reader.active.remove(result['name'])
else:
del state.node_process.active[result['name']]

if len(result['name']) > 0:
state.node_process.inactive.append(result['name'])

if not state.las_reader.input and not state.las_reader.active:
if not state.reader.input and not state.reader.active:
if state.node_process.active or state.node_process.input:
finished_node = result['name']
if not can_pnts_be_written(
Expand Down Expand Up @@ -546,14 +552,14 @@ def add_tasks_to_process(state, name, task, point_count):
if job_list:
zmq_send_to_process(zmq_idle_clients, zmq_skt, job_list)

while (state.las_reader.input and
(points_in_progress < 60000000 or not state.las_reader.active) and
len(state.las_reader.active) < max_splitting_jobs_count and
while (state.reader.input and
(points_in_progress < 60000000 or not state.reader.active) and
len(state.reader.active) < max_splitting_jobs_count and
can_queue_more_jobs(zmq_idle_clients)):
if args.verbose >= 1:
print('Submit next portion {}'.format(state.las_reader.input[-1]))
_id = 'root_{}'.format(len(state.las_reader.input)).encode('ascii')
file, portion = state.las_reader.input.pop()
print('Submit next portion {}'.format(state.reader.input[-1]))
_id = 'root_{}'.format(len(state.reader.input)).encode('ascii')
file, portion = state.reader.input.pop()
points_in_progress += portion[1] - portion[0]

zmq_send_to_process(zmq_idle_clients, zmq_skt, [pickle.dumps({
Expand All @@ -563,7 +569,7 @@ def add_tasks_to_process(state, name, task, point_count):
'id': _id
})])

state.las_reader.active.append(_id)
state.reader.active.append(_id)

# if at this point we have no work in progress => we're done
if len(zmq_idle_clients) == args.jobs or zmq_processes_killed == args.jobs:
Expand Down Expand Up @@ -609,7 +615,7 @@ def add_tasks_to_process(state, name, task, point_count):
print('')
print('Pending:')
print(' - root: {} / {}'.format(
len(state.las_reader.input),
len(state.reader.input),
initial_portion_count))
print(' - other: {} files for {} nodes'.format(
sum([len(f[0]) for f in state.node_process.input.values()]),
Expand Down
151 changes: 151 additions & 0 deletions py3dtiles/points/task/xyz_reader.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,151 @@
import numpy as np
import math
import traceback
import pyproj
import struct
from pickle import dumps as pdumps


def init(args):
    """Scan all input .xyz files and gather global metadata.

    Reads every file listed in ``args.files`` in 10 000-line batches to
    compute the overall axis-aligned bounding box, the total point count,
    and a list of ~1M-point file portions that workers can later process
    independently (each portion carries the byte offset of its first line).

    :param args: parsed CLI arguments; uses ``files``, ``fraction``,
        ``srs_in``, ``srs_out`` and (optionally) ``color_scale``.
    :return: dict with ``portions``, ``aabb``, ``color_scale``, ``srs_in``,
        ``point_count`` and ``avg_min`` keys.
    :raises ValueError: if no point could be read from any input file.
    :raises Exception: if ``--srs_out`` is requested but no input srs is
        available.
    """
    aabb = None
    total_point_count = 0
    pointcloud_file_portions = []
    # argparse.Namespace supports `in` membership tests on attribute names
    color_scale = args.color_scale if "color_scale" in args else None

    input_srs = args.srs_in

    for filename in args.files:
        try:
            f = open(filename, "r")
        except OSError as e:
            # was: "Error opening (unknown). Skipping." — the format string
            # had no placeholder, so the filename was never shown
            print("Error opening {}. Skipping.".format(filename))
            print(e)
            continue

        count = 0
        seek_values = []
        with f:  # make sure the file handle is released on every path
            while True:
                batch = 10000
                points = np.zeros((batch, 3))

                offset = f.tell()
                for i in range(batch):
                    line = f.readline()
                    if not line:
                        # shrink to the number of lines actually read
                        points = np.resize(points, (i, 3))
                        break
                    points[i] = [float(s) for s in line.split(" ")]

                if points.shape[0] == 0:
                    break

                # remember the byte offset of every 1M-point boundary
                # (the 10k batch size divides 1M, so boundaries always
                # fall on a batch start)
                if not count % 1000000:
                    seek_values += [offset]

                count += points.shape[0]
                batch_aabb = np.array(
                    [np.min(points, axis=0), np.max(points, axis=0)])

                # Grow the global bounding box
                if aabb is None:
                    aabb = batch_aabb
                else:
                    aabb[0] = np.minimum(aabb[0], batch_aabb[0])
                    aabb[1] = np.maximum(aabb[1], batch_aabb[1])

        # We need an exact point count, scaled by the sampling fraction
        total_point_count += count * args.fraction / 100

        if count:  # an empty file would divide by zero below
            _1M = min(count, 1000000)
            steps = math.ceil(count / _1M)
            assert steps == len(seek_values)
            portions = [
                (i * _1M, min(count, (i + 1) * _1M), seek_values[i])
                for i in range(steps)
            ]
            for p in portions:
                pointcloud_file_portions += [(filename, p)]

        if args.srs_out is not None and input_srs is None:
            raise Exception(
                "'{}' file doesn't contain srs information. Please use the --srs_in option to declare it.".format(
                    filename
                )
            )

    if aabb is None:
        # previously this fell through to `aabb[0]` and crashed with a
        # cryptic TypeError when no input file was readable
        raise ValueError("No point could be read from the input files.")

    return {
        "portions": pointcloud_file_portions,
        "aabb": aabb,
        "color_scale": color_scale,
        "srs_in": input_srs,
        "point_count": total_point_count,
        "avg_min": aabb[0],
    }


def run(_id, filename, offset_scale, portion, queue, projection, verbose):
    """Read one portion of a .xyz file and stream its points to *queue*.

    Sends one multipart message per chunk ([name, pickled {xyz, rgb},
    big-endian uint32 count]), then an end-of-task message carrying *_id*,
    then an empty idle notification.

    :param _id: opaque task id (bytes), echoed back in the end message.
    :param filename: path of the .xyz file.
    :param offset_scale: ((ox, oy, oz), (sx, sy, sz), matrix_or_None) —
        applied as ``(coord + offset) * scale``, then optionally multiplied
        by the transformation matrix.
    :param portion: (start_index, end_index, byte_offset) slice of the file.
    :param queue: zmq socket (anything with ``send_multipart``).
    :param projection: optional (src, dst) pyproj projections; skipped
        when falsy.
    :param verbose: verbosity level (currently unused here).
    """
    try:
        with open(filename, "r") as f:  # was left open on exceptions
            point_count = portion[1] - portion[0]

            # process in chunks of ~10% of the portion, at least 100k points
            step = min(point_count, max(point_count // 10, 100000))

            f.seek(portion[2])

            for start in range(0, point_count, step):
                # Never read past the end of this portion: the last chunk
                # may be smaller than `step` (the previous code over-read
                # into the next portion, duplicating points).
                chunk = min(step, point_count - start)
                points = np.zeros((chunk, 3), dtype=np.float32)

                for j in range(chunk):
                    line = f.readline()
                    if not line:
                        # file ended early: keep only the lines read
                        points = np.resize(points, (j, 3))
                        break
                    points[j] = [float(s) for s in line.split(" ")]

                x, y, z = (points[:, c] for c in (0, 1, 2))

                if projection:
                    x, y, z = pyproj.transform(projection[0], projection[1], x, y, z)

                x = (x + offset_scale[0][0]) * offset_scale[1][0]
                y = (y + offset_scale[0][1]) * offset_scale[1][1]
                z = (z + offset_scale[0][2]) * offset_scale[1][2]

                coords = np.vstack((x, y, z)).transpose()

                if offset_scale[2] is not None:
                    # Apply transformation matrix (because the tile's transform will contain
                    # the inverse of this matrix)
                    coords = np.dot(coords, offset_scale[2])

                coords = np.ascontiguousarray(coords.astype(np.float32))

                # .xyz carries no color: default to white, one entry per
                # point actually read (was sized with the whole portion's
                # point_count, mismatching `coords` on partial chunks)
                colors = np.full((len(coords), 3), 255, dtype=np.uint8)

                queue.send_multipart(
                    [
                        "".encode("ascii"),
                        pdumps({"xyz": coords, "rgb": colors}),
                        struct.pack(">I", len(coords)),
                    ],
                    copy=False,
                )

        queue.send_multipart([pdumps({"name": _id, "total": 0})])
        # notify we're idle
        queue.send_multipart([b""])
    except Exception as e:
        print("Exception while reading points from xyz file")
        print(e)
        traceback.print_exc()

0 comments on commit 1347272

Please sign in to comment.