This repository has been archived by the owner on Aug 18, 2022. It is now read-only.

Merge branch 'feature/fn-change-scaling'
tmontaigu committed Oct 5, 2020
2 parents 2542a8a + a32ef37 commit 12f32af
Showing 2 changed files with 38 additions and 9 deletions.
11 changes: 11 additions & 0 deletions pylas/lasdatas/base.py
@@ -94,6 +94,17 @@ def points(self, new_points):
         self._points = new_points
         self.update_header()
 
+    def change_scaling(self, scales=None, offsets=None) -> None:
+        if scales is None:
+            scales = self.header.scales
+        if offsets is None:
+            offsets = self.header.offsets
+
+        record.apply_new_scaling(self, scales, offsets)
+
+        self.header.scales = scales
+        self.header.offsets = offsets
+
     def __getattr__(self, item):
         """Automatically called by Python when the attribute
         named 'item' is not found. We use this function to forward the call to the
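For context (not part of the commit): a minimal usage sketch of the new LasData.change_scaling method, assuming any LAS file readable by pylas; the file name and the chosen scales/offsets below are hypothetical. Changing the scaling rewrites the stored integer X/Y/Z from the new scales and offsets, so the real-world coordinates las.x/y/z stay the same up to the rounding step of the new scale.

import numpy as np
import pylas

las = pylas.read("lidar_tile.las")   # hypothetical input file
x_before = np.array(las.x)           # real-world x coordinates before rescaling

# Re-quantize to millimetre precision with zero offsets.
las.change_scaling(scales=[0.001, 0.001, 0.001], offsets=[0.0, 0.0, 0.0])

assert np.allclose(las.header.scales, [0.001, 0.001, 0.001])
assert np.allclose(np.array(las.x), x_before, atol=0.001)  # unchanged up to new rounding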
36 changes: 27 additions & 9 deletions pylas/point/record.py
@@ -5,7 +5,7 @@
 """
 import logging
 from abc import ABC, abstractmethod
-from typing import NoReturn
+from typing import NoReturn, Any
 
 import numpy as np
 
@@ -17,8 +17,16 @@
 logger = logging.getLogger(__name__)
 
 
+def scale_dimension(array_dim, scale, offset):
+    return (array_dim * scale) + offset
+
+
+def unscale_dimension(array_dim, scale, offset):
+    return np.round((np.array(array_dim) - offset) / scale)
+
+
 def raise_not_enough_bytes_error(
-        expected_bytes_len, missing_bytes_len, point_data_buffer_len, points_dtype
+    expected_bytes_len, missing_bytes_len, point_data_buffer_len, points_dtype
 ) -> NoReturn:
     raise errors.PylasError(
         "The file does not contain enough bytes to store the expected number of points\n"
@@ -348,12 +356,29 @@ def __repr__(self):
         )
 
 
+def apply_new_scaling(record, scales, offsets) -> None:
+    record['X'] = unscale_dimension(np.asarray(record.x), scales[0], offsets[0])
+    record['Y'] = unscale_dimension(np.asarray(record.y), scales[1], offsets[1])
+    record['Z'] = unscale_dimension(np.asarray(record.z), scales[2], offsets[2])
+
+
 class ScaleAwarePointRecord(PackedPointRecord):
     def __init__(self, array, point_format, scales, offsets):
         super().__init__(array, point_format)
         self.scales = scales
         self.offsets = offsets
 
+    def change_scaling(self, scales=None, offsets=None) -> None:
+        if scales is None:
+            scales = self.scales
+        if offsets is None:
+            offsets = self.offsets
+
+        apply_new_scaling(self, scales, offsets)
+
+        self.scales = scales
+        self.offsets = offsets
+
     def __getitem__(self, item):
         if isinstance(item, (slice, np.ndarray)):
             return ScaleAwarePointRecord(
@@ -369,10 +394,3 @@ def __getitem__(self, item):
         else:
             return super().__getitem__(item)
 
-
-def scale_dimension(array_dim, scale, offset):
-    return (array_dim * scale) + offset
-
-
-def unscale_dimension(array_dim, scale, offset):
-    return np.round((np.array(array_dim) - offset) / scale)
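The two moved helpers encode the LAS coordinate convention: the file stores integer X/Y/Z, and the real-world value is integer * scale + offset, so unscale_dimension is the rounded inverse of scale_dimension. A standalone sketch of that round trip (the formulas are copied from the diff above rather than imported, and the sample values are made up):

import numpy as np

def scale_dimension(array_dim, scale, offset):
    # stored integer values -> real-world coordinates
    return (array_dim * scale) + offset

def unscale_dimension(array_dim, scale, offset):
    # real-world coordinates -> stored integer values, rounded to the grid
    return np.round((np.array(array_dim) - offset) / scale)

xs = np.array([123.456, 789.012])           # real-world x coordinates
raw = unscale_dimension(xs, 0.001, 100.0)   # -> 23456.0, 689012.0
back = scale_dimension(raw, 0.001, 100.0)   # reconstructed coordinates

assert np.all(np.abs(back - xs) <= 0.0005)  # exact up to half a scale step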
