
Commit

Merge remote-tracking branch 'origin/master' into issue-2350
abellgithub committed Jan 24, 2019
2 parents d9d6b87 + 755ac1a commit f2188f0
Showing 42 changed files with 590 additions and 346 deletions.
1 change: 1 addition & 0 deletions CMakeLists.txt
@@ -129,6 +129,7 @@ include(${PDAL_CMAKE_DIR}/json.cmake)
include(${PDAL_CMAKE_DIR}/libxml2.cmake)
include(${PDAL_CMAKE_DIR}/dimension.cmake)
include(${PDAL_CMAKE_DIR}/arbiter.cmake)
include(${PDAL_CMAKE_DIR}/openssl.cmake) # Optional

#------------------------------------------------------------------------------
# generate the pdal_features.hpp header
3 changes: 2 additions & 1 deletion appveyor.yml
@@ -28,6 +28,7 @@ install:
- set PATH=%CONDA_ROOT%;%CONDA_ROOT%\\scripts;%CONDA_ROOT%\\Library\\bin;%PATH%;C:\\Program Files (x86)\\CMake\\bin
- conda config --set always_yes yes
- conda config --add channels conda-forge
- conda config --add channels anaconda
- conda update -q conda
- conda config --set auto_update_conda no
- conda update -q --all
@@ -36,7 +37,7 @@ install:
- python -c "import sys; print(sys.executable)"
- python -c "import sys; print(sys.prefix)"
- call "%CONDA_ROOT%\Scripts\activate.bat" base
- conda install geotiff laszip nitro curl gdal=2.2.4 pcl cmake eigen ninja libgdal geos zstd numpy=1.15.3 xz libxml2 laz-perf qhull sqlite hdf5 oracle-instantclient numpy-base=1.15.3
- conda install geotiff laszip nitro curl gdal pcl cmake eigen ninja libgdal geos zstd numpy xz libxml2 laz-perf qhull sqlite hdf5 oracle-instantclient numpy-base
- call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" amd64
- call scripts\\appveyor\\config.cmd

4 changes: 2 additions & 2 deletions cmake/config.cmake
@@ -1,13 +1,13 @@
set(INCLUDE_INSTALL_DIR include/ CACHE PATH "include")
set(LIB_INSTALL_DIR lib/ CACHE PATH "lib")
set(LIB_INSTALL_DIR ${PDAL_LIB_INSTALL_DIR} CACHE PATH "lib")
set(SYSCONFIG_INSTALL_DIR etc/pdal/ CACHE PATH "sysconfig")

include(CMakePackageConfigHelpers)

set(PDAL_CONFIG_INCLUDE_DIRS
"${CMAKE_INSTALL_PREFIX}/include")
set(PDAL_CONFIG_LIBRARY_DIRS
"${CMAKE_INSTALL_PREFIX}/lib")
"${CMAKE_INSTALL_PREFIX}/${LIB_INSTALL_DIR}")

configure_package_config_file(
PDALConfig.cmake.in
12 changes: 9 additions & 3 deletions cmake/macros.cmake
@@ -92,6 +92,7 @@ endmacro(PDAL_ADD_EXECUTABLE)
# ARGN :
# FILES the source files for the plugin
# LINK_WITH link plugin with libraries
# INCLUDES header directories
#
# The "generate_dimension_hpp" ensures that Dimension.hpp is built before
# attempting to build anything else in the "library".
@@ -101,7 +102,7 @@ endmacro(PDAL_ADD_EXECUTABLE)
macro(PDAL_ADD_PLUGIN _name _type _shortname)
set(options)
set(oneValueArgs)
set(multiValueArgs FILES LINK_WITH)
set(multiValueArgs FILES LINK_WITH INCLUDES)
cmake_parse_arguments(PDAL_ADD_PLUGIN "${options}" "${oneValueArgs}"
"${multiValueArgs}" ${ARGN})
if(WIN32)
@@ -118,7 +119,9 @@ macro(PDAL_ADD_PLUGIN _name _type _shortname)
pdal_target_compile_settings(${${_name}})
target_include_directories(${${_name}} PRIVATE
${PROJECT_BINARY_DIR}/include
${PDAL_INCLUDE_DIR})
${PDAL_INCLUDE_DIR}
${PDAL_ADD_PLUGIN_INCLUDES}
)
target_link_libraries(${${_name}}
PRIVATE
${PDAL_BASE_LIB_NAME}
@@ -149,10 +152,12 @@ endmacro(PDAL_ADD_PLUGIN)
# ARGN :
# FILES the source files for the test
# LINK_WITH link test executable with libraries
# INCLUDES header file directories
#
macro(PDAL_ADD_TEST _name)
set(options)
set(oneValueArgs)
set(multiValueArgs FILES LINK_WITH)
set(multiValueArgs FILES LINK_WITH INCLUDES)
cmake_parse_arguments(PDAL_ADD_TEST "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if (WIN32)
list(APPEND ${PDAL_ADD_TEST_FILES} ${PDAL_TARGET_OBJECTS})
@@ -164,6 +169,7 @@ macro(PDAL_ADD_TEST _name)
target_include_directories(${_name} PRIVATE
${ROOT_DIR}
${PDAL_INCLUDE_DIR}
${PDAL_ADD_TEST_INCLUDES}
${PROJECT_SOURCE_DIR}/test/unit
${PROJECT_BINARY_DIR}/test/unit
${PROJECT_BINARY_DIR}/include)
4 changes: 2 additions & 2 deletions cmake/modules/FindGeoTIFF.cmake
@@ -32,10 +32,10 @@ IF(WIN32)
ENDIF()

ENDIF()

FIND_PATH(GEOTIFF_INCLUDE_DIR
geotiff.h
PATH_SUFFIXES geotiff
PATH_SUFFIXES geotiff libgeotiff
PATHS
${OSGEO4W_INCLUDE_DIR})

8 changes: 8 additions & 0 deletions cmake/openssl.cmake
@@ -0,0 +1,8 @@
find_package(OpenSSL 1.0.1)
if (OPENSSL_FOUND)
set(ARBITER_OPENSSL TRUE)
else()
# For me this is /usr/local/opt/openssl\@1.1
message("OpenSSL NOT found - `export OPENSSL_ROOT_DIR=___`")
message("Google storage IO will not be available")
endif()
2 changes: 1 addition & 1 deletion doc/pipeline.rst
@@ -47,7 +47,7 @@ writer from filenames, and able to be specified as a set of sequential steps:
"pipeline":[
"input.las",
{
"type":"crop",
"type":"filters.crop",
"bounds":"([0,100],[0,100])"
},
"output.bpf"
5 changes: 5 additions & 0 deletions doc/stages/readers.ept.rst
@@ -39,6 +39,11 @@ filename
bounds
The extents of the resource to select in 2 or 3 dimensions, expressed as a string, e.g.: ``([xmin, xmax], [ymin, ymax], [zmin, zmax])``. If omitted, the entire dataset will be selected.

resolution
A point resolution limit to select, expressed as a grid cell edge length. Units correspond to resource coordinate system units. For example, for a coordinate system expressed in meters, a ``resolution`` value of ``0.1`` will select points up to a ground resolution of 100 points per square meter.

The resulting resolution may not be exactly this value: the minimum possible resolution that is at *least* as precise as the requested resolution will be selected. Therefore the result may be a bit more precise than requested.

origin
EPT datasets are lossless aggregations of potentially multiple source files. The *origin* options can be used to select all points from a single source file. This option may be specified as a string or an integral ID.

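
As an editor's illustration of the ``resolution`` option described above (not part of this commit), the sketch below mirrors the depth selection added to io/EptReader.cpp further down: the root cell edge is taken as the X extent of the bounds divided by the dataset span, and each additional octree depth halves it until the edge is at least as fine as the request. The function name and parameters are hypothetical.

    #include <cstdint>
    #include <iostream>

    // Non-inclusive depth limit for a requested resolution; zero means
    // "no limit", matching the m_depthEnd convention in the reader.
    uint64_t depthEndFor(double queryResolution, double minx, double maxx,
                         uint64_t span)
    {
        if (queryResolution <= 0)
            return 0;                            // No resolution limit requested.

        double current = (maxx - minx) / span;   // Cell edge length at depth 0.
        uint64_t depthEnd = 1;                   // One past depth 0 (non-inclusive).
        while (current > queryResolution)
        {
            current /= 2;                        // Each depth halves the cell edge...
            ++depthEnd;                          // ...and admits one more level.
        }
        return depthEnd;                         // Final `current` is <= the request.
    }

    int main()
    {
        // A 1000 m wide resource with span 256 has a ~3.9 m root resolution;
        // requesting 0.5 m walks 3.9 -> 1.95 -> 0.98 -> 0.49 and prints 4,
        // i.e. depths 0 through 3 are selected.
        std::cout << depthEndFor(0.5, 0.0, 1000.0, 256) << "\n";
    }
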
4 changes: 2 additions & 2 deletions doc/stages/readers.i3s.rst
@@ -15,15 +15,15 @@ This example will download the Autzen dataset from the arcgis scene server and o
"pipeline":[
{
"type": "readers.i3s",
"filename": "https://tiles.arcgis.com/tiles/arcgis/rest/services/AUTZEN_LiDAR/SceneServer",
"filename": "https://tiles.arcgis.com/tiles/8cv2FuXuWSfF0nbL/arcgis/rest/services/AUTZEN_LiDAR/SceneServer",
"bounds": "([-123.075542,-123.06196],[44.049719,44.06278])"
}
]
}
.. code::
pdal traslate i3s://https://tiles.arcgis.com/tiles/arcgis/rest/services/AUTZEN_LiDAR/SceneServer \
pdal translate i3s://https://tiles.arcgis.com/tiles/8cv2FuXuWSfF0nbL/arcgis/rest/services/AUTZEN_LiDAR/SceneServer \
autzen.las \
--readers.i3s.threads=64 \
--readers.i3s.bounds="([-123.075542,-123.06196],[44.049719,44.06278])"
2 changes: 0 additions & 2 deletions doc/stages/writers.ply.rst
@@ -11,8 +11,6 @@ mesh suitable for output as faces.

.. embed::

.. streamable::

Example
-------

41 changes: 33 additions & 8 deletions io/EptReader.cpp
@@ -112,6 +112,7 @@ void EptReader::addArgs(ProgramArgs& args)
args.add("bounds", "Bounds to fetch", m_args.boundsArg());
args.add("origin", "Origin of source file to fetch", m_args.originArg());
args.add("threads", "Number of worker threads", m_args.threadsArg());
args.add("resolution", "Resolution limit", m_args.resolutionArg());
}

BOX3D EptReader::Args::bounds() const
@@ -149,21 +150,44 @@ void EptReader::initialize()
m_arbiter.reset(new arbiter::Arbiter());
m_ep.reset(new arbiter::Endpoint(m_arbiter->getEndpoint(m_root)));
m_pool.reset(new Pool(m_args.threads()));
log()->get(LogLevel::Debug) << "Endpoint: " << m_ep->prefixedRoot() <<
std::endl;
auto& debug(log()->get(LogLevel::Debug));

debug << "Endpoint: " << m_ep->prefixedRoot() << std::endl;
m_info.reset(new EptInfo(parse(m_ep->get("ept.json"))));
log()->get(LogLevel::Debug) << "Got EPT info" << std::endl;

debug << "Got EPT info" << std::endl;
debug << "SRS: " << m_info->srs() << std::endl;
setSpatialReference(m_info->srs());

// Figure out our query parameters.
m_queryBounds = m_args.bounds();
handleOriginQuery();

log()->get(LogLevel::Debug) << "Query bounds: " << m_queryBounds <<
std::endl;
log()->get(LogLevel::Debug) << "Threads: " << m_pool->size() << std::endl;
// Figure out our max depth.
const double queryResolution(m_args.resolution());
if (queryResolution)
{
double currentResolution =
(m_info->bounds().maxx - m_info->bounds().minx) / m_info->span();

debug << "Root resolution: " << currentResolution << std::endl;

// To select the current resolution level, we need depthEnd to be one
// beyond it - this is a non-inclusive parameter.
++m_depthEnd;

while (currentResolution > queryResolution)
{
currentResolution /= 2;
++m_depthEnd;
}

debug << "Query resolution: " << queryResolution << "\n";
debug << "Actual resolution: " << currentResolution << "\n";
debug << "Depth end: " << m_depthEnd << "\n";
}

debug << "Query bounds: " << m_queryBounds << "\n";
debug << "Threads: " << m_pool->size() << std::endl;
}

void EptReader::handleOriginQuery()
@@ -193,7 +217,7 @@ void EptReader::handleOriginQuery()
// Otherwise it's a file path (or part of one - for example selecting
// by a basename or a tile ID rather than a full path is convenient).
// Find it within the sources list, and make sure it's specified
// uniquely enough to select only one filt.
// uniquely enough to select only one file.
for (Json::ArrayIndex i(0); i < sources.size(); ++i)
{
const Json::Value& entry(sources[i]);
@@ -352,6 +376,7 @@ void EptReader::overlaps()
void EptReader::overlaps(const Json::Value& hier, const Key& key)
{
if (!key.b.overlaps(m_queryBounds)) return;
if (m_depthEnd && key.d >= m_depthEnd) return;
const int64_t np(hier[key.toString()].asInt64());
if (!np) return;

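
To show how the non-inclusive m_depthEnd cutoff in EptReader::overlaps() above prunes the hierarchy walk, here is a simplified, self-contained sketch (an editor's illustration, not the reader's actual implementation). The real code also checks the query bounds and the per-key point counts from the EPT hierarchy JSON, both elided here, and the Key layout below is hypothetical.

    #include <cstdint>
    #include <functional>
    #include <iostream>

    // Minimal octree key: a depth plus integer cell coordinates.
    struct Key { uint64_t d = 0, x = 0, y = 0, z = 0; };

    // Visit keys depth-first, skipping anything at or beyond the
    // non-inclusive depthEnd limit (zero means unlimited).
    void walk(const Key& key, uint64_t depthEnd,
              const std::function<void(const Key&)>& visit)
    {
        if (depthEnd && key.d >= depthEnd)
            return;                              // Past the resolution-derived limit.
        visit(key);
        for (uint64_t i = 0; i < 8; ++i)         // Recurse into the eight children.
        {
            Key c;
            c.d = key.d + 1;
            c.x = key.x * 2 + (i & 1);
            c.y = key.y * 2 + ((i >> 1) & 1);
            c.z = key.z * 2 + ((i >> 2) & 1);
            walk(c, depthEnd, visit);
        }
    }

    int main()
    {
        uint64_t count = 0;
        walk(Key(), 3, [&count](const Key&) { ++count; });
        std::cout << count << " nodes visited\n"; // 1 + 8 + 64 = 73
    }
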
17 changes: 10 additions & 7 deletions io/EptReader.hpp
@@ -106,18 +106,20 @@ class PDAL_DLL EptReader : public Reader
{
public:
Bounds& boundsArg() { return m_bounds; }
BOX3D bounds() const;

std::string& originArg() { return m_originArg; }
const std::string& origin() const { return m_originArg; }

std::string& originArg() { return m_origin; }
uint64_t& threadsArg() { return m_threads; }
double& resolutionArg() { return m_resolution; }

BOX3D bounds() const;
std::string origin() const { return m_origin; }
uint64_t threads() const { return std::max<uint64_t>(4, m_threads); }
double resolution() const { return m_resolution; }

private:
Bounds m_bounds;
std::string m_originArg;
uint64_t m_threads;
std::string m_origin;
uint64_t m_threads = 0;
double m_resolution = 0;
};

Args m_args;
Expand All @@ -129,6 +131,7 @@ class PDAL_DLL EptReader : public Reader

std::set<Key> m_overlapKeys;
uint64_t m_overlapPoints = 0;
uint64_t m_depthEnd = 0; // Zero indicates selection of all depths.

std::unique_ptr<FixedPointLayout> m_remoteLayout;
DimTypeList m_dimTypes;
2 changes: 1 addition & 1 deletion io/LasWriter.cpp
@@ -960,14 +960,14 @@ bool LasWriter::writeLasZipBuf(PointRef& point)
p.extended_classification = classification;
p.extended_return_number = returnNumber;
p.extended_number_of_returns = numberOfReturns;

}
else
{
p.return_number = returnNumber;
p.number_of_returns = numberOfReturns;
p.scan_angle_rank = point.getFieldAs<int8_t>(Id::ScanAngleRank);
p.classification = classification;
p.extended_point_type = 0;
}

if (m_lasHeader.hasTime())
12 changes: 12 additions & 0 deletions io/private/EptSupport.hpp
@@ -103,6 +103,7 @@ class PDAL_DLL EptInfo
{
m_bounds = toBox3d(m_info["bounds"]);
m_points = m_info["points"].asUInt64();
m_span = m_info["span"].asUInt64();
m_srs = m_info["srs"]["wkt"].asString();

if (m_srs.empty())
@@ -131,6 +132,7 @@

const BOX3D& bounds() const { return m_bounds; }
uint64_t points() const { return m_points; }
uint64_t span() const { return m_span; }
DataType dataType() const { return m_dataType; }
const std::string& srs() const { return m_srs; }
const Json::Value& schema() const { return m_info["schema"]; }
@@ -151,9 +153,19 @@
const Json::Value& json() { return m_info; }

private:
// Info comes from the values here:
// https://entwine.io/entwine-point-tile.html#ept-json
const Json::Value m_info;
BOX3D m_bounds;
uint64_t m_points = 0;

// The span is the length, width, and depth of the octree grid. For
// example, a dataset oriented as a 256*256*256 octree grid would have a
// span of 256.
//
// See: https://entwine.io/entwine-point-tile.html#span
uint64_t m_span = 0;

DataType m_dataType;
std::string m_srs;
};
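
A small, hedged companion to the span comment above (again an editor's sketch, not commit content): given the bounds and span stored in EptInfo, the cell edge length at any octree depth follows in closed form. The helper name is hypothetical.

    #include <cmath>
    #include <cstdint>
    #include <iostream>

    // Edge length of one octree cell at the given depth, assuming the root
    // grid is span x span x span cells across the resource's X extent.
    double cellEdgeAtDepth(double minx, double maxx, uint64_t span,
                           uint64_t depth)
    {
        const double rootEdge = (maxx - minx) / span;    // Depth 0 cell edge.
        return rootEdge / std::pow(2.0, double(depth));  // Each depth halves it.
    }

    int main()
    {
        // For the 256-span example in the comment above, a 1024 m extent gives
        // 4 m cells at depth 0, 2 m at depth 1, and 1 m at depth 2.
        for (uint64_t d = 0; d < 3; ++d)
            std::cout << "depth " << d << ": "
                      << cellEdgeAtDepth(0.0, 1024.0, 256, d) << " m\n";
    }
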
8 changes: 8 additions & 0 deletions pdal/Reader.cpp
@@ -57,6 +57,14 @@ void Reader::readerAddArgs(ProgramArgs& args)

void Reader::setSpatialReference(MetadataNode& m, const SpatialReference& srs)
{
if (srs.empty() && !m_defaultSrs.empty())
{
// If an attempt comes in to clear the SRS but we have a default,
// revert to the default rather than clearing.
Stage::setSpatialReference(m, m_defaultSrs);
return;
}

if (getSpatialReference().empty() || m_overrideSrs.empty())
{
Stage::setSpatialReference(m, srs);
18 changes: 12 additions & 6 deletions plugins/cpd/CMakeLists.txt
@@ -7,15 +7,21 @@ set(files filters/CpdFilter.cpp)
set(include_dirs "${CMAKE_CURRENT_LIST_DIR}" "${PDAL_VENDOR_DIR}/eigen")

PDAL_ADD_PLUGIN(filter_libname filter cpd
FILES filters/CpdFilter.cpp
LINK_WITH Cpd::Library-C++
FILES
filters/CpdFilter.cpp
LINK_WITH
Cpd::Library-C++
INCLUDES
"${include_dirs}"
)
target_include_directories(${filter_libname} PRIVATE "${include_dirs}")

if(${WITH_TESTS})
PDAL_ADD_TEST(pdal_filters_cpd_test
FILES test/CpdFilterTest.cpp
LINK_WITH ${filter_libname}
FILES
test/CpdFilterTest.cpp
LINK_WITH
${filter_libname}
INCLUDES
"${include_dirs}"
)
target_include_directories(pdal_filters_cpd_test PRIVATE "${include_dirs}")
endif()
20 changes: 12 additions & 8 deletions plugins/fbx/CMakeLists.txt
@@ -26,17 +26,21 @@ if (FBX_FOUND)
io/FbxWriter.cpp
LINK_WITH
${FBX_LIBRARY}
${CORE_FOUNDATION})
target_include_directories(${writer_libname} PRIVATE
${PDAL_IO_DIR}
${FBX_INCLUDE_DIR})
${CORE_FOUNDATION}
INCLUDES
${PDAL_IO_DIR}
${FBX_INCLUDE_DIR}
)

if (WITH_TESTS)
PDAL_ADD_TEST(fbxtest
FILES test/FbxWriterTest.cpp
LINK_WITH ${writer_libname})
target_include_directories(fbxtest PRIVATE
${PDAL_IO_DIR})
FILES
test/FbxWriterTest.cpp
LINK_WITH
${writer_libname}
INCLUDES
${PDAL_IO_DIR}
)
endif()
else()
message(STATUS "Building without FBX support")
