diff --git a/doc/pipeline.rst b/doc/pipeline.rst index aebb600ab9..9787e599c7 100644 --- a/doc/pipeline.rst +++ b/doc/pipeline.rst @@ -129,7 +129,7 @@ files from a single pipeline. The crop filter creates two output point views "input.las", { "type" : "filters.crop", - "bounds" : [ "([0, 75], [0, 75])", "([50, 125], [50, 125])" ], + "bounds" : [ "([0, 75], [0, 75])", "([50, 125], [50, 125])" ] }, "output#.las" ] diff --git a/doc/python.rst b/doc/python.rst index 3620fc75a6..43a4949857 100644 --- a/doc/python.rst +++ b/doc/python.rst @@ -73,15 +73,14 @@ results as `Numpy`_ arrays: json = """ - { - [ + [ "1.2-with-color.las", { "type": "filters.sort", "dimension": "X" } - ] - }""" + ] + """ import pdal pipeline = pdal.Pipeline(json) diff --git a/doc/references.rst b/doc/references.rst index 2c9c6797eb..af232a9988 100644 --- a/doc/references.rst +++ b/doc/references.rst @@ -34,6 +34,8 @@ Reference .. [Alexa2003] Alexa, Marc, et al. "Computing and rendering point set surfaces." Visualization and Computer Graphics, IEEE Transactions on 9.1 (2003): 3-15. +.. [Bartels2010] Bartels, Marc, and Hong Wei. "Threshold-free object and ground point separation in LIDAR data." Pattern recognition letters 31.10 (2010): 1089-1099. + .. [Breunig2000] Breunig, M.M., Kriegel, H.-P., Ng, R.T., Sander, J., 2000. LOF: Identifying Density-Based Local Outliers. Proc. 2000 Acm Sigmod Int. Conf. Manag. Data 1–12. .. [Chen2012] Chen, Ziyue et al. “Upward-Fusion Urban DTM Generating Method Using Airborne Lidar Data.” ISPRS Journal of Photogrammetry and Remote Sensing 72 (2012): 121–130. 
diff --git a/doc/stages/filters.chipper.rst b/doc/stages/filters.chipper.rst index 67648aec88..02ba0fb411 100644 --- a/doc/stages/filters.chipper.rst +++ b/doc/stages/filters.chipper.rst @@ -46,7 +46,7 @@ Example "example.las", { "type":"filters.chipper", - "capacity":"400", + "capacity":"400" }, { "type":"writers.pgpointcloud", diff --git a/doc/stages/filters.hexbin.rst b/doc/stages/filters.hexbin.rst index 0f298c9806..541979c054 100644 --- a/doc/stages/filters.hexbin.rst +++ b/doc/stages/filters.hexbin.rst @@ -117,3 +117,9 @@ threshold precision Minimum number of significant digits to use in writing out the well-known text of the boundary polygon. [Default: 8] + +preserve_topology + Use GEOS SimplifyPreserveTopology instead of Simplify for polygon simplification with `smooth` option. [Default: true] + +smooth + Use GEOS simplify operations to smooth boundary to a tolerance [Default: true] diff --git a/doc/stages/filters.merge.rst b/doc/stages/filters.merge.rst index 25f8ae5196..a93463b11b 100644 --- a/doc/stages/filters.merge.rst +++ b/doc/stages/filters.merge.rst @@ -50,7 +50,7 @@ contains the points in "utm3.las". .. code-block:: json [ - "utm1.las" + "utm1.las", "utm2.las", "utm3.las", "out#.las" @@ -65,7 +65,7 @@ and "utm2.las", while "out2.las" contains the points from "utm3.las". .. code-block:: json [ - "utm1.las" + "utm1.las", "utm2.las", { "type" : "filters.merge" diff --git a/doc/stages/filters.miniball.rst b/doc/stages/filters.miniball.rst index 4fa755d50d..9f3deb5537 100644 --- a/doc/stages/filters.miniball.rst +++ b/doc/stages/filters.miniball.rst @@ -41,7 +41,7 @@ outlier. { "type":"filters.miniball", "knn":8 - } + }, "output.laz" ] diff --git a/doc/stages/filters.planefit.rst b/doc/stages/filters.planefit.rst index 580367890a..1c300d5679 100644 --- a/doc/stages/filters.planefit.rst +++ b/doc/stages/filters.planefit.rst @@ -43,7 +43,7 @@ outlier. 
{ "type":"filters.planefit", "knn":8 - } + }, "output.laz" ] diff --git a/doc/stages/filters.rst b/doc/stages/filters.rst index 922ebbf150..4db1c60760 100644 --- a/doc/stages/filters.rst +++ b/doc/stages/filters.rst @@ -55,6 +55,7 @@ invalidate an existing KD-tree. filters.pmf filters.radialdensity filters.reciprocity + filters.skewnessbalancing filters.smrf :ref:`filters.approximatecoplanar` @@ -132,6 +133,9 @@ invalidate an existing KD-tree. Compute the percentage of points that are considered uni-directional neighbors of a point. +:ref:`filters.skewnessbalancing` + Label ground/non-ground returns using [Bartels2010]_. + :ref:`filters.smrf` Label ground/non-ground returns using [Pingel2013]_. diff --git a/doc/stages/filters.shell.rst b/doc/stages/filters.shell.rst index 1b5d38bd3f..24a97172be 100644 --- a/doc/stages/filters.shell.rst +++ b/doc/stages/filters.shell.rst @@ -55,7 +55,7 @@ command to construct overview bands for the data using average interpolation. { "type":"filters.shell", "command" : "gdaladdo -r average output-2m.tif 2 4 8 16" - } + }, { "type":"filters.shell", "command" : "gdaladdo -r average output-5m.tif 2 4 8 16" diff --git a/doc/stages/filters.skewnessbalancing.rst b/doc/stages/filters.skewnessbalancing.rst new file mode 100644 index 0000000000..8cdd967b13 --- /dev/null +++ b/doc/stages/filters.skewnessbalancing.rst @@ -0,0 +1,46 @@ +.. _filters.skewnessbalancing: + +filters.skewnessbalancing +=============================================================================== + +**Skewness Balancing** classifies ground points based on the approach outlined +in [Bartels2010]_. + +.. embed:: + +.. note:: + + For Skewness Balancing to work well, the scene being processed needs to be + quite flat, otherwise many above ground features will begin to be included + in the ground surface. 
+ +Example +------- + +The sample pipeline below uses the Skewness Balancing filter to segment ground +and non-ground returns, using default options, and writing only the ground +returns to the output file. + +.. code-block:: json + + [ + "input.las", + { + "type":"filters.skewnessbalancing" + }, + { + "type":"filters.range", + "limits":"Classification[2:2]" + }, + "output.laz" + ] + +Options +------------------------------------------------------------------------------- + +.. note:: + + The Skewness Balancing method is touted as being threshold-free. We may + still in the future add convenience parameters that are common to other + ground segmentation filters, such as ``returns`` or ``ignore`` to limit the + points under consideration for filtering. diff --git a/doc/stages/filters.stats.rst b/doc/stages/filters.stats.rst index ebcd1bc440..6fb25caf08 100644 --- a/doc/stages/filters.stats.rst +++ b/doc/stages/filters.stats.rst @@ -33,14 +33,16 @@ Example Options ------- -_`dimensions` +.. _stats-dimensions: + +dimensions A comma-separated list of dimensions whose statistics should be processed. If not provided, statistics for all dimensions are calculated. _`enumerate` A comma-separated list of dimensions whose values should be enumerated. Note that this list does not add to the list of dimensions that may be - provided in the dimensions_ option. + provided in the :ref:`dimensions ` option. count Identical to the enumerate_ option, but provides a count of the number diff --git a/doc/stages/readers.gdal.rst b/doc/stages/readers.gdal.rst index 744b575b17..45c72cad0e 100644 --- a/doc/stages/readers.gdal.rst +++ b/doc/stages/readers.gdal.rst @@ -48,8 +48,7 @@ RGB values of an `ASPRS LAS`_ file using :ref:`writers.las`. 
{ "type":"readers.gdal", "filename":"./pdal/test/data/autzen/autzen.jpg", - "header", "Red, Green, Blue" - + "header": "Red, Green, Blue" }, { "type":"writers.text", diff --git a/doc/stages/readers.las.rst b/doc/stages/readers.las.rst index 94159bfc8a..2bfd86dda1 100644 --- a/doc/stages/readers.las.rst +++ b/doc/stages/readers.las.rst @@ -63,7 +63,7 @@ Example }, { "type":"writers.text", - "filename":"outputfile.txt", + "filename":"outputfile.txt" } ] diff --git a/doc/stages/readers.pgpointcloud.rst b/doc/stages/readers.pgpointcloud.rst index 8e3262d3dd..c5635b0940 100644 --- a/doc/stages/readers.pgpointcloud.rst +++ b/doc/stages/readers.pgpointcloud.rst @@ -25,7 +25,7 @@ Example "table":"lidar", "column":"pa", "spatialreference":"EPSG:26910", - "where":"PC_Intersects(pa, ST_MakeEnvelope(560037.36, 5114846.45, 562667.31, 5118943.24, 26910))", + "where":"PC_Intersects(pa, ST_MakeEnvelope(560037.36, 5114846.45, 562667.31, 5118943.24, 26910))" }, { "type":"writers.text", diff --git a/doc/stages/writers.bpf.rst b/doc/stages/writers.bpf.rst index bfdd67e729..5078b9a34e 100644 --- a/doc/stages/writers.bpf.rst +++ b/doc/stages/writers.bpf.rst @@ -19,7 +19,7 @@ Example [ { - "type":"readers.bpf" + "type":"readers.bpf", "filename":"inputfile.las" }, { diff --git a/doc/stages/writers.e57.rst b/doc/stages/writers.e57.rst index 7f0e3f8ab6..197d8c9f56 100644 --- a/doc/stages/writers.e57.rst +++ b/doc/stages/writers.e57.rst @@ -40,7 +40,7 @@ Example { "type":"writers.e57", "filename":"outputfile.e57", - "doublePrecision":false + "doublePrecision":false } ] diff --git a/doc/stages/writers.fbx.rst b/doc/stages/writers.fbx.rst index ca65310fca..9387da47a2 100644 --- a/doc/stages/writers.fbx.rst +++ b/doc/stages/writers.fbx.rst @@ -36,7 +36,7 @@ Example [ { - "type":"readers.las" + "type":"readers.las", "filename":"inputfile.las" }, { diff --git a/doc/stages/writers.null.rst b/doc/stages/writers.null.rst index 46abb12208..5ab1f5f622 100644 --- a/doc/stages/writers.null.rst +++ 
b/doc/stages/writers.null.rst @@ -24,7 +24,7 @@ Example "type":"filters.hexbin" }, { - "type":"writers.null", + "type":"writers.null" } ] diff --git a/doc/stages/writers.tiledb.rst b/doc/stages/writers.tiledb.rst index 4f5f820969..ffb546af17 100644 --- a/doc/stages/writers.tiledb.rst +++ b/doc/stages/writers.tiledb.rst @@ -1,6 +1,6 @@ .. _writers.tiledb: -readers.tiledb +writers.tiledb ============== Implements `TileDB`_ 1.4.1+ reads from an array. diff --git a/doc/tutorial/reading.rst b/doc/tutorial/reading.rst index fe0d3e5eee..0e6f6be037 100644 --- a/doc/tutorial/reading.rst +++ b/doc/tutorial/reading.rst @@ -74,7 +74,7 @@ at all in the destination format. For example, some formats don't support spatial references for point data, some have no metadata support and others have limited :ref:`dimension <dimensions>` support. Even when data types are supported in both source and destination formats, there may be limitations -with regard to data type, precision or , scaling. PDAL attempts to convert +with regard to data type, precision, or scaling. PDAL attempts to convert data as accurately as possible, but you should make sure that you're aware of the capabilities of the data formats you're using. 
diff --git a/doc/type-table.csv b/doc/type-table.csv index 28948dbf3a..5d027621ca 100644 --- a/doc/type-table.csv +++ b/doc/type-table.csv @@ -4,7 +4,7 @@ "Signed Integer", "64", "``int64``, ``int64_t``, ``long``" "Unsigned Integer", "8", "``uint8``, ``uint8_t``, ``uchar``" "Unsigned Integer", "16", "``uint16``, ``uint16_t``, ``ushort``" -"Unsigned Integer", "16", "``uint32``, ``uint32_t``, ``uint``" -"Unsigned Integer", "16", "``uint64``, ``uint64_t``, ``ulong``" +"Unsigned Integer", "32", "``uint32``, ``uint32_t``, ``uint``" +"Unsigned Integer", "64", "``uint64``, ``uint64_t``, ``ulong``" "Floating Point", "32", "``float``, ``float32``" "Floating Point", "64", "``double``, ``float64``" diff --git a/filters/HexBinFilter.cpp b/filters/HexBinFilter.cpp index 35827f5dbe..ec6833b2f5 100644 --- a/filters/HexBinFilter.cpp +++ b/filters/HexBinFilter.cpp @@ -83,6 +83,7 @@ void HexBin::addArgs(ProgramArgs& args) m_cullArg = &args.add("hole_cull_area_tolerance", "Tolerance area to " "apply to holes before cull", m_cullArea); args.add("smooth", "Smooth boundary output", m_doSmooth, true); + args.add("preserve_topology", "Preserve topology when smoothing", m_preserve_topology, true); } @@ -236,7 +237,7 @@ void HexBin::done(PointTableRef table) } if (m_doSmooth) - p.simplify(tolerance, cull); + p.simplify(tolerance, cull, m_preserve_topology); std::string boundary_text = p.wkt(m_precision); diff --git a/filters/HexBinFilter.hpp b/filters/HexBinFilter.hpp index 13b6d69c35..c4223a0323 100644 --- a/filters/HexBinFilter.hpp +++ b/filters/HexBinFilter.hpp @@ -71,6 +71,7 @@ class PDAL_DLL HexBin : public Filter, public Streamable bool m_outputTesselation; bool m_doSmooth; point_count_t m_count; + bool m_preserve_topology; virtual void addArgs(ProgramArgs& args); virtual void ready(PointTableRef table); diff --git a/filters/SkewnessBalancingFilter.cpp b/filters/SkewnessBalancingFilter.cpp new file mode 100644 index 0000000000..84d535adb1 --- /dev/null +++ 
b/filters/SkewnessBalancingFilter.cpp @@ -0,0 +1,137 @@ +/****************************************************************************** + * Copyright (c) 2019, Bradley J Chambers (brad.chambers@gmail.com) + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following + * conditions are met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Hobu, Inc. or Flaxen Geo Consulting nor the + * names of its contributors may be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY + * OF SUCH DAMAGE. 
+ ****************************************************************************/ + +#include "SkewnessBalancingFilter.hpp" + +#include <algorithm> +#include <cmath> + +namespace pdal +{ + +static PluginInfo const s_info{ + "filters.skewnessbalancing", "Bartels & Wei Skewness Balancing", + "http://pdal.io/stages/filters.skewnessbalancing.html"}; + +CREATE_STATIC_STAGE(SkewnessBalancingFilter, s_info) + +std::string SkewnessBalancingFilter::getName() const +{ + return s_info.name; +} + +void SkewnessBalancingFilter::addDimensions(PointLayoutPtr layout) +{ + layout->registerDim(Dimension::Id::Classification); +} + +std::set<PointId> SkewnessBalancingFilter::processGround(PointViewPtr view) +{ + auto cmp = [](const PointIdxRef& p1, const PointIdxRef& p2) { + return p1.compare(Dimension::Id::Z, p2); + }; + std::sort(view->begin(), view->end(), cmp); + + point_count_t n(0); + point_count_t n1(0); + double delta, delta_n, term1, M1, M2, M3; + M1 = M2 = M3 = 0.0; + std::vector<double> skewness(view->size()); + for (PointId i = 0; i < view->size(); ++i) + { + double z = view->getFieldAs<double>(Dimension::Id::Z, i); + n1 = n; + n++; + delta = z - M1; + delta_n = delta / n; + term1 = delta * delta_n * n1; + M1 += delta_n; + M3 += term1 * delta_n * (n - 2) - 3 * delta_n * M2; + M2 += term1; + skewness[i] = std::sqrt(n) * M3 / std::pow(M2, 1.5); + } + + PointId j(0); + for (PointId i = view->size() - 1; i >= 0; --i) + { + if (skewness[i] <= 0) + { + j = i; + break; + } + } + + std::set<PointId> groundIdx; + for (PointId i = 0; i <= j; ++i) + groundIdx.insert(i); + + log()->get(LogLevel::Debug) + << "Stopped with " << groundIdx.size() + << " ground returns and skewness of " << skewness[j] << std::endl; + + return groundIdx; +} + +PointViewSet SkewnessBalancingFilter::run(PointViewPtr input) +{ + bool logOutput = log()->getLevel() > LogLevel::Debug1; + if (logOutput) + log()->floatPrecision(8); + + auto idx = processGround(input); + + PointViewSet viewSet; + if (!idx.empty()) + { + // set the classification label of ground returns as 2 + //
(corresponding to ASPRS LAS specification) + for (const auto& i : idx) + input->setField(Dimension::Id::Classification, i, 2); + + viewSet.insert(input); + } + else + { + if (idx.empty()) + log()->get(LogLevel::Debug2) + << "Filtered cloud has no ground returns!\n"; + + // return the input buffer unchanged + viewSet.insert(input); + } + + return viewSet; +} + +} // namespace pdal diff --git a/filters/SkewnessBalancingFilter.hpp b/filters/SkewnessBalancingFilter.hpp new file mode 100644 index 0000000000..abba15f327 --- /dev/null +++ b/filters/SkewnessBalancingFilter.hpp @@ -0,0 +1,61 @@ +/****************************************************************************** + * Copyright (c) 2019, Bradley J Chambers (brad.chambers@gmail.com) + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following + * conditions are met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Hobu, Inc. or Flaxen Geo Consulting nor the + * names of its contributors may be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY + * OF SUCH DAMAGE. + ****************************************************************************/ + +#pragma once + +#include <pdal/Filter.hpp> + +namespace pdal +{ + +class PDAL_DLL SkewnessBalancingFilter : public Filter +{ +public: + SkewnessBalancingFilter() : Filter() + { + } + + std::string getName() const; + +private: + virtual void addDimensions(PointLayoutPtr layout); + std::set<PointId> processGround(PointViewPtr view); + virtual PointViewSet run(PointViewPtr view); + + SkewnessBalancingFilter& + operator=(const SkewnessBalancingFilter&); // not implemented + SkewnessBalancingFilter(const SkewnessBalancingFilter&); // not implemented +}; + +} // namespace pdal diff --git a/filters/private/pnp/GridPnp.hpp b/filters/private/pnp/GridPnp.hpp index 3248501a3a..ffcc621b68 100644 --- a/filters/private/pnp/GridPnp.hpp +++ b/filters/private/pnp/GridPnp.hpp @@ -53,11 +53,11 @@ class GridPnp setupGrid(); } - bool inside(const Point& p) + bool inside(const Point& p) const { return inside(p.first, p.second); } // Determine if a point is inside the polygon attached to this class. - bool inside(double x, double y) + bool inside(double x, double y) const { // Find the index of the grid cell at the position. // If the position isn't in the grid, we're certainly outside. 
@@ -105,7 +105,7 @@ class GridPnp { return m_edges.empty(); } void setPoint(double x, double y) { m_point = Point(x, y); } - bool computed() + bool computed() const { return !std::isnan(m_point.first); } GridPnp::Point point() const { return m_point; } @@ -157,16 +157,16 @@ class GridPnp }; - Point& point1(EdgeId id) + const Point& point1(EdgeId id) const { return m_rings[id]; } - Point& point2(EdgeId id) + const Point& point2(EdgeId id) const { return m_rings[id + 1]; } - double xval(const Point& p) + double xval(const Point& p) const { return p.first; } - double yval(const Point& p) + double yval(const Point& p) const { return p.second; } - void validateRing(const Ring& r) + void validateRing(const Ring& r) const { if (r.size() < 4) throw grid_error("Invalid ring. Ring must consist of at least " @@ -267,9 +267,9 @@ class GridPnp // though, in preprocessing vs. actual pnp tests. Hopefully more work // can be done on this later. My stupid way of dealing with this is // to set a minimum grid size of 1000 cells. - XYIndex calcGridSize(double xAvgLen, double yAvgLen) + XYIndex calcGridSize(double xAvgLen, double yAvgLen) const { - // I'm setting a minmum number of cells as 1000, because, why not? + // I'm setting a minimum number of cells as 1000, because, why not? // m_rings isn't necessarily an exact count of edges, but it's close // enough for this purpose. size_t m = (std::max)((size_t)1000, m_rings.size()); @@ -313,8 +313,8 @@ class GridPnp for (EdgeIt it(m_rings); it; it.next()) { EdgeId id = *it; - Point& p1 = point1(id); - Point& p2 = point2(id); + const Point& p1 = point1(id); + const Point& p2 = point2(id); Point origin = m_grid->origin(); VoxelRayTrace vrt(m_grid->cellWidth(), m_grid->cellHeight(), xval(origin), yval(origin), @@ -327,14 +327,14 @@ class GridPnp // Determine if a point is collinear with an edge. 
- bool pointCollinear(double x, double y, EdgeId edgeId) + bool pointCollinear(double x, double y, EdgeId edgeId) const { - Point& p1 = point1(edgeId); - Point& p2 = point2(edgeId); - double x1 = xval(p1); - double x2 = xval(p2); - double y1 = yval(p1); - double y2 = yval(p2); + const Point& p1 = point1(edgeId); + const Point& p2 = point2(edgeId); + const double x1 = xval(p1); + const double x2 = xval(p2); + const double y1 = yval(p1); + const double y2 = yval(p2); // If p1 == p2, this will fail. @@ -346,7 +346,7 @@ class GridPnp // Put a reference point in the cell. Figure out if the reference point // is inside the polygon. - void computeCell(Cell& cell, XYIndex& pos) + void computeCell(Cell& cell, XYIndex& pos) const { generateRefPoint(cell, pos); determinePointStatus(cell, pos); @@ -359,7 +359,7 @@ class GridPnp // a known status (inside or outside the polygon). So we just pick a point // that isn't collinear with any of the segments in the cell. Eliminating // collinearity eliminates special cases when counting crossings. - void generateRefPoint(Cell& cell, XYIndex& pos) + void generateRefPoint(Cell& cell, XYIndex& pos) const { // A test point is valid if it's not collinear with any segments // in the cell. @@ -389,7 +389,7 @@ class GridPnp // If we're determining the status of the leftmost cell, choose a point // to the left of the leftmost cell, which is guaranteed to be outside // the polygon. - void determinePointStatus(Cell& cell, XYIndex& pos) + void determinePointStatus(Cell& cell, XYIndex& pos) const { Point p1(cell.point()); @@ -433,7 +433,7 @@ class GridPnp // Determine the number of intersections between an edge and // all edges indexes by the 'edges' list. template - size_t intersections(Edge& e1, const EDGES& edges) + size_t intersections(Edge& e1, const EDGES& edges) const { size_t isect = 0; for (auto& edgeId : edges) @@ -449,7 +449,7 @@ class GridPnp // Determine if a point in a cell is inside the polygon or outside. 
// We're always calling a point that lies on an edge as 'inside' // the polygon. - bool testCell(Cell& cell, double x, double y) + bool testCell(Cell& cell, double x, double y) const { Edge tester({x, y}, cell.point()); @@ -474,7 +474,7 @@ class GridPnp // is one or 0 and the other factor is between 0 and 1. // This is standard math, but it's shown nicely on Stack Overflow // question 563198. The variable names map to the good response there. - IntersectType intersects(Edge& e1, Edge& e2) + IntersectType intersects(Edge& e1, Edge& e2) const { using Vector = std::pair; @@ -512,7 +512,7 @@ class GridPnp } RingList m_rings; - std::mt19937 m_ranGen; + mutable std::mt19937 m_ranGen; std::unique_ptr> m_xDistribution; std::unique_ptr> m_yDistribution; std::unique_ptr> m_grid; diff --git a/io/EptReader.cpp b/io/EptReader.cpp index 50d394e933..ae8ee6875c 100644 --- a/io/EptReader.cpp +++ b/io/EptReader.cpp @@ -107,6 +107,8 @@ class EptBounds : public SrsBounds EptBounds() : SrsBounds(BOX3D(LOWEST, LOWEST, LOWEST, HIGHEST, HIGHEST, HIGHEST)) {} + EptBounds(const SrsBounds& b) : SrsBounds( b ) + {} }; namespace Utils @@ -141,6 +143,7 @@ struct EptReader::Args NL::json m_query; NL::json m_headers; + NL::json m_ogr; }; EptReader::EptReader() : m_args(new EptReader::Args) @@ -166,6 +169,8 @@ void EptReader::addArgs(ProgramArgs& args) m_args->m_headers); args.add("query", "Query parameters to forward with HTTP requests", m_args->m_query); + args.add("ogr", "OGR filter geometries", + m_args->m_ogr); } @@ -244,6 +249,13 @@ void EptReader::initialize() setSpatialReference(m_info->srs()); + if (!m_args->m_ogr.is_null()) + { + auto& plist = m_args->m_polys; + std::vector ogrPolys = gdal::getPolygons(m_args->m_ogr); + plist.insert(plist.end(), ogrPolys.begin(), ogrPolys.end()); + } + // Transform query bounds to match point source SRS. 
const SpatialReference& boundsSrs = m_args->m_bounds.spatialReference(); if (!m_info->srs().valid() && boundsSrs.valid()) @@ -269,6 +281,12 @@ void EptReader::initialize() } m_args->m_polys = std::move(exploded); + for (auto& p : m_args->m_polys) + { + m_queryGrids.emplace_back( + new GridPnp(p.exteriorRing(), p.interiorRings())); + } + try { handleOriginQuery(); @@ -954,7 +972,8 @@ void EptReader::load() // Insert an empty placeholder node to keep track of the outstanding // nodes that are currently being fetched/executed. std::unique_lock lock(m_mutex); - auto& loadingBuffer = m_upcomingNodeBuffers[nodeId]; + m_upcomingNodeBuffers.emplace_front(); + auto& loadingBuffer = m_upcomingNodeBuffers.front(); lock.unlock(); m_pool->add([this, &loadingBuffer, nodeId, key]() @@ -964,10 +983,8 @@ void EptReader::load() if (m_info->dataType() == EptInfo::DataType::Laszip) readLaszip(nodeBuffer->view, key, nodeId); - else if (m_info->dataType() == EptInfo::DataType::Binary) - readBinary(nodeBuffer->view, key, nodeId); else - readZstandard(nodeBuffer->view, key, nodeId); + readBinary(nodeBuffer->view, key, nodeId); for (const auto& addon : m_addons) readAddon(nodeBuffer->view, key, *addon); @@ -982,6 +999,15 @@ void EptReader::load() } } +EptReader::NodeBufferIt EptReader::findBuffer() +{ + return std::find_if( + m_upcomingNodeBuffers.begin(), + m_upcomingNodeBuffers.end(), + [](const std::unique_ptr& n) + { return static_cast(n); }); +} + bool EptReader::next() { // Asynchronously trigger the loading of some nodes. @@ -993,26 +1019,17 @@ bool EptReader::next() m_cv.wait(lock, [this]() { return m_upcomingNodeBuffers.empty() || - std::any_of( - m_upcomingNodeBuffers.begin(), - m_upcomingNodeBuffers.end(), - [this](const NodeBufferPair& p) { return !!p.second; }); + findBuffer() != m_upcomingNodeBuffers.end(); }); - // Processing is complete. 
- if (m_upcomingNodeBuffers.empty()) return false; - - // We know at least one node is populated from our `wait` predicate, so find - // the first one and transfer it out of our `upcoming` pool to make it the - // current active node. - const auto it = std::find_if( - m_upcomingNodeBuffers.begin(), - m_upcomingNodeBuffers.end(), - [](const NodeBufferPair& p) { return !!p.second; } - ); + // Grab the first completed buffer from our list and transfer it out of our + // `upcoming` pool to make it the current active node. If there are no + // completed buffers in the list, then we're done with processing. + const auto it = findBuffer(); + if (it == m_upcomingNodeBuffers.end()) return false; m_pointId = 0; - m_currentNodeBuffer = std::move(it->second); + m_currentNodeBuffer = std::move(*it); m_upcomingNodeBuffers.erase(it); return true; @@ -1031,17 +1048,18 @@ bool EptReader::processOne(PointRef& point) if (m_currentNodeBuffer->view.empty()) m_currentNodeBuffer.reset(); } - for (const auto& id : m_currentNodeBuffer->table.layout()->dims()) + auto& sourceView(m_currentNodeBuffer->view); + const auto& layout(*m_currentNodeBuffer->table.layout()); + + for (const auto& id : layout.dims()) { point.setField( id, - m_currentNodeBuffer->view.getFieldAs(id, m_pointId)); + layout.dimType(id), + sourceView.getPoint(m_pointId) + layout.dimOffset(id)); } - if (++m_pointId == m_currentNodeBuffer->view.size()) - { - m_currentNodeBuffer.reset(); - } + if (++m_pointId == sourceView.size()) m_currentNodeBuffer.reset(); return true; } diff --git a/io/EptReader.hpp b/io/EptReader.hpp index fdde6ffa12..a8274ef1ba 100644 --- a/io/EptReader.hpp +++ b/io/EptReader.hpp @@ -36,8 +36,12 @@ #include #include +#include +#include #include #include +#include +#include #include #include @@ -110,10 +114,15 @@ class PDAL_DLL EptReader : public Reader, public Streamable static Dimension::Type getRemoteTypeTest(const NL::json& dimInfo); static Dimension::Type getCoercedTypeTest(const NL::json& dimInfo); - 
// For streamable pipeline. + // For streaming operation. + struct NodeBuffer; + using NodeBufferList = std::list>; + using NodeBufferIt = NodeBufferList::iterator; + virtual bool processOne(PointRef& point) override; void load(); // Asynchronously fetch EPT nodes for streaming use. bool next(); // Acquire an already-fetched node for processing. + NodeBufferIt findBuffer(); // Find a fully acquired node. // Data fetching - these forward user-specified query/header params. std::string get(std::string path) const; @@ -159,15 +168,10 @@ class PDAL_DLL EptReader : public Reader, public Streamable // The below are for streaming operation only. PointLayout* m_userLayout = nullptr; - struct NodeBuffer; - using NodeBufferMap = std::map>; - using NodeBufferIt = NodeBufferMap::const_iterator; - using NodeBufferPair = NodeBufferMap::value_type; - // These represent a lookahead of asynchronously loaded nodes, when we have // finished processing a streaming node we will wait for something to be // loaded here. - NodeBufferMap m_upcomingNodeBuffers; + NodeBufferList m_upcomingNodeBuffers; // This is the node we are currently processing in streaming mode, which is // plucked out of our upcoming node buffers when we have finished our diff --git a/pdal/GDALUtils.cpp b/pdal/GDALUtils.cpp index e5cdd45f29..87615730a0 100644 --- a/pdal/GDALUtils.cpp +++ b/pdal/GDALUtils.cpp @@ -32,6 +32,7 @@ * OF SUCH DAMAGE. 
****************************************************************************/ +#include #include #include #include @@ -44,6 +45,9 @@ #include #include +#include +#include +#include #pragma warning(disable: 4127) // conditional expression is constant @@ -774,6 +778,127 @@ OGRGeometry *createFromGeoJson(const std::string& s, std::string& srs) return newGeom; } + +std::vector getPolygons(const NL::json& ogr) +{ + registerDrivers(); + const NL::json& datasource = ogr.at("datasource"); + + char** papszDriverOptions = nullptr; + if (ogr.count("drivers")) + { + + const NL::json& dops = ogr.at("drivers"); + std::vector driverOptions = dops.get>(); + for(const auto& s: driverOptions) + papszDriverOptions = CSLAddString(papszDriverOptions, s.c_str()); + } + std::vector openoptions{}; + + char** papszOpenOptions = nullptr; + if (ogr.count("openoptions")) + { + + const NL::json& oops = ogr.at("openoptions"); + std::vector openOptions = oops.get>(); + for(const auto& s: openOptions) + papszOpenOptions = CSLAddString(papszOpenOptions, s.c_str()); + } + + std::string dsString = datasource.get(); + unsigned int openFlags = GDAL_OF_READONLY | GDAL_OF_VECTOR | GDAL_OF_VERBOSE_ERROR; + GDALDataset* ds = (GDALDataset*) GDALOpenEx( dsString.c_str(), + openFlags, + papszDriverOptions, papszOpenOptions, NULL ); + CSLDestroy(papszDriverOptions); + CSLDestroy(papszOpenOptions); + if (!ds) + throw pdal_error("Unable to read datasource in fetchOGRGeometries " + datasource.dump() ); + + OGRLayer* poLayer(nullptr); + + if (ogr.count("layer")) + { + const NL::json& layer = ogr.at("layer"); + std::string lyrString = layer.get(); + poLayer = ds->GetLayerByName( lyrString.c_str() ); + + if (!poLayer) + throw pdal_error("Unable to read layer in fetchOGRGeometries " + layer.dump() ); + } + + OGRFeature *poFeature (nullptr); + OGRGeometry* filterGeometry(nullptr); + std::string dialect("OGRSQL"); + + if (ogr.count("sql")) + { + + const NL::json sql = ogr.at("sql"); + + if (ogr.count("options")) + { + 
const NL::json options = ogr.at("options"); + if (options.count("dialect")) + dialect = options.at("dialect").get(); + + if (options.count("geometry")) + { + std::string wkt_or_json = options.at("geometry").get(); + bool isJson = (wkt_or_json.find("{") != wkt_or_json.npos) || + (wkt_or_json.find("}") != wkt_or_json.npos); + + std::string srs; + if (isJson) + { + filterGeometry = gdal::createFromGeoJson(wkt_or_json, srs); + if (!filterGeometry) + throw pdal_error("Unable to create filter geometry from input GeoJSON"); + } + else + { + filterGeometry = gdal::createFromWkt(wkt_or_json, srs); + if (!filterGeometry) + throw pdal_error("Unable to create filter geometry from input WKT"); + } + filterGeometry->assignSpatialReference( + new OGRSpatialReference(SpatialReference(srs).getWKT().data())); + + } + } + + std::string query = sql.get(); + + // execute the query to get the SRS without + // the filterGeometry, assign it to the filterGeometry, + // and then query again. + if (filterGeometry) + { + poLayer = ds->ExecuteSQL( query.c_str(), + NULL, + dialect.c_str()); + if (!poLayer) + throw pdal_error("unable to execute sql query!"); + + filterGeometry->transformTo(poLayer->GetSpatialRef()); + } + + poLayer = ds->ExecuteSQL( query.c_str(), + filterGeometry, + dialect.c_str()); + if (!poLayer) + throw pdal_error("unable to execute sql query!"); + } + + std::vector polys; + while ((poFeature = poLayer->GetNextFeature()) != NULL) + { + polys.emplace_back(poFeature->GetGeometryRef()); + OGRFeature::DestroyFeature( poFeature ); + } + return polys; +} + } // namespace gdal namespace oldgdalsupport diff --git a/pdal/GDALUtils.hpp b/pdal/GDALUtils.hpp index 25da63b211..14878282f6 100644 --- a/pdal/GDALUtils.hpp +++ b/pdal/GDALUtils.hpp @@ -43,8 +43,9 @@ #include #include #include -#include #include +#include +#include #include #include @@ -58,6 +59,8 @@ class OGRGeometry; namespace pdal { +class Polygon; + namespace gdal { @@ -866,6 +869,8 @@ OGRGeometry *createFromGeoJson(const 
char *s); OGRGeometry *createFromWkt(const std::string& s, std::string& srs); OGRGeometry *createFromGeoJson(const std::string& s, std::string& srs); +std::vector getPolygons(const NL::json& ogr); + inline OGRGeometry *fromHandle(OGRGeometryH geom) { return reinterpret_cast(geom); } diff --git a/pdal/Geometry.cpp b/pdal/Geometry.cpp index 9f10738b8c..7f76bea0b2 100644 --- a/pdal/Geometry.cpp +++ b/pdal/Geometry.cpp @@ -67,6 +67,11 @@ Geometry::Geometry(Geometry&& input) : m_geom(std::move(input.m_geom)) {} +Geometry::Geometry(OGRGeometryH g) : + m_geom((reinterpret_cast(g))->clone()) +{} + + Geometry::Geometry(OGRGeometryH g, const SpatialReference& srs) : m_geom((reinterpret_cast(g))->clone()) { diff --git a/pdal/Geometry.hpp b/pdal/Geometry.hpp index f61ae92e2f..7e5fae3cdb 100644 --- a/pdal/Geometry.hpp +++ b/pdal/Geometry.hpp @@ -41,6 +41,7 @@ class OGRGeometry; using OGRGeometryH = void *; +using OGRSpatialReferenceH = void *; namespace pdal { @@ -55,6 +56,7 @@ class PDAL_DLL Geometry Geometry(Geometry&&); Geometry(const std::string& wkt_or_json, SpatialReference ref = SpatialReference()); + Geometry(OGRGeometryH g); Geometry(OGRGeometryH g, const SpatialReference& srs); public: diff --git a/pdal/Polygon.cpp b/pdal/Polygon.cpp index e7823c149c..232779166e 100644 --- a/pdal/Polygon.cpp +++ b/pdal/Polygon.cpp @@ -38,7 +38,19 @@ namespace pdal { +Polygon::Polygon(OGRGeometryH g) : Geometry(g) +{ + init(); +} + + Polygon::Polygon(OGRGeometryH g, const SpatialReference& srs) : Geometry(g, srs) +{ + init(); +} + + +void Polygon::init() { // If the handle was null, we need to create an empty polygon. 
if (!m_geom) @@ -59,6 +71,7 @@ Polygon::Polygon(OGRGeometryH g, const SpatialReference& srs) : Geometry(g, srs) } } + Polygon::Polygon(const BOX2D& box) { OGRPolygon *poly = new OGRPolygon(); @@ -87,7 +100,7 @@ Polygon::Polygon(const BOX3D& box) } -void Polygon::simplify(double distance_tolerance, double area_tolerance) +void Polygon::simplify(double distance_tolerance, double area_tolerance, bool preserve_topology) { throwNoGeos(); @@ -113,7 +126,12 @@ void Polygon::simplify(double distance_tolerance, double area_tolerance) OGR_G_RemoveGeometry(gdal::toHandle(poly), i, true); }; - OGRGeometry *g = m_geom->SimplifyPreserveTopology(distance_tolerance); + OGRGeometry *g; + if (preserve_topology) + g = m_geom->SimplifyPreserveTopology(distance_tolerance); + else + g = m_geom->Simplify(distance_tolerance); + m_geom.reset(g); OGRwkbGeometryType t = m_geom->getGeometryType(); diff --git a/pdal/Polygon.hpp b/pdal/Polygon.hpp index e1f9d44816..91c295d9db 100644 --- a/pdal/Polygon.hpp +++ b/pdal/Polygon.hpp @@ -58,11 +58,12 @@ class PDAL_DLL Polygon : public Geometry Polygon(const BOX2D&); Polygon(const BOX3D&); + Polygon(OGRGeometryH g); Polygon(OGRGeometryH g, const SpatialReference& srs); OGRGeometryH getOGRHandle(); - void simplify(double distance_tolerance, double area_tolerance); + void simplify(double distance_tolerance, double area_tolerance, bool preserve_topology = true); double area() const; std::vector polygons() const; @@ -78,6 +79,9 @@ class PDAL_DLL Polygon : public Geometry bool crosses(const Polygon& p) const; Ring exteriorRing() const; std::vector interiorRings() const; + +private: + void init(); }; } // namespace pdal diff --git a/pdal/SpatialReference.hpp b/pdal/SpatialReference.hpp index 67bd0dbdd4..429e4d317f 100644 --- a/pdal/SpatialReference.hpp +++ b/pdal/SpatialReference.hpp @@ -36,6 +36,8 @@ #include +using OGRSpatialReferenceH = void *; + namespace pdal { @@ -70,6 +72,7 @@ class PDAL_DLL SpatialReference */ SpatialReference(const std::string& 
wkt); + /** Determine if this spatial reference is the same as another. diff --git a/scripts/conda/osx.sh b/scripts/conda/osx.sh index 9edfbebcbe..efb63d636f 100755 --- a/scripts/conda/osx.sh +++ b/scripts/conda/osx.sh @@ -12,7 +12,7 @@ fi rm -rf $BUILDDIR mkdir -p $BUILDDIR cd $BUILDDIR -CC=/usr/bin/cc CXX=/usr/bin/c++ cmake -G "$CONFIG" \ +CFLAGS= CXXFLAGS= CC=/usr/bin/cc CXX=/usr/bin/c++ cmake -G "$CONFIG" \ -DCMAKE_LIBRARY_PATH:FILEPATH="$CONDA_PREFIX/lib" \ -DCMAKE_INCLUDE_PATH:FILEPATH="$CONDA_PREFIX/include" \ -DPython3_ROOT_DIR:FILEPATH="$CONDA_PREFIX" \ diff --git a/test/data/autzen/autzen-attribute-cropped.las b/test/data/autzen/autzen-attribute-cropped.las new file mode 100644 index 0000000000..e791e0e32a Binary files /dev/null and b/test/data/autzen/autzen-attribute-cropped.las differ diff --git a/test/data/ept/1.2-with-color/ept.json b/test/data/ept/1.2-with-color/ept.json index 5b626628e6..088cc83176 100644 --- a/test/data/ept/1.2-with-color/ept.json +++ b/test/data/ept/1.2-with-color/ept.json @@ -112,6 +112,9 @@ } ], "span": 128, - "srs": {}, + "srs": { + + "wkt": "PROJCS[\"NAD_1983_HARN_Lambert_Conformal_Conic\",GEOGCS[\"GCS_North_American_1983_HARN\",DATUM[\"NAD83_High_Accuracy_Reference_Network\",SPHEROID[\"GRS 1980\",6378137,298.2572221010002,AUTHORITY[\"EPSG\",\"7019\"]],AUTHORITY[\"EPSG\",\"6152\"]],PRIMEM[\"Greenwich\",0],UNIT[\"degree\",0.0174532925199433]],PROJECTION[\"Lambert_Conformal_Conic_2SP\"],PARAMETER[\"standard_parallel_1\",43],PARAMETER[\"standard_parallel_2\",45.5],PARAMETER[\"latitude_of_origin\",41.75],PARAMETER[\"central_meridian\",-120.5],PARAMETER[\"false_easting\",1312335.958005249],PARAMETER[\"false_northing\",0],UNIT[\"foot\",0.3048,AUTHORITY[\"EPSG\",\"9002\"]]]" + }, "version": "1.0.0" -} \ No newline at end of file +} diff --git a/test/unit/io/EptReaderTest.cpp b/test/unit/io/EptReaderTest.cpp index e4be7c3f7b..aef237ddda 100644 --- a/test/unit/io/EptReaderTest.cpp +++ b/test/unit/io/EptReaderTest.cpp @@ -70,6 +70,8 @@ 
namespace "ept://" + Support::datapath("ept/lone-star-laszip")); const std::string eptAutzenPath( "ept://" + Support::datapath("ept/1.2-with-color")); + const std::string attributesPath( + Support::datapath("autzen/attributes.json")); // Also test a basic read of binary/zstandard versions of a smaller dataset. const std::string ellipsoidEptBinaryPath( @@ -635,7 +637,8 @@ TEST(EptReaderTest, boundedCrop) { Options options; options.add("filename", eptAutzenPath); - Option polygon("polygon", wkt); + std::string overrides(wkt + "/ EPSG:3644"); + Option polygon("polygon", overrides); options.add(polygon); reader.setOptions(options); } @@ -659,8 +662,9 @@ TEST(EptReaderTest, boundedCrop) } CropFilter crop; { + std::string overrides(wkt + "/ EPSG:3644"); Options options; - Option polygon("polygon", wkt); + Option polygon("polygon", overrides); options.add(polygon); crop.setOptions(options); crop.setInput(source); @@ -742,4 +746,54 @@ TEST(EptReaderTest, boundedCropReprojection) EXPECT_EQ(sourceNp, 47u); } + +TEST(EptReaderTest, ogrCrop) +{ + + EptReader reader; + { + Options options; + options.add("filename", eptAutzenPath); + NL::json ogr; + ogr["drivers"] = {"GeoJSON"}; + ogr["datasource"] = attributesPath; + ogr["sql"] = "select \"_ogr_geometry_\" from attributes"; + + options.add("ogr", ogr); + + reader.setOptions(options); + } + + PointTable eptTable; + reader.prepare(eptTable); + + uint64_t eptNp(0); + for (const PointViewPtr& view : reader.execute(eptTable)) + { + eptNp += view->size(); + } + + // Now we'll check the result against a crop filter of the source file with + // the same bounds. 
+ LasReader source; + { + Options options; + options.add("filename", Support::datapath("autzen/autzen-attribute-cropped.las")); + source.setOptions(options); + } + PointTable sourceTable; + source.prepare(sourceTable); + uint64_t sourceNp(0); + for (const PointViewPtr& view : source.execute(sourceTable)) + { + sourceNp += view->size(); + } + + EXPECT_EQ(eptNp, sourceNp); + EXPECT_EQ(eptNp, 86u); + EXPECT_EQ(sourceNp, 86u); +} + + + } // namespace pdal