Merge remote-tracking branch 'origin/master' into issue-3394
abellgithub committed Mar 23, 2021
2 parents b22340a + 1a802af commit 34e30b3
Showing 34 changed files with 1,442 additions and 409 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/docker.yml
@@ -8,7 +8,7 @@ jobs:
containers:
runs-on: ubuntu-latest
env:
-PUSH_PACKAGES: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
+PUSH_PACKAGES: ${{ github.repository_owner == 'PDAL' }}
steps:
- uses: actions/checkout@v2
- name: Lint
Binary file added doc/_static/logo/pdal-logo-big.png
1 change: 1 addition & 0 deletions doc/conf.py
@@ -77,6 +77,7 @@ def process_dimensions():


spelling_word_list_filename='spelling_wordlist.txt'
+bibtex_bibfiles = ['./stages/references.bib','./workshop/bibliography.bib']


# The version info for the project you're documenting, acts as replacement for
4 changes: 2 additions & 2 deletions doc/stages/readers.gdal.rst
@@ -51,8 +51,8 @@ RGB values of an `ASPRS LAS`_ file using :ref:`writers.las`.
"header": "Red, Green, Blue"
},
{
"type":"writers.text",
"filename":"outputfile.txt"
"type":"writers.las",
"filename":"outputfile.las"
}
]
17 changes: 16 additions & 1 deletion doc/stages/writers.pcd.rst
@@ -13,6 +13,21 @@ formatted data.

.. streamable::

+.. note::
+
+X, Y, and Z dimensions will be written as single-precision floats by
+default to be compatible with most of the existing PCL point types. These
+dimensions can be forced to double-precision using the `order` option, but
+the PCL code reading this data must be capable of reading double-precision
+fields (i.e., it is not the responsibility of PDAL to ensure this
+compatibility).
+
+.. note::
+
+When working with large coordinate values it is recommended that users
+first translate the coordinate values using :ref:`filters.transformation`
+to avoid loss of precision when writing single-precision XYZ data.
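
An illustrative pipeline sketch for the second note (the filenames and the
translation offsets below are placeholders, not part of this commit) would
first shift the coordinates with filters.transformation and then write the
PCD file:

    [
        "input.las",
        {
            "type": "filters.transformation",
            "matrix": "1 0 0 -400000  0 1 0 -6200000  0 0 1 0  0 0 0 1"
        },
        {
            "type": "writers.pcd",
            "filename": "output.pcd"
        }
    ]

The row-major 4x4 matrix translates X and Y before the values are truncated
to single-precision floats; forcing double-precision output via the `order`
option is the alternative when the consuming PCL code can read it.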


Example
-------
@@ -42,7 +57,7 @@ compression

_`precision`
Decimal Precision for output of values. This can be overridden for individual
-dimensions using the order option. [Default: 3]
+dimensions using the order option. [Default: 2]

_`order`
Comma-separated list of dimension names in the desired output order. For
2 changes: 1 addition & 1 deletion filters/CSFilter.cpp
@@ -178,7 +178,7 @@ PointViewSet CSFilter::run(PointViewPtr view)
}

if ((nrOneZero || rnOneZero) && !(nrAllZero && rnAllZero))
throwError("Some NumberOfReturns or ReternNumber values were 0, but "
throwError("Some NumberOfReturns or ReturnNumber values were 0, but "
"not all. Check that all values in the input file are >= "
"1.");

6 changes: 4 additions & 2 deletions filters/ColorizationFilter.cpp
@@ -215,9 +215,11 @@ bool ColorizationFilter::processOne(PointRef& point)
point.setField(b.m_dim, data[i] * b.m_scale);
++i;
}
-return true;
}
-return false;
+
+// always return true to retain all points inside OR outside the raster. the output bands of
+// any points outside the raster are ignored.
+return true;
}


2 changes: 1 addition & 1 deletion filters/PMFFilter.cpp
@@ -171,7 +171,7 @@ PointViewSet PMFFilter::run(PointViewPtr input)
}

if ((nrOneZero || rnOneZero) && !(nrAllZero && rnAllZero))
throwError("Some NumberOfReturns or ReternNumber values were 0, but "
throwError("Some NumberOfReturns or ReturnNumber values were 0, but "
"not all. Check that all values in the input file are >= "
"1.");

2 changes: 1 addition & 1 deletion filters/SMRFilter.cpp
@@ -211,7 +211,7 @@ PointViewSet SMRFilter::run(PointViewPtr view)
}

if ((nrOneZero || rnOneZero) && !(nrAllZero && rnAllZero))
throwError("Some NumberOfReturns or ReternNumber values were 0, but "
throwError("Some NumberOfReturns or ReturnNumber values were 0, but "
"not all. Check that all values in the input file are >= "
"1.");

12 changes: 9 additions & 3 deletions io/GDALWriter.cpp
@@ -202,9 +202,15 @@ int GDALWriter::height() const

void GDALWriter::createGrid(BOX2D bounds)
{
-int width = (int)std::floor((bounds.maxx - bounds.minx) / m_edgeLength) + 1;
-int height = (int)std::floor((bounds.maxy - bounds.miny) / m_edgeLength) + 1;
-
+// Validating before casting avoids float-cast-overflow undefined behavior.
+double d_width = std::floor((bounds.maxx - bounds.minx) / m_edgeLength) + 1;
+double d_height = std::floor((bounds.maxy - bounds.miny) / m_edgeLength) + 1;
+if (d_width < 0.0 || d_width > (std::numeric_limits<int>::max)())
+throwError("Grid width out of range.");
+if (d_height < 0.0 || d_height > (std::numeric_limits<int>::max)())
+throwError("Grid height out of range.");
+int width = static_cast<int>(d_width);
+int height = static_cast<int>(d_height);
try
{
m_grid.reset(new GDALGrid(bounds.minx, bounds.miny, width, height, m_edgeLength,
2 changes: 1 addition & 1 deletion io/LasHeader.hpp
@@ -222,7 +222,7 @@ class PDAL_DLL LasHeader
}

/// The length in bytes of each point. All points in the file are
-/// considered to be fixed in size, and the PointFormatName is used
+/// considered to be fixed in size, and the point format is used
/// to determine the fixed portion of the dimensions in the point.
uint16_t pointLen() const
{ return m_pointLen; }
22 changes: 16 additions & 6 deletions io/LasReader.cpp
@@ -83,6 +83,7 @@ struct LasReader::Args
bool useEbVlr;
StringList ignoreVLROption;
bool fixNames;
+PointId start;
};

struct LasReader::Private
@@ -119,9 +120,9 @@ void LasReader::addArgs(ProgramArgs& args)
args.add("extra_dims", "Dimensions to assign to extra byte data",
m_args->extraDimSpec);
args.add("compression", "Decompressor to use", m_args->compression, "EITHER");
args.add("use_eb_vlr", "Use extra bytes VLR for 1.0 - 1.3 files",
m_args->useEbVlr);
args.add("use_eb_vlr", "Use extra bytes VLR for 1.0 - 1.3 files", m_args->useEbVlr);
args.add("ignore_vlr", "VLR userid/recordid to ignore", m_args->ignoreVLROption);
args.add("start", "Point at which reading should start (0-indexed).", m_args->start);
args.add("fix_dims", "Make invalid dimension names valid by changing "
"invalid characters to '_'", m_args->fixNames, true);
}
@@ -147,7 +148,7 @@ const LasHeader& LasReader::header() const

point_count_t LasReader::getNumPoints() const
{
-return m_p->header.pointCount();
+return m_p->header.pointCount() - m_args->start;
}

void LasReader::initialize(PointTableRef table)
@@ -270,6 +271,9 @@ void LasReader::initializeLocal(PointTableRef table, MetadataNode& m)
m_p->header.removeVLR(i.m_userId);
}

+if (m_args->start > m_p->header.pointCount())
+throwError("'start' value of " + std::to_string(m_args->start) + " is too large. "
+"File contains " + std::to_string(m_p->header.pointCount()) + " points.");
if (m_p->header.compressed())
handleCompressionOption();
#ifdef PDAL_HAVE_LASZIP
@@ -321,6 +325,7 @@ void LasReader::ready(PointTableRef table)
handleLaszip(laszip_open_reader_stream(m_p->laszip, *stream,
&compressed));
handleLaszip(laszip_get_point_pointer(m_p->laszip, &m_p->laszipPoint));
+handleLaszip(laszip_seek_point(m_p->laszip, m_args->start));
}
#endif

@@ -329,8 +334,9 @@ void LasReader::ready(PointTableRef table)
{
delete m_p->decompressor;

-const LasVLR *vlr = m_p->header.findVlr(LASZIP_USER_ID,
-LASZIP_RECORD_ID);
+if (m_args->start != 0)
+throwError("LAZperf does not support the 'start' option.");
+const LasVLR *vlr = m_p->header.findVlr(LASZIP_USER_ID, LASZIP_RECORD_ID);
if (!vlr)
throwError("LAZ file missing required laszip VLR.");
m_p->decompressor = new LazPerfVlrDecompressor(*stream,
@@ -345,7 +351,11 @@
#endif
}
else
-stream->seekg(m_p->header.pointOffset());
+{
+std::istream::pos_type start = m_p->header.pointOffset() +
+(m_args->start * m_p->header.pointLen());
+stream->seekg(start);
+}
}
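
Taken together, the LasReader changes above add a `start` option to
readers.las. A usage sketch follows (the filenames and the point index are
placeholders), keeping in mind that a non-zero `start` is rejected when the
LAZperf decompressor is in use:

    [
        {
            "type": "readers.las",
            "filename": "input.laz",
            "start": 1000000
        },
        {
            "type": "writers.las",
            "filename": "subset.las"
        }
    ]

Reading begins at the 0-indexed point given by `start`, and getNumPoints()
reports the remaining point count accordingly.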


4 changes: 2 additions & 2 deletions io/LasWriter.cpp
@@ -751,7 +751,7 @@ bool LasWriter::processOne(PointRef& point)
{
if (scale.m_auto)
log()->get(LogLevel::Warning) << "Auto scale for " << name <<
"requested in stream mode. Using value of 1.0." << std::endl;
" requested in stream mode. Using value of 1.0." << std::endl;
};

doScale(m_scaling.m_xXform.m_scale, "X");
@@ -765,7 +765,7 @@ bool LasWriter::processOne(PointRef& point)
{
offset.m_val = val;
log()->get(LogLevel::Warning) << "Auto offset for " << name <<
"requested in stream mode. Using value of " <<
" requested in stream mode. Using value of " <<
offset.m_val << "." << std::endl;
}
};