add simple readers.hdf test based on autzen
hobu committed Feb 12, 2020
1 parent 9132331 · commit 6f43b31
Showing 6 changed files with 21 additions and 112 deletions.
plugins/hdf/CMakeLists.txt (6 changes: 4 additions & 2 deletions)

@@ -21,8 +21,10 @@ else()
     )
 
     if (WITH_TESTS)
-        PDAL_ADD_TEST(hdftest
+        PDAL_ADD_TEST(pdal_io_hdf_reader_test
             FILES test/HdfReadertest.cpp
-            LINK_WITH ${libname})
+            LINK_WITH ${libname}
+            INCLUDES
+                ${NLOHMANN_INCLUDE_DIR})
     endif()
 endif()
plugins/hdf/io/HdfReader.cpp (27 changes: 6 additions & 21 deletions)

@@ -1,5 +1,5 @@
 /******************************************************************************
-* Copyright (c) 2014, Connor Manning, connor@hobu.co
+* Copyright (c) 2020, Ryan Pals, ryan@hobu.co
 *
 * All rights reserved.
 *
@@ -45,8 +45,8 @@ namespace pdal
 static PluginInfo const s_info
 {
     "readers.hdf",
-    "HDF Reader (WIP)",
-    "http://pdal.io/"
+    "HDF Reader",
+    "http://pdal.io/stages/readers.hdf.html"
 };
 
 CREATE_SHARED_STAGE(HdfReader, s_info)
@@ -80,20 +80,7 @@ point_count_t HdfReader::read(PointViewPtr view, point_count_t count)
 
     PointId nextId = startId;
     uint8_t *buf = nullptr;
-    // for(uint64_t pi = 0; pi < m_hdf5Handler.getNumPoints(); pi++) {
-    //     int index = 0;
-    //     for(auto info : m_infos) {
-    //         // auto info = m_infos.at(0);
-    //         int bufIndex = pi % m_hdf5Handler.getChunkSize();
-    //         if(bufIndex == 0) {
-    //             buf = m_hdf5Handler.getNextChunk(index);
-    //         }
-    //         uint8_t *p = buf + bufIndex*point_size;
-    //         view->setField(info.id, info.pdal_type, nextId, (void*) p);
-    //         index++;
-    //     }
-    //     nextId++;
-    // }
 
     int index = 0;
     log()->get(LogLevel::Info) << "num infos: " << m_infos.size() << std::endl;
     log()->get(LogLevel::Info) << "num points: " << m_hdf5Handler.getNumPoints() << std::endl;
@@ -130,10 +117,8 @@ void HdfReader::initialize()
         log()->get(LogLevel::Info) << "Key: " << key << ", Val: " << value <<std::endl;
     }
 
-    // Data are WGS84 (4326) with ITRF2000 datum (6656)
-    // See http://nsidc.org/data/docs/daac/icebridge/ilvis2/index.html for
-    // background
-    setSpatialReference(SpatialReference("EPSG:4326"));
+    // ICESat-2 datasets are EPSG:7912
+    setSpatialReference(SpatialReference("EPSG:7912"));
 }
 
 void HdfReader::done(PointTableRef table)
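Aside: the comment block deleted from read() above outlined the chunked read pattern the reader follows, where the buffer is refilled whenever the point index crosses a chunk boundary. Below is a self-contained toy version of that pattern; MockHandler and all sizes are invented for illustration only, standing in for the real Hdf5Handler and PointView plumbing.

// Toy illustration of the chunked read pattern sketched in the deleted
// comment block. MockHandler is invented; the real reader uses Hdf5Handler.
#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

struct MockHandler
{
    uint64_t getNumPoints() const { return 10; }
    uint64_t getChunkSize() const { return 4; }
    // Hands back the next chunk of doubles; a real handler reads from HDF5.
    uint8_t *getNextChunk()
    {
        m_chunk.assign(getChunkSize(), static_cast<double>(m_calls++));
        return reinterpret_cast<uint8_t *>(m_chunk.data());
    }
    std::vector<double> m_chunk;
    int m_calls = 0;
};

int main()
{
    MockHandler handler;
    const size_t pointSize = sizeof(double);
    uint8_t *buf = nullptr;

    for (uint64_t pi = 0; pi < handler.getNumPoints(); pi++)
    {
        uint64_t bufIndex = pi % handler.getChunkSize();
        if (bufIndex == 0)           // refill at each chunk boundary
            buf = handler.getNextChunk();
        double value;
        std::memcpy(&value, buf + bufIndex * pointSize, pointSize);
        std::cout << "point " << pi << " -> " << value << "\n";
    }
    return 0;
}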
plugins/hdf/io/HdfReader.hpp (2 changes: 1 addition & 1 deletion)

@@ -1,5 +1,5 @@
 /******************************************************************************
-* Copyright (c) 2014, Connor Manning, connor@hobu.co
+* Copyright (c) 2020, Ryan Pals, ryan@hobu.co
 *
 * All rights reserved.
 *
plugins/hdf/test/HdfReadertest.cpp (97 changes: 9 additions & 88 deletions)

@@ -39,50 +39,11 @@
 #include <pdal/PipelineManager.hpp>
 #include <pdal/StageFactory.hpp>
 #include <pdal/util/FileUtils.hpp>
 
+#include <nlohmann/json.hpp>
 #include "Support.hpp"
 
 using namespace pdal;
 
-template <typename T>
-void checkDimension(const PointView& data, std::size_t index,
-    Dimension::Id dim, T expected)
-{
-    T actual = data.getFieldAs<T>(dim, index);
-    EXPECT_FLOAT_EQ((float)expected, (float)actual);
-}
-
-void checkPoint(
-    const PointView& data,
-    std::size_t index,
-    float time,
-    float latitude,
-    float longitude,
-    float elevation,
-    int xmtSig,
-    int rcvSig,
-    float azimuth,
-    float pitch,
-    float roll,
-    float gpsPdop,
-    float pulseWidth,
-    float relTime)
-{
-    using namespace Dimension;
-    checkDimension(data, index, Id::OffsetTime, time);
-    checkDimension(data, index, Id::Y, latitude);
-    checkDimension(data, index, Id::X, longitude);
-    checkDimension(data, index, Id::Z, elevation);
-    checkDimension(data, index, Id::StartPulse, xmtSig);
-    checkDimension(data, index, Id::ReflectedPulse, rcvSig);
-    checkDimension(data, index, Id::Azimuth, azimuth);
-    checkDimension(data, index, Id::Pitch, pitch);
-    checkDimension(data, index, Id::Roll, roll);
-    checkDimension(data, index, Id::Pdop, gpsPdop);
-    checkDimension(data, index, Id::PulseWidth, pulseWidth);
-    checkDimension(data, index, Id::GpsTime, relTime);
-}
-
 std::string getFilePath()
 {
     return Support::datapath("hdf/autzen.h5");
@@ -95,59 +56,19 @@ TEST(HdfReaderTest, testRead)
     EXPECT_TRUE(reader);
 
     Option filename("filename", getFilePath());
-    std::cout << getFilePath() << std::endl;
-    Option dataset("dataset", "/autzen");
+
+    NL::json j = {{ "X" ,"autzen/X"}, {"Y" , "autzen/Y"}, {"Z" , "autzen/Z" }};
+    Option dataset("map", j.dump());
 
     Options options(filename);
     options.add(dataset);
     reader->setOptions(options);
 
     PointTable table;
     reader->prepare(table);
     PointViewSet viewSet = reader->execute(table);
-    // EXPECT_EQ(viewSet.size(), 1u);
+    EXPECT_EQ(viewSet.size(), 1u);
     PointViewPtr view = *viewSet.begin();
-    // EXPECT_EQ(view->size(), 2u);
-
-    // checkPoint(
-    //     *view,
-    //     0,
-    //     1414375e2f, // time
-    //     82.60531f, // latitude
-    //     -58.59381f, // longitude
-    //     18.678f, // elevation
-    //     2408, // xmtSig
-    //     181, // rcvSig
-    //     49.91f, // azimuth
-    //     -4.376f, // pitch
-    //     0.608f, // roll
-    //     2.9f, // gpsPdop
-    //     20.0f, // pulseWidth
-    //     0.0f); // relTime
-
-    // checkPoint(
-    //     *view,
-    //     1,
-    //     1414375e2f, // time
-    //     82.60528f, // latitude
-    //     -58.59512f, // longitude
-    //     18.688f, // elevation
-    //     2642, // xmtSig
-    //     173, // rcvSig
-    //     52.006f, // azimuth
-    //     -4.376f, // pitch
-    //     0.609f, // roll
-    //     2.9f, // gpsPdop
-    //     17.0f, // pulseWidth
-    //     0.0f); // relTime
-}
-
-// TEST(HdfReaderTest, testPipeline)
-// {
-//     PipelineManager manager;
-
-//     manager.readPipeline(Support::configuredpath("hdf/pipeline.json"));
-
-//     point_count_t numPoints = manager.execute();
-//     EXPECT_EQ(numPoints, 2u);
-//     FileUtils::deleteFile(Support::datapath("hdf/outfile.txt"));
-// }
+    EXPECT_EQ(view->size(), 1065u);
+    Support::check_p0_p1_p2(*view);
+}
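For reference, the new test drives the reader directly through PDAL's stage API. A minimal standalone sketch of the same readers.hdf usage outside the test harness might look like the following; the local autzen.h5 path and its autzen/X, autzen/Y, autzen/Z dataset layout are assumptions that mirror the test above.

// Standalone sketch of the readers.hdf "map" option usage shown in the
// test. File path and dataset layout are assumed, not shipped with PDAL.
#include <iostream>
#include <pdal/Options.hpp>
#include <pdal/PointTable.hpp>
#include <pdal/PointView.hpp>
#include <pdal/StageFactory.hpp>
#include <nlohmann/json.hpp>

int main()
{
    using namespace pdal;

    StageFactory factory;
    Stage *reader = factory.createStage("readers.hdf");

    // The "map" option pairs PDAL dimension names with HDF5 dataset paths.
    nlohmann::json map = {
        {"X", "autzen/X"}, {"Y", "autzen/Y"}, {"Z", "autzen/Z"}};

    Options options;
    options.add("filename", "autzen.h5");   // hypothetical local copy
    options.add("map", map.dump());
    reader->setOptions(options);

    PointTable table;
    reader->prepare(table);
    PointViewSet viewSet = reader->execute(table);
    PointViewPtr view = *viewSet.begin();
    std::cout << view->size() << " points read\n";
    return 0;
}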
scripts/conda/osx.sh (1 change: 1 addition & 0 deletions)

@@ -20,6 +20,7 @@ CFLAGS= CXXFLAGS= CC=/usr/bin/cc CXX=/usr/bin/c++ cmake -G "$CONFIG" \
     -DCMAKE_INSTALL_PREFIX=${CONDA_PREFIX} \
     -DBUILD_PLUGIN_PGPOINTCLOUD=ON \
     -DBUILD_PLUGIN_NITF=ON \
+    -DBUILD_PLUGIN_HDF=ON \
     -DBUILD_PLUGIN_ICEBRIDGE=ON \
     -DBUILD_PLUGIN_TILEDB=ON \
     -DBUILD_PLUGIN_RDBLIB=ON \
test/data/hdf/autzen.h5 (binary file added, not shown)
