Commit

WIP
Print out info from the hard-coded autzen dataset; remove test points from the test file; iterate over dimensions and print size, offset, type, and endianness.
Ryan Pals committed Jan 27, 2020
1 parent 6554036 commit 226b1cd
Showing 5 changed files with 144 additions and 44 deletions.
9 changes: 7 additions & 2 deletions plugins/hdf/CMakeLists.txt
@@ -1,12 +1,12 @@
#
-# Icebridge plugin CMake configuration
+# HDF plugin CMake configuration
#

include (${PDAL_CMAKE_DIR}/hdf5.cmake)


if (NOT PDAL_HAVE_HDF5)
-    message(FATAL "HDF5 not found but is required for Icebridge.")
+    message(FATAL "HDF5 not found but is required for HDF.")
else()
PDAL_ADD_PLUGIN(libname reader hdf
FILES
@@ -19,4 +19,9 @@ else()
${LIBXML2_INCLUDE_DIR}
)

+    if (WITH_TESTS)
+        PDAL_ADD_TEST(hdftest
+            FILES test/HdfReadertest.cpp
+            LINK_WITH ${libname})
+    endif()
endif()
90 changes: 90 additions & 0 deletions plugins/hdf/io/Hdf5Handler.cpp
@@ -52,7 +52,97 @@ void Hdf5Handler::initialize(
{
try
{
int numPoints = 1065; // TODO pull this out of the file
m_h5File.reset(new H5::H5File(filename, H5F_ACC_RDONLY));

std::cout << "Number of HDF5 objects: " << m_h5File.get()->getObjCount() << std::endl;
H5::DataSet dset = m_h5File.get()->openDataSet("/autzen");
H5::DataSpace dspace = dset.getSpace();
std::cout << "Number of dataspace dimensions: " << dspace.getSimpleExtentNdims() << std::endl;
H5::CompType ctype = dset.getCompType();//H5::CompType(dset);
std::cout << "Point length: " << ctype.getSize() << std::endl;
std::cout << "Number of HDF compound type members (PDAL dimensions): " << ctype.getNmembers() << std::endl;

// print size, offset, type and endianness for each compound member (PDAL dimension)
for(int j = 0; j < ctype.getNmembers(); ++j) {
    H5T_class_t vague_type = ctype.getMemberDataType(j).getClass();
    H5::IntType int_type = ctype.getMemberIntType(j);
    H5::FloatType float_type = ctype.getMemberFloatType(j);
    switch(vague_type) {
        case H5T_COMPOUND:
            std::cout << "Compound type";
            std::cout << "Nested compound types not supported" << std::endl;
            break;
        case H5T_INTEGER:
            if(int_type.getSign() == H5T_SGN_NONE) {
                std::cout << "uint, s:" << int_type.getSize() << ", e:" << int_type.getOrder();
            } else if(int_type.getSign() == H5T_SGN_2) {
                std::cout << "sint, s:" << int_type.getSize() << ", e:" << int_type.getOrder();
            } else {
                std::cout << "sign error";
            }
            break;
        case H5T_FLOAT:
            std::cout << "float, s:" << float_type.getSize() << ", e:" << float_type.getOrder();
            break;
        default:
            std::cout << "Unknown type: " << vague_type;
    }
    std::cout << ", o:" << ctype.getMemberOffset(j) << ", " << ctype.getMemberName(j);
    std::cout << std::endl;
}

// std::vector<char> data;
//char data[numPoints*s];
// autzen_t* data = new autzen_t[numPoints];
// dset.read(data.data(), ctype, dspace);

// std::cout << data.data() << std::endl << std::endl;
/*
// std::cout << "Size of compound type: " << ctype.getSize() << std::endl;
if(type_class == H5T_COMPOUND) std::cout << "Compound type" <<std::endl;
H5::DataType dtype = dset.getDataType();
H5T_class_t clas = dtype.getClass();
std::cout << "clas: " << clas << std::endl;
std::cout << "THING: " << H5Tget_native_type(clas, H5T_DIR_DEFAULT) << std::endl;
// std::cout << dtype << std::endl;
//dset.read(data, dtype, dspace);
ctype.insertMember("red" , HOFFSET(autzen_t, red), H5::PredType::STD_I16LE);
ctype.insertMember("green", HOFFSET(autzen_t, green), H5::PredType::STD_I16LE);
ctype.insertMember("blue" , HOFFSET(autzen_t, blue), H5::PredType::STD_I16LE);
dset.read(data, ctype, dspace);
// std::cout << dspace.getNumMembers(h5type) << std::endl;
// char hex[2];
// for(char *p = data; p < data+s*numPoints; p++) {
// sprintf(hex, "%X", *p);
// // std::cout << hex;
// }
autzen_t *struct_data = (autzen_t *) data;
for(int j = 0; j < numPoints; j++) {
autzen_t point = struct_data[j];
std::cout << "Point number: " << j << ", RGB: " << point.red << ", " << point.green << ", " << point.blue << std::endl;
}
std::cout << std::endl;
std::cout << "Got here!" << std::endl;
*/

// auto accessPlist = m_h5File.get()->getAccessPlist();
// for(auto i = 0; i < accessPlist.get) {

// }

// hid_t *p = (hid_t *)malloc(sizeof(hid_t) * objCount);
// for(auto i = 0; i < objCount; ++i) {
// std::cout << "p[" << i << "]: " << p[i] << std::endl;
// }
// m_h5File.get()->getObjIDs(H5F_OBJ_DATASET, INT32_MAX, p);
// for(auto i = 0; i < objCount; ++i) {
// auto thing = p[i];
// std::cout << "p[" << i << "]: " << thing << std::endl;
// }
}
catch (const H5::FileIException&)
{
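The hard-coded point count ("int numPoints = 1065; // TODO pull this out of the file") and the commented-out dset.read experiments above both point at the same next step: pulling the extent and the raw records out of the file itself. Below is a minimal, non-authoritative sketch of that step, using only HDF5 C++ calls that already appear in this diff; the function name and the "/autzen" path are illustrative, not committed code.

#include <H5Cpp.h>
#include <iostream>
#include <string>
#include <vector>

// Sketch only: read every record of a compound dataset into a packed buffer.
void dumpCompoundDataset(const std::string& filename)
{
    H5::H5File file(filename, H5F_ACC_RDONLY);
    H5::DataSet dset = file.openDataSet("/autzen");   // dataset name from this WIP commit
    H5::DataSpace dspace = dset.getSpace();
    H5::CompType ctype = dset.getCompType();

    // Number of points comes from the dataspace instead of a hard-coded 1065.
    hssize_t npoints = dspace.getSimpleExtentNpoints();

    // One packed record per point; ctype.getSize() is the record size in bytes.
    std::vector<char> data(static_cast<size_t>(npoints) * ctype.getSize());
    dset.read(data.data(), ctype);

    std::cout << "Read " << npoints << " points of "
              << ctype.getSize() << " bytes each" << std::endl;
}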
3 changes: 2 additions & 1 deletion plugins/hdf/io/HdfReader.cpp
@@ -185,7 +185,8 @@ point_count_t HdfReader::read(PointViewPtr view, point_count_t count)

void HdfReader::addArgs(ProgramArgs& args)
{
-    args.add("metadata", "Metadata file", m_metadataFile);
+    // args.add("metadata", "Metadata file", m_metadataFile);
+    args.add("dataset", "HDF dataset to open", m_datasetName);
}

void HdfReader::initialize()
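With the "metadata" argument retired in favor of "dataset", driving the reader from C++ looks roughly like the sketch below. This is a non-authoritative sketch: it assumes the plugin registers as "readers.hdf" (the usual PDAL naming for a plugin declared with PDAL_ADD_PLUGIN(libname reader hdf)), and the file path is a placeholder. The updated test further down does essentially the same thing under GTest.

#include <pdal/Options.hpp>
#include <pdal/PointTable.hpp>
#include <pdal/PointView.hpp>
#include <pdal/StageFactory.hpp>

int main()
{
    pdal::StageFactory factory;
    pdal::Stage* reader = factory.createStage("readers.hdf");  // assumed stage name
    if (!reader)
        return 1;   // plugin not found

    pdal::Options opts;
    opts.add("filename", "autzen.h5");   // placeholder path
    opts.add("dataset", "/autzen");      // the option added in this commit
    reader->setOptions(opts);

    pdal::PointTable table;
    reader->prepare(table);
    pdal::PointViewSet views = reader->execute(table);
    return views.empty() ? 1 : 0;
}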
1 change: 1 addition & 0 deletions plugins/hdf/io/HdfReader.hpp
@@ -67,6 +67,7 @@ class PDAL_DLL HdfReader : public pdal::Reader
virtual bool eof();

std::string m_metadataFile;
std::string m_datasetName;

HdfReader& operator=(const HdfReader&); // Not implemented.
HdfReader(const HdfReader&); // Not implemented.
Expand Down
85 changes: 44 additions & 41 deletions plugins/hdf/test/HdfReadertest.cpp
@@ -85,7 +85,7 @@ void checkPoint(

std::string getFilePath()
{
-    return Support::datapath("icebridge/twoPoints.h5");
+    return Support::datapath("hdf/autzen.h5");
}

TEST(HdfReaderTest, testRead)
@@ -95,56 +95,59 @@ TEST(HdfReaderTest, testRead)
EXPECT_TRUE(reader);

Option filename("filename", getFilePath());
std::cout << getFilePath() << std::endl;
Option dataset("dataset", "/autzen");
Options options(filename);
options.add(dataset);
reader->setOptions(options);

PointTable table;
reader->prepare(table);
PointViewSet viewSet = reader->execute(table);
EXPECT_EQ(viewSet.size(), 1u);
// EXPECT_EQ(viewSet.size(), 1u);
PointViewPtr view = *viewSet.begin();
EXPECT_EQ(view->size(), 2u);
// EXPECT_EQ(view->size(), 2u);

checkPoint(
*view,
0,
1414375e2f, // time
82.60531f, // latitude
-58.59381f, // longitude
18.678f, // elevation
2408, // xmtSig
181, // rcvSig
49.91f, // azimuth
-4.376f, // pitch
0.608f, // roll
2.9f, // gpsPdop
20.0f, // pulseWidth
0.0f); // relTime
// checkPoint(
// *view,
// 0,
// 1414375e2f, // time
// 82.60531f, // latitude
// -58.59381f, // longitude
// 18.678f, // elevation
// 2408, // xmtSig
// 181, // rcvSig
// 49.91f, // azimuth
// -4.376f, // pitch
// 0.608f, // roll
// 2.9f, // gpsPdop
// 20.0f, // pulseWidth
// 0.0f); // relTime

checkPoint(
*view,
1,
1414375e2f, // time
82.60528f, // latitude
-58.59512f, // longitude
18.688f, // elevation
2642, // xmtSig
173, // rcvSig
52.006f, // azimuth
-4.376f, // pitch
0.609f, // roll
2.9f, // gpsPdop
17.0f, // pulseWidth
0.0f); // relTime
// checkPoint(
// *view,
// 1,
// 1414375e2f, // time
// 82.60528f, // latitude
// -58.59512f, // longitude
// 18.688f, // elevation
// 2642, // xmtSig
// 173, // rcvSig
// 52.006f, // azimuth
// -4.376f, // pitch
// 0.609f, // roll
// 2.9f, // gpsPdop
// 17.0f, // pulseWidth
// 0.0f); // relTime
}

TEST(HdfReaderTest, testPipeline)
{
PipelineManager manager;
// TEST(HdfReaderTest, testPipeline)
// {
// PipelineManager manager;

manager.readPipeline(Support::configuredpath("icebridge/pipeline.json"));
// manager.readPipeline(Support::configuredpath("hdf/pipeline.json"));

point_count_t numPoints = manager.execute();
EXPECT_EQ(numPoints, 2u);
FileUtils::deleteFile(Support::datapath("icebridge/outfile.txt"));
}
// point_count_t numPoints = manager.execute();
// EXPECT_EQ(numPoints, 2u);
// FileUtils::deleteFile(Support::datapath("hdf/outfile.txt"));
// }
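The assertions above stay disabled because the reader does not yet register real dimensions for the autzen compound members. The class/size/sign/order values printed in Hdf5Handler.cpp are exactly what that registration will need; one plausible mapping to PDAL dimension types is sketched below. This is an assumption about a future step, not code from this commit, and the helper name is illustrative.

#include <pdal/Dimension.hpp>
#include <H5Cpp.h>
#include <cstddef>

// Sketch only: translate an HDF5 member's class, byte size and signedness
// into a PDAL dimension type. Unsupported members map to Type::None.
static pdal::Dimension::Type
hdf5ToPdalType(H5T_class_t cls, size_t size, H5T_sign_t sign)
{
    using DT = pdal::Dimension::Type;
    if (cls == H5T_FLOAT)
        return size == 8 ? DT::Double : DT::Float;
    if (cls == H5T_INTEGER)
    {
        bool isUnsigned = (sign == H5T_SGN_NONE);
        switch (size)
        {
        case 1: return isUnsigned ? DT::Unsigned8  : DT::Signed8;
        case 2: return isUnsigned ? DT::Unsigned16 : DT::Signed16;
        case 4: return isUnsigned ? DT::Unsigned32 : DT::Signed32;
        case 8: return isUnsigned ? DT::Unsigned64 : DT::Signed64;
        }
    }
    return DT::None;    // e.g. nested compound types, per the message printed above
}

For floating point only the byte size matters; for integers both the size and the sign flag returned by H5::IntType::getSign() are needed, which is why the loop in Hdf5Handler.cpp prints both.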
