
Commit

Add JSON
Ryan Pals committed Feb 7, 2020
1 parent bcd8c3a commit 63ace00
Showing 4 changed files with 18 additions and 7 deletions.
1 change: 1 addition & 0 deletions plugins/hdf/CMakeLists.txt
@@ -17,6 +17,7 @@ else()
INCLUDES
${ROOT_DIR}
${LIBXML2_INCLUDE_DIR}
+ ${NLOHMANN_INCLUDE_DIR}
)

if (WITH_TESTS)
8 changes: 4 additions & 4 deletions plugins/hdf/io/Hdf5Handler.cpp
@@ -194,15 +194,15 @@ void *Hdf5Handler::getNextChunk() {
// m_logger->get(LogLevel::Warning) << "Points remainging: " << elementsRemaining;
H5::DataSpace memspace(1, &selectionSize);
m_dspace.selectHyperslab(H5S_SELECT_SET, &selectionSize, &m_chunkOffset);
- m_logger->get(LogLevel::Warning) << "m_data: " << (void *)m_data.data() << std::endl;
- m_logger->get(LogLevel::Warning) << "chunkOffset: " << m_chunkOffset << std::endl;
- m_logger->get(LogLevel::Warning) << "chunkSize: " << selectionSize << std::endl;
+ // m_logger->get(LogLevel::Warning) << "m_data: " << (void *)m_data.data() << std::endl;
+ // m_logger->get(LogLevel::Warning) << "chunkOffset: " << m_chunkOffset << std::endl;
+ // m_logger->get(LogLevel::Warning) << "chunkSize: " << selectionSize << std::endl;
m_dset.read(m_data.data(),
m_dset.getDataType(),
memspace,
m_dspace );
m_chunkOffset += m_chunkSize;
- m_logger->get(LogLevel::Warning) << "m_data[0] = " << *((double *)m_data.data()) << std::endl;
+ // m_logger->get(LogLevel::Warning) << "m_data[0] = " << *((double *)m_data.data()) << std::endl;
return m_data.data();
}
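
The change above only comments out debug logging, but it sits inside the chunked-read loop that getNextChunk() implements. As context, here is a minimal standalone sketch of that hyperslab-per-chunk pattern using the HDF5 C++ API; the file name, dataset path, and chunk size are illustrative assumptions, not values taken from the plugin.

// Sketch of the chunk-by-chunk read that getNextChunk() performs: select a
// hyperslab of the file dataspace, read it into a reusable buffer, and
// advance the offset by the chunk size for the next pass.
#include <H5Cpp.h>
#include <algorithm>
#include <vector>

int main()
{
    H5::H5File file("example.h5", H5F_ACC_RDONLY);     // hypothetical file
    H5::DataSet dset = file.openDataSet("/autzen/X");  // hypothetical 1-D dataset path
    H5::DataSpace dspace = dset.getSpace();

    hsize_t totalPoints = 0;
    dspace.getSimpleExtentDims(&totalPoints);

    const hsize_t chunkSize = 1024;                     // assumed chunk size
    std::vector<char> data(chunkSize * dset.getDataType().getSize());

    for (hsize_t offset = 0; offset < totalPoints; offset += chunkSize)
    {
        hsize_t selectionSize = std::min(chunkSize, totalPoints - offset);
        H5::DataSpace memspace(1, &selectionSize);
        dspace.selectHyperslab(H5S_SELECT_SET, &selectionSize, &offset);
        dset.read(data.data(), dset.getDataType(), memspace, dspace);
        // data.data() now holds selectionSize elements, analogous to the
        // buffer pointer getNextChunk() returns to the reader.
    }
    return 0;
}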

14 changes: 11 additions & 3 deletions plugins/hdf/io/HdfReader.cpp
@@ -180,12 +180,12 @@ point_count_t HdfReader::read(PointViewPtr view, point_count_t count)
auto info = m_infos.at(0);
int bufIndex = pi % m_hdf5Handler.m_chunkSize;
if(bufIndex == 0) {
std::cout << "bufIndex: " << bufIndex <<
" pi: " << pi << std::endl;
// std::cout << "bufIndex: " << bufIndex <<
// " pi: " << pi << std::endl;
buf = m_hdf5Handler.getNextChunk();
}
void *p = buf + bufIndex*point_size + info.offset;
- if(pi == 0) std::cout<< Dimension::interpretationName(info.pdal_type) <<std::endl;
+ // if(pi == 0) std::cout<< Dimension::interpretationName(info.pdal_type) <<std::endl;
addField(view, info, nextId, p);
// }
nextId++;
@@ -258,11 +258,19 @@ void HdfReader::addArgs(ProgramArgs& args)
{
// args.add("metadata", "Metadata file", m_metadataFile);
args.add("dataset", "HDF dataset to open", m_datasetName);
args.add("map", "Map of HDF path to PDAL dimension", m_pathDimMap);
args.add("name", "PDAL Dimension name of the selected dataset", m_dimName);
}

void HdfReader::initialize()
{
std::cout << "***JSON TESTING***" << std::endl;
std::cout << m_pathDimMap << std::endl;
std::cout << "----------------" << std::endl;
for(auto it = m_pathDimMap.begin(); it != m_pathDimMap.end(); ++it) {
std::cout << "Key: " << it.key() << ", Val: " << it.value() <<std::endl;
}

std::cout << "HdfReader::initialize()" << std::endl;
if (!m_metadataFile.empty() && !FileUtils::fileExists(m_metadataFile))
{
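
The new map option is held as an NL::json (nlohmann) value, and for now initialize() simply prints its key/value pairs. Below is a minimal standalone sketch of that same traversal; the dataset paths and dimension names in the example map are assumptions for illustration. In a pipeline, the option would presumably be supplied as a JSON object on the readers.hdf stage (e.g. "map": {"/some/dataset": "X"}), though this commit only adds the argument and the debug printout.

// Sketch of iterating a path-to-dimension map held in nlohmann::json,
// mirroring the key/value loop added to HdfReader::initialize().
#include <nlohmann/json.hpp>
#include <iostream>

namespace NL = nlohmann;  // same NL::json spelling used in the diff

int main()
{
    // Hypothetical "map" value: HDF dataset path -> PDAL dimension name.
    NL::json pathDimMap = NL::json::parse(
        R"({"/autzen/X": "X", "/autzen/Y": "Y", "/autzen/Z": "Z"})");

    for (auto it = pathDimMap.begin(); it != pathDimMap.end(); ++it)
        std::cout << "Key: " << it.key() << ", Val: " << it.value() << std::endl;
    return 0;
}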
2 changes: 2 additions & 0 deletions plugins/hdf/io/HdfReader.hpp
@@ -39,6 +39,7 @@
#include <pdal/Reader.hpp>
#include <pdal/Options.hpp>
#include <pdal/StageFactory.hpp>
+ #include <nlohmann/json.hpp>

#include "Hdf5Handler.hpp"

@@ -69,6 +70,7 @@ class PDAL_DLL HdfReader : public pdal::Reader
std::string m_metadataFile;
std::string m_datasetName;
std::string m_dimName;
+ NL::json m_pathDimMap;
Dimension::IdList m_idlist;
std::vector<hdf5::DimInfo> m_infos;

