Skip to content

Commit

Permalink
Cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
Ryan Pals committed Feb 7, 2020
1 parent 1ef7923 commit c017b1f
Show file tree
Hide file tree
Showing 4 changed files with 8 additions and 59 deletions.
12 changes: 5 additions & 7 deletions plugins/hdf/io/Hdf5Handler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,6 @@ void Hdf5Handler::initialize(
}
m_logger->get(LogLevel::Warning) << "Chunk size: " << m_chunkSize << std::endl;
m_logger->get(LogLevel::Warning) << "Num points: " << m_numPoints << std::endl;
m_logger->get(LogLevel::Warning) << "Number of dataspace dimensions: " << dspace.getSimpleExtentNdims() << std::endl;
H5::DataType dtype = dset.getDataType();
H5T_class_t vague_type = dtype.getClass();

Expand All @@ -119,13 +118,7 @@ void Hdf5Handler::initialize(
throw pdal_error("Unkown type: " + vague_type);
}
m_data.resize(m_chunkSize*dtype.getSize());
m_logger->get(LogLevel::Warning) << "m_data.size: " << m_data.size() << std::endl;
m_logger->get(LogLevel::Warning) << "Chunk offset: " << m_chunkOffset << std::endl;
}
// datasetName.selectElements(H5S_SELECT_SET, m_chunkSize, &m_chunkOffset);
// dspace.selectHyperslab(H5S_SELECT_SET, &m_chunkSize, &m_chunkOffset);
// H5::DataSpace mspace( 1, &m_chunkOffset);
// dset.read(m_buf, dtype, H5::DataSpace::ALL, dspace);
}

void Hdf5Handler::close()
Expand Down Expand Up @@ -159,5 +152,10 @@ std::vector<pdal::hdf5::DimInfo> Hdf5Handler::getDimensionInfos() {
return m_dimInfos;
}


/// Accessor for the configured chunk size, i.e. the number of dataset
/// elements buffered per chunked read (set during initialize()).
/// @return chunk size in elements, as an HDF5 hsize_t.
hsize_t Hdf5Handler::getChunkSize()
{
    return m_chunkSize;
}

} // namespace pdal

33 changes: 2 additions & 31 deletions plugins/hdf/io/Hdf5Handler.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -91,40 +91,13 @@ class Hdf5Handler
uint64_t getNumPoints() const;
std::vector<pdal::hdf5::DimInfo> getDimensionInfos();

void getColumnEntries(
void* data,
const std::string& dataSetName,
const hsize_t numEntries,
const hsize_t offset) const;
hsize_t m_chunkSize;
void setLog(pdal::LogPtr log);
hsize_t getChunkSize();

private:
struct ColumnData
{
ColumnData(
H5::PredType predType,
H5::DataSet dataSet,
H5::DataSpace dataSpace)
: predType(predType)
, dataSet(dataSet)
, dataSpace(dataSpace)
{ }

ColumnData(H5::PredType predType)
: predType(predType)
, dataSet()
, dataSpace()
{ }

H5::PredType predType;
H5::DataSet dataSet;
H5::DataSpace dataSpace;
};

std::vector<pdal::hdf5::DimInfo> m_dimInfos;
std::vector<uint8_t> m_data;
// hsize_t m_chunkSize;
hsize_t m_chunkSize;
// H5::DataSet m_dset;
// H5::DataSpace m_dspace;
std::vector<H5::DataSet> m_dsets;
Expand All @@ -135,8 +108,6 @@ class Hdf5Handler

std::unique_ptr<H5::H5File> m_h5File;
uint64_t m_numPoints = 0;

std::map<std::string, ColumnData> m_columnDataMap;
};

} // namespace pdal
Expand Down
19 changes: 1 addition & 18 deletions plugins/hdf/io/HdfReader.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -73,25 +73,18 @@ void HdfReader::ready(PointTableRef table)

point_count_t HdfReader::read(PointViewPtr view, point_count_t count)
{
//All data we read for icebridge is currently 4 bytes wide, so
// just allocate once and forget it.
//This could be a huge allocation. Perhaps we should do something
// in the icebridge handler?
std::cout << "HdfReader::read" << std::endl;
size_t point_size = m_infos.at(0).size;
PointId startId = view->size();
point_count_t remaining = m_hdf5Handler.getNumPoints() - m_index;
count = (std::min)(count, remaining);

// std::unique_ptr<unsigned char>
// rawData(new unsigned char[count * point_size]);

PointId nextId = startId;
uint8_t *buf = nullptr;
for(uint64_t pi = 0; pi < m_hdf5Handler.getNumPoints(); pi++) {
// for(auto info : m_infos) {
auto info = m_infos.at(0);
int bufIndex = pi % m_hdf5Handler.m_chunkSize;
int bufIndex = pi % m_hdf5Handler.getChunkSize();
if(bufIndex == 0) {
buf = m_hdf5Handler.getNextChunk();
}
Expand All @@ -106,10 +99,7 @@ point_count_t HdfReader::read(PointViewPtr view, point_count_t count)

// Register this reader's pipeline/stage options with PDAL's argument parser.
// Each args.add() binds an option name to the member variable that receives
// its value when the stage is configured.
void HdfReader::addArgs(ProgramArgs& args)
{
    // args.add("metadata", "Metadata file", m_metadataFile);
    args.add("dataset", "HDF dataset to open", m_datasetName);
    // "map" carries a JSON object mapping HDF5 dataset paths to PDAL
    // dimension names (stored in m_pathDimMap).
    args.add("map", "Map of HDF path to PDAL dimension", m_pathDimMap);
    args.add("name", "PDAL Dimension name of the selected dataset", m_dimName);
}

void HdfReader::initialize()
Expand All @@ -121,13 +111,6 @@ void HdfReader::initialize()
std::cout << "Key: " << key << ", Val: " << value <<std::endl;
}

std::cout << "HdfReader::initialize()" << std::endl;
if (!m_metadataFile.empty() && !FileUtils::fileExists(m_metadataFile))
{
throwError("Invalid metadata file: '" + m_metadataFile + "'");
}
// m_hdf5Handler.initialize(m_filename, m_dimName, m_datasetName);
// m_hdf5Handler.setLog(log());
// Data are WGS84 (4326) with ITRF2000 datum (6656)
// See http://nsidc.org/data/docs/daac/icebridge/ilvis2/index.html for
// background
Expand Down
3 changes: 0 additions & 3 deletions plugins/hdf/io/HdfReader.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -67,9 +67,6 @@ class PDAL_DLL HdfReader : public pdal::Reader
virtual void done(PointTableRef table);
virtual bool eof();

std::string m_metadataFile;
std::string m_datasetName;
std::string m_dimName;
NL::json m_pathDimMap;
Dimension::IdList m_idlist;
std::vector<hdf5::DimInfo> m_infos;
Expand Down

0 comments on commit c017b1f

Please sign in to comment.