Skip to content

Commit

Permalink
No longer segfaults - wip
Browse files Browse the repository at this point in the history
  • Loading branch information
Ryan Pals committed Feb 7, 2020
1 parent 1dc6ce3 commit b079046
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 15 deletions.
44 changes: 31 additions & 13 deletions plugins/hdf/io/Hdf5Handler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -63,11 +63,15 @@ using namespace hdf5;


Hdf5Handler::Hdf5Handler()
: m_numPoints(0)
: m_chunkOffset(0)
, m_numPoints(0)
, m_columnDataMap()
, m_chunkOffset(0)
{ }

// Record the PDAL logger this handler should use for diagnostic output
// (e.g. the chunk-size / point-count messages emitted by initialize()).
// NOTE(review): initialize() dereferences m_logger without a null check,
// so callers must invoke setLog() first — confirm against HdfReader::ready().
void Hdf5Handler::setLog(pdal::LogPtr log)
{
    m_logger = log;
}

DimInfo::DimInfo(
const std::string& dimName,
H5::IntType int_type)
Expand Down Expand Up @@ -121,12 +125,12 @@ void Hdf5Handler::initialize(
if(dimensionality != 1)
throw error("Only 1-dimensional arrays are supported.");
} else {
std::cout << "Dataset not chunked; proceeding to read one element at a time" << std::endl;
m_logger->get(LogLevel::Warning) << "Dataset not chunked; proceeding to read 1024 elements at a time" << std::endl;
m_chunkSize = 1024;
}
std::cout << "Chunk size: " << m_chunkSize << std::endl;
std::cout << "Num points: " << m_numPoints << std::endl;
std::cout << "Number of dataspace dimensions: " << m_dspace.getSimpleExtentNdims() << std::endl;
m_logger->get(LogLevel::Warning) << "Chunk size: " << m_chunkSize << std::endl;
m_logger->get(LogLevel::Warning) << "Num points: " << m_numPoints << std::endl;
m_logger->get(LogLevel::Warning) << "Number of dataspace dimensions: " << m_dspace.getSimpleExtentNdims() << std::endl;
H5::DataType dtype = m_dset.getDataType();
H5T_class_t vauge_type = dtype.getClass();

Expand All @@ -153,7 +157,8 @@ void Hdf5Handler::initialize(
}
// m_buf = malloc(dtype.getSize() * m_chunkSize); //TODO free
m_buf = malloc(dtype.getSize() * m_chunkSize); //TODO free
std::cout << "Chunk offset: " << m_chunkOffset << std::endl;
m_data.resize(m_chunkSize*dtype.getSize());
m_logger->get(LogLevel::Warning) << "Chunk offset: " << m_chunkOffset << std::endl;
// dspace.selectElements(H5S_SELECT_SET, m_chunkSize, &m_chunkOffset);
// dspace.selectHyperslab(H5S_SELECT_SET, &m_chunkSize, &m_chunkOffset);
// H5::DataSpace mspace( 1, &m_chunkOffset);
Expand All @@ -165,17 +170,30 @@ void Hdf5Handler::close()
m_h5File->close();
}

// void *Hdf5Handler::getNextChunk() {
// void *buf = malloc(m_dset.getDataType().getSize() * m_chunkSize);
// m_logger->get(LogLevel::Warning) << "chunk size: " << m_chunkSize << ", chunk offset: "
// << m_chunkOffset << std::endl;
// m_dspace.selectHyperslab(H5S_SELECT_SET, &m_chunkSize, &m_chunkOffset);
// m_dset.read(buf, m_dset.getDataType(), H5::DataSpace::ALL, m_dspace);
// // m_dset.read(m_buf, m_dset.getDataType());
// m_chunkOffset += m_chunkSize;
// return buf;
// }

void *Hdf5Handler::getNextChunk() {
void *buf = malloc(m_dset.getDataType().getSize() * m_chunkSize);
std::cout << "chunk size: " << m_chunkSize << ", chunk offset: "
<< m_chunkOffset << std::endl;
// m_logger->get(LogLevel::Warning) << "chunk size: " << m_chunkSize << ", chunk offset: "
// << m_chunkOffset << std::endl;
m_dspace.selectHyperslab(H5S_SELECT_SET, &m_chunkSize, &m_chunkOffset);
m_dset.read(buf, m_dset.getDataType(), H5::DataSpace::ALL, m_dspace);
// m_dset.read(m_buf, m_dset.getDataType());
m_dset.read(m_data.data(),
m_dset.getDataType(),
H5::DataSpace::ALL,
m_dspace );
m_chunkOffset += m_chunkSize;
return buf;
return m_data.data();
}


uint64_t Hdf5Handler::getNumPoints() const
{
return m_numPoints;
Expand Down
4 changes: 4 additions & 0 deletions plugins/hdf/io/Hdf5Handler.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@

#include <pdal/pdal_export.hpp> // Suppresses windows 4251 messages
#include <pdal/Dimension.hpp>
#include <pdal/Log.hpp>
#include "H5Cpp.h"

#include <memory>
Expand Down Expand Up @@ -125,6 +126,7 @@ class Hdf5Handler
const hsize_t numEntries,
const hsize_t offset) const;
hsize_t m_chunkSize;
void setLog(pdal::LogPtr log);

private:
struct ColumnData
Expand Down Expand Up @@ -155,10 +157,12 @@ class Hdf5Handler
const ColumnData& getColumnData(const std::string& dataSetName) const;
void *m_buf;
void *m_nextVal;
std::vector<uint8_t> m_data;
// hsize_t m_chunkSize;
H5::DataSet m_dset;
H5::DataSpace m_dspace;
hsize_t m_chunkOffset;
pdal::LogPtr m_logger;

std::unique_ptr<H5::H5File> m_h5File;
uint64_t m_numPoints;
Expand Down
6 changes: 4 additions & 2 deletions plugins/hdf/io/HdfReader.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -114,10 +114,12 @@ void HdfReader::addDimensions(PointLayoutPtr layout)
void HdfReader::ready(PointTableRef table)
{
std::cout << "HdfReader::ready" << std::endl;
m_hdf5Handler.setLog(log());
try
{
// m_hdf5Handler.initialize(m_filename, hdf5Columns);
// m_hdf5Handler.initialize(m_filename);
m_hdf5Handler.initialize(m_filename, m_dimName, m_datasetName);
}
catch (const Hdf5Handler::error& err)
{
Expand Down Expand Up @@ -275,8 +277,8 @@ void HdfReader::initialize()
{
throwError("Invalid metadata file: '" + m_metadataFile + "'");
}
m_hdf5Handler.initialize(m_filename, m_dimName, m_datasetName);

// m_hdf5Handler.initialize(m_filename, m_dimName, m_datasetName);
// m_hdf5Handler.setLog(log());
// Data are WGS84 (4326) with ITRF2000 datum (6656)
// See http://nsidc.org/data/docs/daac/icebridge/ilvis2/index.html for
// background
Expand Down

0 comments on commit b079046

Please sign in to comment.