diff --git a/io/LasHeader.cpp b/io/LasHeader.cpp
index 02da92f0d7..94c23ae85f 100644
--- a/io/LasHeader.cpp
+++ b/io/LasHeader.cpp
@@ -398,8 +398,11 @@ ILeStream& operator>>(ILeStream& in, LasHeader& h)
         throw LasHeader::error("Invalid point count. Number of points exceeds file size.");
     if (h.m_vlrOffset > h.m_fileSize)
         throw LasHeader::error("Invalid VLR offset - exceeds file size.");
+    // There was a bug in PDAL where it didn't write the VLR offset :(
+    /**
     if (h.m_eVlrOffset > h.m_fileSize)
         throw LasHeader::error("Invalid extended VLR offset - exceeds file size.");
+    **/

     // Read regular VLRs.
     in.seek(h.m_vlrOffset);
diff --git a/io/LasReader.cpp b/io/LasReader.cpp
index a1834e5857..851564ef36 100644
--- a/io/LasReader.cpp
+++ b/io/LasReader.cpp
@@ -246,7 +246,7 @@ void LasReader::initializeLocal(PointTableRef table, MetadataNode& m)
         throwError(err.what());
     }

-    m_p->header.initialize(log(), FileUtils::fileSize(m_filename));
+    m_p->header.initialize(log(), Utils::fileSize(m_filename));
     createStream();

     std::istream *stream(m_streamIf->m_istream);
diff --git a/kernels/InfoKernel.cpp b/kernels/InfoKernel.cpp
index 20f9b9cfe4..ac81573847 100644
--- a/kernels/InfoKernel.cpp
+++ b/kernels/InfoKernel.cpp
@@ -253,9 +253,9 @@ MetadataNode InfoKernel::run(const std::string& filename)
     root.add("reader", m_reader->getName());
     root.add("now", t.str());

-    if (pdal::FileUtils::fileExists(filename) &&
-        (!pdal::FileUtils::isDirectory(filename))) // allow for s3 uris and directories
-        root.add("file_size", pdal::FileUtils::fileSize(filename));
+    uintmax_t size = Utils::fileSize(filename);
+    if (size)
+        root.add("file_size", size);

     return root;
 }
diff --git a/pdal/PDALUtils.cpp b/pdal/PDALUtils.cpp
index 3293dfe242..fec5e679cb 100644
--- a/pdal/PDALUtils.cpp
+++ b/pdal/PDALUtils.cpp
@@ -241,6 +241,20 @@ class ArbiterInStream : public std::ifstream

 } // unnamed namespace

+uintmax_t fileSize(const std::string& path)
+{
+    uintmax_t size = 0;
+    if (isRemote(path))
+    {
+        std::unique_ptr<std::size_t> pSize = arbiter::Arbiter().tryGetSize(path);
+        if (pSize)
+            size = *pSize;
+    }
+    else
+        size = FileUtils::fileSize(path);
+    return size;
+}
+
 /** Create a file (may be on a supported remote filesystem).
diff --git a/pdal/PDALUtils.hpp b/pdal/PDALUtils.hpp
index bc4274c6bd..9e43624cfa 100644
--- a/pdal/PDALUtils.hpp
+++ b/pdal/PDALUtils.hpp
@@ -264,6 +264,7 @@ inline void writeProgress(int fd, const std::string& type,
 std::string dllDir();
 std::string PDAL_DLL toJSON(const MetadataNode& m);
 void PDAL_DLL toJSON(const MetadataNode& m, std::ostream& o);
+uintmax_t PDAL_DLL fileSize(const std::string& path);
 std::istream PDAL_DLL *openFile(const std::string& path, bool asBinary = true);
 std::ostream PDAL_DLL *createFile(const std::string& path, bool asBinary = true);
diff --git a/pdal/util/FileUtils.cpp b/pdal/util/FileUtils.cpp
index 883cb06b24..e98b838da5 100644
--- a/pdal/util/FileUtils.cpp
+++ b/pdal/util/FileUtils.cpp
@@ -255,7 +255,8 @@ bool fileExists(const std::string& name)

 uintmax_t fileSize(const std::string& file)
 {
-    return pdalboost::filesystem::file_size(toNative(file));
+    pdalboost::system::error_code ec;
+    return pdalboost::filesystem::file_size(toNative(file), ec);
 }
diff --git a/vendor/arbiter/arbiter.cpp b/vendor/arbiter/arbiter.cpp
index 5ad2affa3b..204cbfb32d 100644
--- a/vendor/arbiter/arbiter.cpp
+++ b/vendor/arbiter/arbiter.cpp
@@ -99,6 +99,27 @@ namespace

         return merge(in, config);
     }
+
+    inline bool iequals(const std::string& s, const std::string& s2)
+    {
+        if (s.length() != s2.length())
+            return false;
+        for (size_t i = 0; i < s.length(); ++i)
+            if (std::toupper(s[i]) != std::toupper(s2[i]))
+                return false;
+        return true;
+    }
+
+    inline bool findEntry(const StringMap& map, const std::string& key, std::string& val)
+    {
+        for (auto& p : map)
+            if (iequals(p.first, key))
+            {
+                val = p.second;
+                return true;
+            }
+        return false;
+    }
 }

 Arbiter::Arbiter() : Arbiter(json().dump()) { }
@@ -1366,11 +1387,9 @@ std::unique_ptr<std::size_t> Http::tryGetSize(std::string path) const

     auto http(m_pool.acquire());
     Response res(http.head(typedPath(path)));

-    if (res.ok() && res.headers().count("Content-Length"))
-    {
-        const std::string& str(res.headers().at("Content-Length"));
-        size.reset(new std::size_t(std::stoul(str)));
-    }
+    std::string val;
+    if (res.ok() && findEntry(res.headers(), "Content-Length", val))
+        size.reset(new std::size_t(std::stoul(val)));

     return size;
 }
@@ -2572,19 +2591,9 @@ std::unique_ptr<std::size_t> Google::tryGetSize(const std::string path) const

     const auto res(
             https.internalHead(resource.endpoint(), headers, altMediaQuery));

-    if (res.ok())
-    {
-        if (res.headers().count("Content-Length"))
-        {
-            const auto& s(res.headers().at("Content-Length"));
-            return makeUnique<std::size_t>(std::stoull(s));
-        }
-        else if (res.headers().count("content-length"))
-        {
-            const auto& s(res.headers().at("content-length"));
-            return makeUnique<std::size_t>(std::stoull(s));
-        }
-    }
+    std::string val;
+    if (res.ok() && findEntry(res.headers(), "Content-Length", val))
+        return makeUnique<std::size_t>(std::stoull(val));

     return std::unique_ptr<std::size_t>();
 }
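
Note (not part of the patch): HTTP header field names are case-insensitive, so a server may reply with "content-length" rather than "Content-Length"; the arbiter hunks above replace the exact-match .count()/.at() lookups with a linear, case-insensitive scan. Below is a compilable sketch of the iequals()/findEntry() helpers added in arbiter.cpp, exercised against a plain std::map stand-in for arbiter's header map; the StringMap alias and main() are illustrative, not from the patch.

// Standalone illustration of the case-insensitive header lookup.
#include <cctype>
#include <iostream>
#include <map>
#include <string>

using StringMap = std::map<std::string, std::string>;

inline bool iequals(const std::string& s, const std::string& s2)
{
    // Compare two strings ignoring ASCII case.
    if (s.length() != s2.length())
        return false;
    for (size_t i = 0; i < s.length(); ++i)
        if (std::toupper(s[i]) != std::toupper(s2[i]))
            return false;
    return true;
}

inline bool findEntry(const StringMap& map, const std::string& key, std::string& val)
{
    // Linear scan so the key matches regardless of header-name casing.
    for (auto& p : map)
        if (iequals(p.first, key))
        {
            val = p.second;
            return true;
        }
    return false;
}

int main()
{
    // An exact .count("Content-Length") lookup misses this entry;
    // the case-insensitive scan finds it.
    StringMap headers{{"content-length", "42"}};
    std::string val;
    if (findEntry(headers, "Content-Length", val))
        std::cout << "size = " << std::stoull(val) << "\n";
    return 0;
}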