From a32602986449eb9c5ba34ec619b27b5893bb3ec2 Mon Sep 17 00:00:00 2001 From: Andrew Bell Date: Tue, 14 Mar 2017 17:04:47 -0500 Subject: [PATCH] Accept full pipelines with --json argument to pdal pipeline. Close #1521 --- kernels/TranslateKernel.cpp | 60 ++++++++------- pdal/PipelineManager.cpp | 40 ++++++++++ pdal/PipelineManager.hpp | 3 + pdal/Stage.hpp | 2 +- test/unit/apps/TranslateTest.cpp | 126 ++++++++++++++++++++++++++----- 5 files changed, 181 insertions(+), 50 deletions(-) diff --git a/kernels/TranslateKernel.cpp b/kernels/TranslateKernel.cpp index 9f3fcd91b6..0820f301b9 100644 --- a/kernels/TranslateKernel.cpp +++ b/kernels/TranslateKernel.cpp @@ -40,9 +40,10 @@ #include #include #include -#include +#include #include #include +#include #include #include @@ -101,43 +102,44 @@ void TranslateKernel::makeJSONPipeline() if (json.empty()) json = m_filterJSON; + std::stringstream in(json); + m_manager.readPipeline(in); - Json::Reader jsonReader; - Json::Value filters; - jsonReader.parse(json, filters); - if (filters.type() != Json::arrayValue || filters.empty()) - throw pdal_error("JSON must be an array of filter specifications"); + std::vector roots = m_manager.roots(); + if (roots.size() > 1) + throw pdal_error("Can't process pipeline with more than one root."); - Json::Value pipeline(Json::arrayValue); - - // Add the input file, the filters (as provided) and the output file. 
- if (m_readerType.size()) + Stage *r(nullptr); + if (roots.size()) + r = dynamic_cast(roots[0]); + if (r) { - Json::Value node(Json::objectValue); - node["filename"] = m_inputFile; - node["type"] = m_readerType; - pipeline.append(node); + StageCreationOptions ops { m_inputFile, m_readerType, nullptr, + Options(), r->tag() }; + m_manager.replace(r, &m_manager.makeReader(ops)); } else - pipeline.append(Json::Value(m_inputFile)); - for (Json::ArrayIndex i = 0; i < filters.size(); ++i) - pipeline.append(filters[i]); - if (m_writerType.size()) { - Json::Value node(Json::objectValue); - node["filename"] = m_outputFile; - node["type"] = m_writerType; - pipeline.append(node); + r = &m_manager.makeReader(m_inputFile, m_readerType); + if (roots.size()) + roots[0]->setInput(*r); } - else - pipeline.append(Json::Value(m_outputFile)); - Json::Value root; - root["pipeline"] = pipeline; + std::vector leaves = m_manager.leaves(); + if (leaves.size() != 1) + throw pdal_error("Can't process pipeline with more than one " + "terminal stage."); - std::stringstream pipeline_str; - pipeline_str << root; - m_manager.readPipeline(pipeline_str); + Stage *w = dynamic_cast(leaves[0]); + if (w) + m_manager.replace(w, &m_manager.makeWriter(m_outputFile, m_writerType)); + else + { + // We know we have a leaf because we added a reader. + StageCreationOptions ops { m_outputFile, m_writerType, leaves[0], + Options(), "" }; // These last two args just keep compiler quiet. 
+ m_manager.makeWriter(ops); + } } diff --git a/pdal/PipelineManager.cpp b/pdal/PipelineManager.cpp index 39796b5eb6..c538f010a3 100644 --- a/pdal/PipelineManager.cpp +++ b/pdal/PipelineManager.cpp @@ -35,6 +35,7 @@ #include #include #include +#include #include #include "private/PipelineReaderXML.hpp" @@ -97,6 +98,9 @@ void PipelineManager::readPipeline(const std::string& filename) { Utils::closeFile(m_input); m_input = Utils::openFile(filename); + if (!m_input) + throw pdal_error("Can't open file '" + filename + "' as pipeline " + "input."); try { readPipeline(*m_input); @@ -242,6 +246,7 @@ MetadataNode PipelineManager::getMetadata() const return output; } + Stage& PipelineManager::makeReader(const std::string& inputFile, std::string driver) { @@ -418,4 +423,39 @@ Options PipelineManager::stageOptions(Stage& stage) return opts; } + +std::vector PipelineManager::roots() const +{ + std::vector rlist; + + for (Stage *s : m_stages) + if (s->getInputs().empty()) + rlist.push_back(s); + return rlist; +} + + +std::vector PipelineManager::leaves() const +{ + std::vector llist = m_stages; + for (Stage *s : m_stages) + for (Stage *ss : s->getInputs()) + Utils::remove(llist, ss); + return llist; +} + + +void PipelineManager::replace(Stage *sOld, Stage *sNew) +{ + Utils::remove(m_stages, sNew); + for (Stage * & s : m_stages) + { + if (s == sOld) + s = sNew; + for (Stage * & ss : s->getInputs()) + if (ss == sOld) + ss = sNew; + } +} + } // namespace pdal diff --git a/pdal/PipelineManager.hpp b/pdal/PipelineManager.hpp index bbd7e65e82..363d9ecb16 100644 --- a/pdal/PipelineManager.hpp +++ b/pdal/PipelineManager.hpp @@ -135,6 +135,9 @@ class PDAL_DLL PipelineManager { return m_commonOptions; } OptionsMap& stageOptions() { return m_stageOptions; } + std::vector roots() const; + std::vector leaves() const; + void replace(Stage *sOld, Stage *sNew); private: void setOptions(Stage& stage, const Options& addOps); diff --git a/pdal/Stage.hpp b/pdal/Stage.hpp index 
bbe6631664..8f0e4473a6 100644 --- a/pdal/Stage.hpp +++ b/pdal/Stage.hpp @@ -271,7 +271,7 @@ class PDAL_DLL Stage \return A vector pointers to input stages. **/ - const std::vector& getInputs() const + std::vector& getInputs() { return m_inputs; } /** diff --git a/test/unit/apps/TranslateTest.cpp b/test/unit/apps/TranslateTest.cpp index ddf925b95b..11b02cc2a4 100644 --- a/test/unit/apps/TranslateTest.cpp +++ b/test/unit/apps/TranslateTest.cpp @@ -71,6 +71,7 @@ TEST(translateTest, t1) " --json filters.stats", output), 0); } +// Tests for processing JSON input. TEST(translateTest, t2) { std::string output; @@ -78,29 +79,114 @@ TEST(translateTest, t2) std::string in = Support::datapath("las/autzen_trim.las"); std::string out = Support::temppath("out.las"); - const char *json = " \ - [ \ + std::string json = " \ + { \ + \\\"pipeline\\\" : [ \ { \\\"type\\\":\\\"filters.stats\\\" }, \ { \\\"type\\\":\\\"filters.range\\\", \ \\\"limits\\\":\\\"Z[0:100]\\\" } \ - ]"; - - EXPECT_EQ(runTranslate(in + " " + out + - " --json=\"" + json + "\"", output), 0); - EXPECT_EQ(runTranslate(in + " " + out + " -r readers.las " - " --json=\"" + json + "\"", output), 0); - EXPECT_EQ(runTranslate(in + " " + out + " -w writers.las " - " --json=\"" + json + "\"", output), 0); - EXPECT_EQ(runTranslate(in + " " + out + " -r readers.las -w writers.las " - " --json=\"" + json + "\"", output), 0); - - const char *json2 = " \ - { \\\"type\\\":\\\"filters.stats\\\" }, \ - { \\\"type\\\":\\\"filters.range\\\", \ - \\\"limits\\\":\\\"Z[0:100]\\\" }"; - - EXPECT_NE(runTranslate(in + " " + out + - " --json=\"" + json2 + "\"", output), 0); + ] \ + }"; + + // Check that we work with just a bunch of filters. + EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"", + output), 0); + + // Check that we fail with a bad input file. + EXPECT_NE(runTranslate("foo.las " + out + " --json=\"" + json + "\"", + output), 0); + + // Check that we fail with a bad output file. 
+ EXPECT_NE(runTranslate(in + " foo.blam " + " --json=\"" + json + "\"", + output), 0); + + // Check that we work with no stages. + json = " \ + { \ + \\\"pipeline\\\" : [ \ + ] \ + }"; + EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"", + output), 0); + + // Check that we work with only an input (not specified as such). + json = " \ + { \ + \\\"pipeline\\\" : [ \ + \\\"badinput.las\\\" \ + ] \ + }"; + EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"", + output), 0); + + // Check that we work with an input and an output. + json = " \ + { \ + \\\"pipeline\\\" : [ \ + \\\"badinput.las\\\", \ + \\\"badoutput.las\\\" \ + ] \ + }"; + EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"", + output), 0); + + // Check that we work with only an output. + json = " \ + { \ + \\\"pipeline\\\" : [ \ + { \ + \\\"type\\\":\\\"writers.las\\\", \ + \\\"filename\\\":\\\"badoutput.las\\\" \ + } \ + ] \ + }"; + EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"", + output), 0); + + // Check that we work with only an input. + json = " \ + { \ + \\\"pipeline\\\" : [ \ + { \ + \\\"type\\\":\\\"readers.las\\\", \ + \\\"filename\\\":\\\"badinput.las\\\" \ + } \ + ] \ + }"; + EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"", + output), 0); + + // Check that we fail with unchained multiple writers. + json = " \ + { \ + \\\"pipeline\\\" : [ \ + { \ + \\\"type\\\":\\\"writers.las\\\", \ + \\\"filename\\\":\\\"badoutput.las\\\" \ + }, \ + \\\"badoutput2.las\\\" \ + ] \ + }"; + EXPECT_NE(runTranslate(in + " " + out + " --json=\"" + json + "\"", + output), 0); + + // Check that we can handle chained writers. 
+ json = " \ + { \ + \\\"pipeline\\\" : [ \ + { \ + \\\"type\\\":\\\"writers.las\\\", \ + \\\"filename\\\":\\\"badoutput.las\\\", \ + \\\"tag\\\":\\\"mytag\\\" \ + }, \ + { \ + \\\"filename\\\":\\\"badoutput2.las\\\", \ + \\\"inputs\\\": \\\"mytag\\\" \ + } \ + ] \ + }"; + EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"", + output), 0); } TEST(translateTest, t3)