Accept full pipelines with --json argument to pdal pipeline.
Close #1521
abellgithub committed Mar 14, 2017
1 parent 7ee2489 commit a326029
Showing 5 changed files with 181 additions and 50 deletions.
60 changes: 31 additions & 29 deletions kernels/TranslateKernel.cpp
@@ -40,9 +40,10 @@
#include <pdal/PipelineWriter.hpp>
#include <pdal/PointTable.hpp>
#include <pdal/PointView.hpp>
#include <pdal/Stage.hpp>
#include <pdal/Reader.hpp>
#include <pdal/StageFactory.hpp>
#include <pdal/PipelineReaderJSON.hpp>
#include <pdal/Writer.hpp>
#include <pdal/util/FileUtils.hpp>
#include <json/json.h>

@@ -101,43 +102,44 @@ void TranslateKernel::makeJSONPipeline()

if (json.empty())
json = m_filterJSON;
std::stringstream in(json);
m_manager.readPipeline(in);

Json::Reader jsonReader;
Json::Value filters;
jsonReader.parse(json, filters);
if (filters.type() != Json::arrayValue || filters.empty())
throw pdal_error("JSON must be an array of filter specifications");
std::vector<Stage *> roots = m_manager.roots();
if (roots.size() > 1)
throw pdal_error("Can't process pipeline with more than one root.");

Json::Value pipeline(Json::arrayValue);

// Add the input file, the filters (as provided) and the output file.
if (m_readerType.size())
Stage *r(nullptr);
if (roots.size())
r = dynamic_cast<Reader *>(roots[0]);
if (r)
{
Json::Value node(Json::objectValue);
node["filename"] = m_inputFile;
node["type"] = m_readerType;
pipeline.append(node);
StageCreationOptions ops { m_inputFile, m_readerType, nullptr,
Options(), r->tag() };
m_manager.replace(r, &m_manager.makeReader(ops));
}
else
pipeline.append(Json::Value(m_inputFile));
for (Json::ArrayIndex i = 0; i < filters.size(); ++i)
pipeline.append(filters[i]);
if (m_writerType.size())
{
Json::Value node(Json::objectValue);
node["filename"] = m_outputFile;
node["type"] = m_writerType;
pipeline.append(node);
r = &m_manager.makeReader(m_inputFile, m_readerType);
if (roots.size())
roots[0]->setInput(*r);
}
else
pipeline.append(Json::Value(m_outputFile));

Json::Value root;
root["pipeline"] = pipeline;
std::vector<Stage *> leaves = m_manager.leaves();
if (leaves.size() != 1)
throw pdal_error("Can't process pipeline with more than one "
"terminal stage.");

std::stringstream pipeline_str;
pipeline_str << root;
m_manager.readPipeline(pipeline_str);
Stage *w = dynamic_cast<Writer *>(leaves[0]);
if (w)
m_manager.replace(w, &m_manager.makeWriter(m_outputFile, m_writerType));
else
{
// We know we have a leaf because we added a reader.
StageCreationOptions ops { m_outputFile, m_writerType, leaves[0],
Options(), "" }; // These last two args just keep compiler quiet.
m_manager.makeWriter(ops);
}
}
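
For reference, a minimal sketch of the two --json payload shapes involved here: the bare array of filter specifications that the previous code required, and the full pipeline object that this commit adds support for (and that the tests below exercise). The filenames and filter parameters are illustrative only; when the supplied pipeline names its own reader or writer, the code above replaces those stages with ones built from the translate command line's input and output files.

// Illustrative --json payloads (not part of the commit).
// Bare filter array -- the only form the previous code accepted:
const char *filtersOnly = R"(
    [
        { "type": "filters.stats" },
        { "type": "filters.range", "limits": "Z[0:100]" }
    ])";

// Full pipeline object -- the form this commit adds; its reader and
// writer endpoints are swapped for the command-line input/output files:
const char *fullPipeline = R"(
    {
        "pipeline": [
            "input.las",
            { "type": "filters.range", "limits": "Z[0:100]" },
            "output.las"
        ]
    })";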


40 changes: 40 additions & 0 deletions pdal/PipelineManager.cpp
@@ -35,6 +35,7 @@
#include <pdal/PipelineManager.hpp>
#include <pdal/PipelineReaderJSON.hpp>
#include <pdal/PDALUtils.hpp>
#include <pdal/util/Algorithm.hpp>
#include <pdal/util/FileUtils.hpp>

#include "private/PipelineReaderXML.hpp"
@@ -97,6 +98,9 @@ void PipelineManager::readPipeline(const std::string& filename)
{
Utils::closeFile(m_input);
m_input = Utils::openFile(filename);
if (!m_input)
throw pdal_error("Can't open file '" + filename + "' as pipeline "
"input.");
try
{
readPipeline(*m_input);
@@ -242,6 +246,7 @@ MetadataNode PipelineManager::getMetadata() const
return output;
}


Stage& PipelineManager::makeReader(const std::string& inputFile,
std::string driver)
{
@@ -418,4 +423,39 @@ Options PipelineManager::stageOptions(Stage& stage)
return opts;
}


std::vector<Stage *> PipelineManager::roots() const
{
std::vector<Stage *> rlist;

for (Stage *s : m_stages)
if (s->getInputs().empty())
rlist.push_back(s);
return rlist;
}


std::vector<Stage *> PipelineManager::leaves() const
{
std::vector<Stage *> llist = m_stages;
for (Stage *s : m_stages)
for (Stage *ss : s->getInputs())
Utils::remove(llist, ss);
return llist;
}


void PipelineManager::replace(Stage *sOld, Stage *sNew)
{
Utils::remove(m_stages, sNew);
for (Stage * & s : m_stages)
{
if (s == sOld)
s = sNew;
for (Stage * & ss : s->getInputs())
if (ss == sOld)
ss = sNew;
}
}

} // namespace pdal
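
As a usage sketch (not part of the commit), the new roots(), leaves(), and replace() helpers support the endpoint rewriting done in TranslateKernel above. The filenames and drivers below are assumptions for illustration; the manager is assumed to have already been populated via readPipeline().

// Minimal sketch: retarget a pipeline's endpoints with the new helpers.
#include <pdal/PipelineManager.hpp>
#include <pdal/Reader.hpp>
#include <pdal/Writer.hpp>

using namespace pdal;

void retarget(PipelineManager& mgr)
{
    std::vector<Stage *> roots = mgr.roots();    // stages with no inputs
    std::vector<Stage *> leaves = mgr.leaves();  // stages nothing consumes

    // If the single root is a reader, point the pipeline at a different file.
    if (roots.size() == 1 && dynamic_cast<Reader *>(roots[0]))
        mgr.replace(roots[0], &mgr.makeReader("in.las", "readers.las"));

    // If the single leaf is a writer, redirect its output.
    if (leaves.size() == 1 && dynamic_cast<Writer *>(leaves[0]))
        mgr.replace(leaves[0], &mgr.makeWriter("out.las", "writers.las"));
}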
3 changes: 3 additions & 0 deletions pdal/PipelineManager.hpp
@@ -135,6 +135,9 @@ class PDAL_DLL PipelineManager
{ return m_commonOptions; }
OptionsMap& stageOptions()
{ return m_stageOptions; }
std::vector<Stage *> roots() const;
std::vector<Stage *> leaves() const;
void replace(Stage *sOld, Stage *sNew);

private:
void setOptions(Stage& stage, const Options& addOps);
2 changes: 1 addition & 1 deletion pdal/Stage.hpp
@@ -271,7 +271,7 @@ class PDAL_DLL Stage
\return A vector of pointers to input stages.
**/
const std::vector<Stage*>& getInputs() const
std::vector<Stage*>& getInputs()
{ return m_inputs; }

/**
126 changes: 106 additions & 20 deletions test/unit/apps/TranslateTest.cpp
@@ -71,36 +71,122 @@ TEST(translateTest, t1)
" --json filters.stats", output), 0);
}

// Tests for processing JSON input.
TEST(translateTest, t2)
{
std::string output;

std::string in = Support::datapath("las/autzen_trim.las");
std::string out = Support::temppath("out.las");

const char *json = " \
[ \
std::string json = " \
{ \
\\\"pipeline\\\" : [ \
{ \\\"type\\\":\\\"filters.stats\\\" }, \
{ \\\"type\\\":\\\"filters.range\\\", \
\\\"limits\\\":\\\"Z[0:100]\\\" } \
]";

EXPECT_EQ(runTranslate(in + " " + out +
" --json=\"" + json + "\"", output), 0);
EXPECT_EQ(runTranslate(in + " " + out + " -r readers.las "
" --json=\"" + json + "\"", output), 0);
EXPECT_EQ(runTranslate(in + " " + out + " -w writers.las "
" --json=\"" + json + "\"", output), 0);
EXPECT_EQ(runTranslate(in + " " + out + " -r readers.las -w writers.las "
" --json=\"" + json + "\"", output), 0);

const char *json2 = " \
{ \\\"type\\\":\\\"filters.stats\\\" }, \
{ \\\"type\\\":\\\"filters.range\\\", \
\\\"limits\\\":\\\"Z[0:100]\\\" }";

EXPECT_NE(runTranslate(in + " " + out +
" --json=\"" + json2 + "\"", output), 0);
] \
}";

// Check that we work with just a bunch of filters.
EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"",
output), 0);

// Check that we fail with a bad input file.
EXPECT_NE(runTranslate("foo.las " + out + " --json=\"" + json + "\"",
output), 0);

// Check that we fail with a bad output file.
EXPECT_NE(runTranslate(in + " foo.blam " + " --json=\"" + json + "\"",
output), 0);

// Check that we work with no stages.
json = " \
{ \
\\\"pipeline\\\" : [ \
] \
}";
EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"",
output), 0);

// Check that we work with only an input (not specified as such).
json = " \
{ \
\\\"pipeline\\\" : [ \
\\\"badinput.las\\\" \
] \
}";
EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"",
output), 0);

// Check that we work with an input and an output.
json = " \
{ \
\\\"pipeline\\\" : [ \
\\\"badinput.las\\\", \
\\\"badoutput.las\\\" \
] \
}";
EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"",
output), 0);

// Check that we work with only an output.
json = " \
{ \
\\\"pipeline\\\" : [ \
{ \
\\\"type\\\":\\\"writers.las\\\", \
\\\"filename\\\":\\\"badoutput.las\\\" \
} \
] \
}";
EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"",
output), 0);

// Check that we work with only an input.
json = " \
{ \
\\\"pipeline\\\" : [ \
{ \
\\\"type\\\":\\\"readers.las\\\", \
\\\"filename\\\":\\\"badinput.las\\\" \
} \
] \
}";
EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"",
output), 0);

// Check that we fail with unchained multiple writers.
json = " \
{ \
\\\"pipeline\\\" : [ \
{ \
\\\"type\\\":\\\"writers.las\\\", \
\\\"filename\\\":\\\"badoutput.las\\\" \
}, \
\\\"badoutput2.las\\\" \
] \
}";
EXPECT_NE(runTranslate(in + " " + out + " --json=\"" + json + "\"",
output), 0);

// Check that we can handle chained writers.
json = " \
{ \
\\\"pipeline\\\" : [ \
{ \
\\\"type\\\":\\\"writers.las\\\", \
\\\"filename\\\":\\\"badoutput.las\\\", \
\\\"tag\\\":\\\"mytag\\\" \
}, \
{ \
\\\"filename\\\":\\\"badoutput2.las\\\", \
\\\"inputs\\\": \\\"mytag\\\" \
} \
] \
}";
EXPECT_EQ(runTranslate(in + " " + out + " --json=\"" + json + "\"",
output), 0);
}

TEST(translateTest, t3)
