more compact, copy-and-paste friendly dump output
kali committed May 26, 2019
1 parent 5eff327 commit 61c70e6
Showing 7 changed files with 64 additions and 377 deletions.
12 changes: 0 additions & 12 deletions cli/Cargo.toml
@@ -27,10 +27,7 @@ log = "0.4"
ndarray = "0.12"
ndarray-npy = { version = "0.4", features = [ "compressed_npz" ] }
pbr = "1.0"
prettytable-rs = "0.8"
rand = "0.6.5"
terminal_size = "0.1.7"
textwrap = "0.11"
tract-core = { path = "../core" }
tract-onnx = { optional = true, path = "../onnx" }
tract-tensorflow = { optional = true, path = "../tensorflow" }
@@ -40,15 +37,6 @@ default = ["tf", "onnx"]
tf = [ "tract-tensorflow" ]
onnx = [ "tract-onnx" ]
conform = [ "tract-tensorflow/conform" ]
# blis = ["tract-core/blis"]

[dev-dependencies]
criterion = "0.2"

# [[bench]]
# name = "streaming_conv2d"
# harness = false

# [[bench]]
# name = "streaming_diamond"
# harness = false
6 changes: 2 additions & 4 deletions cli/src/compare.rs
@@ -115,8 +115,6 @@ where
TI: TensorInfo + Clone + for<'a> From<&'a Tensor>,
O: AsRef<Op> + AsMut<Op> + Display + Debug + Clone,
{
use crate::format::Row;

let eval_order = ::tract_core::model::eval_order(&tract)?;

// Execute the model step-by-step on tract.
@@ -200,7 +198,7 @@ where
(Green, "ok".into())
};

Row::Double(color.paint(format!("Output {}", n)).to_string(), reason.to_string())
color.paint(format!("Output {}: {}", n, reason)).to_string()
})
.collect::<Vec<_>>();
let inputs = tract.nodes()[n]
@@ -209,7 +207,7 @@
.enumerate()
.map(|(ix, o)| {
let tensor = &state.values[o.node].as_ref().unwrap()[o.slot];
Row::Double(format!("Input #{}", ix), format!("{:?}", tensor))
format!("Input #{}: {:?}", ix, tensor)
})
.collect::<Vec<_>>();
display_graph.add_node_section(n, inputs)?;
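With format::Row gone, call sites like compare.rs above hand add_node_section a plain Vec<String>, with any color baked into the string via ansi_term. A minimal, self-contained sketch of that shape (the values and the final print loop are illustrative, not from the commit):

use ansi_term::Colour::Green;

fn main() {
    // A "section" is now just lines of text; color codes live inside the strings.
    let outputs: Vec<String> = (0..2)
        .map(|n| Green.paint(format!("Output {}: ok", n)).to_string())
        .collect();
    let inputs: Vec<String> = (0..2)
        .map(|ix| format!("Input #{}: 1x3x224x224xF32", ix))
        .collect();

    // In the CLI these would go through display_graph.add_node_section(n, ...);
    // here we just print them the way the new render_node does.
    for section in &[inputs, outputs] {
        println!("  * {}", section[0]);
        for s in &section[1..] {
            println!("    {}", s);
        }
    }
}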
3 changes: 1 addition & 2 deletions cli/src/cost.rs
@@ -1,6 +1,5 @@
use crate::display_graph::*;
use crate::errors::*;
use crate::format::Row;
use crate::{Parameters, SomeModel};
use tract_core::internal::*;

@@ -29,7 +28,7 @@ fn handle_t(
let rows = cost
.iter()
.inspect(|(c, i)| *total.entry(*c).or_insert(0.to_dim()) += *i)
.map(|(c, i)| Row::Double(format!("{:?}", c), format!("{:?}", i)))
.map(|(c, i)| format!("{:?} {:?}", c, i))
.collect();
display_graph.add_node_section(i, rows)?;
}
123 changes: 54 additions & 69 deletions cli/src/display_graph.rs
@@ -1,4 +1,3 @@
use crate::format::Row;
use crate::CliResult;
use crate::SomeGraphDef;
use ansi_term::Color::*;
@@ -7,7 +6,7 @@ use std::borrow::Borrow;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::fmt::{Debug, Display};
use tract_core::prelude::{Model, BaseNode, Op, Tensor, TensorInfo};
use tract_core::internal::*;
#[cfg(feature = "onnx")]
use tract_onnx::pb::ModelProto;
#[cfg(feature = "tf")]
@@ -56,8 +55,8 @@
model: M,
pub options: DisplayOptions,
node_labels: HashMap<usize, Vec<String>>,
node_sections: HashMap<usize, Vec<Vec<Row>>>,
_bloody_baron: ::std::marker::PhantomData<(TI,O)>,
node_sections: HashMap<usize, Vec<Vec<String>>>,
_bloody_baron: ::std::marker::PhantomData<(TI, O)>,
}

impl<TI, O, M> DisplayGraph<TI, O, M>
@@ -81,77 +80,60 @@ where
Ok(())
}

pub fn render_node(&self, node: &BaseNode<TI,O>) -> CliResult<()> {
let bold = Style::new().bold();
let mut sections: Vec<Vec<Row>> = vec![];
pub fn render_node(&self, node: &BaseNode<TI, O>) -> CliResult<()> {
println!(
"{} {} {}",
White.bold().paint(format!("{}", node.id)),
if node.op_is::<tract_core::ops::unimpl::UnimplementedOp>() {
Red.bold().paint(format!("{}", node.op().name()))
} else {
Blue.bold().paint(format!("{}", node.op().name()))
},
White.italic().paint(&node.name)
);
if let Some(id) =
self.model.borrow().input_outlets()?.iter().position(|n| n.node == node.id)
{
sections.push(vec![Row::Simple(
Yellow.bold().paint(format!("MODEL INPUT {}", id)).to_string(),
)]);
println!("{}", Yellow.bold().paint(format!("MODEL INPUT #{}", id)));
}
for (ix, i) in node.inputs.iter().enumerate() {
let star = if ix == 0 { '*' } else { ' ' };
println!(" {} input #{}: {:?} {:?}", star, ix, i, self.model.borrow().outlet_fact(*i)?);
}
for (ix, o) in node.outputs.iter().enumerate() {
let star = if ix == 0 { '*' } else { ' ' };
println!(
" {} output #{}: {:?} {}",
star,
format!("{:?}", ix),
o.fact,
if let Some(id) = self
.model
.borrow()
.output_outlets()?
.iter()
.position(|n| n.node == node.id && n.slot == ix)
{
Yellow.bold().paint(format!("MODEL OUTPUT #{}", id)).to_string()
} else {
"".to_string()
}
);
}
sections.push(
node.inputs
.iter()
.enumerate()
.map(|(ix, a)| {
Ok(Row::Double(
format!(
"Input {}: Node #{}/{}",
bold.paint(format!("{}", ix)),
bold.paint(format!("{}", a.node)),
bold.paint(format!("{}", a.slot)),
),
format!("{:?}", self.model.borrow().outlet_fact(*a)?),
))
})
.collect::<CliResult<_>>()?,
);
sections.push(
node.outputs
.iter()
.enumerate()
.map(|(ix, outlet)| {
if let Some(pos) = self
.model
.borrow()
.output_outlets()
.unwrap()
.iter()
.position(|&o| o == ::tract_core::model::OutletId::new(node.id, ix))
{
Row::Double(
format!("Output {}:", bold.paint(ix.to_string())),
format!("{:?} {} #{}", outlet.fact, bold.paint("Model output"), pos),
)
} else {
Row::Double(
format!("Output {}:", bold.paint(ix.to_string())),
format!("{:?}", outlet.fact),
)
}
})
.collect(),
);
if let Some(info) = node.op().info()? {
sections.push(vec![Row::Simple(info)])
println!(" * {}", info);
}
if self.options.debug_op {
sections.push(vec![Row::Simple(format!("{:?}", node.op))]);
println!(" * {:?}", node.op());
}
if let Some(node_sections) = self.node_sections.get(&node.id) {
for s in node_sections {
sections.push(s.clone());
for section in node_sections {
println!(" * {}", section[0]);
for s in &section[1..] {
println!(" {}", s);
}
}
}
crate::format::print_box(
&node.id.to_string(),
&node.op.as_ref().name(),
&node.name,
self.node_labels.get(&node.id).map(|v| v.as_slice()).unwrap_or(&[]),
sections,
);
Ok(())
}
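
Taken together, the println!-based render_node above produces a compact, copy-and-paste friendly block per node instead of a drawn table. Roughly, for a hypothetical node (ids, names, and the Debug renderings of outlets and facts are illustrative; the real ones come from tract-core):

3 Conv conv_1
  * input #0: OutletId { node: 2, slot: 0 } 1x3x224x224xF32
    input #1: OutletId { node: 1, slot: 0 } 64x3x7x7xF32
  * output #0: 1x64x112x112xF32 MODEL OUTPUT #0
  * Kernel shape: [7, 7]

Sections registered through add_node_section (attributes, costs, comparison results) show up as further "  * ..." lines in the same block.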

@@ -170,11 +152,11 @@ where

pub fn with_graph_def(self, graph_def: &SomeGraphDef) -> CliResult<DisplayGraph<TI, O, M>> {
match graph_def {
SomeGraphDef::NoGraphDef => Ok(self),
#[cfg(feature = "tf")]
SomeGraphDef::Tf(tf) => self.with_tf_graph_def(tf),
#[cfg(feature = "onnx")]
SomeGraphDef::Onnx(onnx) => self.with_onnx_model(onnx),
SomeGraphDef::_NoGraph => unreachable!(),
}
}

@@ -183,7 +165,7 @@
Ok(())
}

pub fn add_node_section(&mut self, id: usize, section: Vec<Row>) -> CliResult<()> {
pub fn add_node_section(&mut self, id: usize, section: Vec<String>) -> CliResult<()> {
self.node_sections.entry(id).or_insert(vec![]).push(section);
Ok(())
}
@@ -200,7 +182,7 @@
} else {
format!("{:?}", a.1)
};
v.push(Row::Double(format!("Attr {}:", bold.paint(a.0)), value));
v.push(format!("Attr {}: {}", bold.paint(a.0), value));
}
self.add_node_section(node_id, v)?;
}
@@ -209,7 +191,7 @@
}

#[cfg(feature = "onnx")]
pub fn with_onnx_model(mut self, model_proto: &ModelProto) -> CliResult<DisplayGraph<TI, O, M>> {
pub fn with_onnx_model(
mut self,
model_proto: &ModelProto,
) -> CliResult<DisplayGraph<TI, O, M>> {
let bold = Style::new().bold();
for gnode in model_proto.get_graph().get_node().iter() {
let mut node_name = gnode.get_name();
@@ -224,7 +209,7 @@
} else {
format!("{:?}", a)
};
v.push(Row::Double(format!("Attr {}:", bold.paint(a.get_name())), value));
v.push(format!("Attr {}: {}", bold.paint(a.get_name()), value));
}
self.add_node_section(id, v)?;
}
(diffs for the remaining 3 changed files not shown)
