Skip to content

Commit

Permalink
Merge pull request #31 from ekmartin/travis_fmt
Browse files — browse the repository at this point in the history
Run rustfmt on Travis
  • Loading branch information
jonhoo committed Oct 3, 2017
2 parents 3f909f4 + e2b5649 commit 1ed4a74
Show file tree
Hide file tree
Showing 9 changed files with 40 additions and 19 deletions.
6 changes: 6 additions & 0 deletions .travis.yml
Expand Up @@ -5,3 +5,9 @@ rust:
cache: cargo
env:
- SETTLE_TIME=2000
before_script:
- export PATH="$PATH:$HOME/.cargo/bin"
- cargo install --git https://github.com/rust-lang-nursery/rustfmt.git --rev 6e41100725267974fa6dcc61134d4377b676ad01 --force
script:
- cargo fmt -- --write-mode=diff
- cargo test --verbose
3 changes: 2 additions & 1 deletion benchmarks/tpc_w/parameters.rs
Expand Up @@ -102,7 +102,8 @@ impl SampleKeys {
dedup(self.order_line.iter().map(|x| x[0].clone()).collect())
}
"createEmptyCart" => 0,
"addItem" => dedup(self.item.iter().map(|x| x[0].clone()).collect()), // XXX(malte): dual parameter query, need SCL ID range
// XXX(malte): dual parameter query, need SCL ID range:
"addItem" => dedup(self.item.iter().map(|x| x[0].clone()).collect()),
"addRandomItemToCartIfNecessary" => {
dedup(self.shopping_cart.iter().map(|x| x[0].clone()).collect())
}
Expand Down
8 changes: 7 additions & 1 deletion src/flow/domain/mod.rs
Expand Up @@ -1501,7 +1501,13 @@ impl Domain {
transaction_state,
} => {
if let ReplayPieceContext::Partial { ref for_keys, .. } = context {
trace!(self.log, "replaying batch"; "#" => data.len(), "tag" => tag.id(), "keys" => ?for_keys);
trace!(
self.log,
"replaying batch";
"#" => data.len(),
"tag" => tag.id(),
"keys" => ?for_keys,
);
} else {
debug!(self.log, "replaying batch"; "#" => data.len());
}
Expand Down
7 changes: 4 additions & 3 deletions src/flow/keys.rs
Expand Up @@ -79,9 +79,10 @@ where

// is it a generated column?
if resolved.len() == 1 && resolved[0].0 == node {
assert!(resolved[0].1.is_none()); // how could this be Some?
// path terminates here, and has no connection to ancestors
// so, we depend on *all* our *full* parents
// how could this be Some?
// path terminates here, and has no connection to ancestors
// so, we depend on *all* our *full* parents
assert!(resolved[0].1.is_none());
let mut paths = Vec::with_capacity(parents.len());
for p in parents {
let mut path = path.clone();
Expand Down
7 changes: 4 additions & 3 deletions src/flow/migrate/materialization/plan.rs
Expand Up @@ -262,9 +262,10 @@ impl<'a> Plan<'a> {
}

if i != segments.len() - 1 {
// since there is a later domain, the last node of any non-final domain must either
// be an egress or a Sharder. If it's an egress, we need to tell it about this
// replay path so that it knows what path to forward replay packets on.
// since there is a later domain, the last node of any non-final domain
// must either be an egress or a Sharder. If it's an egress, we need
// to tell it about this replay path so that it knows
// what path to forward replay packets on.
let n = &self.graph[nodes.last().unwrap().0];
if n.is_egress() {
self.domains
Expand Down
8 changes: 5 additions & 3 deletions src/lib.rs
Expand Up @@ -139,8 +139,8 @@
//!
//! // add final join using first field from article and first from vc.
//! // joins are trickier because you need to specify what to join on. the vec![1, 0] here
//! // signifies that the first field of article and vc should be equal, and the second field can
//! // be whatever.
//! // signifies that the first field of article and vc should be equal,
//! // and the second field can be whatever.
//! use distributary::JoinSource::*;
//! let j = Join::new(article, vc, JoinType::Inner, vec![B(0, 0), L(1), R(1)]);
//! let awvc = mig.add_ingredient("end", &["id", "title", "votes"], j);
Expand Down Expand Up @@ -172,7 +172,9 @@
//! ```rust
//! # use distributary::{Blender, Base};
//! # let mut g = Blender::new();
//! # let article = g.migrate(|mig| mig.add_ingredient("article", &["id", "title"], Base::default()));
//! # let article = g.migrate(|mig|
//! # mig.add_ingredient("article", &["id", "title"], Base::default())
//! # );
//! let mut muta = g.get_mutator(article);
//! muta.put(vec![1.into(), "Hello world".into()]);
//! ```
Expand Down
3 changes: 2 additions & 1 deletion src/sql/mir.rs
Expand Up @@ -870,7 +870,8 @@ impl SqlToMirConverter {
}
}
ComparisonOp(ref ct) => {
// currently, we only support filter-like comparison operations, no nested-selections
// currently, we only support filter-like
// comparison operations, no nested-selections
let f = self.make_filter_node(&format!("{}_f{}", name, nc), parent, ct);

pred_nodes.push(f);
Expand Down
9 changes: 5 additions & 4 deletions src/sql/reuse/finkelstein.rs
Expand Up @@ -101,8 +101,8 @@ impl Finkelstein {
for (name, ex_qgn) in &existing_qg.relations {
let new_qgn = &new_qg.relations[name];

// iterate over predicates and ensure that each matching one on the existing QG is implied
// by the new one
// iterate over predicates and ensure that each matching
// one on the existing QG is implied by the new one
for ep in &ex_qgn.predicates {
let mut matched = false;

Expand Down Expand Up @@ -180,8 +180,9 @@ impl Finkelstein {
// return Some(ReuseType::DirectExtension);
// } else {
// if name == "computed_columns" {
// // NQG has some extra columns, and they're computed ones (i.e., grouped/function
// // columns). We can recompute those, but not via a backjoin.
// // NQG has some extra columns, and they're computed ones
// // (i.e., grouped/function columns).
// // We can recompute those, but not via a backjoin.
// // TODO(malte): be cleverer about this situation
// return None;
// }
Expand Down
8 changes: 5 additions & 3 deletions src/sql/reuse/relaxed.rs
Expand Up @@ -11,7 +11,9 @@ use std::collections::HashMap;
/// While Finkelstein checks if queries are compatible for direct extension,
/// this algorithm considers the possibility of reuse of internal views.
/// For example, given the queries:
/// 1) select * from Paper, PaperReview where Paper.paperId = PaperReview.paperId and PaperReview.reviewType = 1;
/// 1) select * from Paper, PaperReview
/// where Paper.paperId = PaperReview.paperId
/// and PaperReview.reviewType = 1;
/// 2) select * from Paper, PaperReview where Paper.paperId = PaperReview.paperId;
///
/// Finkelstein reuse would be conditional on the order the queries are added,
Expand Down Expand Up @@ -142,8 +144,8 @@ impl Relaxed {
}
let new_qgn = &new_qg.relations[name];

// iterate over predicates and ensure that each matching one on the existing QG is implied
// by the new one
// iterate over predicates and ensure that each
// matching one on the existing QG is implied by the new one
for ep in &ex_qgn.predicates {
let mut matched = false;

Expand Down

0 comments on commit 1ed4a74

Please sign in to comment.