Commit

Fix hygiene regression in patterns

petrochenkov committed Nov 11, 2015
1 parent b8eaa16 commit fba1926
Showing 7 changed files with 65 additions and 14 deletions.
5 changes: 3 additions & 2 deletions src/librustc/middle/pat_util.rs
@@ -14,6 +14,7 @@ use middle::ty;
use util::nodemap::FnvHashMap;

use syntax::ast;
+use syntax::ext::mtwt;
use rustc_front::hir;
use rustc_front::util::walk_pat;
use syntax::codemap::{respan, Span, Spanned, DUMMY_SP};
@@ -26,8 +27,8 @@ pub type PatIdMap = FnvHashMap<ast::Name, ast::NodeId>;
// use the NodeId of their namesake in the first pattern.
pub fn pat_id_map(dm: &RefCell<DefMap>, pat: &hir::Pat) -> PatIdMap {
let mut map = FnvHashMap();
-pat_bindings(dm, pat, |_bm, p_id, _s, path1| {
-map.insert(path1.node, p_id);
+pat_bindings_hygienic(dm, pat, |_bm, p_id, _s, path1| {
+map.insert(mtwt::resolve(path1.node), p_id);
});
map
}
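
The hunk above is the core of the fix: the binding map is now keyed by the MTWT-resolved name of each pattern binding rather than by its raw textual name, so two bindings that are spelled the same but come from different macro expansions no longer collide. A minimal standalone sketch of the idea (a toy model, not rustc's actual mtwt API or data structures):

use std::collections::HashMap;

// Toy stand-in for an identifier: its text plus the syntax context of the
// macro expansion that introduced it.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Ident {
    name: &'static str,
    ctxt: u32,
}

// Toy stand-in for mtwt::resolve: hygiene resolution folds the syntax
// context into the key, so an `x` from two different expansions resolves
// to two different names.
fn resolve(id: Ident) -> (&'static str, u32) {
    (id.name, id.ctxt)
}

fn main() {
    let x1 = Ident { name: "x", ctxt: 1 }; // `x` from one expansion step
    let x2 = Ident { name: "x", ctxt: 2 }; // `x` from another expansion step

    // Keyed by the raw name, the second binding clobbers the first
    // (the shape of the regression):
    let mut by_name: HashMap<&'static str, u32> = HashMap::new();
    by_name.insert(x1.name, 10);
    by_name.insert(x2.name, 20);
    assert_eq!(by_name.len(), 1);

    // Keyed by the resolved name, the two bindings stay distinct
    // (what keying on mtwt::resolve restores):
    let mut by_resolved: HashMap<(&'static str, u32), u32> = HashMap::new();
    by_resolved.insert(resolve(x1), 10);
    by_resolved.insert(resolve(x2), 20);
    assert_eq!(by_resolved.len(), 2);
}
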
3 changes: 2 additions & 1 deletion src/librustc_driver/driver.rs
@@ -678,8 +678,9 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
|| resolve::resolve_crate(sess, &ast_map, make_glob_map));

// Discard MTWT tables that aren't required past resolution.
+// FIXME: get rid of uses of MTWT tables in typeck, mir and trans and clear them
if !sess.opts.debugging_opts.keep_mtwt_tables {
-syntax::ext::mtwt::clear_tables();
+// syntax::ext::mtwt::clear_tables();
}

let named_region_map = time(time_passes,
5 changes: 3 additions & 2 deletions src/librustc_mir/hair/cx/expr.rs
@@ -22,6 +22,7 @@ use rustc::middle::pat_util;
use rustc::middle::ty::{self, VariantDef, Ty};
use rustc_front::hir;
use rustc_front::util as hir_util;
+use syntax::ext::mtwt;
use syntax::parse::token;
use syntax::ptr::P;

@@ -490,8 +491,8 @@ fn convert_arm<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, arm: &'tcx hir::Arm) -> Arm<
None
} else {
map = FnvHashMap();
-pat_util::pat_bindings(&cx.tcx.def_map, &arm.pats[0], |_, p_id, _, path| {
-map.insert(path.node, p_id);
+pat_util::pat_bindings_hygienic(&cx.tcx.def_map, &arm.pats[0], |_, p_id, _, path| {
+map.insert(mtwt::resolve(path.node), p_id);
});
Some(&map)
};
3 changes: 2 additions & 1 deletion src/librustc_mir/hair/cx/pattern.rs
@@ -19,6 +19,7 @@ use rustc::middle::subst::Substs;
use rustc::middle::ty::{self, Ty};
use rustc_front::hir;
use syntax::ast;
+use syntax::ext::mtwt;
use syntax::ptr::P;

/// When there are multiple patterns in a single arm, each one has its
@@ -161,7 +162,7 @@ impl<'patcx, 'cx, 'tcx> PatCx<'patcx, 'cx, 'tcx> {
{
let id = match self.binding_map {
None => pat.id,
-Some(ref map) => map[&ident.node.name],
+Some(ref map) => map[&mtwt::resolve(ident.node)],
};
let var_ty = self.cx.tcx.node_id_to_type(pat.id);
let region = match var_ty.sty {
15 changes: 8 additions & 7 deletions src/librustc_trans/trans/_match.rs
@@ -228,6 +228,7 @@ use std::fmt;
use std::rc::Rc;
use rustc_front::hir;
use syntax::ast::{self, DUMMY_NODE_ID, NodeId};
+use syntax::ext::mtwt;
use syntax::codemap::Span;
use rustc_front::fold::Folder;
use syntax::ptr::P;
@@ -477,7 +478,7 @@ fn expand_nested_bindings<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
loop {
pat = match pat.node {
hir::PatIdent(_, ref path, Some(ref inner)) => {
-bound_ptrs.push((path.node.name, val.val));
+bound_ptrs.push((mtwt::resolve(path.node), val.val));
&**inner
},
_ => break
@@ -518,15 +519,15 @@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
match this.node {
hir::PatIdent(_, ref path, None) => {
if pat_is_binding(&dm.borrow(), &*this) {
-bound_ptrs.push((path.node.name, val.val));
+bound_ptrs.push((mtwt::resolve(path.node), val.val));
}
}
hir::PatVec(ref before, Some(ref slice), ref after) => {
if let hir::PatIdent(_, ref path, None) = slice.node {
let subslice_val = bind_subslice_pat(
bcx, this.id, val,
before.len(), after.len());
-bound_ptrs.push((path.node.name, subslice_val));
+bound_ptrs.push((mtwt::resolve(path.node), subslice_val));
}
}
_ => {}
@@ -1127,8 +1128,8 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
None => {
let data = &m[0].data;
-for &(ref ident, ref value_ptr) in &m[0].bound_ptrs {
-let binfo = *data.bindings_map.get(ident).unwrap();
+for &(ref name, ref value_ptr) in &m[0].bound_ptrs {
+let binfo = *data.bindings_map.get(name).unwrap();
call_lifetime_start(bcx, binfo.llmatch);
if binfo.trmode == TrByRef && type_is_fat_ptr(bcx.tcx(), binfo.ty) {
expr::copy_fat_ptr(bcx, *value_ptr, binfo.llmatch);
@@ -1526,8 +1527,8 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &hir::Pat,
let tcx = bcx.tcx();
let reassigned = is_discr_reassigned(bcx, discr, body);
let mut bindings_map = FnvHashMap();
-pat_bindings(&tcx.def_map, &*pat, |bm, p_id, span, path1| {
-let name = path1.node;
+pat_bindings_hygienic(&tcx.def_map, &*pat, |bm, p_id, span, path1| {
+let name = mtwt::resolve(path1.node);
let variable_ty = node_id_type(bcx, p_id);
let llvariable_ty = type_of::type_of(ccx, variable_ty);
let tcx = bcx.tcx();
3 changes: 2 additions & 1 deletion src/librustc_typeck/check/_match.rs
@@ -25,6 +25,7 @@ use util::nodemap::FnvHashMap;
use std::cmp;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use syntax::ast;
+use syntax::ext::mtwt;
use syntax::codemap::{Span, Spanned};
use syntax::ptr::P;

@@ -179,7 +180,7 @@ pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>,

// if there are multiple arms, make sure they all agree on
// what the type of the binding `x` ought to be
-let canon_id = *pcx.map.get(&path.node.name).unwrap();
+let canon_id = *pcx.map.get(&mtwt::resolve(path.node)).unwrap();
if canon_id != pat.id {
let ct = fcx.local_ty(pat.span, canon_id);
demand::eqtype(fcx, pat.span, ct, typ);
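
For context, the map consulted here is built from the first pattern of the arm, and every later occurrence of a binding is checked against the canonical NodeId found under the same (now hygienically resolved) key. An illustrative example of the language feature involved (not taken from the commit): with alternatives in a match arm, every occurrence of a binding must agree on its type, and typeck treats the occurrence in the first pattern as canonical.

// Illustrative only: `n` is bound in both alternatives and must have the
// same type in each; the `n` from the first pattern is the canonical one.
fn unwrap_either(v: Result<i32, i32>) -> i32 {
    match v {
        Ok(n) | Err(n) => n,
    }
}

With the regression, a binding introduced hygienically by a macro could be looked up under the wrong key at this point, which is what the new test below exercises.
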
45 changes: 45 additions & 0 deletions src/test/run-pass/issue-29746.rs
@@ -0,0 +1,45 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// zip!(a1,a2,a3,a4) is equivalent to:
// a1.zip(a2).zip(a3).zip(a4).map(|(((x1,x2),x3),x4)| (x1,x2,x3,x4))
macro_rules! zip {
    // Entry point
    ([$a:expr, $b:expr, $($rest:expr),*]) => {
        zip!([$($rest),*], $a.zip($b), (x,y), [x,y])
    };

    // Intermediate steps to build the zipped expression, the match pattern,
    // and the output tuple of the closure, using macro hygiene to repeatedly
    // introduce new variables named 'x'.
    ([$a:expr, $($rest:expr),*], $zip:expr, $pat:pat, [$($flat:expr),*]) => {
        zip!([$($rest),*], $zip.zip($a), ($pat,x), [$($flat),*, x])
    };

    // Final step
    ([], $zip:expr, $pat:pat, [$($flat:expr),+]) => {
        $zip.map(|$pat| ($($flat),+))
    };

    // Comma
    ([$a:expr], $zip:expr, $pat:pat, [$($flat:expr),*]) => {
        zip!([$a,], $zip, $pat, [$($flat),*])
    };
}

fn main() {
    let p1 = vec![1i32, 2].into_iter();
    let p2 = vec!["10", "20"].into_iter();
    let p3 = vec![100u16, 200].into_iter();
    let p4 = vec![1000i64, 2000].into_iter();

    let e = zip!([p1,p2,p3,p4]).collect::<Vec<_>>();
    assert_eq!(e[0], (1i32,"10",100u16,1000i64));
}
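
For reference, here is a hand-expanded equivalent of the zip!([p1, p2, p3, p4]) call in the test (illustrative only; the bindings the macro spells `x` are hygienically distinct, so they are renamed x1, x2, x3 here to make that visible):

fn main() {
    let p1 = vec![1i32, 2].into_iter();
    let p2 = vec!["10", "20"].into_iter();
    let p3 = vec![100u16, 200].into_iter();
    let p4 = vec![1000i64, 2000].into_iter();

    // zip!([p1, p2, p3, p4]) builds this expression step by step: each
    // intermediate rule adds one `.zip(..)`, one slot in the closure
    // pattern, and one element of the output tuple.
    let e = p1.zip(p2)
              .zip(p3)
              .zip(p4)
              .map(|(((x1, y), x2), x3)| (x1, y, x2, x3))
              .collect::<Vec<_>>();
    assert_eq!(e[0], (1i32, "10", 100u16, 1000i64));
}

Under the regression, the three textually identical `x` bindings in the closure pattern could be conflated when keyed by their raw name, which is exactly what this test guards against.
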
