Auto merge of #34004 - Manishearth:rollup, r=Manishearth
Rollup of 11 pull requests

- Successful merges: #33385, #33606, #33841, #33892, #33896, #33915, #33921, #33967, #33970, #33973, #33977
- Failed merges:
bors committed Jun 1, 2016
2 parents 433d70c + 42e593a commit 806a553
Showing 31 changed files with 354 additions and 81 deletions.
2 changes: 1 addition & 1 deletion src/doc/footer.inc
@@ -1,5 +1,5 @@
<footer><p>
- Copyright &copy; 2011-2015 The Rust Project Developers. Licensed under the
+ Copyright &copy; 2011 The Rust Project Developers. Licensed under the
<a href="http://www.apache.org/licenses/LICENSE-2.0">Apache License, Version 2.0</a>
or the <a href="https://opensource.org/licenses/MIT">MIT license</a>, at your option.
</p><p>
9 changes: 3 additions & 6 deletions src/libcollections/fmt.rs
@@ -8,19 +8,16 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

- //! Utilities for formatting and printing strings
+ //! Utilities for formatting and printing `String`s
//!
//! This module contains the runtime support for the `format!` syntax extension.
//! This macro is implemented in the compiler to emit calls to this module in
- //! order to format arguments at runtime into strings and streams.
+ //! order to format arguments at runtime into strings.
//!
//! # Usage
//!
//! The `format!` macro is intended to be familiar to those coming from C's
- //! printf/fprintf functions or Python's `str.format` function. In its current
- //! revision, the `format!` macro returns a `String` type which is the result of
- //! the formatting. In the future it will also be able to pass in a stream to
- //! format arguments directly while performing minimal allocations.
+ //! printf/fprintf functions or Python's `str.format` function.
//!
//! Some examples of the `format!` extension are:
//!
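The reworded docs keep pointing at examples of the `format!` extension; as a quick illustration of what the surviving text describes (the macro returning a `String`), a minimal usage example not taken from the diff:

```rust
fn main() {
    // `format!` returns a `String` built from the formatted arguments,
    // as the reworded module docs above describe.
    let name = "world";
    let s: String = format!("hello {}, {:>5}!", name, 42);
    assert_eq!(s, "hello world,    42!");
}
```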
3 changes: 3 additions & 0 deletions src/libcore/slice.rs
@@ -1830,6 +1830,9 @@ impl<A> SlicePartialEq<A> for [A]
if self.len() != other.len() {
return false;
}
+ if self.as_ptr() == other.as_ptr() {
+ return true;
+ }
unsafe {
let size = mem::size_of_val(self);
memcmp(self.as_ptr() as *const u8,
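The three added lines give slice equality a pointer-identity fast path: two slices with the same pointer and the same length must be equal, so the `memcmp` can be skipped. A minimal standalone sketch of the same idea, using a safe element comparison instead of `memcmp` (illustrative code, not the libcore implementation):

```rust
fn slices_equal(a: &[u8], b: &[u8]) -> bool {
    if a.len() != b.len() {
        return false;
    }
    // Fast path added by the change above: same pointer + same length
    // implies equal contents, so the element comparison can be skipped.
    if a.as_ptr() == b.as_ptr() {
        return true;
    }
    a.iter().zip(b.iter()).all(|(x, y)| x == y)
}

fn main() {
    let data = [1u8, 2, 3];
    let a = &data[..];
    let b = &data[..]; // same backing storage: the fast path fires
    assert!(slices_equal(a, b));
    assert!(slices_equal(a, &[1, 2, 3]));
    assert!(!slices_equal(a, &[1, 2, 4]));
}
```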
5 changes: 3 additions & 2 deletions src/libcore/str/mod.rs
@@ -354,7 +354,7 @@ fn unwrap_or_0(opt: Option<&u8>) -> u8 {
/// UTF-8-like encoding).
#[unstable(feature = "str_internals", issue = "0")]
#[inline]
- pub fn next_code_point(bytes: &mut slice::Iter<u8>) -> Option<u32> {
+ pub fn next_code_point<'a, I: Iterator<Item = &'a u8>>(bytes: &mut I) -> Option<u32> {
// Decode UTF-8
let x = match bytes.next() {
None => return None,
@@ -388,7 +388,8 @@ pub fn next_code_point(bytes: &mut slice::Iter<u8>) -> Option<u32> {
/// Reads the last code point out of a byte iterator (assuming a
/// UTF-8-like encoding).
#[inline]
- fn next_code_point_reverse(bytes: &mut slice::Iter<u8>) -> Option<u32> {
+ fn next_code_point_reverse<'a,
+ I: DoubleEndedIterator<Item = &'a u8>>(bytes: &mut I) -> Option<u32> {
// Decode UTF-8
let w = match bytes.next_back() {
None => return None,
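The new signatures drop the hard dependency on `slice::Iter<u8>` and accept any (double-ended) iterator over `&u8`, so the decoders also work on reversed, chained, or otherwise adapted byte iterators. A small sketch of the same generalization pattern, with a hypothetical helper that is not part of the patched code:

```rust
// A helper written against `slice::Iter<u8>` would only accept that one type;
// a generic bound over `Iterator<Item = &'a u8>` accepts any byte iterator.
fn leading_ones<'a, I: Iterator<Item = &'a u8>>(mut bytes: I) -> u32 {
    match bytes.next() {
        // Number of leading 1 bits in the first byte (UTF-8 length marker).
        Some(&b) => (!b).leading_zeros(),
        None => 0,
    }
}

fn main() {
    let utf8 = "é".as_bytes();                       // [0xC3, 0xA9]
    assert_eq!(leading_ones(utf8.iter()), 2);        // works on slice::Iter<u8>...
    assert_eq!(leading_ones(utf8.iter().rev()), 1);  // ...and on any other &u8 iterator
}
```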
28 changes: 17 additions & 11 deletions src/librustc/middle/stability.rs
@@ -26,7 +26,7 @@ use syntax::parse::token::InternedString;
use syntax::codemap::{Span, DUMMY_SP};
use syntax::ast;
use syntax::ast::{NodeId, Attribute};
- use syntax::feature_gate::{GateIssue, emit_feature_err};
+ use syntax::feature_gate::{GateIssue, emit_feature_err, find_lang_feature_accepted_version};
use syntax::attr::{self, Stability, Deprecation, AttrMetaMethods};
use util::nodemap::{DefIdMap, FnvHashSet, FnvHashMap};

@@ -37,6 +37,7 @@ use hir::pat_util::EnumerateAndAdjustIterator;

use std::mem::replace;
use std::cmp::Ordering;
+ use std::ops::Deref;

#[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Copy, Debug, Eq, Hash)]
pub enum StabilityLevel {
@@ -322,7 +323,7 @@ impl<'a, 'tcx> Index<'tcx> {
/// features and possibly prints errors. Returns a list of all
/// features used.
pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> FnvHashMap<InternedString, StabilityLevel> {
+ -> FnvHashMap<InternedString, attr::StabilityLevel> {
let _task = tcx.dep_graph.in_task(DepNode::StabilityCheck);
let ref active_lib_features = tcx.sess.features.borrow().declared_lib_features;

@@ -343,7 +344,7 @@ pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
struct Checker<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
active_features: FnvHashSet<InternedString>,
- used_features: FnvHashMap<InternedString, StabilityLevel>,
+ used_features: FnvHashMap<InternedString, attr::StabilityLevel>,
// Within a block where feature gate checking can be skipped.
in_skip_block: u32,
}
@@ -367,7 +368,8 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {

match *stab {
Some(&Stability { level: attr::Unstable {ref reason, issue}, ref feature, .. }) => {
- self.used_features.insert(feature.clone(), Unstable);
+ self.used_features.insert(feature.clone(),
+ attr::Unstable { reason: reason.clone(), issue: issue });

if !self.active_features.contains(feature) {
let msg = match *reason {
@@ -380,7 +382,7 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
}
}
Some(&Stability { ref level, ref feature, .. }) => {
- self.used_features.insert(feature.clone(), StabilityLevel::from_attr_level(level));
+ self.used_features.insert(feature.clone(), level.clone());

// Stable APIs are always ok to call and deprecated APIs are
// handled by a lint.
@@ -716,28 +718,32 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
/// libraries, identify activated features that don't exist and error about them.
pub fn check_unused_or_stable_features(sess: &Session,
lib_features_used: &FnvHashMap<InternedString,
- StabilityLevel>) {
+ attr::StabilityLevel>) {
let ref declared_lib_features = sess.features.borrow().declared_lib_features;
let mut remaining_lib_features: FnvHashMap<InternedString, Span>
= declared_lib_features.clone().into_iter().collect();

- let stable_msg = "this feature is stable. attribute no longer needed";
+ fn format_stable_since_msg(version: &str) -> String {
+ format!("this feature has been stable since {}. Attribute no longer needed", version)
+ }

- for &span in &sess.features.borrow().declared_stable_lang_features {
+ for &(ref stable_lang_feature, span) in &sess.features.borrow().declared_stable_lang_features {
+ let version = find_lang_feature_accepted_version(stable_lang_feature.deref())
+ .expect("unexpectedly couldn't find version feature was stabilized");
sess.add_lint(lint::builtin::STABLE_FEATURES,
ast::CRATE_NODE_ID,
span,
- stable_msg.to_string());
+ format_stable_since_msg(version));
}

for (used_lib_feature, level) in lib_features_used {
match remaining_lib_features.remove(used_lib_feature) {
Some(span) => {
- if *level == Stable {
+ if let &attr::StabilityLevel::Stable { since: ref version } = level {
sess.add_lint(lint::builtin::STABLE_FEATURES,
ast::CRATE_NODE_ID,
span,
- stable_msg.to_string());
+ format_stable_since_msg(version.deref()));
}
}
None => ( /* used but undeclared, handled during the previous ast visit */ )
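The thread through this file is that `used_features` now stores the full `attr::StabilityLevel` (including the `since` version for stable features) instead of a flattened marker, which is what lets the lint name the release a feature was stabilized in. A minimal sketch of that idea with illustrative stand-in types, not the rustc `attr` definitions:

```rust
// Keeping the full stability data, including `since`, is what allows the
// diagnostic to name the exact release in which a feature became stable.
#[allow(dead_code)]
#[derive(Clone, Debug)]
enum StabilityLevel {
    Unstable { reason: Option<String>, issue: u32 },
    Stable { since: String },
}

fn format_stable_since_msg(version: &str) -> String {
    format!("this feature has been stable since {}. Attribute no longer needed", version)
}

fn main() {
    let level = StabilityLevel::Stable { since: "1.9.0".to_string() };
    if let StabilityLevel::Stable { ref since } = level {
        println!("{}", format_stable_since_msg(since));
    }
}
```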
4 changes: 2 additions & 2 deletions src/librustc/ty/context.rs
@@ -665,7 +665,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
/// reference to the context, to allow formatting values that need it.
pub fn create_and_enter<F, R>(s: &'tcx Session,
arenas: &'tcx CtxtArenas<'tcx>,
- def_map: RefCell<DefMap>,
+ def_map: DefMap,
named_region_map: resolve_lifetime::NamedRegionMap,
map: ast_map::Map<'tcx>,
freevars: FreevarMap,
@@ -693,7 +693,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
item_variance_map: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
variance_computed: Cell::new(false),
sess: s,
- def_map: def_map,
+ def_map: RefCell::new(def_map),
tables: RefCell::new(Tables::empty()),
impl_trait_refs: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
trait_defs: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
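`create_and_enter` now takes the `DefMap` by value and builds the `RefCell` itself, so callers (see the driver and test changes below) stop constructing the cell. A small sketch of that constructor-owns-the-wrapper pattern, with hypothetical stand-in types:

```rust
use std::cell::RefCell;
use std::collections::HashMap;

// Illustrative stand-ins for DefMap / TyCtxt: the constructor owns the
// interior-mutability wrapper instead of asking every caller to build it.
type DefMap = HashMap<u32, String>;

struct Context {
    def_map: RefCell<DefMap>,
}

impl Context {
    // After the change: take the plain map by value...
    fn create(def_map: DefMap) -> Context {
        // ...and wrap it here, in one place.
        Context { def_map: RefCell::new(def_map) }
    }
}

fn main() {
    let mut map = DefMap::new();
    map.insert(0, "crate root".to_string());
    let cx = Context::create(map); // callers no longer write RefCell::new(...)
    assert_eq!(cx.def_map.borrow().len(), 1);
}
```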
3 changes: 1 addition & 2 deletions src/librustc_driver/driver.rs
@@ -44,7 +44,6 @@ use super::Compilation;

use serialize::json;

- use std::cell::RefCell;
use std::collections::HashMap;
use std::env;
use std::ffi::{OsString, OsStr};
@@ -893,7 +892,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
let trait_map = resolutions.trait_map;
TyCtxt::create_and_enter(sess,
arenas,
- RefCell::new(resolutions.def_map),
+ resolutions.def_map,
named_region_map,
hir_map,
resolutions.freevars,
3 changes: 1 addition & 2 deletions src/librustc_driver/test.rs
@@ -29,7 +29,6 @@ use rustc_metadata::cstore::CStore;
use rustc_metadata::creader::read_local_crates;
use rustc::hir::map as hir_map;
use rustc::session::{self, config};
- use std::cell::RefCell;
use std::rc::Rc;
use syntax::ast;
use syntax::abi::Abi;
@@ -140,7 +139,7 @@ fn test_env<F>(source_string: &str,
let index = stability::Index::new(&ast_map);
TyCtxt::create_and_enter(&sess,
&arenas,
- RefCell::new(resolutions.def_map),
+ resolutions.def_map,
named_region_map.unwrap(),
ast_map,
resolutions.freevars,
49 changes: 24 additions & 25 deletions src/librustc_mir/transform/type_check.rs
@@ -118,10 +118,6 @@ impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> {
self.cx.infcx.tcx
}

- fn infcx(&self) -> &'a InferCtxt<'a, 'gcx, 'tcx> {
- self.cx.infcx
- }

fn sanitize_type(&mut self, parent: &fmt::Debug, ty: Ty<'tcx>) -> Ty<'tcx> {
if ty.needs_infer() || ty.has_escaping_regions() || ty.references_error() {
span_mirbug_and_err!(self, parent, "bad type {:?}", ty)
@@ -292,30 +288,11 @@ impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> {
};

if let Some(field) = variant.fields.get(field.index()) {
- Ok(self.normalize(field.ty(tcx, substs)))
+ Ok(self.cx.normalize(&field.ty(tcx, substs)))
} else {
Err(FieldAccessError::OutOfRange { field_count: variant.fields.len() })
}
}

- fn normalize(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
- let infcx = self.infcx();
- let mut selcx = traits::SelectionContext::new(infcx);
- let cause = traits::ObligationCause::misc(self.last_span, 0);
- let traits::Normalized { value: ty, obligations } =
- traits::normalize(&mut selcx, cause, &ty);
-
- debug!("normalize: ty={:?} obligations={:?}",
- ty,
- obligations);
-
- let mut fulfill_cx = &mut self.cx.fulfillment_cx;
- for obligation in obligations {
- fulfill_cx.register_predicate_obligation(infcx, obligation);
- }
-
- ty
- }
}

pub struct TypeChecker<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
@@ -373,7 +350,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
}
}

- fn check_terminator(&self,
+ fn check_terminator(&mut self,
mir: &Mir<'tcx>,
term: &Terminator<'tcx>) {
debug!("check_terminator: {:?}", term);
@@ -431,6 +408,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
}
};
let sig = tcx.erase_late_bound_regions(&func_ty.sig);
+ let sig = self.normalize(&sig);
self.check_call_dest(mir, term, &sig, destination);

if self.is_box_free(func) {
@@ -558,6 +536,27 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
}
}


+ fn normalize<T>(&mut self, value: &T) -> T
+ where T: fmt::Debug + TypeFoldable<'tcx>
+ {
+ let mut selcx = traits::SelectionContext::new(self.infcx);
+ let cause = traits::ObligationCause::misc(self.last_span, 0);
+ let traits::Normalized { value, obligations } =
+ traits::normalize(&mut selcx, cause, value);
+
+ debug!("normalize: value={:?} obligations={:?}",
+ value,
+ obligations);
+
+ let mut fulfill_cx = &mut self.fulfillment_cx;
+ for obligation in obligations {
+ fulfill_cx.register_predicate_obligation(self.infcx, obligation);
+ }
+
+ value
+ }

fn verify_obligations(&mut self, mir: &Mir<'tcx>) {
self.last_span = mir.span;
if let Err(e) = self.fulfillment_cx.select_all_or_error(self.infcx) {
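The `Ty`-only `normalize` on `TypeVerifier` is replaced by a single method on `TypeChecker` that is generic over `T: TypeFoldable<'tcx>`, so the same code can normalize a lone type or an entire function signature (as the new `let sig = self.normalize(&sig);` call relies on). A toy sketch of generalizing a helper over a trait bound; the trait and types here are stand-ins, not rustc's:

```rust
use std::fmt::Debug;

// Stand-in for `TypeFoldable`: anything that can be rewritten in place.
trait Foldable {
    fn fold(self) -> Self;
}

#[derive(Debug)]
struct Ty(String);

#[derive(Debug)]
struct FnSig(Vec<Ty>);

impl Foldable for Ty {
    fn fold(self) -> Self {
        Ty(self.0.to_lowercase())
    }
}

impl Foldable for FnSig {
    fn fold(self) -> Self {
        FnSig(self.0.into_iter().map(Foldable::fold).collect())
    }
}

struct Checker;

impl Checker {
    // Generic over everything foldable, like `normalize<T: TypeFoldable<'tcx>>`.
    fn normalize<T: Foldable + Debug>(&mut self, value: T) -> T {
        let folded = value.fold();
        println!("normalize: value={:?}", folded);
        folded
    }
}

fn main() {
    let mut checker = Checker;
    let ty = checker.normalize(Ty("I32".to_string()));
    let sig = checker.normalize(FnSig(vec![Ty("I32".to_string()), Ty("Bool".to_string())]));
    println!("{:?} {:?}", ty, sig);
}
```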
17 changes: 10 additions & 7 deletions src/librustc_resolve/lib.rs
@@ -2422,13 +2422,16 @@ impl<'a> Resolver<'a> {
}
}
}
- } else if let Err(false) = self.resolve_path(pat_id, &path, 0, ValueNS) {
- resolve_error(
- self,
- path.span,
- ResolutionError::UnresolvedEnumVariantStructOrConst(
- &path.segments.last().unwrap().identifier.name.as_str())
- );
+ } else {
+ if let Err(false) = self.resolve_path(pat_id, &path, 0, ValueNS) {
+ // No error has been reported, so we need to do this ourselves.
+ resolve_error(
+ self,
+ path.span,
+ ResolutionError::UnresolvedEnumVariantStructOrConst(
+ &path.segments.last().unwrap().identifier.name.as_str())
+ );
+ }
self.record_def(pattern.id, err_path_resolution());
}
visit::walk_path(self, path);
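The resolver rewrite splits two concerns the old `else if` conflated: the error is reported only when `resolve_path` returned `Err(false)` (nothing reported yet), but the erroneous resolution is now recorded for every failed path. A control-flow sketch of the difference, with illustrative names rather than the resolver's types:

```rust
// Old shape: the fallback def was only recorded when the nested condition
// held, so an already-reported failure recorded nothing at all.
fn old_shape(resolved: bool, already_reported: bool) -> Option<&'static str> {
    if resolved {
        Some("real def")
    } else if !already_reported {
        eprintln!("error: unresolved enum variant, struct or const");
        Some("error def")
    } else {
        None // already-reported failure: no fallback def recorded
    }
}

// New shape: every failure records the fallback def; the nested `if` only
// decides whether to emit the not-yet-reported error.
fn new_shape(resolved: bool, already_reported: bool) -> Option<&'static str> {
    if resolved {
        Some("real def")
    } else {
        if !already_reported {
            // The analogue of resolve_error: emit our own diagnostic.
            eprintln!("error: unresolved enum variant, struct or const");
        }
        Some("error def")
    }
}

fn main() {
    assert_eq!(old_shape(false, true), None);
    assert_eq!(new_shape(false, true), Some("error def"));
}
```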
2 changes: 2 additions & 0 deletions src/libstd/io/cursor.rs
@@ -230,6 +230,7 @@ impl<T> BufRead for Cursor<T> where T: AsRef<[u8]> {

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Write for Cursor<&'a mut [u8]> {
+ #[inline]
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
let pos = cmp::min(self.pos, self.inner.len() as u64);
let amt = (&mut self.inner[(pos as usize)..]).write(data)?;
@@ -269,6 +270,7 @@ impl Write for Cursor<Vec<u8>> {

#[stable(feature = "cursor_box_slice", since = "1.5.0")]
impl Write for Cursor<Box<[u8]>> {
+ #[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let pos = cmp::min(self.pos, self.inner.len() as u64);
let amt = (&mut self.inner[(pos as usize)..]).write(buf)?;
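The two `#[inline]` attributes mark the tiny `write` bodies of `Cursor<&mut [u8]>` and `Cursor<Box<[u8]>>` as candidates for inlining across crate boundaries, which matters for impls exported from `std`. A hypothetical impl showing the same pattern, followed by one of the patched impls in use:

```rust
use std::io::{self, Cursor, Write};

// Hypothetical wrapper: #[inline] on a small trait-impl method lets callers
// in other crates inline the body instead of paying a call for a few adds.
struct CountingSink {
    written: u64,
}

impl Write for CountingSink {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.written += buf.len() as u64;
        Ok(buf.len())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

fn main() -> io::Result<()> {
    let mut sink = CountingSink { written: 0 };
    sink.write_all(b"hello")?;
    assert_eq!(sink.written, 5);

    // One of the patched impls: Cursor over a mutable byte slice.
    let mut buf = [0u8; 4];
    Cursor::new(&mut buf[..]).write_all(b"abcd")?;
    assert_eq!(&buf, b"abcd");
    Ok(())
}
```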
13 changes: 4 additions & 9 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -179,15 +179,15 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
let lhs_tt = match *lhs {
TokenTree::Delimited(_, ref delim) => &delim.tts[..],
- _ => cx.span_fatal(sp, "malformed macro lhs")
+ _ => cx.span_bug(sp, "malformed macro lhs")
};

match TokenTree::parse(cx, lhs_tt, arg) {
Success(named_matches) => {
let rhs = match rhses[i] {
// ignore delimiters
TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
- _ => cx.span_fatal(sp, "malformed macro rhs"),
+ _ => cx.span_bug(sp, "malformed macro rhs"),
};
// rhs has holes ( `$id` and `$(...)` that need filled)
let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic,
@@ -326,19 +326,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
NormalTT(exp, Some(def.span), def.allow_internal_unstable)
}

- // why is this here? because of https://github.com/rust-lang/rust/issues/27774
- fn ref_slice<A>(s: &A) -> &[A] { use std::slice::from_raw_parts; unsafe { from_raw_parts(s, 1) } }

fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
match lhs {
&TokenTree::Delimited(_, ref tts) => check_matcher(cx, &tts.tts),
- tt @ &TokenTree::Sequence(..) => check_matcher(cx, ref_slice(tt)),
_ => {
- cx.span_err(lhs.get_span(),
- "invalid macro matcher; matchers must be contained \
- in balanced delimiters or a repetition indicator");
+ cx.span_err(lhs.get_span(), "invalid macro matcher; matchers must \
+ be contained in balanced delimiters");
false
}
}
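Switching `span_fatal` to `span_bug` reclassifies the "malformed macro lhs"/"rhs" arms as internal invariants (an ICE if ever reached) rather than user-facing errors, while the shortened `span_err` message stays an ordinary user error; the `ref_slice` workaround for issue 27774 goes away along with the removed `Sequence` arm. A plain-Rust sketch of the error-vs-bug distinction, using a hypothetical validator rather than the libsyntax code:

```rust
#[derive(Debug)]
enum Matcher {
    Delimited(Vec<String>),
    Bare(String),
}

// A user can actually write a bare matcher, so it is reported as an
// ordinary, recoverable error (the analogue of span_err).
fn check_user_input(m: &Matcher) -> Result<(), String> {
    match m {
        Matcher::Delimited(_) => Ok(()),
        Matcher::Bare(_) => Err("invalid macro matcher; matchers must \
                                 be contained in balanced delimiters".to_string()),
    }
}

// By the time expansion runs, the matcher has been validated; reaching the
// bad arm is a bug in the tool, not in the user's input -- the analogue of
// span_bug, so it panics instead of emitting a diagnostic.
fn expand_already_checked(m: &Matcher) -> &[String] {
    match m {
        Matcher::Delimited(tts) => tts,
        Matcher::Bare(_) => panic!("malformed macro lhs"),
    }
}

fn main() {
    let ok = Matcher::Delimited(vec!["$x:expr".to_string()]);
    assert!(check_user_input(&ok).is_ok());
    assert_eq!(expand_already_checked(&ok).len(), 1);
    assert!(check_user_input(&Matcher::Bare("$x:expr".to_string())).is_err());
}
```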
