librustdoc has been updated
Fixes run build error

Fix test failure

Fix tests' errors
GuillaumeGomez committed Feb 6, 2015
1 parent 9e20035 commit a2e01c6
Showing 8 changed files with 33 additions and 31 deletions.
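
The change is mechanical throughout: explicit `.get()` calls on the interned-string handles returned by `token::get_ident` / `token::get_name` and by the attribute and literal accessors are dropped, and the call sites instead go through the handle's `str` view directly — conversion via `to_string()` and comparison via a full-range slice, written `&x[]` in the then-current syntax (later `&x[..]`). A minimal sketch of that pattern, using a hypothetical `Interned` stand-in (not the real compiler-internal type) whose only assumed property is that it derefs to `str`:

```rust
use std::ops::Deref;

// Hypothetical stand-in for an interned-string handle (e.g. the old
// `InternedString`); the only assumption is that it derefs to `str`.
struct Interned(String);

impl Deref for Interned {
    type Target = str;
    fn deref(&self) -> &str {
        &self.0
    }
}

fn main() {
    let name = Interned("lintme".to_string());

    // With `Deref<Target = str>` the explicit accessor this commit removes
    // (`name.get()`) is no longer needed:
    assert_eq!(&*name, "lintme");            // borrow the &str via deref
    assert_eq!(name.to_string(), "lintme");  // `to_string()` resolves through deref to `str`
    assert!(name.starts_with("lint"));       // &str methods are callable directly
}
```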
36 changes: 18 additions & 18 deletions src/librustdoc/clean/mod.rs
@@ -410,12 +410,12 @@ pub enum Attribute {
 impl Clean<Attribute> for ast::MetaItem {
     fn clean(&self, cx: &DocContext) -> Attribute {
         match self.node {
-            ast::MetaWord(ref s) => Word(s.get().to_string()),
+            ast::MetaWord(ref s) => Word(s.to_string()),
             ast::MetaList(ref s, ref l) => {
-                List(s.get().to_string(), l.clean(cx))
+                List(s.to_string(), l.clean(cx))
             }
             ast::MetaNameValue(ref s, ref v) => {
-                NameValue(s.get().to_string(), lit_to_string(v))
+                NameValue(s.to_string(), lit_to_string(v))
             }
         }
     }
@@ -700,19 +700,19 @@ impl Lifetime {
 
 impl Clean<Lifetime> for ast::Lifetime {
     fn clean(&self, _: &DocContext) -> Lifetime {
-        Lifetime(token::get_name(self.name).get().to_string())
+        Lifetime(token::get_name(self.name).to_string())
     }
 }
 
 impl Clean<Lifetime> for ast::LifetimeDef {
     fn clean(&self, _: &DocContext) -> Lifetime {
-        Lifetime(token::get_name(self.lifetime.name).get().to_string())
+        Lifetime(token::get_name(self.lifetime.name).to_string())
     }
 }
 
 impl Clean<Lifetime> for ty::RegionParameterDef {
     fn clean(&self, _: &DocContext) -> Lifetime {
-        Lifetime(token::get_name(self.name).get().to_string())
+        Lifetime(token::get_name(self.name).to_string())
     }
 }
 
@@ -721,7 +721,7 @@ impl Clean<Option<Lifetime>> for ty::Region {
         match *self {
             ty::ReStatic => Some(Lifetime::statik()),
             ty::ReLateBound(_, ty::BrNamed(_, name)) =>
-                Some(Lifetime(token::get_name(name).get().to_string())),
+                Some(Lifetime(token::get_name(name).to_string())),
             ty::ReEarlyBound(_, _, _, name) => Some(Lifetime(name.clean(cx))),
 
             ty::ReLateBound(..) |
@@ -1953,20 +1953,20 @@ fn path_to_string(p: &ast::Path) -> String {
         } else {
             first = false;
         }
-        s.push_str(i.get());
+        s.push_str(&i);
     }
     s
 }
 
 impl Clean<String> for ast::Ident {
     fn clean(&self, _: &DocContext) -> String {
-        token::get_ident(*self).get().to_string()
+        token::get_ident(*self).to_string()
     }
 }
 
 impl Clean<String> for ast::Name {
     fn clean(&self, _: &DocContext) -> String {
-        token::get_name(*self).get().to_string()
+        token::get_name(*self).to_string()
     }
 }
 
@@ -2158,7 +2158,7 @@ impl Clean<Vec<Item>> for doctree::Import {
         // forcefully don't inline if this is not public or if the
         // #[doc(no_inline)] attribute is present.
         let denied = self.vis != ast::Public || self.attrs.iter().any(|a| {
-            a.name().get() == "doc" && match a.meta_item_list() {
+            &a.name()[] == "doc" && match a.meta_item_list() {
                Some(l) => attr::contains_name(l, "no_inline"),
                None => false,
            }
@@ -2311,7 +2311,7 @@ impl ToSource for syntax::codemap::Span {
 
 fn lit_to_string(lit: &ast::Lit) -> String {
     match lit.node {
-        ast::LitStr(ref st, _) => st.get().to_string(),
+        ast::LitStr(ref st, _) => st.to_string(),
         ast::LitBinary(ref data) => format!("{:?}", data),
         ast::LitByte(b) => {
             let mut res = String::from_str("b'");
@@ -2323,8 +2323,8 @@ fn lit_to_string(lit: &ast::Lit) -> String {
         },
         ast::LitChar(c) => format!("'{}'", c),
         ast::LitInt(i, _t) => i.to_string(),
-        ast::LitFloat(ref f, _t) => f.get().to_string(),
-        ast::LitFloatUnsuffixed(ref f) => f.get().to_string(),
+        ast::LitFloat(ref f, _t) => f.to_string(),
+        ast::LitFloatUnsuffixed(ref f) => f.to_string(),
         ast::LitBool(b) => b.to_string(),
     }
 }
@@ -2336,7 +2336,7 @@ fn name_from_pat(p: &ast::Pat) -> String {
     match p.node {
         PatWild(PatWildSingle) => "_".to_string(),
         PatWild(PatWildMulti) => "..".to_string(),
-        PatIdent(_, ref p, _) => token::get_ident(p.node).get().to_string(),
+        PatIdent(_, ref p, _) => token::get_ident(p.node).to_string(),
         PatEnum(ref p, _) => path_to_string(p),
         PatStruct(ref name, ref fields, etc) => {
             format!("{} {{ {}{} }}", path_to_string(name),
@@ -2486,11 +2486,11 @@ impl Clean<Stability> for attr::Stability {
     fn clean(&self, _: &DocContext) -> Stability {
         Stability {
             level: self.level,
-            feature: self.feature.get().to_string(),
+            feature: self.feature.to_string(),
             since: self.since.as_ref().map_or("".to_string(),
-                                              |interned| interned.get().to_string()),
+                                              |interned| interned.to_string()),
             reason: self.reason.as_ref().map_or("".to_string(),
-                                                |interned| interned.get().to_string()),
+                                                |interned| interned.to_string()),
         }
     }
 }
2 changes: 1 addition & 1 deletion src/librustdoc/html/highlight.rs
@@ -142,7 +142,7 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
 
             // keywords are also included in the identifier set
             token::Ident(ident, _is_mod_sep) => {
-                match token::get_ident(ident).get() {
+                match &token::get_ident(ident)[] {
                     "ref" | "mut" => "kw-2",
 
                     "self" => "self",
4 changes: 2 additions & 2 deletions src/librustdoc/visit_ast.rs
@@ -237,7 +237,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
             ast::ItemExternCrate(ref p) => {
                 let path = match *p {
                     None => None,
-                    Some((ref x, _)) => Some(x.get().to_string()),
+                    Some((ref x, _)) => Some(x.to_string()),
                 };
                 om.extern_crates.push(ExternCrate {
                     name: name,
@@ -253,7 +253,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
                 let please_inline = item.attrs.iter().any(|item| {
                     match item.meta_item_list() {
                         Some(list) => {
-                            list.iter().any(|i| i.name().get() == "inline")
+                            list.iter().any(|i| &i.name()[] == "inline")
                         }
                         None => false,
                     }
6 changes: 3 additions & 3 deletions src/libsyntax/ext/expand.rs
@@ -1862,7 +1862,7 @@ mod test {
                 .collect();
             println!("varref #{}: {:?}, resolves to {}",idx, varref_idents, varref_name);
             let string = token::get_ident(final_varref_ident);
-            println!("varref's first segment's string: \"{}\"", string.get());
+            println!("varref's first segment's string: \"{}\"", &string[]);
             println!("binding #{}: {}, resolves to {}",
                      binding_idx, bindings[binding_idx], binding_name);
             mtwt::with_sctable(|x| mtwt::display_sctable(x));
@@ -1915,7 +1915,7 @@ foo_module!();
         let cxbinds: Vec<&ast::Ident> =
             bindings.iter().filter(|b| {
                 let ident = token::get_ident(**b);
-                let string = ident.get();
+                let string = &ident[];
                 "xx" == string
             }).collect();
         let cxbinds: &[&ast::Ident] = &cxbinds[];
@@ -1929,7 +1929,7 @@ foo_module!();
         // the xx binding should bind all of the xx varrefs:
         for (idx,v) in varrefs.iter().filter(|p| {
             p.segments.len() == 1
-            && "xx" == token::get_ident(p.segments[0].identifier).get()
+            && "xx" == &token::get_ident(p.segments[0].identifier)[]
         }).enumerate() {
             if mtwt::resolve(v.segments[0].identifier) != resolved_binding {
                 println!("uh oh, xx binding didn't match xx varref:");
8 changes: 4 additions & 4 deletions src/libsyntax/parse/mod.rs
@@ -1201,19 +1201,19 @@ mod test {
         let source = "/// doc comment\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap();
         let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-        assert_eq!(doc.get(), "/// doc comment");
+        assert_eq!(&doc[], "/// doc comment");
 
         let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap();
-        let docs = item.attrs.iter().filter(|a| a.name().get() == "doc")
-            .map(|a| a.value_str().unwrap().get().to_string()).collect::<Vec<_>>();
+        let docs = item.attrs.iter().filter(|a| &a.name()[] == "doc")
+            .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
         let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
         assert_eq!(&docs[], b);
 
         let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap();
         let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-        assert_eq!(doc.get(), "/** doc comment\n * with CRLF */");
+        assert_eq!(&doc[], "/** doc comment\n * with CRLF */");
     }
 
     #[test]
4 changes: 2 additions & 2 deletions src/test/auxiliary/lint_group_plugin_test.rs
@@ -37,9 +37,9 @@ impl LintPass for Pass {
 
     fn check_item(&mut self, cx: &Context, it: &ast::Item) {
         let name = token::get_ident(it.ident);
-        if name.get() == "lintme" {
+        if &name[] == "lintme" {
             cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
-        } else if name.get() == "pleaselintme" {
+        } else if &name[] == "pleaselintme" {
             cx.span_lint(PLEASE_LINT, it.span, "item is named 'pleaselintme'");
         }
     }
2 changes: 1 addition & 1 deletion src/test/auxiliary/lint_plugin_test.rs
@@ -35,7 +35,7 @@ impl LintPass for Pass {
 
     fn check_item(&mut self, cx: &Context, it: &ast::Item) {
         let name = token::get_ident(it.ident);
-        if name.get() == "lintme" {
+        if &name[] == "lintme" {
             cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
         }
     }
2 changes: 2 additions & 0 deletions src/test/run-pass/issue-15149.rs
@@ -1,3 +1,5 @@
+// no-prefer-dynamic
+
 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
