Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
126 changes: 61 additions & 65 deletions src/clang.rs
Original file line number Diff line number Diff line change
Expand Up @@ -628,6 +628,67 @@ impl Cursor {
};
if rt.is_valid() { Some(rt) } else { None }
}

/// Gets the tokens that correspond to that cursor.
///
/// Tokenizes this cursor's whole source extent via libclang.
/// Returns `None` if libclang fails to produce a token buffer
/// (i.e. `clang_tokenize` hands back a null pointer).
pub fn tokens(&self) -> Option<Vec<Token>> {
    let range = self.extent();
    let mut tokens = vec![];
    unsafe {
        let tu = clang_Cursor_getTranslationUnit(self.x);
        let mut token_ptr = ptr::null_mut();
        let mut num_tokens: c_uint = 0;
        clang_tokenize(tu, range, &mut token_ptr, &mut num_tokens);
        if token_ptr.is_null() {
            return None;
        }

        // SAFETY: when `clang_tokenize` returns a non-null buffer, it
        // points to exactly `num_tokens` valid `CXToken`s.
        let token_array =
            slice::from_raw_parts(token_ptr, num_tokens as usize);
        for &token in token_array.iter() {
            let kind = clang_getTokenKind(token);
            let spelling =
                cxstring_into_string(clang_getTokenSpelling(tu, token));

            // Field-init shorthand; avoids clippy::redundant_field_names.
            tokens.push(Token { kind, spelling });
        }
        // The token buffer is owned by libclang; release it before
        // returning so we don't leak it.
        clang_disposeTokens(tu, token_ptr, num_tokens);
    }
    Some(tokens)
}

/// Gets the tokens that correspond to that cursor as `cexpr` tokens.
///
/// Returns `None` if tokenization failed (see [`Cursor::tokens`]).
/// Comment tokens are silently dropped, since `cexpr` cannot handle
/// comments inside expressions; unexpected token kinds are logged and
/// dropped as well.
pub fn cexpr_tokens(self) -> Option<Vec<cexpr::token::Token>> {
    use cexpr::token;

    self.tokens().map(|tokens| {
        tokens
            .into_iter()
            .filter_map(|token| {
                let kind = match token.kind {
                    CXToken_Punctuation => token::Kind::Punctuation,
                    CXToken_Literal => token::Kind::Literal,
                    CXToken_Identifier => token::Kind::Identifier,
                    CXToken_Keyword => token::Kind::Keyword,
                    // NB: cexpr is not too happy about comments inside
                    // expressions, so we strip them down here.
                    CXToken_Comment => return None,
                    _ => {
                        error!("Found unexpected token kind: {:?}", token);
                        return None;
                    }
                };

                Some(token::Token {
                    // Field-init shorthand; avoids
                    // clippy::redundant_field_names.
                    kind,
                    raw: token.spelling.into_bytes().into_boxed_slice(),
                })
            })
            .collect::<Vec<_>>()
    })
}
}

/// Checks whether the name looks like an identifier, i.e. is alphanumeric
Expand Down Expand Up @@ -1346,71 +1407,6 @@ impl TranslationUnit {
pub fn is_null(&self) -> bool {
self.x.is_null()
}

/// Invoke Clang's lexer on this translation unit and get the stream of
/// tokens that come out.
///
/// Returns `None` when libclang fails to produce a token buffer for the
/// cursor's extent (i.e. `clang_tokenize` hands back a null pointer).
pub fn tokens(&self, cursor: &Cursor) -> Option<Vec<Token>> {
// Tokenize the whole source range covered by the cursor.
let range = cursor.extent();
let mut tokens = vec![];
unsafe {
let mut token_ptr = ptr::null_mut();
let mut num_tokens: c_uint = 0;
clang_tokenize(self.x, range, &mut token_ptr, &mut num_tokens);
if token_ptr.is_null() {
return None;
}

// SAFETY: when `clang_tokenize` returns a non-null buffer, it points
// to exactly `num_tokens` valid tokens.
let token_array =
slice::from_raw_parts(token_ptr, num_tokens as usize);
for &token in token_array.iter() {
let kind = clang_getTokenKind(token);
// Copy the spelling into an owned `String` before the libclang
// buffer is disposed below.
let spelling =
cxstring_into_string(clang_getTokenSpelling(self.x, token));

tokens.push(Token {
kind: kind,
spelling: spelling,
});
}
// The token buffer is owned by libclang; release it to avoid a leak.
clang_disposeTokens(self.x, token_ptr, num_tokens);
}
Some(tokens)
}

/// Convert a set of tokens from clang into `cexpr` tokens, for further
/// processing.
///
/// Returns `None` when tokenization of `cursor`'s extent fails. Comment
/// tokens are dropped (cexpr cannot handle them inside expressions), and
/// unexpected token kinds are logged and dropped.
pub fn cexpr_tokens(
&self,
cursor: &Cursor,
) -> Option<Vec<cexpr::token::Token>> {
use cexpr::token;

self.tokens(cursor).map(|tokens| {
tokens
.into_iter()
.filter_map(|token| {
// Map libclang token kinds onto the cexpr equivalents.
let kind = match token.kind {
CXToken_Punctuation => token::Kind::Punctuation,
CXToken_Literal => token::Kind::Literal,
CXToken_Identifier => token::Kind::Identifier,
CXToken_Keyword => token::Kind::Keyword,
// NB: cexpr is not too happy about comments inside
// expressions, so we strip them down here.
CXToken_Comment => return None,
_ => {
error!("Found unexpected token kind: {:?}", token);
return None;
}
};

Some(token::Token {
kind: kind,
// cexpr wants the raw byte spelling of the token.
raw: token.spelling.into_bytes().into_boxed_slice(),
})
})
.collect::<Vec<_>>()
})
}
}

impl Drop for TranslationUnit {
Expand Down
2 changes: 1 addition & 1 deletion src/ir/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2092,7 +2092,7 @@ impl BindgenContext {
::clang_sys::CXCursor_Namespace,
"Be a nice person"
);
let tokens = match self.translation_unit.tokens(&cursor) {
let tokens = match cursor.tokens() {
Some(tokens) => tokens,
None => return (None, ModuleKind::Normal),
};
Expand Down
14 changes: 5 additions & 9 deletions src/ir/var.rs
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ impl ClangSubItemParser for Var {
visitor.parsed_macro(&cursor.spelling());
}

let value = parse_macro(ctx, &cursor, ctx.translation_unit());
let value = parse_macro(ctx, &cursor);

let (id, value) = match value {
Some(v) => v,
Expand Down Expand Up @@ -294,11 +294,10 @@ impl ClangSubItemParser for Var {
fn parse_macro(
ctx: &BindgenContext,
cursor: &clang::Cursor,
unit: &clang::TranslationUnit,
) -> Option<(Vec<u8>, cexpr::expr::EvalResult)> {
use cexpr::{expr, nom};

let mut cexpr_tokens = match unit.cexpr_tokens(cursor) {
let mut cexpr_tokens = match cursor.cexpr_tokens() {
None => return None,
Some(tokens) => tokens,
};
Expand Down Expand Up @@ -328,14 +327,11 @@ fn parse_macro(
}
}

fn parse_int_literal_tokens(
cursor: &clang::Cursor,
unit: &clang::TranslationUnit,
) -> Option<i64> {
fn parse_int_literal_tokens(cursor: &clang::Cursor) -> Option<i64> {
use cexpr::{expr, nom};
use cexpr::expr::EvalResult;

let cexpr_tokens = match unit.cexpr_tokens(cursor) {
let cexpr_tokens = match cursor.cexpr_tokens() {
None => return None,
Some(tokens) => tokens,
};
Expand All @@ -357,7 +353,7 @@ fn get_integer_literal_from_cursor(
match c.kind() {
CXCursor_IntegerLiteral |
CXCursor_UnaryOperator => {
value = parse_int_literal_tokens(&c, unit);
value = parse_int_literal_tokens(&c);
}
CXCursor_UnexposedExpr => {
value = get_integer_literal_from_cursor(&c, unit);
Expand Down