@@ -628,6 +628,67 @@ impl Cursor {
         };
         if rt.is_valid() { Some(rt) } else { None }
     }
+
+    /// Gets the tokens that correspond to that cursor.
+    pub fn tokens(&self) -> Option<Vec<Token>> {
+        let range = self.extent();
+        let mut tokens = vec![];
+        unsafe {
+            let tu = clang_Cursor_getTranslationUnit(self.x);
+            let mut token_ptr = ptr::null_mut();
+            let mut num_tokens: c_uint = 0;
+            clang_tokenize(tu, range, &mut token_ptr, &mut num_tokens);
+            if token_ptr.is_null() {
+                return None;
+            }
+
+            let token_array =
+                slice::from_raw_parts(token_ptr, num_tokens as usize);
+            for &token in token_array.iter() {
+                let kind = clang_getTokenKind(token);
+                let spelling =
+                    cxstring_into_string(clang_getTokenSpelling(tu, token));
+
+                tokens.push(Token {
+                    kind: kind,
+                    spelling: spelling,
+                });
+            }
+            clang_disposeTokens(tu, token_ptr, num_tokens);
+        }
+        Some(tokens)
+    }
+
+    /// Gets the tokens that correspond to that cursor as `cexpr` tokens.
+    pub fn cexpr_tokens(self) -> Option<Vec<cexpr::token::Token>> {
+        use cexpr::token;
+
+        self.tokens().map(|tokens| {
+            tokens
+                .into_iter()
+                .filter_map(|token| {
+                    let kind = match token.kind {
+                        CXToken_Punctuation => token::Kind::Punctuation,
+                        CXToken_Literal => token::Kind::Literal,
+                        CXToken_Identifier => token::Kind::Identifier,
+                        CXToken_Keyword => token::Kind::Keyword,
+                        // NB: cexpr is not too happy about comments inside
+                        // expressions, so we strip them down here.
+                        CXToken_Comment => return None,
+                        _ => {
+                            error!("Found unexpected token kind: {:?}", token);
+                            return None;
+                        }
+                    };
+
+                    Some(token::Token {
+                        kind: kind,
+                        raw: token.spelling.into_bytes().into_boxed_slice(),
+                    })
+                })
+                .collect::<Vec<_>>()
+        })
+    }
 }
 
 /// Checks whether the name looks like an identifier, i.e. is alphanumeric
@@ -1346,71 +1407,6 @@ impl TranslationUnit {
     pub fn is_null(&self) -> bool {
         self.x.is_null()
     }
-
-    /// Invoke Clang's lexer on this translation unit and get the stream of
-    /// tokens that come out.
-    pub fn tokens(&self, cursor: &Cursor) -> Option<Vec<Token>> {
-        let range = cursor.extent();
-        let mut tokens = vec![];
-        unsafe {
-            let mut token_ptr = ptr::null_mut();
-            let mut num_tokens: c_uint = 0;
-            clang_tokenize(self.x, range, &mut token_ptr, &mut num_tokens);
-            if token_ptr.is_null() {
-                return None;
-            }
-
-            let token_array =
-                slice::from_raw_parts(token_ptr, num_tokens as usize);
-            for &token in token_array.iter() {
-                let kind = clang_getTokenKind(token);
-                let spelling =
-                    cxstring_into_string(clang_getTokenSpelling(self.x, token));
-
-                tokens.push(Token {
-                    kind: kind,
-                    spelling: spelling,
-                });
-            }
-            clang_disposeTokens(self.x, token_ptr, num_tokens);
-        }
-        Some(tokens)
-    }
-
-    /// Convert a set of tokens from clang into `cexpr` tokens, for further
-    /// processing.
-    pub fn cexpr_tokens(
-        &self,
-        cursor: &Cursor,
-    ) -> Option<Vec<cexpr::token::Token>> {
-        use cexpr::token;
-
-        self.tokens(cursor).map(|tokens| {
-            tokens
-                .into_iter()
-                .filter_map(|token| {
-                    let kind = match token.kind {
-                        CXToken_Punctuation => token::Kind::Punctuation,
-                        CXToken_Literal => token::Kind::Literal,
-                        CXToken_Identifier => token::Kind::Identifier,
-                        CXToken_Keyword => token::Kind::Keyword,
-                        // NB: cexpr is not too happy about comments inside
-                        // expressions, so we strip them down here.
-                        CXToken_Comment => return None,
-                        _ => {
-                            error!("Found unexpected token kind: {:?}", token);
-                            return None;
-                        }
-                    };
-
-                    Some(token::Token {
-                        kind: kind,
-                        raw: token.spelling.into_bytes().into_boxed_slice(),
-                    })
-                })
-                .collect::<Vec<_>>()
-        })
-    }
 }
 
 impl Drop for TranslationUnit {
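
For reference, a minimal caller sketch of the effect of this move: tokenization now hangs off `Cursor` rather than `TranslationUnit`, since the cursor can recover its own translation unit via `clang_Cursor_getTranslationUnit`, so callers no longer need to thread a `TranslationUnit` handle through. The function name below is a hypothetical illustration, not part of this commit.

    // Hypothetical caller, for illustration only.
    fn macro_value_tokens(cursor: Cursor) -> Vec<cexpr::token::Token> {
        // Before this change: translation_unit.cexpr_tokens(&cursor).
        // After it, the cursor tokenizes itself.
        cursor.cexpr_tokens().unwrap_or_default()
    }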