//! governing what is and is not a valid token are defined in the Python reference
//! guide section on [Lexical analysis].
//!
- //! The primary function in this module is [`make_tokenizer`], which takes a string slice
+ //! The primary function in this module is [`lex`], which takes a string slice
//! and returns an iterator over the tokens in the source code. The tokens are currently returned
//! as a `Result<Spanned, LexicalError>`, where [`Spanned`] is a tuple containing the
//! start and end [`Location`] and a [`Tok`] denoting the token.
//!
//! # Example
//!
//! ```
- //! use rustpython_parser::lexer::{make_tokenizer, Tok};
+ //! use rustpython_parser::lexer::{lex, Tok};
//! use rustpython_parser::mode::Mode;
//! use rustpython_parser::token::StringKind;
//!
//! let source = "x = 'RustPython'";
- //! let tokens = make_tokenizer(source, Mode::Module)
+ //! let tokens = lex(source, Mode::Module)
//!     .map(|tok| tok.expect("Failed to lex"))
//!     .collect::<Vec<_>>();
//!
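
To see the renamed entry point end to end, here is a minimal sketch, assuming the `Spanned = (Location, Tok, Location)` alias shown in the next hunk and that `Location` and `Tok` implement `Debug` (the doc example above already prints tokens with `{:?}`):

```rust
use rustpython_parser::lexer::lex;
use rustpython_parser::mode::Mode;

fn main() {
    // Each successful item is a `Spanned` triple: (start, token, end).
    for spanned in lex("x = 'RustPython'", Mode::Module) {
        let (start, tok, end) = spanned.expect("failed to lex");
        println!("{start:?}..{end:?}: {tok:?}");
    }
}
```
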
@@ -195,29 +195,29 @@ pub type Spanned = (Location, Tok, Location);
/// The result of lexing a token.
pub type LexResult = Result<Spanned, LexicalError>;

- /// Create a new tokenizer from a source string.
+ /// Create a new lexer from a source string.
///
/// # Examples
///
/// ```
/// use rustpython_parser::mode::Mode;
- /// use rustpython_parser::lexer::{make_tokenizer};
+ /// use rustpython_parser::lexer::{lex};
///
/// let source = "def hello(): return 'world'";
- /// let tokenizer = make_tokenizer(source, Mode::Module);
+ /// let lexer = lex(source, Mode::Module);
///
- /// for token in tokenizer {
+ /// for token in lexer {
///     println!("{:?}", token);
/// }
/// ```
#[inline]
- pub fn make_tokenizer(source: &str, mode: Mode) -> impl Iterator<Item = LexResult> + '_ {
-     make_tokenizer_located(source, mode, Location::default())
+ pub fn lex(source: &str, mode: Mode) -> impl Iterator<Item = LexResult> + '_ {
+     lex_located(source, mode, Location::default())
}

- /// Create a new tokenizer from a source string, starting at a given location.
- /// You probably want to use [`make_tokenizer`] instead.
- pub fn make_tokenizer_located(
+ /// Create a new lexer from a source string, starting at a given location.
+ /// You probably want to use [`lex`] instead.
+ pub fn lex_located(
    source: &str,
    mode: Mode,
    start_location: Location,
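
The hunk above shows only the start of the signature; the point of `lex_located` is lexing a snippet embedded at a known offset (for example, code inside a larger document) so that reported spans line up with the enclosing source. A hedged sketch, assuming `Location` is reachable at `rustpython_parser::ast::Location` and has a `new(row, column)` constructor (neither is confirmed by this diff):

```rust
use rustpython_parser::lexer::lex_located;
use rustpython_parser::mode::Mode;
// Import path and the `new(row, column)` constructor are assumptions.
use rustpython_parser::ast::Location;

fn main() {
    // Pretend the snippet starts at line 10, column 0 of a larger file;
    // the spans attached to each token are then offset accordingly.
    for token in lex_located("y = 2", Mode::Module, Location::new(10, 0)) {
        println!("{token:?}");
    }
}
```
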
@@ -230,7 +230,7 @@ where
    T: Iterator<Item = char>,
{
    /// Create a new lexer from T and a starting location. You probably want to use
-     /// [`make_tokenizer`] instead.
+     /// [`lex`] instead.
    pub fn new(input: T, start: Location) -> Self {
        let mut lxr = Lexer {
            at_begin_of_line: true,
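
As this hunk shows, `Lexer::new` is generic over any `T: Iterator<Item = char>`, with `lex` and `lex_located` acting as convenience wrappers over string input. A sketch of direct construction, assuming `Lexer` is publicly exported and implements `Iterator` (plausible since the wrappers return `impl Iterator`, but not confirmed here):

```rust
use rustpython_parser::lexer::Lexer;
use rustpython_parser::ast::Location; // path assumed, as above

fn main() {
    // Feed the lexer a raw char iterator. The wrapper functions may
    // normalize the input (e.g. newlines) before calling `Lexer::new`;
    // this direct route skips whatever preprocessing they perform.
    let lexer = Lexer::new("pass".chars(), Location::default());
    for token in lexer {
        println!("{token:?}");
    }
}
```
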
@@ -1320,7 +1320,7 @@ impl std::fmt::Display for LexicalErrorType {

#[cfg(test)]
mod tests {
-     use super::{make_tokenizer, StringKind, Tok};
+     use super::{lex, StringKind, Tok};
    use crate::mode::Mode;
    use num_bigint::BigInt;

@@ -1329,7 +1329,7 @@ mod tests {
    const UNIX_EOL: &str = "\n";

    pub fn lex_source(source: &str) -> Vec<Tok> {
-         let lexer = make_tokenizer(source, Mode::Module);
+         let lexer = lex(source, Mode::Module);
        lexer.map(|x| x.unwrap().1).collect()
    }

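
Given the `lex_source` helper, a test in this suite's style might look like the following. The exact `Tok` variant shapes used here (`Name { name }`, `Equal`, `Int { value }`, `Newline`) are assumptions based on names visible elsewhere, not taken from this diff:

```rust
#[test]
fn test_assignment() {
    let tokens = lex_source("x = 1");
    assert_eq!(
        tokens,
        vec![
            // Variant shapes are assumed for illustration.
            Tok::Name { name: "x".to_string() },
            Tok::Equal,
            Tok::Int { value: BigInt::from(1) },
            Tok::Newline,
        ]
    );
}
```
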