
Commit

Fix clippy warnings
Phantomical committed Feb 2, 2024
1 parent d4d884c, commit 30bdcf1
Showing 3 changed files with 7 additions and 7 deletions.
src/de.rs (2 additions, 2 deletions)
@@ -7,7 +7,7 @@ use crate::error::Expected;
use crate::lex::{Lexer, Token, TokenKind};
use crate::Error;

-//// A serde deserializer for rust's debug format.
+/// A serde deserializer for rust's debug format.
pub struct Deserializer<'de> {
total: &'de str,
lexer: Lexer<'de>,
@@ -888,7 +888,7 @@ impl<'de> VariantAccess<'de> for DebugEnumAccess<'_, 'de> {
}
}

-fn unescape<'de>(mut text: &'de str) -> Result<Cow<'de, str>, Error> {
+fn unescape(mut text: &str) -> Result<Cow<'_, str>, Error> {
let mut next = match text.find('\\') {
Some(pos) => pos,
None => return Ok(Cow::Borrowed(text)),
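
The two changes above look like fixes for clippy's four_forward_slashes and needless_lifetimes lints: //// is treated as a plain comment rather than a doc comment, and the explicit 'de lifetime on unescape can be elided. A minimal sketch of the elision pattern, using an illustrative function that is not part of this crate:

    use std::borrow::Cow;

    // Before: fn shout<'a>(text: &'a str) -> Cow<'a, str>
    // After: the lifetime is inferred, which is the form clippy prefers.
    fn shout(text: &str) -> Cow<'_, str> {
        if text.ends_with('!') {
            Cow::Borrowed(text)
        } else {
            Cow::Owned(format!("{text}!"))
        }
    }
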
src/error.rs (1 addition, 1 deletion)
@@ -27,7 +27,7 @@ impl LexerError {

impl fmt::Display for LexerError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-if self.found == "" {
+if self.found.is_empty() {
write!(f, "unexpected end of file, expected {}", self.expected)
} else {
write!(
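
This is the pattern clippy's comparison_to_empty lint (likely the one involved here) flags: comparing a string against "" instead of calling is_empty(). A small standalone sketch, not taken from the commit, with a hypothetical helper mirroring the Display logic above:

    fn describe(found: &str, expected: &str) -> String {
        if found.is_empty() {
            // preferred over `found == ""`
            format!("unexpected end of file, expected {expected}")
        } else {
            format!("unexpected token {found}, expected {expected}")
        }
    }
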
src/lex.rs (4 additions, 4 deletions)
@@ -161,7 +161,7 @@ impl<'de> Lexer<'de> {
}

fn parse_string(&mut self) -> Result<TokenKind, LexerError> {
-self.data = match self.data.strip_prefix("\"") {
+self.data = match self.data.strip_prefix('"') {
Some(rest) => rest,
None => return Err(self.unexpected_token(TokenKind::String)),
};
@@ -176,7 +176,7 @@ impl<'de> Lexer<'de> {
break;
}

-match self.data.as_bytes().get(0) {
+match self.data.as_bytes().first() {
Some(b'\"') => {
self.advance(1);
Ok(TokenKind::String)
@@ -186,7 +186,7 @@ impl<'de> Lexer<'de> {
}

fn parse_char(&mut self) -> Result<TokenKind, LexerError> {
-self.data = match self.data.strip_prefix("\'") {
+self.data = match self.data.strip_prefix('\'') {
Some(rest) => rest,
None => return Err(self.unexpected_token(TokenKind::Char)),
};
@@ -201,7 +201,7 @@ impl<'de> Lexer<'de> {
break;
}

-match self.data.as_bytes().get(0) {
+match self.data.as_bytes().first() {
Some(b'\'') => {
self.advance(1);
Ok(TokenKind::Char)
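
The lex.rs changes match two more clippy lints, likely single_char_pattern (a char pattern such as '"' avoids building a one-byte &str pattern) and get_first (slice.first() reads better than slice.get(0)). A standalone sketch of both, using hypothetical helpers rather than the crate's Lexer:

    // Char pattern instead of the string pattern "\"".
    fn strip_leading_quote(data: &str) -> Option<&str> {
        data.strip_prefix('"')
    }

    // first() instead of get(0).
    fn first_byte(data: &str) -> Option<u8> {
        data.as_bytes().first().copied()
    }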
