src: address a bunch of clippy::pedantic lints

 src/lexer.rs | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/src/lexer.rs b/src/lexer.rs
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -110,8 +110,8 @@ impl<'t> Lexer<'t> {
         self
     }
 
-    /// Produces a LexError at the start of the current token
-    fn error(&self, res: LexFailure) -> LexError {
+    /// Produces a [`LexError`] at the start of the current token
+    const fn error(&self, res: LexFailure) -> LexError {
         LexError { pos: Span(self.head, self.tail), res }
     }
 
@@ -144,7 +144,7 @@ impl<'t> Lexer<'t> {
         self
     }
 
-    fn start_token(&mut self) -> &mut Self {
+    const fn start_token(&mut self) -> &mut Self {
         self.head = self.tail;
         self
     }
@@ -278,11 +278,11 @@ impl<'t> Lexer<'t> {
         Err(self.error(UnterminatedBlockComment))
     }
 
-    /// Consumes characters until it reaches a character not in [is_xid_continue].
+    /// Consumes characters until it reaches a character not in [`is_xid_continue`].
     ///
     /// Always consumes the first character.
     ///
-    /// Maps the result to either a [TKind::Identifier] or a [TKind] keyword.
+    /// Maps the result to either a [`TKind::Identifier`] or a [`TKind`] keyword.
     pub fn identifier(&mut self) -> Result<Token, LexError> {
         while self.consume().peek().is_some_and(is_xid_continue) {}
         let (lexeme, _span) = self.as_str();
@@ -346,7 +346,7 @@ impl<'t> Lexer<'t> {
                 Some('\\') => self.escape()?,
                 Some('"') => break,
                 Some(c) => c,
-            })
+            });
         }
         lexeme.shrink_to_fit();
         Ok(self.produce_with_lexeme(TKind::String, Lexeme::String(lexeme)))
@@ -375,7 +375,7 @@ impl<'t> Lexer<'t> {
     /// Parses two hex-digits and constructs a [char] out of them.
     pub fn hex_escape(&mut self) -> Result<char, LexError> {
         let out = (self.digit::<16>()? << 4) + self.digit::<16>()?;
-        char::from_u32(out).ok_or(self.error(InvalidUnicodeEscape(out)))
+        char::from_u32(out).ok_or_else(|| self.error(InvalidUnicodeEscape(out)))
     }
 
     /// Parses a sequence of `{}`-bracketed hex-digits and constructs a [char] out of them.
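
For context, a minimal standalone sketch of the two patterns the diff leans on: opting a crate into `clippy::pedantic` and deferring error construction with `ok_or_else` so the error is only built on the `None` path, as in the `hex_escape` change above. This is not part of the commit; `DemoError`, `make_error`, and `checked_char` are hypothetical stand-ins for the lexer's `LexError` machinery.

// Standalone sketch (not from the commit): hypothetical stand-ins for the
// lexer's error handling, showing the lazy `ok_or_else` idiom under a
// pedantic lint level.
#![warn(clippy::pedantic)]

#[derive(Debug)]
struct DemoError(String);

/// Builds an error value; `ok_or(make_error(code))` would call this eagerly
/// even when the conversion succeeds.
fn make_error(code: u32) -> DemoError {
    DemoError(format!("invalid scalar value: {code:#x}"))
}

/// Mirrors the shape of `hex_escape`: convert a code point, constructing the
/// error only on the failure path via `ok_or_else`.
fn checked_char(code: u32) -> Result<char, DemoError> {
    char::from_u32(code).ok_or_else(|| make_error(code))
}

fn main() {
    assert!(matches!(checked_char(0x41), Ok('A')));
    println!("{:?}", checked_char(0xD800)); // surrogate: prints the error
}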