From 588606e0c4cca5ec956e15192f857bdef818a4dc Mon Sep 17 00:00:00 2001
From: John
Date: Tue, 28 Oct 2025 02:53:20 -0400
Subject: [PATCH] src: address a bunch of clippy::pedantic lints

---
 src/ast.rs               |  8 +++----
 src/ast/macro_matcher.rs |  6 ++---
 src/fmt.rs               | 16 +++++++-------
 src/lexer.rs             | 14 ++++++------
 src/lib.rs               |  2 +-
 src/parser.rs            | 47 ++++++++++++++++++++--------------------
 src/span.rs              |  2 +-
 src/token.rs             |  6 ++---
 8 files changed, 50 insertions(+), 51 deletions(-)

diff --git a/src/ast.rs b/src/ast.rs
index 0500ce0..042c4b9 100644
--- a/src/ast.rs
+++ b/src/ast.rs
@@ -276,7 +276,7 @@ impl Default for Expr {
 }
 
 impl Expr {
-    pub fn anno(self, annotation: A) -> Anno, A> {
+    pub const fn anno(self, annotation: A) -> Anno, A> {
         Anno(self, annotation)
     }
 
@@ -292,15 +292,15 @@ impl Expr {
         Self::Op(Op::Do, exprs)
     }
 
-    pub fn is_place(&self) -> bool {
+    pub const fn is_place(&self) -> bool {
         matches!(
             self,
-            Self::Id(_) | Self::Op(Op::Index, _) | Self::Op(Op::Dot, _) | Self::Op(Op::Deref, _)
+            Self::Id(_) | Self::Op(Op::Index | Op::Dot | Op::Deref, _)
         )
     }
 
     #[allow(clippy::type_complexity)]
-    pub fn as_slice(&self) -> Option<(Op, &[Anno, A>])> {
+    pub const fn as_slice(&self) -> Option<(Op, &[Anno, A>])> {
         match self {
             Expr::Op(op, args) => Some((*op, args.as_slice())),
             _ => None,
diff --git a/src/ast/macro_matcher.rs b/src/ast/macro_matcher.rs
index 2f5df6b..8970942 100644
--- a/src/ast/macro_matcher.rs
+++ b/src/ast/macro_matcher.rs
@@ -135,7 +135,7 @@ impl Match for Expr {
         match self {
             Expr::MetId(id) => {
                 if let Some(expr) = sub.exp.get(id) {
-                    *self = expr.clone()
+                    *self = expr.clone();
                 }
             }
             Expr::Omitted | Expr::Id(_) | Expr::Lit(_) | Expr::Use(_) => {}
@@ -145,7 +145,7 @@ impl Match for Expr {
                 op.apply(sub);
                 exprs.apply(sub);
             }
-        };
+        }
     }
 }
 
@@ -190,7 +190,7 @@ impl Match for Pat {
             Pat::Ignore | Pat::Never | Pat::Name(_) | Pat::Path(_) | Pat::Lit(_) => {}
             Pat::MetId(id) => {
                 if let Some(expr) = sub.pat.get(id) {
-                    *self = expr.clone()
+                    *self = expr.clone();
                 }
             }
             Pat::NamedStruct(_, expr) => expr.apply(sub),
diff --git a/src/fmt.rs b/src/fmt.rs
index 482025c..eb5690e 100644
--- a/src/fmt.rs
+++ b/src/fmt.rs
@@ -62,7 +62,7 @@ pub trait FmtAdapter: Write {
             if !rest.is_empty() {
                 write!(self, "{sep}")?;
             }
-            items = rest
+            items = rest;
         }
         write!(self, "{close}")
     }
@@ -76,12 +76,12 @@ pub struct Indent<'f, F: Write + ?Sized> {
 }
 
 impl<'f, F: Write + ?Sized> Indent<'f, F> {
-    pub fn new(f: &'f mut F, indent: &'static str) -> Self {
+    pub const fn new(f: &'f mut F, indent: &'static str) -> Self {
         Indent { f, needs_indent: false, indent }
     }
 
     /// Gets mutable access to the inner [Write]-adapter
-    pub fn inner(&mut self) -> &mut F {
+    pub const fn inner(&mut self) -> &mut F {
         self.f
     }
 }
@@ -113,9 +113,9 @@ pub struct Delimit<'f, F: Write + ?Sized, E: Display = &'static str> {
     close: E,
 }
 
-impl<'f, F: Write + ?Sized, E: Display> Delimit<'f, F, E> {
+impl<F: Write + ?Sized, E: Display> Delimit<'_, F, E> {
     /// Gets mutable access to the inner [Write]-adapter
-    pub fn inner(&mut self) -> &mut F {
+    pub const fn inner(&mut self) -> &mut F {
         self.f
     }
 }
@@ -146,9 +146,9 @@ pub struct DelimitIndent<'f, F: Write + ?Sized, E: Display = &'static str> {
     close: E,
 }
 
-impl<'f, F: Write + ?Sized, E: Display> DelimitIndent<'f, F, E> {
+impl<F: Write + ?Sized, E: Display> DelimitIndent<'_, F, E> {
     /// Gets mutable access to the inner [Write]-adapter
-    pub fn inner(&mut self) -> &mut F {
+    pub const fn inner(&mut self) -> &mut F {
         self.f.inner()
     }
 }
@@ -164,7 +164,7 @@ impl Drop for DelimitIndent<'_, F, E> {
     fn drop(&mut self) {
         let Self { f: Indent { f, .. }, close, .. } = self;
-        let _ = write!(f, "{}", close);
+        let _ = write!(f, "{close}");
     }
 }
 
diff --git a/src/lexer.rs b/src/lexer.rs
index fdc0f4c..8c7744a 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -110,8 +110,8 @@ impl<'t> Lexer<'t> {
         self
     }
 
-    /// Produces a LexError at the start of the current token
-    fn error(&self, res: LexFailure) -> LexError {
+    /// Produces a [`LexError`] at the start of the current token
+    const fn error(&self, res: LexFailure) -> LexError {
         LexError { pos: Span(self.head, self.tail), res }
     }
@@ -144,7 +144,7 @@ impl<'t> Lexer<'t> {
         self
     }
 
-    fn start_token(&mut self) -> &mut Self {
+    const fn start_token(&mut self) -> &mut Self {
         self.head = self.tail;
         self
     }
@@ -278,11 +278,11 @@ impl<'t> Lexer<'t> {
         Err(self.error(UnterminatedBlockComment))
     }
 
-    /// Consumes characters until it reaches a character not in [is_xid_continue].
+    /// Consumes characters until it reaches a character not in [`is_xid_continue`].
     ///
     /// Always consumes the first character.
     ///
-    /// Maps the result to either a [TKind::Identifier] or a [TKind] keyword.
+    /// Maps the result to either a [`TKind::Identifier`] or a [`TKind`] keyword.
     pub fn identifier(&mut self) -> Result {
         while self.consume().peek().is_some_and(is_xid_continue) {}
         let (lexeme, _span) = self.as_str();
@@ -346,7 +346,7 @@ impl<'t> Lexer<'t> {
                 Some('\\') => self.escape()?,
                 Some('"') => break,
                 Some(c) => c,
-            })
+            });
         }
         lexeme.shrink_to_fit();
         Ok(self.produce_with_lexeme(TKind::String, Lexeme::String(lexeme)))
@@ -375,7 +375,7 @@ impl<'t> Lexer<'t> {
     /// Parses two hex-digits and constructs a [char] out of them.
     pub fn hex_escape(&mut self) -> Result {
         let out = (self.digit::<16>()? << 4) + self.digit::<16>()?;
-        char::from_u32(out).ok_or(self.error(InvalidUnicodeEscape(out)))
+        char::from_u32(out).ok_or_else(|| self.error(InvalidUnicodeEscape(out)))
     }
 
     /// Parses a sequence of `{}`-bracketed hex-digits and constructs a [char] out of them.
diff --git a/src/lib.rs b/src/lib.rs
index f0a92e4..6f993b1 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -27,7 +27,7 @@ pub mod typed_ast {
         pub local: HashMap,
     }
 
-    /// DefID annotation
+    /// `DefID` annotation
     #[derive(Clone, Copy, Debug, PartialEq, Eq)]
     pub struct Defn {
         pub span: Span,
diff --git a/src/parser.rs b/src/parser.rs
index 6a8176c..d71513d 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -1,4 +1,5 @@
-//! The parser takes a stream of [Token]s from the [Lexer], and turns them into [crate::ast] nodes.
+//! The parser takes a stream of [`Token`]s from the [`Lexer`], and turns them into [`crate::ast`]
+//! nodes.
 use crate::{
     ast::*,
     lexer::{LexError, LexFailure, Lexer},
@@ -68,7 +69,7 @@ impl PResultExt for PResult {
     }
 }
 
-/// Opens a scope where [ParseError::EOF] is unexpected (See [PResultExt::no_eof])
+/// Opens a scope where [`ParseError::EOF`] is unexpected (See [`PResultExt::no_eof`])
 fn no_eof(f: impl FnOnce() -> PResult) -> PResult {
     f().no_eof()
 }
@@ -88,11 +89,11 @@ impl<'t> Parser<'t> {
     }
 
     /// The identity function. This exists to make production chaining easier.
-    pub fn then(&self, t: T) -> T {
+    pub const fn then(&self, t: T) -> T {
         t
     }
 
-    pub fn span(&self) -> Span {
+    pub const fn span(&self) -> Span {
         self.last_loc
     }
 
@@ -101,7 +102,7 @@ impl<'t> Parser<'t> {
         Parse::parse(self, level)
     }
 
-    /// Peeks the next [Token]. Returns [ParseError::FromLexer] on lexer error.
+    /// Peeks the next [`Token`]. Returns [`ParseError::FromLexer`] on lexer error.
     pub fn peek(&mut self) -> PResult<&Token> {
         let next_tok = match self.next_tok.take() {
             Some(tok) => tok,
@@ -114,9 +115,7 @@ impl<'t> Parser<'t> {
                 }
             },
         };
-        self.next_tok = Some(next_tok);
-
-        let next_tok = self.next_tok.as_ref().expect("should have Some lex result");
+        let next_tok = self.next_tok.insert(next_tok);
 
         if let Ok(tok) = next_tok {
             self.last_loc = tok.span;
@@ -125,7 +124,7 @@ impl<'t> Parser<'t> {
         next_tok.as_ref().map_err(|e| *e)
     }
 
-    /// Peeks the next token if it matches the `expected` [TKind]
+    /// Peeks the next token if it matches the `expected` [`TKind`]
    pub fn peek_if(&mut self, expected: TKind) -> PResult> {
         match self.peek() {
             Ok(tok) if tok.kind == expected => Ok(Some(tok)),
@@ -152,10 +151,10 @@ impl<'t> Parser<'t> {
     #[allow(clippy::should_implement_trait)]
     pub fn next(&mut self) -> PResult {
         self.peek().no_eof()?;
-        Ok(self.take().expect("should have token here"))
+        self.take() // .expect("should have token here")
     }
 
-    /// Consumes and returns the next [Token] if it matches the `expected` [TKind]
+    /// Consumes and returns the next [`Token`] if it matches the `expected` [`TKind`]
     pub fn next_if(&mut self, expected: TKind) -> PResult> {
         match self.peek() {
             Ok(t) if t.kind == expected => self.take().map(Ok),
@@ -177,7 +176,7 @@ impl<'t> Parser<'t> {
     ) -> PResult> {
         // TODO: This loses lexer errors
         while self.peek_if(end).no_eof()?.is_none() {
-            elems.push(self.parse(level.clone()).no_eof()?);
+            elems.push(self.parse(level).no_eof()?);
             match self.peek_if(sep)? {
                 Some(_) => self.consume(),
                 None => break,
@@ -199,7 +198,7 @@ impl<'t> Parser<'t> {
         sep: TKind,
     ) -> PResult> {
         loop {
-            let elem = self.parse(level.clone()).no_eof()?;
+            let elem = self.parse(level).no_eof()?;
             elems.push(elem);
             match self.peek_if(sep) {
                 Ok(Some(_)) => self.consume(),
@@ -241,13 +240,13 @@ impl<'t> Parser<'t> {
 }
 
 pub trait Parse<'t> {
-    type Prec: Clone;
+    type Prec: Copy;
+
     fn parse(p: &mut Parser<'t>, _level: Self::Prec) -> PResult
     where Self: Sized;
 }
 
 impl<'t> Parse<'t> for FqPath {
-    // ugly hack: provide a partial path to parse()
     type Prec = ();
 
     fn parse(p: &mut Parser<'t>, _level: Self::Prec) -> PResult {
@@ -317,7 +316,7 @@ pub enum PPrec {
 }
 
 impl PPrec {
-    fn next(self) -> Self {
+    const fn next(self) -> Self {
         match self {
             Self::Min => Self::Alt,
             Self::Alt => Self::Tuple,
@@ -448,7 +447,7 @@ impl<'t> Parse<'t> for Pat {
     }
 }
 
-fn parse_array_pat<'t>(p: &mut Parser<'t>) -> PResult {
+fn parse_array_pat(p: &mut Parser<'_>) -> PResult {
     if p.consume().peek()?.kind == TKind::RBrack {
         p.consume();
         return Ok(Pat::Op(PatOp::Slice, vec![]));
@@ -528,7 +527,7 @@ impl Prec {
     }
 }
 
-/// PseudoOperator: fake operators used to give certain tokens special behavior.
+/// `PseudoOperator`: fake operators used to give certain tokens special behavior.
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum Ps {
     Id, // Identifier
@@ -603,7 +602,7 @@ fn from_prefix(token: &Token) -> PResult<(Ps, Prec)> {
     })
 }
 
-fn from_infix(token: &Token) -> PResult<(Ps, Prec)> {
+const fn from_infix(token: &Token) -> PResult<(Ps, Prec)> {
     Ok(match token.kind {
         TKind::Semi => (Ps::Op(Op::Do), Prec::Do), // the inspiration
         TKind::In => (Ps::Op(Op::Do), Prec::Do),
@@ -806,7 +805,7 @@ impl<'t> Parse<'t> for Expr {
                     p.consume()
                         .expect(TKind::LBrack)?
                         .opt(MIN, TKind::RBrack)?
-                        .unwrap_or(Expr::Op(Op::Tuple, vec![]).anno(span)),
+                        .unwrap_or_else(|| Expr::Op(Op::Tuple, vec![]).anno(span)),
                     p.parse(level)?,
                 ],
             ),
@@ -885,7 +884,7 @@ impl<'t> Parse<'t> for Expr {
                     head.anno(span),
                     p.consume()
                         .opt(0, TKind::RParen)?
-                        .unwrap_or(Expr::Op(Op::Tuple, vec![]).anno(span)),
+                        .unwrap_or_else(|| Expr::Op(Op::Tuple, vec![]).anno(span)),
                 ],
             ),
             Ps::Op(op @ (Op::Tuple | Op::Dot | Op::LogAnd | Op::LogOr)) => Expr::Op(
@@ -910,7 +909,7 @@ impl<'t> Parse<'t> for Expr {
 }
 
 /// Parses an array with 0 or more elements, or an array-repetition
-fn parse_array<'t>(p: &mut Parser<'t>) -> PResult {
+fn parse_array(p: &mut Parser<'_>) -> PResult {
     if p.consume().peek()?.kind == TKind::RBrack {
         p.consume();
         return Ok(Expr::Op(Op::Array, vec![]));
@@ -928,7 +927,7 @@ fn parse_array<'t>(p: &mut Parser<'t>) -> PResult {
     })
 }
 
-fn parse_match<'t>(p: &mut Parser<'t>) -> PResult {
+fn parse_match(p: &mut Parser<'_>) -> PResult {
     let scrutinee = p.consume().parse(Prec::Logical.value())?;
 
     let arms = p
@@ -942,7 +941,7 @@ fn parse_match(p: &mut Parser<'_>) -> PResult {
     Ok(expr)
 }
 
-fn parse_for<'t>(p: &mut Parser<'t>, _level: ()) -> PResult {
+fn parse_for(p: &mut Parser<'_>, _level: ()) -> PResult {
     // for Pat
     let pat = p.consume().parse(PPrec::Tuple)?;
     // in Expr
diff --git a/src/span.rs b/src/span.rs
index d37e8f1..b1624d9 100644
--- a/src/span.rs
+++ b/src/span.rs
@@ -16,7 +16,7 @@ impl std::fmt::Debug for Span {
 
 #[expect(non_snake_case)]
 /// Stores the start and end byte position
-pub fn Span(head: u32, tail: u32) -> Span {
+pub const fn Span(head: u32, tail: u32) -> Span {
     Span { head, tail }
 }
 
diff --git a/src/token.rs b/src/token.rs
index 6aaacc2..d117e80 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -10,7 +10,7 @@ pub struct Token {
 }
 
 impl Token {
-    pub fn kind(&self) -> TKind {
+    pub const fn kind(&self) -> TKind {
         self.kind
     }
 }
@@ -35,13 +35,13 @@ impl Lexeme {
             _ => None,
         }
     }
-    pub fn int(&self) -> Option {
+    pub const fn int(&self) -> Option {
         match self {
             Self::Integer(i, _) => Some(*i),
             _ => None,
         }
     }
-    pub fn char(&self) -> Option {
+    pub const fn char(&self) -> Option {
         match self {
             Self::Char(c) => Some(*c),
             _ => None,