cl-token: Flatten TokenKind into a single enum (wow!)

John 2024-07-30 16:47:09 -05:00
parent 1b217b2e75
commit a3e383b53f
5 changed files with 298 additions and 302 deletions
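
For orientation, the shape of the change in cl-token: the old `TokenKind` hid every delimiter and operator behind a `Punct(Punct)` variant, while the new `TokenKind` carries those variants directly, so call sites match on `TokenKind::Semi` instead of `TokenKind::Punct(Punct::Semi)`. A minimal, abridged sketch of that shape (the `Old*` names are illustrative, not identifiers from the codebase):

    // Before: two enums, punctuation nested behind TokenKind::Punct.
    pub enum OldTokenKind {
        Identifier,
        Break, // ... other keywords ...
        Punct(OldPunct),
    }
    pub enum OldPunct {
        Semi,
        Comma, // ... other punctuation ...
    }

    // After: one flat enum; keywords and punctuation are siblings.
    pub enum TokenKind {
        Identifier,
        Break, // ... other keywords ...
        Semi,
        Comma, // ... other punctuation ...
    }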


@@ -97,33 +97,33 @@ impl<'t> Lexer<'t> {
     /// Scans through the text, searching for the next [Token]
     pub fn scan(&mut self) -> LResult<Token> {
         match self.skip_whitespace().peek()? {
-            '{' => self.consume()?.produce_op(Punct::LCurly),
-            '}' => self.consume()?.produce_op(Punct::RCurly),
-            '[' => self.consume()?.produce_op(Punct::LBrack),
-            ']' => self.consume()?.produce_op(Punct::RBrack),
-            '(' => self.consume()?.produce_op(Punct::LParen),
-            ')' => self.consume()?.produce_op(Punct::RParen),
+            '{' => self.consume()?.produce_op(Kind::LCurly),
+            '}' => self.consume()?.produce_op(Kind::RCurly),
+            '[' => self.consume()?.produce_op(Kind::LBrack),
+            ']' => self.consume()?.produce_op(Kind::RBrack),
+            '(' => self.consume()?.produce_op(Kind::LParen),
+            ')' => self.consume()?.produce_op(Kind::RParen),
             '&' => self.consume()?.amp(),
-            '@' => self.consume()?.produce_op(Punct::At),
-            '\\' => self.consume()?.produce_op(Punct::Backslash),
+            '@' => self.consume()?.produce_op(Kind::At),
+            '\\' => self.consume()?.produce_op(Kind::Backslash),
             '!' => self.consume()?.bang(),
             '|' => self.consume()?.bar(),
             ':' => self.consume()?.colon(),
-            ',' => self.consume()?.produce_op(Punct::Comma),
+            ',' => self.consume()?.produce_op(Kind::Comma),
             '.' => self.consume()?.dot(),
             '=' => self.consume()?.equal(),
-            '`' => self.consume()?.produce_op(Punct::Grave),
+            '`' => self.consume()?.produce_op(Kind::Grave),
             '>' => self.consume()?.greater(),
             '#' => self.consume()?.hash(),
             '<' => self.consume()?.less(),
             '-' => self.consume()?.minus(),
             '+' => self.consume()?.plus(),
-            '?' => self.consume()?.produce_op(Punct::Question),
+            '?' => self.consume()?.produce_op(Kind::Question),
             '%' => self.consume()?.rem(),
-            ';' => self.consume()?.produce_op(Punct::Semi),
+            ';' => self.consume()?.produce_op(Kind::Semi),
             '/' => self.consume()?.slash(),
             '*' => self.consume()?.star(),
-            '~' => self.consume()?.produce_op(Punct::Tilde),
+            '~' => self.consume()?.produce_op(Kind::Tilde),
             '^' => self.consume()?.xor(),
             '0' => self.consume()?.int_with_base(),
             '1'..='9' => self.digits::<10>(),
@@ -157,14 +157,14 @@ impl<'t> Lexer<'t> {
             .copied()
             .ok_or(Error::end_of_file(self.line(), self.col()))
     }
-    fn produce(&mut self, kind: TokenKind, data: impl Into<TokenData>) -> LResult<Token> {
+    fn produce(&mut self, kind: Kind, data: impl Into<TokenData>) -> LResult<Token> {
         let loc = self.start_loc;
         self.start_loc = self.current_loc;
         self.start = self.current;
         Ok(Token::new(kind, data, loc.0, loc.1))
     }
-    fn produce_op(&mut self, kind: Punct) -> LResult<Token> {
-        self.produce(TokenKind::Punct(kind), ())
+    fn produce_op(&mut self, kind: Kind) -> LResult<Token> {
+        self.produce(kind, ())
     }
     fn skip_whitespace(&mut self) -> &mut Self {
         while let Ok(c) = self.peek() {
@@ -195,126 +195,126 @@ impl<'t> Lexer<'t> {
 impl<'t> Lexer<'t> {
     fn amp(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('&') => self.consume()?.produce_op(Punct::AmpAmp),
-            Ok('=') => self.consume()?.produce_op(Punct::AmpEq),
-            _ => self.produce_op(Punct::Amp),
+            Ok('&') => self.consume()?.produce_op(Kind::AmpAmp),
+            Ok('=') => self.consume()?.produce_op(Kind::AmpEq),
+            _ => self.produce_op(Kind::Amp),
         }
     }
     fn bang(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('!') => self.consume()?.produce_op(Punct::BangBang),
-            Ok('=') => self.consume()?.produce_op(Punct::BangEq),
-            _ => self.produce_op(Punct::Bang),
+            Ok('!') => self.consume()?.produce_op(Kind::BangBang),
+            Ok('=') => self.consume()?.produce_op(Kind::BangEq),
+            _ => self.produce_op(Kind::Bang),
         }
     }
     fn bar(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('|') => self.consume()?.produce_op(Punct::BarBar),
-            Ok('=') => self.consume()?.produce_op(Punct::BarEq),
-            _ => self.produce_op(Punct::Bar),
+            Ok('|') => self.consume()?.produce_op(Kind::BarBar),
+            Ok('=') => self.consume()?.produce_op(Kind::BarEq),
+            _ => self.produce_op(Kind::Bar),
         }
     }
     fn colon(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok(':') => self.consume()?.produce_op(Punct::ColonColon),
-            _ => self.produce_op(Punct::Colon),
+            Ok(':') => self.consume()?.produce_op(Kind::ColonColon),
+            _ => self.produce_op(Kind::Colon),
         }
     }
     fn dot(&mut self) -> LResult<Token> {
         match self.peek() {
             Ok('.') => {
                 if let Ok('=') = self.consume()?.peek() {
-                    self.consume()?.produce_op(Punct::DotDotEq)
+                    self.consume()?.produce_op(Kind::DotDotEq)
                 } else {
-                    self.produce_op(Punct::DotDot)
+                    self.produce_op(Kind::DotDot)
                 }
             }
-            _ => self.produce_op(Punct::Dot),
+            _ => self.produce_op(Kind::Dot),
         }
     }
     fn equal(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('=') => self.consume()?.produce_op(Punct::EqEq),
-            Ok('>') => self.consume()?.produce_op(Punct::FatArrow),
-            _ => self.produce_op(Punct::Eq),
+            Ok('=') => self.consume()?.produce_op(Kind::EqEq),
+            Ok('>') => self.consume()?.produce_op(Kind::FatArrow),
+            _ => self.produce_op(Kind::Eq),
         }
     }
     fn greater(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('=') => self.consume()?.produce_op(Punct::GtEq),
+            Ok('=') => self.consume()?.produce_op(Kind::GtEq),
             Ok('>') => {
                 if let Ok('=') = self.consume()?.peek() {
-                    self.consume()?.produce_op(Punct::GtGtEq)
+                    self.consume()?.produce_op(Kind::GtGtEq)
                 } else {
-                    self.produce_op(Punct::GtGt)
+                    self.produce_op(Kind::GtGt)
                 }
             }
-            _ => self.produce_op(Punct::Gt),
+            _ => self.produce_op(Kind::Gt),
         }
     }
     fn hash(&mut self) -> LResult<Token> {
         match self.peek() {
             Ok('!') => self.consume()?.hashbang(),
-            _ => self.produce_op(Punct::Hash),
+            _ => self.produce_op(Kind::Hash),
         }
     }
     fn hashbang(&mut self) -> LResult<Token> {
         match self.peek() {
             Ok('/' | '\'') => self.line_comment(),
-            _ => self.produce_op(Punct::HashBang),
+            _ => self.produce_op(Kind::HashBang),
         }
     }
     fn less(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('=') => self.consume()?.produce_op(Punct::LtEq),
+            Ok('=') => self.consume()?.produce_op(Kind::LtEq),
             Ok('<') => {
                 if let Ok('=') = self.consume()?.peek() {
-                    self.consume()?.produce_op(Punct::LtLtEq)
+                    self.consume()?.produce_op(Kind::LtLtEq)
                 } else {
-                    self.produce_op(Punct::LtLt)
+                    self.produce_op(Kind::LtLt)
                 }
             }
-            _ => self.produce_op(Punct::Lt),
+            _ => self.produce_op(Kind::Lt),
         }
     }
     fn minus(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('=') => self.consume()?.produce_op(Punct::MinusEq),
-            Ok('>') => self.consume()?.produce_op(Punct::Arrow),
-            _ => self.produce_op(Punct::Minus),
+            Ok('=') => self.consume()?.produce_op(Kind::MinusEq),
+            Ok('>') => self.consume()?.produce_op(Kind::Arrow),
+            _ => self.produce_op(Kind::Minus),
         }
     }
     fn plus(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('=') => self.consume()?.produce_op(Punct::PlusEq),
-            _ => self.produce_op(Punct::Plus),
+            Ok('=') => self.consume()?.produce_op(Kind::PlusEq),
+            _ => self.produce_op(Kind::Plus),
         }
     }
     fn rem(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('=') => self.consume()?.produce_op(Punct::RemEq),
-            _ => self.produce_op(Punct::Rem),
+            Ok('=') => self.consume()?.produce_op(Kind::RemEq),
+            _ => self.produce_op(Kind::Rem),
         }
     }
     fn slash(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('=') => self.consume()?.produce_op(Punct::SlashEq),
+            Ok('=') => self.consume()?.produce_op(Kind::SlashEq),
             Ok('/') => self.consume()?.line_comment(),
             Ok('*') => self.consume()?.block_comment(),
-            _ => self.produce_op(Punct::Slash),
+            _ => self.produce_op(Kind::Slash),
         }
     }
     fn star(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('=') => self.consume()?.produce_op(Punct::StarEq),
-            _ => self.produce_op(Punct::Star),
+            Ok('=') => self.consume()?.produce_op(Kind::StarEq),
+            _ => self.produce_op(Kind::Star),
         }
     }
     fn xor(&mut self) -> LResult<Token> {
         match self.peek() {
-            Ok('=') => self.consume()?.produce_op(Punct::XorEq),
-            Ok('^') => self.consume()?.produce_op(Punct::XorXor),
-            _ => self.produce_op(Punct::Xor),
+            Ok('=') => self.consume()?.produce_op(Kind::XorEq),
+            Ok('^') => self.consume()?.produce_op(Kind::XorXor),
+            _ => self.produce_op(Kind::Xor),
         }
     }
 }
@@ -489,7 +489,7 @@ pub mod error {
     pub enum Reason {
         /// Found an opening delimiter of type [char], but not the expected closing delimiter
         UnmatchedDelimiters(char),
-        /// Found a character that doesn't belong to any [TokenKind](cl_token::TokenKind)
+        /// Found a character that doesn't belong to any [Kind](cl_token::Kind)
         UnexpectedChar(char),
         /// Found a character that's not valid in identifiers while looking for an identifier
         NotIdentifier(char),
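
The lexer hunks above refer to the flattened enum as `Kind`; the corresponding `use` line falls outside the ranges shown, so the alias below is an assumption inferred from the call sites rather than a line taken from the commit:

    // Assumed import near the top of the lexer (not visible in the hunks above):
    use cl_token::TokenKind as Kind;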


@@ -110,7 +110,7 @@ mod string {
 }
 mod punct {
     macro op($op:ident) {
-        TokenKind::Punct(Punct::$op)
+        TokenKind::$op
     }
     use super::*;


@@ -83,30 +83,26 @@ impl<'t> Parser<'t> {
             Err(self.error(ExpectedToken { want, got }, while_parsing))
         }
     }
-    #[inline]
-    pub fn match_op(&mut self, want: Punct, while_parsing: Parsing) -> PResult<Token> {
-        self.match_type(TokenKind::Punct(want), while_parsing)
-    }
 }
 // the three matched delimiter pairs
 /// Square brackets: `[` `]`
-const BRACKETS: (Punct, Punct) = (Punct::LBrack, Punct::RBrack);
+const BRACKETS: (TokenKind, TokenKind) = (TokenKind::LBrack, TokenKind::RBrack);
 /// Curly braces: `{` `}`
-const CURLIES: (Punct, Punct) = (Punct::LCurly, Punct::RCurly);
+const CURLIES: (TokenKind, TokenKind) = (TokenKind::LCurly, TokenKind::RCurly);
 /// Parentheses: `(` `)`
-const PARENS: (Punct, Punct) = (Punct::LParen, Punct::RParen);
+const PARENS: (TokenKind, TokenKind) = (TokenKind::LParen, TokenKind::RParen);
 /// Parses constructions of the form `delim.0 f delim.1` (i.e. `(` `foobar` `)`)
 const fn delim<'t, T>(
     f: impl Fn(&mut Parser<'t>) -> PResult<T>,
-    delim: (Punct, Punct),
+    delim: (TokenKind, TokenKind),
     while_parsing: Parsing,
 ) -> impl Fn(&mut Parser<'t>) -> PResult<T> {
     move |parser| {
-        parser.match_op(delim.0, while_parsing)?;
+        parser.match_type(delim.0, while_parsing)?;
         let out = f(parser)?;
-        parser.match_op(delim.1, while_parsing)?;
+        parser.match_type(delim.1, while_parsing)?;
         Ok(out)
     }
 }
@@ -116,15 +112,15 @@ const fn delim<'t, T>(
 /// where `~until` is a negative lookahead assertion
 const fn sep<'t, T>(
     f: impl Fn(&mut Parser<'t>) -> PResult<T>,
-    sep: Punct,
-    until: Punct,
+    sep: TokenKind,
+    until: TokenKind,
     while_parsing: Parsing,
 ) -> impl Fn(&mut Parser<'t>) -> PResult<Vec<T>> {
     move |parser| {
         let mut args = vec![];
-        while TokenKind::Punct(until) != parser.peek_kind(while_parsing)? {
+        while until != parser.peek_kind(while_parsing)? {
             args.push(f(parser)?);
-            if TokenKind::Punct(sep) != parser.peek_kind(while_parsing)? {
+            if sep != parser.peek_kind(while_parsing)? {
                 break;
             }
             parser.consume_peeked();
@@ -138,12 +134,12 @@ const fn sep<'t, T>(
 /// where `~until` is a negative lookahead assertion
 const fn rep<'t, T>(
     f: impl Fn(&mut Parser<'t>) -> PResult<T>,
-    until: Punct,
+    until: TokenKind,
     while_parsing: Parsing,
 ) -> impl Fn(&mut Parser<'t>) -> PResult<Vec<T>> {
     move |parser| {
         let mut out = vec![];
-        while TokenKind::Punct(until) != parser.peek_kind(while_parsing)? {
+        while until != parser.peek_kind(while_parsing)? {
             out.push(f(parser)?)
         }
         Ok(out)
@@ -152,7 +148,7 @@ const fn rep<'t, T>(
 /// Expands to a pattern which matches item-like [Token] [TokenKind]s
 macro item_like() {
-    TokenKind::Punct(Punct::Hash)
+    TokenKind::Hash
         | TokenKind::Pub
         | TokenKind::Type
         | TokenKind::Const
@@ -171,7 +167,7 @@ impl<'t> Parser<'t> {
     pub fn file(&mut self) -> PResult<File> {
         let mut items = vec![];
         while match self.peek_kind(Parsing::File) {
-            Ok(TokenKind::Punct(Punct::RCurly)) | Err(Error { reason: EndOfInput, .. }) => false,
+            Ok(TokenKind::RCurly) | Err(Error { reason: EndOfInput, .. }) => false,
             Ok(_) => true,
             Err(e) => Err(e)?,
         } {
@@ -209,7 +205,7 @@ impl<'t> Parser<'t> {
     /// *RelativePath* = [PathPart] (`::` [PathPart])*
     pub fn path(&mut self) -> PResult<Path> {
         const PARSING: Parsing = Parsing::Path;
-        let absolute = self.match_op(Punct::ColonColon, PARSING).is_ok();
+        let absolute = self.match_type(TokenKind::ColonColon, PARSING).is_ok();
         let mut parts = vec![];
         if absolute {
@@ -221,7 +217,10 @@ impl<'t> Parser<'t> {
             parts.push(self.path_part()?)
         };
-        while self.match_op(Punct::ColonColon, Parsing::Path).is_ok() {
+        while self
+            .match_type(TokenKind::ColonColon, Parsing::Path)
+            .is_ok()
+        {
             parts.push(self.path_part()?)
         }
@@ -235,7 +234,7 @@ impl<'t> Parser<'t> {
         let start = self.loc();
         Ok(Stmt {
             kind: self.stmtkind()?,
-            semi: match self.match_op(Punct::Semi, Parsing::Stmt) {
+            semi: match self.match_type(TokenKind::Semi, Parsing::Stmt) {
                 Ok(_) => Semi::Terminated,
                 _ => Semi::Unterminated,
             },
@@ -255,11 +254,11 @@ impl<'t> Parser<'t> {
 impl<'t> Parser<'t> {
     /// Parses an [attribute set](Attrs)
     pub fn attributes(&mut self) -> PResult<Attrs> {
-        if self.match_op(Punct::Hash, Parsing::Attrs).is_err() {
+        if self.match_type(TokenKind::Hash, Parsing::Attrs).is_err() {
             return Ok(Attrs { meta: vec![] });
         }
         let meta = delim(
-            sep(Self::meta, Punct::Comma, BRACKETS.1, Parsing::Attrs),
+            sep(Self::meta, TokenKind::Comma, BRACKETS.1, Parsing::Attrs),
             BRACKETS,
             Parsing::Attrs,
         )(self)?;
@@ -273,16 +272,16 @@ impl<'t> Parser<'t> {
     pub fn meta_kind(&mut self) -> PResult<MetaKind> {
         const PARSING: Parsing = Parsing::Meta;
         let lit_tuple = delim(
-            sep(Self::literal, Punct::Comma, PARENS.1, PARSING),
+            sep(Self::literal, TokenKind::Comma, PARENS.1, PARSING),
             PARENS,
             PARSING,
         );
         Ok(match self.peek_kind(PARSING) {
-            Ok(TokenKind::Punct(Punct::Eq)) => {
+            Ok(TokenKind::Eq) => {
                 self.consume_peeked();
                 MetaKind::Equals(self.literal()?)
             }
-            Ok(TokenKind::Punct(Punct::LParen)) => MetaKind::Func(lit_tuple(self)?),
+            Ok(TokenKind::LParen) => MetaKind::Func(lit_tuple(self)?),
             _ => MetaKind::Plain,
         })
     }
@@ -315,13 +314,13 @@ impl<'t> Parser<'t> {
         let out = Ok(Alias {
             to: self.identifier()?,
-            from: if self.match_op(Punct::Eq, PARSING).is_ok() {
+            from: if self.match_type(TokenKind::Eq, PARSING).is_ok() {
                 Some(self.ty()?.into())
             } else {
                 None
             },
         });
-        self.match_op(Punct::Semi, PARSING)?;
+        self.match_type(TokenKind::Semi, PARSING)?;
         out
     }
@@ -333,15 +332,15 @@ impl<'t> Parser<'t> {
         let out = Ok(Const {
             name: self.identifier()?,
             ty: {
-                self.match_op(Punct::Colon, PARSING)?;
+                self.match_type(TokenKind::Colon, PARSING)?;
                 self.ty()?.into()
             },
             init: {
-                self.match_op(Punct::Eq, PARSING)?;
+                self.match_type(TokenKind::Eq, PARSING)?;
                 self.expr()?.into()
             },
         });
-        self.match_op(Punct::Semi, PARSING)?;
+        self.match_type(TokenKind::Semi, PARSING)?;
         out
     }
@@ -354,15 +353,15 @@ impl<'t> Parser<'t> {
             mutable: self.mutability(),
             name: self.identifier()?,
             ty: {
-                self.match_op(Punct::Colon, PARSING)?;
+                self.match_type(TokenKind::Colon, PARSING)?;
                 self.ty()?.into()
             },
             init: {
-                self.match_op(Punct::Eq, PARSING)?;
+                self.match_type(TokenKind::Eq, PARSING)?;
                 self.expr()?.into()
             },
         });
-        self.match_op(Punct::Semi, PARSING)?;
+        self.match_type(TokenKind::Semi, PARSING)?;
         out
     }
@@ -379,15 +378,12 @@ impl<'t> Parser<'t> {
         let inline = delim(Self::file, CURLIES, PARSING);
         match self.peek_kind(PARSING)? {
-            TokenKind::Punct(Punct::LCurly) => Ok(ModuleKind::Inline(inline(self)?)),
-            TokenKind::Punct(Punct::Semi) => {
+            TokenKind::LCurly => Ok(ModuleKind::Inline(inline(self)?)),
+            TokenKind::Semi => {
                 self.consume_peeked();
                 Ok(ModuleKind::Outline)
             }
-            got => Err(self.error(
-                ExpectedToken { want: TokenKind::Punct(Punct::Semi), got },
-                PARSING,
-            )),
+            got => Err(self.error(ExpectedToken { want: TokenKind::Semi, got }, PARSING)),
         }
     }
@@ -410,8 +406,8 @@ impl<'t> Parser<'t> {
             sign,
             bind,
             body: match self.peek_kind(PARSING)? {
-                TokenKind::Punct(Punct::LCurly) => Some(self.block()?),
-                TokenKind::Punct(Punct::Semi) => {
+                TokenKind::LCurly => Some(self.block()?),
+                TokenKind::Semi => {
                     self.consume_peeked();
                     None
                 }
@@ -424,11 +420,11 @@ impl<'t> Parser<'t> {
     pub fn parse_params(&mut self) -> PResult<(Vec<Param>, Vec<TyKind>)> {
         const PARSING: Parsing = Parsing::Function;
         let (mut params, mut types) = (vec![], vec![]);
-        while Ok(TokenKind::Punct(Punct::RParen)) != self.peek_kind(PARSING) {
+        while Ok(TokenKind::RParen) != self.peek_kind(PARSING) {
             let (param, ty) = self.parse_param()?;
             params.push(param);
             types.push(ty);
-            if self.match_op(Punct::Comma, PARSING).is_err() {
+            if self.match_type(TokenKind::Comma, PARSING).is_err() {
                 break;
             }
         }
@@ -440,7 +436,7 @@ impl<'t> Parser<'t> {
         Ok((
             Param { mutability: self.mutability(), name: self.identifier()? },
             {
-                self.match_op(Punct::Colon, Parsing::Param)?;
+                self.match_type(TokenKind::Colon, Parsing::Param)?;
                 self.tykind()?
             },
         ))
@@ -454,16 +450,13 @@ impl<'t> Parser<'t> {
         Ok(Struct {
             name: self.identifier()?,
             kind: match self.peek_kind(PARSING)? {
-                TokenKind::Punct(Punct::LParen) => self.structkind_tuple()?,
-                TokenKind::Punct(Punct::LCurly) => self.structkind_struct()?,
-                TokenKind::Punct(Punct::Semi) => {
+                TokenKind::LParen => self.structkind_tuple()?,
+                TokenKind::LCurly => self.structkind_struct()?,
+                TokenKind::Semi => {
                     self.consume_peeked();
                     StructKind::Empty
                 }
-                got => Err(self.error(
-                    ExpectedToken { want: TokenKind::Punct(Punct::Semi), got },
-                    PARSING,
-                ))?,
+                got => Err(self.error(ExpectedToken { want: TokenKind::Semi, got }, PARSING))?,
             },
         })
     }
@@ -473,7 +466,7 @@ impl<'t> Parser<'t> {
         const PARSING: Parsing = Parsing::StructKind;
         Ok(StructKind::Tuple(delim(
-            sep(Self::ty, Punct::Comma, PARENS.1, PARSING),
+            sep(Self::ty, TokenKind::Comma, PARENS.1, PARSING),
             PARENS,
             PARSING,
         )(self)?))
@@ -484,7 +477,7 @@ impl<'t> Parser<'t> {
         const PARSING: Parsing = Parsing::StructKind;
         Ok(StructKind::Struct(delim(
-            sep(Self::struct_member, Punct::Comma, CURLIES.1, PARSING),
+            sep(Self::struct_member, TokenKind::Comma, CURLIES.1, PARSING),
             CURLIES,
             PARSING,
         )(self)?))
@@ -497,7 +490,7 @@ impl<'t> Parser<'t> {
             vis: self.visibility(),
             name: self.identifier()?,
             ty: {
-                self.match_op(Punct::Colon, PARSING)?;
+                self.match_type(TokenKind::Colon, PARSING)?;
                 self.ty()?
             },
         })
@@ -511,12 +504,17 @@ impl<'t> Parser<'t> {
         Ok(Enum {
             name: self.identifier()?,
             kind: match self.peek_kind(PARSING)? {
-                TokenKind::Punct(Punct::LCurly) => EnumKind::Variants(delim(
-                    sep(Self::enum_variant, Punct::Comma, Punct::RCurly, PARSING),
+                TokenKind::LCurly => EnumKind::Variants(delim(
+                    sep(
+                        Self::enum_variant,
+                        TokenKind::Comma,
+                        TokenKind::RCurly,
+                        PARSING,
+                    ),
                     CURLIES,
                     PARSING,
                 )(self)?),
-                TokenKind::Punct(Punct::Semi) => {
+                TokenKind::Semi => {
                     self.consume_peeked();
                     EnumKind::NoVariants
                 }
@@ -532,9 +530,9 @@ impl<'t> Parser<'t> {
         Ok(Variant {
             name: self.identifier()?,
             kind: match self.peek_kind(PARSING)? {
-                TokenKind::Punct(Punct::Eq) => self.variantkind_clike()?,
-                TokenKind::Punct(Punct::LCurly) => self.variantkind_struct()?,
-                TokenKind::Punct(Punct::LParen) => self.variantkind_tuple()?,
+                TokenKind::Eq => self.variantkind_clike()?,
+                TokenKind::LCurly => self.variantkind_struct()?,
+                TokenKind::LParen => self.variantkind_tuple()?,
                 _ => VariantKind::Plain,
             },
         })
@@ -544,7 +542,7 @@ impl<'t> Parser<'t> {
     pub fn variantkind_clike(&mut self) -> PResult<VariantKind> {
         const PARSING: Parsing = Parsing::VariantKind;
-        self.match_op(Punct::Eq, PARSING)?;
+        self.match_type(TokenKind::Eq, PARSING)?;
         let tok = self.match_type(TokenKind::Literal, PARSING)?;
         Ok(VariantKind::CLike(match tok.data() {
@@ -557,7 +555,12 @@ impl<'t> Parser<'t> {
     pub fn variantkind_struct(&mut self) -> PResult<VariantKind> {
         const PARSING: Parsing = Parsing::VariantKind;
         Ok(VariantKind::Struct(delim(
-            sep(Self::struct_member, Punct::Comma, Punct::RCurly, PARSING),
+            sep(
+                Self::struct_member,
+                TokenKind::Comma,
+                TokenKind::RCurly,
+                PARSING,
+            ),
             CURLIES,
             PARSING,
         )(self)?))
@@ -607,9 +610,9 @@ impl<'t> Parser<'t> {
     pub fn parse_use(&mut self) -> PResult<Use> {
         self.consume_peeked();
-        let absolute = self.match_op(Punct::ColonColon, Parsing::Use).is_ok();
+        let absolute = self.match_type(TokenKind::ColonColon, Parsing::Use).is_ok();
         let tree = self.parse_use_tree()?;
-        self.match_op(Punct::Semi, Parsing::Use)?;
+        self.match_type(TokenKind::Semi, Parsing::Use)?;
         Ok(Use { tree, absolute })
     }
@@ -617,18 +620,18 @@ impl<'t> Parser<'t> {
         const PARSING: Parsing = Parsing::UseTree;
         // glob import
         Ok(match self.peek_kind(PARSING)? {
-            TokenKind::Punct(Punct::Star) => {
+            TokenKind::Star => {
                 self.consume_peeked();
                 UseTree::Glob
             }
-            TokenKind::Punct(Punct::LCurly) => UseTree::Tree(delim(
-                sep(Self::parse_use_tree, Punct::Comma, CURLIES.1, PARSING),
+            TokenKind::LCurly => UseTree::Tree(delim(
+                sep(Self::parse_use_tree, TokenKind::Comma, CURLIES.1, PARSING),
                 CURLIES,
                 PARSING,
             )(self)?),
             TokenKind::SelfKw | TokenKind::Super | TokenKind::Identifier => {
                 let name = self.path_part()?;
-                if self.match_op(Punct::ColonColon, PARSING).is_ok() {
+                if self.match_type(TokenKind::ColonColon, PARSING).is_ok() {
                     UseTree::Path(name, Box::new(self.parse_use_tree()?))
                 } else {
                     let PathPart::Ident(name) = name else {
@@ -668,13 +671,13 @@ impl<'t> Parser<'t> {
     pub fn tykind(&mut self) -> PResult<TyKind> {
         const PARSING: Parsing = Parsing::TyKind;
         let out = match self.peek_kind(PARSING)? {
-            TokenKind::Punct(Punct::Bang) => {
+            TokenKind::Bang => {
                 self.consume_peeked();
                 TyKind::Never
             }
-            TokenKind::Punct(Punct::Amp) | TokenKind::Punct(Punct::AmpAmp) => self.tyref()?.into(),
-            TokenKind::Punct(Punct::LBrack) => self.tyslice_or_array()?,
-            TokenKind::Punct(Punct::LParen) => {
+            TokenKind::Amp | TokenKind::AmpAmp => self.tyref()?.into(),
+            TokenKind::LBrack => self.tyslice_or_array()?,
+            TokenKind::LParen => {
                 let out = self.tytuple()?;
                 match out.types.is_empty() {
                     true => TyKind::Empty,
@@ -692,9 +695,9 @@ impl<'t> Parser<'t> {
     /// [`TySlice`] = `[` [Ty] `]` \
     /// [`TyArray`] = `[` [Ty] `;` [usize] `]`
     pub fn tyslice_or_array(&mut self) -> PResult<TyKind> {
-        self.match_op(BRACKETS.0, Parsing::TySlice)?;
+        self.match_type(BRACKETS.0, Parsing::TySlice)?;
         let ty = self.tykind()?;
-        let (out, kind) = match self.match_op(Punct::Semi, Parsing::TyArray).is_ok() {
+        let (out, kind) = match self.match_type(TokenKind::Semi, Parsing::TyArray).is_ok() {
             true => {
                 let literal = self.match_type(TokenKind::Literal, Parsing::TyArray)?;
                 let &TokenData::Integer(count) = literal.data() else {
@@ -710,7 +713,7 @@ impl<'t> Parser<'t> {
                 Parsing::TySlice,
             ),
         };
-        self.match_op(BRACKETS.1, kind)?;
+        self.match_type(BRACKETS.1, kind)?;
         Ok(out)
     }
@@ -719,7 +722,7 @@ impl<'t> Parser<'t> {
         const PARSING: Parsing = Parsing::TyTuple;
         Ok(TyTuple {
             types: delim(
-                sep(Self::tykind, Punct::Comma, PARENS.1, PARSING),
+                sep(Self::tykind, TokenKind::Comma, PARENS.1, PARSING),
                 PARENS,
                 PARSING,
             )(self)?,
@@ -731,8 +734,8 @@ impl<'t> Parser<'t> {
         let mut count = 0;
         loop {
             match self.peek_kind(PARSING)? {
-                TokenKind::Punct(Punct::Amp) => count += 1,
-                TokenKind::Punct(Punct::AmpAmp) => count += 2,
+                TokenKind::Amp => count += 1,
+                TokenKind::AmpAmp => count += 2,
                 _ => break,
             }
             self.consume_peeked();
@@ -754,7 +757,7 @@ impl<'t> Parser<'t> {
     }
     pub fn parse_rety(&mut self) -> PResult<Option<Ty>> {
-        Ok(match self.match_op(Punct::Arrow, Parsing::TyFn) {
+        Ok(match self.match_type(TokenKind::Arrow, Parsing::TyFn) {
             Ok(_) => Some(self.ty()?),
             Err(_) => None,
         })
@@ -762,7 +765,7 @@ impl<'t> Parser<'t> {
     pub fn tyfn_args(&mut self) -> PResult<Vec<TyKind>> {
         const P: Parsing = Parsing::TyFn;
-        delim(sep(Self::tykind, Punct::Comma, PARENS.1, P), PARENS, P)(self)
+        delim(sep(Self::tykind, TokenKind::Comma, PARENS.1, P), PARENS, P)(self)
     }
 }
@@ -776,7 +779,7 @@ macro path_like() {
         | TokenKind::SelfKw
         | TokenKind::SelfTy
         | TokenKind::Identifier
-        | TokenKind::Punct(Punct::ColonColon)
+        | TokenKind::ColonColon
 }
 /// # Path parsing
 impl<'t> Parser<'t> {
@@ -811,7 +814,7 @@ impl<'t> Parser<'t> {
     /// See also: [Parser::stmt]
     pub fn stmtkind(&mut self) -> PResult<StmtKind> {
         Ok(match self.peek_kind(Parsing::StmtKind)? {
-            TokenKind::Punct(Punct::Semi) => StmtKind::Empty,
+            TokenKind::Semi => StmtKind::Empty,
             TokenKind::Let => self.parse_let()?.into(),
             item_like!() => self.item()?.into(),
             _ => self.expr()?.into(),
@@ -823,13 +826,13 @@ impl<'t> Parser<'t> {
         Ok(Let {
             mutable: self.mutability(),
             name: self.identifier()?,
-            ty: if Ok(TokenKind::Punct(Punct::Colon)) == self.peek_kind(Parsing::Let) {
+            ty: if Ok(TokenKind::Colon) == self.peek_kind(Parsing::Let) {
                 self.consume_peeked();
                 Some(self.ty()?.into())
             } else {
                 None
             },
-            init: if Ok(TokenKind::Punct(Punct::Eq)) == self.peek_kind(Parsing::Let) {
+            init: if Ok(TokenKind::Eq) == self.peek_kind(Parsing::Let) {
                 self.consume_peeked();
                 Some(self.expr()?.into())
             } else {
@@ -856,17 +859,10 @@ impl<'t> Parser<'t> {
         let mut head = match self.peek_kind(Parsing::Unary)? {
             literal_like!() => self.literal()?.into(),
             path_like!() => self.exprkind_pathlike()?,
-            TokenKind::Punct(Punct::Amp | Punct::AmpAmp) => self.addrof()?.into(),
-            TokenKind::Punct(Punct::LCurly) => self.block()?.into(),
-            TokenKind::Punct(Punct::LBrack) => self.exprkind_arraylike()?,
-            TokenKind::Punct(Punct::LParen) => self.exprkind_tuplelike()?,
-            TokenKind::Punct(op) => {
-                let (kind, prec) = from_prefix(op)
-                    .ok_or_else(|| self.error(Unexpected(TokenKind::Punct(op)), parsing))?;
-                let ((), after) = prec.prefix().expect("should have a precedence");
-                self.consume_peeked();
-                Unary { kind, tail: self.exprkind(after)?.into() }.into()
-            }
+            TokenKind::Amp | TokenKind::AmpAmp => self.addrof()?.into(),
+            TokenKind::LCurly => self.block()?.into(),
+            TokenKind::LBrack => self.exprkind_arraylike()?,
+            TokenKind::LParen => self.exprkind_tuplelike()?,
             TokenKind::Loop => {
                 self.consume_peeked();
                 Loop { body: self.expr()?.into() }.into()
@@ -880,19 +876,26 @@ impl<'t> Parser<'t> {
                 self.consume_peeked();
                 Continue.into()
             }
-            t => Err(self.error(Unexpected(t), Parsing::Unary))?,
+            op => {
+                let (kind, prec) =
+                    from_prefix(op).ok_or_else(|| self.error(Unexpected(op), parsing))?;
+                let ((), after) = prec.prefix().expect("should have a precedence");
+                self.consume_peeked();
+                Unary { kind, tail: self.exprkind(after)?.into() }.into()
+            }
         };
-        fn from_postfix(op: Punct) -> Option<Precedence> {
+        fn from_postfix(op: TokenKind) -> Option<Precedence> {
             Some(match op {
-                Punct::LBrack => Precedence::Index,
-                Punct::LParen => Precedence::Call,
-                Punct::Dot => Precedence::Member,
+                TokenKind::LBrack => Precedence::Index,
+                TokenKind::LParen => Precedence::Call,
+                TokenKind::Dot => Precedence::Member,
                 _ => None?,
             })
         }
-        while let Ok(TokenKind::Punct(op)) = self.peek_kind(parsing) {
+        while let Ok(op) = self.peek_kind(parsing) {
             // Postfix expressions
             if let Some((before, ())) = from_postfix(op).and_then(Precedence::postfix) {
                 if before < power {
@@ -901,25 +904,27 @@ impl<'t> Parser<'t> {
                 self.consume_peeked();
                 head = match op {
-                    Punct::LBrack => {
-                        let indices = sep(Self::expr, Punct::Comma, Punct::RBrack, parsing)(self)?;
-                        self.match_op(Punct::RBrack, parsing)?;
+                    TokenKind::LBrack => {
+                        let indices =
+                            sep(Self::expr, TokenKind::Comma, TokenKind::RBrack, parsing)(self)?;
+                        self.match_type(TokenKind::RBrack, parsing)?;
                         ExprKind::Index(Index { head: head.into(), indices })
                     }
-                    Punct::LParen => {
-                        let exprs = sep(Self::expr, Punct::Comma, Punct::RParen, parsing)(self)?;
-                        self.match_op(Punct::RParen, parsing)?;
+                    TokenKind::LParen => {
+                        let exprs =
+                            sep(Self::expr, TokenKind::Comma, TokenKind::RParen, parsing)(self)?;
+                        self.match_type(TokenKind::RParen, parsing)?;
                         Binary {
                             kind: BinaryKind::Call,
                             parts: (head, Tuple { exprs }.into()).into(),
                         }
                         .into()
                     }
-                    Punct::Dot => {
+                    TokenKind::Dot => {
                         let kind = self.access()?;
                         Member { head: Box::new(head), kind }.into()
                     }
-                    _ => Err(self.error(Unexpected(TokenKind::Punct(op)), parsing))?,
+                    _ => Err(self.error(Unexpected(op), parsing))?,
                 };
                 continue;
             }
@@ -948,7 +953,7 @@ impl<'t> Parser<'t> {
                 continue;
             }
-            if let Punct::As = op {
+            if let TokenKind::As = op {
                 let before = Precedence::Cast.level();
                 if before < power {
                     break;
@@ -959,7 +964,7 @@ impl<'t> Parser<'t> {
                 continue;
             }
-            if let Punct::Eq = op {
+            if let TokenKind::Eq = op {
                 let (before, after) = Precedence::Assign
                     .infix()
                     .expect("should have a precedence");
@@ -978,15 +983,15 @@ impl<'t> Parser<'t> {
     pub fn access(&mut self) -> PResult<MemberKind> {
         const PARSING: Parsing = Parsing::Member;
-        const DEL: (Punct, Punct) = PARENS; // delimiter
+        const DEL: (TokenKind, TokenKind) = PARENS; // delimiter
         match self.peek_kind(PARSING)? {
             TokenKind::Identifier => {
                 let name = self.identifier()?;
-                if self.match_op(DEL.0, PARSING).is_err() {
+                if self.match_type(DEL.0, PARSING).is_err() {
                     Ok(MemberKind::Struct(name))
                 } else {
-                    let exprs = sep(Self::expr, Punct::Comma, DEL.1, PARSING)(self)?;
-                    self.match_op(DEL.1, PARSING)?; // should succeed
+                    let exprs = sep(Self::expr, TokenKind::Comma, DEL.1, PARSING)(self)?;
+                    self.match_type(DEL.1, PARSING)?; // should succeed
                     Ok(MemberKind::Call(name, Tuple { exprs }))
                 }
             }
@@ -1001,7 +1006,7 @@ impl<'t> Parser<'t> {
     /// Parses an expression beginning with a [Path] (i.e. [Path] or [Structor])
     pub fn exprkind_pathlike(&mut self) -> PResult<ExprKind> {
         let head = self.path()?;
-        Ok(match self.match_op(Punct::Colon, Parsing::Path) {
+        Ok(match self.match_type(TokenKind::Colon, Parsing::Path) {
             Ok(_) => ExprKind::Structor(self.structor_body(head)?),
             Err(_) => ExprKind::Path(head),
         })
@@ -1010,7 +1015,12 @@ impl<'t> Parser<'t> {
     /// [Structor]Body = `{` ([Fielder] `,`)* [Fielder]? `}`
     pub fn structor_body(&mut self, to: Path) -> PResult<Structor> {
         let init = delim(
-            sep(Self::fielder, Punct::Comma, CURLIES.1, Parsing::Structor),
+            sep(
+                Self::fielder,
+                TokenKind::Comma,
+                CURLIES.1,
+                Parsing::Structor,
+            ),
             CURLIES,
             Parsing::Structor,
         )(self)?;
@@ -1023,7 +1033,7 @@ impl<'t> Parser<'t> {
         const PARSING: Parsing = Parsing::Fielder;
         Ok(Fielder {
             name: self.identifier()?,
-            init: match self.match_op(Punct::Colon, PARSING) {
+            init: match self.match_type(TokenKind::Colon, PARSING) {
                 Ok(_) => Some(Box::new(self.expr()?)),
                 Err(_) => None,
             },
@@ -1036,26 +1046,26 @@ impl<'t> Parser<'t> {
     /// so they can't be independent subexpressions
     pub fn exprkind_arraylike(&mut self) -> PResult<ExprKind> {
         const PARSING: Parsing = Parsing::Array;
-        const START: Punct = Punct::LBrack;
-        const END: Punct = Punct::RBrack;
-        self.match_op(START, PARSING)?;
+        const START: TokenKind = TokenKind::LBrack;
+        const END: TokenKind = TokenKind::RBrack;
+        self.match_type(START, PARSING)?;
         let out = match self.peek_kind(PARSING)? {
-            TokenKind::Punct(END) => Array { values: vec![] }.into(),
+            END => Array { values: vec![] }.into(),
             _ => self.exprkind_array_rep()?,
         };
-        self.match_op(END, PARSING)?;
+        self.match_type(END, PARSING)?;
         Ok(out)
     }
     /// [ArrayRep] = `[` [Expr] `;` [Expr] `]`
     pub fn exprkind_array_rep(&mut self) -> PResult<ExprKind> {
         const PARSING: Parsing = Parsing::Array;
-        const END: Punct = Punct::RBrack;
+        const END: TokenKind = TokenKind::RBrack;
         let first = self.expr()?;
         Ok(match self.peek_kind(PARSING)? {
-            TokenKind::Punct(Punct::Semi) => ArrayRep {
+            TokenKind::Semi => ArrayRep {
                 value: first.kind.into(),
                 repeat: {
                     self.consume_peeked();
@@ -1063,12 +1073,12 @@ impl<'t> Parser<'t> {
                 },
             }
             .into(),
-            TokenKind::Punct(Punct::RBrack) => Array { values: vec![first] }.into(),
-            TokenKind::Punct(Punct::Comma) => Array {
+            TokenKind::RBrack => Array { values: vec![first] }.into(),
+            TokenKind::Comma => Array {
                 values: {
                     self.consume_peeked();
                     let mut out = vec![first];
-                    out.extend(sep(Self::expr, Punct::Comma, END, PARSING)(self)?);
+                    out.extend(sep(Self::expr, TokenKind::Comma, END, PARSING)(self)?);
                     out
                 },
             }
@@ -1080,25 +1090,25 @@ impl<'t> Parser<'t> {
     ///
     /// [ExprKind::Empty] and [Group] are special cases of [Tuple]
     pub fn exprkind_tuplelike(&mut self) -> PResult<ExprKind> {
-        self.match_op(Punct::LParen, Parsing::Group)?;
+        self.match_type(TokenKind::LParen, Parsing::Group)?;
         let out = match self.peek_kind(Parsing::Group)? {
-            TokenKind::Punct(Punct::RParen) => Ok(ExprKind::Empty),
+            TokenKind::RParen => Ok(ExprKind::Empty),
             _ => self.exprkind_group(),
         };
-        self.match_op(Punct::RParen, Parsing::Group)?;
+        self.match_type(TokenKind::RParen, Parsing::Group)?;
         out
     }
     /// [Group] = `(`([Empty](ExprKind::Empty)|[Expr]|[Tuple])`)`
     pub fn exprkind_group(&mut self) -> PResult<ExprKind> {
         let first = self.expr()?;
         match self.peek_kind(Parsing::Group)? {
-            TokenKind::Punct(Punct::Comma) => {
+            TokenKind::Comma => {
                 let mut exprs = vec![first];
                 self.consume_peeked();
-                while TokenKind::Punct(Punct::RParen) != self.peek_kind(Parsing::Tuple)? {
+                while TokenKind::RParen != self.peek_kind(Parsing::Tuple)? {
                     exprs.push(self.expr()?);
                     match self.peek_kind(Parsing::Tuple)? {
-                        TokenKind::Punct(Punct::Comma) => self.consume_peeked(),
+                        TokenKind::Comma => self.consume_peeked(),
                         _ => break,
                     };
                 }
@@ -1117,8 +1127,8 @@ impl<'t> Parser<'t> {
         let mut count = 0;
         loop {
             count += match self.peek_kind(PARSING)? {
-                TokenKind::Punct(Punct::Amp) => 1,
-                TokenKind::Punct(Punct::AmpAmp) => 2,
+                TokenKind::Amp => 1,
+                TokenKind::AmpAmp => 2,
                 _ => break,
             };
             self.consume_peeked();
@@ -1163,7 +1173,7 @@ impl<'t> Parser<'t> {
     /// ret_body = (*unconsumed* `;` | [Expr])
     fn ret_body(&mut self, while_parsing: Parsing) -> PResult<Option<Box<Expr>>> {
         Ok(match self.peek_kind(while_parsing)? {
-            TokenKind::Punct(Punct::Semi) => None,
+            TokenKind::Semi => None,
             _ => Some(self.expr()?.into()),
         })
     }
@@ -1297,14 +1307,14 @@ macro operator($($name:ident ($takes:ident => $returns:ident) {$($t:ident => $p:
 }
 operator! {
-    from_prefix (Punct => UnaryKind) {
+    from_prefix (TokenKind => UnaryKind) {
         Star => Deref,
         Minus => Neg,
         Bang => Not,
         At => At,
         Tilde => Tilde,
     };
-    from_modify(Punct => ModifyKind) {
+    from_modify(TokenKind => ModifyKind) {
         AmpEq => And,
         BarEq => Or,
         XorEq => Xor,
@@ -1316,7 +1326,7 @@ operator! {
         SlashEq => Div,
         RemEq => Rem,
     };
-    from_infix (Punct => BinaryKind) {
+    from_infix (TokenKind => BinaryKind) {
         Lt => Lt,
         LtEq => LtEq,
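
The parser-side effect of the flattening is that the `match_op` shim is gone and the `delim`/`sep`/`rep` combinators compare `TokenKind` values directly (`until != parser.peek_kind(...)?` rather than `TokenKind::Punct(until) != ...`). A self-contained toy sketch of that comparison pattern, with illustrative names only (it mirrors the loop shape of `sep`, minus error handling and the real token types):

    // Toy stand-in for the flattened token kind; PartialEq is what `sep` relies on.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Kind { Ident, Comma, RParen }

    // Collects items until `until`, separated by `sep` -- the same shape as the
    // parser's `sep` combinator after this commit, but over a plain slice.
    fn sep_demo(tokens: &[Kind], sep: Kind, until: Kind) -> Vec<Kind> {
        let mut out = vec![];
        let mut iter = tokens.iter().copied().peekable();
        while iter.peek().copied() != Some(until) {
            match iter.next() {
                Some(tok) => out.push(tok),
                None => break,
            }
            if iter.peek().copied() != Some(sep) {
                break;
            }
            iter.next(); // consume the separator
        }
        out
    }

    fn main() {
        let tokens = [Kind::Ident, Kind::Comma, Kind::Ident, Kind::RParen];
        let items = sep_demo(&tokens, Kind::Comma, Kind::RParen);
        assert_eq!(items, [Kind::Ident, Kind::Ident]);
    }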


@@ -10,4 +10,4 @@ pub mod token_type;
 pub use token::Token;
 pub use token_data::TokenData;
-pub use token_type::{Punct, TokenKind};
+pub use token_type::TokenKind;
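
For downstream users of the crate, the visible difference is the export list: code that previously wrote `use cl_token::{Punct, TokenKind};` now imports just `TokenKind` and names punctuation through it. A small usage sketch (the function name is illustrative):

    use cl_token::TokenKind;

    // `TokenKind` compares directly now; there is no Punct payload to unwrap.
    fn is_semi(kind: TokenKind) -> bool {
        kind == TokenKind::Semi
    }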


@@ -13,41 +13,35 @@ pub enum TokenKind {
     /// A non-keyword identifier
     Identifier,
     // A keyword
-    Break,
-    Cl,
-    Const,
-    Continue,
-    Else,
-    Enum,
-    False,
-    For,
-    Fn,
-    If,
-    Impl,
-    In,
-    Let,
-    Loop,
-    Mod,
-    Mut,
-    Pub,
-    Return,
-    SelfKw,
-    SelfTy,
-    Static,
-    Struct,
-    Super,
-    True,
-    Type,
-    Use,
-    While,
-    /// Delimiter or punctuation
-    Punct(Punct),
-}
-/// An operator character (delimiter, punctuation)
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub enum Punct {
     As, // as
+    Break, // "break"
+    Cl, // "cl"
+    Const, // "const"
+    Continue, // "continue"
+    Else, // "else"
+    Enum, // "enum"
+    False, // "false"
+    Fn, // "fn"
+    For, // "for"
+    If, // "if"
+    Impl, // "impl"
+    In, // "in"
+    Let, // "let"
+    Loop, // "loop"
+    Mod, // "mod"
+    Mut, // "mut"
+    Pub, // "pub"
+    Return, // "return"
+    SelfKw, // "self"
+    SelfTy, // "Self"
+    Static, // "static"
+    Struct, // "struct"
+    Super, // "super"
+    True, // "true"
+    Type, // "type"
+    Use, // "use"
+    While, // "while"
+    // Delimiter or punctuation
     LCurly, // {
     RCurly, // }
     LBrack, // [
@@ -112,6 +106,7 @@ impl Display for TokenKind {
             TokenKind::Literal => "literal".fmt(f),
             TokenKind::Identifier => "identifier".fmt(f),
+            TokenKind::As => "as".fmt(f),
             TokenKind::Break => "break".fmt(f),
             TokenKind::Cl => "cl".fmt(f),
             TokenKind::Const => "const".fmt(f),
@@ -119,8 +114,8 @@ impl Display for TokenKind {
             TokenKind::Else => "else".fmt(f),
             TokenKind::Enum => "enum".fmt(f),
             TokenKind::False => "false".fmt(f),
-            TokenKind::For => "for".fmt(f),
             TokenKind::Fn => "fn".fmt(f),
+            TokenKind::For => "for".fmt(f),
             TokenKind::If => "if".fmt(f),
             TokenKind::Impl => "impl".fmt(f),
             TokenKind::In => "in".fmt(f),
@@ -140,7 +135,60 @@ impl Display for TokenKind {
             TokenKind::Use => "use".fmt(f),
             TokenKind::While => "while".fmt(f),
-            TokenKind::Punct(op) => op.fmt(f),
+            TokenKind::LCurly => "{".fmt(f),
+            TokenKind::RCurly => "}".fmt(f),
+            TokenKind::LBrack => "[".fmt(f),
+            TokenKind::RBrack => "]".fmt(f),
+            TokenKind::LParen => "(".fmt(f),
+            TokenKind::RParen => ")".fmt(f),
+            TokenKind::Amp => "&".fmt(f),
+            TokenKind::AmpAmp => "&&".fmt(f),
+            TokenKind::AmpEq => "&=".fmt(f),
+            TokenKind::Arrow => "->".fmt(f),
+            TokenKind::At => "@".fmt(f),
+            TokenKind::Backslash => "\\".fmt(f),
+            TokenKind::Bang => "!".fmt(f),
+            TokenKind::BangBang => "!!".fmt(f),
+            TokenKind::BangEq => "!=".fmt(f),
+            TokenKind::Bar => "|".fmt(f),
+            TokenKind::BarBar => "||".fmt(f),
+            TokenKind::BarEq => "|=".fmt(f),
+            TokenKind::Colon => ":".fmt(f),
+            TokenKind::ColonColon => "::".fmt(f),
+            TokenKind::Comma => ",".fmt(f),
+            TokenKind::Dot => ".".fmt(f),
+            TokenKind::DotDot => "..".fmt(f),
+            TokenKind::DotDotEq => "..=".fmt(f),
+            TokenKind::Eq => "=".fmt(f),
+            TokenKind::EqEq => "==".fmt(f),
+            TokenKind::FatArrow => "=>".fmt(f),
+            TokenKind::Grave => "`".fmt(f),
+            TokenKind::Gt => ">".fmt(f),
+            TokenKind::GtEq => ">=".fmt(f),
+            TokenKind::GtGt => ">>".fmt(f),
+            TokenKind::GtGtEq => ">>=".fmt(f),
+            TokenKind::Hash => "#".fmt(f),
+            TokenKind::HashBang => "#!".fmt(f),
+            TokenKind::Lt => "<".fmt(f),
+            TokenKind::LtEq => "<=".fmt(f),
+            TokenKind::LtLt => "<<".fmt(f),
+            TokenKind::LtLtEq => "<<=".fmt(f),
+            TokenKind::Minus => "-".fmt(f),
+            TokenKind::MinusEq => "-=".fmt(f),
+            TokenKind::Plus => "+".fmt(f),
+            TokenKind::PlusEq => "+=".fmt(f),
+            TokenKind::Question => "?".fmt(f),
+            TokenKind::Rem => "%".fmt(f),
+            TokenKind::RemEq => "%=".fmt(f),
+            TokenKind::Semi => ";".fmt(f),
+            TokenKind::Slash => "/".fmt(f),
+            TokenKind::SlashEq => "/=".fmt(f),
+            TokenKind::Star => "*".fmt(f),
+            TokenKind::StarEq => "*=".fmt(f),
+            TokenKind::Tilde => "~".fmt(f),
+            TokenKind::Xor => "^".fmt(f),
+            TokenKind::XorEq => "^=".fmt(f),
+            TokenKind::XorXor => "^^".fmt(f),
         }
     }
 }
@@ -150,7 +198,7 @@ impl FromStr for TokenKind {
     /// Parses a string s to return a Keyword
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         Ok(match s {
-            "as" => Self::Punct(Punct::As),
+            "as" => Self::As,
             "break" => Self::Break,
             "cl" => Self::Cl,
             "const" => Self::Const,
@@ -158,8 +206,8 @@ impl FromStr for TokenKind {
             "else" => Self::Else,
             "enum" => Self::Enum,
             "false" => Self::False,
-            "for" => Self::For,
             "fn" => Self::Fn,
+            "for" => Self::For,
             "if" => Self::If,
             "impl" => Self::Impl,
             "in" => Self::In,
@@ -182,65 +230,3 @@ impl FromStr for TokenKind {
         })
     }
 }
-impl Display for Punct {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Punct::As => "as".fmt(f),
-            Punct::LCurly => "{".fmt(f),
-            Punct::RCurly => "}".fmt(f),
-            Punct::LBrack => "[".fmt(f),
-            Punct::RBrack => "]".fmt(f),
-            Punct::LParen => "(".fmt(f),
-            Punct::RParen => ")".fmt(f),
-            Punct::Amp => "&".fmt(f),
-            Punct::AmpAmp => "&&".fmt(f),
-            Punct::AmpEq => "&=".fmt(f),
-            Punct::Arrow => "->".fmt(f),
-            Punct::At => "@".fmt(f),
-            Punct::Backslash => "\\".fmt(f),
-            Punct::Bang => "!".fmt(f),
-            Punct::BangBang => "!!".fmt(f),
-            Punct::BangEq => "!=".fmt(f),
-            Punct::Bar => "|".fmt(f),
-            Punct::BarBar => "||".fmt(f),
-            Punct::BarEq => "|=".fmt(f),
-            Punct::Colon => ":".fmt(f),
-            Punct::ColonColon => "::".fmt(f),
-            Punct::Comma => ",".fmt(f),
-            Punct::Dot => ".".fmt(f),
-            Punct::DotDot => "..".fmt(f),
-            Punct::DotDotEq => "..=".fmt(f),
-            Punct::Eq => "=".fmt(f),
-            Punct::EqEq => "==".fmt(f),
-            Punct::FatArrow => "=>".fmt(f),
-            Punct::Grave => "`".fmt(f),
-            Punct::Gt => ">".fmt(f),
-            Punct::GtEq => ">=".fmt(f),
-            Punct::GtGt => ">>".fmt(f),
-            Punct::GtGtEq => ">>=".fmt(f),
-            Punct::Hash => "#".fmt(f),
-            Punct::HashBang => "#!".fmt(f),
-            Punct::Lt => "<".fmt(f),
-            Punct::LtEq => "<=".fmt(f),
-            Punct::LtLt => "<<".fmt(f),
-            Punct::LtLtEq => "<<=".fmt(f),
-            Punct::Minus => "-".fmt(f),
-            Punct::MinusEq => "-=".fmt(f),
-            Punct::Plus => "+".fmt(f),
-            Punct::PlusEq => "+=".fmt(f),
-            Punct::Question => "?".fmt(f),
-            Punct::Rem => "%".fmt(f),
-            Punct::RemEq => "%=".fmt(f),
-            Punct::Semi => ";".fmt(f),
-            Punct::Slash => "/".fmt(f),
-            Punct::SlashEq => "/=".fmt(f),
-            Punct::Star => "*".fmt(f),
-            Punct::StarEq => "*=".fmt(f),
-            Punct::Tilde => "~".fmt(f),
-            Punct::Xor => "^".fmt(f),
-            Punct::XorEq => "^=".fmt(f),
-            Punct::XorXor => "^^".fmt(f),
-        }
-    }
-}
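
As a quick sanity check on the merged impls above: the keywords shown in the `FromStr` hunk round-trip through `Display`, and punctuation now formats from `TokenKind` itself rather than through the removed `Punct` impl. A hedged sketch (it assumes the `FromStr` error type and the derives on `TokenKind` permit this usage, which the hunks do not show in full):

    use cl_token::TokenKind;

    fn main() {
        // Keyword round-trip: from_str("break") -> TokenKind::Break -> "break".
        if let Ok(kind) = "break".parse::<TokenKind>() {
            assert!(kind == TokenKind::Break);
            assert!(kind.to_string() == "break");
        }
        // Punctuation formats straight off the flat enum.
        assert!(TokenKind::Semi.to_string() == ";");
    }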