lexer:
- Un-stringify errors
- Reserve more words
- Doc the comments

parser:
- MASSIVE changes to peek, peek_if, next_if, consume_if => expect.
- Keep track of when EOF is allowable
- TKind is stupidly cheap with >100 niches, so we can fit like 4 of them in a single ParseError lmao (sketched below)
- TODO: make sure EOF/UnexpectedEOF propagation is correct. It seems... Kinda Not correct.
- Add meta-expressions
230 lines · 6.5 KiB · Rust
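A standalone sketch of the niche-packing claim in the commit message, assuming TKind is a fieldless enum with more than 100 variants (but fewer than ~250). TKindSketch and ParseErrorSketch are hypothetical stand-ins, not the real doughlang definitions:

use std::mem::size_of;

// Hypothetical stand-in for TKind: a fieldless enum stays one byte, and
// its unused bit patterns are "niches" the compiler reuses for the tags
// of enclosing enums like Option.
enum TKindSketch {
    Ident,
    Arrow,
    Comma,
    // ... imagine 100+ more fieldless variants here ...
}

// Hypothetical error shape: Option<TKindSketch> needs no separate tag
// byte, so four "expected token kinds" pack into four bytes.
enum ParseErrorSketch {
    Expected([Option<TKindSketch>; 4]),
}

fn main() {
    assert_eq!(size_of::<TKindSketch>(), 1);
    assert_eq!(size_of::<Option<TKindSketch>>(), 1); // niche: no tag byte
    assert_eq!(size_of::<[Option<TKindSketch>; 4]>(), 4);
    assert_eq!(size_of::<ParseErrorSketch>(), 4); // single variant: no tag
}
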
//! Tests the lexer and parser.
use doughlang::{
    ast::{Anno, Pat, Ty},
    parser::PPrec,
};
#[allow(unused_imports)]
use doughlang::{
    ast::{
        Expr,
        macro_matcher::{Match, Subst},
    },
    lexer::{EOF, LexError, Lexer},
    parser::{ParseError, Parser},
    span::Span,
    token::{TKind, Token},
};
use repline::prebaked::*;
use std::{
    error::Error,
    io::{IsTerminal, stdin},
};

/// In a terminal, runs a small command REPL (lex, expr, pat, ty, macro);
/// otherwise parses all of stdin as a document.
fn main() -> Result<(), Box<dyn Error>> {
    if stdin().is_terminal() {
        read_and("\x1b[32m", ".>", " >", |line| match line.trim_end() {
            "" => Ok(Response::Continue),
            "exit" => Ok(Response::Break),
            "clear" => {
                print!("\x1b[H\x1b[2J");
                Ok(Response::Deny)
            }
            "lex" => {
                lex()?;
                Ok(Response::Deny)
            }
            "expr" => {
                exprs()?;
                Ok(Response::Deny)
            }
            "pat" => {
                pats()?;
                Ok(Response::Deny)
            }
            "ty" => {
                tys()?;
                Ok(Response::Deny)
            }
            "macro" => {
                if let Err(e) = subst() {
                    println!("\x1b[31m{e}\x1b[0m");
                }
                Ok(Response::Deny)
            }
            // Two trailing newlines (a blank line) submit the buffer.
            _ if line.ends_with("\n\n") => {
                parse(line);
                Ok(Response::Accept)
            }
            _ => Ok(Response::Continue),
        })?;
    } else {
        let doc = std::io::read_to_string(stdin())?;
        // lex(&doc);
        parse(&doc);
    }
    Ok(())
}

/// REPL that scans one token at a time, printing kind, span, and lexeme.
fn lex() -> Result<(), Box<dyn Error>> {
    read_and("\x1b[93m", " >", "?>", |line| {
        let mut lexer = Lexer::new(line);
        if line.trim().is_empty() {
            return Ok(Response::Break);
        }
        loop {
            match lexer.scan() {
                // EOF here means the whole line lexed cleanly.
                Err(LexError { res: EOF, .. }) => {
                    break Ok(Response::Accept);
                }
                Err(e) => {
                    println!("\x1b[31m{e}\x1b[0m");
                    break Ok(Response::Deny);
                }
                Ok(Token { lexeme, kind, span: Span { head, tail } }) => {
                    println!("{kind:?}\x1b[11G {head:<4} {tail:<4} {lexeme:?}")
                }
            }
        }
    })?;
    Ok(())
}

/// REPL that parses annotated expressions and pretty-prints each AST.
fn exprs() -> Result<(), Box<dyn Error>> {
    read_and("\x1b[93m", ".>", " >", |line| {
        let mut parser = Parser::new(Lexer::new(line));
        if line.trim().is_empty() {
            return Ok(Response::Break);
        }
        for idx in 0.. {
            match parser.parse::<Anno<Expr>>(0) {
                Err(ParseError::FromLexer(LexError { res: EOF, .. })) => {
                    return Ok(Response::Accept);
                }
                Err(e) => {
                    println!("\x1b[31m{e}\x1b[0m");
                    return Ok(Response::Deny);
                }
                Ok(v) => println!("{idx}: {v}\n{v:#?}"),
            }
        }
        Ok(Response::Accept)
    })?;
    Ok(())
}

/// REPL that parses patterns and pretty-prints each AST.
fn pats() -> Result<(), Box<dyn Error>> {
    read_and("\x1b[94m", " >", "?>", |line| {
        let mut parser = Parser::new(Lexer::new(line));
        if line.trim().is_empty() {
            return Ok(Response::Break);
        }
        loop {
            match parser.parse::<Pat>(PPrec::Min) {
                Err(ParseError::FromLexer(LexError { res: EOF, .. })) => {
                    break Ok(Response::Accept);
                }
                Err(e) => {
                    println!("\x1b[31m{e}\x1b[0m");
                    break Ok(Response::Deny);
                }
                Ok(v) => println!("{v}\n{v:#?}"),
            }
        }
    })?;
    Ok(())
}

/// REPL that parses types and pretty-prints each AST.
fn tys() -> Result<(), Box<dyn Error>> {
    read_and("\x1b[94m", ".>", " >", |line| {
        let mut parser = Parser::new(Lexer::new(line));
        if line.trim().is_empty() {
            return Ok(Response::Break);
        }
        loop {
            match parser.parse::<Ty>(()) {
                Err(ParseError::FromLexer(LexError { res: EOF, .. })) => {
                    break Ok(Response::Accept);
                }
                Err(e) => {
                    println!("\x1b[31m{e}\x1b[0m");
                    break Ok(Response::Deny);
                }
                Ok(v) => println!("{v}\n{v:#?}"),
            }
        }
    })?;
    Ok(())
}

/// Reads an expression, then repeatedly reads patterns against it.
/// A bare pattern is matched and its bindings printed; `pat -> sub`
/// is applied to the expression as a rewrite rule.
fn subst() -> Result<(), Box<dyn Error>> {
    let mut rl = repline::Repline::new("\x1b[35mexp", " >", "?>");
    let exp = rl.read()?;
    let mut exp: Expr = Parser::new(Lexer::new(&exp)).parse(0)?;
    println!("\x1b[G\x1b[J{exp}");

    rl.accept();

    loop {
        rl.set_color("\x1b[36mpat");
        let pat = rl.read()?;
        rl.accept();
        print!("\x1b[G\x1b[J");
        let mut p = Parser::new(Lexer::new(&pat));

        let Ok(pat) = p.parse::<Expr>(0) else {
            println!("{exp}");
            continue;
        };

        if p.next_if(TKind::Arrow).is_err() {
            // No `->`: just match the pattern and print the bindings.
            let Some(Subst { exp, pat }) = exp.construct(&pat) else {
                println!("Match failed: {exp} <- {pat}");
                continue;
            };
            let mut pats: Vec<_> = pat.into_iter().collect();
            pats.sort_by(|(a, _), (b, _)| a.cmp(b));
            for (name, pat) in pats {
                println!("{name}: {pat}")
            }
            let mut exprs: Vec<_> = exp.into_iter().collect();
            exprs.sort_by(|(a, _), (b, _)| a.cmp(b));
            for (name, expr) in exprs.iter() {
                println!("{name}: {expr}")
            }
            continue;
        }

        let sub: Expr = p.parse(0)?;
        if exp.apply_rule(&pat, &sub) {
            println!("{exp}");
        } else {
            println!("No match: {pat} in {exp}\n")
        }
    }
}

/// Parses `document` as a sequence of expressions, colorizing each one
/// when running interactively.
fn parse(document: &str) {
    let mut parser = Parser::new(Lexer::new(document));
    let isatty = std::io::stdin().is_terminal();
    for idx in 0.. {
        match parser.parse::<Expr>(0) {
            // EOF whose span ends exactly at the end of input: clean finish.
            Err(e @ ParseError::EOF(s)) if s.tail == document.len() as _ => {
                println!("\x1b[92m{e} (total {} bytes)\x1b[0m", document.len());
                break;
            }
            // EOF reported before the end of input (see the TODO about
            // EOF propagation in the commit message).
            Err(e @ ParseError::EOF(_)) => {
                println!("\x1b[93m{e} (total {} bytes)\x1b[0m", document.len());
                break;
            }
            Err(e) => {
                println!("\x1b[91m{e}\x1b[0m");
                break;
            }
            Ok(v) if isatty => {
                // Cycle through the six ANSI colors, one per expression.
                println!("\x1b[{}m{v}", (idx + 5) % 6 + 31);
            }
            _ => {}
        }
    }
}