Doughlang/src/main.rs

//! Tests the lexer\
use doughlang::{
    ast::{
        Anno, Annotation, Bind, Expr, Literal, Pat, Path, Use,
        macro_matcher::{Match, Subst},
    },
    lexer::{EOF, LexError, Lexer},
    parser::{Parse, ParseError, Parser},
    span::Span,
    token::{TKind, Token},
};
use repline::prebaked::*;
use std::{
    error::Error,
    io::{IsTerminal, stdin},
    marker::PhantomData,
};
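/// Clears the screen and scrollback, then homes the cursor, via ANSI escape sequences.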
fn clear() {
    print!("\x1b[H\x1b[2J\x1b[3J");
}
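/// Output verbosity: pretty-printed, `Debug`-formatted, or silent.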
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
enum Verbosity {
    #[default]
    Pretty,
    Debug,
    Quiet,
}
impl From<&str> for Verbosity {
    fn from(value: &str) -> Self {
        match value {
            "quiet" | "false" | "0" | "no" => Verbosity::Quiet,
            "debug" | "d" => Verbosity::Debug,
            "pretty" => Verbosity::Pretty,
            _ => Default::default(),
        }
    }
}
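/// Which grammar production input is parsed as; `Tokens` skips parsing and dumps lexer output.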
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
enum ParseMode {
    #[default]
    Expr,
    Pat,
    Bind,
    Path,
    Literal,
    Use,
    Tokens,
}
impl From<&str> for ParseMode {
    fn from(value: &str) -> Self {
        match value {
            "expr" => Self::Expr,
            "pat" => Self::Pat,
            "bind" => Self::Bind,
            "path" => Self::Path,
            "literal" => Self::Literal,
            "use" => Self::Use,
            "tokens" => Self::Tokens,
            _ => Default::default(),
        }
    }
}
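// Maps each mode to its handler: `parse::<T>` for the grammar modes, `tokens` for raw lexing.
// Every arm shares the signature fn(&str, Verbosity).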
impl ParseMode {
    fn with<'a>(&self) -> fn(&'a str, Verbosity) {
        match self {
            Self::Expr => parse::<'a, Expr>,
            Self::Pat => parse::<'a, Pat>,
            Self::Bind => parse::<'a, Bind>,
            Self::Path => parse::<'a, Path>,
            Self::Literal => parse::<'a, Literal>,
            Self::Use => parse::<'a, Use>,
            Self::Tokens => tokens::<'a, dyn Parse<'a, Prec = ()>>,
        }
    }
}
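/// Entry point. Defaults come from the `DO_VERBOSE` and `DO_PARSING` environment variables.
/// Interactive sessions get a REPL with mode/verbosity commands; piped input is read to EOF
/// and handed to the current parse mode in one shot.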
fn main() -> Result<(), Box<dyn Error>> {
    let mut verbose = Verbosity::from(std::env::var("DO_VERBOSE").as_deref().unwrap_or_default());
    let mut parsing = ParseMode::from(std::env::var("DO_PARSING").as_deref().unwrap_or_default());
    if stdin().is_terminal() {
        read_and("\x1b[32m", ".>", " >", |line| match line.trim_end() {
            "" => Ok(Response::Continue),
            "exit" => Ok(Response::Break),
            "help" => {
                println!("Parsing: {parsing:?} (expr, pat, bind, path, literal, use, tokens)");
                println!("Verbose: {verbose:?} (pretty, debug, quiet)");
                Ok(Response::Deny)
            }
            "clear" => {
                clear();
                Ok(Response::Deny)
            }
            "macro" => {
                if let Err(e) = subst() {
                    println!("\x1b[31m{e}\x1b[0m");
                }
                Ok(Response::Accept)
            }
            line @ ("tokens" | "expr" | "pat" | "bind" | "path" | "literal" | "use") => {
                parsing = ParseMode::from(line);
                println!("Parse mode set to '{parsing:?}'");
                Ok(Response::Accept)
            }
            line @ ("quiet" | "debug" | "pretty") => {
                verbose = Verbosity::from(line);
                println!("Verbosity set to '{verbose:?}'");
                Ok(Response::Accept)
            }
            _ if line.ends_with("\n\n") => {
                parsing.with()(line, verbose);
                Ok(Response::Accept)
            }
            _ => Ok(Response::Continue),
        })?;
    } else {
        let doc = std::io::read_to_string(stdin())?;
        parsing.with()(&doc, verbose);
    }
    Ok(())
}
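/// Interactive macro-substitution playground: reads a template expression, then repeatedly reads
/// input. A bare pattern is matched against the expression and the captured bindings are printed;
/// a `pat -> sub` rule rewrites the expression in place.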
fn subst() -> Result<(), Box<dyn Error>> {
    let mut rl = repline::Repline::new("\x1b[35mexp", " >", "?>");
    let exp = rl.read()?;
    let mut exp: Expr = Parser::new(Lexer::new(&exp)).parse(0)?;
    println!("\x1b[G\x1b[J{exp}");
    rl.accept();
    loop {
        rl.set_color("\x1b[36mpat");
        let pat = rl.read()?;
        rl.accept();
        print!("\x1b[G\x1b[J");
        let mut p = Parser::new(Lexer::new(&pat));
        let Ok(pat) = p.parse::<Expr>(0) else {
            println!("{exp}");
            continue;
        };
        if p.next_if(TKind::Arrow).is_err() {
            let Some(Subst { exp, pat }) = exp.construct(&pat) else {
                println!("Match failed: {exp} <- {pat}");
                continue;
            };
            let mut pats: Vec<_> = pat.into_iter().collect();
            pats.sort_by(|(a, _), (b, _)| a.cmp(b));
            for (name, pat) in pats {
                println!("{name}: {pat}")
            }
            let mut exprs: Vec<_> = exp.into_iter().collect();
            exprs.sort_by(|(a, _), (b, _)| a.cmp(b));
            for (name, expr) in exprs.iter() {
                println!("{name}: {expr}")
            }
            continue;
        }
        let sub: Expr = p.parse(0)?;
        if exp.apply_rule(&pat, &sub) {
            println!("{exp}");
        } else {
            println!("No match: {pat} in {exp}\n")
        }
    }
}
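/// Returns the plural suffix "s" for any count other than 1.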
fn plural(count: usize) -> &'static str {
    match count {
        1 => "",
        _ => "s",
    }
}
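/// Lexes `document` to completion, printing each token until EOF or the first lex error.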
fn tokens<'t, T: Parse<'t> + ?Sized>(document: &'t str, verbose: Verbosity) {
    let _: PhantomData<T>; // for lifetime variance
    let mut lexer = Lexer::new(document);
    loop {
        match (lexer.scan(), verbose) {
            (Err(LexError { res: EOF, .. }), _) => {
                break;
            }
            (Err(e), _) => {
                println!("\x1b[31m{e}\x1b[0m");
                break;
            }
            (Ok(Token { lexeme, kind, span: Span { head, tail } }), Verbosity::Pretty) => {
                println!("{kind:?}\x1b[11G {head:<4} {tail:<4} {lexeme:?}")
            }
            (Ok(token), Verbosity::Debug) => {
                println!("{token:?}")
            }
            _ => {}
        }
    }
}
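/// Parses `document` as a sequence of span-annotated `T`s, printing each result in a cycling
/// color, and stops at EOF or the first parse error.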
fn parse<'t, T: Parse<'t> + Annotation>(document: &'t str, verbose: Verbosity) {
    let mut parser = Parser::new(Lexer::new(document));
    for idx in 0.. {
        match (parser.parse::<Anno<T, Span>>(T::Prec::default()), verbose) {
            (Err(e @ ParseError::EOF(s)), _) if s.tail == document.len() as _ => {
                println!(
                    "\x1b[92m{e} (total {} byte{}, {idx} expression{})\x1b[0m",
                    document.len(),
                    plural(document.len()),
                    plural(idx),
                );
                break;
            }
            (Err(e @ ParseError::EOF(_)), _) => {
                println!(
                    "\x1b[93m{e} (total {} byte{}, {idx} expression{})\x1b[0m",
                    document.len(),
                    plural(document.len()),
                    plural(idx),
                );
                break;
            }
            (Err(e), _) => {
                println!("\x1b[91m{e}\x1b[0m");
                break;
            }
            (Ok(Anno(expr, span)), Verbosity::Pretty) => {
                println!("\x1b[{}m{span}:\n{expr}", (idx + 5) % 6 + 31);
            }
            (Ok(Anno(expr, span)), Verbosity::Debug) => {
                println!("\x1b[{}m{span}:\n{expr:#?}", (idx + 5) % 6 + 31);
            }
            _ => {}
        }
    }
}