// © 2023-2024 John Breaux
|
|
|
|
use crate::{
|
|
lexer::{
|
|
token::{Token, TokenKind as Kind},
|
|
Lexer,
|
|
},
|
|
span::Span,
|
|
};
|
|
use std::collections::{HashMap, VecDeque};
|
|
|
|
#[derive(Clone, Debug)]
pub struct Preprocessor<'t> {
    /// Underlying token source.
    lexer: Lexer<'t>,
    /// Queue of tokens to emit ahead of the lexer: macro expansions and
    /// tokens passed through (`tee`'d) while parsing a directive.
    buf: VecDeque<Token<'t>>,
    /// Macro table: lexeme of a `.define`d name -> its replacement tokens.
    defn: HashMap<&'t str, Vec<Token<'t>>>,
    /// Location for injected tokens
    pos: Span<usize>,
}
|
|
|
|
impl<'t> Preprocessor<'t> {
|
|
pub fn new(text: &'t str) -> Self {
|
|
Self {
|
|
lexer: Lexer::new(text),
|
|
buf: Default::default(),
|
|
defn: Default::default(),
|
|
pos: Default::default(),
|
|
}
|
|
}
|
|
pub fn with_lexer(lexer: Lexer<'t>) -> Self {
|
|
Self { lexer, buf: Default::default(), defn: Default::default(), pos: Default::default() }
|
|
}
|
|
pub fn scan(&mut self) -> Option<Token<'t>> {
|
|
self.buf.pop_front().or_else(|| self.next()).inspect(|t| self.pos = t.pos)
|
|
}
|
|
pub fn start(&self) -> usize {
|
|
self.lexer.location()
|
|
}
|
|
/// Grabs a token from the lexer, and attempts to match its lexeme
|
|
fn next(&mut self) -> Option<Token<'t>> {
|
|
let token = self.lexer.scan()?;
|
|
if let Some(tokens) = self.defn.get(token.lexeme) {
|
|
self.buf.extend(tokens.iter().copied().map(|mut t| {
|
|
t.pos = self.pos;
|
|
t
|
|
}));
|
|
return self.scan();
|
|
} else {
|
|
match token.kind {
|
|
Kind::Directive => self.directive(token),
|
|
Kind::Newline => return self.scan(),
|
|
_ => {}
|
|
}
|
|
Some(token)
|
|
}
|
|
}
|
|
/// Passes a token through while parsing a directive
|
|
fn tee(&mut self) -> Option<Token<'t>> {
|
|
let token = self.lexer.scan()?;
|
|
self.buf.push_back(token);
|
|
// self.buf.push_back(token);
|
|
Some(token)
|
|
}
|
|
/// Parses and executes a directive
|
|
pub fn directive(&mut self, token: Token<'t>) {
|
|
if ".define" == token.lexeme {
|
|
self.define()
|
|
}
|
|
}
|
|
pub fn define(&mut self) {
|
|
let Some(key) = self.tee() else {
|
|
return;
|
|
};
|
|
let mut value = vec![];
|
|
while let Some(token) = self.tee() {
|
|
match token.kind {
|
|
Kind::Comment => {
|
|
self.buf.push_back(token);
|
|
break;
|
|
}
|
|
Kind::Newline => break,
|
|
_ => value.push(token),
|
|
}
|
|
}
|
|
self.defn.insert(key.lexeme, value);
|
|
}
|
|
}
|