conlang: Rename literals; split, compose, and document Rules

- Renamed literal Types to reflect their literal nature
  - This allows for consistent naming across future non-literal Types
- Split complicated lexer Rules into composable sub-rules and moved them
  into the Rule struct.
  - This improves modularity and lets sub-rules be shared across rules
    (see the sketch after this list).
- Documented each lexer rule with (at least) a one-line blurb
  describing its function
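
As a rough, self-contained sketch of what "composable sub-rules" can look like
(the Rule type, its fields, and the char_if/chars_while/integer methods below
are assumptions made for illustration, not the crate's actual API):

// Illustrative only: this `Rule` type and its methods are assumptions for the
// sake of the example, not conlang's real lexer API.
#[derive(Clone, Copy, Debug)]
struct Rule<'t> {
    rest: &'t str, // input not yet consumed
    ok: bool,      // whether every sub-rule so far has matched
}

impl<'t> Rule<'t> {
    fn new(rest: &'t str) -> Self {
        Rule { rest, ok: true }
    }
    /// Sub-rule: consume a single char satisfying `pred`.
    fn char_if(self, pred: impl Fn(char) -> bool) -> Self {
        match self.rest.chars().next() {
            Some(c) if self.ok && pred(c) => Rule { rest: &self.rest[c.len_utf8()..], ok: true },
            _ => Rule { ok: false, ..self },
        }
    }
    /// Sub-rule: consume zero or more chars satisfying `pred`.
    fn chars_while(self, pred: impl Fn(char) -> bool + Copy) -> Self {
        let mut rule = self;
        while rule.ok {
            let next = rule.char_if(pred);
            if !next.ok {
                break;
            }
            rule = next;
        }
        rule
    }
    /// A composite rule built from the sub-rules above: a decimal integer.
    fn integer(self) -> Self {
        self.char_if(|c| c.is_ascii_digit())
            .chars_while(|c| c.is_ascii_digit())
    }
}

fn main() {
    let rule = Rule::new("1234 + 56").integer();
    // The integer rule matches the leading digits and leaves the rest.
    assert!(rule.ok);
    assert_eq!(rule.rest, " + 56");
    println!("matched; remaining input: {:?}", rule.rest);
}

In a shape like this, each named rule is just a chain of smaller sub-rules, so
the same sub-rules can be reused by several rules, which is the sharing the
second bullet describes.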
commit 097e2c4f11
parent 46e72e4889
date   2023-09-27 18:13:01 -05:00
4 changed files with 176 additions and 118 deletions

@@ -1,18 +1,16 @@
 //! This example grabs input from stdin, lexes it, and prints which lexer rules matched
 #![allow(unused_imports)]
 use conlang::lexer::Lexer;
-use std::{io::stdin, error::Error};
+use std::{error::Error, io::stdin};
 
-fn main() -> Result<(), Box<dyn Error>>{
+fn main() -> Result<(), Box<dyn Error>> {
     // get input from stdin
     for line in stdin().lines() {
         let line = line?;
         // lex the line
-        for func in [Lexer::line_comment, Lexer::block_comment, Lexer::shebang_comment, Lexer::identifier, Lexer::integer, Lexer::float, Lexer::string] {
-            if let Some(token) = func(&mut Lexer::new(&line)) {
-                println!("{:?}: {}", token, &line[token.range()])
-            }
+        let mut lexer = Lexer::new(&line);
+        while let Some(token) = lexer.any() {
+            println!("{:?}: {}", token, &line[token.range()])
         }
     }
     Ok(())
-}
+}
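
The example change above replaces the hard-coded list of rule functions with a
single lexer.any() call. The underlying dispatch idea, try each rule in order
and take the first match, can be sketched standalone; the RuleFn alias and the
integer, identifier, and any functions below are invented for illustration and
are not conlang's implementation:

// Standalone sketch of "try each rule in turn" dispatch over a function table.
// Names and signatures here are hypothetical; conlang's Lexer::any may be
// implemented quite differently.
type RuleFn = fn(&str) -> Option<usize>; // returns the number of matched chars

fn integer(s: &str) -> Option<usize> {
    let n = s.chars().take_while(|c| c.is_ascii_digit()).count();
    (n > 0).then_some(n)
}

fn identifier(s: &str) -> Option<usize> {
    let mut chars = s.chars();
    match chars.next() {
        Some(c) if c.is_alphabetic() || c == '_' => {}
        _ => return None,
    }
    Some(1 + chars.take_while(|c| c.is_alphanumeric() || *c == '_').count())
}

/// Try each rule in order and return the first match, mirroring what a
/// catch-all entry point like Lexer::any presumably does.
fn any(s: &str, rules: &[RuleFn]) -> Option<usize> {
    rules.iter().find_map(|rule| rule(s))
}

fn main() {
    let rules: [RuleFn; 2] = [integer, identifier];
    assert_eq!(any("123abc", &rules), Some(3)); // integer wins
    assert_eq!(any("abc123", &rules), Some(6)); // identifier wins
    assert_eq!(any("???", &rules), None);       // no rule matches
    println!("dispatch through `any` works for both rules");
}

The payoff visible in the example is that adding a new rule no longer requires
touching the caller: the catch-all entry point grows instead of the call site.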