conlang: Rename literals; split, compose, and document Rules
- Renamed literal Types to reflect their literal nature. This allows for consistent naming across future non-literal Types.
- Complicated lexer Rules have been split into composable sub-rules and moved into the Rule struct. This improves modularity and allows sharing of sub-rules across rules.
- Documented each lexer rule with (at least) a one-line blurb describing its function.
This commit is contained in:
@@ -1,18 +1,16 @@
|
||||
//! This example grabs input from stdin, lexes it, and prints which lexer rules matched
|
||||
#![allow(unused_imports)]
|
||||
use conlang::lexer::Lexer;
|
||||
use std::{io::stdin, error::Error};
|
||||
use std::{error::Error, io::stdin};
|
||||
|
||||
fn main() -> Result<(), Box<dyn Error>>{
|
||||
fn main() -> Result<(), Box<dyn Error>> {
|
||||
// get input from stdin
|
||||
for line in stdin().lines() {
|
||||
let line = line?;
|
||||
// lex the line
|
||||
for func in [Lexer::line_comment, Lexer::block_comment, Lexer::shebang_comment, Lexer::identifier, Lexer::integer, Lexer::float, Lexer::string] {
|
||||
if let Some(token) = func(&mut Lexer::new(&line)) {
|
||||
println!("{:?}: {}", token, &line[token.range()])
|
||||
}
|
||||
let mut lexer = Lexer::new(&line);
|
||||
while let Some(token) = lexer.any() {
|
||||
println!("{:?}: {}", token, &line[token.range()])
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user