Update to match cl v0.0.5
This commit is contained in:
parent 700c990a11
commit c048cdfbf4
Cargo.lock (generated): 14 changed lines
@@ -59,14 +59,14 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "cl-ast"
-version = "0.0.4"
+version = "0.0.5"
 dependencies = [
  "cl-structures",
 ]
 
 [[package]]
 name = "cl-interpret"
-version = "0.0.4"
+version = "0.0.5"
 dependencies = [
  "cl-ast",
  "cl-structures",
@@ -74,7 +74,7 @@ dependencies = [
 
 [[package]]
 name = "cl-lexer"
-version = "0.0.4"
+version = "0.0.5"
 dependencies = [
  "cl-structures",
  "cl-token",
@@ -83,7 +83,7 @@ dependencies = [
 
 [[package]]
 name = "cl-parser"
-version = "0.0.4"
+version = "0.0.5"
 dependencies = [
  "cl-ast",
  "cl-lexer",
@@ -93,7 +93,7 @@ dependencies = [
 
 [[package]]
 name = "cl-repl"
-version = "0.0.4"
+version = "0.0.5"
 dependencies = [
  "argh",
  "cl-ast",
@@ -106,11 +106,11 @@ dependencies = [
 
 [[package]]
 name = "cl-structures"
-version = "0.0.4"
+version = "0.0.5"
 
 [[package]]
 name = "cl-token"
-version = "0.0.4"
+version = "0.0.5"
 
 [[package]]
 name = "crossterm"
src/lib.rs: 69 changed lines
@@ -194,7 +194,7 @@ pub mod parser {
 
 pub mod token {
     //! Custom token type, plus a [Tokenizer] iterator adapter for cl-lexer's token type
-    use cl_token::{token_type::Op as Tkop, *};
+    use cl_token::{token_type::Punct as Tkop, *};
 
     pub struct Tokenizer<'t> {
         lexer: cl_lexer::lexer_iter::LexerIter<'t>,
@@ -212,12 +212,11 @@ pub mod token {
         fn next(&mut self) -> Option<Self::Item> {
             let token = self.lexer.next()?.ok()?;
             let (ty, data) = (token.ty(), token.into_data());
-
-            match data {
-                TokenData::Integer(v) => return Some(Token::Int(v as _)),
-                TokenData::Character(v) => return Some(Token::Char(v)),
-                TokenData::Identifier(v) => return Some(Token::Ident(v.into_string())),
-                TokenData::String(v) => return Some(Token::Str(v.to_owned())),
+            match (ty, data) {
+                (TokenKind::Literal, TokenData::Integer(v)) => return Some(Token::Int(v as _)),
+                (TokenKind::Literal, TokenData::Character(v)) => return Some(Token::Char(v)),
+                (TokenKind::Literal, TokenData::String(v)) => return Some(Token::Str(v)),
+                (TokenKind::Identifier, TokenData::String(v)) => return Some(Token::Ident(v)),
                 _ => {}
             }
 
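Note: the hunk above switches the adapter from matching on the token data alone to matching on the (kind, data) pair, so identifiers and string literals stay distinct even though both now carry their text as TokenData::String. A self-contained sketch of the same pattern, using stand-in enums rather than the real cl-token types (all names below are illustrative assumptions, not the crate's API):

// Stand-in types for illustration only; the real cl-token definitions differ.
#[derive(Debug, PartialEq)]
enum TokenKind { Literal, Identifier }

#[derive(Debug, PartialEq)]
enum TokenData { Integer(u64), Character(char), String(String) }

#[derive(Debug, PartialEq)]
enum Token { Int(isize), Char(char), Str(String), Ident(String) }

// Matching on the pair keeps identifiers apart from string literals,
// even though both carry a String payload.
fn convert(kind: TokenKind, data: TokenData) -> Option<Token> {
    match (kind, data) {
        (TokenKind::Literal, TokenData::Integer(v)) => Some(Token::Int(v as _)),
        (TokenKind::Literal, TokenData::Character(v)) => Some(Token::Char(v)),
        (TokenKind::Literal, TokenData::String(v)) => Some(Token::Str(v)),
        (TokenKind::Identifier, TokenData::String(v)) => Some(Token::Ident(v)),
        _ => None,
    }
}

fn main() {
    assert_eq!(
        convert(TokenKind::Identifier, TokenData::String("x".into())),
        Some(Token::Ident("x".into()))
    );
}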
@@ -291,44 +290,44 @@ pub mod token {
     #[derive(Clone, Copy, Debug, PartialEq, Eq)]
     pub enum Op {
         // Delimiter
-        #["("] Lpa = TokenKind::Op(Tkop::LParen),
-        #[")"] Rpa = TokenKind::Op(Tkop::RParen),
-        #["["] Lbk = TokenKind::Op(Tkop::LBrack),
-        #["]"] Rbk = TokenKind::Op(Tkop::RBrack),
+        #["("] Lpa = TokenKind::Punct(Tkop::LParen),
+        #[")"] Rpa = TokenKind::Punct(Tkop::RParen),
+        #["["] Lbk = TokenKind::Punct(Tkop::LBrack),
+        #["]"] Rbk = TokenKind::Punct(Tkop::RBrack),
         // Member
-        #["."] Dot = TokenKind::Op(Tkop::Dot),
+        #["."] Dot = TokenKind::Punct(Tkop::Dot),
         // Factor
-        #["*"] Mul = TokenKind::Op(Tkop::Star),
-        #["/"] Div = TokenKind::Op(Tkop::Slash),
-        #["%"] Rem = TokenKind::Op(Tkop::Rem),
+        #["*"] Mul = TokenKind::Punct(Tkop::Star),
+        #["/"] Div = TokenKind::Punct(Tkop::Slash),
+        #["%"] Rem = TokenKind::Punct(Tkop::Rem),
         // Term
-        #["+"] Add = TokenKind::Op(Tkop::Plus),
-        #["-"] Sub = TokenKind::Op(Tkop::Minus),
+        #["+"] Add = TokenKind::Punct(Tkop::Plus),
+        #["-"] Sub = TokenKind::Punct(Tkop::Minus),
         // Shift
-        #["<<"] Shl = TokenKind::Op(Tkop::LtLt),
-        #[">>"] Shr = TokenKind::Op(Tkop::GtGt),
+        #["<<"] Shl = TokenKind::Punct(Tkop::LtLt),
+        #[">>"] Shr = TokenKind::Punct(Tkop::GtGt),
         // Bitwise
-        #["&"] Ban = TokenKind::Op(Tkop::Amp),
-        #["|"] Bor = TokenKind::Op(Tkop::Bar),
-        #["^"] Bxr = TokenKind::Op(Tkop::Xor),
+        #["&"] Ban = TokenKind::Punct(Tkop::Amp),
+        #["|"] Bor = TokenKind::Punct(Tkop::Bar),
+        #["^"] Bxr = TokenKind::Punct(Tkop::Xor),
         // Logic
-        #["&&"] Lan = TokenKind::Op(Tkop::AmpAmp),
-        #["||"] Lor = TokenKind::Op(Tkop::BarBar),
-        #["^^"] Lxr = TokenKind::Op(Tkop::XorXor),
+        #["&&"] Lan = TokenKind::Punct(Tkop::AmpAmp),
+        #["||"] Lor = TokenKind::Punct(Tkop::BarBar),
+        #["^^"] Lxr = TokenKind::Punct(Tkop::XorXor),
         // Range
-        #["..="] Inc = TokenKind::Op(Tkop::DotDotEq),
-        #[".."] Exc = TokenKind::Op(Tkop::DotDot),
+        #["..="] Inc = TokenKind::Punct(Tkop::DotDotEq),
+        #[".."] Exc = TokenKind::Punct(Tkop::DotDot),
         // Compare
-        #["<"] Lt = TokenKind::Op(Tkop::Lt),
-        #["<="] Lte = TokenKind::Op(Tkop::LtEq),
-        #["=="] Eq = TokenKind::Op(Tkop::EqEq),
-        #["!="] Neq = TokenKind::Op(Tkop::BangEq),
-        #[">="] Gte = TokenKind::Op(Tkop::GtEq),
-        #[">"] Gt = TokenKind::Op(Tkop::Gt),
+        #["<"] Lt = TokenKind::Punct(Tkop::Lt),
+        #["<="] Lte = TokenKind::Punct(Tkop::LtEq),
+        #["=="] Eq = TokenKind::Punct(Tkop::EqEq),
+        #["!="] Neq = TokenKind::Punct(Tkop::BangEq),
+        #[">="] Gte = TokenKind::Punct(Tkop::GtEq),
+        #[">"] Gt = TokenKind::Punct(Tkop::Gt),
         // Unary-only
-        #["!"] Not = TokenKind::Op(Tkop::Bang),
+        #["!"] Not = TokenKind::Punct(Tkop::Bang),
         // Postfix unary
-        #["?"] Huh = TokenKind::Op(Tkop::Question),
+        #["?"] Huh = TokenKind::Punct(Tkop::Question),
     }
 }
 
@@ -6,7 +6,7 @@ fn main() {
 
     while let Ok(line) = rl.read() {
         if let Some(expr) = parser::expr(&line) {
-            println!("\x1b[G\x1b[J{expr:?}");
+            println!("\x1b[G\x1b[J{expr}");
             rl.accept();
         }
     }
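Note: the last hunk swaps the Debug formatter, {expr:?}, for Display, {expr}, presumably because the parsed expression type gains a Display impl in v0.0.5 (an assumption; the impl itself is not shown in this diff). The escape sequences are unchanged: \x1b[G moves the cursor to column 1 and \x1b[J clears from the cursor to the end of the screen, so the echoed expression overwrites the in-progress REPL line. A minimal stand-alone sketch of the same output pattern, with a plain string standing in for the parsed expression:

fn main() {
    // Stand-in for a parsed expression whose Display impl pretty-prints it.
    let expr = "1 + 2 * 3";
    // \x1b[G: cursor to column 1; \x1b[J: erase from cursor to end of screen.
    println!("\x1b[G\x1b[J{expr}");
}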