diff --git a/src/lib.rs b/src/lib.rs
index bb61a54..f17540c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -194,7 +194,7 @@ pub mod parser {
 pub mod token {
     //! Custom token type, plus a [Tokenizer] iterator adapter for cl-lexer's token type
 
-    use cl_token::*;
+    use cl_token::{token_type::Op as Tkop, *};
 
     pub struct Tokenizer<'t> {
         lexer: cl_lexer::lexer_iter::LexerIter<'t>,
@@ -291,44 +291,44 @@ pub mod token {
     #[derive(Clone, Copy, Debug, PartialEq, Eq)]
     pub enum Op {
         // Delimiter
-        #["("] Lpa = TokenKind::LParen,
-        #[")"] Rpa = TokenKind::RParen,
-        #["["] Lbk = TokenKind::LBrack,
-        #["]"] Rbk = TokenKind::RBrack,
+        #["("] Lpa = TokenKind::Op(Tkop::LParen),
+        #[")"] Rpa = TokenKind::Op(Tkop::RParen),
+        #["["] Lbk = TokenKind::Op(Tkop::LBrack),
+        #["]"] Rbk = TokenKind::Op(Tkop::RBrack),
         // Member
-        #["."] Dot = TokenKind::Dot,
+        #["."] Dot = TokenKind::Op(Tkop::Dot),
         // Factor
-        #["*"] Mul = TokenKind::Star,
-        #["/"] Div = TokenKind::Slash,
-        #["%"] Rem = TokenKind::Rem,
+        #["*"] Mul = TokenKind::Op(Tkop::Star),
+        #["/"] Div = TokenKind::Op(Tkop::Slash),
+        #["%"] Rem = TokenKind::Op(Tkop::Rem),
         // Term
-        #["+"] Add = TokenKind::Plus,
-        #["-"] Sub = TokenKind::Minus,
+        #["+"] Add = TokenKind::Op(Tkop::Plus),
+        #["-"] Sub = TokenKind::Op(Tkop::Minus),
         // Shift
-        #["<<"] Shl = TokenKind::LtLt,
-        #[">>"] Shr = TokenKind::GtGt,
+        #["<<"] Shl = TokenKind::Op(Tkop::LtLt),
+        #[">>"] Shr = TokenKind::Op(Tkop::GtGt),
         // Bitwise
-        #["&"] Ban = TokenKind::Amp,
-        #["|"] Bor = TokenKind::Bar,
-        #["^"] Bxr = TokenKind::Xor,
+        #["&"] Ban = TokenKind::Op(Tkop::Amp),
+        #["|"] Bor = TokenKind::Op(Tkop::Bar),
+        #["^"] Bxr = TokenKind::Op(Tkop::Xor),
         // Logic
-        #["&&"] Lan = TokenKind::AmpAmp,
-        #["||"] Lor = TokenKind::BarBar,
-        #["^^"] Lxr = TokenKind::XorXor,
+        #["&&"] Lan = TokenKind::Op(Tkop::AmpAmp),
+        #["||"] Lor = TokenKind::Op(Tkop::BarBar),
+        #["^^"] Lxr = TokenKind::Op(Tkop::XorXor),
         // Range
-        #["..="] Inc = TokenKind::DotDotEq,
-        #[".."] Exc = TokenKind::DotDot,
+        #["..="] Inc = TokenKind::Op(Tkop::DotDotEq),
+        #[".."] Exc = TokenKind::Op(Tkop::DotDot),
         // Compare
-        #["<"] Lt = TokenKind::Lt,
-        #["<="] Lte = TokenKind::LtEq,
-        #["=="] Eq = TokenKind::EqEq,
-        #["!="] Neq = TokenKind::BangEq,
-        #[">="] Gte = TokenKind::GtEq,
-        #[">"] Gt = TokenKind::Gt,
+        #["<"] Lt = TokenKind::Op(Tkop::Lt),
+        #["<="] Lte = TokenKind::Op(Tkop::LtEq),
+        #["=="] Eq = TokenKind::Op(Tkop::EqEq),
+        #["!="] Neq = TokenKind::Op(Tkop::BangEq),
+        #[">="] Gte = TokenKind::Op(Tkop::GtEq),
+        #[">"] Gt = TokenKind::Op(Tkop::Gt),
         // Unary-only
-        #["!"] Not = TokenKind::Bang,
+        #["!"] Not = TokenKind::Op(Tkop::Bang),
         // Postfix unary
-        #["?"] Huh = TokenKind::Question,
+        #["?"] Huh = TokenKind::Op(Tkop::Question),
     }
 }
 
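Context: the patch switches every operator token from a flat cl_token variant (e.g. TokenKind::Plus) to a wrapped form, TokenKind::Op(Tkop::Plus), where Tkop is an alias for cl_token::token_type::Op. The sketch below is a minimal standalone illustration of that mapping shape, not the real cl_token API or the macro output in src/lib.rs: Tkop, TokenKind, and Op here are invented subsets, and from_token_kind is a hypothetical helper added only for the example.

// Standalone sketch with stand-in types; the attribute-style macro used in
// src/lib.rs and the actual cl_token crate are not reproduced here.

/// Stand-in for cl_token::token_type::Op (illustrative subset of variants).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Tkop {
    LParen,
    RParen,
    Plus,
    Minus,
}

/// Stand-in for cl_token::TokenKind after operators moved behind an Op(..) wrapper.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum TokenKind {
    Ident,
    Op(Tkop),
}

/// Parser-local operator enum, mirroring a small subset of the one in the diff.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Op {
    Lpa,
    Rpa,
    Add,
    Sub,
}

impl Op {
    /// Hypothetical helper: map a lexer token kind to the local Op, if it is an operator.
    /// Before the change, the arms would have matched flat variants such as TokenKind::Plus.
    fn from_token_kind(kind: TokenKind) -> Option<Self> {
        match kind {
            TokenKind::Op(Tkop::LParen) => Some(Op::Lpa),
            TokenKind::Op(Tkop::RParen) => Some(Op::Rpa),
            TokenKind::Op(Tkop::Plus) => Some(Op::Add),
            TokenKind::Op(Tkop::Minus) => Some(Op::Sub),
            _ => None,
        }
    }
}

fn main() {
    assert_eq!(Op::from_token_kind(TokenKind::Op(Tkop::Plus)), Some(Op::Add));
    assert_eq!(Op::from_token_kind(TokenKind::Ident), None);
}

In the diff itself the same correspondence is expressed through the crate's own #["..."] attribute syntax, presumably consumed by a macro, so only the right-hand TokenKind expressions needed to change.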