From 09f22d6bf38e25adafe89a4e9bd8b0544082b62a Mon Sep 17 00:00:00 2001
From: John
Date: Mon, 25 Sep 2023 16:51:18 -0500
Subject: [PATCH] lexer: Tokenize float literals (resolves #4)

---
 libconlang/src/lib.rs | 33 +++++++++++++++++++++++++++++++++
 1 file changed, 33 insertions(+)

diff --git a/libconlang/src/lib.rs b/libconlang/src/lib.rs
index 147d1f4..91d2635 100644
--- a/libconlang/src/lib.rs
+++ b/libconlang/src/lib.rs
@@ -10,6 +10,7 @@ pub mod token {
         Comment,
         Identifier,
         Integer,
+        Float,
         String,
     }
     #[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -132,6 +133,17 @@ pub mod lexer {
                     .end()?,
             )
         }
+        pub fn float(&mut self) -> Option<Token> {
+            self.skip_whitespace();
+            self.produce_token(
+                Type::Float,
+                Rule::new(self.text())
+                    .and_any(Rule::dec_digit)
+                    .char('.')
+                    .and_many(Rule::dec_digit)
+                    .end()?,
+            )
+        }
         pub fn string(&mut self) -> Option<Token> {
             self.skip_whitespace();
             self.produce_token(
@@ -376,6 +388,27 @@ mod tests {
             assert_whole_input_is_token("0b1010", Lexer::integer, Type::Integer);
         }
     }
+    mod float {
+        use super::*;
+        #[test]
+        fn number_dot_number_is_float() {
+            assert_whole_input_is_token("1.0", Lexer::float, Type::Float);
+        }
+        #[test]
+        fn nothing_dot_number_is_float() {
+            assert_whole_input_is_token(".0", Lexer::float, Type::Float);
+        }
+        #[test]
+        #[should_panic]
+        fn number_dot_nothing_is_not_float() {
+            assert_whole_input_is_token("1.", Lexer::float, Type::Float);
+        }
+        #[test]
+        #[should_panic]
+        fn nothing_dot_nothing_is_not_float() {
+            assert_whole_input_is_token(".", Lexer::float, Type::Float);
+        }
+    }
     mod string {
         use super::*;
         #[test]
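
Note (illustration only, not part of the patch): per the tests above, the new rule accepts inputs of the shape dec_digit* '.' dec_digit+, i.e. digits before the dot are optional, at least one digit must follow it, and the whole input must match. A minimal standalone sketch of that shape, using a hypothetical looks_like_float helper rather than the crate's Rule builder:

fn looks_like_float(s: &str) -> bool {
    let mut chars = s.chars().peekable();
    // Zero or more decimal digits before the dot (cf. .and_any(Rule::dec_digit)).
    while matches!(chars.peek(), Some(c) if c.is_ascii_digit()) {
        chars.next();
    }
    // Exactly one '.' (cf. .char('.')).
    if chars.next() != Some('.') {
        return false;
    }
    // One or more decimal digits after the dot (cf. .and_many(Rule::dec_digit)).
    let mut digits_after_dot = 0;
    while matches!(chars.peek(), Some(c) if c.is_ascii_digit()) {
        chars.next();
        digits_after_dot += 1;
    }
    // The entire input must be consumed, mirroring assert_whole_input_is_token.
    digits_after_dot > 0 && chars.next().is_none()
}

fn main() {
    assert!(looks_like_float("1.0"));  // number_dot_number_is_float
    assert!(looks_like_float(".0"));   // nothing_dot_number_is_float
    assert!(!looks_like_float("1."));  // number_dot_nothing_is_not_float
    assert!(!looks_like_float("."));   // nothing_dot_nothing_is_not_float
}

This sketch passes the same four cases exercised by the new tests; the patch itself expresses the pattern through the lexer's existing Rule combinators.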