lexer: Tokenize float literals (resolves #4)

John 2023-09-25 16:51:18 -05:00
parent 8ddf73dc76
commit 09f22d6bf3


@@ -10,6 +10,7 @@ pub mod token {
         Comment,
         Identifier,
         Integer,
+        Float,
         String,
     }
     #[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -132,6 +133,17 @@ pub mod lexer {
                     .end()?,
             )
         }
+        pub fn float(&mut self) -> Option<Token> {
+            self.skip_whitespace();
+            self.produce_token(
+                Type::Float,
+                Rule::new(self.text())
+                    .and_any(Rule::dec_digit)
+                    .char('.')
+                    .and_many(Rule::dec_digit)
+                    .end()?,
+            )
+        }
         pub fn string(&mut self) -> Option<Token> {
             self.skip_whitespace();
             self.produce_token(
@@ -376,6 +388,27 @@ mod tests {
             assert_whole_input_is_token("0b1010", Lexer::integer, Type::Integer);
         }
     }
+    mod float {
+        use super::*;
+        #[test]
+        fn number_dot_number_is_float() {
+            assert_whole_input_is_token("1.0", Lexer::float, Type::Float);
+        }
+        #[test]
+        fn nothing_dot_number_is_float() {
+            assert_whole_input_is_token(".0", Lexer::float, Type::Float);
+        }
+        #[test]
+        #[should_panic]
+        fn number_dot_nothing_is_not_float() {
+            assert_whole_input_is_token("1.", Lexer::float, Type::Float);
+        }
+        #[test]
+        #[should_panic]
+        fn nothing_dot_nothing_is_not_float() {
+            assert_whole_input_is_token(".", Lexer::float, Type::Float);
+        }
+    }
     mod string {
         use super::*;
         #[test]
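
Aside, not part of the commit: judging by the builder calls and the tests above, the new rule accepts the grammar dec_digit* '.' dec_digit+, i.e. an optional integer part, a mandatory decimal point, and at least one fractional digit. A minimal standalone sketch of that grammar follows; it does not use this repository's Rule builder, and the function name matches_float_literal is illustrative only.

// Illustrative sketch of the float grammar (dec_digit* '.' dec_digit+);
// standalone, not wired into the lexer's Rule machinery.
fn matches_float_literal(s: &str) -> bool {
    // Skip an optional run of decimal digits (the integer part).
    let rest = s.trim_start_matches(|c: char| c.is_ascii_digit());
    // A decimal point is mandatory, followed by one or more digits and nothing else.
    match rest.strip_prefix('.') {
        Some(frac) => !frac.is_empty() && frac.chars().all(|c| c.is_ascii_digit()),
        None => false,
    }
}

fn main() {
    // Mirrors the tests above: "1.0" and ".0" are floats, "1." and "." are not.
    assert!(matches_float_literal("1.0"));
    assert!(matches_float_literal(".0"));
    assert!(!matches_float_literal("1."));
    assert!(!matches_float_literal("."));
}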