cl-token: Move token definition into its own crate

John 2024-02-29 19:36:06 -06:00
parent ee27095fb3
commit 6e1d5af134
14 changed files with 41 additions and 30 deletions
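For downstream code, the net effect is that token types which previously lived under conlang::token (and its preamble module) now come from the dedicated cl-token crate. A minimal before/after sketch of the import change, using only paths that appear in this diff:

    // Before this commit:
    // use conlang::token::preamble::*;

    // After this commit:
    use cl_token::{Data, Keyword, Token, Type};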

@@ -1,5 +1,11 @@
 [workspace]
-members = ["libconlang", "cl-repl", "cl-interpret", "cl-structures"]
+members = [
+    "libconlang",
+    "cl-repl",
+    "cl-interpret",
+    "cl-structures",
+    "cl-token",
+]
 resolver = "2"
 [workspace.package]

@@ -12,6 +12,7 @@ publish.workspace = true
 [dependencies]
 conlang = { path = "../libconlang" }
 cl-interpret = { path = "../cl-interpret" }
+cl-token = { path = "../cl-token" }
 crossterm = "0.27.0"
 [dev-dependencies]

@@ -1,5 +1,6 @@
 //! This example grabs input from stdin, lexes it, and prints which lexer rules matched
 #![allow(unused_imports)]
+use cl_token::Token;
 use conlang::lexer::Lexer;
 use std::{
     error::Error,
@@ -57,7 +58,7 @@ fn lex_tokens(file: &str, path: Option<&Path>) -> Result<(), Box<dyn Error>> {
     Ok(())
 }
-fn print_token(t: conlang::token::Token) {
+fn print_token(t: Token) {
     println!(
         "{:02}:{:02}: {:#19} │{}│",
         t.line(),

@@ -194,7 +194,8 @@ pub mod cli {
         program::{Parsable, Parsed, Program},
     };
     use cl_interpret::env::Environment;
-    use conlang::{resolver::Resolver, token::Token};
+    use cl_token::Token;
+    use conlang::resolver::Resolver;
     use std::{
         convert::Infallible,
         error::Error,

cl-token/Cargo.toml Normal file
@@ -0,0 +1,10 @@
+[package]
+name = "cl-token"
+repository.workspace = true
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+publish.workspace = true
+
+[dependencies]

cl-token/src/lib.rs Normal file
@@ -0,0 +1,12 @@
+//! # Token
+//!
+//! Stores a component of a file as a [Type], some [Data], and a line and column number
+#![feature(decl_macro)]
+pub mod token;
+pub mod token_data;
+pub mod token_type;
+pub use token::Token;
+pub use token_data::Data;
+pub use token_type::{Keyword, Type};
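Because the new crate root re-exports Token, Data, Type, and Keyword, downstream crates can choose between a glob import (as the lexer, parser, and test changes below do) and explicit paths; a quick sketch of both styles:

    use cl_token::*;                   // everything the crate root re-exports
    use cl_token::token_type::Keyword; // or name items through their modules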

@@ -1,20 +1,5 @@
-//! # Token
-//!
-//! Stores a component of a file as a [Type], some [Data], and a line and column number
-pub mod token_data;
-pub mod token_type;
-pub mod preamble {
-    //! Common imports for working with [tokens](super)
-    pub use super::{
-        token_data::Data,
-        token_type::{Keyword, Type},
-        Token,
-    };
-}
-use token_data::Data;
-use token_type::Type;
+//! A [Token] contains a single unit of lexical information, and an optional bit of [Data]
+use super::{Data, Type};
 /// Contains a single unit of lexical information,
 /// and an optional bit of [Data]
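The struct body itself lies outside this hunk. From the doc comments and the one accessor this commit exercises (t.line() in the REPL example above), its shape is plausibly something like the sketch below; the field names and integer widths are assumptions, not part of the diff:

    // Hypothetical sketch; only the line() accessor is confirmed by this commit.
    pub struct Token {
        ty: Type,    // lexical category of this token
        data: Data,  // optional payload, e.g. identifier text or a literal value
        line: u32,   // line number, as returned by Token::line()
        col: u32,    // column number
    }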

@@ -14,3 +14,4 @@ repository.workspace = true
 [dependencies]
 unicode-xid = "0.2.4"
 cl-structures = { path = "../cl-structures" }
+cl-token = { path = "../cl-token" }

@@ -1,5 +1,5 @@
 //! Converts a text file into tokens
-use crate::token::preamble::*;
+use cl_token::*;
 use cl_structures::span::Loc;
 use std::{
     iter::Peekable,

@@ -2,8 +2,6 @@
 #![warn(clippy::all)]
 #![feature(decl_macro)]
-pub mod token;
 pub mod ast;
 pub mod lexer;

@@ -12,13 +12,9 @@ use self::error::{
 use crate::{
     ast::*,
     lexer::{error::Error as LexError, Lexer},
-    token::{
-        token_data::Data,
-        token_type::{Keyword, Type},
-        Token,
-    },
 };
 use cl_structures::span::*;
+use cl_token::*;
 pub mod error {
     use std::fmt::Display;

@@ -5,8 +5,8 @@ mod ast {
     // TODO
 }
 mod lexer {
-    #[allow(unused_imports)]
-    use crate::{lexer::Lexer, token::preamble::*};
+    use crate::lexer::Lexer;
+    use cl_token::*;
     macro test_lexer_output_type ($($f:ident {$($test:expr => $expect:expr),*$(,)?})*) {$(
         #[test]
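The hunk is cut off mid-macro, but the signature alone shows the shape of an invocation: each $f names a generated #[test] fn, and each $test => $expect pair is an input and its expected lexer output. A hypothetical call (the Type::Keyword(Keyword::Fn) expectation is illustrative only, since the enum's variants are not shown in this diff):

    test_lexer_output_type! {
        keywords {
            "fn" => Type::Keyword(Keyword::Fn),
        }
    }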