cl-token: Move token definition into its own crate
parent ee27095fb3
commit 6e1d5af134
@@ -1,5 +1,11 @@
 [workspace]
-members = ["libconlang", "cl-repl", "cl-interpret", "cl-structures"]
+members = [
+    "libconlang",
+    "cl-repl",
+    "cl-interpret",
+    "cl-structures",
+    "cl-token",
+]
 resolver = "2"
 
 [workspace.package]

@@ -12,6 +12,7 @@ publish.workspace = true
 [dependencies]
 conlang = { path = "../libconlang" }
 cl-interpret = { path = "../cl-interpret" }
+cl-token = { path = "../cl-token" }
 crossterm = "0.27.0"
 
 [dev-dependencies]

@@ -1,5 +1,6 @@
 //! This example grabs input from stdin, lexes it, and prints which lexer rules matched
 #![allow(unused_imports)]
+use cl_token::Token;
 use conlang::lexer::Lexer;
 use std::{
     error::Error,

@@ -57,7 +58,7 @@ fn lex_tokens(file: &str, path: Option<&Path>) -> Result<(), Box<dyn Error>> {
     Ok(())
 }
 
-fn print_token(t: conlang::token::Token) {
+fn print_token(t: Token) {
     println!(
         "{:02}:{:02}: {:#19} │{}│",
         t.line(),

@@ -194,7 +194,8 @@ pub mod cli {
         program::{Parsable, Parsed, Program},
     };
     use cl_interpret::env::Environment;
-    use conlang::{resolver::Resolver, token::Token};
+    use cl_token::Token;
+    use conlang::resolver::Resolver;
     use std::{
         convert::Infallible,
         error::Error,

cl-token/Cargo.toml (new file, 10 lines)
@@ -0,0 +1,10 @@
+[package]
+name = "cl-token"
+repository.workspace = true
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+publish.workspace = true
+
+[dependencies]

cl-token/src/lib.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
+//! # Token
+//!
+//! Stores a component of a file as a [Type], some [Data], and a line and column number
+#![feature(decl_macro)]
+
+pub mod token;
+pub mod token_data;
+pub mod token_type;
+
+pub use token::Token;
+pub use token_data::Data;
+pub use token_type::{Keyword, Type};
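
With the re-exports above, downstream code names token types through cl_token directly instead of the old conlang::token::preamble glob. A minimal sketch of the new import style follows; the helper function is illustrative only and not part of this commit, and Token::line() is the only accessor visible elsewhere in this diff:

use cl_token::Token;

/// Hypothetical helper: reports where a token starts.
/// Assumes only `Token::line()`, as used by `print_token` in this diff.
fn report(token: &Token) {
    println!("token at line {:02}", token.line());
}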

@@ -1,20 +1,5 @@
-//! # Token
-//!
-//! Stores a component of a file as a [Type], some [Data], and a line and column number
-
-pub mod token_data;
-pub mod token_type;
-pub mod preamble {
-    //! Common imports for working with [tokens](super)
-    pub use super::{
-        token_data::Data,
-        token_type::{Keyword, Type},
-        Token,
-    };
-}
-
-use token_data::Data;
-use token_type::Type;
+//! A [Token] contains a single unit of lexical information, and an optional bit of [Data]
+use super::{Data, Type};
 
 /// Contains a single unit of lexical information,
 /// and an optional bit of [Data]

@@ -14,3 +14,4 @@ repository.workspace = true
 [dependencies]
 unicode-xid = "0.2.4"
 cl-structures = { path = "../cl-structures" }
+cl-token = { path = "../cl-token" }

@@ -1,5 +1,5 @@
 //! Converts a text file into tokens
-use crate::token::preamble::*;
+use cl_token::*;
 use cl_structures::span::Loc;
 use std::{
     iter::Peekable,

@@ -2,8 +2,6 @@
 #![warn(clippy::all)]
 #![feature(decl_macro)]
 
-pub mod token;
-
 pub mod ast;
 
 pub mod lexer;

@@ -12,13 +12,9 @@ use self::error::{
 use crate::{
     ast::*,
     lexer::{error::Error as LexError, Lexer},
-    token::{
-        token_data::Data,
-        token_type::{Keyword, Type},
-        Token,
-    },
 };
 use cl_structures::span::*;
+use cl_token::*;
 
 pub mod error {
     use std::fmt::Display;

@@ -5,8 +5,8 @@ mod ast {
     // TODO
 }
 mod lexer {
-    #[allow(unused_imports)]
-    use crate::{lexer::Lexer, token::preamble::*};
+    use crate::lexer::Lexer;
+    use cl_token::*;
 
     macro test_lexer_output_type ($($f:ident {$($test:expr => $expect:expr),*$(,)?})*) {$(
         #[test]