cl-token: Move token definition into its own crate

parent ee27095fb3
commit 6e1d5af134
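
For code that depends on libconlang, this is a pure import migration: the token types move out of the `conlang::token` module and into the new `cl-token` crate, with no behavioral change. A minimal sketch of the before/after (the helper function and the `col()` accessor are illustrative assumptions; only `line()` appears in the hunks below):

    // Before this commit:
    //   use conlang::token::Token;        // single item
    //   use conlang::token::preamble::*;  // Token, Data, Type, Keyword
    // After this commit:
    use cl_token::Token; // or `use cl_token::*;` for the whole set

    // Hypothetical helper showing the type in use; assumes u32-sized
    // line/column accessors on Token.
    fn location_of(t: &Token) -> (u32, u32) {
        (t.line(), t.col())
    }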
@@ -1,5 +1,11 @@
 [workspace]
-members = ["libconlang", "cl-repl", "cl-interpret", "cl-structures"]
+members = [
+    "libconlang",
+    "cl-repl",
+    "cl-interpret",
+    "cl-structures",
+    "cl-token",
+]
 resolver = "2"
 
 [workspace.package]
@@ -12,6 +12,7 @@ publish.workspace = true
 [dependencies]
 conlang = { path = "../libconlang" }
 cl-interpret = { path = "../cl-interpret" }
+cl-token = { path = "../cl-token" }
 crossterm = "0.27.0"
 
 [dev-dependencies]
@@ -1,5 +1,6 @@
 //! This example grabs input from stdin, lexes it, and prints which lexer rules matched
 #![allow(unused_imports)]
+use cl_token::Token;
 use conlang::lexer::Lexer;
 use std::{
     error::Error,
@@ -57,7 +58,7 @@ fn lex_tokens(file: &str, path: Option<&Path>) -> Result<(), Box<dyn Error>> {
     Ok(())
 }
 
-fn print_token(t: conlang::token::Token) {
+fn print_token(t: Token) {
     println!(
         "{:02}:{:02}: {:#19} │{}│",
         t.line(),
@@ -194,7 +194,8 @@ pub mod cli {
         program::{Parsable, Parsed, Program},
     };
     use cl_interpret::env::Environment;
-    use conlang::{resolver::Resolver, token::Token};
+    use cl_token::Token;
+    use conlang::resolver::Resolver;
     use std::{
         convert::Infallible,
         error::Error,
cl-token/Cargo.toml (new file, +10)
@@ -0,0 +1,10 @@
+[package]
+name = "cl-token"
+repository.workspace = true
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+publish.workspace = true
+
+[dependencies]
cl-token/src/lib.rs (new file, +12)
@@ -0,0 +1,12 @@
+//! # Token
+//!
+//! Stores a component of a file as a [Type], some [Data], and a line and column number
+#![feature(decl_macro)]
+
+pub mod token;
+pub mod token_data;
+pub mod token_type;
+
+pub use token::Token;
+pub use token_data::Data;
+pub use token_type::{Keyword, Type};
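
The crate root re-exports everything a consumer needs, which is what lets `use cl_token::*;` replace the old preamble glob in the hunks below. The diff never shows the struct body in `token.rs`; a plausible minimal shape, inferred from the doc comment ("a [Type], some [Data], and a line and column number") and the `line()` accessor used in the lexer example, might be (every field name and integer width here is an assumption):

    // Hypothetical sketch of cl-token/src/token.rs, not the literal contents.
    use super::{Data, Type};

    pub struct Token {
        ty: Type,   // lexical category (keyword, identifier, literal, ...)
        data: Data, // optional payload carried by the token
        line: u32,  // line where the token starts
        col: u32,   // column where the token starts
    }

    impl Token {
        pub fn ty(&self) -> &Type { &self.ty }
        pub fn data(&self) -> &Data { &self.data }
        pub fn line(&self) -> u32 { self.line }
        pub fn col(&self) -> u32 { self.col }
    }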
@@ -1,20 +1,5 @@
-//! # Token
-//!
-//! Stores a component of a file as a [Type], some [Data], and a line and column number
-
-pub mod token_data;
-pub mod token_type;
-
-pub mod preamble {
-    //! Common imports for working with [tokens](super)
-    pub use super::{
-        token_data::Data,
-        token_type::{Keyword, Type},
-        Token,
-    };
-}
-
-use token_data::Data;
-use token_type::Type;
-
+//! A [Token] contains a single unit of lexical information, and an optional bit of [Data]
+use super::{Data, Type};
+
 /// Contains a single unit of lexical information,
 /// and an optional bit of [Data]
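
Design note: the deleted `preamble` module existed only so consumers could glob-import the common token items with `use crate::token::preamble::*;`. With the types in their own crate, the crate root plays that role; the re-exports in cl-token/src/lib.rs make `use cl_token::*;` the equivalent one-liner, which is exactly the substitution the lexer, parser, and test hunks below perform.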
@@ -14,3 +14,4 @@ repository.workspace = true
 [dependencies]
 unicode-xid = "0.2.4"
 cl-structures = { path = "../cl-structures" }
+cl-token = { path = "../cl-token" }
@@ -1,5 +1,5 @@
 //! Converts a text file into tokens
-use crate::token::preamble::*;
+use cl_token::*;
 use cl_structures::span::Loc;
 use std::{
     iter::Peekable,
@@ -2,8 +2,6 @@
 #![warn(clippy::all)]
 #![feature(decl_macro)]
 
-pub mod token;
-
 pub mod ast;
 
 pub mod lexer;
@@ -12,13 +12,9 @@ use self::error::{
 use crate::{
     ast::*,
     lexer::{error::Error as LexError, Lexer},
-    token::{
-        token_data::Data,
-        token_type::{Keyword, Type},
-        Token,
-    },
 };
 use cl_structures::span::*;
+use cl_token::*;
 
 pub mod error {
     use std::fmt::Display;
@@ -5,8 +5,8 @@ mod ast {
     // TODO
 }
 mod lexer {
-    #[allow(unused_imports)]
-    use crate::{lexer::Lexer, token::preamble::*};
+    use crate::lexer::Lexer;
+    use cl_token::*;
 
     macro test_lexer_output_type ($($f:ident {$($test:expr => $expect:expr),*$(,)?})*) {$(
         #[test]
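
The `test_lexer_output_type` declarative macro (enabled by `#![feature(decl_macro)]`) stamps out one `#[test]` function per named block, each checking that lexing an input yields the expected token `Type`. The diff truncates the macro body; a hedged sketch of the pattern it implements (the assertion helper and the `Keyword` variants are assumptions):

    // Illustrative reconstruction of the macro's shape.
    macro test_lexer_output_type ($($f:ident {$($test:expr => $expect:expr),*$(,)?})*) {$(
        #[test]
        fn $f() {
            // `type_of` stands in for however the real macro lexes
            // `$test` and extracts the resulting token's Type.
            $(assert_eq!(type_of($test), $expect);)*
        }
    )*}

    // An invocation might look like:
    // test_lexer_output_type! {
    //     keywords {
    //         "break" => Type::Keyword(Keyword::Break),
    //         "while" => Type::Keyword(Keyword::While),
    //     }
    // }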