main.rs: Cleanup dead and commented-out code
parent a9ee7d3bc9
commit d283043440
src/main.rs (72 changed lines)
@@ -1,21 +1,9 @@
 //! Simple frontend for the assembler
-
+use msp430_asm::preamble::*;
 use std::io::Read;
-
-use msp430_asm::preamble::*;
-
-// const ASM: &str = r"
-// //.org 8000
-// //.define INT #2400
-// //entry:
-//   mov.b 8000(sp), r15 ; pop into sp
-//   rrc @pc+
-//   add #64, r8
-//   call #10 // call INT
-// ";
 
 fn main() -> Result<(), Error> {
     // Get args
     let mut repl = true;
     for arg in std::env::args() {
         match arg.as_str() {
@@ -24,39 +12,23 @@ fn main() -> Result<(), Error> {
         }
     }
-
     // Decide if repl mode is enabled
     let mut buf = String::new();
-
     if repl {
-        // print!("> ");
-        // let _ = std::io::stdout().flush();
         while let Ok(len) = std::io::stdin().read_line(&mut buf) {
             match len {
-                0 => break,
-                1 => continue,
+                0 => break, // No newline (reached EOF)
+                1 => continue, // Line is empty
                 _ => (),
             }
-            if len < 1 {
-                break;
-            }
-            // print!("\nLexer: ");
-            // tokenizer_dump(&mut Tokenizer::new(&buf));
-            //print!("Parser: ");
             match Parser::default().parse(&buf) {
                 Ok(line) => println!("{line:x}"),
-                //Ok(tree) => println!("=> {tree}\n => {tree:x}"),
                 Err(error) => println!("{error}"),
             }
-            buf.clear();
-            // print!("> ");
-            // let _ = std::io::stdout().flush();
+            buf.clear(); // Reuse buf's allocation
         }
     } else {
         std::io::stdin().lock().read_to_string(&mut buf).map_err(|_| Error::EndOfFile)?;
         let mut tk = Tokenizer::new(&buf);
-
-        // println!("Lexer: ");
-        // tokenizer_dump(&mut Tokenizer::new(&buf));
         let tree = Parser::default().parse_with(&mut tk);
         match &tree {
             Ok(tree) => println!("{tree:x}"),
@@ -66,39 +38,3 @@ fn main() -> Result<(), Error> {
 
     Ok(())
 }
-
-#[allow(dead_code)]
-fn tokenizer_dump<'text, T: TokenStream<'text>>(t: &mut T) {
-    for token in t {
-        match token.variant() {
-            //Token::Space => (),
-            Type::Endl => {
-                println!();
-                continue;
-            }
-            Type::Comment => (),
-            Type::Label => (),
-            Type::Insn => (),
-            Type::ByteWidth => (),
-            Type::WordWidth => (),
-            Type::Register => (),
-            Type::RadixMarkerHex => (),
-            Type::RadixMarkerOct => (),
-            Type::RadixMarkerBin => (),
-            Type::Number => (),
-            Type::Minus => (),
-            Type::LParen => (),
-            Type::RParen => (),
-            Type::Indirect => (),
-            Type::Plus => (),
-            Type::Absolute => (),
-            Type::Immediate => (),
-            Type::Identifier => (),
-            Type::Directive => (),
-            Type::Separator => (),
-            Type::EndOfFile => (),
-            _ => continue,
-        };
-        print!("{token:?} ");
-    }
-}
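The REPL path kept by this commit follows a common read_line pattern: Ok(0) means nothing was read (EOF), Ok(1) means the line holds only its newline, and the buffer is cleared each iteration so its allocation is reused. Below is a minimal, self-contained sketch of that pattern; parse_line is a hypothetical stand-in for the crate's Parser::default().parse, which is not reproduced here.

// Sketch of the surviving REPL loop; `parse_line` is a placeholder for
// msp430_asm's Parser::default().parse(&buf), not the real parser.
fn parse_line(line: &str) -> Result<String, String> {
    Ok(format!("parsed: {}", line.trim()))
}

fn main() {
    let mut buf = String::new();
    loop {
        buf.clear(); // reuse buf's allocation between lines
        match std::io::stdin().read_line(&mut buf) {
            Ok(0) | Err(_) => break, // no newline read: EOF (or a read error)
            Ok(1) => continue,       // only a newline: the line is empty
            Ok(_) => (),
        }
        match parse_line(&buf) {
            Ok(out) => println!("{out}"),
            Err(err) => println!("{err}"),
        }
    }
}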