More lexer work (almost working)

This commit is contained in:
TerraMaster85 2024-01-16 09:21:51 -05:00
parent 227cddf0d4
commit d13c9befb2
4 changed files with 66 additions and 15 deletions

1
.gitignore vendored
View File

@@ -1 +1,2 @@
kabel-rs/target kabel-rs/target
*/target

View File

@@ -0,0 +1 @@

View File

@@ -1,5 +1,5 @@
use crate::lexutil; use crate::lexutil;
use lexutil::{Bracket, Literal, Statement, Token}; use lexutil::{Bracket, Literal, Statement, ArithOperator, Token};
use std::error::Error; use std::error::Error;
pub fn lexer(text_source: &str) -> Result<Vec<Token>, Box<dyn Error>> { pub fn lexer(text_source: &str) -> Result<Vec<Token>, Box<dyn Error>> {
@@ -11,20 +11,30 @@ pub fn lexer(text_source: &str) -> Result<Vec<Token>, Box<dyn Error>> {
quoting: false, quoting: false,
commenting: false, commenting: false,
numbering: false, numbering: false,
escape_next: false,
}; };
for (i, c) in text_source.chars().enumerate() { for (i, c) in text_source.chars().enumerate() {
dbg!("{} into {}", &c, &state); dbg!("{} into {}", &c, &state);
// Commenting end
if state.commenting && c == '\n' { if state.commenting && c == '\n' {
state.commenting = false; state.commenting = false;
continue;
} }
// TODO: descriptive error // Commenting continue
if c == '\"' && state.current_token.clone().pop().ok_or(panic!()) != Ok('\\') { if state.commenting {
if c == '\n' {
state.commenting = false;
}
continue;
}
// Stringing begin/end
if c == '\"' && !state.escape_next {
if state.quoting { if state.quoting {
let mut tok_cpy = state.current_token.clone(); let tok_cpy = state.current_token.clone();
tok_cpy.pop();
state.lexed.push(Token::Literal(Literal::Str(tok_cpy))); state.lexed.push(Token::Literal(Literal::Str(tok_cpy)));
state.current_token = String::new(); state.current_token = String::new();
state.quoting = false; state.quoting = false;
@@ -33,14 +43,19 @@ pub fn lexer(text_source: &str) -> Result<Vec<Token>, Box<dyn Error>> {
state.quoting = true; state.quoting = true;
} }
continue; continue;
} } else if state.escape_next {
state.current_token.push(c);
if state.commenting { state.escape_next = false;
continue; continue;
} }
// Stringing continue
if state.quoting { if state.quoting {
if c == '\\' {
state.escape_next = true;
}
state.current_token.push(c); state.current_token.push(c);
continue;
} }
if c.is_ascii_digit() { if c.is_ascii_digit() {
@@ -48,13 +63,13 @@ pub fn lexer(text_source: &str) -> Result<Vec<Token>, Box<dyn Error>> {
state.numbering = true; state.numbering = true;
} }
} else if state.numbering && !c.is_ascii_digit() { } else if state.numbering && !c.is_ascii_digit() {
state.lexed.push(Token::Literal(Literal::Num(state.current_token.parse::<f64>().unwrap()))); state.lexed.push(Token::Literal(Literal::Num(
state.current_token.parse::<f64>().unwrap(),
)));
state.current_token = String::new(); state.current_token = String::new();
state.numbering = false; state.numbering = false;
} }
state.current_token.push(c);
// Known meaningful tokens // Known meaningful tokens
match state.current_token.as_str() { match state.current_token.as_str() {
"\n" => { "\n" => {
@@ -62,6 +77,7 @@ pub fn lexer(text_source: &str) -> Result<Vec<Token>, Box<dyn Error>> {
} }
"#" => { "#" => {
state.commenting = true; state.commenting = true;
state.current_token = String::new();
continue; continue;
} }
";" => { ";" => {
@@ -98,9 +114,41 @@ pub fn lexer(text_source: &str) -> Result<Vec<Token>, Box<dyn Error>> {
state.current_token = String::new(); state.current_token = String::new();
continue; continue;
} }
"*" => {
state.lexed.push(Token::ArithOperator(ArithOperator::Multiply));
state.current_token = String::new();
continue;
}
"/" => {
state.lexed.push(Token::ArithOperator(ArithOperator::Divide));
state.current_token = String::new();
continue;
}
"+" => {
state.lexed.push(Token::ArithOperator(ArithOperator::Add));
state.current_token = String::new();
continue;
}
"-" => {
state.lexed.push(Token::ArithOperator(ArithOperator::Subtract));
state.current_token = String::new();
continue;
}
"^" => {
state.lexed.push(Token::ArithOperator(ArithOperator::Exponentiate));
state.current_token = String::new();
continue;
}
"%" => {
state.lexed.push(Token::ArithOperator(ArithOperator::Reduce));
state.current_token = String::new();
continue;
}
&_ => {}
&_ => { state.current_token.push(c); }
} }
} }
Ok(state.lexed) Ok(state.lexed)

View File

@@ -3,13 +3,13 @@ use crate::variables;
// parts of Token // parts of Token
#[derive(Debug)] #[derive(Debug)]
pub enum ArithmeticOperator { pub enum ArithOperator {
Add, Add,
Subtract, Subtract,
Multiply, Multiply,
Divide, Divide,
Exponentiate, Exponentiate,
Modulus, Reduce,
} }
#[derive(Debug)] #[derive(Debug)]
@@ -42,7 +42,7 @@ pub enum Bracket {
#[derive(Debug)] #[derive(Debug)]
pub enum Token { pub enum Token {
Literal(Literal), Literal(Literal),
ArithmeticOperator(ArithmeticOperator), ArithOperator(ArithOperator),
Statement(Statement), Statement(Statement),
Bracket(Bracket), Bracket(Bracket),
Variable(variables::Variable), Variable(variables::Variable),
@@ -55,4 +55,5 @@ pub struct LexerMachine {
pub quoting: bool, pub quoting: bool,
pub commenting: bool, pub commenting: bool,
pub numbering: bool, pub numbering: bool,
pub escape_next: bool,
} }