//! asklyphe/asklyphe-frontend/src/math.rs
//!
//! Inline calculator for search queries: a hand-written lexer and a
//! (work-in-progress) parser for simple arithmetic expressions.
use tracing::{debug, error};
use once_cell::sync::Lazy;
/// A solved math query, ready for display: the equation that was evaluated
/// and its result, both as strings.
#[derive(Debug, Clone, PartialEq)]
pub struct Calculation {
    /// The equation text shown back to the user.
    pub equation: String,
    /// The computed value, formatted as a string.
    pub result: String,
}
/// Attempt to interpret `query` as a math expression and evaluate it.
///
/// Currently a stub: the lexer/parser pipeline is constructed, but because
/// `Parser::parse` is not implemented yet this always returns `None`
/// (i.e. "not a math query").
pub fn calculate(query: &str) -> Option<Calculation> {
    debug!("Got query {}", query);
    // Underscore-prefixed: wired up for the future implementation but not
    // used yet (avoids an unused-variable warning).
    let _parser = Parser::new(Lexer::new(query));
    None
}
// TODO: put into own crate with dependency astro-float = "0.9.2" so I can use more than f64
/// A single lexical unit of a math expression, produced by `Lexer::next`.
#[derive(Debug)]
enum Token {
// the binary operators '+', '-', '×'/'*', '÷'/'/', '^'
Add,
Subtract,
Multiply,
Divide,
Exponent,
// grouping parentheses '(' and ')'
LParen,
RParen,
// an unsigned integer literal (no sign or decimal point is lexed yet)
Number(f64),
// a named function such as sin/cos/log
// NOTE(review): no lexer rule produces this variant yet — confirm intended
Func(Func),
}
/// Named functions the calculator intends to support.
/// (No lexer rule emits these yet; the variants are forward declarations.)
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Func {
    Sine,
    Cosine,
    Tangent,
    // sin-1, cos-1, tan-1
    ArcSine,
    ArcCosine,
    ArcTangent,
    /// log base 2
    Log2,
    /// log base 10
    Log10,
    // NOTE(review): presumably the natural log (ln) — confirm
    LogN,
    Square,
    SquareRoot,
}
/// Why the lexer could not produce a token.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Fault {
    /// End of input — the normal termination condition for `lex_all`.
    Eof,
    /// Unrecognized character, or a number that failed to parse.
    Invalid,
}
// this can probably be swapped out with a lexer generator like Logos if needed
/// Hand-written single-pass lexer over a math-expression string.
struct Lexer<'a> {
// the full input string being lexed
data: &'a str,
// suffix of `data` starting at the current position; refreshed by `next()`
data_ptr: &'a str,
// current position in `data`
// NOTE(review): `next()` advances this per *char* but slices `data` by
// *byte* — only consistent for pure-ASCII input; confirm/fix
idx: usize,
}
// TODO: refactor with iterator that returns Option(Token) where one token option is Eof (or a enum of Token(Token) and Eof, or just Option(Option(Token)))
impl Lexer<'_> {
    /// Create a lexer positioned at the start of `data`.
    fn new(data: &str) -> Lexer { Lexer { data, data_ptr: data, idx: 0 } }

    /// Lex and return the next token, skipping whitespace.
    ///
    /// Returns `Err(Fault::Eof)` at end of input and `Err(Fault::Invalid)`
    /// on an unrecognized character or unparseable number.
    ///
    /// `idx` is maintained as a BYTE offset into `data` (not a char count):
    /// the previous version counted chars but sliced by bytes, which made
    /// every call O(n) via `chars().nth(idx)` and panicked on multi-byte
    /// input such as "2×3" (slice not on a char boundary).
    fn next(&mut self) -> Result<Token, Fault> {
        match self.data[self.idx..].chars().next() {
            Some(val) => {
                debug!("lexing char '{}' at idx {}", val, self.idx);
                // advance by the char's encoded width so all subsequent
                // slices stay on char boundaries
                self.idx += val.len_utf8();
                self.data_ptr = &self.data[self.idx..];
                match val {
                    '+' => Ok(Token::Add),
                    '-' => Ok(Token::Subtract),
                    '×' | '*' => Ok(Token::Multiply),
                    '÷' | '/' => Ok(Token::Divide),
                    '^' => Ok(Token::Exponent),
                    '(' => Ok(Token::LParen),
                    ')' => Ok(Token::RParen),
                    _ if val.is_whitespace() => self.next(),
                    // TODO: parse - as part of number so I can do '1 + -1' and similar
                    _ if val.is_ascii_digit() => {
                        // byte offset of the digit we just consumed (1 byte:
                        // it is ASCII)
                        let start = self.idx - val.len_utf8();
                        // extend over the remaining digit run; ASCII digits
                        // are one byte each, so count == byte length
                        self.idx += self
                            .data_ptr
                            .chars()
                            .take_while(|c| c.is_ascii_digit())
                            .count();
                        self.data_ptr = &self.data[self.idx..];
                        match self.data[start..self.idx].parse() {
                            Ok(n) => Ok(Token::Number(n)),
                            Err(_) => Err(Fault::Invalid),
                        }
                    }
                    _ => {
                        debug!("got invalid char '{}'", val);
                        Err(Fault::Invalid)
                    }
                }
            }
            None => Err(Fault::Eof),
        }
    }

    // TODO: replace with iterator so I can do parser.parse(lexer.iter()) and parse does lex_iter.next() & such
    /// Lex the entire input: `Some(tokens)` on success, `None` if any
    /// character was invalid.
    fn lex_all(&mut self) -> Option<Vec<Token>> {
        let mut tokens: Vec<Token> = vec![];
        loop {
            match self.next() {
                Ok(tok) => tokens.push(tok),
                Err(Fault::Eof) => return Some(tokens),
                Err(Fault::Invalid) => return None,
            }
        }
    }
}
/// Recursive-descent parser (work in progress) that consumes tokens
/// straight from a `Lexer`.
struct Parser<'a> {
// token source; same lifetime as the query string being parsed
lex: Lexer<'a>,
}
impl Parser<'_> {
    /// Wrap a lexer so its token stream can be parsed.
    fn new(lex: Lexer) -> Parser { Parser { lex } }

    /// Parse the token stream into an expression tree.
    ///
    /// Not implemented yet: always returns `None`, matching `calculate`'s
    /// current "not a math query" behavior. (The declaration previously had
    /// no body at all, which does not compile.)
    fn parse(&mut self) -> Option<TokenTree> {
        // TODO: implement (see the parser video noted below)
        None
    }
}
// TODO: look at that parser video again
enum TokenTree {
Leaf(Token),
Thingy(TokenTree, TokenTree)
}