// forked from asklyphe-public/asklyphe
use tracing::{debug, error};
|
||
use once_cell::sync::Lazy;
|
||
|
||
/// Result of evaluating a calculator query.
///
/// Holds the equation as understood and its computed result, both as
/// strings ready for display.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Calculation {
    /// The equation that was evaluated.
    pub equation: String,
    /// The computed result.
    pub result: String,
}
pub fn calculate(query: &str) -> Option<Calculation> {
|
||
debug!("Got query {}", query);
|
||
let mut parser = Parser::new(Lexer::new(query));
|
||
debug!("Parse tree: {:?}", parser.parse());
|
||
// debug!("final token was: {:?}", lexer.next());
|
||
// debug!("Tokens: {:?}", lexer.lex_all());
|
||
None
|
||
}
|
||
|
||
// TODO: put into own crate with dependency astro-float = "0.9.2" so I can use more than f64
|
||
#[derive(Debug)]
|
||
enum Token {
|
||
Op(Op),
|
||
Atom(Atom),
|
||
/* Number(f64),
|
||
Func(Func),*/
|
||
}
|
||
|
||
#[derive(Debug)]
|
||
enum Op {
|
||
BinOp(BinOp),
|
||
Func(Func), // A function is an Op that takes whatever the next thing is and binds it, either the next number or whatever is in parens
|
||
}
|
||
|
||
/// Binary-operator tokens. Parentheses are included here for the lexer's
/// convenience even though they are not true binary operators.
#[derive(Debug)]
enum BinOp {
    Add,
    Subtract,
    Multiply,
    Divide,
    Exponent,
    /// `(`
    LParen,
    /// `)`
    RParen,
}
#[derive(Debug)]
|
||
enum Atom {
|
||
Number(f64), // TODO: use the unlimited precision floats library instead
|
||
Const(Const),
|
||
}
|
||
|
||
/// Built-in function tokens.
#[derive(Debug)]
enum Func {
    Sine,
    Cosine,
    Tangent,
    // inverse trig: sin-1, cos-1, tan-1
    ArcSine,
    ArcCosine,
    ArcTangent,
    Log2,
    Log10,
    LogN,
    Square,
    SquareRoot,
}
/// Named mathematical constants recognized by the lexer.
#[derive(Debug)]
enum Const {
    Pi,
    E,
}
/// Reasons the lexer can stop producing tokens.
#[derive(Debug)]
enum LexErr {
    /// End of input reached (normal termination).
    Eof,
    /// An unrecognized character was encountered.
    Invalid,
}
// this can probably be swapped out with a lexer generator like Logos if needed

/// Hand-rolled lexer over a borrowed input string.
struct Lexer<'a> {
    /// The full input.
    data: &'a str,
    /// Slice of `data` starting at the current cursor (kept in sync with `idx`).
    data_ptr: &'a str,
    /// Current cursor position into `data`.
    idx: usize,
}
// TODO: refactor with iterator that returns Option(Token) where one token option is Eof (or a enum of Token(Token) and Eof, or just Option(Option(Token)))
|
||
impl Lexer<'_> {
|
||
|
||
fn new(data: &str) -> Lexer<'_> { Lexer {data, data_ptr: data, idx: 0} }
|
||
|
||
fn next(&mut self) -> Result<Token, LexErr> {
|
||
match self.data.chars().nth(self.idx) {
|
||
Some(val) => {
|
||
debug!("lexing char '{}' at idx {}", val, self.idx);
|
||
// debug!("current char '{}'", self.data.chars().nth(0).unwrap());
|
||
self.idx += 1;
|
||
// TODO: make more efficient
|
||
self.data_ptr = &self.data[self.idx..];
|
||
match val {
|
||
'+' => Ok(Token::Op(Op::BinOp(BinOp::Add))),
|
||
'-' => Ok(Token::Op(Op::BinOp(BinOp::Subtract))),
|
||
'×' | '*' => Ok(Token::Op(Op::BinOp(BinOp::Multiply))),
|
||
'÷' | '/' => Ok(Token::Op(Op::BinOp(BinOp::Divide))),
|
||
'^' => Ok(Token::Op(Op::BinOp(BinOp::Exponent))),
|
||
'(' => Ok(Token::Op(Op::BinOp(BinOp::LParen))),
|
||
')' => Ok(Token::Op(Op::BinOp(BinOp::RParen))),
|
||
_ if val.is_whitespace() => self.next(),
|
||
// TODO: maybe parse '-' as part of number so I can do '1 + -1' and similar
|
||
_ if val.is_digit(10) => {
|
||
let start = self.idx - 1;
|
||
|
||
self.data_ptr.chars().take_while(|c| c.is_digit(10)).for_each(|_| self.idx += 1);//.next().unwrap_or(' ').is_digit(10) {self.idx += 1;}
|
||
|
||
match self.data[start..self.idx].parse() {
|
||
Ok(val) => Ok(Token::Atom(Atom::Number(val))),
|
||
Err(e) => Err(LexErr::Invalid),
|
||
}
|
||
},
|
||
_ => {
|
||
debug!("got invalid char '{}'", val);
|
||
Err(LexErr::Invalid)
|
||
}
|
||
}
|
||
}
|
||
None => Err(LexErr::Eof),
|
||
}
|
||
}
|
||
|
||
// TODO: replace with iterator so I can do parser.parse(lexer.iter()) and parse does lex_iter.next() & such
|
||
fn lex_all(&mut self) -> Option<Vec<Token>> {
|
||
let mut tokens: Vec<Token> = vec![];
|
||
loop {
|
||
match self.next() {
|
||
Err(LexErr::Eof) => return Some(tokens),
|
||
Err(LexErr::Invalid) => return None,
|
||
Ok(tok) => tokens.push(tok),
|
||
}
|
||
// debug!("tokens: {:?}", tokens);
|
||
}
|
||
}
|
||
}
|
||
|
||
struct Parser<'a> {
|
||
lex: Lexer<'a>,
|
||
}
|
||
|
||
/// Ways parsing can fail.
#[derive(Debug)]
enum ParseErr {
    /// Ran out of tokens before a complete expression was built.
    Eof,
    /// The token stream did not form a valid expression.
    Invalid,
    // TODO, add more or maybe just use an option instead
}
|
||
impl Parser<'_> {
|
||
fn new(lex: Lexer) -> Parser { Parser {lex} }
|
||
|
||
fn parse(&mut self) -> Option<Expr> {
|
||
self.parse_expr(0.0).ok()
|
||
}
|
||
|
||
fn parse_expr(&mut self, min_bp: f64) -> Result<Expr, ParseErr> {
|
||
while let Ok(val) = self.lex.next() {debug!("token: {:?}", val)}
|
||
match self.lex.next().err() {
|
||
|
||
_ => return Err(ParseErr::Invalid),
|
||
}
|
||
}
|
||
}
|
||
|
||
/*#[derive(Debug)]
|
||
enum Op {
|
||
Add(Expr, Expr),
|
||
Sub(Expr, Expr),
|
||
Mul(Expr, Expr),
|
||
Div(Expr, Expr),
|
||
Func(Func, Expr),
|
||
}*/
|
||
|
||
// TODO: look at that parser video again
|
||
#[derive(Debug)]
|
||
enum Expr {
|
||
Atom(Token),
|
||
Node(Op, Vec<Expr>),
|
||
}
|