Compare commits


40 commits

SHA1 Message Date
2eed8fc323 Support conversions between radians and degrees 2025-09-12 15:01:14 +12:00
a691732a81 Update error message when no ip address is found from NATS_ADDR="some_host:port" 2025-09-09 16:18:35 +12:00
c4cf11a5ef add todos 2025-09-07 13:13:46 +12:00
a8c86790fc add calculator to nojs (nojs seems to always 504 when I use it, see if I can fix that) 2025-09-07 12:25:01 +12:00
f955d63937 redisable bing & google services 2025-09-06 16:10:18 +12:00
5b1e215b77 updated cargo files 2025-09-06 16:07:57 +12:00
b101b5af51 Fix them rounding errors! 2025-09-06 16:07:09 +12:00
86c6c96909 Checkpoint 2 (seems to compile now) 2025-09-06 15:36:22 +12:00
cd493f8d32 Checkpoint 2025-09-06 15:08:40 +12:00
74b29455e5 Constants work and *finally* fix the off-by-one in lexing 2025-09-05 23:58:06 +12:00
47477732ba finish up lexing & do some parsing improvements (functions now require parens for multiple args :(, I'll see if I can change that later) 2025-09-05 23:32:21 +12:00
cbf35e9746 improve number lexing (temporarily breaking function lexing) 2025-09-05 22:26:06 +12:00
adf97d4b20 implement logic for logN 2025-09-05 21:56:46 +12:00
328ec0bfdc functions are no longer infix, and more 2025-09-05 21:50:22 +12:00
199bce4f90 simplify 2025-09-05 21:14:04 +12:00
9c9172d4c9 fix exponent precedence 2025-09-05 21:09:31 +12:00
4f6063bf72 WIP ui 2025-09-05 20:58:21 +12:00
b7b466b774 *Mostly* works, apart from 4 sqrt 4 being 1.4142135623730951 for some reason (messed up unary operators or something) 2025-09-05 20:40:49 +12:00
25644b9d4e Add get_val 2025-09-05 19:39:48 +12:00
849393fbd1 Parsing seems to work now! 2025-09-05 19:35:18 +12:00
c2f210c32c WIP pratt parsing 2025-09-05 16:45:57 +12:00
7a57c7bf1e Merge branch 'work' into calculator 2025-09-05 16:17:43 +12:00
fe539a637e Fix build 2025-09-05 16:11:11 +12:00
f11aa99038 Add protocol to proxy file format (may remove), misc changes 2025-09-05 15:44:29 +12:00
c39a425d48 Lock dockerfile cargo cache 2025-09-05 14:20:42 +12:00
430e0fce85 reenable bing & google services in dockerfile, fix bingservice crash when proxies file is empty 2025-09-04 21:35:45 +12:00
7fbe876c54 Finally, build caching 2025-09-04 20:49:25 +12:00
3ed54bf7e8 Fix docker compose & the authservice admin_count function 2025-09-04 19:15:57 +12:00
1370bc8a43 Docker compose setup 2025-09-04 15:53:53 +12:00
0725850ad0 WIP 2025-09-04 08:54:21 +12:00
de1d9931b1 initial calculator stuff 2025-06-22 12:10:40 +12:00
24067eca99 I was just writing the PR and realised that aliases is a better name than keys 2025-05-08 20:18:00 +12:00
3696d4cb6d spaces to follow at least a little code style :( 2025-05-08 20:12:03 +12:00
f06b84bf66 a little cleanup 2025-05-08 20:11:15 +12:00
43aae463e8 use a more sensible name 2025-05-08 19:57:38 +12:00
87458f30b6 bangs: fix redict including extra space sometimes 2025-05-08 19:40:35 +12:00
64a771f8cc make it work fully 2025-05-08 19:21:13 +12:00
bac21898c9 just make the bangs better 2025-05-08 18:40:38 +12:00
245744a317 properly format bang urls 2025-05-08 15:46:18 +12:00
109e20c7b4 initial mostly working thing 2025-05-08 15:03:00 +12:00
25 changed files with 1005 additions and 13 deletions

.gitignore (vendored, 7 lines changed)

@@ -1,2 +1,7 @@
.idea
/target
/target
# /nginx
database
database.bak
.env
proxies.txt

@@ -1,4 +1,4 @@
http://127.0.0.1:8001 {
http://127.0.0.1:1235 {
route /static/* {
uri strip_prefix /static
file_server {
@@ -6,10 +6,10 @@ http://127.0.0.1:8001 {
}
}
reverse_proxy 127.0.0.1:5843
reverse_proxy http://auth-frontend:5843
}
http://127.0.0.1:8002 {
http://127.0.0.1:1234 {
route /static/* {
uri strip_prefix /static
file_server {
@@ -17,5 +17,5 @@ http://127.0.0.1:8002 {
}
}
reverse_proxy 127.0.0.1:5842
reverse_proxy http://frontend:5842
}

Cargo.lock (generated, 1 line changed)

@@ -265,6 +265,7 @@ dependencies = [
"askama",
"askama_axum",
"asklyphe-common",
"astro-float",
"async-nats",
"axum",
"axum-extra",

Dockerfile.auth-frontend (Normal file, 19 lines changed)

@@ -0,0 +1,19 @@
FROM rust:1.89.0 AS builder
WORKDIR /usr/src/asklyphe/
COPY asklyphe-auth-frontend asklyphe-auth-frontend
COPY asklyphe-common asklyphe-common
COPY lyphedb lyphedb
RUN --mount=type=cache,target=$CARGO_HOME/registry,sharing=locked \
--mount=type=cache,target=/usr/src/asklyphe/asklyphe-auth-frontend/target \
--mount=type=cache,target=$CARGO_HOME/git/db,sharing=locked \
cargo install --debug --path asklyphe-auth-frontend/
FROM debian:trixie-slim
RUN apt-get update && apt-get upgrade -y && apt-get install -y libssl3 && rm -rf /var/lib/apt-get/lists/*
COPY --from=builder /usr/local/cargo/bin/asklyphe-auth-frontend /usr/local/bin/
COPY --from=builder /usr/src/asklyphe/asklyphe-auth-frontend/static /data/static
VOLUME /data
CMD ["asklyphe-auth-frontend"]

Dockerfile.authservice (Normal file, 24 lines changed)

@@ -0,0 +1,24 @@
FROM rust:1.89.0 AS builder
WORKDIR /usr/src/asklyphe/
COPY authservice authservice
COPY asklyphe-common asklyphe-common
COPY lyphedb lyphedb
RUN --mount=type=cache,target=$CARGO_HOME/registry,sharing=locked \
--mount=type=cache,target=/usr/src/asklyphe/authservice/target \
--mount=type=cache,target=$CARGO_HOME/git/db,sharing=locked \
cargo install --debug --path authservice/
RUN --mount=type=cache,target=$CARGO_HOME/registry,sharing=locked \
--mount=type=cache,target=/usr/src/asklyphe/authservice/migration/target \
--mount=type=cache,target=$CARGO_HOME/git/db,sharing=locked \
cargo install --debug --path authservice/migration/
FROM debian:trixie-slim
RUN apt-get update && apt-get upgrade -y && apt-get install -y libssl3 && rm -rf /var/lib/apt-get/lists/*
COPY --from=builder /usr/local/cargo/bin/authservice /usr/local/bin/
COPY --from=builder /usr/local/cargo/bin/migration /usr/local/bin/
CMD ["authservice"]

Dockerfile.bingservice (Normal file, 17 lines changed)

@@ -0,0 +1,17 @@
FROM rust:1.89.0 AS builder
WORKDIR /usr/src/asklyphe/
COPY bingservice bingservice
COPY asklyphe-common asklyphe-common
COPY lyphedb lyphedb
RUN --mount=type=cache,target=$CARGO_HOME/registry,sharing=locked \
--mount=type=cache,target=/usr/src/asklyphe/bingservice/target \
--mount=type=cache,target=$CARGO_HOME/git/db,sharing=locked \
cargo install --debug --path bingservice/
FROM debian:trixie-slim
RUN apt-get update && apt-get upgrade -y && apt-get install -y libssl3 && rm -rf /var/lib/apt-get/lists/*
COPY --from=builder /usr/local/cargo/bin/bingservice /usr/local/bin/
CMD ["bingservice"]

Dockerfile.frontend (Normal file, 20 lines changed)

@@ -0,0 +1,20 @@
FROM rust:1.89.0 AS builder
WORKDIR /usr/src/asklyphe/
COPY asklyphe-frontend asklyphe-frontend
COPY asklyphe-common asklyphe-common
COPY lyphedb lyphedb
COPY unit_converter unit_converter
RUN --mount=type=cache,target=$CARGO_HOME/registry,sharing=locked \
--mount=type=cache,target=/usr/src/asklyphe/asklyphe-frontend/target \
--mount=type=cache,target=$CARGO_HOME/git/db,sharing=locked \
cargo install --debug --path asklyphe-frontend/
FROM debian:trixie-slim
RUN apt-get update && apt-get upgrade -y && apt-get install -y libssl3 && rm -rf /var/lib/apt-get/lists/*
COPY --from=builder /usr/local/cargo/bin/asklyphe-frontend /usr/local/bin/
COPY --from=builder /usr/src/asklyphe/asklyphe-frontend/static /data/static
VOLUME /data
CMD ["asklyphe-frontend"]

Dockerfile.googleservice (Normal file, 17 lines changed)

@@ -0,0 +1,17 @@
FROM rust:1.89.0 AS builder
WORKDIR /usr/src/asklyphe/
COPY googleservice googleservice
COPY asklyphe-common asklyphe-common
COPY lyphedb lyphedb
RUN --mount=type=cache,target=$CARGO_HOME/registry,sharing=locked \
--mount=type=cache,target=/usr/src/asklyphe/googleservice/target \
--mount=type=cache,target=$CARGO_HOME/git/db,sharing=locked \
cargo install --debug --path googleservice/
FROM debian:trixie-slim
RUN apt-get update && apt-get upgrade -y && apt-get install -y libssl3 && rm -rf /var/lib/apt-get/lists/*
COPY --from=builder /usr/local/cargo/bin/googleservice /usr/local/bin/
CMD ["googleservice"]

Dockerfile.vorebot (Normal file, 17 lines changed)

@@ -0,0 +1,17 @@
FROM rust:1.89.0 AS builder
WORKDIR /usr/src/asklyphe/
COPY vorebot vorebot
COPY asklyphe-common asklyphe-common
COPY lyphedb lyphedb
RUN --mount=type=cache,target=$CARGO_HOME/registry,sharing=locked \
--mount=type=cache,target=/usr/src/asklyphe/vorebot/target \
--mount=type=cache,target=$CARGO_HOME/git/db,sharing=locked \
cargo install --debug --path vorebot/
FROM debian:trixie-slim
RUN apt-get update && apt-get upgrade -y && apt-get install -y libssl3 && rm -rf /var/lib/apt-get/lists/*
COPY --from=builder /usr/local/cargo/bin/vorebot /usr/local/bin/
CMD ["vorebot"]

@@ -17,7 +17,7 @@ mod login;
use std::{env, process};
use std::collections::HashMap;
use std::net::SocketAddr;
use std::net::{SocketAddr, ToSocketAddrs};
use std::ops::Deref;
use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};
@@ -66,7 +66,7 @@ async fn main() {
env_logger::init();
let opts = Opts {
bind_addr: env::var("BIND_ADDR").unwrap_or("0.0.0.0:5843".to_string()).parse().expect("Badly formed BIND_ADDR (Needs to be SocketAddr)"),
nats_addr: env::var("NATS_ADDR").unwrap_or("127.0.0.1:4222".to_string()).parse().expect("Badly formed NATS_ADDR (Needs to be SocketAddr)"),
nats_addr: env::var("NATS_ADDR").unwrap_or("127.0.0.1:4222".to_string()).to_socket_addrs().expect("Badly formed NATS_ADDR (Needs to be SocketAddr)").nth(0).expect("Unable to resolve DNS address of NATS_ADDR"),
nats_cert: env::var("NATS_CERT").expect("NATS_CERT needs to be set"),
nats_key: env::var("NATS_KEY").expect("NATS_KEY needs to be set"),
asklyphe_url: env::var("ASKLYPHE_URL").unwrap_or("https://asklyphe.com".to_string()),
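
The change above swaps parse() (which only accepts a literal IP:port) for ToSocketAddrs, so NATS_ADDR can hold a hostname such as some_host:4222 and be resolved through DNS. A minimal standalone sketch of that lookup, separate from the diff and using only the standard library (host names here are illustrative):

use std::net::{SocketAddr, ToSocketAddrs};

fn resolve_nats_addr(raw: &str) -> Option<SocketAddr> {
    // "host:port" goes through the system resolver; the first address wins,
    // mirroring the .nth(0) in the hunk above.
    raw.to_socket_addrs().ok()?.next()
}

fn main() {
    // An IP literal still resolves exactly as it did with parse().
    assert!(resolve_nats_addr("127.0.0.1:4222").is_some());
    // A hostname now works too, provided DNS can answer for it.
    println!("{:?}", resolve_nats_addr("localhost:4222"));
}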

@@ -39,4 +39,6 @@ url_encoded_data = "0.6.1"
strum = "0.27.1"
strum_macros = "0.27.1"
astro-float = "0.9.2"
env_logger = "*"

@@ -0,0 +1,106 @@
use tracing::{debug, error};
use once_cell::sync::Lazy;
use std::collections::BTreeMap;
use url_encoded_data;
pub static BANG_PREFIX: &str = "!";
#[derive(Debug)]
struct Bang<'a> {
pub url: &'a str,
pub aliases: &'a [&'a str]
}
impl<'a> Bang<'_> {
fn new(url: &'a str, aliases: &'a [&'a str]) -> Bang<'a> {
Bang {url, aliases}
}
}
static BUILTIN_BANGS: Lazy<BTreeMap<&str, Bang>> = Lazy::new(|| {
let mut bangs = BTreeMap::new();
bangs.insert("Google", Bang::new("https://google.com/search?q={}", &["g", "google"] as &[&str]));
bangs.insert("DuckDuckGo", Bang::new("https://duckduckgo.com/?q={}", &["d", "ddg", "duckduckgo"] as &[&str]));
bangs.insert("Wikipedia", Bang::new("https://wikipedia.org/w/index.php?search={}", &["w", "wiki", "wikipedia"] as &[&str]));
bangs
});
#[derive(Debug, Clone)]
struct BangLoc<'b> {
pub url: &'b str,
pub start_idx: usize,
pub len: usize
}
impl<'b> BangLoc<'_> {
fn new(url: &'b str, start_idx: usize, len: usize) -> BangLoc<'b> {
BangLoc {url, start_idx, len}
}
}
pub fn redirect_bang(query: &String) -> Option<String> {
if !query.contains(BANG_PREFIX) {
return None;
}
let bangs = query.match_indices(BANG_PREFIX).filter(|(bang_start_idx, _)| {
if *bang_start_idx == 0 || query.chars().nth(*bang_start_idx - 1).unwrap().is_whitespace() {
true
} else {
false
}
}).map(|(bang_start_idx, _)| {
let rest = query.get(bang_start_idx + 1..query.len()).unwrap();
BUILTIN_BANGS.iter().map(|(_, bang)| {
let alias = bang.aliases.iter()
.filter(|alias| rest.starts_with(**alias))
.filter(
|alias| rest.chars()
.nth(alias.len())
.unwrap_or(' ')
.is_whitespace())
.max_by(|a, b| a.len().cmp(&b.len()))?;
Some(BangLoc::new(bang.url, bang_start_idx, alias.len()))
}).filter(|bang| bang.is_some()).map(|bang| bang.unwrap()).next()
}).filter(|bang| bang.is_some())
.map(|bang| bang.unwrap())
.collect::<Vec<_>>();
let bang = bangs.first()?;
let end_idx = {
let mut end_idx = bang.start_idx + 1 + bang.len;
if end_idx < query.len() {
end_idx += 1;
}
end_idx
};
let start_idx = if end_idx == query.len() && bang.start_idx > 0 {
bang.start_idx - 1
} else {
bang.start_idx
};
let query_split = query.split_once(query.get(start_idx..end_idx).unwrap()).unwrap();
let query_trimmed = format!("{}{}", query_split.0, query_split.1);
// A hack to get URL escaping without using a proper URL layout, hopefully has no other issues apart from prepending '=' to the string
let query_encoded = url_encoded_data::stringify(&[("", query_trimmed.as_str())]);
let query_encoded = query_encoded.get(1..query_encoded.len()).unwrap().to_owned();
let bang_url_split = bang.url.split_once("{}").unwrap();
let bang_url = format!(
"{}{}{}",
bang_url_split.0,
query_encoded,
bang_url_split.1
);
Some(bang_url)
}
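
The redirect_bang function above scans the query for "!" immediately followed by a known alias, strips that token, URL-encodes the rest of the query, and splices it into the matching bang's template URL. A deliberately tiny standalone sketch of the same idea (not the module above; it skips mid-query matching and real URL encoding):

use std::collections::BTreeMap;

fn redirect(query: &str) -> Option<String> {
    // Alias -> template table, mirroring BUILTIN_BANGS.
    let mut aliases = BTreeMap::new();
    aliases.insert("w", "https://wikipedia.org/w/index.php?search={}");
    aliases.insert("g", "https://google.com/search?q={}");
    aliases.insert("d", "https://duckduckgo.com/?q={}");
    let bang = query.split_whitespace().find(|w| w.starts_with('!'))?;
    let template = aliases.get(&bang[1..])?;
    let rest: Vec<&str> = query.split_whitespace().filter(|w| w != &bang).collect();
    // Crude stand-in for url_encoded_data: only spaces are escaped here.
    let encoded = rest.join(" ").replace(' ', "%20");
    Some(template.replace("{}", &encoded))
}

fn main() {
    assert_eq!(
        redirect("!w rust").as_deref(),
        Some("https://wikipedia.org/w/index.php?search=rust")
    );
}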

@@ -14,11 +14,13 @@
pub mod searchbot;
pub mod wikipedia;
pub mod unit_converter;
pub mod bangs;
pub mod math;
pub mod routes;
use std::{env, process};
use std::collections::HashMap;
use std::net::SocketAddr;
use std::net::{SocketAddr, ToSocketAddrs};
use std::ops::Deref;
use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};
@@ -84,7 +86,7 @@ async fn main() {
env_logger::init();
let opts = Opts {
bind_addr: env::var("BIND_ADDR").unwrap_or("0.0.0.0:5842".to_string()).parse().expect("Badly formed BIND_ADDR (Needs to be SocketAddr)"),
nats_addr: env::var("NATS_ADDR").unwrap_or("127.0.0.1:4222".to_string()).parse().expect("Badly formed NATS_ADDR (Needs to be SocketAddr)"),
nats_addr: env::var("NATS_ADDR").unwrap_or("127.0.0.1:4222".to_string()).to_socket_addrs().expect("Badly formed NATS_ADDR (Needs to be SocketAddr)").nth(0).expect("Unable to resolve DNS address of NATS_ADDR"),
nats_cert: env::var("NATS_CERT").expect("NATS_CERT needs to be set"),
nats_key: env::var("NATS_KEY").expect("NATS_KEY needs to be set"),
auth_url: env::var("AUTH_URL").unwrap_or("https://auth.asklyphe.com".to_string()),

@@ -0,0 +1,456 @@
use tracing::{debug, error};
use once_cell::sync::Lazy;
use astro_float::{BigFloat, Sign, RoundingMode, Consts};
use std::str::FromStr;
use std::sync::{Arc, Mutex};
use std::mem;
pub const PRECISION: usize = 2048;
static CONST_CACHE: Lazy<Arc<Mutex<Consts>>> = Lazy::new(|| Arc::new(Mutex::new(Consts::new().expect("Unable to allocate memory for Conts cache"))));
// static PI: Lazy<BigFloat> = Lazy::new(|| BigFloat::from_str("3.141592653589793238462643383279").unwrap());
// static E: Lazy<BigFloat> = Lazy::new(|| BigFloat::from_str("2.718281828459045235360287471352").unwrap());
#[derive(Debug)]
pub struct Calculation {
pub equation: String,
pub result: String,
}
// TODO: handle partial match in query where <words> (valid equation) <words> gets parsed
// TODO: have some option to switch between degrees and radians in the settings/search query params (or maybe have a function that converts between them instead?)
pub fn calculate(query: &str) -> Option<Calculation> {
debug!("Got query {}", query);
let mut parser = Parser::new(Lexer::new(query));
let mut tree = parser.parse()?;
let res = tree.eval();
let res_float = f64::from_str(&format!("{}", res)).unwrap();
debug!("Calculation: {}", query);
debug!("Tree: {:?}", tree);
debug!("Result: {:?}", res_float);
Some(Calculation {equation: query.to_string(), result: res_float.to_string()})
}
// TODO: put into own crate with dependency astro-float = "0.9.2" so I can use more than BigFloat
#[derive(Debug, Clone, PartialEq)]
enum Token {
Op(Op),
Atom(Atom),
/* Number(BigFloat),
Func(Func),*/
}
#[derive(Debug, Copy, Clone, PartialEq)]
enum Op {
Add,
Subtract,
Multiply,
Divide,
Exponent,
LParen,
RParen,
Func(Func), // A function is an Op that takes whatever the next thing is and binds it, either the next number or whatever is in parens
}
impl Op {
fn bp_infix(&self) -> Option<(f64, f64)> {
match self {
// Op::LParen => Some(0.0),
// Op::RParen => Some(0.0),
Op::Add => Some((1.0, 1.1)),
Op::Subtract => Some((1.0, 1.1)),
Op::Multiply => Some((2.0, 2.1)),
Op::Divide => Some((2.0, 2.1)),
Op::Exponent => Some((3.1, 3.0)),
_ => None,
// Op::Func(_) => 0.0, // TODO: decide if this is a good LBP
}
}
fn bp_prefix(&self) -> Option<f64> {
match self {
Op::Func(_) => Some(6.0),
Op::Subtract => Some(5.0),
Op::Add => Some(5.0),
_ => None,
}
}
fn apply_to(&self, args: &mut Vec<Expr>) -> BigFloat {
match args.len() {
1 => match self {
Op::Subtract => {
let mut res = args[0].eval();
res.set_sign(Sign::Neg);
res
},
Op::Add => {
let mut res = args[0].eval();
res.set_sign(Sign::Pos);
res
}
Op::Func(f) => match f {
Func::Sine => args[0].eval().sin(PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Func::Cosine => args[0].eval().cos(PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Func::Tangent => args[0].eval().tan(PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Func::ArcSine => args[0].eval().asin(PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Func::ArcCosine => args[0].eval().acos(PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Func::ArcTangent => args[0].eval().atan(PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Func::Log2 => args[0].eval().log2(PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Func::Log10 => args[0].eval().log10(PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Func::LogN => args[0].eval().ln(PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Func::Square => args[0].eval().pow(&BigFloat::from_f64(2.0, PRECISION), PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Func::SquareRoot => args[0].eval().sqrt(PRECISION, RoundingMode::None),
Func::Abs => args[0].eval().abs(),
Func::Deg => args[0].eval().mul(&Const::Pi.get_val().div(&BigFloat::from_f64(180.0, PRECISION), PRECISION, RoundingMode::None), PRECISION, RoundingMode::None),
Func::Rad => args[0].eval().mul(&BigFloat::from_f64(180.0, PRECISION).div(&Const::Pi.get_val(), PRECISION, RoundingMode::None), PRECISION, RoundingMode::None),
_ => {
error!("Got 1 params for func {:?} which expects 2 (should not be possible)", self);
astro_float::NAN
},
},
_ => {
error!("Got 1 params for {:?} which expects 2 (should not be possible)", self);
astro_float::NAN
},
}
2 => match self {
Op::LParen => args[0].eval(),
Op::RParen => args[0].eval(),
Op::Add => args[0].eval().add(&mut args[1].eval(), PRECISION, RoundingMode::None),
Op::Subtract => args[0].eval().sub(&mut args[1].eval(), PRECISION, RoundingMode::None),
Op::Multiply => args[0].eval().mul(&mut args[1].eval(), PRECISION, RoundingMode::None),
Op::Divide => args[0].eval().div(&mut args[1].eval(), PRECISION, RoundingMode::None),
Op::Exponent => args[0].eval().pow(&mut args[1].eval(), PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
Op::Func(Func::Log) => args[0].eval().log(&mut args[1].eval(), PRECISION, RoundingMode::None, &mut CONST_CACHE.lock().unwrap()),
_ => {
error!("Got 2 params for {:?} which only expects 1 (should not be possible)", self);
astro_float::NAN
},
}
_ => {
error!("Unexpected number of params ({}) for {:?} (should not be possible)", args.len(), self);
astro_float::NAN
},
}
}
}
#[derive(Debug, Clone, PartialEq)]
enum Atom {
Number(BigFloat),
Const(Const),
}
/*impl Atom {
fn get_val(&self) -> BigFloat {
match self {
Atom::Number(val) => *val,
Atom::Const(c) => match c {
Const::Pi => CONST_CACHE.lock().unwrap().pi(PRECISION, RoundingMode::None),
Const::E => CONST_CACHE.lock().unwrap().e(PRECISION, RoundingMode::None),
Const::Inf => astro_float::INF_POS,
Const::Nan => astro_float::NAN,
}
}
}
}*/
impl Const {
fn get_val(&self) -> BigFloat {
match self {
Const::Pi => CONST_CACHE.lock().unwrap().pi(PRECISION, RoundingMode::None),
Const::E => CONST_CACHE.lock().unwrap().e(PRECISION, RoundingMode::None),
Const::Inf => astro_float::INF_POS,
Const::Nan => astro_float::NAN,
}
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
enum Func {
Sine,
Cosine,
Tangent,
// sin-1, cos-1, tan-1
ArcSine,
ArcCosine,
ArcTangent,
Log2,
Log10,
LogN,
Log,
Square,
SquareRoot,
Abs,
Deg,
Rad,
}
impl Func {
fn names() -> &'static [(Func, &'static [&'static str])] {
&[
(Func::Sine, &["sin", "sine"]),
(Func::Cosine, &["cos", "cosine"]),
(Func::Tangent, &["tan", "tangent"]),
(Func::ArcSine, &["asin", "asine", "arcsin", "arcsine"]),
(Func::ArcCosine, &["acos", "acosine", "arccos", "arccosine"]),
(Func::ArcTangent, &["atan", "atangent", "arctan", "arctangent"]),
(Func::Log2, &["log2"]),
(Func::Log10, &["log10"]),
(Func::LogN, &["ln", "logn"]),
(Func::Log, &["log"]),
(Func::Square, &["square", "squared"]),
(Func::SquareRoot, &["sqrt", "squareroot", ""]),
(Func::Abs, &["abs", "absolute"]),
(Func::Deg, &["deg", "degrees", "deg2rad"]),
(Func::Rad, &["rad", "radians", "rad2deg"]),
]
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
enum Const {
Pi,
E,
Inf,
Nan,
}
impl Const {
fn names() -> &'static [(Const, &'static [&'static str])] {
&[
(Const::Pi, &["pi", "PI", "π"]),
(Const::E, &["e", "euler"]),
(Const::Inf, &["inf", "infinity", ""]),
(Const::Nan, &["nan", "NaN"])
]
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
enum ParseErr {
Eof,
Invalid,
}
// this can probably be swapped out with a lexer generator like Logos if needed
struct Lexer<'a> {
data: &'a str,
data_ptr: &'a str,
// idx: usize,
next_by: usize,
next_tok: Result<Token, ParseErr>,
}
// TODO: refactor with iterator that returns Option(Token) where one token option is Eof (or a enum of Token(Token) and Eof, or just Option(Option(Token)))
impl Lexer<'_> {
fn new(data: &str) -> Lexer<'_> {
let mut n: Lexer = Lexer {data, data_ptr: data, next_by: 0, next_tok: Err(ParseErr::Eof)};
n.next();
debug!("New finished!");
n
}
fn _next(&mut self) -> Result<Token, ParseErr> {
self.data_ptr = &self.data_ptr[self.next_by..];
match self.data_ptr.chars().nth(0) {
Some(val) => {
debug!("lexing char '{}' at idx {}", val, self.data.chars().count() - self.data_ptr.chars().count());
self.next_by = 1;
match val {
'+' => Ok(Token::Op(Op::Add)),
'-' => Ok(Token::Op(Op::Subtract)),
'×' | '*' => Ok(Token::Op(Op::Multiply)),
'÷' | '/' => Ok(Token::Op(Op::Divide)),
'^' => Ok(Token::Op(Op::Exponent)),
'(' => Ok(Token::Op(Op::LParen)),
')' => Ok(Token::Op(Op::RParen)),
_ if val.is_whitespace() => self._next(),
_ if val.is_digit(10) => {
let mut len: usize = 0;
self.data_ptr.chars().take_while(|c| c.is_digit(10) || *c == '.').for_each(|_| len += 1);
self.next_by = len;
match self.data_ptr[..len].parse() {
Ok(val) => Ok(Token::Atom(Atom::Number(val))),
Err(e) => Err(ParseErr::Invalid),
}
},
_ => {
let len = self.data_ptr.chars().count();
for (f, names) in Func::names() {
for name in *names {
let n_len = name.chars().count();
if self.data_ptr.starts_with(name) && (len == n_len || !self.data_ptr.chars().nth(n_len).unwrap().is_alphanumeric()) {
self.next_by = n_len;
return Ok(Token::Op(Op::Func(*f)));
}
}
}
for (f, names) in Const::names() {
for name in *names {
let n_len = name.chars().count();
if self.data_ptr.starts_with(name) && (len == n_len || !self.data_ptr.chars().nth(n_len).unwrap().is_alphanumeric()) {
self.next_by = n_len;
return Ok(Token::Atom(Atom::Const(*f)));
}
}
}
debug!("got invalid char '{}'", val);
Err(ParseErr::Invalid)
}
}
}
None => {
self.next_by = 0;
Err(ParseErr::Eof)
},
}
}
fn next(&mut self) -> Result<Token, ParseErr> {
let res = self._next();
let val = mem::replace(&mut self.next_tok, res);
// self.next_tok = self._next();
val
}
fn peek(&mut self) -> &Result<Token, ParseErr> {
&self.next_tok
}
// TODO: replace with iterator so I can do parser.parse(lexer.iter()) and parse does lex_iter.next() & such
fn lex_all(&mut self) -> Option<Vec<Token>> {
let mut tokens: Vec<Token> = vec![];
loop {
match self.next() {
Err(ParseErr::Eof) => return Some(tokens),
Err(ParseErr::Invalid) => return None,
Ok(tok) => tokens.push(tok),
}
// debug!("tokens: {:?}", tokens);
}
}
}
fn matches(s: &str, check: &str) -> usize {
// debug!("s: \"{}\", check: \"{}\"c_len: {}, s_len: {}, s[c_len]: {:?}, s[c_len + 1]: {:?}", s, check, check.chars().count(), s.chars().count(), s.chars().nth(check.chars().count()), s.chars().nth(check.chars().count() + 1));
match (s.chars().count(), check.chars().count()) {
(s_len, c_len) if s_len < c_len => 0,
(s_len, c_len) if s_len == c_len && s == check => c_len - 1,
(s_len, c_len) if s_len > c_len && s.starts_with(check) && s.chars().nth(c_len).unwrap().is_whitespace() => c_len,
(_, _) => 0,
}
}
struct Parser<'a> {
lex: Lexer<'a>,
}
impl Parser<'_> {
fn new(lex: Lexer) -> Parser { Parser {lex} }
fn parse(&mut self) -> Option<Expr> {
self.parse_expr(0.0).ok()
}
fn parse_expr(&mut self, min_bp: f64) -> Result<Expr, ParseErr> {
/*while let Ok(val) = self.lex.next() {debug!("token: {:?}", val)}
match self.lex.next().err() {
_ => return Err(ParseErr::Invalid),
}*/
let mut lhs: Expr = match self.lex.next() {
Ok(val) => match val {
Token::Atom(val) => Ok(Expr::Atom(val)),
Token::Op(op) => match op {
Op::LParen => {
let val = self.parse_expr(0.0);
if self.lex.next() != Ok(Token::Op(Op::RParen)) {
debug!("Unclosed parens");
Err(ParseErr::Invalid)
}
else {
val
}
},
// Op::Func(f) => Ok(Expr::Node(Op::Func(f), vec![self.parse_expr(op.get_lbp())?])),
_ => match op.bp_prefix() {
Some(bp) => Ok(Expr::Node(op, vec![self.parse_expr(bp)?])),
None => {debug!("Got unexpected {:?} as prefix", op); Err(ParseErr::Invalid)}
}
},
},
Err(err) => Err(err),
}.map_err(|err| { debug!("Unexpected error at start of expr: {:?}", err); err })?;
debug!("lhs of expression is {:?}, min_bp is {}", lhs, min_bp);
loop {
debug!("loop start");
let op: Op = match self.lex.peek() {
Err(ParseErr::Eof) => break,
Err(e) => { debug!("In expr got err {:?}", e); Err(*e) },
Ok(tok) => match tok {
Token::Op(op) => match op {
Op::RParen => {
debug!("got RParen");
break;
},
_ => Ok(*op),
}
v => { debug!("Got unexpected token {:?}", v); Err(ParseErr::Invalid) },
}
}.map_err(|err| { debug!("Unexpected error inside expr at {:?}", err); err })?;
debug!("op is {:?}", op);
if let Some((lbp, rbp)) = op.bp_infix() {
if (lbp < min_bp) { break; }
self.lex.next();
let rhs: Expr = self.parse_expr(rbp)?;
lhs = Expr::Node(op, vec![lhs, rhs]);
} else {
debug!("Got unexpected non-infix operator in expression: {:?}", op);
return Err(ParseErr::Invalid);
}
}
debug!("Returning expr {:?}", lhs);
Ok(lhs)
}
}
#[derive(Debug)]
enum Expr {
Evaluated,
Atom(Atom),
Node(Op, Vec<Expr>),
}
impl Expr {
fn eval(&mut self) -> BigFloat {
let res = match self {
Expr::Atom(_) => {
let v = mem::replace(self, Expr::Evaluated);
if let Expr::Atom(at) = v {
match at {
Atom::Number(n) => n,
Atom::Const(c) => c.get_val(),
}
} else {
unreachable!();
}
// at.get_val()
}
Expr::Node(op, exprs) => {
*self = Expr::Atom(Atom::Number(op.apply_to(exprs)));
self.eval()
}
Expr::Evaluated => unreachable!("Tried to evaluate an already evaluated node"),
};
// debug!("{:?} evaluated to {}", self, res);
res
}
}
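
For the Deg and Rad functions above: Deg multiplies its argument by pi/180 (degrees to radians, matching its deg2rad alias) and Rad multiplies by 180/pi (radians to degrees). A quick cross-check of those formulas with plain f64, standard library only and separate from the module itself:

fn main() {
    let degrees = 90.0_f64;
    let radians = degrees.to_radians(); // degrees * pi / 180
    assert!((radians - std::f64::consts::FRAC_PI_2).abs() < 1e-12);
    assert!((radians.to_degrees() - degrees).abs() < 1e-12); // radians * 180 / pi
}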

@@ -18,6 +18,9 @@ use crate::unit_converter;
use crate::unit_converter::UnitConversion;
use crate::wikipedia::WikipediaSummary;
use crate::{wikipedia, Opts, ALPHA, BUILT_ON, GIT_COMMIT, VERSION, YEAR};
use crate::bangs;
use crate::math;
use crate::math::Calculation;
use askama::Template;
use asklyphe_common::nats;
use asklyphe_common::nats::bingservice::{
@@ -68,6 +71,7 @@ pub struct Complications {
disabled: bool,
wikipedia: Option<WikipediaSummary>,
unit_converter: Option<UnitConversion>,
math: Option<Calculation>,
}
pub async fn search(
@@ -170,6 +174,16 @@ pub async fn search_js(
unit_query = unit_query.replace("metre", "meter");
let unit_comp = unit_converter::convert_unit(&unit_query);
complications.unit_converter = unit_comp;
let bang_redirect = bangs::redirect_bang(&query);
if let Some(redirect) = bang_redirect {
return Redirect::to(&redirect).into_response();
}
let mut calc_query = query.clone().to_lowercase();
calc_query = calc_query.replace("calculate", "").replace("what is", "");
let math = math::calculate(&calc_query);
complications.math = math;
} else {
complications.disabled = true;
query = query.replace("-complications", "");
@@ -282,6 +296,16 @@ pub async fn search_nojs(
unit_query = unit_query.replace("metre", "meter");
let unit_comp = unit_converter::convert_unit(&unit_query);
complications.unit_converter = unit_comp;
let bang_redirect = bangs::redirect_bang(&query);
if let Some(redirect) = bang_redirect {
return Redirect::to(&redirect).into_response();
}
let mut calc_query = query.clone().to_lowercase();
calc_query = calc_query.replace("calculate", "").replace("what is", "");
let math = math::calculate(&calc_query);
complications.math = math;
} else {
complications.disabled = true;
query = query.replace("-complications", "");

@@ -75,6 +75,13 @@
</div>
{% when None %}
{% endmatch %}
{% match complications.math %}
{% when Some with (math) %}
<div class="calculator-complication">
<h1>{{ math.equation }} = {{ math.result }}</h1>
</div>
{% when None %}
{% endmatch %}
</div>
<ol class="search-result-list">
{% for result in search_results %}

@@ -67,6 +67,13 @@
</div>
{% when None %}
{% endmatch %}
{% match complications.math %}
{% when Some with (math) %}
<div class="calculator-complication">
<h1>{{ math.equation }} = {{ math.result }}</h1>
</div>
{% when None %}
{% endmatch %}
</div>
</div>
{% endmatch %}

@@ -458,7 +458,7 @@ pub async fn user_count(db: &DatabaseConnection) -> Result<usize, FetchUserError>
pub async fn admin_count(db: &DatabaseConnection) -> Result<usize, FetchUserError> {
// dont fucking touch this, i don't know why it works but it does, it's actually evil
// note: doesn't work
Ok(user::Entity::find().filter(user::Column::Flags.into_expr().binary(BinOper::LShift, Expr::value(63 - 2)).lt(1 << (63 - 2)))
Ok(user::Entity::find().filter(user::Column::Flags.into_expr().binary(BinOper::BitAnd, UserFlag::Administrator as i64).binary(BinOper::NotEqual, 0))
.count(db).await.map_err(|e| {
error!("DATABASE ERROR WHILE ADMINCOUNT: {e}");
FetchUserError::DatabaseError
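
The replacement filter is a plain bitmask test: a user counts as an administrator when the Administrator bit is set in the flags column, instead of the previous left-shift comparison. A minimal sketch of that predicate (the bit position below is made up for illustration; the real value comes from UserFlag::Administrator):

const ADMINISTRATOR: i64 = 1 << 3; // hypothetical bit position

fn is_admin(flags: i64) -> bool {
    // Same shape as the new query filter: Flags & Administrator != 0.
    flags & ADMINISTRATOR != 0
}

fn main() {
    assert!(is_admin(ADMINISTRATOR | 0b1));
    assert!(!is_admin(0b1));
}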

@@ -34,7 +34,7 @@ pub static PROXIES: Lazy<Vec<Proxy>> = Lazy::new(|| {
let contents = std::fs::read_to_string(proxy_file);
let mut proxies = vec![];
for line in contents.expect("FAILED TO READ FILE").lines() {
for line in contents.expect("FAILED TO READ FILE").lines().filter(|l| l.len() > 0) {
proxies.push(Proxy::from_str(line).expect("INVALID PROXY"));
}

@@ -30,6 +30,7 @@ impl FromStr for Proxy {
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split(':');
let protocol = parts.next().ok_or(ProxyError::InvalidProxyFormat)?;
let host = parts.next().ok_or(ProxyError::InvalidProxyFormat)?;
let port = parts.next().ok_or(ProxyError::InvalidProxyFormat)?;
let auth = if let Some(user) = parts.next() {
@@ -39,8 +40,8 @@ impl FromStr for Proxy {
None
};
Ok(Proxy {
address: format!("{}:{}", host, port),
address: format!("{}://{}:{}", protocol, host, port),
credentials: auth,
})
}
}
}
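
With the added protocol field, a proxies.txt line is now protocol:host:port, optionally followed by credential fields (the hunk shows a user part; the rest falls outside this diff), and the stored address becomes protocol://host:port. A small standalone sketch of that parse (not the crate's Proxy type; names and the user:pass layout are illustrative):

fn parse_proxy(line: &str) -> Option<(String, Option<(String, String)>)> {
    let mut parts = line.split(':');
    let protocol = parts.next()?;
    let host = parts.next()?;
    let port = parts.next()?;
    // Optional trailing user:pass credentials (assumed layout).
    let credentials = match (parts.next(), parts.next()) {
        (Some(user), Some(pass)) => Some((user.to_string(), pass.to_string())),
        _ => None,
    };
    Some((format!("{}://{}:{}", protocol, host, port), credentials))
}

fn main() {
    assert_eq!(
        parse_proxy("socks5:127.0.0.1:1080"),
        Some(("socks5://127.0.0.1:1080".to_string(), None))
    );
}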

cert.pem (Normal file, 34 lines changed)

@@ -0,0 +1,34 @@
-----BEGIN CERTIFICATE-----
MIIF7zCCA9egAwIBAgIUb46GxLSqbrjV/nlD+ovwlYcyzOcwDQYJKoZIhvcNAQEL
BQAwgYYxCzAJBgNVBAYTAlhYMRIwEAYDVQQIDAlTdGF0ZU5hbWUxETAPBgNVBAcM
CENpdHlOYW1lMRQwEgYDVQQKDAtDb21wYW55TmFtZTEbMBkGA1UECwwSQ29tcGFu
eVNlY3Rpb25OYW1lMR0wGwYDVQQDDBRDb21tb25OYW1lT3JIb3N0bmFtZTAeFw0y
NTA2MjEwNTA1NTlaFw0zNTA2MTkwNTA1NTlaMIGGMQswCQYDVQQGEwJYWDESMBAG
A1UECAwJU3RhdGVOYW1lMREwDwYDVQQHDAhDaXR5TmFtZTEUMBIGA1UECgwLQ29t
cGFueU5hbWUxGzAZBgNVBAsMEkNvbXBhbnlTZWN0aW9uTmFtZTEdMBsGA1UEAwwU
Q29tbW9uTmFtZU9ySG9zdG5hbWUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
AoICAQC28URbBWcTpvOar679u4CwsAHQ+i+9iPBvjRG/ShdvXkAgWm+t/BvKi9JG
FOAn49IKOpcteY15qvDFRPDKk8YWoiwdMQSKRNEwEow3YlIs6xX94+PNdwsjaqy/
mhJTMh0xrElZJ5+B4mDXQHOzdS6fe0SlNhqEAkFaIuUNX1NAks7yRnkC5LGkSHHj
gD2ZThwyZ+cstvT7WEUN9uMz/FfLuQQLrVZDydE9tsoQo0CIl1l0NLiE0BN5RIwi
i6Gkao74jlxh6tXv7XcOTxZ1aV3F92qMKN1NtWFEqpC2PDdfLG5iAlwamKguD24N
RMDC9CGCiutE4lRhRQWkC89NSxOkG25MGRvK0jut7MiKOia/Xk5uJI2Rid9IWFKv
xnuT5AW9PCbjM0OSkw2G0PzthUAO1jrOyA2R50oh/YGsdslELhlpZSiu/StSx+0U
x0E9dcQHvnlllU5BrYXbDkoSCiejhD4xV35KmhIwtz7pr2cajfeJ5r7Em/hSBVbS
Zqbv5OmGgxTSSDLUTaLJA015vLnLNCV5al/iGzXKl1FOwTIzRLv+og/jK70rwOGX
Red2JnKntqfBEnR51gky9axyyz3dAMEE1rCc+oOv7ycZoEKwPdXiyneOCLT40QT6
No1UrMJCa32a4+YJgbANB8igFwwhdapD5N+qvpCWtiKsdnbPeQIDAQABo1MwUTAd
BgNVHQ4EFgQUq3zHWtMlNawKBIPiOECsuPWTB7IwHwYDVR0jBBgwFoAUq3zHWtMl
NawKBIPiOECsuPWTB7IwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC
AgEAtoERmowTDvnZFV4ETGmj3mPXNLXaqSUDRAB5ol9N4NlRU/vFjI70sYV7uco9
628Wc5SLbA66wSmpnq5t1MeTjjra6o6bj5wOKixo1gmpUq7Ru1JmVWsNIo/Zv4ew
rmCxuapHL0U9R0P1qpJe0tYzSMFikgkFmHyL86HcuW8B/Ps3bGaGlamqmIK3HlP4
Ib+sOtInNZ1Td8eYTIYO5Nk5ko/4rjZrv9+3aZdjde1z/M2VduGGH8TCExD55Rbv
+UL8fGIEkfSNTeiA5SAN3dfqcra703mfOJfjJeoubfk8KowfGb/bVKv8Z8rkEDOj
so+sOgbq1xnSaQov7WRqYZ0yKZA0u8Arc0L8CX/rwgwoBkQafySEI/32Mqt0R4/w
MkmGZLSFTcIFrQVE+wBHTilQ1PfUmAA6kh7ks7SGwlc6KxTAtYZHWklCqib0efaJ
AbODBc97vLrR5qoH0VFSGLnjDVEYHb6TREqsCZR+9EP/JcsTRJ8RTeDVg8RnN2a6
uy01L7A3d1xnXPux45fpwgGTOEig2sD0BTHZW/bl53xQr8gJvwyr78cIVmycT/6N
K0AmYBPQWZLf6rxtommjMgf2DtvhPm6VrbHV7epk8cw8tOVRPD5uLjZzKxgFoZez
ZYNjSUse3ChC7l4FhjmTiI5DWOrS/qYbWYi9rzvG6QZwHss=
-----END CERTIFICATE-----

docker-compose.yml (Normal file, 129 lines changed)

@@ -0,0 +1,129 @@
services:
auth-frontend:
restart: unless-stopped
networks:
- lyphenet
env_file: ".env"
depends_on:
- nats
- authservice
build:
dockerfile: Dockerfile.auth-frontend
volumes:
- auth_frontend_data:/data
image: asklyphe/auth-frontend
authservice:
restart: unless-stopped
networks:
- lyphenet
env_file: ".env"
depends_on:
- nats
build:
dockerfile: Dockerfile.authservice
image: asklyphe/authservice
frontend:
restart: unless-stopped
networks:
- lyphenet
env_file: ".env"
depends_on:
- nats
build:
dockerfile: Dockerfile.frontend
volumes:
- frontend_data:/data
image: asklyphe/frontend
# vorebot:
# restart: unless-stopped
# networks:
# - lyphenet
# - outer
# env_file: ".env"
# depends_on:
# - nats
# build:
# dockerfile: Dockerfile.vorebot
# image: asklyphe/vorebot
# bingservice:
# restart: unless-stopped
# networks:
# - lyphenet
# - outer
# env_file: ".env"
# depends_on:
# - nats
# build:
# dockerfile: Dockerfile.bingservice
# volumes:
# - ./proxies.txt:/data/proxies.txt
# image: asklyphe/bingservice
# googleservice:
# restart: unless-stopped
# networks:
# - lyphenet
# - outer
# env_file: ".env"
# depends_on:
# - nats
# - bingservice
# build:
# dockerfile: Dockerfile.googleservice
# image: asklyphe/googleservice
nats:
restart: unless-stopped
networks:
- lyphenet
depends_on:
- db
env_file: ".env"
image: nats:2.11.8
command: "-js"
db:
image: postgres:17
networks:
- lyphenet
env_file: ".env"
healthcheck:
test: ['CMD', 'pg_isready', '-U', 'postgres']
volumes:
- ./database:/var/lib/postgresql/data
proxy:
image: nginx:latest
networks:
- lyphenet
- outer
depends_on:
- frontend
- auth-frontend
volumes:
- ./nginx:/etc/nginx/conf.d
- frontend_data:/data/frontend
- auth_frontend_data:/data/auth-frontend
ports:
- "1234:80"
- "1235:81"
# caddy:
# image: caddy:latest
# networks:
# - lyphenet
# - outer
# depends_on:
# - frontend
# - auth-frontend
# volumes:
# - ./Caddyfile:/etc/caddy/Caddyfile
# ports:
# - 1234:1234
# - 1235:1235
networks:
outer:
lyphenet:
internal: true
volumes:
frontend_data:
auth_frontend_data:

key.pem (Normal file, 52 lines changed)

@@ -0,0 +1,52 @@
-----BEGIN PRIVATE KEY-----
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQC28URbBWcTpvOa
r679u4CwsAHQ+i+9iPBvjRG/ShdvXkAgWm+t/BvKi9JGFOAn49IKOpcteY15qvDF
RPDKk8YWoiwdMQSKRNEwEow3YlIs6xX94+PNdwsjaqy/mhJTMh0xrElZJ5+B4mDX
QHOzdS6fe0SlNhqEAkFaIuUNX1NAks7yRnkC5LGkSHHjgD2ZThwyZ+cstvT7WEUN
9uMz/FfLuQQLrVZDydE9tsoQo0CIl1l0NLiE0BN5RIwii6Gkao74jlxh6tXv7XcO
TxZ1aV3F92qMKN1NtWFEqpC2PDdfLG5iAlwamKguD24NRMDC9CGCiutE4lRhRQWk
C89NSxOkG25MGRvK0jut7MiKOia/Xk5uJI2Rid9IWFKvxnuT5AW9PCbjM0OSkw2G
0PzthUAO1jrOyA2R50oh/YGsdslELhlpZSiu/StSx+0Ux0E9dcQHvnlllU5BrYXb
DkoSCiejhD4xV35KmhIwtz7pr2cajfeJ5r7Em/hSBVbSZqbv5OmGgxTSSDLUTaLJ
A015vLnLNCV5al/iGzXKl1FOwTIzRLv+og/jK70rwOGXRed2JnKntqfBEnR51gky
9axyyz3dAMEE1rCc+oOv7ycZoEKwPdXiyneOCLT40QT6No1UrMJCa32a4+YJgbAN
B8igFwwhdapD5N+qvpCWtiKsdnbPeQIDAQABAoICAALHmEMkqAWbK66QB7UhSe48
z7l0ImIQrrb1mB59EjHJOD6EiUJfc/2D4tvqYECsgqW3G30saifxWsLjxcpiuTA2
nhngUGAQD1IZ2pQwpyPkNyQC0b8qaquotZDPhekOmkrJ+Y0AF3D5Mk3+LVPfznz/
F8XOQ0uTwal7ZfNlyAveNqaYcktcV+hxkQSAfnnHTBpITGCab6in0Rxj24fyCCms
n5zleaEgOAyIUlPVSh1ZMeaT5eT4/BBdH8mAyXcuqRtMScmgOPMc1Q6m84xblxPA
JuTHEBwGivPK3Gbvpw7/ftiaSb41gsJnvPr0qjHeQ9jQhLdkk9iKth82oZc18kVg
ipF1TdSHz1EauoczyHM27aN1ZdPibkaic8QdPya0086sn+uXDRPELivV1xSMSHsH
xpEuANeL874X5h9Cpv4vWcJnQrbs81C4cXI46Mrc561uKljDVtvYFXvpdZuJ4GNp
C9zNNLp1ssmME9uLjLYIbmek/shb9XMpn7XhV0poWUZPGijI7qGLe6OoOqXdKoes
KMXkVJ5omfd/GvvmisJQaFcstqPO54MscFm4cBXQ0U+DxGT0QtSNi4YHtNs8EMdw
2AYlLN/DIzIm+YeR+rWNf8TYZbS1CazQj/ee4DTPAprKsumaR8/Cw+ACl6ICpUFA
bHMCd65TcV94F+LU7L5VAoIBAQDv8oPAUMeaFpY524h9k33HlNmvrmA5aF9tbeEp
b0QJpisldK8w8Rx9/7PS3NP5DO2z1GbNW8+iZE+QH03fIa1QeNBWISnfK8VUkUHn
8j6Q52O0bxC2v+a76U0MdP4JqfrihuHXvjH++9FN/KVwpAJlj7Fq8HugymEpi8+Y
Xv4VnzSm/sdbThvbSHzSGo8Y38dbN7pM6tOen36mxcAnOlH6GnTFEWYmo/f5jj8b
I/+rI8pmeDK6HPZFXw8FonEykX2My8OrN4iGLkFqlFfdgXHtuuPDLImxOCiJN0y7
bizq412/kh7Fbg7Q3oSULd9tmojVi3em4QWvxlxbOwIXjyT1AoIBAQDDLnOsvy2G
ajFvKJ3bSrh3OQdGFyJn0nquZtgzDXsatrItaBYZtUIqnRsfjQZqyvoYNRDqs9QR
xmqB7guPkqLN/mk+mAjmHWPs0oP0Y9S2IXl5/CRIuM3ZcVTVVsPC8lOrXAH3X1jZ
OJiUG/fUUJoonDPnFZLGajefmFsUWjyIr85VOUMhYsq2lA3ZTyM6rQLX3gM//36u
d70K1kXPWoWIsbaztPpqBSJK05EjztVmkUYbPKqHVz+8TD4Xr1baSC1Q0KuHqrr1
451biNN/TSG5GOgdJRZcVXh0imp+xQjB3x2UmwNKk4uRXRWnoa0QlhKm0kbapaGP
QVCUgwlQOA31AoIBAGmvhbx1WBVUkYKWYX3+Ms5vj5pD0fo3MKEAXsZjTbJ6UFLF
HE0QRh5xPAFKZssxmJk2mrklEUVTrX+rah83tCDXtdvZ65lyrA3dlQvWtRwZ7t6Q
dOopiDWIQvmTpjkXd3vDMUJXcan/vGb/OtdsRen56olRtwJRYY5tGFjirkNTxlsv
qRtcQgTJ3sCkFhc8qZBR8Wrjm6YoVh6ax1H/7A+fC4OpcDbgzd5Lexw3NOtqbkHH
+3/iNc7EWdd/fyBo2MXlEiAd67I+OW36POFBnK67PIrA2T0HoUMe6ls74ejrkGVK
tOb83OW+vOKPefPKty5nqaIFRv3u/sroKLm7wOkCggEAFBsR4WakKud/hiLZ9///
dpCSVj8F1UoSRyri9Idb+gl92z2QoT9RvJAIfjyJv7B/CMVWo8a4fshAqne6CyUg
zjV54+/HYuT+KSQaYa9y9vwFxnIZzr/yvIZ3Ja7VZZyOz+UfcrsIrP+uf/tNkTpo
VuyYUCKhxvykFDWelD8jYzUw/Qh0CNljZmFj99G2IFI4K8J79Ti9dP1ypM4jzNNX
VBhyaJqo/QjgWnLmzZh91R376cxbCKwNLblw4AG44a1ztZJ5SPVmYvP6frZeiwuI
AMg3COGMJyDK0r57b+meGFKCeo9pTGJcizHajDUUXdQHwdWBZP6Q4O/qfBHvgKr1
jQKCAQEA4UaBpHUcO9TG5NEMX+IOMurcuYnlf+12ImGix21A2ylOI7RIfA/RCfgK
7UAwT9OqTNhvgJ9DCth7y6TJ6/4UnKJ1duaM7sP6vX3Vq8z0zixYDZJ2bE4COgBJ
tzAOLM22cpIJj98XhCdMOzkVse7vQDpBJPmTh19pqfuFnGLmsIIdLuo64Xa+6bvB
p21edHgxH1pl82rfnvMTMzoicH0mQZQD+l19O6togwhpJY1YbPaqGlCavlQqMKyC
r8fseEBic1de7Y9XaG25TS+lZVg53vOB2BNZM6Z8CjeRf3rdZZd+cCqa7aFsdmER
hfHYKzHGaDbp/aPWH8HQzfs6QxGRog==
-----END PRIVATE KEY-----

nginx/default.conf (Normal file, 27 lines changed)

@@ -0,0 +1,27 @@
server {
listen 81;
server_name 0.0.0.0;
access_log off;
location /static/ {
root /data/auth-frontend;
}
location / {
proxy_pass http://auth-frontend:5843;
}
}
server {
listen 80;
server_name 0.0.0.0;
access_log off;
location /static/ {
root /data/frontend;
}
location / {
proxy_pass http://frontend:5842;
}
}

shell.nix (Executable file, 25 lines changed)

@@ -0,0 +1,25 @@
{ pkgs ? import <nixpkgs> {}, lib ? pkgs.lib }:
pkgs.mkShellNoCC {
packages = with pkgs; [ rustup nats-server caddy postgresql clang pkg-config tmux /*you'll *need* tmux*/ ];
buildInputs = with pkgs; [ openssl clang foundationdb ];
LIBCLANG_PATH = lib.makeLibraryPath [ pkgs.libclang ];
shellHook = ''
rustup install stable
rustup default stable
export RUST_LOG=debug
export NATS_URL="127.0.0.1:4222"
export NATS_CERT=$(cat cert.pem)
export NATS_KEY=$(cat key.pem)
export ASKLYPHE_URL="http://127.0.0.1:8002"
export AUTH_URL="http://127.0.0.1:8001"
export DB_URL="postgres://127.0.0.1:5432/user"
export SMTP_DISABLE=1
export SMTP_USERNAME=""
export SMTP_PASSWORD=""
export SMTP_URL=""
export POSTGRESQL_PASSWORD="user"
# lmao
echo WARNING: RUSTFLAGS="-A dead_code -A unused"
export RUSTFLAGS="-A dead_code -A unused"
'';
}