initial user derank frontend impl
All checks were successful
/ build-all-services (push) Successful in 9m25s
All checks were successful
/ build-all-services (push) Successful in 9m25s
only implemented for searching; inserting deranks is not yet implemented or tested
This commit is contained in:
parent
08903aa0a1
commit
667c9193e8
4 changed files with 77 additions and 11 deletions
|
@ -48,7 +48,8 @@ pub struct SearchResult {
|
|||
pub title: Option<String>,
|
||||
pub description: Option<String>,
|
||||
pub url: String,
|
||||
pub percentage: String,
|
||||
pub percentage: f64,
|
||||
pub percentage_str: String,
|
||||
pub value: String,
|
||||
pub asklyphe: bool,
|
||||
pub bing: bool,
|
||||
|
@ -255,7 +256,7 @@ pub async fn search_nojs(
|
|||
}
|
||||
if let Some(token) = jar.get("token") {
|
||||
let token = token.value().to_string();
|
||||
let info = match authenticate_user(nats.clone(), token).await {
|
||||
let info = match authenticate_user(nats.clone(), token.clone()).await {
|
||||
Ok(i) => i,
|
||||
Err(e) => {
|
||||
return (
|
||||
|
@ -300,7 +301,7 @@ pub async fn search_nojs(
|
|||
engines.retain(|v| v != "asklyphe");
|
||||
}
|
||||
|
||||
gather_search_results(nats, query.as_str(), info, complications, Some(engines))
|
||||
gather_search_results(nats, query.as_str(), token, info, complications, Some(engines))
|
||||
.await
|
||||
.into_response()
|
||||
} else {
|
||||
|
@ -351,7 +352,7 @@ pub async fn search_json(
|
|||
}
|
||||
if let Some(token) = jar.get("token") {
|
||||
let token = token.value().to_string();
|
||||
let info = match authenticate_user(nats.clone(), token).await {
|
||||
let info = match authenticate_user(nats.clone(), token.clone()).await {
|
||||
Ok(i) => i,
|
||||
Err(e) => {
|
||||
return error_response("not authenticated");
|
||||
|
@ -374,6 +375,7 @@ pub async fn search_json(
|
|||
let result = gather_search_results(
|
||||
nats,
|
||||
query.as_str(),
|
||||
token,
|
||||
info,
|
||||
Complications::default(),
|
||||
Some(engines),
|
||||
|
|
|
@ -27,6 +27,8 @@ use tokio::sync::Mutex;
|
|||
use tracing::log::error;
|
||||
use tracing::warn;
|
||||
use ulid::Ulid;
|
||||
use asklyphe_common::nats::authservice::{AuthServiceQuery, AuthServiceRequest, AuthServiceResponse};
|
||||
use asklyphe_common::nats::authservice::deranklist::{DerankEntry, UserFetchActiveDeranksRequest, UserFetchActiveDeranksResponse};
|
||||
use crate::routes::search::{Complications, ImageSearchResult, ImageSearchTemplate, SearchResult, SearchTemplate};
|
||||
use crate::routes::UserInfo;
|
||||
use crate::{BUILT_ON, GIT_COMMIT, ALPHA, VERSION, WEBSITE_COUNT, YEAR};
|
||||
|
@ -52,7 +54,38 @@ pub async fn update_website_counter(nats: Arc<jetstream::Context>) {
|
|||
}
|
||||
}
|
||||
|
||||
pub async fn gather_search_results(nats: Arc<jetstream::Context>, query: &str, user_info: UserInfo, complications: Complications, engines: Option<Vec<String>>) -> SearchTemplate {
|
||||
async fn user_active_deranks(nats: Arc<jetstream::Context>, token: String) -> Vec<DerankEntry> {
|
||||
let result = comms::query_service(comms::Query::AuthService(AuthServiceQuery {
|
||||
request: AuthServiceRequest::UserFetchActiveDeranksRequest(UserFetchActiveDeranksRequest {
|
||||
token,
|
||||
}),
|
||||
replyto: "".to_string(),
|
||||
}), nats.deref(), false).await;
|
||||
|
||||
if let Ok(comms::ServiceResponse::AuthService(result)) = result {
|
||||
match result {
|
||||
AuthServiceResponse::UserFetchActiveDeranksResponse(v) => match v {
|
||||
UserFetchActiveDeranksResponse::Success(v) => v,
|
||||
UserFetchActiveDeranksResponse::InternalServerError => {
|
||||
warn!("received error while asking for user active deranks (internal server error)");
|
||||
vec![]
|
||||
}
|
||||
UserFetchActiveDeranksResponse::Logout => {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
warn!("received invalid response while asking for derank list");
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
} else {
|
||||
error!("{:?}", result.err().unwrap());
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn gather_search_results(nats: Arc<jetstream::Context>, query: &str, token: String, user_info: UserInfo, complications: Complications, engines: Option<Vec<String>>) -> SearchTemplate {
|
||||
let mut search_results = vec![];
|
||||
let mut note = None;
|
||||
|
||||
|
@ -119,7 +152,8 @@ pub async fn gather_search_results(nats: Arc<jetstream::Context>, query: &str, u
|
|||
}
|
||||
shortened
|
||||
}),
|
||||
percentage: format!("{:.2}", ((1.0 - (i as f64 / result_count as f64)) * 50.0) + 40.0),
|
||||
percentage: ((1.0 - (i as f64 / result_count as f64)) * 50.0) + 40.0,
|
||||
percentage_str: "".to_string(),
|
||||
value: format!("{}", i),
|
||||
asklyphe: false,
|
||||
bing: true,
|
||||
|
@ -194,7 +228,8 @@ pub async fn gather_search_results(nats: Arc<jetstream::Context>, query: &str, u
|
|||
}
|
||||
shortened
|
||||
}),
|
||||
percentage: format!("{:.2}", ((1.0 - (i as f64 / result_count as f64)) * 50.0) + 40.0),
|
||||
percentage: ((1.0 - (i as f64 / result_count as f64)) * 50.0) + 40.0,
|
||||
percentage_str: "".to_string(),
|
||||
value: format!("{}", i),
|
||||
asklyphe: false,
|
||||
bing: false,
|
||||
|
@ -286,7 +321,8 @@ pub async fn gather_search_results(nats: Arc<jetstream::Context>, query: &str, u
|
|||
shortened
|
||||
}),
|
||||
url: v.url,
|
||||
percentage: format!("{:.2}", (v.relevance / results.max_relevance) * 100.0),
|
||||
percentage: (v.relevance / results.max_relevance) * 100.0,
|
||||
percentage_str: "".to_string(),
|
||||
value: format!("{:.2}", v.relevance),
|
||||
asklyphe: true,
|
||||
bing: false,
|
||||
|
@ -303,10 +339,37 @@ pub async fn gather_search_results(nats: Arc<jetstream::Context>, query: &str, u
|
|||
}
|
||||
}
|
||||
|
||||
// apply deranks
|
||||
let deranks = user_active_deranks(nats.clone(), token).await;
|
||||
for result in &mut search_results {
|
||||
for derank in &deranks {
|
||||
if result.url.contains(&derank.urlmatch) {
|
||||
// skip this derank if the url contains the `unless` pattern, or if it lacks the required `and` pattern
|
||||
if let Some(unless) = &derank.unless {
|
||||
if result.url.contains(unless) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if let Some(and) = &derank.and {
|
||||
if !result.url.contains(and) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// multiply
|
||||
result.percentage *= derank.multiplier;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
search_results.sort_by(|a, b| {
|
||||
b.percentage.parse::<f64>().unwrap().total_cmp(&a.percentage.parse::<f64>().unwrap())
|
||||
b.percentage.total_cmp(&a.percentage)
|
||||
});
|
||||
|
||||
for result in &mut search_results {
|
||||
result.percentage_str = format!("{:.2}", result.percentage);
|
||||
}
|
||||
|
||||
let mut already_included = BTreeMap::new();
|
||||
|
||||
let mut remove = vec![];
|
||||
|
@ -341,6 +404,7 @@ pub async fn gather_search_results(nats: Arc<jetstream::Context>, query: &str, u
|
|||
search_results.remove(rm - i);
|
||||
}
|
||||
|
||||
|
||||
let theme = user_info.theme.clone();
|
||||
let querystr = url_encoded_data::stringify(&[("q", query)]);
|
||||
SearchTemplate {
|
||||
|
|
|
@ -110,7 +110,7 @@ function process(result) {
|
|||
}
|
||||
const relevance = document.createElement("span");
|
||||
relevance.classList.add("search-relevance");
|
||||
relevance.innerText = `(${result_obj.percentage}%, ${result_obj.value} / ${search_results.max_relevance})`;
|
||||
relevance.innerText = `(${result_obj.percentage_str}%, ${result_obj.value} / ${search_results.max_relevance})`;
|
||||
const enginelist = document.createElement("span");
|
||||
enginelist.classList.add("enginelist");
|
||||
if (result_obj.hasOwnProperty("asklyphe")) {
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
{% match result.title %}
|
||||
{% when Some with (_) %}<span class="search-url">{{ result.url }}</span>{% when None %}
|
||||
{% endmatch %}
|
||||
<span class="search-relevance">({{ result.percentage }}%, {{ result.value }} / {{ max_relevance }})</span>
|
||||
<span class="search-relevance">({{ result.percentage_str }}%, {{ result.value }} / {{ max_relevance }})</span>
|
||||
<span class="enginelist">
|
||||
{% if result.asklyphe %}
|
||||
<img src="/static/img/tinylyphe.png" title="from askLyphe database"/>
|
||||
|
|
Loading…
Add table
Reference in a new issue