Spaces:
Running
Running
neon_arch
committed on
Commit
·
9a5f1c5
1
Parent(s):
ebee1f4
🐛 fix: reimplement caching code within the `search` function (#592)
Browse files
- reduce resource usage &
- only cache search results which have not been cached before.
- src/server/routes/search.rs +20 -28
src/server/routes/search.rs
CHANGED
|
@@ -12,6 +12,7 @@ use crate::{
|
|
| 12 |
results::aggregator::aggregate,
|
| 13 |
};
|
| 14 |
use actix_web::{get, http::header::ContentType, web, HttpRequest, HttpResponse};
|
|
|
|
| 15 |
use regex::Regex;
|
| 16 |
use std::borrow::Cow;
|
| 17 |
use tokio::{
|
|
@@ -40,7 +41,6 @@ pub async fn search(
|
|
| 40 |
config: web::Data<&'static Config>,
|
| 41 |
cache: web::Data<&'static SharedCache>,
|
| 42 |
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
|
| 43 |
-
use std::sync::Arc;
|
| 44 |
let params = web::Query::<SearchParams>::from_query(req.query_string())?;
|
| 45 |
match &params.q {
|
| 46 |
Some(query) => {
|
|
@@ -83,44 +83,36 @@ pub async fn search(
|
|
| 83 |
let previous_page = page.saturating_sub(1);
|
| 84 |
let next_page = page + 1;
|
| 85 |
|
| 86 |
-
let
|
| 87 |
if page != previous_page {
|
| 88 |
let (previous_results, current_results, next_results) = join!(
|
| 89 |
get_results(previous_page),
|
| 90 |
get_results(page),
|
| 91 |
get_results(next_page)
|
| 92 |
);
|
| 93 |
-
let (parsed_previous_results, parsed_next_results) =
|
| 94 |
-
(previous_results?, next_results?);
|
| 95 |
|
| 96 |
-
|
| 97 |
-
[
|
| 98 |
-
parsed_previous_results.1,
|
| 99 |
-
results.1.clone(),
|
| 100 |
-
parsed_next_results.1,
|
| 101 |
-
],
|
| 102 |
-
[
|
| 103 |
-
parsed_previous_results.0,
|
| 104 |
-
results.0.clone(),
|
| 105 |
-
parsed_next_results.0,
|
| 106 |
-
],
|
| 107 |
-
);
|
| 108 |
|
| 109 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 110 |
|
| 111 |
tokio::spawn(async move { cache.cache_results(&results_list, &cache_keys).await });
|
| 112 |
} else {
|
| 113 |
let (current_results, next_results) =
|
| 114 |
join!(get_results(page), get_results(page + 1));
|
| 115 |
|
| 116 |
-
|
| 117 |
-
|
| 118 |
-
results = Arc::new(current_results?);
|
| 119 |
|
| 120 |
-
let (
|
| 121 |
-
[results.
|
| 122 |
-
|
| 123 |
-
|
|
|
|
| 124 |
|
| 125 |
tokio::spawn(async move { cache.cache_results(&results_list, &cache_keys).await });
|
| 126 |
}
|
|
@@ -163,7 +155,7 @@ async fn results(
|
|
| 163 |
query: &str,
|
| 164 |
page: u32,
|
| 165 |
search_settings: &server_models::Cookie<'_>,
|
| 166 |
-
) -> Result<(SearchResults, String), Box<dyn std::error::Error>> {
|
| 167 |
// eagerly parse cookie value to evaluate safe search level
|
| 168 |
let safe_search_level = search_settings.safe_search_level;
|
| 169 |
|
|
@@ -182,7 +174,7 @@ async fn results(
|
|
| 182 |
// check if fetched cache results was indeed fetched or it was an error and if so
|
| 183 |
// handle the data accordingly.
|
| 184 |
match cached_results {
|
| 185 |
-
Ok(results) => Ok((results, cache_key)),
|
| 186 |
Err(_) => {
|
| 187 |
if safe_search_level == 4 {
|
| 188 |
let mut results: SearchResults = SearchResults::default();
|
|
@@ -196,7 +188,7 @@ async fn results(
|
|
| 196 |
.cache_results(&[results.clone()], &[cache_key.clone()])
|
| 197 |
.await?;
|
| 198 |
results.set_safe_search_level(safe_search_level);
|
| 199 |
-
return Ok((results, cache_key));
|
| 200 |
}
|
| 201 |
}
|
| 202 |
|
|
@@ -235,7 +227,7 @@ async fn results(
|
|
| 235 |
.cache_results(&[results.clone()], &[cache_key.clone()])
|
| 236 |
.await?;
|
| 237 |
results.set_safe_search_level(safe_search_level);
|
| 238 |
-
Ok((results, cache_key))
|
| 239 |
}
|
| 240 |
}
|
| 241 |
}
|
|
|
|
| 12 |
results::aggregator::aggregate,
|
| 13 |
};
|
| 14 |
use actix_web::{get, http::header::ContentType, web, HttpRequest, HttpResponse};
|
| 15 |
+
use itertools::Itertools;
|
| 16 |
use regex::Regex;
|
| 17 |
use std::borrow::Cow;
|
| 18 |
use tokio::{
|
|
|
|
| 41 |
config: web::Data<&'static Config>,
|
| 42 |
cache: web::Data<&'static SharedCache>,
|
| 43 |
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
|
|
|
|
| 44 |
let params = web::Query::<SearchParams>::from_query(req.query_string())?;
|
| 45 |
match &params.q {
|
| 46 |
Some(query) => {
|
|
|
|
| 83 |
let previous_page = page.saturating_sub(1);
|
| 84 |
let next_page = page + 1;
|
| 85 |
|
| 86 |
+
let results: (SearchResults, String, bool);
|
| 87 |
if page != previous_page {
|
| 88 |
let (previous_results, current_results, next_results) = join!(
|
| 89 |
get_results(previous_page),
|
| 90 |
get_results(page),
|
| 91 |
get_results(next_page)
|
| 92 |
);
|
|
|
|
|
|
|
| 93 |
|
| 94 |
+
results = current_results?;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 95 |
|
| 96 |
+
let (results_list, cache_keys): (Vec<SearchResults>, Vec<String>) =
|
| 97 |
+
[previous_results?, results.clone(), next_results?]
|
| 98 |
+
.into_iter()
|
| 99 |
+
.filter_map(|(result, cache_key, flag)| {
|
| 100 |
+
dbg!(flag).then_some((result, cache_key))
|
| 101 |
+
})
|
| 102 |
+
.multiunzip();
|
| 103 |
|
| 104 |
tokio::spawn(async move { cache.cache_results(&results_list, &cache_keys).await });
|
| 105 |
} else {
|
| 106 |
let (current_results, next_results) =
|
| 107 |
join!(get_results(page), get_results(page + 1));
|
| 108 |
|
| 109 |
+
results = current_results?;
|
|
|
|
|
|
|
| 110 |
|
| 111 |
+
let (results_list, cache_keys): (Vec<SearchResults>, Vec<String>) =
|
| 112 |
+
[results.clone(), next_results?]
|
| 113 |
+
.into_iter()
|
| 114 |
+
.filter_map(|(result, cache_key, flag)| flag.then_some((result, cache_key)))
|
| 115 |
+
.multiunzip();
|
| 116 |
|
| 117 |
tokio::spawn(async move { cache.cache_results(&results_list, &cache_keys).await });
|
| 118 |
}
|
|
|
|
| 155 |
query: &str,
|
| 156 |
page: u32,
|
| 157 |
search_settings: &server_models::Cookie<'_>,
|
| 158 |
+
) -> Result<(SearchResults, String, bool), Box<dyn std::error::Error>> {
|
| 159 |
// eagerly parse cookie value to evaluate safe search level
|
| 160 |
let safe_search_level = search_settings.safe_search_level;
|
| 161 |
|
|
|
|
| 174 |
// check if fetched cache results was indeed fetched or it was an error and if so
|
| 175 |
// handle the data accordingly.
|
| 176 |
match cached_results {
|
| 177 |
+
Ok(results) => Ok((results, cache_key, false)),
|
| 178 |
Err(_) => {
|
| 179 |
if safe_search_level == 4 {
|
| 180 |
let mut results: SearchResults = SearchResults::default();
|
|
|
|
| 188 |
.cache_results(&[results.clone()], &[cache_key.clone()])
|
| 189 |
.await?;
|
| 190 |
results.set_safe_search_level(safe_search_level);
|
| 191 |
+
return Ok((results, cache_key, true));
|
| 192 |
}
|
| 193 |
}
|
| 194 |
|
|
|
|
| 227 |
.cache_results(&[results.clone()], &[cache_key.clone()])
|
| 228 |
.await?;
|
| 229 |
results.set_safe_search_level(safe_search_level);
|
| 230 |
+
Ok((results, cache_key, true))
|
| 231 |
}
|
| 232 |
}
|
| 233 |
}
|