Merge branch 'rolling' into FIX/463_results_from_different_search_engines_get_cached_as_the_same_key
- .github/workflows/pr_labeler.yml +1 -1
- .github/workflows/stale.yml +1 -1
- Cargo.lock +12 -10
- Cargo.toml +5 -5
- Dockerfile +1 -1
- dev.Dockerfile +1 -1
- src/engines/bing.rs +124 -0
- src/engines/mod.rs +1 -0
- src/lib.rs +8 -1
- src/models/engine_models.rs +4 -0
- src/server/router.rs +36 -44
- src/server/routes/search.rs +11 -13
- websurfx/config.lua +1 -0
.github/workflows/pr_labeler.yml
CHANGED
@@ -9,7 +9,7 @@ jobs:
       pull-requests: write
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/labeler@
+      - uses: actions/labeler@v5
         with:
           sync-labels: true
           dot: true
.github/workflows/stale.yml
CHANGED
@@ -19,7 +19,7 @@ jobs:
       pull-requests: write
 
     steps:
-      - uses: actions/stale@
+      - uses: actions/stale@v9
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-issue-message: 'Stale issue message'
Cargo.lock
CHANGED
@@ -82,6 +82,7 @@ dependencies = [
  "ahash 0.8.7",
  "base64 0.21.5",
  "bitflags 2.4.1",
+ "brotli",
  "bytes 1.5.0",
  "bytestring",
  "derive_more",
@@ -363,9 +364,9 @@ checksum = "9338790e78aa95a416786ec8389546c4b6a1dfc3dc36071ed9518a9413a542eb"
 
 [[package]]
 name = "async-trait"
-version = "0.1.
+version = "0.1.76"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "
+checksum = "531b97fb4cd3dfdce92c35dedbfdc1f0b9d8091c8ca943d6dae340ef5012d514"
 dependencies = [
  "proc-macro2 1.0.71",
  "quote 1.0.33",
@@ -2028,10 +2029,11 @@ dependencies = [
 
 [[package]]
 name = "minify-js"
-version = "0.
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "
+checksum = "b1fa5546ee8bd66024113e506cabe4230e76635a094c06ea2051b66021dda92e"
 dependencies = [
+ "aho-corasick 0.7.20",
  "lazy_static",
  "parse-js",
 ]
@@ -2328,9 +2330,9 @@ dependencies = [
 
 [[package]]
 name = "parse-js"
-version = "0.
+version = "0.20.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "
+checksum = "2742b5e32dcb5930447ed9f9e401a7dfd883867fc079c4fac44ae8ba3593710e"
 dependencies = [
  "aho-corasick 0.7.20",
  "bumpalo",
@@ -2790,9 +2792,9 @@ dependencies = [
 
 [[package]]
 name = "redis"
-version = "0.
+version = "0.24.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "
+checksum = "c580d9cbbe1d1b479e8d67cf9daf6a62c957e6846048408b80b43ac3f6af84cd"
 dependencies = [
  "arc-swap",
  "async-trait",
@@ -3183,9 +3185,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.
+version = "1.0.109"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "
+checksum = "cb0652c533506ad7a2e353cce269330d6afd8bdfb6d75e0ace5b35aacbd7b9e9"
 dependencies = [
  "itoa 1.0.10",
  "ryu",
Cargo.toml
CHANGED
@@ -16,20 +16,20 @@ path = "src/bin/websurfx.rs"
 reqwest = {version="0.11.22", default-features=false, features=["rustls-tls","brotli", "gzip"]}
 tokio = {version="1.32.0",features=["rt-multi-thread","macros"], default-features = false}
 serde = {version="1.0.190", default-features=false, features=["derive"]}
-serde_json = {version="1.0.
+serde_json = {version="1.0.109", default-features=false}
 maud = {version="0.25.0", default-features=false, features=["actix-web"]}
 scraper = {version="0.18.1", default-features = false}
-actix-web = {version="4.4.0", features = ["cookies", "macros"], default-features=false}
+actix-web = {version="4.4.0", features = ["cookies", "macros", "compress-brotli"], default-features=false}
 actix-files = {version="0.6.2", default-features=false}
 actix-cors = {version="0.6.4", default-features=false}
 fake-useragent = {version="0.1.3", default-features=false}
 env_logger = {version="0.10.0", default-features=false}
 log = {version="0.4.20", default-features=false}
 mlua = {version="0.9.1", features=["luajit", "vendored"], default-features=false}
-redis = {version="0.
+redis = {version="0.24.0", features=["tokio-comp","connection-manager"], default-features = false, optional = true}
 blake3 = {version="1.5.0", default-features=false}
 error-stack = {version="0.4.0", default-features=false, features=["std"]}
-async-trait = {version="0.1.
+async-trait = {version="0.1.76", default-features=false}
 regex = {version="1.9.4", features=["perf"], default-features = false}
 smallvec = {version="1.11.0", features=["union", "serde"], default-features=false}
 futures = {version="0.3.28", default-features=false}
@@ -46,7 +46,7 @@ tempfile = {version="3.8.0", default-features=false}
 
 [build-dependencies]
 lightningcss = {version="1.0.0-alpha.50", default-features=false, features=["grid"]}
-minify-js = {version="0.
+minify-js = {version="0.6.0", default-features=false}
 
 [profile.dev]
 opt-level = 0
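A note on the redis change above: marking the dependency `optional = true` means Cargo only compiles the redis crate when a feature explicitly enables it, so deployments that do not want a Redis-backed cache avoid building the dependency at all. A minimal sketch of the usual gating pattern follows; the `redis-cache` feature name is an assumption for illustration, not something defined in this diff.

// Hypothetical sketch of feature-gated compilation; the `redis-cache`
// feature name is assumed and is not defined in this diff. With
// `optional = true`, the redis crate is only built when such a feature
// turns it on, e.g.:
//
//     cargo build --features redis-cache

#[cfg(feature = "redis-cache")]
mod redis_backed {
    /// Placeholder for a cache that would hold a Redis connection URL.
    pub struct Cache {
        pub connection_url: String,
    }
}

#[cfg(not(feature = "redis-cache"))]
mod in_memory {
    use std::collections::HashMap;

    /// Fallback cache compiled when the feature is disabled.
    pub struct Cache {
        pub entries: HashMap<String, String>,
    }
}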
Dockerfile
CHANGED
@@ -1,4 +1,4 @@
-FROM --platform=$BUILDPLATFORM rust:1.
+FROM --platform=$BUILDPLATFORM rust:1.75.0-alpine3.18 AS chef
 # We only pay the installation cost once,
 # it will be cached from the second build onwards
 RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev upx perl build-base
dev.Dockerfile
CHANGED
@@ -1,5 +1,5 @@
 # Create Builder image
-FROM --platform=$BUILDPLATFORM rust:1.
+FROM --platform=$BUILDPLATFORM rust:1.75.0-alpine3.18
 
 # Install required dependencies
 RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev perl build-base
src/engines/bing.rs
ADDED
@@ -0,0 +1,124 @@
+//! The `bing` module handles the scraping of results from the bing search engine
+//! by querying the upstream bing search engine with user provided query and with a page
+//! number if provided.
+
+use std::collections::HashMap;
+
+use regex::Regex;
+use reqwest::header::HeaderMap;
+use reqwest::Client;
+use scraper::Html;
+
+use crate::models::aggregation_models::SearchResult;
+
+use crate::models::engine_models::{EngineError, SearchEngine};
+
+use error_stack::{Report, Result, ResultExt};
+
+use super::search_result_parser::SearchResultParser;
+
+/// A new Bing engine type defined in-order to implement the `SearchEngine` trait which allows to
+/// reduce code duplication as well as allows to create vector of different search engines easily.
+pub struct Bing {
+    /// The parser, used to interpret the search result.
+    parser: SearchResultParser,
+}
+
+impl Bing {
+    /// Creates the Bing parser.
+    pub fn new() -> Result<Self, EngineError> {
+        Ok(Self {
+            parser: SearchResultParser::new(
+                ".b_results",
+                ".b_algo",
+                "h2 a",
+                ".tpcn a.tilk",
+                ".b_caption p",
+            )?,
+        })
+    }
+}
+
+#[async_trait::async_trait]
+impl SearchEngine for Bing {
+    async fn results(
+        &self,
+        query: &str,
+        page: u32,
+        user_agent: &str,
+        client: &Client,
+        _safe_search: u8,
+    ) -> Result<HashMap<String, SearchResult>, EngineError> {
+        // Bing uses `start results from this number` convention
+        // So, for 10 results per page, page 0 starts at 1, page 1
+        // starts at 11, and so on.
+        let results_per_page = 10;
+        let start_result = results_per_page * page + 1;
+
+        let url: String = match page {
+            0 => {
+                format!("https://www.bing.com/search?q={query}")
+            }
+            _ => {
+                format!("https://www.bing.com/search?q={query}&first={start_result}")
+            }
+        };
+
+        let query_params: Vec<(&str, &str)> = vec![
+            ("_EDGE_V", "1"),
+            ("SRCHD=AF", "NOFORM"),
+            ("_Rwho=u", "d"),
+            ("bngps=s", "0"),
+            ("_UR=QS=0&TQS", "0"),
+            ("_UR=QS=0&TQS", "0"),
+        ];
+
+        let mut cookie_string = String::new();
+        for (k, v) in &query_params {
+            cookie_string.push_str(&format!("{k}={v}; "));
+        }
+
+        let header_map = HeaderMap::try_from(&HashMap::from([
+            ("USER_AGENT".to_string(), user_agent.to_string()),
+            ("REFERER".to_string(), "https://google.com/".to_string()),
+            (
+                "CONTENT_TYPE".to_string(),
+                "application/x-www-form-urlencoded".to_string(),
+            ),
+            ("COOKIE".to_string(), cookie_string),
+        ]))
+        .change_context(EngineError::UnexpectedError)?;
+
+        let document: Html = Html::parse_document(
+            &Bing::fetch_html_from_upstream(self, &url, header_map, client).await?,
+        );
+
+        // Bing is very aggressive in finding matches
+        // even with the most absurd of queries. ".b_algo" is the
+        // class for the list item of results
+        if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(0) {
+            if no_result_msg
+                .value()
+                .attr("class")
+                .map(|classes| classes.contains("b_algo"))
+                .unwrap_or(false)
+            {
+                return Err(Report::new(EngineError::EmptyResultSet));
+            }
+        }
+
+        let re_span = Regex::new(r#"<span.*?>.*?(?:</span> ·|</span>)"#).unwrap();
+        let re_strong = Regex::new(r#"(<strong>|</strong>)"#).unwrap();
+
+        // scrape all the results from the html
+        self.parser
+            .parse_for_results(&document, |title, url, desc| {
+                Some(SearchResult::new(
+                    &re_strong.replace_all(title.inner_html().trim(), ""),
+                    url.value().attr("href").unwrap(),
+                    &re_span.replace_all(desc.inner_html().trim(), ""),
+                    &["bing"],
+                ))
+            })
+    }
+}
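The only arithmetic in the new engine is the 1-based `first` offset that Bing expects, plus the hand-rolled Cookie header value. A self-contained sketch of those two computations, mirroring the code in `Bing::results` above:

// Self-contained check of the pagination and cookie logic used in
// `Bing::results`. Bing's `first` parameter is 1-based: with 10 results
// per page, page 0 starts at result 1, page 1 at 11, and so on.
fn start_result(page: u32) -> u32 {
    let results_per_page = 10;
    results_per_page * page + 1
}

fn main() {
    assert_eq!(start_result(0), 1);
    assert_eq!(start_result(1), 11);
    assert_eq!(start_result(2), 21);

    // The Cookie header value is built exactly as in the engine:
    // each `(key, value)` pair becomes "key=value; ".
    let query_params = [("_EDGE_V", "1"), ("SRCHD=AF", "NOFORM")];
    let mut cookie_string = String::new();
    for (k, v) in &query_params {
        cookie_string.push_str(&format!("{k}={v}; "));
    }
    assert_eq!(cookie_string, "_EDGE_V=1; SRCHD=AF=NOFORM; ");
}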
src/engines/mod.rs
CHANGED
@@ -3,6 +3,7 @@
 //! provide a standard functions to be implemented for all the upstream search engine handling
 //! code. Moreover, it also provides a custom error for the upstream search engine handling code.
 
+pub mod bing;
 pub mod brave;
 pub mod duckduckgo;
 pub mod librex;
src/lib.rs
CHANGED
@@ -21,7 +21,12 @@ use crate::server::router;
 use actix_cors::Cors;
 use actix_files as fs;
 use actix_governor::{Governor, GovernorConfigBuilder};
-use actix_web::{
+use actix_web::{
+    dev::Server,
+    http::header,
+    middleware::{Compress, Logger},
+    web, App, HttpServer,
+};
 use cache::cacher::{Cacher, SharedCache};
 use config::parser::Config;
 use handler::{file_path, FileType};
@@ -73,6 +78,8 @@ pub fn run(
         ]);
 
         App::new()
+            // Compress the responses provided by the server for the client requests.
+            .wrap(Compress::default())
             .wrap(Logger::default()) // added logging middleware for logging.
             .app_data(web::Data::new(config.clone()))
             .app_data(cache.clone())
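The new `Compress` middleware is what the `compress-brotli` feature added to actix-web in Cargo.toml feeds into: actix negotiates the response encoding from the request's Accept-Encoding header, and the feature makes brotli one of the available encodings. A minimal stand-alone sketch (not the project's actual `run()` function) of the same wiring:

// Minimal stand-alone sketch showing the same Compress registration as
// the diff above. Requires actix-web with the "macros" and
// "compress-brotli" features.
use actix_web::{middleware::Compress, web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Same middleware registration as in src/lib.rs above.
            .wrap(Compress::default())
            .route("/", web::get().to(|| async { HttpResponse::Ok().body("hello") }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}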
src/models/engine_models.rs
CHANGED
@@ -166,6 +166,10 @@ impl EngineHandler {
             let engine = crate::engines::mojeek::Mojeek::new()?;
             ("mojeek", Box::new(engine))
         }
+        "bing" => {
+            let engine = crate::engines::bing::Bing::new()?;
+            ("bing", Box::new(engine))
+        }
         _ => {
             return Err(Report::from(EngineError::NoSuchEngineFound(
                 engine_name.to_string(),
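The hunk above extends a plain string-to-engine dispatch: each arm constructs one engine and boxes it behind the `SearchEngine` trait so callers can hold heterogeneous engines uniformly. A trimmed-down, hypothetical illustration of that pattern; the trait and error type here are simplified stand-ins, not the project's real ones:

// Hypothetical, simplified version of the EngineHandler dispatch pattern:
// a name maps to a boxed trait object so different engines can live in
// one collection and be called through a single interface.
trait SearchEngine {
    fn name(&self) -> &'static str;
}

struct Bing;

impl SearchEngine for Bing {
    fn name(&self) -> &'static str {
        "bing"
    }
}

fn engine_for(name: &str) -> Result<Box<dyn SearchEngine>, String> {
    match name {
        "bing" => Ok(Box::new(Bing)),
        other => Err(format!("no such engine found: {other}")),
    }
}

fn main() {
    assert_eq!(engine_for("bing").unwrap().name(), "bing");
    assert!(engine_for("altavista").is_err());
}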
src/server/router.rs
CHANGED
@@ -6,22 +6,20 @@ use crate::{
     config::parser::Config,
     handler::{file_path, FileType},
 };
-use actix_web::{get, web, HttpRequest, HttpResponse};
+use actix_web::{get, http::header::ContentType, web, HttpRequest, HttpResponse};
 use std::fs::read_to_string;
 
 /// Handles the route of index page or main page of the `websurfx` meta search engine website.
 #[get("/")]
 pub async fn index(config: web::Data<Config>) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    Ok(HttpResponse::Ok()
-        .0,
-    ))
+    Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
+        crate::templates::views::index::index(
+            &config.style.colorscheme,
+            &config.style.theme,
+            &config.style.animation,
+        )
+        .0,
+    ))
 }
 
 /// Handles the route of any other accessed route/page which is not provided by the
@@ -29,16 +27,14 @@ pub async fn index(config: web::Data<Config>) -> Result<HttpResponse, Box<dyn st
 pub async fn not_found(
     config: web::Data<Config>,
 ) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    Ok(HttpResponse::Ok()
-        .0,
-    ))
+    Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
+        crate::templates::views::not_found::not_found(
+            &config.style.colorscheme,
+            &config.style.theme,
+            &config.style.animation,
+        )
+        .0,
+    ))
 }
 
 /// Handles the route of robots.txt page of the `websurfx` meta search engine website.
@@ -47,23 +43,21 @@ pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std:
     let page_content: String =
         read_to_string(format!("{}/robots.txt", file_path(FileType::Theme)?))?;
     Ok(HttpResponse::Ok()
-        .content_type(
+        .content_type(ContentType::plaintext())
         .body(page_content))
 }
 
 /// Handles the route of about page of the `websurfx` meta search engine website.
 #[get("/about")]
 pub async fn about(config: web::Data<Config>) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    Ok(HttpResponse::Ok()
-        .0,
-    ))
+    Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
+        crate::templates::views::about::about(
+            &config.style.colorscheme,
+            &config.style.theme,
+            &config.style.animation,
+        )
+        .0,
+    ))
 }
 
 /// Handles the route of settings page of the `websurfx` meta search engine website.
@@ -71,16 +65,14 @@ pub async fn about(config: web::Data<Config>) -> Result<HttpResponse, Box<dyn st
 pub async fn settings(
     config: web::Data<Config>,
 ) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    Ok(HttpResponse::Ok()
-        .0,
-    ))
+    Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
+        crate::templates::views::settings::settings(
+            config.safe_search,
+            &config.style.colorscheme,
+            &config.style.theme,
+            &config.style.animation,
+            &config.upstream_search_engines,
+        )?
+        .0,
+    ))
 }
src/server/routes/search.rs
CHANGED
@@ -11,7 +11,7 @@ use crate::{
     },
     results::aggregator::aggregate,
 };
-use actix_web::{get, web, HttpRequest, HttpResponse};
+use actix_web::{get, http::header::ContentType, web, HttpRequest, HttpResponse};
 use regex::Regex;
 use std::{
     fs::File,
@@ -68,18 +68,16 @@ pub async fn search(
                 get_results(page + 1)
             );
 
-            Ok(HttpResponse::Ok()
-                .0,
-            ))
+            Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
+                crate::templates::views::search::search(
+                    &config.style.colorscheme,
+                    &config.style.theme,
+                    &config.style.animation,
+                    query,
+                    &results?,
+                )
+                .0,
+            ))
         }
         None => Ok(HttpResponse::TemporaryRedirect()
             .insert_header(("location", "/"))
websurfx/config.lua
CHANGED
@@ -65,4 +65,5 @@ upstream_search_engines = {
     Startpage = false,
     LibreX = false,
     Mojeek = false,
+    Bing = false,
 } -- select the upstream search engines from which the results should be fetched.