Trickshotblaster
committed on
Commit
·
99936bc
1
Parent(s):
2dbffcc
Rename getter functions
Browse files
- src/cache/cacher.rs +1 -1
- src/config/parser.rs +2 -2
- src/handler/public_paths.rs +1 -1
- src/lib.rs +2 -2
- src/server/routes.rs +5 -5
src/cache/cacher.rs
CHANGED
|
@@ -41,7 +41,7 @@ impl RedisCache {
|
|
| 41 |
/// # Arguments
|
| 42 |
///
|
| 43 |
/// * `url` - It takes an url as a string.
|
| 44 |
-
pub fn
|
| 45 |
let hashed_url_string = Self::hash_url(url);
|
| 46 |
Ok(self.connection.get(hashed_url_string)?)
|
| 47 |
}
|
|
|
|
| 41 |
/// # Arguments
|
| 42 |
///
|
| 43 |
/// * `url` - It takes an url as a string.
|
| 44 |
+
pub fn cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
|
| 45 |
let hashed_url_string = Self::hash_url(url);
|
| 46 |
Ok(self.connection.get(hashed_url_string)?)
|
| 47 |
}
|
src/config/parser.rs
CHANGED
|
@@ -50,7 +50,7 @@ impl Config {
|
|
| 50 |
let globals = context.globals();
|
| 51 |
|
| 52 |
context
|
| 53 |
-
.load(&fs::read_to_string(Config::
|
| 54 |
.exec()?;
|
| 55 |
|
| 56 |
Ok(Config {
|
|
@@ -81,7 +81,7 @@ impl Config {
|
|
| 81 |
/// one (3).
|
| 82 |
/// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present
|
| 83 |
/// here then it returns an error as mentioned above.
|
| 84 |
-
fn
|
| 85 |
// check user config
|
| 86 |
|
| 87 |
let path = format!(
|
|
|
|
| 50 |
let globals = context.globals();
|
| 51 |
|
| 52 |
context
|
| 53 |
+
.load(&fs::read_to_string(Config::config_path()?)?)
|
| 54 |
.exec()?;
|
| 55 |
|
| 56 |
Ok(Config {
|
|
|
|
| 81 |
/// one (3).
|
| 82 |
/// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present
|
| 83 |
/// here then it returns an error as mentioned above.
|
| 84 |
+
fn config_path() -> Result<String, Box<dyn std::error::Error>> {
|
| 85 |
// check user config
|
| 86 |
|
| 87 |
let path = format!(
|
src/handler/public_paths.rs
CHANGED
|
@@ -17,7 +17,7 @@ static PUBLIC_DIRECTORY_NAME: &str = "public";
|
|
| 17 |
/// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2)
|
| 18 |
/// 2. Under project folder ( or codebase in other words) if it is not present
|
| 19 |
/// here then it returns an error as mentioned above.
|
| 20 |
-
pub fn
|
| 21 |
if Path::new(format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
|
| 22 |
return Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME));
|
| 23 |
}
|
|
|
|
| 17 |
/// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2)
|
| 18 |
/// 2. Under project folder ( or codebase in other words) if it is not present
|
| 19 |
/// here then it returns an error as mentioned above.
|
| 20 |
+
pub fn public_path() -> Result<String, Error> {
|
| 21 |
if Path::new(format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
|
| 22 |
return Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME));
|
| 23 |
}
|
src/lib.rs
CHANGED
|
@@ -16,7 +16,7 @@ use actix_files as fs;
|
|
| 16 |
use actix_web::{dev::Server, middleware::Logger, web, App, HttpServer};
|
| 17 |
use config::parser::Config;
|
| 18 |
use handlebars::Handlebars;
|
| 19 |
-
use handler::public_paths::
|
| 20 |
|
| 21 |
/// Runs the web server on the provided TCP listener and returns a `Server` instance.
|
| 22 |
///
|
|
@@ -41,7 +41,7 @@ use handler::public_paths::get_public_path;
|
|
| 41 |
pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
|
| 42 |
let mut handlebars: Handlebars = Handlebars::new();
|
| 43 |
|
| 44 |
-
let public_folder_path: String =
|
| 45 |
|
| 46 |
handlebars
|
| 47 |
.register_templates_directory(".html", format!("{}/templates", public_folder_path))
|
|
|
|
| 16 |
use actix_web::{dev::Server, middleware::Logger, web, App, HttpServer};
|
| 17 |
use config::parser::Config;
|
| 18 |
use handlebars::Handlebars;
|
| 19 |
+
use handler::public_paths::public_path;
|
| 20 |
|
| 21 |
/// Runs the web server on the provided TCP listener and returns a `Server` instance.
|
| 22 |
///
|
|
|
|
| 41 |
pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
|
| 42 |
let mut handlebars: Handlebars = Handlebars::new();
|
| 43 |
|
| 44 |
+
let public_folder_path: String = public_path()?;
|
| 45 |
|
| 46 |
handlebars
|
| 47 |
.register_templates_directory(".html", format!("{}/templates", public_folder_path))
|
src/server/routes.rs
CHANGED
|
@@ -7,7 +7,7 @@ use std::fs::read_to_string;
|
|
| 7 |
use crate::{
|
| 8 |
cache::cacher::RedisCache,
|
| 9 |
config::parser::Config,
|
| 10 |
-
handler::public_paths::
|
| 11 |
results::{aggregation_models::SearchResults, aggregator::aggregate},
|
| 12 |
};
|
| 13 |
use actix_web::{get, web, HttpRequest, HttpResponse};
|
|
@@ -89,7 +89,7 @@ pub async fn search(
|
|
| 89 |
"http://{}:{}/search?q={}&page={}",
|
| 90 |
config.binding_ip, config.port, query, page
|
| 91 |
);
|
| 92 |
-
let results_json =
|
| 93 |
let page_content: String = hbs.render("search", &results_json)?;
|
| 94 |
Ok(HttpResponse::Ok().body(page_content))
|
| 95 |
}
|
|
@@ -101,7 +101,7 @@ pub async fn search(
|
|
| 101 |
|
| 102 |
/// Fetches the results for a query and page.
|
| 103 |
/// First checks the redis cache, if that fails it gets proper results
|
| 104 |
-
async fn
|
| 105 |
url: String,
|
| 106 |
config: &Config,
|
| 107 |
query: &str,
|
|
@@ -110,7 +110,7 @@ async fn get_results(
|
|
| 110 |
//Initialize redis cache connection struct
|
| 111 |
let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
|
| 112 |
// fetch the cached results json.
|
| 113 |
-
let cached_results_json = redis_cache.
|
| 114 |
// check if fetched results was indeed fetched or it was an error and if so
|
| 115 |
// handle the data accordingly.
|
| 116 |
match cached_results_json {
|
|
@@ -128,7 +128,7 @@ async fn get_results(
|
|
| 128 |
/// Handles the route of robots.txt page of the `websurfx` meta search engine website.
|
| 129 |
#[get("/robots.txt")]
|
| 130 |
pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
|
| 131 |
-
let page_content: String = read_to_string(format!("{}/robots.txt",
|
| 132 |
Ok(HttpResponse::Ok()
|
| 133 |
.content_type("text/plain; charset=ascii")
|
| 134 |
.body(page_content))
|
|
|
|
| 7 |
use crate::{
|
| 8 |
cache::cacher::RedisCache,
|
| 9 |
config::parser::Config,
|
| 10 |
+
handler::public_paths::public_path,
|
| 11 |
results::{aggregation_models::SearchResults, aggregator::aggregate},
|
| 12 |
};
|
| 13 |
use actix_web::{get, web, HttpRequest, HttpResponse};
|
|
|
|
| 89 |
"http://{}:{}/search?q={}&page={}",
|
| 90 |
config.binding_ip, config.port, query, page
|
| 91 |
);
|
| 92 |
+
let results_json = results(url, &config, query, page).await?;
|
| 93 |
let page_content: String = hbs.render("search", &results_json)?;
|
| 94 |
Ok(HttpResponse::Ok().body(page_content))
|
| 95 |
}
|
|
|
|
| 101 |
|
| 102 |
/// Fetches the results for a query and page.
|
| 103 |
/// First checks the redis cache, if that fails it gets proper results
|
| 104 |
+
async fn results(
|
| 105 |
url: String,
|
| 106 |
config: &Config,
|
| 107 |
query: &str,
|
|
|
|
| 110 |
//Initialize redis cache connection struct
|
| 111 |
let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
|
| 112 |
// fetch the cached results json.
|
| 113 |
+
let cached_results_json = redis_cache.cached_json(&url);
|
| 114 |
// check if fetched results was indeed fetched or it was an error and if so
|
| 115 |
// handle the data accordingly.
|
| 116 |
match cached_results_json {
|
|
|
|
| 128 |
/// Handles the route of robots.txt page of the `websurfx` meta search engine website.
|
| 129 |
#[get("/robots.txt")]
|
| 130 |
pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
|
| 131 |
+
let page_content: String = read_to_string(format!("{}/robots.txt", public_path()?))?;
|
| 132 |
Ok(HttpResponse::Ok()
|
| 133 |
.content_type("text/plain; charset=ascii")
|
| 134 |
.body(page_content))
|