Mirror of https://github.com/redlib-org/redlib.git (synced 2025-04-03 04:57:38 +03:00)

Merge branch 'main' into rss-add-created-field

This commit is contained in: d8f80280e0

6 changed files with 331 additions and 38 deletions
7  Cargo.lock (generated)

@@ -770,6 +770,12 @@ version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc"

+[[package]]
+name = "htmlescape"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e9025058dae765dee5070ec375f591e2ba14638c63feff74f13805a72e523163"
+
 [[package]]
 name = "http"
 version = "0.2.12"
@@ -1368,6 +1374,7 @@ dependencies = [
  "dotenvy",
  "fastrand",
  "futures-lite",
+ "htmlescape",
  "hyper",
  "hyper-rustls",
  "libflate",
Cargo.toml

@@ -52,6 +52,7 @@ hyper-rustls = { version = "0.24.2", features = [ "http2" ] }
 tegen = "0.1.4"
 serde_urlencoded = "0.7.1"
 chrono = { version = "0.4.39", default-features = false, features = [ "std" ] }
+htmlescape = "0.3.1"

 [dev-dependencies]
src/server.rs

@@ -25,7 +25,7 @@ use std::{
     str::{from_utf8, Split},
     string::ToString,
 };
-use time::Duration;
+use time::OffsetDateTime;

 use crate::dbg_msg;
@@ -170,10 +170,8 @@ impl ResponseExt for Response<Body> {
     }

     fn remove_cookie(&mut self, name: String) {
-        let mut cookie = Cookie::from(name);
-        cookie.set_path("/");
-        cookie.set_max_age(Duration::seconds(1));
-        if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
+        let removal_cookie = Cookie::build(name).path("/").http_only(true).expires(OffsetDateTime::now_utc());
+        if let Ok(val) = header::HeaderValue::from_str(&removal_cookie.to_string()) {
             self.headers_mut().append("Set-Cookie", val);
         }
     }
@@ -240,8 +238,14 @@ impl Server {
             path.pop();
         }

+        // Replace HEAD with GET for routing
+        let (method, is_head) = match req.method() {
+            &Method::HEAD => (&Method::GET, true),
+            method => (method, false),
+        };
+
         // Match the visited path with an added route
-        match router.recognize(&format!("/{}{}", req.method().as_str(), path)) {
+        match router.recognize(&format!("/{}{}", method.as_str(), path)) {
             // If a route was configured for this path
             Ok(found) => {
                 let mut parammed = req;
@@ -253,17 +257,21 @@
                 match func.await {
                     Ok(mut res) => {
                         res.headers_mut().extend(def_headers);
-                        let _ = compress_response(&req_headers, &mut res).await;
+                        if is_head {
+                            *res.body_mut() = Body::empty();
+                        } else {
+                            let _ = compress_response(&req_headers, &mut res).await;
+                        }

                         Ok(res)
                     }
-                    Err(msg) => new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await,
+                    Err(msg) => new_boilerplate(def_headers, req_headers, 500, if is_head { Body::empty() } else { Body::from(msg) }).await,
                 }
             }
             .boxed()
         }
         // If there was a routing error
-        Err(e) => new_boilerplate(def_headers, req_headers, 404, e.into()).boxed(),
+        Err(e) => new_boilerplate(def_headers, req_headers, 404, if is_head { Body::empty() } else { e.into() }).boxed(),
     }
 }))
}
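Aside (not part of the diff): the server.rs changes route HEAD requests through the GET handlers and then discard the body, so a HEAD response carries the same status and headers as the matching GET but no payload, and compression is skipped because there is nothing to compress. A minimal sketch of that pattern, with a hypothetical `finalize` helper that does not exist in redlib:

    use hyper::{Body, Method, Response};

    // Sketch only: given a response produced by the matching GET handler,
    // strip the body when the original request was HEAD.
    fn finalize(method: &Method, mut res: Response<Body>) -> Response<Body> {
        if method == &Method::HEAD {
            *res.body_mut() = Body::empty(); // status and headers are kept
        }
        res
    }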
116  src/settings.rs

@@ -4,6 +4,7 @@ use std::collections::HashMap;

 // CRATES
 use crate::server::ResponseExt;
+use crate::subreddit::join_until_size_limit;
 use crate::utils::{redirect, template, Preferences};
 use cookie::Cookie;
 use futures_lite::StreamExt;
@@ -119,7 +120,7 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {

     let mut response = redirect(&path);

-    for name in [PREFS.to_vec(), vec!["subscriptions", "filters"]].concat() {
+    for name in PREFS {
         match form.get(name) {
             Some(value) => response.insert_cookie(
                 Cookie::build((name.to_owned(), value.clone()))
@@ -136,6 +137,119 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {
         };
     }

+    // Get subscriptions/filters to restore from query string
+    let subscriptions = form.get("subscriptions");
+    let filters = form.get("filters");
+
+    // We can't search through the cookies directly like in subreddit.rs, so instead we have to make a string out of the request's headers to search through
+    let cookies_string = parts
+        .headers
+        .get("cookie")
+        .map(|hv| hv.to_str().unwrap_or("").to_string()) // Return String
+        .unwrap_or_else(String::new); // Return an empty string if None
+
+    // If there are subscriptions to restore set them and delete any old subscriptions cookies, otherwise delete them all
+    if subscriptions.is_some() {
+        let sub_list: Vec<String> = subscriptions.expect("Subscriptions").split('+').map(str::to_string).collect();
+
+        // Start at 0 to keep track of what number we need to start deleting old subscription cookies from
+        let mut subscriptions_number_to_delete_from = 0;
+
+        // Starting at 0 so we handle the subscription cookie without a number first
+        for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() {
+            let subscriptions_cookie = if subscriptions_number == 0 {
+                "subscriptions".to_string()
+            } else {
+                format!("subscriptions{}", subscriptions_number)
+            };
+
+            response.insert_cookie(
+                Cookie::build((subscriptions_cookie, list))
+                    .path("/")
+                    .http_only(true)
+                    .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+                    .into(),
+            );
+
+            subscriptions_number_to_delete_from += 1;
+        }
+
+        // While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie
+        while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) {
+            // Remove that subscriptions cookie
+            response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}"));
+
+            // Increment subscriptions cookie number
+            subscriptions_number_to_delete_from += 1;
+        }
+    } else {
+        // Remove unnumbered subscriptions cookie
+        response.remove_cookie("subscriptions".to_string());
+
+        // Starts at one to deal with the first numbered subscription cookie and onwards
+        let mut subscriptions_number_to_delete_from = 1;
+
+        // While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie
+        while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) {
+            // Remove that subscriptions cookie
+            response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}"));
+
+            // Increment subscriptions cookie number
+            subscriptions_number_to_delete_from += 1;
+        }
+    }
+
+    // If there are filters to restore set them and delete any old filters cookies, otherwise delete them all
+    if filters.is_some() {
+        let filters_list: Vec<String> = filters.expect("Filters").split('+').map(str::to_string).collect();
+
+        // Start at 0 to keep track of what number we need to start deleting old filters cookies from
+        let mut filters_number_to_delete_from = 0;
+
+        // Starting at 0 so we handle the filters cookie without a number first
+        for (filters_number, list) in join_until_size_limit(&filters_list).into_iter().enumerate() {
+            let filters_cookie = if filters_number == 0 {
+                "filters".to_string()
+            } else {
+                format!("filters{}", filters_number)
+            };
+
+            response.insert_cookie(
+                Cookie::build((filters_cookie, list))
+                    .path("/")
+                    .http_only(true)
+                    .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+                    .into(),
+            );
+
+            filters_number_to_delete_from += 1;
+        }
+
+        // While filtersNUMBER= is in the string of cookies add a response removing that cookie
+        while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) {
+            // Remove that filters cookie
+            response.remove_cookie(format!("filters{filters_number_to_delete_from}"));
+
+            // Increment filters cookie number
+            filters_number_to_delete_from += 1;
+        }
+    } else {
+        // Remove unnumbered filters cookie
+        response.remove_cookie("filters".to_string());
+
+        // Starts at one to deal with the first numbered filters cookie and onwards
+        let mut filters_number_to_delete_from = 1;
+
+        // While filtersNUMBER= is in the string of cookies add a response removing that cookie
+        while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) {
+            // Remove that filters cookie
+            response.remove_cookie(format!("filters{filters_number_to_delete_from}"));
+
+            // Increment filters cookie number
+            filters_number_to_delete_from += 1;
+        }
+    }
+
     response
 }
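Aside (not part of the diff): the restore path above and subreddit.rs share one chunk-naming convention — the first chunk keeps the bare cookie name, later chunks get a numeric suffix. A tiny sketch of that rule, with a made-up helper name:

    // Hypothetical helper illustrating the chunk-naming rule used above.
    fn chunk_cookie_name(base: &str, n: usize) -> String {
        if n == 0 {
            base.to_string() // "subscriptions", "filters"
        } else {
            format!("{base}{n}") // "subscriptions1", "filters2", ...
        }
    }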
138  src/subreddit.rs

@@ -215,6 +215,41 @@ pub fn can_access_quarantine(req: &Request<Body>, sub: &str) -> bool {
     setting(req, &format!("allow_quaran_{}", sub.to_lowercase())).parse().unwrap_or_default()
 }

+// Join items in chunks of 4000 bytes in length for cookies
+pub fn join_until_size_limit<T: std::fmt::Display>(vec: &[T]) -> Vec<std::string::String> {
+    let mut result = Vec::new();
+    let mut list = String::new();
+    let mut current_size = 0;
+
+    for item in vec {
+        // Size in bytes
+        let item_size = item.to_string().len();
+        // Use 4000 bytes to leave us some headroom because the name and options of the cookie count towards the 4096 byte cap
+        if current_size + item_size > 4000 {
+            // Add a trailing separator to the full chunk so it's interpreted properly in tandem with the next cookie
+            list.push('+');
+
+            // Push current list to result vector
+            result.push(list);
+
+            // Reset the list variable so we can continue with only new items
+            list = String::new();
+        }
+        // Add separator if not the first item
+        if !list.is_empty() {
+            list.push('+');
+        }
+        // Add current item to list
+        list.push_str(&item.to_string());
+        current_size = list.len() + item_size;
+    }
+    // Make sure to push whatever remaining subreddits there are into the result vector
+    result.push(list);
+
+    // Return resulting vector
+    result
+}
+
 // Sub, filter, unfilter, or unsub by setting subscription cookie using response "Set-Cookie" header
 pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>, String> {
     let sub = req.param("sub").unwrap_or_default();
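Quick usage sketch (not from the commit) of join_until_size_limit: every chunk except the last ends with a trailing '+', so plain concatenation of the chunks restores the original '+'-joined list — which is exactly what setting() in utils.rs relies on when it reassembles the cookies.

    // Assuming crate::subreddit::join_until_size_limit is in scope.
    let subs: Vec<String> = (0..2000).map(|i| format!("sub{i}")).collect();
    let chunks = join_until_size_limit(&subs);
    // The list is far larger than one cookie can hold, so it gets split...
    assert!(chunks.len() > 1);
    // ...and concatenating the chunks restores the original joined list,
    // because every chunk except the last ends with a trailing '+'.
    assert_eq!(chunks.concat(), subs.join("+"));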
@@ -307,28 +342,101 @@ pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>, String> {

     let mut response = redirect(&path);

-    // Delete cookie if empty, else set
+    // If sub_list is empty remove all subscriptions cookies, otherwise update them and remove old ones
     if sub_list.is_empty() {
+        // Remove subscriptions cookie
         response.remove_cookie("subscriptions".to_string());
+
+        // Start with first numbered subscriptions cookie
+        let mut subscriptions_number = 1;
+
+        // While whatever subscriptionsNUMBER cookie we're looking at has a value
+        while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() {
+            // Remove that subscriptions cookie
+            response.remove_cookie(format!("subscriptions{}", subscriptions_number));
+
+            // Increment subscriptions cookie number
+            subscriptions_number += 1;
+        }
     } else {
-        response.insert_cookie(
-            Cookie::build(("subscriptions", sub_list.join("+")))
-                .path("/")
-                .http_only(true)
-                .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
-                .into(),
-        );
+        // Start at 0 to keep track of what number we need to start deleting old subscription cookies from
+        let mut subscriptions_number_to_delete_from = 0;
+
+        // Starting at 0 so we handle the subscription cookie without a number first
+        for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() {
+            let subscriptions_cookie = if subscriptions_number == 0 {
+                "subscriptions".to_string()
+            } else {
+                format!("subscriptions{}", subscriptions_number)
+            };
+
+            response.insert_cookie(
+                Cookie::build((subscriptions_cookie, list))
+                    .path("/")
+                    .http_only(true)
+                    .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+                    .into(),
+            );
+
+            subscriptions_number_to_delete_from += 1;
+        }
+
+        // While whatever subscriptionsNUMBER cookie we're looking at has a value
+        while req.cookie(&format!("subscriptions{}", subscriptions_number_to_delete_from)).is_some() {
+            // Remove that subscriptions cookie
+            response.remove_cookie(format!("subscriptions{}", subscriptions_number_to_delete_from));
+
+            // Increment subscriptions cookie number
+            subscriptions_number_to_delete_from += 1;
+        }
     }

+    // If filters is empty remove all filters cookies, otherwise update them and remove old ones
     if filters.is_empty() {
+        // Remove filters cookie
         response.remove_cookie("filters".to_string());
+
+        // Start with first numbered filters cookie
+        let mut filters_number = 1;
+
+        // While whatever filtersNUMBER cookie we're looking at has a value
+        while req.cookie(&format!("filters{}", filters_number)).is_some() {
+            // Remove that filters cookie
+            response.remove_cookie(format!("filters{}", filters_number));
+
+            // Increment filters cookie number
+            filters_number += 1;
+        }
     } else {
-        response.insert_cookie(
-            Cookie::build(("filters", filters.join("+")))
-                .path("/")
-                .http_only(true)
-                .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
-                .into(),
-        );
+        // Start at 0 to keep track of what number we need to start deleting old filters cookies from
+        let mut filters_number_to_delete_from = 0;
+
+        for (filters_number, list) in join_until_size_limit(&filters).into_iter().enumerate() {
+            let filters_cookie = if filters_number == 0 {
+                "filters".to_string()
+            } else {
+                format!("filters{}", filters_number)
+            };
+
+            response.insert_cookie(
+                Cookie::build((filters_cookie, list))
+                    .path("/")
+                    .http_only(true)
+                    .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+                    .into(),
+            );
+
+            filters_number_to_delete_from += 1;
+        }
+
+        // While whatever filtersNUMBER cookie we're looking at has a value
+        while req.cookie(&format!("filters{}", filters_number_to_delete_from)).is_some() {
+            // Remove that filters cookie
+            response.remove_cookie(format!("filters{}", filters_number_to_delete_from));
+
+            // Increment filters cookie number
+            filters_number_to_delete_from += 1;
+        }
     }

     Ok(response)
81  src/utils.rs

@@ -7,6 +7,7 @@ use crate::config::{self, get_setting};
 //
 use crate::{client::json, server::RequestExt};
 use cookie::Cookie;
+use htmlescape::decode_html;
 use hyper::{Body, Request, Response};
 use log::error;
 use once_cell::sync::Lazy;
@@ -376,7 +377,7 @@ impl Post {
             let awards = Awards::parse(&data["all_awardings"]);

             // selftext_html is set for text posts when browsing.
-            let mut body = rewrite_urls(&val(post, "selftext_html"));
+            let mut body = rewrite_urls(&decode_html(&val(post, "selftext_html")).unwrap());
             if body.is_empty() {
                 body = rewrite_urls(&val(post, "body_html"));
             }
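For context (illustrative values, not taken from the Reddit API): htmlescape::decode_html turns the entity-encoded selftext_html payload back into real HTML before rewrite_urls runs, e.g.:

    use htmlescape::decode_html;

    // Hypothetical, entity-encoded input in the shape selftext_html arrives in.
    let encoded = "&lt;div class=\"md\"&gt;&lt;p&gt;Hello &amp;amp; welcome&lt;/p&gt;&lt;/div&gt;";
    let decoded = decode_html(encoded).unwrap_or_default();
    assert_eq!(decoded, "<div class=\"md\"><p>Hello &amp; welcome</p></div>");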
@@ -825,18 +826,72 @@ pub fn param(path: &str, value: &str) -> Option<String> {
 // Retrieve the value of a setting by name
 pub fn setting(req: &Request<Body>, name: &str) -> String {
-    // Parse a cookie value from request
-    req
-        .cookie(name)
-        .unwrap_or_else(|| {
-            // If there is no cookie for this setting, try receiving a default from the config
-            if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) {
-                Cookie::new(name, default)
-            } else {
-                Cookie::from(name)
-            }
-        })
-        .value()
-        .to_string()
+    // If this was called with "subscriptions" and the "subscriptions" cookie has a value
+    if name == "subscriptions" && req.cookie("subscriptions").is_some() {
+        // Create subscriptions string
+        let mut subscriptions = String::new();
+
+        // Default subscriptions cookie
+        if req.cookie("subscriptions").is_some() {
+            subscriptions.push_str(req.cookie("subscriptions").unwrap().value());
+        }
+
+        // Start with first numbered subscription cookie
+        let mut subscriptions_number = 1;
+
+        // While whatever subscriptionsNUMBER cookie we're looking at has a value
+        while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() {
+            // Push whatever subscriptionsNUMBER cookie we're looking at into the subscriptions string
+            subscriptions.push_str(req.cookie(&format!("subscriptions{}", subscriptions_number)).unwrap().value());
+
+            // Increment subscription cookie number
+            subscriptions_number += 1;
+        }
+
+        // Return the subscriptions cookies as one large string
+        subscriptions
+    }
+    // If this was called with "filters" and the "filters" cookie has a value
+    else if name == "filters" && req.cookie("filters").is_some() {
+        // Create filters string
+        let mut filters = String::new();
+
+        // Default filters cookie
+        if req.cookie("filters").is_some() {
+            filters.push_str(req.cookie("filters").unwrap().value());
+        }
+
+        // Start with first numbered filters cookie
+        let mut filters_number = 1;
+
+        // While whatever filtersNUMBER cookie we're looking at has a value
+        while req.cookie(&format!("filters{}", filters_number)).is_some() {
+            // Push whatever filtersNUMBER cookie we're looking at into the filters string
+            filters.push_str(req.cookie(&format!("filters{}", filters_number)).unwrap().value());
+
+            // Increment filters cookie number
+            filters_number += 1;
+        }
+
+        // Return the filters cookies as one large string
+        filters
+    }
+    // The above two still come to this if there was no existing value
+    else {
+        req
+            .cookie(name)
+            .unwrap_or_else(|| {
+                // If there is no cookie for this setting, try receiving a default from the config
+                if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) {
+                    Cookie::new(name, default)
+                } else {
+                    Cookie::from(name)
+                }
+            })
+            .value()
+            .to_string()
+    }
 }

 // Retrieve the value of a setting by name or the default value
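Aside (a sketch, not code from this commit): the reassembly rule setting() now follows is simply "concatenate the base cookie with every consecutively numbered one". Expressed as a self-contained helper, where collect_chunked and get_cookie are made-up names:

    // Hypothetical helper showing the reassembly rule used by setting() above.
    fn collect_chunked(get_cookie: impl Fn(&str) -> Option<String>, base: &str) -> String {
        let mut value = String::new();
        if let Some(v) = get_cookie(base) {
            value.push_str(&v); // the unnumbered cookie holds the first chunk
        }
        let mut n = 1;
        while let Some(v) = get_cookie(&format!("{base}{n}")) {
            value.push_str(&v); // numbered cookies hold the overflow chunks, in order
            n += 1;
        }
        value // e.g. "sub1+sub2+" followed by "sub3+sub4" becomes "sub1+sub2+sub3+sub4"
    }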