+- `-a`, `--address <ADDRESS>`: Sets address to listen on. Default is `[::]`.
+- `-p`, `--port <PORT>`: Port to listen on. Default is `8080`.
+- `-H`, `--hsts <EXPIRE_TIME>`: HSTS header to tell browsers that this site should only be accessed over HTTPS. Default is `604800`.
+
## Instance settings
Assign a default value for each instance-specific setting by passing environment variables to Redlib in the format `REDLIB_{X}`. Replace `{X}` with the setting name (see list below) in capital letters.
@@ -429,7 +440,7 @@ Assign a default value for each user-modifiable setting by passing environment variables
| `WIDE` | `["on", "off"]` | `off` |
| `POST_SORT` | `["hot", "new", "top", "rising", "controversial"]` | `hot` |
| `COMMENT_SORT` | `["confidence", "top", "new", "controversial", "old"]` | `confidence` |
-| `BLUR_SPOILER` | `["on", "off"]` | `off` |
+| `BLUR_SPOILER` | `["on", "off"]` | `off` |
| `SHOW_NSFW` | `["on", "off"]` | `off` |
| `BLUR_NSFW` | `["on", "off"]` | `off` |
| `USE_HLS` | `["on", "off"]` | `off` |
@@ -441,3 +452,4 @@ Assign a default value for each user-modifiable setting by passing environment variables
| `HIDE_SCORE` | `["on", "off"]` | `off` |
| `HIDE_SIDEBAR_AND_SUMMARY` | `["on", "off"]` | `off` |
| `FIXED_NAVBAR` | `["on", "off"]` | `on` |
+| `REMOVE_DEFAULT_FEEDS` | `["on", "off"]` | `off` |
\ No newline at end of file
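
As a hedged aside (not part of the patch): the `REDLIB_DEFAULT_{X}` convention above feeds the cookie-or-env fallback implemented by `setting()` in `src/utils.rs` further down. A minimal sketch of that lookup order, where `resolve_setting` and `cookie_value` are hypothetical stand-ins for the real request plumbing:

```rust
use std::env;

// Sketch only: a user setting resolves to the request cookie first,
// then falls back to the instance-wide REDLIB_DEFAULT_{X} env var.
fn resolve_setting(name: &str, cookie_value: Option<&str>) -> String {
    match cookie_value {
        Some(v) => v.to_string(),
        None => env::var(format!("REDLIB_DEFAULT_{}", name.to_uppercase())).unwrap_or_default(),
    }
}

fn main() {
    env::set_var("REDLIB_DEFAULT_COMMENT_SORT", "top");
    assert_eq!(resolve_setting("comment_sort", None), "top");
    assert_eq!(resolve_setting("comment_sort", Some("new")), "new");
}
```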
diff --git a/flake.lock b/flake.lock
index 4569244..2b0b585 100644
--- a/flake.lock
+++ b/flake.lock
@@ -1,17 +1,12 @@
{
"nodes": {
"crane": {
- "inputs": {
- "nixpkgs": [
- "nixpkgs"
- ]
- },
"locked": {
- "lastModified": 1717025063,
- "narHash": "sha256-dIubLa56W9sNNz0e8jGxrX3CAkPXsq7snuFA/Ie6dn8=",
+ "lastModified": 1731974733,
+ "narHash": "sha256-enYSSZVVl15FI5p+0Y5/Ckf5DZAvXe6fBrHxyhA/njc=",
"owner": "ipetkov",
"repo": "crane",
- "rev": "480dff0be03dac0e51a8dfc26e882b0d123a450e",
+ "rev": "3cb338ce81076ce5e461cf77f7824476addb0e1c",
"type": "github"
},
"original": {
@@ -25,11 +20,11 @@
"systems": "systems"
},
"locked": {
- "lastModified": 1710146030,
- "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
+ "lastModified": 1731533236,
+ "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
- "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
+ "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
@@ -40,11 +35,11 @@
},
"nixpkgs": {
"locked": {
- "lastModified": 1717112898,
- "narHash": "sha256-7R2ZvOnvd9h8fDd65p0JnB7wXfUvreox3xFdYWd1BnY=",
+ "lastModified": 1731890469,
+ "narHash": "sha256-D1FNZ70NmQEwNxpSSdTXCSklBH1z2isPR84J6DQrJGs=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "6132b0f6e344ce2fe34fc051b72fb46e34f668e0",
+ "rev": "5083ec887760adfe12af64830a66807423a859a7",
"type": "github"
},
"original": {
@@ -64,19 +59,16 @@
},
"rust-overlay": {
"inputs": {
- "flake-utils": [
- "flake-utils"
- ],
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
- "lastModified": 1717121863,
- "narHash": "sha256-/3sxIe7MZqF/jw1RTQCSmgTjwVod43mmrk84m50MJQ4=",
+ "lastModified": 1732069891,
+ "narHash": "sha256-moKx8AVJrViCSdA0e0nSsG8b1dAsObI4sRAtbqbvBY8=",
"owner": "oxalica",
"repo": "rust-overlay",
- "rev": "2a7b53172ed08f856b8382d7dcfd36a4e0cbd866",
+ "rev": "8509a51241c407d583b1963d5079585a992506e8",
"type": "github"
},
"original": {
diff --git a/flake.nix b/flake.nix
index 8bcacf6..0180c8d 100644
--- a/flake.nix
+++ b/flake.nix
@@ -4,19 +4,13 @@
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
- crane = {
- url = "github:ipetkov/crane";
- inputs.nixpkgs.follows = "nixpkgs";
- };
+ crane.url = "github:ipetkov/crane";
flake-utils.url = "github:numtide/flake-utils";
rust-overlay = {
url = "github:oxalica/rust-overlay";
- inputs = {
- nixpkgs.follows = "nixpkgs";
- flake-utils.follows = "flake-utils";
- };
+ inputs.nixpkgs.follows = "nixpkgs";
};
};
diff --git a/src/client.rs b/src/client.rs
index fa32fc0..76369ca 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -544,12 +544,6 @@ async fn test_obfuscated_share_link() {
assert_eq!(canonical_path(share_link, 3).await, Ok(Some(canonical_link)));
}
-#[tokio::test(flavor = "multi_thread")]
-async fn test_share_link_strip_json() {
- let link = "/17krzvz".into();
- let canonical_link = "/comments/17krzvz".into();
- assert_eq!(canonical_path(link, 3).await, Ok(Some(canonical_link)));
-}
#[tokio::test(flavor = "multi_thread")]
async fn test_private_sub() {
let link = json("/r/suicide/about.json?raw_json=1".into(), true).await;
diff --git a/src/main.rs b/src/main.rs
index 542f1e8..9b34e75 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -128,6 +128,8 @@ async fn main() {
let matches = Command::new("Redlib")
.version(env!("CARGO_PKG_VERSION"))
.about("Private front-end for Reddit written in Rust ")
+ .arg(Arg::new("ipv4-only").short('4').long("ipv4-only").help("Listen on IPv4 only").num_args(0))
+ .arg(Arg::new("ipv6-only").short('6').long("ipv6-only").help("Listen on IPv6 only").num_args(0))
.arg(
Arg::new("redirect-https")
.short('r')
@@ -184,7 +186,16 @@ async fn main() {
let port = matches.get_one::<String>("port").unwrap();
let hsts = matches.get_one("hsts").map(|m: &String| m.as_str());
- let listener = [address, ":", port].concat();
+ let ipv4_only = std::env::var("IPV4_ONLY").is_ok() || matches.get_flag("ipv4-only");
+ let ipv6_only = std::env::var("IPV6_ONLY").is_ok() || matches.get_flag("ipv6-only");
+
+ let listener = if ipv4_only {
+ format!("0.0.0.0:{}", port)
+ } else if ipv6_only {
+ format!("[::]:{}", port)
+ } else {
+ [address, ":", port].concat()
+ };
println!("Starting Redlib...");
@@ -255,6 +266,7 @@ async fn main() {
app
.at("/check_update.js")
.get(|_| resource(include_str!("../static/check_update.js"), "text/javascript", false).boxed());
+ app.at("/copy.js").get(|_| resource(include_str!("../static/copy.js"), "text/javascript", false).boxed());
app.at("/commits.atom").get(|_| async move { proxy_commit_info().await }.boxed());
app.at("/instances.json").get(|_| async move { proxy_instances().await }.boxed());
@@ -293,6 +305,7 @@ async fn main() {
// Configure settings
app.at("/settings").get(|r| settings::get(r).boxed()).post(|r| settings::set(r).boxed());
app.at("/settings/restore").get(|r| settings::restore(r).boxed());
+ app.at("/settings/encoded-restore").post(|r| settings::encoded_restore(r).boxed());
app.at("/settings/update").get(|r| settings::update(r).boxed());
// RSS Subscriptions
@@ -389,7 +402,7 @@ async fn main() {
Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await,
// Short link for post
- Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/{id}"), 3).await {
+ Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/comments/{id}"), 3).await {
Ok(path_opt) => match path_opt {
Some(path) => Ok(redirect(&path)),
None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await,
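
A hedged sketch of the listener-selection precedence the main.rs hunk above introduces (the standalone `pick_listener` is illustrative; in the patch this logic is inline in `main()`, and the asserts assume neither env var is set):

```rust
// IPV4_ONLY/IPV6_ONLY env vars or the -4/-6 flags override the
// configured address; otherwise the plain address:port pair is used.
fn pick_listener(address: &str, port: &str, ipv4_flag: bool, ipv6_flag: bool) -> String {
    let ipv4_only = std::env::var("IPV4_ONLY").is_ok() || ipv4_flag;
    let ipv6_only = std::env::var("IPV6_ONLY").is_ok() || ipv6_flag;
    if ipv4_only {
        format!("0.0.0.0:{port}")
    } else if ipv6_only {
        format!("[::]:{port}")
    } else {
        format!("{address}:{port}")
    }
}

fn main() {
    assert_eq!(pick_listener("[::]", "8080", true, false), "0.0.0.0:8080");
    assert_eq!(pick_listener("127.0.0.1", "8080", false, true), "[::]:8080");
}
```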
diff --git a/src/scraper/main.rs b/src/scraper/main.rs
deleted file mode 100644
index f2e48d6..0000000
--- a/src/scraper/main.rs
+++ /dev/null
@@ -1,132 +0,0 @@
-use std::{collections::HashMap, fmt::Display, io::Write};
-
-use clap::{Parser, ValueEnum};
-use common_words_all::{get_top, Language, NgramSize};
-use redlib::utils::Post;
-
-#[derive(Parser)]
-#[command(name = "my_cli")]
-#[command(about = "A simple CLI example", long_about = None)]
-struct Cli {
- #[arg(short = 's', long = "sub")]
- sub: String,
-
- #[arg(long = "sort")]
- sort: SortOrder,
-
- #[arg(short = 'f', long = "format", value_enum)]
- format: Format,
- #[arg(short = 'o', long = "output")]
- output: Option<String>,
-}
-
-#[derive(Debug, Clone, ValueEnum)]
-enum SortOrder {
- Hot,
- Rising,
- New,
- Top,
- Controversial,
-}
-
-impl Display for SortOrder {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- SortOrder::Hot => write!(f, "hot"),
- SortOrder::Rising => write!(f, "rising"),
- SortOrder::New => write!(f, "new"),
- SortOrder::Top => write!(f, "top"),
- SortOrder::Controversial => write!(f, "controversial"),
- }
- }
-}
-
-#[derive(Debug, Clone, ValueEnum)]
-enum Format {
- Json,
-}
-
-#[tokio::main]
-async fn main() {
- pretty_env_logger::init();
- let cli = Cli::parse();
- let (sub, sort, format, output) = (cli.sub, cli.sort, cli.format, cli.output);
- let initial = format!("/r/{sub}/{sort}.json?&raw_json=1");
- let (posts, mut after) = Post::fetch(&initial, false).await.unwrap();
- let mut hashmap = HashMap::new();
- hashmap.extend(posts.into_iter().map(|post| (post.id.clone(), post)));
- loop {
- print!("\r");
- let path = format!("/r/{sub}/{sort}.json?sort={sort}&t=&after={after}&raw_json=1");
- let (new_posts, new_after) = Post::fetch(&path, false).await.unwrap();
- let old_len = hashmap.len();
- // convert to hashmap and extend hashmap
- let new_posts = new_posts.into_iter().map(|post| (post.id.clone(), post)).collect::<HashMap<String, Post>>();
- let len = new_posts.len();
- hashmap.extend(new_posts);
- if hashmap.len() - old_len < 3 {
- break;
- }
-
- let x = hashmap.len() - old_len;
- after = new_after;
- // Print number of posts fetched
- print!("Fetched {len} posts (+{x})",);
- std::io::stdout().flush().unwrap();
- }
- println!("\n\n");
- // additionally search if final count not reached
-
- for word in get_top(Language::English, 10_000, NgramSize::One) {
- let mut retrieved_posts_from_search = 0;
- let initial = format!("/r/{sub}/search.json?q={word}&restrict_sr=on&include_over_18=on&raw_json=1&sort={sort}");
- println!("Grabbing posts with word {word}.");
- let (posts, mut after) = Post::fetch(&initial, false).await.unwrap();
- hashmap.extend(posts.into_iter().map(|post| (post.id.clone(), post)));
- 'search: loop {
- let path = format!("/r/{sub}/search.json?q={word}&restrict_sr=on&include_over_18=on&raw_json=1&sort={sort}&after={after}");
- let (new_posts, new_after) = Post::fetch(&path, false).await.unwrap();
- if new_posts.is_empty() || new_after.is_empty() {
- println!("No more posts for word {word}");
- break 'search;
- }
- retrieved_posts_from_search += new_posts.len();
- let old_len = hashmap.len();
- let new_posts = new_posts.into_iter().map(|post| (post.id.clone(), post)).collect::<HashMap<String, Post>>();
- let len = new_posts.len();
- hashmap.extend(new_posts);
- let delta = hashmap.len() - old_len;
- after = new_after;
- // Print number of posts fetched
- println!("Fetched {len} posts (+{delta})",);
-
- if retrieved_posts_from_search > 1000 {
- println!("Reached 1000 posts from search");
- break 'search;
- }
- }
- // Need to save incrementally. atomic save + move
- let tmp_file = output.clone().unwrap_or_else(|| format!("{sub}.json.tmp"));
- let perm_file = output.clone().unwrap_or_else(|| format!("{sub}.json"));
- write_posts(&hashmap.values().collect(), tmp_file.clone());
- // move file
- std::fs::rename(tmp_file, perm_file).unwrap();
- }
-
- println!("\n\n");
-
- println!("Size of hashmap: {}", hashmap.len());
-
- let posts: Vec<&Post> = hashmap.values().collect();
- match format {
- Format::Json => {
- let filename: String = output.unwrap_or_else(|| format!("{sub}.json"));
- write_posts(&posts, filename);
- }
- }
-}
-
-fn write_posts(posts: &Vec<&Post>, filename: String) {
- let json = serde_json::to_string(&posts).unwrap();
- std::fs::write(filename, json).unwrap();
-}
diff --git a/src/server.rs b/src/server.rs
index 15c56ad..a287de2 100644
--- a/src/server.rs
+++ b/src/server.rs
@@ -25,7 +25,7 @@ use std::{
str::{from_utf8, Split},
string::ToString,
};
-use time::Duration;
+use time::OffsetDateTime;
use crate::dbg_msg;
@@ -170,10 +178,8 @@ impl ResponseExt for Response<Body> {
}
fn remove_cookie(&mut self, name: String) {
- let mut cookie = Cookie::from(name);
- cookie.set_path("/");
- cookie.set_max_age(Duration::seconds(1));
- if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
+ let removal_cookie = Cookie::build(name).path("/").http_only(true).expires(OffsetDateTime::now_utc());
+ if let Ok(val) = header::HeaderValue::from_str(&removal_cookie.to_string()) {
self.headers_mut().append("Set-Cookie", val);
}
}
@@ -240,8 +238,14 @@ impl Server {
path.pop();
}
+ // Replace HEAD with GET for routing
+ let (method, is_head) = match req.method() {
+ &Method::HEAD => (&Method::GET, true),
+ method => (method, false),
+ };
+
// Match the visited path with an added route
- match router.recognize(&format!("/{}{}", req.method().as_str(), path)) {
+ match router.recognize(&format!("/{}{}", method.as_str(), path)) {
// If a route was configured for this path
Ok(found) => {
let mut parammed = req;
@@ -253,17 +257,21 @@ impl Server {
match func.await {
Ok(mut res) => {
res.headers_mut().extend(def_headers);
- let _ = compress_response(&req_headers, &mut res).await;
+ if is_head {
+ *res.body_mut() = Body::empty();
+ } else {
+ let _ = compress_response(&req_headers, &mut res).await;
+ }
Ok(res)
}
- Err(msg) => new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await,
+ Err(msg) => new_boilerplate(def_headers, req_headers, 500, if is_head { Body::empty() } else { Body::from(msg) }).await,
}
}
.boxed()
}
// If there was a routing error
- Err(e) => new_boilerplate(def_headers, req_headers, 404, e.into()).boxed(),
+ Err(e) => new_boilerplate(def_headers, req_headers, 404, if is_head { Body::empty() } else { e.into() }).boxed(),
}
}))
}
@@ -274,8 +282,19 @@ impl Server {
// Bind server to address specified above. Gracefully shut down if CTRL+C is pressed
let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async {
+ #[cfg(windows)]
// Wait for the CTRL+C signal
tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler");
+
+ #[cfg(unix)]
+ {
+ // Wait for CTRL+C or SIGTERM signals
+ let mut signal_terminate = tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()).expect("Failed to install SIGTERM signal handler");
+ tokio::select! {
+ _ = tokio::signal::ctrl_c() => (),
+ _ = signal_terminate.recv() => ()
+ }
+ }
});
server.boxed()
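
For reference, a hedged standalone sketch of the same shutdown pattern (Unix-only; assumes tokio's `signal` and `macros` features): whichever of CTRL+C or SIGTERM fires first completes the future, which is exactly what `with_graceful_shutdown` needs.

```rust
use tokio::signal::unix::{signal, SignalKind};

#[tokio::main]
async fn main() {
    // Wait for CTRL+C or SIGTERM, whichever arrives first.
    let mut sigterm = signal(SignalKind::terminate()).expect("Failed to install SIGTERM signal handler");
    tokio::select! {
        _ = tokio::signal::ctrl_c() => println!("received CTRL+C"),
        _ = sigterm.recv() => println!("received SIGTERM"),
    }
}
```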
diff --git a/src/settings.rs b/src/settings.rs
index 0a8cabc..3029b55 100644
--- a/src/settings.rs
+++ b/src/settings.rs
@@ -4,12 +4,14 @@ use std::collections::HashMap;
// CRATES
use crate::server::ResponseExt;
-use crate::utils::{redirect, template, Preferences};
+use crate::subreddit::join_until_size_limit;
+use crate::utils::{deflate_decompress, redirect, template, Preferences};
use cookie::Cookie;
use futures_lite::StreamExt;
use hyper::{Body, Request, Response};
use rinja::Template;
use time::{Duration, OffsetDateTime};
+use url::form_urlencoded;
// STRUCTS
#[derive(Template)]
@@ -21,7 +23,7 @@ struct SettingsTemplate {
// CONSTANTS
-const PREFS: [&str; 19] = [
+const PREFS: [&str; 20] = [
"theme_light",
"theme_dark",
"front_page",
@@ -41,6 +43,7 @@ const PREFS: [&str; 19] = [
"hide_score",
"disable_visit_reddit_confirmation",
"video_quality",
+ "remove_default_feeds",
];
// FUNCTIONS
@@ -120,7 +123,7 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {
Some(value) => response.insert_cookie(
Cookie::build((name.to_owned(), value.clone()))
@@ -137,6 +140,119 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {
+ // If there are subscriptions to restore set them and delete any old subscriptions cookies, otherwise delete them all
+ if subscriptions.is_some() {
+ let sub_list: Vec<String> = subscriptions.expect("Subscriptions").split('+').map(str::to_string).collect();
+
+ // Start at 0 to keep track of what number we need to start deleting old subscription cookies from
+ let mut subscriptions_number_to_delete_from = 0;
+
+ // Starting at 0 so we handle the subscription cookie without a number first
+ for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() {
+ let subscriptions_cookie = if subscriptions_number == 0 {
+ "subscriptions".to_string()
+ } else {
+ format!("subscriptions{}", subscriptions_number)
+ };
+
+ response.insert_cookie(
+ Cookie::build((subscriptions_cookie, list))
+ .path("/")
+ .http_only(true)
+ .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+ .into(),
+ );
+
+ subscriptions_number_to_delete_from += 1;
+ }
+
+ // While subscriptionsNUMBER= appears in the cookie string, append a removal for that cookie
+ while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) {
+ // Remove that subscriptions cookie
+ response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}"));
+
+ // Increment subscriptions cookie number
+ subscriptions_number_to_delete_from += 1;
+ }
+ } else {
+ // Remove unnumbered subscriptions cookie
+ response.remove_cookie("subscriptions".to_string());
+
+ // Starts at one to deal with the first numbered subscription cookie and onwards
+ let mut subscriptions_number_to_delete_from = 1;
+
+ // While subscriptionsNUMBER= appears in the cookie string, append a removal for that cookie
+ while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) {
+ // Remove that subscriptions cookie
+ response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}"));
+
+ // Increment subscriptions cookie number
+ subscriptions_number_to_delete_from += 1;
+ }
+ }
+
+ // If there are filters to restore set them and delete any old filters cookies, otherwise delete them all
+ if filters.is_some() {
+ let filters_list: Vec<String> = filters.expect("Filters").split('+').map(str::to_string).collect();
+
+ // Start at 0 to keep track of what number we need to start deleting old filters cookies from
+ let mut filters_number_to_delete_from = 0;
+
+ // Starting at 0 so we handle the filters cookie without a number first
+ for (filters_number, list) in join_until_size_limit(&filters_list).into_iter().enumerate() {
+ let filters_cookie = if filters_number == 0 {
+ "filters".to_string()
+ } else {
+ format!("filters{}", filters_number)
+ };
+
+ response.insert_cookie(
+ Cookie::build((filters_cookie, list))
+ .path("/")
+ .http_only(true)
+ .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+ .into(),
+ );
+
+ filters_number_to_delete_from += 1;
+ }
+
+ // While filtersNUMBER= appears in the cookie string, append a removal for that cookie
+ while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) {
+ // Remove that filters cookie
+ response.remove_cookie(format!("filters{filters_number_to_delete_from}"));
+
+ // Increment filters cookie number
+ filters_number_to_delete_from += 1;
+ }
+ } else {
+ // Remove unnumbered filters cookie
+ response.remove_cookie("filters".to_string());
+
+ // Starts at one to deal with the first numbered filters cookie and onwards
+ let mut filters_number_to_delete_from = 1;
+
+ // While filtersNUMBER= appears in the cookie string, append a removal for that cookie
+ while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) {
+ // Remove that filters cookie
+ response.remove_cookie(format!("filters{filters_number_to_delete_from}"));
+
+ // Increment filters cookie number
+ filters_number_to_delete_from += 1;
+ }
+ }
+
response
}
@@ -148,3 +264,25 @@ pub async fn restore(req: Request<Body>) -> Result<Response<Body>, String> {
pub async fn update(req: Request<Body>) -> Result<Response<Body>, String> {
Ok(set_cookies_method(req, false))
}
+
+pub async fn encoded_restore(req: Request<Body>) -> Result<Response<Body>, String> {
+ let body = hyper::body::to_bytes(req.into_body())
+ .await
+ .map_err(|e| format!("Failed to get bytes from request body: {}", e))?;
+
+ let encoded_prefs = form_urlencoded::parse(&body)
+ .find(|(key, _)| key == "encoded_prefs")
+ .map(|(_, value)| value)
+ .ok_or_else(|| "encoded_prefs parameter not found in request body".to_string())?;
+
+ let bytes = base2048::decode(&encoded_prefs).ok_or_else(|| "Failed to decode base2048 encoded preferences".to_string())?;
+
+ let out = deflate_decompress(bytes)?;
+
+ let mut prefs: Preferences = bincode::deserialize(&out).map_err(|e| format!("Failed to deserialize bytes into Preferences struct: {}", e))?;
+ prefs.available_themes = vec![];
+
+ let url = format!("/settings/restore/?{}", prefs.to_urlencoded()?);
+
+ Ok(redirect(&url))
+}
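
Reviewer note, a hedged sketch of the pipeline `encoded_restore` reverses (it leans on the `Preferences` helpers and `deflate_decompress` added in `src/utils.rs` below; error handling trimmed):

```rust
// Sketch, not part of the patch: encode preferences the way the settings
// export does, then decode them the way encoded_restore does.
fn round_trip(prefs: &Preferences) -> Result<Preferences, String> {
    // Preferences -> bincode -> DEFLATE -> base2048 text
    let encoded = prefs.to_bincode_str()?;
    // base2048 text -> DEFLATE bytes -> bincode -> Preferences
    let bytes = base2048::decode(&encoded).ok_or_else(|| "invalid base2048".to_string())?;
    let raw = deflate_decompress(bytes)?;
    bincode::deserialize(&raw).map_err(|e| e.to_string())
}
```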
diff --git a/src/subreddit.rs b/src/subreddit.rs
index 88aa542..e6d1cca 100644
--- a/src/subreddit.rs
+++ b/src/subreddit.rs
@@ -3,14 +3,17 @@
use crate::{config, utils};
// CRATES
use crate::utils::{
- catch_random, error, filter_posts, format_num, format_url, get_filters, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit,
+ catch_random, error, filter_posts, format_num, format_url, get_filters, info, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences,
+ Subreddit,
};
use crate::{client::json, server::RequestExt, server::ResponseExt};
use cookie::Cookie;
+use htmlescape::decode_html;
use hyper::{Body, Request, Response};
-use log::{debug, trace};
+use log::debug;
use rinja::Template;
+use chrono::DateTime;
use once_cell::sync::Lazy;
use regex::Regex;
use time::{Duration, OffsetDateTime};
@@ -63,9 +66,9 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
// Build Reddit API path
let root = req.uri().path() == "/";
let query = req.uri().query().unwrap_or_default().to_string();
- trace!("query: {}", query);
let subscribed = setting(&req, "subscriptions");
let front_page = setting(&req, "front_page");
+ let remove_default_feeds = setting(&req, "remove_default_feeds") == "on";
let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string());
let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort));
@@ -78,6 +81,21 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
} else {
front_page.clone()
});
+
+ if (sub_name == "popular" || sub_name == "all") && remove_default_feeds {
+ if subscribed.is_empty() {
+ return info(req, "Subscribe to some subreddits! (Default feeds disabled in settings)").await;
+ } else {
+ // If there are subscribed subs but we still got here, the front_page pref must be set to something besides default.
+ // Tell user to go to settings and change front page to default.
+ return info(
+ req,
+ "You have subscribed to some subreddits, but your front page is not set to default. Visit settings and change front page to default.",
+ )
+ .await;
+ }
+ }
+
let quarantined = can_access_quarantine(&req, &sub_name) || root;
// Handle random subreddits
@@ -214,6 +232,41 @@ pub fn can_access_quarantine(req: &Request<Body>, sub: &str) -> bool {
setting(req, &format!("allow_quaran_{}", sub.to_lowercase())).parse().unwrap_or_default()
}
+// Join items into chunks of at most ~4000 bytes for storage in cookies
+pub fn join_until_size_limit<T: std::fmt::Display>(vec: &[T]) -> Vec<String> {
+ let mut result = Vec::new();
+ let mut list = String::new();
+ let mut current_size = 0;
+
+ for item in vec {
+ // Size in bytes
+ let item_size = item.to_string().len();
+ // Use 4000 bytes to leave us some headroom because the name and options of the cookie count towards the 4096 byte cap
+ if current_size + item_size > 4000 {
+ // Add a separator to the end of the full chunk so it's interpreted properly in tandem with the next cookie
+ list.push('+');
+
+ // Push current list to result vector
+ result.push(list);
+
+ // Reset the list variable so we can continue with only new items
+ list = String::new();
+ }
+ // Add separator if not the first item
+ if !list.is_empty() {
+ list.push('+');
+ }
+ // Add current item to list
+ list.push_str(&item.to_string());
+ // list already contains the item we just pushed, so its length is the running size
+ current_size = list.len();
+ }
+ // Push whatever items remain into the result vector
+ result.push(list);
+
+ // Return resulting vector
+ result
+}
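
A hedged usage sketch for `join_until_size_limit` (test name and item sizes are illustrative): chunks stay within the cookie budget, and because full chunks end in `+`, plain concatenation of the numbered cookies reassembles the original list, which is what `setting()` in `src/utils.rs` relies on.

```rust
// Illustrative test, not part of the patch; assumes no item is empty.
#[test]
fn chunks_fit_cookies_and_round_trip() {
    let subs: Vec<String> = (0..500).map(|i| format!("subreddit{i}")).collect();
    let chunks = join_until_size_limit(&subs);
    assert!(chunks.len() > 1);
    assert!(chunks.iter().all(|c| c.len() <= 4001)); // ~4000 bytes + trailing '+'
    let joined = chunks.concat();
    let restored: Vec<&str> = joined.split('+').filter(|s| !s.is_empty()).collect();
    assert_eq!(restored.len(), subs.len());
}
```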
+
// Sub, filter, unfilter, or unsub by setting subscription cookie using response "Set-Cookie" header
pub async fn subscriptions_filters(req: Request) -> Result, String> {
let sub = req.param("sub").unwrap_or_default();
@@ -306,28 +359,101 @@ pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>, String> {
let mut response = redirect(&path);
- // Delete cookie if empty, else set
+ // If sub_list is empty remove all subscriptions cookies, otherwise update them and remove old ones
if sub_list.is_empty() {
+ // Remove subscriptions cookie
response.remove_cookie("subscriptions".to_string());
+
+ // Start with first numbered subscriptions cookie
+ let mut subscriptions_number = 1;
+
+ // While whatever subscriptionsNUMBER cookie we're looking at has a value
+ while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() {
+ // Remove that subscriptions cookie
+ response.remove_cookie(format!("subscriptions{}", subscriptions_number));
+
+ // Increment subscriptions cookie number
+ subscriptions_number += 1;
+ }
} else {
- response.insert_cookie(
- Cookie::build(("subscriptions", sub_list.join("+")))
- .path("/")
- .http_only(true)
- .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
- .into(),
- );
+ // Start at 0 to keep track of what number we need to start deleting old subscription cookies from
+ let mut subscriptions_number_to_delete_from = 0;
+
+ // Starting at 0 so we handle the subscription cookie without a number first
+ for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() {
+ let subscriptions_cookie = if subscriptions_number == 0 {
+ "subscriptions".to_string()
+ } else {
+ format!("subscriptions{}", subscriptions_number)
+ };
+
+ response.insert_cookie(
+ Cookie::build((subscriptions_cookie, list))
+ .path("/")
+ .http_only(true)
+ .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+ .into(),
+ );
+
+ subscriptions_number_to_delete_from += 1;
+ }
+
+ // While whatever subscriptionsNUMBER cookie we're looking at has a value
+ while req.cookie(&format!("subscriptions{}", subscriptions_number_to_delete_from)).is_some() {
+ // Remove that subscriptions cookie
+ response.remove_cookie(format!("subscriptions{}", subscriptions_number_to_delete_from));
+
+ // Increment subscriptions cookie number
+ subscriptions_number_to_delete_from += 1;
+ }
}
+
+ // If filters is empty remove all filters cookies, otherwise update them and remove old ones
if filters.is_empty() {
+ // Remove filters cookie
response.remove_cookie("filters".to_string());
+
+ // Start with first numbered filters cookie
+ let mut filters_number = 1;
+
+ // While whatever filtersNUMBER cookie we're looking at has a value
+ while req.cookie(&format!("filters{}", filters_number)).is_some() {
+ // Remove that filters cookie
+ response.remove_cookie(format!("filters{}", filters_number));
+
+ // Increment filters cookie number
+ filters_number += 1;
+ }
} else {
- response.insert_cookie(
- Cookie::build(("filters", filters.join("+")))
- .path("/")
- .http_only(true)
- .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
- .into(),
- );
+ // Start at 0 to keep track of what number we need to start deleting old filters cookies from
+ let mut filters_number_to_delete_from = 0;
+
+ for (filters_number, list) in join_until_size_limit(&filters).into_iter().enumerate() {
+ let filters_cookie = if filters_number == 0 {
+ "filters".to_string()
+ } else {
+ format!("filters{}", filters_number)
+ };
+
+ response.insert_cookie(
+ Cookie::build((filters_cookie, list))
+ .path("/")
+ .http_only(true)
+ .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+ .into(),
+ );
+
+ filters_number_to_delete_from += 1;
+ }
+
+ // While whatever filtersNUMBER cookie we're looking at has a value
+ while req.cookie(&format!("filters{}", filters_number_to_delete_from)).is_some() {
+ // Remove that filters cookie
+ response.remove_cookie(format!("filters{}", filters_number_to_delete_from));
+
+ // Increment filters cookie number
+ filters_number_to_delete_from += 1;
+ }
}
Ok(response)
@@ -496,9 +622,10 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> {
.into_iter()
.map(|post| Item {
title: Some(post.title.to_string()),
- link: Some(utils::get_post_url(&post)),
+ link: Some(format_url(&utils::get_post_url(&post))),
author: Some(post.author.name),
- content: Some(rewrite_urls(&post.body)),
+ content: Some(rewrite_urls(&decode_html(&post.body).unwrap())),
+ pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()),
description: Some(format!(
"Comments",
config::get_setting("REDLIB_FULL_URL").unwrap_or_default(),
diff --git a/src/user.rs b/src/user.rs
index 50a4daa..592389d 100644
--- a/src/user.rs
+++ b/src/user.rs
@@ -5,6 +5,8 @@ use crate::client::json;
use crate::server::RequestExt;
use crate::utils::{error, filter_posts, format_url, get_filters, nsfw_landing, param, setting, template, Post, Preferences, User};
use crate::{config, utils};
+use chrono::DateTime;
+use htmlescape::decode_html;
use hyper::{Body, Request, Response};
use rinja::Template;
use time::{macros::format_description, OffsetDateTime};
@@ -163,9 +165,10 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> {
.into_iter()
.map(|post| Item {
title: Some(post.title.to_string()),
- link: Some(utils::get_post_url(&post)),
+ link: Some(format_url(&utils::get_post_url(&post))),
author: Some(post.author.name),
- content: Some(rewrite_urls(&post.body)),
+ pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()),
+ content: Some(rewrite_urls(&decode_html(&post.body).unwrap())),
..Default::default()
})
.collect::<Vec<Item>>(),
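
Side note on the `pub_date` lines in both RSS hunks, hedged: `chrono::DateTime::from_timestamp` returns an `Option`, so `unwrap_or_default()` falls back to the Unix epoch, and `to_rfc2822()` produces the format an RSS `<pubDate>` field expects.

```rust
use chrono::DateTime;

fn main() {
    // A Unix timestamp (2015-05-15 00:00:00 UTC) formatted for an RSS <pubDate>.
    let ts = DateTime::from_timestamp(1_431_648_000, 0).unwrap_or_default();
    assert_eq!(ts.to_rfc2822(), "Fri, 15 May 2015 00:00:00 +0000");
}
```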
diff --git a/src/utils.rs b/src/utils.rs
index 4ae3e1a..e39d851 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -8,16 +8,19 @@ use crate::config::{self, get_setting};
use crate::{client::json, server::RequestExt};
use cookie::Cookie;
use hyper::{Body, Request, Response};
+use libflate::deflate::{Decoder, Encoder};
use log::error;
use once_cell::sync::Lazy;
use regex::Regex;
+use revision::revisioned;
use rinja::Template;
use rust_embed::RustEmbed;
-use serde::{Serialize, Serializer};
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use serde_json_path::{JsonPath, JsonPathExt};
use std::collections::{HashMap, HashSet};
use std::env;
+use std::io::{Read, Write};
use std::str::FromStr;
use std::string::ToString;
use time::{macros::format_description, Duration, OffsetDateTime};
@@ -233,6 +236,14 @@ impl Media {
// If this post contains a gallery of images
gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]);
+ ("gallery", &data["url"], None)
+ } else if data["crosspost_parent_list"][0]["is_gallery"].as_bool().unwrap_or_default() {
+ // If this post contains a gallery of images
+ gallery = GalleryMedia::parse(
+ &data["crosspost_parent_list"][0]["gallery_data"]["items"],
+ &data["crosspost_parent_list"][0]["media_metadata"],
+ );
+
("gallery", &data["url"], None)
} else if data["is_reddit_media_domain"].as_bool().unwrap_or_default() && data["domain"] == "i.redd.it" {
// If this post contains a reddit media (image) URL.
@@ -542,6 +553,14 @@ pub struct ErrorTemplate {
pub url: String,
}
+#[derive(Template)]
+#[template(path = "info.html")]
+pub struct InfoTemplate {
+ pub msg: String,
+ pub prefs: Preferences,
+ pub url: String,
+}
+
/// Template for NSFW landing page. The landing page is displayed when a page's
/// content is wholly NSFW, but a user has not enabled the option to view NSFW
/// posts.
@@ -601,42 +620,78 @@ pub struct Params {
pub before: Option<String>,
}
-#[derive(Default, Serialize)]
+#[derive(Default, Serialize, Deserialize, Debug, PartialEq, Eq)]
+#[revisioned(revision = 1)]
pub struct Preferences {
- #[serde(skip)]
+ #[revision(start = 1)]
+ #[serde(skip_serializing, skip_deserializing)]
pub available_themes: Vec<String>,
+ #[revision(start = 1)]
pub theme_light: String,
+ #[revision(start = 1)]
pub theme_dark: String,
+ #[revision(start = 1)]
pub front_page: String,
+ #[revision(start = 1)]
pub layout: String,
+ #[revision(start = 1)]
pub wide: String,
+ #[revision(start = 1)]
pub blur_spoiler: String,
+ #[revision(start = 1)]
pub show_nsfw: String,
+ #[revision(start = 1)]
pub blur_nsfw: String,
+ #[revision(start = 1)]
pub hide_hls_notification: String,
+ #[revision(start = 1)]
pub video_quality: String,
+ #[revision(start = 1)]
pub hide_sidebar_and_summary: String,
+ #[revision(start = 1)]
pub use_hls: String,
+ #[revision(start = 1)]
pub autoplay_videos: String,
+ #[revision(start = 1)]
pub fixed_navbar: String,
+ #[revision(start = 1)]
pub disable_visit_reddit_confirmation: String,
+ #[revision(start = 1)]
pub comment_sort: String,
+ #[revision(start = 1)]
pub post_sort: String,
- #[serde(serialize_with = "serialize_vec_with_plus")]
+ #[revision(start = 1)]
+ #[serde(serialize_with = "serialize_vec_with_plus", deserialize_with = "deserialize_vec_with_plus")]
pub subscriptions: Vec<String>,
- #[serde(serialize_with = "serialize_vec_with_plus")]
+ #[revision(start = 1)]
+ #[serde(serialize_with = "serialize_vec_with_plus", deserialize_with = "deserialize_vec_with_plus")]
pub filters: Vec<String>,
+ #[revision(start = 1)]
pub hide_awards: String,
+ #[revision(start = 1)]
pub hide_score: String,
+ #[revision(start = 1)]
+ pub remove_default_feeds: String,
}
-fn serialize_vec_with_plus<S>(vec: &Vec<String>, serializer: S) -> Result<S::Ok, S::Error>
+fn serialize_vec_with_plus<S>(vec: &[String], serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&vec.join("+"))
}
+fn deserialize_vec_with_plus<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ let string = String::deserialize(deserializer)?;
+ if string.is_empty() {
+ return Ok(Vec::new());
+ }
+ Ok(string.split('+').map(|s| s.to_string()).collect())
+}
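
A hedged round-trip illustration of the two helpers above (the `Subs` struct is hypothetical):

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, PartialEq, Debug)]
struct Subs {
    #[serde(serialize_with = "serialize_vec_with_plus", deserialize_with = "deserialize_vec_with_plus")]
    subscriptions: Vec<String>,
}

#[test]
fn plus_joined_round_trip() {
    let subs = Subs { subscriptions: vec!["memes".into(), "rust".into()] };
    let encoded = serde_urlencoded::to_string(&subs).unwrap();
    assert_eq!(encoded, "subscriptions=memes%2Brust"); // '+' is percent-encoded
    let decoded: Subs = serde_urlencoded::from_str(&encoded).unwrap();
    assert_eq!(decoded, subs);
}
```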
+
#[derive(RustEmbed)]
#[folder = "static/themes/"]
#[include = "*.css"]
@@ -674,12 +729,36 @@ impl Preferences {
filters: setting(req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
hide_awards: setting(req, "hide_awards"),
hide_score: setting(req, "hide_score"),
+ remove_default_feeds: setting(req, "remove_default_feeds"),
}
}
pub fn to_urlencoded(&self) -> Result<String, String> {
serde_urlencoded::to_string(self).map_err(|e| e.to_string())
}
+
+ pub fn to_bincode(&self) -> Result<Vec<u8>, String> {
+ bincode::serialize(self).map_err(|e| e.to_string())
+ }
+ pub fn to_compressed_bincode(&self) -> Result<Vec<u8>, String> {
+ deflate_compress(self.to_bincode()?)
+ }
+ pub fn to_bincode_str(&self) -> Result<String, String> {
+ Ok(base2048::encode(&self.to_compressed_bincode()?))
+ }
+}
+
+pub fn deflate_compress(i: Vec<u8>) -> Result<Vec<u8>, String> {
+ let mut e = Encoder::new(Vec::new());
+ e.write_all(&i).map_err(|e| e.to_string())?;
+ e.finish().into_result().map_err(|e| e.to_string())
+}
+
+pub fn deflate_decompress(i: Vec<u8>) -> Result<Vec<u8>, String> {
+ let mut decoder = Decoder::new(&i[..]);
+ let mut out = Vec::new();
+ decoder.read_to_end(&mut out).map_err(|e| format!("Failed to read from deflate decoder: {}", e))?;
+ Ok(out)
}
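
These two helpers should be inverses; a quick hedged sanity check:

```rust
#[test]
fn deflate_round_trip() {
    let input = b"hello redlib".to_vec();
    let compressed = deflate_compress(input.clone()).unwrap();
    assert_eq!(deflate_decompress(compressed).unwrap(), input);
}
```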
/// Gets a `HashSet` of filters from the cookie in the given `Request`.
@@ -735,7 +814,15 @@ pub async fn parse_post(post: &Value) -> Post {
get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or_else(|| String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)),
)
} else {
- rewrite_urls(&val(post, "selftext_html"))
+ let selftext = val(post, "selftext");
+ if selftext.contains("```") {
+ let mut html_output = String::new();
+ let parser = pulldown_cmark::Parser::new(&selftext);
+ pulldown_cmark::html::push_html(&mut html_output, parser);
+ rewrite_urls(&html_output)
+ } else {
+ rewrite_urls(&val(post, "selftext_html"))
+ }
};
// Build a post using data parsed from Reddit post API
@@ -826,18 +913,72 @@ pub fn param(path: &str, value: &str) -> Option<String> {
// Retrieve the value of a setting by name
pub fn setting(req: &Request<Body>, name: &str) -> String {
// Parse a cookie value from request
- req
- .cookie(name)
- .unwrap_or_else(|| {
- // If there is no cookie for this setting, try receiving a default from the config
- if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) {
- Cookie::new(name, default)
- } else {
- Cookie::from(name)
- }
- })
- .value()
- .to_string()
+
+ // If this was called with "subscriptions" and the "subscriptions" cookie has a value
+ if name == "subscriptions" && req.cookie("subscriptions").is_some() {
+ // Create subscriptions string
+ let mut subscriptions = String::new();
+
+ // Default subscriptions cookie
+ if req.cookie("subscriptions").is_some() {
+ subscriptions.push_str(req.cookie("subscriptions").unwrap().value());
+ }
+
+ // Start with first numbered subscription cookie
+ let mut subscriptions_number = 1;
+
+ // While whatever subscriptionsNUMBER cookie we're looking at has a value
+ while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() {
+ // Push whatever subscriptionsNUMBER cookie we're looking at into the subscriptions string
+ subscriptions.push_str(req.cookie(&format!("subscriptions{}", subscriptions_number)).unwrap().value());
+
+ // Increment subscription cookie number
+ subscriptions_number += 1;
+ }
+
+ // Return the subscriptions cookies as one large string
+ subscriptions
+ }
+ // If this was called with "filters" and the "filters" cookie has a value
+ else if name == "filters" && req.cookie("filters").is_some() {
+ // Create filters string
+ let mut filters = String::new();
+
+ // Default filters cookie
+ if req.cookie("filters").is_some() {
+ filters.push_str(req.cookie("filters").unwrap().value());
+ }
+
+ // Start with first numbered filters cookie
+ let mut filters_number = 1;
+
+ // While whatever filtersNUMBER cookie we're looking at has a value
+ while req.cookie(&format!("filters{}", filters_number)).is_some() {
+ // Push whatever filtersNUMBER cookie we're looking at into the filters string
+ filters.push_str(req.cookie(&format!("filters{}", filters_number)).unwrap().value());
+
+ // Increment filters cookie number
+ filters_number += 1;
+ }
+
+ // Return the filters cookies as one large string
+ filters
+ }
+ // Any other setting, or subscriptions/filters without an existing cookie, is handled here
+ else {
+ req
+ .cookie(name)
+ .unwrap_or_else(|| {
+ // If there is no cookie for this setting, try receiving a default from the config
+ if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) {
+ Cookie::new(name, default)
+ } else {
+ Cookie::from(name)
+ }
+ })
+ .value()
+ .to_string()
+ }
}
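
Worked example, hedged (cookie values are illustrative): the trailing `+` that `join_until_size_limit` leaves on every full chunk is what lets this function concatenate the numbered cookies without inserting separators.

```rust
fn main() {
    // e.g. cookie "subscriptions"  = "a+b+" (full chunk, trailing '+')
    //      cookie "subscriptions1" = "c+d"  (final chunk, no trailing '+')
    let mut subscriptions = String::new();
    for chunk in ["a+b+", "c+d"] {
        subscriptions.push_str(chunk);
    }
    assert_eq!(subscriptions, "a+b+c+d");
}
```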
// Retrieve the value of a setting by name or the default value
@@ -853,11 +994,12 @@ pub fn setting_or_default(req: &Request<Body>, name: &str, default: String) -> String {
// Detect and redirect in the event of a random subreddit
pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
if sub == "random" || sub == "randnsfw" {
- let new_sub = json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"]
- .as_str()
- .unwrap_or_default()
- .to_string();
- Ok(redirect(&format!("/r/{new_sub}{additional}")))
+ Ok(redirect(&format!(
+ "/r/{}{additional}",
+ json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"]
+ .as_str()
+ .unwrap_or_default()
+ )))
} else {
Err("No redirect needed".to_string())
}
@@ -935,9 +1077,20 @@ pub fn format_url(url: &str) -> String {
}
}
+static REGEX_BULLET: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?m)^- (.*)$").unwrap());
+static REGEX_BULLET_CONSECUTIVE_LINES: Lazy<Regex> = Lazy::new(|| Regex::new(r"</ul>\n<ul>").unwrap());
+
+pub fn render_bullet_lists(input_text: &str) -> String {
+ // ref: https://stackoverflow.com/a/4902622
+ // First enclose each bullet with <ul> and <li> tags
+ let text1 = REGEX_BULLET.replace_all(input_text, "<ul><li>$1</li></ul>").to_string();
+ // Then remove any consecutive </ul>\n<ul> pairs so adjacent bullets share one list
+ REGEX_BULLET_CONSECUTIVE_LINES.replace_all(&text1, "").to_string()
+}
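
A hedged before/after for the reconstructed `render_bullet_lists` above: each `- ` line is wrapped individually, then the adjacent `</ul>\n<ul>` seams are removed so consecutive bullets share one list.

```rust
fn main() {
    let input = "intro:\n- first\n- second";
    // The newline between bullets disappears along with the </ul>\n<ul> seam.
    assert_eq!(
        render_bullet_lists(input),
        "intro:\n<ul><li>first</li><li>second</li></ul>"
    );
}
```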
+
// These are links we want to replace in-body
static REDDIT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|amp\.|new\.|)(reddit\.com|redd\.it)/"#).unwrap());
-static REDDIT_PREVIEW_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://(external-preview|preview|i)\.redd\.it(.*)[^?]").unwrap());
+static REDDIT_PREVIEW_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://(external-preview|preview|i)\.redd\.it(.*)").unwrap());
static REDDIT_EMOJI_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://(www|).redditstatic\.com/(.*)").unwrap());
static REDLIB_PREVIEW_LINK_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r#"/(img|preview/)(pre|external-pre)?/(.*?)>"#).unwrap());
static REDLIB_PREVIEW_TEXT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r">(.*?)</a>").unwrap());
@@ -946,8 +1099,7 @@ static REDLIB_PREVIEW_TEXT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r">(.*?)</a>").unwrap());
pub fn rewrite_urls(input_text: &str) -> String {
let mut text1 =
// Rewrite Reddit links to Redlib
- REDDIT_REGEX.replace_all(input_text, r#"href="/"#)
- .to_string();
+ REDDIT_REGEX.replace_all(input_text, r#"href="/"#).to_string();
loop {
if REDDIT_EMOJI_REGEX.find(&text1).is_none() {
@@ -969,49 +1121,44 @@ pub fn rewrite_urls(input_text: &str) -> String {
} else {
let formatted_url = format_url(REDDIT_PREVIEW_REGEX.find(&text1).map(|x| x.as_str()).unwrap_or_default());
- let image_url = REDLIB_PREVIEW_LINK_REGEX.find(&formatted_url).map_or("", |m| m.as_str()).to_string();
- let mut image_caption = REDLIB_PREVIEW_TEXT_REGEX.find(&formatted_url).map_or("", |m| m.as_str()).to_string();
+ let image_url = REDLIB_PREVIEW_LINK_REGEX.find(&formatted_url).map_or("", |m| m.as_str());
+ let mut image_caption = REDLIB_PREVIEW_TEXT_REGEX.find(&formatted_url).map_or("", |m| m.as_str());
/* As long as image_caption isn't empty remove first and last four characters of image_text to leave us with just the text in the caption without any HTML.
This makes it possible to enclose it in a <figcaption> later on without having stray HTML breaking it */
if !image_caption.is_empty() {
- image_caption = image_caption[1..image_caption.len() - 4].to_string();
+ image_caption = &image_caption[1..image_caption.len() - 4];
}
// image_url contains > at the end of it, and right above this we remove image_text's front >, leaving us with just a single > between them
- let image_to_replace = format!("<a href=\"{image_url}{image_caption}</a>");
-
- // _image_replacement needs to be in scope for the replacement at the bottom of the loop
- let mut _image_replacement = String::new();
+ let image_to_replace = format!("<a href=\"{image_url}{image_caption}</a>");
/* We don't want to show a caption that's just the image's link, so we check if we find a Reddit preview link within the image's caption.
If we don't find one we must have actual text, so we include a <figcaption> block that contains it.
Otherwise we don't include the <figcaption> block as we don't need it. */
- if REDDIT_PREVIEW_REGEX.find(&image_caption).is_none() {
+ let _image_replacement = if REDDIT_PREVIEW_REGEX.find(image_caption).is_none() {
// Without this " would show as \" instead. "\"" is how the quotes are formatted within image_text beforehand
- image_caption = image_caption.replace("\\"", "\"");
-
- _image_replacement = format!("{image_caption}");
+ format!(
+ "{}",
+ image_caption.replace("\\"", "\"")
+ )
} else {
- _image_replacement = format!("");
- }
+ format!("")
+ };
/* In order to know if we're dealing with a normal or external preview we need to take a look at the first capture group of REDDIT_PREVIEW_REGEX
if it's preview we're dealing with something that needs /preview/pre, external-preview is /preview/external-pre, and i is /img */
- let reddit_preview_regex_capture = REDDIT_PREVIEW_REGEX.captures(&text1).unwrap().get(1).map_or("", |m| m.as_str()).to_string();
- let mut _preview_type = String::new();
- if reddit_preview_regex_capture == "preview" {
- _preview_type = "/preview/pre".to_string();
- } else if reddit_preview_regex_capture == "external-preview" {
- _preview_type = "/preview/external-pre".to_string();
- } else {
- _preview_type = "/img".to_string();
- }
+ let reddit_preview_regex_capture = REDDIT_PREVIEW_REGEX.captures(&text1).unwrap().get(1).map_or("", |m| m.as_str());
+
+ let _preview_type = match reddit_preview_regex_capture {
+ "preview" => "/preview/pre",
+ "external-preview" => "/preview/external-pre",
+ _ => "/img",
+ };
text1 = REDDIT_PREVIEW_REGEX
.replace(&text1, format!("{_preview_type}$2"))
.replace(&image_to_replace, &_image_replacement)
- .to_string()
}
}
}
@@ -1085,10 +1232,14 @@ pub fn rewrite_emotes(media_metadata: &Value, comment: String) -> String {
);
// Inside the comment replace the ID we found with the string that will embed the image
- comment = comment.replace(&id, &to_replace_with).to_string();
+ comment = comment.replace(&id, &to_replace_with);
}
}
}
+
+ // render bullet (unordered) lists
+ comment = render_bullet_lists(&comment);
+
// Call rewrite_urls() to transform any other Reddit links
rewrite_urls(&comment)
}
@@ -1185,6 +1336,20 @@ pub async fn error(req: Request<Body>, msg: &str) -> Result<Response<Body>, String> {
Ok(Response::builder().status(404).header("content-type", "text/html").body(body.into()).unwrap_or_default())
}
+/// Renders a generic info landing page.
+pub async fn info(req: Request<Body>, msg: &str) -> Result<Response<Body>, String> {
+ let url = req.uri().to_string();
+ let body = InfoTemplate {
+ msg: msg.to_string(),
+ prefs: Preferences::new(&req),
+ url,
+ }
+ .render()
+ .unwrap_or_default();
+
+ Ok(Response::builder().status(200).header("content-type", "text/html").body(body.into()).unwrap_or_default())
+}
+
/// Returns true if the config/env variable `REDLIB_SFW_ONLY` carries the
/// value `on`.
///
@@ -1272,7 +1437,7 @@ pub fn url_path_basename(path: &str) -> String {
let mut url = url_result.unwrap();
url.path_segments_mut().unwrap().pop_if_empty();
- url.path_segments().unwrap().last().unwrap().to_string()
+ url.path_segments().unwrap().next_back().unwrap().to_string()
}
}
@@ -1383,10 +1548,11 @@ mod tests {
filters: vec![],
hide_awards: "off".to_owned(),
hide_score: "off".to_owned(),
+ remove_default_feeds: "off".to_owned(),
};
let urlencoded = serde_urlencoded::to_string(prefs).expect("Failed to serialize Prefs");
- assert_eq!(urlencoded, "theme=laserwave&front_page=default&layout=compact&wide=on&blur_spoiler=on&show_nsfw=off&blur_nsfw=on&hide_hls_notification=off&video_quality=best&hide_sidebar_and_summary=off&use_hls=on&autoplay_videos=on&fixed_navbar=on&disable_visit_reddit_confirmation=on&comment_sort=confidence&post_sort=top&subscriptions=memes%2Bmildlyinteresting&filters=&hide_awards=off&hide_score=off")
+ assert_eq!(urlencoded, "theme=laserwave&front_page=default&layout=compact&wide=on&blur_spoiler=on&show_nsfw=off&blur_nsfw=on&hide_hls_notification=off&video_quality=best&hide_sidebar_and_summary=off&use_hls=on&autoplay_videos=on&fixed_navbar=on&disable_visit_reddit_confirmation=on&comment_sort=confidence&post_sort=top&subscriptions=memes%2Bmildlyinteresting&filters=&hide_awards=off&hide_score=off&remove_default_feeds=off");
}
}
@@ -1406,7 +1572,10 @@ async fn test_fetching_subreddit_quarantined() {
#[tokio::test(flavor = "multi_thread")]
async fn test_fetching_nsfw_subreddit() {
- let subreddit = Post::fetch("/r/randnsfw", false).await;
+ // Gonwild is a place for closed, Euclidean Geometric shapes to exchange their nth terms for karma; showing off their edges in a comfortable environment without pressure.
+ // Find a good sub that is tagged NSFW but that actually isn't in case my future employers are watching (they probably are)
+ // Switched from randnsfw, as it is no longer functional.
+ let subreddit = Post::fetch("/r/gonwild", false).await;
assert!(subreddit.is_ok());
assert!(!subreddit.unwrap().0.is_empty());
}
@@ -1424,7 +1593,7 @@ async fn test_fetching_ws() {
fn test_rewriting_image_links() {
let input =
r#"
Hi, I've bought this very same monitor and found no calibration whatsoever. I have an ICC profile that has been set up since I've installed its driver from the LG website and it works ok. I also used http://www.lagom.nl/lcd-test/ to calibrate it. After some good tinkering I've found the following settings + the color profile from the driver gets me past all the tests perfectly:
+- Brightness 50 (still have to settle on this one, it's personal preference, it controls the backlight, not the colors)
+- Contrast 70 (which for me was the default one)
+- Picture mode Custom
+- Super resolution + Off (it looks horrible anyway)
+- Sharpness 50 (default one I think)
+- Black level High (low messes up gray colors)
+- DFC Off
+- Response Time Middle (personal preference, https://www.blurbusters.com/ show horrible overdrive with it on high)
+- Freesync doesn't matter
+- Black stabilizer 50
+- Gamma setting on 0
+- Color Temp Medium
+How`s your monitor by the way? Any IPS bleed whatsoever? I either got lucky or the panel is pretty good, 0 bleed for me, just the usual IPS glow. How about the pixels? I see the pixels even at one meter away, especially on Microsoft Edge's icon for example, the blue background is just blocky, don't know why.
+
"#;
+ let output = r#"
Hi, I've bought this very same monitor and found no calibration whatsoever. I have an ICC profile that has been set up since I've installed its driver from the LG website and it works ok. I also used http://www.lagom.nl/lcd-test/ to calibrate it. After some good tinkering I've found the following settings + the color profile from the driver gets me past all the tests perfectly:
+
Brightness 50 (still have to settle on this one, it's personal preference, it controls the backlight, not the colors)
Contrast 70 (which for me was the default one)
Picture mode Custom
Super resolution + Off (it looks horrible anyway)
Sharpness 50 (default one I think)
Black level High (low messes up gray colors)
DFC Off
Response Time Middle (personal preference, https://www.blurbusters.com/ show horrible overdrive with it on high)
Freesync doesn't matter
Black stabilizer 50
Gamma setting on 0
Color Temp Medium
+How`s your monitor by the way? Any IPS bleed whatsoever? I either got lucky or the panel is pretty good, 0 bleed for me, just the usual IPS glow. How about the pixels? I see the pixels even at one meter away, especially on Microsoft Edge's icon for example, the blue background is just blocky, don't know why.
+