From d0d2c22e9415deb33147fabc9f24c6de2ad46883 Mon Sep 17 00:00:00 2001 From: nieve Date: Tue, 11 Feb 2025 21:56:29 -0500 Subject: [PATCH] Merge branch 'main' of https://github.com/redlib-org/redlib --- .devcontainer/devcontainer.json | 2 +- Cargo.lock | 115 +++++----- Cargo.toml | 15 +- Dockerfile.alpine | 45 ++++ Dockerfile.ubuntu | 51 +++++ README.md | 14 +- flake.lock | 32 ++- flake.nix | 10 +- src/client.rs | 6 - src/main.rs | 17 +- src/scraper/main.rs | 132 ------------ src/server.rs | 37 +++- src/settings.rs | 144 ++++++++++++- src/subreddit.rs | 167 +++++++++++++-- src/user.rs | 7 +- src/utils.rs | 353 ++++++++++++++++++++++++++----- static/check_update.js | 4 +- static/copy.js | 9 + static/style.css | 15 +- static/themes/midnightPurple.css | 14 ++ templates/info.html | 20 ++ templates/settings.html | 144 ++++++++----- templates/utils.html | 6 +- 23 files changed, 985 insertions(+), 374 deletions(-) create mode 100644 Dockerfile.alpine create mode 100644 Dockerfile.ubuntu delete mode 100644 src/scraper/main.rs create mode 100644 static/copy.js create mode 100644 static/themes/midnightPurple.css create mode 100644 templates/info.html diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 3a941de..3ec1ead 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,6 +1,6 @@ { "name": "Rust", - "image": "mcr.microsoft.com/devcontainers/rust:0-1-bullseye", + "image": "mcr.microsoft.com/devcontainers/rust:1.0.9-bookworm", "features": { "ghcr.io/devcontainers/features/docker-in-docker:2": {} }, diff --git a/Cargo.lock b/Cargo.lock index 819d4bc..a29b750 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -71,12 +71,6 @@ version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" -[[package]] -name = "anyhow" -version = "1.0.93" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" - [[package]] name = "arc-swap" version = "1.7.1" @@ -139,6 +133,12 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "base2048" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71f4fe417e8cc3bb9b437dfa9290ce92bd2730ba5374719bdfd9147fbc8f17cd" + [[package]] name = "base64" version = "0.21.7" @@ -151,6 +151,15 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + [[package]] name = "bitflags" version = "2.6.0" @@ -274,9 +283,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "num-traits", ] @@ -319,18 +328,6 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" -[[package]] -name = "common-words-all" -version = "0.0.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "84a6ff47eb813c9e315610ceca0ddd247827e22f2cdadc4189e4676a81470c77" -dependencies = [ - "anyhow", - "csv", - "glob", - "serde", -] - [[package]] name = "cookie" version = "0.18.1" @@ -394,27 +391,6 @@ dependencies = [ "typenum", ] -[[package]] -name = "csv" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" -dependencies = [ - "csv-core", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "csv-core" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" -dependencies = [ - "memchr", -] - [[package]] name = "darling" version = "0.20.10" @@ -698,12 +674,6 @@ version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" -[[package]] -name = "glob" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" - [[package]] name = "globset" version = "0.4.15" @@ -770,6 +740,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" +[[package]] +name = "htmlescape" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9025058dae765dee5070ec375f591e2ba14638c63feff74f13805a72e523163" + [[package]] name = "http" version = "0.2.12" @@ -1296,6 +1272,24 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "pulldown-cmark" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14" +dependencies = [ + "bitflags", + "memchr", + "pulldown-cmark-escape", + "unicase", +] + +[[package]] +name = "pulldown-cmark-escape" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" + [[package]] name = "quick-error" version = "1.2.3" @@ -1357,16 +1351,19 @@ version = "0.35.1" dependencies = [ "arc-swap", "async-recursion", + "base2048", "base64 0.22.1", + "bincode", "brotli", "build_html", "cached", + "chrono", "clap", - "common-words-all", "cookie", "dotenvy", "fastrand", "futures-lite", + "htmlescape", "hyper", "hyper-rustls", "libflate", @@ -1375,7 +1372,9 @@ dependencies = [ "once_cell", "percent-encoding", "pretty_env_logger", + "pulldown-cmark", "regex", + "revision", "rinja", "route-recognizer", "rss", @@ -1432,6 +1431,26 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +[[package]] +name = "revision" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22f53179a035f881adad8c4d58a2c599c6b4a8325b989c68d178d7a34d1b1e4c" +dependencies = [ + "revision-derive", +] + +[[package]] +name = "revision-derive" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0ec466e5d8dca9965eb6871879677bef5590cf7525ad96cae14376efb75073" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "ring" version = "0.17.8" diff --git a/Cargo.toml 
b/Cargo.toml index a1d3ec0..c7b6d4a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,10 +47,15 @@ rss = "2.0.7" arc-swap = "1.7.1" serde_json_path = "0.7.1" async-recursion = "1.1.1" -common-words-all = { version = "0.0.2", default-features = false, features = ["english", "one"] } +pulldown-cmark = { version = "0.12.0", features = ["simd", "html"], default-features = false } hyper-rustls = { version = "0.24.2", features = [ "http2" ] } tegen = "0.1.4" serde_urlencoded = "0.7.1" +chrono = { version = "0.4.39", default-features = false, features = [ "std" ] } +htmlescape = "0.3.1" +bincode = "1.3.3" +base2048 = "2.0.2" +revision = "0.10.0" [dev-dependencies] @@ -61,11 +66,3 @@ sealed_test = "1.0.0" codegen-units = 1 lto = true strip = "symbols" - -[[bin]] -name = "redlib" -path = "src/main.rs" - -[[bin]] -name = "scraper" -path = "src/scraper/main.rs" diff --git a/Dockerfile.alpine b/Dockerfile.alpine new file mode 100644 index 0000000..051476a --- /dev/null +++ b/Dockerfile.alpine @@ -0,0 +1,45 @@ +# supported versions here: https://hub.docker.com/_/rust +ARG ALPINE_VERSION=3.20 + +######################## +## builder image +######################## +FROM rust:alpine${ALPINE_VERSION} AS builder + +RUN apk add --no-cache musl-dev + +WORKDIR /redlib + +# download (most) dependencies in their own layer +COPY Cargo.lock Cargo.toml ./ +RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs +RUN cargo build --release --locked --bin redlib +RUN rm ./src/main.rs && rmdir ./src + +# copy the source and build the redlib binary +COPY . ./ +RUN cargo build --release --locked --bin redlib +RUN echo "finished building redlib!" + +######################## +## release image +######################## +FROM alpine:${ALPINE_VERSION} AS release + +# Import redlib binary from builder +COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib + +# Add non-root user for running redlib +RUN adduser --home /nonexistent --no-create-home --disabled-password redlib +USER redlib + +# Document that we intend to expose port 8080 to whoever runs the container +EXPOSE 8080 + +# Run a healthcheck every minute to make sure redlib is functional +HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider -q http://localhost:8080/settings || exit 1 + +# Add container metadata +LABEL org.opencontainers.image.authors="sigaloid" + +CMD ["redlib"] diff --git a/Dockerfile.ubuntu b/Dockerfile.ubuntu new file mode 100644 index 0000000..2e277c5 --- /dev/null +++ b/Dockerfile.ubuntu @@ -0,0 +1,51 @@ +# supported versions here: https://hub.docker.com/_/rust +ARG RUST_BUILDER_VERSION=slim-bookworm +ARG UBUNTU_RELEASE_VERSION=noble + +######################## +## builder image +######################## +FROM rust:${RUST_BUILDER_VERSION} AS builder + +WORKDIR /redlib + +# download (most) dependencies in their own layer +COPY Cargo.lock Cargo.toml ./ +RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs +RUN cargo build --release --locked --bin redlib +RUN rm ./src/main.rs && rmdir ./src + +# copy the source and build the redlib binary +COPY . ./ +RUN cargo build --release --locked --bin redlib +RUN echo "finished building redlib!"
+ +######################## +## release image +######################## +FROM ubuntu:${UBUNTU_RELEASE_VERSION} AS release + +# Install ca-certificates +RUN apt-get update && apt-get install -y ca-certificates + +# Import redlib binary from builder +COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib + +# Add non-root user for running redlib +RUN useradd \ + --no-create-home \ + --password "!" \ + --comment "user for running redlib" \ + redlib +USER redlib + +# Document that we intend to expose port 8080 to whoever runs the container +EXPOSE 8080 + +# Run a healthcheck every minute to make sure redlib is functional +HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider -q http://localhost:8080/settings || exit 1 + +# Add container metadata +LABEL org.opencontainers.image.authors="sigaloid" + +CMD ["redlib"] diff --git a/README.md index 1285edc..9bf998f 100644 --- a/README.md +++ b/README.md @@ -404,6 +404,17 @@ REDLIB_DEFAULT_USE_HLS = "on" > > If using Docker Compose, no changes are needed as the `.env` file is already referenced in `compose.yaml` via the `env_file: .env` line. +## Command Line Flags + +Redlib supports the following command line flags: + +- `-4`, `--ipv4-only`: Listen on IPv4 only. +- `-6`, `--ipv6-only`: Listen on IPv6 only. +- `-r`, `--redirect-https`: Redirect all HTTP requests to HTTPS (no longer functional). +- `-a`, `--address <ADDRESS>
`: Sets address to listen on. Default is `[::]`. +- `-p`, `--port <PORT>`: Port to listen on. Default is `8080`. +- `-H`, `--hsts <EXPIRE_TIME>`: HSTS header to tell browsers that this site should only be accessed over HTTPS. Default is `604800`. + ## Instance settings Assign a default value for each instance-specific setting by passing environment variables to Redlib in the format `REDLIB_{X}`. Replace `{X}` with the setting name (see list below) in capital letters. @@ -429,7 +440,7 @@ Assign a default value for each user-modifiable setting by passing environment v | `WIDE` | `["on", "off"]` | `off` | | `POST_SORT` | `["hot", "new", "top", "rising", "controversial"]` | `hot` | | `COMMENT_SORT` | `["confidence", "top", "new", "controversial", "old"]` | `confidence` | -| `BLUR_SPOILER` | `["on", "off"]` | `off` | +| `BLUR_SPOILER` | `["on", "off"]` | `off` | | `SHOW_NSFW` | `["on", "off"]` | `off` | | `BLUR_NSFW` | `["on", "off"]` | `off` | | `USE_HLS` | `["on", "off"]` | `off` | @@ -441,3 +452,4 @@ | `HIDE_SCORE` | `["on", "off"]` | `off` | | `HIDE_SIDEBAR_AND_SUMMARY` | `["on", "off"]` | `off` | | `FIXED_NAVBAR` | `["on", "off"]` | `on` | +| `REMOVE_DEFAULT_FEEDS` | `["on", "off"]` | `off` | \ No newline at end of file diff --git a/flake.lock index 4569244..2b0b585 100644 --- a/flake.lock +++ b/flake.lock @@ -1,17 +1,12 @@ { "nodes": { "crane": { - "inputs": { - "nixpkgs": [ - "nixpkgs" - ] - }, "locked": { - "lastModified": 1717025063, - "narHash": "sha256-dIubLa56W9sNNz0e8jGxrX3CAkPXsq7snuFA/Ie6dn8=", + "lastModified": 1731974733, + "narHash": "sha256-enYSSZVVl15FI5p+0Y5/Ckf5DZAvXe6fBrHxyhA/njc=", "owner": "ipetkov", "repo": "crane", - "rev": "480dff0be03dac0e51a8dfc26e882b0d123a450e", + "rev": "3cb338ce81076ce5e461cf77f7824476addb0e1c", "type": "github" }, "original": { @@ -25,11 +20,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { @@ -40,11 +35,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1717112898, - "narHash": "sha256-7R2ZvOnvd9h8fDd65p0JnB7wXfUvreox3xFdYWd1BnY=", + "lastModified": 1731890469, + "narHash": "sha256-D1FNZ70NmQEwNxpSSdTXCSklBH1z2isPR84J6DQrJGs=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "6132b0f6e344ce2fe34fc051b72fb46e34f668e0", + "rev": "5083ec887760adfe12af64830a66807423a859a7", "type": "github" }, "original": { @@ -64,19 +59,16 @@ }, "rust-overlay": { "inputs": { - "flake-utils": [ - "flake-utils" - ], "nixpkgs": [ "nixpkgs" ] }, "locked": { - "lastModified": 1717121863, - "narHash": "sha256-/3sxIe7MZqF/jw1RTQCSmgTjwVod43mmrk84m50MJQ4=", + "lastModified": 1732069891, + "narHash": "sha256-moKx8AVJrViCSdA0e0nSsG8b1dAsObI4sRAtbqbvBY8=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "2a7b53172ed08f856b8382d7dcfd36a4e0cbd866", + "rev": "8509a51241c407d583b1963d5079585a992506e8", "type": "github" }, "original": { diff --git a/flake.nix index 8bcacf6..0180c8d 100644 --- a/flake.nix +++ b/flake.nix @@ -4,19 +4,13 @@ inputs = { nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; - crane = { - url = "github:ipetkov/crane"; - inputs.nixpkgs.follows = "nixpkgs"; - }; + crane.url = "github:ipetkov/crane";
flake-utils.url = "github:numtide/flake-utils"; rust-overlay = { url = "github:oxalica/rust-overlay"; - inputs = { - nixpkgs.follows = "nixpkgs"; - flake-utils.follows = "flake-utils"; - }; + inputs.nixpkgs.follows = "nixpkgs"; }; }; diff --git a/src/client.rs index fa32fc0..76369ca 100644 --- a/src/client.rs +++ b/src/client.rs @@ -544,12 +544,6 @@ async fn test_obfuscated_share_link() { assert_eq!(canonical_path(share_link, 3).await, Ok(Some(canonical_link))); } -#[tokio::test(flavor = "multi_thread")] -async fn test_share_link_strip_json() { - let link = "/17krzvz".into(); - let canonical_link = "/comments/17krzvz".into(); - assert_eq!(canonical_path(link, 3).await, Ok(Some(canonical_link))); -} #[tokio::test(flavor = "multi_thread")] async fn test_private_sub() { let link = json("/r/suicide/about.json?raw_json=1".into(), true).await; diff --git a/src/main.rs index 542f1e8..9b34e75 100644 --- a/src/main.rs +++ b/src/main.rs @@ -128,6 +128,8 @@ async fn main() { let matches = Command::new("Redlib") .version(env!("CARGO_PKG_VERSION")) .about("Private front-end for Reddit written in Rust ") + .arg(Arg::new("ipv4-only").short('4').long("ipv4-only").help("Listen on IPv4 only").num_args(0)) + .arg(Arg::new("ipv6-only").short('6').long("ipv6-only").help("Listen on IPv6 only").num_args(0)) .arg( Arg::new("redirect-https") .short('r') @@ -184,7 +186,16 @@ let port = matches.get_one::<String>("port").unwrap(); let hsts = matches.get_one("hsts").map(|m: &String| m.as_str()); - let listener = [address, ":", port].concat(); + let ipv4_only = std::env::var("IPV4_ONLY").is_ok() || matches.get_flag("ipv4-only"); + let ipv6_only = std::env::var("IPV6_ONLY").is_ok() || matches.get_flag("ipv6-only"); + + let listener = if ipv4_only { + format!("0.0.0.0:{}", port) + } else if ipv6_only { + format!("[::]:{}", port) + } else { + [address, ":", port].concat() + }; println!("Starting Redlib..."); @@ -255,6 +266,7 @@ app .at("/check_update.js") .get(|_| resource(include_str!("../static/check_update.js"), "text/javascript", false).boxed()); + app.at("/copy.js").get(|_| resource(include_str!("../static/copy.js"), "text/javascript", false).boxed()); app.at("/commits.atom").get(|_| async move { proxy_commit_info().await }.boxed()); app.at("/instances.json").get(|_| async move { proxy_instances().await }.boxed()); @@ -293,6 +305,7 @@ // Configure settings app.at("/settings").get(|r| settings::get(r).boxed()).post(|r| settings::set(r).boxed()); app.at("/settings/restore").get(|r| settings::restore(r).boxed()); + app.at("/settings/encoded-restore").post(|r| settings::encoded_restore(r).boxed()); app.at("/settings/update").get(|r| settings::update(r).boxed()); // RSS Subscriptions @@ -389,7 +402,7 @@ Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await, // Short link for post - Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/{id}"), 3).await { + Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/comments/{id}"), 3).await { Ok(path_opt) => match path_opt { Some(path) => Ok(redirect(&path)), None => error(req, "Post ID is invalid. 
It may point to a post on a community that has been banned.").await, diff --git a/src/scraper/main.rs deleted file mode 100644 index f2e48d6..0000000 --- a/src/scraper/main.rs +++ /dev/null @@ -1,132 +0,0 @@ -use std::{collections::HashMap, fmt::Display, io::Write}; - -use clap::{Parser, ValueEnum}; -use common_words_all::{get_top, Language, NgramSize}; -use redlib::utils::Post; - -#[derive(Parser)] -#[command(name = "my_cli")] -#[command(about = "A simple CLI example", long_about = None)] -struct Cli { - #[arg(short = 's', long = "sub")] - sub: String, - - #[arg(long = "sort")] - sort: SortOrder, - - #[arg(short = 'f', long = "format", value_enum)] - format: Format, - #[arg(short = 'o', long = "output")] - output: Option<String>, -} - -#[derive(Debug, Clone, ValueEnum)] -enum SortOrder { - Hot, - Rising, - New, - Top, - Controversial, -} - -impl Display for SortOrder { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - SortOrder::Hot => write!(f, "hot"), - SortOrder::Rising => write!(f, "rising"), - SortOrder::New => write!(f, "new"), - SortOrder::Top => write!(f, "top"), - SortOrder::Controversial => write!(f, "controversial"), - } - } -} - -#[derive(Debug, Clone, ValueEnum)] -enum Format { - Json, -} - -#[tokio::main] -async fn main() { - pretty_env_logger::init(); - let cli = Cli::parse(); - let (sub, sort, format, output) = (cli.sub, cli.sort, cli.format, cli.output); - let initial = format!("/r/{sub}/{sort}.json?&raw_json=1"); - let (posts, mut after) = Post::fetch(&initial, false).await.unwrap(); - let mut hashmap = HashMap::new(); - hashmap.extend(posts.into_iter().map(|post| (post.id.clone(), post))); - loop { - print!("\r"); - let path = format!("/r/{sub}/{sort}.json?sort={sort}&t=&after={after}&raw_json=1"); - let (new_posts, new_after) = Post::fetch(&path, false).await.unwrap(); - let old_len = hashmap.len(); - // convert to hashmap and extend hashmap - let new_posts = new_posts.into_iter().map(|post| (post.id.clone(), post)).collect::<HashMap<String, Post>>(); - let len = new_posts.len(); - hashmap.extend(new_posts); - if hashmap.len() - old_len < 3 { - break; - } - - let x = hashmap.len() - old_len; - after = new_after; - // Print number of posts fetched - print!("Fetched {len} posts (+{x})",); - std::io::stdout().flush().unwrap(); - } - println!("\n\n"); - // additionally search if final count not reached - - for word in get_top(Language::English, 10_000, NgramSize::One) { - let mut retrieved_posts_from_search = 0; - let initial = format!("/r/{sub}/search.json?q={word}&restrict_sr=on&include_over_18=on&raw_json=1&sort={sort}"); - println!("Grabbing posts with word {word}."); - let (posts, mut after) = Post::fetch(&initial, false).await.unwrap(); - hashmap.extend(posts.into_iter().map(|post| (post.id.clone(), post))); - 'search: loop { - let path = format!("/r/{sub}/search.json?q={word}&restrict_sr=on&include_over_18=on&raw_json=1&sort={sort}&after={after}"); - let (new_posts, new_after) = Post::fetch(&path, false).await.unwrap(); - if new_posts.is_empty() || new_after.is_empty() { - println!("No more posts for word {word}"); - break 'search; - } - retrieved_posts_from_search += new_posts.len(); - let old_len = hashmap.len(); - let new_posts = new_posts.into_iter().map(|post| (post.id.clone(), post)).collect::<HashMap<String, Post>>(); - let len = new_posts.len(); - hashmap.extend(new_posts); - let delta = hashmap.len() - old_len; - after = new_after; - // Print number of posts fetched - println!("Fetched {len} posts (+{delta})",); - - if retrieved_posts_from_search > 
1000 { - println!("Reached 1000 posts from search"); - break 'search; - } - } - // Need to save incrementally. atomic save + move - let tmp_file = output.clone().unwrap_or_else(|| format!("{sub}.json.tmp")); - let perm_file = output.clone().unwrap_or_else(|| format!("{sub}.json")); - write_posts(&hashmap.values().collect(), tmp_file.clone()); - // move file - std::fs::rename(tmp_file, perm_file).unwrap(); - } - - println!("\n\n"); - - println!("Size of hashmap: {}", hashmap.len()); - - let posts: Vec<&Post> = hashmap.values().collect(); - match format { - Format::Json => { - let filename: String = output.unwrap_or_else(|| format!("{sub}.json")); - write_posts(&posts, filename); - } - } -} - -fn write_posts(posts: &Vec<&Post>, filename: String) { - let json = serde_json::to_string(&posts).unwrap(); - std::fs::write(filename, json).unwrap(); -} diff --git a/src/server.rs index 15c56ad..a287de2 100644 --- a/src/server.rs +++ b/src/server.rs @@ -25,7 +25,7 @@ use std::{ str::{from_utf8, Split}, string::ToString, }; -use time::Duration; +use time::OffsetDateTime; use crate::dbg_msg; @@ -170,10 +170,8 @@ impl ResponseExt for Response<Body> { } fn remove_cookie(&mut self, name: String) { - let mut cookie = Cookie::from(name); - cookie.set_path("/"); - cookie.set_max_age(Duration::seconds(1)); - if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) { + let removal_cookie = Cookie::build(name).path("/").http_only(true).expires(OffsetDateTime::now_utc()); + if let Ok(val) = header::HeaderValue::from_str(&removal_cookie.to_string()) { self.headers_mut().append("Set-Cookie", val); } } @@ -240,8 +238,14 @@ impl Server { path.pop(); } + // Replace HEAD with GET for routing + let (method, is_head) = match req.method() { + &Method::HEAD => (&Method::GET, true), + method => (method, false), + }; + // Match the visited path with an added route - match router.recognize(&format!("/{}{}", req.method().as_str(), path)) { + match router.recognize(&format!("/{}{}", method.as_str(), path)) { // If a route was configured for this path Ok(found) => { let mut parammed = req; @@ -253,17 +257,21 @@ match func.await { Ok(mut res) => { res.headers_mut().extend(def_headers); - let _ = compress_response(&req_headers, &mut res).await; + if is_head { + *res.body_mut() = Body::empty(); + } else { + let _ = compress_response(&req_headers, &mut res).await; + } Ok(res) } - Err(msg) => new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await, + Err(msg) => new_boilerplate(def_headers, req_headers, 500, if is_head { Body::empty() } else { Body::from(msg) }).await, } } .boxed() } // If there was a routing error - Err(e) => new_boilerplate(def_headers, req_headers, 404, e.into()).boxed(), + Err(e) => new_boilerplate(def_headers, req_headers, 404, if is_head { Body::empty() } else { e.into() }).boxed(), } })) } @@ -274,8 +282,19 @@ // Bind server to address specified above. Gracefully shut down if CTRL+C is pressed let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async { + #[cfg(windows)] // Wait for the CTRL+C signal tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler"); + + #[cfg(unix)] + { + // Wait for CTRL+C or SIGTERM signals + let mut signal_terminate = tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()).expect("Failed to install SIGTERM signal handler"); + tokio::select! 
{ _ = tokio::signal::ctrl_c() => (), _ = signal_terminate.recv() => () } } }); server.boxed() } diff --git a/src/settings.rs index 0a8cabc..3029b55 100644 --- a/src/settings.rs +++ b/src/settings.rs @@ -4,12 +4,14 @@ use std::collections::HashMap; // CRATES use crate::server::ResponseExt; -use crate::utils::{redirect, template, Preferences}; +use crate::subreddit::join_until_size_limit; +use crate::utils::{deflate_decompress, redirect, template, Preferences}; use cookie::Cookie; use futures_lite::StreamExt; use hyper::{Body, Request, Response}; use rinja::Template; use time::{Duration, OffsetDateTime}; +use url::form_urlencoded; // STRUCTS #[derive(Template)] @@ -21,7 +23,7 @@ struct SettingsTemplate { // CONSTANTS -const PREFS: [&str; 19] = [ +const PREFS: [&str; 20] = [ "theme_light", "theme_dark", "front_page", @@ -41,6 +43,7 @@ "hide_score", "disable_visit_reddit_confirmation", "video_quality", + "remove_default_feeds", ]; // FUNCTIONS @@ -120,7 +123,7 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> response.insert_cookie( Cookie::build((name.to_owned(), value.clone())) @@ -137,6 +140,119 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> + // If there are subscriptions to restore set them and delete any old subscriptions cookies, otherwise delete them all + if subscriptions.is_some() { + let sub_list: Vec<String> = subscriptions.expect("Subscriptions").split('+').map(str::to_string).collect(); + + // Start at 0 to keep track of what number we need to start deleting old subscription cookies from + let mut subscriptions_number_to_delete_from = 0; + + // Starting at 0 so we handle the subscription cookie without a number first + for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() { + let subscriptions_cookie = if subscriptions_number == 0 { + "subscriptions".to_string() + } else { + format!("subscriptions{}", subscriptions_number) + }; + + response.insert_cookie( + Cookie::build((subscriptions_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + subscriptions_number_to_delete_from += 1; + } + + // While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}")); + + // Increment subscriptions cookie number + subscriptions_number_to_delete_from += 1; + } + } else { + // Remove unnumbered subscriptions cookie + response.remove_cookie("subscriptions".to_string()); + + // Starts at one to deal with the first numbered subscription cookie and onwards + let mut subscriptions_number_to_delete_from = 1; + + // While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}")); + + // Increment subscriptions cookie number + subscriptions_number_to_delete_from += 1; + } + } + + // If there are filters to restore set them and delete any old filters cookies, otherwise delete them all + if filters.is_some() { + let filters_list: Vec<String> = filters.expect("Filters").split('+').map(str::to_string).collect(); + + // Start at 0 to keep track of what number we need to start deleting old filters cookies from + let mut filters_number_to_delete_from = 0; + + // Starting at 0 so we 
handle the filters cookie without a number first + for (filters_number, list) in join_until_size_limit(&filters_list).into_iter().enumerate() { + let filters_cookie = if filters_number == 0 { + "filters".to_string() + } else { + format!("filters{}", filters_number) + }; + + response.insert_cookie( + Cookie::build((filters_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + filters_number_to_delete_from += 1; + } + + // While filtersNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) { + // Remove that filters cookie + response.remove_cookie(format!("filters{filters_number_to_delete_from}")); + + // Increment filters cookie number + filters_number_to_delete_from += 1; + } + } else { + // Remove unnumbered filters cookie + response.remove_cookie("filters".to_string()); + + // Starts at one to deal with the first numbered filters cookie and onwards + let mut filters_number_to_delete_from = 1; + + // While filtersNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) { + // Remove that filters cookie + response.remove_cookie(format!("filters{filters_number_to_delete_from}")); + + // Increment filters cookie number + filters_number_to_delete_from += 1; + } + } + response } @@ -148,3 +264,25 @@ pub async fn restore(req: Request<Body>) -> Result<Response<Body>, String> { pub async fn update(req: Request<Body>) -> Result<Response<Body>, String> { Ok(set_cookies_method(req, false)) } + +pub async fn encoded_restore(req: Request<Body>) -> Result<Response<Body>, String> { + let body = hyper::body::to_bytes(req.into_body()) + .await + .map_err(|e| format!("Failed to get bytes from request body: {}", e))?; + + let encoded_prefs = form_urlencoded::parse(&body) + .find(|(key, _)| key == "encoded_prefs") + .map(|(_, value)| value) + .ok_or_else(|| "encoded_prefs parameter not found in request body".to_string())?; + + let bytes = base2048::decode(&encoded_prefs).ok_or_else(|| "Failed to decode base2048 encoded preferences".to_string())?; + + let out = deflate_decompress(bytes)?; + + let mut prefs: Preferences = bincode::deserialize(&out).map_err(|e| format!("Failed to deserialize bytes into Preferences struct: {}", e))?; + prefs.available_themes = vec![]; + + let url = format!("/settings/restore/?{}", prefs.to_urlencoded()?); + + Ok(redirect(&url)) +} diff --git a/src/subreddit.rs index 88aa542..e6d1cca 100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -3,14 +3,17 @@ use crate::{config, utils}; // CRATES use crate::utils::{ - catch_random, error, filter_posts, format_num, format_url, get_filters, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit, + catch_random, error, filter_posts, format_num, format_url, get_filters, info, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, + Subreddit, }; use crate::{client::json, server::RequestExt, server::ResponseExt}; use cookie::Cookie; +use htmlescape::decode_html; use hyper::{Body, Request, Response}; -use log::{debug, trace}; +use log::debug; use rinja::Template; +use chrono::DateTime; use once_cell::sync::Lazy; use regex::Regex; use time::{Duration, OffsetDateTime}; @@ -63,9 +66,9 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> { // Build Reddit API path let root = req.uri().path() == "/"; let query = 
req.uri().query().unwrap_or_default().to_string(); - trace!("query: {}", query); let subscribed = setting(&req, "subscriptions"); let front_page = setting(&req, "front_page"); + let remove_default_feeds = setting(&req, "remove_default_feeds") == "on"; let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string()); let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort)); @@ -78,6 +81,21 @@ } else { front_page.clone() }); + + if (sub_name == "popular" || sub_name == "all") && remove_default_feeds { + if subscribed.is_empty() { + return info(req, "Subscribe to some subreddits! (Default feeds disabled in settings)").await; + } else { + // If there are subscribed subs, but we get here, then the problem is that front_page pref is set to something besides default. + // Tell user to go to settings and change front page to default. + return info( + req, + "You have subscribed to some subreddits, but your front page is not set to default. Visit settings and change front page to default.", + ) + .await; + } + } + let quarantined = can_access_quarantine(&req, &sub_name) || root; // Handle random subreddits @@ -214,6 +232,41 @@ pub fn can_access_quarantine(req: &Request<Body>, sub: &str) -> bool { setting(req, &format!("allow_quaran_{}", sub.to_lowercase())).parse().unwrap_or_default() } +// Join items in chunks of 4000 bytes in length for cookies +pub fn join_until_size_limit<T: ToString>(vec: &[T]) -> Vec<String> { + let mut result = Vec::new(); + let mut list = String::new(); + let mut current_size = 0; + + for item in vec { + // Size in bytes + let item_size = item.to_string().len(); + // Use 4000 bytes to leave us some headroom because the name and options of the cookie count towards the 4096 byte cap + if current_size + item_size > 4000 { + // If last item add a separator on the end of the list so it's interpreted properly in tandem with the next cookie + list.push('+'); + + // Push current list to result vector + result.push(list); + + // Reset the list variable so we can continue with only new items + list = String::new(); + } + // Add separator if not the first item + if !list.is_empty() { + list.push('+'); + } + // Add current item to list + list.push_str(&item.to_string()); + current_size = list.len() + item_size; + } + // Make sure to push whatever items remain into the result vector + result.push(list); + + // Return resulting vector + result +} + // Sub, filter, unfilter, or unsub by setting subscription cookie using response "Set-Cookie" header pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>, String> { let sub = req.param("sub").unwrap_or_default(); @@ -306,28 +359,101 @@ let mut response = redirect(&path); - // Delete cookie if empty, else set + // If sub_list is empty remove all subscriptions cookies, otherwise update them and remove old ones if sub_list.is_empty() { + // Remove subscriptions cookie response.remove_cookie("subscriptions".to_string()); + + // Start with first numbered subscriptions cookie + let mut subscriptions_number = 1; + + // While whatever subscriptionsNUMBER cookie we're looking at has a value + while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{}", subscriptions_number)); + + // Increment subscriptions cookie number + subscriptions_number += 1; + } } else { -
response.insert_cookie( - Cookie::build(("subscriptions", sub_list.join("+"))) - .path("/") - .http_only(true) - .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) - .into(), - ); + // Start at 0 to keep track of what number we need to start deleting old subscription cookies from + let mut subscriptions_number_to_delete_from = 0; + + // Starting at 0 so we handle the subscription cookie without a number first + for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() { + let subscriptions_cookie = if subscriptions_number == 0 { + "subscriptions".to_string() + } else { + format!("subscriptions{}", subscriptions_number) + }; + + response.insert_cookie( + Cookie::build((subscriptions_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + subscriptions_number_to_delete_from += 1; + } + + // While whatever subscriptionsNUMBER cookie we're looking at has a value + while req.cookie(&format!("subscriptions{}", subscriptions_number_to_delete_from)).is_some() { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{}", subscriptions_number_to_delete_from)); + + // Increment subscriptions cookie number + subscriptions_number_to_delete_from += 1; + } } + + // If filters is empty remove all filters cookies, otherwise update them and remove old ones if filters.is_empty() { + // Remove filters cookie response.remove_cookie("filters".to_string()); + + // Start with first numbered filters cookie + let mut filters_number = 1; + + // While whatever filtersNUMBER cookie we're looking at has a value + while req.cookie(&format!("filters{}", filters_number)).is_some() { + // Remove that filters cookie + response.remove_cookie(format!("filters{}", filters_number)); + + // Increment filters cookie number + filters_number += 1; + } } else { - response.insert_cookie( - Cookie::build(("filters", filters.join("+"))) - .path("/") - .http_only(true) - .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) - .into(), - ); + // Start at 0 to keep track of what number we need to start deleting old filters cookies from + let mut filters_number_to_delete_from = 0; + + for (filters_number, list) in join_until_size_limit(&filters).into_iter().enumerate() { + let filters_cookie = if filters_number == 0 { + "filters".to_string() + } else { + format!("filters{}", filters_number) + }; + + response.insert_cookie( + Cookie::build((filters_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + filters_number_to_delete_from += 1; + } + + // While whatever filtersNUMBER cookie we're looking at has a value + while req.cookie(&format!("filters{}", filters_number_to_delete_from)).is_some() { + // Remove that filters cookie + response.remove_cookie(format!("filters{}", filters_number_to_delete_from)); + + // Increment filters cookie number + filters_number_to_delete_from += 1; + } } Ok(response) } @@ -496,9 +622,10 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> { .into_iter() .map(|post| Item { title: Some(post.title.to_string()), - link: Some(utils::get_post_url(&post)), + link: Some(format_url(&utils::get_post_url(&post))), author: Some(post.author.name), - content: Some(rewrite_urls(&post.body)), + content: Some(rewrite_urls(&decode_html(&post.body).unwrap())), + pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()), description: Some(format!( "<a href='{}{}'>Comments</a>", 
config::get_setting("REDLIB_FULL_URL").unwrap_or_default(), diff --git a/src/user.rs index 50a4daa..592389d 100644 --- a/src/user.rs +++ b/src/user.rs @@ -5,6 +5,8 @@ use crate::client::json; use crate::server::RequestExt; use crate::utils::{error, filter_posts, format_url, get_filters, nsfw_landing, param, setting, template, Post, Preferences, User}; use crate::{config, utils}; +use chrono::DateTime; +use htmlescape::decode_html; use hyper::{Body, Request, Response}; use rinja::Template; use time::{macros::format_description, OffsetDateTime}; @@ -163,9 +165,10 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> { .into_iter() .map(|post| Item { title: Some(post.title.to_string()), - link: Some(utils::get_post_url(&post)), + link: Some(format_url(&utils::get_post_url(&post))), author: Some(post.author.name), - content: Some(rewrite_urls(&post.body)), + pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()), + content: Some(rewrite_urls(&decode_html(&post.body).unwrap())), ..Default::default() }) .collect::<Vec<Item>>(), diff --git a/src/utils.rs index 4ae3e1a..e39d851 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -8,16 +8,19 @@ use crate::config::{self, get_setting}; use crate::{client::json, server::RequestExt}; use cookie::Cookie; use hyper::{Body, Request, Response}; +use libflate::deflate::{Decoder, Encoder}; use log::error; use once_cell::sync::Lazy; use regex::Regex; +use revision::revisioned; use rinja::Template; use rust_embed::RustEmbed; -use serde::{Serialize, Serializer}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_json::Value; use serde_json_path::{JsonPath, JsonPathExt}; use std::collections::{HashMap, HashSet}; use std::env; +use std::io::{Read, Write}; use std::str::FromStr; use std::string::ToString; use time::{macros::format_description, Duration, OffsetDateTime}; @@ -233,6 +236,14 @@ impl Media { // If this post contains a gallery of images gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]); + ("gallery", &data["url"], None) + } else if data["crosspost_parent_list"][0]["is_gallery"].as_bool().unwrap_or_default() { + // If this post contains a gallery of images + gallery = GalleryMedia::parse( + &data["crosspost_parent_list"][0]["gallery_data"]["items"], + &data["crosspost_parent_list"][0]["media_metadata"], + ); + ("gallery", &data["url"], None) + } else if data["is_reddit_media_domain"].as_bool().unwrap_or_default() && data["domain"] == "i.redd.it" { // If this post contains a reddit media (image) URL. @@ -542,6 +553,14 @@ pub struct ErrorTemplate { pub url: String, } +#[derive(Template)] +#[template(path = "info.html")] +pub struct InfoTemplate { + pub msg: String, + pub prefs: Preferences, + pub url: String, +} + /// Template for NSFW landing page. The landing page is displayed when a page's /// content is wholly NSFW, but a user has not enabled the option to view NSFW /// posts. 
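The two RSS hunks above (subreddit.rs and user.rs) derive each feed item's `pub_date` the same way: the post's UNIX timestamp is run through chrono to produce the RFC 2822 date string RSS readers expect. A minimal sketch of just that conversion, assuming a `created_ts: u64` field as in the diff; the epoch fallback mirrors the `unwrap_or_default()` call:

```rust
use chrono::DateTime;

// Convert a post's UNIX timestamp into an RFC 2822 date for an RSS <pubDate>.
fn rfc2822_pub_date(created_ts: u64) -> String {
    // from_timestamp returns None for out-of-range values; fall back to the epoch
    DateTime::from_timestamp(created_ts as i64, 0).unwrap_or_default().to_rfc2822()
}

fn main() {
    // Prints "Tue, 11 Feb 2025 19:33:20 +0000"
    println!("{}", rfc2822_pub_date(1_739_302_400));
}
```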
@@ -601,42 +620,78 @@ pub struct Params { pub before: Option<String>, } -#[derive(Default, Serialize)] +#[derive(Default, Serialize, Deserialize, Debug, PartialEq, Eq)] +#[revisioned(revision = 1)] pub struct Preferences { - #[serde(skip)] + #[revision(start = 1)] + #[serde(skip_serializing, skip_deserializing)] pub available_themes: Vec<String>, + #[revision(start = 1)] pub theme_light: String, + #[revision(start = 1)] pub theme_dark: String, + #[revision(start = 1)] pub front_page: String, + #[revision(start = 1)] pub layout: String, + #[revision(start = 1)] pub wide: String, + #[revision(start = 1)] pub blur_spoiler: String, + #[revision(start = 1)] pub show_nsfw: String, + #[revision(start = 1)] pub blur_nsfw: String, + #[revision(start = 1)] pub hide_hls_notification: String, + #[revision(start = 1)] pub video_quality: String, + #[revision(start = 1)] pub hide_sidebar_and_summary: String, + #[revision(start = 1)] pub use_hls: String, + #[revision(start = 1)] pub autoplay_videos: String, + #[revision(start = 1)] pub fixed_navbar: String, + #[revision(start = 1)] pub disable_visit_reddit_confirmation: String, + #[revision(start = 1)] pub comment_sort: String, + #[revision(start = 1)] pub post_sort: String, - #[serde(serialize_with = "serialize_vec_with_plus")] + #[revision(start = 1)] + #[serde(serialize_with = "serialize_vec_with_plus", deserialize_with = "deserialize_vec_with_plus")] pub subscriptions: Vec<String>, - #[serde(serialize_with = "serialize_vec_with_plus")] + #[revision(start = 1)] + #[serde(serialize_with = "serialize_vec_with_plus", deserialize_with = "deserialize_vec_with_plus")] pub filters: Vec<String>, + #[revision(start = 1)] pub hide_awards: String, + #[revision(start = 1)] pub hide_score: String, + #[revision(start = 1)] + pub remove_default_feeds: String, } -fn serialize_vec_with_plus<S>(vec: &Vec<String>, serializer: S) -> Result<S::Ok, S::Error> +fn serialize_vec_with_plus<S>(vec: &[String], serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&vec.join("+")) } +fn deserialize_vec_with_plus<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error> +where + D: Deserializer<'de>, +{ + let string = String::deserialize(deserializer)?; + if string.is_empty() { + return Ok(Vec::new()); + } + Ok(string.split('+').map(|s| s.to_string()).collect()) +} + #[derive(RustEmbed)] #[folder = "static/themes/"] #[include = "*.css"] @@ -674,12 +729,36 @@ impl Preferences { filters: setting(req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(), hide_awards: setting(req, "hide_awards"), hide_score: setting(req, "hide_score"), + remove_default_feeds: setting(req, "remove_default_feeds"), } } pub fn to_urlencoded(&self) -> Result<String, String> { serde_urlencoded::to_string(self).map_err(|e| e.to_string()) } + + pub fn to_bincode(&self) -> Result<Vec<u8>, String> { + bincode::serialize(self).map_err(|e| e.to_string()) + } + pub fn to_compressed_bincode(&self) -> Result<Vec<u8>, String> { + deflate_compress(self.to_bincode()?) + } + pub fn to_bincode_str(&self) -> Result<String, String> { + Ok(base2048::encode(&self.to_compressed_bincode()?)) + } +} + +pub fn deflate_compress(i: Vec<u8>) -> Result<Vec<u8>, String> { + let mut e = Encoder::new(Vec::new()); + e.write_all(&i).map_err(|e| e.to_string())?; + e.finish().into_result().map_err(|e| e.to_string()) +} + +pub fn deflate_decompress(i: Vec<u8>) -> Result<Vec<u8>, String> { + let mut decoder = Decoder::new(&i[..]); + let mut out = Vec::new(); + decoder.read_to_end(&mut out).map_err(|e| format!("Failed to read from deflate decoder: {}", e))?; + Ok(out) } /// Gets a `HashSet` of filters from the cookie in the given `Request`. 
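The hunk above gives `Preferences` a compact export format: bincode for the byte layout, DEFLATE for size, and base2048 (11 bits per character) for a short copy-pasteable token. A sketch of the full round trip using only the crate calls shown in the diff; the two-field `Prefs` struct here is a stand-in for the real `Preferences`, which additionally carries the `#[revisioned]` attribute for forward-compatible decoding:

```rust
use libflate::deflate::{Decoder, Encoder};
use serde::{Deserialize, Serialize};
use std::io::{Read, Write};

// Stand-in for the real Preferences struct.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Prefs {
    theme_dark: String,
    subscriptions: String,
}

fn main() -> Result<(), String> {
    let prefs = Prefs { theme_dark: "dracula".into(), subscriptions: "rust+linux".into() };

    // Export: serialize -> DEFLATE-compress -> base2048-encode (cf. to_bincode_str)
    let bin = bincode::serialize(&prefs).map_err(|e| e.to_string())?;
    let mut enc = Encoder::new(Vec::new());
    enc.write_all(&bin).map_err(|e| e.to_string())?;
    let compressed = enc.finish().into_result().map_err(|e| e.to_string())?;
    let token = base2048::encode(&compressed);

    // Import: base2048-decode -> decompress -> deserialize (cf. encoded_restore)
    let bytes = base2048::decode(&token).ok_or("failed to decode base2048 string")?;
    let mut dec = Decoder::new(&bytes[..]);
    let mut out = Vec::new();
    dec.read_to_end(&mut out).map_err(|e| e.to_string())?;
    let restored: Prefs = bincode::deserialize(&out).map_err(|e| e.to_string())?;

    assert_eq!(prefs, restored);
    Ok(())
}
```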
@@ -735,7 +814,15 @@ pub async fn parse_post(post: &Value) -> Post { get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or_else(|| String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)), ) } else { - rewrite_urls(&val(post, "selftext_html")) + let selftext = val(post, "selftext"); + if selftext.contains("```") { + let mut html_output = String::new(); + let parser = pulldown_cmark::Parser::new(&selftext); + pulldown_cmark::html::push_html(&mut html_output, parser); + rewrite_urls(&html_output) + } else { + rewrite_urls(&val(post, "selftext_html")) + } }; // Build a post using data parsed from Reddit post API @@ -826,18 +913,72 @@ pub fn param(path: &str, value: &str) -> Option<String> { // Retrieve the value of a setting by name pub fn setting(req: &Request<Body>, name: &str) -> String { // Parse a cookie value from request - req - .cookie(name) - .unwrap_or_else(|| { - // If there is no cookie for this setting, try receiving a default from the config - if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) { - Cookie::new(name, default) - } else { - Cookie::from(name) - } - }) - .value() - .to_string() + + // If this was called with "subscriptions" and the "subscriptions" cookie has a value + if name == "subscriptions" && req.cookie("subscriptions").is_some() { + // Create subscriptions string + let mut subscriptions = String::new(); + + // Default subscriptions cookie + if req.cookie("subscriptions").is_some() { + subscriptions.push_str(req.cookie("subscriptions").unwrap().value()); + } + + // Start with first numbered subscription cookie + let mut subscriptions_number = 1; + + // While whatever subscriptionsNUMBER cookie we're looking at has a value + while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() { + // Push whatever subscriptionsNUMBER cookie we're looking at into the subscriptions string + subscriptions.push_str(req.cookie(&format!("subscriptions{}", subscriptions_number)).unwrap().value()); + + // Increment subscription cookie number + subscriptions_number += 1; + } + + // Return the subscriptions cookies as one large string + subscriptions + } + // If this was called with "filters" and the "filters" cookie has a value + else if name == "filters" && req.cookie("filters").is_some() { + // Create filters string + let mut filters = String::new(); + + // Default filters cookie + if req.cookie("filters").is_some() { + filters.push_str(req.cookie("filters").unwrap().value()); + } + + // Start with first numbered filters cookie + let mut filters_number = 1; + + // While whatever filtersNUMBER cookie we're looking at has a value + while req.cookie(&format!("filters{}", filters_number)).is_some() { + // Push whatever filtersNUMBER cookie we're looking at into the filters string + filters.push_str(req.cookie(&format!("filters{}", filters_number)).unwrap().value()); + + // Increment filters cookie number + filters_number += 1; + } + + // Return the filters cookies as one large string + filters + } + // The above two still come to this if there was no existing value + else { + req + .cookie(name) + .unwrap_or_else(|| { + // If there is no cookie for this setting, try receiving a default from the config + if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) { + Cookie::new(name, default) + } else { + Cookie::from(name) + } + }) + .value() + .to_string() + } } // Retrieve the value of a setting by name or the default value @@ -853,11 +994,12 @@ pub fn setting_or_default(req: &Request<Body>, name: &str, default: String) -> 
String // Detect and redirect in the event of a random subreddit pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> { if sub == "random" || sub == "randnsfw" { - let new_sub = json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"] - .as_str() - .unwrap_or_default() - .to_string(); - Ok(redirect(&format!("/r/{new_sub}{additional}"))) + Ok(redirect(&format!( + "/r/{}{additional}", + json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"] + .as_str() + .unwrap_or_default() + ))) } else { Err("No redirect needed".to_string()) } } @@ -935,9 +1077,20 @@ pub fn format_url(url: &str) -> String { } } +static REGEX_BULLET: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?m)^- (.*)$").unwrap()); +static REGEX_BULLET_CONSECUTIVE_LINES: Lazy<Regex> = Lazy::new(|| Regex::new(r"\n