Mirror of https://github.com/redlib-org/redlib.git, synced 2025-04-03 04:57:38 +03:00

Merge branch 'main' into patch-2

Commit 09a3667be4
23 changed files with 1451 additions and 340 deletions
@@ -1,6 +1,6 @@
 {
 	"name": "Rust",
-	"image": "mcr.microsoft.com/devcontainers/rust:0-1-bullseye",
+	"image": "mcr.microsoft.com/devcontainers/rust:1.0.9-bookworm",
 	"features": {
 		"ghcr.io/devcontainers/features/docker-in-docker:2": {}
 	},
620	Cargo.lock	(generated)
File diff suppressed because it is too large.
15	Cargo.toml

@@ -13,7 +13,7 @@ default-run = "redlib"
 [dependencies]
 rinja = { version = "0.3.4", default-features = false }
-cached = { version = "0.51.3", features = ["async"] }
+cached = { version = "0.54.0", features = ["async"] }
 clap = { version = "4.4.11", default-features = false, features = [
 	"std",
 	"env",
@@ -23,11 +23,10 @@ regex = "1.10.2"
 serde = { version = "1.0.193", features = ["derive"] }
 cookie = "0.18.0"
 futures-lite = "2.2.0"
-hyper = { version = "0.14.28", features = ["full"] }
-hyper-rustls = { version = "0.24.2", features = [ "http2" ] }
+hyper = { version = "0.14.31", features = ["full"] }
 percent-encoding = "2.3.1"
 route-recognizer = "0.3.1"
-serde_json = "1.0.108"
+serde_json = "1.0.133"
 tokio = { version = "1.35.1", features = ["full"] }
 time = { version = "0.3.31", features = ["local-offset"] }
 url = "2.5.0"
@@ -46,9 +45,15 @@ pretty_env_logger = "0.5.0"
 dotenvy = "0.15.7"
 rss = "2.0.7"
 arc-swap = "1.7.1"
-serde_json_path = "0.6.7"
+serde_json_path = "0.7.1"
 async-recursion = "1.1.1"
 pulldown-cmark = { version = "0.12.0", features = ["simd", "html"], default-features = false }
 common-words-all = { version = "0.0.2", default-features = false, features = ["english", "one"] }
+hyper-rustls = { version = "0.24.2", features = [ "http2" ] }
+tegen = "0.1.4"
+serde_urlencoded = "0.7.1"
+chrono = { version = "0.4.39", default-features = false, features = [ "std" ] }
+htmlescape = "0.3.1"

 [dev-dependencies]
45	Dockerfile.alpine	(new file)

@@ -0,0 +1,45 @@
+# supported versions here: https://hub.docker.com/_/rust
+ARG ALPINE_VERSION=3.20
+
+########################
+## builder image
+########################
+FROM rust:alpine${ALPINE_VERSION} AS builder
+
+RUN apk add --no-cache musl-dev
+
+WORKDIR /redlib
+
+# download (most) dependencies in their own layer
+COPY Cargo.lock Cargo.toml ./
+RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs
+RUN cargo build --release --locked --bin redlib
+RUN rm ./src/main.rs && rmdir ./src
+
+# copy the source and build the redlib binary
+COPY . ./
+RUN cargo build --release --locked --bin redlib
+RUN echo "finished building redlib!"
+
+########################
+## release image
+########################
+FROM alpine:${ALPINE_VERSION} AS release
+
+# Import redlib binary from builder
+COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib
+
+# Add non-root user for running redlib
+RUN adduser --home /nonexistent --no-create-home --disabled-password redlib
+USER redlib
+
+# Document that we intend to expose port 8080 to whoever runs the container
+EXPOSE 8080
+
+# Run a healthcheck every minute to make sure redlib is functional
+HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1
+
+# Add container metadata
+LABEL org.opencontainers.image.authors="sigaloid"
+
+CMD ["redlib"]
51	Dockerfile.ubuntu	(new file)

@@ -0,0 +1,51 @@
+# supported versions here: https://hub.docker.com/_/rust
+ARG RUST_BUILDER_VERSION=slim-bookworm
+ARG UBUNTU_RELEASE_VERSION=noble
+
+########################
+## builder image
+########################
+FROM rust:${RUST_BUILDER_VERSION} AS builder
+
+WORKDIR /redlib
+
+# download (most) dependencies in their own layer
+COPY Cargo.lock Cargo.toml ./
+RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs
+RUN cargo build --release --locked --bin redlib
+RUN rm ./src/main.rs && rmdir ./src
+
+# copy the source and build the redlib binary
+COPY . ./
+RUN cargo build --release --locked --bin redlib
+RUN echo "finished building redlib!"
+
+########################
+## release image
+########################
+FROM ubuntu:${UBUNTU_RELEASE_VERSION} AS release
+
+# Install ca-certificates
+RUN apt-get update && apt-get install -y ca-certificates
+
+# Import redlib binary from builder
+COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib
+
+# Add non-root user for running redlib
+RUN useradd \
+	--no-create-home \
+	--password "!" \
+	--comment "user for running redlib" \
+	redlib
+USER redlib
+
+# Document that we intend to expose port 8080 to whoever runs the container
+EXPOSE 8080
+
+# Run a healthcheck every minute to make sure redlib is functional
+HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1
+
+# Add container metadata
+LABEL org.opencontainers.image.authors="sigaloid"
+
+CMD ["redlib"]
47	README.md

@@ -35,6 +35,9 @@
 - [Docker](#docker)
 - [Docker Compose](#docker-compose)
 - [Docker CLI](#docker-cli)
+- Podman
+- Quadlets
 - [Binary](#binary)
 - [Running as a systemd service](#running-as-a-systemd-service)
 - [Building from source](#building-from-source)
@@ -180,7 +183,7 @@ For configuration options, see the [Configuration section](#Configuration).

 [Docker](https://www.docker.com) lets you run containerized applications. Containers are loosely isolated environments that are lightweight and contain everything needed to run the application, so there's no need to rely on what's installed on the host.

-Docker images for Redlib are available at [quay.io](https://quay.io/repository/redlib/redlib), with support for `amd64`, `arm64`, and `armv7` platforms.
+Container images for Redlib are available at [quay.io](https://quay.io/repository/redlib/redlib), with support for `amd64`, `arm64`, and `armv7` platforms.

 ### Docker Compose

@@ -224,6 +227,37 @@ Stream logs from the Redlib container:
 ```bash
 docker logs -f redlib
 ```
+## Podman
+
+[Podman](https://podman.io/) lets you run containerized applications in a rootless fashion. Containers are loosely isolated environments that are lightweight and contain everything needed to run the application, so there's no need to rely on what's installed on the host.
+
+Container images for Redlib are available at [quay.io](https://quay.io/repository/redlib/redlib), with support for `amd64`, `arm64`, and `armv7` platforms.
+
+### Quadlets
+
+> [!IMPORTANT]
+> These instructions assume that you are on a systemd based distro with [podman](https://podman.io/). If not, follow these [instructions on podman's website](https://podman.io/docs/installation) for how to do so.
+> It also assumes you have used `loginctl enable-linger <username>` to enable the service to start for your user without logging in.
+
+Copy the `redlib.container` and `.env.example` files to `.config/containers/systemd/` and modify any relevant values (for example, the ports Redlib should listen on, renaming the .env file and editing its values, etc.).
+
+To start Redlib either reboot or follow the instructions below:
+
+Notify systemd of the new files
+```bash
+systemctl --user daemon-reload
+```
+
+Start the newly generated service file
+
+```bash
+systemctl --user start redlib.service
+```
+
+You can check the status of your container by using the following command:
+```bash
+systemctl --user status redlib.service
+```
+
 ## Binary

@@ -370,6 +404,17 @@ REDLIB_DEFAULT_USE_HLS = "on"
 >
 > If using Docker Compose, no changes are needed as the `.env` file is already referenced in `compose.yaml` via the `env_file: .env` line.

+## Command Line Flags
+
+Redlib supports the following command line flags:
+
+- `-4`, `--ipv4-only`: Listen on IPv4 only.
+- `-6`, `--ipv6-only`: Listen on IPv6 only.
+- `-r`, `--redirect-https`: Redirect all HTTP requests to HTTPS (no longer functional).
+- `-a`, `--address <ADDRESS>`: Sets address to listen on. Default is `[::]`.
+- `-p`, `--port <PORT>`: Port to listen on. Default is `8080`.
+- `-H`, `--hsts <EXPIRE_TIME>`: HSTS header to tell browsers that this site should only be accessed over HTTPS. Default is `604800`.
+
 ## Instance settings

 Assign a default value for each instance-specific setting by passing environment variables to Redlib in the format `REDLIB_{X}`. Replace `{X}` with the setting name (see list below) in capital letters.
32	flake.lock	(generated)

@@ -1,17 +1,12 @@
 {
   "nodes": {
     "crane": {
-      "inputs": {
-        "nixpkgs": [
-          "nixpkgs"
-        ]
-      },
       "locked": {
-        "lastModified": 1717025063,
-        "narHash": "sha256-dIubLa56W9sNNz0e8jGxrX3CAkPXsq7snuFA/Ie6dn8=",
+        "lastModified": 1731974733,
+        "narHash": "sha256-enYSSZVVl15FI5p+0Y5/Ckf5DZAvXe6fBrHxyhA/njc=",
         "owner": "ipetkov",
         "repo": "crane",
-        "rev": "480dff0be03dac0e51a8dfc26e882b0d123a450e",
+        "rev": "3cb338ce81076ce5e461cf77f7824476addb0e1c",
         "type": "github"
       },
       "original": {
@@ -25,11 +20,11 @@
       "systems": "systems"
     },
     "locked": {
-      "lastModified": 1710146030,
-      "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
+      "lastModified": 1731533236,
+      "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
       "owner": "numtide",
       "repo": "flake-utils",
-      "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
+      "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
       "type": "github"
     },
     "original": {
@@ -40,11 +35,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1717112898,
-        "narHash": "sha256-7R2ZvOnvd9h8fDd65p0JnB7wXfUvreox3xFdYWd1BnY=",
+        "lastModified": 1731890469,
+        "narHash": "sha256-D1FNZ70NmQEwNxpSSdTXCSklBH1z2isPR84J6DQrJGs=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "6132b0f6e344ce2fe34fc051b72fb46e34f668e0",
+        "rev": "5083ec887760adfe12af64830a66807423a859a7",
         "type": "github"
       },
       "original": {
@@ -64,19 +59,16 @@
     },
     "rust-overlay": {
       "inputs": {
-        "flake-utils": [
-          "flake-utils"
-        ],
         "nixpkgs": [
          "nixpkgs"
         ]
       },
       "locked": {
-        "lastModified": 1717121863,
-        "narHash": "sha256-/3sxIe7MZqF/jw1RTQCSmgTjwVod43mmrk84m50MJQ4=",
+        "lastModified": 1732069891,
+        "narHash": "sha256-moKx8AVJrViCSdA0e0nSsG8b1dAsObI4sRAtbqbvBY8=",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "2a7b53172ed08f856b8382d7dcfd36a4e0cbd866",
+        "rev": "8509a51241c407d583b1963d5079585a992506e8",
         "type": "github"
       },
       "original": {
10	flake.nix

@@ -4,19 +4,13 @@
   inputs = {
     nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";

-    crane = {
-      url = "github:ipetkov/crane";
-      inputs.nixpkgs.follows = "nixpkgs";
-    };
+    crane.url = "github:ipetkov/crane";

     flake-utils.url = "github:numtide/flake-utils";

     rust-overlay = {
       url = "github:oxalica/rust-overlay";
-      inputs = {
-        nixpkgs.follows = "nixpkgs";
-        flake-utils.follows = "flake-utils";
-      };
+      inputs.nixpkgs.follows = "nixpkgs";
     };
   };
16	redlib.container	(new file)

@@ -0,0 +1,16 @@
+[Install]
+WantedBy=default.target
+
+[Container]
+AutoUpdate=registry
+ContainerName=redlib
+DropCapability=ALL
+EnvironmentFile=.env
+HealthCmd=["wget","--spider","-q","--tries=1","http://localhost:8080/settings"]
+HealthInterval=5m
+HealthTimeout=3s
+Image=quay.io/redlib/redlib:latest
+NoNewPrivileges=true
+PublishPort=8080:8080
+ReadOnly=true
+User=nobody
@@ -24,7 +24,7 @@ echo "// Please do not edit manually" >> "$filename"
 echo "// Filled in with real app versions" >> "$filename"

 # Open the array in the source file
-echo "pub static _IOS_APP_VERSION_LIST: &[&str; $ios_app_count] = &[" >> "$filename"
+echo "pub const _IOS_APP_VERSION_LIST: &[&str; $ios_app_count] = &[" >> "$filename"

 num=0

@@ -39,12 +39,12 @@ done
 echo "];" >> "$filename"

 # Fetch Android app versions
-page_1=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions/" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq)
+page_1=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions/" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g')
 # Append with pages
-page_2=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=2" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq)
-page_3=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=3" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq)
-page_4=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=4" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq)
-page_5=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=5" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq)
+page_2=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=2" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g')
+page_3=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=3" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g')
+page_4=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=4" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g')
+page_5=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=5" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g')

 # Concatenate all pages
 versions="${page_1}"

@@ -63,7 +63,7 @@ android_count=$(echo "$versions" | wc -l)
 echo -e "Fetching \e[32m$android_count Android app versions...\e[0m"

 # Append to the source file
-echo "pub static ANDROID_APP_VERSION_LIST: &[&str; $android_count] = &[" >> "$filename"
+echo "pub const ANDROID_APP_VERSION_LIST: &[&str; $android_count] = &[" >> "$filename"

 num=0

@@ -89,7 +89,7 @@ ios_count=$(echo "$table" | wc -l)
 echo -e "Fetching \e[34m$ios_count iOS versions...\e[0m"

 # Append to the source file
-echo "pub static _IOS_OS_VERSION_LIST: &[&str; $ios_count] = &[" >> "$filename"
+echo "pub const _IOS_OS_VERSION_LIST: &[&str; $ios_count] = &[" >> "$filename"

 num=0
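The generator script above now emits `pub const` arrays instead of `pub static`, and the Rust sources later in this commit make the same swap (for example `URL_PAIRS`, the OAuth client ID, and the generated version lists). As context only, a minimal sketch of what the two forms mean for a read-only list of version strings; the names and the two-element lists here are illustrative and not taken from the repository:

```rust
// Illustration only: both forms compile and read identically at call sites.
// `const` is inlined wherever it is referenced, while `static` names a single
// fixed location in the binary. For small lookup tables either choice works.
pub const VERSIONS_CONST: &[&str; 2] = &["Version 2024.22.1/Build 1652272", "Version 2024.23.1/Build 1665606"];
pub static VERSIONS_STATIC: &[&str; 2] = &["Version 2024.22.1/Build 1652272", "Version 2024.23.1/Build 1665606"];

fn main() {
    // Indexing and length checks look the same for both.
    println!("{}", VERSIONS_CONST[0]);
    println!("{}", VERSIONS_STATIC.len());
}
```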
125	src/client.rs

@@ -19,7 +19,7 @@ use std::{io, result::Result};
 use crate::dbg_msg;
 use crate::oauth::{force_refresh_token, token_daemon, Oauth};
 use crate::server::RequestExt;
-use crate::utils::format_url;
+use crate::utils::{format_url, Post};

 const REDDIT_URL_BASE: &str = "https://oauth.reddit.com";
 const REDDIT_URL_BASE_HOST: &str = "oauth.reddit.com";
@@ -45,7 +45,7 @@ pub static OAUTH_RATELIMIT_REMAINING: AtomicU16 = AtomicU16::new(99);

 pub static OAUTH_IS_ROLLING_OVER: AtomicBool = AtomicBool::new(false);

-static URL_PAIRS: [(&str, &str); 2] = [
+const URL_PAIRS: [(&str, &str); 2] = [
 	(ALTERNATIVE_REDDIT_URL_BASE, ALTERNATIVE_REDDIT_URL_BASE_HOST),
 	(REDDIT_SHORT_URL_BASE, REDDIT_SHORT_URL_BASE_HOST),
 ];
@@ -218,40 +218,38 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
 	// Construct the hyper client from the HTTPS connector.
 	let client: &Lazy<Client<_, Body>> = &CLIENT;

-	let (token, vendor_id, device_id, user_agent, loid) = {
-		let client = OAUTH_CLIENT.load_full();
-		(
-			client.token.clone(),
-			client.headers_map.get("Client-Vendor-Id").cloned().unwrap_or_default(),
-			client.headers_map.get("X-Reddit-Device-Id").cloned().unwrap_or_default(),
-			client.headers_map.get("User-Agent").cloned().unwrap_or_default(),
-			client.headers_map.get("x-reddit-loid").cloned().unwrap_or_default(),
-		)
-	};
-
-	// Build request to Reddit. When making a GET, request gzip compression.
-	// (Reddit doesn't do brotli yet.)
-	let builder = Request::builder()
-		.method(method)
-		.uri(&url)
-		.header("User-Agent", user_agent)
-		.header("Client-Vendor-Id", vendor_id)
-		.header("X-Reddit-Device-Id", device_id)
-		.header("x-reddit-loid", loid)
-		.header("Host", host)
-		.header("Authorization", &format!("Bearer {token}"))
-		.header("Accept-Encoding", if method == Method::GET { "gzip" } else { "identity" })
-		.header("Accept-Language", "en-US,en;q=0.5")
-		.header("Connection", "keep-alive")
-		.header(
-			"Cookie",
-			if quarantine {
-				"_options=%7B%22pref_quarantine_optin%22%3A%20true%2C%20%22pref_gated_sr_optin%22%3A%20true%7D"
-			} else {
-				""
-			},
-		)
-		.body(Body::empty());
+	let mut headers: Vec<(String, String)> = vec![
+		("Host".into(), host.into()),
+		("Accept-Encoding".into(), if method == Method::GET { "gzip".into() } else { "identity".into() }),
+		(
+			"Cookie".into(),
+			if quarantine {
+				"_options=%7B%22pref_quarantine_optin%22%3A%20true%2C%20%22pref_gated_sr_optin%22%3A%20true%7D".into()
+			} else {
+				"".into()
+			},
+		),
+	];
+
+	{
+		let client = OAUTH_CLIENT.load_full();
+		for (key, value) in client.headers_map.clone() {
+			headers.push((key, value));
+		}
+	}
+
+	// shuffle headers: https://github.com/redlib-org/redlib/issues/324
+	fastrand::shuffle(&mut headers);
+
+	let mut builder = Request::builder().method(method).uri(&url);
+
+	for (key, value) in headers {
+		builder = builder.header(key, value);
+	}
+
+	let builder = builder.body(Body::empty());

 	async move {
 		match builder {
@@ -264,7 +262,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
 					return Ok(response);
 				};
 				let location_header = response.headers().get(header::LOCATION);
-				if location_header == Some(&HeaderValue::from_static("https://www.reddit.com/")) {
+				if location_header == Some(&HeaderValue::from_static(ALTERNATIVE_REDDIT_URL_BASE)) {
 					return Err("Reddit response was invalid".to_string());
 				}
 				return request(
@@ -390,6 +388,12 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
 				"Ratelimit remaining: Header says {remaining}, we have {current_rate_limit}. Resets in {reset}. Rollover: {}. Ratelimit used: {used}",
 				if is_rolling_over { "yes" } else { "no" },
 			);

+			// If can parse remaining as a float, round to a u16 and save
+			if let Ok(val) = remaining.parse::<f32>() {
+				OAUTH_RATELIMIT_REMAINING.store(val.round() as u16, Ordering::SeqCst);
+			}
+
 			Some(reset)
 		} else {
 			None
@@ -474,8 +478,57 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
 		}
 	}
 }

+async fn self_check(sub: &str) -> Result<(), String> {
+	let query = format!("/r/{sub}/hot.json?&raw_json=1");
+
+	match Post::fetch(&query, true).await {
+		Ok(_) => Ok(()),
+		Err(e) => Err(e),
+	}
+}
+
+pub async fn rate_limit_check() -> Result<(), String> {
+	// First, check a subreddit.
+	self_check("reddit").await?;
+	// This will reduce the rate limit to 99. Assert this check.
+	if OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst) != 99 {
+		return Err(format!("Rate limit check failed: expected 99, got {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst)));
+	}
+	// Now, we switch out the OAuth client.
+	// This checks for the IP rate limit association.
+	force_refresh_token().await;
+	// Now, check a new sub to break cache.
+	self_check("rust").await?;
+	// Again, assert the rate limit check.
+	if OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst) != 99 {
+		return Err(format!("Rate limit check failed: expected 99, got {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst)));
+	}
+
+	Ok(())
+}
+
 #[cfg(test)]
-static POPULAR_URL: &str = "/r/popular/hot.json?&raw_json=1&geo_filter=GLOBAL";
 use {crate::config::get_setting, sealed_test::prelude::*};

+#[tokio::test(flavor = "multi_thread")]
+async fn test_rate_limit_check() {
+	rate_limit_check().await.unwrap();
+}
+
+#[test]
+#[sealed_test(env = [("REDLIB_DEFAULT_SUBSCRIPTIONS", "rust")])]
+fn test_default_subscriptions() {
+	tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap().block_on(async {
+		let subscriptions = get_setting("REDLIB_DEFAULT_SUBSCRIPTIONS");
+		assert!(subscriptions.is_some());
+
+		// check rate limit
+		rate_limit_check().await.unwrap();
+	});
+}
+
+#[cfg(test)]
+const POPULAR_URL: &str = "/r/popular/hot.json?&raw_json=1&geo_filter=GLOBAL";
+
 #[tokio::test(flavor = "multi_thread")]
 async fn test_localization_popular() {
@@ -491,12 +544,6 @@ async fn test_obfuscated_share_link() {
 	assert_eq!(canonical_path(share_link, 3).await, Ok(Some(canonical_link)));
 }

-#[tokio::test(flavor = "multi_thread")]
-async fn test_share_link_strip_json() {
-	let link = "/17krzvz".into();
-	let canonical_link = "/comments/17krzvz".into();
-	assert_eq!(canonical_path(link, 3).await, Ok(Some(canonical_link)));
-}
 #[tokio::test(flavor = "multi_thread")]
 async fn test_private_sub() {
 	let link = json("/r/suicide/about.json?raw_json=1".into(), true).await;
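The `request()` hunk above replaces the fixed hyper builder chain with a `Vec` of header pairs that is shuffled before the request is assembled (see the issue URL in the diff's comment). A minimal, self-contained sketch of that idea, assuming only the `fastrand` crate the diff already relies on; the header names and values below are illustrative stand-ins, not the full set the client sends:

```rust
// Illustration only: randomize header order so each outgoing request presents
// its headers in a different sequence.
fn main() {
    let mut headers: Vec<(String, String)> = vec![
        ("Host".into(), "oauth.reddit.com".into()),
        ("Accept-Encoding".into(), "gzip".into()),
        ("Cookie".into(), "".into()),
    ];

    // Shuffle in place, as the new request() code does before building the request.
    fastrand::shuffle(&mut headers);

    // Apply the pairs in their shuffled order (stand-in for Request::builder().header(...)).
    for (key, value) in &headers {
        println!("{key}: {value}");
    }
}
```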
53	src/main.rs

@@ -9,9 +9,9 @@ use std::str::FromStr;
 use futures_lite::FutureExt;
 use hyper::Uri;
 use hyper::{header::HeaderValue, Body, Request, Response};
-use log::info;
+use log::{info, warn};
 use once_cell::sync::Lazy;
-use redlib::client::{canonical_path, proxy, CLIENT};
+use redlib::client::{canonical_path, proxy, rate_limit_check, CLIENT};
 use redlib::server::{self, RequestExt};
 use redlib::utils::{error, redirect, ThemeAssets};
 use redlib::{config, duplicates, headers, instance_info, post, search, settings, subreddit, user};
@@ -108,6 +108,8 @@ async fn main() {
 	let matches = Command::new("Redlib")
 		.version(env!("CARGO_PKG_VERSION"))
 		.about("Private front-end for Reddit written in Rust ")
+		.arg(Arg::new("ipv4-only").short('4').long("ipv4-only").help("Listen on IPv4 only").num_args(0))
+		.arg(Arg::new("ipv6-only").short('6').long("ipv6-only").help("Listen on IPv6 only").num_args(0))
 		.arg(
 			Arg::new("redirect-https")
 				.short('r')
@@ -146,11 +148,34 @@ async fn main() {
 		)
 		.get_matches();

+	match rate_limit_check().await {
+		Ok(()) => {
+			info!("[✅] Rate limit check passed");
+		}
+		Err(e) => {
+			let mut message = format!("Rate limit check failed: {}", e);
+			message += "\nThis may cause issues with the rate limit.";
+			message += "\nPlease report this error with the above information.";
+			message += "\nhttps://github.com/redlib-org/redlib/issues/new?assignees=sigaloid&labels=bug&title=%F0%9F%90%9B+Bug+Report%3A+Rate+limit+mismatch";
+			warn!("{}", message);
+			eprintln!("{}", message);
+		}
+	}
+
 	let address = matches.get_one::<String>("address").unwrap();
 	let port = matches.get_one::<String>("port").unwrap();
 	let hsts = matches.get_one("hsts").map(|m: &String| m.as_str());

-	let listener = [address, ":", port].concat();
+	let ipv4_only = std::env::var("IPV4_ONLY").is_ok() || matches.get_flag("ipv4-only");
+	let ipv6_only = std::env::var("IPV6_ONLY").is_ok() || matches.get_flag("ipv6-only");
+
+	let listener = if ipv4_only {
+		format!("0.0.0.0:{}", port)
+	} else if ipv6_only {
+		format!("[::]:{}", port)
+	} else {
+		[address, ":", port].concat()
+	};

 	println!("Starting Redlib...");

@@ -223,6 +248,7 @@ async fn main() {
 		.get(|_| resource(include_str!("../static/check_update.js"), "text/javascript", false).boxed());

 	app.at("/commits.atom").get(|_| async move { proxy_commit_info().await }.boxed());
+	app.at("/instances.json").get(|_| async move { proxy_instances().await }.boxed());

 	// Proxy media through Redlib
 	app.at("/vid/:id/:size").get(|r| proxy(r, "https://v.redd.it/{id}/DASH_{size}").boxed());
@@ -354,7 +380,7 @@ async fn main() {
 		Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await,

 		// Short link for post
-		Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/{id}"), 3).await {
+		Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/comments/{id}"), 3).await {
 			Ok(path_opt) => match path_opt {
 				Some(path) => Ok(redirect(&path)),
 				None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await,
@@ -399,3 +425,22 @@ async fn fetch_commit_info() -> String {

 	hyper::body::to_bytes(resp).await.expect("Failed to read body").iter().copied().map(|x| x as char).collect()
 }
+
+pub async fn proxy_instances() -> Result<Response<Body>, String> {
+	Ok(
+		Response::builder()
+			.status(200)
+			.header("content-type", "application/json")
+			.body(Body::from(fetch_instances().await))
+			.unwrap_or_default(),
+	)
+}
+
+#[cached(time = 600)]
+async fn fetch_instances() -> String {
+	let uri = Uri::from_str("https://raw.githubusercontent.com/redlib-org/redlib-instances/refs/heads/main/instances.json").expect("Invalid URI");
+
+	let resp: Body = CLIENT.get(uri).await.expect("Failed to request GitHub").into_body();
+
+	hyper::body::to_bytes(resp).await.expect("Failed to read body").iter().copied().map(|x| x as char).collect()
+}
30	src/oauth.rs

@@ -7,13 +7,13 @@ use crate::{
 use base64::{engine::general_purpose, Engine as _};
 use hyper::{client, Body, Method, Request};
 use log::{error, info, trace};
-
 use serde_json::json;
+use tegen::tegen::TextGenerator;
 use tokio::time::{error::Elapsed, timeout};

-static REDDIT_ANDROID_OAUTH_CLIENT_ID: &str = "ohXpoqrZYub1kg";
+const REDDIT_ANDROID_OAUTH_CLIENT_ID: &str = "ohXpoqrZYub1kg";

-static AUTH_ENDPOINT: &str = "https://www.reddit.com";
+const AUTH_ENDPOINT: &str = "https://www.reddit.com";

 // Spoofed client for Android devices
 #[derive(Debug, Clone, Default)]
@@ -38,12 +38,12 @@ impl Oauth {
 			}
 			Ok(None) => {
 				error!("Failed to create OAuth client. Retrying in 5 seconds...");
 				continue;
 			}
 			Err(duration) => {
 				error!("Failed to create OAuth client in {duration:?}. Retrying in 5 seconds...");
 			}
 		}
 		tokio::time::sleep(Duration::from_secs(5)).await;
 	}
 }
@@ -84,20 +84,21 @@ impl Oauth {

 		// Set JSON body. I couldn't tell you what this means. But that's what the client sends
 		let json = json!({
-			"scopes": ["*","email"]
+			"scopes": ["*","email", "pii"]
 		});
 		let body = Body::from(json.to_string());

 		// Build request
 		let request = builder.body(body).unwrap();

-		trace!("Sending token request...");
+		trace!("Sending token request...\n\n{request:?}");

 		// Send request
 		let client: &once_cell::sync::Lazy<client::Client<_, Body>> = &CLIENT;
 		let resp = client.request(request).await.ok()?;

 		trace!("Received response with status {} and length {:?}", resp.status(), resp.headers().get("content-length"));
+		trace!("OAuth headers: {:#?}", resp.headers());

 		// Parse headers - loid header _should_ be saved sent on subsequent token refreshes.
 		// Technically it's not needed, but it's easy for Reddit API to check for this.
@@ -185,11 +186,22 @@ impl Device {

 		let android_user_agent = format!("Reddit/{android_app_version}/Android {android_version}");

+		let qos = fastrand::u32(1000..=100_000);
+		let qos: f32 = qos as f32 / 1000.0;
+		let qos = format!("{:.3}", qos);
+
+		let codecs = TextGenerator::new().generate("available-codecs=video/avc, video/hevc{, video/x-vnd.on2.vp9|}");
+
 		// Android device headers
-		let headers = HashMap::from([
-			("Client-Vendor-Id".into(), uuid.clone()),
-			("X-Reddit-Device-Id".into(), uuid.clone()),
+		let headers: HashMap<String, String> = HashMap::from([
 			("User-Agent".into(), android_user_agent),
+			("x-reddit-retry".into(), "algo=no-retries".into()),
+			("x-reddit-compression".into(), "1".into()),
+			("x-reddit-qos".into(), qos),
+			("x-reddit-media-codecs".into(), codecs),
+			("Content-Type".into(), "application/json; charset=UTF-8".into()),
+			("client-vendor-id".into(), uuid.clone()),
+			("X-Reddit-Device-Id".into(), uuid.clone()),
 		]);

 		info!("[🔄] Spoofing Android client with headers: {headers:?}, uuid: \"{uuid}\", and OAuth ID \"{REDDIT_ANDROID_OAUTH_CLIENT_ID}\"");
@@ -2,8 +2,38 @@
 // Rerun scripts/update_oauth_resources.sh to update this file
 // Please do not edit manually
 // Filled in with real app versions
-pub static _IOS_APP_VERSION_LIST: &[&str; 1] = &[""];
-pub static ANDROID_APP_VERSION_LIST: &[&str; 150] = &[
+pub const _IOS_APP_VERSION_LIST: &[&str; 1] = &[""];
+pub const ANDROID_APP_VERSION_LIST: &[&str; 150] = &[
+	"Version 2024.22.1/Build 1652272",
+	"Version 2024.23.1/Build 1665606",
+	"Version 2024.24.1/Build 1682520",
+	"Version 2024.25.0/Build 1693595",
+	"Version 2024.25.2/Build 1700401",
+	"Version 2024.25.3/Build 1703490",
+	"Version 2024.26.0/Build 1710470",
+	"Version 2024.26.1/Build 1717435",
+	"Version 2024.28.0/Build 1737665",
+	"Version 2024.28.1/Build 1741165",
+	"Version 2024.30.0/Build 1770787",
+	"Version 2024.31.0/Build 1786202",
+	"Version 2024.32.0/Build 1809095",
+	"Version 2024.32.1/Build 1813258",
+	"Version 2024.33.0/Build 1819908",
+	"Version 2024.34.0/Build 1837909",
+	"Version 2024.35.0/Build 1861437",
+	"Version 2024.36.0/Build 1875012",
+	"Version 2024.37.0/Build 1888053",
+	"Version 2024.38.0/Build 1902791",
+	"Version 2024.39.0/Build 1916713",
+	"Version 2024.40.0/Build 1928580",
+	"Version 2024.41.0/Build 1941199",
+	"Version 2024.41.1/Build 1947805",
+	"Version 2024.42.0/Build 1952440",
+	"Version 2024.43.0/Build 1972250",
+	"Version 2024.44.0/Build 1988458",
+	"Version 2024.45.0/Build 2001943",
+	"Version 2024.46.0/Build 2012731",
+	"Version 2024.47.0/Build 2029755",
 	"Version 2023.48.0/Build 1319123",
 	"Version 2023.49.0/Build 1321715",
 	"Version 2023.49.1/Build 1322281",
@@ -31,9 +61,9 @@ pub static ANDROID_APP_VERSION_LIST: &[&str; 150] = &[
 	"Version 2024.20.0/Build 1612800",
 	"Version 2024.20.1/Build 1615586",
 	"Version 2024.20.2/Build 1624969",
 	"Version 2024.20.3/Build 1624970",
 	"Version 2024.21.0/Build 1631686",
 	"Version 2024.22.0/Build 1645257",
-	"Version 2024.22.1/Build 1652272",
 	"Version 2023.21.0/Build 956283",
 	"Version 2023.22.0/Build 968223",
 	"Version 2023.23.0/Build 983896",
@@ -124,35 +154,5 @@ pub static ANDROID_APP_VERSION_LIST: &[&str; 150] = &[
 	"Version 2022.40.0/Build 624782",
 	"Version 2022.41.0/Build 630468",
 	"Version 2022.41.1/Build 634168",
-	"Version 2021.39.1/Build 372418",
-	"Version 2021.41.0/Build 376052",
-	"Version 2021.42.0/Build 378193",
-	"Version 2021.43.0/Build 382019",
-	"Version 2021.44.0/Build 385129",
-	"Version 2021.45.0/Build 387663",
-	"Version 2021.46.0/Build 392043",
-	"Version 2021.47.0/Build 394342",
-	"Version 2022.10.0/Build 429896",
-	"Version 2022.1.0/Build 402829",
-	"Version 2022.11.0/Build 433004",
-	"Version 2022.12.0/Build 436848",
-	"Version 2022.13.0/Build 442084",
-	"Version 2022.13.1/Build 444621",
-	"Version 2022.14.1/Build 452742",
-	"Version 2022.15.0/Build 455453",
-	"Version 2022.16.0/Build 462377",
-	"Version 2022.17.0/Build 468480",
-	"Version 2022.18.0/Build 473740",
-	"Version 2022.19.1/Build 482464",
-	"Version 2022.2.0/Build 405543",
-	"Version 2022.3.0/Build 408637",
-	"Version 2022.4.0/Build 411368",
-	"Version 2022.5.0/Build 414731",
-	"Version 2022.6.0/Build 418391",
-	"Version 2022.6.1/Build 419585",
-	"Version 2022.6.2/Build 420562",
-	"Version 2022.7.0/Build 420849",
-	"Version 2022.8.0/Build 423906",
-	"Version 2022.9.0/Build 426592",
 ];
-pub static _IOS_OS_VERSION_LIST: &[&str; 1] = &[""];
+pub const _IOS_OS_VERSION_LIST: &[&str; 1] = &[""];
@@ -1,6 +1,7 @@
-use std::{fmt::Display, io::Write};
+use std::{collections::HashMap, fmt::Display, io::Write};

 use clap::{Parser, ValueEnum};
 use common_words_all::{get_top, Language, NgramSize};
+use redlib::utils::Post;

 #[derive(Parser)]
@@ -10,9 +11,6 @@ struct Cli {
 	#[arg(short = 's', long = "sub")]
 	sub: String,

-	#[arg(short = 'c', long = "count")]
-	count: usize,
-
 	#[arg(long = "sort")]
 	sort: SortOrder,

@@ -50,28 +48,85 @@ enum Format {

 #[tokio::main]
 async fn main() {
 	pretty_env_logger::init();
 	let cli = Cli::parse();
-	let (sub, final_count, sort, format, output) = (cli.sub, cli.count, cli.sort, cli.format, cli.output);
+	let (sub, sort, format, output) = (cli.sub, cli.sort, cli.format, cli.output);
 	let initial = format!("/r/{sub}/{sort}.json?&raw_json=1");
-	let (mut posts, mut after) = Post::fetch(&initial, false).await.unwrap();
-	while posts.len() < final_count {
+	let (posts, mut after) = Post::fetch(&initial, false).await.unwrap();
+	let mut hashmap = HashMap::new();
+	hashmap.extend(posts.into_iter().map(|post| (post.id.clone(), post)));
+	loop {
 		print!("\r");
 		let path = format!("/r/{sub}/{sort}.json?sort={sort}&t=&after={after}&raw_json=1");
 		let (new_posts, new_after) = Post::fetch(&path, false).await.unwrap();
-		posts.extend(new_posts);
+		let old_len = hashmap.len();
+		// convert to hashmap and extend hashmap
+		let new_posts = new_posts.into_iter().map(|post| (post.id.clone(), post)).collect::<HashMap<String, Post>>();
+		let len = new_posts.len();
+		hashmap.extend(new_posts);
+		if hashmap.len() - old_len < 3 {
+			break;
+		}
+
+		let x = hashmap.len() - old_len;
 		after = new_after;
 		// Print number of posts fetched
-		print!("Fetched {} posts", posts.len());
+		print!("Fetched {len} posts (+{x})",);
 		std::io::stdout().flush().unwrap();
 	}
 	println!("\n\n");
+	// additionally search if final count not reached

-	posts.truncate(final_count);
+	for word in get_top(Language::English, 10_000, NgramSize::One) {
+		let mut retrieved_posts_from_search = 0;
+		let initial = format!("/r/{sub}/search.json?q={word}&restrict_sr=on&include_over_18=on&raw_json=1&sort={sort}");
+		println!("Grabbing posts with word {word}.");
+		let (posts, mut after) = Post::fetch(&initial, false).await.unwrap();
+		hashmap.extend(posts.into_iter().map(|post| (post.id.clone(), post)));
+		'search: loop {
+			let path = format!("/r/{sub}/search.json?q={word}&restrict_sr=on&include_over_18=on&raw_json=1&sort={sort}&after={after}");
+			let (new_posts, new_after) = Post::fetch(&path, false).await.unwrap();
+			if new_posts.is_empty() || new_after.is_empty() {
+				println!("No more posts for word {word}");
+				break 'search;
+			}
+			retrieved_posts_from_search += new_posts.len();
+			let old_len = hashmap.len();
+			let new_posts = new_posts.into_iter().map(|post| (post.id.clone(), post)).collect::<HashMap<String, Post>>();
+			let len = new_posts.len();
+			hashmap.extend(new_posts);
+			let delta = hashmap.len() - old_len;
+			after = new_after;
+			// Print number of posts fetched
+			println!("Fetched {len} posts (+{delta})",);
+
+			if retrieved_posts_from_search > 1000 {
+				println!("Reached 1000 posts from search");
+				break 'search;
+			}
+		}
+		// Need to save incrementally. atomic save + move
+		let tmp_file = output.clone().unwrap_or_else(|| format!("{sub}.json.tmp"));
+		let perm_file = output.clone().unwrap_or_else(|| format!("{sub}.json"));
+		write_posts(&hashmap.values().collect(), tmp_file.clone());
+		// move file
+		std::fs::rename(tmp_file, perm_file).unwrap();
+	}
+
+	println!("\n\n");
+
+	println!("Size of hashmap: {}", hashmap.len());
+
+	let posts: Vec<&Post> = hashmap.values().collect();
 	match format {
 		Format::Json => {
 			let filename: String = output.unwrap_or_else(|| format!("{sub}.json"));
-			let json = serde_json::to_string(&posts).unwrap();
-			std::fs::write(filename, json).unwrap();
+			write_posts(&posts, filename);
 		}
 	}
 }
+
+fn write_posts(posts: &Vec<&Post>, filename: String) {
+	let json = serde_json::to_string(&posts).unwrap();
+	std::fs::write(filename, json).unwrap();
+}
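The scraper change above deduplicates fetched posts by keying them on their id in a `HashMap` and stops paging once a fetch contributes fewer than three new entries. A self-contained sketch of that stopping rule, using plain strings and a fake page generator instead of the `Post` type and Reddit API calls (both of which are assumptions for illustration only):

```rust
use std::collections::HashMap;

/// Simulated "pages" of (id, payload) items; ids overlap across pages.
fn fetch_page(page: usize) -> Vec<(String, String)> {
    (0..10)
        .map(|i| {
            let id = format!("id{}", (page * 7 + i) % 25); // forces repeats over time
            (id.clone(), format!("payload for {id}"))
        })
        .collect()
}

fn main() {
    let mut seen: HashMap<String, String> = HashMap::new();
    for page in 0usize.. {
        let before = seen.len();
        seen.extend(fetch_page(page));
        let gained = seen.len() - before;
        println!("page {page}: +{gained} unique items ({} total)", seen.len());
        // Stop once a page contributes fewer than 3 previously unseen items,
        // mirroring the `hashmap.len() - old_len < 3` check in the diff.
        if gained < 3 {
            break;
        }
    }
}
```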
@@ -25,7 +25,7 @@ use std::{
 	str::{from_utf8, Split},
 	string::ToString,
 };
-use time::Duration;
+use time::OffsetDateTime;

 use crate::dbg_msg;

@@ -170,10 +170,8 @@ impl ResponseExt for Response<Body> {
 	}

 	fn remove_cookie(&mut self, name: String) {
-		let mut cookie = Cookie::from(name);
-		cookie.set_path("/");
-		cookie.set_max_age(Duration::seconds(1));
-		if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
+		let removal_cookie = Cookie::build(name).path("/").http_only(true).expires(OffsetDateTime::now_utc());
+		if let Ok(val) = header::HeaderValue::from_str(&removal_cookie.to_string()) {
 			self.headers_mut().append("Set-Cookie", val);
 		}
 	}
@@ -240,8 +238,14 @@ impl Server {
 				path.pop();
 			}

+			// Replace HEAD with GET for routing
+			let (method, is_head) = match req.method() {
+				&Method::HEAD => (&Method::GET, true),
+				method => (method, false),
+			};
+
 			// Match the visited path with an added route
-			match router.recognize(&format!("/{}{}", req.method().as_str(), path)) {
+			match router.recognize(&format!("/{}{}", method.as_str(), path)) {
 				// If a route was configured for this path
 				Ok(found) => {
 					let mut parammed = req;
@@ -253,17 +257,21 @@ impl Server {
 					match func.await {
 						Ok(mut res) => {
 							res.headers_mut().extend(def_headers);
-							let _ = compress_response(&req_headers, &mut res).await;
+							if is_head {
+								*res.body_mut() = Body::empty();
+							} else {
+								let _ = compress_response(&req_headers, &mut res).await;
+							}

 							Ok(res)
 						}
-						Err(msg) => new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await,
+						Err(msg) => new_boilerplate(def_headers, req_headers, 500, if is_head { Body::empty() } else { Body::from(msg) }).await,
 					}
 				}
 				.boxed()
 			}
 			// If there was a routing error
-			Err(e) => new_boilerplate(def_headers, req_headers, 404, e.into()).boxed(),
+			Err(e) => new_boilerplate(def_headers, req_headers, 404, if is_head { Body::empty() } else { e.into() }).boxed(),
 		}
 	}))
 }
@@ -274,8 +282,19 @@ impl Server {

 		// Bind server to address specified above. Gracefully shut down if CTRL+C is pressed
 		let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async {
+			#[cfg(windows)]
 			// Wait for the CTRL+C signal
 			tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler");
+
+			#[cfg(unix)]
+			{
+				// Wait for CTRL+C or SIGTERM signals
+				let mut signal_terminate = tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()).expect("Failed to install SIGTERM signal handler");
+				tokio::select! {
+					_ = tokio::signal::ctrl_c() => (),
+					_ = signal_terminate.recv() => ()
+				}
+			}
 		});

 		server.boxed()
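The routing hunks above fold HEAD requests into the GET routes and then strip the body before responding. A minimal sketch of that pattern without the hyper and route-recognizer types used in the diff; the `Method` enum and handler below are stand-ins, not the server's real API:

```rust
// Stand-in types; the real code works with hyper's Method, Request and Body.
enum Method {
    Get,
    Head,
}

fn handle(method: Method, path: &str) -> (u16, String) {
    // Route HEAD exactly like GET, but remember that it was a HEAD request.
    let (route_method, is_head) = match method {
        Method::Head => (Method::Get, true),
        m => (m, false),
    };

    let (status, body) = match (route_method, path) {
        (Method::Get, "/settings") => (200, String::from("<html>settings page</html>")),
        _ => (404, String::from("not found")),
    };

    // A HEAD response carries the same status and headers but an empty body.
    if is_head {
        (status, String::new())
    } else {
        (status, body)
    }
}

fn main() {
    assert!(!handle(Method::Get, "/settings").1.is_empty());
    assert_eq!(handle(Method::Head, "/settings"), (200, String::new()));
    println!("HEAD and GET share routing; only the body differs.");
}
```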
116	src/settings.rs

@@ -4,6 +4,7 @@ use std::collections::HashMap;

 // CRATES
 use crate::server::ResponseExt;
+use crate::subreddit::join_until_size_limit;
 use crate::utils::{redirect, template, Preferences};
 use cookie::Cookie;
 use futures_lite::StreamExt;
@@ -119,7 +120,7 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body

 	let mut response = redirect(&path);

-	for name in [PREFS.to_vec(), vec!["subscriptions", "filters"]].concat() {
+	for name in PREFS {
 		match form.get(name) {
 			Some(value) => response.insert_cookie(
 				Cookie::build((name.to_owned(), value.clone()))
@@ -136,6 +137,119 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body
 		};
 	}

+	// Get subscriptions/filters to restore from query string
+	let subscriptions = form.get("subscriptions");
+	let filters = form.get("filters");
+
+	// We can't search through the cookies directly like in subreddit.rs, so instead we have to make a string out of the request's headers to search through
+	let cookies_string = parts
+		.headers
+		.get("cookie")
+		.map(|hv| hv.to_str().unwrap_or("").to_string()) // Return String
+		.unwrap_or_else(String::new); // Return an empty string if None
+
+	// If there are subscriptions to restore set them and delete any old subscriptions cookies, otherwise delete them all
+	if subscriptions.is_some() {
+		let sub_list: Vec<String> = subscriptions.expect("Subscriptions").split('+').map(str::to_string).collect();
+
+		// Start at 0 to keep track of what number we need to start deleting old subscription cookies from
+		let mut subscriptions_number_to_delete_from = 0;
+
+		// Starting at 0 so we handle the subscription cookie without a number first
+		for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() {
+			let subscriptions_cookie = if subscriptions_number == 0 {
+				"subscriptions".to_string()
+			} else {
+				format!("subscriptions{}", subscriptions_number)
+			};
+
+			response.insert_cookie(
+				Cookie::build((subscriptions_cookie, list))
+					.path("/")
+					.http_only(true)
+					.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+					.into(),
+			);
+
+			subscriptions_number_to_delete_from += 1;
+		}
+
+		// While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie
+		while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) {
+			// Remove that subscriptions cookie
+			response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}"));
+
+			// Increment subscriptions cookie number
+			subscriptions_number_to_delete_from += 1;
+		}
+	} else {
+		// Remove unnumbered subscriptions cookie
+		response.remove_cookie("subscriptions".to_string());
+
+		// Starts at one to deal with the first numbered subscription cookie and onwards
+		let mut subscriptions_number_to_delete_from = 1;
+
+		// While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie
+		while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) {
+			// Remove that subscriptions cookie
+			response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}"));
+
+			// Increment subscriptions cookie number
+			subscriptions_number_to_delete_from += 1;
+		}
+	}
+
+	// If there are filters to restore set them and delete any old filters cookies, otherwise delete them all
+	if filters.is_some() {
+		let filters_list: Vec<String> = filters.expect("Filters").split('+').map(str::to_string).collect();
+
+		// Start at 0 to keep track of what number we need to start deleting old subscription cookies from
+		let mut filters_number_to_delete_from = 0;
+
+		// Starting at 0 so we handle the subscription cookie without a number first
+		for (filters_number, list) in join_until_size_limit(&filters_list).into_iter().enumerate() {
+			let filters_cookie = if filters_number == 0 {
+				"filters".to_string()
+			} else {
+				format!("filters{}", filters_number)
+			};
+
+			response.insert_cookie(
+				Cookie::build((filters_cookie, list))
+					.path("/")
+					.http_only(true)
+					.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+					.into(),
+			);
+
+			filters_number_to_delete_from += 1;
+		}
+
+		// While filtersNUMBER= is in the string of cookies add a response removing that cookie
+		while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) {
+			// Remove that filters cookie
+			response.remove_cookie(format!("filters{filters_number_to_delete_from}"));
+
+			// Increment filters cookie number
+			filters_number_to_delete_from += 1;
+		}
+	} else {
+		// Remove unnumbered filters cookie
+		response.remove_cookie("filters".to_string());
+
+		// Starts at one to deal with the first numbered subscription cookie and onwards
+		let mut filters_number_to_delete_from = 1;
+
+		// While filtersNUMBER= is in the string of cookies add a response removing that cookie
+		while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) {
+			// Remove that sfilters cookie
+			response.remove_cookie(format!("filters{filters_number_to_delete_from}"));
+
+			// Increment filters cookie number
+			filters_number_to_delete_from += 1;
+		}
+	}
+
 	response
 }
145	src/subreddit.rs

@@ -8,8 +8,10 @@ use crate::utils::{
 use crate::{client::json, server::RequestExt, server::ResponseExt};
 use cookie::Cookie;
 use hyper::{Body, Request, Response};
+use log::{debug, trace};
 use rinja::Template;

+use chrono::DateTime;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use time::{Duration, OffsetDateTime};
@@ -62,6 +64,7 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
 	// Build Reddit API path
 	let root = req.uri().path() == "/";
 	let query = req.uri().query().unwrap_or_default().to_string();
+	trace!("query: {}", query);
 	let subscribed = setting(&req, "subscriptions");
 	let front_page = setting(&req, "front_page");
 	let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string());
@@ -123,6 +126,7 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
 	}

 	let path = format!("/r/{}/{sort}.json?{}{params}", sub_name.replace('+', "%2B"), req.uri().query().unwrap_or_default());
+	debug!("Path: {}", path);
 	let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
 	let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B");
 	let filters = get_filters(&req);
@@ -211,6 +215,41 @@ pub fn can_access_quarantine(req: &Request<Body>, sub: &str) -> bool {
 	setting(req, &format!("allow_quaran_{}", sub.to_lowercase())).parse().unwrap_or_default()
 }

+// Join items in chunks of 4000 bytes in length for cookies
+pub fn join_until_size_limit<T: std::fmt::Display>(vec: &[T]) -> Vec<std::string::String> {
+	let mut result = Vec::new();
+	let mut list = String::new();
+	let mut current_size = 0;
+
+	for item in vec {
+		// Size in bytes
+		let item_size = item.to_string().len();
+		// Use 4000 bytes to leave us some headroom because the name and options of the cookie count towards the 4096 byte cap
+		if current_size + item_size > 4000 {
+			// If last item add a seperator on the end of the list so it's interpreted properly in tanden with the next cookie
+			list.push('+');
+
+			// Push current list to result vector
+			result.push(list);
+
+			// Reset the list variable so we can continue with only new items
+			list = String::new();
+		}
+		// Add separator if not the first item
+		if !list.is_empty() {
+			list.push('+');
+		}
+		// Add current item to list
+		list.push_str(&item.to_string());
+		current_size = list.len() + item_size;
+	}
+	// Make sure to push whatever the remaining subreddits are there into the result vector
+	result.push(list);
+
+	// Return resulting vector
+	result
+}
+
 // Sub, filter, unfilter, or unsub by setting subscription cookie using response "Set-Cookie" header
 pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>, String> {
 	let sub = req.param("sub").unwrap_or_default();
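An aside on the helper just added: `join_until_size_limit` splits a long `+`-joined list across several cookies (`subscriptions`, `subscriptions1`, ...) so no single cookie exceeds roughly 4000 bytes, and the settings-restore hunk earlier in this commit reuses it. Below is a small usage sketch with a simplified chunker; it is not the repository's function (which also appends a trailing `+` to full chunks), and the 40-byte budget exists only to keep the example readable:

```rust
// Simplified stand-in for join_until_size_limit: chunk "+"-joined items so each
// chunk stays under a byte budget (4000 in the real code, 40 here for brevity).
fn join_until_limit(items: &[&str], limit: usize) -> Vec<String> {
    let mut chunks = Vec::new();
    let mut current = String::new();
    for item in items {
        if !current.is_empty() && current.len() + 1 + item.len() > limit {
            chunks.push(current.clone());
            current.clear();
        }
        if !current.is_empty() {
            current.push('+');
        }
        current.push_str(item);
    }
    chunks.push(current);
    chunks
}

fn main() {
    let subs = ["rust", "programming", "linux", "selfhosted", "privacy", "opensource"];
    for (i, chunk) in join_until_limit(&subs, 40).iter().enumerate() {
        // First chunk goes in the "subscriptions" cookie, later ones in
        // "subscriptions1", "subscriptions2", ... as in the diff.
        let cookie_name = if i == 0 { "subscriptions".to_string() } else { format!("subscriptions{i}") };
        println!("{cookie_name}={chunk}");
    }
}
```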
|
||||
// Sub, filter, unfilter, or unsub by setting subscription cookie using response "Set-Cookie" header
|
||||
pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
|
@@ -303,28 +342,101 @@ pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>,
 	let mut response = redirect(&path);
 
-	// Delete cookie if empty, else set
+	// If sub_list is empty remove all subscriptions cookies, otherwise update them and remove old ones
 	if sub_list.is_empty() {
+		// Remove subscriptions cookie
 		response.remove_cookie("subscriptions".to_string());
+
+		// Start with first numbered subscriptions cookie
+		let mut subscriptions_number = 1;
+
+		// While whatever subscriptionsNUMBER cookie we're looking at has a value
+		while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() {
+			// Remove that subscriptions cookie
+			response.remove_cookie(format!("subscriptions{}", subscriptions_number));
+
+			// Increment subscriptions cookie number
+			subscriptions_number += 1;
+		}
 	} else {
-		response.insert_cookie(
-			Cookie::build(("subscriptions", sub_list.join("+")))
-				.path("/")
-				.http_only(true)
-				.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
-				.into(),
-		);
+		// Start at 0 to keep track of what number we need to start deleting old subscription cookies from
+		let mut subscriptions_number_to_delete_from = 0;
+
+		// Starting at 0 so we handle the subscription cookie without a number first
+		for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() {
+			let subscriptions_cookie = if subscriptions_number == 0 {
+				"subscriptions".to_string()
+			} else {
+				format!("subscriptions{}", subscriptions_number)
+			};
+
+			response.insert_cookie(
+				Cookie::build((subscriptions_cookie, list))
+					.path("/")
+					.http_only(true)
+					.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+					.into(),
+			);
+
+			subscriptions_number_to_delete_from += 1;
+		}
+
+		// While whatever subscriptionsNUMBER cookie we're looking at has a value
+		while req.cookie(&format!("subscriptions{}", subscriptions_number_to_delete_from)).is_some() {
+			// Remove that subscriptions cookie
+			response.remove_cookie(format!("subscriptions{}", subscriptions_number_to_delete_from));
+
+			// Increment subscriptions cookie number
+			subscriptions_number_to_delete_from += 1;
+		}
 	}
 
+	// If filters is empty remove all filters cookies, otherwise update them and remove old ones
 	if filters.is_empty() {
+		// Remove filters cookie
 		response.remove_cookie("filters".to_string());
+
+		// Start with first numbered filters cookie
+		let mut filters_number = 1;
+
+		// While whatever filtersNUMBER cookie we're looking at has a value
+		while req.cookie(&format!("filters{}", filters_number)).is_some() {
+			// Remove that filters cookie
+			response.remove_cookie(format!("filters{}", filters_number));
+
+			// Increment filters cookie number
+			filters_number += 1;
+		}
 	} else {
-		response.insert_cookie(
-			Cookie::build(("filters", filters.join("+")))
-				.path("/")
-				.http_only(true)
-				.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
-				.into(),
-		);
+		// Start at 0 to keep track of what number we need to start deleting old filters cookies from
+		let mut filters_number_to_delete_from = 0;
+
+		for (filters_number, list) in join_until_size_limit(&filters).into_iter().enumerate() {
+			let filters_cookie = if filters_number == 0 {
+				"filters".to_string()
+			} else {
+				format!("filters{}", filters_number)
+			};
+
+			response.insert_cookie(
+				Cookie::build((filters_cookie, list))
+					.path("/")
+					.http_only(true)
+					.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+					.into(),
+			);
+
+			filters_number_to_delete_from += 1;
+		}
+
+		// While whatever filtersNUMBER cookie we're looking at has a value
+		while req.cookie(&format!("filters{}", filters_number_to_delete_from)).is_some() {
+			// Remove that filters cookie
+			response.remove_cookie(format!("filters{}", filters_number_to_delete_from));
+
+			// Increment filters cookie number
+			filters_number_to_delete_from += 1;
+		}
 	}
 
 	Ok(response)
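The hunk above relies on a `join_until_size_limit` helper that is not shown in this excerpt. A minimal sketch of what such a helper could look like, assuming it splits a list of names into `+`-joined chunks that each stay below the roughly 4 KB browser limit on a single cookie (the real helper in the codebase may differ):

fn join_until_size_limit<T: AsRef<str>>(parts: &[T]) -> Vec<String> {
	// Conservative budget per cookie value, leaving headroom for the cookie
	// name, separators and attributes; the exact number here is an assumption.
	const COOKIE_BUDGET: usize = 3500;

	let mut chunks = Vec::new();
	let mut current = String::new();

	for part in parts {
		let part = part.as_ref();
		// The +1 accounts for the "+" separator joining entries inside a chunk.
		if !current.is_empty() && current.len() + part.len() + 1 > COOKIE_BUDGET {
			chunks.push(std::mem::take(&mut current));
		}
		if !current.is_empty() {
			current.push('+');
		}
		current.push_str(part);
	}

	if !current.is_empty() {
		chunks.push(current);
	}

	chunks
}

Chunk 0 is then stored under the plain `subscriptions`/`filters` cookie and later chunks under `subscriptions1`, `subscriptions2`, and so on, matching the loops in the hunk above.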
@@ -493,9 +605,10 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> {
 		.into_iter()
 		.map(|post| Item {
 			title: Some(post.title.to_string()),
-			link: Some(utils::get_post_url(&post)),
+			link: Some(format_url(&utils::get_post_url(&post))),
 			author: Some(post.author.name),
 			content: Some(rewrite_urls(&post.body)),
+			pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()),
 			description: Some(format!(
 				"<a href='{}{}'>Comments</a>",
 				config::get_setting("REDLIB_FULL_URL").unwrap_or_default(),
@@ -5,6 +5,7 @@ use crate::client::json;
 use crate::server::RequestExt;
 use crate::utils::{error, filter_posts, format_url, get_filters, nsfw_landing, param, setting, template, Post, Preferences, User};
 use crate::{config, utils};
+use chrono::DateTime;
 use hyper::{Body, Request, Response};
 use rinja::Template;
 use time::{macros::format_description, OffsetDateTime};
@@ -163,8 +164,9 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> {
 		.into_iter()
 		.map(|post| Item {
 			title: Some(post.title.to_string()),
-			link: Some(utils::get_post_url(&post)),
+			link: Some(format_url(&utils::get_post_url(&post))),
 			author: Some(post.author.name),
+			pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()),
 			content: Some(rewrite_urls(&post.body)),
 			..Default::default()
 		})
248 src/utils.rs
@@ -7,13 +7,14 @@ use crate::config::{self, get_setting};
 //
 use crate::{client::json, server::RequestExt};
 use cookie::Cookie;
+use htmlescape::decode_html;
 use hyper::{Body, Request, Response};
 use log::error;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use rinja::Template;
 use rust_embed::RustEmbed;
-use serde::Serialize;
+use serde::{Serialize, Serializer};
 use serde_json::Value;
 use serde_json_path::{JsonPath, JsonPathExt};
 use std::collections::{HashMap, HashSet};
@@ -233,6 +234,14 @@ impl Media {
 			// If this post contains a gallery of images
 			gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]);
 
 			("gallery", &data["url"], None)
+		} else if data["crosspost_parent_list"][0]["is_gallery"].as_bool().unwrap_or_default() {
+			// If this post contains a gallery of images
+			gallery = GalleryMedia::parse(
+				&data["crosspost_parent_list"][0]["gallery_data"]["items"],
+				&data["crosspost_parent_list"][0]["media_metadata"],
+			);
+
+			("gallery", &data["url"], None)
 		} else if data["is_reddit_media_domain"].as_bool().unwrap_or_default() && data["domain"] == "i.redd.it" {
 			// If this post contains a reddit media (image) URL.
@@ -376,7 +385,7 @@ impl Post {
 			let awards = Awards::parse(&data["all_awardings"]);
 
 			// selftext_html is set for text posts when browsing.
-			let mut body = rewrite_urls(&val(post, "selftext_html"));
+			let mut body = rewrite_urls(&decode_html(&val(post, "selftext_html")).unwrap());
 			if body.is_empty() {
 				body = rewrite_urls(&val(post, "body_html"));
 			}
@@ -507,7 +516,7 @@ impl std::ops::Deref for Awards {
 
 impl std::fmt::Display for Awards {
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-		self.iter().fold(Ok(()), |result, award| result.and_then(|()| writeln!(f, "{award}")))
+		self.iter().try_fold((), |_, award| writeln!(f, "{award}"))
 	}
 }
@@ -601,8 +610,9 @@ pub struct Params {
 	pub before: Option<String>,
 }
 
-#[derive(Default)]
+#[derive(Default, Serialize)]
 pub struct Preferences {
+	#[serde(skip)]
 	pub available_themes: Vec<String>,
 	pub theme: String,
 	pub front_page: String,
@@ -620,12 +630,21 @@ pub struct Preferences {
 	pub disable_visit_reddit_confirmation: String,
 	pub comment_sort: String,
 	pub post_sort: String,
+	#[serde(serialize_with = "serialize_vec_with_plus")]
 	pub subscriptions: Vec<String>,
+	#[serde(serialize_with = "serialize_vec_with_plus")]
 	pub filters: Vec<String>,
 	pub hide_awards: String,
 	pub hide_score: String,
 }
 
+fn serialize_vec_with_plus<S>(vec: &[String], serializer: S) -> Result<S::Ok, S::Error>
+where
+	S: Serializer,
+{
+	serializer.serialize_str(&vec.join("+"))
+}
+
 #[derive(RustEmbed)]
 #[folder = "static/themes/"]
 #[include = "*.css"]
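For illustration only (not part of the commit): with a `serialize_with` hook like the one above, `serde_urlencoded` emits each list as a single `+`-joined field. A minimal standalone sketch, using hypothetical struct and field names:

use serde::{Serialize, Serializer};

fn serialize_vec_with_plus<S: Serializer>(vec: &[String], serializer: S) -> Result<S::Ok, S::Error> {
	serializer.serialize_str(&vec.join("+"))
}

#[derive(Serialize)]
struct ExamplePrefs {
	theme: String,
	#[serde(serialize_with = "serialize_vec_with_plus")]
	subscriptions: Vec<String>,
}

fn main() {
	let prefs = ExamplePrefs {
		theme: "dark".into(),
		subscriptions: vec!["rust".into(), "linux".into()],
	};
	// Form encoding escapes "+" as %2B, so this prints:
	// theme=dark&subscriptions=rust%2Blinux
	println!("{}", serde_urlencoded::to_string(&prefs).unwrap());
}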
@@ -665,6 +684,10 @@ impl Preferences {
 			hide_score: setting(req, "hide_score"),
 		}
 	}
+
+	pub fn to_urlencoded(&self) -> Result<String, String> {
+		serde_urlencoded::to_string(self).map_err(|e| e.to_string())
+	}
 }
 
 /// Gets a `HashSet` of filters from the cookie in the given `Request`.
@@ -819,18 +842,72 @@ pub fn param(path: &str, value: &str) -> Option<String> {
 // Retrieve the value of a setting by name
 pub fn setting(req: &Request<Body>, name: &str) -> String {
-	// Parse a cookie value from request
-	req
-		.cookie(name)
-		.unwrap_or_else(|| {
-			// If there is no cookie for this setting, try receiving a default from the config
-			if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) {
-				Cookie::new(name, default)
-			} else {
-				Cookie::from(name)
-			}
-		})
-		.value()
-		.to_string()
+	// If this was called with "subscriptions" and the "subscriptions" cookie has a value
+	if name == "subscriptions" && req.cookie("subscriptions").is_some() {
+		// Create subscriptions string
+		let mut subscriptions = String::new();
+
+		// Default subscriptions cookie
+		if req.cookie("subscriptions").is_some() {
+			subscriptions.push_str(req.cookie("subscriptions").unwrap().value());
+		}
+
+		// Start with first numbered subscription cookie
+		let mut subscriptions_number = 1;
+
+		// While whatever subscriptionsNUMBER cookie we're looking at has a value
+		while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() {
+			// Push whatever subscriptionsNUMBER cookie we're looking at into the subscriptions string
+			subscriptions.push_str(req.cookie(&format!("subscriptions{}", subscriptions_number)).unwrap().value());
+
+			// Increment subscription cookie number
+			subscriptions_number += 1;
+		}
+
+		// Return the subscriptions cookies as one large string
+		subscriptions
+	}
+	// If this was called with "filters" and the "filters" cookie has a value
+	else if name == "filters" && req.cookie("filters").is_some() {
+		// Create filters string
+		let mut filters = String::new();
+
+		// Default filters cookie
+		if req.cookie("filters").is_some() {
+			filters.push_str(req.cookie("filters").unwrap().value());
+		}
+
+		// Start with first numbered filters cookie
+		let mut filters_number = 1;
+
+		// While whatever filtersNUMBER cookie we're looking at has a value
+		while req.cookie(&format!("filters{}", filters_number)).is_some() {
+			// Push whatever filtersNUMBER cookie we're looking at into the filters string
+			filters.push_str(req.cookie(&format!("filters{}", filters_number)).unwrap().value());
+
+			// Increment filters cookie number
+			filters_number += 1;
+		}
+
+		// Return the filters cookies as one large string
+		filters
+	}
+	// The above two still come to this if there was no existing value
+	else {
+		req
+			.cookie(name)
+			.unwrap_or_else(|| {
+				// If there is no cookie for this setting, try receiving a default from the config
+				if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) {
+					Cookie::new(name, default)
+				} else {
+					Cookie::from(name)
+				}
+			})
+			.value()
+			.to_string()
+	}
 }
 
 // Retrieve the value of a setting by name or the default value
@@ -846,11 +923,12 @@ pub fn setting_or_default(req: &Request<Body>, name: &str, default: String) -> S
 // Detect and redirect in the event of a random subreddit
 pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
 	if sub == "random" || sub == "randnsfw" {
-		let new_sub = json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"]
-			.as_str()
-			.unwrap_or_default()
-			.to_string();
-		Ok(redirect(&format!("/r/{new_sub}{additional}")))
+		Ok(redirect(&format!(
+			"/r/{}{additional}",
+			json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"]
+				.as_str()
+				.unwrap_or_default()
+		)))
 	} else {
 		Err("No redirect needed".to_string())
 	}
@@ -928,9 +1006,20 @@ pub fn format_url(url: &str) -> String {
 	}
 }
 
+static REGEX_BULLET: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?m)^- (.*)$").unwrap());
+static REGEX_BULLET_CONSECUTIVE_LINES: Lazy<Regex> = Lazy::new(|| Regex::new(r"</ul>\n<ul>").unwrap());
+
+pub fn render_bullet_lists(input_text: &str) -> String {
+	// ref: https://stackoverflow.com/a/4902622
+	// First enclose each bullet with <ul> <li> tags
+	let text1 = REGEX_BULLET.replace_all(input_text, "<ul><li>$1</li></ul>").to_string();
+	// Then remove any consecutive </ul> <ul> tags
+	REGEX_BULLET_CONSECUTIVE_LINES.replace_all(&text1, "").to_string()
+}
+
 // These are links we want to replace in-body
 static REDDIT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|amp\.|new\.|)(reddit\.com|redd\.it)/"#).unwrap());
-static REDDIT_PREVIEW_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://(external-preview|preview|i)\.redd\.it(.*)[^?]").unwrap());
+static REDDIT_PREVIEW_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://(external-preview|preview|i)\.redd\.it(.*)").unwrap());
 static REDDIT_EMOJI_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://(www|).redditstatic\.com/(.*)").unwrap());
 static REDLIB_PREVIEW_LINK_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r#"/(img|preview/)(pre|external-pre)?/(.*?)>"#).unwrap());
 static REDLIB_PREVIEW_TEXT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r">(.*?)</a>").unwrap());
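As a quick illustration (not part of the commit) of how the two bullet regexes above compose, assuming `render_bullet_lists` from the hunk is in scope:

#[test]
fn bullet_list_composition_sketch() {
	// Each "- " line first becomes its own <ul><li>…</li></ul>; the second regex
	// then removes the "</ul>\n<ul>" seams, merging adjacent items into one list.
	let input = "- first\n- second";
	assert_eq!(render_bullet_lists(input), "<ul><li>first</li><li>second</li></ul>");
}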
@@ -939,8 +1028,7 @@ static REDLIB_PREVIEW_TEXT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r">(.*?)
 pub fn rewrite_urls(input_text: &str) -> String {
 	let mut text1 =
 		// Rewrite Reddit links to Redlib
-		REDDIT_REGEX.replace_all(input_text, r#"href="/"#)
-		.to_string();
+		REDDIT_REGEX.replace_all(input_text, r#"href="/"#).to_string();
 
 	loop {
 		if REDDIT_EMOJI_REGEX.find(&text1).is_none() {
@@ -962,49 +1050,44 @@ pub fn rewrite_urls(input_text: &str) -> String {
 		} else {
 			let formatted_url = format_url(REDDIT_PREVIEW_REGEX.find(&text1).map(|x| x.as_str()).unwrap_or_default());
 
-			let image_url = REDLIB_PREVIEW_LINK_REGEX.find(&formatted_url).map_or("", |m| m.as_str()).to_string();
-			let mut image_caption = REDLIB_PREVIEW_TEXT_REGEX.find(&formatted_url).map_or("", |m| m.as_str()).to_string();
+			let image_url = REDLIB_PREVIEW_LINK_REGEX.find(&formatted_url).map_or("", |m| m.as_str());
+			let mut image_caption = REDLIB_PREVIEW_TEXT_REGEX.find(&formatted_url).map_or("", |m| m.as_str());
 
 			/* As long as image_caption isn't empty remove first and last four characters of image_text to leave us with just the text in the caption without any HTML.
 			This makes it possible to enclose it in a <figcaption> later on without having stray HTML breaking it */
 			if !image_caption.is_empty() {
-				image_caption = image_caption[1..image_caption.len() - 4].to_string();
+				image_caption = &image_caption[1..image_caption.len() - 4];
 			}
 
 			// image_url contains > at the end of it, and right above this we remove image_text's front >, leaving us with just a single > between them
-			let image_to_replace = format!("<a href=\"{image_url}{image_caption}</a>");
-
-			// _image_replacement needs to be in scope for the replacement at the bottom of the loop
-			let mut _image_replacement = String::new();
+			let image_to_replace = format!("<p><a href=\"{image_url}{image_caption}</a></p>");
 
 			/* We don't want to show a caption that's just the image's link, so we check if we find a Reddit preview link within the image's caption.
 			If we don't find one we must have actual text, so we include a <figcaption> block that contains it.
 			Otherwise we don't include the <figcaption> block as we don't need it. */
-			if REDDIT_PREVIEW_REGEX.find(&image_caption).is_none() {
+			let _image_replacement = if REDDIT_PREVIEW_REGEX.find(image_caption).is_none() {
 				// Without this " would show as \" instead. "\"" is how the quotes are formatted within image_text beforehand
-				image_caption = image_caption.replace("\\&quot;", "\"");
-
-				_image_replacement = format!("<figure><a href=\"{image_url}<img loading=\"lazy\" src=\"{image_url}</a><figcaption>{image_caption}</figcaption></figure>");
+				format!(
+					"<figure><a href=\"{image_url}<img loading=\"lazy\" src=\"{image_url}</a><figcaption>{}</figcaption></figure>",
+					image_caption.replace("\\&quot;", "\"")
+				)
 			} else {
-				_image_replacement = format!("<figure><a href=\"{image_url}<img loading=\"lazy\" src=\"{image_url}</a></figure>");
-			}
+				format!("<figure><a href=\"{image_url}<img loading=\"lazy\" src=\"{image_url}</a></figure>")
+			};
 
 			/* In order to know if we're dealing with a normal or external preview we need to take a look at the first capture group of REDDIT_PREVIEW_REGEX
 			if it's preview we're dealing with something that needs /preview/pre, external-preview is /preview/external-pre, and i is /img */
-			let reddit_preview_regex_capture = REDDIT_PREVIEW_REGEX.captures(&text1).unwrap().get(1).map_or("", |m| m.as_str()).to_string();
-			let mut _preview_type = String::new();
-			if reddit_preview_regex_capture == "preview" {
-				_preview_type = "/preview/pre".to_string();
-			} else if reddit_preview_regex_capture == "external-preview" {
-				_preview_type = "/preview/external-pre".to_string();
-			} else {
-				_preview_type = "/img".to_string();
-			}
+			let reddit_preview_regex_capture = REDDIT_PREVIEW_REGEX.captures(&text1).unwrap().get(1).map_or("", |m| m.as_str());
+
+			let _preview_type = match reddit_preview_regex_capture {
+				"preview" => "/preview/pre",
+				"external-preview" => "/preview/external-pre",
+				_ => "/img",
+			};
 
 			text1 = REDDIT_PREVIEW_REGEX
 				.replace(&text1, format!("{_preview_type}$2"))
 				.replace(&image_to_replace, &_image_replacement)
 				.to_string()
 		}
 	}
 }
@@ -1078,10 +1161,14 @@ pub fn rewrite_emotes(media_metadata: &Value, comment: String) -> String {
 				);
 
 				// Inside the comment replace the ID we found with the string that will embed the image
-				comment = comment.replace(&id, &to_replace_with).to_string();
+				comment = comment.replace(&id, &to_replace_with);
 			}
 		}
 	}
 
+	// render bullet (unordered) lists
+	comment = render_bullet_lists(&comment);
+
 	// Call rewrite_urls() to transform any other Reddit links
 	rewrite_urls(&comment)
 }
@@ -1265,7 +1352,7 @@ pub fn url_path_basename(path: &str) -> String {
 		let mut url = url_result.unwrap();
 		url.path_segments_mut().unwrap().pop_if_empty();
 
-		url.path_segments().unwrap().last().unwrap().to_string()
+		url.path_segments().unwrap().next_back().unwrap().to_string()
 	}
 }
@@ -1285,7 +1372,7 @@ pub fn get_post_url(post: &Post) -> String {
 
 #[cfg(test)]
 mod tests {
-	use super::{format_num, format_url, rewrite_urls};
+	use super::{format_num, format_url, rewrite_urls, Preferences};
 
 	#[test]
 	fn format_num_works() {
@@ -1352,6 +1439,35 @@ mod tests {
 		assert_eq!(format_url("nsfw"), "");
 		assert_eq!(format_url("spoiler"), "");
 	}
+	#[test]
+	fn serialize_prefs() {
+		let prefs = Preferences {
+			available_themes: vec![],
+			theme: "laserwave".to_owned(),
+			front_page: "default".to_owned(),
+			layout: "compact".to_owned(),
+			wide: "on".to_owned(),
+			blur_spoiler: "on".to_owned(),
+			show_nsfw: "off".to_owned(),
+			blur_nsfw: "on".to_owned(),
+			hide_hls_notification: "off".to_owned(),
+			video_quality: "best".to_owned(),
+			hide_sidebar_and_summary: "off".to_owned(),
+			use_hls: "on".to_owned(),
+			autoplay_videos: "on".to_owned(),
+			fixed_navbar: "on".to_owned(),
+			disable_visit_reddit_confirmation: "on".to_owned(),
+			comment_sort: "confidence".to_owned(),
+			post_sort: "top".to_owned(),
+			subscriptions: vec!["memes".to_owned(), "mildlyinteresting".to_owned()],
+			filters: vec![],
+			hide_awards: "off".to_owned(),
+			hide_score: "off".to_owned(),
+		};
+		let urlencoded = serde_urlencoded::to_string(prefs).expect("Failed to serialize Prefs");
+
+		assert_eq!(urlencoded, "theme=laserwave&front_page=default&layout=compact&wide=on&blur_spoiler=on&show_nsfw=off&blur_nsfw=on&hide_hls_notification=off&video_quality=best&hide_sidebar_and_summary=off&use_hls=on&autoplay_videos=on&fixed_navbar=on&disable_visit_reddit_confirmation=on&comment_sort=confidence&post_sort=top&subscriptions=memes%2Bmildlyinteresting&filters=&hide_awards=off&hide_score=off")
+	}
 }
 
 #[test]
@@ -1370,7 +1486,10 @@ async fn test_fetching_subreddit_quarantined() {
 
 #[tokio::test(flavor = "multi_thread")]
 async fn test_fetching_nsfw_subreddit() {
-	let subreddit = Post::fetch("/r/randnsfw", false).await;
+	// Gonwild is a place for closed, Euclidean Geometric shapes to exchange their nth terms for karma; showing off their edges in a comfortable environment without pressure.
+	// Find a good sub that is tagged NSFW but that actually isn't in case my future employers are watching (they probably are)
+	// switched from randnsfw as it is no longer functional.
+	let subreddit = Post::fetch("/r/gonwild", false).await;
 	assert!(subreddit.is_ok());
 	assert!(!subreddit.unwrap().0.is_empty());
 }
@@ -1388,7 +1507,7 @@ async fn test_fetching_ws() {
 fn test_rewriting_image_links() {
 	let input =
 		r#"<p><a href="https://preview.redd.it/6awags382xo31.png?width=2560&format=png&auto=webp&s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc">caption 1</a></p>"#;
-	let output = r#"<p><figure><a href="/preview/pre/6awags382xo31.png?width=2560&format=png&auto=webp&s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"><img loading="lazy" src="/preview/pre/6awags382xo31.png?width=2560&format=png&auto=webp&s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"></a><figcaption>caption 1</figcaption></figure></p"#;
+	let output = r#"<figure><a href="/preview/pre/6awags382xo31.png?width=2560&format=png&auto=webp&s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"><img loading="lazy" src="/preview/pre/6awags382xo31.png?width=2560&format=png&auto=webp&s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"></a><figcaption>caption 1</figcaption></figure>"#;
 	assert_eq!(rewrite_urls(input), output);
 }
@@ -1415,3 +1534,28 @@ fn test_rewriting_emotes() {
 	let output = r#"<div class="comment_body "><div class="md"><p><img loading="lazy" src="/emote/t5_31hpy/PW6WsOaLcd.png" width="60" height="60" style="vertical-align:text-bottom"></p></div></div>"#;
 	assert_eq!(rewrite_emotes(&json_input, comment_input.to_string()), output);
 }
+
+#[test]
+fn test_rewriting_bullet_list() {
+	let input = r#"<div class="md"><p>Hi, I've bought this very same monitor and found no calibration whatsoever. I have an ICC profile that has been set up since I've installed its driver from the LG website and it works ok. I also used <a href="http://www.lagom.nl/lcd-test/">http://www.lagom.nl/lcd-test/</a> to calibrate it. After some good tinkering I've found the following settings + the color profile from the driver gets me past all the tests perfectly:
+- Brightness 50 (still have to settle on this one, it's personal preference, it controls the backlight, not the colors)
+- Contrast 70 (which for me was the default one)
+- Picture mode Custom
+- Super resolution + Off (it looks horrible anyway)
+- Sharpness 50 (default one I think)
+- Black level High (low messes up gray colors)
+- DFC Off 
+- Response Time Middle (personal preference, <a href="https://www.blurbusters.com/">https://www.blurbusters.com/</a> show horrible overdrive with it on high)
+- Freesync doesn't matter
+- Black stabilizer 50
+- Gamma setting on 0 
+- Color Temp Medium
+How`s your monitor by the way? Any IPS bleed whatsoever? I either got lucky or the panel is pretty good, 0 bleed for me, just the usual IPS glow. How about the pixels? I see the pixels even at one meter away, especially on Microsoft Edge's icon for example, the blue background is just blocky, don't know why.</p>
+</div>"#;
+	let output = r#"<div class="md"><p>Hi, I've bought this very same monitor and found no calibration whatsoever. I have an ICC profile that has been set up since I've installed its driver from the LG website and it works ok. I also used <a href="http://www.lagom.nl/lcd-test/">http://www.lagom.nl/lcd-test/</a> to calibrate it. After some good tinkering I've found the following settings + the color profile from the driver gets me past all the tests perfectly:
+<ul><li>Brightness 50 (still have to settle on this one, it's personal preference, it controls the backlight, not the colors)</li><li>Contrast 70 (which for me was the default one)</li><li>Picture mode Custom</li><li>Super resolution + Off (it looks horrible anyway)</li><li>Sharpness 50 (default one I think)</li><li>Black level High (low messes up gray colors)</li><li>DFC Off </li><li>Response Time Middle (personal preference, <a href="https://www.blurbusters.com/">https://www.blurbusters.com/</a> show horrible overdrive with it on high)</li><li>Freesync doesn't matter</li><li>Black stabilizer 50</li><li>Gamma setting on 0 </li><li>Color Temp Medium</li></ul>
+How`s your monitor by the way? Any IPS bleed whatsoever? I either got lucky or the panel is pretty good, 0 bleed for me, just the usual IPS glow. How about the pixels? I see the pixels even at one meter away, especially on Microsoft Edge's icon for example, the blue background is just blocky, don't know why.</p>
+</div>"#;
+
+	assert_eq!(render_bullet_lists(input), output);
+}
@@ -37,4 +37,22 @@ async function checkInstanceUpdateStatus() {
 	}
 }
 
+async function checkOtherInstances() {
+	try {
+		const response = await fetch('/instances.json');
+		const data = await response.json();
+		const randomInstance = data.instances[Math.floor(Math.random() * data.instances.length)];
+		const instanceUrl = randomInstance.url;
+		// Set the href of the <a> tag to the instance URL with path included
+		document.getElementById('random-instance').href = instanceUrl + window.location.pathname;
+		document.getElementById('random-instance').innerText = "Visit Random Instance";
+	} catch (error) {
+		console.error('Error fetching instances:', error);
+		document.getElementById('update-status').innerText = '⚠️ Error checking update status.';
+	}
+}
+
+// Set the target URL when the page loads
+window.addEventListener('load', checkOtherInstances);
+
 checkInstanceUpdateStatus();
@@ -7,6 +7,8 @@
 		<h3><a href="https://www.redditstatus.com/">Reddit Status</a></h3>
 		<br />
 		<h3 id="update-status"></h3>
 		<br />
+		<h3 id="update-status"><a id="random-instance"></a></h3>
+		<br>
 		<div id="git_commit" data-value="{{ crate::instance_info::INSTANCE_INFO.git_commit }}"></div>
 		<script src="/check_update.js"></script>
@@ -161,8 +161,16 @@
 			{% endif %}
 
 			<div id="settings_note">
-				<p><b>Note:</b> settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.</p><br>
-				<p>You can restore your current settings and subscriptions after clearing your cookies using <a href="/settings/restore/?theme={{ prefs.theme }}&front_page={{ prefs.front_page }}&layout={{ prefs.layout }}&wide={{ prefs.wide }}&post_sort={{ prefs.post_sort }}&comment_sort={{ prefs.comment_sort }}&show_nsfw={{ prefs.show_nsfw }}&use_hls={{ prefs.use_hls }}&hide_hls_notification={{ prefs.hide_hls_notification }}&hide_awards={{ prefs.hide_awards }}&fixed_navbar={{ prefs.fixed_navbar }}&subscriptions={{ prefs.subscriptions.join("%2B") }}&filters={{ prefs.filters.join("%2B") }}">this link</a>.</p>
+				<p><b>Note:</b> settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.</p>
+				<br>
+				{% match prefs.to_urlencoded() %}
+				{% when Ok with (encoded_prefs) %}
+				<p>You can restore your current settings and subscriptions after clearing your cookies using <a
+					href="/settings/restore/?{{ encoded_prefs }}">this link</a>.</p>
+				{% when Err with (err) %}
+				<p>There was an error creating your restore link: {{ err }}</p>
+				<p>Please report this issue</p>
+				{% endmatch %}
 			</div>
 		</div>