diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 1c0405f..3ec1ead 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,12 +1,12 @@ { "name": "Rust", - "image": "mcr.microsoft.com/devcontainers/rust:0-1-bullseye", + "image": "mcr.microsoft.com/devcontainers/rust:1.0.9-bookworm", "features": { "ghcr.io/devcontainers/features/docker-in-docker:2": {} }, "portsAttributes": { "8080": { - "label": "libreddit", + "label": "redlib", "onAutoForward": "notify" } }, diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..5e60b08 --- /dev/null +++ b/.env.example @@ -0,0 +1,52 @@ +# Redlib configuration +# See the Configuration section of the README for a more detailed explanation of these settings. + +# Instance-specific settings +# Enable SFW-only mode for the instance +REDLIB_SFW_ONLY=off +# Set a banner message for the instance +REDLIB_BANNER= +# Disable search engine indexing +REDLIB_ROBOTS_DISABLE_INDEXING=off +# Set the Pushshift frontend for "removed" links +REDLIB_PUSHSHIFT_FRONTEND=undelete.pullpush.io + +# Default user settings +# Set the default theme (options: system, light, dark, black, dracula, nord, laserwave, violet, gold, rosebox, gruvboxdark, gruvboxlight) +REDLIB_DEFAULT_THEME=system +# Set the default front page (options: default, popular, all) +REDLIB_DEFAULT_FRONT_PAGE=default +# Set the default layout (options: card, clean, compact) +REDLIB_DEFAULT_LAYOUT=card +# Enable wide mode by default +REDLIB_DEFAULT_WIDE=off +# Set the default post sort method (options: hot, new, top, rising, controversial) +REDLIB_DEFAULT_POST_SORT=hot +# Set the default comment sort method (options: confidence, top, new, controversial, old) +REDLIB_DEFAULT_COMMENT_SORT=confidence +# Enable blurring Spoiler content by default +REDLIB_DEFAULT_BLUR_SPOILER=off +# Enable showing NSFW content by default +REDLIB_DEFAULT_SHOW_NSFW=off +# Enable blurring NSFW content by default +REDLIB_DEFAULT_BLUR_NSFW=off +# Enable HLS video format by default +REDLIB_DEFAULT_USE_HLS=off +# Hide HLS notification by default +REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION=off +# Disable autoplay videos by default +REDLIB_DEFAULT_AUTOPLAY_VIDEOS=off +# Define a default list of subreddit subscriptions (format: sub1+sub2+sub3) +REDLIB_DEFAULT_SUBSCRIPTIONS= +# Define a default list of subreddit filters (format: sub1+sub2+sub3) +REDLIB_DEFAULT_FILTERS= +# Hide awards by default +REDLIB_DEFAULT_HIDE_AWARDS=off +# Hide sidebar and summary +REDLIB_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY=off +# Disable the confirmation before visiting Reddit +REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION=off +# Hide score by default +REDLIB_DEFAULT_HIDE_SCORE=off +# Enable fixed navbar by default +REDLIB_DEFAULT_FIXED_NAVBAR=on diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..3550a30 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use flake diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index 5723180..262c5e9 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,2 +1,3 @@ -liberapay: spike -custom: ['https://www.buymeacoffee.com/spikecodes'] +liberapay: sigaloid +buy_me_a_coffee: sigaloid +github: sigaloid \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 03d38d1..f181d4f 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -7,6 +7,10 @@ assignees: '' --- + + ## Describe the bug + + + +- [ ] I checked that the instance that this was 
reported on is running the latest git commit, or I can reproduce it locally on the latest git commit \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_parity.md b/.github/ISSUE_TEMPLATE/feature_parity.md index 22b9bd8..fc0a2f0 100644 --- a/.github/ISSUE_TEMPLATE/feature_parity.md +++ b/.github/ISSUE_TEMPLATE/feature_parity.md @@ -1,6 +1,6 @@ --- name: ✨ Feature parity -about: Suggest implementing a feature into Libreddit that is found in Reddit.com +about: Suggest implementing a feature into Redlib that is found in Reddit.com title: '✨ Feature parity: ' labels: feature parity assignees: '' @@ -12,7 +12,7 @@ assignees: '' A clear and concise description of what the feature is. --> -## Describe how this could be implemented into Libreddit +## Describe how this could be implemented into Redlib diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index e9e6d31..e670476 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,6 +1,6 @@ --- name: 💡 Feature request -about: Suggest a feature for Libreddit that is not found in Reddit +about: Suggest a feature for Redlib that is not found in Reddit title: '💡 Feature request: ' labels: enhancement assignees: '' diff --git a/.github/workflows/build-artifacts.yaml b/.github/workflows/build-artifacts.yaml new file mode 100644 index 0000000..695d1bf --- /dev/null +++ b/.github/workflows/build-artifacts.yaml @@ -0,0 +1,76 @@ +name: Release Build + +on: + push: + paths-ignore: + - "*.md" + - "compose.*" + branches: + - "main" + release: + types: [published] + +env: + CARGO_TERM_COLOR: always + + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER: aarch64-linux-gnu-gcc + CC_aarch64_unknown_linux_musl: aarch64-linux-gnu-gcc + CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER: arm-linux-gnueabihf-gcc + CC_armv7_unknown_linux_musleabihf: arm-linux-gnueabihf-gcc + +jobs: + build: + name: Rust project - latest + runs-on: ubuntu-latest + strategy: + matrix: + target: + - x86_64-unknown-linux-musl + - aarch64-unknown-linux-musl + - armv7-unknown-linux-musleabihf + steps: + - uses: actions/checkout@v4 + + - uses: actions-rust-lang/setup-rust-toolchain@v1 + with: + target: ${{ matrix.target }} + + - if: matrix.target == 'x86_64-unknown-linux-musl' + run: | + sudo apt-get update + sudo apt-get install -y --no-install-recommends musl-tools + + - if: matrix.target == 'armv7-unknown-linux-musleabihf' + run: | + sudo apt update + sudo apt install -y gcc-arm-linux-gnueabihf musl-tools + + - if: matrix.target == 'aarch64-unknown-linux-musl' + run: | + sudo apt update + sudo apt install -y gcc-aarch64-linux-gnu musl-tools + + - name: Versions + id: version + run: echo "VERSION=$(cargo metadata --format-version 1 --no-deps | jq .packages[0].version -r | sed 's/^/v/')" >> "$GITHUB_OUTPUT" + + - name: Build + run: cargo build --release --target ${{ matrix.target }} + + - name: Package release + run: tar czf redlib-${{ matrix.target }}.tar.gz -C target/${{ matrix.target }}/release/ redlib + + - name: Upload release + uses: softprops/action-gh-release@v1 + with: + tag_name: ${{ steps.version.outputs.VERSION }} + name: ${{ steps.version.outputs.VERSION }} - ${{ github.event.head_commit.message }} + draft: true + files: | + redlib-${{ matrix.target }}.tar.gz + body: | + - ${{ github.event.head_commit.message }} ${{ github.sha }} + generate_release_notes: true + + + diff --git a/.github/workflows/docker-arm.yml b/.github/workflows/docker-arm.yml deleted file mode 
100644 index 251e325..0000000 --- a/.github/workflows/docker-arm.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: Docker ARM Build - -on: - push: - paths-ignore: - - "**.md" - branches: - - master - -jobs: - build-docker: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - with: - platforms: all - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v1 - with: - version: latest - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push - uses: docker/build-push-action@v2 - with: - context: . - file: ./Dockerfile.arm - platforms: linux/arm64 - push: true - tags: libreddit/libreddit:arm - cache-from: type=gha - cache-to: type=gha,mode=max diff --git a/.github/workflows/docker-armv7.yml b/.github/workflows/docker-armv7.yml deleted file mode 100644 index d2817d8..0000000 --- a/.github/workflows/docker-armv7.yml +++ /dev/null @@ -1,41 +0,0 @@ -name: Docker ARM V7 Build - -on: - push: - paths-ignore: - - "**.md" - branches: - - master - -jobs: - build-docker: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Set up QEMU - id: qemu - uses: docker/setup-qemu-action@v1 - with: - platforms: all - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v1 - with: - version: latest - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - name: Build and push - id: build_push - uses: docker/build-push-action@v2 - with: - context: . - file: ./Dockerfile.armv7 - platforms: linux/arm/v7 - push: true - tags: libreddit/libreddit:armv7 - cache-from: type=gha - cache-to: type=gha,mode=max diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml deleted file mode 100644 index c90bd4d..0000000 --- a/.github/workflows/docker.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: Docker amd64 Build - -on: - push: - paths-ignore: - - "**.md" - branches: - - master - -jobs: - build-docker: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - with: - platforms: all - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v1 - with: - version: latest - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - name: Docker Hub Description - uses: peter-evans/dockerhub-description@v3 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - repository: libreddit/libreddit - - name: Build and push - uses: docker/build-push-action@v2 - with: - context: . 
- file: ./Dockerfile - platforms: linux/amd64 - push: true - tags: libreddit/libreddit:latest - cache-from: type=gha - cache-to: type=gha,mode=max diff --git a/.github/workflows/main-docker.yml b/.github/workflows/main-docker.yml new file mode 100644 index 0000000..5442775 --- /dev/null +++ b/.github/workflows/main-docker.yml @@ -0,0 +1,109 @@ +name: Container build + +on: + workflow_run: + workflows: ["Release Build"] + types: + - completed +env: + REGISTRY_IMAGE: quay.io/redlib/redlib + +jobs: + build: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: + - { platform: linux/amd64, target: x86_64-unknown-linux-musl } + - { platform: linux/arm64, target: aarch64-unknown-linux-musl } + - { platform: linux/arm/v7, target: armv7-unknown-linux-musleabihf } + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY_IMAGE }} + tags: | + type=sha + type=raw,value=latest,enable={{is_default_branch}} + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Login to Quay.io Container Registry + uses: docker/login-action@v3 + with: + registry: quay.io + username: ${{ secrets.QUAY_USERNAME }} + password: ${{ secrets.QUAY_ROBOT_TOKEN }} + - name: Build and push + id: build + uses: docker/build-push-action@v5 + with: + context: . + platforms: ${{ matrix.platform }} + labels: ${{ steps.meta.outputs.labels }} + outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true + file: Dockerfile + build-args: TARGET=${{ matrix.target }} + - name: Export digest + run: | + mkdir -p /tmp/digests + digest="${{ steps.build.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + - name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-${{ matrix.target }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 + merge: + runs-on: ubuntu-latest + needs: + - build + steps: + - name: Download digests + uses: actions/download-artifact@v4.1.7 + with: + path: /tmp/digests + pattern: digests-* + merge-multiple: true + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY_IMAGE }} + tags: | + type=sha + type=raw,value=latest,enable={{is_default_branch}} + - name: Login to Quay.io Container Registry + uses: docker/login-action@v3 + with: + registry: quay.io + username: ${{ secrets.QUAY_USERNAME }} + password: ${{ secrets.QUAY_ROBOT_TOKEN }} + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *) + + # - name: Push README to Quay.io + # uses: christian-korneck/update-container-description-action@v1 + # env: + # DOCKER_APIKEY: ${{ secrets.APIKEY__QUAY_IO }} + # with: + # destination_container_repo: quay.io/redlib/redlib + # provider: quay + # readme_file: 'README.md' + + - name: Inspect image + run: | + docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }} diff --git a/.github/workflows/main-rust.yml b/.github/workflows/main-rust.yml new file mode 100644 index 0000000..f38c01d --- /dev/null +++ b/.github/workflows/main-rust.yml @@ -0,0 +1,84 @@ +name: Rust Build & Publish + +on: + push: + paths-ignore: + - "**.md" + + branches: + - 'main' + + release: + types: [published] + +env: + CARGO_TERM_COLOR: always + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout sources + uses: actions/checkout@v3 + + - name: Cache Packages + uses: Swatinem/rust-cache@v2 + + - name: Install stable toolchain + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + + - name: Install musl-gcc + run: sudo apt-get install musl-tools + + - name: Install cargo musl target + run: rustup target add x86_64-unknown-linux-musl + + # Building actions + - name: Build + run: RUSTFLAGS='-C target-feature=+crt-static' cargo build --release --target x86_64-unknown-linux-musl + + - name: Versions + id: version + run: echo "VERSION=$(cargo metadata --format-version 1 --no-deps | jq .packages[0].version -r | sed 's/^/v/')" >> "$GITHUB_OUTPUT" + + # Publishing actions + + - name: Publish to crates.io + if: github.event_name == 'release' + run: cargo publish --no-verify --token ${{ secrets.CARGO_REGISTRY_TOKEN }} + + - name: Calculate SHA512 checksum + run: sha512sum target/x86_64-unknown-linux-musl/release/redlib > redlib.sha512 + + - name: Calculate SHA256 checksum + run: sha256sum target/x86_64-unknown-linux-musl/release/redlib > redlib.sha256 + + - uses: actions/upload-artifact@v4 + name: Upload a Build Artifact + with: + name: redlib + path: | + target/x86_64-unknown-linux-musl/release/redlib + redlib.sha512 + redlib.sha256 + + + - name: Release + uses: softprops/action-gh-release@v1 + if: github.base_ref != 'main' && github.event_name == 'release' + with: + tag_name: ${{ steps.version.outputs.VERSION }} + name: ${{ steps.version.outputs.VERSION }} - ${{ github.event.head_commit.message }} + draft: true + files: | + target/x86_64-unknown-linux-musl/release/redlib + redlib.sha512 + redlib.sha256 + body: | + - ${{ github.event.head_commit.message }} ${{ github.sha }} + generate_release_notes: true + env: + GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml new file mode 100644 index 0000000..c8e8d2a --- /dev/null +++ b/.github/workflows/pull-request.yml @@ -0,0 +1,67 @@ +name: Pull Request + +env: + CARGO_TERM_COLOR: always + NEXTEST_RETRIES: 10 + +on: + push: + branches: + - 'main' + + pull_request: + branches: + - 'main' + +jobs: + test: + name: cargo test + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v3 + + - name: Install stable toolchain + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + + - name: Install cargo-nextest + uses: taiki-e/install-action@nextest + + - name: Run cargo nextest + run: cargo nextest run + + format: + name: cargo fmt --all -- --check + runs-on: ubuntu-latest + + steps: + - name: Checkout 
sources + uses: actions/checkout@v3 + + - name: Install stable toolchain with rustfmt component + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + components: rustfmt + + - name: Run cargo fmt + run: cargo fmt --all -- --check + + clippy: + name: cargo clippy -- -D warnings + runs-on: ubuntu-latest + + steps: + - name: Checkout sources + uses: actions/checkout@v3 + + - name: Install stable toolchain with clippy component + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + components: clippy + + - name: Run cargo clippy + run: cargo clippy -- -D warnings \ No newline at end of file diff --git a/.github/workflows/rust-tests.yml b/.github/workflows/rust-tests.yml deleted file mode 100644 index c93aadf..0000000 --- a/.github/workflows/rust-tests.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: Tests - -on: - push: - branches: [ "master" ] - pull_request: - branches: [ "master" ] - -env: - CARGO_TERM_COLOR: always - -jobs: - build: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - name: Build - run: cargo build --verbose - - name: Run tests - run: cargo test --verbose diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml deleted file mode 100644 index c233140..0000000 --- a/.github/workflows/rust.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: Rust - -on: - push: - paths-ignore: - - "**.md" - branches: - - master - -env: - CARGO_TERM_COLOR: always - -jobs: - build: - runs-on: ubuntu-18.04 - - steps: - - uses: actions/checkout@v2 - - - name: Cache Packages - uses: Swatinem/rust-cache@v1.0.1 - - - name: Build - run: cargo build --release - - - name: Publish to crates.io - continue-on-error: true - run: cargo publish --no-verify --token ${{ secrets.CARGO_REGISTRY_TOKEN }} - - - uses: actions/upload-artifact@v2.2.1 - name: Upload a Build Artifact - with: - name: libreddit - path: target/release/libreddit - - - name: Versions - id: version - run: | - echo "::set-output name=version::$(cargo metadata --format-version 1 --no-deps | jq .packages[0].version -r | sed 's/^/v/')" - echo "::set-output name=tag::$(git describe --tags)" - - - name: Calculate SHA512 checksum - run: sha512sum target/release/libreddit > libreddit.sha512 - - - name: Release - uses: softprops/action-gh-release@v1 - if: github.base_ref != 'master' - with: - tag_name: ${{ steps.version.outputs.version }} - name: ${{ steps.version.outputs.version }} - ${{ github.event.head_commit.message }} - draft: true - files: | - target/release/libreddit - libreddit.sha512 - body: | - - ${{ github.event.head_commit.message }} ${{ github.sha }} - generate_release_notes: true - env: - GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} diff --git a/.gitignore b/.gitignore index c41cc9e..6ca325c 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,10 @@ -/target \ No newline at end of file +/target +.env +redlib.toml + +# Idea Files +.idea/ + +# nix files +.direnv/ +result diff --git a/.replit b/.replit index 164ef4f..b857cc1 100644 --- a/.replit +++ b/.replit @@ -1,2 +1,2 @@ -run = "while :; do set -ex; curl -o./libreddit -fsSL -- https://github.com/libreddit/libreddit/releases/latest/download/libreddit ; chmod +x libreddit; set +e; ./libreddit -H 63115200; sleep 1; done" +run = "while :; do set -ex; nix-env -iA nixpkgs.unzip; curl -o./redlib.zip -fsSL -- https://nightly.link/redlib-org/redlib/workflows/main-rust/main/redlib.zip; unzip -n redlib.zip; mv target/x86_64-unknown-linux-musl/release/redlib .; chmod +x redlib; set +e; ./redlib -H 63115200; sleep 1; done" language = "bash" diff --git 
a/Cargo.lock b/Cargo.lock index 04c4d03..6722b4d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,21 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "adler32" @@ -9,10 +24,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" [[package]] -name = "aho-corasick" -version = "0.7.20" +name = "ahash" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] @@ -33,43 +60,29 @@ dependencies = [ ] [[package]] -name = "askama" -version = "0.11.1" +name = "allocator-api2" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb98f10f371286b177db5eeb9a6e5396609555686a35e1d4f7b9a9c6d8af0139" -dependencies = [ - "askama_derive", - "askama_escape", - "askama_shared", -] +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] -name = "askama_derive" -version = "0.11.2" +name = "anstyle" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87bf87e6e8b47264efa9bde63d6225c6276a52e05e91bf37eaa8afd0032d6b71" -dependencies = [ - "askama_shared", - "proc-macro2", - "syn", -] +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] -name = "askama_escape" -version = "0.10.3" +name = "arc-swap" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] -name = "askama_shared" -version = "0.12.2" +name = "async-recursion" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf722b94118a07fcbc6640190f247334027685d4e218b794dbfe17c32bf38ed0" +checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" dependencies = [ - "askama_escape", - "mime", - "mime_guess", - "nom", "proc-macro2", "quote", "syn", @@ -77,9 +90,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.61" +version = "0.1.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "705339e0e4a9690e2908d2b3d049d85682cf19fbd5782494498fbf7003a6a282" +checksum = "d556ec1359574147ec0c4fc5eb525f3f23263a592b1a9c07e0a75b427de55c97" dependencies = [ "proc-macro2", "quote", @@ -87,43 +100,86 @@ dependencies = [ ] [[package]] -name = "async_once" -version = "0.2.6" +name = "atom_syndication" +version = "0.12.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ce4f10ea3abcd6617873bae9f91d1c5332b4a778bd9ce34d0cd517474c1de82" +checksum = "d2f68d23e2cb4fd958c705b91a6b4c80ceeaf27a9e11651272a8389d5ce1a4a3" +dependencies = [ + "chrono", + "derive_builder", + "diligent-date-parser", + "never", + "quick-xml", +] [[package]] name = "autocfg" -version = "1.1.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "backtrace" +version = "0.3.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets", +] + +[[package]] +name = "base2048" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71f4fe417e8cc3bb9b437dfa9290ce92bd2730ba5374719bdfd9147fbc8f17cd" [[package]] name = "base64" -version = "0.21.0" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] [[package]] name = "bitflags" -version = "1.3.2" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" [[package]] name = "block-buffer" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "brotli" -version = "3.3.4" +version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" +checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -132,9 +188,9 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "2.3.4" +version = "4.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" +checksum = "74fa05ad7d803d413eb8380983b092cbbaf9a85f151b871360e7b00cd7060b37" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -142,9 +198,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.1.0" +version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45ea9b00a7b3f2988e9a65ad3917e62123c38dba709b666506207be96d1790b" +checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" dependencies = [ 
"memchr", "serde", @@ -152,48 +208,52 @@ dependencies = [ [[package]] name = "build_html" -version = "2.2.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3ef018b44d829e1b3364b4969059c098743595ec57a7eed176fbc9d909ac217" +checksum = "01b01f54cbdd56298a506b086691594ded3b68dcbc9437adc87c616a35e7fc89" [[package]] name = "bumpalo" -version = "3.12.0" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.3.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "cached" -version = "0.42.0" +version = "0.54.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5877db5d1af7fae60d06b5db9430b68056a69b3582a0be8e3691e87654aeb6" +checksum = "9718806c4a2fe9e8a56fd736f97b340dd10ed1be8ed733ed50449f351dc33cae" dependencies = [ + "ahash", "async-trait", - "async_once", "cached_proc_macro", "cached_proc_macro_types", "futures", - "hashbrown 0.13.2", - "instant", - "lazy_static", + "hashbrown 0.14.5", "once_cell", - "thiserror", + "thiserror 1.0.69", "tokio", + "web-time", ] [[package]] name = "cached_proc_macro" -version = "0.16.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e10ca87c81aaa3a949dbbe2b5e6c2c45dbc94ba4897e45ea31ff9ec5087be3dc" +checksum = "2f42a145ed2d10dce2191e1dcf30cfccfea9026660e143662ba5eec4017d5daa" dependencies = [ - "cached_proc_macro_types", "darling", "proc-macro2", "quote", @@ -202,15 +262,18 @@ dependencies = [ [[package]] name = "cached_proc_macro_types" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a4f925191b4367301851c6d99b09890311d74b0d43f274c0b34c86d308a3663" +checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0" [[package]] name = "cc" -version = "1.0.78" +version = "1.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" +checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c" +dependencies = [ + "shlex", +] [[package]] name = "cfg-if" @@ -219,29 +282,57 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] -name = "clap" -version = "4.1.1" +name = "chrono" +version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec7a4128863c188deefe750ac1d1dfe66c236909f845af04beed823638dc1b2" +checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" dependencies = [ - "bitflags", + "num-traits", +] + +[[package]] +name = "clap" +version = "4.5.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "027bb0d98429ae334a8698531da7077bdf906419543a35a55c2cb1b66437d767" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] 
+name = "clap_builder" +version = "4.5.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5589e0cba072e0f3d23791efac0fd8627b49c829c196a492e88168e6a669d863" +dependencies = [ + "anstyle", "clap_lex", ] [[package]] -name = "clap_lex" -version = "0.3.1" +name = "clap_derive" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "783fe232adfca04f90f56201b26d79682d4cd2625e0bc7290b95123afe558ade" +checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" dependencies = [ - "os_str_bytes", + "heck", + "proc-macro2", + "quote", + "syn", ] [[package]] -name = "cookie" -version = "0.16.2" +name = "clap_lex" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" + +[[package]] +name = "cookie" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" dependencies = [ "time", "version_check", @@ -249,9 +340,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -259,24 +350,33 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.3" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] [[package]] name = "cpufeatures" -version = "0.2.5" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] @@ -293,9 +393,9 @@ dependencies = [ [[package]] name = "darling" -version = "0.14.2" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0dd3cd20dc6b5a876612a6e5accfe7f3dd883db6d07acfbf14c128f61550dfa" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ "darling_core", "darling_macro", @@ -303,9 +403,9 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.14.2" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a784d2ccaf7c98501746bf0be29b2022ba41fd62a2e622af997a03e9f972859f" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", @@ -317,9 +417,9 @@ 
dependencies = [ [[package]] name = "darling_macro" -version = "0.14.2" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7618812407e9402654622dd402b0a89dff9ba93badd6540781526117b92aab7e" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", @@ -327,44 +427,130 @@ dependencies = [ ] [[package]] -name = "digest" -version = "0.10.6" +name = "dary_heap" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +checksum = "04d2cd9c18b9f454ed67da600630b021a8a80bf33f8c95896ab33aaf1c26b728" + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derive_builder" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", ] [[package]] -name = "errno" -version = "0.2.8" +name = "diligent-date-parser" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +checksum = "c8ede7d79366f419921e2e2f67889c12125726692a313bffb474bd5f37a581e9" dependencies = [ - "errno-dragonfly", - "libc", - "winapi", + "chrono", ] [[package]] -name = "errno-dragonfly" -version = "0.1.2" +name = "displaydoc" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "env_logger" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ - "cc", "libc", + "windows-sys 0.59.0", ] [[package]] name = "fastrand" -version = "1.8.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" -dependencies = [ - "instant", -] +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "fnv" @@ -374,24 +560,24 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "form_urlencoded" -version = "1.1.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] [[package]] name = "fs_extra" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" [[package]] name = "futures" -version = "0.3.25" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", @@ -403,9 +589,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.25" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -413,48 +599,46 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.25" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-io" -version = "0.3.25" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" -version = "1.12.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48" +checksum = "f5edaec856126859abb19ed65f39e90fea3a9574b9707f13539acf4abf7eb532" dependencies = [ "fastrand", "futures-core", "futures-io", - "memchr", "parking", "pin-project-lite", - "waker-fn", ] [[package]] name = "futures-sink" -version = "0.3.25" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] 
name = "futures-task" -version = "0.3.25" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" -version = "0.3.25" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-core", "futures-sink", @@ -465,9 +649,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", @@ -475,33 +659,51 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.8" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", ] [[package]] -name = "globset" -version = "0.4.10" +name = "getrandom" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc" +checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.13.3+wasi-0.2.2", + "windows-targets", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "globset" +version = "0.4.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5" dependencies = [ "aho-corasick", "bstr", - "fnv", "log", - "regex", + "regex-automata", + "regex-syntax", ] [[package]] name = "h2" -version = "0.3.15" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f9f29bc9dda355256b2916cf526ab02ce0aeaaaf2bad60d65ef3f12f11dd0f4" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ "bytes", "fnv", @@ -518,30 +720,43 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.12.3" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" - -[[package]] -name = "hermit-abi" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "libc", + "ahash", + "allocator-api2", ] [[package]] -name = "http" -version = "0.2.8" +name = "hashbrown" +version = "0.15.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e" + +[[package]] +name = "htmlescape" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9025058dae765dee5070ec375f591e2ba14638c63feff74f13805a72e523163" + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", @@ -550,9 +765,9 @@ dependencies = [ [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", "http", @@ -561,21 +776,27 @@ dependencies = [ [[package]] name = "httparse" -version = "1.8.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "httpdate" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.23" +version = "0.14.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "034711faac9d2166cb1baf1a2fb0b60b1f277f8492fd72176c17f3515e1abd3c" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" dependencies = [ "bytes", "futures-channel", @@ -597,10 +818,11 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.23.2" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ + "futures-util", "http", "hyper", "log", @@ -610,6 +832,124 @@ dependencies = [ "tokio-rustls", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + 
"writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -618,141 +958,128 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.3.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", ] [[package]] name = "indexmap" -version = "1.9.2" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ - "autocfg", - "hashbrown 0.12.3", + 
"equivalent", + "hashbrown 0.15.2", ] [[package]] -name = "instant" -version = "0.1.12" +name = "inventory" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +checksum = "ab08d7cd2c5897f2c949e5383ea7c7db03fb19130ffcfbf7eda795137ae3cb83" dependencies = [ - "cfg-if", + "rustversion", ] [[package]] -name = "io-lifetimes" -version = "1.0.5" +name = "is-terminal" +version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1abeb7a0dd0f8181267ff8adc397075586500b81b28a73e8a0208b00fc170fb3" +checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ + "hermit-abi", "libc", - "windows-sys 0.45.0", + "windows-sys 0.59.0", ] [[package]] name = "itoa" -version = "1.0.5" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "js-sys" -version = "0.3.60" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ + "once_cell", "wasm-bindgen", ] -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - [[package]] name = "libc" -version = "0.2.139" +version = "0.2.170" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" +checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" [[package]] name = "libflate" -version = "1.2.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05605ab2bce11bcfc0e9c635ff29ef8b2ea83f29be257ee7d730cac3ee373093" +checksum = "45d9dfdc14ea4ef0900c1cddbc8dcd553fbaacd8a4a282cf4018ae9dd04fb21e" dependencies = [ "adler32", + "core2", "crc32fast", + "dary_heap", "libflate_lz77", ] [[package]] name = "libflate_lz77" -version = "1.1.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39a734c0493409afcd49deee13c006a04e3586b9761a03543c6272c9c51f2f5a" +checksum = "e6e0d73b369f386f1c44abd9c570d5318f55ccde816ff4b562fa452e5182863d" dependencies = [ + "core2", + "hashbrown 0.14.5", "rle-decode-fast", ] -[[package]] -name = "libreddit" -version = "0.30.0" -dependencies = [ - "askama", - "brotli", - "build_html", - "cached", - "clap", - "cookie", - "futures-lite", - "hyper", - "hyper-rustls", - "libflate", - "lipsum", - "once_cell", - "percent-encoding", - "regex", - "route-recognizer", - "rust-embed", - "sealed_test", - "serde", - "serde_json", - "serde_yaml", - "time", - "tokio", - "toml", - "url", -] - [[package]] name = "linux-raw-sys" -version = "0.1.4" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" +checksum = "6db9c683daf087dc577b7506e9695b3d556a9f3849903fa28186283afd6809e9" [[package]] name = "lipsum" -version = "0.8.2" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a8451846f1f337e44486666989fbce40be804da139d5a4477d6b88ece5dc69f4" +checksum = "636860251af8963cc40f6b4baadee105f02e21b28131d76eba8e40ce84ab8064" dependencies = [ "rand", "rand_chacha", ] [[package]] -name = "lock_api" -version = "0.4.9" +name = "litemap" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -760,30 +1087,27 @@ dependencies = [ [[package]] name = "log" -version = "0.4.17" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] +checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" [[package]] name = "memchr" -version = "2.5.0" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "mime" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "mime_guess" -version = "2.0.4" +version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" dependencies = [ "mime", "unicase", @@ -796,17 +1120,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] -name = "mio" +name = "miniz_oxide" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" +checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ "libc", - "log", - "wasi", - "windows-sys 0.42.0", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.52.0", ] +[[package]] +name = "never" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c96aba5aa877601bb3f6dd6a63a969e1f82e60646e81e71b14496995e9853c91" + [[package]] name = "nom" version = "7.1.3" @@ -818,53 +1156,61 @@ dependencies = [ ] [[package]] -name = "num_cpus" -version = "1.15.0" +name = "num-conv" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ - "hermit-abi", - "libc", + "autocfg", ] [[package]] name = "num_threads" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" dependencies = [ "libc", ] [[package]] -name = "once_cell" -version = "1.17.0" +name = "object" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "openssl-probe" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" - -[[package]] -name = "os_str_bytes" -version = "6.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "parking" -version = "2.0.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", @@ -872,28 +1218,28 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.6" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-sys 0.42.0", + "windows-targets", ] [[package]] name = "percent-encoding" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project-lite" -version = "0.2.9" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -902,20 +1248,57 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] -name = "ppv-lite86" -version = "0.2.17" +name = "powerfmt" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "pretty_env_logger" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "865724d4dbe39d9f3dd3b52b88d859d66bcb2d6a0acfd5ea68a65fb66d4bdc1c" +dependencies = [ + "env_logger", + "log", +] [[package]] name = "proc-macro2" -version = "1.0.50" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] +[[package]] +name = "pulldown-cmark" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14" +dependencies = [ + "bitflags", + "memchr", + "pulldown-cmark-escape", + "unicase", +] + +[[package]] +name = "pulldown-cmark-escape" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" + [[package]] name = "quick-error" version = "1.2.3" @@ -923,10 +1306,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] -name = "quote" -version = "1.0.23" +name = "quick-xml" +version = "0.37.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" +checksum = "165859e9e55f79d67b96c5d96f4e88b6f2695a1972849c15a6a3f5c59fc2c003" +dependencies = [ + "encoding_rs", + "memchr", +] + +[[package]] +name = "quote" +version = "1.0.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1f1914ce909e1658d9907913b4b91947430c7d9be598b15a1912935b8c04801" dependencies = [ "proc-macro2", ] @@ -958,23 +1351,83 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.15", +] + +[[package]] +name = "redlib" +version = "0.36.0" +dependencies = [ + "arc-swap", + "async-recursion", + "base2048", + "base64 0.22.1", + "bincode", + "brotli", + "build_html", + "cached", + "chrono", + "clap", + "cookie", + "dotenvy", + "fastrand", + "futures-lite", + "htmlescape", + "hyper", + "hyper-rustls", + "libflate", + "lipsum", + "log", + "once_cell", + "percent-encoding", + "pretty_env_logger", + "pulldown-cmark", + "regex", + "revision", + "rinja", + "route-recognizer", + "rss", + "rust-embed", + "sealed_test", + "serde", + "serde_json", + "serde_json_path", + "serde_urlencoded", + "serde_yaml", + "tegen", + "time", + "tokio", + "toml", + "url", + "uuid", ] [[package]] name = "redox_syscall" -version = "0.2.16" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1" dependencies = [ "bitflags", ] [[package]] name = "regex" 
-version = "1.7.1" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -983,23 +1436,78 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.28" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "revision" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22f53179a035f881adad8c4d58a2c599c6b4a8325b989c68d178d7a34d1b1e4c" +dependencies = [ + "revision-derive", +] + +[[package]] +name = "revision-derive" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0ec466e5d8dca9965eb6871879677bef5590cf7525ad96cae14376efb75073" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] [[package]] name = "ring" -version = "0.16.20" +version = "0.17.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +checksum = "70ac5d832aa16abd7d1def883a8545280c20a60f523a370aa3a9617c2b8550ee" dependencies = [ "cc", + "cfg-if", + "getrandom 0.2.15", "libc", - "once_cell", - "spin", "untrusted", - "web-sys", - "winapi", + "windows-sys 0.52.0", +] + +[[package]] +name = "rinja" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dc4940d00595430b3d7d5a01f6222b5e5b51395d1120bdb28d854bb8abb17a5" +dependencies = [ + "itoa", + "rinja_derive", +] + +[[package]] +name = "rinja_derive" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d9ed0146aef6e2825f1b1515f074510549efba38d71f4554eec32eb36ba18b" +dependencies = [ + "memchr", + "mime", + "mime_guess", + "proc-macro2", + "quote", + "rinja_parser", + "rustc-hash", + "syn", +] + +[[package]] +name = "rinja_parser" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93f9a866e2e00a7a1fb27e46e9e324a6f7c0e7edc4543cae1d38f4e4a100c610" +dependencies = [ + "memchr", + "nom", ] [[package]] @@ -1015,10 +1523,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" [[package]] -name = "rust-embed" -version = "6.4.2" +name = "rss" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "283ffe2f866869428c92e0d61c2f35dfb4355293cdfdc48f49e895c15f1333d1" +checksum = "b2107738f003660f0a91f56fd3e3bd3ab5d918b2ddaf1e1ec2136fb1c46f71bf" +dependencies = [ + "atom_syndication", + "derive_builder", + "never", + "quick-xml", +] + +[[package]] +name = "rust-embed" +version = "8.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b3aba5104622db5c9fc61098de54708feb732e7763d7faa2fa625899f00bf6f" dependencies = [ "rust-embed-impl", "rust-embed-utils", @@ -1027,9 
+1547,9 @@ dependencies = [ [[package]] name = "rust-embed-impl" -version = "6.3.1" +version = "8.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31ab23d42d71fb9be1b643fe6765d292c5e14d46912d13f3ae2815ca048ea04d" +checksum = "1f198c73be048d2c5aa8e12f7960ad08443e56fd39cc26336719fdb4ea0ebaae" dependencies = [ "proc-macro2", "quote", @@ -1040,9 +1560,9 @@ dependencies = [ [[package]] name = "rust-embed-utils" -version = "7.3.0" +version = "8.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1669d81dfabd1b5f8e2856b8bbe146c6192b0ba22162edc738ac0a5de18f054" +checksum = "5a2fcdc9f40c8dc2922842ca9add611ad19f332227fc651d015881ad1552bd9a" dependencies = [ "globset", "sha2", @@ -1050,36 +1570,47 @@ dependencies = [ ] [[package]] -name = "rustix" -version = "0.36.8" +name = "rustc-demangle" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43abb88211988493c1abb44a70efa56ff0ce98f233b7b276146f1f3f7ba9644" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustix" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dade4812df5c384711475be5fcd8c162555352945401aed22a35bffeab61f657" dependencies = [ "bitflags", "errno", - "io-lifetimes", "libc", "linux-raw-sys", - "windows-sys 0.45.0", + "windows-sys 0.59.0", ] [[package]] name = "rustls" -version = "0.20.8" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", "ring", + "rustls-webpki", "sct", - "webpki", ] [[package]] name = "rustls-native-certs" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", "rustls-pemfile", @@ -1089,13 +1620,29 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "base64", + "base64 0.21.7", ] +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" + [[package]] name = "rusty-forkfork" version = "0.4.0" @@ -1110,9 +1657,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.12" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" +checksum = 
"28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "same-file" @@ -1125,24 +1672,24 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.21" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ - "windows-sys 0.42.0", + "windows-sys 0.59.0", ] [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ "ring", "untrusted", @@ -1150,9 +1697,9 @@ dependencies = [ [[package]] name = "sealed_test" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a608d94641cc17fe203b102db2ae86d47a236630192f0244ddbbbb0044c0272" +checksum = "2a1867f8f005bd7fb73c367e2e45dd628417906a2ca27597fe59cbf04279a222" dependencies = [ "fs_extra", "rusty-forkfork", @@ -1162,9 +1709,9 @@ dependencies = [ [[package]] name = "sealed_test_derive" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b672e005ae58fef5da619d90b9f1c5b44b061890f4a371b3c96257a8a15e697" +checksum = "77253fb2d4451418d07025826028bcb96ee42d3e58859689a70ce62908009db6" dependencies = [ "quote", "syn", @@ -1172,9 +1719,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.7.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bc1bb97804af6631813c55739f771071e0f2ed33ee20b68c86ec505d906356c" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags", "core-foundation", @@ -1185,9 +1732,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.6.1" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0160a13a177a45bfb43ce71c01580998474f556ad854dcbca936dd2841a5c556" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" dependencies = [ "core-foundation-sys", "libc", @@ -1195,18 +1742,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.152" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.152" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" dependencies = [ "proc-macro2", "quote", @@ -1215,10 +1762,82 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.91" +version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_json_path" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b992cea3194eea663ba99a042d61cea4bd1872da37021af56f6a37e0359b9d33" +dependencies = [ + "inventory", + "nom", + "regex", + "serde", + "serde_json", + "serde_json_path_core", + "serde_json_path_macros", + "thiserror 2.0.12", +] + +[[package]] +name = "serde_json_path_core" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dde67d8dfe7d4967b5a95e247d4148368ddd1e753e500adb34b3ffe40c6bc1bc" +dependencies = [ + "inventory", + "serde", + "serde_json", + "thiserror 2.0.12", +] + +[[package]] +name = "serde_json_path_macros" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "517acfa7f77ddaf5c43d5f119c44a683774e130b4247b7d3210f8924506cfac8" +dependencies = [ + "inventory", + "serde_json_path_core", + "serde_json_path_macros_internal", +] + +[[package]] +name = "serde_json_path_macros_internal" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aafbefbe175fa9bf03ca83ef89beecff7d2a95aaacd5732325b90ac8c3bd7b90" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_spanned" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", "itoa", "ryu", "serde", @@ -1226,9 +1845,9 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.17" +version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fb06d4b6cdaef0e0c51fa881acb721bed3c924cfaa71d9c94a3b771dfdf6567" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ "indexmap", "itoa", @@ -1239,9 +1858,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", @@ -1249,56 +1868,62 @@ dependencies = [ ] [[package]] -name = "signal-hook-registry" -version = "1.4.0" +name = "shlex" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] [[package]] name = "slab" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +checksum = 
"8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg", ] [[package]] name = "smallvec" -version = "1.10.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" [[package]] name = "socket2" -version = "0.4.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", - "winapi", + "windows-sys 0.52.0", ] [[package]] -name = "spin" -version = "0.5.2" +name = "stable_deref_trait" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "strsim" -version = "0.10.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "syn" -version = "1.0.107" +version = "2.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +checksum = "e02e925281e18ffd9d640e234264753c43edc62d64b2d4cf898f1bc5e75f3fc2" dependencies = [ "proc-macro2", "quote", @@ -1306,32 +1931,82 @@ dependencies = [ ] [[package]] -name = "tempfile" -version = "3.4.0" +name = "synstructure" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tegen" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a2d5a357b7c859b410139734a875136473c3b18b1bbd8d5bdc1769d9002acd" +dependencies = [ + "rand", +] + +[[package]] +name = "tempfile" +version = "3.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c317e0a526ee6120d8dabad239c8dadca62b24b6f168914bbbc8e2fb1f0e567" dependencies = [ "cfg-if", "fastrand", - "redox_syscall", + "getrandom 0.3.1", + "once_cell", "rustix", - "windows-sys 0.42.0", + "windows-sys 0.59.0", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", ] [[package]] name = "thiserror" -version = "1.0.38" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +dependencies = [ + "thiserror-impl 2.0.12", ] [[package]] name = 
"thiserror-impl" -version = "1.0.38" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", @@ -1340,13 +2015,16 @@ dependencies = [ [[package]] name = "time" -version = "0.3.17" +version = "0.3.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +checksum = "dad298b01a40a23aac4580b67e3dbedb7cc8402f3592d7f49469de2ea4aecdd8" dependencies = [ + "deranged", "itoa", "libc", + "num-conv", "num_threads", + "powerfmt", "serde", "time-core", "time-macros", @@ -1354,59 +2032,53 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.0" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" +checksum = "765c97a5b985b7c11d7bc27fa927dc4fe6af3a6dfb021d28deb60d3bf51e76ef" [[package]] name = "time-macros" -version = "0.2.6" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +checksum = "e8093bc3e81c3bc5f7879de09619d06c9a5a5e45ca44dfeeb7225bae38005c5c" dependencies = [ + "num-conv", "time-core", ] [[package]] -name = "tinyvec" -version = "1.6.0" +name = "tinystr" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" dependencies = [ - "tinyvec_macros", + "displaydoc", + "zerovec", ] -[[package]] -name = "tinyvec_macros" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" - [[package]] name = "tokio" -version = "1.24.2" +version = "1.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a12a59981d9e3c38d216785b0c37399f6e415e8d0712047620f189371b0bb" +checksum = "9975ea0f48b5aa3972bf2d888c238182458437cc2a19374b81b25cdf1023fb3a" dependencies = [ - "autocfg", + "backtrace", "bytes", "libc", - "memchr", "mio", - "num_cpus", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys 0.42.0", + "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" -version = "1.8.2" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", @@ -1415,123 +2087,127 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.23.4" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ "rustls", "tokio", - "webpki", ] [[package]] 
name = "tokio-util" -version = "0.7.4" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb2e075f03b3d66d8d8785356224ba688d2906a371015e225beeb65ca92c740" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", - "tracing", ] [[package]] name = "toml" -version = "0.5.10" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] -name = "tower-service" -version = "0.3.2" +name = "toml_edit" +version = "0.22.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ - "cfg-if", "pin-project-lite", "tracing-core", ] [[package]] name = "tracing-core" -version = "0.1.30" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", ] [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.16.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "unicase" -version = "2.6.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" -dependencies = [ - "version_check", -] - -[[package]] -name = "unicode-bidi" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.6" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" - -[[package]] -name = "unicode-normalization" -version = "0.1.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" -dependencies = [ - "tinyvec", -] +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unsafe-libyaml" -version = "0.2.5" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc7ed8ba44ca06be78ea1ad2c3682a43349126c8818054231ee6f4748012aed2" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "untrusted" -version = "0.7.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.3.1" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -1539,44 +2215,57 @@ dependencies = [ ] [[package]] -name = "version_check" -version = "0.9.4" +name = "utf16_iter" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0f540e3240398cce6128b64ba83fdbdd86129c16a3aa1a3a252efd66eb3d587" +dependencies = [ + "getrandom 0.3.1", +] + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wait-timeout" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" dependencies = [ "libc", ] -[[package]] -name = "waker-fn" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" - [[package]] name = "walkdir" -version = "2.3.2" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", - "winapi", "winapi-util", ] [[package]] name = "want" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ - "log", "try-lock", ] @@ -1587,24 +2276,33 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] -name = "wasm-bindgen" -version = "0.2.83" +name = "wasi" +version = "0.13.3+wasi-0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268" +checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.83" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", "syn", @@ -1613,9 +2311,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.83" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -1623,9 +2321,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.83" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", @@ -1636,94 +2334,60 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.83" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] -name = "web-sys" -version = "0.3.60" +name = "web-time" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ "js-sys", "wasm-bindgen", ] -[[package]] -name = "webpki" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" -dependencies = [ - "ring", - "untrusted", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows-sys" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows-sys 0.59.0", ] [[package]] name = "windows-sys" -version = "0.45.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" -version = "0.42.1" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", + "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", @@ -1732,42 +2396,166 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.42.1" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" -version = "0.42.1" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" -version = "0.42.1" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" -version = "0.42.1" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" -version = "0.42.1" +version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.1" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" -version = "0.42.1" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winnow" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" +dependencies = [ + "memchr", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +dependencies = [ + "bitflags", +] + +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + 
+[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/Cargo.toml b/Cargo.toml index fe0d8df..bb76d0a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,38 +1,65 @@ [package] -name = "libreddit" +name = "redlib" description = " Alternative private front-end to Reddit" -license = "AGPL-3.0" -repository = "https://github.com/spikecodes/libreddit" -version = "0.30.1" -authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"] +license = "AGPL-3.0-only" +repository = "https://github.com/redlib-org/redlib" +version = "0.36.0" +authors = [ + "Matthew Esposito ", + "spikecodes <19519553+spikecodes@users.noreply.github.com>", +] edition = "2021" +default-run = "redlib" [dependencies] -askama = { version = "0.11.1", default-features = false } -cached = "0.42.0" -clap = { version = "4.1.1", default-features = false, features = ["std", "env"] } -regex = "1.7.1" -serde = { version = "1.0.152", features = ["derive"] } -cookie = "0.16.2" -futures-lite = "1.12.0" -hyper = { version = "0.14.23", features = ["full"] } -hyper-rustls = "0.23.2" -percent-encoding = "2.2.0" +rinja = { version = "0.3.4", default-features = false } +cached = { version = "0.54.0", features = ["async"] } +clap = { version = "4.4.11", default-features = false, features = [ + "std", + "env", + "derive", +] } +regex = "1.10.2" +serde = { version = "1.0.193", features = ["derive"] } +cookie = "0.18.0" +futures-lite = "2.2.0" +hyper = { version = "0.14.31", features = ["full"] } +percent-encoding = "2.3.1" route-recognizer = "0.3.1" -serde_json = "1.0.91" -tokio = { version = "1.24.2", features = ["full"] } -time = { version = "0.3.17", features = ["local-offset"] } -url = "2.3.1" -rust-embed = { version = "6.4.2", features = ["include-exclude"] } -libflate = "1.2.0" -brotli = { version = "3.3.4", features = ["std"] } -toml = "0.5.10" -once_cell = "1.17.0" -serde_yaml = "0.9.16" -build_html = "2.2.0" +serde_json = "1.0.133" +tokio = { version = "1.35.1", features = ["full"] } +time = { version = "0.3.31", features = ["local-offset"] } +url = "2.5.0" +rust-embed = { version = "8.1.0", features = ["include-exclude"] } +libflate = "2.0.0" +brotli = { version = "7.0.0", features = ["std"] } +toml = "0.8.8" +once_cell = "1.19.0" +serde_yaml = "0.9.29" +build_html = "2.4.0" +uuid = { version = "1.6.1", features = ["v4"] } +base64 = "0.22.1" +fastrand = "2.0.1" +log = "0.4.20" +pretty_env_logger = "0.5.0" +dotenvy = "0.15.7" +rss = "2.0.7" +arc-swap = "1.7.1" +serde_json_path = "0.7.1" +async-recursion = "1.1.1" +pulldown-cmark = { version = "0.12.0", features = ["simd", "html"], default-features = false } +hyper-rustls = { version = "0.24.2", features = [ "http2" ] } +tegen = "0.1.4" +serde_urlencoded = "0.7.1" +chrono = { version = "0.4.39", default-features = false, features = [ "std" ] } +htmlescape = "0.3.1" +bincode = "1.3.3" +base2048 = "2.0.2" +revision = "0.10.0" + [dev-dependencies] -lipsum = "0.8.2" +lipsum = "0.9.0" sealed_test = "1.0.0" [profile.release] diff --git a/Dockerfile b/Dockerfile index 0820de4..8b275e7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,36 +1,20 @@ -#################################################################################################### -## Builder -#################################################################################################### -FROM rust:alpine AS 
builder +FROM alpine:3.19 -RUN apk add --no-cache musl-dev +ARG TARGET -WORKDIR /libreddit +RUN apk add --no-cache curl -COPY . . +RUN curl -L "https://github.com/redlib-org/redlib/releases/latest/download/redlib-${TARGET}.tar.gz" | \ + tar xz -C /usr/local/bin/ -RUN cargo build --target x86_64-unknown-linux-musl --release - -#################################################################################################### -## Final image -#################################################################################################### -FROM alpine:latest - -# Import ca-certificates from builder -COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates -COPY --from=builder /etc/ssl/certs /etc/ssl/certs - -# Copy our build -COPY --from=builder /libreddit/target/x86_64-unknown-linux-musl/release/libreddit /usr/local/bin/libreddit - -# Use an unprivileged user. -RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit -USER libreddit +RUN adduser --home /nonexistent --no-create-home --disabled-password redlib +USER redlib # Tell Docker to expose port 8080 EXPOSE 8080 -# Run a healthcheck every minute to make sure Libreddit is functional -HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1 +# Run a healthcheck every minute to make sure redlib is functional +HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider -q http://localhost:8080/settings || exit 1 + +CMD ["redlib"] -CMD ["libreddit"] \ No newline at end of file diff --git a/Dockerfile.alpine b/Dockerfile.alpine new file mode 100644 index 0000000..051476a --- /dev/null +++ b/Dockerfile.alpine @@ -0,0 +1,45 @@ +# supported versions here: https://hub.docker.com/_/rust +ARG ALPINE_VERSION=3.20 + +######################## +## builder image +######################## +FROM rust:alpine${ALPINE_VERSION} AS builder + +RUN apk add --no-cache musl-dev + +WORKDIR /redlib + +# download (most) dependencies in their own layer +COPY Cargo.lock Cargo.toml ./ +RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs +RUN cargo build --release --locked --bin redlib +RUN rm ./src/main.rs && rmdir ./src + +# copy the source and build the redlib binary +COPY . ./ +RUN cargo build --release --locked --bin redlib +RUN echo "finished building redlib!" + +######################## +## release image +######################## +FROM alpine:${ALPINE_VERSION} AS release + +# Import redlib binary from builder +COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib + +# Add non-root user for running redlib +RUN adduser --home /nonexistent --no-create-home --disabled-password redlib +USER redlib + +# Document that we intend to expose port 8080 to whoever runs the container +EXPOSE 8080 + +# Run a healthcheck every minute to make sure redlib is functional +HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1 + +# Add container metadata +LABEL org.opencontainers.image.authors="sigaloid" + +CMD ["redlib"] diff --git a/Dockerfile.arm b/Dockerfile.arm deleted file mode 100644 index 098bf13..0000000 --- a/Dockerfile.arm +++ /dev/null @@ -1,41 +0,0 @@ -#################################################################################################### -## Builder -#################################################################################################### -FROM rust:alpine AS builder - -RUN apk add --no-cache g++ git - -WORKDIR /usr/src/libreddit - -COPY . . 
- -# net.git-fetch-with-cli is specified in order to prevent a potential OOM kill -# in low memory environments. See: -# https://users.rust-lang.org/t/cargo-uses-too-much-memory-being-run-in-qemu/76531 -# This is tracked under issue #641. This also requires us to install git in the -# builder. -RUN cargo install --config net.git-fetch-with-cli=true --path . - -#################################################################################################### -## Final image -#################################################################################################### -FROM alpine:latest - -# Import ca-certificates from builder -COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates -COPY --from=builder /etc/ssl/certs /etc/ssl/certs - -# Copy our build -COPY --from=builder /usr/local/cargo/bin/libreddit /usr/local/bin/libreddit - -# Use an unprivileged user. -RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit -USER libreddit - -# Tell Docker to expose port 8080 -EXPOSE 8080 - -# Run a healthcheck every minute to make sure Libreddit is functional -HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1 - -CMD ["libreddit"] diff --git a/Dockerfile.armv7 b/Dockerfile.armv7 deleted file mode 100644 index d28141c..0000000 --- a/Dockerfile.armv7 +++ /dev/null @@ -1,43 +0,0 @@ -#################################################################################################### -## Builder -#################################################################################################### -FROM --platform=$BUILDPLATFORM rust:slim AS builder - -ENV CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER=arm-linux-gnueabihf-gcc -ENV CC_armv7_unknown_linux_musleabihf=arm-linux-gnueabihf-gcc - -RUN apt-get update && apt-get -y install gcc-arm-linux-gnueabihf \ - binutils-arm-linux-gnueabihf \ - musl-tools - -RUN rustup target add armv7-unknown-linux-musleabihf - -WORKDIR /libreddit - -COPY . . - -RUN cargo build --target armv7-unknown-linux-musleabihf --release - -#################################################################################################### -## Final image -#################################################################################################### -FROM alpine:latest - -# Import ca-certificates from builder -COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates -COPY --from=builder /etc/ssl/certs /etc/ssl/certs - -# Copy our build -COPY --from=builder /libreddit/target/armv7-unknown-linux-musleabihf/release/libreddit /usr/local/bin/libreddit - -# Use an unprivileged user. 
-RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit -USER libreddit - -# Tell Docker to expose port 8080 -EXPOSE 8080 - -# Run a healthcheck every minute to make sure Libreddit is functional -HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1 - -CMD ["libreddit"] diff --git a/Dockerfile.ubuntu b/Dockerfile.ubuntu new file mode 100644 index 0000000..2e277c5 --- /dev/null +++ b/Dockerfile.ubuntu @@ -0,0 +1,51 @@ +# supported versions here: https://hub.docker.com/_/rust +ARG RUST_BUILDER_VERSION=slim-bookworm +ARG UBUNTU_RELEASE_VERSION=noble + +######################## +## builder image +######################## +FROM rust:${RUST_BUILDER_VERSION} AS builder + +WORKDIR /redlib + +# download (most) dependencies in their own layer +COPY Cargo.lock Cargo.toml ./ +RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs +RUN cargo build --release --locked --bin redlib +RUN rm ./src/main.rs && rmdir ./src + +# copy the source and build the redlib binary +COPY . ./ +RUN cargo build --release --locked --bin redlib +RUN echo "finished building redlib!" + +######################## +## release image +######################## +FROM ubuntu:${UBUNTU_RELEASE_VERSION} AS release + +# Install ca-certificates +RUN apt-get update && apt-get install -y ca-certificates + +# Import redlib binary from builder +COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib + +# Add non-root user for running redlib +RUN useradd \ + --no-create-home \ + --password "!" \ + --comment "user for running redlib" \ + redlib +USER redlib + +# Document that we intend to expose port 8080 to whoever runs the container +EXPOSE 8080 + +# Run a healthcheck every minute to make sure redlib is functional +HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1 + +# Add container metadata +LABEL org.opencontainers.image.authors="sigaloid" + +CMD ["redlib"] diff --git a/README.md b/README.md index ff825ed..fcae126 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@ -# Libreddit +# Redlib -> An alternative private front-end to Reddit +> An alternative private front-end to Reddit, with its origins in [Libreddit](https://github.com/libreddit/libreddit). -![screenshot](https://i.ibb.co/QYbqTQt/libreddit-rust.png) +![screenshot](https://i.ibb.co/18vrdxk/redlib-rust.png) --- -**10 second pitch:** Libreddit is a portmanteau of "libre" (meaning freedom) and "Reddit". It is a private front-end like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libreddit.spike.codes/r/unpopularopinion) without being [tracked](#reddit). +**10-second pitch:** Redlib is a private front-end like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://farside.link/redlib/r/unpopularopinion) without being [tracked](#reddit). - 🚀 Fast: written in Rust for blazing-fast speeds and memory safety - ☁️ Light: no JavaScript, no ads, no tracking, no bloat @@ -15,75 +15,113 @@ --- -I appreciate any donations! Your support allows me to continue developing Libreddit. +## Table of Contents -Buy Me A Coffee -Donate using Liberapay +1. [Redlib](#redlib) +2. [Instances](#instances) +3. 
[About](#about) + - [Built with](#built-with) + - [How is it different from other Reddit front ends?](#how-is-it-different-from-other-reddit-front-ends) + - [Teddit](#teddit) + - [Libreddit](#libreddit) +4. [Comparison](#comparison) + - [Speed](#speed) + - [Privacy](#privacy) + - [Reddit](#reddit) + - [Redlib](#redlib-1) + - [Server](#server) +5. [Deployment](#deployment) + - [Docker](#docker) + - [Docker Compose](#docker-compose) + - [Docker CLI](#docker-cli) + - Podman + - Quadlets - -**Bitcoin:** `bc1qwyxjnafpu3gypcpgs025cw9wa7ryudtecmwa6y` - -**Monero:** `45FJrEuFPtG2o7QZz2Nps77TbHD4sPqxViwbdyV9A6ktfHiWs47UngG5zXPcLoDXAc8taeuBgeNjfeprwgeXYXhN3C9tVSR` + - [Binary](#binary) + - [Running as a systemd service](#running-as-a-systemd-service) + - [Building from source](#building-from-source) + - [Replit/Heroku/Glitch](#replit-heroku-glitch) + - [launchd (macOS)](#launchd-macos) +6. [Configuration](#configuration) + - [Instance settings](#instance-settings) + - [Default user settings](#default-user-settings) --- # Instances -🔗 **Want to automatically redirect Reddit links to Libreddit? Use [LibRedirect](https://github.com/libredirect/libredirect) or [Privacy Redirect](https://github.com/SimonBrazell/privacy-redirect)!** +> [!TIP] +> 🔗 **Want to automatically redirect Reddit links to Redlib? Use [LibRedirect](https://github.com/libredirect/libredirect) or [Privacy Redirect](https://github.com/SimonBrazell/privacy-redirect)!** -[Follow this link](https://github.com/libreddit/libreddit-instances/blob/master/instances.md) for an up-to-date table of instances in markdown format. This list is also available as [a machine-readable JSON](https://github.com/libreddit/libreddit-instances/blob/master/instances.json). +An up-to-date table of instances is available in [Markdown](https://github.com/redlib-org/redlib-instances/blob/main/instances.md) and [machine-readable JSON](https://github.com/redlib-org/redlib-instances/blob/main/instances.json). -Both files are part of the [libreddit-instances](https://github.com/libreddit/libreddit-instances) repository. To contribute your [self-hosted instance](#deployment) to the list, see the [libreddit-instances README](https://github.com/libreddit/libreddit-instances/blob/master/README.md). +Both files are part of the [redlib-instances](https://github.com/redlib-org/redlib-instances) repository. To contribute your [self-hosted instance](#deployment) to the list, see the [redlib-instances README](https://github.com/redlib-org/redlib-instances/blob/main/README.md). + +For information on instance uptime, see the [Uptime Robot status page](https://stats.uptimerobot.com/mpmqAs1G2Q). --- # About -Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/libreddit/libreddit), :octocat: [GitHub](https://github.com/libreddit/libreddit), and 🦊 [GitLab](https://gitlab.com/libreddit/libreddit). +> [!NOTE] +> Find Redlib on 💬 [Matrix](https://matrix.to/#/#redlib:matrix.org), 🐋 [Quay.io](https://quay.io/repository/redlib/redlib), :octocat: [GitHub](https://github.com/redlib-org/redlib), and 🦊 [GitLab](https://gitlab.com/redlib/redlib). + +Redlib hopes to provide an easier way to browse Reddit, without the ads, trackers, and bloat. Redlib was inspired by other alternative front-ends to popular services such as [Invidious](https://github.com/iv-org/invidious) for YouTube, [Nitter](https://github.com/zedeus/nitter) for Twitter, and [Bibliogram](https://sr.ht/~cadence/bibliogram/) for Instagram. 
+ +Redlib currently implements most of Reddit's (signed-out) functionalities but still lacks [a few features](https://github.com/redlib-org/redlib/issues). ## Built with - [Rust](https://www.rust-lang.org/) - Programming language - [Hyper](https://github.com/hyperium/hyper) - HTTP server and client -- [Askama](https://github.com/djc/askama) - Templating engine -- [Rustls](https://github.com/ctz/rustls) - TLS library +- [Rinja](https://github.com/rinja-rs/rinja) - Templating engine +- [Rustls](https://github.com/rustls/rustls) - TLS library -## Info -Libreddit hopes to provide an easier way to browse Reddit, without the ads, trackers, and bloat. Libreddit was inspired by other alternative front-ends to popular services such as [Invidious](https://github.com/iv-org/invidious) for YouTube, [Nitter](https://github.com/zedeus/nitter) for Twitter, and [Bibliogram](https://sr.ht/~cadence/bibliogram/) for Instagram. +## How is it different from other Reddit front ends? -Libreddit currently implements most of Reddit's (signed-out) functionalities but still lacks [a few features](https://github.com/libreddit/libreddit/issues). +### Teddit -## How does it compare to Teddit? - -Teddit is another awesome open source project designed to provide an alternative frontend to Reddit. There is no connection between the two and you're welcome to use whichever one you favor. Competition fosters innovation and Teddit's release has motivated me to build Libreddit into an even more polished product. +Teddit is another awesome open source project designed to provide an alternative frontend to Reddit. There is no connection between the two, and you're welcome to use whichever one you favor. Competition fosters innovation and Teddit's release has motivated me to build Redlib into an even more polished product. If you are looking to compare, the biggest differences I have noticed are: -- Libreddit is themed around Reddit's redesign whereas Teddit appears to stick much closer to Reddit's old design. This may suit some users better as design is always subjective. -- Libreddit is written in [Rust](https://www.rust-lang.org) for speed and memory safety. It uses [Hyper](https://hyper.rs), a speedy and lightweight HTTP server/client implementation. + +- Redlib is themed around Reddit's redesign whereas Teddit appears to stick much closer to Reddit's old design. This may suit some users better as design is always subjective. +- Redlib is written in [Rust](https://www.rust-lang.org) for speed and memory safety. It uses [Hyper](https://hyper.rs), a speedy and lightweight HTTP server/client implementation. + +### Libreddit + +While originating as a fork of Libreddit, the name "Redlib" was adopted to avoid legal issues, as Reddit only allows the use of their name if structured as "XYZ For Reddit". + +Several technical improvements have also been made, including: + +- **OAuth token spoofing**: To circumvent rate limits imposed by Reddit, OAuth token spoofing is used to mimick the most common iOS and Android clients. While spoofing both iOS and Android clients was explored, only the Android client was chosen due to content restrictions when using an anonymous iOS client. +- **Token refreshing**: The authentication token is refreshed every 24 hours, emulating the behavior of the official Android app. +- **HTTP header mimicking**: Efforts are made to send along as many of the official app's headers as possible to reduce the likelihood of Reddit's crackdown on Redlib's requests. 
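The spoofed client identities appear to be drawn from a generated list of real app versions kept in `src/oauth_resources.rs`. As a rough sketch based on the `scripts/update_oauth_resources.sh` script added in this change (it states `curl`, `rg`, and `jq` as requirements), that list can be refreshed like so:

```bash
# Regenerate src/oauth_resources.rs with current iOS and Android app versions
# (requires curl, ripgrep (rg) and jq; run from the repository root)
./scripts/update_oauth_resources.sh
```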
--- # Comparison -This section outlines how Libreddit compares to Reddit. +This section outlines how Redlib compares to Reddit in terms of speed and privacy. ## Speed -Lasted tested Nov 11, 2022. +Last tested on January 12, 2024. -Results from Google PageSpeed Insights ([Libreddit Report](https://pagespeed.web.dev/report?url=https%3A%2F%2Flibreddit.spike.codes%2F), [Reddit Report](https://pagespeed.web.dev/report?url=https://www.reddit.com)). +Results from Google PageSpeed Insights ([Redlib Report](https://pagespeed.web.dev/report?url=https%3A%2F%2Fredlib.matthew.science%2F), [Reddit Report](https://pagespeed.web.dev/report?url=https://www.reddit.com)). -| | Libreddit | Reddit | -|------------------------|-------------|-----------| -| Requests | 60 | 83 | -| Speed Index | 2.0s | 10.4s | -| Time to Interactive | **2.8s** | **12.4s** | +| Performance metric | Redlib | Reddit | +| ------------------- | -------- | --------- | +| Speed Index | 0.6s | 1.9s | +| Performance Score | 100% | 64% | +| Time to Interactive | **2.8s** | **12.4s** | ## Privacy ### Reddit **Logging:** According to Reddit's [privacy policy](https://www.redditinc.com/policies/privacy-policy), they "may [automatically] log information" including: + - IP address - User-agent string - Browser type @@ -97,12 +135,14 @@ Results from Google PageSpeed Insights ([Libreddit Report](https://pagespeed.web - Search terms **Location:** The same privacy policy goes on to describe that location data may be collected through the use of: + - GPS (consensual) - Bluetooth (consensual) - Content associated with a location (consensual) - Your IP Address **Cookies:** Reddit's [cookie notice](https://www.redditinc.com/policies/cookies) documents the array of cookies used by Reddit including/regarding: + - Authentication - Functionality - Analytics and Performance @@ -110,180 +150,295 @@ Results from Google PageSpeed Insights ([Libreddit Report](https://pagespeed.web - Third-Party Cookies - Third-Party Site -### Libreddit +### Redlib -For transparency, I hope to describe all the ways Libreddit handles user privacy. +For transparency, I hope to describe all the ways Redlib handles user privacy. #### Server -* **Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs nothing. When debugging (running from source without `--release`), Libreddit logs post IDs fetched to aid with troubleshooting. +- **Logging:** In production (when running the binary, hosting with docker, or using the official instances), Redlib logs nothing. When debugging (running from source without `--release`), Redlib logs post IDs fetched to aid with troubleshooting. -* **Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libreddit.spike.codes/settings). These are not cross-site cookies and the cookies hold no personal data. - -#### Official instance (libreddit.spike.codes) - -The official instance is hosted at https://libreddit.spike.codes. - -* **Server:** The official instance runs a production binary, and thus logs nothing. - -* **DNS:** The domain for the official instance uses Cloudflare as the DNS resolver. However, this site is not proxied through Cloudflare, and thus Cloudflare doesn't have access to user traffic. - -* **Hosting:** The official instance is hosted on [Replit](https://replit.com/), which monitors usage to prevent abuse. 
I can understand if this invalidates certain users' threat models, and therefore, self-hosting, using unofficial instances, and browsing through Tor are welcomed. - ---- - -# Installation - -## 1) Cargo - -Make sure Rust stable is installed along with `cargo`, Rust's package manager. - -``` -cargo install libreddit -``` - -## 2) Docker - -Deploy the [Docker image](https://hub.docker.com/r/libreddit/libreddit) of Libreddit: -``` -docker pull libreddit/libreddit -docker run -d --name libreddit -p 8080:8080 libreddit/libreddit -``` - -Deploy using a different port (in this case, port 80): -``` -docker pull libreddit/libreddit -docker run -d --name libreddit -p 80:8080 libreddit/libreddit -``` - -To deploy on `arm64` platforms, simply replace `libreddit/libreddit` in the commands above with `libreddit/libreddit:arm`. - -To deploy on `armv7` platforms, simply replace `libreddit/libreddit` in the commands above with `libreddit/libreddit:armv7`. - -## 3) AUR - -For ArchLinux users, Libreddit is available from the AUR as [`libreddit-git`](https://aur.archlinux.org/packages/libreddit-git). - -``` -yay -S libreddit-git -``` -## 4) NetBSD/pkgsrc - -For NetBSD users, Libreddit is available from the official repositories. - -``` -pkgin install libreddit -``` - -Or, if you prefer to build from source - -``` -cd /usr/pkgsrc/libreddit -make install -``` - -## 5) GitHub Releases - -If you're on Linux and none of these methods work for you, you can grab a Linux binary from [the newest release](https://github.com/libreddit/libreddit/releases/latest). - -## 6) Replit/Heroku/Glitch - -> **Warning** -> These are free hosting options but they are *not* private and will monitor server usage to prevent abuse. If you need a free and easy setup, this method may work best for you. - -Run on Repl.it -[![Deploy](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy?template=https://github.com/libreddit/libreddit) -[![Remix on Glitch](https://cdn.glitch.com/2703baf2-b643-4da7-ab91-7ee2a2d00b5b%2Fremix-button-v2.svg)](https://glitch.com/edit/#!/remix/libreddit) +- **Cookies:** Redlib uses optional cookies to store any configured settings in the settings menu. These are not cross-site cookies and the cookies hold no personal data. --- # Deployment -Once installed, deploy Libreddit to `0.0.0.0:8080` by running: +This section covers multiple ways of deploying Redlib. Using [Docker](#docker) is recommended for production. -``` -libreddit -``` +For configuration options, see the [Configuration section](#Configuration). -## Instance settings +## Docker -Assign a default value for each instance-specific setting by passing environment variables to Libreddit in the format `LIBREDDIT_{X}`. Replace `{X}` with the setting name (see list below) in capital letters. +[Docker](https://www.docker.com) lets you run containerized applications. Containers are loosely isolated environments that are lightweight and contain everything needed to run the application, so there's no need to rely on what's installed on the host. -|Name|Possible values|Default value|Description| -|-|-|-|-| -| `SFW_ONLY` | `["on", "off"]` | `off` | Enables SFW-only mode for the instance, i.e. all NSFW content is filtered. | -| `BANNER` | String | (empty) | Allows the server to set a banner to be displayed. Currently this is displayed on the instance info page. | +Container images for Redlib are available at [quay.io](https://quay.io/repository/redlib/redlib), with support for `amd64`, `arm64`, and `armv7` platforms. 
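Because the images are published for `amd64`, `arm64`, and `armv7`, Docker normally selects the right variant for the host automatically. If you need to pin an architecture explicitly (for example when pre-pulling an image for another machine), the standard `--platform` flag should work; a minimal sketch:

```bash
# Explicitly pull the arm64 variant of the Redlib image
docker pull --platform linux/arm64 quay.io/redlib/redlib:latest
```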
-## Default User Settings +### Docker Compose -Assign a default value for each user-modifiable setting by passing environment variables to Libreddit in the format `LIBREDDIT_DEFAULT_{Y}`. Replace `{Y}` with the setting name (see list below) in capital letters. +> [!IMPORTANT] +> These instructions assume the [Compose plugin](https://docs.docker.com/compose/migrate/#what-are-the-differences-between-compose-v1-and-compose-v2) has already been installed. If not, follow these [instructions on the Docker Docs](https://docs.docker.com/compose/install) for how to do so. -| Name | Possible values | Default value | -|-------------------------|-----------------------------------------------------------------------------------------------------|---------------| -| `THEME` | `["system", "light", "dark", "black", "dracula", "nord", "laserwave", "violet", "gold", "rosebox", "gruvboxdark", "gruvboxlight"]` | `system` | -| `FRONT_PAGE` | `["default", "popular", "all"]` | `default` | -| `LAYOUT` | `["card", "clean", "compact"]` | `card` | -| `WIDE` | `["on", "off"]` | `off` | -| `POST_SORT` | `["hot", "new", "top", "rising", "controversial"]` | `hot` | -| `COMMENT_SORT` | `["confidence", "top", "new", "controversial", "old"]` | `confidence` | -| `SHOW_NSFW` | `["on", "off"]` | `off` | -| `BLUR_NSFW` | `["on", "off"]` | `off` | -| `USE_HLS` | `["on", "off"]` | `off` | -| `HIDE_HLS_NOTIFICATION` | `["on", "off"]` | `off` | -| `AUTOPLAY_VIDEOS` | `["on", "off"]` | `off` | -| `SUBSCRIPTIONS` | `+`-delimited list of subreddits (`sub1+sub2+sub3+...`) | _(none)_ | -| `HIDE_AWARDS` | `["on", "off"]` | `off` -| `DISABLE_VISIT_REDDIT_CONFIRMATION` | `["on", "off"]` | `off` | +Copy `compose.yaml` and modify any relevant values (for example, the ports Redlib should listen on). -You can also configure Libreddit with a configuration file. An example `libreddit.toml` can be found below: - -```toml -LIBREDDIT_DEFAULT_WIDE = "on" -LIBREDDIT_DEFAULT_USE_HLS = "on" -``` - -### Examples +Start Redlib in detached mode (running in the background): ```bash -LIBREDDIT_DEFAULT_SHOW_NSFW=on libreddit +docker compose up -d ``` +Stream logs from the Redlib container: + ```bash -LIBREDDIT_DEFAULT_WIDE=on LIBREDDIT_DEFAULT_THEME=dark libreddit -r +docker logs -f redlib ``` -## Proxying using NGINX +### Docker CLI -> **Note** -> If you're [proxying Libreddit through an NGINX Reverse Proxy](https://github.com/libreddit/libreddit/issues/122#issuecomment-782226853), add +Deploy Redlib: + +```bash +docker pull quay.io/redlib/redlib:latest +docker run -d --name redlib -p 8080:8080 quay.io/redlib/redlib:latest +``` + +Deploy using a different port on the host (in this case, port 80): + +```bash +docker pull quay.io/redlib/redlib:latest +docker run -d --name redlib -p 80:8080 quay.io/redlib/redlib:latest +``` + +If you're using a reverse proxy in front of Redlib, prefix the port numbers with `127.0.0.1` so that Redlib only listens on the host port **locally**. For example, if the host port for Redlib is `8080`, specify `127.0.0.1:8080:8080`. + +Stream logs from the Redlib container: + +```bash +docker logs -f redlib +``` +## Podman + +[Podman](https://podman.io/) lets you run containerized applications in a rootless fashion. Containers are loosely isolated environments that are lightweight and contain everything needed to run the application, so there's no need to rely on what's installed on the host. 
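Podman's command-line interface is largely compatible with Docker's, so if you prefer not to use the Quadlets setup described below, a plain `podman run` mirroring the Docker CLI commands above should also work; a rough sketch:

```bash
# Run Redlib rootless with Podman, publishing port 8080 on the host
podman run -d --name redlib -p 8080:8080 quay.io/redlib/redlib:latest
```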
+ +Container images for Redlib are available at [quay.io](https://quay.io/repository/redlib/redlib), with support for `amd64`, `arm64`, and `armv7` platforms. + +### Quadlets + +> [!IMPORTANT] +> These instructions assume that you are on a systemd based distro with [podman](https://podman.io/). If not, follow these [instructions on podman's website](https://podman.io/docs/installation) for how to do so. +> It also assumes you have used `loginctl enable-linger ` to enable the service to start for your user without logging in. + +Copy the `redlib.container` and `.env.example` files to `.config/containers/systemd/` and modify any relevant values (for example, the ports Redlib should listen on, renaming the .env file and editing its values, etc.). + +To start Redlib either reboot or follow the instructions below: + +Notify systemd of the new files +```bash +systemctl --user daemon-reload +``` + +Start the newly generated service file + +```bash +systemctl --user start redlib.service +``` + +You can check the status of your container by using the following command: +```bash +systemctl --user status redlib.service +``` + +## Binary + +If you're on Linux, you can grab a binary from [the newest release](https://github.com/redlib-org/redlib/releases/latest) from GitHub. + +Download the binary using [Wget](https://www.gnu.org/software/wget/): + +```bash +wget https://github.com/redlib-org/redlib/releases/download/v0.31.0/redlib +``` + +Make the binary executable and change its ownership to `root`: + +```bash +sudo chmod +x redlib && sudo chown root:root redlib +``` + +Copy the binary to `/usr/bin`: + +```bash +sudo cp ./redlib /usr/bin/redlib +``` + +Deploy Redlib to `0.0.0.0:8080`: + +```bash +redlib +``` + +> [!IMPORTANT] +> If you're proxying Redlib through NGINX (see [issue #122](https://github.com/libreddit/libreddit/issues/122#issuecomment-782226853)), add +> > ```nginx > proxy_http_version 1.1; > ``` +> > to your NGINX configuration file above your `proxy_pass` line. -## systemd +### Running as a systemd service -You can use the systemd service available in `contrib/libreddit.service` -(install it on `/etc/systemd/system/libreddit.service`). +You can use the systemd service available in `contrib/redlib.service` +(install it on `/etc/systemd/system/redlib.service`). That service can be optionally configured in terms of environment variables by -creating a file in `/etc/libreddit.conf`. Use the `contrib/libreddit.conf` as a -template. You can also add the `LIBREDDIT_DEFAULT__{X}` settings explained +creating a file in `/etc/redlib.conf`. Use the `contrib/redlib.conf` as a +template. You can also add the `REDLIB_DEFAULT__{X}` settings explained above. When "Proxying using NGINX" where the proxy is on the same machine, you should guarantee nginx waits for this service to start. Edit -`/etc/systemd/system/libreddit.service.d/reverse-proxy.conf`: +`/etc/systemd/system/redlib.service.d/reverse-proxy.conf`: ```conf [Unit] Before=nginx.service ``` -## Building +## Building from source -``` -git clone https://github.com/libreddit/libreddit -cd libreddit +To deploy Redlib with changes not yet included in the latest release, you can build the application from source. + +```bash +git clone https://github.com/redlib-org/redlib && cd redlib cargo run ``` + +## Replit/Heroku + +> [!WARNING] +> These are free hosting options, but they are _not_ private and will monitor server usage to prevent abuse. If you need a free and easy setup, this method may work best for you. 
+ +Run on Repl.it +[![Deploy](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy?template=https://github.com/redlib-org/redlib) + +## launchd (macOS) + +If you are on macOS, you can use the [launchd](https://en.wikipedia.org/wiki/Launchd) service available in `contrib/redlib.plist`. + +Install it with `cp contrib/redlib.plist ~/Library/LaunchAgents/`. + +Load and start it with `launchctl load ~/Library/LaunchAgents/redlib.plist`. + + + + + +--- + +# Configuration + +You can configure Redlib further using environment variables. For example: + +```bash +REDLIB_DEFAULT_SHOW_NSFW=on redlib +``` + +```bash +REDLIB_DEFAULT_WIDE=on REDLIB_DEFAULT_THEME=dark redlib -r +``` + +You can also configure Redlib with a configuration file named `redlib.toml`. For example: + +```toml +REDLIB_DEFAULT_WIDE = "on" +REDLIB_DEFAULT_USE_HLS = "on" +``` + +> [!NOTE] +> If you're deploying Redlib using the **Docker CLI or Docker Compose**, environment variables can be defined in a [`.env` file](https://docs.docker.com/compose/environment-variables/set-environment-variables/), allowing you to centralize and manage configuration in one place. +> +> To configure Redlib using a `.env` file, copy the `.env.example` file to `.env` and edit it accordingly. +> +> If using the Docker CLI, add ` --env-file .env` to the command that runs Redlib. For example: +> +> ```bash +> docker run -d --name redlib -p 8080:8080 --env-file .env quay.io/redlib/redlib:latest +> ``` +> +> If using Docker Compose, no changes are needed as the `.env` file is already referenced in `compose.yaml` via the `env_file: .env` line. + +## Command Line Flags + +Redlib supports the following command line flags: + +- `-4`, `--ipv4-only`: Listen on IPv4 only. +- `-6`, `--ipv6-only`: Listen on IPv6 only. +- `-r`, `--redirect-https`: Redirect all HTTP requests to HTTPS (no longer functional). +- `-a`, `--address
`: Sets address to listen on. Default is `[::]`. +- `-p`, `--port `: Port to listen on. Default is `8080`. +- `-H`, `--hsts `: HSTS header to tell browsers that this site should only be accessed over HTTPS. Default is `604800`. + +## Instance settings + +Assign a default value for each instance-specific setting by passing environment variables to Redlib in the format `REDLIB_{X}`. Replace `{X}` with the setting name (see list below) in capital letters. + +| Name | Possible values | Default value | Description | +| ------------------------- | --------------- | ---------------- | --------------------------------------------------------------------------------------------------------- | +| `SFW_ONLY` | `["on", "off"]` | `off` | Enables SFW-only mode for the instance, i.e. all NSFW content is filtered. | +| `BANNER` | String | (empty) | Allows the server to set a banner to be displayed. Currently this is displayed on the instance info page. | +| `ROBOTS_DISABLE_INDEXING` | `["on", "off"]` | `off` | Disables indexing of the instance by search engines. | +| `PUSHSHIFT_FRONTEND` | String | `undelete.pullpush.io` | Allows the server to set the Pushshift frontend to be used with "removed" links. | +| `PORT` | Integer 0-65535 | `8080` | The **internal** port Redlib listens on. | +| `ENABLE_RSS` | `["on", "off"]` | `off` | Enables RSS feed generation. | +| `FULL_URL` | String | (empty) | Allows for proper URLs (for now, only needed by RSS) +## Default user settings + +Assign a default value for each user-modifiable setting by passing environment variables to Redlib in the format `REDLIB_DEFAULT_{Y}`. Replace `{Y}` with the setting name (see list below) in capital letters. + +| Name | Possible values | Default value | +| ----------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ------------- | +| `THEME` | `["system", "light", "dark", "black", "dracula", "nord", "laserwave", "violet", "gold", "rosebox", "gruvboxdark", "gruvboxlight", "tokyoNight", "icebergDark", "doomone", "libredditBlack", "libredditDark", "libredditLight"]` | `system` | +| `FRONT_PAGE` | `["default", "popular", "all"]` | `default` | +| `LAYOUT` | `["card", "clean", "compact"]` | `card` | +| `WIDE` | `["on", "off"]` | `off` | +| `POST_SORT` | `["hot", "new", "top", "rising", "controversial"]` | `hot` | +| `COMMENT_SORT` | `["confidence", "top", "new", "controversial", "old"]` | `confidence` | +| `BLUR_SPOILER` | `["on", "off"]` | `off` | +| `SHOW_NSFW` | `["on", "off"]` | `off` | +| `BLUR_NSFW` | `["on", "off"]` | `off` | +| `USE_HLS` | `["on", "off"]` | `off` | +| `HIDE_HLS_NOTIFICATION` | `["on", "off"]` | `off` | +| `AUTOPLAY_VIDEOS` | `["on", "off"]` | `off` | +| `SUBSCRIPTIONS` | `+`-delimited list of subreddits (`sub1+sub2+sub3+...`) | _(none)_ | +| `HIDE_AWARDS` | `["on", "off"]` | `off` | +| `DISABLE_VISIT_REDDIT_CONFIRMATION` | `["on", "off"]` | `off` | +| `HIDE_SCORE` | `["on", "off"]` | `off` | +| `HIDE_SIDEBAR_AND_SUMMARY` | `["on", "off"]` | `off` | +| `FIXED_NAVBAR` | `["on", "off"]` | `on` | +| `REMOVE_DEFAULT_FEEDS` | `["on", "off"]` | `off` | \ No newline at end of file diff --git a/app.json b/app.json index b4e0f3d..4af7cfe 100644 --- a/app.json +++ b/app.json @@ -1,5 +1,5 @@ { - "name": "Libreddit", + "name": "Redlib", "description": "Private front-end for Reddit", "buildpacks": [ { @@ -11,49 +11,73 @@ ], "stack": "container", "env": { - "LIBREDDIT_DEFAULT_THEME": { + "REDLIB_DEFAULT_THEME": { 
"required": false }, - "LIBREDDIT_DEFAULT_FRONT_PAGE": { + "REDLIB_DEFAULT_FRONT_PAGE": { "required": false }, - "LIBREDDIT_DEFAULT_LAYOUT": { + "REDLIB_DEFAULT_LAYOUT": { "required": false }, - "LIBREDDIT_DEFAULT_WIDE": { + "REDLIB_DEFAULT_WIDE": { "required": false }, - "LIBREDDIT_DEFAULT_COMMENT_SORT": { + "REDLIB_DEFAULT_COMMENT_SORT": { "required": false }, - "LIBREDDIT_DEFAULT_POST_SORT": { + "REDLIB_DEFAULT_POST_SORT": { "required": false }, - "LIBREDDIT_DEFAULT_SHOW_NSFW": { + "REDLIB_DEFAULT_BLUR_SPOILER": { "required": false }, - "LIBREDDIT_DEFAULT_BLUR_NSFW": { + "REDLIB_DEFAULT_SHOW_NSFW": { "required": false }, - "LIBREDDIT_USE_HLS": { + "REDLIB_DEFAULT_BLUR_NSFW": { "required": false }, - "LIBREDDIT_HIDE_HLS_NOTIFICATION": { + "REDLIB_USE_HLS": { "required": false }, - "LIBREDDIT_SFW_ONLY": { + "REDLIB_HIDE_HLS_NOTIFICATION": { "required": false }, - "LIBREDDIT_DEFAULT_HIDE_AWARDS": { + "REDLIB_SFW_ONLY": { "required": false }, - "LIBREDDIT_BANNER": { + "REDLIB_DEFAULT_HIDE_AWARDS": { "required": false }, - "LIBREDDIT_DEFAULT_SUBSCRIPTIONS": { + "REDLIB_DEFAULT_HIDE_SCORE": { "required": false }, - "LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION": { + "REDLIB_BANNER": { + "required": false + }, + "REDLIB_ROBOTS_DISABLE_INDEXING": { + "required": false + }, + "REDLIB_DEFAULT_SUBSCRIPTIONS": { + "required": false + }, + "REDLIB_DEFAULT_FILTERS": { + "required": false + }, + "REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION": { + "required": false + }, + "REDLIB_PUSHSHIFT_FRONTEND": { + "required": false + }, + "REDLIB_ENABLE_RSS": { + "required": false + }, + "REDLIB_FULL_URL": { + "required": false + }, + "REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS": { "required": false } } diff --git a/build.rs b/build.rs index 3ee44a4..0bdbda0 100644 --- a/build.rs +++ b/build.rs @@ -1,6 +1,4 @@ -use std::{ - process::{Command, ExitStatus, Output}, -}; +use std::process::{Command, ExitStatus, Output}; #[cfg(not(target_os = "windows"))] use std::os::unix::process::ExitStatusExt; @@ -9,6 +7,7 @@ use std::os::unix::process::ExitStatusExt; use std::os::windows::process::ExitStatusExt; fn main() { + println!("cargo:rerun-if-changed=src/"); let output = String::from_utf8( Command::new("git") .args(["rev-parse", "HEAD"]) diff --git a/compose.dev.yaml b/compose.dev.yaml new file mode 100644 index 0000000..8203b34 --- /dev/null +++ b/compose.dev.yaml @@ -0,0 +1,26 @@ +# docker-compose -f docker-compose.dev.yml up -d +version: "3.8" + +services: + redlib: + build: . 
+ restart: always + container_name: "redlib" + ports: + - 8080:8080 # Specify `127.0.0.1:8080:8080` instead if using a reverse proxy + user: nobody + read_only: true + security_opt: + - no-new-privileges:true + # - seccomp=seccomp-redlib.json + cap_drop: + - ALL + networks: + - redlib + healthcheck: + test: ["CMD", "wget", "--spider", "-q", "--tries=1", "http://localhost:8080/settings"] + interval: 5m + timeout: 3s + +networks: + redlib: diff --git a/compose.yaml b/compose.yaml new file mode 100644 index 0000000..4260d65 --- /dev/null +++ b/compose.yaml @@ -0,0 +1,24 @@ +services: + redlib: + image: quay.io/redlib/redlib:latest + restart: always + container_name: "redlib" + ports: + - 8080:8080 # Specify `127.0.0.1:8080:8080` instead if using a reverse proxy + user: nobody + read_only: true + security_opt: + - no-new-privileges:true + # - seccomp=seccomp-redlib.json + cap_drop: + - ALL + env_file: .env + networks: + - redlib + healthcheck: + test: ["CMD", "wget", "--spider", "-q", "--tries=1", "http://localhost:8080/settings"] + interval: 5m + timeout: 3s + +networks: + redlib: diff --git a/contrib/libreddit.conf b/contrib/libreddit.conf deleted file mode 100644 index dd91e3b..0000000 --- a/contrib/libreddit.conf +++ /dev/null @@ -1,2 +0,0 @@ -ADDRESS=0.0.0.0 -PORT=12345 diff --git a/contrib/redlib.conf b/contrib/redlib.conf new file mode 100644 index 0000000..e670455 --- /dev/null +++ b/contrib/redlib.conf @@ -0,0 +1,17 @@ +ADDRESS=0.0.0.0 +PORT=12345 +#REDLIB_DEFAULT_THEME=default +#REDLIB_DEFAULT_FRONT_PAGE=default +#REDLIB_DEFAULT_LAYOUT=card +#REDLIB_DEFAULT_WIDE=off +#REDLIB_DEFAULT_POST_SORT=hot +#REDLIB_DEFAULT_COMMENT_SORT=confidence +#REDLIB_DEFAULT_BLUR_SPOILER=off +#REDLIB_DEFAULT_SHOW_NSFW=off +#REDLIB_DEFAULT_BLUR_NSFW=off +#REDLIB_DEFAULT_USE_HLS=off +#REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION=off +#REDLIB_DEFAULT_AUTOPLAY_VIDEOS=off +#REDLIB_DEFAULT_SUBSCRIPTIONS=(sub1+sub2+sub3) +#REDLIB_DEFAULT_HIDE_AWARDS=off +#REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION=off diff --git a/contrib/redlib.plist b/contrib/redlib.plist new file mode 100644 index 0000000..1fff8e9 --- /dev/null +++ b/contrib/redlib.plist @@ -0,0 +1,19 @@ + + + + + Label + redlib + + Program + redlib + + KeepAlive + + + RunAtLoad + + + + + diff --git a/contrib/libreddit.service b/contrib/redlib.service similarity index 70% rename from contrib/libreddit.service rename to contrib/redlib.service index 8ed5da7..e483460 100644 --- a/contrib/libreddit.service +++ b/contrib/redlib.service @@ -1,15 +1,15 @@ [Unit] -Description=libreddit daemon +Description=redlib daemon After=network.service [Service] DynamicUser=yes # Default Values -Environment=ADDRESS=0.0.0.0 -Environment=PORT=8080 +#Environment=ADDRESS=0.0.0.0 +#Environment=PORT=8080 # Optional Override -EnvironmentFile=-/etc/libreddit.conf -ExecStart=/usr/bin/libreddit -a ${ADDRESS} -p ${PORT} +EnvironmentFile=-/etc/redlib.conf +ExecStart=/usr/bin/redlib -a ${ADDRESS} -p ${PORT} # Hardening DeviceAllow= @@ -30,7 +30,8 @@ RestrictNamespaces=yes RestrictRealtime=yes RestrictSUIDSGID=yes SystemCallArchitectures=native -SystemCallFilter=@system-service ~@privileged ~@resources +SystemCallFilter=@system-service +SystemCallFilter=~@privileged @resources UMask=0077 [Install] diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 2688e9d..0000000 --- a/docker-compose.yml +++ /dev/null @@ -1,13 +0,0 @@ -version: "3.8" - -services: - web: - build: . 
- restart: always - container_name: "libreddit" - ports: - - 8080:8080 - healthcheck: - test: ["CMD", "wget", "--spider", "-q", "--tries=1", "http://localhost:8080/settings"] - interval: 5m - timeout: 3s diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..2b0b585 --- /dev/null +++ b/flake.lock @@ -0,0 +1,98 @@ +{ + "nodes": { + "crane": { + "locked": { + "lastModified": 1731974733, + "narHash": "sha256-enYSSZVVl15FI5p+0Y5/Ckf5DZAvXe6fBrHxyhA/njc=", + "owner": "ipetkov", + "repo": "crane", + "rev": "3cb338ce81076ce5e461cf77f7824476addb0e1c", + "type": "github" + }, + "original": { + "owner": "ipetkov", + "repo": "crane", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1731890469, + "narHash": "sha256-D1FNZ70NmQEwNxpSSdTXCSklBH1z2isPR84J6DQrJGs=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "5083ec887760adfe12af64830a66807423a859a7", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "crane": "crane", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs", + "rust-overlay": "rust-overlay" + } + }, + "rust-overlay": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1732069891, + "narHash": "sha256-moKx8AVJrViCSdA0e0nSsG8b1dAsObI4sRAtbqbvBY8=", + "owner": "oxalica", + "repo": "rust-overlay", + "rev": "8509a51241c407d583b1963d5079585a992506e8", + "type": "github" + }, + "original": { + "owner": "oxalica", + "repo": "rust-overlay", + "type": "github" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..0180c8d --- /dev/null +++ b/flake.nix @@ -0,0 +1,65 @@ +{ + description = "Redlib: Private front-end for Reddit"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; + + crane.url = "github:ipetkov/crane"; + + flake-utils.url = "github:numtide/flake-utils"; + + rust-overlay = { + url = "github:oxalica/rust-overlay"; + inputs.nixpkgs.follows = "nixpkgs"; + }; + }; + + outputs = { nixpkgs, crane, flake-utils, rust-overlay, ... 
}: + flake-utils.lib.eachSystem [ "x86_64-linux" ] (system: + let + pkgs = import nixpkgs { + inherit system; + overlays = [ (import rust-overlay) ]; + }; + + inherit (pkgs) lib; + + rustToolchain = pkgs.rust-bin.stable.latest.default.override { + targets = [ "x86_64-unknown-linux-musl" ]; + }; + + craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; + + + src = lib.cleanSourceWith { + src = craneLib.path ./.; + filter = path: type: + (lib.hasInfix "/templates/" path) || + (lib.hasInfix "/static/" path) || + (craneLib.filterCargoSources path type); + }; + + redlib = craneLib.buildPackage { + inherit src; + strictDeps = true; + doCheck = false; + + CARGO_BUILD_TARGET = "x86_64-unknown-linux-musl"; + CARGO_BUILD_RUSTFLAGS = "-C target-feature=+crt-static"; + }; + in + { + checks = { + my-crate = redlib; + }; + + packages.default = redlib; + packages.docker = pkgs.dockerTools.buildImage { + name = "quay.io/redlib/redlib"; + tag = "latest"; + created = "now"; + copyToRoot = with pkgs.dockerTools; [ caCertificates fakeNss ]; + config.Cmd = "${redlib}/bin/redlib"; + }; + }); +} diff --git a/redlib.container b/redlib.container new file mode 100644 index 0000000..e66051e --- /dev/null +++ b/redlib.container @@ -0,0 +1,16 @@ +[Install] +WantedBy=default.target + +[Container] +AutoUpdate=registry +ContainerName=redlib +DropCapability=ALL +EnvironmentFile=.env +HealthCmd=["wget","--spider","-q","--tries=1","http://localhost:8080/settings"] +HealthInterval=5m +HealthTimeout=3s +Image=quay.io/redlib/redlib:latest +NoNewPrivileges=true +PublishPort=8080:8080 +ReadOnly=true +User=nobody diff --git a/scripts/gen-credits.sh b/scripts/gen-credits.sh index 33ce9f4..825ac0f 100755 --- a/scripts/gen-credits.sh +++ b/scripts/gen-credits.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash # This scripts generates the CREDITS file in the repository root, which -# contains a list of all contributors ot the Libreddit project. +# contains a list of all contributors ot the Redlib project. # # We use git-log to surface the names and emails of all authors and committers, # and grep will filter any automated commits due to GitHub. 
@@ -9,7 +9,7 @@ set -o pipefail cd "$(dirname "${BASH_SOURCE[0]}")/../" || exit 1 -git --no-pager log --pretty='%an <%ae>%n%cn <%ce>' master \ +git --no-pager log --pretty='%an <%ae>%n%cn <%ce>' main \ | sort -t'<' -u -k1,1 -k2,2 \ | grep -Fv -- 'GitHub ' \ > CREDITS diff --git a/scripts/load_test.py b/scripts/load_test.py new file mode 100644 index 0000000..00e4793 --- /dev/null +++ b/scripts/load_test.py @@ -0,0 +1,31 @@ +import requests +from bs4 import BeautifulSoup +from concurrent.futures import ThreadPoolExecutor + +base_url = "http://localhost:8080" + +full_path = f"{base_url}/r/politics" + +ctr = 0 + +def fetch_url(url): + global ctr + response = requests.get(url) + ctr += 1 + print(f"Request count: {ctr}") + return response + +while full_path: + response = requests.get(full_path) + ctr += 1 + print(f"Request count: {ctr}") + soup = BeautifulSoup(response.text, 'html.parser') + comment_links = soup.find_all('a', class_='post_comments') + comment_urls = [base_url + link['href'] for link in comment_links] + with ThreadPoolExecutor(max_workers=10) as executor: + executor.map(fetch_url, comment_urls) + next_link = soup.find('a', accesskey='N') + if next_link: + full_path = base_url + next_link['href'] + else: + break diff --git a/scripts/update_hls_js.sh b/scripts/update_hls_js.sh new file mode 100755 index 0000000..86e2749 --- /dev/null +++ b/scripts/update_hls_js.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +cd "$(dirname "$0")" +LATEST_TAG=$(curl -s https://api.github.com/repos/video-dev/hls.js/releases/latest | jq -r '.tag_name') + +if [[ -z "$LATEST_TAG" || "$LATEST_TAG" == "null" ]]; then + echo "Failed to fetch the latest release tag from GitHub." + exit 1 +fi + +LICENSE="// @license http://www.apache.org/licenses/LICENSE-2.0 Apache-2.0 +// @source https://github.com/video-dev/hls.js/tree/$LATEST_TAG" + +echo "$LICENSE" > ../static/hls.min.js + +curl -s https://cdn.jsdelivr.net/npm/hls.js@${LATEST_TAG}/dist/hls.min.js >> ../static/hls.min.js + +echo "Update complete. The latest hls.js (${LATEST_TAG}) has been saved to static/hls.min.js." diff --git a/scripts/update_oauth_resources.sh b/scripts/update_oauth_resources.sh new file mode 100755 index 0000000..7eeb959 --- /dev/null +++ b/scripts/update_oauth_resources.sh @@ -0,0 +1,112 @@ +#!/bin/bash + +# Requirements +# - curl +# - rg +# - jq + +# Fetch iOS app versions +ios_version_list=$(curl -s "https://ipaarchive.com/app/usa/1064216828" | rg "(20\d{2}\.\d+.\d+) / (\d+)" --only-matching -r "Version \$1/Build \$2" | sort | uniq) + +# Count the number of lines in the version list +ios_app_count=$(echo "$ios_version_list" | wc -l) + +echo -e "Fetching \e[34m$ios_app_count iOS app versions...\e[0m" + + +# Specify the filename as a variable +filename="src/oauth_resources.rs" + +# Add comment that it is user generated +echo "// This file was generated by scripts/update_oauth_resources.sh" > "$filename" +echo "// Rerun scripts/update_oauth_resources.sh to update this file" >> "$filename" +echo "// Please do not edit manually" >> "$filename" +echo "// Filled in with real app versions" >> "$filename" + +# Open the array in the source file +echo "pub const _IOS_APP_VERSION_LIST: &[&str; $ios_app_count] = &[" >> "$filename" + +num=0 + +# Append the version list to the source file +echo "$ios_version_list" | while IFS= read -r line; do + num=$((num+1)) + echo " \"$line\"," >> "$filename" + echo -e "[$num/$ios_app_count] Fetched \e[34m$line\e[0m." 
+done + +# Close the array in the source file +echo "];" >> "$filename" + +# Fetch Android app versions +page_1=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions/" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') +# Append with pages +page_2=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=2" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') +page_3=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=3" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') +page_4=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=4" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') +page_5=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=5" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') + +# Concatenate all pages +versions="${page_1}" +versions+=$'\n' +versions+="${page_2}" +versions+=$'\n' +versions+="${page_3}" +versions+=$'\n' +versions+="${page_4}" +versions+=$'\n' +versions+="${page_5}" + +# Count the number of lines in the version list +android_count=$(echo "$versions" | wc -l) + +echo -e "Fetching \e[32m$android_count Android app versions...\e[0m" + +# Append to the source file +echo "pub const ANDROID_APP_VERSION_LIST: &[&str; $android_count] = &[" >> "$filename" + +num=0 + +# For each in versions, curl the page and extract the build number +echo "$versions" | while IFS= read -r line; do + num=$((num+1)) + fetch_page=$(curl -s "$line") + build=$(echo "$fetch_page" | rg "\((\d+)\)" --only-matching -r "\$1" | head -n1) + version=$(echo "$fetch_page" | rg "Reddit (20\d{2}\.\d+\.\d+)" --only-matching -r "\$1" | head -n1) + echo " \"Version $version/Build $build\"," >> "$filename" + echo -e "[$num/$android_count] Fetched \e[32mVersion $version/Build $build\e[0m." +done + +# Close the array in the source file +echo "];" >> "$filename" + +# Retrieve iOS versions +table=$(curl -s "https://en.wikipedia.org/w/api.php?action=parse&page=IOS_17&prop=wikitext§ion=31&format=json" | jq ".parse.wikitext.\"*\"" | rg "(17\.[\d\.]*)\\\n\|(\w*)\\\n\|" --only-matching -r "Version \$1 (Build \$2)") + +# Count the number of lines in the version list +ios_count=$(echo "$table" | wc -l) + +echo -e "Fetching \e[34m$ios_count iOS versions...\e[0m" + +# Append to the source file +echo "pub const _IOS_OS_VERSION_LIST: &[&str; $ios_count] = &[" >> "$filename" + +num=0 + +# For each in versions, curl the page and extract the build number +echo "$table" | while IFS= read -r line; do + num=$((num+1)) + echo " \"$line\"," >> "$filename" + echo -e "\e[34m[$num/$ios_count] Fetched $line\e[0m." 
+done + +# Close the array in the source file +echo "];" >> "$filename" + +echo -e "\e[34mRetrieved $ios_app_count iOS app versions.\e[0m" +echo -e "\e[32mRetrieved $android_count Android app versions.\e[0m" +echo -e "\e[34mRetrieved $ios_count iOS versions.\e[0m" + +echo -e "\e[34mTotal: $((ios_app_count + android_count + ios_count))\e[0m" + +echo -e "\e[32mSuccess!\e[0m" diff --git a/seccomp-redlib.json b/seccomp-redlib.json new file mode 100644 index 0000000..264c9b7 --- /dev/null +++ b/seccomp-redlib.json @@ -0,0 +1,125 @@ +{ + "defaultAction": "SCMP_ACT_ERRNO", + "archMap": [ + { + "architecture": "SCMP_ARCH_X86_64", + "subArchitectures": [ + "SCMP_ARCH_X86", + "SCMP_ARCH_X32" + ] + }, + { + "architecture": "SCMP_ARCH_AARCH64", + "subArchitectures": [ + "SCMP_ARCH_ARM" + ] + }, + { + "architecture": "SCMP_ARCH_MIPS64", + "subArchitectures": [ + "SCMP_ARCH_MIPS", + "SCMP_ARCH_MIPS64N32" + ] + }, + { + "architecture": "SCMP_ARCH_MIPS64N32", + "subArchitectures": [ + "SCMP_ARCH_MIPS", + "SCMP_ARCH_MIPS64" + ] + }, + { + "architecture": "SCMP_ARCH_MIPSEL64", + "subArchitectures": [ + "SCMP_ARCH_MIPSEL", + "SCMP_ARCH_MIPSEL64N32" + ] + }, + { + "architecture": "SCMP_ARCH_MIPSEL64N32", + "subArchitectures": [ + "SCMP_ARCH_MIPSEL", + "SCMP_ARCH_MIPSEL64" + ] + }, + { + "architecture": "SCMP_ARCH_S390X", + "subArchitectures": [ + "SCMP_ARCH_S390" + ] + } + ], + "syscalls": [ + { + "names": [ + "accept4", + "arch_prctl", + "bind", + "brk", + "clock_gettime", + "clone", + "close", + "connect", + "epoll_create1", + "epoll_ctl", + "epoll_pwait", + "eventfd2", + "execve", + "exit", + "exit_group", + "fcntl", + "flock", + "fork", + "fstat", + "futex", + "getcwd", + "getpeername", + "getpid", + "getrandom", + "getsockname", + "getsockopt", + "getgid", + "getppid", + "gettid", + "getuid", + "ioctl", + "listen", + "lseek", + "madvise", + "mmap", + "mprotect", + "mremap", + "munmap", + "newfstatat", + "open", + "openat", + "prctl", + "poll", + "read", + "recvfrom", + "rt_sigaction", + "rt_sigprocmask", + "rt_sigreturn", + "sched_getaffinity", + "sched_yield", + "sendto", + "setitimer", + "setsockopt", + "set_tid_address", + "shutdown", + "sigaltstack", + "socket", + "socketpair", + "stat", + "wait4", + "write", + "writev" + ], + "action": "SCMP_ACT_ALLOW", + "args": [], + "comment": "", + "includes": {}, + "excludes": {} + } + ] +} diff --git a/src/client.rs b/src/client.rs index 4c174cd..76369ca 100644 --- a/src/client.rs +++ b/src/client.rs @@ -1,24 +1,55 @@ +use arc_swap::ArcSwap; use cached::proc_macro::cached; +use futures_lite::future::block_on; use futures_lite::{future::Boxed, FutureExt}; use hyper::client::HttpConnector; -use hyper::{body, body::Buf, client, header, Body, Client, Method, Request, Response, Uri}; +use hyper::header::HeaderValue; +use hyper::{body, body::Buf, header, Body, Client, Method, Request, Response, Uri}; use hyper_rustls::HttpsConnector; use libflate::gzip; +use log::{error, trace, warn}; use once_cell::sync::Lazy; use percent_encoding::{percent_encode, CONTROLS}; use serde_json::Value; + +use std::sync::atomic::Ordering; +use std::sync::atomic::{AtomicBool, AtomicU16}; use std::{io, result::Result}; use crate::dbg_msg; +use crate::oauth::{force_refresh_token, token_daemon, Oauth}; use crate::server::RequestExt; +use crate::utils::{format_url, Post}; -const REDDIT_URL_BASE: &str = "https://www.reddit.com"; +const REDDIT_URL_BASE: &str = "https://oauth.reddit.com"; +const REDDIT_URL_BASE_HOST: &str = "oauth.reddit.com"; -static CLIENT: Lazy>> = Lazy::new(|| { - let https 
= hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http1().build(); - client::Client::builder().build(https) +const REDDIT_SHORT_URL_BASE: &str = "https://redd.it"; +const REDDIT_SHORT_URL_BASE_HOST: &str = "redd.it"; + +const ALTERNATIVE_REDDIT_URL_BASE: &str = "https://www.reddit.com"; +const ALTERNATIVE_REDDIT_URL_BASE_HOST: &str = "www.reddit.com"; + +pub static HTTPS_CONNECTOR: Lazy> = + Lazy::new(|| hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http2().build()); + +pub static CLIENT: Lazy>> = Lazy::new(|| Client::builder().build::<_, Body>(HTTPS_CONNECTOR.clone())); + +pub static OAUTH_CLIENT: Lazy> = Lazy::new(|| { + let client = block_on(Oauth::new()); + tokio::spawn(token_daemon()); + ArcSwap::new(client.into()) }); +pub static OAUTH_RATELIMIT_REMAINING: AtomicU16 = AtomicU16::new(99); + +pub static OAUTH_IS_ROLLING_OVER: AtomicBool = AtomicBool::new(false); + +const URL_PAIRS: [(&str, &str); 2] = [ + (ALTERNATIVE_REDDIT_URL_BASE, ALTERNATIVE_REDDIT_URL_BASE_HOST), + (REDDIT_SHORT_URL_BASE, REDDIT_SHORT_URL_BASE_HOST), +]; + /// Gets the canonical path for a resource on Reddit. This is accomplished by /// making a `HEAD` request to Reddit at the path given in `path`. /// @@ -32,39 +63,87 @@ static CLIENT: Lazy>> = Lazy::new(|| { /// `Location` header. An `Err(String)` is returned if Reddit responds with a /// 429, or if we were unable to decode the value in the `Location` header. #[cached(size = 1024, time = 600, result = true)] -pub async fn canonical_path(path: String) -> Result, String> { - let res = reddit_head(path.clone(), true).await?; - - if res.status() == 429 { - return Err("Too many requests.".to_string()); - }; - - // If Reddit responds with a 2xx, then the path is already canonical. - if res.status().to_string().starts_with('2') { - return Ok(Some(path)); - } - - // If Reddit responds with anything other than 3xx (except for the 2xx as - // above), return a None. - if !res.status().to_string().starts_with('3') { +#[async_recursion::async_recursion] +pub async fn canonical_path(path: String, tries: i8) -> Result, String> { + if tries == 0 { return Ok(None); } - Ok( + // for each URL pair, try the HEAD request + let res = { + // for url base and host in URL_PAIRS, try reddit_short_head(path.clone(), true, url_base, url_base_host) and if it succeeds, set res. else, res = None + let mut res = None; + for (url_base, url_base_host) in URL_PAIRS { + res = reddit_short_head(path.clone(), true, url_base, url_base_host).await.ok(); + if let Some(res) = &res { + if !res.status().is_client_error() { + break; + } + } + } res - .headers() - .get(header::LOCATION) - .map(|val| percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string()), - ) + }; + + let res = res.ok_or_else(|| "Unable to make HEAD request to Reddit.".to_string())?; + let status = res.status().as_u16(); + let policy_error = res.headers().get(header::RETRY_AFTER).is_some(); + + match status { + // If Reddit responds with a 2xx, then the path is already canonical. + 200..=299 => Ok(Some(path)), + + // If Reddit responds with a 301, then the path is redirected. + 301 => match res.headers().get(header::LOCATION) { + Some(val) => { + let Ok(original) = val.to_str() else { + return Err("Unable to decode Location header.".to_string()); + }; + + // We need to strip the .json suffix from the original path. + // In addition, we want to remove share parameters. 
+ // Cut it off here instead of letting it propagate all the way + // to main.rs + let stripped_uri = original.strip_suffix(".json").unwrap_or(original).split('?').next().unwrap_or_default(); + + // The reason why we now have to format_url, is because the new OAuth + // endpoints seem to return full paths, instead of relative paths. + // So we need to strip the .json suffix from the original path, and + // also remove all Reddit domain parts with format_url. + // Otherwise, it will literally redirect to Reddit.com. + let uri = format_url(stripped_uri); + + // Decrement tries and try again + canonical_path(uri, tries - 1).await + } + None => Ok(None), + }, + + // If Reddit responds with anything other than 3xx (except for the 2xx and 301 + // as above), return a None. + 300..=399 => Ok(None), + + // Rate limiting + 429 => Err("Too many requests.".to_string()), + + // Special condition rate limiting - https://github.com/redlib-org/redlib/issues/229 + 403 if policy_error => Err("Too many requests.".to_string()), + + _ => Ok( + res + .headers() + .get(header::LOCATION) + .map(|val| percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string()), + ), + } } pub async fn proxy(req: Request, format: &str) -> Result, String> { - let mut url = format!("{}?{}", format, req.uri().query().unwrap_or_default()); + let mut url = format!("{format}?{}", req.uri().query().unwrap_or_default()); // For each parameter in request - for (name, value) in req.params().iter() { + for (name, value) in &req.params() { // Fill the parameter value in the url - url = url.replace(&format!("{{{}}}", name), value); + url = url.replace(&format!("{{{name}}}"), value); } stream(&url, &req).await @@ -72,12 +151,12 @@ pub async fn proxy(req: Request, format: &str) -> Result, S async fn stream(url: &str, req: &Request) -> Result, String> { // First parameter is target URL (mandatory). - let uri = url.parse::().map_err(|_| "Couldn't parse URL".to_string())?; + let parsed_uri = url.parse::().map_err(|_| "Couldn't parse URL".to_string())?; // Build the hyper client from the HTTPS connector. - let client: client::Client<_, hyper::Body> = CLIENT.clone(); + let client: &Lazy> = &CLIENT; - let mut builder = Request::get(uri); + let mut builder = Request::get(parsed_uri); // Copy useful headers from original request for &key in &["Range", "If-Modified-Since", "Cache-Control"] { @@ -104,6 +183,8 @@ async fn stream(url: &str, req: &Request) -> Result, String rm("x-cdn-server-region"); rm("x-reddit-cdn"); rm("x-reddit-video-features"); + rm("Nel"); + rm("Report-To"); res }) @@ -113,44 +194,62 @@ async fn stream(url: &str, req: &Request) -> Result, String /// Makes a GET request to Reddit at `path`. By default, this will honor HTTP /// 3xx codes Reddit returns and will automatically redirect. fn reddit_get(path: String, quarantine: bool) -> Boxed, String>> { - request(&Method::GET, path, true, quarantine) + request(&Method::GET, path, true, quarantine, REDDIT_URL_BASE, REDDIT_URL_BASE_HOST) } -/// Makes a HEAD request to Reddit at `path`. This will not follow redirects. -fn reddit_head(path: String, quarantine: bool) -> Boxed, String>> { - request(&Method::HEAD, path, false, quarantine) +/// Makes a HEAD request to Reddit at `path, using the short URL base. This will not follow redirects. 
+fn reddit_short_head(path: String, quarantine: bool, base_path: &'static str, host: &'static str) -> Boxed, String>> { + request(&Method::HEAD, path, false, quarantine, base_path, host) } -/// Makes a request to Reddit. If `redirect` is `true`, request_with_redirect +// /// Makes a HEAD request to Reddit at `path`. This will not follow redirects. +// fn reddit_head(path: String, quarantine: bool) -> Boxed, String>> { +// request(&Method::HEAD, path, false, quarantine, false) +// } +// Unused - reddit_head is only ever called in the context of a short URL + +/// Makes a request to Reddit. If `redirect` is `true`, `request_with_redirect` /// will recurse on the URL that Reddit provides in the Location HTTP header /// in its response. -fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool) -> Boxed, String>> { +fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool, base_path: &'static str, host: &'static str) -> Boxed, String>> { // Build Reddit URL from path. - let url = format!("{}{}", REDDIT_URL_BASE, path); + let url = format!("{base_path}{path}"); // Construct the hyper client from the HTTPS connector. - let client: client::Client<_, hyper::Body> = CLIENT.clone(); + let client: &Lazy> = &CLIENT; // Build request to Reddit. When making a GET, request gzip compression. // (Reddit doesn't do brotli yet.) - let builder = Request::builder() - .method(method) - .uri(&url) - .header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"))) - .header("Host", "www.reddit.com") - .header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8") - .header("Accept-Encoding", if method == Method::GET { "gzip" } else { "identity" }) - .header("Accept-Language", "en-US,en;q=0.5") - .header("Connection", "keep-alive") - .header( - "Cookie", + let mut headers: Vec<(String, String)> = vec![ + ("Host".into(), host.into()), + ("Accept-Encoding".into(), if method == Method::GET { "gzip".into() } else { "identity".into() }), + ( + "Cookie".into(), if quarantine { - "_options=%7B%22pref_quarantine_optin%22%3A%20true%2C%20%22pref_gated_sr_optin%22%3A%20true%7D" + "_options=%7B%22pref_quarantine_optin%22%3A%20true%2C%20%22pref_gated_sr_optin%22%3A%20true%7D".into() } else { - "" + "".into() }, - ) - .body(Body::empty()); + ), + ]; + + { + let client = OAUTH_CLIENT.load_full(); + for (key, value) in client.headers_map.clone() { + headers.push((key, value)); + } + } + + // shuffle headers: https://github.com/redlib-org/redlib/issues/324 + fastrand::shuffle(&mut headers); + + let mut builder = Request::builder().method(method).uri(&url); + + for (key, value) in headers { + builder = builder.header(key, value); + } + + let builder = builder.body(Body::empty()); async move { match builder { @@ -158,16 +257,17 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo Ok(mut response) => { // Reddit may respond with a 3xx. Decide whether or not to // redirect based on caller params. 
- if response.status().to_string().starts_with('3') { + if response.status().is_redirection() { if !redirect { return Ok(response); }; - + let location_header = response.headers().get(header::LOCATION); + if location_header == Some(&HeaderValue::from_static(ALTERNATIVE_REDDIT_URL_BASE)) { + return Err("Reddit response was invalid".to_string()); + } return request( method, - response - .headers() - .get(header::LOCATION) + location_header .map(|val| { // We need to make adjustments to the URI // we get back from Reddit. Namely, we @@ -180,13 +280,19 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo // required. // // 2. Percent-encode the path. - let new_path = percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string(); - format!("{}{}raw_json=1", new_path, if new_path.contains('?') { "&" } else { "?" }) + let new_path = percent_encode(val.as_bytes(), CONTROLS) + .to_string() + .trim_start_matches(REDDIT_URL_BASE) + .trim_start_matches(ALTERNATIVE_REDDIT_URL_BASE) + .to_string(); + format!("{new_path}{}raw_json=1", if new_path.contains('?') { "&" } else { "?" }) }) .unwrap_or_default() .to_string(), true, quarantine, + base_path, + host, ) .await; }; @@ -239,7 +345,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo } } Err(e) => { - dbg_msg!("{} {}: {}", method, path, e); + dbg_msg!("{method} {REDDIT_URL_BASE}{path}: {}", e); Err(e.to_string()) } @@ -254,52 +360,208 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo #[cached(size = 100, time = 30, result = true)] pub async fn json(path: String, quarantine: bool) -> Result { // Closure to quickly build errors - let err = |msg: &str, e: String| -> Result { + let err = |msg: &str, e: String, path: String| -> Result { // eprintln!("{} - {}: {}", url, msg, e); - Err(format!("{}: {}", msg, e)) + Err(format!("{msg}: {e} | {path}")) }; + // First, handle rolling over the OAUTH_CLIENT if need be. + let current_rate_limit = OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst); + let is_rolling_over = OAUTH_IS_ROLLING_OVER.load(Ordering::SeqCst); + if current_rate_limit < 10 && !is_rolling_over { + warn!("Rate limit {current_rate_limit} is low. Spawning force_refresh_token()"); + tokio::spawn(force_refresh_token()); + } + OAUTH_RATELIMIT_REMAINING.fetch_sub(1, Ordering::SeqCst); + // Fetch the url... match reddit_get(path.clone(), quarantine).await { Ok(response) => { let status = response.status(); + let reset: Option = if let (Some(remaining), Some(reset), Some(used)) = ( + response.headers().get("x-ratelimit-remaining").and_then(|val| val.to_str().ok().map(|s| s.to_string())), + response.headers().get("x-ratelimit-reset").and_then(|val| val.to_str().ok().map(|s| s.to_string())), + response.headers().get("x-ratelimit-used").and_then(|val| val.to_str().ok().map(|s| s.to_string())), + ) { + trace!( + "Ratelimit remaining: Header says {remaining}, we have {current_rate_limit}. Resets in {reset}. Rollover: {}. 
Ratelimit used: {used}", + if is_rolling_over { "yes" } else { "no" }, + ); + + // If can parse remaining as a float, round to a u16 and save + if let Ok(val) = remaining.parse::() { + OAUTH_RATELIMIT_REMAINING.store(val.round() as u16, Ordering::SeqCst); + } + + Some(reset) + } else { + None + }; + // asynchronously aggregate the chunks of the body match hyper::body::aggregate(response).await { Ok(body) => { + let has_remaining = body.has_remaining(); + + if !has_remaining { + // Rate limited, so spawn a force_refresh_token() + tokio::spawn(force_refresh_token()); + return match reset { + Some(val) => Err(format!( + "Reddit rate limit exceeded. Try refreshing in a few seconds.\ + Rate limit will reset in: {val}" + )), + None => Err("Reddit rate limit exceeded".to_string()), + }; + } + // Parse the response from Reddit as JSON match serde_json::from_reader(body.reader()) { Ok(value) => { let json: Value = value; + + // If user is suspended + if let Some(data) = json.get("data") { + if let Some(is_suspended) = data.get("is_suspended").and_then(Value::as_bool) { + if is_suspended { + return Err("suspended".into()); + } + } + } + // If Reddit returned an error if json["error"].is_i64() { - Err( - json["reason"] - .as_str() - .unwrap_or_else(|| { - json["message"].as_str().unwrap_or_else(|| { - eprintln!("{}{} - Error parsing reddit error", REDDIT_URL_BASE, path); - "Error parsing reddit error" - }) - }) - .to_string(), - ) + // OAuth token has expired; http status 401 + if json["message"] == "Unauthorized" { + error!("Forcing a token refresh"); + let () = force_refresh_token().await; + return Err("OAuth token has expired. Please refresh the page!".to_string()); + } + + // Handle quarantined + if json["reason"] == "quarantined" { + return Err("quarantined".into()); + } + // Handle gated + if json["reason"] == "gated" { + return Err("gated".into()); + } + // Handle private subs + if json["reason"] == "private" { + return Err("private".into()); + } + // Handle banned subs + if json["reason"] == "banned" { + return Err("banned".into()); + } + + Err(format!("Reddit error {} \"{}\": {} | {path}", json["error"], json["reason"], json["message"])) } else { Ok(json) } } Err(e) => { + error!("Got an invalid response from reddit {e}. Status code: {status}"); if status.is_server_error() { Err("Reddit is having issues, check if there's an outage".to_string()) } else { - err("Failed to parse page JSON data", e.to_string()) + err("Failed to parse page JSON data", e.to_string(), path) } } } } - Err(e) => err("Failed receiving body from Reddit", e.to_string()), + Err(e) => err("Failed receiving body from Reddit", e.to_string(), path), } } - Err(e) => err("Couldn't send request to Reddit", e), + Err(e) => err("Couldn't send request to Reddit", e, path), } } + +async fn self_check(sub: &str) -> Result<(), String> { + let query = format!("/r/{sub}/hot.json?&raw_json=1"); + + match Post::fetch(&query, true).await { + Ok(_) => Ok(()), + Err(e) => Err(e), + } +} + +pub async fn rate_limit_check() -> Result<(), String> { + // First, check a subreddit. + self_check("reddit").await?; + // This will reduce the rate limit to 99. Assert this check. + if OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst) != 99 { + return Err(format!("Rate limit check failed: expected 99, got {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst))); + } + // Now, we switch out the OAuth client. + // This checks for the IP rate limit association. + force_refresh_token().await; + // Now, check a new sub to break cache. 
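// (json() is memoized with the #[cached] attribute above, so repeating the r/reddit
// request would be answered from cache and would never reach the freshly rotated
// OAuth client; querying a different subreddit forces a real round trip.)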
+ self_check("rust").await?; + // Again, assert the rate limit check. + if OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst) != 99 { + return Err(format!("Rate limit check failed: expected 99, got {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst))); + } + + Ok(()) +} + +#[cfg(test)] +use {crate::config::get_setting, sealed_test::prelude::*}; + +#[tokio::test(flavor = "multi_thread")] +async fn test_rate_limit_check() { + rate_limit_check().await.unwrap(); +} + +#[test] +#[sealed_test(env = [("REDLIB_DEFAULT_SUBSCRIPTIONS", "rust")])] +fn test_default_subscriptions() { + tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap().block_on(async { + let subscriptions = get_setting("REDLIB_DEFAULT_SUBSCRIPTIONS"); + assert!(subscriptions.is_some()); + + // check rate limit + rate_limit_check().await.unwrap(); + }); +} + +#[cfg(test)] +const POPULAR_URL: &str = "/r/popular/hot.json?&raw_json=1&geo_filter=GLOBAL"; + +#[tokio::test(flavor = "multi_thread")] +async fn test_localization_popular() { + let val = json(POPULAR_URL.to_string(), false).await.unwrap(); + assert_eq!("GLOBAL", val["data"]["geo_filter"].as_str().unwrap()); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_obfuscated_share_link() { + let share_link = "/r/rust/s/kPgq8WNHRK".into(); + // Correct link without share parameters + let canonical_link = "/r/rust/comments/18t5968/why_use_tuple_struct_over_standard_struct/kfbqlbc/".into(); + assert_eq!(canonical_path(share_link, 3).await, Ok(Some(canonical_link))); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_private_sub() { + let link = json("/r/suicide/about.json?raw_json=1".into(), true).await; + assert!(link.is_err()); + assert_eq!(link, Err("private".into())); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_banned_sub() { + let link = json("/r/aaa/about.json?raw_json=1".into(), true).await; + assert!(link.is_err()); + assert_eq!(link, Err("banned".into())); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_gated_sub() { + // quarantine to false to specifically catch when we _don't_ catch it + let link = json("/r/drugs/about.json?raw_json=1".into(), false).await; + assert!(link.is_err()); + assert_eq!(link, Err("gated".into())); +} diff --git a/src/config.rs b/src/config.rs index b552504..7b1c95c 100644 --- a/src/config.rs +++ b/src/config.rs @@ -7,59 +7,111 @@ use std::{env::var, fs::read_to_string}; // // This is the local static that is initialized at runtime (technically at // first request) and contains the instance settings. -pub(crate) static CONFIG: Lazy = Lazy::new(Config::load); +pub static CONFIG: Lazy = Lazy::new(Config::load); + +// This serves as the frontend for an archival API - on removed comments, this URL +// will be the base of a link, to display removed content (on another site). +pub const DEFAULT_PUSHSHIFT_FRONTEND: &str = "undelete.pullpush.io"; /// Stores the configuration parsed from the environment variables and the /// config file. `Config::Default()` contains None for each setting. /// When adding more config settings, add it to `Config::load`, /// `get_setting_from_config`, both below, as well as -/// instance_info::InstanceInfo.to_string(), README.md and app.json. -#[derive(Default, Serialize, Deserialize, Clone)] +/// `instance_info::InstanceInfo.to_string`(), README.md and app.json. 
+#[derive(Default, Serialize, Deserialize, Clone, Debug)] pub struct Config { - #[serde(rename = "LIBREDDIT_SFW_ONLY")] + #[serde(rename = "REDLIB_SFW_ONLY")] + #[serde(alias = "LIBREDDIT_SFW_ONLY")] pub(crate) sfw_only: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_THEME")] + #[serde(rename = "REDLIB_DEFAULT_THEME")] + #[serde(alias = "LIBREDDIT_DEFAULT_THEME")] pub(crate) default_theme: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_FRONT_PAGE")] + #[serde(rename = "REDLIB_DEFAULT_FRONT_PAGE")] + #[serde(alias = "LIBREDDIT_DEFAULT_FRONT_PAGE")] pub(crate) default_front_page: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_LAYOUT")] + #[serde(rename = "REDLIB_DEFAULT_LAYOUT")] + #[serde(alias = "LIBREDDIT_DEFAULT_LAYOUT")] pub(crate) default_layout: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_WIDE")] + #[serde(rename = "REDLIB_DEFAULT_WIDE")] + #[serde(alias = "LIBREDDIT_DEFAULT_WIDE")] pub(crate) default_wide: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_COMMENT_SORT")] + #[serde(rename = "REDLIB_DEFAULT_COMMENT_SORT")] + #[serde(alias = "LIBREDDIT_DEFAULT_COMMENT_SORT")] pub(crate) default_comment_sort: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_POST_SORT")] + #[serde(rename = "REDLIB_DEFAULT_POST_SORT")] + #[serde(alias = "LIBREDDIT_DEFAULT_POST_SORT")] pub(crate) default_post_sort: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_SHOW_NSFW")] + #[serde(rename = "REDLIB_DEFAULT_BLUR_SPOILER")] + #[serde(alias = "LIBREDDIT_DEFAULT_BLUR_SPOILER")] + pub(crate) default_blur_spoiler: Option, + + #[serde(rename = "REDLIB_DEFAULT_SHOW_NSFW")] + #[serde(alias = "LIBREDDIT_DEFAULT_SHOW_NSFW")] pub(crate) default_show_nsfw: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_BLUR_NSFW")] + #[serde(rename = "REDLIB_DEFAULT_BLUR_NSFW")] + #[serde(alias = "LIBREDDIT_DEFAULT_BLUR_NSFW")] pub(crate) default_blur_nsfw: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_USE_HLS")] + #[serde(rename = "REDLIB_DEFAULT_USE_HLS")] + #[serde(alias = "LIBREDDIT_DEFAULT_USE_HLS")] pub(crate) default_use_hls: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION")] + #[serde(rename = "REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION")] + #[serde(alias = "LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION")] pub(crate) default_hide_hls_notification: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_HIDE_AWARDS")] + #[serde(rename = "REDLIB_DEFAULT_HIDE_AWARDS")] + #[serde(alias = "LIBREDDIT_DEFAULT_HIDE_AWARDS")] pub(crate) default_hide_awards: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_SUBSCRIPTIONS")] + #[serde(rename = "REDLIB_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY")] + #[serde(alias = "LIBREDDIT_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY")] + pub(crate) default_hide_sidebar_and_summary: Option, + + #[serde(rename = "REDLIB_DEFAULT_HIDE_SCORE")] + #[serde(alias = "LIBREDDIT_DEFAULT_HIDE_SCORE")] + pub(crate) default_hide_score: Option, + + #[serde(rename = "REDLIB_DEFAULT_SUBSCRIPTIONS")] + #[serde(alias = "LIBREDDIT_DEFAULT_SUBSCRIPTIONS")] pub(crate) default_subscriptions: Option, - #[serde(rename = "LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION")] + #[serde(rename = "REDLIB_DEFAULT_FILTERS")] + #[serde(alias = "LIBREDDIT_DEFAULT_FILTERS")] + pub(crate) default_filters: Option, + + #[serde(rename = "REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION")] + #[serde(alias = "LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION")] pub(crate) default_disable_visit_reddit_confirmation: Option, - #[serde(rename = "LIBREDDIT_BANNER")] + #[serde(rename = "REDLIB_BANNER")] + #[serde(alias = "LIBREDDIT_BANNER")] pub(crate) banner: Option, + + 
#[serde(rename = "REDLIB_ROBOTS_DISABLE_INDEXING")] + #[serde(alias = "LIBREDDIT_ROBOTS_DISABLE_INDEXING")] + pub(crate) robots_disable_indexing: Option, + + #[serde(rename = "REDLIB_PUSHSHIFT_FRONTEND")] + #[serde(alias = "LIBREDDIT_PUSHSHIFT_FRONTEND")] + pub(crate) pushshift: Option, + + #[serde(rename = "REDLIB_ENABLE_RSS")] + pub(crate) enable_rss: Option, + + #[serde(rename = "REDLIB_FULL_URL")] + pub(crate) full_url: Option, + + #[serde(rename = "REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS")] + pub(crate) default_remove_default_feeds: Option, } impl Config { @@ -67,56 +119,83 @@ impl Config { /// In the case that there are no environment variables set and there is no /// config file, this function returns a Config that contains all None values. pub fn load() -> Self { - // Read from libreddit.toml config file. If for any reason, it fails, the - // default `Config` is used (all None values) - let config: Config = toml::from_str(&read_to_string("libreddit.toml").unwrap_or_default()).unwrap_or_default(); + let load_config = |name: &str| { + let new_file = read_to_string(name); + new_file.ok().and_then(|new_file| toml::from_str::(&new_file).ok()) + }; + + let config = load_config("redlib.toml").or_else(|| load_config("libreddit.toml")).unwrap_or_default(); + // This function defines the order of preference - first check for - // environment variables with "LIBREDDIT", then check the config, then if - // both are `None`, return a `None` via the `map_or_else` function - let parse = |key: &str| -> Option { var(key).ok().map_or_else(|| get_setting_from_config(key, &config), Some) }; + // environment variables with "REDLIB", then check the legacy LIBREDDIT + // option, then check the config, then if all are `None`, return a `None` + let parse = |key: &str| -> Option { + // Return the first non-`None` value + // If all are `None`, return `None` + let legacy_key = key.replace("REDLIB_", "LIBREDDIT_"); + var(key).ok().or_else(|| var(legacy_key).ok()).or_else(|| get_setting_from_config(key, &config)) + }; Self { - sfw_only: parse("LIBREDDIT_SFW_ONLY"), - default_theme: parse("LIBREDDIT_DEFAULT_THEME"), - default_front_page: parse("LIBREDDIT_DEFAULT_FRONT_PAGE"), - default_layout: parse("LIBREDDIT_DEFAULT_LAYOUT"), - default_post_sort: parse("LIBREDDIT_DEFAULT_POST_SORT"), - default_wide: parse("LIBREDDIT_DEFAULT_WIDE"), - default_comment_sort: parse("LIBREDDIT_DEFAULT_COMMENT_SORT"), - default_show_nsfw: parse("LIBREDDIT_DEFAULT_SHOW_NSFW"), - default_blur_nsfw: parse("LIBREDDIT_DEFAULT_BLUR_NSFW"), - default_use_hls: parse("LIBREDDIT_DEFAULT_USE_HLS"), - default_hide_hls_notification: parse("LIBREDDIT_DEFAULT_HIDE_HLS"), - default_hide_awards: parse("LIBREDDIT_DEFAULT_HIDE_AWARDS"), - default_subscriptions: parse("LIBREDDIT_DEFAULT_SUBSCRIPTIONS"), - default_disable_visit_reddit_confirmation: parse("LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION"), - banner: parse("LIBREDDIT_BANNER"), + sfw_only: parse("REDLIB_SFW_ONLY"), + default_theme: parse("REDLIB_DEFAULT_THEME"), + default_front_page: parse("REDLIB_DEFAULT_FRONT_PAGE"), + default_layout: parse("REDLIB_DEFAULT_LAYOUT"), + default_post_sort: parse("REDLIB_DEFAULT_POST_SORT"), + default_wide: parse("REDLIB_DEFAULT_WIDE"), + default_comment_sort: parse("REDLIB_DEFAULT_COMMENT_SORT"), + default_blur_spoiler: parse("REDLIB_DEFAULT_BLUR_SPOILER"), + default_show_nsfw: parse("REDLIB_DEFAULT_SHOW_NSFW"), + default_blur_nsfw: parse("REDLIB_DEFAULT_BLUR_NSFW"), + default_use_hls: parse("REDLIB_DEFAULT_USE_HLS"), + default_hide_hls_notification: 
parse("REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION"), + default_hide_awards: parse("REDLIB_DEFAULT_HIDE_AWARDS"), + default_hide_sidebar_and_summary: parse("REDLIB_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY"), + default_hide_score: parse("REDLIB_DEFAULT_HIDE_SCORE"), + default_subscriptions: parse("REDLIB_DEFAULT_SUBSCRIPTIONS"), + default_filters: parse("REDLIB_DEFAULT_FILTERS"), + default_disable_visit_reddit_confirmation: parse("REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION"), + banner: parse("REDLIB_BANNER"), + robots_disable_indexing: parse("REDLIB_ROBOTS_DISABLE_INDEXING"), + pushshift: parse("REDLIB_PUSHSHIFT_FRONTEND"), + enable_rss: parse("REDLIB_ENABLE_RSS"), + full_url: parse("REDLIB_FULL_URL"), + default_remove_default_feeds: parse("REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS"), } } } fn get_setting_from_config(name: &str, config: &Config) -> Option { match name { - "LIBREDDIT_SFW_ONLY" => config.sfw_only.clone(), - "LIBREDDIT_DEFAULT_THEME" => config.default_theme.clone(), - "LIBREDDIT_DEFAULT_FRONT_PAGE" => config.default_front_page.clone(), - "LIBREDDIT_DEFAULT_LAYOUT" => config.default_layout.clone(), - "LIBREDDIT_DEFAULT_COMMENT_SORT" => config.default_comment_sort.clone(), - "LIBREDDIT_DEFAULT_POST_SORT" => config.default_post_sort.clone(), - "LIBREDDIT_DEFAULT_SHOW_NSFW" => config.default_show_nsfw.clone(), - "LIBREDDIT_DEFAULT_BLUR_NSFW" => config.default_blur_nsfw.clone(), - "LIBREDDIT_DEFAULT_USE_HLS" => config.default_use_hls.clone(), - "LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION" => config.default_hide_hls_notification.clone(), - "LIBREDDIT_DEFAULT_WIDE" => config.default_wide.clone(), - "LIBREDDIT_DEFAULT_HIDE_AWARDS" => config.default_hide_awards.clone(), - "LIBREDDIT_DEFAULT_SUBSCRIPTIONS" => config.default_subscriptions.clone(), - "LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION" => config.default_disable_visit_reddit_confirmation.clone(), - "LIBREDDIT_BANNER" => config.banner.clone(), + "REDLIB_SFW_ONLY" => config.sfw_only.clone(), + "REDLIB_DEFAULT_THEME" => config.default_theme.clone(), + "REDLIB_DEFAULT_FRONT_PAGE" => config.default_front_page.clone(), + "REDLIB_DEFAULT_LAYOUT" => config.default_layout.clone(), + "REDLIB_DEFAULT_COMMENT_SORT" => config.default_comment_sort.clone(), + "REDLIB_DEFAULT_POST_SORT" => config.default_post_sort.clone(), + "REDLIB_DEFAULT_BLUR_SPOILER" => config.default_blur_spoiler.clone(), + "REDLIB_DEFAULT_SHOW_NSFW" => config.default_show_nsfw.clone(), + "REDLIB_DEFAULT_BLUR_NSFW" => config.default_blur_nsfw.clone(), + "REDLIB_DEFAULT_USE_HLS" => config.default_use_hls.clone(), + "REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION" => config.default_hide_hls_notification.clone(), + "REDLIB_DEFAULT_WIDE" => config.default_wide.clone(), + "REDLIB_DEFAULT_HIDE_AWARDS" => config.default_hide_awards.clone(), + "REDLIB_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY" => config.default_hide_sidebar_and_summary.clone(), + "REDLIB_DEFAULT_HIDE_SCORE" => config.default_hide_score.clone(), + "REDLIB_DEFAULT_SUBSCRIPTIONS" => config.default_subscriptions.clone(), + "REDLIB_DEFAULT_FILTERS" => config.default_filters.clone(), + "REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION" => config.default_disable_visit_reddit_confirmation.clone(), + "REDLIB_BANNER" => config.banner.clone(), + "REDLIB_ROBOTS_DISABLE_INDEXING" => config.robots_disable_indexing.clone(), + "REDLIB_PUSHSHIFT_FRONTEND" => config.pushshift.clone(), + "REDLIB_ENABLE_RSS" => config.enable_rss.clone(), + "REDLIB_FULL_URL" => config.full_url.clone(), + "REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS" => 
config.default_remove_default_feeds.clone(), _ => None, } } /// Retrieves setting from environment variable or config file. -pub(crate) fn get_setting(name: &str) -> Option { +pub fn get_setting(name: &str) -> Option { get_setting_from_config(name, &CONFIG) } @@ -124,7 +203,14 @@ pub(crate) fn get_setting(name: &str) -> Option { use {sealed_test::prelude::*, std::fs::write}; #[test] -#[sealed_test(env = [("LIBREDDIT_SFW_ONLY", "on")])] +fn test_deserialize() { + // Must handle empty input + let result = toml::from_str::(""); + assert!(result.is_ok(), "Error: {}", result.unwrap_err()); +} + +#[test] +#[sealed_test(env = [("REDLIB_SFW_ONLY", "on")])] fn test_env_var() { assert!(crate::utils::sfw_only()) } @@ -132,28 +218,57 @@ fn test_env_var() { #[test] #[sealed_test] fn test_config() { - let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#; - write("libreddit.toml", config_to_write).unwrap(); - assert_eq!(get_setting("LIBREDDIT_DEFAULT_COMMENT_SORT"), Some("best".into())); + let config_to_write = r#"REDLIB_DEFAULT_COMMENT_SORT = "best""#; + write("redlib.toml", config_to_write).unwrap(); + assert_eq!(get_setting("REDLIB_DEFAULT_COMMENT_SORT"), Some("best".into())); } #[test] -#[sealed_test(env = [("LIBREDDIT_DEFAULT_COMMENT_SORT", "top")])] +#[sealed_test] +fn test_config_legacy() { + let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#; + write("libreddit.toml", config_to_write).unwrap(); + assert_eq!(get_setting("REDLIB_DEFAULT_COMMENT_SORT"), Some("best".into())); +} + +#[test] +#[sealed_test(env = [("LIBREDDIT_SFW_ONLY", "on")])] +fn test_env_var_legacy() { + assert!(crate::utils::sfw_only()) +} + +#[test] +#[sealed_test(env = [("REDLIB_DEFAULT_COMMENT_SORT", "top")])] fn test_env_config_precedence() { - let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#; - write("libreddit.toml", config_to_write).unwrap(); - assert_eq!(get_setting("LIBREDDIT_DEFAULT_COMMENT_SORT"), Some("top".into())) + let config_to_write = r#"REDLIB_DEFAULT_COMMENT_SORT = "best""#; + write("redlib.toml", config_to_write).unwrap(); + assert_eq!(get_setting("REDLIB_DEFAULT_COMMENT_SORT"), Some("top".into())) } #[test] -#[sealed_test(env = [("LIBREDDIT_DEFAULT_COMMENT_SORT", "top")])] +#[sealed_test(env = [("REDLIB_DEFAULT_COMMENT_SORT", "top")])] fn test_alt_env_config_precedence() { - let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#; - write("libreddit.toml", config_to_write).unwrap(); - assert_eq!(get_setting("LIBREDDIT_DEFAULT_COMMENT_SORT"), Some("top".into())) + let config_to_write = r#"REDLIB_DEFAULT_COMMENT_SORT = "best""#; + write("redlib.toml", config_to_write).unwrap(); + assert_eq!(get_setting("REDLIB_DEFAULT_COMMENT_SORT"), Some("top".into())) } #[test] -#[sealed_test(env = [("LIBREDDIT_DEFAULT_SUBSCRIPTIONS", "news+bestof")])] +#[sealed_test(env = [("REDLIB_DEFAULT_SUBSCRIPTIONS", "news+bestof")])] fn test_default_subscriptions() { - assert_eq!(get_setting("LIBREDDIT_DEFAULT_SUBSCRIPTIONS"), Some("news+bestof".into())); + assert_eq!(get_setting("REDLIB_DEFAULT_SUBSCRIPTIONS"), Some("news+bestof".into())); +} + +#[test] +#[sealed_test(env = [("REDLIB_DEFAULT_FILTERS", "news+bestof")])] +fn test_default_filters() { + assert_eq!(get_setting("REDLIB_DEFAULT_FILTERS"), Some("news+bestof".into())); +} + +#[test] +#[sealed_test] +fn test_pushshift() { + let config_to_write = r#"REDLIB_PUSHSHIFT_FRONTEND = "https://api.pushshift.io""#; + write("redlib.toml", config_to_write).unwrap(); + assert!(get_setting("REDLIB_PUSHSHIFT_FRONTEND").is_some()); + 
assert_eq!(get_setting("REDLIB_PUSHSHIFT_FRONTEND"), Some("https://api.pushshift.io".into())); } diff --git a/src/duplicates.rs b/src/duplicates.rs index d747d46..b533198 100644 --- a/src/duplicates.rs +++ b/src/duplicates.rs @@ -3,23 +3,23 @@ use crate::client::json; use crate::server::RequestExt; use crate::subreddit::{can_access_quarantine, quarantine}; -use crate::utils::{error, filter_posts, get_filters, nsfw_landing, parse_post, setting, template, Post, Preferences}; +use crate::utils::{error, filter_posts, get_filters, nsfw_landing, parse_post, template, Post, Preferences}; -use askama::Template; use hyper::{Body, Request, Response}; +use rinja::Template; use serde_json::Value; use std::borrow::ToOwned; use std::collections::HashSet; use std::vec::Vec; -/// DuplicatesParams contains the parameters in the URL. +/// `DuplicatesParams` contains the parameters in the URL. struct DuplicatesParams { before: String, after: String, sort: String, } -/// DuplicatesTemplate defines an Askama template for rendering duplicate +/// `DuplicatesTemplate` defines an Askama template for rendering duplicate /// posts. #[derive(Template)] #[template(path = "duplicates.html")] @@ -59,7 +59,7 @@ pub async fn item(req: Request) -> Result, String> { // Log the request in debugging mode #[cfg(debug_assertions)] - dbg!(req.param("id").unwrap_or_default()); + req.param("id").unwrap_or_default(); // Send the GET, and await JSON. match json(path, quarantined).await { @@ -67,11 +67,12 @@ pub async fn item(req: Request) -> Result, String> { Ok(response) => { let post = parse_post(&response[0]["data"]["children"][0]).await; + let req_url = req.uri().to_string(); // Return landing page if this post if this Reddit deems this post // NSFW, but we have also disabled the display of NSFW content - // or if the instance is SFW-only. - if post.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) { - return Ok(nsfw_landing(req).await.unwrap_or_default()); + // or if the instance is SFW-only + if post.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) { + return Ok(nsfw_landing(req, req_url).await.unwrap_or_default()); } let filters = get_filters(&req); @@ -150,7 +151,7 @@ pub async fn item(req: Request) -> Result, String> { } if have_after { - before = "t3_".to_owned(); + "t3_".clone_into(&mut before); before.push_str(&duplicates[0].id); } @@ -160,7 +161,7 @@ pub async fn item(req: Request) -> Result, String> { if have_before { // The next batch will need to start from one after the // last post in the current batch. - after = "t3_".to_owned(); + "t3_".clone_into(&mut after); after.push_str(&duplicates[l - 1].id); // Here is where things get terrible. Notice that we @@ -181,52 +182,51 @@ pub async fn item(req: Request) -> Result, String> { match json(new_path, true).await { Ok(response) => { if !response[1]["data"]["children"].as_array().unwrap_or(&Vec::new()).is_empty() { - before = "t3_".to_owned(); + "t3_".clone_into(&mut before); before.push_str(&duplicates[0].id); } } Err(msg) => { // Abort entirely if we couldn't get the previous // batch. - return error(req, msg).await; + return error(req, &msg).await; } } } else { after = response[1]["data"]["after"].as_str().unwrap_or_default().to_string(); } } - let url = req.uri().to_string(); - template(DuplicatesTemplate { + Ok(template(&DuplicatesTemplate { params: DuplicatesParams { before, after, sort }, post, duplicates, prefs: Preferences::new(&req), - url, + url: req_url, num_posts_filtered, all_posts_filtered, - }) + })) } // Process error. 
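// The error string returned by json() doubles as a routing code here: "quarantined"
// and "gated" send the user to the quarantine opt-in page, while any other message
// falls through to the generic error template.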
Err(msg) => { if msg == "quarantined" || msg == "gated" { let sub = req.param("sub").unwrap_or_default(); - quarantine(req, sub, msg) + Ok(quarantine(&req, sub, &msg)) } else { - error(req, msg).await + error(req, &msg).await } } } } // DUPLICATES -async fn parse_duplicates(json: &serde_json::Value, filters: &HashSet) -> (Vec, u64, bool) { +async fn parse_duplicates(json: &Value, filters: &HashSet) -> (Vec, u64, bool) { let post_duplicates: &Vec = &json["data"]["children"].as_array().map_or(Vec::new(), ToOwned::to_owned); let mut duplicates: Vec = Vec::new(); // Process each post and place them in the Vec. - for val in post_duplicates.iter() { + for val in post_duplicates { let post: Post = parse_post(val).await; duplicates.push(post); } diff --git a/src/instance_info.rs b/src/instance_info.rs index f61796c..a573953 100644 --- a/src/instance_info.rs +++ b/src/instance_info.rs @@ -3,28 +3,28 @@ use crate::{ server::RequestExt, utils::{ErrorTemplate, Preferences}, }; -use askama::Template; use build_html::{Container, Html, HtmlContainer, Table}; use hyper::{http::Error, Body, Request, Response}; use once_cell::sync::Lazy; +use rinja::Template; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; // This is the local static that is intialized at runtime (technically at // the first request to the info endpoint) and contains the data // retrieved from the info endpoint. -pub(crate) static INSTANCE_INFO: Lazy = Lazy::new(InstanceInfo::new); +pub static INSTANCE_INFO: Lazy = Lazy::new(InstanceInfo::new); /// Handles instance info endpoint pub async fn instance_info(req: Request) -> Result, String> { // This will retrieve the extension given, or create a new string - which will // simply become the last option, an HTML page. - let extension = req.param("extension").unwrap_or(String::new()); + let extension = req.param("extension").unwrap_or_default(); let response = match extension.as_str() { "yaml" | "yml" => info_yaml(), "txt" => info_txt(), "json" => info_json(), - "html" | "" => info_html(req), + "html" | "" => info_html(&req), _ => { let error = ErrorTemplate { msg: "Error: Invalid info extension".into(), @@ -68,13 +68,13 @@ fn info_txt() -> Result, Error> { Response::builder() .status(200) .header("content-type", "text/plain") - .body(Body::from(INSTANCE_INFO.to_string(StringType::Raw))) + .body(Body::from(INSTANCE_INFO.to_string(&StringType::Raw))) } -fn info_html(req: Request) -> Result, Error> { +fn info_html(req: &Request) -> Result, Error> { let message = MessageTemplate { title: String::from("Instance information"), - body: INSTANCE_INFO.to_string(StringType::Html), - prefs: Preferences::new(&req), + body: INSTANCE_INFO.to_string(&StringType::Html), + prefs: Preferences::new(req), url: req.uri().to_string(), } .render() @@ -82,9 +82,10 @@ fn info_html(req: Request) -> Result, Error> { Response::builder().status(200).header("content-type", "text/html; charset=utf8").body(Body::from(message)) } #[derive(Serialize, Deserialize, Default)] -pub(crate) struct InstanceInfo { +pub struct InstanceInfo { + package_name: String, crate_version: String, - git_commit: String, + pub git_commit: String, deploy_date: String, compile_mode: String, deploy_unix_ts: i64, @@ -94,6 +95,7 @@ pub(crate) struct InstanceInfo { impl InstanceInfo { pub fn new() -> Self { Self { + package_name: env!("CARGO_PKG_NAME").to_string(), crate_version: env!("CARGO_PKG_VERSION").to_string(), git_commit: env!("GIT_HASH").to_string(), deploy_date: OffsetDateTime::now_local().unwrap_or_else(|_| 
OffsetDateTime::now_utc()).to_string(), @@ -107,7 +109,7 @@ impl InstanceInfo { } fn to_table(&self) -> String { let mut container = Container::default(); - let convert = |o: &Option| -> String { o.clone().unwrap_or("Unset".to_owned()) }; + let convert = |o: &Option| -> String { o.clone().unwrap_or_else(|| "Unset".to_owned()) }; if let Some(banner) = &self.config.banner { container.add_header(3, "Instance banner"); container.add_raw("
"); @@ -116,12 +118,18 @@ impl InstanceInfo { } container.add_table( Table::from([ + ["Package name", &self.package_name], ["Crate version", &self.crate_version], ["Git commit", &self.git_commit], ["Deploy date", &self.deploy_date], ["Deploy timestamp", &self.deploy_unix_ts.to_string()], ["Compile mode", &self.compile_mode], ["SFW only", &convert(&self.config.sfw_only)], + ["Pushshift frontend", &convert(&self.config.pushshift)], + ["RSS enabled", &convert(&self.config.enable_rss)], + ["Full URL", &convert(&self.config.full_url)], + ["Remove default feeds", &convert(&self.config.default_remove_default_feeds)], + //TODO: fallback to crate::config::DEFAULT_PUSHSHIFT_FRONTEND ]) .with_header_row(["Settings"]), ); @@ -129,65 +137,84 @@ impl InstanceInfo { container.add_table( Table::from([ ["Hide awards", &convert(&self.config.default_hide_awards)], + ["Hide score", &convert(&self.config.default_hide_score)], ["Theme", &convert(&self.config.default_theme)], ["Front page", &convert(&self.config.default_front_page)], ["Layout", &convert(&self.config.default_layout)], ["Wide", &convert(&self.config.default_wide)], ["Comment sort", &convert(&self.config.default_comment_sort)], ["Post sort", &convert(&self.config.default_post_sort)], + ["Blur Spoiler", &convert(&self.config.default_blur_spoiler)], ["Show NSFW", &convert(&self.config.default_show_nsfw)], ["Blur NSFW", &convert(&self.config.default_blur_nsfw)], ["Use HLS", &convert(&self.config.default_use_hls)], ["Hide HLS notification", &convert(&self.config.default_hide_hls_notification)], ["Subscriptions", &convert(&self.config.default_subscriptions)], + ["Filters", &convert(&self.config.default_filters)], ]) .with_header_row(["Default preferences"]), ); container.to_html_string().replace("", "") } - fn to_string(&self, string_type: StringType) -> String { + fn to_string(&self, string_type: &StringType) -> String { match string_type { StringType::Raw => { format!( - "Crate version: {}\n + "Package name: {}\n + Crate version: {}\n Git commit: {}\n Deploy date: {}\n Deploy timestamp: {}\n Compile mode: {}\n SFW only: {:?}\n + Pushshift frontend: {:?}\n + RSS enabled: {:?}\n + Full URL: {:?}\n + Remove default feeds: {:?}\n Config:\n Banner: {:?}\n Hide awards: {:?}\n + Hide score: {:?}\n Default theme: {:?}\n Default front page: {:?}\n Default layout: {:?}\n Default wide: {:?}\n Default comment sort: {:?}\n Default post sort: {:?}\n + Default blur Spoiler: {:?}\n Default show NSFW: {:?}\n Default blur NSFW: {:?}\n Default use HLS: {:?}\n Default hide HLS notification: {:?}\n - Default subscriptions: {:?}\n", + Default subscriptions: {:?}\n + Default filters: {:?}\n", + self.package_name, self.crate_version, self.git_commit, self.deploy_date, self.deploy_unix_ts, self.compile_mode, self.config.sfw_only, + self.config.enable_rss, + self.config.full_url, + self.config.default_remove_default_feeds, + self.config.pushshift, self.config.banner, self.config.default_hide_awards, + self.config.default_hide_score, self.config.default_theme, self.config.default_front_page, self.config.default_layout, self.config.default_wide, self.config.default_comment_sort, self.config.default_post_sort, + self.config.default_blur_spoiler, self.config.default_show_nsfw, self.config.default_blur_nsfw, self.config.default_use_hls, self.config.default_hide_hls_notification, self.config.default_subscriptions, + self.config.default_filters, ) } StringType::Html => self.to_table(), diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..b8eb17e --- /dev/null +++ 
b/src/lib.rs @@ -0,0 +1,13 @@ +pub mod client; +pub mod config; +pub mod duplicates; +pub mod instance_info; +pub mod oauth; +pub mod oauth_resources; +pub mod post; +pub mod search; +pub mod server; +pub mod settings; +pub mod subreddit; +pub mod user; +pub mod utils; diff --git a/src/main.rs b/src/main.rs index 4f5d3d2..e1b010d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -2,30 +2,21 @@ #![forbid(unsafe_code)] #![allow(clippy::cmp_owned)] -// Reference local files -mod config; -mod duplicates; -mod instance_info; -mod post; -mod search; -mod settings; -mod subreddit; -mod user; -mod utils; - -// Import Crates +use cached::proc_macro::cached; use clap::{Arg, ArgAction, Command}; +use std::str::FromStr; use futures_lite::FutureExt; +use hyper::Uri; use hyper::{header::HeaderValue, Body, Request, Response}; - -mod client; -use client::{canonical_path, proxy}; +use log::{info, warn}; use once_cell::sync::Lazy; -use server::RequestExt; -use utils::{error, redirect, ThemeAssets}; +use redlib::client::{canonical_path, proxy, rate_limit_check, CLIENT}; +use redlib::server::{self, RequestExt}; +use redlib::utils::{error, redirect, ThemeAssets}; +use redlib::{config, duplicates, headers, instance_info, post, search, settings, subreddit, user}; -mod server; +use redlib::client::OAUTH_CLIENT; // Create Services @@ -73,6 +64,17 @@ async fn font() -> Result, String> { ) } +async fn opensearch() -> Result, String> { + Ok( + Response::builder() + .status(200) + .header("content-type", "application/opensearchdescription+xml") + .header("Cache-Control", "public, max-age=1209600, s-maxage=86400") + .body(include_bytes!("../static/opensearch.xml").as_ref().into()) + .unwrap_or_default(), + ) +} + async fn resource(body: &str, content_type: &str, cache: bool) -> Result, String> { let mut res = Response::builder() .status(200) @@ -108,9 +110,17 @@ async fn style() -> Result, String> { #[tokio::main] async fn main() { - let matches = Command::new("Libreddit") + // Load environment variables + _ = dotenvy::dotenv(); + + // Initialize logger + pretty_env_logger::init(); + + let matches = Command::new("Redlib") .version(env!("CARGO_PKG_VERSION")) .about("Private front-end for Reddit written in Rust ") + .arg(Arg::new("ipv4-only").short('4').long("ipv4-only").help("Listen on IPv4 only").num_args(0)) + .arg(Arg::new("ipv6-only").short('6').long("ipv6-only").help("Listen on IPv6 only").num_args(0)) .arg( Arg::new("redirect-https") .short('r') @@ -124,7 +134,7 @@ async fn main() { .long("address") .value_name("ADDRESS") .help("Sets address to listen on") - .default_value("0.0.0.0") + .default_value("[::]") .num_args(1), ) .arg( @@ -149,23 +159,52 @@ async fn main() { ) .get_matches(); + match rate_limit_check().await { + Ok(()) => { + info!("[✅] Rate limit check passed"); + } + Err(e) => { + let mut message = format!("Rate limit check failed: {}", e); + message += "\nThis may cause issues with the rate limit."; + message += "\nPlease report this error with the above information."; + message += "\nhttps://github.com/redlib-org/redlib/issues/new?assignees=sigaloid&labels=bug&title=%F0%9F%90%9B+Bug+Report%3A+Rate+limit+mismatch"; + warn!("{}", message); + eprintln!("{}", message); + } + } + let address = matches.get_one::("address").unwrap(); let port = matches.get_one::("port").unwrap(); let hsts = matches.get_one("hsts").map(|m: &String| m.as_str()); - let listener = [address, ":", port].concat(); + let ipv4_only = std::env::var("IPV4_ONLY").is_ok() || matches.get_flag("ipv4-only"); + let ipv6_only = 
std::env::var("IPV6_ONLY").is_ok() || matches.get_flag("ipv6-only"); - println!("Starting Libreddit..."); + let listener = if ipv4_only { + format!("0.0.0.0:{}", port) + } else if ipv6_only { + format!("[::]:{}", port) + } else { + [address, ":", port].concat() + }; + + println!("Starting Redlib..."); // Begin constructing a server let mut app = server::Server::new(); // Force evaluation of statics. In instance_info case, we need to evaluate // the timestamp so deploy date is accurate - in config case, we need to - // evaluate the configuration to avoid paying penalty at first request. + // evaluate the configuration to avoid paying penalty at first request - + // in OAUTH case, we need to retrieve the token to avoid paying penalty + // at first request + info!("Evaluating config."); Lazy::force(&config::CONFIG); + info!("Evaluating instance info."); Lazy::force(&instance_info::INSTANCE_INFO); + info!("Creating OAUTH client."); + Lazy::force(&OAUTH_CLIENT); // Define default headers (added to all responses) app.default_headers = headers! { @@ -176,7 +215,7 @@ async fn main() { }; if let Some(expire_time) = hsts { - if let Ok(val) = HeaderValue::from_str(&format!("max-age={}", expire_time)) { + if let Ok(val) = HeaderValue::from_str(&format!("max-age={expire_time}")) { app.default_headers.insert("Strict-Transport-Security", val); } } @@ -186,27 +225,53 @@ async fn main() { app .at("/manifest.json") .get(|_| resource(include_str!("../static/manifest.json"), "application/json", false).boxed()); - app - .at("/robots.txt") - .get(|_| resource("User-agent: *\nDisallow: /u/\nDisallow: /user/", "text/plain", true).boxed()); + app.at("/robots.txt").get(|_| { + resource( + if match config::get_setting("REDLIB_ROBOTS_DISABLE_INDEXING") { + Some(val) => val == "on", + None => false, + } { + "User-agent: *\nDisallow: /" + } else { + "User-agent: *\nDisallow: /u/\nDisallow: /user/" + }, + "text/plain", + true, + ) + .boxed() + }); app.at("/favicon.ico").get(|_| favicon().boxed()); app.at("/logo.png").get(|_| pwa_logo().boxed()); app.at("/Inter.var.woff2").get(|_| font().boxed()); app.at("/touch-icon-iphone.png").get(|_| iphone_logo().boxed()); app.at("/apple-touch-icon.png").get(|_| iphone_logo().boxed()); + app.at("/opensearch.xml").get(|_| opensearch().boxed()); app .at("/playHLSVideo.js") .get(|_| resource(include_str!("../static/playHLSVideo.js"), "text/javascript", false).boxed()); app .at("/hls.min.js") .get(|_| resource(include_str!("../static/hls.min.js"), "text/javascript", false).boxed()); + app + .at("/highlighted.js") + .get(|_| resource(include_str!("../static/highlighted.js"), "text/javascript", false).boxed()); + app + .at("/check_update.js") + .get(|_| resource(include_str!("../static/check_update.js"), "text/javascript", false).boxed()); + app.at("/copy.js").get(|_| resource(include_str!("../static/copy.js"), "text/javascript", false).boxed()); - // Proxy media through Libreddit + app.at("/commits.atom").get(|_| async move { proxy_commit_info().await }.boxed()); + app.at("/instances.json").get(|_| async move { proxy_instances().await }.boxed()); + + // Proxy media through Redlib app.at("/vid/:id/:size").get(|r| proxy(r, "https://v.redd.it/{id}/DASH_{size}").boxed()); app.at("/hls/:id/*path").get(|r| proxy(r, "https://v.redd.it/{id}/{path}").boxed()); app.at("/img/*path").get(|r| proxy(r, "https://i.redd.it/{path}").boxed()); app.at("/thumb/:point/:id").get(|r| proxy(r, "https://{point}.thumbs.redditmedia.com/{id}").boxed()); app.at("/emoji/:id/:name").get(|r| proxy(r, 
"https://emoji.redditmedia.com/{id}/{name}").boxed()); + app + .at("/emote/:subreddit_id/:filename") + .get(|r| proxy(r, "https://reddit-econ-prod-assets-permanent.s3.amazonaws.com/asset-manager/{subreddit_id}/{filename}").boxed()); app .at("/preview/:loc/award_images/:fullname/:id") .get(|r| proxy(r, "https://{loc}view.redd.it/award_images/{fullname}/{id}").boxed()); @@ -217,11 +282,12 @@ async fn main() { // Browse user profile app .at("/u/:name") - .get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed()); + .get(|r| async move { Ok(redirect(&format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed()); app.at("/u/:name/comments/:id/:title").get(|r| post::item(r).boxed()); app.at("/u/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed()); - app.at("/user/[deleted]").get(|req| error(req, "User has deleted their account".to_string()).boxed()); + app.at("/user/[deleted]").get(|req| error(req, "User has deleted their account").boxed()); + app.at("/user/:name.rss").get(|r| user::rss(r).boxed()); app.at("/user/:name").get(|r| user::profile(r).boxed()); app.at("/user/:name/:listing").get(|r| user::profile(r).boxed()); app.at("/user/:name/comments/:id").get(|r| post::item(r).boxed()); @@ -231,8 +297,12 @@ async fn main() { // Configure settings app.at("/settings").get(|r| settings::get(r).boxed()).post(|r| settings::set(r).boxed()); app.at("/settings/restore").get(|r| settings::restore(r).boxed()); + app.at("/settings/encoded-restore").post(|r| settings::encoded_restore(r).boxed()); app.at("/settings/update").get(|r| settings::update(r).boxed()); + // RSS Subscriptions + app.at("/r/:sub.rss").get(|r| subreddit::rss(r).boxed()); + // Subreddit services app .at("/r/:sub") @@ -241,7 +311,7 @@ async fn main() { app .at("/r/u_:name") - .get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed()); + .get(|r| async move { Ok(redirect(&format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed()); app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions_filters(r).boxed()); app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions_filters(r).boxed()); @@ -266,10 +336,10 @@ async fn main() { app .at("/r/:sub/w") - .get(|r| async move { Ok(redirect(format!("/r/{}/wiki", r.param("sub").unwrap_or_default()))) }.boxed()); + .get(|r| async move { Ok(redirect(&format!("/r/{}/wiki", r.param("sub").unwrap_or_default()))) }.boxed()); app .at("/r/:sub/w/*page") - .get(|r| async move { Ok(redirect(format!("/r/{}/wiki/{}", r.param("sub").unwrap_or_default(), r.param("wiki").unwrap_or_default()))) }.boxed()); + .get(|r| async move { Ok(redirect(&format!("/r/{}/wiki/{}", r.param("sub").unwrap_or_default(), r.param("wiki").unwrap_or_default()))) }.boxed()); app.at("/r/:sub/wiki").get(|r| subreddit::wiki(r).boxed()); app.at("/r/:sub/wiki/*page").get(|r| subreddit::wiki(r).boxed()); @@ -281,10 +351,10 @@ async fn main() { app.at("/").get(|r| subreddit::community(r).boxed()); // View Reddit wiki - app.at("/w").get(|_| async { Ok(redirect("/wiki".to_string())) }.boxed()); + app.at("/w").get(|_| async { Ok(redirect("/wiki")) }.boxed()); app .at("/w/*page") - .get(|r| async move { Ok(redirect(format!("/wiki/{}", r.param("page").unwrap_or_default()))) }.boxed()); + .get(|r| async move { Ok(redirect(&format!("/wiki/{}", r.param("page").unwrap_or_default()))) }.boxed()); app.at("/wiki").get(|r| subreddit::wiki(r).boxed()); app.at("/wiki/*page").get(|r| subreddit::wiki(r).boxed()); @@ -292,12 
+362,31 @@ async fn main() { app.at("/search").get(|r| search::find(r).boxed()); // Handle about pages - app.at("/about").get(|req| error(req, "About pages aren't added yet".to_string()).boxed()); + app.at("/about").get(|req| error(req, "About pages aren't added yet").boxed()); // Instance info page app.at("/info").get(|r| instance_info::instance_info(r).boxed()); app.at("/info.:extension").get(|r| instance_info::instance_info(r).boxed()); + // Handle obfuscated share links. + // Note that this still forces the server to follow the share link to get to the post, so maybe this wants to be updated with a warning before it follow it + app.at("/r/:sub/s/:id").get(|req: Request| { + Box::pin(async move { + let sub = req.param("sub").unwrap_or_default(); + match req.param("id").as_deref() { + // Share link + Some(id) if (8..12).contains(&id.len()) => match canonical_path(format!("/r/{sub}/s/{id}"), 3).await { + Ok(Some(path)) => Ok(redirect(&path)), + Ok(None) => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await, + Err(e) => error(req, &e).await, + }, + + // Error message for unknown pages + _ => error(req, "Nothing here").await, + } + }) + }); + app.at("/:id").get(|req: Request| { Box::pin(async move { match req.param("id").as_deref() { @@ -305,29 +394,67 @@ async fn main() { Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await, // Short link for post - Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/{}", id)).await { + Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/comments/{id}"), 3).await { Ok(path_opt) => match path_opt { - Some(path) => Ok(redirect(path)), + Some(path) => Ok(redirect(&path)), None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await, }, - Err(e) => error(req, e).await, + Err(e) => error(req, &e).await, }, // Error message for unknown pages - _ => error(req, "Nothing here".to_string()).await, + _ => error(req, "Nothing here").await, } }) }); // Default service in case no routes match - app.at("/*").get(|req| error(req, "Nothing here".to_string()).boxed()); + app.at("/*").get(|req| error(req, "Nothing here").boxed()); - println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), listener); + println!("Running Redlib v{} on {listener}!", env!("CARGO_PKG_VERSION")); - let server = app.listen(listener); + let server = app.listen(&listener); // Run this server for... forever! 
if let Err(e) = server.await { - eprintln!("Server error: {}", e); + eprintln!("Server error: {e}"); } } + +pub async fn proxy_commit_info() -> Result, String> { + Ok( + Response::builder() + .status(200) + .header("content-type", "application/atom+xml") + .body(Body::from(fetch_commit_info().await)) + .unwrap_or_default(), + ) +} + +#[cached(time = 600)] +async fn fetch_commit_info() -> String { + let uri = Uri::from_str("https://github.com/redlib-org/redlib/commits/main.atom").expect("Invalid URI"); + + let resp: Body = CLIENT.get(uri).await.expect("Failed to request GitHub").into_body(); + + hyper::body::to_bytes(resp).await.expect("Failed to read body").iter().copied().map(|x| x as char).collect() +} + +pub async fn proxy_instances() -> Result, String> { + Ok( + Response::builder() + .status(200) + .header("content-type", "application/json") + .body(Body::from(fetch_instances().await)) + .unwrap_or_default(), + ) +} + +#[cached(time = 600)] +async fn fetch_instances() -> String { + let uri = Uri::from_str("https://raw.githubusercontent.com/redlib-org/redlib-instances/refs/heads/main/instances.json").expect("Invalid URI"); + + let resp: Body = CLIENT.get(uri).await.expect("Failed to request GitHub").into_body(); + + hyper::body::to_bytes(resp).await.expect("Failed to read body").iter().copied().map(|x| x as char).collect() +} diff --git a/src/oauth.rs b/src/oauth.rs new file mode 100644 index 0000000..5627900 --- /dev/null +++ b/src/oauth.rs @@ -0,0 +1,247 @@ +use std::{collections::HashMap, sync::atomic::Ordering, time::Duration}; + +use crate::{ + client::{CLIENT, OAUTH_CLIENT, OAUTH_IS_ROLLING_OVER, OAUTH_RATELIMIT_REMAINING}, + oauth_resources::ANDROID_APP_VERSION_LIST, +}; +use base64::{engine::general_purpose, Engine as _}; +use hyper::{client, Body, Method, Request}; +use log::{error, info, trace}; +use serde_json::json; +use tegen::tegen::TextGenerator; +use tokio::time::{error::Elapsed, timeout}; + +const REDDIT_ANDROID_OAUTH_CLIENT_ID: &str = "ohXpoqrZYub1kg"; + +const AUTH_ENDPOINT: &str = "https://www.reddit.com"; + +// Spoofed client for Android devices +#[derive(Debug, Clone, Default)] +pub struct Oauth { + pub(crate) initial_headers: HashMap, + pub(crate) headers_map: HashMap, + pub(crate) token: String, + expires_in: u64, + device: Device, +} + +impl Oauth { + /// Create a new OAuth client + pub(crate) async fn new() -> Self { + // Call new_internal until it succeeds + loop { + let attempt = Self::new_with_timeout().await; + match attempt { + Ok(Some(oauth)) => { + info!("[✅] Successfully created OAuth client"); + return oauth; + } + Ok(None) => { + error!("Failed to create OAuth client. Retrying in 5 seconds..."); + } + Err(duration) => { + error!("Failed to create OAuth client in {duration:?}. 
Retrying in 5 seconds..."); + } + } + tokio::time::sleep(Duration::from_secs(5)).await; + } + } + + async fn new_with_timeout() -> Result, Elapsed> { + let mut oauth = Self::default(); + timeout(Duration::from_secs(5), oauth.login()).await.map(|result| result.map(|_| oauth)) + } + + pub(crate) fn default() -> Self { + // Generate a device to spoof + let device = Device::new(); + let headers_map = device.headers.clone(); + let initial_headers = device.initial_headers.clone(); + // For now, just insert headers - no token request + Self { + headers_map, + initial_headers, + token: String::new(), + expires_in: 0, + device, + } + } + async fn login(&mut self) -> Option<()> { + // Construct URL for OAuth token + let url = format!("{AUTH_ENDPOINT}/auth/v2/oauth/access-token/loid"); + let mut builder = Request::builder().method(Method::POST).uri(&url); + + // Add headers from spoofed client + for (key, value) in &self.initial_headers { + builder = builder.header(key, value); + } + // Set up HTTP Basic Auth - basically just the const OAuth ID's with no password, + // Base64-encoded. https://en.wikipedia.org/wiki/Basic_access_authentication + // This could be constant, but I don't think it's worth it. OAuth ID's can change + // over time and we want to be flexible. + let auth = general_purpose::STANDARD.encode(format!("{}:", self.device.oauth_id)); + builder = builder.header("Authorization", format!("Basic {auth}")); + + // Set JSON body. I couldn't tell you what this means. But that's what the client sends + let json = json!({ + "scopes": ["*","email", "pii"] + }); + let body = Body::from(json.to_string()); + + // Build request + let request = builder.body(body).unwrap(); + + trace!("Sending token request...\n\n{request:?}"); + + // Send request + let client: &once_cell::sync::Lazy> = &CLIENT; + let resp = client.request(request).await.ok()?; + + trace!("Received response with status {} and length {:?}", resp.status(), resp.headers().get("content-length")); + trace!("OAuth headers: {:#?}", resp.headers()); + + // Parse headers - loid header _should_ be saved sent on subsequent token refreshes. + // Technically it's not needed, but it's easy for Reddit API to check for this. + // It's some kind of header that uniquely identifies the device. + // Not worried about the privacy implications, since this is randomly changed + // and really only as privacy-concerning as the OAuth token itself. 
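// Both identifiers are stored into headers_map, which request() merges into every
// outgoing Reddit call, so the spoofed device keeps a consistent identity until
// force_refresh_token() swaps in a brand-new Oauth client.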
+ if let Some(header) = resp.headers().get("x-reddit-loid") { + self.headers_map.insert("x-reddit-loid".to_owned(), header.to_str().ok()?.to_string()); + } + + // Same with x-reddit-session + if let Some(header) = resp.headers().get("x-reddit-session") { + self.headers_map.insert("x-reddit-session".to_owned(), header.to_str().ok()?.to_string()); + } + + trace!("Serializing response..."); + + // Serialize response + let body_bytes = hyper::body::to_bytes(resp.into_body()).await.ok()?; + let json: serde_json::Value = serde_json::from_slice(&body_bytes).ok()?; + + trace!("Accessing relevant fields..."); + + // Save token and expiry + self.token = json.get("access_token")?.as_str()?.to_string(); + self.expires_in = json.get("expires_in")?.as_u64()?; + self.headers_map.insert("Authorization".to_owned(), format!("Bearer {}", self.token)); + + info!("[✅] Success - Retrieved token \"{}...\", expires in {}", &self.token[..32], self.expires_in); + + Some(()) + } +} + +pub async fn token_daemon() { + // Monitor for refreshing token + loop { + // Get expiry time - be sure to not hold the read lock + let expires_in = { OAUTH_CLIENT.load_full().expires_in }; + + // sleep for the expiry time minus 2 minutes + let duration = Duration::from_secs(expires_in - 120); + + info!("[⏳] Waiting for {duration:?} seconds before refreshing OAuth token..."); + + tokio::time::sleep(duration).await; + + info!("[⌛] {duration:?} Elapsed! Refreshing OAuth token..."); + + // Refresh token - in its own scope + { + force_refresh_token().await; + } + } +} + +pub async fn force_refresh_token() { + if OAUTH_IS_ROLLING_OVER.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst).is_err() { + trace!("Skipping refresh token roll over, already in progress"); + return; + } + + trace!("Rolling over refresh token. 
Current rate limit: {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst)); + let new_client = Oauth::new().await; + OAUTH_CLIENT.swap(new_client.into()); + OAUTH_RATELIMIT_REMAINING.store(99, Ordering::SeqCst); + OAUTH_IS_ROLLING_OVER.store(false, Ordering::SeqCst); +} + +#[derive(Debug, Clone, Default)] +struct Device { + oauth_id: String, + initial_headers: HashMap, + headers: HashMap, +} + +impl Device { + fn android() -> Self { + // Generate uuid + let uuid = uuid::Uuid::new_v4().to_string(); + + // Generate random user-agent + let android_app_version = choose(ANDROID_APP_VERSION_LIST).to_string(); + let android_version = fastrand::u8(9..=14); + + let android_user_agent = format!("Reddit/{android_app_version}/Android {android_version}"); + + let qos = fastrand::u32(1000..=100_000); + let qos: f32 = qos as f32 / 1000.0; + let qos = format!("{:.3}", qos); + + let codecs = TextGenerator::new().generate("available-codecs=video/avc, video/hevc{, video/x-vnd.on2.vp9|}"); + + // Android device headers + let headers: HashMap = HashMap::from([ + ("User-Agent".into(), android_user_agent), + ("x-reddit-retry".into(), "algo=no-retries".into()), + ("x-reddit-compression".into(), "1".into()), + ("x-reddit-qos".into(), qos), + ("x-reddit-media-codecs".into(), codecs), + ("Content-Type".into(), "application/json; charset=UTF-8".into()), + ("client-vendor-id".into(), uuid.clone()), + ("X-Reddit-Device-Id".into(), uuid.clone()), + ]); + + info!("[🔄] Spoofing Android client with headers: {headers:?}, uuid: \"{uuid}\", and OAuth ID \"{REDDIT_ANDROID_OAUTH_CLIENT_ID}\""); + + Self { + oauth_id: REDDIT_ANDROID_OAUTH_CLIENT_ID.to_string(), + headers: headers.clone(), + initial_headers: headers, + } + } + fn new() -> Self { + // See https://github.com/redlib-org/redlib/issues/8 + Self::android() + } +} + +fn choose(list: &[T]) -> T { + *fastrand::choose_multiple(list.iter(), 1)[0] +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_oauth_client() { + assert!(!OAUTH_CLIENT.load_full().token.is_empty()); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_oauth_client_refresh() { + force_refresh_token().await; +} +#[tokio::test(flavor = "multi_thread")] +async fn test_oauth_token_exists() { + assert!(!OAUTH_CLIENT.load_full().token.is_empty()); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_oauth_headers_len() { + assert!(OAUTH_CLIENT.load_full().headers_map.len() >= 3); +} + +#[test] +fn test_creating_device() { + Device::new(); +} diff --git a/src/oauth_resources.rs b/src/oauth_resources.rs new file mode 100644 index 0000000..01928c3 --- /dev/null +++ b/src/oauth_resources.rs @@ -0,0 +1,158 @@ +// This file was generated by scripts/update_oauth_resources.sh +// Rerun scripts/update_oauth_resources.sh to update this file +// Please do not edit manually +// Filled in with real app versions +pub const _IOS_APP_VERSION_LIST: &[&str; 1] = &[""]; +pub const ANDROID_APP_VERSION_LIST: &[&str; 150] = &[ + "Version 2024.22.1/Build 1652272", + "Version 2024.23.1/Build 1665606", + "Version 2024.24.1/Build 1682520", + "Version 2024.25.0/Build 1693595", + "Version 2024.25.2/Build 1700401", + "Version 2024.25.3/Build 1703490", + "Version 2024.26.0/Build 1710470", + "Version 2024.26.1/Build 1717435", + "Version 2024.28.0/Build 1737665", + "Version 2024.28.1/Build 1741165", + "Version 2024.30.0/Build 1770787", + "Version 2024.31.0/Build 1786202", + "Version 2024.32.0/Build 1809095", + "Version 2024.32.1/Build 1813258", + "Version 2024.33.0/Build 1819908", + "Version 2024.34.0/Build 1837909", + 
"Version 2024.35.0/Build 1861437", + "Version 2024.36.0/Build 1875012", + "Version 2024.37.0/Build 1888053", + "Version 2024.38.0/Build 1902791", + "Version 2024.39.0/Build 1916713", + "Version 2024.40.0/Build 1928580", + "Version 2024.41.0/Build 1941199", + "Version 2024.41.1/Build 1947805", + "Version 2024.42.0/Build 1952440", + "Version 2024.43.0/Build 1972250", + "Version 2024.44.0/Build 1988458", + "Version 2024.45.0/Build 2001943", + "Version 2024.46.0/Build 2012731", + "Version 2024.47.0/Build 2029755", + "Version 2023.48.0/Build 1319123", + "Version 2023.49.0/Build 1321715", + "Version 2023.49.1/Build 1322281", + "Version 2023.50.0/Build 1332338", + "Version 2023.50.1/Build 1345844", + "Version 2024.02.0/Build 1368985", + "Version 2024.03.0/Build 1379408", + "Version 2024.04.0/Build 1391236", + "Version 2024.05.0/Build 1403584", + "Version 2024.06.0/Build 1418489", + "Version 2024.07.0/Build 1429651", + "Version 2024.08.0/Build 1439531", + "Version 2024.10.0/Build 1470045", + "Version 2024.10.1/Build 1478645", + "Version 2024.11.0/Build 1480707", + "Version 2024.12.0/Build 1494694", + "Version 2024.13.0/Build 1505187", + "Version 2024.14.0/Build 1520556", + "Version 2024.15.0/Build 1536823", + "Version 2024.16.0/Build 1551366", + "Version 2024.17.0/Build 1568106", + "Version 2024.18.0/Build 1577901", + "Version 2024.18.1/Build 1585304", + "Version 2024.19.0/Build 1593346", + "Version 2024.20.0/Build 1612800", + "Version 2024.20.1/Build 1615586", + "Version 2024.20.2/Build 1624969", + "Version 2024.20.3/Build 1624970", + "Version 2024.21.0/Build 1631686", + "Version 2024.22.0/Build 1645257", + "Version 2023.21.0/Build 956283", + "Version 2023.22.0/Build 968223", + "Version 2023.23.0/Build 983896", + "Version 2023.24.0/Build 998541", + "Version 2023.25.0/Build 1014750", + "Version 2023.25.1/Build 1018737", + "Version 2023.26.0/Build 1019073", + "Version 2023.27.0/Build 1031923", + "Version 2023.28.0/Build 1046887", + "Version 2023.29.0/Build 1059855", + "Version 2023.30.0/Build 1078734", + "Version 2023.31.0/Build 1091027", + "Version 2023.32.0/Build 1109919", + "Version 2023.32.1/Build 1114141", + "Version 2023.33.1/Build 1129741", + "Version 2023.34.0/Build 1144243", + "Version 2023.35.0/Build 1157967", + "Version 2023.36.0/Build 1168982", + "Version 2023.37.0/Build 1182743", + "Version 2023.38.0/Build 1198522", + "Version 2023.39.0/Build 1211607", + "Version 2023.39.1/Build 1221505", + "Version 2023.40.0/Build 1221521", + "Version 2023.41.0/Build 1233125", + "Version 2023.41.1/Build 1239615", + "Version 2023.42.0/Build 1245088", + "Version 2023.43.0/Build 1257426", + "Version 2023.44.0/Build 1268622", + "Version 2023.45.0/Build 1281371", + "Version 2023.47.0/Build 1303604", + "Version 2022.42.0/Build 638508", + "Version 2022.43.0/Build 648277", + "Version 2022.44.0/Build 664348", + "Version 2022.45.0/Build 677985", + "Version 2023.01.0/Build 709875", + "Version 2023.02.0/Build 717912", + "Version 2023.03.0/Build 729220", + "Version 2023.04.0/Build 744681", + "Version 2023.05.0/Build 755453", + "Version 2023.06.0/Build 775017", + "Version 2023.07.0/Build 788827", + "Version 2023.07.1/Build 790267", + "Version 2023.08.0/Build 798718", + "Version 2023.09.0/Build 812015", + "Version 2023.09.1/Build 816833", + "Version 2023.10.0/Build 821148", + "Version 2023.11.0/Build 830610", + "Version 2023.12.0/Build 841150", + "Version 2023.13.0/Build 852246", + "Version 2023.14.0/Build 861593", + "Version 2023.14.1/Build 864826", + "Version 2023.15.0/Build 870628", + "Version 2023.16.0/Build 
883294", + "Version 2023.16.1/Build 886269", + "Version 2023.17.0/Build 896030", + "Version 2023.17.1/Build 900542", + "Version 2023.18.0/Build 911877", + "Version 2023.19.0/Build 927681", + "Version 2023.20.0/Build 943980", + "Version 2023.20.1/Build 946732", + "Version 2022.20.0/Build 487703", + "Version 2022.21.0/Build 492436", + "Version 2022.22.0/Build 498700", + "Version 2022.23.0/Build 502374", + "Version 2022.23.1/Build 506606", + "Version 2022.24.0/Build 510950", + "Version 2022.24.1/Build 513462", + "Version 2022.25.0/Build 515072", + "Version 2022.25.1/Build 516394", + "Version 2022.25.2/Build 519915", + "Version 2022.26.0/Build 521193", + "Version 2022.27.0/Build 527406", + "Version 2022.27.1/Build 529687", + "Version 2022.28.0/Build 533235", + "Version 2022.30.0/Build 548620", + "Version 2022.31.0/Build 556666", + "Version 2022.31.1/Build 562612", + "Version 2022.32.0/Build 567875", + "Version 2022.33.0/Build 572600", + "Version 2022.34.0/Build 579352", + "Version 2022.35.0/Build 588016", + "Version 2022.35.1/Build 589034", + "Version 2022.36.0/Build 593102", + "Version 2022.37.0/Build 601691", + "Version 2022.38.0/Build 607460", + "Version 2022.39.0/Build 615385", + "Version 2022.39.1/Build 619019", + "Version 2022.40.0/Build 624782", + "Version 2022.41.0/Build 630468", + "Version 2022.41.1/Build 634168", +]; +pub const _IOS_OS_VERSION_LIST: &[&str; 1] = &[""]; diff --git a/src/post.rs b/src/post.rs index f2f5eaf..20b917d 100644 --- a/src/post.rs +++ b/src/post.rs @@ -1,14 +1,19 @@ +#![allow(clippy::cmp_owned)] + // CRATES use crate::client::json; +use crate::config::get_setting; use crate::server::RequestExt; use crate::subreddit::{can_access_quarantine, quarantine}; use crate::utils::{ - error, format_num, get_filters, nsfw_landing, param, parse_post, rewrite_urls, setting, template, time, val, Author, Awards, Comment, Flair, FlairPart, Post, Preferences, + error, format_num, get_filters, nsfw_landing, param, parse_post, rewrite_emotes, setting, template, time, val, Author, Awards, Comment, Flair, FlairPart, Post, Preferences, }; use hyper::{Body, Request, Response}; -use askama::Template; -use std::collections::HashSet; +use once_cell::sync::Lazy; +use regex::Regex; +use rinja::Template; +use std::collections::{HashMap, HashSet}; // STRUCTS #[derive(Template)] @@ -20,13 +25,18 @@ struct PostTemplate { prefs: Preferences, single_thread: bool, url: String, + url_without_query: String, + comment_query: String, } +static COMMENT_SEARCH_CAPTURE: Lazy = Lazy::new(|| Regex::new(r"\?q=(.*)&type=comment").unwrap()); + pub async fn item(req: Request) -> Result, String> { // Build Reddit API path let mut path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default()); let sub = req.param("sub").unwrap_or_default(); let quarantined = can_access_quarantine(&req, &sub); + let url = req.uri().to_string(); // Set sort to sort query parameter let sort = param(&path, "sort").unwrap_or_else(|| { @@ -44,7 +54,7 @@ pub async fn item(req: Request) -> Result, String> { // Log the post ID being fetched in debug mode #[cfg(debug_assertions)] - dbg!(req.param("id").unwrap_or_default()); + req.param("id").unwrap_or_default(); let single_thread = req.param("comment_id").is_some(); let highlighted_comment = &req.param("comment_id").unwrap_or_default(); @@ -56,39 +66,54 @@ pub async fn item(req: Request) -> Result, String> { // Parse the JSON into Post and Comment structs let post = parse_post(&response[0]["data"]["children"][0]).await; + let req_url = 
req.uri().to_string(); // Return landing page if this post if this Reddit deems this post // NSFW, but we have also disabled the display of NSFW content // or if the instance is SFW-only. - if post.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) { - return Ok(nsfw_landing(req).await.unwrap_or_default()); + if post.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) { + return Ok(nsfw_landing(req, req_url).await.unwrap_or_default()); } - let comments = parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req), &req); - let url = req.uri().to_string(); + let query_body = match COMMENT_SEARCH_CAPTURE.captures(&url) { + Some(captures) => captures.get(1).unwrap().as_str().replace("%20", " ").replace('+', " "), + None => String::new(), + }; + + let query_string = format!("q={query_body}&type=comment"); + let form = url::form_urlencoded::parse(query_string.as_bytes()).collect::>(); + let query = form.get("q").unwrap().clone().to_string(); + + let comments = match query.as_str() { + "" => parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req), &req), + _ => query_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req), &query, &req), + }; // Use the Post and Comment structs to generate a website to show users - template(PostTemplate { + Ok(template(&PostTemplate { comments, post, + url_without_query: url.clone().trim_end_matches(&format!("?q={query}&type=comment")).to_string(), sort, prefs: Preferences::new(&req), single_thread, - url, - }) + url: req_url, + comment_query: query, + })) } // If the Reddit API returns an error, exit and send error page to user Err(msg) => { if msg == "quarantined" || msg == "gated" { let sub = req.param("sub").unwrap_or_default(); - quarantine(req, sub, msg) + Ok(quarantine(&req, sub, &msg)) } else { - error(req, msg).await + error(req, &msg).await } } } } // COMMENTS + fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str, filters: &HashSet, req: &Request) -> Vec { // Parse the comment JSON into a Vector of Comments let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned); @@ -97,96 +122,136 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, comments .into_iter() .map(|comment| { - let kind = comment["kind"].as_str().unwrap_or_default().to_string(); let data = &comment["data"]; - - let unix_time = data["created_utc"].as_f64().unwrap_or_default(); - let (rel_time, created) = time(unix_time); - - let edited = data["edited"].as_f64().map_or((String::new(), String::new()), time); - - let score = data["score"].as_i64().unwrap_or(0); - - // The JSON API only provides comments up to some threshold. - // Further comments have to be loaded by subsequent requests. - // The "kind" value will be "more" and the "count" - // shows how many more (sub-)comments exist in the respective nesting level. - // Note that in certain (seemingly random) cases, the count is simply wrong. 
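// Illustrative sketch (hypothetical helper, not from the Redlib source): the "more"
// stub described above is just another node in data["children"], so its advertised
// count can be read like this, assuming `comment` is one such serde_json::Value.
fn more_count_of(comment: &serde_json::Value) -> Option<i64> {
	if comment["kind"].as_str() == Some("more") {
		comment["data"]["count"].as_i64()
	} else {
		None
	}
}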
- let more_count = data["count"].as_i64().unwrap_or_default(); - - // If this comment contains replies, handle those too let replies: Vec = if data["replies"].is_object() { parse_comments(&data["replies"], post_link, post_author, highlighted_comment, filters, req) } else { Vec::new() }; - - let awards: Awards = Awards::parse(&data["all_awardings"]); - - let parent_kind_and_id = val(&comment, "parent_id"); - let parent_info = parent_kind_and_id.split('_').collect::>(); - - let id = val(&comment, "id"); - let highlighted = id == highlighted_comment; - - let body = if (val(&comment, "author") == "[deleted]" && val(&comment, "body") == "[removed]") || val(&comment, "body") == "[ Removed by Reddit ]" { - format!( - "
", - post_link, id - ) - } else { - rewrite_urls(&val(&comment, "body_html")) - }; - - let author = Author { - name: val(&comment, "author"), - flair: Flair { - flair_parts: FlairPart::parse( - data["author_flair_type"].as_str().unwrap_or_default(), - data["author_flair_richtext"].as_array(), - data["author_flair_text"].as_str(), - ), - text: val(&comment, "link_flair_text"), - background_color: val(&comment, "author_flair_background_color"), - foreground_color: val(&comment, "author_flair_text_color"), - }, - distinguished: val(&comment, "distinguished"), - }; - let is_filtered = filters.contains(&["u_", author.name.as_str()].concat()); - - // Many subreddits have a default comment posted about the sub's rules etc. - // Many libreddit users do not wish to see this kind of comment by default. - // Reddit does not tell us which users are "bots", so a good heuristic is to - // collapse stickied moderator comments. - let is_moderator_comment = data["distinguished"].as_str().unwrap_or_default() == "moderator"; - let is_stickied = data["stickied"].as_bool().unwrap_or_default(); - let collapsed = (is_moderator_comment && is_stickied) || is_filtered; - - Comment { - id, - kind, - parent_id: parent_info[1].to_string(), - parent_kind: parent_info[0].to_string(), - post_link: post_link.to_string(), - post_author: post_author.to_string(), - body, - author, - score: if data["score_hidden"].as_bool().unwrap_or_default() { - ("\u{2022}".to_string(), "Hidden".to_string()) - } else { - format_num(score) - }, - rel_time, - created, - edited, - replies, - highlighted, - awards, - collapsed, - is_filtered, - more_count, - prefs: Preferences::new(req), - } + build_comment(&comment, data, replies, post_link, post_author, highlighted_comment, filters, req) }) .collect() } + +fn query_comments( + json: &serde_json::Value, + post_link: &str, + post_author: &str, + highlighted_comment: &str, + filters: &HashSet, + query: &str, + req: &Request, +) -> Vec { + let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned); + let mut results = Vec::new(); + + for comment in comments { + let data = &comment["data"]; + + // If this comment contains replies, handle those too + if data["replies"].is_object() { + results.append(&mut query_comments(&data["replies"], post_link, post_author, highlighted_comment, filters, query, req)); + } + + let c = build_comment(&comment, data, Vec::new(), post_link, post_author, highlighted_comment, filters, req); + if c.body.to_lowercase().contains(&query.to_lowercase()) { + results.push(c); + } + } + + results +} +#[allow(clippy::too_many_arguments)] +fn build_comment( + comment: &serde_json::Value, + data: &serde_json::Value, + replies: Vec, + post_link: &str, + post_author: &str, + highlighted_comment: &str, + filters: &HashSet, + req: &Request, +) -> Comment { + let id = val(comment, "id"); + + let body = if (val(comment, "author") == "[deleted]" && val(comment, "body") == "[removed]") || val(comment, "body") == "[ Removed by Reddit ]" { + format!( + "

<div class=\"md\"><p>[removed] — <a href=\"https://{}{post_link}{id}\">view removed comment</a></p></div>

", + get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or_else(|| String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)), + ) + } else { + rewrite_emotes(&data["media_metadata"], val(comment, "body_html")) + }; + let kind = comment["kind"].as_str().unwrap_or_default().to_string(); + + let unix_time = data["created_utc"].as_f64().unwrap_or_default(); + let (rel_time, created) = time(unix_time); + + let edited = data["edited"].as_f64().map_or((String::new(), String::new()), time); + + let score = data["score"].as_i64().unwrap_or(0); + + // The JSON API only provides comments up to some threshold. + // Further comments have to be loaded by subsequent requests. + // The "kind" value will be "more" and the "count" + // shows how many more (sub-)comments exist in the respective nesting level. + // Note that in certain (seemingly random) cases, the count is simply wrong. + let more_count = data["count"].as_i64().unwrap_or_default(); + + let awards: Awards = Awards::parse(&data["all_awardings"]); + + let parent_kind_and_id = val(comment, "parent_id"); + let parent_info = parent_kind_and_id.split('_').collect::>(); + + let highlighted = id == highlighted_comment; + + let author = Author { + name: val(comment, "author"), + flair: Flair { + flair_parts: FlairPart::parse( + data["author_flair_type"].as_str().unwrap_or_default(), + data["author_flair_richtext"].as_array(), + data["author_flair_text"].as_str(), + ), + text: val(comment, "link_flair_text"), + background_color: val(comment, "author_flair_background_color"), + foreground_color: val(comment, "author_flair_text_color"), + }, + distinguished: val(comment, "distinguished"), + }; + let is_filtered = filters.contains(&["u_", author.name.as_str()].concat()); + + // Many subreddits have a default comment posted about the sub's rules etc. + // Many Redlib users do not wish to see this kind of comment by default. + // Reddit does not tell us which users are "bots", so a good heuristic is to + // collapse stickied moderator comments. 
+ let is_moderator_comment = data["distinguished"].as_str().unwrap_or_default() == "moderator"; + let is_stickied = data["stickied"].as_bool().unwrap_or_default(); + let collapsed = (is_moderator_comment && is_stickied) || is_filtered; + + Comment { + id, + kind, + parent_id: parent_info[1].to_string(), + parent_kind: parent_info[0].to_string(), + post_link: post_link.to_string(), + post_author: post_author.to_string(), + body, + author, + score: if data["score_hidden"].as_bool().unwrap_or_default() { + ("\u{2022}".to_string(), "Hidden".to_string()) + } else { + format_num(score) + }, + rel_time, + created, + edited, + replies, + highlighted, + awards, + collapsed, + is_filtered, + more_count, + prefs: Preferences::new(req), + } +} diff --git a/src/search.rs b/src/search.rs index 35c0f96..88dcfdd 100644 --- a/src/search.rs +++ b/src/search.rs @@ -1,14 +1,16 @@ +#![allow(clippy::cmp_owned)] + // CRATES use crate::utils::{self, catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences}; use crate::{ client::json, + server::RequestExt, subreddit::{can_access_quarantine, quarantine}, - RequestExt, }; -use askama::Template; use hyper::{Body, Request, Response}; use once_cell::sync::Lazy; use regex::Regex; +use rinja::Template; // STRUCTS struct SearchParams { @@ -60,16 +62,25 @@ pub async fn find(req: Request) -> Result, String> { } else { "" }; - let path = format!("{}.json?{}{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results); + let uri_path = req.uri().path().replace("+", "%2B"); + let path = format!("{}.json?{}{}&raw_json=1", uri_path, req.uri().query().unwrap_or_default(), nsfw_results); let mut query = param(&path, "q").unwrap_or_default(); query = REDDIT_URL_MATCH.replace(&query, "").to_string(); if query.is_empty() { - return Ok(redirect("/".to_string())); + return Ok(redirect("/")); } - if query.starts_with("r/") { - return Ok(redirect(format!("/{}", query))); + if query.starts_with("r/") || query.starts_with("user/") { + return Ok(redirect(&format!("/{query}"))); + } + + if query.starts_with("R/") { + return Ok(redirect(&format!("/r{}", &query[1..]))); + } + + if query.starts_with("u/") || query.starts_with("U/") { + return Ok(redirect(&format!("/user{}", &query[1..]))); } let sub = req.param("sub").unwrap_or_default(); @@ -97,7 +108,7 @@ pub async fn find(req: Request) -> Result, String> { // If all requested subs are filtered, we don't need to fetch posts. 
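// Illustrative sketch (hypothetical helper mirroring the redirect branches added
// above in find(), not from the Redlib source): how r/, R/, u/, U/ and user/ query
// prefixes map to redirect targets.
fn search_redirect_target(query: &str) -> Option<String> {
	if query.starts_with("r/") || query.starts_with("user/") {
		Some(format!("/{query}"))
	} else if query.starts_with("R/") {
		Some(format!("/r{}", &query[1..]))
	} else if query.starts_with("u/") || query.starts_with("U/") {
		Some(format!("/user{}", &query[1..]))
	} else {
		None
	}
}
// e.g. "R/rust" normalizes to "/r/rust" and "U/spez" to "/user/spez".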
if sub.split('+').all(|s| filters.contains(s)) { - template(SearchTemplate { + Ok(template(&SearchTemplate { posts: Vec::new(), subreddits, sub, @@ -106,7 +117,7 @@ pub async fn find(req: Request) -> Result, String> { sort, t: param(&path, "t").unwrap_or_default(), before: param(&path, "after").unwrap_or_default(), - after: "".to_string(), + after: String::new(), restrict_sr: param(&path, "restrict_sr").unwrap_or_default(), typed, }, @@ -116,14 +127,14 @@ pub async fn find(req: Request) -> Result, String> { all_posts_filtered: false, all_posts_hidden_nsfw: false, no_posts: false, - }) + })) } else { match Post::fetch(&path, quarantined).await { Ok((mut posts, after)) => { let (_, all_posts_filtered) = filter_posts(&mut posts, &filters); let no_posts = posts.is_empty(); let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on"); - template(SearchTemplate { + Ok(template(&SearchTemplate { posts, subreddits, sub, @@ -142,14 +153,14 @@ pub async fn find(req: Request) -> Result, String> { all_posts_filtered, all_posts_hidden_nsfw, no_posts, - }) + })) } Err(msg) => { if msg == "quarantined" || msg == "gated" { let sub = req.param("sub").unwrap_or_default(); - quarantine(req, sub, msg) + Ok(quarantine(&req, sub, &msg)) } else { - error(req, msg).await + error(req, &msg).await } } } @@ -158,7 +169,7 @@ pub async fn find(req: Request) -> Result, String> { async fn search_subreddits(q: &str, typed: &str) -> Vec { let limit = if typed == "sr_user" { "50" } else { "3" }; - let subreddit_search_path = format!("/subreddits/search.json?q={}&limit={}", q.replace(' ', "+"), limit); + let subreddit_search_path = format!("/subreddits/search.json?q={}&limit={limit}", q.replace(' ', "+")); // Send a request to the url json(subreddit_search_path, false).await.unwrap_or_default()["data"]["children"] diff --git a/src/server.rs b/src/server.rs index 501b933..a287de2 100644 --- a/src/server.rs +++ b/src/server.rs @@ -1,3 +1,6 @@ +#![allow(dead_code)] +#![allow(clippy::cmp_owned)] + use brotli::enc::{BrotliCompress, BrotliEncoderParams}; use cached::proc_macro::cached; use cookie::Cookie; @@ -15,13 +18,14 @@ use libflate::gzip; use route_recognizer::{Params, Router}; use std::{ cmp::Ordering, + fmt::Display, io, pin::Pin, result::Result, str::{from_utf8, Split}, string::ToString, }; -use time::Duration; +use time::OffsetDateTime; use crate::dbg_msg; @@ -46,11 +50,11 @@ impl CompressionType { /// Returns a `CompressionType` given a content coding /// in [RFC 7231](https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.4) /// format. - fn parse(s: &str) -> Option { + fn parse(s: &str) -> Option { let c = match s { // Compressors we support. - "gzip" => CompressionType::Gzip, - "br" => CompressionType::Brotli, + "gzip" => Self::Gzip, + "br" => Self::Brotli, // The wildcard means that we can choose whatever // compression we prefer. 
In this case, use the @@ -65,12 +69,12 @@ impl CompressionType { } } -impl ToString for CompressionType { - fn to_string(&self) -> String { +impl Display for CompressionType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - CompressionType::Gzip => "gzip".to_string(), - CompressionType::Brotli => "br".to_string(), - _ => String::new(), + Self::Gzip => write!(f, "gzip"), + Self::Brotli => write!(f, "br"), + Self::Passthrough => Ok(()), } } } @@ -104,13 +108,13 @@ pub trait RequestExt { fn params(&self) -> Params; fn param(&self, name: &str) -> Option; fn set_params(&mut self, params: Params) -> Option; - fn cookies(&self) -> Vec; - fn cookie(&self, name: &str) -> Option; + fn cookies(&self) -> Vec>; + fn cookie(&self, name: &str) -> Option>; } pub trait ResponseExt { - fn cookies(&self) -> Vec; - fn insert_cookie(&mut self, cookie: Cookie); + fn cookies(&self) -> Vec>; + fn insert_cookie(&mut self, cookie: Cookie<'_>); fn remove_cookie(&mut self, name: String); } @@ -131,83 +135,87 @@ impl RequestExt for Request { self.extensions_mut().insert(params) } - fn cookies(&self) -> Vec { + fn cookies(&self) -> Vec> { self.headers().get("Cookie").map_or(Vec::new(), |header| { header .to_str() .unwrap_or_default() .split("; ") - .map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named(""))) + .map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::from(""))) .collect() }) } - fn cookie(&self, name: &str) -> Option { + fn cookie(&self, name: &str) -> Option> { self.cookies().into_iter().find(|c| c.name() == name) } } impl ResponseExt for Response { - fn cookies(&self) -> Vec { + fn cookies(&self) -> Vec> { self.headers().get("Cookie").map_or(Vec::new(), |header| { header .to_str() .unwrap_or_default() .split("; ") - .map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named(""))) + .map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::from(""))) .collect() }) } - fn insert_cookie(&mut self, cookie: Cookie) { + fn insert_cookie(&mut self, cookie: Cookie<'_>) { if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) { self.headers_mut().append("Set-Cookie", val); } } fn remove_cookie(&mut self, name: String) { - let mut cookie = Cookie::named(name); - cookie.set_path("/"); - cookie.set_max_age(Duration::seconds(1)); - if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) { + let removal_cookie = Cookie::build(name).path("/").http_only(true).expires(OffsetDateTime::now_utc()); + if let Ok(val) = header::HeaderValue::from_str(&removal_cookie.to_string()) { self.headers_mut().append("Set-Cookie", val); } } } impl Route<'_> { - fn method(&mut self, method: Method, dest: fn(Request) -> BoxResponse) -> &mut Self { + fn method(&mut self, method: &Method, dest: fn(Request) -> BoxResponse) -> &mut Self { self.router.add(&format!("/{}{}", method.as_str(), self.path), dest); self } /// Add an endpoint for `GET` requests pub fn get(&mut self, dest: fn(Request) -> BoxResponse) -> &mut Self { - self.method(Method::GET, dest) + self.method(&Method::GET, dest) } /// Add an endpoint for `POST` requests pub fn post(&mut self, dest: fn(Request) -> BoxResponse) -> &mut Self { - self.method(Method::POST, dest) + self.method(&Method::POST, dest) + } +} + +impl Default for Server { + fn default() -> Self { + Self::new() } } impl Server { pub fn new() -> Self { - Server { + Self { default_headers: HeaderMap::new(), router: Router::new(), } } - pub fn at(&mut self, path: &str) -> Route { + pub fn at(&mut self, path: &str) -> 
Route<'_> { Route { path: path.to_owned(), router: &mut self.router, } } - pub fn listen(self, addr: String) -> Boxed> { + pub fn listen(self, addr: &str) -> Boxed> { let make_svc = make_service_fn(move |_conn| { // For correct borrowing, these values need to be borrowed let router = self.router.clone(); @@ -230,8 +238,14 @@ impl Server { path.pop(); } + // Replace HEAD with GET for routing + let (method, is_head) = match req.method() { + &Method::HEAD => (&Method::GET, true), + method => (method, false), + }; + // Match the visited path with an added route - match router.recognize(&format!("/{}{}", req.method().as_str(), path)) { + match router.recognize(&format!("/{}{}", method.as_str(), path)) { // If a route was configured for this path Ok(found) => { let mut parammed = req; @@ -243,29 +257,44 @@ impl Server { match func.await { Ok(mut res) => { res.headers_mut().extend(def_headers); - let _ = compress_response(&req_headers, &mut res).await; + if is_head { + *res.body_mut() = Body::empty(); + } else { + let _ = compress_response(&req_headers, &mut res).await; + } Ok(res) } - Err(msg) => new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await, + Err(msg) => new_boilerplate(def_headers, req_headers, 500, if is_head { Body::empty() } else { Body::from(msg) }).await, } } .boxed() } // If there was a routing error - Err(e) => async move { new_boilerplate(def_headers, req_headers, 404, e.into()).await }.boxed(), + Err(e) => new_boilerplate(def_headers, req_headers, 404, if is_head { Body::empty() } else { e.into() }).boxed(), } })) } }); // Build SocketAddr from provided address - let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {} as address (example format: 0.0.0.0:8080)", addr)); + let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {addr} as address (example format: 0.0.0.0:8080)")); // Bind server to address specified above. Gracefully shut down if CTRL+C is pressed let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async { + #[cfg(windows)] // Wait for the CTRL+C signal tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler"); + + #[cfg(unix)] + { + // Wait for CTRL+C or SIGTERM signals + let mut signal_terminate = tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()).expect("Failed to install SIGTERM signal handler"); + tokio::select! { + _ = tokio::signal::ctrl_c() => (), + _ = signal_terminate.recv() => () + } + } }); server.boxed() @@ -347,14 +376,6 @@ fn determine_compressor(accept_encoding: String) -> Option { impl PartialOrd for CompressorCandidate { fn partial_cmp(&self, other: &Self) -> Option { - // Guard against NAN, both on our end and on the other. - if self.q.is_nan() || other.q.is_nan() { - return None; - }; - - // f64 and CompressionType are ordered, except in the case - // where the f64 is NAN (which we checked against), so we - // can safely return a Some here. Some(self.cmp(other)) } } @@ -379,12 +400,12 @@ fn determine_compressor(accept_encoding: String) -> Option { // This loop reads the requested compressors and keeps track of whichever // one has the highest priority per our heuristic. - for val in accept_encoding.to_string().split(',') { + for val in accept_encoding.split(',') { let mut q: f64 = 1.0; // The compressor and q-value (if the latter is defined) // will be delimited by semicolons. - let mut spl: Split = val.split(';'); + let mut spl: Split<'_, char> = val.split(';'); // Get the compressor. 
For example, in // gzip;q=0.8 @@ -446,10 +467,10 @@ fn determine_compressor(accept_encoding: String) -> Option { }; } - if cur_candidate.q != f64::NEG_INFINITY { - Some(cur_candidate.alg) - } else { + if cur_candidate.q == f64::NEG_INFINITY { None + } else { + Some(cur_candidate.alg) } } @@ -461,16 +482,16 @@ fn determine_compressor(accept_encoding: String) -> Option { /// conditions are met: /// /// 1. the HTTP client requests a compression encoding in the Content-Encoding -/// header (hence the need for the req_headers); +/// header (hence the need for the `req_headers`); /// /// 2. the content encoding corresponds to a compression algorithm we support; /// /// 3. the Media type in the Content-Type response header is text with any /// subtype (e.g. text/plain) or application/json. /// -/// compress_response returns Ok on successful compression, or if not all three +/// `compress_response` returns Ok on successful compression, or if not all three /// conditions above are met. It returns Err if there was a problem decoding -/// any header in either req_headers or res, but res will remain intact. +/// any header in either `req_headers` or res, but res will remain intact. /// /// This function logs errors to stderr, but only in debug mode. No information /// is logged in release builds. @@ -609,7 +630,7 @@ fn compress_body(compressor: CompressionType, body_bytes: Vec) -> Result { + CompressionType::Passthrough => { let msg = "unsupported compressor".to_string(); return Err(msg); } @@ -685,7 +706,7 @@ mod tests { // Perform the compression. if let Err(e) = block_on(compress_response(&req_headers, &mut res)) { - panic!("compress_response(&req_headers, &mut res) => Err(\"{}\")", e); + panic!("compress_response(&req_headers, &mut res) => Err(\"{e}\")"); }; // If the content was compressed, we expect the Content-Encoding @@ -707,7 +728,7 @@ mod tests { // the Response is the same as what with which we start. 
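// Illustrative sketch (hypothetical test, not from the Redlib source): under the
// q-value heuristic above, the supported coding with the highest q wins, so an
// Accept-Encoding of "gzip;q=0.8,br;q=0.9" should select Brotli.
#[test]
fn compressor_priority_sketch() {
	assert!(determine_compressor("gzip;q=0.8,br;q=0.9".to_string()) == Some(CompressionType::Brotli));
}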
let body_vec = match block_on(body::to_bytes(res.body_mut())) { Ok(b) => b.to_vec(), - Err(e) => panic!("{}", e), + Err(e) => panic!("{e}"), }; if expected_encoding == CompressionType::Passthrough { @@ -723,17 +744,17 @@ mod tests { let mut decoder: Box = match expected_encoding { CompressionType::Gzip => match gzip::Decoder::new(&mut body_cursor) { Ok(dgz) => Box::new(dgz), - Err(e) => panic!("{}", e), + Err(e) => panic!("{e}"), }, CompressionType::Brotli => Box::new(BrotliDecompressor::new(body_cursor, expected_lorem_ipsum.len())), - _ => panic!("no decompressor for {}", expected_encoding.to_string()), + _ => panic!("no decompressor for {}", expected_encoding), }; let mut decompressed = Vec::::new(); if let Err(e) = io::copy(&mut decoder, &mut decompressed) { - panic!("{}", e); + panic!("{e}"); }; assert!(decompressed.eq(&expected_lorem_ipsum)); diff --git a/src/settings.rs b/src/settings.rs index 3dd4e45..2efbbba 100644 --- a/src/settings.rs +++ b/src/settings.rs @@ -1,13 +1,18 @@ +#![allow(clippy::cmp_owned)] + use std::collections::HashMap; // CRATES use crate::server::ResponseExt; -use crate::utils::{redirect, template, Preferences}; -use askama::Template; +use crate::subreddit::join_until_size_limit; +use crate::utils::{deflate_decompress, redirect, template, Preferences}; use cookie::Cookie; use futures_lite::StreamExt; use hyper::{Body, Request, Response}; +use rinja::Template; use time::{Duration, OffsetDateTime}; +use tokio::time::timeout; +use url::form_urlencoded; // STRUCTS #[derive(Template)] @@ -19,20 +24,26 @@ struct SettingsTemplate { // CONSTANTS -const PREFS: [&str; 13] = [ +const PREFS: [&str; 19] = [ "theme", "front_page", "layout", "wide", "comment_sort", "post_sort", + "blur_spoiler", "show_nsfw", "blur_nsfw", "use_hls", "hide_hls_notification", "autoplay_videos", + "hide_sidebar_and_summary", + "fixed_navbar", "hide_awards", + "hide_score", "disable_visit_reddit_confirmation", + "video_quality", + "remove_default_feeds", ]; // FUNCTIONS @@ -40,10 +51,10 @@ const PREFS: [&str; 13] = [ // Retrieve cookies from request "Cookie" header pub async fn get(req: Request) -> Result, String> { let url = req.uri().to_string(); - template(SettingsTemplate { + Ok(template(&SettingsTemplate { prefs: Preferences::new(&req), url, - }) + })) } // Set cookies using response "Set-Cookie" header @@ -52,7 +63,7 @@ pub async fn set(req: Request) -> Result, String> { let (parts, mut body) = req.into_parts(); // Grab existing cookies - let _cookies: Vec = parts + let _cookies: Vec> = parts .headers .get_all("Cookie") .iter() @@ -71,16 +82,16 @@ pub async fn set(req: Request) -> Result, String> { let form = url::form_urlencoded::parse(&body_bytes).collect::>(); - let mut response = redirect("/settings".to_string()); + let mut response = redirect("/settings"); for &name in &PREFS { match form.get(name) { Some(value) => response.insert_cookie( - Cookie::build(name.to_owned(), value.clone()) + Cookie::build((name.to_owned(), value.clone())) .path("/") .http_only(true) .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) - .finish(), + .into(), ), None => response.remove_cookie(name.to_string()), }; @@ -94,7 +105,7 @@ fn set_cookies_method(req: Request, remove_cookies: bool) -> Response = parts + let _cookies: Vec> = parts .headers .get_all("Cookie") .iter() @@ -110,16 +121,16 @@ fn set_cookies_method(req: Request, remove_cookies: bool) -> Response "/".to_string(), }; - let mut response = redirect(path); + let mut response = redirect(&path); - for name in [PREFS.to_vec(), 
vec!["subscriptions", "filters"]].concat() { + for name in PREFS { match form.get(name) { Some(value) => response.insert_cookie( - Cookie::build(name.to_owned(), value.clone()) + Cookie::build((name.to_owned(), value.clone())) .path("/") .http_only(true) .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) - .finish(), + .into(), ), None => { if remove_cookies { @@ -129,6 +140,119 @@ fn set_cookies_method(req: Request, remove_cookies: bool) -> Response = subscriptions.expect("Subscriptions").split('+').map(str::to_string).collect(); + + // Start at 0 to keep track of what number we need to start deleting old subscription cookies from + let mut subscriptions_number_to_delete_from = 0; + + // Starting at 0 so we handle the subscription cookie without a number first + for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() { + let subscriptions_cookie = if subscriptions_number == 0 { + "subscriptions".to_string() + } else { + format!("subscriptions{}", subscriptions_number) + }; + + response.insert_cookie( + Cookie::build((subscriptions_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + subscriptions_number_to_delete_from += 1; + } + + // While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}")); + + // Increment subscriptions cookie number + subscriptions_number_to_delete_from += 1; + } + } else { + // Remove unnumbered subscriptions cookie + response.remove_cookie("subscriptions".to_string()); + + // Starts at one to deal with the first numbered subscription cookie and onwards + let mut subscriptions_number_to_delete_from = 1; + + // While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}")); + + // Increment subscriptions cookie number + subscriptions_number_to_delete_from += 1; + } + } + + // If there are filters to restore set them and delete any old filters cookies, otherwise delete them all + if filters.is_some() { + let filters_list: Vec = filters.expect("Filters").split('+').map(str::to_string).collect(); + + // Start at 0 to keep track of what number we need to start deleting old subscription cookies from + let mut filters_number_to_delete_from = 0; + + // Starting at 0 so we handle the subscription cookie without a number first + for (filters_number, list) in join_until_size_limit(&filters_list).into_iter().enumerate() { + let filters_cookie = if filters_number == 0 { + "filters".to_string() + } else { + format!("filters{}", filters_number) + }; + + response.insert_cookie( + Cookie::build((filters_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + filters_number_to_delete_from += 1; + } + + // While filtersNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) { + // Remove that filters cookie + response.remove_cookie(format!("filters{filters_number_to_delete_from}")); + + // Increment 
filters cookie number + filters_number_to_delete_from += 1; + } + } else { + // Remove unnumbered filters cookie + response.remove_cookie("filters".to_string()); + + // Starts at one to deal with the first numbered subscription cookie and onwards + let mut filters_number_to_delete_from = 1; + + // While filtersNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) { + // Remove that sfilters cookie + response.remove_cookie(format!("filters{filters_number_to_delete_from}")); + + // Increment filters cookie number + filters_number_to_delete_from += 1; + } + } + response } @@ -140,3 +264,35 @@ pub async fn restore(req: Request) -> Result, String> { pub async fn update(req: Request) -> Result, String> { Ok(set_cookies_method(req, false)) } + +pub async fn encoded_restore(req: Request) -> Result, String> { + let body = hyper::body::to_bytes(req.into_body()) + .await + .map_err(|e| format!("Failed to get bytes from request body: {}", e))?; + + if body.len() > 1024 * 1024 { + return Err("Request body too large".to_string()); + } + + let encoded_prefs = form_urlencoded::parse(&body) + .find(|(key, _)| key == "encoded_prefs") + .map(|(_, value)| value) + .ok_or_else(|| "encoded_prefs parameter not found in request body".to_string())?; + + let bytes = base2048::decode(&encoded_prefs).ok_or_else(|| "Failed to decode base2048 encoded preferences".to_string())?; + + let out = timeout(std::time::Duration::from_secs(1), async { deflate_decompress(bytes) }) + .await + .map_err(|e| format!("Failed to decompress bytes: {}", e))??; + + let mut prefs: Preferences = timeout(std::time::Duration::from_secs(1), async { bincode::deserialize(&out) }) + .await + .map_err(|e| format!("Failed to deserialize preferences: {}", e))? 
+ .map_err(|e| format!("Failed to deserialize bytes into Preferences struct: {}", e))?; + + prefs.available_themes = vec![]; + + let url = format!("/settings/restore/?{}", prefs.to_urlencoded()?); + + Ok(redirect(&url)) +} diff --git a/src/subreddit.rs b/src/subreddit.rs index e253885..f84cca3 100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -1,11 +1,20 @@ +#![allow(clippy::cmp_owned)] + +use crate::{config, utils}; // CRATES use crate::utils::{ - catch_random, error, filter_posts, format_num, format_url, get_filters, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit, + catch_random, error, filter_posts, format_num, format_url, get_filters, info, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, + Subreddit, }; -use crate::{client::json, server::ResponseExt, RequestExt}; -use askama::Template; +use crate::{client::json, server::RequestExt, server::ResponseExt}; use cookie::Cookie; +use htmlescape::decode_html; use hyper::{Body, Request, Response}; +use rinja::Template; + +use chrono::DateTime; +use once_cell::sync::Lazy; +use regex::Regex; use time::{Duration, OffsetDateTime}; // STRUCTS @@ -49,12 +58,16 @@ struct WallTemplate { url: String, } +static GEO_FILTER_MATCH: Lazy = Lazy::new(|| Regex::new(r"geo_filter=(?\w+)").unwrap()); + // SERVICES pub async fn community(req: Request) -> Result, String> { // Build Reddit API path let root = req.uri().path() == "/"; + let query = req.uri().query().unwrap_or_default().to_string(); let subscribed = setting(&req, "subscriptions"); let front_page = setting(&req, "front_page"); + let remove_default_feeds = setting(&req, "remove_default_feeds") == "on"; let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string()); let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort)); @@ -67,6 +80,21 @@ pub async fn community(req: Request) -> Result, String> { } else { front_page.clone() }); + + if (sub_name == "popular" || sub_name == "all") && remove_default_feeds { + if subscribed.is_empty() { + return info(req, "Subscribe to some subreddits! (Default feeds disabled in settings)").await; + } else { + // If there are subscribed subs, but we get here, then the problem is that front_page pref is set to something besides default. + // Tell user to go to settings and change front page to default. + return info( + req, + "You have subscribed to some subreddits, but your front page is not set to default. Visit settings and change front page to default.", + ) + .await; + } + } + let quarantined = can_access_quarantine(&req, &sub_name) || root; // Handle random subreddits @@ -75,7 +103,7 @@ pub async fn community(req: Request) -> Result, String> { } if req.param("sub").is_some() && sub_name.starts_with("u_") { - return Ok(redirect(["/user/", &sub_name[2..]].concat())); + return Ok(redirect(&["/user/", &sub_name[2..]].concat())); } // Request subreddit metadata @@ -97,24 +125,34 @@ pub async fn community(req: Request) -> Result, String> { } }; + let req_url = req.uri().to_string(); // Return landing page if this post if this is NSFW community but the user // has disabled the display of NSFW content or if the instance is SFW-only. 
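// Illustrative sketch (hypothetical helper condensing the encoded_restore steps shown
// above, not from the Redlib source): base2048 text -> DEFLATE bytes -> Preferences,
// reusing base2048::decode, the crate's deflate_decompress, and bincode::deserialize.
fn decode_encoded_prefs(encoded: &str) -> Result<Preferences, String> {
	let bytes = base2048::decode(encoded).ok_or_else(|| "Failed to decode base2048 encoded preferences".to_string())?;
	let out = deflate_decompress(bytes)?;
	bincode::deserialize::<Preferences>(&out).map_err(|e| format!("Failed to deserialize bytes into Preferences struct: {}", e))
}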
- if sub.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) { - return Ok(nsfw_landing(req).await.unwrap_or_default()); + if sub.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) { + return Ok(nsfw_landing(req, req_url).await.unwrap_or_default()); } - let path = format!("/r/{}/{}.json?{}&raw_json=1", sub_name.clone(), sort, req.uri().query().unwrap_or_default()); + let mut params = String::from("&raw_json=1"); + if sub_name == "popular" { + let geo_filter = match GEO_FILTER_MATCH.captures(&query) { + Some(geo_filter) => geo_filter["region"].to_string(), + None => "GLOBAL".to_owned(), + }; + params.push_str(&format!("&geo_filter={geo_filter}")); + } + + let path = format!("/r/{}/{sort}.json?{}{params}", sub_name.replace('+', "%2B"), req.uri().query().unwrap_or_default()); let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str())); let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B"); let filters = get_filters(&req); // If all requested subs are filtered, we don't need to fetch posts. if sub_name.split('+').all(|s| filters.contains(s)) { - template(SubredditTemplate { + Ok(template(&SubredditTemplate { sub, posts: Vec::new(), sort: (sort, param(&path, "t").unwrap_or_default()), - ends: (param(&path, "after").unwrap_or_default(), "".to_string()), + ends: (param(&path, "after").unwrap_or_default(), String::new()), prefs: Preferences::new(&req), url, redirect_url, @@ -122,14 +160,18 @@ pub async fn community(req: Request) -> Result, String> { all_posts_filtered: false, all_posts_hidden_nsfw: false, no_posts: false, - }) + })) } else { match Post::fetch(&path, quarantined).await { Ok((mut posts, after)) => { let (_, all_posts_filtered) = filter_posts(&mut posts, &filters); let no_posts = posts.is_empty(); let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on"); - template(SubredditTemplate { + if sort == "new" { + posts.sort_by(|a, b| b.created_ts.cmp(&a.created_ts)); + posts.sort_by(|a, b| b.flags.stickied.cmp(&a.flags.stickied)); + } + Ok(template(&SubredditTemplate { sub, posts, sort: (sort, param(&path, "t").unwrap_or_default()), @@ -141,46 +183,44 @@ pub async fn community(req: Request) -> Result, String> { all_posts_filtered, all_posts_hidden_nsfw, no_posts, - }) + })) } Err(msg) => match msg.as_str() { - "quarantined" | "gated" => quarantine(req, sub_name, msg), - "private" => error(req, format!("r/{} is a private community", sub_name)).await, - "banned" => error(req, format!("r/{} has been banned from Reddit", sub_name)).await, - _ => error(req, msg).await, + "quarantined" | "gated" => Ok(quarantine(&req, sub_name, &msg)), + "private" => error(req, &format!("r/{sub_name} is a private community")).await, + "banned" => error(req, &format!("r/{sub_name} has been banned from Reddit")).await, + _ => error(req, &msg).await, }, } } } -pub fn quarantine(req: Request, sub: String, restriction: String) -> Result, String> { +pub fn quarantine(req: &Request, sub: String, restriction: &str) -> Response { let wall = WallTemplate { - title: format!("r/{} is {}", sub, restriction), + title: format!("r/{sub} is {restriction}"), msg: "Please click the button below to continue to this subreddit.".to_string(), url: req.uri().to_string(), sub, - prefs: Preferences::new(&req), + prefs: Preferences::new(req), }; - Ok( - Response::builder() - .status(403) - .header("content-type", "text/html") - .body(wall.render().unwrap_or_default().into()) - 
.unwrap_or_default(), - ) + Response::builder() + .status(403) + .header("content-type", "text/html") + .body(wall.render().unwrap_or_default().into()) + .unwrap_or_default() } pub async fn add_quarantine_exception(req: Request) -> Result, String> { let subreddit = req.param("sub").ok_or("Invalid URL")?; let redir = param(&format!("?{}", req.uri().query().unwrap_or_default()), "redir").ok_or("Invalid URL")?; - let mut response = redirect(redir); + let mut response = redirect(&redir); response.insert_cookie( - Cookie::build(&format!("allow_quaran_{}", subreddit.to_lowercase()), "true") + Cookie::build((&format!("allow_quaran_{}", subreddit.to_lowercase()), "true")) .path("/") .http_only(true) .expires(cookie::Expiration::Session) - .finish(), + .into(), ); Ok(response) } @@ -190,6 +230,41 @@ pub fn can_access_quarantine(req: &Request, sub: &str) -> bool { setting(req, &format!("allow_quaran_{}", sub.to_lowercase())).parse().unwrap_or_default() } +// Join items in chunks of 4000 bytes in length for cookies +pub fn join_until_size_limit(vec: &[T]) -> Vec { + let mut result = Vec::new(); + let mut list = String::new(); + let mut current_size = 0; + + for item in vec { + // Size in bytes + let item_size = item.to_string().len(); + // Use 4000 bytes to leave us some headroom because the name and options of the cookie count towards the 4096 byte cap + if current_size + item_size > 4000 { + // If last item add a seperator on the end of the list so it's interpreted properly in tanden with the next cookie + list.push('+'); + + // Push current list to result vector + result.push(list); + + // Reset the list variable so we can continue with only new items + list = String::new(); + } + // Add separator if not the first item + if !list.is_empty() { + list.push('+'); + } + // Add current item to list + list.push_str(&item.to_string()); + current_size = list.len() + item_size; + } + // Make sure to push whatever the remaining subreddits are there into the result vector + result.push(list); + + // Return resulting vector + result +} + // Sub, filter, unfilter, or unsub by setting subscription cookie using response "Set-Cookie" header pub async fn subscriptions_filters(req: Request) -> Result, String> { let sub = req.param("sub").unwrap_or_default(); @@ -199,9 +274,8 @@ pub async fn subscriptions_filters(req: Request) -> Result, if sub == "random" || sub == "randnsfw" { if action.contains(&"filter".to_string()) || action.contains(&"unfilter".to_string()) { return Err("Can't filter random subreddit!".to_string()); - } else { - return Err("Can't subscribe to random subreddit!".to_string()); } + return Err("Can't subscribe to random subreddit!".to_string()); } let query = req.uri().query().unwrap_or_default().to_string(); @@ -211,19 +285,23 @@ pub async fn subscriptions_filters(req: Request) -> Result, let mut filters = preferences.filters; // Retrieve list of posts for these subreddits to extract display names - let posts = json(format!("/r/{}/hot.json?raw_json=1", sub), true).await?; - let display_lookup: Vec<(String, &str)> = posts["data"]["children"] - .as_array() - .map(|list| { - list - .iter() - .map(|post| { - let display_name = post["data"]["subreddit"].as_str().unwrap_or_default(); - (display_name.to_lowercase(), display_name) - }) - .collect::>() - }) - .unwrap_or_default(); + + let posts = json(format!("/r/{sub}/hot.json?raw_json=1"), true).await; + let display_lookup: Vec<(String, &str)> = match &posts { + Ok(posts) => posts["data"]["children"] + .as_array() + .map(|list| { + list + .iter() + 
.map(|post| { + let display_name = post["data"]["subreddit"].as_str().unwrap_or_default(); + (display_name.to_lowercase(), display_name) + }) + .collect::>() + }) + .unwrap_or_default(), + Err(_) => vec![], + }; // Find each subreddit name (separated by '+') in sub parameter for part in sub.split('+').filter(|x| x != &"") { @@ -236,9 +314,13 @@ pub async fn subscriptions_filters(req: Request) -> Result, display } else { // This subreddit display name isn't known, retrieve it - let path: String = format!("/r/{}/about.json?raw_json=1", part); - display = json(path, true).await?; - display["data"]["display_name"].as_str().ok_or_else(|| "Failed to query subreddit name".to_string())? + let path: String = format!("/r/{part}/about.json?raw_json=1"); + display = json(path, true).await; + match &display { + Ok(display) => display["data"]["display_name"].as_str(), + Err(_) => None, + } + .unwrap_or(part) }; // Modify sub list based on action @@ -267,36 +349,109 @@ pub async fn subscriptions_filters(req: Request) -> Result, // Redirect back to subreddit // check for redirect parameter if unsubscribing/unfiltering from outside sidebar - let path = if let Some(redirect_path) = param(&format!("?{}", query), "redirect") { - format!("/{}", redirect_path) + let path = if let Some(redirect_path) = param(&format!("?{query}"), "redirect") { + format!("/{redirect_path}") } else { - format!("/r/{}", sub) + format!("/r/{sub}") }; - let mut response = redirect(path); + let mut response = redirect(&path); - // Delete cookie if empty, else set + // If sub_list is empty remove all subscriptions cookies, otherwise update them and remove old ones if sub_list.is_empty() { + // Remove subscriptions cookie response.remove_cookie("subscriptions".to_string()); + + // Start with first numbered subscriptions cookie + let mut subscriptions_number = 1; + + // While whatever subscriptionsNUMBER cookie we're looking at has a value + while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{}", subscriptions_number)); + + // Increment subscriptions cookie number + subscriptions_number += 1; + } } else { - response.insert_cookie( - Cookie::build("subscriptions", sub_list.join("+")) - .path("/") - .http_only(true) - .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) - .finish(), - ); + // Start at 0 to keep track of what number we need to start deleting old subscription cookies from + let mut subscriptions_number_to_delete_from = 0; + + // Starting at 0 so we handle the subscription cookie without a number first + for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() { + let subscriptions_cookie = if subscriptions_number == 0 { + "subscriptions".to_string() + } else { + format!("subscriptions{}", subscriptions_number) + }; + + response.insert_cookie( + Cookie::build((subscriptions_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + subscriptions_number_to_delete_from += 1; + } + + // While whatever subscriptionsNUMBER cookie we're looking at has a value + while req.cookie(&format!("subscriptions{}", subscriptions_number_to_delete_from)).is_some() { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{}", subscriptions_number_to_delete_from)); + + // Increment subscriptions cookie number + subscriptions_number_to_delete_from += 1; + } } + + // If filters is empty remove all 
filters cookies, otherwise update them and remove old ones if filters.is_empty() { + // Remove filters cookie response.remove_cookie("filters".to_string()); + + // Start with first numbered filters cookie + let mut filters_number = 1; + + // While whatever filtersNUMBER cookie we're looking at has a value + while req.cookie(&format!("filters{}", filters_number)).is_some() { + // Remove that filters cookie + response.remove_cookie(format!("filters{}", filters_number)); + + // Increment filters cookie number + filters_number += 1; + } } else { - response.insert_cookie( - Cookie::build("filters", filters.join("+")) - .path("/") - .http_only(true) - .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) - .finish(), - ); + // Start at 0 to keep track of what number we need to start deleting old filters cookies from + let mut filters_number_to_delete_from = 0; + + for (filters_number, list) in join_until_size_limit(&filters).into_iter().enumerate() { + let filters_cookie = if filters_number == 0 { + "filters".to_string() + } else { + format!("filters{}", filters_number) + }; + + response.insert_cookie( + Cookie::build((filters_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + filters_number_to_delete_from += 1; + } + + // While whatever filtersNUMBER cookie we're looking at has a value + while req.cookie(&format!("filters{}", filters_number_to_delete_from)).is_some() { + // Remove that filters cookie + response.remove_cookie(format!("filters{}", filters_number_to_delete_from)); + + // Increment filters cookie number + filters_number_to_delete_from += 1; + } } Ok(response) @@ -311,22 +466,22 @@ pub async fn wiki(req: Request) -> Result, String> { } let page = req.param("page").unwrap_or_else(|| "index".to_string()); - let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page); + let path: String = format!("/r/{sub}/wiki/{page}.json?raw_json=1"); let url = req.uri().to_string(); match json(path, quarantined).await { - Ok(response) => template(WikiTemplate { + Ok(response) => Ok(template(&WikiTemplate { sub, wiki: rewrite_urls(response["data"]["content_html"].as_str().unwrap_or("

<h3>Wiki not found</h3>

")), page, prefs: Preferences::new(&req), url, - }), + })), Err(msg) => { if msg == "quarantined" || msg == "gated" { - quarantine(req, sub, msg) + Ok(quarantine(&req, sub, &msg)) } else { - error(req, msg).await + error(req, &msg).await } } } @@ -342,13 +497,13 @@ pub async fn sidebar(req: Request) -> Result, String> { } // Build the Reddit JSON API url - let path: String = format!("/r/{}/about.json?raw_json=1", sub); + let path: String = format!("/r/{sub}/about.json?raw_json=1"); let url = req.uri().to_string(); // Send a request to the url match json(path, quarantined).await { // If success, receive JSON in response - Ok(response) => template(WikiTemplate { + Ok(response) => Ok(template(&WikiTemplate { wiki: rewrite_urls(&val(&response, "description_html")), // wiki: format!( // "{}

<hr><h3>Moderators</h3><br><ul>{}</ul>
", @@ -359,12 +514,12 @@ pub async fn sidebar(req: Request) -> Result, String> { page: "Sidebar".to_string(), prefs: Preferences::new(&req), url, - }), + })), Err(msg) => { if msg == "quarantined" || msg == "gated" { - quarantine(req, sub, msg) + Ok(quarantine(&req, sub, &msg)) } else { - error(req, msg).await + error(req, &msg).await } } } @@ -407,7 +562,7 @@ pub async fn sidebar(req: Request) -> Result, String> { // SUBREDDIT async fn subreddit(sub: &str, quarantined: bool) -> Result { // Build the Reddit JSON API url - let path: String = format!("/r/{}/about.json?raw_json=1", sub); + let path: String = format!("/r/{sub}/about.json?raw_json=1"); // Send a request to the url let res = json(path, quarantined).await?; @@ -433,3 +588,73 @@ async fn subreddit(sub: &str, quarantined: bool) -> Result { nsfw: res["data"]["over18"].as_bool().unwrap_or_default(), }) } + +pub async fn rss(req: Request) -> Result, String> { + if config::get_setting("REDLIB_ENABLE_RSS").is_none() { + return Ok(error(req, "RSS is disabled on this instance.").await.unwrap_or_default()); + } + + use hyper::header::CONTENT_TYPE; + use rss::{ChannelBuilder, Item}; + + // Get subreddit + let sub = req.param("sub").unwrap_or_default(); + let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string()); + let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort)); + + // Get path + let path = format!("/r/{sub}/{sort}.json?{}", req.uri().query().unwrap_or_default()); + + // Get subreddit data + let subreddit = subreddit(&sub, false).await?; + + // Get posts + let (posts, _) = Post::fetch(&path, false).await?; + + // Build the RSS feed + let channel = ChannelBuilder::default() + .title(&subreddit.title) + .description(&subreddit.description) + .items( + posts + .into_iter() + .map(|post| Item { + title: Some(post.title.to_string()), + link: Some(format_url(&utils::get_post_url(&post))), + author: Some(post.author.name), + content: Some(rewrite_urls(&decode_html(&post.body).unwrap())), + pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()), + description: Some(format!( + "Comments", + config::get_setting("REDLIB_FULL_URL").unwrap_or_default(), + post.permalink + )), + ..Default::default() + }) + .collect::>(), + ) + .build(); + + // Serialize the feed to RSS + let body = channel.to_string().into_bytes(); + + // Create the HTTP response + let mut res = Response::new(Body::from(body)); + res.headers_mut().insert(CONTENT_TYPE, hyper::header::HeaderValue::from_static("application/rss+xml")); + + Ok(res) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_fetching_subreddit() { + let subreddit = subreddit("rust", false).await; + assert!(subreddit.is_ok()); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_gated_and_quarantined() { + let quarantined = subreddit("edgy", true).await; + assert!(quarantined.is_ok()); + let gated = subreddit("drugs", true).await; + assert!(gated.is_ok()); +} diff --git a/src/user.rs b/src/user.rs index 53f4090..592389d 100644 --- a/src/user.rs +++ b/src/user.rs @@ -1,9 +1,14 @@ +#![allow(clippy::cmp_owned)] + // CRATES use crate::client::json; use crate::server::RequestExt; use crate::utils::{error, filter_posts, format_url, get_filters, nsfw_landing, param, setting, template, Post, Preferences, User}; -use askama::Template; +use crate::{config, utils}; +use chrono::DateTime; +use htmlescape::decode_html; use hyper::{Body, Request, Response}; +use rinja::Template; use 
time::{macros::format_description, OffsetDateTime}; // STRUCTS @@ -35,35 +40,35 @@ pub async fn profile(req: Request) -> Result, String> { // Build the Reddit JSON API path let path = format!( - "/user/{}/{}.json?{}&raw_json=1", + "/user/{}/{listing}.json?{}&raw_json=1", req.param("name").unwrap_or_else(|| "reddit".to_string()), - listing, req.uri().query().unwrap_or_default(), ); let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str())); let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26"); - // Retrieve other variables from Libreddit request + // Retrieve other variables from Redlib request let sort = param(&path, "sort").unwrap_or_default(); let username = req.param("name").unwrap_or_default(); // Retrieve info from user about page. let user = user(&username).await.unwrap_or_default(); + let req_url = req.uri().to_string(); // Return landing page if this post if this Reddit deems this user NSFW, // but we have also disabled the display of NSFW content or if the instance // is SFW-only. - if user.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) { - return Ok(nsfw_landing(req).await.unwrap_or_default()); + if user.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) { + return Ok(nsfw_landing(req, req_url).await.unwrap_or_default()); } let filters = get_filters(&req); if filters.contains(&["u_", &username].concat()) { - template(UserTemplate { + Ok(template(&UserTemplate { user, posts: Vec::new(), sort: (sort, param(&path, "t").unwrap_or_default()), - ends: (param(&path, "after").unwrap_or_default(), "".to_string()), + ends: (param(&path, "after").unwrap_or_default(), String::new()), listing, prefs: Preferences::new(&req), url, @@ -72,7 +77,7 @@ pub async fn profile(req: Request) -> Result, String> { all_posts_filtered: false, all_posts_hidden_nsfw: false, no_posts: false, - }) + })) } else { // Request user posts/comments from Reddit match Post::fetch(&path, false).await { @@ -80,7 +85,7 @@ pub async fn profile(req: Request) -> Result, String> { let (_, all_posts_filtered) = filter_posts(&mut posts, &filters); let no_posts = posts.is_empty(); let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on"); - template(UserTemplate { + Ok(template(&UserTemplate { user, posts, sort: (sort, param(&path, "t").unwrap_or_default()), @@ -93,10 +98,10 @@ pub async fn profile(req: Request) -> Result, String> { all_posts_filtered, all_posts_hidden_nsfw, no_posts, - }) + })) } // If there is an error show error page - Err(msg) => error(req, msg).await, + Err(msg) => error(req, &msg).await, } } } @@ -104,7 +109,7 @@ pub async fn profile(req: Request) -> Result, String> { // USER async fn user(name: &str) -> Result { // Build the Reddit JSON API path - let path: String = format!("/user/{}/about.json?raw_json=1", name); + let path: String = format!("/user/{name}/about.json?raw_json=1"); // Send a request to the url json(path, false).await.map(|res| { @@ -128,3 +133,61 @@ async fn user(name: &str) -> Result { } }) } + +pub async fn rss(req: Request) -> Result, String> { + if config::get_setting("REDLIB_ENABLE_RSS").is_none() { + return Ok(error(req, "RSS is disabled on this instance.").await.unwrap_or_default()); + } + use crate::utils::rewrite_urls; + use hyper::header::CONTENT_TYPE; + use rss::{ChannelBuilder, Item}; + + // Get user + let user_str = req.param("name").unwrap_or_default(); + + let listing = req.param("listing").unwrap_or_else(|| "overview".to_string()); + + // Get 
path + let path = format!("/user/{user_str}/{listing}.json?{}&raw_json=1", req.uri().query().unwrap_or_default(),); + + // Get user + let user_obj = user(&user_str).await.unwrap_or_default(); + + // Get posts + let (posts, _) = Post::fetch(&path, false).await?; + + // Build the RSS feed + let channel = ChannelBuilder::default() + .title(user_str) + .description(user_obj.description) + .items( + posts + .into_iter() + .map(|post| Item { + title: Some(post.title.to_string()), + link: Some(format_url(&utils::get_post_url(&post))), + author: Some(post.author.name), + pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()), + content: Some(rewrite_urls(&decode_html(&post.body).unwrap())), + ..Default::default() + }) + .collect::>(), + ) + .build(); + + // Serialize the feed to RSS + let body = channel.to_string().into_bytes(); + + // Create the HTTP response + let mut res = Response::new(Body::from(body)); + res.headers_mut().insert(CONTENT_TYPE, hyper::header::HeaderValue::from_static("application/rss+xml")); + + Ok(res) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_fetching_user() { + let user = user("spez").await; + assert!(user.is_ok()); + assert!(user.unwrap().karma > 100); +} diff --git a/src/utils.rs b/src/utils.rs index e6cb2f7..f5046cb 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,16 +1,28 @@ +#![allow(dead_code)] +#![allow(clippy::cmp_owned)] + +use crate::config::{self, get_setting}; // // CRATES // use crate::{client::json, server::RequestExt}; -use askama::Template; use cookie::Cookie; use hyper::{Body, Request, Response}; +use libflate::deflate::{Decoder, Encoder}; +use log::error; +use once_cell::sync::Lazy; use regex::Regex; +use revision::revisioned; +use rinja::Template; use rust_embed::RustEmbed; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_json::Value; +use serde_json_path::{JsonPath, JsonPathExt}; use std::collections::{HashMap, HashSet}; use std::env; +use std::io::{Read, Write}; use std::str::FromStr; +use std::string::ToString; use time::{macros::format_description, Duration, OffsetDateTime}; use url::Url; @@ -40,6 +52,7 @@ pub enum ResourceType { } // Post flair with content, background color and foreground color +#[derive(Serialize)] pub struct Flair { pub flair_parts: Vec, pub text: String, @@ -48,7 +61,7 @@ pub struct Flair { } // Part of flair, either emoji or text -#[derive(Clone)] +#[derive(Clone, Serialize)] pub struct FlairPart { pub flair_part_type: String, pub value: String, @@ -90,25 +103,86 @@ impl FlairPart { } } +#[derive(Serialize)] pub struct Author { pub name: String, pub flair: Flair, pub distinguished: String, } +#[derive(Serialize)] +pub struct Poll { + pub poll_options: Vec, + pub voting_end_timestamp: (String, String), + pub total_vote_count: u64, +} + +impl Poll { + pub fn parse(poll_data: &Value) -> Option { + poll_data.as_object()?; + + let total_vote_count = poll_data["total_vote_count"].as_u64()?; + // voting_end_timestamp is in the format of milliseconds + let voting_end_timestamp = time(poll_data["voting_end_timestamp"].as_f64()? 
/ 1000.0); + let poll_options = PollOption::parse(&poll_data["options"])?; + + Some(Self { + poll_options, + voting_end_timestamp, + total_vote_count, + }) + } + + pub fn most_votes(&self) -> u64 { + self.poll_options.iter().filter_map(|o| o.vote_count).max().unwrap_or(0) + } +} + +#[derive(Serialize)] +pub struct PollOption { + pub id: u64, + pub text: String, + pub vote_count: Option, +} + +impl PollOption { + pub fn parse(options: &Value) -> Option> { + Some( + options + .as_array()? + .iter() + .filter_map(|option| { + // For each poll option + + // we can't just use as_u64() because "id": String("...") and serde would parse it as None + let id = option["id"].as_str()?.parse::().ok()?; + let text = option["text"].as_str()?.to_owned(); + let vote_count = option["vote_count"].as_u64(); + + // Construct PollOption items + Some(Self { id, text, vote_count }) + }) + .collect::>(), + ) + } +} + // Post flags with nsfw and stickied +#[derive(Serialize)] pub struct Flags { + pub spoiler: bool, pub nsfw: bool, pub stickied: bool, } -#[derive(Debug)] +#[derive(Debug, Serialize)] pub struct Media { pub url: String, pub alt_url: String, pub width: i64, pub height: i64, pub poster: String, + pub download_name: String, } impl Media { @@ -163,6 +237,17 @@ impl Media { gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]); ("gallery", &data["url"], None) + } else if data["crosspost_parent_list"][0]["is_gallery"].as_bool().unwrap_or_default() { + // If this post contains a gallery of images + gallery = GalleryMedia::parse( + &data["crosspost_parent_list"][0]["gallery_data"]["items"], + &data["crosspost_parent_list"][0]["media_metadata"], + ); + + ("gallery", &data["url"], None) + } else if data["is_reddit_media_domain"].as_bool().unwrap_or_default() && data["domain"] == "i.redd.it" { + // If this post contains a reddit media (image) URL. + ("image", &data["url"], None) } else { // If type can't be determined, return url ("link", &data["url"], None) @@ -172,20 +257,33 @@ impl Media { let alt_url = alt_url_val.map_or(String::new(), |val| format_url(val.as_str().unwrap_or_default())); + let download_name = if post_type == "image" || post_type == "gif" || post_type == "video" { + let permalink_base = url_path_basename(data["permalink"].as_str().unwrap_or_default()); + let media_url_base = url_path_basename(url_val.as_str().unwrap_or_default()); + + format!("redlib_{permalink_base}_{media_url_base}") + } else { + String::new() + }; + ( post_type.to_string(), Self { url: format_url(url_val.as_str().unwrap_or_default()), alt_url, + // Note: in the data["is_reddit_media_domain"] path above + // width and height will be 0. 
width: source["width"].as_i64().unwrap_or_default(), height: source["height"].as_i64().unwrap_or_default(), poster: format_url(source["url"].as_str().unwrap_or_default()), + download_name, }, gallery, ) } } +#[derive(Serialize)] pub struct GalleryMedia { pub url: String, pub width: i64, @@ -226,6 +324,7 @@ impl GalleryMedia { } // Post containing content, metadata and media +#[derive(Serialize)] pub struct Post { pub id: String, pub title: String, @@ -233,6 +332,8 @@ pub struct Post { pub body: String, pub author: Author, pub permalink: String, + pub link_title: String, + pub poll: Option, pub score: (String, String), pub upvote_ratio: i64, pub post_type: String, @@ -243,11 +344,14 @@ pub struct Post { pub domain: String, pub rel_time: String, pub created: String, + pub created_ts: u64, pub num_duplicates: u64, pub comments: (String, String), pub gallery: Vec, pub awards: Awards, pub nsfw: bool, + pub out_url: Option, + pub ws_url: String, } impl Post { @@ -262,9 +366,8 @@ impl Post { }; // Fetch the list of posts from the JSON response - let post_list = match res["data"]["children"].as_array() { - Some(list) => list, - None => return Err("No posts found".to_string()), + let Some(post_list) = res["data"]["children"].as_array() else { + return Err("No posts found".to_string()); }; let mut posts: Vec = Vec::new(); @@ -274,6 +377,7 @@ impl Post { let data = &post["data"]; let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default()); + let created_ts = data["created_utc"].as_f64().unwrap_or_default().round() as u64; let score = data["score"].as_i64().unwrap_or_default(); let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0; let title = val(post, "title"); @@ -319,7 +423,8 @@ impl Post { alt_url: String::new(), width: data["thumbnail_width"].as_i64().unwrap_or_default(), height: data["thumbnail_height"].as_i64().unwrap_or_default(), - poster: "".to_string(), + poster: String::new(), + download_name: String::new(), }, media, domain: val(post, "domain"), @@ -338,20 +443,25 @@ impl Post { }, }, flags: Flags { + spoiler: data["spoiler"].as_bool().unwrap_or_default(), nsfw: data["over_18"].as_bool().unwrap_or_default(), stickied: data["stickied"].as_bool().unwrap_or_default() || data["pinned"].as_bool().unwrap_or_default(), }, permalink: val(post, "permalink"), + link_title: val(post, "link_title"), + poll: Poll::parse(&data["poll_data"]), rel_time, created, + created_ts, num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0), comments: format_num(data["num_comments"].as_i64().unwrap_or_default()), gallery, awards, nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(), + ws_url: val(post, "websocket_url"), + out_url: post["data"]["url_overridden_by_dest"].as_str().map(|a| a.to_string()), }); } - Ok((posts, res["data"]["after"].as_str().unwrap_or_default().to_string())) } } @@ -381,7 +491,7 @@ pub struct Comment { pub prefs: Preferences, } -#[derive(Default, Clone)] +#[derive(Default, Clone, Serialize)] pub struct Award { pub name: String, pub icon_url: String, @@ -390,11 +500,12 @@ pub struct Award { } impl std::fmt::Display for Award { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{} {} {}", self.name, self.icon_url, self.description) } } +#[derive(Serialize)] pub struct Awards(pub Vec); impl std::ops::Deref for Awards { @@ -406,8 +517,8 @@ impl std::ops::Deref for Awards { } impl std::fmt::Display for Awards { - fn fmt(&self, f: &mut 
std::fmt::Formatter) -> std::fmt::Result { - self.iter().fold(Ok(()), |result, award| result.and_then(|_| writeln!(f, "{}", award))) + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.iter().try_fold((), |_, award| writeln!(f, "{award}")) } } @@ -442,6 +553,14 @@ pub struct ErrorTemplate { pub url: String, } +#[derive(Template)] +#[template(path = "info.html")] +pub struct InfoTemplate { + pub msg: String, + pub prefs: Preferences, + pub url: String, +} + /// Template for NSFW landing page. The landing page is displayed when a page's /// content is wholly NSFW, but a user has not enabled the option to view NSFW /// posts. @@ -501,24 +620,74 @@ pub struct Params { pub before: Option, } -#[derive(Default)] +#[derive(Default, Serialize, Deserialize, Debug, PartialEq, Eq)] +#[revisioned(revision = 1)] pub struct Preferences { + #[revision(start = 1)] + #[serde(skip_serializing, skip_deserializing)] pub available_themes: Vec, + #[revision(start = 1)] pub theme: String, + #[revision(start = 1)] pub front_page: String, + #[revision(start = 1)] pub layout: String, + #[revision(start = 1)] pub wide: String, + #[revision(start = 1)] + pub blur_spoiler: String, + #[revision(start = 1)] pub show_nsfw: String, + #[revision(start = 1)] pub blur_nsfw: String, + #[revision(start = 1)] pub hide_hls_notification: String, + #[revision(start = 1)] + pub video_quality: String, + #[revision(start = 1)] + pub hide_sidebar_and_summary: String, + #[revision(start = 1)] pub use_hls: String, + #[revision(start = 1)] pub autoplay_videos: String, + #[revision(start = 1)] + pub fixed_navbar: String, + #[revision(start = 1)] pub disable_visit_reddit_confirmation: String, + #[revision(start = 1)] pub comment_sort: String, + #[revision(start = 1)] pub post_sort: String, + #[revision(start = 1)] + #[serde(serialize_with = "serialize_vec_with_plus", deserialize_with = "deserialize_vec_with_plus")] pub subscriptions: Vec, + #[revision(start = 1)] + #[serde(serialize_with = "serialize_vec_with_plus", deserialize_with = "deserialize_vec_with_plus")] pub filters: Vec, + #[revision(start = 1)] pub hide_awards: String, + #[revision(start = 1)] + pub hide_score: String, + #[revision(start = 1)] + pub remove_default_feeds: String, +} + +fn serialize_vec_with_plus(vec: &[String], serializer: S) -> Result +where + S: Serializer, +{ + serializer.serialize_str(&vec.join("+")) +} + +fn deserialize_vec_with_plus<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let string = String::deserialize(deserializer)?; + if string.is_empty() { + return Ok(Vec::new()); + } + Ok(string.split('+').map(|s| s.to_string()).collect()) } #[derive(RustEmbed)] @@ -534,27 +703,60 @@ impl Preferences { let mut themes = vec!["system".to_string()]; for file in ThemeAssets::iter() { let chunks: Vec<&str> = file.as_ref().split(".css").collect(); - themes.push(chunks[0].to_owned()) + themes.push(chunks[0].to_owned()); } Self { available_themes: themes, - theme: setting(&req, "theme"), - front_page: setting(&req, "front_page"), - layout: setting(&req, "layout"), - wide: setting(&req, "wide"), - show_nsfw: setting(&req, "show_nsfw"), - blur_nsfw: setting(&req, "blur_nsfw"), - use_hls: setting(&req, "use_hls"), - hide_hls_notification: setting(&req, "hide_hls_notification"), - autoplay_videos: setting(&req, "autoplay_videos"), - disable_visit_reddit_confirmation: setting(&req, "disable_visit_reddit_confirmation"), - comment_sort: setting(&req, "comment_sort"), - post_sort: setting(&req, "post_sort"), - 
subscriptions: setting(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(), - filters: setting(&req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(), - hide_awards: setting(&req, "hide_awards"), + theme: setting(req, "theme"), + front_page: setting(req, "front_page"), + layout: setting(req, "layout"), + wide: setting(req, "wide"), + blur_spoiler: setting(req, "blur_spoiler"), + show_nsfw: setting(req, "show_nsfw"), + hide_sidebar_and_summary: setting(req, "hide_sidebar_and_summary"), + blur_nsfw: setting(req, "blur_nsfw"), + use_hls: setting(req, "use_hls"), + hide_hls_notification: setting(req, "hide_hls_notification"), + video_quality: setting(req, "video_quality"), + autoplay_videos: setting(req, "autoplay_videos"), + fixed_navbar: setting_or_default(req, "fixed_navbar", "on".to_string()), + disable_visit_reddit_confirmation: setting(req, "disable_visit_reddit_confirmation"), + comment_sort: setting(req, "comment_sort"), + post_sort: setting(req, "post_sort"), + subscriptions: setting(req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(), + filters: setting(req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(), + hide_awards: setting(req, "hide_awards"), + hide_score: setting(req, "hide_score"), + remove_default_feeds: setting(req, "remove_default_feeds"), } } + + pub fn to_urlencoded(&self) -> Result { + serde_urlencoded::to_string(self).map_err(|e| e.to_string()) + } + + pub fn to_bincode(&self) -> Result, String> { + bincode::serialize(self).map_err(|e| e.to_string()) + } + pub fn to_compressed_bincode(&self) -> Result, String> { + deflate_compress(self.to_bincode()?) + } + pub fn to_bincode_str(&self) -> Result { + Ok(base2048::encode(&self.to_compressed_bincode()?)) + } +} + +pub fn deflate_compress(i: Vec) -> Result, String> { + let mut e = Encoder::new(Vec::new()); + e.write_all(&i).map_err(|e| e.to_string())?; + e.finish().into_result().map_err(|e| e.to_string()) +} + +pub fn deflate_decompress(i: Vec) -> Result, String> { + let mut decoder = Decoder::new(&i[..]); + let mut out = Vec::new(); + decoder.read_to_end(&mut out).map_err(|e| format!("Failed to read from gzip decoder: {}", e))?; + Ok(out) } /// Gets a `HashSet` of filters from the cookie in the given `Request`. @@ -586,7 +788,7 @@ pub fn filter_posts(posts: &mut Vec, filters: &HashSet) -> (u64, b } /// Creates a [`Post`] from a provided JSON. -pub async fn parse_post(post: &serde_json::Value) -> Post { +pub async fn parse_post(post: &Value) -> Post { // Grab UTC time as unix timestamp let (rel_time, created) = time(post["data"]["created_utc"].as_f64().unwrap_or_default()); // Parse post score and upvote ratio @@ -596,17 +798,29 @@ pub async fn parse_post(post: &serde_json::Value) -> Post { // Determine the type of media along with the media URL let (post_type, media, gallery) = Media::parse(&post["data"]).await; + let created_ts = post["data"]["created_utc"].as_f64().unwrap_or_default().round() as u64; + let awards: Awards = Awards::parse(&post["data"]["all_awardings"]); let permalink = val(post, "permalink"); + let poll = Poll::parse(&post["data"]["poll_data"]); + let body = if val(post, "removed_by_category") == "moderator" { format!( - "

<div class=\"md\"><p>[removed] — <a href=\"https://www.unddit.com{}\">view removed post</a></p></div>

", - permalink + "

<div class=\"md\"><p>[removed] — <a href=\"https://{}{permalink}\">view removed post</a></p></div>

", + get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or_else(|| String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)), ) } else { - rewrite_urls(&val(post, "selftext_html")) + let selftext = val(post, "selftext"); + if selftext.contains("```") { + let mut html_output = String::new(); + let parser = pulldown_cmark::Parser::new(&selftext); + pulldown_cmark::html::push_html(&mut html_output, parser); + rewrite_urls(&html_output) + } else { + rewrite_urls(&val(post, "selftext_html")) + } }; // Build a post using data parsed from Reddit post API @@ -630,6 +844,8 @@ pub async fn parse_post(post: &serde_json::Value) -> Post { distinguished: val(post, "distinguished"), }, permalink, + link_title: val(post, "link_title"), + poll, score: format_num(score), upvote_ratio: ratio as i64, post_type, @@ -640,6 +856,7 @@ pub async fn parse_post(post: &serde_json::Value) -> Post { width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(), height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(), poster: String::new(), + download_name: String::new(), }, flair: Flair { flair_parts: FlairPart::parse( @@ -656,17 +873,21 @@ pub async fn parse_post(post: &serde_json::Value) -> Post { }, }, flags: Flags { + spoiler: post["data"]["spoiler"].as_bool().unwrap_or_default(), nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(), stickied: post["data"]["stickied"].as_bool().unwrap_or_default() || post["data"]["pinned"].as_bool().unwrap_or(false), }, domain: val(post, "domain"), rel_time, created, + created_ts, num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0), comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()), gallery, awards, nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(), + ws_url: val(post, "websocket_url"), + out_url: post["data"]["url_overridden_by_dest"].as_str().map(|a| a.to_string()), } } @@ -677,7 +898,7 @@ pub async fn parse_post(post: &serde_json::Value) -> Post { // Grab a query parameter from a url pub fn param(path: &str, value: &str) -> Option { Some( - Url::parse(format!("https://libredd.it/{}", path).as_str()) + Url::parse(format!("https://libredd.it/{path}").as_str()) .ok()? 
.query_pairs() .into_owned() @@ -690,33 +911,113 @@ pub fn param(path: &str, value: &str) -> Option { // Retrieve the value of a setting by name pub fn setting(req: &Request, name: &str) -> String { // Parse a cookie value from request - req - .cookie(name) - .unwrap_or_else(|| { - // If there is no cookie for this setting, try receiving a default from the config - if let Some(default) = crate::config::get_setting(&format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())) { - Cookie::new(name, default) - } else { - Cookie::named(name) - } - }) - .value() - .to_string() + + // If this was called with "subscriptions" and the "subscriptions" cookie has a value + if name == "subscriptions" && req.cookie("subscriptions").is_some() { + // Create subscriptions string + let mut subscriptions = String::new(); + + // Default subscriptions cookie + if req.cookie("subscriptions").is_some() { + subscriptions.push_str(req.cookie("subscriptions").unwrap().value()); + } + + // Start with first numbered subscription cookie + let mut subscriptions_number = 1; + + // While whatever subscriptionsNUMBER cookie we're looking at has a value + while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() { + // Push whatever subscriptionsNUMBER cookie we're looking at into the subscriptions string + subscriptions.push_str(req.cookie(&format!("subscriptions{}", subscriptions_number)).unwrap().value()); + + // Increment subscription cookie number + subscriptions_number += 1; + } + + // Return the subscriptions cookies as one large string + subscriptions + } + // If this was called with "filters" and the "filters" cookie has a value + else if name == "filters" && req.cookie("filters").is_some() { + // Create filters string + let mut filters = String::new(); + + // Default filters cookie + if req.cookie("filters").is_some() { + filters.push_str(req.cookie("filters").unwrap().value()); + } + + // Start with first numbered filters cookie + let mut filters_number = 1; + + // While whatever filtersNUMBER cookie we're looking at has a value + while req.cookie(&format!("filters{}", filters_number)).is_some() { + // Push whatever filtersNUMBER cookie we're looking at into the filters string + filters.push_str(req.cookie(&format!("filters{}", filters_number)).unwrap().value()); + + // Increment filters cookie number + filters_number += 1; + } + + // Return the filters cookies as one large string + filters + } + // The above two still come to this if there was no existing value + else { + req + .cookie(name) + .unwrap_or_else(|| { + // If there is no cookie for this setting, try receiving a default from the config + if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) { + Cookie::new(name, default) + } else { + Cookie::from(name) + } + }) + .value() + .to_string() + } +} + +// Retrieve the value of a setting by name or the default value +pub fn setting_or_default(req: &Request, name: &str, default: String) -> String { + let value = setting(req, name); + if value.is_empty() { + default + } else { + value + } } // Detect and redirect in the event of a random subreddit pub async fn catch_random(sub: &str, additional: &str) -> Result, String> { if sub == "random" || sub == "randnsfw" { - let new_sub = json(format!("/r/{}/about.json?raw_json=1", sub), false).await?["data"]["display_name"] - .as_str() - .unwrap_or_default() - .to_string(); - Ok(redirect(format!("/r/{}{}", new_sub, additional))) + Ok(redirect(&format!( + "/r/{}{additional}", + 
json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"] + .as_str() + .unwrap_or_default() + ))) } else { Err("No redirect needed".to_string()) } } +static REGEX_URL_WWW: Lazy = Lazy::new(|| Regex::new(r"https?://www\.reddit\.com/(.*)").unwrap()); +static REGEX_URL_OLD: Lazy = Lazy::new(|| Regex::new(r"https?://old\.reddit\.com/(.*)").unwrap()); +static REGEX_URL_NP: Lazy = Lazy::new(|| Regex::new(r"https?://np\.reddit\.com/(.*)").unwrap()); +static REGEX_URL_PLAIN: Lazy = Lazy::new(|| Regex::new(r"https?://reddit\.com/(.*)").unwrap()); +static REGEX_URL_VIDEOS: Lazy = Lazy::new(|| Regex::new(r"https?://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$|\?source=fallback))").unwrap()); +static REGEX_URL_VIDEOS_HLS: Lazy = Lazy::new(|| Regex::new(r"https?://v\.redd\.it/(.+)/(HLSPlaylist\.m3u8.*)$").unwrap()); +static REGEX_URL_IMAGES: Lazy = Lazy::new(|| Regex::new(r"https?://i\.redd\.it/(.*)").unwrap()); +static REGEX_URL_THUMBS_A: Lazy = Lazy::new(|| Regex::new(r"https?://a\.thumbs\.redditmedia\.com/(.*)").unwrap()); +static REGEX_URL_THUMBS_B: Lazy = Lazy::new(|| Regex::new(r"https?://b\.thumbs\.redditmedia\.com/(.*)").unwrap()); +static REGEX_URL_EMOJI: Lazy = Lazy::new(|| Regex::new(r"https?://emoji\.redditmedia\.com/(.*)/(.*)").unwrap()); +static REGEX_URL_PREVIEW: Lazy = Lazy::new(|| Regex::new(r"https?://preview\.redd\.it/(.*)").unwrap()); +static REGEX_URL_EXTERNAL_PREVIEW: Lazy = Lazy::new(|| Regex::new(r"https?://external\-preview\.redd\.it/(.*)").unwrap()); +static REGEX_URL_STYLES: Lazy = Lazy::new(|| Regex::new(r"https?://styles\.redditmedia\.com/(.*)").unwrap()); +static REGEX_URL_STATIC_MEDIA: Lazy = Lazy::new(|| Regex::new(r"https?://www\.redditstatic\.com/(.*)").unwrap()); + // Direct urls to proxy if proxy is enabled pub fn format_url(url: &str) -> String { if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" { @@ -725,13 +1026,11 @@ pub fn format_url(url: &str) -> String { Url::parse(url).map_or(url.to_string(), |parsed| { let domain = parsed.domain().unwrap_or_default(); - let capture = |regex: &str, format: &str, segments: i16| { - Regex::new(regex).map_or(String::new(), |re| { - re.captures(url).map_or(String::new(), |caps| match segments { - 1 => [format, &caps[1]].join(""), - 2 => [format, &caps[1], "/", &caps[2]].join(""), - _ => String::new(), - }) + let capture = |regex: &Regex, format: &str, segments: i16| { + regex.captures(url).map_or(String::new(), |caps| match segments { + 1 => [format, &caps[1]].join(""), + 2 => [format, &caps[1], "/", &caps[2]].join(""), + _ => String::new(), }) }; @@ -757,44 +1056,190 @@ pub fn format_url(url: &str) -> String { } match domain { - "www.reddit.com" => capture(r"https://www\.reddit\.com/(.*)", "/", 1), - "old.reddit.com" => capture(r"https://old\.reddit\.com/(.*)", "/", 1), - "np.reddit.com" => capture(r"https://np\.reddit\.com/(.*)", "/", 1), - "reddit.com" => capture(r"https://reddit\.com/(.*)", "/", 1), - "v.redd.it" => chain!( - capture(r"https://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$|\?source=fallback))", "/vid/", 2), - capture(r"https://v\.redd\.it/(.+)/(HLSPlaylist\.m3u8.*)$", "/hls/", 2) - ), - "i.redd.it" => capture(r"https://i\.redd\.it/(.*)", "/img/", 1), - "a.thumbs.redditmedia.com" => capture(r"https://a\.thumbs\.redditmedia\.com/(.*)", "/thumb/a/", 1), - "b.thumbs.redditmedia.com" => capture(r"https://b\.thumbs\.redditmedia\.com/(.*)", "/thumb/b/", 1), - "emoji.redditmedia.com" => capture(r"https://emoji\.redditmedia\.com/(.*)/(.*)", "/emoji/", 2), 
- "preview.redd.it" => capture(r"https://preview\.redd\.it/(.*)", "/preview/pre/", 1), - "external-preview.redd.it" => capture(r"https://external\-preview\.redd\.it/(.*)", "/preview/external-pre/", 1), - "styles.redditmedia.com" => capture(r"https://styles\.redditmedia\.com/(.*)", "/style/", 1), - "www.redditstatic.com" => capture(r"https://www\.redditstatic\.com/(.*)", "/static/", 1), + "www.reddit.com" => capture(®EX_URL_WWW, "/", 1), + "old.reddit.com" => capture(®EX_URL_OLD, "/", 1), + "np.reddit.com" => capture(®EX_URL_NP, "/", 1), + "reddit.com" => capture(®EX_URL_PLAIN, "/", 1), + "v.redd.it" => chain!(capture(®EX_URL_VIDEOS, "/vid/", 2), capture(®EX_URL_VIDEOS_HLS, "/hls/", 2)), + "i.redd.it" => capture(®EX_URL_IMAGES, "/img/", 1), + "a.thumbs.redditmedia.com" => capture(®EX_URL_THUMBS_A, "/thumb/a/", 1), + "b.thumbs.redditmedia.com" => capture(®EX_URL_THUMBS_B, "/thumb/b/", 1), + "emoji.redditmedia.com" => capture(®EX_URL_EMOJI, "/emoji/", 2), + "preview.redd.it" => capture(®EX_URL_PREVIEW, "/preview/pre/", 1), + "external-preview.redd.it" => capture(®EX_URL_EXTERNAL_PREVIEW, "/preview/external-pre/", 1), + "styles.redditmedia.com" => capture(®EX_URL_STYLES, "/style/", 1), + "www.redditstatic.com" => capture(®EX_URL_STATIC_MEDIA, "/static/", 1), _ => url.to_string(), } }) } } -// Rewrite Reddit links to Libreddit in body of text -pub fn rewrite_urls(input_text: &str) -> String { - let text1 = Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|amp\.|)(reddit\.com|redd\.it)/"#) - .map_or(String::new(), |re| re.replace_all(input_text, r#"href="/"#).to_string()) - // Remove (html-encoded) "\" from URLs. - .replace("%5C", "") - .replace('\\', ""); +static REGEX_BULLET: Lazy = Lazy::new(|| Regex::new(r"(?m)^- (.*)$").unwrap()); +static REGEX_BULLET_CONSECUTIVE_LINES: Lazy = Lazy::new(|| Regex::new(r"\n