Mirror of https://github.com/DNSCrypt/doh-server.git, synced 2025-04-04 13:37:39 +03:00
Compare commits
68 commits
SHA1:
f0242354d3, 25fa6946e6, 2254632d33, 672d1a11f1, 9e4a931bce, 40b0b02972, bf443c33b9, 1a0a0566c4,
890a74276f, 34f614e938, d6635eebb7, c79501aea3, e73964fa1d, bafbdc0926, 30a55a0f2f, 7bb8293c28,
a6517472d5, 3511672d49, bd85572368, 02b3a67a00, 66c66c7a28, 1165fab90c, c92308ccbb, 78c47830ff,
9e2853da86, e5f6f2a5d6, e8df0458ac, 19040f1e88, 6f9f63e754, 678bd04bed, ffa0828515, 6580f6ffb5,
f64770bdd7, 18297228c7, 908e7d64db, c54b3303fc, 1c5c83803a, 1386b7d13a, 920d31b502, 651224d900,
b5d525abcd, 11d8f4cb31, 47330ebcad, d5fd8231ff, 8cba04338e, 85280f4525, 1c28a28b78, fbf82068d1,
c9e084b2b4, 37dc663b6e, b81cc3e5d2, 3f1bbcd8dc, e92fddb165, d573a20c86, f5c07a205b, d277c0a806,
fc61c79a9f, a92f4a77ae, a373957045, 6f5213838b, eede3f4ab3, fdcc797fcb, 3e59f42558, a1fc5bbffc,
4b887d6705, 6818fbe8a1, c82fb339ed, 06a3fa0499
14 changed files with 325 additions and 155 deletions
17  .github/workflows/issues.yml (vendored, new file)

@@ -0,0 +1,17 @@
+name: Close inactive issues
+on:
+  schedule:
+    - cron: "30 1 * * *"
+
+jobs:
+  close-issues:
+    runs-on: ubuntu-latest
+    permissions:
+      issues: write
+      pull-requests: write
+    steps:
+      - uses: actions/stale@v9
+        with:
+          stale-issue-message: "This issue is stale because it has been open for 30 days with no activity."
+          close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
118  .github/workflows/release.yml (vendored)

@@ -14,7 +14,11 @@ jobs:
         id: get_version
         run: echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/}

-      - uses: actions/checkout@master
+      - uses: actions/checkout@v3

+      - uses: mlugg/setup-zig@v1
+        with:
+          version: 0.10.1
+
       - uses: hecrj/setup-rust-action@master
         with:
@@ -27,18 +31,60 @@
         run: rustup default | grep stable

       - name: Install cargo-deb
-        run: cargo install --debug cargo-deb
+        run: cargo install cargo-deb

-      - name: Release build
+      - name: Install cargo-generate-rpm
+        run: cargo install cargo-generate-rpm
+
+      - name: Install cargo-zigbuild
+        run: cargo install cargo-zigbuild
+
+      - name: Release build Linux-x86-64
         run: |
-          env RUSTFLAGS="-C link-arg=-s" cargo build --release
+          rustup target add x86_64-unknown-linux-musl
+          env RUSTFLAGS="-C strip=symbols" cargo zigbuild --release --target x86_64-unknown-linux-musl
           mkdir doh-proxy
-          mv target/release/doh-proxy doh-proxy/
+          mv target/x86_64-unknown-linux-musl/release/doh-proxy doh-proxy/
           cp README.md localhost.pem doh-proxy/
-          tar cJpf doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-x86_64.tar.bz2 doh-proxy
+          tar cjpf doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-x86_64.tar.bz2 doh-proxy
+          rm -fr doh-proxy

-      - name: Debian package
+      - name: Release build Linux-aarch64
         run: |
-          cargo deb
+          rustup target add aarch64-unknown-linux-musl
+          env RUSTFLAGS="-C strip=symbols" cargo zigbuild --release --target aarch64-unknown-linux-musl
+          mkdir doh-proxy
+          mv target/aarch64-unknown-linux-musl/release/doh-proxy doh-proxy/
+          cp README.md localhost.pem doh-proxy/
+          tar cjpf doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-aarch64.tar.bz2 doh-proxy
+          rm -fr doh-proxy
+
+      - name: Release build Windows-x86_64
+        run: |
+          rustup target add x86_64-pc-windows-gnu
+          env RUSTFLAGS="-C strip=symbols" cargo zigbuild --release --target x86_64-pc-windows-gnu
+          mkdir doh-proxy
+          mv target/x86_64-pc-windows-gnu/release/doh-proxy.exe doh-proxy/
+          cp README.md localhost.pem doh-proxy/
+          zip -9 -r doh-proxy_${{ steps.get_version.outputs.VERSION }}_windows-x86_64.zip doh-proxy
+          rm -fr doh-proxy
+
+      - name: Debian packages
+        run: |
+          rustup target add x86_64-unknown-linux-musl
+          env RUSTFLAGS="-C strip=symbols" cargo deb --no-strip --cargo-build=zigbuild --target=x86_64-unknown-linux-musl
+          rustup target add aarch64-unknown-linux-musl
+          env RUSTFLAGS="-C strip=symbols" cargo deb --no-strip --cargo-build=zigbuild --target=aarch64-unknown-linux-musl
+
+      - name: RPM packages
+        run: |
+          rustup target add x86_64-unknown-linux-gnu
+          env RUSTFLAGS="-C strip=symbols" cargo-zigbuild build --target=x86_64-unknown-linux-gnu.2.17 --release
+          mv target/x86_64-unknown-linux-musl/release/doh-proxy target/release/
+          cargo generate-rpm --target x86_64-unknown-linux-gnu
+          rustup target add aarch64-unknown-linux-gnu
+          env RUSTFLAGS="-C strip=symbols" cargo-zigbuild build --target=aarch64-unknown-linux-gnu.2.17 --release
+          cargo generate-rpm --target aarch64-unknown-linux-gnu

       - name: Create release
         id: create_release
@@ -51,19 +97,41 @@
           draft: true
           prerelease: false

-      - name: Upload Debian package
-        id: upload-release-asset-debian
+      - name: Upload Debian package for x86_64
+        id: upload-release-asset-debian-x86_64
         uses: actions/upload-release-asset@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_name: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_amd64.deb"
-          asset_path: "target/debian/doh-proxy_${{ steps.get_version.outputs.VERSION }}_amd64.deb"
+          asset_name: "doh-proxy_${{ steps.get_version.outputs.VERSION }}-1_amd64.deb"
+          asset_path: "target/x86_64-unknown-linux-musl/debian/doh-proxy_${{ steps.get_version.outputs.VERSION }}-1_amd64.deb"
           asset_content_type: application/x-debian-package

-      - name: Upload tarball
-        id: upload-release-asset-tarball
+      - name: Upload RPM package for x86_64
+        id: upload-release-asset-rpm-x86_64
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ steps.create_release.outputs.upload_url }}
+          asset_name: "doh-proxy-${{ steps.get_version.outputs.VERSION }}-1.x86_64.rpm"
+          asset_path: "target/x86_64-unknown-linux-gnu/generate-rpm/doh-proxy-${{ steps.get_version.outputs.VERSION }}-1.x86_64.rpm"
+          asset_content_type: application/x-redhat-package-manager
+
+      - name: Upload RPM package for aarch64
+        id: upload-release-asset-rpm-aarch64
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ steps.create_release.outputs.upload_url }}
+          asset_name: "doh-proxy-${{ steps.get_version.outputs.VERSION }}-1.aarch64.rpm"
+          asset_path: "target/aarch64-unknown-linux-gnu/generate-rpm/doh-proxy-${{ steps.get_version.outputs.VERSION }}-1.aarch64.rpm"
+          asset_content_type: application/x-redhat-package-manager
+
+      - name: Upload tarball for linux-x86_64
+        id: upload-release-asset-tarball-linux-x86_64
         uses: actions/upload-release-asset@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -72,3 +140,25 @@
           asset_name: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-x86_64.tar.bz2"
           asset_path: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-x86_64.tar.bz2"
           asset_content_type: application/x-tar
+
+      - name: Upload tarball for linux-aarch64
+        id: upload-release-asset-tarball-linux-aarch64
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ steps.create_release.outputs.upload_url }}
+          asset_name: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-aarch64.tar.bz2"
+          asset_path: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-aarch64.tar.bz2"
+          asset_content_type: application/x-tar
+
+      - name: Upload tarball for windows-x86_64
+        id: upload-release-asset-tarball-windows-x86_64
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ steps.create_release.outputs.upload_url }}
+          asset_name: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_windows-x86_64.zip"
+          asset_path: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_windows-x86_64.zip"
+          asset_content_type: application/zip
16  Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "doh-proxy"
-version = "0.9.5"
+version = "0.9.11"
 authors = ["Frank Denis <github@pureftpd.org>"]
 description = "A DNS-over-HTTPS (DoH) and ODoH (Oblivious DoH) proxy"
 keywords = ["dns", "https", "doh", "odoh", "proxy"]

@@ -16,10 +16,16 @@ default = ["tls"]
 tls = ["libdoh/tls"]

 [dependencies]
-libdoh = { path = "src/libdoh", version = "0.9.5", default-features = false }
-clap = { version = "3", features = ["std", "cargo", "wrap_help"] }
-dnsstamps = "0.1.9"
-mimalloc = { version = "0.1.29", default-features = false }
+libdoh = { path = "src/libdoh", version = "0.9.9", default-features = false }
+clap = { version = "4", features = ["std", "cargo", "wrap_help", "string"] }
+dnsstamps = "0.1.10"
+mimalloc = { version = "0.1.44", default-features = false }
+
+[package.metadata.generate-rpm]
+assets = [
+    { source = "target/release/doh-proxy", dest = "/usr/bin/doh-proxy", mode = "755" },
+    { source = "README.md", dest = "/usr/share/doc/doh-proxy/README.md", mode = "644", doc = true },
+]

 [package.metadata.deb]
 extended-description = """\
2  LICENSE

@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2018-2022 Frank Denis
+Copyright (c) 2018-2025 Frank Denis

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
30  README.md

@@ -1,4 +1,4 @@
-# doh-proxy
+# ![doh-proxy](logo.png)

 A fast and secure DoH (DNS-over-HTTPS) and ODoH (Oblivious DoH) server.
@@ -60,13 +60,21 @@ OPTIONS:
             Path to the PEM/PKCS#8-encoded certificates (only required for built-in TLS)
 ```

-## HTTP/2 termination
+Example command-line:

-The recommended way to use `doh-proxy` is to use a TLS termination proxy (such as [hitch](https://github.com/varnish/hitch) or [relayd](https://bsd.plumbing/about.html)), a CDN or a web server with proxying abilities as a front-end.
+```sh
+doh-proxy -H 'doh.example.com' -u 127.0.0.1:53 -g 233.252.0.5
+```
+
+Here, `doh.example.com` is the host name (which should match a name included in the TLS certificate), `127.0.0.1:53` is the address of the DNS resolver, and `233.252.0.5` is the public IP address of the DoH server.
+
+## HTTP/2 and HTTP/3 termination
+
+The recommended way to use `doh-proxy` is to use a TLS termination proxy (such as [hitch](https://github.com/varnish/hitch) or [relayd](https://man.openbsd.org/relayd.8)), a CDN or a web server with proxying abilities as a front-end.

 That way, the DoH service can be exposed as a virtual host, sharing the same IP addresses as existing websites.

-If `doh-proxy` and the HTTP/2 front-end run on the same host, using the HTTP protocol to communicate between both is fine.
+If `doh-proxy` and the HTTP/2 (/ HTTP/3) front-end run on the same host, using the HTTP protocol to communicate between both is fine.

 If both are on distinct networks, such as when using a CDN, `doh-proxy` can handle HTTPS requests, provided that it was compiled with the `tls` feature.
@@ -128,7 +136,7 @@ This can be achieved with the `--allow-odoh-post` command-line switch.
 * When using DoH, DNS stamps should include a resolver IP address in order to remove a dependency on non-encrypted, non-authenticated, easy-to-block resolvers.
 * Unlike DNSCrypt where users must explicitly trust a DNS server's public key, the security of DoH relies on traditional public Certificate Authorities. Additional root certificates (required by governments, security software, enterprise gateways) installed on a client immediately make DoH vulnerable to MITM. In order to prevent this, DNS stamps should include the hash of the parent certificate.
 * TLS certificates are tied to host names. But domains expire, get reassigned and switch hands all the time. If a domain originally used for a DoH service gets a new, possibly malicious owner, clients still configured to use the service will blindly keep trusting it if the CA is the same. As a mitigation, the CA should sign an intermediate certificate (the only one present in the stamp), itself used to sign the name used by the DoH server. While commercial CAs offer this, Let's Encrypt currently doesn't.
-* Make sure that the front-end supports HTTP/2 and TLS 1.3.
+* Make sure that the front-end supports at least HTTP/2 and TLS 1.3.
 * Internal DoH servers still require TLS certificates. So, if you are planning to deploy an internal server, you need to set up an internal CA, or add self-signed certificates to every single client.

 ## Example usage with `encrypted-dns-server`
@@ -142,10 +150,10 @@ upstream_addr = "127.0.0.1:3000"

 ## Example usage with `nginx`

-In an existing `server`, a `/doh` endpoint can be exposed that way:
+In an existing `server`, a `/dns-query` endpoint can be exposed that way:

 ```text
-location /doh {
+location /dns-query {
 	proxy_pass http://127.0.0.1:3000;
 }
 ```
@@ -187,10 +195,14 @@ This [Go code snippet](https://gist.github.com/d6cb41742a1ceb54d48cc286f3d5c5fa)

 ### Common certificate hashes

-* Let's Encrypt R3:
-  * `444ebd67bb83f8807b3921e938ac9178b882bd50aadb11231f044cf5f08df7ce`
 * Let's Encrypt E1:
   * `cc1060d39c8329b62b6fbc7d0d6df9309869b981e7e6392d5cd8fa408f4d80e6`
+* Let's Encrypt R3:
+  * `444ebd67bb83f8807b3921e938ac9178b882bd50aadb11231f044cf5f08df7ce`
+* Let's Encrypt R10:
+  * `e644ba6963e335fe765cb9976b12b10eb54294b42477764ccb3a3acca3acb2fc`
+* ZeroSSL:
+  * `9a3a34f727deb9bca51003d9ce9c39f8f27dd9c5242901c2bab1a44e635a0219`

 ## Clients
BIN  logo.png (new file)
Binary file not shown. Size: 58 KiB
177  src/config.rs

@@ -3,7 +3,7 @@ use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6, ToSoc
 use std::path::PathBuf;
 use std::time::Duration;

-use clap::Arg;
+use clap::{Arg, ArgAction::SetTrue};
 use libdoh::*;

 use crate::constants::*;
@@ -24,54 +24,54 @@ pub fn parse_opts(globals: &mut Globals) {
             Arg::new("hostname")
                 .short('H')
                 .long("hostname")
-                .takes_value(true)
+                .num_args(1)
                 .help("Host name (not IP address) DoH clients will use to connect"),
         )
         .arg(
             Arg::new("public_address")
                 .short('g')
                 .long("public-address")
-                .takes_value(true)
+                .num_args(1)
                 .help("External IP address DoH clients will connect to"),
         )
         .arg(
             Arg::new("public_port")
                 .short('j')
                 .long("public-port")
-                .takes_value(true)
+                .num_args(1)
                 .help("External port DoH clients will connect to, if not 443"),
         )
         .arg(
             Arg::new("listen_address")
                 .short('l')
                 .long("listen-address")
-                .takes_value(true)
+                .num_args(1)
                 .default_value(LISTEN_ADDRESS)
-                .validator(verify_sock_addr)
+                .value_parser(verify_sock_addr)
                 .help("Address to listen to"),
         )
         .arg(
             Arg::new("server_address")
                 .short('u')
                 .long("server-address")
-                .takes_value(true)
+                .num_args(1)
                 .default_value(SERVER_ADDRESS)
-                .validator(verify_remote_server)
+                .value_parser(verify_remote_server)
                 .help("Address to connect to"),
         )
         .arg(
             Arg::new("local_bind_address")
                 .short('b')
                 .long("local-bind-address")
-                .takes_value(true)
-                .validator(verify_sock_addr)
+                .num_args(1)
+                .value_parser(verify_sock_addr)
                 .help("Address to connect from"),
         )
         .arg(
             Arg::new("path")
                 .short('p')
                 .long("path")
-                .takes_value(true)
+                .num_args(1)
                 .default_value(PATH)
                 .help("URI path"),
         )
@@ -79,65 +79,68 @@ pub fn parse_opts(globals: &mut Globals) {
             Arg::new("max_clients")
                 .short('c')
                 .long("max-clients")
-                .takes_value(true)
-                .default_value(&max_clients)
+                .num_args(1)
+                .default_value(max_clients)
                 .help("Maximum number of simultaneous clients"),
         )
         .arg(
             Arg::new("max_concurrent")
                 .short('C')
                 .long("max-concurrent")
-                .takes_value(true)
-                .default_value(&max_concurrent_streams)
+                .num_args(1)
+                .default_value(max_concurrent_streams)
                 .help("Maximum number of concurrent requests per client"),
         )
         .arg(
             Arg::new("timeout")
                 .short('t')
                 .long("timeout")
-                .takes_value(true)
-                .default_value(&timeout_sec)
+                .num_args(1)
+                .default_value(timeout_sec)
                 .help("Timeout, in seconds"),
         )
         .arg(
             Arg::new("min_ttl")
                 .short('T')
                 .long("min-ttl")
-                .takes_value(true)
-                .default_value(&min_ttl)
+                .num_args(1)
+                .default_value(min_ttl)
                 .help("Minimum TTL, in seconds"),
         )
         .arg(
             Arg::new("max_ttl")
                 .short('X')
                 .long("max-ttl")
-                .takes_value(true)
-                .default_value(&max_ttl)
+                .num_args(1)
+                .default_value(max_ttl)
                 .help("Maximum TTL, in seconds"),
         )
         .arg(
             Arg::new("err_ttl")
                 .short('E')
                 .long("err-ttl")
-                .takes_value(true)
-                .default_value(&err_ttl)
+                .num_args(1)
+                .default_value(err_ttl)
                 .help("TTL for errors, in seconds"),
         )
         .arg(
             Arg::new("disable_keepalive")
                 .short('K')
+                .action(SetTrue)
                 .long("disable-keepalive")
                 .help("Disable keepalive"),
         )
         .arg(
             Arg::new("disable_post")
                 .short('P')
+                .action(SetTrue)
                 .long("disable-post")
                 .help("Disable POST queries"),
         )
         .arg(
             Arg::new("allow_odoh_post")
                 .short('O')
+                .action(SetTrue)
                 .long("allow-odoh-post")
                 .help("Allow POST queries over ODoH even if they have been disabed for DoH"),
         );
@@ -148,7 +151,7 @@ pub fn parse_opts(globals: &mut Globals) {
             Arg::new("tls_cert_path")
                 .short('i')
                 .long("tls-cert-path")
-                .takes_value(true)
+                .num_args(1)
                 .help(
                     "Path to the PEM/PKCS#8-encoded certificates (only required for built-in TLS)",
                 ),
@@ -157,21 +160,24 @@ pub fn parse_opts(globals: &mut Globals) {
             Arg::new("tls_cert_key_path")
                 .short('I')
                 .long("tls-cert-key-path")
-                .takes_value(true)
+                .num_args(1)
                 .help("Path to the PEM-encoded secret keys (only required for built-in TLS)"),
         );

     let matches = options.get_matches();
-    globals.listen_address = matches.value_of("listen_address").unwrap().parse().unwrap();
+    globals.listen_address = matches
+        .get_one::<String>("listen_address")
+        .unwrap()
+        .parse()
+        .unwrap();
     globals.server_address = matches
-        .value_of("server_address")
+        .get_one::<String>("server_address")
         .unwrap()
         .to_socket_addrs()
         .unwrap()
         .next()
         .unwrap();
-    globals.local_bind_address = match matches.value_of("local_bind_address") {
+    globals.local_bind_address = match matches.get_one::<String>("local_bind_address") {
         Some(address) => address.parse().unwrap(),
         None => match globals.server_address {
             SocketAddr::V4(_) => SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::UNSPECIFIED, 0)),
@@ -183,62 +189,93 @@ pub fn parse_opts(globals: &mut Globals) {
             )),
         },
     };
-    globals.path = matches.value_of("path").unwrap().to_string();
+    globals.path = matches.get_one::<String>("path").unwrap().to_string();
     if !globals.path.starts_with('/') {
         globals.path = format!("/{}", globals.path);
     }
-    globals.max_clients = matches.value_of("max_clients").unwrap().parse().unwrap();
-    globals.timeout = Duration::from_secs(matches.value_of("timeout").unwrap().parse().unwrap());
-    globals.max_concurrent_streams = matches.value_of("max_concurrent").unwrap().parse().unwrap();
-    globals.min_ttl = matches.value_of("min_ttl").unwrap().parse().unwrap();
-    globals.max_ttl = matches.value_of("max_ttl").unwrap().parse().unwrap();
-    globals.err_ttl = matches.value_of("err_ttl").unwrap().parse().unwrap();
-    globals.keepalive = !matches.is_present("disable_keepalive");
-    globals.disable_post = matches.is_present("disable_post");
-    globals.allow_odoh_post = matches.is_present("allow_odoh_post");
+    globals.max_clients = matches
+        .get_one::<String>("max_clients")
+        .unwrap()
+        .parse()
+        .unwrap();
+    globals.timeout = Duration::from_secs(
+        matches
+            .get_one::<String>("timeout")
+            .unwrap()
+            .parse()
+            .unwrap(),
+    );
+    globals.max_concurrent_streams = matches
+        .get_one::<String>("max_concurrent")
+        .unwrap()
+        .parse()
+        .unwrap();
+    globals.min_ttl = matches
+        .get_one::<String>("min_ttl")
+        .unwrap()
+        .parse()
+        .unwrap();
+    globals.max_ttl = matches
+        .get_one::<String>("max_ttl")
+        .unwrap()
+        .parse()
+        .unwrap();
+    globals.err_ttl = matches
+        .get_one::<String>("err_ttl")
+        .unwrap()
+        .parse()
+        .unwrap();
+    globals.keepalive = !matches.get_flag("disable_keepalive");
+    globals.disable_post = matches.get_flag("disable_post");
+    globals.allow_odoh_post = matches.get_flag("allow_odoh_post");

     #[cfg(feature = "tls")]
     {
-        globals.tls_cert_path = matches.value_of("tls_cert_path").map(PathBuf::from);
+        globals.tls_cert_path = matches
+            .get_one::<String>("tls_cert_path")
+            .map(PathBuf::from);
         globals.tls_cert_key_path = matches
-            .value_of("tls_cert_key_path")
+            .get_one::<String>("tls_cert_key_path")
             .map(PathBuf::from)
             .or_else(|| globals.tls_cert_path.clone());
     }

-    if let Some(hostname) = matches.value_of("hostname") {
-        let mut builder =
-            dnsstamps::DoHBuilder::new(hostname.to_string(), globals.path.to_string());
-        if let Some(public_address) = matches.value_of("public_address") {
-            builder = builder.with_address(public_address.to_string());
-        }
-        if let Some(public_port) = matches.value_of("public_port") {
-            let public_port = public_port.parse().expect("Invalid public port");
-            builder = builder.with_port(public_port);
-        }
-        println!(
-            "Test DNS stamp to reach [{}] over DoH: [{}]\n",
-            hostname,
-            builder.serialize().unwrap()
-        );
+    match matches.get_one::<String>("hostname") {
+        Some(hostname) => {
+            let mut builder =
+                dnsstamps::DoHBuilder::new(hostname.to_string(), globals.path.to_string());
+            if let Some(public_address) = matches.get_one::<String>("public_address") {
+                builder = builder.with_address(public_address.to_string());
+            }
+            if let Some(public_port) = matches.get_one::<String>("public_port") {
+                let public_port = public_port.parse().expect("Invalid public port");
+                builder = builder.with_port(public_port);
+            }
+            println!(
+                "Test DNS stamp to reach [{}] over DoH: [{}]\n",
+                hostname,
+                builder.serialize().unwrap()
+            );

-        let mut builder =
-            dnsstamps::ODoHTargetBuilder::new(hostname.to_string(), globals.path.to_string());
-        if let Some(public_port) = matches.value_of("public_port") {
-            let public_port = public_port.parse().expect("Invalid public port");
-            builder = builder.with_port(public_port);
-        }
-        println!(
-            "Test DNS stamp to reach [{}] over Oblivious DoH: [{}]\n",
-            hostname,
-            builder.serialize().unwrap()
-        );
+            let mut builder =
+                dnsstamps::ODoHTargetBuilder::new(hostname.to_string(), globals.path.to_string());
+            if let Some(public_port) = matches.get_one::<String>("public_port") {
+                let public_port = public_port.parse().expect("Invalid public port");
+                builder = builder.with_port(public_port);
+            }
+            println!(
+                "Test DNS stamp to reach [{}] over Oblivious DoH: [{}]\n",
+                hostname,
+                builder.serialize().unwrap()
+            );

-        println!("Check out https://dnscrypt.info/stamps/ to compute the actual stamps.\n")
-    } else {
-        println!(
-            "Please provide a fully qualified hostname (-H <hostname> command-line option) to get \
-             test DNS stamps for your server.\n"
-        );
+            println!("Check out https://dnscrypt.info/stamps/ to compute the actual stamps.\n")
+        }
+        _ => {
+            println!(
+                "Please provide a fully qualified hostname (-H <hostname> command-line option) to get \
+                 test DNS stamps for your server.\n"
+            );
+        }
     }
 }
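Most of the src/config.rs churn above is the clap 3 → clap 4 migration: `.takes_value(true)` becomes `.num_args(1)`, `.validator(...)` becomes `.value_parser(...)`, `value_of` becomes `get_one::<String>`, and boolean flags get an explicit `ArgAction::SetTrue` read back with `get_flag`. The following stand-alone sketch (illustrative only, not code from this repository; the option names are invented) shows the same before/after pattern in miniature:

```rust
// Hypothetical sketch of the clap 3 -> clap 4 API changes applied in the diff.
use clap::{Arg, ArgAction::SetTrue, Command};

fn main() {
    let matches = Command::new("example")
        .arg(
            Arg::new("listen_address")
                .short('l')
                .long("listen-address")
                .num_args(1) // clap 3: .takes_value(true)
                .default_value("127.0.0.1:3000"),
        )
        .arg(
            Arg::new("disable_post")
                .short('P')
                .long("disable-post")
                .action(SetTrue), // clap 3: flags needed no explicit action
        )
        .get_matches();

    // clap 3: matches.value_of("listen_address").unwrap()
    let listen: &String = matches.get_one::<String>("listen_address").unwrap();
    // clap 3: matches.is_present("disable_post")
    let disable_post = matches.get_flag("disable_post");
    println!("listen={listen} disable_post={disable_post}");
}
```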
src/libdoh/Cargo.toml

@@ -1,13 +1,13 @@
 [package]
 name = "libdoh"
-version = "0.9.5"
+version = "0.9.11"
 authors = ["Frank Denis <github@pureftpd.org>"]
 description = "DoH and Oblivious DoH library for the rust-doh app"
-keywords = ["dns","https","doh","odoh","proxy"]
+keywords = ["dns", "https", "doh", "odoh", "proxy"]
 license = "MIT"
 homepage = "https://github.com/jedisct1/rust-doh"
 repository = "https://github.com/jedisct1/rust-doh"
-categories = ["asynchronous", "network-programming","command-line-utilities"]
+categories = ["asynchronous", "network-programming", "command-line-utilities"]
 edition = "2018"

 [features]

@@ -15,18 +15,31 @@ default = ["tls"]
 tls = ["tokio-rustls"]

 [dependencies]
-anyhow = "1.0.65"
-arc-swap = "1.5.1"
-base64 = "0.13.0"
-byteorder = "1.4.3"
-bytes = "1.2.1"
-futures = "0.3.24"
-hyper = { version = "0.14.20", default-features = false, features = ["server", "http1", "http2", "stream"] }
-odoh-rs = "1.0.0"
-rand = "0.8.5"
-tokio = { version = "1.21.2", features = ["net", "rt-multi-thread", "time", "sync"] }
-tokio-rustls = { version = "0.23.4", features = ["early-data"], optional = true }
-rustls-pemfile = "1.0.1"
+anyhow = "1.0.97"
+arc-swap = "1.7.1"
+base64 = "0.22.1"
+byteorder = "1.5.0"
+bytes = "1.10.1"
+futures = "0.3.31"
+hyper = { version = "^0.14.32", default-features = false, features = [
+    "server",
+    "http1",
+    "http2",
+    "stream",
+    "runtime",
+] }
+odoh-rs = "1.0.3"
+rand = "^0.8.5"
+tokio = { version = "1.44.1", features = [
+    "net",
+    "rt-multi-thread",
+    "time",
+    "sync",
+] }
+tokio-rustls = { version = "^0.24.1", features = [
+    "early-data",
+], optional = true }
+rustls-pemfile = "^1.0.4"

 [profile.release]
 codegen-units = 1
src/libdoh/LICENSE

@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2018-2022 Frank Denis
+Copyright (c) 2018-2025 Frank Denis

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
src/libdoh/src/errors.rs

@@ -27,9 +27,9 @@ impl std::fmt::Display for DoHError {
             DoHError::UpstreamIssue => write!(fmt, "Upstream error"),
             DoHError::UpstreamTimeout => write!(fmt, "Upstream timeout"),
             DoHError::StaleKey => write!(fmt, "Stale key material"),
-            DoHError::Hyper(e) => write!(fmt, "HTTP error: {}", e),
-            DoHError::Io(e) => write!(fmt, "IO error: {}", e),
-            DoHError::ODoHConfigError(e) => write!(fmt, "ODoH config error: {}", e),
+            DoHError::Hyper(e) => write!(fmt, "HTTP error: {e}"),
+            DoHError::Io(e) => write!(fmt, "IO error: {e}"),
+            DoHError::ODoHConfigError(e) => write!(fmt, "ODoH config error: {e}"),
             DoHError::TooManyTcpSessions => write!(fmt, "Too many TCP sessions"),
         }
     }
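The error-formatting changes are cosmetic: since Rust 1.58, format strings can capture variables from the surrounding scope by name. A tiny illustrative snippet (not repository code):

```rust
fn main() {
    let e = "connection reset";
    // Older style: positional argument passed separately.
    let old = format!("HTTP error: {}", e);
    // Newer style used throughout the diff: the identifier is captured inline.
    let new = format!("HTTP error: {e}");
    assert_eq!(old, new);
}
```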
src/libdoh/src/lib.rs

@@ -11,6 +11,7 @@ use std::pin::Pin;
 use std::sync::Arc;
 use std::time::Duration;

+use base64::engine::Engine;
 use byteorder::{BigEndian, ByteOrder};
 use futures::prelude::*;
 use futures::task::{Context, Poll};

@@ -29,6 +30,14 @@ pub mod reexports {
     pub use tokio;
 }

+const BASE64_URL_SAFE_NO_PAD: base64::engine::GeneralPurpose =
+    base64::engine::general_purpose::GeneralPurpose::new(
+        &base64::alphabet::URL_SAFE,
+        base64::engine::general_purpose::GeneralPurposeConfig::new()
+            .with_encode_padding(false)
+            .with_decode_padding_mode(base64::engine::DecodePaddingMode::Indifferent),
+    );
+
 #[derive(Clone, Debug)]
 struct DnsResponse {
     packet: Vec<u8>,

@@ -161,9 +170,9 @@ impl DoH {
                 return None;
             }
         }
-        let query = match question_str.and_then(|question_str| {
-            base64::decode_config(question_str, base64::URL_SAFE_NO_PAD).ok()
-        }) {
+        let query = match question_str
+            .and_then(|question_str| BASE64_URL_SAFE_NO_PAD.decode(question_str).ok())
+        {
             Some(query) => query,
             _ => return None,
         };

@@ -248,10 +257,7 @@ impl DoH {
         content_types: &[&'static str],
     ) -> Option<&'static str> {
         let accept = headers.get(hyper::header::ACCEPT);
-        let accept = match accept {
-            None => return None,
-            Some(accept) => accept,
-        };
+        let accept = accept?;
         for part in accept.to_str().unwrap_or("").split(',').map(|s| s.trim()) {
             if let Some(found) = part
                 .split(';')

@@ -427,8 +433,8 @@ impl DoH {
             .header(
                 hyper::header::CACHE_CONTROL,
                 format!(
-                    "max-age={}, stale-if-error={}, stale-while-revalidate={}",
-                    ttl, STALE_IF_ERROR_SECS, STALE_WHILE_REVALIDATE_SECS
+                    "max-age={ttl}, stale-if-error={STALE_IF_ERROR_SECS}, \
+                     stale-while-revalidate={STALE_WHILE_REVALIDATE_SECS}"
                 )
                 .as_str(),
             );

@@ -495,9 +501,9 @@ impl DoH {
                 self.globals.tls_cert_path.is_some() && self.globals.tls_cert_key_path.is_some();
         }
         if tls_enabled {
-            println!("Listening on https://{}{}", listen_address, path);
+            println!("Listening on https://{listen_address}{path}");
         } else {
-            println!("Listening on http://{}{}", listen_address, path);
+            println!("Listening on http://{listen_address}{path}");
         }

         let mut server = Http::new();
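The base64 0.13 → 0.22 upgrade removes the old `base64::decode_config(s, base64::URL_SAFE_NO_PAD)` free function, which is why the diff introduces a `BASE64_URL_SAFE_NO_PAD` engine constant and calls `.decode()` on it. A minimal sketch of the new `Engine` API, using the crate's stock URL-safe engine rather than the custom one defined above (assumes base64 0.21 or later):

```rust
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use base64::Engine;

fn main() {
    // A DNS query for isc.org (type TXT), encoded the way a DoH GET request's
    // `?dns=` parameter carries it: URL-safe base64 without padding.
    let question = "AAABAAABAAAAAAAAA2lzYwNvcmcAABAAAQ";
    let packet = URL_SAFE_NO_PAD.decode(question).expect("valid base64url");
    println!("decoded DNS packet: {} bytes", packet.len());
}
```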
src/libdoh/src/odoh.rs

@@ -77,7 +77,7 @@ impl ODoHPublicKey {

 impl ODoHQueryContext {
     pub fn encrypt_response(self, response_body: Vec<u8>) -> Result<Vec<u8>, DoHError> {
-        let response_nonce = rand::thread_rng().gen::<ResponseNonce>();
+        let response_nonce = rand::thread_rng().r#gen::<ResponseNonce>();
         let response_body_ = ObliviousDoHMessagePlaintext::new(response_body, 0);
         let encrypted_response = odoh_rs::encrypt_response(
             &self.query,

@@ -115,7 +115,7 @@ impl ODoHRotator {
                 Ok(key) => {
                     current_key.store(Arc::new(key));
                 }
-                Err(e) => eprintln!("ODoH key rotation error: {}", e),
+                Err(e) => eprintln!("ODoH key rotation error: {e}"),
             };
         }
     });
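The `gen` → `r#gen` change is needed because `gen` is reserved as a keyword in the Rust 2024 edition; the raw-identifier spelling keeps calls to rand's `Rng::gen` compiling (and lint-free) on newer toolchains. A tiny hypothetical example:

```rust
use rand::Rng;

fn main() {
    // `r#gen` is the raw-identifier spelling of `gen`, required because `gen`
    // is a reserved keyword in the Rust 2024 edition.
    let nonce: [u8; 16] = rand::thread_rng().r#gen();
    println!("{nonce:x?}");
}
```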
src/libdoh/src/tls.rs

@@ -29,10 +29,7 @@ where
     let mut reader = BufReader::new(File::open(certs_path).map_err(|e| {
         io::Error::new(
             e.kind(),
-            format!(
-                "Unable to load the certificates [{}]: {}",
-                certs_path_str, e
-            ),
+            format!("Unable to load the certificates [{certs_path_str}]: {e}"),
         )
     })?);
     rustls_pemfile::certs(&mut reader).map_err(|_| {

@@ -53,10 +50,7 @@ where
         .map_err(|e| {
             io::Error::new(
                 e.kind(),
-                format!(
-                    "Unable to load the certificate keys [{}]: {}",
-                    certs_keys_path_str, e
-                ),
+                format!("Unable to load the certificate keys [{certs_keys_path_str}]: {e}"),
             )
         })?
         .read_to_end(&mut encoded_keys)?;

@@ -93,12 +87,9 @@ where
             let server_config_builder = ServerConfig::builder()
                 .with_safe_defaults()
                 .with_no_client_auth();
-            if let Ok(found_config) =
-                server_config_builder.with_single_cert(certs.clone(), certs_key)
-            {
-                Some(found_config)
-            } else {
-                None
+            match server_config_builder.with_single_cert(certs.clone(), certs_key) {
+                Ok(found_config) => Some(found_config),
+                _ => None,
             }
         })
         .ok_or_else(|| {

@@ -163,7 +154,7 @@ impl DoH {
                     break;
                 }
             }
-            Err(e) => eprintln!("TLS certificates error: {}", e),
+            Err(e) => eprintln!("TLS certificates error: {e}"),
         }
         tokio::time::sleep(Duration::from_secs(CERTS_WATCH_DELAY_SECS.into())).await;
     }
16  src/utils.rs

@@ -2,25 +2,23 @@

 use std::net::{SocketAddr, ToSocketAddrs};

-pub(crate) fn verify_sock_addr(arg_val: &str) -> Result<(), String> {
+pub(crate) fn verify_sock_addr(arg_val: &str) -> Result<String, String> {
     match arg_val.parse::<SocketAddr>() {
-        Ok(_addr) => Ok(()),
+        Ok(_addr) => Ok(arg_val.to_string()),
         Err(_) => Err(format!(
-            "Could not parse \"{}\" as a valid socket address (with port).",
-            arg_val
+            "Could not parse \"{arg_val}\" as a valid socket address (with port)."
         )),
     }
 }

-pub(crate) fn verify_remote_server(arg_val: &str) -> Result<(), String> {
+pub(crate) fn verify_remote_server(arg_val: &str) -> Result<String, String> {
     match arg_val.to_socket_addrs() {
         Ok(mut addr_iter) => match addr_iter.next() {
-            Some(_) => Ok(()),
+            Some(_) => Ok(arg_val.to_string()),
             None => Err(format!(
-                "Could not parse \"{}\" as a valid remote uri",
-                arg_val
+                "Could not parse \"{arg_val}\" as a valid remote uri"
             )),
         },
-        Err(err) => Err(format!("{}", err)),
+        Err(err) => Err(format!("{err}")),
     }
 }
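The signature change from `Result<(), String>` to `Result<String, String>` follows from the `value_parser` switch in src/config.rs: a clap 4 value parser must return the value to be stored, not merely confirm that it is valid. A small self-contained sketch of the same pattern (hypothetical, not repository code):

```rust
use std::net::SocketAddr;

// clap 4 stores whatever the parser returns, so the validated string is
// handed back instead of `()`.
fn verify_sock_addr(arg_val: &str) -> Result<String, String> {
    arg_val
        .parse::<SocketAddr>()
        .map(|_| arg_val.to_string())
        .map_err(|_| format!("Could not parse \"{arg_val}\" as a valid socket address (with port)."))
}

fn main() {
    assert!(verify_sock_addr("127.0.0.1:3000").is_ok());
    assert!(verify_sock_addr("not-an-address").is_err());
}
```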