Mirror of https://github.com/DNSCrypt/doh-server.git (synced 2025-04-03 21:17:36 +03:00)
Compare commits
89 commits, listed by abbreviated SHA-1:

f0242354d3, 25fa6946e6, 2254632d33, 672d1a11f1, 9e4a931bce, 40b0b02972, bf443c33b9, 1a0a0566c4,
890a74276f, 34f614e938, d6635eebb7, c79501aea3, e73964fa1d, bafbdc0926, 30a55a0f2f, 7bb8293c28,
a6517472d5, 3511672d49, bd85572368, 02b3a67a00, 66c66c7a28, 1165fab90c, c92308ccbb, 78c47830ff,
9e2853da86, e5f6f2a5d6, e8df0458ac, 19040f1e88, 6f9f63e754, 678bd04bed, ffa0828515, 6580f6ffb5,
f64770bdd7, 18297228c7, 908e7d64db, c54b3303fc, 1c5c83803a, 1386b7d13a, 920d31b502, 651224d900,
b5d525abcd, 11d8f4cb31, 47330ebcad, d5fd8231ff, 8cba04338e, 85280f4525, 1c28a28b78, fbf82068d1,
c9e084b2b4, 37dc663b6e, b81cc3e5d2, 3f1bbcd8dc, e92fddb165, d573a20c86, f5c07a205b, d277c0a806,
fc61c79a9f, a92f4a77ae, a373957045, 6f5213838b, eede3f4ab3, fdcc797fcb, 3e59f42558, a1fc5bbffc,
4b887d6705, 6818fbe8a1, c82fb339ed, 06a3fa0499, 8b9f9377b3, 767b3e17b1, a60ced8782, 25d1261730,
ff62b6a24b, fd65582aa6, d12b9deb35, 965bca7fde, 5b11bc520e, ab4c27ef86, db9c8634e3, 533c29ec1e,
e27ab7dee9, 511b0b4388, 74939bdc6c, 054beb390c, 16ab626cc2, 115938f90f, c6c9d64681, d586c50019,
46be8b9662
18 changed files with 446 additions and 255 deletions
.github/workflows/issues.yml (vendored, new file, +17)

@@ -0,0 +1,17 @@
+name: Close inactive issues
+on:
+  schedule:
+    - cron: "30 1 * * *"
+
+jobs:
+  close-issues:
+    runs-on: ubuntu-latest
+    permissions:
+      issues: write
+      pull-requests: write
+    steps:
+      - uses: actions/stale@v9
+        with:
+          stale-issue-message: "This issue is stale because it has been open for 30 days with no activity."
+          close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/release.yml (vendored, 118 changed lines)

@@ -14,7 +14,11 @@ jobs:
         id: get_version
         run: echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/}
 
-      - uses: actions/checkout@master
+      - uses: actions/checkout@v3
+
+      - uses: mlugg/setup-zig@v1
+        with:
+          version: 0.10.1
 
       - uses: hecrj/setup-rust-action@master
         with:
@@ -27,18 +31,60 @@ jobs:
        run: rustup default | grep stable
 
       - name: Install cargo-deb
-        run: cargo install --debug cargo-deb
+        run: cargo install cargo-deb
 
-      - name: Release build
+      - name: Install cargo-generate-rpm
+        run: cargo install cargo-generate-rpm
+
+      - name: Install cargo-zigbuild
+        run: cargo install cargo-zigbuild
+
+      - name: Release build Linux-x86-64
         run: |
-          env RUSTFLAGS="-C link-arg=-s" cargo build --release
+          rustup target add x86_64-unknown-linux-musl
+          env RUSTFLAGS="-C strip=symbols" cargo zigbuild --release --target x86_64-unknown-linux-musl
           mkdir doh-proxy
-          mv target/release/doh-proxy doh-proxy/
+          mv target/x86_64-unknown-linux-musl/release/doh-proxy doh-proxy/
           cp README.md localhost.pem doh-proxy/
-          tar cJpf doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-x86_64.tar.bz2 doh-proxy
-      - name: Debian package
+          tar cjpf doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-x86_64.tar.bz2 doh-proxy
+          rm -fr doh-proxy
+
+      - name: Release build Linux-aarch64
         run: |
-          cargo deb
+          rustup target add aarch64-unknown-linux-musl
+          env RUSTFLAGS="-C strip=symbols" cargo zigbuild --release --target aarch64-unknown-linux-musl
+          mkdir doh-proxy
+          mv target/aarch64-unknown-linux-musl/release/doh-proxy doh-proxy/
+          cp README.md localhost.pem doh-proxy/
+          tar cjpf doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-aarch64.tar.bz2 doh-proxy
+          rm -fr doh-proxy
+
+      - name: Release build Windows-x86_64
+        run: |
+          rustup target add x86_64-pc-windows-gnu
+          env RUSTFLAGS="-C strip=symbols" cargo zigbuild --release --target x86_64-pc-windows-gnu
+          mkdir doh-proxy
+          mv target/x86_64-pc-windows-gnu/release/doh-proxy.exe doh-proxy/
+          cp README.md localhost.pem doh-proxy/
+          zip -9 -r doh-proxy_${{ steps.get_version.outputs.VERSION }}_windows-x86_64.zip doh-proxy
+          rm -fr doh-proxy
+
+      - name: Debian packages
+        run: |
+          rustup target add x86_64-unknown-linux-musl
+          env RUSTFLAGS="-C strip=symbols" cargo deb --no-strip --cargo-build=zigbuild --target=x86_64-unknown-linux-musl
+          rustup target add aarch64-unknown-linux-musl
+          env RUSTFLAGS="-C strip=symbols" cargo deb --no-strip --cargo-build=zigbuild --target=aarch64-unknown-linux-musl
+
+      - name: RPM packages
+        run: |
+          rustup target add x86_64-unknown-linux-gnu
+          env RUSTFLAGS="-C strip=symbols" cargo-zigbuild build --target=x86_64-unknown-linux-gnu.2.17 --release
+          mv target/x86_64-unknown-linux-musl/release/doh-proxy target/release/
+          cargo generate-rpm --target x86_64-unknown-linux-gnu
+          rustup target add aarch64-unknown-linux-gnu
+          env RUSTFLAGS="-C strip=symbols" cargo-zigbuild build --target=aarch64-unknown-linux-gnu.2.17 --release
+          cargo generate-rpm --target aarch64-unknown-linux-gnu
 
       - name: Create release
         id: create_release
@@ -51,19 +97,41 @@ jobs:
           draft: true
           prerelease: false
 
-      - name: Upload Debian package
-        id: upload-release-asset-debian
+      - name: Upload Debian package for x86_64
+        id: upload-release-asset-debian-x86_64
         uses: actions/upload-release-asset@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_name: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_amd64.deb"
-          asset_path: "target/debian/doh-proxy_${{ steps.get_version.outputs.VERSION }}_amd64.deb"
+          asset_name: "doh-proxy_${{ steps.get_version.outputs.VERSION }}-1_amd64.deb"
+          asset_path: "target/x86_64-unknown-linux-musl/debian/doh-proxy_${{ steps.get_version.outputs.VERSION }}-1_amd64.deb"
           asset_content_type: application/x-debian-package
 
-      - name: Upload tarball
-        id: upload-release-asset-tarball
+      - name: Upload RPM package for x86_64
+        id: upload-release-asset-rpm-x86_64
         uses: actions/upload-release-asset@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           upload_url: ${{ steps.create_release.outputs.upload_url }}
+          asset_name: "doh-proxy-${{ steps.get_version.outputs.VERSION }}-1.x86_64.rpm"
+          asset_path: "target/x86_64-unknown-linux-gnu/generate-rpm/doh-proxy-${{ steps.get_version.outputs.VERSION }}-1.x86_64.rpm"
+          asset_content_type: application/x-redhat-package-manager
+
+      - name: Upload RPM package for aarch64
+        id: upload-release-asset-rpm-aarch64
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ steps.create_release.outputs.upload_url }}
+          asset_name: "doh-proxy-${{ steps.get_version.outputs.VERSION }}-1.aarch64.rpm"
+          asset_path: "target/aarch64-unknown-linux-gnu/generate-rpm/doh-proxy-${{ steps.get_version.outputs.VERSION }}-1.aarch64.rpm"
+          asset_content_type: application/x-redhat-package-manager
+
+      - name: Upload tarball for linux-x86_64
+        id: upload-release-asset-tarball-linux-x86_64
         uses: actions/upload-release-asset@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -72,3 +140,25 @@ jobs:
           asset_name: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-x86_64.tar.bz2"
           asset_path: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-x86_64.tar.bz2"
           asset_content_type: application/x-tar
+
+      - name: Upload tarball for linux-aarch64
+        id: upload-release-asset-tarball-linux-aarch64
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ steps.create_release.outputs.upload_url }}
+          asset_name: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-aarch64.tar.bz2"
+          asset_path: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_linux-aarch64.tar.bz2"
+          asset_content_type: application/x-tar
+
+      - name: Upload tarball for windows-x86_64
+        id: upload-release-asset-tarball-windows-x86_64
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ steps.create_release.outputs.upload_url }}
+          asset_name: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_windows-x86_64.zip"
+          asset_path: "doh-proxy_${{ steps.get_version.outputs.VERSION }}_windows-x86_64.zip"
+          asset_content_type: application/zip
(file name not captured; a Travis CI configuration, deleted in this comparison)

@@ -1,4 +0,0 @@
-language: rust
-rust:
-  - nightly
-  - stable
Cargo.toml (30 changed lines)

@@ -1,9 +1,9 @@
 [package]
 name = "doh-proxy"
-version = "0.9.2"
+version = "0.9.11"
 authors = ["Frank Denis <github@pureftpd.org>"]
 description = "A DNS-over-HTTPS (DoH) and ODoH (Oblivious DoH) proxy"
-keywords = ["dns","https","doh","odoh","proxy"]
+keywords = ["dns", "https", "doh", "odoh", "proxy"]
 license = "MIT"
 homepage = "https://github.com/jedisct1/rust-doh"
 repository = "https://github.com/jedisct1/rust-doh"
@@ -16,17 +16,31 @@ default = ["tls"]
 tls = ["libdoh/tls"]
 
 [dependencies]
-libdoh = { path = "src/libdoh", version = "0.9.0", default-features = false }
-clap = "2.33.3"
-dnsstamps = "0.1.9"
-jemallocator = "0.3.2"
+libdoh = { path = "src/libdoh", version = "0.9.9", default-features = false }
+clap = { version = "4", features = ["std", "cargo", "wrap_help", "string"] }
+dnsstamps = "0.1.10"
+mimalloc = { version = "0.1.44", default-features = false }
+
+[package.metadata.generate-rpm]
+assets = [
+    { source = "target/release/doh-proxy", dest = "/usr/bin/doh-proxy", mode = "755" },
+    { source = "README.md", dest = "/usr/share/doc/doh-proxy/README.md", mode = "644", doc = true },
+]
 
 [package.metadata.deb]
 extended-description = """\
 A fast and secure DoH (DNS-over-HTTPS) and ODoH server written in Rust."""
 assets = [
-    ["target/release/doh-proxy", "usr/bin/", "755"],
-    ["README.md", "usr/share/doc/doh-proxy/README.md", "644"]
+    [
+        "target/release/doh-proxy",
+        "usr/bin/",
+        "755",
+    ],
+    [
+        "README.md",
+        "usr/share/doc/doh-proxy/README.md",
+        "644",
+    ],
 ]
 section = "network"
 depends = "$auto"
LICENSE (2 changed lines)

@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2018-2021 Frank Denis
+Copyright (c) 2018-2025 Frank Denis
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
README.md (30 changed lines)

@@ -1,4 +1,4 @@
-# doh-proxy
+# 
 
 A fast and secure DoH (DNS-over-HTTPS) and ODoH (Oblivious DoH) server.
 
@@ -60,13 +60,21 @@ OPTIONS:
         Path to the PEM/PKCS#8-encoded certificates (only required for built-in TLS)
 ```
 
-## HTTP/2 termination
+Example command-line:
 
-The recommended way to use `doh-proxy` is to use a TLS termination proxy (such as [hitch](https://github.com/varnish/hitch) or [relayd](https://bsd.plumbing/about.html)), a CDN or a web server with proxying abilities as a front-end.
+```sh
+doh-proxy -H 'doh.example.com' -u 127.0.0.1:53 -g 233.252.0.5
+```
+
+Here, `doh.example.com` is the host name (which should match a name included in the TLS certificate), `127.0.0.1:53` is the address of the DNS resolver, and `233.252.0.5` is the public IP address of the DoH server.
+
+## HTTP/2 and HTTP/3 termination
+
+The recommended way to use `doh-proxy` is to use a TLS termination proxy (such as [hitch](https://github.com/varnish/hitch) or [relayd](https://man.openbsd.org/relayd.8)), a CDN or a web server with proxying abilities as a front-end.
 
 That way, the DoH service can be exposed as a virtual host, sharing the same IP addresses as existing websites.
 
-If `doh-proxy` and the HTTP/2 front-end run on the same host, using the HTTP protocol to communicate between both is fine.
+If `doh-proxy` and the HTTP/2 (/ HTTP/3) front-end run on the same host, using the HTTP protocol to communicate between both is fine.
 
 If both are on distinct networks, such as when using a CDN, `doh-proxy` can handle HTTPS requests, provided that it was compiled with the `tls` feature.
 
@@ -128,7 +136,7 @@ This can be achieved with the `--allow-odoh-post` command-line switch.
 * When using DoH, DNS stamps should include a resolver IP address in order to remove a dependency on non-encrypted, non-authenticated, easy-to-block resolvers.
 * Unlike DNSCrypt where users must explicitly trust a DNS server's public key, the security of DoH relies on traditional public Certificate Authorities. Additional root certificates (required by governments, security software, enterprise gateways) installed on a client immediately make DoH vulnerable to MITM. In order to prevent this, DNS stamps should include the hash of the parent certificate.
 * TLS certificates are tied to host names. But domains expire, get reassigned and switch hands all the time. If a domain originally used for a DoH service gets a new, possibly malicious owner, clients still configured to use the service will blindly keep trusting it if the CA is the same. As a mitigation, the CA should sign an intermediate certificate (the only one present in the stamp), itself used to sign the name used by the DoH server. While commercial CAs offer this, Let's Encrypt currently doesn't.
-* Make sure that the front-end supports HTTP/2 and TLS 1.3.
+* Make sure that the front-end supports at least HTTP/2 and TLS 1.3.
 * Internal DoH servers still require TLS certificates. So, if you are planning to deploy an internal server, you need to set up an internal CA, or add self-signed certificates to every single client.
 
 ## Example usage with `encrypted-dns-server`
@@ -142,10 +150,10 @@ upstream_addr = "127.0.0.1:3000"
 
 ## Example usage with `nginx`
 
-In an existing `server`, a `/doh` endpoint can be exposed that way:
+In an existing `server`, a `/dns-query` endpoint can be exposed that way:
 
 ```text
-location /doh {
+location /dns-query {
     proxy_pass http://127.0.0.1:3000;
 }
 ```
@@ -187,10 +195,14 @@ This [Go code snippet](https://gist.github.com/d6cb41742a1ceb54d48cc286f3d5c5fa)
 
 ### Common certificate hashes
 
-* Let's Encrypt R3:
-  * 444ebd67bb83f8807b3921e938ac9178b882bd50aadb11231f044cf5f08df7ce
 * Let's Encrypt E1:
   * cc1060d39c8329b62b6fbc7d0d6df9309869b981e7e6392d5cd8fa408f4d80e6
+* Let's Encrypt R3:
+  * 444ebd67bb83f8807b3921e938ac9178b882bd50aadb11231f044cf5f08df7ce
+* Let's Encrypt R10:
+  * e644ba6963e335fe765cb9976b12b10eb54294b42477764ccb3a3acca3acb2fc
+* ZeroSSL:
+  * 9a3a34f727deb9bca51003d9ce9c39f8f27dd9c5242901c2bab1a44e635a0219
 
 ## Clients
 
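The README's DNS-stamp advice pairs with the `dnsstamps` builder that `doh-proxy` itself uses (see the `src/config.rs` hunks later in this comparison). The sketch below is illustrative only, reusing the hostname, path and address from the README's example command line; it is not part of the README or of the crate.

```rust
// Hedged sketch: build a test DoH stamp the same way src/config.rs does in
// this changeset, with the dnsstamps crate. Hostname, path and IP address are
// the README's example values, not real infrastructure.
fn main() {
    let hostname = "doh.example.com".to_string();
    let path = "/dns-query".to_string();

    let stamp = dnsstamps::DoHBuilder::new(hostname, path)
        .with_address("233.252.0.5".to_string())
        .serialize()
        .unwrap();

    // The printed value is what clients would paste into their configuration.
    println!("Test DNS stamp: {}", stamp);
}
```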
logo.png (new binary file, not shown; 58 KiB)
src/config.rs (265 changed lines)

@@ -1,14 +1,13 @@
+use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6, ToSocketAddrs};
+#[cfg(feature = "tls")]
+use std::path::PathBuf;
+use std::time::Duration;
+
+use clap::{Arg, ArgAction::SetTrue};
 use libdoh::*;
 
 use crate::constants::*;
 
-use clap::Arg;
-use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6, ToSocketAddrs};
-use std::time::Duration;
-
-#[cfg(feature = "tls")]
-use std::path::PathBuf;
-
 pub fn parse_opts(globals: &mut Globals) {
     use crate::utils::{verify_remote_server, verify_sock_addr};
 
@@ -20,125 +19,128 @@ pub fn parse_opts(globals: &mut Globals) {
     let err_ttl = ERR_TTL.to_string();
 
     let _ = include_str!("../Cargo.toml");
-    let options = app_from_crate!()
+    let options = command!()
         .arg(
-            Arg::with_name("hostname")
-                .short("H")
+            Arg::new("hostname")
+                .short('H')
                 .long("hostname")
-                .takes_value(true)
+                .num_args(1)
                 .help("Host name (not IP address) DoH clients will use to connect"),
         )
         .arg(
-            Arg::with_name("public_address")
-                .short("g")
+            Arg::new("public_address")
+                .short('g')
                 .long("public-address")
-                .takes_value(true)
+                .num_args(1)
                 .help("External IP address DoH clients will connect to"),
         )
         .arg(
-            Arg::with_name("public_port")
-                .short("j")
+            Arg::new("public_port")
+                .short('j')
                 .long("public-port")
-                .takes_value(true)
+                .num_args(1)
                 .help("External port DoH clients will connect to, if not 443"),
         )
         .arg(
-            Arg::with_name("listen_address")
-                .short("l")
+            Arg::new("listen_address")
+                .short('l')
                 .long("listen-address")
-                .takes_value(true)
+                .num_args(1)
                 .default_value(LISTEN_ADDRESS)
-                .validator(verify_sock_addr)
+                .value_parser(verify_sock_addr)
                 .help("Address to listen to"),
         )
         .arg(
-            Arg::with_name("server_address")
-                .short("u")
+            Arg::new("server_address")
-                .short('u')
                 .long("server-address")
-                .takes_value(true)
+                .num_args(1)
                 .default_value(SERVER_ADDRESS)
-                .validator(verify_remote_server)
+                .value_parser(verify_remote_server)
                 .help("Address to connect to"),
         )
        .arg(
-            Arg::with_name("local_bind_address")
-                .short("b")
+            Arg::new("local_bind_address")
+                .short('b')
                 .long("local-bind-address")
-                .takes_value(true)
-                .validator(verify_sock_addr)
+                .num_args(1)
+                .value_parser(verify_sock_addr)
                 .help("Address to connect from"),
        )
        .arg(
-            Arg::with_name("path")
-                .short("p")
+            Arg::new("path")
+                .short('p')
                 .long("path")
-                .takes_value(true)
+                .num_args(1)
                 .default_value(PATH)
                 .help("URI path"),
        )
        .arg(
-            Arg::with_name("max_clients")
-                .short("c")
+            Arg::new("max_clients")
+                .short('c')
                 .long("max-clients")
-                .takes_value(true)
-                .default_value(&max_clients)
+                .num_args(1)
+                .default_value(max_clients)
                 .help("Maximum number of simultaneous clients"),
        )
        .arg(
-            Arg::with_name("max_concurrent")
-                .short("C")
+            Arg::new("max_concurrent")
+                .short('C')
                 .long("max-concurrent")
-                .takes_value(true)
-                .default_value(&max_concurrent_streams)
+                .num_args(1)
+                .default_value(max_concurrent_streams)
                 .help("Maximum number of concurrent requests per client"),
        )
        .arg(
-            Arg::with_name("timeout")
-                .short("t")
+            Arg::new("timeout")
+                .short('t')
                 .long("timeout")
-                .takes_value(true)
-                .default_value(&timeout_sec)
+                .num_args(1)
+                .default_value(timeout_sec)
                 .help("Timeout, in seconds"),
        )
        .arg(
-            Arg::with_name("min_ttl")
-                .short("T")
+            Arg::new("min_ttl")
+                .short('T')
                 .long("min-ttl")
-                .takes_value(true)
-                .default_value(&min_ttl)
+                .num_args(1)
+                .default_value(min_ttl)
                 .help("Minimum TTL, in seconds"),
        )
        .arg(
-            Arg::with_name("max_ttl")
-                .short("X")
+            Arg::new("max_ttl")
+                .short('X')
                 .long("max-ttl")
-                .takes_value(true)
-                .default_value(&max_ttl)
+                .num_args(1)
+                .default_value(max_ttl)
                 .help("Maximum TTL, in seconds"),
        )
        .arg(
-            Arg::with_name("err_ttl")
-                .short("E")
+            Arg::new("err_ttl")
+                .short('E')
                 .long("err-ttl")
-                .takes_value(true)
-                .default_value(&err_ttl)
+                .num_args(1)
+                .default_value(err_ttl)
                 .help("TTL for errors, in seconds"),
        )
        .arg(
-            Arg::with_name("disable_keepalive")
-                .short("K")
+            Arg::new("disable_keepalive")
+                .short('K')
+                .action(SetTrue)
                 .long("disable-keepalive")
                 .help("Disable keepalive"),
        )
        .arg(
-            Arg::with_name("disable_post")
-                .short("P")
+            Arg::new("disable_post")
+                .short('P')
+                .action(SetTrue)
                 .long("disable-post")
                 .help("Disable POST queries"),
        )
        .arg(
-            Arg::with_name("allow_odoh_post")
-                .short("O")
+            Arg::new("allow_odoh_post")
+                .short('O')
+                .action(SetTrue)
                 .long("allow-odoh-post")
                 .help("Allow POST queries over ODoH even if they have been disabed for DoH"),
        );
@@ -146,33 +148,36 @@ pub fn parse_opts(globals: &mut Globals) {
     #[cfg(feature = "tls")]
     let options = options
         .arg(
-            Arg::with_name("tls_cert_path")
-                .short("i")
+            Arg::new("tls_cert_path")
+                .short('i')
                 .long("tls-cert-path")
-                .takes_value(true)
+                .num_args(1)
                 .help(
                     "Path to the PEM/PKCS#8-encoded certificates (only required for built-in TLS)",
                 ),
         )
         .arg(
-            Arg::with_name("tls_cert_key_path")
-                .short("I")
+            Arg::new("tls_cert_key_path")
+                .short('I')
                 .long("tls-cert-key-path")
-                .takes_value(true)
+                .num_args(1)
                 .help("Path to the PEM-encoded secret keys (only required for built-in TLS)"),
         );
 
     let matches = options.get_matches();
-    globals.listen_address = matches.value_of("listen_address").unwrap().parse().unwrap();
+
+    globals.listen_address = matches
+        .get_one::<String>("listen_address")
+        .unwrap()
+        .parse()
+        .unwrap();
     globals.server_address = matches
-        .value_of("server_address")
+        .get_one::<String>("server_address")
         .unwrap()
         .to_socket_addrs()
         .unwrap()
         .next()
         .unwrap();
-    globals.local_bind_address = match matches.value_of("local_bind_address") {
+    globals.local_bind_address = match matches.get_one::<String>("local_bind_address") {
         Some(address) => address.parse().unwrap(),
         None => match globals.server_address {
             SocketAddr::V4(_) => SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::UNSPECIFIED, 0)),
@@ -184,59 +189,93 @@ pub fn parse_opts(globals: &mut Globals) {
         )),
         },
     };
-    globals.path = matches.value_of("path").unwrap().to_string();
+    globals.path = matches.get_one::<String>("path").unwrap().to_string();
     if !globals.path.starts_with('/') {
         globals.path = format!("/{}", globals.path);
     }
-    globals.max_clients = matches.value_of("max_clients").unwrap().parse().unwrap();
-    globals.timeout = Duration::from_secs(matches.value_of("timeout").unwrap().parse().unwrap());
-    globals.max_concurrent_streams = matches.value_of("max_concurrent").unwrap().parse().unwrap();
-    globals.min_ttl = matches.value_of("min_ttl").unwrap().parse().unwrap();
-    globals.max_ttl = matches.value_of("max_ttl").unwrap().parse().unwrap();
-    globals.err_ttl = matches.value_of("err_ttl").unwrap().parse().unwrap();
-    globals.keepalive = !matches.is_present("disable_keepalive");
-    globals.disable_post = matches.is_present("disable_post");
-    globals.allow_odoh_post = matches.is_present("allow_odoh_post");
+    globals.max_clients = matches
+        .get_one::<String>("max_clients")
+        .unwrap()
+        .parse()
+        .unwrap();
+    globals.timeout = Duration::from_secs(
+        matches
+            .get_one::<String>("timeout")
+            .unwrap()
+            .parse()
+            .unwrap(),
+    );
+    globals.max_concurrent_streams = matches
+        .get_one::<String>("max_concurrent")
+        .unwrap()
+        .parse()
+        .unwrap();
+    globals.min_ttl = matches
+        .get_one::<String>("min_ttl")
+        .unwrap()
+        .parse()
+        .unwrap();
+    globals.max_ttl = matches
+        .get_one::<String>("max_ttl")
+        .unwrap()
+        .parse()
+        .unwrap();
+    globals.err_ttl = matches
+        .get_one::<String>("err_ttl")
+        .unwrap()
+        .parse()
+        .unwrap();
+    globals.keepalive = !matches.get_flag("disable_keepalive");
+    globals.disable_post = matches.get_flag("disable_post");
+    globals.allow_odoh_post = matches.get_flag("allow_odoh_post");
 
     #[cfg(feature = "tls")]
     {
-        globals.tls_cert_path = matches.value_of("tls_cert_path").map(PathBuf::from);
+        globals.tls_cert_path = matches
+            .get_one::<String>("tls_cert_path")
+            .map(PathBuf::from);
         globals.tls_cert_key_path = matches
-            .value_of("tls_cert_key_path")
+            .get_one::<String>("tls_cert_key_path")
            .map(PathBuf::from)
            .or_else(|| globals.tls_cert_path.clone());
     }
 
-    if let Some(hostname) = matches.value_of("hostname") {
-        let mut builder =
-            dnsstamps::DoHBuilder::new(hostname.to_string(), globals.path.to_string());
-        if let Some(public_address) = matches.value_of("public_address") {
-            builder = builder.with_address(public_address.to_string());
-        }
-        if let Some(public_port) = matches.value_of("public_port") {
-            let public_port = public_port.parse().expect("Invalid public port");
-            builder = builder.with_port(public_port);
-        }
-        println!(
-            "Test DNS stamp to reach [{}] over DoH: [{}]\n",
-            hostname,
-            builder.serialize().unwrap()
-        );
+    match matches.get_one::<String>("hostname") {
+        Some(hostname) => {
+            let mut builder =
+                dnsstamps::DoHBuilder::new(hostname.to_string(), globals.path.to_string());
+            if let Some(public_address) = matches.get_one::<String>("public_address") {
+                builder = builder.with_address(public_address.to_string());
+            }
+            if let Some(public_port) = matches.get_one::<String>("public_port") {
+                let public_port = public_port.parse().expect("Invalid public port");
+                builder = builder.with_port(public_port);
+            }
+            println!(
+                "Test DNS stamp to reach [{}] over DoH: [{}]\n",
+                hostname,
+                builder.serialize().unwrap()
+            );
 
-        let mut builder =
-            dnsstamps::ODoHTargetBuilder::new(hostname.to_string(), globals.path.to_string());
-        if let Some(public_port) = matches.value_of("public_port") {
-            let public_port = public_port.parse().expect("Invalid public port");
-            builder = builder.with_port(public_port);
-        }
-        println!(
-            "Test DNS stamp to reach [{}] over Oblivious DoH: [{}]\n",
-            hostname,
-            builder.serialize().unwrap()
-        );
+            let mut builder =
+                dnsstamps::ODoHTargetBuilder::new(hostname.to_string(), globals.path.to_string());
+            if let Some(public_port) = matches.get_one::<String>("public_port") {
+                let public_port = public_port.parse().expect("Invalid public port");
+                builder = builder.with_port(public_port);
+            }
+            println!(
+                "Test DNS stamp to reach [{}] over Oblivious DoH: [{}]\n",
+                hostname,
+                builder.serialize().unwrap()
+            );
 
-        println!("Check out https://dnscrypt.info/stamps/ to compute the actual stamps.\n")
-    } else {
-        println!("Please provide a fully qualified hostname (-H <hostname> command-line option) to get test DNS stamps for your server.\n");
+            println!("Check out https://dnscrypt.info/stamps/ to compute the actual stamps.\n")
+        }
+        _ => {
+            println!(
+                "Please provide a fully qualified hostname (-H <hostname> command-line option) to get \
+                 test DNS stamps for your server.\n"
+            );
+        }
     }
 }
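The whole file is a mechanical clap 2 to clap 4 migration: `Arg::new` plus `num_args`/`value_parser`/`ArgAction::SetTrue` replace `Arg::with_name` plus `takes_value`/`validator`/`is_present`, and values are read back with `get_one::<String>()` and `get_flag()`. A minimal, stand-alone sketch of the same pattern (a hypothetical `demo` binary, not the actual doh-proxy CLI):

```rust
use clap::{Arg, ArgAction, Command};

fn main() {
    let matches = Command::new("demo")
        .arg(
            // clap 4: Arg::new + char short flag + num_args instead of takes_value.
            Arg::new("listen_address")
                .short('l')
                .long("listen-address")
                .num_args(1)
                .default_value("127.0.0.1:3000"),
        )
        .arg(
            // Boolean flags now need an explicit SetTrue action.
            Arg::new("disable_post")
                .short('P')
                .long("disable-post")
                .action(ArgAction::SetTrue),
        )
        .get_matches();

    // Typed accessors replace value_of()/is_present().
    let listen: &String = matches.get_one("listen_address").unwrap();
    let disable_post = matches.get_flag("disable_post");
    println!("listen={listen} disable_post={disable_post}");
}
```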
(file name not captured; the libdoh crate's Cargo manifest)

@@ -1,13 +1,13 @@
 [package]
 name = "libdoh"
-version = "0.9.2"
+version = "0.9.11"
 authors = ["Frank Denis <github@pureftpd.org>"]
 description = "DoH and Oblivious DoH library for the rust-doh app"
-keywords = ["dns","https","doh","odoh","proxy"]
+keywords = ["dns", "https", "doh", "odoh", "proxy"]
 license = "MIT"
 homepage = "https://github.com/jedisct1/rust-doh"
 repository = "https://github.com/jedisct1/rust-doh"
-categories = ["asynchronous", "network-programming","command-line-utilities"]
+categories = ["asynchronous", "network-programming", "command-line-utilities"]
 edition = "2018"
 
 [features]
@@ -15,18 +15,31 @@ default = ["tls"]
 tls = ["tokio-rustls"]
 
 [dependencies]
-anyhow = "1.0.43"
-arc-swap = "1.3.2"
-base64 = "0.13.0"
-byteorder = "1.4.3"
-bytes = "1.1.0"
-futures = "0.3.17"
-hpke = "0.5.1"
-hyper = { version = "0.14.12", default-features = false, features = ["server", "http1", "http2", "stream"] }
-odoh-rs = "1.0.0-alpha.1"
-rand = "0.8.4"
-tokio = { version = "1.11.0", features = ["net", "rt-multi-thread", "parking_lot", "time", "sync"] }
-tokio-rustls = { version = "0.22.0", features = ["early-data"], optional = true }
+anyhow = "1.0.97"
+arc-swap = "1.7.1"
+base64 = "0.22.1"
+byteorder = "1.5.0"
+bytes = "1.10.1"
+futures = "0.3.31"
+hyper = { version = "^0.14.32", default-features = false, features = [
+    "server",
+    "http1",
+    "http2",
+    "stream",
+    "runtime",
+] }
+odoh-rs = "1.0.3"
+rand = "^0.8.5"
+tokio = { version = "1.44.1", features = [
+    "net",
+    "rt-multi-thread",
+    "time",
+    "sync",
+] }
+tokio-rustls = { version = "^0.24.1", features = [
+    "early-data",
+], optional = true }
+rustls-pemfile = "^1.0.4"
 
 [profile.release]
 codegen-units = 1
(file name not captured; a second MIT LICENSE file with the same change)

@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2018-2021 Frank Denis
+Copyright (c) 2018-2025 Frank Denis
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
(file name not captured; libdoh's EDNS helper)

@@ -180,7 +180,7 @@ fn add_edns_section(packet: &mut Vec<u8>, max_payload_size: u16) -> Result<(), E
         "Packet would be too large to add a new record"
     );
     arcount_inc(packet)?;
-    packet.extend(&opt_rr);
+    packet.extend(opt_rr);
     Ok(())
 }
 
(file name not captured; the DoHError type and its Display impl)

@@ -1,6 +1,7 @@
-use hyper::StatusCode;
 use std::io;
 
+use hyper::StatusCode;
+
 #[derive(Debug)]
 pub enum DoHError {
     Incomplete,
@@ -26,9 +27,9 @@ impl std::fmt::Display for DoHError {
             DoHError::UpstreamIssue => write!(fmt, "Upstream error"),
             DoHError::UpstreamTimeout => write!(fmt, "Upstream timeout"),
             DoHError::StaleKey => write!(fmt, "Stale key material"),
-            DoHError::Hyper(e) => write!(fmt, "HTTP error: {}", e),
-            DoHError::Io(e) => write!(fmt, "IO error: {}", e),
-            DoHError::ODoHConfigError(e) => write!(fmt, "ODoH config error: {}", e),
+            DoHError::Hyper(e) => write!(fmt, "HTTP error: {e}"),
+            DoHError::Io(e) => write!(fmt, "IO error: {e}"),
+            DoHError::ODoHConfigError(e) => write!(fmt, "ODoH config error: {e}"),
             DoHError::TooManyTcpSessions => write!(fmt, "Too many TCP sessions"),
         }
     }
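Apart from import reordering, the only change here is switching the `Display` arms to captured identifiers in format strings, a pattern repeated throughout this changeset. A tiny illustrative sketch of the before and after, not taken from the crate:

```rust
use std::io;

fn main() {
    let e = io::Error::new(io::ErrorKind::Other, "boom");
    // Old style: positional format argument.
    println!("IO error: {}", e);
    // New style: the identifier is captured directly inside the format string.
    println!("IO error: {e}");
}
```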
(file name not captured; libdoh's Globals module, imports only)

@@ -1,12 +1,13 @@
-use crate::odoh::ODoHRotator;
 use std::net::SocketAddr;
-#[cfg(feature = "tls")]
-use std::path::PathBuf;
 use std::sync::atomic::{AtomicUsize, Ordering};
 use std::sync::Arc;
 use std::time::Duration;
 
 use tokio::runtime;
 
+#[cfg(feature = "tls")]
+use std::path::PathBuf;
+use crate::odoh::ODoHRotator;
+
 #[derive(Debug)]
 pub struct Globals {
(file name not captured; the libdoh library root: imports, base64 engine, hyper service impl, listener output)

@@ -6,28 +6,38 @@ pub mod odoh;
 #[cfg(feature = "tls")]
 mod tls;
 
-use crate::constants::*;
-pub use crate::errors::*;
-pub use crate::globals::*;
+use std::net::SocketAddr;
+use std::pin::Pin;
+use std::sync::Arc;
+use std::time::Duration;
 
+use base64::engine::Engine;
 use byteorder::{BigEndian, ByteOrder};
 use futures::prelude::*;
 use futures::task::{Context, Poll};
 use hyper::http;
 use hyper::server::conn::Http;
 use hyper::{Body, HeaderMap, Method, Request, Response, StatusCode};
-use std::net::SocketAddr;
-use std::pin::Pin;
-use std::sync::Arc;
-use std::time::Duration;
 use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
 use tokio::net::{TcpListener, TcpSocket, UdpSocket};
 use tokio::runtime;
 
+use crate::constants::*;
+pub use crate::errors::*;
+pub use crate::globals::*;
+
 pub mod reexports {
     pub use tokio;
 }
 
+const BASE64_URL_SAFE_NO_PAD: base64::engine::GeneralPurpose =
+    base64::engine::general_purpose::GeneralPurpose::new(
+        &base64::alphabet::URL_SAFE,
+        base64::engine::general_purpose::GeneralPurposeConfig::new()
+            .with_encode_padding(false)
+            .with_decode_padding_mode(base64::engine::DecodePaddingMode::Indifferent),
+    );
+
 #[derive(Clone, Debug)]
 struct DnsResponse {
     packet: Vec<u8>,
@@ -86,9 +96,9 @@ where
 
 #[allow(clippy::type_complexity)]
 impl hyper::service::Service<http::Request<Body>> for DoH {
-    type Response = Response<Body>;
     type Error = http::Error;
     type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
+    type Response = Response<Body>;
 
     fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
         Poll::Ready(Ok(()))
@@ -160,9 +170,9 @@ impl DoH {
                 return None;
             }
         }
-        let query = match question_str.and_then(|question_str| {
-            base64::decode_config(question_str, base64::URL_SAFE_NO_PAD).ok()
-        }) {
+        let query = match question_str
+            .and_then(|question_str| BASE64_URL_SAFE_NO_PAD.decode(question_str).ok())
+        {
            Some(query) => query,
            _ => return None,
        };
@@ -247,10 +257,7 @@ impl DoH {
         content_types: &[&'static str],
     ) -> Option<&'static str> {
         let accept = headers.get(hyper::header::ACCEPT);
-        let accept = match accept {
-            None => return None,
-            Some(accept) => accept,
-        };
+        let accept = accept?;
         for part in accept.to_str().unwrap_or("").split(',').map(|s| s.trim()) {
             if let Some(found) = part
                 .split(';')
@@ -426,8 +433,8 @@ impl DoH {
             .header(
                 hyper::header::CACHE_CONTROL,
                 format!(
-                    "max-age={}, stale-if-error={}, stale-while-revalidate={}",
-                    ttl, STALE_IF_ERROR_SECS, STALE_WHILE_REVALIDATE_SECS
+                    "max-age={ttl}, stale-if-error={STALE_IF_ERROR_SECS}, \
+                     stale-while-revalidate={STALE_WHILE_REVALIDATE_SECS}"
                 )
                 .as_str(),
             );
@@ -494,9 +501,9 @@ impl DoH {
                 self.globals.tls_cert_path.is_some() && self.globals.tls_cert_key_path.is_some();
         }
         if tls_enabled {
-            println!("Listening on https://{}{}", listen_address, path);
+            println!("Listening on https://{listen_address}{path}");
         } else {
-            println!("Listening on http://{}{}", listen_address, path);
+            println!("Listening on http://{listen_address}{path}");
         }
 
         let mut server = Http::new();
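The new `BASE64_URL_SAFE_NO_PAD` constant replaces the old `base64::decode_config` call because base64 0.21+ moved to an explicit `Engine` API. A minimal sketch of that API using the stock `URL_SAFE_NO_PAD` engine (the crate's custom engine above additionally tolerates padded input via `DecodePaddingMode::Indifferent`):

```rust
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use base64::Engine as _;

fn main() {
    // DoH GET requests carry the DNS packet as URL-safe base64 without padding
    // in the `dns` query parameter; this mirrors that round trip.
    let packet: &[u8] = &[0x12, 0x34, 0x01, 0x20];
    let encoded = URL_SAFE_NO_PAD.encode(packet);
    let decoded = URL_SAFE_NO_PAD.decode(&encoded).expect("valid base64");
    assert_eq!(decoded, packet);
    println!("dns={encoded}");
}
```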
(file name not captured; libdoh's ODoH key handling)

@@ -1,17 +1,18 @@
-use crate::constants::ODOH_KEY_ROTATION_SECS;
-use crate::errors::DoHError;
-use arc_swap::ArcSwap;
+use std::fmt;
+use std::sync::Arc;
+use std::time::Duration;
+
+use arc_swap::ArcSwap;
 use odoh_rs::{
     Deserialize, ObliviousDoHConfig, ObliviousDoHConfigs, ObliviousDoHKeyPair, ObliviousDoHMessage,
     ObliviousDoHMessagePlaintext, OdohSecret, ResponseNonce, Serialize,
 };
 use rand::Rng;
-use std::fmt;
-use std::sync::Arc;
-use std::time::Duration;
 use tokio::runtime;
 
+use crate::constants::ODOH_KEY_ROTATION_SECS;
+use crate::errors::DoHError;
+
 #[derive(Clone)]
 pub struct ODoHPublicKey {
     key_pair: ObliviousDoHKeyPair,
@@ -76,7 +77,7 @@ impl ODoHPublicKey {
 
 impl ODoHQueryContext {
     pub fn encrypt_response(self, response_body: Vec<u8>) -> Result<Vec<u8>, DoHError> {
-        let response_nonce = rand::thread_rng().gen::<ResponseNonce>();
+        let response_nonce = rand::thread_rng().r#gen::<ResponseNonce>();
         let response_body_ = ObliviousDoHMessagePlaintext::new(response_body, 0);
         let encrypted_response = odoh_rs::encrypt_response(
             &self.query,
@@ -114,7 +115,7 @@ impl ODoHRotator {
                     Ok(key) => {
                         current_key.store(Arc::new(key));
                     }
-                    Err(e) => eprintln!("ODoH key rotation error: {}", e),
+                    Err(e) => eprintln!("ODoH key rotation error: {e}"),
                 };
             }
         });
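The `gen::<ResponseNonce>()` call becomes `r#gen::<ResponseNonce>()` because `gen` is reserved as a keyword by the Rust 2024 edition, so the raw-identifier spelling keeps rand 0.8's method callable. A tiny stand-alone illustration, using a plain byte array rather than the crate's nonce type:

```rust
use rand::Rng;

fn main() {
    // `r#gen` is the raw-identifier form of rand's `gen()` method, needed once
    // `gen` is treated as a keyword (Rust 2024 edition).
    let nonce = rand::thread_rng().r#gen::<[u8; 16]>();
    println!("{nonce:?}");
}
```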
(file name not captured; libdoh's TLS acceptor setup and certificate watcher)

@@ -1,48 +1,48 @@
-use crate::constants::CERTS_WATCH_DELAY_SECS;
-use crate::errors::*;
-use crate::{DoH, LocalExecutor};
-
-use futures::{future::FutureExt, join, select};
-use hyper::server::conn::Http;
 use std::fs::File;
 use std::io::{self, BufReader, Cursor, Read};
 use std::path::Path;
 use std::sync::Arc;
 use std::time::Duration;
 
+use futures::{future::FutureExt, join, select};
+use hyper::server::conn::Http;
 use tokio::{
     net::TcpListener,
     sync::mpsc::{self, Receiver},
 };
 use tokio_rustls::{
-    rustls::{internal::pemfile, NoClientAuth, ServerConfig},
+    rustls::{Certificate, PrivateKey, ServerConfig},
     TlsAcceptor,
 };
 
+use crate::constants::CERTS_WATCH_DELAY_SECS;
+use crate::errors::*;
+use crate::{DoH, LocalExecutor};
+
 pub fn create_tls_acceptor<P, P2>(certs_path: P, certs_keys_path: P2) -> io::Result<TlsAcceptor>
 where
     P: AsRef<Path>,
     P2: AsRef<Path>,
 {
-    let certs = {
+    let certs: Vec<_> = {
         let certs_path_str = certs_path.as_ref().display().to_string();
         let mut reader = BufReader::new(File::open(certs_path).map_err(|e| {
             io::Error::new(
                 e.kind(),
-                format!(
-                    "Unable to load the certificates [{}]: {}",
-                    certs_path_str,
-                    e.to_string()
-                ),
+                format!("Unable to load the certificates [{certs_path_str}]: {e}"),
             )
         })?);
-        pemfile::certs(&mut reader).map_err(|_| {
+        rustls_pemfile::certs(&mut reader).map_err(|_| {
             io::Error::new(
                 io::ErrorKind::InvalidInput,
                 "Unable to parse the certificates",
             )
        })?
-    };
-    let certs_keys = {
+    }
+    .drain(..)
+    .map(Certificate)
+    .collect();
+    let certs_keys: Vec<_> = {
         let certs_keys_path_str = certs_keys_path.as_ref().display().to_string();
         let encoded_keys = {
             let mut encoded_keys = vec![];
@@ -50,25 +50,21 @@ where
                 .map_err(|e| {
                     io::Error::new(
                         e.kind(),
-                        format!(
-                            "Unable to load the certificate keys [{}]: {}",
-                            certs_keys_path_str,
-                            e.to_string()
-                        ),
+                        format!("Unable to load the certificate keys [{certs_keys_path_str}]: {e}"),
                     )
                 })?
                 .read_to_end(&mut encoded_keys)?;
             encoded_keys
         };
         let mut reader = Cursor::new(encoded_keys);
-        let pkcs8_keys = pemfile::pkcs8_private_keys(&mut reader).map_err(|_| {
+        let pkcs8_keys = rustls_pemfile::pkcs8_private_keys(&mut reader).map_err(|_| {
             io::Error::new(
                 io::ErrorKind::InvalidInput,
                 "Unable to parse the certificates private keys (PKCS8)",
             )
         })?;
         reader.set_position(0);
-        let mut rsa_keys = pemfile::rsa_private_keys(&mut reader).map_err(|_| {
+        let mut rsa_keys = rustls_pemfile::rsa_private_keys(&mut reader).map_err(|_| {
             io::Error::new(
                 io::ErrorKind::InvalidInput,
                 "Unable to parse the certificates private keys (RSA)",
@@ -82,21 +78,27 @@ where
                 "No private keys found - Make sure that they are in PKCS#8/PEM format",
             ));
         }
-        keys
+        keys.drain(..).map(PrivateKey).collect()
     };
-    let mut server_config = ServerConfig::new(NoClientAuth::new());
-    server_config.set_protocols(&[b"h2".to_vec(), b"http/1.1".to_vec()]);
-    let has_valid_cert_and_key = certs_keys.into_iter().any(|certs_key| {
-        server_config
-            .set_single_cert(certs.clone(), certs_key)
-            .is_ok()
-    });
-    if !has_valid_cert_and_key {
-        return Err(io::Error::new(
-            io::ErrorKind::InvalidInput,
-            "Invalid private key for the given certificate",
-        ));
-    }
+
+    let mut server_config = certs_keys
+        .into_iter()
+        .find_map(|certs_key| {
+            let server_config_builder = ServerConfig::builder()
+                .with_safe_defaults()
+                .with_no_client_auth();
+            match server_config_builder.with_single_cert(certs.clone(), certs_key) {
+                Ok(found_config) => Some(found_config),
+                _ => None,
+            }
+        })
+        .ok_or_else(|| {
+            io::Error::new(
+                io::ErrorKind::InvalidInput,
+                "Unable to find a valid certificate and key",
+            )
+        })?;
+    server_config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
     Ok(TlsAcceptor::from(Arc::new(server_config)))
 }
 
@@ -152,12 +154,12 @@ impl DoH {
                             break;
                         }
                     }
-                    Err(e) => eprintln!("TLS certificates error: {}", e),
+                    Err(e) => eprintln!("TLS certificates error: {e}"),
                 }
                 tokio::time::sleep(Duration::from_secs(CERTS_WATCH_DELAY_SECS.into())).await;
             }
             Ok::<_, DoHError>(())
         };
-        return join!(https_service, cert_service).0;
+        join!(https_service, cert_service).0
     }
 }
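The rewritten `create_tls_acceptor` drops the removed `rustls::internal::pemfile` module in favour of the external `rustls-pemfile` crate and the rustls `ServerConfig::builder()` API. The following is a condensed sketch of that flow, under the assumptions of tokio-rustls 0.24 / rustls 0.21, rustls-pemfile 1.x, PKCS#8 keys only, and unwrap-style error handling; `load_acceptor` is a hypothetical helper, not the crate's actual function:

```rust
use std::{fs::File, io::BufReader, sync::Arc};

use tokio_rustls::rustls::{Certificate, PrivateKey, ServerConfig};
use tokio_rustls::TlsAcceptor;

fn load_acceptor(cert_pem: &str, key_pem: &str) -> TlsAcceptor {
    // Parse every CERTIFICATE block from the PEM bundle.
    let certs: Vec<Certificate> =
        rustls_pemfile::certs(&mut BufReader::new(File::open(cert_pem).unwrap()))
            .expect("parse certificates")
            .into_iter()
            .map(Certificate)
            .collect();

    // Take the first PKCS#8 private key found in the key file.
    let key = rustls_pemfile::pkcs8_private_keys(&mut BufReader::new(File::open(key_pem).unwrap()))
        .expect("parse private keys")
        .into_iter()
        .map(PrivateKey)
        .next()
        .expect("at least one PKCS#8 key");

    // rustls 0.21-style builder, as in the hunk above, with ALPN for h2/http1.
    let mut config = ServerConfig::builder()
        .with_safe_defaults()
        .with_no_client_auth()
        .with_single_cert(certs, key)
        .expect("certificate/key mismatch");
    config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];

    TlsAcceptor::from(Arc::new(config))
}

fn main() {
    // localhost.pem ships with the repository and contains both cert and key.
    let _acceptor = load_acceptor("localhost.pem", "localhost.pem");
    println!("TLS acceptor ready");
}
```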
src/main.rs (14 changed lines)

@@ -1,5 +1,5 @@
 #[global_allocator]
-static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
+static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
 
 #[macro_use]
 extern crate clap;
@@ -8,17 +8,17 @@ mod config;
 mod constants;
 mod utils;
 
+use std::net::{IpAddr, Ipv4Addr, SocketAddr};
+use std::sync::Arc;
+use std::time::Duration;
+
+use libdoh::odoh::ODoHRotator;
+use libdoh::reexports::tokio;
 use libdoh::*;
 
 use crate::config::*;
 use crate::constants::*;
 
-use libdoh::odoh::ODoHRotator;
-use libdoh::reexports::tokio;
-use std::net::{IpAddr, Ipv4Addr, SocketAddr};
-use std::sync::Arc;
-use std::time::Duration;
-
 fn main() {
     let mut runtime_builder = tokio::runtime::Builder::new_multi_thread();
     runtime_builder.enable_all();
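The first hunk is entirely an allocator swap: jemalloc is dropped in favour of mimalloc, matching the dependency change in Cargo.toml above. In isolation the registration looks like this (minimal sketch, assuming the `mimalloc` crate is a dependency):

```rust
// Register mimalloc as the global allocator for the whole binary; every heap
// allocation, including tokio's internals, then goes through it.
#[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;

fn main() {
    let v: Vec<u8> = vec![0; 1024];
    println!("allocated {} bytes via mimalloc", v.len());
}
```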
src/utils.rs (16 changed lines)

@@ -2,25 +2,23 @@
 
 use std::net::{SocketAddr, ToSocketAddrs};
 
-pub(crate) fn verify_sock_addr(arg_val: String) -> Result<(), String> {
+pub(crate) fn verify_sock_addr(arg_val: &str) -> Result<String, String> {
     match arg_val.parse::<SocketAddr>() {
-        Ok(_addr) => Ok(()),
+        Ok(_addr) => Ok(arg_val.to_string()),
         Err(_) => Err(format!(
-            "Could not parse \"{}\" as a valid socket address (with port).",
-            arg_val
+            "Could not parse \"{arg_val}\" as a valid socket address (with port)."
         )),
     }
 }
 
-pub(crate) fn verify_remote_server(arg_val: String) -> Result<(), String> {
+pub(crate) fn verify_remote_server(arg_val: &str) -> Result<String, String> {
     match arg_val.to_socket_addrs() {
         Ok(mut addr_iter) => match addr_iter.next() {
-            Some(_) => Ok(()),
+            Some(_) => Ok(arg_val.to_string()),
             None => Err(format!(
-                "Could not parse \"{}\" as a valid remote uri",
-                arg_val
+                "Could not parse \"{arg_val}\" as a valid remote uri"
             )),
         },
-        Err(err) => Err(format!("{}", err)),
+        Err(err) => Err(format!("{err}")),
     }
 }
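These validators changed signature from `Fn(String) -> Result<(), String>` (clap 2's `validator`) to `Fn(&str) -> Result<String, String>` so they can be plugged into clap 4's `value_parser`, which stores the `Ok` value. A hypothetical stand-alone example of how such a parser is wired up and consumed, not the doh-proxy CLI itself:

```rust
use std::net::SocketAddr;

use clap::{Arg, Command};

// Same shape as the verify_sock_addr above: accept &str, return the accepted
// value as Ok(String) or a human-readable error message as Err(String).
fn verify_sock_addr(arg_val: &str) -> Result<String, String> {
    match arg_val.parse::<SocketAddr>() {
        Ok(_addr) => Ok(arg_val.to_string()),
        Err(_) => Err(format!(
            "Could not parse \"{arg_val}\" as a valid socket address (with port)."
        )),
    }
}

fn main() {
    let matches = Command::new("demo")
        .arg(
            Arg::new("listen_address")
                .long("listen-address")
                .value_parser(verify_sock_addr)
                .default_value("127.0.0.1:3000"),
        )
        .get_matches();

    // The stored value is whatever the parser returned in Ok(..).
    let addr: &String = matches.get_one("listen_address").unwrap();
    println!("listening on {addr}");
}
```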