mirror of
https://github.com/Kozea/Radicale.git
synced 2025-04-03 05:07:40 +03:00
Compare commits
293 commits
Author | SHA1 | Date | |
---|---|---|---|
|
8cdf262560 | ||
|
69587d3f5c | ||
|
f41533cca7 | ||
|
393a26814b | ||
|
3bdcbbdc56 | ||
|
9ca82a8aa2 | ||
|
ffe5fcc6f3 | ||
|
ecaed3188c | ||
|
c23821ad0c | ||
|
b744e9658c | ||
|
3ee5433397 | ||
|
29915b20c8 | ||
|
c91b8e49d5 | ||
|
14fb50954c | ||
|
312e26977b | ||
|
3bdc438283 | ||
|
3eb61a82a6 | ||
|
fb986ea02e | ||
|
af09d532c3 | ||
|
70b66ddfe2 | ||
|
6b83c409d4 | ||
|
7fcf473662 | ||
|
d25786c190 | ||
|
5d5b12c124 | ||
|
23387fa2f3 | ||
|
e0a24b14b4 | ||
|
2439266d0e | ||
|
9f7941d428 | ||
|
3af690fcb6 | ||
|
0d1dcec61a | ||
|
98152062df | ||
|
bcbf0918a9 | ||
|
f40c4d6e9b | ||
|
633dfbc875 | ||
|
34f51033b7 | ||
|
94ad295124 | ||
|
7399286ec9 | ||
|
7d351d6692 | ||
|
d4e23e6731 | ||
|
de527632e0 | ||
|
217978e9d5 | ||
|
2772305dde | ||
|
2ef99e5e85 | ||
|
26eab43f40 | ||
|
a3880480a9 | ||
|
9f8ac21130 | ||
|
e8c974a72a | ||
|
be43ce5161 | ||
|
7bb4beeae2 | ||
|
c9ffde27d8 | ||
|
dc56d67c33 | ||
|
081b8a7fcc | ||
|
76753d271a | ||
|
69f85a0bdf | ||
|
820691ca53 | ||
|
358ae55540 | ||
|
e22fbe282b | ||
|
b0d649f8b9 | ||
|
8f2099baf8 | ||
|
3a13ffbc51 | ||
|
0f67336987 | ||
|
cf727101f8 | ||
|
9f0385fd67 | ||
|
3963bb4d82 | ||
|
cffb2aaae3 | ||
|
4f0e607583 | ||
|
2f1db01083 | ||
|
95a8899002 | ||
|
41ab96e142 | ||
|
a284d18c16 | ||
|
30664f9346 | ||
|
36aba7a8b9 | ||
|
914320826f | ||
|
9372344bb1 | ||
|
c4a48828d3 | ||
|
ebe0418a4c | ||
|
c3c78db8ae | ||
|
0fa50210c9 | ||
|
25402ab641 | ||
|
76281ad1ff | ||
|
1d0ff9e84a | ||
|
e52056dea3 | ||
|
75711b46dc | ||
|
45df5a3b94 | ||
|
2ae1762daa | ||
|
7839ac5783 | ||
|
4086665d16 | ||
|
78dccbdc92 | ||
|
63b98913e0 | ||
|
b729a4c192 | ||
|
a3eb754967 | ||
|
d89ada0c17 | ||
|
7afff7ad2b | ||
|
451712d01d | ||
|
d7013ce726 | ||
|
280968e694 | ||
|
7b4da3a128 | ||
|
c6bd129fa2 | ||
|
bc2444bb9a | ||
|
dc35d4d0ad | ||
|
68f0eafe7d | ||
|
aa248f2b97 | ||
|
a2cd430f64 | ||
|
36e33ffee1 | ||
|
b8c2bc29ec | ||
|
65ce0c57e5 | ||
|
2958201454 | ||
|
73681a7767 | ||
|
cdbad007b6 | ||
|
78b94b1d4d | ||
|
e3ae7b3ab5 | ||
|
4419aa2285 | ||
|
eb8dc61952 | ||
|
3a4ec11733 | ||
|
7318f592c8 | ||
|
3910457a8d | ||
|
fcaee51ceb | ||
|
c2013ec901 | ||
|
29b1da4652 | ||
|
36a0501484 | ||
|
0b5dd82109 | ||
|
9b671beceb | ||
|
50f5d2e5ef | ||
|
8218081f58 | ||
|
16ece44faf | ||
|
5302863f53 | ||
|
6518f1b63a | ||
|
7f3fedc048 | ||
|
0759673e67 | ||
|
855e3743ca | ||
|
c8f650bc2c | ||
|
046d39b1bd | ||
|
954ddea006 | ||
|
6683775c81 | ||
|
9791a4db0f | ||
|
970d4ba468 | ||
|
809e35689b | ||
|
c3c61c692e | ||
|
53251231d4 | ||
|
63e414850e | ||
|
18338b3c6e | ||
|
d5cb05f817 | ||
|
4ab1cedee3 | ||
|
13a78d7365 | ||
|
93970a1001 | ||
|
c60627141f | ||
|
f6b5cb8a1e | ||
|
3914735ec0 | ||
|
48a634af9f | ||
|
3d50ae4a70 | ||
|
018978edd8 | ||
|
aa35c678ce | ||
|
19a47158bd | ||
|
a62da71aa2 | ||
|
67bbc9a31b | ||
|
dc83c6d7d0 | ||
|
484616f363 | ||
|
718089e3bf | ||
|
b078a8f002 | ||
|
fde0ecb9b2 | ||
|
803763729a | ||
|
37b18cf5a2 | ||
|
cd51581f38 | ||
|
88accdb672 | ||
|
c157dd7d19 | ||
|
605fc65584 | ||
|
f0d06cbc7d | ||
|
77f69f2b1e | ||
|
b011fa4e61 | ||
|
dcaec20681 | ||
|
d79abc2b7a | ||
|
938f6a97fd | ||
|
c2def71ce6 | ||
|
6f68a64855 | ||
|
f3a7641baa | ||
|
cfcfbbd231 | ||
|
e0d20edbcd | ||
|
d2be086cd1 | ||
|
7b6146405f | ||
|
04523e5087 | ||
|
23a68b2fb1 | ||
|
87dc5538d2 | ||
|
e28b719233 | ||
|
937acf38f7 | ||
|
063883797c | ||
|
30389f4525 | ||
|
780aaa7e3e | ||
|
98e65d88a4 | ||
|
10a79b9483 | ||
|
26637a1240 | ||
|
f9457f00f7 | ||
|
3df5d28432 | ||
|
e80bf58901 | ||
|
bc939522dc | ||
|
50b76f7114 | ||
|
72c7d32e44 | ||
|
c24659c5ec | ||
|
3e18644423 | ||
|
a93af6f177 | ||
|
ed6a5a834e | ||
|
dd9bb2beff | ||
|
0713041929 | ||
![]() |
3f04914de4 | ||
|
1c77fd819f | ||
|
08a35b19c8 | ||
|
1634ce9498 | ||
|
be64e57ae8 | ||
|
8172b87077 | ||
|
c853ec4a74 | ||
|
5ebaf4ef1c | ||
|
d6c4e6487a | ||
|
f9dd3efc3a | ||
|
6c1445d8db | ||
|
1ca41e2128 | ||
|
607b3af67b | ||
|
841df09312 | ||
|
c81e19616c | ||
|
b0d56f898b | ||
|
73f8f950d0 | ||
|
976dfe4a3f | ||
|
b122002077 | ||
|
ad94acddf1 | ||
|
2442a794ae | ||
|
a9f2e6fe7b | ||
|
5a00baab3f | ||
|
cf914450ee | ||
|
0d43a49ffb | ||
|
234be74b87 | ||
|
45f2a4cc0e | ||
|
532fad9ba6 | ||
|
99f5ec389d | ||
|
0253682c00 | ||
|
8c2feb4726 | ||
|
c243ae4ebf | ||
|
6f82333ff7 | ||
|
6f0ac545f0 | ||
|
70c4a34eb8 | ||
|
3763f28ae4 | ||
|
0a5ae5b0b4 | ||
|
5d48ba5d1e | ||
|
5a591b6471 | ||
|
8604dacad0 | ||
|
ca665c4849 | ||
|
8fdbd0dbf6 | ||
|
46fe98f60b | ||
|
c10ce7ae46 | ||
|
6ebca08423 | ||
|
c1be04abd1 | ||
|
c00ab76c83 | ||
|
5357e692d9 | ||
|
9cac3008b7 | ||
|
2489356dda | ||
|
5ce0cee8bf | ||
|
79ba07e16b | ||
|
c0acbd4402 | ||
|
b75e303556 | ||
|
a794a51885 | ||
|
4f2990342d | ||
|
ac8abbd12c | ||
|
9af15e6656 | ||
|
30e2ab490e | ||
|
ddd099accd | ||
|
8e97b709bf | ||
|
74311560c9 | ||
|
b22038c746 | ||
|
c2b2274dad | ||
|
2674f9a382 | ||
|
51960bcab8 | ||
|
a5dd4d8a7d | ||
|
94898ef6c1 | ||
|
7df2fb35a7 | ||
|
a4266c9690 | ||
|
1e8d9eda50 | ||
|
0b00218d75 | ||
|
7e23c603c1 | ||
|
6569e481df | ||
|
b19418f43c | ||
|
e2934a12c0 | ||
|
c8010fa4be | ||
|
b784f476b4 | ||
|
335584a6b7 | ||
|
9e9d036387 | ||
|
006c2d2bc0 | ||
|
b356edd6be | ||
|
59450e8c2d | ||
|
1a76e1ad50 | ||
|
6ebe9aee76 | ||
|
6214111f4f | ||
|
0f6dcb7192 | ||
|
4b1183ae00 | ||
|
c1c8ab2887 | ||
|
836827ac8f | ||
|
3d4cd7f034 |
51 changed files with 2277 additions and 402 deletions
2
.github/workflows/generate-documentation.yml
vendored
2
.github/workflows/generate-documentation.yml
vendored
|
@ -6,7 +6,7 @@ on:
|
|||
|
||||
jobs:
|
||||
generate:
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
|
|
4
.github/workflows/test.yml
vendored
4
.github/workflows/test.yml
vendored
|
@ -6,10 +6,8 @@ jobs:
|
|||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12.3', '3.13.0', pypy-3.9]
|
||||
python-version: ['3.9', '3.10', '3.11', '3.12.3', '3.13.0', pypy-3.9]
|
||||
exclude:
|
||||
- os: windows-latest
|
||||
python-version: pypy-3.8
|
||||
- os: windows-latest
|
||||
python-version: pypy-3.9
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
|
51
CHANGELOG.md
51
CHANGELOG.md
|
@ -1,5 +1,56 @@
|
|||
# Changelog
|
||||
|
||||
## 3.5.1.dev
|
||||
|
||||
* Fix: auth/htpasswd related to detection and use of bcrypt
|
||||
* Add: option [auth] ldap_ignore_attribute_create_modify_timestamp for support of Authentik LDAP server
|
||||
* Extend: [storage] hook supports now placeholder for "cwd" and "path" (and catches unsupported placeholders)
|
||||
* Fix: location of lock file for in case of dedicated cache folder is activated
|
||||
* Extend: log and create base folders if not existing during startup
|
||||
|
||||
## 3.5.0
|
||||
|
||||
* Add: option [auth] type oauth2 by code migration from https://gitlab.mim-libre.fr/alphabet/radicale_oauth/-/blob/dev/oauth2/
|
||||
* Fix: catch OS errors on PUT MKCOL MKCALENDAR MOVE PROPPATCH (insufficient storage, access denied, internal server error)
|
||||
* Test: skip bcrypt related tests if module is missing
|
||||
* Improve: relax mtime check on storage filesystem, change test file location to "collection-root" directory
|
||||
* Add: option [auth] type pam by code migration from v1, add new option pam_serivce
|
||||
* Cosmetics: extend list of used modules with their version on startup
|
||||
* Improve: WebUI
|
||||
* Add: option [server] script_name for reverse proxy base_prefix handling
|
||||
* Fix: proper base_prefix stripping if running behind reverse proxy
|
||||
* Review: Apache reverse proxy config example
|
||||
* Add: on-the-fly link activation and default content adjustment in case of bundled InfCloud (tested with 0.13.1)
|
||||
* Adjust: [auth] imap: use AUTHENTICATE PLAIN instead of LOGIN towards remote IMAP server
|
||||
* Improve: log client IP on SSL error and SSL protocol+cipher if successful
|
||||
* Improve: catch htpasswd hash verification errors
|
||||
* Improve: add support for more bcrypt algos on autodetection, extend logging for autodetection fallback to PLAIN in case of hash length is not matching
|
||||
* Add: warning in case of started standalone and not listen on loopback interface but trusting external authentication
|
||||
* Adjust: Change default [auth] type from "none" to "denyall" for secure-by-default
|
||||
|
||||
## 3.4.1
|
||||
* Add: option [auth] dovecot_connection_type / dovecot_host / dovecot_port
|
||||
* Add: option [auth] type imap by code migration from https://github.com/Unrud/RadicaleIMAP/
|
||||
|
||||
## 3.4.0
|
||||
* Add: option [auth] cache_logins/cache_successful_logins_expiry/cache_failed_logins for caching logins
|
||||
* Improve: [auth] log used hash method and result on debug for htpasswd authentication
|
||||
* Improve: [auth] htpasswd file now read and verified on start
|
||||
* Add: option [auth] htpasswd_cache to automatic re-read triggered on change (mtime or size) instead reading on each request
|
||||
* Improve: [auth] htpasswd: module 'bcrypt' is no longer mandatory in case digest method not used in file
|
||||
* Improve: [auth] successful/failed login logs now type and whether result was taken from cache
|
||||
* Improve: [auth] constant execution time for failed logins independent of external backend or by htpasswd used digest method
|
||||
* Drop: support for Python 3.8
|
||||
* Add: option [auth] ldap_user_attribute
|
||||
* Add: option [auth] ldap_groups_attribute as a more flexible replacement of removed ldap_load_groups
|
||||
|
||||
## 3.3.3
|
||||
* Add: display mtime_ns precision of storage folder with condition warning if too less
|
||||
* Improve: disable fsync during storage verification
|
||||
* Improve: suppress duplicate log lines on startup
|
||||
* Contrib: logwatch config and script
|
||||
* Improve: log precondition result on PUT request
|
||||
|
||||
## 3.3.2
|
||||
* Fix: debug logging in rights/from_file
|
||||
* Add: option [storage] use_cache_subfolder_for_item for storing 'item' cache outside collection-root
|
||||
|
|
408
DOCUMENTATION.md
408
DOCUMENTATION.md
|
@ -20,23 +20,19 @@ Radicale is a small but powerful CalDAV (calendars, to-do lists) and CardDAV
|
|||
|
||||
#### Installation
|
||||
|
||||
Radicale is really easy to install and works out-of-the-box.
|
||||
Check
|
||||
|
||||
```bash
|
||||
python3 -m pip install --upgrade https://github.com/Kozea/Radicale/archive/master.tar.gz
|
||||
python3 -m radicale --logging-level info --storage-filesystem-folder=~/.var/lib/radicale/collections
|
||||
```
|
||||
* [Tutorials](#tutorials)
|
||||
* [Documentation](#documentation-1)
|
||||
* [Wiki on GitHub](https://github.com/Kozea/Radicale/wiki)
|
||||
* [Disussions on GitHub](https://github.com/Kozea/Radicale/discussions)
|
||||
* [Open and already Closed Issues on GitHub](https://github.com/Kozea/Radicale/issues?q=is%3Aissue)
|
||||
|
||||
When the server is launched, open <http://localhost:5232> in your browser!
|
||||
You can login with any username and password.
|
||||
|
||||
Want more? Check the [tutorials](#tutorials) and the
|
||||
[documentation](#documentation-1).
|
||||
Hint: instead of downloading from PyPI look for packages provided by used [distribution](#linux-distribution-packages), they contain also startup scripts to run daemonized.
|
||||
|
||||
#### What's New?
|
||||
|
||||
Read the
|
||||
[changelog on GitHub.](https://github.com/Kozea/Radicale/blob/master/CHANGELOG.md)
|
||||
Read the [Changelog on GitHub](https://github.com/Kozea/Radicale/blob/master/CHANGELOG.md).
|
||||
|
||||
## Tutorials
|
||||
|
||||
|
@ -46,29 +42,66 @@ You want to try Radicale but only have 5 minutes free in your calendar? Let's
|
|||
go right now and play a bit with Radicale!
|
||||
|
||||
When everything works, you can get a [client](#supported-clients)
|
||||
and start creating calendars and address books. The server **only** binds to
|
||||
localhost (is **not** reachable over the network) and you can log in with any
|
||||
username and password. If Radicale fits your needs, it may be time for
|
||||
[some basic configuration](#basic-configuration).
|
||||
and start creating calendars and address books. By default, the server only binds to localhost (is not reachable over the network)
|
||||
and you can log in with any user name and password. When everything works, you may get a local client and start creating calendars and address books. If Radicale fits your needs, it may be time for some [basic configuration](#basic-configuration) to support remote clients.
|
||||
|
||||
Follow one of the chapters below depending on your operating system.
|
||||
|
||||
#### Linux / \*BSD
|
||||
|
||||
First, make sure that **python** 3.8 or later and **pip** are installed. On most distributions it should be
|
||||
First, make sure that **python** 3.9 or later and **pip** are installed. On most distributions it should be
|
||||
enough to install the package ``python3-pip``.
|
||||
|
||||
Then open a console and type:
|
||||
##### as normal user
|
||||
|
||||
Recommended only for testing - open a console and type:
|
||||
|
||||
```bash
|
||||
# Run the following command as root or
|
||||
# add the --user argument to only install for the current user
|
||||
$ python3 -m pip install --upgrade https://github.com/Kozea/Radicale/archive/master.tar.gz
|
||||
$ python3 -m radicale --storage-filesystem-folder=~/.var/lib/radicale/collections
|
||||
# Run the following command to only install for the current user
|
||||
python3 -m pip install --user --upgrade https://github.com/Kozea/Radicale/archive/master.tar.gz
|
||||
```
|
||||
|
||||
If _install_ is not working and instead `error: externally-managed-environment` is displayed, create and activate a virtual environment in advance
|
||||
|
||||
```bash
|
||||
python3 -m venv ~/venv
|
||||
source ~/venv/bin/activate
|
||||
```
|
||||
|
||||
and try to install with
|
||||
|
||||
```bash
|
||||
python3 -m pip install --upgrade https://github.com/Kozea/Radicale/archive/master.tar.gz
|
||||
```
|
||||
|
||||
Start the service manually, data is stored only for the current user
|
||||
|
||||
```bash
|
||||
# Start, data is stored for the current user only
|
||||
python3 -m radicale --storage-filesystem-folder=~/.var/lib/radicale/collections
|
||||
```
|
||||
|
||||
##### as system user (or as root)
|
||||
|
||||
Alternative one can install and run as system user or as root (not recommended)
|
||||
|
||||
```bash
|
||||
# Run the following command as root (not required)
|
||||
# or non-root system user (can require --user in case of dependencies are not available system-wide and/or virtual environment)
|
||||
python3 -m pip install --upgrade https://github.com/Kozea/Radicale/archive/master.tar.gz
|
||||
```
|
||||
|
||||
Start the service manually, data is stored in a system folder
|
||||
|
||||
```bash
|
||||
# Start, data is stored in a system folder (requires write permissions to /var/lib/radicale/collections)
|
||||
python3 -m radicale --storage-filesystem-folder=/var/lib/radicale/collections --auth-type none
|
||||
```
|
||||
|
||||
##### common
|
||||
|
||||
Victory! Open <http://localhost:5232> in your browser!
|
||||
You can log in with any username and password.
|
||||
You can log in with any username and password (no authentication is required as long as not proper configured - INSECURE).
|
||||
|
||||
#### Windows
|
||||
|
||||
|
@ -82,11 +115,11 @@ Launch a command prompt and type:
|
|||
|
||||
```powershell
|
||||
python -m pip install --upgrade https://github.com/Kozea/Radicale/archive/master.tar.gz
|
||||
python -m radicale --storage-filesystem-folder=~/radicale/collections
|
||||
python -m radicale --storage-filesystem-folder=~/radicale/collections --auth-type none
|
||||
```
|
||||
|
||||
Victory! Open <http://localhost:5232> in your browser!
|
||||
You can log in with any username and password.
|
||||
You can log in with any username and password (no authentication is required as long as not proper configured - INSECURE).
|
||||
|
||||
### Basic Configuration
|
||||
|
||||
|
@ -120,6 +153,12 @@ It can be stored in the same directory as the configuration file.
|
|||
The `users` file can be created and managed with
|
||||
[htpasswd](https://httpd.apache.org/docs/current/programs/htpasswd.html):
|
||||
|
||||
Note: some OS contain unpatched `htpasswd` (< 2.4.59) without supporting SHA-256 or SHA-512
|
||||
(e.g. Ubuntu LTS 22), in this case use '-B' for "bcrypt" hash method or stay with
|
||||
insecure MD5 (default) or SHA-1 ('-s').
|
||||
|
||||
Note that support of SHA-256 or SHA-512 was introduced with 3.1.9
|
||||
|
||||
```bash
|
||||
# Create a new htpasswd file with the user "user1" using SHA-512 as hash method
|
||||
$ htpasswd -5 -c /path/to/users user1
|
||||
|
@ -248,7 +287,7 @@ ProtectKernelTunables=true
|
|||
ProtectKernelModules=true
|
||||
ProtectControlGroups=true
|
||||
NoNewPrivileges=true
|
||||
ReadWritePaths=/var/lib/radicale/collections
|
||||
ReadWritePaths=/var/lib/radicale/ /var/cache/radicale/
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
@ -491,7 +530,9 @@ RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
|||
```
|
||||
|
||||
> **Security:** Untrusted clients should not be able to access the Radicale
|
||||
> server directly. Otherwise, they can authenticate as any user.
|
||||
> server directly. Otherwise, they can authenticate as any user by simply
|
||||
> setting related HTTP header. This can be prevented by restrict listen to
|
||||
> loopback interface only or at least a local firewall rule.
|
||||
|
||||
#### Secure connection between Radicale and the reverse proxy
|
||||
|
||||
|
@ -672,6 +713,24 @@ python3 -m radicale --server-hosts 0.0.0.0:5232,[::]:5232 \
|
|||
Add the argument `--config ""` to stop Radicale from loading the default
|
||||
configuration files. Run `python3 -m radicale --help` for more information.
|
||||
|
||||
One can also use command line options in startup scripts using following examples:
|
||||
|
||||
```bash
|
||||
## simple variable containing multiple options
|
||||
RADICALE_OPTIONS="--logging-level=debug --config=/etc/radicale/config --logging-request-header-on-debug --logging-rights-rule-doesnt-match-on-debug"
|
||||
/usr/bin/radicale $RADICALE_OPTIONS
|
||||
|
||||
## variable as array method #1
|
||||
RADICALE_OPTIONS=("--logging-level=debug" "--config=/etc/radicale/config" "--logging-request-header-on-debug" "--logging-rights-rule-doesnt-match-on-debug")
|
||||
/usr/bin/radicale ${RADICALE_OPTIONS[@]}
|
||||
|
||||
## variable as array method #2
|
||||
RADICALE_OPTIONS=()
|
||||
RADICALE_OPTIONS+=("--logging-level=debug")
|
||||
RADICALE_OPTIONS+=("--config=/etc/radicale/config")
|
||||
/usr/bin/radicale ${RADICALE_OPTIONS[@]}
|
||||
```
|
||||
|
||||
In the following, all configuration categories and options are described.
|
||||
|
||||
#### server
|
||||
|
@ -728,10 +787,12 @@ to secure TCP traffic between Radicale and a reverse proxy. If you want to
|
|||
authenticate users with client-side certificates, you also have to write an
|
||||
authentication plugin that extracts the username from the certificate.
|
||||
|
||||
Default:
|
||||
Default: (unset)
|
||||
|
||||
##### protocol
|
||||
|
||||
_(>= 3.3.1)_
|
||||
|
||||
Accepted SSL protocol (maybe not all supported by underlying OpenSSL version)
|
||||
Example for secure configuration: ALL -SSLv3 -TLSv1 -TLSv1.1
|
||||
Format: Apache SSLProtocol list (from "mod_ssl")
|
||||
|
@ -740,12 +801,22 @@ Default: (system default)
|
|||
|
||||
##### ciphersuite
|
||||
|
||||
_(>= 3.3.1)_
|
||||
|
||||
Accepted SSL ciphersuite (maybe not all supported by underlying OpenSSL version)
|
||||
Example for secure configuration: DHE:ECDHE:-NULL:-SHA
|
||||
Format: OpenSSL cipher list (see also "man openssl-ciphers")
|
||||
|
||||
Default: (system-default)
|
||||
|
||||
##### script_name
|
||||
|
||||
_(>= 3.5.0)_
|
||||
|
||||
Strip script name from URI if called by reverse proxy
|
||||
|
||||
Default: (taken from HTTP_X_SCRIPT_NAME or SCRIPT_NAME)
|
||||
|
||||
#### encoding
|
||||
|
||||
##### request
|
||||
|
@ -771,6 +842,9 @@ Available backends:
|
|||
`none`
|
||||
: Just allows all usernames and passwords.
|
||||
|
||||
`denyall` _(>= 3.2.2)_
|
||||
: Just denies all usernames and passwords.
|
||||
|
||||
`htpasswd`
|
||||
: Use an
|
||||
[Apache htpasswd file](https://httpd.apache.org/docs/current/programs/htpasswd.html)
|
||||
|
@ -779,20 +853,58 @@ Available backends:
|
|||
`remote_user`
|
||||
: Takes the username from the `REMOTE_USER` environment variable and disables
|
||||
HTTP authentication. This can be used to provide the username from a WSGI
|
||||
server.
|
||||
server which authenticated the client upfront. Required to validate, otherwise
|
||||
client can supply the header itself which is unconditionally trusted then.
|
||||
|
||||
`http_x_remote_user`
|
||||
: Takes the username from the `X-Remote-User` HTTP header and disables HTTP
|
||||
authentication. This can be used to provide the username from a reverse
|
||||
proxy.
|
||||
proxy which authenticated the client upfront. Required to validate, otherwise
|
||||
client can supply the header itself which is unconditionally trusted then.
|
||||
|
||||
`ldap`
|
||||
: Use a LDAP or AD server to authenticate users.
|
||||
`ldap` _(>= 3.3.0)_
|
||||
: Use a LDAP or AD server to authenticate users by relaying credentials from client and handle result.
|
||||
|
||||
`dovecot`
|
||||
: Use a local Dovecot server to authenticate users.
|
||||
`dovecot` _(>= 3.3.1)_
|
||||
: Use a Dovecot server to authenticate users by relaying credentials from client and handle result.
|
||||
|
||||
Default: `none`
|
||||
`imap` _(>= 3.4.1)_
|
||||
: Use an IMAP server to authenticate users by relaying credentials from client and handle result.
|
||||
|
||||
`oauth2` _(>= 3.5.0)_
|
||||
: Use an OAuth2 server to authenticate users by relaying credentials from client and handle result.
|
||||
Oauth2 authentication (SSO) directly on client is not supported. Use herefore `http_x_remote_user`
|
||||
in combination with SSO support in reverse proxy (e.g. Apache+mod_auth_openidc).
|
||||
|
||||
`pam` _(>= 3.5.0)_
|
||||
: Use local PAM to authenticate users by relaying credentials from client and handle result..
|
||||
|
||||
Default: `none` _(< 3.5.0)_ `denyall` _(>= 3.5.0)_
|
||||
|
||||
##### cache_logins
|
||||
|
||||
_(>= 3.4.0)_
|
||||
|
||||
Cache successful/failed logins until expiration time. Enable this to avoid
|
||||
overload of authentication backends.
|
||||
|
||||
Default: `false`
|
||||
|
||||
##### cache_successful_logins_expiry
|
||||
|
||||
_(>= 3.4.0)_
|
||||
|
||||
Expiration time of caching successful logins in seconds
|
||||
|
||||
Default: `15`
|
||||
|
||||
##### cache_failed_logins_expiry
|
||||
|
||||
_(>= 3.4.0)_
|
||||
|
||||
Expiration time of caching failed logins in seconds
|
||||
|
||||
Default: `90`
|
||||
|
||||
##### htpasswd_filename
|
||||
|
||||
|
@ -824,16 +936,24 @@ Available methods:
|
|||
`md5`
|
||||
: This uses an iterated MD5 digest of the password with a salt (nowadays insecure).
|
||||
|
||||
`sha256`
|
||||
`sha256` _(>= 3.1.9)_
|
||||
: This uses an iterated SHA-256 digest of the password with a salt.
|
||||
|
||||
`sha512`
|
||||
`sha512` _(>= 3.1.9)_
|
||||
: This uses an iterated SHA-512 digest of the password with a salt.
|
||||
|
||||
`autodetect`
|
||||
`autodetect` _(>= 3.1.9)_
|
||||
: This selects autodetection of method per entry.
|
||||
|
||||
Default: `autodetect`
|
||||
Default: `md5` _(< 3.3.0)_ `autodetect` _(>= 3.3.0)_
|
||||
|
||||
##### htpasswd_cache
|
||||
|
||||
_(>= 3.4.0)_
|
||||
|
||||
Enable caching of htpasswd file based on size and mtime_ns
|
||||
|
||||
Default: `False`
|
||||
|
||||
##### delay
|
||||
|
||||
|
@ -849,71 +969,183 @@ Default: `Radicale - Password Required`
|
|||
|
||||
##### ldap_uri
|
||||
|
||||
_(>= 3.3.0)_
|
||||
|
||||
The URI to the ldap server
|
||||
|
||||
Default: `ldap://localhost`
|
||||
|
||||
##### ldap_base
|
||||
|
||||
_(>= 3.3.0)_
|
||||
|
||||
LDAP base DN of the ldap server. This parameter must be provided if auth type is ldap.
|
||||
|
||||
Default:
|
||||
|
||||
##### ldap_reader_dn
|
||||
|
||||
_(>= 3.3.0)_
|
||||
|
||||
The DN of a ldap user with read access to get the user accounts. This parameter must be provided if auth type is ldap.
|
||||
|
||||
Default:
|
||||
|
||||
##### ldap_secret
|
||||
|
||||
_(>= 3.3.0)_
|
||||
|
||||
The password of the ldap_reader_dn. Either this parameter or `ldap_secret_file` must be provided if auth type is ldap.
|
||||
|
||||
Default:
|
||||
|
||||
##### ldap_secret_file
|
||||
|
||||
_(>= 3.3.0)_
|
||||
|
||||
Path of the file containing the password of the ldap_reader_dn. Either this parameter or `ldap_secret` must be provided if auth type is ldap.
|
||||
|
||||
Default:
|
||||
|
||||
##### ldap_filter
|
||||
|
||||
_(>= 3.3.0)_
|
||||
|
||||
The search filter to find the user DN to authenticate by the username. User '{0}' as placeholder for the user name.
|
||||
|
||||
Default: `(cn={0})`
|
||||
|
||||
##### ldap_load_groups
|
||||
##### ldap_user_attribute
|
||||
|
||||
Load the ldap groups of the authenticated user. These groups can be used later on to define rights. This also gives you access to the group calendars, if they exist.
|
||||
_(>= 3.4.0)_
|
||||
|
||||
The LDAP attribute whose value shall be used as the user name after successful authentication
|
||||
|
||||
Default: not set, i.e. the login name given is used directly.
|
||||
|
||||
##### ldap_groups_attribute
|
||||
|
||||
_(>= 3.4.0)_
|
||||
|
||||
The LDAP attribute to read the group memberships from in the authenticated user's LDAP entry.
|
||||
|
||||
If set, load the LDAP group memberships from the attribute given
|
||||
These memberships can be used later on to define rights.
|
||||
This also gives you access to the group calendars, if they exist.
|
||||
* The group calendar will be placed under collection_root_folder/GROUPS
|
||||
* The name of the calendar directory is the base64 encoded group name.
|
||||
* The group calendar folders will not be created automaticaly. This must be created manually. [Here](https://github.com/Kozea/Radicale/wiki/LDAP-authentication) you can find a script to create group calendar folders https://github.com/Kozea/Radicale/wiki/LDAP-authentication
|
||||
* The group calendar folders will not be created automatically. This must be done manually. In the [LDAP-authentication section of Radicale's wiki](https://github.com/Kozea/Radicale/wiki/LDAP-authentication) you can find a script to create a group calendar.
|
||||
|
||||
Default: False
|
||||
Use 'memberOf' if you want to load groups on Active Directory and alikes, 'groupMembership' on Novell eDirectory, ...
|
||||
|
||||
Default: (unset)
|
||||
|
||||
##### ldap_use_ssl
|
||||
|
||||
_(>= 3.3.0)_
|
||||
|
||||
Use ssl on the ldap connection
|
||||
|
||||
Default: False
|
||||
|
||||
##### ldap_ssl_verify_mode
|
||||
|
||||
_(>= 3.3.0)_
|
||||
|
||||
The certificate verification mode. NONE, OPTIONAL or REQUIRED
|
||||
|
||||
Default: REQUIRED
|
||||
|
||||
##### ldap_ssl_ca_file
|
||||
|
||||
_(>= 3.3.0)_
|
||||
|
||||
The path to the CA file in pem format which is used to certificate the server certificate
|
||||
|
||||
Default:
|
||||
|
||||
##### ldap_ignore_attribute_create_modify_timestamp
|
||||
|
||||
_(>= 3.5.1)_
|
||||
|
||||
Add modifyTimestamp and createTimestamp to the exclusion list of internal ldap3 client
|
||||
so that these schema attributes are not checked. This is needed at least for Authentik
|
||||
LDAP server as not providing these both attributes.
|
||||
|
||||
Default: false
|
||||
|
||||
##### dovecot_connection_type = AF_UNIX
|
||||
|
||||
_(>= 3.4.1)_
|
||||
|
||||
Connection type for dovecot authentication (AF_UNIX|AF_INET|AF_INET6)
|
||||
|
||||
Note: credentials are transmitted in cleartext
|
||||
|
||||
Default: `AF_UNIX`
|
||||
|
||||
##### dovecot_socket
|
||||
|
||||
_(>= 3.3.1)_
|
||||
|
||||
The path to the Dovecot client authentication socket (eg. /run/dovecot/auth-client on Fedora). Radicale must have read / write access to the socket.
|
||||
|
||||
Default: `/var/run/dovecot/auth-client`
|
||||
|
||||
##### dovecot_host
|
||||
|
||||
_(>= 3.4.1)_
|
||||
|
||||
Host of via network exposed dovecot socket
|
||||
|
||||
Default: `localhost`
|
||||
|
||||
##### dovecot_port
|
||||
|
||||
_(>= 3.4.1)_
|
||||
|
||||
Port of via network exposed dovecot socket
|
||||
|
||||
Default: `12345`
|
||||
|
||||
##### imap_host
|
||||
|
||||
_(>= 3.4.1)_
|
||||
|
||||
IMAP server hostname: address | address:port | [address]:port | imap.server.tld
|
||||
|
||||
Default: `localhost`
|
||||
|
||||
##### imap_security
|
||||
|
||||
_(>= 3.4.1)_
|
||||
|
||||
Secure the IMAP connection: tls | starttls | none
|
||||
|
||||
Default: `tls`
|
||||
|
||||
##### oauth2_token_endpoint
|
||||
|
||||
_(>= 3.5.0)_
|
||||
|
||||
OAuth2 token endpoint URL
|
||||
|
||||
Default:
|
||||
|
||||
##### pam_service
|
||||
|
||||
_(>= 3.5.0)_
|
||||
|
||||
PAM service
|
||||
|
||||
Default: radicale
|
||||
|
||||
##### pam_group_membership
|
||||
|
||||
_(>= 3.5.0)_
|
||||
|
||||
PAM group user should be member of
|
||||
|
||||
Default:
|
||||
|
||||
##### lc_username
|
||||
|
@ -927,6 +1159,8 @@ Note: cannot be enabled together with `uc_username`
|
|||
|
||||
##### uc_username
|
||||
|
||||
_(>= 3.3.2)_
|
||||
|
||||
Сonvert username to uppercase, must be true for case-insensitive auth
|
||||
providers like ldap, kerberos
|
||||
|
||||
|
@ -936,6 +1170,8 @@ Note: cannot be enabled together with `lc_username`
|
|||
|
||||
##### strip_domain
|
||||
|
||||
_(>= 3.2.3)_
|
||||
|
||||
Strip domain from username
|
||||
|
||||
Default: `False`
|
||||
|
@ -977,7 +1213,7 @@ File for the rights backend `from_file`. See the
|
|||
|
||||
##### permit_delete_collection
|
||||
|
||||
(New since 3.1.9)
|
||||
_(>= 3.1.9)_
|
||||
|
||||
Global control of permission to delete complete collection (default: True)
|
||||
|
||||
|
@ -986,7 +1222,7 @@ If True it can be forbidden by permissions per section with: d
|
|||
|
||||
##### permit_overwrite_collection
|
||||
|
||||
(New since 3.3.0)
|
||||
_(>= 3.3.0)_
|
||||
|
||||
Global control of permission to overwrite complete collection (default: True)
|
||||
|
||||
|
@ -1018,6 +1254,8 @@ Default: `/var/lib/radicale/collections`
|
|||
|
||||
##### filesystem_cache_folder
|
||||
|
||||
_(>= 3.3.2)_
|
||||
|
||||
Folder for storing cache of local collections, created if not present
|
||||
|
||||
Default: (filesystem_folder)
|
||||
|
@ -1028,6 +1266,8 @@ Note: can be used on multi-instance setup to cache files on local node (see belo
|
|||
|
||||
##### use_cache_subfolder_for_item
|
||||
|
||||
_(>= 3.3.2)_
|
||||
|
||||
Use subfolder `collection-cache` for cache file structure of 'item' instead of inside collection folders, created if not present
|
||||
|
||||
Default: `False`
|
||||
|
@ -1036,6 +1276,8 @@ Note: can be used on multi-instance setup to cache 'item' on local node
|
|||
|
||||
##### use_cache_subfolder_for_history
|
||||
|
||||
_(>= 3.3.2)_
|
||||
|
||||
Use subfolder `collection-cache` for cache file structure of 'history' instead of inside collection folders, created if not present
|
||||
|
||||
Default: `False`
|
||||
|
@ -1044,6 +1286,8 @@ Note: use only on single-instance setup, will break consistency with client in m
|
|||
|
||||
##### use_cache_subfolder_for_synctoken
|
||||
|
||||
_(>= 3.3.2)_
|
||||
|
||||
Use subfolder `collection-cache` for cache file structure of 'sync-token' instead of inside collection folders, created if not present
|
||||
|
||||
Default: `False`
|
||||
|
@ -1052,6 +1296,8 @@ Note: use only on single-instance setup, will break consistency with client in m
|
|||
|
||||
##### use_mtime_and_size_for_item_cache
|
||||
|
||||
_(>= 3.3.2)_
|
||||
|
||||
Use last modifiction time (nanoseconds) and size (bytes) for 'item' cache instead of SHA256 (improves speed)
|
||||
|
||||
Default: `False`
|
||||
|
@ -1062,6 +1308,8 @@ Note: conversion is done on access, bulk conversion can be done offline using st
|
|||
|
||||
##### folder_umask
|
||||
|
||||
_(>= 3.3.2)_
|
||||
|
||||
Use configured umask for folder creation (not applicable for OS Windows)
|
||||
|
||||
Default: (system-default, usual `0022`)
|
||||
|
@ -1076,6 +1324,8 @@ Default: `2592000`
|
|||
|
||||
##### skip_broken_item
|
||||
|
||||
_(>= 3.2.2)_
|
||||
|
||||
Skip broken item instead of triggering an exception
|
||||
|
||||
Default: `True`
|
||||
|
@ -1088,7 +1338,9 @@ Command that is run after changes to storage. Take a look at the
|
|||
Default:
|
||||
|
||||
Supported placeholders:
|
||||
- `%(user)`: logged-in user
|
||||
- `%(user)s`: logged-in user
|
||||
- `%(cwd)s`: current working directory _(>= 3.5.1)_
|
||||
- `%(path)s`: full path of item _(>= 3.5.1)_
|
||||
|
||||
Command will be executed with base directory defined in `filesystem_folder` (see above)
|
||||
|
||||
|
@ -1136,7 +1388,7 @@ Set the logging level.
|
|||
|
||||
Available levels: **debug**, **info**, **warning**, **error**, **critical**
|
||||
|
||||
Default: `warning`
|
||||
Default: `warning` _(< 3.2.0)_ `info` _(>= 3.2.0)_
|
||||
|
||||
##### mask_passwords
|
||||
|
||||
|
@ -1146,30 +1398,40 @@ Default: `True`
|
|||
|
||||
##### bad_put_request_content
|
||||
|
||||
_(>= 3.2.1)_
|
||||
|
||||
Log bad PUT request content (for further diagnostics)
|
||||
|
||||
Default: `False`
|
||||
|
||||
##### backtrace_on_debug
|
||||
|
||||
_(>= 3.2.2)_
|
||||
|
||||
Log backtrace on level=debug
|
||||
|
||||
Default: `False`
|
||||
|
||||
##### request_header_on_debug
|
||||
|
||||
_(>= 3.2.2)_
|
||||
|
||||
Log request on level=debug
|
||||
|
||||
Default: `False`
|
||||
|
||||
##### request_content_on_debug
|
||||
|
||||
_(>= 3.2.2)_
|
||||
|
||||
Log request on level=debug
|
||||
|
||||
Default: `False`
|
||||
|
||||
##### response_content_on_debug
|
||||
|
||||
_(>= 3.2.2)_
|
||||
|
||||
Log response on level=debug
|
||||
|
||||
Default: `False`
|
||||
|
@ -1182,6 +1444,8 @@ Default: `False`
|
|||
|
||||
##### storage_cache_actions_on_debug
|
||||
|
||||
_(>= 3.3.2)_
|
||||
|
||||
Log storage cache actions on level=debug
|
||||
|
||||
Default: `False`
|
||||
|
@ -1207,33 +1471,42 @@ Available types:
|
|||
`none`
|
||||
: Disabled. Nothing will be notified.
|
||||
|
||||
`rabbitmq`
|
||||
`rabbitmq` _(>= 3.2.0)_
|
||||
: Push the message to the rabbitmq server.
|
||||
|
||||
Default: `none`
|
||||
|
||||
#### rabbitmq_endpoint
|
||||
##### rabbitmq_endpoint
|
||||
|
||||
_(>= 3.2.0)_
|
||||
|
||||
End-point address for rabbitmq server.
|
||||
Ex: amqp://user:password@localhost:5672/
|
||||
|
||||
Default:
|
||||
|
||||
#### rabbitmq_topic
|
||||
##### rabbitmq_topic
|
||||
|
||||
_(>= 3.2.0)_
|
||||
|
||||
RabbitMQ topic to publish message.
|
||||
|
||||
Default:
|
||||
|
||||
#### rabbitmq_queue_type
|
||||
##### rabbitmq_queue_type
|
||||
|
||||
_(>= 3.2.0)_
|
||||
|
||||
RabbitMQ queue type for the topic.
|
||||
|
||||
Default: classic
|
||||
|
||||
#### reporting
|
||||
|
||||
##### max_freebusy_occurrence
|
||||
|
||||
_(>= 3.2.3)_
|
||||
|
||||
When returning a free-busy report, a list of busy time occurrences are
|
||||
generated based on a given time frame. Large time frames could
|
||||
generate a lot of occurrences based on the time frame supplied. This
|
||||
|
@ -1248,7 +1521,8 @@ Default: 10000
|
|||
Radicale has been tested with:
|
||||
|
||||
* [Android](https://android.com/) with
|
||||
[DAVx⁵](https://www.davx5.com/) (formerly DAVdroid)
|
||||
[DAVx⁵](https://www.davx5.com/) (formerly DAVdroid),
|
||||
* [OneCalendar](https://www.onecalendar.nl/)
|
||||
* [GNOME Calendar](https://wiki.gnome.org/Apps/Calendar),
|
||||
[Contacts](https://wiki.gnome.org/Apps/Contacts) and
|
||||
[Evolution](https://wiki.gnome.org/Apps/Evolution)
|
||||
|
@ -1279,6 +1553,13 @@ Enter the URL of the Radicale server (e.g. `http://localhost:5232`) and your
|
|||
username. DAVx⁵ will show all existing calendars and address books and you
|
||||
can create new.
|
||||
|
||||
#### OneCalendar
|
||||
|
||||
When adding account, select CalDAV account type, then enter user name, password and the
|
||||
Radicale server (e.g. `https://yourdomain:5232`). OneCalendar will show all
|
||||
existing calendars and (FIXME: address books), you need to select which ones
|
||||
you want to see. OneCalendar supports many other server types too.
|
||||
|
||||
#### GNOME Calendar, Contacts
|
||||
|
||||
GNOME 46 added CalDAV and CardDAV support to _GNOME Online Accounts_.
|
||||
|
@ -1308,16 +1589,13 @@ It will list your existing address books.
|
|||
|
||||
#### InfCloud, CalDavZAP and CardDavMATE
|
||||
|
||||
You can integrate InfCloud into Radicale's web interface with
|
||||
[RadicaleInfCloud](https://github.com/Unrud/RadicaleInfCloud). No additional
|
||||
configuration is required.
|
||||
You can integrate InfCloud into Radicale's web interface with by simply
|
||||
download latest package from [InfCloud](https://www.inf-it.com/open-source/clients/infcloud/)
|
||||
and extract content to new folder `infcloud` in `radicale/web/internal_data/`.
|
||||
|
||||
Set the URL of the Radicale server in ``config.js``. If **InfCloud** is not
|
||||
hosted on the same server and port as Radicale, the browser will deny access to
|
||||
the Radicale server, because of the
|
||||
[same-origin policy](https://en.wikipedia.org/wiki/Same-origin_policy).
|
||||
You have to add additional HTTP header in the `headers` section of Radicale's
|
||||
configuration. The documentation of **InfCloud** has more details on this.
|
||||
No further adjustments are required as content is adjusted on the fly (tested with 0.13.1).
|
||||
|
||||
See also [Wiki/Client InfCloud](https://github.com/Kozea/Radicale/wiki/Client-InfCloud).
|
||||
|
||||
#### Command line
|
||||
|
||||
|
@ -1401,7 +1679,7 @@ An example rights file:
|
|||
[root]
|
||||
user: .+
|
||||
collection:
|
||||
permissions: r
|
||||
permissions: R
|
||||
|
||||
# Allow reading and writing principal collection (same as username)
|
||||
[principal]
|
||||
|
@ -1443,8 +1721,8 @@ The following `permissions` are recognized:
|
|||
(CalDAV/CardDAV is susceptible to expensive search requests)
|
||||
* **W:** write collections (excluding address books and calendars)
|
||||
* **w:** write address book and calendar collections
|
||||
* **D:** permit delete of collection in case permit_delete_collection=False
|
||||
* **d:** forbid delete of collection in case permit_delete_collection=True
|
||||
* **D:** permit delete of collection in case permit_delete_collection=False _(>= 3.3.0)_
|
||||
* **d:** forbid delete of collection in case permit_delete_collection=True _(>= 3.3.0)_
|
||||
* **O:** permit overwrite of collection in case permit_overwrite_collection=False
|
||||
* **o:** forbid overwrite of collection in case permit_overwrite_collection=True
|
||||
|
||||
|
@ -1699,7 +1977,7 @@ class Auth(BaseAuth):
|
|||
def __init__(self, configuration):
|
||||
super().__init__(configuration.copy(PLUGIN_CONFIG_SCHEMA))
|
||||
|
||||
def login(self, login, password):
|
||||
def _login(self, login, password):
|
||||
# Get password from configuration option
|
||||
static_password = self.configuration.get("auth", "password")
|
||||
# Check authentication
|
||||
|
|
68
config
68
config
|
@ -46,6 +46,9 @@
|
|||
# SSL ciphersuite, secure configuration: DHE:ECDHE:-NULL:-SHA (see also "man openssl-ciphers")
|
||||
#ciphersuite = (default)
|
||||
|
||||
# script name to strip from URI if called by reverse proxy
|
||||
#script_name = (default taken from HTTP_X_SCRIPT_NAME or SCRIPT_NAME)
|
||||
|
||||
|
||||
[encoding]
|
||||
|
||||
|
@ -59,8 +62,20 @@
|
|||
[auth]
|
||||
|
||||
# Authentication method
|
||||
# Value: none | htpasswd | remote_user | http_x_remote_user | ldap | denyall
|
||||
#type = none
|
||||
# Value: none | htpasswd | remote_user | http_x_remote_user | dovecot | ldap | oauth2 | pam | denyall
|
||||
#type = denyall
|
||||
|
||||
# Cache logins for until expiration time
|
||||
#cache_logins = false
|
||||
|
||||
# Expiration time for caching successful logins in seconds
|
||||
#cache_successful_logins_expiry = 15
|
||||
|
||||
## Expiration time of caching failed logins in seconds
|
||||
#cache_failed_logins_expiry = 90
|
||||
|
||||
# Ignore modifyTimestamp and createTimestamp attributes. Required e.g. for Authentik LDAP server
|
||||
#ldap_ignore_attribute_create_modify_timestamp = false
|
||||
|
||||
# URI to the LDAP server
|
||||
#ldap_uri = ldap://localhost
|
||||
|
@ -77,12 +92,15 @@
|
|||
# Path of the file containing password of the reader DN
|
||||
#ldap_secret_file = /run/secrets/ldap_password
|
||||
|
||||
# If the ldap groups of the user need to be loaded
|
||||
#ldap_load_groups = True
|
||||
# the attribute to read the group memberships from in the user's LDAP entry (default: not set)
|
||||
#ldap_groups_attribute = memberOf
|
||||
|
||||
# The filter to find the DN of the user. This filter must contain a python-style placeholder for the login
|
||||
#ldap_filter = (&(objectClass=person)(uid={0}))
|
||||
|
||||
# the attribute holding the value to be used as username after authentication
|
||||
#ldap_user_attribute = cn
|
||||
|
||||
# Use ssl on the ldap connection
|
||||
#ldap_use_ssl = False
|
||||
|
||||
|
@ -92,6 +110,36 @@
|
|||
# The path to the CA file in pem format which is used to certificate the server certificate
|
||||
#ldap_ssl_ca_file =
|
||||
|
||||
# Connection type for dovecot authentication (AF_UNIX|AF_INET|AF_INET6)
|
||||
# Note: credentials are transmitted in cleartext
|
||||
#dovecot_connection_type = AF_UNIX
|
||||
|
||||
# The path to the Dovecot client authentication socket (eg. /run/dovecot/auth-client on Fedora). Radicale must have read / write access to the socket.
|
||||
#dovecot_socket = /var/run/dovecot/auth-client
|
||||
|
||||
# Host of via network exposed dovecot socket
|
||||
#dovecot_host = localhost
|
||||
|
||||
# Port of via network exposed dovecot socket
|
||||
#dovecot_port = 12345
|
||||
|
||||
# IMAP server hostname
|
||||
# Syntax: address | address:port | [address]:port | imap.server.tld
|
||||
#imap_host = localhost
|
||||
|
||||
# Secure the IMAP connection
|
||||
# Value: tls | starttls | none
|
||||
#imap_security = tls
|
||||
|
||||
# OAuth2 token endpoint URL
|
||||
#oauth2_token_endpoint = <URL>
|
||||
|
||||
# PAM service
|
||||
#pam_serivce = radicale
|
||||
|
||||
# PAM group user should be member of
|
||||
#pam_group_membership =
|
||||
|
||||
# Htpasswd filename
|
||||
#htpasswd_filename = /etc/radicale/users
|
||||
|
||||
|
@ -100,6 +148,9 @@
|
|||
# bcrypt requires the installation of 'bcrypt' module.
|
||||
#htpasswd_encryption = autodetect
|
||||
|
||||
# Enable caching of htpasswd file based on size and mtime_ns
|
||||
#htpasswd_cache = False
|
||||
|
||||
# Incorrect authentication delay (seconds)
|
||||
#delay = 1
|
||||
|
||||
|
@ -158,7 +209,7 @@
|
|||
# Use last modifiction time (nanoseconds) and size (bytes) for 'item' cache instead of SHA256 (improves speed)
|
||||
# Note: check used filesystem mtime precision before enabling
|
||||
# Note: conversion is done on access, bulk conversion can be done offline using storage verification option: radicale --verify-storage
|
||||
#use_mtime_and_size_for_item_cache=False
|
||||
#use_mtime_and_size_for_item_cache = False
|
||||
|
||||
# Use configured umask for folder creation (not applicable for OS Windows)
|
||||
# Useful value: 0077 | 0027 | 0007 | 0022
|
||||
|
@ -172,10 +223,13 @@
|
|||
|
||||
# Command that is run after changes to storage, default is emtpy
|
||||
# Supported placeholders:
|
||||
# %(user): logged-in user
|
||||
# %(user)s: logged-in user
|
||||
# %(cwd)s : current working directory
|
||||
# %(path)s: full path of item
|
||||
# Command will be executed with base directory defined in filesystem_folder
|
||||
# For "git" check DOCUMENTATION.md for bootstrap instructions
|
||||
# Example: git add -A && (git diff --cached --quiet || git commit -m "Changes by \"%(user)s\"")
|
||||
# Example(test): echo \"user=%(user)s path=%(path)s cwd=%(cwd)s\"
|
||||
# Example(git): git add -A && (git diff --cached --quiet || git commit -m "Changes by \"%(user)s\"")
|
||||
#hook =
|
||||
|
||||
# Create predefined user collections
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
## Apache acting as reverse proxy and forward requests via ProxyPass to a running "radicale" server
|
||||
# SELinux WARNING: To use this correctly, you will need to set:
|
||||
# setsebool -P httpd_can_network_connect=1
|
||||
# URI prefix: /radicale
|
||||
#Define RADICALE_SERVER_REVERSE_PROXY
|
||||
|
||||
|
||||
|
@ -11,11 +12,12 @@
|
|||
# MAY CONFLICT with other WSG servers on same system -> use then inside a VirtualHost
|
||||
# SELinux WARNING: To use this correctly, you will need to set:
|
||||
# setsebool -P httpd_can_read_write_radicale=1
|
||||
# URI prefix: /radicale
|
||||
#Define RADICALE_SERVER_WSGI
|
||||
|
||||
|
||||
### Extra options
|
||||
## Apache starting a dedicated VHOST with SSL
|
||||
## Apache starting a dedicated VHOST with SSL without "/radicale" prefix in URI on port 8443
|
||||
#Define RADICALE_SERVER_VHOST_SSL
|
||||
|
||||
|
||||
|
@ -27,8 +29,13 @@
|
|||
#Define RADICALE_ENFORCE_SSL
|
||||
|
||||
|
||||
### enable authentication by web server (config: [auth] type = http_x_remote_user)
|
||||
#Define RADICALE_SERVER_USER_AUTHENTICATION
|
||||
|
||||
|
||||
### Particular configuration EXAMPLES, adjust/extend/override to your needs
|
||||
|
||||
|
||||
##########################
|
||||
### default host
|
||||
##########################
|
||||
|
@ -37,9 +44,14 @@
|
|||
## RADICALE_SERVER_REVERSE_PROXY
|
||||
<IfDefine RADICALE_SERVER_REVERSE_PROXY>
|
||||
RewriteEngine On
|
||||
|
||||
RewriteRule ^/radicale$ /radicale/ [R,L]
|
||||
|
||||
<Location /radicale>
|
||||
RewriteCond %{REQUEST_METHOD} GET
|
||||
RewriteRule ^/radicale/$ /radicale/.web/ [R,L]
|
||||
|
||||
<LocationMatch "^/radicale/\.web.*>
|
||||
# Internal WebUI does not need authentication at all
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
|
@ -48,21 +60,40 @@
|
|||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
#AuthBasicProvider file
|
||||
#AuthType Basic
|
||||
#AuthName "Enter your credentials"
|
||||
#AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
#AuthGroupFile /dev/null
|
||||
#Require valid-user
|
||||
#RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
<LocationMatch "^/radicale(?!/\.web)">
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_ENFORCE_SSL>
|
||||
<IfModule !ssl_module>
|
||||
|
@ -70,7 +101,7 @@
|
|||
</IfModule>
|
||||
SSLRequireSSL
|
||||
</IfDefine>
|
||||
</Location>
|
||||
</LocationMatch>
|
||||
</IfDefine>
|
||||
|
||||
|
||||
|
@ -96,24 +127,38 @@
|
|||
|
||||
WSGIScriptAlias /radicale /usr/share/radicale/radicale.wsgi
|
||||
|
||||
<Location /radicale>
|
||||
# Internal WebUI does not need authentication at all
|
||||
<LocationMatch "^/radicale/\.web.*>
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
#AuthBasicProvider file
|
||||
#AuthType Basic
|
||||
#AuthName "Enter your credentials"
|
||||
#AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
#AuthGroupFile /dev/null
|
||||
#Require valid-user
|
||||
#RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
<LocationMatch "^/radicale(?!/\.web)">
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_ENFORCE_SSL>
|
||||
<IfModule !ssl_module>
|
||||
|
@ -121,7 +166,7 @@
|
|||
</IfModule>
|
||||
SSLRequireSSL
|
||||
</IfDefine>
|
||||
</Location>
|
||||
</LocationMatch>
|
||||
</IfModule>
|
||||
<IfModule !wsgi_module>
|
||||
Error "RADICALE_SERVER_WSGI selected but wsgi module not loaded/enabled"
|
||||
|
@ -165,30 +210,51 @@ CustomLog logs/ssl_request_log "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
|
|||
|
||||
## RADICALE_SERVER_REVERSE_PROXY
|
||||
<IfDefine RADICALE_SERVER_REVERSE_PROXY>
|
||||
<Location />
|
||||
RequestHeader set X-Script-Name /
|
||||
RewriteEngine On
|
||||
|
||||
RewriteCond %{REQUEST_METHOD} GET
|
||||
RewriteRule ^/$ /.web/ [R,L]
|
||||
|
||||
<LocationMatch "^/\.web.*>
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
#AuthBasicProvider file
|
||||
#AuthType Basic
|
||||
#AuthName "Enter your credentials"
|
||||
#AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
#AuthGroupFile /dev/null
|
||||
#Require valid-user
|
||||
</Location>
|
||||
<LocationMatch "^(?!/\.web)">
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
</IfDefine>
|
||||
|
||||
|
||||
|
@ -214,24 +280,27 @@ CustomLog logs/ssl_request_log "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
|
|||
|
||||
WSGIScriptAlias / /usr/share/radicale/radicale.wsgi
|
||||
|
||||
<Location />
|
||||
RequestHeader set X-Script-Name /
|
||||
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
<LocationMatch "^/(?!/\.web)">
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
#AuthBasicProvider file
|
||||
#AuthType Basic
|
||||
#AuthName "Enter your credentials"
|
||||
#AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
#AuthGroupFile /dev/null
|
||||
#Require valid-user
|
||||
</Location>
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
</IfModule>
|
||||
<IfModule !wsgi_module>
|
||||
Error "RADICALE_SERVER_WSGI selected but wsgi module not loaded/enabled"
|
||||
|
|
193
contrib/logwatch/radicale
Normal file
193
contrib/logwatch/radicale
Normal file
|
@ -0,0 +1,193 @@
|
|||
# This file is related to Radicale - CalDAV and CardDAV server
|
||||
# for logwatch (script)
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# Detail levels
|
||||
# >= 5: Logins
|
||||
# >= 10: ResponseTimes
|
||||
|
||||
$Detail = $ENV{'LOGWATCH_DETAIL_LEVEL'} || 0;
|
||||
|
||||
my %ResponseTimes;
|
||||
my %Responses;
|
||||
my %Requests;
|
||||
my %Logins;
|
||||
my %Loglevel;
|
||||
my %OtherEvents;
|
||||
|
||||
my $sum;
|
||||
my $length;
|
||||
|
||||
sub ResponseTimesMinMaxSum($$) {
|
||||
my $req = $_[0];
|
||||
my $time = $_[1];
|
||||
|
||||
$ResponseTimes{$req}->{'cnt'}++;
|
||||
|
||||
if (! defined $ResponseTimes{$req}->{'min'}) {
|
||||
$ResponseTimes{$req}->{'min'} = $time;
|
||||
} elsif ($ResponseTimes->{$req}->{'min'} > $time) {
|
||||
$ResponseTimes{$req}->{'min'} = $time;
|
||||
}
|
||||
|
||||
if (! defined $ResponseTimes{$req}->{'max'}) {
|
||||
$ResponseTimes{$req}{'max'} = $time;
|
||||
} elsif ($ResponseTimes{$req}->{'max'} < $time) {
|
||||
$ResponseTimes{$req}{'max'} = $time;
|
||||
}
|
||||
|
||||
$ResponseTimes{$req}->{'sum'} += $time;
|
||||
}
|
||||
|
||||
sub Sum($) {
|
||||
my $phash = $_[0];
|
||||
my $sum = 0;
|
||||
foreach my $entry (keys %$phash) {
|
||||
$sum += $phash->{$entry};
|
||||
}
|
||||
return $sum;
|
||||
}
|
||||
|
||||
sub MaxLength($) {
|
||||
my $phash = $_[0];
|
||||
my $length = 0;
|
||||
foreach my $entry (keys %$phash) {
|
||||
$length = length($entry) if (length($entry) > $length);
|
||||
}
|
||||
return $length;
|
||||
}
|
||||
|
||||
while (defined($ThisLine = <STDIN>)) {
|
||||
# count loglevel
|
||||
if ( $ThisLine =~ /\[(DEBUG|INFO|WARNING|ERROR|CRITICAL)\] /o ) {
|
||||
$Loglevel{$1}++
|
||||
}
|
||||
|
||||
# parse log for events
|
||||
if ( $ThisLine =~ /Radicale server ready/o ) {
|
||||
$OtherEvents{"Radicale server started"}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ /Stopping Radicale/o ) {
|
||||
$OtherEvents{"Radicale server stopped"}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (\S+) response status/o ) {
|
||||
my $req = $1;
|
||||
if ( $ThisLine =~ / \S+ response status for .* with depth '(\d)' in ([0-9.]+) seconds: (\d+)/o ) {
|
||||
$req .= ":D=" . $1 . ":R=" . $3;
|
||||
ResponseTimesMinMaxSum($req, $2) if ($Detail >= 10);
|
||||
} elsif ( $ThisLine =~ / \S+ response status for .* in ([0-9.]+) seconds: (\d+)/ ) {
|
||||
$req .= ":R=" . $2;
|
||||
ResponseTimesMinMaxSum($req, $1) if ($Detail >= 10);
|
||||
}
|
||||
$Responses{$req}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (\S+) request for/o ) {
|
||||
my $req = $1;
|
||||
if ( $ThisLine =~ / \S+ request for .* with depth '(\d)' received/o ) {
|
||||
$req .= ":D=" . $1;
|
||||
}
|
||||
$Requests{$req}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (Successful login): '([^']+)'/o ) {
|
||||
$Logins{$2}++ if ($Detail >= 5);
|
||||
$OtherEvents{$1}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (Failed login attempt) /o ) {
|
||||
$OtherEvents{$1}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ /\[(DEBUG|INFO)\] /o ) {
|
||||
# skip if DEBUG+INFO
|
||||
}
|
||||
else {
|
||||
# Report any unmatched entries...
|
||||
$ThisLine =~ s/^\[\d+(\/Thread-\d+)?\] //; # remove process/Thread ID
|
||||
chomp($ThisLine);
|
||||
$OtherList{$ThisLine}++;
|
||||
}
|
||||
}
|
||||
|
||||
if ($Started) {
|
||||
print "\nStatistics:\n";
|
||||
print " Radicale started: $Started Time(s)\n";
|
||||
}
|
||||
|
||||
if (keys %Loglevel) {
|
||||
$sum = Sum(\%Loglevel);
|
||||
print "\n**Loglevel counters**\n";
|
||||
printf "%-18s | %7s | %5s |\n", "Loglevel", "cnt", "ratio";
|
||||
print "-" x38 . "\n";
|
||||
foreach my $level (sort keys %Loglevel) {
|
||||
printf "%-18s | %7d | %3d%% |\n", $level, $Loglevel{$level}, int(($Loglevel{$level} * 100) / $sum);
|
||||
}
|
||||
print "-" x38 . "\n";
|
||||
printf "%-18s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %Requests) {
|
||||
$sum = Sum(\%Requests);
|
||||
print "\n**Request counters (D=<depth>)**\n";
|
||||
printf "%-18s | %7s | %5s |\n", "Request", "cnt", "ratio";
|
||||
print "-" x38 . "\n";
|
||||
foreach my $req (sort keys %Requests) {
|
||||
printf "%-18s | %7d | %3d%% |\n", $req, $Requests{$req}, int(($Requests{$req} * 100) / $sum);
|
||||
}
|
||||
print "-" x38 . "\n";
|
||||
printf "%-18s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %Responses) {
|
||||
$sum = Sum(\%Responses);
|
||||
print "\n**Response result counters ((D=<depth> R=<result>)**\n";
|
||||
printf "%-18s | %7s | %5s |\n", "Response", "cnt", "ratio";
|
||||
print "-" x38 . "\n";
|
||||
foreach my $req (sort keys %Responses) {
|
||||
printf "%-18s | %7d | %3d%% |\n", $req, $Responses{$req}, int(($Responses{$req} * 100) / $sum);
|
||||
}
|
||||
print "-" x38 . "\n";
|
||||
printf "%-18s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %Logins) {
|
||||
$sum = Sum(\%Logins);
|
||||
$length = MaxLength(\%Logins);
|
||||
print "\n**Successful login counters**\n";
|
||||
printf "%-" . $length . "s | %7s | %5s |\n", "Login", "cnt", "ratio";
|
||||
print "-" x($length + 20) . "\n";
|
||||
foreach my $login (sort keys %Logins) {
|
||||
printf "%-" . $length . "s | %7d | %3d%% |\n", $login, $Logins{$login}, int(($Logins{$login} * 100) / $sum);
|
||||
}
|
||||
print "-" x($length + 20) . "\n";
|
||||
printf "%-" . $length . "s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %ResponseTimes) {
|
||||
print "\n**Response timings (counts, seconds) (D=<depth> R=<result>)**\n";
|
||||
printf "%-18s | %7s | %7s | %7s | %7s |\n", "Response", "cnt", "min", "max", "avg";
|
||||
print "-" x60 . "\n";
|
||||
foreach my $req (sort keys %ResponseTimes) {
|
||||
printf "%-18s | %7d | %7.3f | %7.3f | %7.3f |\n", $req
|
||||
, $ResponseTimes{$req}->{'cnt'}
|
||||
, $ResponseTimes{$req}->{'min'}
|
||||
, $ResponseTimes{$req}->{'max'}
|
||||
, $ResponseTimes{$req}->{'sum'} / $ResponseTimes{$req}->{'cnt'};
|
||||
}
|
||||
print "-" x60 . "\n";
|
||||
}
|
||||
|
||||
if (keys %OtherEvents) {
|
||||
print "\n**Other Events**\n";
|
||||
foreach $ThisOne (sort keys %OtherEvents) {
|
||||
print "$ThisOne: $OtherEvents{$ThisOne} Time(s)\n";
|
||||
}
|
||||
}
|
||||
|
||||
if (keys %OtherList) {
|
||||
print "\n**Unmatched Entries**\n";
|
||||
foreach $ThisOne (sort keys %OtherList) {
|
||||
print "$ThisOne: $OtherList{$ThisOne} Time(s)\n";
|
||||
}
|
||||
}
|
||||
|
||||
exit(0);
|
||||
|
||||
# vim: shiftwidth=3 tabstop=3 syntax=perl et smartindent
|
11
contrib/logwatch/radicale-journald.conf
Normal file
11
contrib/logwatch/radicale-journald.conf
Normal file
|
@ -0,0 +1,11 @@
|
|||
# This file is related to Radicale - CalDAV and CardDAV server
|
||||
# for logwatch (config) - input from journald
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
|
||||
Title = "Radicale"
|
||||
|
||||
LogFile = none
|
||||
|
||||
*JournalCtl = "--output=cat --unit=radicale.service"
|
||||
|
||||
# vi: shiftwidth=3 tabstop=3 et
|
13
contrib/logwatch/radicale-syslog.conf
Normal file
13
contrib/logwatch/radicale-syslog.conf
Normal file
|
@ -0,0 +1,13 @@
|
|||
# This file is related to Radicale - CalDAV and CardDAV server
|
||||
# for logwatch (config) - input from syslog file
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
|
||||
Title = "Radicale"
|
||||
|
||||
LogFile = messages
|
||||
|
||||
*OnlyService = radicale
|
||||
|
||||
*RemoveHeaders
|
||||
|
||||
# vi: shiftwidth=3 tabstop=3 et
|
|
@ -2,6 +2,10 @@
|
|||
###
|
||||
### Usual configuration file location: /etc/nginx/default.d/
|
||||
|
||||
## "well-known" redirect at least for Apple devices
|
||||
rewrite ^/.well-known/carddav /radicale/ redirect;
|
||||
rewrite ^/.well-known/caldav /radicale/ redirect;
|
||||
|
||||
## Base URI: /radicale/
|
||||
location /radicale/ {
|
||||
proxy_pass http://localhost:5232/;
|
||||
|
|
|
@ -3,7 +3,7 @@ name = "Radicale"
|
|||
# When the version is updated, a new section in the CHANGELOG.md file must be
|
||||
# added too.
|
||||
readme = "README.md"
|
||||
version = "3.3.2"
|
||||
version = "3.5.1.dev"
|
||||
authors = [{name = "Guillaume Ayoub", email = "guillaume.ayoub@kozea.fr"}, {name = "Unrud", email = "unrud@outlook.com"}, {name = "Peter Bieringer", email = "pb@bieringer.de"}]
|
||||
license = {text = "GNU GPL v3"}
|
||||
description = "CalDAV and CardDAV Server"
|
||||
|
@ -17,7 +17,6 @@ classifiers = [
|
|||
"License :: OSI Approved :: GNU General Public License (GPL)",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
|
@ -28,12 +27,13 @@ classifiers = [
|
|||
"Topic :: Office/Business :: Groupware",
|
||||
]
|
||||
urls = {Homepage = "https://radicale.org/"}
|
||||
requires-python = ">=3.8.0"
|
||||
requires-python = ">=3.9.0"
|
||||
dependencies = [
|
||||
"defusedxml",
|
||||
"passlib",
|
||||
"vobject>=0.9.6",
|
||||
"pika>=1.1.0",
|
||||
"requests",
|
||||
]
|
||||
|
||||
|
||||
|
@ -73,7 +73,7 @@ skip_install = true
|
|||
|
||||
[tool.tox.env.mypy]
|
||||
deps = ["mypy==1.11.0"]
|
||||
commands = [["mypy", "."]]
|
||||
commands = [["mypy", "--install-types", "--non-interactive", "."]]
|
||||
skip_install = true
|
||||
|
||||
|
||||
|
|
|
@ -3,4 +3,8 @@ Radicale WSGI file (mod_wsgi and uWSGI compliant).
|
|||
|
||||
"""
|
||||
|
||||
import os
|
||||
from radicale import application
|
||||
|
||||
# set an environment variable
|
||||
os.environ.setdefault('SERVER_GATEWAY_INTERFACE', 'Web')
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -68,6 +68,7 @@ class Application(ApplicationPartDelete, ApplicationPartHead,
|
|||
_internal_server: bool
|
||||
_max_content_length: int
|
||||
_auth_realm: str
|
||||
_script_name: str
|
||||
_extra_headers: Mapping[str, str]
|
||||
_permit_delete_collection: bool
|
||||
_permit_overwrite_collection: bool
|
||||
|
@ -87,6 +88,19 @@ class Application(ApplicationPartDelete, ApplicationPartHead,
|
|||
self._response_content_on_debug = configuration.get("logging", "response_content_on_debug")
|
||||
self._auth_delay = configuration.get("auth", "delay")
|
||||
self._internal_server = configuration.get("server", "_internal_server")
|
||||
self._script_name = configuration.get("server", "script_name")
|
||||
if self._script_name:
|
||||
if self._script_name[0] != "/":
|
||||
logger.error("server.script_name must start with '/': %r", self._script_name)
|
||||
raise RuntimeError("server.script_name option has to start with '/'")
|
||||
else:
|
||||
if self._script_name.endswith("/"):
|
||||
logger.error("server.script_name must not end with '/': %r", self._script_name)
|
||||
raise RuntimeError("server.script_name option must not end with '/'")
|
||||
else:
|
||||
logger.info("Provided script name to strip from URI if called by reverse proxy: %r", self._script_name)
|
||||
else:
|
||||
logger.info("Default script name to strip from URI if called by reverse proxy is taken from HTTP_X_SCRIPT_NAME or SCRIPT_NAME")
|
||||
self._max_content_length = configuration.get(
|
||||
"server", "max_content_length")
|
||||
self._auth_realm = configuration.get("auth", "realm")
|
||||
|
@ -136,6 +150,7 @@ class Application(ApplicationPartDelete, ApplicationPartHead,
|
|||
time_begin = datetime.datetime.now()
|
||||
request_method = environ["REQUEST_METHOD"].upper()
|
||||
unsafe_path = environ.get("PATH_INFO", "")
|
||||
https = environ.get("HTTPS", "")
|
||||
|
||||
"""Manage a request."""
|
||||
def response(status: int, headers: types.WSGIResponseHeaders,
|
||||
|
@ -178,23 +193,31 @@ class Application(ApplicationPartDelete, ApplicationPartHead,
|
|||
# Return response content
|
||||
return status_text, list(headers.items()), answers
|
||||
|
||||
reverse_proxy = False
|
||||
remote_host = "unknown"
|
||||
if environ.get("REMOTE_HOST"):
|
||||
remote_host = repr(environ["REMOTE_HOST"])
|
||||
elif environ.get("REMOTE_ADDR"):
|
||||
remote_host = environ["REMOTE_ADDR"]
|
||||
if environ.get("HTTP_X_FORWARDED_FOR"):
|
||||
reverse_proxy = True
|
||||
remote_host = "%s (forwarded for %r)" % (
|
||||
remote_host, environ["HTTP_X_FORWARDED_FOR"])
|
||||
if environ.get("HTTP_X_FORWARDED_HOST") or environ.get("HTTP_X_FORWARDED_PROTO") or environ.get("HTTP_X_FORWARDED_SERVER"):
|
||||
reverse_proxy = True
|
||||
remote_useragent = ""
|
||||
if environ.get("HTTP_USER_AGENT"):
|
||||
remote_useragent = " using %r" % environ["HTTP_USER_AGENT"]
|
||||
depthinfo = ""
|
||||
if environ.get("HTTP_DEPTH"):
|
||||
depthinfo = " with depth %r" % environ["HTTP_DEPTH"]
|
||||
logger.info("%s request for %r%s received from %s%s",
|
||||
if https:
|
||||
https_info = " " + environ.get("SSL_PROTOCOL", "") + " " + environ.get("SSL_CIPHER", "")
|
||||
else:
|
||||
https_info = ""
|
||||
logger.info("%s request for %r%s received from %s%s%s",
|
||||
request_method, unsafe_path, depthinfo,
|
||||
remote_host, remote_useragent)
|
||||
remote_host, remote_useragent, https_info)
|
||||
if self._request_header_on_debug:
|
||||
logger.debug("Request header:\n%s",
|
||||
pprint.pformat(self._scrub_headers(environ)))
|
||||
|
@ -204,24 +227,37 @@ class Application(ApplicationPartDelete, ApplicationPartHead,
|
|||
# SCRIPT_NAME is already removed from PATH_INFO, according to the
|
||||
# WSGI specification.
|
||||
# Reverse proxies can overwrite SCRIPT_NAME with X-SCRIPT-NAME header
|
||||
base_prefix_src = ("HTTP_X_SCRIPT_NAME" if "HTTP_X_SCRIPT_NAME" in
|
||||
environ else "SCRIPT_NAME")
|
||||
base_prefix = environ.get(base_prefix_src, "")
|
||||
if base_prefix and base_prefix[0] != "/":
|
||||
logger.error("Base prefix (from %s) must start with '/': %r",
|
||||
base_prefix_src, base_prefix)
|
||||
if base_prefix_src == "HTTP_X_SCRIPT_NAME":
|
||||
return response(*httputils.BAD_REQUEST)
|
||||
return response(*httputils.INTERNAL_SERVER_ERROR)
|
||||
if base_prefix.endswith("/"):
|
||||
logger.warning("Base prefix (from %s) must not end with '/': %r",
|
||||
base_prefix_src, base_prefix)
|
||||
base_prefix = base_prefix.rstrip("/")
|
||||
logger.debug("Base prefix (from %s): %r", base_prefix_src, base_prefix)
|
||||
if self._script_name and (reverse_proxy is True):
|
||||
base_prefix_src = "config"
|
||||
base_prefix = self._script_name
|
||||
else:
|
||||
base_prefix_src = ("HTTP_X_SCRIPT_NAME" if "HTTP_X_SCRIPT_NAME" in
|
||||
environ else "SCRIPT_NAME")
|
||||
base_prefix = environ.get(base_prefix_src, "")
|
||||
if base_prefix and base_prefix[0] != "/":
|
||||
logger.error("Base prefix (from %s) must start with '/': %r",
|
||||
base_prefix_src, base_prefix)
|
||||
if base_prefix_src == "HTTP_X_SCRIPT_NAME":
|
||||
return response(*httputils.BAD_REQUEST)
|
||||
return response(*httputils.INTERNAL_SERVER_ERROR)
|
||||
if base_prefix.endswith("/"):
|
||||
logger.warning("Base prefix (from %s) must not end with '/': %r",
|
||||
base_prefix_src, base_prefix)
|
||||
base_prefix = base_prefix.rstrip("/")
|
||||
if base_prefix:
|
||||
logger.debug("Base prefix (from %s): %r", base_prefix_src, base_prefix)
|
||||
|
||||
# Sanitize request URI (a WSGI server indicates with an empty path,
|
||||
# that the URL targets the application root without a trailing slash)
|
||||
path = pathutils.sanitize_path(unsafe_path)
|
||||
logger.debug("Sanitized path: %r", path)
|
||||
if (reverse_proxy is True) and (len(base_prefix) > 0):
|
||||
if path.startswith(base_prefix):
|
||||
path_new = path.removeprefix(base_prefix)
|
||||
logger.debug("Called by reverse proxy, remove base prefix %r from path: %r => %r", base_prefix, path, path_new)
|
||||
path = path_new
|
||||
else:
|
||||
logger.warning("Called by reverse proxy, cannot removed base prefix %r from path: %r as not matching", base_prefix, path)
|
||||
|
||||
# Get function corresponding to method
|
||||
function = getattr(self, "do_%s" % request_method, None)
|
||||
|
@ -252,7 +288,7 @@ class Application(ApplicationPartDelete, ApplicationPartHead,
|
|||
self.configuration, environ, base64.b64decode(
|
||||
authorization.encode("ascii"))).split(":", 1)
|
||||
|
||||
user = self._auth.login(login, password) or "" if login else ""
|
||||
(user, info) = self._auth.login(login, password) or ("", "") if login else ("", "")
|
||||
if self.configuration.get("auth", "type") == "ldap":
|
||||
try:
|
||||
logger.debug("Groups %r", ",".join(self._auth._ldap_groups))
|
||||
|
@ -260,16 +296,16 @@ class Application(ApplicationPartDelete, ApplicationPartHead,
|
|||
except AttributeError:
|
||||
pass
|
||||
if user and login == user:
|
||||
logger.info("Successful login: %r", user)
|
||||
logger.info("Successful login: %r (%s)", user, info)
|
||||
elif user:
|
||||
logger.info("Successful login: %r -> %r", login, user)
|
||||
logger.info("Successful login: %r -> %r (%s)", login, user, info)
|
||||
elif login:
|
||||
logger.warning("Failed login attempt from %s: %r",
|
||||
remote_host, login)
|
||||
logger.warning("Failed login attempt from %s: %r (%s)",
|
||||
remote_host, login, info)
|
||||
# Random delay to avoid timing oracles and bruteforce attacks
|
||||
if self._auth_delay > 0:
|
||||
random_delay = self._auth_delay * (0.5 + random.random())
|
||||
logger.debug("Sleeping %.3f seconds", random_delay)
|
||||
logger.debug("Failed login, sleeping random: %.3f sec", random_delay)
|
||||
time.sleep(random_delay)
|
||||
|
||||
if user and not pathutils.is_safe_path_component(user):
|
||||
|
|
|
@ -66,6 +66,8 @@ class ApplicationPartGet(ApplicationBase):
|
|||
if path == "/.web" or path.startswith("/.web/"):
|
||||
# Redirect to sanitized path for all subpaths of /.web
|
||||
unsafe_path = environ.get("PATH_INFO", "")
|
||||
if len(base_prefix) > 0:
|
||||
unsafe_path = unsafe_path.removeprefix(base_prefix)
|
||||
if unsafe_path != path:
|
||||
location = base_prefix + path
|
||||
logger.info("Redirecting to sanitized path: %r ==> %r",
|
||||
|
|
|
@ -2,7 +2,8 @@
|
|||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,7 +18,9 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import posixpath
|
||||
import re
|
||||
import socket
|
||||
from http import client
|
||||
|
||||
|
@ -70,7 +73,20 @@ class ApplicationPartMkcalendar(ApplicationBase):
|
|||
try:
|
||||
self._storage.create_collection(path, props=props)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
# return better matching HTTP result in case errno is provided and catched
|
||||
errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
|
||||
if errno_match:
|
||||
logger.error(
|
||||
"Failed MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||
errno_e = int(errno_match.group(1))
|
||||
if errno_e == errno.ENOSPC:
|
||||
return httputils.INSUFFICIENT_STORAGE
|
||||
elif errno_e in [errno.EPERM, errno.EACCES]:
|
||||
return httputils.FORBIDDEN
|
||||
else:
|
||||
return httputils.INTERNAL_SERVER_ERROR
|
||||
else:
|
||||
logger.warning(
|
||||
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
return client.CREATED, {}, None
|
||||
|
|
|
@ -2,7 +2,8 @@
|
|||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,7 +18,9 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import posixpath
|
||||
import re
|
||||
import socket
|
||||
from http import client
|
||||
|
||||
|
@ -74,8 +77,21 @@ class ApplicationPartMkcol(ApplicationBase):
|
|||
try:
|
||||
self._storage.create_collection(path, props=props)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad MKCOL request on %r (type:%s): %s", path, collection_type, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
# return better matching HTTP result in case errno is provided and catched
|
||||
errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
|
||||
if errno_match:
|
||||
logger.error(
|
||||
"Failed MKCOL request on %r (type:%s): %s", path, collection_type, e, exc_info=True)
|
||||
errno_e = int(errno_match.group(1))
|
||||
if errno_e == errno.ENOSPC:
|
||||
return httputils.INSUFFICIENT_STORAGE
|
||||
elif errno_e in [errno.EPERM, errno.EACCES]:
|
||||
return httputils.FORBIDDEN
|
||||
else:
|
||||
return httputils.INTERNAL_SERVER_ERROR
|
||||
else:
|
||||
logger.warning(
|
||||
"Bad MKCOL request on %r (type:%s): %s", path, collection_type, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
logger.info("MKCOL request %r (type:%s): %s", path, collection_type, "successful")
|
||||
return client.CREATED, {}, None
|
||||
|
|
|
@ -2,7 +2,8 @@
|
|||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2023 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2023-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,6 +18,7 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import posixpath
|
||||
import re
|
||||
from http import client
|
||||
|
@ -109,7 +111,20 @@ class ApplicationPartMove(ApplicationBase):
|
|||
try:
|
||||
self._storage.move(item, to_collection, to_href)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad MOVE request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
# return better matching HTTP result in case errno is provided and catched
|
||||
errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
|
||||
if errno_match:
|
||||
logger.error(
|
||||
"Failed MOVE request on %r: %s", path, e, exc_info=True)
|
||||
errno_e = int(errno_match.group(1))
|
||||
if errno_e == errno.ENOSPC:
|
||||
return httputils.INSUFFICIENT_STORAGE
|
||||
elif errno_e in [errno.EPERM, errno.EACCES]:
|
||||
return httputils.FORBIDDEN
|
||||
else:
|
||||
return httputils.INTERNAL_SERVER_ERROR
|
||||
else:
|
||||
logger.warning(
|
||||
"Bad MOVE request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
return client.NO_CONTENT if to_item else client.CREATED, {}, None
|
||||
|
|
|
@ -2,7 +2,9 @@
|
|||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2020-2020 Tuna Celik <tuna@jakpark.com>
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,6 +19,8 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import re
|
||||
import socket
|
||||
import xml.etree.ElementTree as ET
|
||||
from http import client
|
||||
|
@ -107,7 +111,20 @@ class ApplicationPartProppatch(ApplicationBase):
|
|||
)
|
||||
self._hook.notify(hook_notification_item)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
# return better matching HTTP result in case errno is provided and catched
|
||||
errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
|
||||
if errno_match:
|
||||
logger.error(
|
||||
"Failed PROPPATCH request on %r: %s", path, e, exc_info=True)
|
||||
errno_e = int(errno_match.group(1))
|
||||
if errno_e == errno.ENOSPC:
|
||||
return httputils.INSUFFICIENT_STORAGE
|
||||
elif errno_e in [errno.EPERM, errno.EACCES]:
|
||||
return httputils.FORBIDDEN
|
||||
else:
|
||||
return httputils.INTERNAL_SERVER_ERROR
|
||||
else:
|
||||
logger.warning(
|
||||
"Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
return client.MULTI_STATUS, headers, self._xml_response(xml_answer)
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2020-2023 Tuna Celik <tuna@jakpark.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -19,8 +19,10 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import itertools
|
||||
import posixpath
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
from http import client
|
||||
|
@ -163,7 +165,7 @@ class ApplicationPartPut(ApplicationBase):
|
|||
bool(rights.intersect(access.permissions, "Ww")),
|
||||
bool(rights.intersect(access.parent_permissions, "w")))
|
||||
|
||||
with self._storage.acquire_lock("w", user):
|
||||
with self._storage.acquire_lock("w", user, path=path):
|
||||
item = next(iter(self._storage.discover(path)), None)
|
||||
parent_item = next(iter(
|
||||
self._storage.discover(access.parent_path)), None)
|
||||
|
@ -198,15 +200,22 @@ class ApplicationPartPut(ApplicationBase):
|
|||
etag = environ.get("HTTP_IF_MATCH", "")
|
||||
if not item and etag:
|
||||
# Etag asked but no item found: item has been removed
|
||||
logger.warning("Precondition failed on PUT request for %r (HTTP_IF_MATCH: %s, item not existing)", path, etag)
|
||||
return httputils.PRECONDITION_FAILED
|
||||
if item and etag and item.etag != etag:
|
||||
# Etag asked but item not matching: item has changed
|
||||
logger.warning("Precondition failed on PUT request for %r (HTTP_IF_MATCH: %s, item has different etag: %s)", path, etag, item.etag)
|
||||
return httputils.PRECONDITION_FAILED
|
||||
if item and etag:
|
||||
logger.debug("Precondition passed on PUT request for %r (HTTP_IF_MATCH: %s, item has etag: %s)", path, etag, item.etag)
|
||||
|
||||
match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
|
||||
if item and match:
|
||||
# Creation asked but item found: item can't be replaced
|
||||
logger.warning("Precondition failed on PUT request for %r (HTTP_IF_NONE_MATCH: *, creation requested but item found with etag: %s)", path, item.etag)
|
||||
return httputils.PRECONDITION_FAILED
|
||||
if match:
|
||||
logger.debug("Precondition passed on PUT request for %r (HTTP_IF_NONE_MATCH: *)", path)
|
||||
|
||||
if (tag != prepared_tag or
|
||||
prepared_write_whole_collection != write_whole_collection):
|
||||
|
@ -257,9 +266,22 @@ class ApplicationPartPut(ApplicationBase):
|
|||
)
|
||||
self._hook.notify(hook_notification_item)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad PUT request on %r (upload): %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
# return better matching HTTP result in case errno is provided and catched
|
||||
errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
|
||||
if errno_match:
|
||||
logger.error(
|
||||
"Failed PUT request on %r (upload): %s", path, e, exc_info=True)
|
||||
errno_e = int(errno_match.group(1))
|
||||
if errno_e == errno.ENOSPC:
|
||||
return httputils.INSUFFICIENT_STORAGE
|
||||
elif errno_e in [errno.EPERM, errno.EACCES]:
|
||||
return httputils.FORBIDDEN
|
||||
else:
|
||||
return httputils.INTERNAL_SERVER_ERROR
|
||||
else:
|
||||
logger.warning(
|
||||
"Bad PUT request on %r (upload): %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
|
||||
headers = {"ETag": etag}
|
||||
return client.CREATED, headers, None
|
||||
|
|
|
@ -2,7 +2,11 @@
|
|||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Pieter Hijma <pieterhijma@users.noreply.github.com>
|
||||
# Copyright © 2024-2024 Ray <ray@react0r.com>
|
||||
# Copyright © 2024-2024 Georgiy <metallerok@gmail.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -171,7 +175,11 @@ def xml_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
|
|||
xmlutils.make_human_tag(root.tag), path)
|
||||
return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")
|
||||
|
||||
props: Union[ET.Element, List] = root.find(xmlutils.make_clark("D:prop")) or []
|
||||
props: Union[ET.Element, List]
|
||||
if root.find(xmlutils.make_clark("D:prop")) is not None:
|
||||
props = root.find(xmlutils.make_clark("D:prop")) # type: ignore[assignment]
|
||||
else:
|
||||
props = []
|
||||
|
||||
hreferences: Iterable[str]
|
||||
if root.tag in (
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -29,7 +29,11 @@ Take a look at the class ``BaseAuth`` if you want to implement your own.
|
|||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Set, Tuple, Union
|
||||
import hashlib
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from typing import List, Sequence, Set, Tuple, Union, final
|
||||
|
||||
from radicale import config, types, utils
|
||||
from radicale.log import logger
|
||||
|
@ -38,15 +42,50 @@ INTERNAL_TYPES: Sequence[str] = ("none", "remote_user", "http_x_remote_user",
|
|||
"denyall",
|
||||
"htpasswd",
|
||||
"ldap",
|
||||
"imap",
|
||||
"oauth2",
|
||||
"pam",
|
||||
"dovecot")
|
||||
|
||||
CACHE_LOGIN_TYPES: Sequence[str] = (
|
||||
"dovecot",
|
||||
"ldap",
|
||||
"htpasswd",
|
||||
"imap",
|
||||
"oauth2",
|
||||
"pam",
|
||||
)
|
||||
|
||||
INSECURE_IF_NO_LOOPBACK_TYPES: Sequence[str] = (
|
||||
"remote_user",
|
||||
"http_x_remote_user",
|
||||
)
|
||||
|
||||
AUTH_SOCKET_FAMILY: Sequence[str] = ("AF_UNIX", "AF_INET", "AF_INET6")
|
||||
|
||||
|
||||
def load(configuration: "config.Configuration") -> "BaseAuth":
|
||||
"""Load the authentication module chosen in configuration."""
|
||||
if configuration.get("auth", "type") == "none":
|
||||
logger.warning("No user authentication is selected: '[auth] type=none' (insecure)")
|
||||
if configuration.get("auth", "type") == "denyall":
|
||||
logger.warning("All access is blocked by: '[auth] type=denyall'")
|
||||
_type = configuration.get("auth", "type")
|
||||
if _type == "none":
|
||||
logger.warning("No user authentication is selected: '[auth] type=none' (INSECURE)")
|
||||
elif _type == "denyall":
|
||||
logger.warning("All user authentication is blocked by: '[auth] type=denyall'")
|
||||
elif _type in INSECURE_IF_NO_LOOPBACK_TYPES:
|
||||
sgi = os.environ.get('SERVER_GATEWAY_INTERFACE') or None
|
||||
if not sgi:
|
||||
hosts: List[Tuple[str, int]] = configuration.get("server", "hosts")
|
||||
localhost_only = True
|
||||
address_lo = []
|
||||
address = []
|
||||
for address_port in hosts:
|
||||
if address_port[0] in ["localhost", "localhost6", "127.0.0.1", "::1"]:
|
||||
address_lo.append(utils.format_address(address_port))
|
||||
else:
|
||||
address.append(utils.format_address(address_port))
|
||||
localhost_only = False
|
||||
if localhost_only is False:
|
||||
logger.warning("User authentication '[auth] type=%s' is selected but server is not only listen on loopback address (potentially INSECURE): %s", _type, " ".join(address))
|
||||
return utils.load_plugin(INTERNAL_TYPES, "auth", "Auth", BaseAuth,
|
||||
configuration)
|
||||
|
||||
|
@ -57,6 +96,16 @@ class BaseAuth:
|
|||
_lc_username: bool
|
||||
_uc_username: bool
|
||||
_strip_domain: bool
|
||||
_auth_delay: float
|
||||
_failed_auth_delay: float
|
||||
_type: str
|
||||
_cache_logins: bool
|
||||
_cache_successful: dict # login -> (digest, time_ns)
|
||||
_cache_successful_logins_expiry: int
|
||||
_cache_failed: dict # digest_failed -> (time_ns, login)
|
||||
_cache_failed_logins_expiry: int
|
||||
_cache_failed_logins_salt_ns: int # persistent over runtime
|
||||
_lock: threading.Lock
|
||||
|
||||
def __init__(self, configuration: "config.Configuration") -> None:
|
||||
"""Initialize BaseAuth.
|
||||
|
@ -75,6 +124,38 @@ class BaseAuth:
|
|||
logger.info("auth.uc_username: %s", self._uc_username)
|
||||
if self._lc_username is True and self._uc_username is True:
|
||||
raise RuntimeError("auth.lc_username and auth.uc_username cannot be enabled together")
|
||||
self._auth_delay = configuration.get("auth", "delay")
|
||||
logger.info("auth.delay: %f", self._auth_delay)
|
||||
self._failed_auth_delay = 0
|
||||
self._lock = threading.Lock()
|
||||
# cache_successful_logins
|
||||
self._cache_logins = configuration.get("auth", "cache_logins")
|
||||
self._type = configuration.get("auth", "type")
|
||||
if (self._type in CACHE_LOGIN_TYPES) or (self._cache_logins is False):
|
||||
logger.info("auth.cache_logins: %s", self._cache_logins)
|
||||
else:
|
||||
logger.info("auth.cache_logins: %s (but not required for type '%s' and disabled therefore)", self._cache_logins, self._type)
|
||||
self._cache_logins = False
|
||||
if self._cache_logins is True:
|
||||
self._cache_successful_logins_expiry = configuration.get("auth", "cache_successful_logins_expiry")
|
||||
if self._cache_successful_logins_expiry < 0:
|
||||
raise RuntimeError("self._cache_successful_logins_expiry cannot be < 0")
|
||||
self._cache_failed_logins_expiry = configuration.get("auth", "cache_failed_logins_expiry")
|
||||
if self._cache_failed_logins_expiry < 0:
|
||||
raise RuntimeError("self._cache_failed_logins_expiry cannot be < 0")
|
||||
logger.info("auth.cache_successful_logins_expiry: %s seconds", self._cache_successful_logins_expiry)
|
||||
logger.info("auth.cache_failed_logins_expiry: %s seconds", self._cache_failed_logins_expiry)
|
||||
# cache init
|
||||
self._cache_successful = dict()
|
||||
self._cache_failed = dict()
|
||||
self._cache_failed_logins_salt_ns = time.time_ns()
|
||||
|
||||
def _cache_digest(self, login: str, password: str, salt: str) -> str:
|
||||
h = hashlib.sha3_512()
|
||||
h.update(salt.encode())
|
||||
h.update(login.encode())
|
||||
h.update(password.encode())
|
||||
return str(h.digest())
|
||||
|
||||
def get_external_login(self, environ: types.WSGIEnviron) -> Union[
|
||||
Tuple[()], Tuple[str, str]]:
|
||||
|
@ -102,11 +183,132 @@ class BaseAuth:
|
|||
|
||||
raise NotImplementedError
|
||||
|
||||
def login(self, login: str, password: str) -> str:
|
||||
def _sleep_for_constant_exec_time(self, time_ns_begin: int):
|
||||
"""Sleep some time to reach a constant execution time for failed logins
|
||||
|
||||
Independent of time required by external backend or used digest methods
|
||||
|
||||
Increase final execution time in case initial limit exceeded
|
||||
|
||||
See also issue 591
|
||||
|
||||
"""
|
||||
time_delta = (time.time_ns() - time_ns_begin) / 1000 / 1000 / 1000
|
||||
with self._lock:
|
||||
# avoid that another thread is changing global value at the same time
|
||||
failed_auth_delay = self._failed_auth_delay
|
||||
failed_auth_delay_old = failed_auth_delay
|
||||
if time_delta > failed_auth_delay:
|
||||
# set new
|
||||
failed_auth_delay = time_delta
|
||||
# store globally
|
||||
self._failed_auth_delay = failed_auth_delay
|
||||
if (failed_auth_delay_old != failed_auth_delay):
|
||||
logger.debug("Failed login constant execution time need increase of failed_auth_delay: %.9f -> %.9f sec", failed_auth_delay_old, failed_auth_delay)
|
||||
# sleep == 0
|
||||
else:
|
||||
sleep = failed_auth_delay - time_delta
|
||||
logger.debug("Failed login constant exection time alignment, sleeping: %.9f sec", sleep)
|
||||
time.sleep(sleep)
|
||||
|
||||
@final
|
||||
def login(self, login: str, password: str) -> Tuple[str, str]:
|
||||
time_ns_begin = time.time_ns()
|
||||
result_from_cache = False
|
||||
if self._lc_username:
|
||||
login = login.lower()
|
||||
if self._uc_username:
|
||||
login = login.upper()
|
||||
if self._strip_domain:
|
||||
login = login.split('@')[0]
|
||||
return self._login(login, password)
|
||||
if self._cache_logins is True:
|
||||
# time_ns is also used as salt
|
||||
result = ""
|
||||
digest = ""
|
||||
time_ns = time.time_ns()
|
||||
# cleanup failed login cache to avoid out-of-memory
|
||||
cache_failed_entries = len(self._cache_failed)
|
||||
if cache_failed_entries > 0:
|
||||
logger.debug("Login failed cache investigation start (entries: %d)", cache_failed_entries)
|
||||
self._lock.acquire()
|
||||
cache_failed_cleanup = dict()
|
||||
for digest in self._cache_failed:
|
||||
(time_ns_cache, login_cache) = self._cache_failed[digest]
|
||||
age_failed = int((time_ns - time_ns_cache) / 1000 / 1000 / 1000)
|
||||
if age_failed > self._cache_failed_logins_expiry:
|
||||
cache_failed_cleanup[digest] = (login_cache, age_failed)
|
||||
cache_failed_cleanup_entries = len(cache_failed_cleanup)
|
||||
logger.debug("Login failed cache cleanup start (entries: %d)", cache_failed_cleanup_entries)
|
||||
if cache_failed_cleanup_entries > 0:
|
||||
for digest in cache_failed_cleanup:
|
||||
(login, age_failed) = cache_failed_cleanup[digest]
|
||||
logger.debug("Login failed cache entry for user+password expired: '%s' (age: %d > %d sec)", login_cache, age_failed, self._cache_failed_logins_expiry)
|
||||
del self._cache_failed[digest]
|
||||
self._lock.release()
|
||||
logger.debug("Login failed cache investigation finished")
|
||||
# check for cache failed login
|
||||
digest_failed = login + ":" + self._cache_digest(login, password, str(self._cache_failed_logins_salt_ns))
|
||||
if self._cache_failed.get(digest_failed):
|
||||
# login+password found in cache "failed" -> shortcut return
|
||||
(time_ns_cache, login_cache) = self._cache_failed[digest]
|
||||
age_failed = int((time_ns - time_ns_cache) / 1000 / 1000 / 1000)
|
||||
logger.debug("Login failed cache entry for user+password found: '%s' (age: %d sec)", login_cache, age_failed)
|
||||
self._sleep_for_constant_exec_time(time_ns_begin)
|
||||
return ("", self._type + " / cached")
|
||||
if self._cache_successful.get(login):
|
||||
# login found in cache "successful"
|
||||
(digest_cache, time_ns_cache) = self._cache_successful[login]
|
||||
digest = self._cache_digest(login, password, str(time_ns_cache))
|
||||
if digest == digest_cache:
|
||||
age_success = int((time_ns - time_ns_cache) / 1000 / 1000 / 1000)
|
||||
if age_success > self._cache_successful_logins_expiry:
|
||||
logger.debug("Login successful cache entry for user+password found but expired: '%s' (age: %d > %d sec)", login, age_success, self._cache_successful_logins_expiry)
|
||||
# delete expired success from cache
|
||||
del self._cache_successful[login]
|
||||
digest = ""
|
||||
else:
|
||||
logger.debug("Login successful cache entry for user+password found: '%s' (age: %d sec)", login, age_success)
|
||||
result = login
|
||||
result_from_cache = True
|
||||
else:
|
||||
logger.debug("Login successful cache entry for user+password not matching: '%s'", login)
|
||||
else:
|
||||
# login not found in cache, caculate always to avoid timing attacks
|
||||
digest = self._cache_digest(login, password, str(time_ns))
|
||||
if result == "":
|
||||
# verify login+password via configured backend
|
||||
logger.debug("Login verification for user+password via backend: '%s'", login)
|
||||
result = self._login(login, password)
|
||||
if result != "":
|
||||
logger.debug("Login successful for user+password via backend: '%s'", login)
|
||||
if digest == "":
|
||||
# successful login, but expired, digest must be recalculated
|
||||
digest = self._cache_digest(login, password, str(time_ns))
|
||||
# store successful login in cache
|
||||
self._lock.acquire()
|
||||
self._cache_successful[login] = (digest, time_ns)
|
||||
self._lock.release()
|
||||
logger.debug("Login successful cache for user set: '%s'", login)
|
||||
if self._cache_failed.get(digest_failed):
|
||||
logger.debug("Login failed cache for user cleared: '%s'", login)
|
||||
del self._cache_failed[digest_failed]
|
||||
else:
|
||||
logger.debug("Login failed for user+password via backend: '%s'", login)
|
||||
self._lock.acquire()
|
||||
self._cache_failed[digest_failed] = (time_ns, login)
|
||||
self._lock.release()
|
||||
logger.debug("Login failed cache for user set: '%s'", login)
|
||||
if result_from_cache is True:
|
||||
if result == "":
|
||||
self._sleep_for_constant_exec_time(time_ns_begin)
|
||||
return (result, self._type + " / cached")
|
||||
else:
|
||||
if result == "":
|
||||
self._sleep_for_constant_exec_time(time_ns_begin)
|
||||
return (result, self._type)
|
||||
else:
|
||||
# self._cache_logins is False
|
||||
result = self._login(login, password)
|
||||
if result == "":
|
||||
self._sleep_for_constant_exec_time(time_ns_begin)
|
||||
return (result, self._type)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2014 Giel van Schijndel
|
||||
# Copyright © 2019 (GalaxyMaster)
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -28,11 +29,24 @@ from radicale.log import logger
|
|||
class Auth(auth.BaseAuth):
|
||||
def __init__(self, configuration):
|
||||
super().__init__(configuration)
|
||||
self.socket = configuration.get("auth", "dovecot_socket")
|
||||
self.timeout = 5
|
||||
self.request_id_gen = itertools.count(1)
|
||||
|
||||
def login(self, login, password):
|
||||
config_family = configuration.get("auth", "dovecot_connection_type")
|
||||
if config_family == "AF_UNIX":
|
||||
self.family = socket.AF_UNIX
|
||||
self.address = configuration.get("auth", "dovecot_socket")
|
||||
logger.info("auth dovecot socket: %r", self.address)
|
||||
return
|
||||
|
||||
self.address = configuration.get("auth", "dovecot_host"), configuration.get("auth", "dovecot_port")
|
||||
logger.warning("auth dovecot address: %r (INSECURE, credentials are transmitted in clear text)", self.address)
|
||||
if config_family == "AF_INET":
|
||||
self.family = socket.AF_INET
|
||||
else:
|
||||
self.family = socket.AF_INET6
|
||||
|
||||
def _login(self, login, password):
|
||||
"""Validate credentials.
|
||||
|
||||
Check if the ``login``/``password`` pair is valid according to Dovecot.
|
||||
|
@ -49,12 +63,12 @@ class Auth(auth.BaseAuth):
|
|||
return ""
|
||||
|
||||
with closing(socket.socket(
|
||||
socket.AF_UNIX,
|
||||
self.family,
|
||||
socket.SOCK_STREAM)
|
||||
) as sock:
|
||||
try:
|
||||
sock.settimeout(self.timeout)
|
||||
sock.connect(self.socket)
|
||||
sock.connect(self.address)
|
||||
|
||||
buf = bytes()
|
||||
supported_mechs = []
|
||||
|
@ -171,8 +185,8 @@ class Auth(auth.BaseAuth):
|
|||
|
||||
except socket.error as e:
|
||||
logger.fatal(
|
||||
"Failed to communicate with Dovecot socket %r: %s" %
|
||||
(self.socket, e)
|
||||
"Failed to communicate with Dovecot: %s" %
|
||||
(e)
|
||||
)
|
||||
|
||||
return ""
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -50,7 +50,11 @@ When bcrypt is installed:
|
|||
|
||||
import functools
|
||||
import hmac
|
||||
from typing import Any
|
||||
import os
|
||||
import re
|
||||
import threading
|
||||
import time
|
||||
from typing import Any, Tuple
|
||||
|
||||
from passlib.hash import apr_md5_crypt, sha256_crypt, sha512_crypt
|
||||
|
||||
|
@ -61,72 +65,202 @@ class Auth(auth.BaseAuth):
|
|||
|
||||
_filename: str
|
||||
_encoding: str
|
||||
_htpasswd: dict # login -> digest
|
||||
_htpasswd_mtime_ns: int
|
||||
_htpasswd_size: int
|
||||
_htpasswd_ok: bool
|
||||
_htpasswd_not_ok_time: float
|
||||
_htpasswd_not_ok_reminder_seconds: int
|
||||
_htpasswd_bcrypt_use: int
|
||||
_htpasswd_cache: bool
|
||||
_has_bcrypt: bool
|
||||
_encryption: str
|
||||
_lock: threading.Lock
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
self._filename = configuration.get("auth", "htpasswd_filename")
|
||||
logger.info("auth htpasswd file: %r", self._filename)
|
||||
self._encoding = configuration.get("encoding", "stock")
|
||||
encryption: str = configuration.get("auth", "htpasswd_encryption")
|
||||
logger.info("auth htpasswd file encoding: %r", self._encoding)
|
||||
self._htpasswd_cache = configuration.get("auth", "htpasswd_cache")
|
||||
logger.info("auth htpasswd cache: %s", self._htpasswd_cache)
|
||||
self._encryption: str = configuration.get("auth", "htpasswd_encryption")
|
||||
logger.info("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s'", self._encryption)
|
||||
|
||||
logger.info("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s'", encryption)
|
||||
self._has_bcrypt = False
|
||||
self._htpasswd_ok = False
|
||||
self._htpasswd_not_ok_reminder_seconds = 60 # currently hardcoded
|
||||
(self._htpasswd_ok, self._htpasswd_bcrypt_use, self._htpasswd, self._htpasswd_size, self._htpasswd_mtime_ns) = self._read_htpasswd(True, False)
|
||||
self._lock = threading.Lock()
|
||||
|
||||
if encryption == "plain":
|
||||
if self._encryption == "plain":
|
||||
self._verify = self._plain
|
||||
elif encryption == "md5":
|
||||
elif self._encryption == "md5":
|
||||
self._verify = self._md5apr1
|
||||
elif encryption == "sha256":
|
||||
elif self._encryption == "sha256":
|
||||
self._verify = self._sha256
|
||||
elif encryption == "sha512":
|
||||
elif self._encryption == "sha512":
|
||||
self._verify = self._sha512
|
||||
elif encryption == "bcrypt" or encryption == "autodetect":
|
||||
elif self._encryption == "bcrypt" or self._encryption == "autodetect":
|
||||
try:
|
||||
import bcrypt
|
||||
except ImportError as e:
|
||||
raise RuntimeError(
|
||||
"The htpasswd encryption method 'bcrypt' or 'autodetect' requires "
|
||||
"the bcrypt module.") from e
|
||||
if encryption == "bcrypt":
|
||||
if (self._encryption == "autodetect") and (self._htpasswd_bcrypt_use == 0):
|
||||
logger.warning("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s' which can require bycrypt module, but currently no entries found", self._encryption)
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"The htpasswd encryption method 'bcrypt' or 'autodetect' requires "
|
||||
"the bcrypt module (entries found: %d)." % self._htpasswd_bcrypt_use) from e
|
||||
else:
|
||||
self._has_bcrypt = True
|
||||
if self._encryption == "autodetect":
|
||||
if self._htpasswd_bcrypt_use == 0:
|
||||
logger.info("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s' and bycrypt module found, but currently not required", self._encryption)
|
||||
else:
|
||||
logger.info("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s' and bycrypt module found (bcrypt entries found: %d)", self._encryption, self._htpasswd_bcrypt_use)
|
||||
if self._encryption == "bcrypt":
|
||||
self._verify = functools.partial(self._bcrypt, bcrypt)
|
||||
else:
|
||||
self._verify = self._autodetect
|
||||
self._verify_bcrypt = functools.partial(self._bcrypt, bcrypt)
|
||||
if self._htpasswd_bcrypt_use:
|
||||
self._verify_bcrypt = functools.partial(self._bcrypt, bcrypt)
|
||||
else:
|
||||
raise RuntimeError("The htpasswd encryption method %r is not "
|
||||
"supported." % encryption)
|
||||
"supported." % self._encryption)
|
||||
|
||||
def _plain(self, hash_value: str, password: str) -> bool:
|
||||
def _plain(self, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
"""Check if ``hash_value`` and ``password`` match, plain method."""
|
||||
return hmac.compare_digest(hash_value.encode(), password.encode())
|
||||
return ("PLAIN", hmac.compare_digest(hash_value.encode(), password.encode()))
|
||||
|
||||
def _bcrypt(self, bcrypt: Any, hash_value: str, password: str) -> bool:
|
||||
return bcrypt.checkpw(password=password.encode('utf-8'), hashed_password=hash_value.encode())
|
||||
def _plain_fallback(self, method_orig, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
"""Check if ``hash_value`` and ``password`` match, plain method / fallback in case of hash length is not matching on autodetection."""
|
||||
info = "PLAIN/fallback as hash length not matching for " + method_orig + ": " + str(len(hash_value))
|
||||
return (info, hmac.compare_digest(hash_value.encode(), password.encode()))
|
||||
|
||||
def _md5apr1(self, hash_value: str, password: str) -> bool:
|
||||
return apr_md5_crypt.verify(password, hash_value.strip())
|
||||
def _bcrypt(self, bcrypt: Any, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
if self._encryption == "autodetect" and len(hash_value) != 60:
|
||||
return self._plain_fallback("BCRYPT", hash_value, password)
|
||||
else:
|
||||
return ("BCRYPT", bcrypt.checkpw(password=password.encode('utf-8'), hashed_password=hash_value.encode()))
|
||||
|
||||
def _sha256(self, hash_value: str, password: str) -> bool:
|
||||
return sha256_crypt.verify(password, hash_value.strip())
|
||||
def _md5apr1(self, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
if self._encryption == "autodetect" and len(hash_value) != 37:
|
||||
return self._plain_fallback("MD5-APR1", hash_value, password)
|
||||
else:
|
||||
return ("MD5-APR1", apr_md5_crypt.verify(password, hash_value.strip()))
|
||||
|
||||
def _sha512(self, hash_value: str, password: str) -> bool:
|
||||
return sha512_crypt.verify(password, hash_value.strip())
|
||||
def _sha256(self, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
if self._encryption == "autodetect" and len(hash_value) != 63:
|
||||
return self._plain_fallback("SHA-256", hash_value, password)
|
||||
else:
|
||||
return ("SHA-256", sha256_crypt.verify(password, hash_value.strip()))
|
||||
|
||||
def _autodetect(self, hash_value: str, password: str) -> bool:
|
||||
if hash_value.startswith("$apr1$", 0, 6) and len(hash_value) == 37:
|
||||
def _sha512(self, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
if self._encryption == "autodetect" and len(hash_value) != 106:
|
||||
return self._plain_fallback("SHA-512", hash_value, password)
|
||||
else:
|
||||
return ("SHA-512", sha512_crypt.verify(password, hash_value.strip()))
|
||||
|
||||
def _autodetect(self, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
if hash_value.startswith("$apr1$", 0, 6):
|
||||
# MD5-APR1
|
||||
return self._md5apr1(hash_value, password)
|
||||
elif hash_value.startswith("$2y$", 0, 4) and len(hash_value) == 60:
|
||||
elif re.match(r"^\$2(a|b|x|y)?\$", hash_value):
|
||||
# BCRYPT
|
||||
return self._verify_bcrypt(hash_value, password)
|
||||
elif hash_value.startswith("$5$", 0, 3) and len(hash_value) == 63:
|
||||
elif hash_value.startswith("$5$", 0, 3):
|
||||
# SHA-256
|
||||
return self._sha256(hash_value, password)
|
||||
elif hash_value.startswith("$6$", 0, 3) and len(hash_value) == 106:
|
||||
elif hash_value.startswith("$6$", 0, 3):
|
||||
# SHA-512
|
||||
return self._sha512(hash_value, password)
|
||||
else:
|
||||
# assumed plaintext
|
||||
return self._plain(hash_value, password)
|
||||
|
||||
def _read_htpasswd(self, init: bool, suppress: bool) -> Tuple[bool, int, dict, int, int]:
|
||||
"""Read htpasswd file
|
||||
|
||||
init == True: stop on error
|
||||
init == False: warn/skip on error and set mark to log reminder every interval
|
||||
suppress == True: suppress warnings, change info to debug (used in non-caching mode)
|
||||
suppress == False: do not suppress warnings (used in caching mode)
|
||||
|
||||
"""
|
||||
htpasswd_ok = True
|
||||
bcrypt_use = 0
|
||||
if (init is True) or (suppress is True):
|
||||
info = "Read"
|
||||
else:
|
||||
info = "Re-read"
|
||||
if suppress is False:
|
||||
logger.info("%s content of htpasswd file start: %r", info, self._filename)
|
||||
else:
|
||||
logger.debug("%s content of htpasswd file start: %r", info, self._filename)
|
||||
htpasswd: dict[str, str] = dict()
|
||||
entries = 0
|
||||
duplicates = 0
|
||||
errors = 0
|
||||
try:
|
||||
with open(self._filename, encoding=self._encoding) as f:
|
||||
line_num = 0
|
||||
for line in f:
|
||||
line_num += 1
|
||||
line = line.rstrip("\n")
|
||||
if line.lstrip() and not line.lstrip().startswith("#"):
|
||||
try:
|
||||
login, digest = line.split(":", maxsplit=1)
|
||||
skip = False
|
||||
if login == "" or digest == "":
|
||||
if init is True:
|
||||
raise ValueError("htpasswd file contains problematic line not matching <login>:<digest> in line: %d" % line_num)
|
||||
else:
|
||||
errors += 1
|
||||
logger.warning("htpasswd file contains problematic line not matching <login>:<digest> in line: %d (ignored)", line_num)
|
||||
htpasswd_ok = False
|
||||
skip = True
|
||||
else:
|
||||
if htpasswd.get(login):
|
||||
duplicates += 1
|
||||
if init is True:
|
||||
raise ValueError("htpasswd file contains duplicate login: '%s'", login, line_num)
|
||||
else:
|
||||
logger.warning("htpasswd file contains duplicate login: '%s' (line: %d / ignored)", login, line_num)
|
||||
htpasswd_ok = False
|
||||
skip = True
|
||||
else:
|
||||
if re.match(r"^\$2(a|b|x|y)?\$", digest) and len(digest) == 60:
|
||||
if init is True:
|
||||
bcrypt_use += 1
|
||||
else:
|
||||
if self._has_bcrypt is False:
|
||||
logger.warning("htpasswd file contains bcrypt digest login: '%s' (line: %d / ignored because module is not loaded)", login, line_num)
|
||||
skip = True
|
||||
htpasswd_ok = False
|
||||
if skip is False:
|
||||
htpasswd[login] = digest
|
||||
entries += 1
|
||||
except ValueError as e:
|
||||
if init is True:
|
||||
raise RuntimeError("Invalid htpasswd file %r: %s" % (self._filename, e)) from e
|
||||
except OSError as e:
|
||||
if init is True:
|
||||
raise RuntimeError("Failed to load htpasswd file %r: %s" % (self._filename, e)) from e
|
||||
else:
|
||||
logger.warning("Failed to load htpasswd file on re-read: %r" % self._filename)
|
||||
htpasswd_ok = False
|
||||
htpasswd_size = os.stat(self._filename).st_size
|
||||
htpasswd_mtime_ns = os.stat(self._filename).st_mtime_ns
|
||||
if suppress is False:
|
||||
logger.info("%s content of htpasswd file done: %r (entries: %d, duplicates: %d, errors: %d)", info, self._filename, entries, duplicates, errors)
|
||||
else:
|
||||
logger.debug("%s content of htpasswd file done: %r (entries: %d, duplicates: %d, errors: %d)", info, self._filename, entries, duplicates, errors)
|
||||
if htpasswd_ok is True:
|
||||
self._htpasswd_not_ok_time = 0
|
||||
else:
|
||||
self._htpasswd_not_ok_time = time.time()
|
||||
return (htpasswd_ok, bcrypt_use, htpasswd, htpasswd_size, htpasswd_mtime_ns)
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
"""Validate credentials.
|
||||
|
||||
|
@ -134,30 +268,52 @@ class Auth(auth.BaseAuth):
|
|||
hash (encrypted password) and check hash against password,
|
||||
using the method specified in the Radicale config.
|
||||
|
||||
The content of the file is not cached because reading is generally a
|
||||
very cheap operation, and it's useful to get live updates of the
|
||||
htpasswd file.
|
||||
Optional: the content of the file is cached and live updates will be detected by
|
||||
comparing mtime_ns and size
|
||||
|
||||
"""
|
||||
try:
|
||||
with open(self._filename, encoding=self._encoding) as f:
|
||||
for line in f:
|
||||
line = line.rstrip("\n")
|
||||
if line.lstrip() and not line.lstrip().startswith("#"):
|
||||
try:
|
||||
hash_login, hash_value = line.split(
|
||||
":", maxsplit=1)
|
||||
# Always compare both login and password to avoid
|
||||
# timing attacks, see #591.
|
||||
login_ok = hmac.compare_digest(
|
||||
hash_login.encode(), login.encode())
|
||||
password_ok = self._verify(hash_value, password)
|
||||
if login_ok and password_ok:
|
||||
return login
|
||||
except ValueError as e:
|
||||
raise RuntimeError("Invalid htpasswd file %r: %s" %
|
||||
(self._filename, e)) from e
|
||||
except OSError as e:
|
||||
raise RuntimeError("Failed to load htpasswd file %r: %s" %
|
||||
(self._filename, e)) from e
|
||||
login_ok = False
|
||||
digest: str
|
||||
if self._htpasswd_cache is True:
|
||||
# check and re-read file if required
|
||||
with self._lock:
|
||||
htpasswd_size = os.stat(self._filename).st_size
|
||||
htpasswd_mtime_ns = os.stat(self._filename).st_mtime_ns
|
||||
if (htpasswd_size != self._htpasswd_size) or (htpasswd_mtime_ns != self._htpasswd_mtime_ns):
|
||||
(self._htpasswd_ok, self._htpasswd_bcrypt_use, self._htpasswd, self._htpasswd_size, self._htpasswd_mtime_ns) = self._read_htpasswd(False, False)
|
||||
self._htpasswd_not_ok_time = 0
|
||||
|
||||
# log reminder of problemantic file every interval
|
||||
current_time = time.time()
|
||||
if (self._htpasswd_ok is False):
|
||||
if (self._htpasswd_not_ok_time > 0):
|
||||
if (current_time - self._htpasswd_not_ok_time) > self._htpasswd_not_ok_reminder_seconds:
|
||||
logger.warning("htpasswd file still contains issues (REMINDER, check warnings in the past): %r" % self._filename)
|
||||
self._htpasswd_not_ok_time = current_time
|
||||
else:
|
||||
self._htpasswd_not_ok_time = current_time
|
||||
|
||||
if self._htpasswd.get(login):
|
||||
digest = self._htpasswd[login]
|
||||
login_ok = True
|
||||
else:
|
||||
# read file on every request
|
||||
(htpasswd_ok, htpasswd_bcrypt_use, htpasswd, htpasswd_size, htpasswd_mtime_ns) = self._read_htpasswd(False, True)
|
||||
if htpasswd.get(login):
|
||||
digest = htpasswd[login]
|
||||
login_ok = True
|
||||
|
||||
if login_ok is True:
|
||||
try:
|
||||
(method, password_ok) = self._verify(digest, password)
|
||||
except ValueError as e:
|
||||
logger.error("Login verification failed for user: '%s' (htpasswd/%s) with errror '%s'", login, self._encryption, e)
|
||||
return ""
|
||||
if password_ok:
|
||||
logger.debug("Login verification successful for user: '%s' (htpasswd/%s/%s)", login, self._encryption, method)
|
||||
return login
|
||||
else:
|
||||
logger.warning("Login verification failed for user: '%s' (htpasswd/%s/%s)", login, self._encryption, method)
|
||||
else:
|
||||
logger.warning("Login verification user not found (htpasswd): '%s'", login)
|
||||
return ""
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
|
73
radicale/auth/imap.py
Normal file
73
radicale/auth/imap.py
Normal file
|
@ -0,0 +1,73 @@
|
|||
# RadicaleIMAP IMAP authentication plugin for Radicale.
|
||||
# Copyright © 2017, 2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import imaplib
|
||||
import ssl
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
"""Authenticate user with IMAP."""
|
||||
|
||||
def __init__(self, configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
self._host, self._port = self.configuration.get("auth", "imap_host")
|
||||
logger.info("auth imap host: %r", self._host)
|
||||
self._security = self.configuration.get("auth", "imap_security")
|
||||
if self._security == "none":
|
||||
logger.warning("auth imap security: %s (INSECURE, credentials are transmitted in clear text)", self._security)
|
||||
else:
|
||||
logger.info("auth imap security: %s", self._security)
|
||||
if self._security == "tls":
|
||||
if self._port is None:
|
||||
self._port = 993
|
||||
logger.info("auth imap port (autoselected): %d", self._port)
|
||||
else:
|
||||
logger.info("auth imap port: %d", self._port)
|
||||
else:
|
||||
if self._port is None:
|
||||
self._port = 143
|
||||
logger.info("auth imap port (autoselected): %d", self._port)
|
||||
else:
|
||||
logger.info("auth imap port: %d", self._port)
|
||||
|
||||
def _login(self, login, password) -> str:
|
||||
try:
|
||||
connection: imaplib.IMAP4 | imaplib.IMAP4_SSL
|
||||
if self._security == "tls":
|
||||
connection = imaplib.IMAP4_SSL(
|
||||
host=self._host, port=self._port,
|
||||
ssl_context=ssl.create_default_context())
|
||||
else:
|
||||
connection = imaplib.IMAP4(host=self._host, port=self._port)
|
||||
if self._security == "starttls":
|
||||
connection.starttls(ssl.create_default_context())
|
||||
try:
|
||||
connection.authenticate(
|
||||
"PLAIN",
|
||||
lambda _: "{0}\x00{0}\x00{1}".format(login, password).encode(),
|
||||
)
|
||||
except imaplib.IMAP4.error as e:
|
||||
logger.warning("IMAP authentication failed for user %r: %s", login, e, exc_info=False)
|
||||
return ""
|
||||
connection.logout()
|
||||
return login
|
||||
except (OSError, imaplib.IMAP4.error) as e:
|
||||
logger.error("Failed to communicate with IMAP server %r: %s" % ("[%s]:%d" % (self._host, self._port), e))
|
||||
return ""
|
|
@ -15,15 +15,16 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
Authentication backend that checks credentials with a ldap server.
|
||||
Authentication backend that checks credentials with a LDAP server.
|
||||
Following parameters are needed in the configuration:
|
||||
ldap_uri The ldap url to the server like ldap://localhost
|
||||
ldap_base The baseDN of the ldap server
|
||||
ldap_reader_dn The DN of a ldap user with read access to get the user accounts
|
||||
ldap_secret The password of the ldap_reader_dn
|
||||
ldap_secret_file The path of the file containing the password of the ldap_reader_dn
|
||||
ldap_filter The search filter to find the user to authenticate by the username
|
||||
ldap_load_groups If the groups of the authenticated users need to be loaded
|
||||
ldap_uri The LDAP URL to the server like ldap://localhost
|
||||
ldap_base The baseDN of the LDAP server
|
||||
ldap_reader_dn The DN of a LDAP user with read access to get the user accounts
|
||||
ldap_secret The password of the ldap_reader_dn
|
||||
ldap_secret_file The path of the file containing the password of the ldap_reader_dn
|
||||
ldap_filter The search filter to find the user to authenticate by the username
|
||||
ldap_user_attribute The attribute to be used as username after authentication
|
||||
ldap_groups_attribute The attribute containing group memberships in the LDAP user entry
|
||||
Following parameters controls SSL connections:
|
||||
ldap_use_ssl If the connection
|
||||
ldap_ssl_verify_mode The certificate verification mode. NONE, OPTIONAL, default is REQUIRED
|
||||
|
@ -42,8 +43,10 @@ class Auth(auth.BaseAuth):
|
|||
_ldap_reader_dn: str
|
||||
_ldap_secret: str
|
||||
_ldap_filter: str
|
||||
_ldap_load_groups: bool
|
||||
_ldap_version: int = 3
|
||||
_ldap_attributes: list[str] = []
|
||||
_ldap_user_attr: str
|
||||
_ldap_groups_attr: str
|
||||
_ldap_module_version: int = 3
|
||||
_ldap_use_ssl: bool = False
|
||||
_ldap_ssl_verify_mode: int = ssl.CERT_REQUIRED
|
||||
_ldap_ssl_ca_file: str = ""
|
||||
|
@ -56,21 +59,28 @@ class Auth(auth.BaseAuth):
|
|||
except ImportError:
|
||||
try:
|
||||
import ldap
|
||||
self._ldap_version = 2
|
||||
self._ldap_module_version = 2
|
||||
self.ldap = ldap
|
||||
except ImportError as e:
|
||||
raise RuntimeError("LDAP authentication requires the ldap3 module") from e
|
||||
|
||||
self._ldap_ignore_attribute_create_modify_timestamp = configuration.get("auth", "ldap_ignore_attribute_create_modify_timestamp")
|
||||
if self._ldap_ignore_attribute_create_modify_timestamp:
|
||||
self.ldap3.utils.config._ATTRIBUTES_EXCLUDED_FROM_CHECK.extend(['createTimestamp', 'modifyTimestamp'])
|
||||
logger.info("auth.ldap_ignore_attribute_create_modify_timestamp applied")
|
||||
|
||||
self._ldap_uri = configuration.get("auth", "ldap_uri")
|
||||
self._ldap_base = configuration.get("auth", "ldap_base")
|
||||
self._ldap_reader_dn = configuration.get("auth", "ldap_reader_dn")
|
||||
self._ldap_load_groups = configuration.get("auth", "ldap_load_groups")
|
||||
self._ldap_secret = configuration.get("auth", "ldap_secret")
|
||||
self._ldap_filter = configuration.get("auth", "ldap_filter")
|
||||
self._ldap_user_attr = configuration.get("auth", "ldap_user_attribute")
|
||||
self._ldap_groups_attr = configuration.get("auth", "ldap_groups_attribute")
|
||||
ldap_secret_file_path = configuration.get("auth", "ldap_secret_file")
|
||||
if ldap_secret_file_path:
|
||||
with open(ldap_secret_file_path, 'r') as file:
|
||||
self._ldap_secret = file.read().rstrip('\n')
|
||||
if self._ldap_version == 3:
|
||||
if self._ldap_module_version == 3:
|
||||
self._ldap_use_ssl = configuration.get("auth", "ldap_use_ssl")
|
||||
if self._ldap_use_ssl:
|
||||
self._ldap_ssl_ca_file = configuration.get("auth", "ldap_ssl_ca_file")
|
||||
|
@ -82,8 +92,15 @@ class Auth(auth.BaseAuth):
|
|||
logger.info("auth.ldap_uri : %r" % self._ldap_uri)
|
||||
logger.info("auth.ldap_base : %r" % self._ldap_base)
|
||||
logger.info("auth.ldap_reader_dn : %r" % self._ldap_reader_dn)
|
||||
logger.info("auth.ldap_load_groups : %s" % self._ldap_load_groups)
|
||||
logger.info("auth.ldap_filter : %r" % self._ldap_filter)
|
||||
if self._ldap_user_attr:
|
||||
logger.info("auth.ldap_user_attribute : %r" % self._ldap_user_attr)
|
||||
else:
|
||||
logger.info("auth.ldap_user_attribute : (not provided)")
|
||||
if self._ldap_groups_attr:
|
||||
logger.info("auth.ldap_groups_attribute: %r" % self._ldap_groups_attr)
|
||||
else:
|
||||
logger.info("auth.ldap_groups_attribute: (not provided)")
|
||||
if ldap_secret_file_path:
|
||||
logger.info("auth.ldap_secret_file_path: %r" % ldap_secret_file_path)
|
||||
if self._ldap_secret:
|
||||
|
@ -94,7 +111,7 @@ class Auth(auth.BaseAuth):
|
|||
logger.info("auth.ldap_secret : (from config)")
|
||||
if self._ldap_reader_dn and not self._ldap_secret:
|
||||
logger.error("auth.ldap_secret : (not provided)")
|
||||
raise RuntimeError("LDAP authentication requires ldap_secret for reader_dn")
|
||||
raise RuntimeError("LDAP authentication requires ldap_secret for ldap_reader_dn")
|
||||
logger.info("auth.ldap_use_ssl : %s" % self._ldap_use_ssl)
|
||||
if self._ldap_use_ssl is True:
|
||||
logger.info("auth.ldap_ssl_verify_mode : %s" % self._ldap_ssl_verify_mode)
|
||||
|
@ -102,6 +119,12 @@ class Auth(auth.BaseAuth):
|
|||
logger.info("auth.ldap_ssl_ca_file : %r" % self._ldap_ssl_ca_file)
|
||||
else:
|
||||
logger.info("auth.ldap_ssl_ca_file : (not provided)")
|
||||
"""Extend attributes to to be returned in the user query"""
|
||||
if self._ldap_groups_attr:
|
||||
self._ldap_attributes.append(self._ldap_groups_attr)
|
||||
if self._ldap_user_attr:
|
||||
self._ldap_attributes.append(self._ldap_user_attr)
|
||||
logger.info("ldap_attributes : %r" % self._ldap_attributes)
|
||||
|
||||
def _login2(self, login: str, password: str) -> str:
|
||||
try:
|
||||
|
@ -112,16 +135,25 @@ class Auth(auth.BaseAuth):
|
|||
conn.set_option(self.ldap.OPT_REFERRALS, 0)
|
||||
conn.simple_bind_s(self._ldap_reader_dn, self._ldap_secret)
|
||||
"""Search for the dn of user to authenticate"""
|
||||
res = conn.search_s(self._ldap_base, self.ldap.SCOPE_SUBTREE, filterstr=self._ldap_filter.format(login), attrlist=['memberOf'])
|
||||
if len(res) == 0:
|
||||
"""User could not be find"""
|
||||
escaped_login = self.ldap.filter.escape_filter_chars(login)
|
||||
logger.debug(f"_login2 login escaped for LDAP filters: {escaped_login}")
|
||||
res = conn.search_s(
|
||||
self._ldap_base,
|
||||
self.ldap.SCOPE_SUBTREE,
|
||||
filterstr=self._ldap_filter.format(escaped_login),
|
||||
attrlist=self._ldap_attributes
|
||||
)
|
||||
if len(res) != 1:
|
||||
"""User could not be found unambiguously"""
|
||||
logger.debug(f"_login2 no unique DN found for '{login}'")
|
||||
return ""
|
||||
user_dn = res[0][0]
|
||||
logger.debug("LDAP Auth user: %s", user_dn)
|
||||
"""Close ldap connection"""
|
||||
user_entry = res[0]
|
||||
user_dn = user_entry[0]
|
||||
logger.debug(f"_login2 found LDAP user DN {user_dn}")
|
||||
"""Close LDAP connection"""
|
||||
conn.unbind()
|
||||
except Exception as e:
|
||||
raise RuntimeError(f"Invalid ldap configuration:{e}")
|
||||
raise RuntimeError(f"Invalid LDAP configuration:{e}")
|
||||
|
||||
try:
|
||||
"""Bind as user to authenticate"""
|
||||
|
@ -130,13 +162,24 @@ class Auth(auth.BaseAuth):
|
|||
conn.set_option(self.ldap.OPT_REFERRALS, 0)
|
||||
conn.simple_bind_s(user_dn, password)
|
||||
tmp: list[str] = []
|
||||
if self._ldap_load_groups:
|
||||
if self._ldap_groups_attr:
|
||||
tmp = []
|
||||
for t in res[0][1]['memberOf']:
|
||||
tmp.append(t.decode('utf-8').split(',')[0][3:])
|
||||
for g in user_entry[1][self._ldap_groups_attr]:
|
||||
"""Get group g's RDN's attribute value"""
|
||||
try:
|
||||
rdns = self.ldap.dn.explode_dn(g, notypes=True)
|
||||
tmp.append(rdns[0])
|
||||
except Exception:
|
||||
tmp.append(g.decode('utf8'))
|
||||
self._ldap_groups = set(tmp)
|
||||
logger.debug("LDAP Auth groups of user: %s", ",".join(self._ldap_groups))
|
||||
logger.debug("_login2 LDAP groups of user: %s", ",".join(self._ldap_groups))
|
||||
if self._ldap_user_attr:
|
||||
if user_entry[1][self._ldap_user_attr]:
|
||||
tmplogin = user_entry[1][self._ldap_user_attr][0]
|
||||
login = tmplogin.decode('utf-8')
|
||||
logger.debug(f"_login2 user set to: '{login}'")
|
||||
conn.unbind()
|
||||
logger.debug(f"_login2 {login} successfully authenticated")
|
||||
return login
|
||||
except self.ldap.INVALID_CREDENTIALS:
|
||||
return ""
|
||||
|
@ -157,57 +200,70 @@ class Auth(auth.BaseAuth):
|
|||
server = self.ldap3.Server(self._ldap_uri)
|
||||
conn = self.ldap3.Connection(server, self._ldap_reader_dn, password=self._ldap_secret)
|
||||
except self.ldap3.core.exceptions.LDAPSocketOpenError:
|
||||
raise RuntimeError("Unable to reach ldap server")
|
||||
raise RuntimeError("Unable to reach LDAP server")
|
||||
except Exception as e:
|
||||
logger.debug(f"_login3 error 1 {e}")
|
||||
pass
|
||||
|
||||
if not conn.bind():
|
||||
logger.debug("_login3 can not bind")
|
||||
raise RuntimeError("Unable to read from ldap server")
|
||||
logger.debug("_login3 cannot bind")
|
||||
raise RuntimeError("Unable to read from LDAP server")
|
||||
|
||||
logger.debug(f"_login3 bind as {self._ldap_reader_dn}")
|
||||
"""Search the user dn"""
|
||||
escaped_login = self.ldap3.utils.conv.escape_filter_chars(login)
|
||||
logger.debug(f"_login3 login escaped for LDAP filters: {escaped_login}")
|
||||
conn.search(
|
||||
search_base=self._ldap_base,
|
||||
search_filter=self._ldap_filter.format(login),
|
||||
search_filter=self._ldap_filter.format(escaped_login),
|
||||
search_scope=self.ldap3.SUBTREE,
|
||||
attributes=['memberOf']
|
||||
attributes=self._ldap_attributes
|
||||
)
|
||||
if len(conn.entries) == 0:
|
||||
logger.debug(f"_login3 user '{login}' can not be find")
|
||||
"""User could not be find"""
|
||||
if len(conn.entries) != 1:
|
||||
"""User could not be found unambiguously"""
|
||||
logger.debug(f"_login3 no unique DN found for '{login}'")
|
||||
return ""
|
||||
|
||||
user_entry = conn.response[0]
|
||||
conn.unbind()
|
||||
user_dn = user_entry['dn']
|
||||
logger.debug(f"_login3 found user_dn {user_dn}")
|
||||
logger.debug(f"_login3 found LDAP user DN {user_dn}")
|
||||
try:
|
||||
"""Try to bind as the user itself"""
|
||||
conn = self.ldap3.Connection(server, user_dn, password=password)
|
||||
if not conn.bind():
|
||||
logger.debug(f"_login3 user '{login}' can not be find")
|
||||
logger.debug(f"_login3 user '{login}' cannot be found")
|
||||
return ""
|
||||
if self._ldap_load_groups:
|
||||
tmp: list[str] = []
|
||||
if self._ldap_groups_attr:
|
||||
tmp = []
|
||||
for g in user_entry['attributes']['memberOf']:
|
||||
tmp.append(g.split(',')[0][3:])
|
||||
for g in user_entry['attributes'][self._ldap_groups_attr]:
|
||||
"""Get group g's RDN's attribute value"""
|
||||
try:
|
||||
rdns = self.ldap3.utils.dn.parse_dn(g)
|
||||
tmp.append(rdns[0][1])
|
||||
except Exception:
|
||||
tmp.append(g)
|
||||
self._ldap_groups = set(tmp)
|
||||
logger.debug("_login3 LDAP groups of user: %s", ",".join(self._ldap_groups))
|
||||
if self._ldap_user_attr:
|
||||
if user_entry['attributes'][self._ldap_user_attr]:
|
||||
login = user_entry['attributes'][self._ldap_user_attr]
|
||||
logger.debug(f"_login3 user set to: '{login}'")
|
||||
conn.unbind()
|
||||
logger.debug(f"_login3 {login} successfully authorized")
|
||||
logger.debug(f"_login3 {login} successfully authenticated")
|
||||
return login
|
||||
except Exception as e:
|
||||
logger.debug(f"_login3 error 2 {e}")
|
||||
pass
|
||||
return ""
|
||||
|
||||
def login(self, login: str, password: str) -> str:
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
"""Validate credentials.
|
||||
In first step we make a connection to the ldap server with the ldap_reader_dn credential.
|
||||
In first step we make a connection to the LDAP server with the ldap_reader_dn credential.
|
||||
In next step the DN of the user to authenticate will be searched.
|
||||
In the last step the authentication of the user will be proceeded.
|
||||
"""
|
||||
if self._ldap_version == 2:
|
||||
if self._ldap_module_version == 2:
|
||||
return self._login2(login, password)
|
||||
return self._login3(login, password)
|
||||
|
|
66
radicale/auth/oauth2.py
Normal file
66
radicale/auth/oauth2.py
Normal file
|
@ -0,0 +1,66 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
#
|
||||
# Original from https://gitlab.mim-libre.fr/alphabet/radicale_oauth/
|
||||
# Copyright © 2021-2022 Bruno Boiget
|
||||
# Copyright © 2022-2022 Daniel Dehennin
|
||||
#
|
||||
# Since migration into upstream
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Authentication backend that checks credentials against an oauth2 server auth endpoint
|
||||
"""
|
||||
|
||||
import requests
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
    """Authenticate against an OAuth2 token endpoint ("password" grant).

    Credentials are accepted when the endpoint answers 200 with a JSON
    body containing an ``access_token``.
    """

    def __init__(self, configuration):
        super().__init__(configuration)
        self._endpoint = configuration.get("auth", "oauth2_token_endpoint")
        if not self._endpoint:
            logger.error("auth.oauth2_token_endpoint URL missing")
            raise RuntimeError("OAuth2 token endpoint URL is required")
        logger.info("auth OAuth2 token endpoint: %s" % (self._endpoint))

    def _login(self, login, password):
        """Validate credentials.

        Sends login credentials to the OAuth2 token endpoint and checks
        that a token is returned.  Returns ``login`` on success, ``""``
        on failure.
        """
        try:
            # authenticate to authentication endpoint and return login if ok, else ""
            req_params = {
                "username": login,
                "password": password,
                "grant_type": "password",
                "client_id": "radicale",
            }
            req_headers = {"Content-Type": "application/x-www-form-urlencoded"}
            response = requests.post(
                self._endpoint, data=req_params, headers=req_headers
            )
            if response.status_code == requests.codes.ok:
                try:
                    body = response.json()
                except ValueError:
                    # BUGFIX: a 200 answer with a non-JSON body used to raise
                    # JSONDecodeError (a ValueError, not caught by "except
                    # OSError" below) and crash the request; treat it as a
                    # failed authentication instead.
                    logger.error("OAuth2 server %s returned a non-JSON response" % self._endpoint)
                    body = {}
                if "access_token" in body:
                    return login
        except OSError as e:
            # requests' connection errors derive from OSError
            logger.critical("Failed to authenticate against OAuth2 server %s: %s" % (self._endpoint, e))
        logger.warning("User failed to authenticate using OAuth2: %r" % login)
        return ""
|
105
radicale/auth/pam.py
Normal file
105
radicale/auth/pam.py
Normal file
|
@ -0,0 +1,105 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2011 Henry-Nicolas Tourneur
|
||||
# Copyright © 2021-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
PAM authentication.
|
||||
|
||||
Authentication using the ``pam-python`` module.
|
||||
|
||||
Important: radicale user need access to /etc/shadow by e.g.
|
||||
chgrp radicale /etc/shadow
|
||||
chmod g+r
|
||||
"""
|
||||
|
||||
import grp
|
||||
import pwd
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
    """PAM authentication backend (requires the Python ``pam`` module)."""

    def __init__(self, configuration) -> None:
        super().__init__(configuration)
        try:
            import pam
            self.pam = pam
        except ImportError as e:
            raise RuntimeError("PAM authentication requires the Python pam module") from e
        self._service = configuration.get("auth", "pam_service")
        logger.info("auth.pam_service: %s" % self._service)
        self._group_membership = configuration.get("auth", "pam_group_membership")
        if not self._group_membership:
            logger.warning("auth.pam_group_membership: (empty, nothing to check / INSECURE)")
        else:
            logger.info("auth.pam_group_membership: %s" % self._group_membership)

    def pam_authenticate(self, *args, **kwargs):
        # Thin wrapper around pam.authenticate (separable for testing).
        return self.pam.authenticate(*args, **kwargs)

    def _login(self, login: str, password: str) -> str:
        """Check if ``user``/``password`` couple is valid."""
        if login is None or password is None:
            return ""

        # The user must exist in the PAM system.
        try:
            user_record = pwd.getpwnam(login)
        except KeyError:
            logger.debug("PAM user not found: %r" % login)
            return ""
        logger.debug("PAM user found: %r" % login)

        # A primary group is mandatory.
        try:
            primary_group = grp.getgrgid(user_record.pw_gid).gr_name
        except KeyError:
            logger.debug("PAM user has no primary group: %r" % login)
            return ""
        logger.debug("PAM user %r has primary group: %r" % (login, primary_group))

        if self._group_membership:
            # Resolve the supplementary members of the required group,
            # then verify the user belongs to it (primary or supplementary).
            try:
                supplementary_members = grp.getgrnam(self._group_membership).gr_mem
            except KeyError:
                logger.debug(
                    "PAM membership required group doesn't exist: %r" %
                    self._group_membership)
                return ""
            if (primary_group != self._group_membership
                    and login not in supplementary_members):
                logger.warning("PAM user %r belongs not to the required group: %r" % (login, self._group_membership))
                return ""
            logger.debug("PAM user %r belongs to the required group: %r" % (login, self._group_membership))

        # Finally verify the password via PAM.
        if self.pam_authenticate(login, password, service=self._service):
            return login
        logger.debug("PAM authentication not successful for user: %r (service %r)" % (login, self._service))
        return ""
|
|
@ -2,7 +2,7 @@
|
|||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2017-2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -104,6 +104,29 @@ def _convert_to_bool(value: Any) -> bool:
|
|||
return RawConfigParser.BOOLEAN_STATES[value.lower()]
|
||||
|
||||
|
||||
def imap_address(value):
    """Parse an IMAP ``address[:port]`` setting into ``(host, port)``.

    ``port`` is ``None`` when omitted.  IPv6 literals may be bracketed,
    e.g. ``[::1]:143``.  Raises ``ValueError`` on a malformed port.
    """
    # Split off a bracketed host part first so colons inside an IPv6
    # literal are not mistaken for the port separator.
    bracket_part, _, rest = value.rpartition("]")
    if ":" in rest:
        host_part, _, port_text = rest.rpartition(":")
    else:
        host_part, port_text = rest, None
    host = bracket_part + host_part
    try:
        port = None if port_text is None else int(port_text)
    except ValueError:
        raise ValueError("malformed IMAP address: %r" % value)
    return (host.strip(string.whitespace + "[]"), port)
|
||||
|
||||
|
||||
def imap_security(value):
    """Validate an IMAP security mode; one of "tls", "starttls", "none"."""
    allowed = ("tls", "starttls", "none")
    if value in allowed:
        return value
    raise ValueError("unsupported IMAP security: %r" % value)
|
||||
|
||||
|
||||
def json_str(value: Any) -> dict:
|
||||
if not value:
|
||||
return {}
|
||||
|
@ -164,6 +187,10 @@ DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
|
|||
"help": "set CA certificate for validating clients",
|
||||
"aliases": ("--certificate-authority",),
|
||||
"type": filepath}),
|
||||
("script_name", {
|
||||
"value": "",
|
||||
"help": "script name to strip from URI if called by reverse proxy (default taken from HTTP_X_SCRIPT_NAME or SCRIPT_NAME)",
|
||||
"type": str}),
|
||||
("_internal_server", {
|
||||
"value": "False",
|
||||
"help": "the internal server is used",
|
||||
|
@ -179,10 +206,22 @@ DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
|
|||
"type": str})])),
|
||||
("auth", OrderedDict([
|
||||
("type", {
|
||||
"value": "none",
|
||||
"help": "authentication method",
|
||||
"value": "denyall",
|
||||
"help": "authentication method (" + "|".join(auth.INTERNAL_TYPES) + ")",
|
||||
"type": str_or_callable,
|
||||
"internal": auth.INTERNAL_TYPES}),
|
||||
("cache_logins", {
|
||||
"value": "false",
|
||||
"help": "cache successful/failed logins for until expiration time",
|
||||
"type": bool}),
|
||||
("cache_successful_logins_expiry", {
|
||||
"value": "15",
|
||||
"help": "expiration time for caching successful logins in seconds",
|
||||
"type": int}),
|
||||
("cache_failed_logins_expiry", {
|
||||
"value": "90",
|
||||
"help": "expiration time for caching failed logins in seconds",
|
||||
"type": int}),
|
||||
("htpasswd_filename", {
|
||||
"value": "/etc/radicale/users",
|
||||
"help": "htpasswd filename",
|
||||
|
@ -191,10 +230,27 @@ DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
|
|||
"value": "autodetect",
|
||||
"help": "htpasswd encryption method",
|
||||
"type": str}),
|
||||
("htpasswd_cache", {
|
||||
"value": "False",
|
||||
"help": "enable caching of htpasswd file",
|
||||
"type": bool}),
|
||||
("dovecot_connection_type", {
|
||||
"value": "AF_UNIX",
|
||||
"help": "Connection type for dovecot authentication",
|
||||
"type": str_or_callable,
|
||||
"internal": auth.AUTH_SOCKET_FAMILY}),
|
||||
("dovecot_socket", {
|
||||
"value": "/var/run/dovecot/auth-client",
|
||||
"help": "dovecot auth socket",
|
||||
"help": "dovecot auth AF_UNIX socket",
|
||||
"type": str}),
|
||||
("dovecot_host", {
|
||||
"value": "localhost",
|
||||
"help": "dovecot auth AF_INET or AF_INET6 host",
|
||||
"type": str}),
|
||||
("dovecot_port", {
|
||||
"value": "12345",
|
||||
"help": "dovecot auth port",
|
||||
"type": int}),
|
||||
("realm", {
|
||||
"value": "Radicale - Password Required",
|
||||
"help": "message displayed when a password is needed",
|
||||
|
@ -203,6 +259,10 @@ DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
|
|||
"value": "1",
|
||||
"help": "incorrect authentication delay",
|
||||
"type": positive_float}),
|
||||
("ldap_ignore_attribute_create_modify_timestamp", {
|
||||
"value": "false",
|
||||
"help": "Ignore modifyTimestamp and createTimestamp attributes. Need if Authentik LDAP server is used.",
|
||||
"type": bool}),
|
||||
("ldap_uri", {
|
||||
"value": "ldap://localhost",
|
||||
"help": "URI to the ldap server",
|
||||
|
@ -227,10 +287,14 @@ DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
|
|||
"value": "(cn={0})",
|
||||
"help": "the search filter to find the user DN to authenticate by the username",
|
||||
"type": str}),
|
||||
("ldap_load_groups", {
|
||||
"value": "False",
|
||||
"help": "load the ldap groups of the authenticated user",
|
||||
"type": bool}),
|
||||
("ldap_user_attribute", {
|
||||
"value": "",
|
||||
"help": "the attribute to be used as username after authentication",
|
||||
"type": str}),
|
||||
("ldap_groups_attribute", {
|
||||
"value": "",
|
||||
"help": "attribute to read the group memberships from",
|
||||
"type": str}),
|
||||
("ldap_use_ssl", {
|
||||
"value": "False",
|
||||
"help": "Use ssl on the ldap connection",
|
||||
|
@ -243,6 +307,26 @@ DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
|
|||
"value": "",
|
||||
"help": "The path to the CA file in pem format which is used to certificate the server certificate",
|
||||
"type": str}),
|
||||
("imap_host", {
|
||||
"value": "localhost",
|
||||
"help": "IMAP server hostname: address|address:port|[address]:port|*localhost*",
|
||||
"type": imap_address}),
|
||||
("imap_security", {
|
||||
"value": "tls",
|
||||
"help": "Secure the IMAP connection: *tls*|starttls|none",
|
||||
"type": imap_security}),
|
||||
("oauth2_token_endpoint", {
|
||||
"value": "",
|
||||
"help": "OAuth2 token endpoint URL",
|
||||
"type": str}),
|
||||
("pam_group_membership", {
|
||||
"value": "",
|
||||
"help": "PAM group user should be member of",
|
||||
"type": str}),
|
||||
("pam_service", {
|
||||
"value": "radicale",
|
||||
"help": "PAM service",
|
||||
"type": str}),
|
||||
("strip_domain", {
|
||||
"value": "False",
|
||||
"help": "strip domain from username",
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -79,6 +79,9 @@ REMOTE_DESTINATION: types.WSGIResponse = (
|
|||
DIRECTORY_LISTING: types.WSGIResponse = (
|
||||
client.FORBIDDEN, (("Content-Type", "text/plain"),),
|
||||
"Directory listings are not supported.")
|
||||
INSUFFICIENT_STORAGE: types.WSGIResponse = (
|
||||
client.INSUFFICIENT_STORAGE, (("Content-Type", "text/plain"),),
|
||||
"Insufficient Storage. Please contact the administrator.")
|
||||
INTERNAL_SERVER_ERROR: types.WSGIResponse = (
|
||||
client.INTERNAL_SERVER_ERROR, (("Content-Type", "text/plain"),),
|
||||
"A server error occurred. Please contact the administrator.")
|
||||
|
@ -193,6 +196,24 @@ def _serve_traversable(
|
|||
"%a, %d %b %Y %H:%M:%S GMT",
|
||||
time.gmtime(traversable.stat().st_mtime))
|
||||
answer = traversable.read_bytes()
|
||||
if path == "/.web/index.html" or path == "/.web/":
|
||||
# enable link on the fly in index.html if InfCloud index.html is existing
|
||||
# class="infcloudlink-hidden" -> class="infcloudlink"
|
||||
path_posix = str(traversable)
|
||||
path_posix_infcloud = path_posix.replace("/internal_data/index.html", "/internal_data/infcloud/index.html")
|
||||
if os.path.isfile(path_posix_infcloud):
|
||||
# logger.debug("Enable InfCloud link in served page: %r", path)
|
||||
answer = answer.replace(b"infcloudlink-hidden", b"infcloud")
|
||||
elif path == "/.web/infcloud/config.js":
|
||||
# adjust on the fly default config.js of InfCloud installation
|
||||
# logger.debug("Adjust on-the-fly default InfCloud config.js in served page: %r", path)
|
||||
answer = answer.replace(b"location.pathname.replace(RegExp('/+[^/]+/*(index\\.html)?$'),'')+", b"location.pathname.replace(RegExp('/\\.web\\.infcloud/(index\\.html)?$'),'')+")
|
||||
answer = answer.replace(b"'/caldav.php/',", b"'/',")
|
||||
answer = answer.replace(b"settingsAccount: true,", b"settingsAccount: false,")
|
||||
elif path == "/.web/infcloud/main.js":
|
||||
# adjust on the fly default main.js of InfCloud installation
|
||||
logger.debug("Adjust on-the-fly default InfCloud main.js in served page: %r", path)
|
||||
answer = answer.replace(b"'InfCloud - the open source CalDAV/CardDAV web client'", b"'InfCloud - the open source CalDAV/CardDAV web client - served through Radicale CalDAV/CardDAV server'")
|
||||
return client.OK, headers, answer
|
||||
|
||||
|
||||
|
|
|
@ -221,18 +221,31 @@ def setup() -> None:
|
|||
logger.error("Invalid RADICALE_LOG_FORMAT: %r", format_name)
|
||||
|
||||
|
||||
logger_display_backtrace_disabled: bool = False
|
||||
logger_display_backtrace_enabled: bool = False
|
||||
|
||||
|
||||
def set_level(level: Union[int, str], backtrace_on_debug: bool) -> None:
|
||||
"""Set logging level for global logger."""
|
||||
global logger_display_backtrace_disabled
|
||||
global logger_display_backtrace_enabled
|
||||
if isinstance(level, str):
|
||||
level = getattr(logging, level.upper())
|
||||
assert isinstance(level, int)
|
||||
logger.setLevel(level)
|
||||
if level > logging.DEBUG:
|
||||
logger.info("Logging of backtrace is disabled in this loglevel")
|
||||
if logger_display_backtrace_disabled is False:
|
||||
logger.info("Logging of backtrace is disabled in this loglevel")
|
||||
logger_display_backtrace_disabled = True
|
||||
logger.addFilter(REMOVE_TRACEBACK_FILTER)
|
||||
else:
|
||||
if not backtrace_on_debug:
|
||||
logger.debug("Logging of backtrace is disabled by option in this loglevel")
|
||||
if logger_display_backtrace_disabled is False:
|
||||
logger.debug("Logging of backtrace is disabled by option in this loglevel")
|
||||
logger_display_backtrace_disabled = True
|
||||
logger.addFilter(REMOVE_TRACEBACK_FILTER)
|
||||
else:
|
||||
if logger_display_backtrace_enabled is False:
|
||||
logger.debug("Logging of backtrace is enabled by option in this loglevel")
|
||||
logger_display_backtrace_enabled = True
|
||||
logger.removeFilter(REMOVE_TRACEBACK_FILTER)
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2023 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -58,19 +58,7 @@ elif sys.platform == "win32":
|
|||
|
||||
|
||||
# IPv4 (host, port) and IPv6 (host, port, flowinfo, scopeid)
ADDRESS_TYPE = Union[Tuple[Union[str, bytes, bytearray], int],
                     Tuple[str, int, int, int]]


def format_address(address: ADDRESS_TYPE) -> str:
    """Render a socket address as ``host:port`` (``[host]:port`` for IPv6)."""
    host, port = address[0], address[1]
    if not isinstance(host, str):
        raise NotImplementedError("Unsupported address format: %r" %
                                  (address,))
    # IPv6 hosts contain ":" and must be bracketed to stay unambiguous.
    template = "[%s]:%d" if ":" in host else "%s:%d"
    return template % (host, port)
|
||||
ADDRESS_TYPE = utils.ADDRESS_TYPE
|
||||
|
||||
|
||||
class ParallelHTTPServer(socketserver.ThreadingMixIn,
|
||||
|
@ -226,7 +214,7 @@ class ParallelHTTPSServer(ParallelHTTPServer):
|
|||
except socket.timeout:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise RuntimeError("SSL handshake failed: %s" % e) from e
|
||||
raise RuntimeError("SSL handshake failed: %s client %s" % (e, str(client_address[0]))) from e
|
||||
except Exception:
|
||||
try:
|
||||
self.handle_error(request, client_address)
|
||||
|
@ -262,6 +250,9 @@ class RequestHandler(wsgiref.simple_server.WSGIRequestHandler):
|
|||
def get_environ(self) -> Dict[str, Any]:
|
||||
env = super().get_environ()
|
||||
if isinstance(self.connection, ssl.SSLSocket):
|
||||
env["HTTPS"] = "on"
|
||||
env["SSL_CIPHER"] = self.request.cipher()[0]
|
||||
env["SSL_PROTOCOL"] = self.request.version()
|
||||
# The certificate can be evaluated by the auth module
|
||||
env["REMOTE_CERTIFICATE"] = self.connection.getpeercert()
|
||||
# Parent class only tries latin1 encoding
|
||||
|
@ -318,20 +309,20 @@ def serve(configuration: config.Configuration,
|
|||
try:
|
||||
getaddrinfo = socket.getaddrinfo(address_port[0], address_port[1], 0, socket.SOCK_STREAM, socket.IPPROTO_TCP)
|
||||
except OSError as e:
|
||||
logger.warning("cannot retrieve IPv4 or IPv6 address of '%s': %s" % (format_address(address_port), e))
|
||||
logger.warning("cannot retrieve IPv4 or IPv6 address of '%s': %s" % (utils.format_address(address_port), e))
|
||||
continue
|
||||
logger.debug("getaddrinfo of '%s': %s" % (format_address(address_port), getaddrinfo))
|
||||
logger.debug("getaddrinfo of '%s': %s" % (utils.format_address(address_port), getaddrinfo))
|
||||
for (address_family, socket_kind, socket_proto, socket_flags, socket_address) in getaddrinfo:
|
||||
logger.debug("try to create server socket on '%s'" % (format_address(socket_address)))
|
||||
logger.debug("try to create server socket on '%s'" % (utils.format_address(socket_address)))
|
||||
try:
|
||||
server = server_class(configuration, address_family, (socket_address[0], socket_address[1]), RequestHandler)
|
||||
except OSError as e:
|
||||
logger.warning("cannot create server socket on '%s': %s" % (format_address(socket_address), e))
|
||||
logger.warning("cannot create server socket on '%s': %s" % (utils.format_address(socket_address), e))
|
||||
continue
|
||||
servers[server.socket] = server
|
||||
server.set_app(application)
|
||||
logger.info("Listening on %r%s",
|
||||
format_address(server.server_address),
|
||||
utils.format_address(server.server_address),
|
||||
" with SSL" if use_ssl else "")
|
||||
if not servers:
|
||||
raise RuntimeError("No servers started")
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -47,6 +47,9 @@ from radicale.storage.multifilesystem.sync import CollectionPartSync
|
|||
from radicale.storage.multifilesystem.upload import CollectionPartUpload
|
||||
from radicale.storage.multifilesystem.verify import StoragePartVerify
|
||||
|
||||
# 999 second, 999 ms, 999 us, 999 ns
|
||||
MTIME_NS_TEST: int = 999999999999
|
||||
|
||||
|
||||
class Collection(
|
||||
CollectionPartDelete, CollectionPartMeta, CollectionPartSync,
|
||||
|
@ -89,24 +92,97 @@ class Storage(
|
|||
|
||||
_collection_class: ClassVar[Type[Collection]] = Collection
|
||||
|
||||
def _analyse_mtime(self):
    """Measure the mtime resolution of the storage filesystem.

    Writes a probe file into the collection root, sets its mtime to a
    value with all decimal digits populated (MTIME_NS_TEST) and reads it
    back to see how many digits survive.

    Returns:
        (precision_ns, precision_scaled, unit) where precision_ns is the
        resolution in nanoseconds and precision_scaled/unit express it
        human-readably ("s", "ms", "us" or "ns").

    Raises:
        Exception: re-raised when the probe file cannot be written or
        its mtime cannot be set.
    """
    # calculate and display mtime resolution
    path = os.path.join(self._get_collection_root_folder(), ".Radicale.mtime_test")
    logger.debug("Storage item mtime resolution test with file: %r", path)
    try:
        # BUGFIX: the original called "f.close" without parentheses (a
        # no-op); "with" already closes the file.
        with open(path, "w") as f:
            f.write("mtime_test")
    except Exception as e:
        logger.warning("Storage item mtime resolution test not possible, cannot write file: %r (%s)", path, e)
        raise
    # set mtime_ns for tests
    try:
        os.utime(path, times=None, ns=(MTIME_NS_TEST, MTIME_NS_TEST))
    except Exception as e:
        logger.warning("Storage item mtime resolution test not possible, cannot set utime on file: %r (%s)", path, e)
        os.remove(path)
        raise
    logger.debug("Storage item mtime resolution test set: %d" % MTIME_NS_TEST)
    mtime_ns = os.stat(path).st_mtime_ns
    logger.debug("Storage item mtime resolution test get: %d" % mtime_ns)
    # Strip decimal digits from both values until the stored mtime
    # matches the expected one; every stripped digit is a factor of 10
    # in the resolution (factors 2 and 5 catch sub-decimal truncation).
    # BUGFIX: use floor division instead of int(x / f) so large
    # nanosecond values are not subject to float rounding.
    precision = 1
    expected_ns = MTIME_NS_TEST
    while mtime_ns > 0:
        if mtime_ns == expected_ns:
            break
        if mtime_ns // 2 == expected_ns // 2:
            precision *= 2
            break
        if mtime_ns // 5 == expected_ns // 5:
            precision *= 5
            break
        precision *= 10
        mtime_ns //= 10
        expected_ns //= 10
    # clean up the probe file before returning
    os.remove(path)
    # express the precision in the largest fitting unit
    for divisor, unit in ((1000000000, "s"), (1000000, "ms"), (1000, "us")):
        if precision >= divisor:
            return (precision, precision // divisor, unit)
    return (precision, precision, "ns")
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
logger.info("storage location: %r", self._filesystem_folder)
|
||||
self._makedirs_synced(self._filesystem_folder)
|
||||
logger.info("storage location subfolder: %r", self._get_collection_root_folder())
|
||||
logger.info("storage cache subfolder usage for 'item': %s", self._use_cache_subfolder_for_item)
|
||||
logger.info("storage cache subfolder usage for 'history': %s", self._use_cache_subfolder_for_history)
|
||||
logger.info("storage cache subfolder usage for 'sync-token': %s", self._use_cache_subfolder_for_synctoken)
|
||||
logger.info("storage cache use mtime and size for 'item': %s", self._use_mtime_and_size_for_item_cache)
|
||||
logger.debug("storage cache action logging: %s", self._debug_cache_actions)
|
||||
logger.info("Storage location: %r", self._filesystem_folder)
|
||||
if not os.path.exists(self._filesystem_folder):
|
||||
logger.warning("Storage location: %r not existing, create now", self._filesystem_folder)
|
||||
self._makedirs_synced(self._filesystem_folder)
|
||||
logger.info("Storage location subfolder: %r", self._get_collection_root_folder())
|
||||
if not os.path.exists(self._get_collection_root_folder()):
|
||||
logger.warning("Storage location subfolder: %r not existing, create now", self._get_collection_root_folder())
|
||||
self._makedirs_synced(self._get_collection_root_folder())
|
||||
logger.info("Storage cache subfolder usage for 'item': %s", self._use_cache_subfolder_for_item)
|
||||
logger.info("Storage cache subfolder usage for 'history': %s", self._use_cache_subfolder_for_history)
|
||||
logger.info("Storage cache subfolder usage for 'sync-token': %s", self._use_cache_subfolder_for_synctoken)
|
||||
logger.info("Storage cache use mtime and size for 'item': %s", self._use_mtime_and_size_for_item_cache)
|
||||
try:
|
||||
(precision, precision_unit, unit) = self._analyse_mtime()
|
||||
if precision >= 100000000:
|
||||
# >= 100 ms
|
||||
logger.warning("Storage item mtime resolution test result: %d %s (VERY RISKY ON PRODUCTION SYSTEMS)" % (precision_unit, unit))
|
||||
elif precision >= 10000000:
|
||||
# >= 10 ms
|
||||
logger.warning("Storage item mtime resolution test result: %d %s (RISKY ON PRODUCTION SYSTEMS)" % (precision_unit, unit))
|
||||
else:
|
||||
logger.info("Storage item mtime resolution test result: %d %s" % (precision_unit, unit))
|
||||
if self._use_mtime_and_size_for_item_cache is False:
|
||||
logger.info("Storage cache using mtime and size for 'item' may be an option in case of performance issues")
|
||||
except Exception:
|
||||
logger.warning("Storage item mtime resolution test result not successful")
|
||||
logger.debug("Storage cache action logging: %s", self._debug_cache_actions)
|
||||
if self._use_cache_subfolder_for_item is True or self._use_cache_subfolder_for_history is True or self._use_cache_subfolder_for_synctoken is True:
|
||||
logger.info("storage cache subfolder: %r", self._get_collection_cache_folder())
|
||||
self._makedirs_synced(self._get_collection_cache_folder())
|
||||
logger.info("Storage cache subfolder: %r", self._get_collection_cache_folder())
|
||||
if not os.path.exists(self._get_collection_cache_folder()):
|
||||
logger.warning("Storage cache subfolder: %r not existing, create now", self._get_collection_cache_folder())
|
||||
self._makedirs_synced(self._get_collection_cache_folder())
|
||||
if sys.platform != "win32":
|
||||
if not self._folder_umask:
|
||||
# retrieve current umask by setting a dummy umask
|
||||
current_umask = os.umask(0o0022)
|
||||
logger.info("storage folder umask (from system): '%04o'", current_umask)
|
||||
logger.info("Storage folder umask (from system): '%04o'", current_umask)
|
||||
# reset to original
|
||||
os.umask(current_umask)
|
||||
else:
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -50,27 +50,31 @@ class StoragePartCreateCollection(StorageBase):
|
|||
self._makedirs_synced(parent_dir)
|
||||
|
||||
# Create a temporary directory with an unsafe name
|
||||
with TemporaryDirectory(prefix=".Radicale.tmp-", dir=parent_dir
|
||||
) as tmp_dir:
|
||||
# The temporary directory itself can't be renamed
|
||||
tmp_filesystem_path = os.path.join(tmp_dir, "collection")
|
||||
os.makedirs(tmp_filesystem_path)
|
||||
col = self._collection_class(
|
||||
cast(multifilesystem.Storage, self),
|
||||
pathutils.unstrip_path(sane_path, True),
|
||||
filesystem_path=tmp_filesystem_path)
|
||||
col.set_meta(props)
|
||||
if items is not None:
|
||||
if props.get("tag") == "VCALENDAR":
|
||||
col._upload_all_nonatomic(items, suffix=".ics")
|
||||
elif props.get("tag") == "VADDRESSBOOK":
|
||||
col._upload_all_nonatomic(items, suffix=".vcf")
|
||||
try:
|
||||
with TemporaryDirectory(prefix=".Radicale.tmp-", dir=parent_dir
|
||||
) as tmp_dir:
|
||||
# The temporary directory itself can't be renamed
|
||||
tmp_filesystem_path = os.path.join(tmp_dir, "collection")
|
||||
os.makedirs(tmp_filesystem_path)
|
||||
col = self._collection_class(
|
||||
cast(multifilesystem.Storage, self),
|
||||
pathutils.unstrip_path(sane_path, True),
|
||||
filesystem_path=tmp_filesystem_path)
|
||||
col.set_meta(props)
|
||||
if items is not None:
|
||||
if props.get("tag") == "VCALENDAR":
|
||||
col._upload_all_nonatomic(items, suffix=".ics")
|
||||
elif props.get("tag") == "VADDRESSBOOK":
|
||||
col._upload_all_nonatomic(items, suffix=".vcf")
|
||||
|
||||
if os.path.lexists(filesystem_path):
|
||||
pathutils.rename_exchange(tmp_filesystem_path, filesystem_path)
|
||||
else:
|
||||
os.rename(tmp_filesystem_path, filesystem_path)
|
||||
self._sync_directory(parent_dir)
|
||||
if os.path.lexists(filesystem_path):
|
||||
pathutils.rename_exchange(tmp_filesystem_path, filesystem_path)
|
||||
else:
|
||||
os.rename(tmp_filesystem_path, filesystem_path)
|
||||
self._sync_directory(parent_dir)
|
||||
except Exception as e:
|
||||
raise ValueError("Failed to create collection %r as %r %s" %
|
||||
(href, filesystem_path, e)) from e
|
||||
|
||||
return self._collection_class(
|
||||
cast(multifilesystem.Storage, self),
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2023-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -37,10 +38,11 @@ class CollectionPartLock(CollectionBase):
|
|||
if self._storage._lock.locked == "w":
|
||||
yield
|
||||
return
|
||||
cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache")
|
||||
cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", ns)
|
||||
self._storage._makedirs_synced(cache_folder)
|
||||
lock_path = os.path.join(cache_folder,
|
||||
".Radicale.lock" + (".%s" % ns if ns else ""))
|
||||
logger.debug("Lock file (CollectionPartLock): %r" % lock_path)
|
||||
lock = pathutils.RwLock(lock_path)
|
||||
with lock.acquire("w"):
|
||||
yield
|
||||
|
@ -54,11 +56,12 @@ class StoragePartLock(StorageBase):
|
|||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
lock_path = os.path.join(self._filesystem_folder, ".Radicale.lock")
|
||||
logger.debug("Lock file (StoragePartLock): %r" % lock_path)
|
||||
self._lock = pathutils.RwLock(lock_path)
|
||||
self._hook = configuration.get("storage", "hook")
|
||||
|
||||
@types.contextmanager
|
||||
def acquire_lock(self, mode: str, user: str = "") -> Iterator[None]:
|
||||
def acquire_lock(self, mode: str, user: str = "", *args, **kwargs) -> Iterator[None]:
|
||||
with self._lock.acquire(mode):
|
||||
yield
|
||||
# execute hook
|
||||
|
@ -73,8 +76,17 @@ class StoragePartLock(StorageBase):
|
|||
else:
|
||||
# Process group is also used to identify child processes
|
||||
preexec_fn = os.setpgrp
|
||||
command = self._hook % {
|
||||
"user": shlex.quote(user or "Anonymous")}
|
||||
# optional argument
|
||||
path = kwargs.get('path', "")
|
||||
try:
|
||||
command = self._hook % {
|
||||
"path": shlex.quote(self._get_collection_root_folder() + path),
|
||||
"cwd": shlex.quote(self._filesystem_folder),
|
||||
"user": shlex.quote(user or "Anonymous")}
|
||||
except KeyError as e:
|
||||
logger.error("Storage hook contains not supported placeholder %s (skip execution of: %r)" % (e, self._hook))
|
||||
return
|
||||
|
||||
logger.debug("Executing storage hook: '%s'" % command)
|
||||
try:
|
||||
p = subprocess.Popen(
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -62,6 +63,9 @@ class CollectionPartMeta(CollectionBase):
|
|||
|
||||
def set_meta(self, props: Mapping[str, str]) -> None:
|
||||
# TODO: better fix for "mypy"
|
||||
with self._atomic_write(self._props_path, "w") as fo: # type: ignore
|
||||
f = cast(TextIO, fo)
|
||||
json.dump(props, f, sort_keys=True)
|
||||
try:
|
||||
with self._atomic_write(self._props_path, "w") as fo: # type: ignore
|
||||
f = cast(TextIO, fo)
|
||||
json.dump(props, f, sort_keys=True)
|
||||
except OSError as e:
|
||||
raise ValueError("Failed to write meta data %r %s" % (self._props_path, e)) from e
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -21,6 +21,7 @@ import os
|
|||
|
||||
from radicale import item as radicale_item
|
||||
from radicale import pathutils, storage
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import StorageBase
|
||||
|
||||
|
@ -34,10 +35,12 @@ class StoragePartMove(StorageBase):
|
|||
assert isinstance(to_collection, multifilesystem.Collection)
|
||||
assert isinstance(item.collection, multifilesystem.Collection)
|
||||
assert item.href
|
||||
os.replace(pathutils.path_to_filesystem(
|
||||
item.collection._filesystem_path, item.href),
|
||||
pathutils.path_to_filesystem(
|
||||
to_collection._filesystem_path, to_href))
|
||||
move_from = pathutils.path_to_filesystem(item.collection._filesystem_path, item.href)
|
||||
move_to = pathutils.path_to_filesystem(to_collection._filesystem_path, to_href)
|
||||
try:
|
||||
os.replace(move_from, move_to)
|
||||
except OSError as e:
|
||||
raise ValueError("Failed to move file %r => %r %s" % (move_from, move_to, e)) from e
|
||||
self._sync_directory(to_collection._filesystem_path)
|
||||
if item.collection._filesystem_path != to_collection._filesystem_path:
|
||||
self._sync_directory(item.collection._filesystem_path)
|
||||
|
@ -45,11 +48,15 @@ class StoragePartMove(StorageBase):
|
|||
cache_folder = self._get_collection_cache_subfolder(item.collection._filesystem_path, ".Radicale.cache", "item")
|
||||
to_cache_folder = self._get_collection_cache_subfolder(to_collection._filesystem_path, ".Radicale.cache", "item")
|
||||
self._makedirs_synced(to_cache_folder)
|
||||
move_from = os.path.join(cache_folder, item.href)
|
||||
move_to = os.path.join(to_cache_folder, to_href)
|
||||
try:
|
||||
os.replace(os.path.join(cache_folder, item.href),
|
||||
os.path.join(to_cache_folder, to_href))
|
||||
os.replace(move_from, move_to)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
except OSError as e:
|
||||
logger.error("Failed to move cache file %r => %r %s" % (move_from, move_to, e))
|
||||
pass
|
||||
else:
|
||||
self._makedirs_synced(to_cache_folder)
|
||||
if cache_folder != to_cache_folder:
|
||||
|
|
|
@ -29,6 +29,8 @@ class StoragePartVerify(StoragePartDiscover, StorageBase):
|
|||
|
||||
def verify(self) -> bool:
|
||||
item_errors = collection_errors = 0
|
||||
logger.info("Disable fsync during storage verification")
|
||||
self._filesystem_fsync = False
|
||||
|
||||
@types.contextmanager
|
||||
def exception_cm(sane_path: str, href: Optional[str]
|
||||
|
|
|
@ -29,7 +29,7 @@ from radicale import auth
|
|||
|
||||
class Auth(auth.BaseAuth):
|
||||
|
||||
def login(self, login: str, password: str) -> str:
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
if login == "tmp":
|
||||
return login
|
||||
return ""
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# Copyright © 2012-2016 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -23,6 +23,7 @@ Radicale tests with simple requests and authentication.
|
|||
"""
|
||||
|
||||
import base64
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from typing import Iterable, Tuple, Union
|
||||
|
@ -40,6 +41,14 @@ class TestBaseAuthRequests(BaseTest):
|
|||
|
||||
"""
|
||||
|
||||
# test for available bcrypt module
|
||||
try:
|
||||
import bcrypt
|
||||
except ImportError:
|
||||
has_bcrypt = 0
|
||||
else:
|
||||
has_bcrypt = 1
|
||||
|
||||
def _test_htpasswd(self, htpasswd_encryption: str, htpasswd_content: str,
|
||||
test_matrix: Union[str, Iterable[Tuple[str, str, bool]]]
|
||||
= "ascii") -> None:
|
||||
|
@ -70,6 +79,9 @@ class TestBaseAuthRequests(BaseTest):
|
|||
def test_htpasswd_plain(self) -> None:
|
||||
self._test_htpasswd("plain", "tmp:bepo")
|
||||
|
||||
def test_htpasswd_plain_autodetect(self) -> None:
|
||||
self._test_htpasswd("autodetect", "tmp:bepo")
|
||||
|
||||
def test_htpasswd_plain_password_split(self) -> None:
|
||||
self._test_htpasswd("plain", "tmp:be:po", (
|
||||
("tmp", "be:po", True), ("tmp", "bepo", False)))
|
||||
|
@ -80,6 +92,9 @@ class TestBaseAuthRequests(BaseTest):
|
|||
def test_htpasswd_md5(self) -> None:
|
||||
self._test_htpasswd("md5", "tmp:$apr1$BI7VKCZh$GKW4vq2hqDINMr8uv7lDY/")
|
||||
|
||||
def test_htpasswd_md5_autodetect(self) -> None:
|
||||
self._test_htpasswd("autodetect", "tmp:$apr1$BI7VKCZh$GKW4vq2hqDINMr8uv7lDY/")
|
||||
|
||||
def test_htpasswd_md5_unicode(self):
|
||||
self._test_htpasswd(
|
||||
"md5", "😀:$apr1$w4ev89r1$29xO8EvJmS2HEAadQ5qy11", "unicode")
|
||||
|
@ -87,20 +102,99 @@ class TestBaseAuthRequests(BaseTest):
|
|||
def test_htpasswd_sha256(self) -> None:
|
||||
self._test_htpasswd("sha256", "tmp:$5$i4Ni4TQq6L5FKss5$ilpTjkmnxkwZeV35GB9cYSsDXTALBn6KtWRJAzNlCL/")
|
||||
|
||||
def test_htpasswd_sha256_autodetect(self) -> None:
|
||||
self._test_htpasswd("autodetect", "tmp:$5$i4Ni4TQq6L5FKss5$ilpTjkmnxkwZeV35GB9cYSsDXTALBn6KtWRJAzNlCL/")
|
||||
|
||||
def test_htpasswd_sha512(self) -> None:
|
||||
self._test_htpasswd("sha512", "tmp:$6$3Qhl8r6FLagYdHYa$UCH9yXCed4A.J9FQsFPYAOXImzZUMfvLa0lwcWOxWYLOF5sE/lF99auQ4jKvHY2vijxmefl7G6kMqZ8JPdhIJ/")
|
||||
|
||||
def test_htpasswd_bcrypt(self) -> None:
|
||||
self._test_htpasswd("bcrypt", "tmp:$2y$05$oD7hbiQFQlvCM7zoalo/T.MssV3V"
|
||||
"NTRI3w5KDnj8NTUKJNWfVpvRq")
|
||||
def test_htpasswd_sha512_autodetect(self) -> None:
|
||||
self._test_htpasswd("autodetect", "tmp:$6$3Qhl8r6FLagYdHYa$UCH9yXCed4A.J9FQsFPYAOXImzZUMfvLa0lwcWOxWYLOF5sE/lF99auQ4jKvHY2vijxmefl7G6kMqZ8JPdhIJ/")
|
||||
|
||||
@pytest.mark.skipif(has_bcrypt == 0, reason="No bcrypt module installed")
|
||||
def test_htpasswd_bcrypt_2a(self) -> None:
|
||||
self._test_htpasswd("bcrypt", "tmp:$2a$10$Mj4A9vMecAp/K7.0fMKoVOk1SjgR.RBhl06a52nvzXhxlT3HB7Reu")
|
||||
|
||||
@pytest.mark.skipif(has_bcrypt == 0, reason="No bcrypt module installed")
|
||||
def test_htpasswd_bcrypt_2a_autodetect(self) -> None:
|
||||
self._test_htpasswd("autodetect", "tmp:$2a$10$Mj4A9vMecAp/K7.0fMKoVOk1SjgR.RBhl06a52nvzXhxlT3HB7Reu")
|
||||
|
||||
@pytest.mark.skipif(has_bcrypt == 0, reason="No bcrypt module installed")
|
||||
def test_htpasswd_bcrypt_2b(self) -> None:
|
||||
self._test_htpasswd("bcrypt", "tmp:$2b$12$7a4z/fdmXlBIfkz0smvzW.1Nds8wpgC/bo2DVOb4OSQKWCDL1A1wu")
|
||||
|
||||
@pytest.mark.skipif(has_bcrypt == 0, reason="No bcrypt module installed")
|
||||
def test_htpasswd_bcrypt_2b_autodetect(self) -> None:
|
||||
self._test_htpasswd("autodetect", "tmp:$2b$12$7a4z/fdmXlBIfkz0smvzW.1Nds8wpgC/bo2DVOb4OSQKWCDL1A1wu")
|
||||
|
||||
@pytest.mark.skipif(has_bcrypt == 0, reason="No bcrypt module installed")
|
||||
def test_htpasswd_bcrypt_2y(self) -> None:
|
||||
self._test_htpasswd("bcrypt", "tmp:$2y$05$oD7hbiQFQlvCM7zoalo/T.MssV3VNTRI3w5KDnj8NTUKJNWfVpvRq")
|
||||
|
||||
@pytest.mark.skipif(has_bcrypt == 0, reason="No bcrypt module installed")
|
||||
def test_htpasswd_bcrypt_2y_autodetect(self) -> None:
|
||||
self._test_htpasswd("autodetect", "tmp:$2y$05$oD7hbiQFQlvCM7zoalo/T.MssV3VNTRI3w5KDnj8NTUKJNWfVpvRq")
|
||||
|
||||
@pytest.mark.skipif(has_bcrypt == 0, reason="No bcrypt module installed")
|
||||
def test_htpasswd_bcrypt_C10(self) -> None:
|
||||
self._test_htpasswd("bcrypt", "tmp:$2y$10$bZsWq06ECzxqi7RmulQvC.T1YHUnLW2E3jn.MU2pvVTGn1dfORt2a")
|
||||
|
||||
@pytest.mark.skipif(has_bcrypt == 0, reason="No bcrypt module installed")
|
||||
def test_htpasswd_bcrypt_C10_autodetect(self) -> None:
|
||||
self._test_htpasswd("bcrypt", "tmp:$2y$10$bZsWq06ECzxqi7RmulQvC.T1YHUnLW2E3jn.MU2pvVTGn1dfORt2a")
|
||||
|
||||
@pytest.mark.skipif(has_bcrypt == 0, reason="No bcrypt module installed")
|
||||
def test_htpasswd_bcrypt_unicode(self) -> None:
|
||||
self._test_htpasswd("bcrypt", "😀:$2y$10$Oyz5aHV4MD9eQJbk6GPemOs4T6edK"
|
||||
"6U9Sqlzr.W1mMVCS8wJUftnW", "unicode")
|
||||
self._test_htpasswd("bcrypt", "😀:$2y$10$Oyz5aHV4MD9eQJbk6GPemOs4T6edK6U9Sqlzr.W1mMVCS8wJUftnW", "unicode")
|
||||
|
||||
def test_htpasswd_multi(self) -> None:
|
||||
self._test_htpasswd("plain", "ign:ign\ntmp:bepo")
|
||||
|
||||
# login cache successful
|
||||
def test_htpasswd_login_cache_successful_plain(self, caplog) -> None:
|
||||
caplog.set_level(logging.INFO)
|
||||
self.configure({"auth": {"cache_logins": "True"}})
|
||||
self._test_htpasswd("plain", "tmp:bepo", (("tmp", "bepo", True), ("tmp", "bepo", True)))
|
||||
htpasswd_found = False
|
||||
htpasswd_cached_found = False
|
||||
for line in caplog.messages:
|
||||
if line == "Successful login: 'tmp' (htpasswd)":
|
||||
htpasswd_found = True
|
||||
elif line == "Successful login: 'tmp' (htpasswd / cached)":
|
||||
htpasswd_cached_found = True
|
||||
if (htpasswd_found is False) or (htpasswd_cached_found is False):
|
||||
raise ValueError("Logging misses expected log lines")
|
||||
|
||||
# login cache failed
|
||||
def test_htpasswd_login_cache_failed_plain(self, caplog) -> None:
|
||||
caplog.set_level(logging.INFO)
|
||||
self.configure({"auth": {"cache_logins": "True"}})
|
||||
self._test_htpasswd("plain", "tmp:bepo", (("tmp", "bepo1", False), ("tmp", "bepo1", False)))
|
||||
htpasswd_found = False
|
||||
htpasswd_cached_found = False
|
||||
for line in caplog.messages:
|
||||
if line == "Failed login attempt from unknown: 'tmp' (htpasswd)":
|
||||
htpasswd_found = True
|
||||
elif line == "Failed login attempt from unknown: 'tmp' (htpasswd / cached)":
|
||||
htpasswd_cached_found = True
|
||||
if (htpasswd_found is False) or (htpasswd_cached_found is False):
|
||||
raise ValueError("Logging misses expected log lines")
|
||||
|
||||
# htpasswd file cache
|
||||
def test_htpasswd_file_cache(self, caplog) -> None:
|
||||
self.configure({"auth": {"htpasswd_cache": "True"}})
|
||||
self._test_htpasswd("plain", "tmp:bepo")
|
||||
|
||||
# detection of broken htpasswd file entries
|
||||
def test_htpasswd_broken(self) -> None:
|
||||
for userpass in ["tmp:", ":tmp"]:
|
||||
try:
|
||||
self._test_htpasswd("plain", userpass)
|
||||
except RuntimeError:
|
||||
pass
|
||||
else:
|
||||
raise
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="leading and trailing "
|
||||
"whitespaces not allowed in file names")
|
||||
def test_htpasswd_whitespace_user(self) -> None:
|
||||
|
|
|
@ -1714,6 +1714,7 @@ permissions: RrWw""")
|
|||
assert status == 200 and prop.text == "text/vcard;charset=utf-8"
|
||||
|
||||
def test_authorization(self) -> None:
|
||||
self.configure({"auth": {"type": "none"}})
|
||||
_, responses = self.propfind("/", """\
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<propfind xmlns="DAV:">
|
||||
|
@ -1740,6 +1741,7 @@ permissions: RrWw""")
|
|||
|
||||
def test_principal_collection_creation(self) -> None:
|
||||
"""Verify existence of the principal collection."""
|
||||
self.configure({"auth": {"type": "none"}})
|
||||
self.propfind("/user/", login="user:")
|
||||
|
||||
def test_authentication_current_user_principal_hack(self) -> None:
|
||||
|
|
|
@ -143,6 +143,7 @@ collection: public/[^/]*
|
|||
permissions: i""")
|
||||
self.configure({"rights": {"type": "from_file",
|
||||
"file": rights_file_path}})
|
||||
self.configure({"auth": {"type": "none"}})
|
||||
self.mkcalendar("/tmp/calendar", login="tmp:bepo")
|
||||
self.mkcol("/public", login="tmp:bepo")
|
||||
self.mkcalendar("/public/calendar", login="tmp:bepo")
|
||||
|
@ -165,6 +166,7 @@ permissions: i""")
|
|||
Items are allowed at "/.../.../...".
|
||||
|
||||
"""
|
||||
self.configure({"auth": {"type": "none"}})
|
||||
self.mkcalendar("/", check=401)
|
||||
self.mkcalendar("/user/", check=401)
|
||||
self.mkcol("/user/")
|
||||
|
@ -175,6 +177,7 @@ permissions: i""")
|
|||
|
||||
def test_put_collections_and_items(self) -> None:
|
||||
"""Test rights for creation of calendars and items with PUT."""
|
||||
self.configure({"auth": {"type": "none"}})
|
||||
self.put("/user/", "BEGIN:VCALENDAR\r\nEND:VCALENDAR", check=401)
|
||||
self.mkcol("/user/")
|
||||
self.put("/user/calendar/", "BEGIN:VCALENDAR\r\nEND:VCALENDAR")
|
||||
|
|
|
@ -141,13 +141,19 @@ class TestBaseServerRequests(BaseTest):
|
|||
def test_bind_fail(self) -> None:
|
||||
for address_family, address in [(socket.AF_INET, "::1"),
|
||||
(socket.AF_INET6, "127.0.0.1")]:
|
||||
with socket.socket(address_family, socket.SOCK_STREAM) as sock:
|
||||
if address_family == socket.AF_INET6:
|
||||
# Only allow IPv6 connections to the IPv6 socket
|
||||
sock.setsockopt(server.COMPAT_IPPROTO_IPV6,
|
||||
socket.IPV6_V6ONLY, 1)
|
||||
with pytest.raises(OSError) as exc_info:
|
||||
sock.bind((address, 0))
|
||||
try:
|
||||
with socket.socket(address_family, socket.SOCK_STREAM) as sock:
|
||||
if address_family == socket.AF_INET6:
|
||||
# Only allow IPv6 connections to the IPv6 socket
|
||||
sock.setsockopt(server.COMPAT_IPPROTO_IPV6,
|
||||
socket.IPV6_V6ONLY, 1)
|
||||
with pytest.raises(OSError) as exc_info:
|
||||
sock.bind((address, 0))
|
||||
except OSError as e:
|
||||
if e.errno in (errno.EADDRNOTAVAIL, errno.EAFNOSUPPORT,
|
||||
errno.EPROTONOSUPPORT):
|
||||
continue
|
||||
raise
|
||||
# See ``radicale.server.serve``
|
||||
assert (isinstance(exc_info.value, socket.gaierror) and
|
||||
exc_info.value.errno in (
|
||||
|
|
|
@ -77,6 +77,7 @@ class TestMultiFileSystem(BaseTest):
|
|||
"""Verify that the hooks runs when a new user is created."""
|
||||
self.configure({"storage": {"hook": "mkdir %s" % os.path.join(
|
||||
"collection-root", "created_by_hook")}})
|
||||
self.configure({"auth": {"type": "none"}})
|
||||
self.propfind("/", login="user:")
|
||||
self.propfind("/created_by_hook/")
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -18,15 +18,27 @@
|
|||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import ssl
|
||||
import sys
|
||||
from importlib import import_module, metadata
|
||||
from typing import Callable, Sequence, Type, TypeVar, Union
|
||||
from typing import Callable, Sequence, Tuple, Type, TypeVar, Union
|
||||
|
||||
from radicale import config
|
||||
from radicale.log import logger
|
||||
|
||||
_T_co = TypeVar("_T_co", covariant=True)
|
||||
|
||||
RADICALE_MODULES: Sequence[str] = ("radicale", "vobject", "passlib", "defusedxml")
|
||||
RADICALE_MODULES: Sequence[str] = ("radicale", "vobject", "passlib", "defusedxml",
|
||||
"dateutil",
|
||||
"bcrypt",
|
||||
"pika",
|
||||
"ldap",
|
||||
"ldap3",
|
||||
"pam")
|
||||
|
||||
|
||||
# IPv4 (host, port) and IPv6 (host, port, flowinfo, scopeid)
|
||||
ADDRESS_TYPE = Union[Tuple[Union[str, bytes, bytearray], int],
|
||||
Tuple[str, int, int, int]]
|
||||
|
||||
|
||||
def load_plugin(internal_types: Sequence[str], module_name: str,
|
||||
|
@ -55,11 +67,29 @@ def package_version(name):
|
|||
|
||||
def packages_version():
|
||||
versions = []
|
||||
versions.append("python=%s.%s.%s" % (sys.version_info[0], sys.version_info[1], sys.version_info[2]))
|
||||
for pkg in RADICALE_MODULES:
|
||||
versions.append("%s=%s" % (pkg, package_version(pkg)))
|
||||
try:
|
||||
versions.append("%s=%s" % (pkg, package_version(pkg)))
|
||||
except Exception:
|
||||
try:
|
||||
versions.append("%s=%s" % (pkg, package_version("python-" + pkg)))
|
||||
except Exception:
|
||||
versions.append("%s=%s" % (pkg, "n/a"))
|
||||
return " ".join(versions)
|
||||
|
||||
|
||||
def format_address(address: ADDRESS_TYPE) -> str:
|
||||
host, port, *_ = address
|
||||
if not isinstance(host, str):
|
||||
raise NotImplementedError("Unsupported address format: %r" %
|
||||
(address,))
|
||||
if host.find(":") == -1:
|
||||
return "%s:%d" % (host, port)
|
||||
else:
|
||||
return "[%s]:%d" % (host, port)
|
||||
|
||||
|
||||
def ssl_context_options_by_protocol(protocol: str, ssl_context_options):
|
||||
logger.debug("SSL protocol string: '%s' and current SSL context options: '0x%x'", protocol, ssl_context_options)
|
||||
# disable any protocol by default
|
||||
|
|
|
@ -39,6 +39,17 @@ main{
|
|||
color: #484848;
|
||||
}
|
||||
|
||||
#loginscene .infcloudlink{
|
||||
margin: 0;
|
||||
width: 100%;
|
||||
text-align: center;
|
||||
color: #484848;
|
||||
}
|
||||
|
||||
#loginscene .infcloudlink-hidden{
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
#loginscene input{
|
||||
}
|
||||
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
* This file is part of Radicale Server - Calendar Server
|
||||
* Copyright © 2017-2024 Unrud <unrud@outlook.com>
|
||||
* Copyright © 2023-2024 Matthew Hana <matthew.hana@gmail.com>
|
||||
* Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
|
@ -874,8 +875,7 @@ function UploadCollectionScene(user, password, collection) {
|
|||
upload_btn.onclick = upload_start;
|
||||
uploadfile_form.onchange = onfileschange;
|
||||
|
||||
let href = random_uuid();
|
||||
href_form.value = href;
|
||||
href_form.value = "";
|
||||
|
||||
/** @type {?number} */ let scene_index = null;
|
||||
/** @type {?XMLHttpRequest} */ let upload_req = null;
|
||||
|
@ -927,7 +927,7 @@ function UploadCollectionScene(user, password, collection) {
|
|||
if(files.length > 1 || href.length == 0){
|
||||
href = random_uuid();
|
||||
}
|
||||
let upload_href = collection.href + "/" + href + "/";
|
||||
let upload_href = collection.href + href + "/";
|
||||
upload_req = upload_collection(user, password, upload_href, file, function(result) {
|
||||
upload_req = null;
|
||||
results.push(result);
|
||||
|
@ -993,10 +993,12 @@ function UploadCollectionScene(user, password, collection) {
|
|||
hreflimitmsg_html.classList.remove("hidden");
|
||||
href_form.classList.add("hidden");
|
||||
href_label.classList.add("hidden");
|
||||
href_form.value = random_uuid(); // dummy, will be replaced on upload
|
||||
}else{
|
||||
hreflimitmsg_html.classList.add("hidden");
|
||||
href_form.classList.remove("hidden");
|
||||
href_label.classList.remove("hidden");
|
||||
href_form.value = files[0].name.replace(/\.(ics|vcf)$/, '');
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
@ -1005,6 +1007,12 @@ function UploadCollectionScene(user, password, collection) {
|
|||
scene_index = scene_stack.length - 1;
|
||||
html_scene.classList.remove("hidden");
|
||||
close_btn.onclick = onclose;
|
||||
if(error){
|
||||
error_form.textContent = "Error: " + error;
|
||||
error_form.classList.remove("hidden");
|
||||
}else{
|
||||
error_form.classList.add("hidden");
|
||||
}
|
||||
};
|
||||
|
||||
this.hide = function() {
|
||||
|
@ -1213,7 +1221,7 @@ function CreateEditCollectionScene(user, password, collection) {
|
|||
alert("You must enter a valid HREF");
|
||||
return false;
|
||||
}
|
||||
href = collection.href + "/" + newhreftxtvalue + "/";
|
||||
href = collection.href + newhreftxtvalue + "/";
|
||||
}
|
||||
displayname = displayname_form.value;
|
||||
description = description_form.value;
|
||||
|
@ -1317,6 +1325,12 @@ function CreateEditCollectionScene(user, password, collection) {
|
|||
fill_form();
|
||||
submit_btn.onclick = onsubmit;
|
||||
cancel_btn.onclick = oncancel;
|
||||
if(error){
|
||||
error_form.textContent = "Error: " + error;
|
||||
error_form.classList.remove("hidden");
|
||||
}else{
|
||||
error_form.classList.add("hidden");
|
||||
}
|
||||
};
|
||||
this.hide = function() {
|
||||
read_form();
|
||||
|
@ -1348,8 +1362,10 @@ function cleanHREFinput(a) {
|
|||
href_form = a.target;
|
||||
}
|
||||
let currentTxtVal = href_form.value.trim().toLowerCase();
|
||||
//Clean the HREF to remove non lowercase letters and dashes
|
||||
currentTxtVal = currentTxtVal.replace(/(?![0-9a-z\-\_])./g, '');
|
||||
//Clean the HREF to remove not permitted chars
|
||||
currentTxtVal = currentTxtVal.replace(/(?![0-9a-z\-\_\.])./g, '');
|
||||
//Clean the HREF to remove leading . (would result in hidden directory)
|
||||
currentTxtVal = currentTxtVal.replace(/^\./, '');
|
||||
href_form.value = currentTxtVal;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
<!DOCTYPE html>
|
||||
<!--
|
||||
* Copyright © 2018-2020 Unrud <unrud@outlook.com>
|
||||
* Copyright © 2023-2023 Henning <github@henning-ullrich.de>
|
||||
* Copyright © 2023-2024 Matthew Hana <matthew.hana@gmail.com>
|
||||
* Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
-->
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
|
@ -27,8 +33,15 @@
|
|||
</section>
|
||||
|
||||
<section id="loginscene" class="container hidden">
|
||||
<div class="infcloudlink-hidden">
|
||||
<form action="infcloud/" method="get" target="_blank">
|
||||
<button class="blue" type="submit">Collection content<br>(InfCloud web client)</button>
|
||||
</form>
|
||||
</div>
|
||||
<div class="logocontainer">
|
||||
<img src="css/logo.svg" alt="Radicale">
|
||||
<br>
|
||||
Collection management
|
||||
</div>
|
||||
<h1>Sign in</h1>
|
||||
<br>
|
||||
|
@ -116,6 +129,8 @@
|
|||
<button type="submit" class="green" data-name="submit">Save</button>
|
||||
<button type="button" class="red" data-name="cancel">Cancel</button>
|
||||
</form>
|
||||
<span class="error hidden" data-name="error"></span>
|
||||
<br>
|
||||
</section>
|
||||
|
||||
<section id="createcollectionscene" class="container hidden">
|
||||
|
@ -149,6 +164,8 @@
|
|||
<button type="submit" class="green" data-name="submit">Create</button>
|
||||
<button type="button" class="red" data-name="cancel">Cancel</button>
|
||||
</form>
|
||||
<span class="error hidden" data-name="error"></span>
|
||||
<br>
|
||||
</section>
|
||||
|
||||
<section id="uploadcollectionscene" class="container hidden">
|
||||
|
@ -172,6 +189,8 @@
|
|||
<button type="submit" class="green" data-name="submit">Upload</button>
|
||||
<button type="button" class="red" data-name="close">Close</button>
|
||||
</form>
|
||||
<span class="error hidden" data-name="error"></span>
|
||||
<br>
|
||||
</section>
|
||||
|
||||
<section id="deletecollectionscene" class="container hidden">
|
||||
|
|
|
@ -24,7 +24,7 @@ skip_install = True
|
|||
|
||||
[testenv:mypy]
|
||||
deps = mypy==1.11.0
|
||||
commands = mypy .
|
||||
commands = mypy --install-types --non-interactive .
|
||||
skip_install = True
|
||||
|
||||
[tool:isort]
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2009-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -20,7 +20,7 @@ from setuptools import find_packages, setup
|
|||
|
||||
# When the version is updated, a new section in the CHANGELOG.md file must be
|
||||
# added too.
|
||||
VERSION = "3.3.2"
|
||||
VERSION = "3.5.1.dev"
|
||||
|
||||
with open("README.md", encoding="utf-8") as f:
|
||||
long_description = f.read()
|
||||
|
@ -38,6 +38,7 @@ web_files = ["web/internal_data/css/icon.png",
|
|||
|
||||
install_requires = ["defusedxml", "passlib", "vobject>=0.9.6",
|
||||
"pika>=1.1.0",
|
||||
"requests",
|
||||
]
|
||||
bcrypt_requires = ["bcrypt"]
|
||||
ldap_requires = ["ldap3"]
|
||||
|
@ -61,7 +62,7 @@ setup(
|
|||
install_requires=install_requires,
|
||||
extras_require={"test": test_requires, "bcrypt": bcrypt_requires, "ldap": ldap_requires},
|
||||
keywords=["calendar", "addressbook", "CalDAV", "CardDAV"],
|
||||
python_requires=">=3.8.0",
|
||||
python_requires=">=3.9.0",
|
||||
classifiers=[
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Environment :: Console",
|
||||
|
@ -71,7 +72,6 @@ setup(
|
|||
"License :: OSI Approved :: GNU General Public License (GPL)",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue