mirror of
https://github.com/Kozea/Radicale.git
synced 2025-04-04 05:37:37 +03:00
Compare commits
1137 commits
Author | SHA1 | Date | |
---|---|---|---|
|
8cdf262560 | ||
|
69587d3f5c | ||
|
f41533cca7 | ||
|
393a26814b | ||
|
3bdcbbdc56 | ||
|
9ca82a8aa2 | ||
|
ffe5fcc6f3 | ||
|
ecaed3188c | ||
|
c23821ad0c | ||
|
b744e9658c | ||
|
3ee5433397 | ||
|
29915b20c8 | ||
|
c91b8e49d5 | ||
|
14fb50954c | ||
|
312e26977b | ||
|
3bdc438283 | ||
|
3eb61a82a6 | ||
|
fb986ea02e | ||
|
af09d532c3 | ||
|
70b66ddfe2 | ||
|
6b83c409d4 | ||
|
7fcf473662 | ||
|
d25786c190 | ||
|
5d5b12c124 | ||
|
23387fa2f3 | ||
|
e0a24b14b4 | ||
|
2439266d0e | ||
|
9f7941d428 | ||
|
3af690fcb6 | ||
|
0d1dcec61a | ||
|
98152062df | ||
|
bcbf0918a9 | ||
|
f40c4d6e9b | ||
|
633dfbc875 | ||
|
34f51033b7 | ||
|
94ad295124 | ||
|
7399286ec9 | ||
|
7d351d6692 | ||
|
d4e23e6731 | ||
|
de527632e0 | ||
|
217978e9d5 | ||
|
2772305dde | ||
|
2ef99e5e85 | ||
|
26eab43f40 | ||
|
a3880480a9 | ||
|
9f8ac21130 | ||
|
e8c974a72a | ||
|
be43ce5161 | ||
|
7bb4beeae2 | ||
|
c9ffde27d8 | ||
|
dc56d67c33 | ||
|
081b8a7fcc | ||
|
76753d271a | ||
|
69f85a0bdf | ||
|
820691ca53 | ||
|
358ae55540 | ||
|
e22fbe282b | ||
|
b0d649f8b9 | ||
|
8f2099baf8 | ||
|
3a13ffbc51 | ||
|
0f67336987 | ||
|
cf727101f8 | ||
|
9f0385fd67 | ||
|
3963bb4d82 | ||
|
cffb2aaae3 | ||
|
4f0e607583 | ||
|
2f1db01083 | ||
|
95a8899002 | ||
|
41ab96e142 | ||
|
a284d18c16 | ||
|
30664f9346 | ||
|
36aba7a8b9 | ||
|
914320826f | ||
|
9372344bb1 | ||
|
c4a48828d3 | ||
|
ebe0418a4c | ||
|
c3c78db8ae | ||
|
0fa50210c9 | ||
|
25402ab641 | ||
|
76281ad1ff | ||
|
1d0ff9e84a | ||
|
e52056dea3 | ||
|
75711b46dc | ||
|
45df5a3b94 | ||
|
2ae1762daa | ||
|
7839ac5783 | ||
|
4086665d16 | ||
|
78dccbdc92 | ||
|
63b98913e0 | ||
|
b729a4c192 | ||
|
a3eb754967 | ||
|
d89ada0c17 | ||
|
7afff7ad2b | ||
|
451712d01d | ||
|
d7013ce726 | ||
|
280968e694 | ||
|
7b4da3a128 | ||
|
c6bd129fa2 | ||
|
bc2444bb9a | ||
|
dc35d4d0ad | ||
|
68f0eafe7d | ||
|
aa248f2b97 | ||
|
a2cd430f64 | ||
|
36e33ffee1 | ||
|
b8c2bc29ec | ||
|
65ce0c57e5 | ||
|
2958201454 | ||
|
73681a7767 | ||
|
cdbad007b6 | ||
|
78b94b1d4d | ||
|
e3ae7b3ab5 | ||
|
4419aa2285 | ||
|
eb8dc61952 | ||
|
3a4ec11733 | ||
|
7318f592c8 | ||
|
3910457a8d | ||
|
fcaee51ceb | ||
|
c2013ec901 | ||
|
29b1da4652 | ||
|
36a0501484 | ||
|
0b5dd82109 | ||
|
9b671beceb | ||
|
50f5d2e5ef | ||
|
8218081f58 | ||
|
16ece44faf | ||
|
5302863f53 | ||
|
6518f1b63a | ||
|
7f3fedc048 | ||
|
0759673e67 | ||
|
855e3743ca | ||
|
c8f650bc2c | ||
|
046d39b1bd | ||
|
954ddea006 | ||
|
6683775c81 | ||
|
9791a4db0f | ||
|
970d4ba468 | ||
|
809e35689b | ||
|
c3c61c692e | ||
|
53251231d4 | ||
|
63e414850e | ||
|
18338b3c6e | ||
|
d5cb05f817 | ||
|
4ab1cedee3 | ||
|
13a78d7365 | ||
|
93970a1001 | ||
|
c60627141f | ||
|
f6b5cb8a1e | ||
|
3914735ec0 | ||
|
48a634af9f | ||
|
3d50ae4a70 | ||
|
018978edd8 | ||
|
aa35c678ce | ||
|
19a47158bd | ||
|
a62da71aa2 | ||
|
67bbc9a31b | ||
|
dc83c6d7d0 | ||
|
484616f363 | ||
|
718089e3bf | ||
|
b078a8f002 | ||
|
fde0ecb9b2 | ||
|
803763729a | ||
|
37b18cf5a2 | ||
|
cd51581f38 | ||
|
88accdb672 | ||
|
c157dd7d19 | ||
|
605fc65584 | ||
|
f0d06cbc7d | ||
|
77f69f2b1e | ||
|
b011fa4e61 | ||
|
dcaec20681 | ||
|
d79abc2b7a | ||
|
938f6a97fd | ||
|
c2def71ce6 | ||
|
6f68a64855 | ||
|
f3a7641baa | ||
|
cfcfbbd231 | ||
|
e0d20edbcd | ||
|
d2be086cd1 | ||
|
7b6146405f | ||
|
04523e5087 | ||
|
23a68b2fb1 | ||
|
87dc5538d2 | ||
|
e28b719233 | ||
|
937acf38f7 | ||
|
063883797c | ||
|
30389f4525 | ||
|
780aaa7e3e | ||
|
98e65d88a4 | ||
|
10a79b9483 | ||
|
26637a1240 | ||
|
f9457f00f7 | ||
|
3df5d28432 | ||
|
e80bf58901 | ||
|
bc939522dc | ||
|
50b76f7114 | ||
|
72c7d32e44 | ||
|
c24659c5ec | ||
|
3e18644423 | ||
|
a93af6f177 | ||
|
ed6a5a834e | ||
|
dd9bb2beff | ||
|
0713041929 | ||
![]() |
3f04914de4 | ||
|
1c77fd819f | ||
|
08a35b19c8 | ||
|
1634ce9498 | ||
|
be64e57ae8 | ||
|
8172b87077 | ||
|
c853ec4a74 | ||
|
5ebaf4ef1c | ||
|
d6c4e6487a | ||
|
f9dd3efc3a | ||
|
6c1445d8db | ||
|
1ca41e2128 | ||
|
607b3af67b | ||
|
841df09312 | ||
|
c81e19616c | ||
|
b0d56f898b | ||
|
73f8f950d0 | ||
|
976dfe4a3f | ||
|
b122002077 | ||
|
ad94acddf1 | ||
|
2442a794ae | ||
|
a9f2e6fe7b | ||
|
5a00baab3f | ||
|
cf914450ee | ||
|
0d43a49ffb | ||
|
234be74b87 | ||
|
45f2a4cc0e | ||
|
532fad9ba6 | ||
|
99f5ec389d | ||
|
0253682c00 | ||
|
8c2feb4726 | ||
|
c243ae4ebf | ||
|
6f82333ff7 | ||
|
6f0ac545f0 | ||
|
70c4a34eb8 | ||
|
3763f28ae4 | ||
|
0a5ae5b0b4 | ||
|
5d48ba5d1e | ||
|
5a591b6471 | ||
|
8604dacad0 | ||
|
ca665c4849 | ||
|
8fdbd0dbf6 | ||
|
46fe98f60b | ||
|
c10ce7ae46 | ||
|
6ebca08423 | ||
|
c1be04abd1 | ||
|
c00ab76c83 | ||
|
5357e692d9 | ||
|
9cac3008b7 | ||
|
2489356dda | ||
|
5ce0cee8bf | ||
|
79ba07e16b | ||
|
c0acbd4402 | ||
|
b75e303556 | ||
|
a794a51885 | ||
|
4f2990342d | ||
|
ac8abbd12c | ||
|
9af15e6656 | ||
|
30e2ab490e | ||
|
ddd099accd | ||
|
8e97b709bf | ||
|
74311560c9 | ||
|
b22038c746 | ||
|
c2b2274dad | ||
|
2674f9a382 | ||
|
51960bcab8 | ||
|
a5dd4d8a7d | ||
|
94898ef6c1 | ||
|
7df2fb35a7 | ||
|
a4266c9690 | ||
|
1e8d9eda50 | ||
|
0b00218d75 | ||
|
7e23c603c1 | ||
|
6569e481df | ||
|
b19418f43c | ||
|
e2934a12c0 | ||
|
c8010fa4be | ||
|
b784f476b4 | ||
|
335584a6b7 | ||
|
9e9d036387 | ||
|
006c2d2bc0 | ||
|
b356edd6be | ||
|
59450e8c2d | ||
|
1a76e1ad50 | ||
|
6ebe9aee76 | ||
|
6214111f4f | ||
|
0f6dcb7192 | ||
|
4b1183ae00 | ||
|
c1c8ab2887 | ||
|
836827ac8f | ||
|
3d4cd7f034 | ||
|
a606477e3f | ||
|
c33e96c5a3 | ||
|
dc51a74e5a | ||
|
5f79b089c8 | ||
|
fc7c50b4cb | ||
|
11dad85404 | ||
|
dc20f518dd | ||
|
62bdfeab40 | ||
|
ff3f2fc3de | ||
|
4bb00e6070 | ||
|
b7ae6b378b | ||
|
7597c7d4a5 | ||
|
855e983ae2 | ||
|
0a5773a844 | ||
|
f1d007a51e | ||
|
4d04c85f2d | ||
|
f7d6f6442f | ||
|
a7ce8f032c | ||
|
05b8172f8f | ||
|
3ebe51a4cb | ||
|
0d29de6db9 | ||
|
886f4ee8d0 | ||
|
46acbfd987 | ||
|
0e0592e3b8 | ||
|
be5eab8671 | ||
|
9787f87cc7 | ||
|
1e318c81cf | ||
|
119cefce34 | ||
|
3983b5c887 | ||
|
778f56cc4d | ||
|
2bb2d6385b | ||
|
b3d0c16407 | ||
|
e1ee3d4529 | ||
|
644548c866 | ||
|
05d4e91856 | ||
|
99b6889d91 | ||
|
2d8903dc44 | ||
|
5681b45298 | ||
|
5515d1e790 | ||
|
eef33f76d1 | ||
|
05c349a15f | ||
|
916c9db3c8 | ||
|
ff5fae1663 | ||
|
d9e15dd7c6 | ||
|
675c5ce8cf | ||
|
b85c0758d8 | ||
|
3232b34392 | ||
|
873bf80131 | ||
|
38c236aa02 | ||
|
f725ee780f | ||
|
804170a4d5 | ||
|
2a5b12e21c | ||
|
6943eb659f | ||
|
24f5f9b98e | ||
|
edd6d0a513 | ||
|
92ce13e348 | ||
|
0fe53e62db | ||
|
f754f28518 | ||
|
1d241d9e2f | ||
|
d6bacc9047 | ||
|
43466078e7 | ||
|
8f80e0eb92 | ||
|
a54fb10e17 | ||
|
166d4ed27b | ||
|
2c234b97d1 | ||
|
64acfe27f4 | ||
|
48bab4b033 | ||
|
e07a248451 | ||
|
62e6aad2d2 | ||
|
37f7df2786 | ||
|
f26facba3e | ||
|
4696d252f4 | ||
|
287c0e7171 | ||
|
fbb6b1684a | ||
|
82064f823a | ||
|
19f5aa0edd | ||
|
92e5032278 | ||
|
6fa15dae4a | ||
|
5b64ef9fe7 | ||
|
69780dd0ee | ||
|
4781b48a1c | ||
|
6f2c1037d5 | ||
|
e4daddc186 | ||
|
f7e46ebf39 | ||
|
1ea782e3b2 | ||
|
c13e0e60fd | ||
|
8fea1f907e | ||
|
a6b1e000e7 | ||
|
a64f0e1093 | ||
|
18e8ab1ccc | ||
|
7b0d3ed29d | ||
|
0baf67147e | ||
|
0f9bf4c063 | ||
|
df5ca97442 | ||
|
416081a81f | ||
|
07b7d28323 | ||
|
5380629bda | ||
|
243b888c8e | ||
|
9ecb95ce37 | ||
|
6929f3d0b3 | ||
|
00dac0c030 | ||
|
fb904320d2 | ||
|
1d07d72946 | ||
|
d7840b8bff | ||
|
cfc1e94ad8 | ||
|
bf77844d34 | ||
|
6a6fec5bdd | ||
|
b0d1ccc0f6 | ||
|
2d5dc5186b | ||
|
36ef753b0e | ||
|
74f4412761 | ||
|
ae274911d5 | ||
|
1ee93f32b2 | ||
|
ee2af306d7 | ||
|
687624a403 | ||
|
19cca41a43 | ||
|
56c375fca2 | ||
|
a1b8c65def | ||
|
c6cc7f3486 | ||
|
652e768650 | ||
|
f25a5fbc79 | ||
|
bf4f5834af | ||
|
f7c731e189 | ||
|
059afef35d | ||
|
e0c04f2ae3 | ||
|
5cafd29d7f | ||
|
0badab86a6 | ||
|
b6fa3c47c3 | ||
|
c63d00a550 | ||
|
8bfed78926 | ||
|
1670e4a793 | ||
|
8e9fdf391a | ||
|
ccddf877ee | ||
|
48e4203856 | ||
|
bd001fe1d5 | ||
|
dbc939aff2 | ||
|
5ec34ed163 | ||
|
372e62bb54 | ||
|
59c638461b | ||
|
a8baea9b19 | ||
|
c438ccb215 | ||
|
d7c09e218f | ||
|
37148b7124 | ||
|
2c15b1b8f4 | ||
|
6e103b9c7e | ||
|
a78e32de4d | ||
|
9faf89880b | ||
|
a01e53616e | ||
|
e59e4d3aff | ||
|
67362189f5 | ||
|
ba9776d688 | ||
|
0505b7b603 | ||
|
eed6bcee01 | ||
|
110ee9d247 | ||
|
457af284e1 | ||
|
e0594d5b33 | ||
|
d41aa60d61 | ||
|
973b26b2e9 | ||
|
bfe0ccc463 | ||
|
77749cbbb9 | ||
|
fc77cf9d66 | ||
|
06a9cf2886 | ||
|
53bc6167d3 | ||
|
72e4c4fadd | ||
|
3e478ee6da | ||
|
0ab99d4e8f | ||
|
4ef5cad20f | ||
|
a449d8774b | ||
|
0f87897eb7 | ||
|
40c8b3d038 | ||
|
d15e836079 | ||
|
fce3f0b1df | ||
|
499b37fd2f | ||
|
e887b06d21 | ||
|
b1c682de57 | ||
|
c000408429 | ||
|
0feca04086 | ||
|
fdb014d068 | ||
|
ccb59444c3 | ||
|
97479190e8 | ||
|
d1ceb620e4 | ||
|
040a433696 | ||
|
187886e797 | ||
|
3cb9b73a16 | ||
|
a272d3039e | ||
|
98c5ffdc87 | ||
|
9945a9f65a | ||
|
7fbc0e70e9 | ||
|
15ed41fa09 | ||
|
a92a621b9b | ||
|
645619bac8 | ||
|
b30cdbbabf | ||
|
b081b3ea06 | ||
|
da844f48e6 | ||
|
a7f33c8795 | ||
|
b47c76e9ca | ||
|
da04d95b75 | ||
|
e05fbeb950 | ||
|
d75b071fec | ||
|
5cb16a3a2d | ||
|
606bd30514 | ||
|
6a78466af4 | ||
|
c63dee71ec | ||
|
b1ce69882c | ||
|
e70486900d | ||
|
368c43137a | ||
|
3f62982e1d | ||
|
a79c2ad83e | ||
|
6d11738243 | ||
|
e852c887d7 | ||
|
107fe1bc53 | ||
|
4f1e8ce889 | ||
|
39662fc680 | ||
|
336972316e | ||
|
7da46f392e | ||
|
d7fa90a976 | ||
|
13d56f0918 | ||
|
8b8d7729a2 | ||
|
5167f12624 | ||
|
19e5972b4f | ||
|
bd66d58540 | ||
|
408a03a3c0 | ||
|
3cba4b32a3 | ||
|
906d391fe3 | ||
|
d6c0a05771 | ||
|
29b7cd8d54 | ||
|
204623d656 | ||
|
b0f131cac2 | ||
|
4c1d295e81 | ||
|
7b0d88ff0d | ||
|
2d0496b888 | ||
|
773f09fe74 | ||
|
34b449f27f | ||
|
45f0b8809b | ||
|
7388a095f5 | ||
|
5ffaf6e837 | ||
|
0f505222d9 | ||
|
b1cf1f2e28 | ||
|
01d4851581 | ||
|
5019a3e974 | ||
|
c046c6ae34 | ||
|
897a679c1c | ||
|
b47a253ccb | ||
|
c499c313c2 | ||
|
1dceaf5385 | ||
|
47bc966a13 | ||
|
61be51e9f3 | ||
|
e5096d31af | ||
|
e5e80ebbe6 | ||
|
13b1aaed39 | ||
|
f117fd06af | ||
|
055489f79c | ||
|
53befe72db | ||
|
7fd7ec7f7a | ||
|
9809fbcba4 | ||
|
fe3d9d3f48 | ||
|
bb112784fd | ||
|
f1d84cea35 | ||
|
fe33d79eb1 | ||
|
dd8b62eef5 | ||
|
3094bc3936 | ||
|
6b34323c1e | ||
|
6de06fd75f | ||
|
b015f9dc16 | ||
|
bb203812e6 | ||
|
e24702a65b | ||
|
55f181da65 | ||
|
51a7136b93 | ||
|
bbe7088561 | ||
|
6b65800770 | ||
|
defa767c8a | ||
|
d83885d108 | ||
|
6eb6ff44d0 | ||
|
258b1024b3 | ||
|
54dfbf15d2 | ||
|
e9c7d4a671 | ||
|
b4967f8e26 | ||
|
ac5e33c723 | ||
|
02019e73e6 | ||
|
1acfa480fa | ||
|
addf5a25c8 | ||
|
fe630b46ba | ||
|
0cf8ede6c7 | ||
|
5b5273abbf | ||
|
59bd8e8330 | ||
|
e02a31af89 | ||
|
a70c69ee28 | ||
|
fc7b50d69f | ||
|
518de6b360 | ||
|
695c5d8416 | ||
|
ad596002f3 | ||
|
ac14b01dda | ||
|
a07b39eaad | ||
|
b603acf89f | ||
|
e8c092bd2d | ||
|
ce32134452 | ||
|
d3bfa968f8 | ||
|
bf112d6b5f | ||
|
9c338b34eb | ||
|
ad3a8d9370 | ||
|
27dfaa8663 | ||
|
c7c3119267 | ||
|
28fa28aaff | ||
|
e4949a1f2f | ||
|
d8cbe0e206 | ||
|
5dd27d3c80 | ||
|
eb577422f6 | ||
|
e9d92f10f1 | ||
|
e124e9d8c9 | ||
|
7c54d8a96c | ||
|
71ab791935 | ||
|
19f3c3edfb | ||
|
2a07c7d230 | ||
|
63db0483d0 | ||
|
138317e6fd | ||
|
9179550162 | ||
|
68551d2321 | ||
|
c6d01b7874 | ||
|
b1ae3edea8 | ||
|
2a35d349b8 | ||
|
86a69b431a | ||
|
d1e01aadb5 | ||
|
7d39354c37 | ||
|
acf65e9d6a | ||
|
fb7630f9eb | ||
|
496b9f1d7c | ||
|
2c0da6f37c | ||
|
4678612194 | ||
|
96b63ed65f | ||
|
c11a08cf88 | ||
|
61fef9c9df | ||
|
2296f4952b | ||
|
b78730d570 | ||
|
6f7abbcba5 | ||
|
f3f3995b01 | ||
|
27e1b04529 | ||
|
515afb52ed | ||
|
3e4dbc5d79 | ||
|
8db31b49fe | ||
|
2dec736fdf | ||
|
211972bd09 | ||
|
b0345a424d | ||
|
db87eba400 | ||
|
2354b56578 | ||
|
a26ec29899 | ||
|
a6368d8e66 | ||
|
a009bb562f | ||
|
706e663486 | ||
|
a7e631668f | ||
|
9e5f6db84b | ||
|
16eadd11b5 | ||
|
f7e01d55ed | ||
|
7340ddc9d2 | ||
|
a7882b98bd | ||
|
239e17d735 | ||
|
76dc9dce0d | ||
|
f0e21b14c2 | ||
|
06a95d7597 | ||
|
f610384374 | ||
|
d228892b7c | ||
|
a8bc232883 | ||
|
4e4af2aca5 | ||
|
aac5188fc0 | ||
|
2741d73d68 | ||
|
d1da63569b | ||
|
513415d201 | ||
|
df874a273f | ||
|
b8f401056a | ||
|
2ce5ba1e38 | ||
|
9d91564d10 | ||
|
85e1f46383 | ||
|
53fdf08a17 | ||
|
56a0752429 | ||
|
353ad7a9b3 | ||
|
ca99016200 | ||
|
e6429b4bfd | ||
|
150efe2a0c | ||
|
17169bbfdb | ||
|
95cd6938d9 | ||
|
a49454d36e | ||
|
f0f4213760 | ||
|
aadcc42eb3 | ||
|
9457699a32 | ||
|
5d143ca0e8 | ||
|
421cc2b1ca | ||
|
df0e97fab9 | ||
|
88a2ae71b6 | ||
|
dfa21a57ce | ||
|
ffebbf1928 | ||
|
84fd30f357 | ||
|
4e99105225 | ||
|
d05d726dc2 | ||
|
e66055de08 | ||
|
a24a97f880 | ||
|
ff4f8bfaf4 | ||
|
be53538738 | ||
|
922de6ce14 | ||
|
9a2d42afab | ||
|
46b258b1bc | ||
|
a65e995c53 | ||
|
4c55b6db28 | ||
|
546086af2e | ||
|
e85ec93291 | ||
|
b1336c2f1f | ||
|
0b9a53c73d | ||
|
e0a22074b4 | ||
|
10dd5aff3c | ||
|
beb33fec02 | ||
|
2ce7c2a45a | ||
|
02d157269e | ||
|
61f3557e78 | ||
|
020fd560a3 | ||
|
b16bc212f6 | ||
|
efd562b38d | ||
|
50e8f1e28a | ||
|
f508af580c | ||
|
0e1d502d0a | ||
|
7d27b4eacc | ||
|
f4e0825aec | ||
|
e76011ecc6 | ||
|
9c9be73093 | ||
|
17e6269400 | ||
|
bb6dcb6172 | ||
|
98bac38006 | ||
|
8891f1ab89 | ||
|
e4842ef7df | ||
|
5b99b5a88d | ||
|
74be6168cb | ||
|
8b4e28a179 | ||
|
a8a7e23a37 | ||
|
c128b0d773 | ||
|
ed6432706f | ||
|
d6e295c272 | ||
|
0750108152 | ||
|
b945749d1b | ||
|
7927b0c935 | ||
|
eac1722393 | ||
|
39c339638d | ||
|
3c81f43404 | ||
|
825464f102 | ||
|
2c13b8d2e0 | ||
|
ee2fc74bc0 | ||
|
dc21aa1b4f | ||
|
36285143ce | ||
|
bb185a941d | ||
|
80bf824b91 | ||
|
401b68fe08 | ||
|
b5d022fe08 | ||
|
caefa489f9 | ||
|
213ab0fcfa | ||
|
dba399bce3 | ||
|
29a2a80bfd | ||
|
acc06587ce | ||
|
947cb08bb9 | ||
|
1f25d6e27d | ||
|
5ec9aaec07 | ||
|
760b01ee25 | ||
|
37c975d938 | ||
|
48910bf3a9 | ||
|
6f5ee56c2d | ||
|
f05753be71 | ||
|
bf824838d8 | ||
|
32d303805a | ||
|
1afc34c2bb | ||
|
9d30cbc5c2 | ||
|
2bb811b3fc | ||
|
7bf2c18887 | ||
|
8fc5352e27 | ||
|
08a4c792b1 | ||
|
ab28d65343 | ||
|
1faa7bd4ba | ||
|
7b87a598ac | ||
|
d9be20539f | ||
|
49d0ad5b18 | ||
|
b4d7eb5f04 | ||
|
44cfd38263 | ||
|
21ebbca2d9 | ||
|
1c64fdc5b1 | ||
|
0f355114ae | ||
|
3ee6e55d4a | ||
|
f407915227 | ||
|
66b374bc28 | ||
|
1c32919739 | ||
|
3a04b2247e | ||
|
d8ab8aa42c | ||
|
c4d80fd385 | ||
|
dfaef5da75 | ||
|
1593742ce2 | ||
|
438d5f1735 | ||
|
34612c71f0 | ||
|
96a49274f4 | ||
|
72aab54636 | ||
|
5b0cc60cb9 | ||
|
3fdc15fccd | ||
|
1e684ef699 | ||
|
71fd91631e | ||
|
8baf4b7e3f | ||
|
78a5813831 | ||
|
363be35e61 | ||
|
e7b9ec3549 | ||
|
80d91a8987 | ||
|
6474f8f31c | ||
|
d387491fb6 | ||
|
eb67c57ce2 | ||
|
b45c97d5a5 | ||
|
32050ef117 | ||
|
94a5ff0d68 | ||
|
afff2731e1 | ||
|
7936e714d4 | ||
|
c5b48c1ee4 | ||
|
989cbefc64 | ||
|
a72964ab3f | ||
|
b24eae8369 | ||
|
1485777bc6 | ||
|
50140a54f5 | ||
|
682c048569 | ||
|
22c843c49c | ||
|
b7272be481 | ||
|
88f65671ce | ||
|
913635a17e | ||
|
551b5c2272 | ||
|
cc2e1553d3 | ||
|
5678453b95 | ||
|
76e06ea3fc | ||
|
8e3f3b5bf2 | ||
|
93e5dd4a70 | ||
|
9f1e243f00 | ||
|
e6d4611980 | ||
|
d42e9edfd0 | ||
|
2f97fc5b88 | ||
|
668ad03fa3 | ||
|
2b6626d053 | ||
|
998e2f96bd | ||
|
b892379a8d | ||
|
86f37e0250 | ||
|
dcd6456339 | ||
|
f64488b918 | ||
|
f57e738156 | ||
|
2dd7328859 | ||
|
5b0830ea08 | ||
|
1c82eb5e05 | ||
|
cb5b92cb7a | ||
|
4cac895901 | ||
|
02c949d5d3 | ||
|
8e8c652225 | ||
|
d3b78e0246 | ||
|
110ec3a788 | ||
|
fa6ec95e8c | ||
|
701a9794bc | ||
|
3fd3bf5192 | ||
|
a58e68ea37 | ||
|
3e95c0ab0e | ||
|
7c25c7715f | ||
|
fadf281734 | ||
|
a3aa0ce7d9 | ||
|
ecafa1d32b | ||
|
d7ce2f0b98 | ||
|
513e04e636 | ||
|
72103c30c2 | ||
|
a07813ecc9 | ||
|
ae731290c1 | ||
|
6ae831a324 | ||
|
8efb942892 | ||
|
a2be03fdaf | ||
|
77626e5aed | ||
|
390240c35a | ||
|
5070533a0b | ||
|
e23f0283b0 | ||
|
e8e709191a | ||
|
9276c65462 | ||
|
360484e2d5 | ||
|
1a78114a56 | ||
|
7d4a0fe70e | ||
|
7b98a0028b | ||
|
11a2b43b60 | ||
|
6a96b1f5a7 | ||
|
794c1f84fb | ||
|
f8e28f6b6e | ||
|
526d835b59 | ||
|
3e6d8db98d | ||
|
4a0b2e8791 | ||
|
22731f3d26 | ||
|
dd723dae5d | ||
|
cf81d1f9a7 | ||
|
cdb5160c3e | ||
|
f4a87afab7 | ||
|
6a56a6026f | ||
|
c2a159a6cd | ||
|
ad5ce94817 | ||
|
865e0dd629 | ||
|
9bed0af669 | ||
|
cd6ebaae1a | ||
|
f8f6e47081 | ||
|
5aa2f59b38 | ||
|
156ce91f35 | ||
|
3594217570 | ||
|
d8604becd0 | ||
|
f7fd323dea | ||
|
f9d9b88a77 | ||
|
86ada4cf97 | ||
|
c589c9fc0d | ||
|
14a3c3d763 | ||
|
abcc0c2ef6 | ||
|
f08912ace1 | ||
|
9aae5655cf | ||
|
413c74c27c | ||
|
a2ceaa41a4 | ||
|
515b196fda | ||
|
6d8976795c | ||
|
395f53b3d5 | ||
|
8caa90f4be | ||
|
497b5141b0 | ||
|
a5716a7d84 | ||
|
f06af066f9 | ||
|
e96277e671 | ||
|
c14defcba8 | ||
|
2b8f4b9419 | ||
|
a97093d001 | ||
|
b64c9baa5f | ||
|
ed8a2284a4 | ||
|
3c218ecd9c | ||
|
0baf1dc908 | ||
|
b0f8d37294 | ||
|
c5b5910de4 | ||
|
8d19fd7a64 | ||
|
eda8309a04 | ||
|
2dc0fd29dc | ||
|
47e42a46c1 | ||
|
a7c4a00eb6 | ||
|
129ebf7b86 | ||
|
730332d680 | ||
|
b87d1c8038 | ||
|
c155e2a351 | ||
|
11dd0e9380 | ||
|
4d4c3bda75 | ||
|
7e29d9b5c3 | ||
|
8c69bb71aa | ||
|
b2b4651fc4 | ||
|
cd5bc3590f | ||
|
e42b46c722 | ||
|
580b97fa0f | ||
|
0221fc357b | ||
|
523960bc9f | ||
|
3779d749cd | ||
|
b610c3214d | ||
|
c3d22e680f | ||
|
af6c6b96b9 | ||
|
45ff34f6c3 | ||
|
47f3a6d684 | ||
|
2cbbd4dc9c | ||
|
e4cc73098a | ||
|
10d2571d89 | ||
|
17a5e5b6e0 | ||
|
340582f84c | ||
|
3763ed46c4 | ||
|
7fde7d5005 | ||
|
0b7e9d73c9 | ||
|
c96e5b6667 | ||
|
4822807c4d | ||
|
da8475908e | ||
|
4224c60e9b | ||
|
59e4f2d594 | ||
|
81106fa647 | ||
|
33fcda7c32 | ||
|
555e4ccc51 | ||
|
685a91bfe6 | ||
|
22fc38850c | ||
|
4ed77cabc6 | ||
|
d1532aa466 | ||
|
1336c02079 | ||
|
e0adecf30c | ||
|
402bd3580e | ||
|
9c0b6cdaeb | ||
|
75df1093be | ||
|
4a0bcde7a3 | ||
|
b93842b10c | ||
|
6dee974b74 | ||
|
e3a982dbce | ||
|
4c44940ec1 | ||
|
8fa4345b6f | ||
|
cfba4c17b6 | ||
|
d3f99d349d | ||
|
bd0a95c098 | ||
|
7bfb6c0132 | ||
|
4564de9f9d | ||
|
f75671354c | ||
|
2cd0a3189e | ||
|
eac460d4d9 | ||
|
fb3de73d1c | ||
|
06f93a032b | ||
|
c4745680e0 | ||
|
11fd29a2d1 | ||
|
d90369b67c | ||
|
ec19a1a12c | ||
|
481bd4e4b9 | ||
|
a20791e0c3 | ||
|
537737da32 | ||
|
fb9cfeb81e | ||
|
bfba027446 | ||
|
83f53cb5cb | ||
|
4252747646 | ||
|
34771f6850 | ||
|
24ee523cc8 | ||
|
e1e563cc28 | ||
|
f25d7eebb8 | ||
|
e0f7fe6526 | ||
|
90bd33f466 | ||
|
91c06041f8 | ||
|
4b5165dc42 | ||
|
208ae11683 | ||
|
1234802f51 | ||
|
e176567ad0 | ||
|
e38ae96227 | ||
|
bbaf0ebd8c | ||
|
f14e1de071 | ||
|
e629e9a2e1 | ||
|
98b49ac2b6 | ||
|
08e789d993 | ||
|
b23aa4629c | ||
|
dba6338968 | ||
|
7c9c873b13 | ||
|
f72b344981 | ||
|
35e7ee5a08 | ||
|
056ce5b69f | ||
|
cd3f834a27 | ||
|
f921e48648 | ||
|
111a79f082 | ||
|
574e6f8c7b | ||
|
60f25bf19a | ||
|
698ae875ce | ||
|
cecb17df03 | ||
|
12fe5ce637 | ||
|
73e42f8101 | ||
|
34bec01c9b | ||
|
8f9734d797 | ||
|
b8848348d6 | ||
|
398e93e215 | ||
|
21099f2240 | ||
|
9997a32629 | ||
|
0cabc64584 | ||
|
66f14ee91c | ||
|
5333751e45 | ||
|
742a067171 | ||
|
2aafcd5df5 | ||
|
f05251bd01 | ||
|
b4c76c94ad | ||
|
4d4b040b81 | ||
|
9909454761 | ||
|
571567a4ec | ||
|
0e8949ff71 | ||
|
c8b31637ef | ||
|
b14889e170 | ||
|
0b3e4204a5 | ||
|
18f21e26d5 | ||
|
a9804dd550 | ||
|
7096ab74e8 | ||
|
236eedb555 | ||
|
d48bacc8e3 | ||
|
ca27156605 | ||
|
10dafde32d | ||
|
1fe011020b | ||
|
7642d72919 | ||
|
d31eaf79ec | ||
|
dd30aea7a5 | ||
|
81218906c6 | ||
|
a3ca887a37 | ||
|
0ce90d6b34 | ||
|
42ad18bc84 | ||
|
80e8750c8a | ||
|
d3bb19800c | ||
|
6091bd46a3 | ||
|
9d25cc6c0a | ||
|
41bccb265a | ||
|
2851525e15 | ||
|
1e011e7011 | ||
|
d4af2cd1a6 | ||
|
dbe95641c0 | ||
|
30c9c55358 | ||
|
46c39b28d6 | ||
|
cd3fe3e73c | ||
|
ea6649b365 | ||
|
9b3bb2de2b | ||
|
90f10f2c4a | ||
|
3214c498d1 | ||
|
06fbac67a1 | ||
|
896963dd3c | ||
|
b98cd98c4c | ||
|
46d1a31441 | ||
|
95eb44a87f | ||
|
b8af0c7490 | ||
|
e38b88f9f7 | ||
|
896642b374 | ||
|
bf5272e83d | ||
|
da31f80ba5 | ||
|
d3d0437bce | ||
|
ecff5fac82 | ||
|
3882cf2bc8 | ||
|
b2fc8bbb0c | ||
|
df80a7f6ef | ||
|
3d8f1b3b08 | ||
|
fbe2024342 | ||
|
69314f3cde | ||
|
7cc9db0d90 | ||
|
d19c16c8d3 | ||
|
1289003da1 | ||
|
b2a0067a57 | ||
|
2e93c012bf | ||
|
dc92a88584 | ||
|
bfe4332ac5 | ||
|
389a6b9906 | ||
|
5253a464ab | ||
|
f950ce98ab | ||
|
03e7e209da | ||
|
69b1a4ea77 | ||
|
58bcedde98 | ||
|
f3b6b4869d | ||
|
c6f6f2c1f4 | ||
|
e824a2587c | ||
|
c8c330d481 | ||
|
ac0cfeabb9 | ||
|
1edfb16143 | ||
|
53c1648738 | ||
|
f780853d8f | ||
|
e07f047fad | ||
|
b87ac43952 | ||
|
e0247f8f92 | ||
|
7ed5122636 | ||
|
e63a6e0c85 | ||
|
d3b90506f5 | ||
|
b9bb017edf | ||
|
6ec63ccc9b | ||
|
70a8d632fb | ||
|
14c8d99547 | ||
|
7c4409f93d | ||
|
8914567e58 | ||
|
83c02a64b9 | ||
|
10aee24056 | ||
|
3be9a22a91 | ||
|
8740357eb2 | ||
|
150dd0c4cd | ||
|
593f9b688c | ||
|
e077bb5a18 | ||
|
969502cd7b | ||
|
2909cae817 |
139 changed files with 12506 additions and 4423 deletions
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
custom: https://github.com/Kozea/Radicale/wiki/Donations
|
2
.github/workflows/generate-documentation.yml
vendored
2
.github/workflows/generate-documentation.yml
vendored
|
@ -8,7 +8,7 @@ jobs:
|
|||
generate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: gh-pages
|
||||
- name: Run generator
|
||||
|
|
18
.github/workflows/pypi-publish.yml
vendored
18
.github/workflows/pypi-publish.yml
vendored
|
@ -5,19 +5,17 @@ on:
|
|||
|
||||
jobs:
|
||||
publish:
|
||||
permissions:
|
||||
id-token: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v1
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.x
|
||||
- name: Install dependencies
|
||||
run: python -m pip install wheel
|
||||
- name: Install Build dependencies
|
||||
run: pip install build
|
||||
- name: Build
|
||||
run: python setup.py sdist bdist_wheel
|
||||
run: python -m build --sdist --wheel
|
||||
- name: Publish to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@master
|
||||
with:
|
||||
user: __token__
|
||||
password: ${{ secrets.pypi_password }}
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
|
|
66
.github/workflows/test.yml
vendored
66
.github/workflows/test.yml
vendored
|
@ -3,52 +3,56 @@ on: [push, pull_request]
|
|||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
python-version: [3.5, 3.6, 3.7, 3.8, pypy3]
|
||||
python-version: ['3.9', '3.10', '3.11', '3.12.3', '3.13.0', pypy-3.9]
|
||||
exclude:
|
||||
- os: windows-latest
|
||||
python-version: pypy-3.9
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v1
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install from source
|
||||
run: python -m pip install --editable .[test,bcrypt]
|
||||
- name: Run tests
|
||||
run: python setup.py test
|
||||
- name: Install Test dependencies
|
||||
run: pip install tox
|
||||
- name: Test
|
||||
run: tox -e py
|
||||
- name: Install Coveralls
|
||||
if: github.event_name == 'push'
|
||||
run: pip install coveralls
|
||||
- name: Upload coverage to Coveralls
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
|
||||
if: github.event_name == 'push'
|
||||
env:
|
||||
COVERALLS_PARALLEL: true
|
||||
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
|
||||
run: |
|
||||
python -m pip install coveralls
|
||||
python -m coveralls
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: coveralls --service=github
|
||||
|
||||
coveralls-finish:
|
||||
needs: test
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
|
||||
if: github.event_name == 'push'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v1
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.x
|
||||
- name: Install Coveralls
|
||||
run: python -m pip install coveralls
|
||||
- name: Call Coveralls parallel builds webhook
|
||||
shell: python
|
||||
run: pip install coveralls
|
||||
- name: Finish Coveralls parallel builds
|
||||
env:
|
||||
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
|
||||
run: |
|
||||
import json, os, sys
|
||||
from urllib.request import Request, urlopen
|
||||
from coveralls import Coveralls
|
||||
_, job, _ = Coveralls.load_config_from_github()
|
||||
data = json.dumps({'repo_token': os.environ.get('COVERALLS_REPO_TOKEN', ''),
|
||||
'payload': {'status': 'done', 'build_num': job}}).encode()
|
||||
headers = {'Content-type': 'application/json'}
|
||||
with urlopen(Request('https://coveralls.io/webhook', data, headers)) as f:
|
||||
sys.stderr.buffer.write(f.read() + b'\n')
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: coveralls --service=github --finish
|
||||
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
- name: Install tox
|
||||
run: pip install tox
|
||||
- name: Lint
|
||||
run: tox -e flake8,mypy,isort
|
||||
|
|
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -15,6 +15,7 @@ coverage.xml
|
|||
.coverage
|
||||
.coverage.*
|
||||
.eggs
|
||||
.mypy_cache
|
||||
.project
|
||||
.pydevproject
|
||||
.settings
|
||||
|
|
4
.mdl.style
Normal file
4
.mdl.style
Normal file
|
@ -0,0 +1,4 @@
|
|||
all
|
||||
rule 'MD026', :punctuation => '.,;:!'
|
||||
exclude_rule 'MD001'
|
||||
exclude_rule 'MD024'
|
1
.mdlrc
Normal file
1
.mdlrc
Normal file
|
@ -0,0 +1 @@
|
|||
style File.join(File.dirname(__FILE__), '.mdl.style')
|
692
CHANGELOG.md
Normal file
692
CHANGELOG.md
Normal file
|
@ -0,0 +1,692 @@
|
|||
# Changelog
|
||||
|
||||
## 3.5.1.dev
|
||||
|
||||
* Fix: auth/htpasswd related to detection and use of bcrypt
|
||||
* Add: option [auth] ldap_ignore_attribute_create_modify_timestamp for support of Authentik LDAP server
|
||||
* Extend: [storage] hook supports now placeholder for "cwd" and "path" (and catches unsupported placeholders)
|
||||
* Fix: location of lock file for in case of dedicated cache folder is activated
|
||||
* Extend: log and create base folders if not existing during startup
|
||||
|
||||
## 3.5.0
|
||||
|
||||
* Add: option [auth] type oauth2 by code migration from https://gitlab.mim-libre.fr/alphabet/radicale_oauth/-/blob/dev/oauth2/
|
||||
* Fix: catch OS errors on PUT MKCOL MKCALENDAR MOVE PROPPATCH (insufficient storage, access denied, internal server error)
|
||||
* Test: skip bcrypt related tests if module is missing
|
||||
* Improve: relax mtime check on storage filesystem, change test file location to "collection-root" directory
|
||||
* Add: option [auth] type pam by code migration from v1, add new option pam_service
|
||||
* Cosmetics: extend list of used modules with their version on startup
|
||||
* Improve: WebUI
|
||||
* Add: option [server] script_name for reverse proxy base_prefix handling
|
||||
* Fix: proper base_prefix stripping if running behind reverse proxy
|
||||
* Review: Apache reverse proxy config example
|
||||
* Add: on-the-fly link activation and default content adjustment in case of bundled InfCloud (tested with 0.13.1)
|
||||
* Adjust: [auth] imap: use AUTHENTICATE PLAIN instead of LOGIN towards remote IMAP server
|
||||
* Improve: log client IP on SSL error and SSL protocol+cipher if successful
|
||||
* Improve: catch htpasswd hash verification errors
|
||||
* Improve: add support for more bcrypt algos on autodetection, extend logging for autodetection fallback to PLAIN in case of hash length is not matching
|
||||
* Add: warning in case of started standalone and not listen on loopback interface but trusting external authentication
|
||||
* Adjust: Change default [auth] type from "none" to "denyall" for secure-by-default
|
||||
|
||||
## 3.4.1
|
||||
* Add: option [auth] dovecot_connection_type / dovecot_host / dovecot_port
|
||||
* Add: option [auth] type imap by code migration from https://github.com/Unrud/RadicaleIMAP/
|
||||
|
||||
## 3.4.0
|
||||
* Add: option [auth] cache_logins/cache_successful_logins_expiry/cache_failed_logins for caching logins
|
||||
* Improve: [auth] log used hash method and result on debug for htpasswd authentication
|
||||
* Improve: [auth] htpasswd file now read and verified on start
|
||||
* Add: option [auth] htpasswd_cache to automatic re-read triggered on change (mtime or size) instead of reading on each request
|
||||
* Improve: [auth] htpasswd: module 'bcrypt' is no longer mandatory in case digest method not used in file
|
||||
* Improve: [auth] successful/failed login logs now type and whether result was taken from cache
|
||||
* Improve: [auth] constant execution time for failed logins independent of external backend or by htpasswd used digest method
|
||||
* Drop: support for Python 3.8
|
||||
* Add: option [auth] ldap_user_attribute
|
||||
* Add: option [auth] ldap_groups_attribute as a more flexible replacement of removed ldap_load_groups
|
||||
|
||||
## 3.3.3
|
||||
* Add: display mtime_ns precision of storage folder with conditional warning if too low
|
||||
* Improve: disable fsync during storage verification
|
||||
* Improve: suppress duplicate log lines on startup
|
||||
* Contrib: logwatch config and script
|
||||
* Improve: log precondition result on PUT request
|
||||
|
||||
## 3.3.2
|
||||
* Fix: debug logging in rights/from_file
|
||||
* Add: option [storage] use_cache_subfolder_for_item for storing 'item' cache outside collection-root
|
||||
* Fix: ignore empty RRULESET in item
|
||||
* Add: option [storage] filesystem_cache_folder for defining location of cache outside collection-root
|
||||
* Add: option [storage] use_cache_subfolder_for_history for storing 'history' cache outside collection-root
|
||||
* Add: option [storage] use_cache_subfolder_for_synctoken for storing 'sync-token' cache outside collection-root
|
||||
* Add: option [storage] folder_umask for configuration of umask (overwrite system-default)
|
||||
* Fix: also remove 'item' from cache on delete
|
||||
* Improve: avoid automatically invalid cache on upgrade in case no change on cache structure
|
||||
* Improve: log important module versions on startup
|
||||
* Improve: auth.ldap config shown on startup, terminate in case no password is supplied for bind user
|
||||
* Add: option [auth] uc_username for uppercase conversion (similar to existing lc_username)
|
||||
* Add: option [logging] storage_cache_action_on_debug for conditional logging
|
||||
* Fix: set PRODID on collection upload (instead of vobject is inserting default one)
|
||||
* Add: option [storage] use_mtime_and_size_for_item_cache for changing cache lookup from SHA256 to mtime_ns + size
|
||||
* Fix: buggy cache file content creation on collection upload
|
||||
|
||||
## 3.3.1
|
||||
|
||||
* Add: option [auth] type=dovecot
|
||||
* Enhancement: log content in case of multiple main components error
|
||||
* Fix: expand does not take timezones into account
|
||||
* Fix: expand does not support overridden recurring events
|
||||
* Fix: expand does not honor start and end times
|
||||
* Add: option [server] protocol + ciphersuite for optional restrictions on SSL socket
|
||||
* Enhancement: [storage] hook documentation, logging, error behavior (no longer throwing an exception)
|
||||
|
||||
## 3.3.0
|
||||
|
||||
* Adjustment: option [auth] htpasswd_encryption change default from "md5" to "autodetect"
|
||||
* Add: option [auth] type=ldap with (group) rights management via LDAP/LDAPS
|
||||
* Enhancement: permit_delete_collection can be now controlled also per collection by rights 'D' or 'd'
|
||||
* Add: option [rights] permit_overwrite_collection (default=True) which can be also controlled per collection by rights 'O' or 'o'
|
||||
* Fix: only expand VEVENT on REPORT request containing 'expand'
|
||||
* Adjustment: switch from setup.py to pyproject.toml (but keep files for legacy packaging)
|
||||
* Adjustment: 'rights' file is now read only during startup
|
||||
* Cleanup: Python 3.7 leftovers
|
||||
|
||||
## 3.2.3
|
||||
* Add: support for Python 3.13
|
||||
* Fix: Using icalendar's tzinfo on created datetime to fix issue with icalendar
|
||||
* Fix: typos in code
|
||||
* Enhancement: Added free-busy report
|
||||
* Enhancement: Added `max_freebusy_occurrences` setting to avoid potential DOS on reports
|
||||
* Enhancement: remove unexpected control codes from uploaded items
|
||||
* Enhancement: add 'strip_domain' setting for username handling
|
||||
* Enhancement: add option to toggle debug log of rights rules which don't match
|
||||
* Drop: remove unused requirement "typeguard"
|
||||
* Improve: Refactored some date parsing code
|
||||
|
||||
## 3.2.2
|
||||
* Enhancement: add support for auth.type=denyall (will be default for security reasons in upcoming releases)
|
||||
* Enhancement: display warning in case only default config is active
|
||||
* Enhancement: display warning in case no user authentication is active
|
||||
* Enhancement: add option to skip broken item to avoid triggering exception (default: enabled)
|
||||
* Enhancement: add support for predefined collections for new users
|
||||
* Enhancement: add options to enable several parts in debug log like backtrace, request_header, request_content, response_content (default: disabled)
|
||||
* Enhancement: rights/from_file: display resulting permission of a match in debug log
|
||||
* Enhancement: add Apache config file example (see contrib directory)
|
||||
* Fix: "verify-collection" skips non-collection directories, logging improved
|
||||
|
||||
## 3.2.1
|
||||
|
||||
* Enhancement: add option for logging bad PUT request content
|
||||
* Enhancement: extend logging with step where bad PUT request failed
|
||||
* Fix: support for recurrence "full day"
|
||||
* Fix: list of web_files related to HTML pages
|
||||
* Test: update/adjustments for workflows (pytest>=7, typeguard<4.3)
|
||||
|
||||
## 3.2.0
|
||||
|
||||
* Enhancement: add hook support for event changes+deletion hooks (initial support: "rabbitmq")
|
||||
* Dependency: pika >= 1.1.0
|
||||
* Enhancement: add support for webcal subscriptions
|
||||
* Enhancement: major update of WebUI (design+features)
|
||||
* Adjust: change default loglevel to "info"
|
||||
* Enhancement: support "expand-property" on REPORT request
|
||||
* Drop: support for Python 3.7 (EOSL, can't be tested anymore)
|
||||
* Fix: allow quoted-printable encoding for vObjects
|
||||
|
||||
## 3.1.9
|
||||
|
||||
* Add: support for Python 3.11 + 3.12
|
||||
* Drop: support for Python 3.6
|
||||
* Fix: MOVE in case listen on non-standard ports or behind reverse proxy
|
||||
* Fix: stricter requirements of Python 3.11
|
||||
* Fix: HTML pages
|
||||
* Fix: Main Component is missing when only recurrence id exists
|
||||
* Fix: passlib don't support bcrypt>=4.1
|
||||
* Fix: web login now proper encodes passwords containing %XX (hexdigits)
|
||||
* Enhancement: user-selectable log formats
|
||||
* Enhancement: autodetect logging to systemd journal
|
||||
* Enhancement: test code
|
||||
* Enhancement: option for global permit to delete collection
|
||||
* Enhancement: auth type 'htpasswd' supports now 'htpasswd_encryption' sha256/sha512 and "autodetect" for smooth transition
|
||||
* Improve: Dockerfiles
|
||||
* Improve: server socket listen code + address format in log
|
||||
* Update: documentations + examples
|
||||
* Dependency: limit typeguard version < 3
|
||||
* General: code cosmetics
|
||||
|
||||
## 3.1.8
|
||||
|
||||
* Fix setuptools requirement if installing wheel
|
||||
* Tests: Switch from `python setup.py test` to `tox`
|
||||
* Small changes to build system configuration and tests
|
||||
|
||||
## 3.1.7
|
||||
|
||||
* Fix random href fallback
|
||||
|
||||
## 3.1.6
|
||||
|
||||
* Ignore `Not a directory` error for optional config paths
|
||||
* Fix upload of whole address book/calendar with UIDs that collide on
|
||||
case-insensitive filesystem
|
||||
* Remove runtime dependency on setuptools for Python>=3.9
|
||||
* Windows: Block ADS paths
|
||||
|
||||
## 3.1.5
|
||||
|
||||
* Ignore configuration file if access is denied
|
||||
* Use F_FULLFSYNC with PyPy on MacOS
|
||||
* Fallback if F_FULLFSYNC is not supported by the filesystem
|
||||
|
||||
## 3.1.4
|
||||
|
||||
* Fallback if RENAME_EXCHANGE is not supported by the filesystem
|
||||
* Assume POSIX compatibility if `sys.platform` is not `win32`
|
||||
|
||||
## 3.1.3
|
||||
|
||||
* Redirect '…/.well-known/caldav' and '…/.well-known/carddav' to base prefix
|
||||
* Warning instead of error when base prefix ends with '/'
|
||||
|
||||
## 3.1.2
|
||||
|
||||
* Verify that base prefix starts with '/' but doesn't end with '/'
|
||||
* Improve base prefix log message
|
||||
* Never send body for HEAD requests (again)
|
||||
|
||||
## 3.1.1
|
||||
|
||||
* Workaround for contact photo bug in InfCloud
|
||||
* Redirect GET and HEAD requests under `/.web` to sanitized path
|
||||
* Set `Content-Length` header for HEAD requests
|
||||
* Never send body for HEAD requests
|
||||
* Improve error messages for `from_file` rights backend
|
||||
* Don't sanitize WSGI script name
|
||||
|
||||
## 3.1.0
|
||||
|
||||
* Single `<D:propstat>` element in PROPPATCH response
|
||||
* Allow multiple `<D:set>` and `<D:remove>` elements
|
||||
* Improve log messages
|
||||
* Fix date filter
|
||||
* Improve sanitization of collection properties
|
||||
* Cancel mkcalendar request on error
|
||||
* Use **renameat2** on Linux for atomic overwriting of collections
|
||||
* Command Line Parser
|
||||
* Disallow abbreviated arguments
|
||||
* Support backend specific options and HTTP headers
|
||||
* Optional argument for boolean options
|
||||
* Load no config file for `--config` without argument
|
||||
* Allow float for server->timeout setting
|
||||
* Fix **is-not-defined** filter in **addressbook-query** report
|
||||
* Add python type hints
|
||||
* Add **multifilesystem_nolock** storage
|
||||
* Add support for Python 3.9 and 3.10
|
||||
* Drop support for Python 3.5
|
||||
* Fix compatibility with Evolution (Exceptions from recurrence rules)
|
||||
|
||||
## 3.0.6
|
||||
|
||||
* Allow web plugins to handle POST requests
|
||||
|
||||
## 3.0.5
|
||||
|
||||
* Start storage hook in own process group
|
||||
* Kill storage hook on error or exit
|
||||
* Try to kill child processes of storage hook
|
||||
* Internal Server: Exit immediately when signal is received
|
||||
(do not wait for clients or storage hook to finish)
|
||||
|
||||
## 3.0.4
|
||||
|
||||
* Fix internal server on FreeBSD
|
||||
|
||||
## 3.0.3
|
||||
|
||||
* Fix internal server on OpenBSD
|
||||
|
||||
## 3.0.2
|
||||
|
||||
* Use 403 response for supported-report and valid-sync-token errors
|
||||
* Internal server: Handle missing IPv6 support
|
||||
|
||||
## 3.0.1
|
||||
|
||||
* Fix XML error messages
|
||||
|
||||
## 3.0.0
|
||||
|
||||
This release is incompatible with previous releases.
|
||||
See the upgrade checklist below.
|
||||
|
||||
* Parallel write requests
|
||||
* Support PyPy
|
||||
* Protect against XML denial-of-service attacks
|
||||
* Check for duplicated UIDs in calendars/address books
|
||||
* Only add missing UIDs for uploaded whole calendars/address books
|
||||
* Switch from md5 to sha256 for UIDs and tokens
|
||||
* Code cleanup:
|
||||
* All plugin interfaces were simplified and are incompatible with
|
||||
old plugins
|
||||
* Major refactor
|
||||
* Never sanitize paths multiple times (check if they are sanitized)
|
||||
* Config
|
||||
* Multiple configuration files separated by `:` (resp. `;`
|
||||
on Windows)
|
||||
* Optional configuration files by prepending file path with `?`
|
||||
* Check validity of every configuration file and command line
|
||||
arguments separately
|
||||
* Report the source of invalid configuration parameters in
|
||||
error messages
|
||||
* Code cleanup:
|
||||
* Store configuration as parsed values
|
||||
* Use Schema that describes configuration and allow plugins to apply
|
||||
their own schemas
|
||||
* Mark internal settings with `_`
|
||||
* Internal server
|
||||
* Bind to IPv4 and IPv6 address, when both are available for hostname
|
||||
* Set default address to `localhost:5232`
|
||||
* Remove settings for SSL ciphers and protocol versions (enforce safe
|
||||
defaults instead)
|
||||
* Remove settings for file locking because they are of little use
|
||||
* Remove daemonization (should be handled by service managers)
|
||||
* Logging
|
||||
* Replace complex Python logger configuration with simple
|
||||
`logging.level` setting
|
||||
* Write PID and `threadName` instead of cryptic id's in log messages
|
||||
* Use `wsgi.errors` for logging (as required by the WSGI spec)
|
||||
* Code cleanup:
|
||||
* Don't pass logger object around (use `logging.getLogger()`
|
||||
instead)
|
||||
* Auth
|
||||
* Use `md5` as default for `htpasswd_encryption` setting
|
||||
* Move setting `realm` from section `server` to `auth`
|
||||
* Rights
|
||||
* Use permissions `RW` for non-leaf collections and `rw` for
|
||||
address books/calendars
|
||||
* New permission `i` that only allows access with HTTP method GET
|
||||
(CalDAV/CardDAV is susceptible to expensive search requests)
|
||||
* Web
|
||||
* Add upload dialog for calendars/address books from file
|
||||
* Show startup loading message
|
||||
* Show warning if JavaScript is disabled
|
||||
* Pass HTML Validator
|
||||
* Storage
|
||||
* Check for missing UIDs in items
|
||||
* Check for child collections in address books and calendars
|
||||
* Code cleanup:
|
||||
* Split BaseCollection in BaseStorage and BaseCollection
|
||||
|
||||
## Upgrade checklist
|
||||
|
||||
* Config
|
||||
* Some settings were removed
|
||||
* The default of `auth.htpasswd_encryption` changed to `md5`
|
||||
* The setting `server.realm` moved to `auth.realm`
|
||||
* The setting `logging.debug` was replaced by `logging.level`
|
||||
* The format of the `rights.file` configuration file changed:
|
||||
* Permission `r` replaced by `Rr`
|
||||
* Permission `w` replaced by `Ww`
|
||||
* New permission `i` added as subset of `r`
|
||||
* Replaced variable `%(login)s` by `{user}`
|
||||
* Removed variable `%(path)s`
|
||||
* `{` must be escaped as `{{` and `}` as `}}` in regexes
|
||||
* File system storage
|
||||
* The storage format is compatible with Radicale 2.x.x
|
||||
* Run `radicale --verify-storage` to check for errors
|
||||
* Custom plugins:
|
||||
* `auth` and `web` plugins require minor adjustments
|
||||
* `rights` plugins must be adapted to the new permission model
|
||||
* `storage` plugins require major changes
|
||||
|
||||
## 2.1.10 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Update required versions for dependencies
|
||||
* Get `RADICALE_CONFIG` from WSGI environ
|
||||
* Improve HTTP status codes
|
||||
* Fix race condition in storage lock creation
|
||||
* Raise default limits for content length and timeout
|
||||
* Log output from hook
|
||||
|
||||
## 2.1.9 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Specify versions for dependencies
|
||||
* Move WSGI initialization into module
|
||||
* Check if `REPORT` method is actually supported
|
||||
* Include `rights` file in source distribution
|
||||
* Specify `md5` and `bcrypt` as extras
|
||||
* Improve logging messages
|
||||
* Windows: Fix crash when item path is a directory
|
||||
|
||||
## 2.1.8 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Flush files before fsync'ing
|
||||
|
||||
## 2.1.7 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Don't print warning when cache format changes
|
||||
* Add documentation for `BaseAuth`
|
||||
* Add `is_authenticated2(login, user, password)` to `BaseAuth`
|
||||
* Fix names of custom properties in PROPFIND requests with
|
||||
`D:propname` or `D:allprop`
|
||||
* Return all properties in PROPFIND requests with `D:propname` or
|
||||
`D:allprop`
|
||||
* Allow `D:displayname` property on all collections
|
||||
* Answer with `D:unauthenticated` for `D:current-user-principal` property
|
||||
when not logged in
|
||||
* Remove non-existing `ICAL:calendar-color` and `C:calendar-timezone`
|
||||
properties from PROPFIND requests with `D:propname` or `D:allprop`
|
||||
* Add `D:owner` property to calendar and address book objects
|
||||
* Remove `D:getetag` and `D:getlastmodified` properties from regular
|
||||
collections
|
||||
|
||||
## 2.1.6 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Fix content-type of VLIST
|
||||
* Specify correct COMPONENT in content-type of VCALENDAR
|
||||
* Cache COMPONENT of calendar objects (improves speed with some clients)
|
||||
* Stricter parsing of filters
|
||||
* Improve support for CardDAV filter
|
||||
* Fix some smaller bugs in CalDAV filter
|
||||
* Add X-WR-CALNAME and X-WR-CALDESC to calendars downloaded via HTTP/WebDAV
|
||||
* Use X-WR-CALNAME and X-WR-CALDESC from calendars published via WebDAV
|
||||
|
||||
## 2.1.5 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Add `--verify-storage` command-line argument
|
||||
* Allow comments in the htpasswd file
|
||||
* Don't strip whitespaces from user names and passwords in the htpasswd file
|
||||
* Remove cookies from logging output
|
||||
* Allow uploads of whole collections with many components
|
||||
* Show warning message if server.timeout is used with Python < 3.5.2
|
||||
|
||||
## 2.1.4 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Fix incorrect time range matching and calculation for some edge-cases with
|
||||
rescheduled recurrences
|
||||
* Fix owner property
|
||||
|
||||
## 2.1.3 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Enable timeout for SSL handshakes and move them out of the main thread
|
||||
* Create cache entries during upload of items
|
||||
* Stop built-in server on Windows when Ctrl+C is pressed
|
||||
* Prevent slow down when multiple requests hit a collection during cache warm-up
|
||||
|
||||
## 2.1.2 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Remove workarounds for bugs in VObject < 0.9.5
|
||||
* Error checking of collection tags and associated components
|
||||
* Improve error checking of uploaded collections and components
|
||||
* Don't delete empty collection properties implicitly
|
||||
* Improve logging of VObject serialization
|
||||
|
||||
## 2.1.1 - Wild Radish Again
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Add missing UIDs instead of failing
|
||||
* Improve error checking of calendar and address book objects
|
||||
* Fix upload of whole address books
|
||||
|
||||
## 2.1.0 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Built-in web interface for creating and managing address books and calendars
|
||||
* can be extended with web plugins
|
||||
* Much faster storage backend
|
||||
* Significant reduction in memory usage
|
||||
* Improved logging
|
||||
* Include paths (of invalid items / requests) in log messages
|
||||
* Include configuration values causing problems in log messages
|
||||
* Log warning message for invalid requests by clients
|
||||
* Log error message for invalid files in the storage backend
|
||||
* No stack traces unless debugging is enabled
|
||||
* Time range filter also regards overwritten recurrences
|
||||
* Items that couldn't be filtered because of bugs in VObject are always
|
||||
returned (and a warning message is logged)
|
||||
* Basic error checking of configuration files
|
||||
* File system locking isn't disabled implicitly anymore, instead a new
|
||||
configuration option gets introduced
|
||||
* The permissions of the lock file are not changed anymore
|
||||
* Support for sync-token
|
||||
* Support for client-side SSL certificates
|
||||
* Rights plugins can decide if access to an item is granted explicitly
|
||||
* Respond with 403 instead of 404 for principal collections of non-existing
|
||||
users when `owner_only` plugin is used (information leakage)
|
||||
* Authentication plugins can provide the login and password from the
|
||||
environment
|
||||
* new `remote_user` plugin, that gets the login from the `REMOTE_USER`
|
||||
environment variable (for WSGI server)
|
||||
* new `http_x_remote_user` plugin, that gets the login from the
|
||||
`X-Remote-User` HTTP header (for reverse proxies)
|
||||
|
||||
## 2.0.0 - Little Big Radish
|
||||
|
||||
This feature is not compatible with the 1.x.x versions. Follow our
|
||||
[migration guide](https://radicale.org/2.1.html#documentation/migration-from-1xx-to-2xx)
|
||||
if you want to switch from 1.x.x to 2.0.0.
|
||||
|
||||
* Support Python 3.3+ only, Python 2 is not supported anymore
|
||||
* Keep only one simple filesystem-based storage system
|
||||
* Remove built-in Git support
|
||||
* Remove built-in authentication modules
|
||||
* Keep the WSGI interface, use Python HTTP server by default
|
||||
* Use a real iCal parser, rely on the "vobject" external module
|
||||
* Add a solid calendar discovery
|
||||
* Respect the difference between "files" and "folders", don't rely on slashes
|
||||
* Remove the calendar creation with GET requests
|
||||
* Be stateless
|
||||
* Use a file locker
|
||||
* Add threading
|
||||
* Get atomic writes
|
||||
* Support new filters
|
||||
* Support read-only permissions
|
||||
* Allow External plugins for authentication, rights management, storage and
|
||||
version control
|
||||
|
||||
## 1.1.4 - Fifth Law of Nature
|
||||
|
||||
* Use `shutil.move` for `--export-storage`
|
||||
|
||||
## 1.1.3 - Fourth Law of Nature
|
||||
|
||||
* Add a `--export-storage=FOLDER` command-line argument (by Unrud, see #606)
|
||||
|
||||
## 1.1.2 - Third Law of Nature
|
||||
|
||||
* **Security fix**: Add a random timer to avoid timing oracles and simple
|
||||
bruteforce attacks when using the htpasswd authentication method.
|
||||
* Various minor fixes.
|
||||
|
||||
## 1.1.1 - Second Law of Nature
|
||||
|
||||
* Fix the owner_write rights rule
|
||||
|
||||
## 1.1 - Law of Nature
|
||||
|
||||
One feature in this release is **not backward compatible**:
|
||||
|
||||
* Use the first matching section for rights (inspired from daald)
|
||||
|
||||
Now, the first section matching the path and current user in your custom rights
|
||||
file is used. In the previous versions, the most permissive rights of all the
|
||||
matching sections were applied. This new behaviour gives a simple way to make
|
||||
specific rules at the top of the file independent from the generic ones.
|
||||
|
||||
Many **improvements in this release are related to security**, you should
|
||||
upgrade Radicale as soon as possible:
|
||||
|
||||
* Improve the regex used for well-known URIs (by Unrud)
|
||||
* Prevent regex injection in rights management (by Unrud)
|
||||
* Prevent crafted HTTP request from calling arbitrary functions (by Unrud)
|
||||
* Improve URI sanitation and conversion to filesystem path (by Unrud)
|
||||
* Decouple the daemon from its parent environment (by Unrud)
|
||||
|
||||
Some bugs have been fixed and little enhancements have been added:
|
||||
|
||||
* Assign new items to correct key (by Unrud)
|
||||
* Avoid race condition in PID file creation (by Unrud)
|
||||
* Improve the docker version (by cdpb)
|
||||
* Encode message and committer for git commits
|
||||
* Test with Python 3.5
|
||||
|
||||
## 1.0.1 - Sunflower Again
|
||||
|
||||
* Update the version because of a **stupid** "feature"™ of PyPI
|
||||
|
||||
## 1.0 - Sunflower
|
||||
|
||||
* Enhanced performances (by Mathieu Dupuy)
|
||||
* Add MD5-APR1 and BCRYPT for htpasswd-based authentication (by Jan-Philip Gehrcke)
|
||||
* Use PAM service (by Stephen Paul Weber)
|
||||
* Don't discard PROPPATCH on empty collections (by Markus Unterwaditzer)
|
||||
* Write the path of the collection in the git message (by Matthew Monaco)
|
||||
* Tests launched on Travis
|
||||
|
||||
## 0.10 - Lovely Endless Grass
|
||||
|
||||
* Support well-known URLs (by Mathieu Dupuy)
|
||||
* Fix collection discovery (by Markus Unterwaditzer)
|
||||
* Reload logger config on SIGHUP (by Élie Bouttier)
|
||||
* Remove props files when deleting a collection (by Vincent Untz)
|
||||
* Support salted SHA1 passwords (by Marc Kleine-Budde)
|
||||
* Don't spam the logs about non-SSL IMAP connections to localhost (by Giel van Schijndel)
|
||||
|
||||
## 0.9 - Rivers
|
||||
|
||||
* Custom handlers for auth, storage and rights (by Sergey Fursov)
|
||||
* 1-file-per-event storage (by Jean-Marc Martins)
|
||||
* Git support for filesystem storages (by Jean-Marc Martins)
|
||||
* DB storage working with PostgreSQL, MariaDB and SQLite (by Jean-Marc Martins)
|
||||
* Clean rights manager based on regular expressions (by Sweil)
|
||||
* Support of contacts for Apple's clients
|
||||
* Support colors (by Jochen Sprickerhof)
|
||||
* Decode URLs in XML (by Jean-Marc Martins)
|
||||
* Fix PAM authentication (by Stepan Henek)
|
||||
* Use consistent etags (by 9m66p93w)
|
||||
* Use consistent sorting order (by Daniel Danner)
|
||||
* Return 401 on unauthorized DELETE requests (by Eduard Braun)
|
||||
* Move pid file creation in child process (by Mathieu Dupuy)
|
||||
* Allow requests without base_prefix (by jheidemann)
|
||||
|
||||
## 0.8 - Rainbow
|
||||
|
||||
* New authentication and rights management modules (by Matthias Jordan)
|
||||
* Experimental database storage
|
||||
* Command-line option for custom configuration file (by Mark Adams)
|
||||
* Root URL not at the root of a domain (by Clint Adams, Fabrice Bellet, Vincent Untz)
|
||||
* Improved support for iCal, CalDAVSync, CardDAVSync, CalDavZAP and CardDavMATE
|
||||
* Empty PROPFIND requests handled (by Christoph Polcin)
|
||||
* Colon allowed in passwords
|
||||
* Configurable realm message
|
||||
|
||||
## 0.7.1 - Waterfalls
|
||||
|
||||
* Many address books fixes
|
||||
* New IMAP ACL (by Daniel Aleksandersen)
|
||||
* PAM ACL fixed (by Daniel Aleksandersen)
|
||||
* Courier ACL fixed (by Benjamin Frank)
|
||||
* Always set display name to collections (by Oskari Timperi)
|
||||
* Various DELETE responses fixed
|
||||
|
||||
## 0.7 - Eternal Sunshine
|
||||
|
||||
* Repeating events
|
||||
* Collection deletion
|
||||
* Courier and PAM authentication methods
|
||||
* CardDAV support
|
||||
* Custom LDAP filters supported
|
||||
|
||||
## 0.6.4 - Tulips
|
||||
|
||||
* Fix the installation with Python 3.1
|
||||
|
||||
## 0.6.3 - Red Roses
|
||||
|
||||
* MOVE requests fixed
|
||||
* Faster REPORT answers
|
||||
* Executable script moved into the package
|
||||
|
||||
## 0.6.2 - Seeds
|
||||
|
||||
* iPhone and iPad support fixed
|
||||
* Backslashes replaced by slashes in PROPFIND answers on Windows
|
||||
* PyPI archive set as default download URL
|
||||
|
||||
## 0.6.1 - Growing Up
|
||||
|
||||
* Example files included in the tarball
|
||||
* htpasswd support fixed
|
||||
* Redirection loop bug fixed
|
||||
* Testing message on GET requests
|
||||
|
||||
## 0.6 - Sapling
|
||||
|
||||
* WSGI support
|
||||
* IPv6 support
|
||||
* Smart, verbose and configurable logs
|
||||
* Apple iCal 4 and iPhone support (by Łukasz Langa)
|
||||
* KDE KOrganizer support
|
||||
* LDAP auth backend (by Corentin Le Bail)
|
||||
* Public and private calendars (by René Neumann)
|
||||
* PID file
|
||||
* MOVE requests management
|
||||
* Journal entries support
|
||||
* Drop Python 2.5 support
|
||||
|
||||
## 0.5 - Historical Artifacts
|
||||
|
||||
* Calendar depth
|
||||
* MacOS and Windows support
|
||||
* HEAD requests management
|
||||
* htpasswd user from calendar path
|
||||
|
||||
## 0.4 - Hot Days Back
|
||||
|
||||
* Personal calendars
|
||||
* Last-Modified HTTP header
|
||||
* `no-ssl` and `foreground` options
|
||||
* Default configuration file
|
||||
|
||||
## 0.3 - Dancing Flowers
|
||||
|
||||
* Evolution support
|
||||
* Version management
|
||||
|
||||
## 0.2 - Snowflakes
|
||||
|
||||
* Sunbird pre-1.0 support
|
||||
* SSL connection
|
||||
* Htpasswd authentication
|
||||
* Daemon mode
|
||||
* User configuration
|
||||
* Twisted dependency removed
|
||||
* Python 3 support
|
||||
* Real URLs for PUT and DELETE
|
||||
* Concurrent modification reported to users
|
||||
* Many bugs fixed (by Roger Wenham)
|
||||
|
||||
## 0.1 - Crazy Vegetables
|
||||
|
||||
* First release
|
||||
* Lightning/Sunbird 0.9 compatibility
|
||||
* Easy installer
|
---

# COPYING — GNU General Public License, version 3

GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
675
COPYING.md
Normal file
675
COPYING.md
Normal file
|
@ -0,0 +1,675 @@
|
|||
### GNU GENERAL PUBLIC LICENSE
|
||||
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc.
|
||||
<https://fsf.org/>
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed.
|
||||
|
||||
### Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom
|
||||
to share and change all versions of a program--to make sure it remains
|
||||
free software for all its users. We, the Free Software Foundation, use
|
||||
the GNU General Public License for most of our software; it applies
|
||||
also to any other work released this way by its authors. You can apply
|
||||
it to your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you
|
||||
have certain responsibilities if you distribute copies of the
|
||||
software, or if you modify it: responsibilities to respect the freedom
|
||||
of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the
|
||||
manufacturer can do so. This is fundamentally incompatible with the
|
||||
aim of protecting users' freedom to change the software. The
|
||||
systematic pattern of such abuse occurs in the area of products for
|
||||
individuals to use, which is precisely where it is most unacceptable.
|
||||
Therefore, we have designed this version of the GPL to prohibit the
|
||||
practice for those products. If such problems arise substantially in
|
||||
other domains, we stand ready to extend this provision to those
|
||||
domains in future versions of the GPL, as needed to protect the
|
||||
freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish
|
||||
to avoid the special danger that patents applied to a free program
|
||||
could make it effectively proprietary. To prevent this, the GPL
|
||||
assures that patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
### TERMS AND CONDITIONS
|
||||
|
||||
#### 0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds
|
||||
of works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of
|
||||
an exact copy. The resulting work is called a "modified version" of
|
||||
the earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user
|
||||
through a computer network, with no transfer of a copy, is not
|
||||
conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices" to
|
||||
the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
#### 1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work for
|
||||
making modifications to it. "Object code" means any non-source form of
|
||||
a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users can
|
||||
regenerate automatically from other parts of the Corresponding Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that same
|
||||
work.
|
||||
|
||||
#### 2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not convey,
|
||||
without conditions so long as your license otherwise remains in force.
|
||||
You may convey covered works to others for the sole purpose of having
|
||||
them make modifications exclusively for you, or provide you with
|
||||
facilities for running those works, provided that you comply with the
|
||||
terms of this License in conveying all material for which you do not
|
||||
control copyright. Those thus making or running the covered works for
|
||||
you must do so exclusively on your behalf, under your direction and
|
||||
control, on terms that prohibit them from making any copies of your
|
||||
copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under the
|
||||
conditions stated below. Sublicensing is not allowed; section 10 makes
|
||||
it unnecessary.
|
||||
|
||||
#### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such
|
||||
circumvention is effected by exercising rights under this License with
|
||||
respect to the covered work, and you disclaim any intention to limit
|
||||
operation or modification of the work as a means of enforcing, against
|
||||
the work's users, your or third parties' legal rights to forbid
|
||||
circumvention of technological measures.
|
||||
|
||||
#### 4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
#### 5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these
|
||||
conditions:
|
||||
|
||||
- a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
- b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under
|
||||
section 7. This requirement modifies the requirement in section 4
|
||||
to "keep intact all notices".
|
||||
- c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
- d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
#### 6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms of
|
||||
sections 4 and 5, provided that you also convey the machine-readable
|
||||
Corresponding Source under the terms of this License, in one of these
|
||||
ways:
|
||||
|
||||
- a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
- b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the Corresponding
|
||||
Source from a network server at no charge.
|
||||
- c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
- d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
- e) Convey the object code using peer-to-peer transmission,
|
||||
provided you inform other peers where the object code and
|
||||
Corresponding Source of the work are being offered to the general
|
||||
public at no charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal,
|
||||
family, or household purposes, or (2) anything designed or sold for
|
||||
incorporation into a dwelling. In determining whether a product is a
|
||||
consumer product, doubtful cases shall be resolved in favor of
|
||||
coverage. For a particular product received by a particular user,
|
||||
"normally used" refers to a typical or common use of that class of
|
||||
product, regardless of the status of the particular user or of the way
|
||||
in which the particular user actually uses, or expects or is expected
|
||||
to use, the product. A product is a consumer product regardless of
|
||||
whether the product has substantial commercial, industrial or
|
||||
non-consumer uses, unless such uses represent the only significant
|
||||
mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to
|
||||
install and execute modified versions of a covered work in that User
|
||||
Product from a modified version of its Corresponding Source. The
|
||||
information must suffice to ensure that the continued functioning of
|
||||
the modified object code is in no case prevented or interfered with
|
||||
solely because modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or
|
||||
updates for a work that has been modified or installed by the
|
||||
recipient, or for the User Product in which it has been modified or
|
||||
installed. Access to a network may be denied when the modification
|
||||
itself materially and adversely affects the operation of the network
|
||||
or violates the rules and protocols for communication across the
|
||||
network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
#### 7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders
|
||||
of that material) supplement the terms of this License with terms:
|
||||
|
||||
- a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
- b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
- c) Prohibiting misrepresentation of the origin of that material,
|
||||
or requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
- d) Limiting the use for publicity purposes of names of licensors
|
||||
or authors of the material; or
|
||||
- e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
- f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions
|
||||
of it) with contractual assumptions of liability to the recipient,
|
||||
for any liability that these contractual assumptions directly
|
||||
impose on those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions; the
|
||||
above requirements apply either way.
|
||||
|
||||
#### 8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your license
|
||||
from a particular copyright holder is reinstated (a) provisionally,
|
||||
unless and until the copyright holder explicitly and finally
|
||||
terminates your license, and (b) permanently, if the copyright holder
|
||||
fails to notify you of the violation by some reasonable means prior to
|
||||
60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
#### 9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or run
|
||||
a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
#### 10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
#### 11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims owned
|
||||
or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within the
|
||||
scope of its coverage, prohibits the exercise of, or is conditioned on
|
||||
the non-exercise of one or more of the rights that are specifically
|
||||
granted under this License. You may not convey a covered work if you
|
||||
are a party to an arrangement with a third party that is in the
|
||||
business of distributing software, under which you make payment to the
|
||||
third party based on the extent of your activity of conveying the
|
||||
work, and under which the third party grants, to any of the parties
|
||||
who would receive the covered work from you, a discriminatory patent
|
||||
license (a) in connection with copies of the covered work conveyed by
|
||||
you (or copies made from those copies), or (b) primarily for and in
|
||||
connection with specific products or compilations that contain the
|
||||
covered work, unless you entered into that arrangement, or that patent
|
||||
license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
#### 12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under
|
||||
this License and any other pertinent obligations, then as a
|
||||
consequence you may not convey it at all. For example, if you agree to
|
||||
terms that obligate you to collect a royalty for further conveying
|
||||
from those to whom you convey the Program, the only way you could
|
||||
satisfy both those terms and this License would be to refrain entirely
|
||||
from conveying the Program.
|
||||
|
||||
#### 13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
#### 14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions
|
||||
of the GNU General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in
|
||||
detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
specifies that a certain numbered version of the GNU General Public
|
||||
License "or any later version" applies to it, you have the option of
|
||||
following the terms and conditions either of that numbered version or
|
||||
of any later version published by the Free Software Foundation. If the
|
||||
Program does not specify a version number of the GNU General Public
|
||||
License, you may choose any version ever published by the Free
|
||||
Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future versions
|
||||
of the GNU General Public License can be used, that proxy's public
|
||||
statement of acceptance of a version permanently authorizes you to
|
||||
choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
#### 15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
|
||||
WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
|
||||
PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
|
||||
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
|
||||
CORRECTION.
|
||||
|
||||
#### 16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
|
||||
CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
|
||||
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
|
||||
NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
|
||||
LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
|
||||
TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
|
||||
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
#### 17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
### How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these
|
||||
terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest to
|
||||
attach them to the start of each source file to most effectively state
|
||||
the exclusion of warranty; and each file should have at least the
|
||||
"copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper
|
||||
mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the
|
||||
appropriate parts of the General Public License. Of course, your
|
||||
program's commands might be different; for a GUI interface, you would
|
||||
use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or
|
||||
school, if any, to sign a "copyright disclaimer" for the program, if
|
||||
necessary. For more information on this, and how to apply and follow
|
||||
the GNU GPL, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your
|
||||
program into proprietary programs. If your program is a subroutine
|
||||
library, you may consider it more useful to permit linking proprietary
|
||||
applications with the library. If this is what you want to do, use the
|
||||
GNU Lesser General Public License instead of this License. But first,
|
||||
please read <https://www.gnu.org/licenses/why-not-lgpl.html>.
|
1400
DOCUMENTATION.md
1400
DOCUMENTATION.md
File diff suppressed because it is too large
Load diff
55
Dockerfile
55
Dockerfile
|
@ -1,31 +1,34 @@
|
|||
FROM alpine:latest
|
||||
# This file is intended to be used apart from the containing source code tree.
|
||||
|
||||
# Version of Radicale (e.g. 3.0.0)
|
||||
FROM python:3-alpine AS builder
|
||||
|
||||
# Version of Radicale (e.g. v3)
|
||||
ARG VERSION=master
|
||||
|
||||
# Install dependencies
|
||||
RUN apk add --no-cache \
|
||||
python3 \
|
||||
python3-dev \
|
||||
build-base \
|
||||
libffi-dev \
|
||||
ca-certificates \
|
||||
openssl
|
||||
# Install Radicale
|
||||
RUN wget --quiet https://github.com/Kozea/Radicale/archive/${VERSION}.tar.gz --output-document=radicale.tar.gz && \
|
||||
tar xzf radicale.tar.gz && \
|
||||
pip3 install ./Radicale-${VERSION} && \
|
||||
rm -r radicale.tar.gz Radicale-${VERSION}
|
||||
# Remove build dependencies
|
||||
RUN apk del \
|
||||
python3-dev \
|
||||
build-base \
|
||||
libffi-dev
|
||||
# Persistent storage for data (Mount it somewhere on the host!)
|
||||
# Optional dependencies (e.g. bcrypt or ldap)
|
||||
ARG DEPENDENCIES=bcrypt
|
||||
|
||||
RUN apk add --no-cache --virtual gcc libffi-dev musl-dev \
|
||||
&& python -m venv /app/venv \
|
||||
&& /app/venv/bin/pip install --no-cache-dir "Radicale[${DEPENDENCIES}] @ https://github.com/Kozea/Radicale/archive/${VERSION}.tar.gz"
|
||||
|
||||
|
||||
FROM python:3-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN addgroup -g 1000 radicale \
|
||||
&& adduser radicale --home /var/lib/radicale --system --uid 1000 --disabled-password -G radicale \
|
||||
&& apk add --no-cache ca-certificates openssl
|
||||
|
||||
COPY --chown=radicale:radicale --from=builder /app/venv /app
|
||||
|
||||
# Persistent storage for data
|
||||
VOLUME /var/lib/radicale
|
||||
# Configuration data (Put the "config" file here!)
|
||||
VOLUME /etc/radicale
|
||||
# TCP port of Radicale (Publish it on a host interface!)
|
||||
# TCP port of Radicale
|
||||
EXPOSE 5232
|
||||
# Run Radicale (Configure it here or provide a "config" file!)
|
||||
CMD ["radicale", "--hosts", "0.0.0.0:5232"]
|
||||
# Run Radicale
|
||||
ENTRYPOINT [ "/app/bin/python", "/app/bin/radicale"]
|
||||
CMD ["--hosts", "0.0.0.0:5232,[::]:5232"]
|
||||
|
||||
USER radicale
|
||||
|
|
32
Dockerfile.dev
Normal file
32
Dockerfile.dev
Normal file
|
@ -0,0 +1,32 @@
|
|||
FROM python:3-alpine AS builder
|
||||
|
||||
# Optional dependencies (e.g. bcrypt or ldap)
|
||||
ARG DEPENDENCIES=bcrypt
|
||||
|
||||
COPY . /app
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apk add --no-cache --virtual gcc libffi-dev musl-dev \
|
||||
&& python -m venv /app/venv \
|
||||
&& /app/venv/bin/pip install --no-cache-dir .[${DEPENDENCIES}]
|
||||
|
||||
FROM python:3-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN addgroup -g 1000 radicale \
|
||||
&& adduser radicale --home /var/lib/radicale --system --uid 1000 --disabled-password -G radicale \
|
||||
&& apk add --no-cache ca-certificates openssl
|
||||
|
||||
COPY --chown=radicale:radicale --from=builder /app/venv /app
|
||||
|
||||
# Persistent storage for data
|
||||
VOLUME /var/lib/radicale
|
||||
# TCP port of Radicale
|
||||
EXPOSE 5232
|
||||
# Run Radicale
|
||||
ENTRYPOINT [ "/app/bin/python", "/app/bin/radicale"]
|
||||
CMD ["--hosts", "0.0.0.0:5232"]
|
||||
|
||||
USER radicale
|
|
@ -1,3 +1,3 @@
|
|||
include COPYING DOCUMENTATION.md NEWS.md README.md
|
||||
include CHANGELOG.md COPYING.md DOCUMENTATION.md README.md
|
||||
include config rights
|
||||
include radicale.py radicale.fcgi radicale.wsgi
|
||||
include radicale.wsgi
|
||||
|
|
463
NEWS.md
463
NEWS.md
|
@ -1,463 +0,0 @@
|
|||
# News
|
||||
|
||||
## master
|
||||
|
||||
This release is incompatible with previous releases.
|
||||
See the upgrade checklist below.
|
||||
|
||||
* Parallel write requests
|
||||
* Support PyPy
|
||||
* Protect against XML denial-of-service attacks
|
||||
* Check for duplicated UIDs in calendars/address books
|
||||
* Only add missing UIDs for uploaded whole calendars/address books
|
||||
* Switch from md5 to sha256 for UIDs and tokens
|
||||
* Code cleanup:
|
||||
* All plugin interfaces were simplified and are incompatible with
|
||||
old plugins
|
||||
* Major refactor
|
||||
* Never sanitize paths multiple times (check if they are sanitized)
|
||||
* Config
|
||||
* Multiple configuration files separated by ``:`` (resp. ``;``
|
||||
on Windows)
|
||||
* Optional configuration files by prepending file path with ``?``
|
||||
* Check validity of every configuration file and command line
|
||||
arguments separately
|
||||
* Report the source of invalid configuration parameters in
|
||||
error messages
|
||||
* Code cleanup:
|
||||
* Store configuration as parsed values
|
||||
* Use Schema that describes configuration and allow plugins to apply
|
||||
their own schemas
|
||||
* Mark internal settings with ``_``
|
||||
* Internal server
|
||||
* Bind to IPv4 and IPv6 address, when both are available for hostname
|
||||
* Set default address to ``localhost:5232``
|
||||
* Remove settings for SSL ciphers and protocol versions (enforce safe
|
||||
defaults instead)
|
||||
* Remove settings for file locking because they are of little use
|
||||
* Remove daemonization (should be handled by service managers)
|
||||
* Logging
|
||||
* Replace complex Python logger configuration with simple
|
||||
``logging.level`` setting
|
||||
* Write PID and ``threadName`` instead of cryptic id's in log messages
|
||||
* Use ``wsgi.errors`` for logging (as required by the WSGI spec)
|
||||
* Code cleanup:
|
||||
* Don't pass logger object around (use ``logging.getLogger()``
|
||||
instead)
|
||||
* Auth
|
||||
* Use ``md5`` as default for ``htpasswd_encryption`` setting
|
||||
* Move setting ``realm`` from section ``server`` to ``auth``
|
||||
* Rights
|
||||
* Use permissions ``RW`` for non-leaf collections and ``rw`` for
|
||||
address books/calendars
|
||||
* New permission ``i`` that only allows access with HTTP method GET
|
||||
(CalDAV/CardDAV is susceptible to expensive search requests)
|
||||
* Web
|
||||
* Add upload dialog for calendars/address books from file
|
||||
* Show startup loading message
|
||||
* Show warning if JavaScript is disabled
|
||||
* Pass HTML Validator
|
||||
* Storage
|
||||
* Check for missing UIDs in items
|
||||
* Check for child collections in address books and calendars
|
||||
* Code cleanup:
|
||||
* Split BaseCollection in BaseStorage and BaseCollection
|
||||
|
||||
## Upgrade checklist
|
||||
|
||||
* Config
|
||||
* Some settings were removed
|
||||
* The default of ``auth.htpasswd_encryption`` changed to ``md5``
|
||||
* The settings ``server.realm`` moved to ``auth.realm``
|
||||
* The settings ``logging.debug`` was replaced by ``logging.level``
|
||||
* The format of the ``rights.file`` configuration file changed:
|
||||
* Permission ``r`` replaced by ``Rr``
|
||||
* Permission ``w`` replaced by ``Ww``
|
||||
* New permission ``i`` added as subset of ``r``
|
||||
* Replaced variable ``%(login)s`` by ``{user}``
|
||||
* Removed variable ``%(path)s``
|
||||
* ``{`` must be escaped as ``{{`` and ``}`` as ``}}`` in regexes
|
||||
* File system storage
|
||||
* The storage format is compatible with Radicale 2.x.x
|
||||
* Run ``radicale --verify-storage`` to check for errors
|
||||
* Custom plugins:
|
||||
* ``auth`` and ``web`` plugins require minor adjustments
|
||||
* ``rights`` plugins must be adapted to the new permission model
|
||||
* ``storage`` plugins require major changes
|
||||
|
||||
## 2.1.10 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Update required versions for dependencies
|
||||
* Get ``RADICALE_CONFIG`` from WSGI environ
|
||||
* Improve HTTP status codes
|
||||
* Fix race condition in storage lock creation
|
||||
* Raise default limits for content length and timeout
|
||||
* Log output from hook
|
||||
|
||||
## 2.1.9 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Specify versions for dependencies
|
||||
* Move WSGI initialization into module
|
||||
* Check if ``REPORT`` method is actually supported
|
||||
* Include ``rights`` file in source distribution
|
||||
* Specify ``md5`` and ``bcrypt`` as extras
|
||||
* Improve logging messages
|
||||
* Windows: Fix crash when item path is a directory
|
||||
|
||||
## 2.1.8 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Flush files before fsync'ing
|
||||
|
||||
## 2.1.7 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Don't print warning when cache format changes
|
||||
* Add documentation for ``BaseAuth``
|
||||
* Add ``is_authenticated2(login, user, password)`` to ``BaseAuth``
|
||||
* Fix names of custom properties in PROPFIND requests with
|
||||
``D:propname`` or ``D:allprop``
|
||||
* Return all properties in PROPFIND requests with ``D:propname`` or
|
||||
``D:allprop``
|
||||
* Allow ``D:displayname`` property on all collections
|
||||
* Answer with ``D:unauthenticated`` for ``D:current-user-principal`` property
|
||||
when not logged in
|
||||
* Remove non-existing ``ICAL:calendar-color`` and ``C:calendar-timezone``
|
||||
properties from PROPFIND requests with ``D:propname`` or ``D:allprop``
|
||||
* Add ``D:owner`` property to calendar and address book objects
|
||||
* Remove ``D:getetag`` and ``D:getlastmodified`` properties from regular
|
||||
collections
|
||||
|
||||
|
||||
## 2.1.6 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Fix content-type of VLIST
|
||||
* Specify correct COMPONENT in content-type of VCALENDAR
|
||||
* Cache COMPONENT of calendar objects (improves speed with some clients)
|
||||
* Stricter parsing of filters
|
||||
* Improve support for CardDAV filter
|
||||
* Fix some smaller bugs in CalDAV filter
|
||||
* Add X-WR-CALNAME and X-WR-CALDESC to calendars downloaded via HTTP/WebDAV
|
||||
* Use X-WR-CALNAME and X-WR-CALDESC from calendars published via WebDAV
|
||||
|
||||
## 2.1.5 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Add ``--verify-storage`` command-line argument
|
||||
* Allow comments in the htpasswd file
|
||||
* Don't strip whitespaces from user names and passwords in the htpasswd file
|
||||
* Remove cookies from logging output
|
||||
* Allow uploads of whole collections with many components
|
||||
* Show warning message if server.timeout is used with Python < 3.5.2
|
||||
|
||||
## 2.1.4 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Fix incorrect time range matching and calculation for some edge-cases with
|
||||
rescheduled recurrences
|
||||
* Fix owner property
|
||||
|
||||
## 2.1.3 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Enable timeout for SSL handshakes and move them out of the main thread
|
||||
* Create cache entries during upload of items
|
||||
* Stop built-in server on Windows when Ctrl+C is pressed
|
||||
* Prevent slow down when multiple requests hit a collection during cache warm-up
|
||||
|
||||
## 2.1.2 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Remove workarounds for bugs in VObject < 0.9.5
|
||||
* Error checking of collection tags and associated components
|
||||
* Improve error checking of uploaded collections and components
|
||||
* Don't delete empty collection properties implicitly
|
||||
* Improve logging of VObject serialization
|
||||
|
||||
## 2.1.1 - Wild Radish Again
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Add missing UIDs instead of failing
|
||||
* Improve error checking of calendar and address book objects
|
||||
* Fix upload of whole address books
|
||||
|
||||
## 2.1.0 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Built-in web interface for creating and managing address books and calendars
|
||||
* can be extended with web plugins
|
||||
* Much faster storage backend
|
||||
* Significant reduction in memory usage
|
||||
* Improved logging
|
||||
* Include paths (of invalid items / requests) in log messages
|
||||
* Include configuration values causing problems in log messages
|
||||
* Log warning message for invalid requests by clients
|
||||
* Log error message for invalid files in the storage backend
|
||||
* No stack traces unless debugging is enabled
|
||||
* Time range filter also regards overwritten recurrences
|
||||
* Items that couldn't be filtered because of bugs in VObject are always
|
||||
returned (and a warning message is logged)
|
||||
* Basic error checking of configuration files
|
||||
* File system locking isn't disabled implicitly anymore, instead a new
|
||||
configuration option gets introduced
|
||||
* The permissions of the lock file are not changed anymore
|
||||
* Support for sync-token
|
||||
* Support for client-side SSL certificates
|
||||
* Rights plugins can decide if access to an item is granted explicitly
|
||||
* Respond with 403 instead of 404 for principal collections of non-existing
|
||||
users when ``owner_only`` plugin is used (information leakage)
|
||||
* Authentication plugins can provide the login and password from the
|
||||
environment
|
||||
* new ``remote_user`` plugin, that gets the login from the ``REMOTE_USER``
|
||||
environment variable (for WSGI server)
|
||||
* new ``http_x_remote_user`` plugin, that gets the login from the
|
||||
``X-Remote-User`` HTTP header (for reverse proxies)
|
||||
|
||||
|
||||
## 2.0.0 - Little Big Radish
|
||||
|
||||
This release is not compatible with the 1.x.x versions. Follow our
|
||||
[migration guide](https://radicale.org/2.1.html#documentation/migration-from-1xx-to-2xx) if you want to switch from 1.x.x to
|
||||
2.0.0.
|
||||
|
||||
* Support Python 3.3+ only, Python 2 is not supported anymore
|
||||
* Keep only one simple filesystem-based storage system
|
||||
* Remove built-in Git support
|
||||
* Remove built-in authentication modules
|
||||
* Keep the WSGI interface, use Python HTTP server by default
|
||||
* Use a real iCal parser, rely on the "vobject" external module
|
||||
* Add a solid calendar discovery
|
||||
* Respect the difference between "files" and "folders", don't rely on slashes
|
||||
* Remove the calendar creation with GET requests
|
||||
* Be stateless
|
||||
* Use a file locker
|
||||
* Add threading
|
||||
* Get atomic writes
|
||||
* Support new filters
|
||||
* Support read-only permissions
|
||||
* Allow External plugins for authentication, rights management, storage and
|
||||
version control
|
||||
|
||||
|
||||
## 1.1.4 - Fifth Law of Nature
|
||||
|
||||
* Use ``shutil.move`` for ``--export-storage``
|
||||
|
||||
|
||||
## 1.1.3 - Fourth Law of Nature
|
||||
|
||||
* Add a ``--export-storage=FOLDER`` command-line argument (by Unrud, see #606)
|
||||
|
||||
|
||||
## 1.1.2 - Third Law of Nature
|
||||
|
||||
* **Security fix**: Add a random timer to avoid timing oracles and simple
|
||||
bruteforce attacks when using the htpasswd authentication method.
|
||||
* Various minor fixes.
|
||||
|
||||
|
||||
## 1.1.1 - Second Law of Nature
|
||||
|
||||
* Fix the owner_write rights rule
|
||||
|
||||
|
||||
## 1.1 - Law of Nature
|
||||
|
||||
One feature in this release is **not backward compatible**:
|
||||
|
||||
* Use the first matching section for rights (inspired from daald)
|
||||
|
||||
Now, the first section matching the path and current user in your custom rights
|
||||
file is used. In the previous versions, the most permissive rights of all the
|
||||
matching sections were applied. This new behaviour gives a simple way to make
|
||||
specific rules at the top of the file independent from the generic ones.
|
||||
|
||||
Many **improvements in this release are related to security**, you should
|
||||
upgrade Radicale as soon as possible:
|
||||
|
||||
* Improve the regex used for well-known URIs (by Unrud)
|
||||
* Prevent regex injection in rights management (by Unrud)
|
||||
* Prevent crafted HTTP request from calling arbitrary functions (by Unrud)
|
||||
* Improve URI sanitation and conversion to filesystem path (by Unrud)
|
||||
* Decouple the daemon from its parent environment (by Unrud)
|
||||
|
||||
Some bugs have been fixed and little enhancements have been added:
|
||||
|
||||
* Assign new items to correct key (by Unrud)
|
||||
* Avoid race condition in PID file creation (by Unrud)
|
||||
* Improve the docker version (by cdpb)
|
||||
* Encode message and committer for git commits
|
||||
* Test with Python 3.5
|
||||
|
||||
|
||||
## 1.0.1 - Sunflower Again
|
||||
|
||||
* Update the version because of a **stupid** "feature"™ of PyPI
|
||||
|
||||
|
||||
## 1.0 - Sunflower
|
||||
|
||||
* Enhanced performances (by Mathieu Dupuy)
|
||||
* Add MD5-APR1 and BCRYPT for htpasswd-based authentication (by Jan-Philip Gehrcke)
|
||||
* Use PAM service (by Stephen Paul Weber)
|
||||
* Don't discard PROPPATCH on empty collections (by Markus Unterwaditzer)
|
||||
* Write the path of the collection in the git message (by Matthew Monaco)
|
||||
* Tests launched on Travis
|
||||
|
||||
|
||||
## 0.10 - Lovely Endless Grass
|
||||
|
||||
* Support well-known URLs (by Mathieu Dupuy)
|
||||
* Fix collection discovery (by Markus Unterwaditzer)
|
||||
* Reload logger config on SIGHUP (by Élie Bouttier)
|
||||
* Remove props files when deleting a collection (by Vincent Untz)
|
||||
* Support salted SHA1 passwords (by Marc Kleine-Budde)
|
||||
* Don't spam the logs about non-SSL IMAP connections to localhost (by Giel van Schijndel)
|
||||
|
||||
|
||||
## 0.9 - Rivers
|
||||
|
||||
* Custom handlers for auth, storage and rights (by Sergey Fursov)
|
||||
* 1-file-per-event storage (by Jean-Marc Martins)
|
||||
* Git support for filesystem storages (by Jean-Marc Martins)
|
||||
* DB storage working with PostgreSQL, MariaDB and SQLite (by Jean-Marc Martins)
|
||||
* Clean rights manager based on regular expressions (by Sweil)
|
||||
* Support of contacts for Apple's clients
|
||||
* Support colors (by Jochen Sprickerhof)
|
||||
* Decode URLs in XML (by Jean-Marc Martins)
|
||||
* Fix PAM authentication (by Stepan Henek)
|
||||
* Use consistent etags (by 9m66p93w)
|
||||
* Use consistent sorting order (by Daniel Danner)
|
||||
* Return 401 on unauthorized DELETE requests (by Eduard Braun)
|
||||
* Move pid file creation in child process (by Mathieu Dupuy)
|
||||
* Allow requests without base_prefix (by jheidemann)
|
||||
|
||||
|
||||
## 0.8 - Rainbow
|
||||
|
||||
* New authentication and rights management modules (by Matthias Jordan)
|
||||
* Experimental database storage
|
||||
* Command-line option for custom configuration file (by Mark Adams)
|
||||
* Root URL not at the root of a domain (by Clint Adams, Fabrice Bellet, Vincent Untz)
|
||||
* Improved support for iCal, CalDAVSync, CardDAVSync, CalDavZAP and CardDavMATE
|
||||
* Empty PROPFIND requests handled (by Christoph Polcin)
|
||||
* Colon allowed in passwords
|
||||
* Configurable realm message
|
||||
|
||||
|
||||
## 0.7.1 - Waterfalls
|
||||
|
||||
* Many address books fixes
|
||||
* New IMAP ACL (by Daniel Aleksandersen)
|
||||
* PAM ACL fixed (by Daniel Aleksandersen)
|
||||
* Courier ACL fixed (by Benjamin Frank)
|
||||
* Always set display name to collections (by Oskari Timperi)
|
||||
* Various DELETE responses fixed
|
||||
|
||||
|
||||
## 0.7 - Eternal Sunshine
|
||||
|
||||
* Repeating events
|
||||
* Collection deletion
|
||||
* Courier and PAM authentication methods
|
||||
* CardDAV support
|
||||
* Custom LDAP filters supported
|
||||
|
||||
|
||||
## 0.6.4 - Tulips
|
||||
|
||||
* Fix the installation with Python 3.1
|
||||
|
||||
|
||||
## 0.6.3 - Red Roses
|
||||
|
||||
* MOVE requests fixed
|
||||
* Faster REPORT answers
|
||||
* Executable script moved into the package
|
||||
|
||||
|
||||
## 0.6.2 - Seeds
|
||||
|
||||
* iPhone and iPad support fixed
|
||||
* Backslashes replaced by slashes in PROPFIND answers on Windows
|
||||
* PyPI archive set as default download URL
|
||||
|
||||
|
||||
## 0.6.1 - Growing Up
|
||||
|
||||
* Example files included in the tarball
|
||||
* htpasswd support fixed
|
||||
* Redirection loop bug fixed
|
||||
* Testing message on GET requests
|
||||
|
||||
|
||||
## 0.6 - Sapling
|
||||
|
||||
* WSGI support
|
||||
* IPv6 support
|
||||
* Smart, verbose and configurable logs
|
||||
* Apple iCal 4 and iPhone support (by Łukasz Langa)
|
||||
* KDE KOrganizer support
|
||||
* LDAP auth backend (by Corentin Le Bail)
|
||||
* Public and private calendars (by René Neumann)
|
||||
* PID file
|
||||
* MOVE requests management
|
||||
* Journal entries support
|
||||
* Drop Python 2.5 support
|
||||
|
||||
|
||||
## 0.5 - Historical Artifacts
|
||||
|
||||
* Calendar depth
|
||||
* MacOS and Windows support
|
||||
* HEAD requests management
|
||||
* htpasswd user from calendar path
|
||||
|
||||
|
||||
## 0.4 - Hot Days Back
|
||||
|
||||
* Personal calendars
|
||||
* Last-Modified HTTP header
|
||||
* ``no-ssl`` and ``foreground`` options
|
||||
* Default configuration file
|
||||
|
||||
|
||||
## 0.3 - Dancing Flowers
|
||||
|
||||
* Evolution support
|
||||
* Version management
|
||||
|
||||
|
||||
## 0.2 - Snowflakes
|
||||
|
||||
* Sunbird pre-1.0 support
|
||||
* SSL connection
|
||||
* Htpasswd authentication
|
||||
* Daemon mode
|
||||
* User configuration
|
||||
* Twisted dependency removed
|
||||
* Python 3 support
|
||||
* Real URLs for PUT and DELETE
|
||||
* Concurrent modification reported to users
|
||||
* Many bugs fixed (by Roger Wenham)
|
||||
|
||||
|
||||
## 0.1 - Crazy Vegetables
|
||||
|
||||
* First release
|
||||
* Lightning/Sunbird 0.9 compatibility
|
||||
* Easy installer
|
27
README.md
27
README.md
|
@ -1,9 +1,28 @@
|
|||
# Read Me
|
||||
# Radicale
|
||||
|
||||

|
||||
[](https://github.com/Kozea/Radicale/actions/workflows/test.yml)
|
||||
[](https://coveralls.io/github/Kozea/Radicale?branch=master)
|
||||
|
||||
Radicale is a free and open-source CalDAV and CardDAV server.
|
||||
Radicale is a small but powerful CalDAV (calendars, to-do lists) and CardDAV
|
||||
(contacts) server, that:
|
||||
|
||||
* Shares calendars and contact lists through CalDAV, CardDAV and HTTP.
|
||||
* Supports events, todos, journal entries and business cards.
|
||||
* Works out-of-the-box, no complicated setup or configuration required.
|
||||
* Can limit access by authentication.
|
||||
* Can secure connections with TLS.
|
||||
* Works with many CalDAV and CardDAV clients
|
||||
* Stores all data on the file system in a simple folder structure.
|
||||
* Can be extended with plugins.
|
||||
* Is GPLv3-licensed free software.
|
||||
|
||||
For the complete documentation, please visit
|
||||
[Radicale "master" documentation](https://radicale.org/master.html).
|
||||
[Radicale master Documentation](https://radicale.org/master.html).
|
||||
|
||||
Additional hints can be found
|
||||
* [Radicale Wiki](https://github.com/Kozea/Radicale/wiki)
|
||||
* [Radicale Issues](https://github.com/Kozea/Radicale/issues)
|
||||
* [Radicale Discussions](https://github.com/Kozea/Radicale/discussions)
|
||||
|
||||
Before reporting an issue, please check
|
||||
* [Radicale Wiki / Reporting Issues](https://github.com/Kozea/Radicale/wiki/Reporting-Issues)
|
||||
|
|
213
config
213
config
|
@ -14,7 +14,8 @@
|
|||
# CalDAV server hostnames separated by a comma
|
||||
# IPv4 syntax: address:port
|
||||
# IPv6 syntax: [address]:port
|
||||
# For example: 0.0.0.0:9999, [::]:9999
|
||||
# Hostname syntax (using "getaddrinfo" to resolve to IPv4/IPv6 address(es)): hostname:port
|
||||
# For example: 0.0.0.0:9999, [::]:9999, localhost:9999
|
||||
#hosts = localhost:5232
|
||||
|
||||
# Max parallel connections
|
||||
|
@ -39,6 +40,15 @@
|
|||
# TCP traffic between Radicale and a reverse proxy
|
||||
#certificate_authority =
|
||||
|
||||
# SSL protocol, secure configuration: ALL -SSLv3 -TLSv1 -TLSv1.1
|
||||
#protocol = (default)
|
||||
|
||||
# SSL ciphersuite, secure configuration: DHE:ECDHE:-NULL:-SHA (see also "man openssl-ciphers")
|
||||
#ciphersuite = (default)
|
||||
|
||||
# script name to strip from URI if called by reverse proxy
|
||||
#script_name = (default taken from HTTP_X_SCRIPT_NAME or SCRIPT_NAME)
|
||||
|
||||
|
||||
[encoding]
|
||||
|
||||
|
@ -52,16 +62,94 @@
|
|||
[auth]
|
||||
|
||||
# Authentication method
|
||||
# Value: none | htpasswd | remote_user | http_x_remote_user
|
||||
#type = none
|
||||
# Value: none | htpasswd | remote_user | http_x_remote_user | dovecot | ldap | oauth2 | pam | denyall
|
||||
#type = denyall
|
||||
|
||||
# Cache logins for until expiration time
|
||||
#cache_logins = false
|
||||
|
||||
# Expiration time for caching successful logins in seconds
|
||||
#cache_successful_logins_expiry = 15
|
||||
|
||||
## Expiration time of caching failed logins in seconds
|
||||
#cache_failed_logins_expiry = 90
|
||||
|
||||
# Ignore modifyTimestamp and createTimestamp attributes. Required e.g. for Authentik LDAP server
|
||||
#ldap_ignore_attribute_create_modify_timestamp = false
|
||||
|
||||
# URI to the LDAP server
|
||||
#ldap_uri = ldap://localhost
|
||||
|
||||
# The base DN where the user accounts have to be searched
|
||||
#ldap_base = ##BASE_DN##
|
||||
|
||||
# The reader DN of the LDAP server
|
||||
#ldap_reader_dn = CN=ldapreader,CN=Users,##BASE_DN##
|
||||
|
||||
# Password of the reader DN
|
||||
#ldap_secret = ldapreader-secret
|
||||
|
||||
# Path of the file containing password of the reader DN
|
||||
#ldap_secret_file = /run/secrets/ldap_password
|
||||
|
||||
# the attribute to read the group memberships from in the user's LDAP entry (default: not set)
|
||||
#ldap_groups_attribute = memberOf
|
||||
|
||||
# The filter to find the DN of the user. This filter must contain a python-style placeholder for the login
|
||||
#ldap_filter = (&(objectClass=person)(uid={0}))
|
||||
|
||||
# the attribute holding the value to be used as username after authentication
|
||||
#ldap_user_attribute = cn
|
||||
|
||||
# Use ssl on the ldap connection
|
||||
#ldap_use_ssl = False
|
||||
|
||||
# The certificate verification mode. NONE, OPTIONAL, default is REQUIRED
|
||||
#ldap_ssl_verify_mode = REQUIRED
|
||||
|
||||
# The path to the CA file in pem format which is used to certificate the server certificate
|
||||
#ldap_ssl_ca_file =
|
||||
|
||||
# Connection type for dovecot authentication (AF_UNIX|AF_INET|AF_INET6)
|
||||
# Note: credentials are transmitted in cleartext
|
||||
#dovecot_connection_type = AF_UNIX
|
||||
|
||||
# The path to the Dovecot client authentication socket (eg. /run/dovecot/auth-client on Fedora). Radicale must have read / write access to the socket.
|
||||
#dovecot_socket = /var/run/dovecot/auth-client
|
||||
|
||||
# Host of via network exposed dovecot socket
|
||||
#dovecot_host = localhost
|
||||
|
||||
# Port of via network exposed dovecot socket
|
||||
#dovecot_port = 12345
|
||||
|
||||
# IMAP server hostname
|
||||
# Syntax: address | address:port | [address]:port | imap.server.tld
|
||||
#imap_host = localhost
|
||||
|
||||
# Secure the IMAP connection
|
||||
# Value: tls | starttls | none
|
||||
#imap_security = tls
|
||||
|
||||
# OAuth2 token endpoint URL
|
||||
#oauth2_token_endpoint = <URL>
|
||||
|
||||
# PAM service
|
||||
#pam_serivce = radicale
|
||||
|
||||
# PAM group user should be member of
|
||||
#pam_group_membership =
|
||||
|
||||
# Htpasswd filename
|
||||
#htpasswd_filename = /etc/radicale/users
|
||||
|
||||
# Htpasswd encryption method
|
||||
# Value: plain | bcrypt | md5
|
||||
# bcrypt requires the installation of radicale[bcrypt].
|
||||
#htpasswd_encryption = md5
|
||||
# Value: plain | bcrypt | md5 | sha256 | sha512 | autodetect
|
||||
# bcrypt requires the installation of 'bcrypt' module.
|
||||
#htpasswd_encryption = autodetect
|
||||
|
||||
# Enable caching of htpasswd file based on size and mtime_ns
|
||||
#htpasswd_cache = False
|
||||
|
||||
# Incorrect authentication delay (seconds)
|
||||
#delay = 1
|
||||
|
@ -69,33 +157,99 @@
|
|||
# Message displayed in the client when a password is needed
|
||||
#realm = Radicale - Password Required
|
||||
|
||||
# Convert username to lowercase, must be true for case-insensitive auth providers
|
||||
#lc_username = False
|
||||
|
||||
# Strip domain name from username
|
||||
#strip_domain = False
|
||||
|
||||
|
||||
[rights]
|
||||
|
||||
# Rights backend
|
||||
# Value: none | authenticated | owner_only | owner_write | from_file
|
||||
# Value: authenticated | owner_only | owner_write | from_file
|
||||
#type = owner_only
|
||||
|
||||
# File for rights management from_file
|
||||
#file = /etc/radicale/rights
|
||||
|
||||
# Permit delete of a collection (global)
|
||||
#permit_delete_collection = True
|
||||
|
||||
# Permit overwrite of a collection (global)
|
||||
#permit_overwrite_collection = True
|
||||
|
||||
|
||||
[storage]
|
||||
|
||||
# Storage backend
|
||||
# Value: multifilesystem
|
||||
# Value: multifilesystem | multifilesystem_nolock
|
||||
#type = multifilesystem
|
||||
|
||||
# Folder for storing local collections, created if not present
|
||||
#filesystem_folder = /var/lib/radicale/collections
|
||||
|
||||
# Folder for storing cache of local collections, created if not present
|
||||
# Note: only used in case of use_cache_subfolder_* options are active
|
||||
# Note: can be used on multi-instance setup to cache files on local node (see below)
|
||||
#filesystem_cache_folder = (filesystem_folder)
|
||||
|
||||
# Use subfolder 'collection-cache' for 'item' cache file structure instead of inside collection folder
|
||||
# Note: can be used on multi-instance setup to cache 'item' on local node
|
||||
#use_cache_subfolder_for_item = False
|
||||
|
||||
# Use subfolder 'collection-cache' for 'history' cache file structure instead of inside collection folder
|
||||
# Note: use only on single-instance setup, will break consistency with client in multi-instance setup
|
||||
#use_cache_subfolder_for_history = False
|
||||
|
||||
# Use subfolder 'collection-cache' for 'sync-token' cache file structure instead of inside collection folder
|
||||
# Note: use only on single-instance setup, will break consistency with client in multi-instance setup
|
||||
#use_cache_subfolder_for_synctoken = False
|
||||
|
||||
# Use last modification time (nanoseconds) and size (bytes) for 'item' cache instead of SHA256 (improves speed)
|
||||
# Note: check used filesystem mtime precision before enabling
|
||||
# Note: conversion is done on access, bulk conversion can be done offline using storage verification option: radicale --verify-storage
|
||||
#use_mtime_and_size_for_item_cache = False
|
||||
|
||||
# Use configured umask for folder creation (not applicable for OS Windows)
|
||||
# Useful value: 0077 | 0027 | 0007 | 0022
|
||||
#folder_umask = (system default, usual 0022)
|
||||
|
||||
# Delete sync token that are older (seconds)
|
||||
#max_sync_token_age = 2592000
|
||||
|
||||
# Command that is run after changes to storage
|
||||
# Example: ([ -d .git ] || git init) && git add -A && (git diff --cached --quiet || git commit -m "Changes by "%(user)s)
|
||||
# Skip broken item instead of triggering an exception
|
||||
#skip_broken_item = True
|
||||
|
||||
# Command that is run after changes to storage, default is empty
|
||||
# Supported placeholders:
|
||||
# %(user)s: logged-in user
|
||||
# %(cwd)s : current working directory
|
||||
# %(path)s: full path of item
|
||||
# Command will be executed with base directory defined in filesystem_folder
|
||||
# For "git" check DOCUMENTATION.md for bootstrap instructions
|
||||
# Example(test): echo \"user=%(user)s path=%(path)s cwd=%(cwd)s\"
|
||||
# Example(git): git add -A && (git diff --cached --quiet || git commit -m "Changes by \"%(user)s\"")
|
||||
#hook =
|
||||
|
||||
# Create predefined user collections
|
||||
#
|
||||
# json format:
|
||||
#
|
||||
# {
|
||||
# "def-addressbook": {
|
||||
# "D:displayname": "Personal Address Book",
|
||||
# "tag": "VADDRESSBOOK"
|
||||
# },
|
||||
# "def-calendar": {
|
||||
# "C:supported-calendar-component-set": "VEVENT,VJOURNAL,VTODO",
|
||||
# "D:displayname": "Personal Calendar",
|
||||
# "tag": "VCALENDAR"
|
||||
# }
|
||||
# }
|
||||
#
|
||||
#predefined_collections =
|
||||
|
||||
|
||||
[web]
|
||||
|
||||
|
@ -108,13 +262,50 @@
|
|||
|
||||
# Threshold for the logger
|
||||
# Value: debug | info | warning | error | critical
|
||||
#level = warning
|
||||
#level = info
|
||||
|
||||
# Don't include passwords in logs
|
||||
#mask_passwords = True
|
||||
|
||||
# Log bad PUT request content
|
||||
#bad_put_request_content = False
|
||||
|
||||
# Log backtrace on level=debug
|
||||
#backtrace_on_debug = False
|
||||
|
||||
# Log request header on level=debug
|
||||
#request_header_on_debug = False
|
||||
|
||||
# Log request content on level=debug
|
||||
#request_content_on_debug = False
|
||||
|
||||
# Log response content on level=debug
|
||||
#response_content_on_debug = False
|
||||
|
||||
# Log rights rule which doesn't match on level=debug
|
||||
#rights_rule_doesnt_match_on_debug = False
|
||||
|
||||
# Log storage cache actions on level=debug
|
||||
#storage_cache_actions_on_debug = False
|
||||
|
||||
[headers]
|
||||
|
||||
# Additional HTTP headers
|
||||
#Access-Control-Allow-Origin = *
|
||||
|
||||
|
||||
[hook]
|
||||
|
||||
# Hook types
|
||||
# Value: none | rabbitmq
|
||||
#type = none
|
||||
#rabbitmq_endpoint =
|
||||
#rabbitmq_topic =
|
||||
#rabbitmq_queue_type = classic
|
||||
|
||||
|
||||
[reporting]
|
||||
|
||||
# When returning a free-busy report, limit the number of returned
|
||||
# occurrences per event to prevent DOS attacks.
|
||||
#max_freebusy_occurrence = 10000
|
||||
|
|
318
contrib/apache/radicale.conf
Normal file
318
contrib/apache/radicale.conf
Normal file
|
@ -0,0 +1,318 @@
|
|||
### Define how Apache should serve "radicale"
|
||||
## !!! Do not enable both at the same time !!!
|
||||
|
||||
## Apache acting as reverse proxy and forward requests via ProxyPass to a running "radicale" server
|
||||
# SELinux WARNING: To use this correctly, you will need to set:
|
||||
# setsebool -P httpd_can_network_connect=1
|
||||
# URI prefix: /radicale
|
||||
#Define RADICALE_SERVER_REVERSE_PROXY
|
||||
|
||||
|
||||
## Apache starting WSGI server running with "radicale" application
|
||||
# MAY CONFLICT with other WSGI servers on same system -> use then inside a VirtualHost
|
||||
# SELinux WARNING: To use this correctly, you will need to set:
|
||||
# setsebool -P httpd_can_read_write_radicale=1
|
||||
# URI prefix: /radicale
|
||||
#Define RADICALE_SERVER_WSGI
|
||||
|
||||
|
||||
### Extra options
|
||||
## Apache starting a dedicated VHOST with SSL without "/radicale" prefix in URI on port 8443
|
||||
#Define RADICALE_SERVER_VHOST_SSL
|
||||
|
||||
|
||||
### permit public access to "radicale"
|
||||
#Define RADICALE_PERMIT_PUBLIC_ACCESS
|
||||
|
||||
|
||||
### enforce SSL on default host
|
||||
#Define RADICALE_ENFORCE_SSL
|
||||
|
||||
|
||||
### enable authentication by web server (config: [auth] type = http_x_remote_user)
|
||||
#Define RADICALE_SERVER_USER_AUTHENTICATION
|
||||
|
||||
|
||||
### Particular configuration EXAMPLES, adjust/extend/override to your needs
|
||||
|
||||
|
||||
##########################
|
||||
### default host
|
||||
##########################
|
||||
<IfDefine !RADICALE_SERVER_VHOST_SSL>
|
||||
|
||||
## RADICALE_SERVER_REVERSE_PROXY
|
||||
<IfDefine RADICALE_SERVER_REVERSE_PROXY>
|
||||
RewriteEngine On
|
||||
|
||||
RewriteRule ^/radicale$ /radicale/ [R,L]
|
||||
|
||||
RewriteCond %{REQUEST_METHOD} GET
|
||||
RewriteRule ^/radicale/$ /radicale/.web/ [R,L]
|
||||
|
||||
    <LocationMatch "^/radicale/\.web.*">
|
||||
# Internal WebUI does not need authentication at all
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
|
||||
<LocationMatch "^/radicale(?!/\.web)">
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_ENFORCE_SSL>
|
||||
<IfModule !ssl_module>
|
||||
Error "RADICALE_ENFORCE_SSL selected but ssl module not loaded/enabled"
|
||||
</IfModule>
|
||||
SSLRequireSSL
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
</IfDefine>
|
||||
|
||||
|
||||
## RADICALE_SERVER_WSGI
|
||||
# For more information, visit:
|
||||
# http://radicale.org/user_documentation/#idapache-and-mod-wsgi
|
||||
<IfDefine RADICALE_SERVER_WSGI>
|
||||
<IfModule wsgi_module>
|
||||
|
||||
<Files /usr/share/radicale/radicale.wsgi>
|
||||
SetHandler wsgi-script
|
||||
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</Files>
|
||||
|
||||
WSGIDaemonProcess radicale user=radicale group=radicale threads=1 umask=0027
|
||||
WSGIProcessGroup radicale
|
||||
WSGIApplicationGroup %{GLOBAL}
|
||||
WSGIPassAuthorization On
|
||||
|
||||
WSGIScriptAlias /radicale /usr/share/radicale/radicale.wsgi
|
||||
|
||||
# Internal WebUI does not need authentication at all
|
||||
    <LocationMatch "^/radicale/\.web.*">
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
|
||||
<LocationMatch "^/radicale(?!/\.web)">
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_ENFORCE_SSL>
|
||||
<IfModule !ssl_module>
|
||||
Error "RADICALE_ENFORCE_SSL selected but ssl module not loaded/enabled"
|
||||
</IfModule>
|
||||
SSLRequireSSL
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
</IfModule>
|
||||
<IfModule !wsgi_module>
|
||||
Error "RADICALE_SERVER_WSGI selected but wsgi module not loaded/enabled"
|
||||
</IfModule>
|
||||
</IfDefine>
|
||||
|
||||
</IfDefine>
|
||||
|
||||
|
||||
##########################
|
||||
### VHOST with SSL
|
||||
##########################
|
||||
<IfDefine RADICALE_SERVER_VHOST_SSL>
|
||||
|
||||
<IfModule ssl_module>
|
||||
Listen 8443 https
|
||||
|
||||
<VirtualHost _default_:8443>
|
||||
## taken from ssl.conf
|
||||
|
||||
#ServerName www.example.com:443
|
||||
ErrorLog logs/ssl_error_log
|
||||
TransferLog logs/ssl_access_log
|
||||
LogLevel warn
|
||||
SSLEngine on
|
||||
SSLProtocol all -SSLv3 -TLSv1 -TLSv1.1
|
||||
SSLProxyProtocol all -SSLv3 -TLSv1 -TLSv1.1
|
||||
SSLHonorCipherOrder on
|
||||
SSLCipherSuite PROFILE=SYSTEM
|
||||
SSLProxyCipherSuite PROFILE=SYSTEM
|
||||
SSLCertificateFile /etc/pki/tls/certs/localhost.crt
|
||||
SSLCertificateKeyFile /etc/pki/tls/private/localhost.key
|
||||
#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt
|
||||
#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt
|
||||
#SSLVerifyClient require
|
||||
#SSLVerifyDepth 10
|
||||
#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire
|
||||
BrowserMatch "MSIE [2-5]" \ nokeepalive ssl-unclean-shutdown \ downgrade-1.0 force-response-1.0
|
||||
CustomLog logs/ssl_request_log "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
|
||||
|
||||
|
||||
## RADICALE_SERVER_REVERSE_PROXY
|
||||
<IfDefine RADICALE_SERVER_REVERSE_PROXY>
|
||||
RewriteEngine On
|
||||
|
||||
RewriteCond %{REQUEST_METHOD} GET
|
||||
RewriteRule ^/$ /.web/ [R,L]
|
||||
|
||||
    <LocationMatch "^/\.web.*">
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
|
||||
<LocationMatch "^(?!/\.web)">
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
</IfDefine>
|
||||
|
||||
|
||||
## RADICALE_SERVER_WSGI
|
||||
# For more information, visit:
|
||||
# http://radicale.org/user_documentation/#idapache-and-mod-wsgi
|
||||
<IfDefine RADICALE_SERVER_WSGI>
|
||||
<IfModule wsgi_module>
|
||||
|
||||
<Files /usr/share/radicale/radicale.wsgi>
|
||||
SetHandler wsgi-script
|
||||
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</Files>
|
||||
|
||||
WSGIDaemonProcess radicale user=radicale group=radicale threads=1 umask=0027
|
||||
WSGIProcessGroup radicale
|
||||
WSGIApplicationGroup %{GLOBAL}
|
||||
WSGIPassAuthorization On
|
||||
|
||||
WSGIScriptAlias / /usr/share/radicale/radicale.wsgi
|
||||
|
||||
<LocationMatch "^/(?!/\.web)">
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
</IfModule>
|
||||
<IfModule !wsgi_module>
|
||||
Error "RADICALE_SERVER_WSGI selected but wsgi module not loaded/enabled"
|
||||
</IfModule>
|
||||
</IfDefine>
|
||||
|
||||
|
||||
</VirtualHost>
|
||||
</IfModule>
|
||||
|
||||
<IfModule !ssl_module>
|
||||
Error "RADICALE_SERVER_VHOST_SSL selected but ssl module not loaded/enabled"
|
||||
</IfModule>
|
||||
|
||||
</IfDefine>
|
193
contrib/logwatch/radicale
Normal file
193
contrib/logwatch/radicale
Normal file
|
@ -0,0 +1,193 @@
|
|||
# This file is related to Radicale - CalDAV and CardDAV server
|
||||
# for logwatch (script)
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# Detail levels
|
||||
# >= 5: Logins
|
||||
# >= 10: ResponseTimes
|
||||
|
||||
$Detail = $ENV{'LOGWATCH_DETAIL_LEVEL'} || 0;
|
||||
|
||||
my %ResponseTimes;
|
||||
my %Responses;
|
||||
my %Requests;
|
||||
my %Logins;
|
||||
my %Loglevel;
|
||||
my %OtherEvents;
|
||||
|
||||
my $sum;
|
||||
my $length;
|
||||
|
||||
sub ResponseTimesMinMaxSum($$) {
|
||||
my $req = $_[0];
|
||||
my $time = $_[1];
|
||||
|
||||
$ResponseTimes{$req}->{'cnt'}++;
|
||||
|
||||
if (! defined $ResponseTimes{$req}->{'min'}) {
|
||||
$ResponseTimes{$req}->{'min'} = $time;
|
||||
} elsif ($ResponseTimes{$req}->{'min'} > $time) {
|
||||
$ResponseTimes{$req}->{'min'} = $time;
|
||||
}
|
||||
|
||||
if (! defined $ResponseTimes{$req}->{'max'}) {
|
||||
$ResponseTimes{$req}{'max'} = $time;
|
||||
} elsif ($ResponseTimes{$req}->{'max'} < $time) {
|
||||
$ResponseTimes{$req}{'max'} = $time;
|
||||
}
|
||||
|
||||
$ResponseTimes{$req}->{'sum'} += $time;
|
||||
}
|
||||
|
||||
sub Sum($) {
|
||||
my $phash = $_[0];
|
||||
my $sum = 0;
|
||||
foreach my $entry (keys %$phash) {
|
||||
$sum += $phash->{$entry};
|
||||
}
|
||||
return $sum;
|
||||
}
|
||||
|
||||
sub MaxLength($) {
|
||||
my $phash = $_[0];
|
||||
my $length = 0;
|
||||
foreach my $entry (keys %$phash) {
|
||||
$length = length($entry) if (length($entry) > $length);
|
||||
}
|
||||
return $length;
|
||||
}
|
||||
|
||||
while (defined($ThisLine = <STDIN>)) {
|
||||
# count loglevel
|
||||
if ( $ThisLine =~ /\[(DEBUG|INFO|WARNING|ERROR|CRITICAL)\] /o ) {
|
||||
$Loglevel{$1}++
|
||||
}
|
||||
|
||||
# parse log for events
|
||||
if ( $ThisLine =~ /Radicale server ready/o ) {
|
||||
$OtherEvents{"Radicale server started"}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ /Stopping Radicale/o ) {
|
||||
$OtherEvents{"Radicale server stopped"}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (\S+) response status/o ) {
|
||||
my $req = $1;
|
||||
if ( $ThisLine =~ / \S+ response status for .* with depth '(\d)' in ([0-9.]+) seconds: (\d+)/o ) {
|
||||
$req .= ":D=" . $1 . ":R=" . $3;
|
||||
ResponseTimesMinMaxSum($req, $2) if ($Detail >= 10);
|
||||
} elsif ( $ThisLine =~ / \S+ response status for .* in ([0-9.]+) seconds: (\d+)/ ) {
|
||||
$req .= ":R=" . $2;
|
||||
ResponseTimesMinMaxSum($req, $1) if ($Detail >= 10);
|
||||
}
|
||||
$Responses{$req}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (\S+) request for/o ) {
|
||||
my $req = $1;
|
||||
if ( $ThisLine =~ / \S+ request for .* with depth '(\d)' received/o ) {
|
||||
$req .= ":D=" . $1;
|
||||
}
|
||||
$Requests{$req}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (Successful login): '([^']+)'/o ) {
|
||||
$Logins{$2}++ if ($Detail >= 5);
|
||||
$OtherEvents{$1}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (Failed login attempt) /o ) {
|
||||
$OtherEvents{$1}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ /\[(DEBUG|INFO)\] /o ) {
|
||||
# skip if DEBUG+INFO
|
||||
}
|
||||
else {
|
||||
# Report any unmatched entries...
|
||||
$ThisLine =~ s/^\[\d+(\/Thread-\d+)?\] //; # remove process/Thread ID
|
||||
chomp($ThisLine);
|
||||
$OtherList{$ThisLine}++;
|
||||
}
|
||||
}
|
||||
|
||||
if ($Started) {
|
||||
print "\nStatistics:\n";
|
||||
print " Radicale started: $Started Time(s)\n";
|
||||
}
|
||||
|
||||
if (keys %Loglevel) {
|
||||
$sum = Sum(\%Loglevel);
|
||||
print "\n**Loglevel counters**\n";
|
||||
printf "%-18s | %7s | %5s |\n", "Loglevel", "cnt", "ratio";
|
||||
print "-" x38 . "\n";
|
||||
foreach my $level (sort keys %Loglevel) {
|
||||
printf "%-18s | %7d | %3d%% |\n", $level, $Loglevel{$level}, int(($Loglevel{$level} * 100) / $sum);
|
||||
}
|
||||
print "-" x38 . "\n";
|
||||
printf "%-18s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %Requests) {
|
||||
$sum = Sum(\%Requests);
|
||||
print "\n**Request counters (D=<depth>)**\n";
|
||||
printf "%-18s | %7s | %5s |\n", "Request", "cnt", "ratio";
|
||||
print "-" x38 . "\n";
|
||||
foreach my $req (sort keys %Requests) {
|
||||
printf "%-18s | %7d | %3d%% |\n", $req, $Requests{$req}, int(($Requests{$req} * 100) / $sum);
|
||||
}
|
||||
print "-" x38 . "\n";
|
||||
printf "%-18s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %Responses) {
|
||||
$sum = Sum(\%Responses);
|
||||
print "\n**Response result counters (D=<depth> R=<result>)**\n";
|
||||
printf "%-18s | %7s | %5s |\n", "Response", "cnt", "ratio";
|
||||
print "-" x38 . "\n";
|
||||
foreach my $req (sort keys %Responses) {
|
||||
printf "%-18s | %7d | %3d%% |\n", $req, $Responses{$req}, int(($Responses{$req} * 100) / $sum);
|
||||
}
|
||||
print "-" x38 . "\n";
|
||||
printf "%-18s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %Logins) {
|
||||
$sum = Sum(\%Logins);
|
||||
$length = MaxLength(\%Logins);
|
||||
print "\n**Successful login counters**\n";
|
||||
printf "%-" . $length . "s | %7s | %5s |\n", "Login", "cnt", "ratio";
|
||||
print "-" x($length + 20) . "\n";
|
||||
foreach my $login (sort keys %Logins) {
|
||||
printf "%-" . $length . "s | %7d | %3d%% |\n", $login, $Logins{$login}, int(($Logins{$login} * 100) / $sum);
|
||||
}
|
||||
print "-" x($length + 20) . "\n";
|
||||
printf "%-" . $length . "s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %ResponseTimes) {
|
||||
print "\n**Response timings (counts, seconds) (D=<depth> R=<result>)**\n";
|
||||
printf "%-18s | %7s | %7s | %7s | %7s |\n", "Response", "cnt", "min", "max", "avg";
|
||||
print "-" x60 . "\n";
|
||||
foreach my $req (sort keys %ResponseTimes) {
|
||||
printf "%-18s | %7d | %7.3f | %7.3f | %7.3f |\n", $req
|
||||
, $ResponseTimes{$req}->{'cnt'}
|
||||
, $ResponseTimes{$req}->{'min'}
|
||||
, $ResponseTimes{$req}->{'max'}
|
||||
, $ResponseTimes{$req}->{'sum'} / $ResponseTimes{$req}->{'cnt'};
|
||||
}
|
||||
print "-" x60 . "\n";
|
||||
}
|
||||
|
||||
if (keys %OtherEvents) {
|
||||
print "\n**Other Events**\n";
|
||||
foreach $ThisOne (sort keys %OtherEvents) {
|
||||
print "$ThisOne: $OtherEvents{$ThisOne} Time(s)\n";
|
||||
}
|
||||
}
|
||||
|
||||
if (keys %OtherList) {
|
||||
print "\n**Unmatched Entries**\n";
|
||||
foreach $ThisOne (sort keys %OtherList) {
|
||||
print "$ThisOne: $OtherList{$ThisOne} Time(s)\n";
|
||||
}
|
||||
}
|
||||
|
||||
exit(0);
|
||||
|
||||
# vim: shiftwidth=3 tabstop=3 syntax=perl et smartindent
|
11
contrib/logwatch/radicale-journald.conf
Normal file
11
contrib/logwatch/radicale-journald.conf
Normal file
|
@ -0,0 +1,11 @@
|
|||
# This file is related to Radicale - CalDAV and CardDAV server
|
||||
# for logwatch (config) - input from journald
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
|
||||
Title = "Radicale"
|
||||
|
||||
LogFile = none
|
||||
|
||||
*JournalCtl = "--output=cat --unit=radicale.service"
|
||||
|
||||
# vi: shiftwidth=3 tabstop=3 et
|
13
contrib/logwatch/radicale-syslog.conf
Normal file
13
contrib/logwatch/radicale-syslog.conf
Normal file
|
@ -0,0 +1,13 @@
|
|||
# This file is related to Radicale - CalDAV and CardDAV server
|
||||
# for logwatch (config) - input from syslog file
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
|
||||
Title = "Radicale"
|
||||
|
||||
LogFile = messages
|
||||
|
||||
*OnlyService = radicale
|
||||
|
||||
*RemoveHeaders
|
||||
|
||||
# vi: shiftwidth=3 tabstop=3 et
|
31
contrib/nginx/radicale.conf
Normal file
31
contrib/nginx/radicale.conf
Normal file
|
@ -0,0 +1,31 @@
|
|||
### Proxy Forward to local running "radicale" server
|
||||
###
|
||||
### Usual configuration file location: /etc/nginx/default.d/
|
||||
|
||||
## "well-known" redirect at least for Apple devices
|
||||
rewrite ^/.well-known/carddav /radicale/ redirect;
|
||||
rewrite ^/.well-known/caldav /radicale/ redirect;
|
||||
|
||||
## Base URI: /radicale/
|
||||
location /radicale/ {
|
||||
proxy_pass http://localhost:5232/;
|
||||
proxy_set_header X-Script-Name /radicale;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_pass_header Authorization;
|
||||
}
|
||||
|
||||
## Base URI: /
|
||||
#location / {
|
||||
# proxy_pass http://localhost:5232/;
|
||||
# proxy_set_header X-Script-Name /radicale;
|
||||
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# proxy_set_header X-Forwarded-Host $host;
|
||||
# proxy_set_header X-Forwarded-Port $server_port;
|
||||
# proxy_set_header X-Forwarded-Proto $scheme;
|
||||
# proxy_set_header Host $http_host;
|
||||
# proxy_pass_header Authorization;
|
||||
#}
|
128
pyproject.toml
Normal file
128
pyproject.toml
Normal file
|
@ -0,0 +1,128 @@
|
|||
[project]
|
||||
name = "Radicale"
|
||||
# When the version is updated, a new section in the CHANGELOG.md file must be
|
||||
# added too.
|
||||
readme = "README.md"
|
||||
version = "3.5.1.dev"
|
||||
authors = [{name = "Guillaume Ayoub", email = "guillaume.ayoub@kozea.fr"}, {name = "Unrud", email = "unrud@outlook.com"}, {name = "Peter Bieringer", email = "pb@bieringer.de"}]
|
||||
license = {text = "GNU GPL v3"}
|
||||
description = "CalDAV and CardDAV Server"
|
||||
keywords = ["calendar", "addressbook", "CalDAV", "CardDAV"]
|
||||
classifiers = [
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Environment :: Console",
|
||||
"Environment :: Web Environment",
|
||||
"Intended Audience :: End Users/Desktop",
|
||||
"Intended Audience :: Information Technology",
|
||||
"License :: OSI Approved :: GNU General Public License (GPL)",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Programming Language :: Python :: Implementation :: PyPy",
|
||||
"Topic :: Office/Business :: Groupware",
|
||||
]
|
||||
urls = {Homepage = "https://radicale.org/"}
|
||||
requires-python = ">=3.9.0"
|
||||
dependencies = [
|
||||
"defusedxml",
|
||||
"passlib",
|
||||
"vobject>=0.9.6",
|
||||
"pika>=1.1.0",
|
||||
"requests",
|
||||
]
|
||||
|
||||
|
||||
[project.optional-dependencies]
|
||||
test = ["pytest>=7", "waitress", "bcrypt"]
|
||||
bcrypt = ["bcrypt"]
|
||||
ldap = ["ldap3"]
|
||||
|
||||
[project.scripts]
|
||||
radicale = "radicale.__main__:run"
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.2"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[tool.tox]
|
||||
min_version = "4.0"
|
||||
envlist = ["py", "flake8", "isort", "mypy"]
|
||||
|
||||
[tool.tox.env.py]
|
||||
extras = ["test"]
|
||||
deps = [
|
||||
"pytest",
|
||||
"pytest-cov"
|
||||
]
|
||||
commands = [["pytest", "-r", "s", "--cov", "--cov-report=term", "--cov-report=xml", "."]]
|
||||
|
||||
[tool.tox.env.flake8]
|
||||
deps = ["flake8==7.1.0"]
|
||||
commands = [["flake8", "."]]
|
||||
skip_install = true
|
||||
|
||||
[tool.tox.env.isort]
|
||||
deps = ["isort==5.13.2"]
|
||||
commands = [["isort", "--check", "--diff", "."]]
|
||||
skip_install = true
|
||||
|
||||
[tool.tox.env.mypy]
|
||||
deps = ["mypy==1.11.0"]
|
||||
commands = [["mypy", "--install-types", "--non-interactive", "."]]
|
||||
skip_install = true
|
||||
|
||||
|
||||
[tool.setuptools]
|
||||
platforms = ["Any"]
|
||||
include-package-data = false
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
exclude = ["*.tests"] # *.tests.*; tests.*; tests
|
||||
namespaces = false
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
radicale = [
|
||||
"web/internal_data/css/icon.png",
|
||||
"web/internal_data/css/loading.svg",
|
||||
"web/internal_data/css/logo.svg",
|
||||
"web/internal_data/css/main.css",
|
||||
"web/internal_data/css/icons/delete.svg",
|
||||
"web/internal_data/css/icons/download.svg",
|
||||
"web/internal_data/css/icons/edit.svg",
|
||||
"web/internal_data/css/icons/new.svg",
|
||||
"web/internal_data/css/icons/upload.svg",
|
||||
"web/internal_data/fn.js",
|
||||
"web/internal_data/index.html",
|
||||
"py.typed",
|
||||
]
|
||||
|
||||
[tool.isort]
|
||||
known_standard_library = "_dummy_thread,_thread,abc,aifc,argparse,array,ast,asynchat,asyncio,asyncore,atexit,audioop,base64,bdb,binascii,binhex,bisect,builtins,bz2,cProfile,calendar,cgi,cgitb,chunk,cmath,cmd,code,codecs,codeop,collections,colorsys,compileall,concurrent,configparser,contextlib,contextvars,copy,copyreg,crypt,csv,ctypes,curses,dataclasses,datetime,dbm,decimal,difflib,dis,distutils,doctest,dummy_threading,email,encodings,ensurepip,enum,errno,faulthandler,fcntl,filecmp,fileinput,fnmatch,formatter,fpectl,fractions,ftplib,functools,gc,getopt,getpass,gettext,glob,grp,gzip,hashlib,heapq,hmac,html,http,imaplib,imghdr,imp,importlib,inspect,io,ipaddress,itertools,json,keyword,lib2to3,linecache,locale,logging,lzma,macpath,mailbox,mailcap,marshal,math,mimetypes,mmap,modulefinder,msilib,msvcrt,multiprocessing,netrc,nis,nntplib,ntpath,numbers,operator,optparse,os,ossaudiodev,parser,pathlib,pdb,pickle,pickletools,pipes,pkgutil,platform,plistlib,poplib,posix,posixpath,pprint,profile,pstats,pty,pwd,py_compile,pyclbr,pydoc,queue,quopri,random,re,readline,reprlib,resource,rlcompleter,runpy,sched,secrets,select,selectors,shelve,shlex,shutil,signal,site,smtpd,smtplib,sndhdr,socket,socketserver,spwd,sqlite3,sre,sre_compile,sre_constants,sre_parse,ssl,stat,statistics,string,stringprep,struct,subprocess,sunau,symbol,symtable,sys,sysconfig,syslog,tabnanny,tarfile,telnetlib,tempfile,termios,test,textwrap,threading,time,timeit,tkinter,token,tokenize,trace,traceback,tracemalloc,tty,turtle,turtledemo,types,typing,unicodedata,unittest,urllib,uu,uuid,venv,warnings,wave,weakref,webbrowser,winreg,winsound,wsgiref,xdrlib,xml,xmlrpc,zipapp,zipfile,zipimport,zlib"
|
||||
known_third_party = "defusedxml,passlib,pkg_resources,pytest,vobject"
|
||||
|
||||
[tool.mypy]
|
||||
ignore_missing_imports = true
|
||||
show_error_codes = true
|
||||
exclude = "(^|/)build($|/)"
|
||||
|
||||
[tool.coverage.run]
|
||||
branch = true
|
||||
source = ["radicale"]
|
||||
omit = ["tests/*", "*/tests/*"]
|
||||
|
||||
[tool.coverage.report]
|
||||
# Regexes for lines to exclude from consideration
|
||||
exclude_lines = [
|
||||
# Have to re-enable the standard pragma
|
||||
"pragma: no cover",
|
||||
# Don't complain if tests don't hit defensive assertion code:
|
||||
"raise AssertionError",
|
||||
"raise NotImplementedError",
|
||||
# Don't complain if non-runnable code isn't run:
|
||||
"if __name__ == .__main__.:",
|
||||
]
|
|
@ -1,17 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Radicale FastCGI Example.
|
||||
|
||||
Launch a Radicale FastCGI server according to configuration.
|
||||
|
||||
This script relies on flup but can be easily adapted to use another
|
||||
WSGI-to-FastCGI mapper.
|
||||
|
||||
"""
|
||||
|
||||
from flup.server.fcgi import WSGIServer
|
||||
from radicale import application
|
||||
|
||||
if __name__ == "__main__":
|
||||
WSGIServer(application).run()
|
13
radicale.py
13
radicale.py
|
@ -1,13 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Radicale CalDAV Server.
|
||||
|
||||
Launch the server according to configuration and command-line options.
|
||||
|
||||
"""
|
||||
|
||||
import runpy
|
||||
|
||||
if __name__ == "__main__":
|
||||
runpy.run_module("radicale", run_name="__main__")
|
4
radicale.wsgi
Executable file → Normal file
4
radicale.wsgi
Executable file → Normal file
|
@ -3,4 +3,8 @@ Radicale WSGI file (mod_wsgi and uWSGI compliant).
|
|||
|
||||
"""
|
||||
|
||||
import os
|
||||
from radicale import application
|
||||
|
||||
# set an environment variable
|
||||
os.environ.setdefault('SERVER_GATEWAY_INTERFACE', 'Web')
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -27,46 +28,51 @@ Configuration files can be specified in the environment variable
|
|||
|
||||
import os
|
||||
import threading
|
||||
from typing import Iterable, Optional, cast
|
||||
|
||||
import pkg_resources
|
||||
|
||||
from radicale import config, log
|
||||
from radicale import config, log, types, utils
|
||||
from radicale.app import Application
|
||||
from radicale.log import logger
|
||||
|
||||
VERSION = pkg_resources.get_distribution("radicale").version
|
||||
VERSION: str = utils.package_version("radicale")
|
||||
|
||||
_application = None
|
||||
_application_config_path = None
|
||||
_application_instance: Optional[Application] = None
|
||||
_application_config_path: Optional[str] = None
|
||||
_application_lock = threading.Lock()
|
||||
|
||||
|
||||
def _init_application(config_path, wsgi_errors):
|
||||
global _application, _application_config_path
|
||||
def _get_application_instance(config_path: str, wsgi_errors: types.ErrorStream
|
||||
) -> Application:
|
||||
global _application_instance, _application_config_path
|
||||
with _application_lock:
|
||||
if _application is not None:
|
||||
return
|
||||
log.setup()
|
||||
with log.register_stream(wsgi_errors):
|
||||
_application_config_path = config_path
|
||||
configuration = config.load(config.parse_compound_paths(
|
||||
config.DEFAULT_CONFIG_PATH,
|
||||
config_path))
|
||||
log.set_level(configuration.get("logging", "level"))
|
||||
# Log configuration after logger is configured
|
||||
for source, miss in configuration.sources():
|
||||
logger.info("%s %s", "Skipped missing" if miss else "Loaded",
|
||||
source)
|
||||
_application = Application(configuration)
|
||||
if _application_instance is None:
|
||||
log.setup()
|
||||
with log.register_stream(wsgi_errors):
|
||||
_application_config_path = config_path
|
||||
configuration = config.load(config.parse_compound_paths(
|
||||
config.DEFAULT_CONFIG_PATH,
|
||||
config_path))
|
||||
log.set_level(cast(str, configuration.get("logging", "level")), configuration.get("logging", "backtrace_on_debug"))
|
||||
# Log configuration after logger is configured
|
||||
default_config_active = True
|
||||
for source, miss in configuration.sources():
|
||||
logger.info("%s %s", "Skipped missing/unreadable" if miss
|
||||
else "Loaded", source)
|
||||
if not miss and source != "default config":
|
||||
default_config_active = False
|
||||
if default_config_active:
|
||||
logger.warning("%s", "No config file found/readable - only default config is active")
|
||||
_application_instance = Application(configuration)
|
||||
if _application_config_path != config_path:
|
||||
raise ValueError("RADICALE_CONFIG must not change: %r != %r" %
|
||||
(config_path, _application_config_path))
|
||||
return _application_instance
|
||||
|
||||
|
||||
def application(environ, start_response):
|
||||
def application(environ: types.WSGIEnviron,
|
||||
start_response: types.WSGIStartResponse) -> Iterable[bytes]:
|
||||
"""Entry point for external WSGI servers."""
|
||||
config_path = environ.get("RADICALE_CONFIG",
|
||||
os.environ.get("RADICALE_CONFIG"))
|
||||
if _application is None:
|
||||
_init_application(config_path, environ["wsgi.errors"])
|
||||
if _application_config_path != config_path:
|
||||
raise ValueError("RADICALE_CONFIG must not change: %s != %s" %
|
||||
(repr(config_path), repr(_application_config_path)))
|
||||
return _application(environ, start_response)
|
||||
app = _get_application_instance(config_path, environ["wsgi.errors"])
|
||||
return app(environ, start_response)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2011-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -29,128 +30,182 @@ import os
|
|||
import signal
|
||||
import socket
|
||||
import sys
|
||||
from types import FrameType
|
||||
from typing import List, Optional, cast
|
||||
|
||||
from radicale import VERSION, config, log, server, storage
|
||||
from radicale import VERSION, config, log, server, storage, types
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def run():
|
||||
def run() -> None:
|
||||
"""Run Radicale as a standalone server."""
|
||||
exit_signal_numbers = [signal.SIGTERM, signal.SIGINT]
|
||||
if sys.platform == "win32":
|
||||
exit_signal_numbers.append(signal.SIGBREAK)
|
||||
else:
|
||||
exit_signal_numbers.append(signal.SIGHUP)
|
||||
exit_signal_numbers.append(signal.SIGQUIT)
|
||||
|
||||
# Raise SystemExit when signal arrives to run cleanup code
|
||||
# (like destructors, try-finish etc.), otherwise the process exits
|
||||
# without running any of them
|
||||
def exit_signal_handler(signal_number: int,
|
||||
stack_frame: Optional[FrameType]) -> None:
|
||||
sys.exit(1)
|
||||
for signal_number in exit_signal_numbers:
|
||||
signal.signal(signal_number, exit_signal_handler)
|
||||
|
||||
log.setup()
|
||||
|
||||
# Get command-line arguments
|
||||
parser = argparse.ArgumentParser(usage="radicale [OPTIONS]")
|
||||
# Configuration options are stored in dest with format "c:SECTION:OPTION"
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="radicale", usage="%(prog)s [OPTIONS]", allow_abbrev=False)
|
||||
|
||||
parser.add_argument("--version", action="version", version=VERSION)
|
||||
parser.add_argument("--verify-storage", action="store_true",
|
||||
help="check the storage for errors and exit")
|
||||
parser.add_argument(
|
||||
"-C", "--config", help="use specific configuration files", nargs="*")
|
||||
parser.add_argument("-D", "--debug", action="store_true",
|
||||
parser.add_argument("-C", "--config",
|
||||
help="use specific configuration files", nargs="*")
|
||||
parser.add_argument("-D", "--debug", action="store_const", const="debug",
|
||||
dest="c:logging:level", default=argparse.SUPPRESS,
|
||||
help="print debug information")
|
||||
|
||||
groups = {}
|
||||
for section, values in config.DEFAULT_CONFIG_SCHEMA.items():
|
||||
for section, section_data in config.DEFAULT_CONFIG_SCHEMA.items():
|
||||
if section.startswith("_"):
|
||||
continue
|
||||
group = parser.add_argument_group(section)
|
||||
groups[group] = []
|
||||
for option, data in values.items():
|
||||
assert ":" not in section # check field separator
|
||||
assert "-" not in section and "_" not in section # not implemented
|
||||
group_description = None
|
||||
if section_data.get("_allow_extra"):
|
||||
group_description = "additional options allowed"
|
||||
if section == "headers":
|
||||
group_description += " (e.g. --headers-Pragma=no-cache)"
|
||||
elif "type" in section_data:
|
||||
group_description = "backend specific options omitted"
|
||||
group = parser.add_argument_group(section, group_description)
|
||||
for option, data in section_data.items():
|
||||
if option.startswith("_"):
|
||||
continue
|
||||
kwargs = data.copy()
|
||||
long_name = "--%s-%s" % (section, option.replace("_", "-"))
|
||||
args = kwargs.pop("aliases", [])
|
||||
args: List[str] = list(kwargs.pop("aliases", ()))
|
||||
args.append(long_name)
|
||||
kwargs["dest"] = "%s_%s" % (section, option)
|
||||
groups[group].append(kwargs["dest"])
|
||||
kwargs["dest"] = "c:%s:%s" % (section, option)
|
||||
kwargs["metavar"] = "VALUE"
|
||||
kwargs["default"] = argparse.SUPPRESS
|
||||
del kwargs["value"]
|
||||
with contextlib.suppress(KeyError):
|
||||
del kwargs["internal"]
|
||||
|
||||
if kwargs["type"] == bool:
|
||||
del kwargs["type"]
|
||||
kwargs["action"] = "store_const"
|
||||
kwargs["const"] = "True"
|
||||
opposite_args = kwargs.pop("opposite", [])
|
||||
opposite_args = list(kwargs.pop("opposite_aliases", ()))
|
||||
opposite_args.append("--no%s" % long_name[1:])
|
||||
group.add_argument(*args, **kwargs)
|
||||
|
||||
kwargs["const"] = "False"
|
||||
group.add_argument(*args, nargs="?", const="True", **kwargs)
|
||||
# Opposite argument
|
||||
kwargs["help"] = "do not %s (opposite of %s)" % (
|
||||
kwargs["help"], long_name)
|
||||
group.add_argument(*opposite_args, **kwargs)
|
||||
group.add_argument(*opposite_args, action="store_const",
|
||||
const="False", **kwargs)
|
||||
else:
|
||||
del kwargs["type"]
|
||||
group.add_argument(*args, **kwargs)
|
||||
|
||||
args = parser.parse_args()
|
||||
args_ns, remaining_args = parser.parse_known_args()
|
||||
unrecognized_args = []
|
||||
while remaining_args:
|
||||
arg = remaining_args.pop(0)
|
||||
for section, data in config.DEFAULT_CONFIG_SCHEMA.items():
|
||||
if "type" not in data and not data.get("_allow_extra"):
|
||||
continue
|
||||
prefix = "--%s-" % section
|
||||
if arg.startswith(prefix):
|
||||
arg = arg[len(prefix):]
|
||||
break
|
||||
else:
|
||||
unrecognized_args.append(arg)
|
||||
continue
|
||||
value = ""
|
||||
if "=" in arg:
|
||||
arg, value = arg.split("=", maxsplit=1)
|
||||
elif remaining_args and not remaining_args[0].startswith("-"):
|
||||
value = remaining_args.pop(0)
|
||||
option = arg
|
||||
if not data.get("_allow_extra"): # preserve dash in HTTP header names
|
||||
option = option.replace("-", "_")
|
||||
vars(args_ns)["c:%s:%s" % (section, option)] = value
|
||||
if unrecognized_args:
|
||||
parser.error("unrecognized arguments: %s" %
|
||||
" ".join(unrecognized_args))
|
||||
|
||||
# Preliminary configure logging
|
||||
if args.debug:
|
||||
args.logging_level = "debug"
|
||||
with contextlib.suppress(ValueError):
|
||||
log.set_level(config.DEFAULT_CONFIG_SCHEMA["logging"]["level"]["type"](
|
||||
args.logging_level))
|
||||
vars(args_ns).get("c:logging:level", "")), True)
|
||||
|
||||
# Update Radicale configuration according to arguments
|
||||
arguments_config = {}
|
||||
for group, actions in groups.items():
|
||||
section = group.title
|
||||
section_config = {}
|
||||
for action in actions:
|
||||
value = getattr(args, action)
|
||||
if value is not None:
|
||||
section_config[action.split('_', 1)[1]] = value
|
||||
if section_config:
|
||||
arguments_config[section] = section_config
|
||||
arguments_config: types.MUTABLE_CONFIG = {}
|
||||
for key, value in vars(args_ns).items():
|
||||
if key.startswith("c:"):
|
||||
_, section, option = key.split(":", maxsplit=2)
|
||||
arguments_config[section] = arguments_config.get(section, {})
|
||||
arguments_config[section][option] = value
|
||||
|
||||
try:
|
||||
configuration = config.load(config.parse_compound_paths(
|
||||
config.DEFAULT_CONFIG_PATH,
|
||||
os.environ.get("RADICALE_CONFIG"),
|
||||
os.pathsep.join(args.config) if args.config else None))
|
||||
os.pathsep.join(args_ns.config) if args_ns.config is not None
|
||||
else None))
|
||||
if arguments_config:
|
||||
configuration.update(arguments_config, "arguments")
|
||||
configuration.update(arguments_config, "command line arguments")
|
||||
except Exception as e:
|
||||
logger.fatal("Invalid configuration: %s", e, exc_info=True)
|
||||
logger.critical("Invalid configuration: %s", e, exc_info=True)
|
||||
sys.exit(1)
|
||||
|
||||
# Configure logging
|
||||
log.set_level(configuration.get("logging", "level"))
|
||||
log.set_level(cast(str, configuration.get("logging", "level")), configuration.get("logging", "backtrace_on_debug"))
|
||||
|
||||
# Log configuration after logger is configured
|
||||
default_config_active = True
|
||||
for source, miss in configuration.sources():
|
||||
logger.info("%s %s", "Skipped missing" if miss else "Loaded", source)
|
||||
logger.info("%s %s", "Skipped missing/unreadable" if miss else "Loaded", source)
|
||||
if not miss and source != "default config":
|
||||
default_config_active = False
|
||||
|
||||
if args.verify_storage:
|
||||
if default_config_active:
|
||||
logger.warning("%s", "No config file found/readable - only default config is active")
|
||||
|
||||
if args_ns.verify_storage:
|
||||
logger.info("Verifying storage")
|
||||
try:
|
||||
storage_ = storage.load(configuration)
|
||||
with storage_.acquire_lock("r"):
|
||||
if not storage_.verify():
|
||||
logger.fatal("Storage verifcation failed")
|
||||
logger.critical("Storage verification failed")
|
||||
sys.exit(1)
|
||||
except Exception as e:
|
||||
logger.fatal("An exception occurred during storage verification: "
|
||||
"%s", e, exc_info=True)
|
||||
logger.critical("An exception occurred during storage "
|
||||
"verification: %s", e, exc_info=True)
|
||||
sys.exit(1)
|
||||
return
|
||||
|
||||
# Create a socket pair to notify the server of program shutdown
|
||||
shutdown_socket, shutdown_socket_out = socket.socketpair()
|
||||
|
||||
# SIGTERM and SIGINT (aka KeyboardInterrupt) shutdown the server
|
||||
def shutdown(signal_number, stack_frame):
|
||||
# Shutdown server when signal arrives
|
||||
def shutdown_signal_handler(signal_number: int,
|
||||
stack_frame: Optional[FrameType]) -> None:
|
||||
shutdown_socket.close()
|
||||
signal.signal(signal.SIGTERM, shutdown)
|
||||
signal.signal(signal.SIGINT, shutdown)
|
||||
for signal_number in exit_signal_numbers:
|
||||
signal.signal(signal_number, shutdown_signal_handler)
|
||||
|
||||
try:
|
||||
server.serve(configuration, shutdown_socket_out)
|
||||
except Exception as e:
|
||||
logger.fatal("An exception occurred during server startup: %s", e,
|
||||
exc_info=True)
|
||||
logger.critical("An exception occurred during server startup: %s", e,
|
||||
exc_info=False)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -27,47 +28,52 @@ the built-in server (see ``radicale.server`` module).
|
|||
|
||||
import base64
|
||||
import datetime
|
||||
import io
|
||||
import logging
|
||||
import posixpath
|
||||
import pprint
|
||||
import random
|
||||
import time
|
||||
import zlib
|
||||
from http import client
|
||||
from xml.etree import ElementTree as ET
|
||||
from typing import Iterable, List, Mapping, Tuple, Union
|
||||
|
||||
import defusedxml.ElementTree as DefusedET
|
||||
import pkg_resources
|
||||
|
||||
from radicale import (auth, httputils, log, pathutils, rights, storage, web,
|
||||
xmlutils)
|
||||
from radicale.app.delete import ApplicationDeleteMixin
|
||||
from radicale.app.get import ApplicationGetMixin
|
||||
from radicale.app.head import ApplicationHeadMixin
|
||||
from radicale.app.mkcalendar import ApplicationMkcalendarMixin
|
||||
from radicale.app.mkcol import ApplicationMkcolMixin
|
||||
from radicale.app.move import ApplicationMoveMixin
|
||||
from radicale.app.options import ApplicationOptionsMixin
|
||||
from radicale.app.propfind import ApplicationPropfindMixin
|
||||
from radicale.app.proppatch import ApplicationProppatchMixin
|
||||
from radicale.app.put import ApplicationPutMixin
|
||||
from radicale.app.report import ApplicationReportMixin
|
||||
from radicale import config, httputils, log, pathutils, types
|
||||
from radicale.app.base import ApplicationBase
|
||||
from radicale.app.delete import ApplicationPartDelete
|
||||
from radicale.app.get import ApplicationPartGet
|
||||
from radicale.app.head import ApplicationPartHead
|
||||
from radicale.app.mkcalendar import ApplicationPartMkcalendar
|
||||
from radicale.app.mkcol import ApplicationPartMkcol
|
||||
from radicale.app.move import ApplicationPartMove
|
||||
from radicale.app.options import ApplicationPartOptions
|
||||
from radicale.app.post import ApplicationPartPost
|
||||
from radicale.app.propfind import ApplicationPartPropfind
|
||||
from radicale.app.proppatch import ApplicationPartProppatch
|
||||
from radicale.app.put import ApplicationPartPut
|
||||
from radicale.app.report import ApplicationPartReport
|
||||
from radicale.log import logger
|
||||
|
||||
VERSION = pkg_resources.get_distribution("radicale").version
|
||||
# Combination of types.WSGIStartResponse and WSGI application return value
|
||||
_IntermediateResponse = Tuple[str, List[Tuple[str, str]], Iterable[bytes]]
|
||||
|
||||
|
||||
class Application(
|
||||
ApplicationDeleteMixin, ApplicationGetMixin, ApplicationHeadMixin,
|
||||
ApplicationMkcalendarMixin, ApplicationMkcolMixin,
|
||||
ApplicationMoveMixin, ApplicationOptionsMixin,
|
||||
ApplicationPropfindMixin, ApplicationProppatchMixin,
|
||||
ApplicationPutMixin, ApplicationReportMixin):
|
||||
|
||||
class Application(ApplicationPartDelete, ApplicationPartHead,
|
||||
ApplicationPartGet, ApplicationPartMkcalendar,
|
||||
ApplicationPartMkcol, ApplicationPartMove,
|
||||
ApplicationPartOptions, ApplicationPartPropfind,
|
||||
ApplicationPartProppatch, ApplicationPartPost,
|
||||
ApplicationPartPut, ApplicationPartReport, ApplicationBase):
|
||||
"""WSGI application."""
|
||||
|
||||
def __init__(self, configuration):
|
||||
_mask_passwords: bool
|
||||
_auth_delay: float
|
||||
_internal_server: bool
|
||||
_max_content_length: int
|
||||
_auth_realm: str
|
||||
_script_name: str
|
||||
_extra_headers: Mapping[str, str]
|
||||
_permit_delete_collection: bool
|
||||
_permit_overwrite_collection: bool
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
"""Initialize Application.
|
||||
|
||||
``configuration`` see ``radicale.config`` module.
|
||||
|
@ -75,85 +81,90 @@ class Application(
|
|||
this object, it is kept as an internal reference.
|
||||
|
||||
"""
|
||||
super().__init__()
|
||||
self.configuration = configuration
|
||||
self._auth = auth.load(configuration)
|
||||
self._storage = storage.load(configuration)
|
||||
self._rights = rights.load(configuration)
|
||||
self._web = web.load(configuration)
|
||||
self._encoding = configuration.get("encoding", "request")
|
||||
super().__init__(configuration)
|
||||
self._mask_passwords = configuration.get("logging", "mask_passwords")
|
||||
self._bad_put_request_content = configuration.get("logging", "bad_put_request_content")
|
||||
self._request_header_on_debug = configuration.get("logging", "request_header_on_debug")
|
||||
self._response_content_on_debug = configuration.get("logging", "response_content_on_debug")
|
||||
self._auth_delay = configuration.get("auth", "delay")
|
||||
self._internal_server = configuration.get("server", "_internal_server")
|
||||
self._script_name = configuration.get("server", "script_name")
|
||||
if self._script_name:
|
||||
if self._script_name[0] != "/":
|
||||
logger.error("server.script_name must start with '/': %r", self._script_name)
|
||||
raise RuntimeError("server.script_name option has to start with '/'")
|
||||
else:
|
||||
if self._script_name.endswith("/"):
|
||||
logger.error("server.script_name must not end with '/': %r", self._script_name)
|
||||
raise RuntimeError("server.script_name option must not end with '/'")
|
||||
else:
|
||||
logger.info("Provided script name to strip from URI if called by reverse proxy: %r", self._script_name)
|
||||
else:
|
||||
logger.info("Default script name to strip from URI if called by reverse proxy is taken from HTTP_X_SCRIPT_NAME or SCRIPT_NAME")
|
||||
self._max_content_length = configuration.get(
|
||||
"server", "max_content_length")
|
||||
self._auth_realm = configuration.get("auth", "realm")
|
||||
self._permit_delete_collection = configuration.get("rights", "permit_delete_collection")
|
||||
logger.info("permit delete of collection: %s", self._permit_delete_collection)
|
||||
self._permit_overwrite_collection = configuration.get("rights", "permit_overwrite_collection")
|
||||
logger.info("permit overwrite of collection: %s", self._permit_overwrite_collection)
|
||||
self._extra_headers = dict()
|
||||
for key in self.configuration.options("headers"):
|
||||
self._extra_headers[key] = configuration.get("headers", key)
|
||||
|
||||
def _headers_log(self, environ):
|
||||
"""Sanitize headers for logging."""
|
||||
request_environ = dict(environ)
|
||||
def _scrub_headers(self, environ: types.WSGIEnviron) -> types.WSGIEnviron:
|
||||
"""Mask passwords and cookies."""
|
||||
headers = dict(environ)
|
||||
if (self._mask_passwords and
|
||||
headers.get("HTTP_AUTHORIZATION", "").startswith("Basic")):
|
||||
headers["HTTP_AUTHORIZATION"] = "Basic **masked**"
|
||||
if headers.get("HTTP_COOKIE"):
|
||||
headers["HTTP_COOKIE"] = "**masked**"
|
||||
return headers
|
||||
|
||||
# Mask passwords
|
||||
mask_passwords = self.configuration.get("logging", "mask_passwords")
|
||||
authorization = request_environ.get("HTTP_AUTHORIZATION", "")
|
||||
if mask_passwords and authorization.startswith("Basic"):
|
||||
request_environ["HTTP_AUTHORIZATION"] = "Basic **masked**"
|
||||
if request_environ.get("HTTP_COOKIE"):
|
||||
request_environ["HTTP_COOKIE"] = "**masked**"
|
||||
|
||||
return request_environ
|
||||
|
||||
def _decode(self, text, environ):
|
||||
"""Try to magically decode ``text`` according to given ``environ``."""
|
||||
# List of charsets to try
|
||||
charsets = []
|
||||
|
||||
# First append content charset given in the request
|
||||
content_type = environ.get("CONTENT_TYPE")
|
||||
if content_type and "charset=" in content_type:
|
||||
charsets.append(
|
||||
content_type.split("charset=")[1].split(";")[0].strip())
|
||||
# Then append default Radicale charset
|
||||
charsets.append(self._encoding)
|
||||
# Then append various fallbacks
|
||||
charsets.append("utf-8")
|
||||
charsets.append("iso8859-1")
|
||||
|
||||
# Try to decode
|
||||
for charset in charsets:
|
||||
try:
|
||||
return text.decode(charset)
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
raise UnicodeDecodeError
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
def __call__(self, environ: types.WSGIEnviron, start_response:
|
||||
types.WSGIStartResponse) -> Iterable[bytes]:
|
||||
with log.register_stream(environ["wsgi.errors"]):
|
||||
try:
|
||||
status, headers, answers = self._handle_request(environ)
|
||||
status_text, headers, answers = self._handle_request(environ)
|
||||
except Exception as e:
|
||||
try:
|
||||
method = str(environ["REQUEST_METHOD"])
|
||||
except Exception:
|
||||
method = "unknown"
|
||||
try:
|
||||
path = str(environ.get("PATH_INFO", ""))
|
||||
except Exception:
|
||||
path = ""
|
||||
logger.error("An exception occurred during %s request on %r: "
|
||||
"%s", method, path, e, exc_info=True)
|
||||
status, headers, answer = httputils.INTERNAL_SERVER_ERROR
|
||||
answer = answer.encode("ascii")
|
||||
status = "%d %s" % (
|
||||
status.value, client.responses.get(status, "Unknown"))
|
||||
headers = [
|
||||
("Content-Length", str(len(answer)))] + list(headers)
|
||||
"%s", environ.get("REQUEST_METHOD", "unknown"),
|
||||
environ.get("PATH_INFO", ""), e, exc_info=True)
|
||||
# Make minimal response
|
||||
status, raw_headers, raw_answer = (
|
||||
httputils.INTERNAL_SERVER_ERROR)
|
||||
assert isinstance(raw_answer, str)
|
||||
answer = raw_answer.encode("ascii")
|
||||
status_text = "%d %s" % (
|
||||
status, client.responses.get(status, "Unknown"))
|
||||
headers = [*raw_headers, ("Content-Length", str(len(answer)))]
|
||||
answers = [answer]
|
||||
start_response(status, headers)
|
||||
start_response(status_text, headers)
|
||||
if environ.get("REQUEST_METHOD") == "HEAD":
|
||||
return []
|
||||
return answers
|
||||
|
||||
def _handle_request(self, environ):
|
||||
def _handle_request(self, environ: types.WSGIEnviron
|
||||
) -> _IntermediateResponse:
|
||||
time_begin = datetime.datetime.now()
|
||||
request_method = environ["REQUEST_METHOD"].upper()
|
||||
unsafe_path = environ.get("PATH_INFO", "")
|
||||
https = environ.get("HTTPS", "")
|
||||
|
||||
"""Manage a request."""
|
||||
def response(status, headers=(), answer=None):
|
||||
def response(status: int, headers: types.WSGIResponseHeaders,
|
||||
answer: Union[None, str, bytes]) -> _IntermediateResponse:
|
||||
"""Helper to create response from internal types.WSGIResponse"""
|
||||
headers = dict(headers)
|
||||
# Set content length
|
||||
if answer:
|
||||
if hasattr(answer, "encode"):
|
||||
logger.debug("Response content:\n%s", answer)
|
||||
answers = []
|
||||
if answer is not None:
|
||||
if isinstance(answer, str):
|
||||
if self._response_content_on_debug:
|
||||
logger.debug("Response content:\n%s", answer)
|
||||
else:
|
||||
logger.debug("Response content: suppressed by config/option [logging] response_content_on_debug")
|
||||
headers["Content-Type"] += "; charset=%s" % self._encoding
|
||||
answer = answer.encode(self._encoding)
|
||||
accept_encoding = [
|
||||
|
@ -167,67 +178,101 @@ class Application(
|
|||
headers["Content-Encoding"] = "gzip"
|
||||
|
||||
headers["Content-Length"] = str(len(answer))
|
||||
answers.append(answer)
|
||||
|
||||
# Add extra headers set in configuration
|
||||
for key in self.configuration.options("headers"):
|
||||
headers[key] = self.configuration.get("headers", key)
|
||||
headers.update(self._extra_headers)
|
||||
|
||||
# Start response
|
||||
time_end = datetime.datetime.now()
|
||||
status = "%d %s" % (
|
||||
status_text = "%d %s" % (
|
||||
status, client.responses.get(status, "Unknown"))
|
||||
logger.info(
|
||||
"%s response status for %r%s in %.3f seconds: %s",
|
||||
environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""),
|
||||
depthinfo, (time_end - time_begin).total_seconds(), status)
|
||||
logger.info("%s response status for %r%s in %.3f seconds: %s",
|
||||
request_method, unsafe_path, depthinfo,
|
||||
(time_end - time_begin).total_seconds(), status_text)
|
||||
# Return response content
|
||||
return status, list(headers.items()), [answer] if answer else []
|
||||
return status_text, list(headers.items()), answers
|
||||
|
||||
reverse_proxy = False
|
||||
remote_host = "unknown"
|
||||
if environ.get("REMOTE_HOST"):
|
||||
remote_host = repr(environ["REMOTE_HOST"])
|
||||
elif environ.get("REMOTE_ADDR"):
|
||||
remote_host = environ["REMOTE_ADDR"]
|
||||
if environ.get("HTTP_X_FORWARDED_FOR"):
|
||||
remote_host = "%r (forwarded by %s)" % (
|
||||
environ["HTTP_X_FORWARDED_FOR"], remote_host)
|
||||
reverse_proxy = True
|
||||
remote_host = "%s (forwarded for %r)" % (
|
||||
remote_host, environ["HTTP_X_FORWARDED_FOR"])
|
||||
if environ.get("HTTP_X_FORWARDED_HOST") or environ.get("HTTP_X_FORWARDED_PROTO") or environ.get("HTTP_X_FORWARDED_SERVER"):
|
||||
reverse_proxy = True
|
||||
remote_useragent = ""
|
||||
if environ.get("HTTP_USER_AGENT"):
|
||||
remote_useragent = " using %r" % environ["HTTP_USER_AGENT"]
|
||||
depthinfo = ""
|
||||
if environ.get("HTTP_DEPTH"):
|
||||
depthinfo = " with depth %r" % environ["HTTP_DEPTH"]
|
||||
time_begin = datetime.datetime.now()
|
||||
logger.info(
|
||||
"%s request for %r%s received from %s%s",
|
||||
environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""), depthinfo,
|
||||
remote_host, remote_useragent)
|
||||
headers = pprint.pformat(self._headers_log(environ))
|
||||
logger.debug("Request headers:\n%s", headers)
|
||||
|
||||
# Let reverse proxies overwrite SCRIPT_NAME
|
||||
if "HTTP_X_SCRIPT_NAME" in environ:
|
||||
# script_name must be removed from PATH_INFO by the client.
|
||||
unsafe_base_prefix = environ["HTTP_X_SCRIPT_NAME"]
|
||||
logger.debug("Script name overwritten by client: %r",
|
||||
unsafe_base_prefix)
|
||||
if https:
|
||||
https_info = " " + environ.get("SSL_PROTOCOL", "") + " " + environ.get("SSL_CIPHER", "")
|
||||
else:
|
||||
# SCRIPT_NAME is already removed from PATH_INFO, according to the
|
||||
# WSGI specification.
|
||||
unsafe_base_prefix = environ.get("SCRIPT_NAME", "")
|
||||
# Sanitize base prefix
|
||||
base_prefix = pathutils.sanitize_path(unsafe_base_prefix).rstrip("/")
|
||||
logger.debug("Sanitized script name: %r", base_prefix)
|
||||
https_info = ""
|
||||
logger.info("%s request for %r%s received from %s%s%s",
|
||||
request_method, unsafe_path, depthinfo,
|
||||
remote_host, remote_useragent, https_info)
|
||||
if self._request_header_on_debug:
|
||||
logger.debug("Request header:\n%s",
|
||||
pprint.pformat(self._scrub_headers(environ)))
|
||||
else:
|
||||
logger.debug("Request header: suppressed by config/option [logging] request_header_on_debug")
|
||||
|
||||
# SCRIPT_NAME is already removed from PATH_INFO, according to the
|
||||
# WSGI specification.
|
||||
# Reverse proxies can overwrite SCRIPT_NAME with X-SCRIPT-NAME header
|
||||
if self._script_name and (reverse_proxy is True):
|
||||
base_prefix_src = "config"
|
||||
base_prefix = self._script_name
|
||||
else:
|
||||
base_prefix_src = ("HTTP_X_SCRIPT_NAME" if "HTTP_X_SCRIPT_NAME" in
|
||||
environ else "SCRIPT_NAME")
|
||||
base_prefix = environ.get(base_prefix_src, "")
|
||||
if base_prefix and base_prefix[0] != "/":
|
||||
logger.error("Base prefix (from %s) must start with '/': %r",
|
||||
base_prefix_src, base_prefix)
|
||||
if base_prefix_src == "HTTP_X_SCRIPT_NAME":
|
||||
return response(*httputils.BAD_REQUEST)
|
||||
return response(*httputils.INTERNAL_SERVER_ERROR)
|
||||
if base_prefix.endswith("/"):
|
||||
logger.warning("Base prefix (from %s) must not end with '/': %r",
|
||||
base_prefix_src, base_prefix)
|
||||
base_prefix = base_prefix.rstrip("/")
|
||||
if base_prefix:
|
||||
logger.debug("Base prefix (from %s): %r", base_prefix_src, base_prefix)
|
||||
|
||||
# Sanitize request URI (a WSGI server indicates with an empty path,
|
||||
# that the URL targets the application root without a trailing slash)
|
||||
path = pathutils.sanitize_path(environ.get("PATH_INFO", ""))
|
||||
path = pathutils.sanitize_path(unsafe_path)
|
||||
logger.debug("Sanitized path: %r", path)
|
||||
if (reverse_proxy is True) and (len(base_prefix) > 0):
|
||||
if path.startswith(base_prefix):
|
||||
path_new = path.removeprefix(base_prefix)
|
||||
logger.debug("Called by reverse proxy, remove base prefix %r from path: %r => %r", base_prefix, path, path_new)
|
||||
path = path_new
|
||||
else:
|
||||
logger.warning("Called by reverse proxy, cannot removed base prefix %r from path: %r as not matching", base_prefix, path)
|
||||
|
||||
# Get function corresponding to method
|
||||
function = getattr(self, "do_%s" % environ["REQUEST_METHOD"].upper())
|
||||
function = getattr(self, "do_%s" % request_method, None)
|
||||
if not function:
|
||||
return response(*httputils.METHOD_NOT_ALLOWED)
|
||||
|
||||
# If "/.well-known" is not available, clients query "/"
|
||||
if path == "/.well-known" or path.startswith("/.well-known/"):
|
||||
# Redirect all "…/.well-known/{caldav,carddav}" paths to "/".
|
||||
# This shouldn't be necessary but some clients like TbSync require it.
|
||||
# Status must be MOVED PERMANENTLY using FOUND causes problems
|
||||
if (path.rstrip("/").endswith("/.well-known/caldav") or
|
||||
path.rstrip("/").endswith("/.well-known/carddav")):
|
||||
return response(*httputils.redirect(
|
||||
base_prefix + "/", client.MOVED_PERMANENTLY))
|
||||
# Return NOT FOUND for all other paths containing ".well-known"
|
||||
if path.endswith("/.well-known") or "/.well-known/" in path:
|
||||
return response(*httputils.NOT_FOUND)
|
||||
|
||||
# Ask authentication backend to check rights
|
||||
|
@ -239,21 +284,28 @@ class Application(
|
|||
login, password = login or "", password or ""
|
||||
elif authorization.startswith("Basic"):
|
||||
authorization = authorization[len("Basic"):].strip()
|
||||
login, password = self._decode(base64.b64decode(
|
||||
authorization.encode("ascii")), environ).split(":", 1)
|
||||
login, password = httputils.decode_request(
|
||||
self.configuration, environ, base64.b64decode(
|
||||
authorization.encode("ascii"))).split(":", 1)
|
||||
|
||||
user = self._auth.login(login, password) or "" if login else ""
|
||||
(user, info) = self._auth.login(login, password) or ("", "") if login else ("", "")
|
||||
if self.configuration.get("auth", "type") == "ldap":
|
||||
try:
|
||||
logger.debug("Groups %r", ",".join(self._auth._ldap_groups))
|
||||
self._rights._user_groups = self._auth._ldap_groups
|
||||
except AttributeError:
|
||||
pass
|
||||
if user and login == user:
|
||||
logger.info("Successful login: %r", user)
|
||||
logger.info("Successful login: %r (%s)", user, info)
|
||||
elif user:
|
||||
logger.info("Successful login: %r -> %r", login, user)
|
||||
logger.info("Successful login: %r -> %r (%s)", login, user, info)
|
||||
elif login:
|
||||
logger.info("Failed login attempt: %r", login)
|
||||
logger.warning("Failed login attempt from %s: %r (%s)",
|
||||
remote_host, login, info)
|
||||
# Random delay to avoid timing oracles and bruteforce attacks
|
||||
delay = self.configuration.get("auth", "delay")
|
||||
if delay > 0:
|
||||
random_delay = delay * (0.5 + random.random())
|
||||
logger.debug("Sleeping %.3f seconds", random_delay)
|
||||
if self._auth_delay > 0:
|
||||
random_delay = self._auth_delay * (0.5 + random.random())
|
||||
logger.debug("Failed login, sleeping random: %.3f sec", random_delay)
|
||||
time.sleep(random_delay)
|
||||
|
||||
if user and not pathutils.is_safe_path_component(user):
|
||||
|
@ -265,13 +317,20 @@ class Application(
|
|||
if user:
|
||||
principal_path = "/%s/" % user
|
||||
with self._storage.acquire_lock("r", user):
|
||||
principal = next(self._storage.discover(
|
||||
principal_path, depth="1"), None)
|
||||
principal = next(iter(self._storage.discover(
|
||||
principal_path, depth="1")), None)
|
||||
if not principal:
|
||||
if "W" in self._rights.authorization(user, principal_path):
|
||||
with self._storage.acquire_lock("w", user):
|
||||
try:
|
||||
self._storage.create_collection(principal_path)
|
||||
new_coll = self._storage.create_collection(principal_path)
|
||||
if new_coll:
|
||||
jsn_coll = self.configuration.get("storage", "predefined_collections")
|
||||
for (name_coll, props) in jsn_coll.items():
|
||||
try:
|
||||
self._storage.create_collection(principal_path + name_coll, props=props)
|
||||
except ValueError as e:
|
||||
logger.warning("Failed to create predefined collection %r: %s", name_coll, e)
|
||||
except ValueError as e:
|
||||
logger.warning("Failed to create principal "
|
||||
"collection %r: %s", user, e)
|
||||
|
@ -280,13 +339,12 @@ class Application(
|
|||
logger.warning("Access to principal path %r denied by "
|
||||
"rights backend", principal_path)
|
||||
|
||||
if self.configuration.get("server", "_internal_server"):
|
||||
if self._internal_server:
|
||||
# Verify content length
|
||||
content_length = int(environ.get("CONTENT_LENGTH") or 0)
|
||||
if content_length:
|
||||
max_content_length = self.configuration.get(
|
||||
"server", "max_content_length")
|
||||
if max_content_length and content_length > max_content_length:
|
||||
if (self._max_content_length > 0 and
|
||||
content_length > self._max_content_length):
|
||||
logger.info("Request body too large: %d", content_length)
|
||||
return response(*httputils.REQUEST_ENTITY_TOO_LARGE)
|
||||
|
||||
|
@ -304,95 +362,9 @@ class Application(
|
|||
# Unknown or unauthorized user
|
||||
logger.debug("Asking client for authentication")
|
||||
status = client.UNAUTHORIZED
|
||||
realm = self.configuration.get("auth", "realm")
|
||||
headers = dict(headers)
|
||||
headers.update({
|
||||
"WWW-Authenticate":
|
||||
"Basic realm=\"%s\"" % realm})
|
||||
"Basic realm=\"%s\"" % self._auth_realm})
|
||||
|
||||
return response(status, headers, answer)
|
||||
|
||||
def _read_raw_content(self, environ):
|
||||
content_length = int(environ.get("CONTENT_LENGTH") or 0)
|
||||
if not content_length:
|
||||
return b""
|
||||
content = environ["wsgi.input"].read(content_length)
|
||||
if len(content) < content_length:
|
||||
raise RuntimeError("Request body too short: %d" % len(content))
|
||||
return content
|
||||
|
||||
def _read_content(self, environ):
|
||||
content = self._decode(self._read_raw_content(environ), environ)
|
||||
logger.debug("Request content:\n%s", content)
|
||||
return content
|
||||
|
||||
def _read_xml_content(self, environ):
|
||||
content = self._decode(self._read_raw_content(environ), environ)
|
||||
if not content:
|
||||
return None
|
||||
try:
|
||||
xml_content = DefusedET.fromstring(content)
|
||||
except ET.ParseError as e:
|
||||
logger.debug("Request content (Invalid XML):\n%s", content)
|
||||
raise RuntimeError("Failed to parse XML: %s" % e) from e
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
logger.debug("Request content:\n%s",
|
||||
xmlutils.pretty_xml(xml_content))
|
||||
return xml_content
|
||||
|
||||
def _write_xml_content(self, xml_content):
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
logger.debug("Response content:\n%s",
|
||||
xmlutils.pretty_xml(xml_content))
|
||||
f = io.BytesIO()
|
||||
ET.ElementTree(xml_content).write(f, encoding=self._encoding,
|
||||
xml_declaration=True)
|
||||
return f.getvalue()
|
||||
|
||||
def _webdav_error_response(self, human_tag,
|
||||
status=httputils.WEBDAV_PRECONDITION_FAILED[0]):
|
||||
"""Generate XML error response."""
|
||||
headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
|
||||
content = self._write_xml_content(xmlutils.webdav_error(human_tag))
|
||||
return status, headers, content
|
||||
|
||||
|
||||
class Access:
|
||||
"""Helper class to check access rights of an item"""
|
||||
|
||||
def __init__(self, rights, user, path):
|
||||
self._rights = rights
|
||||
self.user = user
|
||||
self.path = path
|
||||
self.parent_path = pathutils.unstrip_path(
|
||||
posixpath.dirname(pathutils.strip_path(path)), True)
|
||||
self.permissions = self._rights.authorization(self.user, self.path)
|
||||
self._parent_permissions = None
|
||||
|
||||
@property
|
||||
def parent_permissions(self):
|
||||
if self.path == self.parent_path:
|
||||
return self.permissions
|
||||
if self._parent_permissions is None:
|
||||
self._parent_permissions = self._rights.authorization(
|
||||
self.user, self.parent_path)
|
||||
return self._parent_permissions
|
||||
|
||||
def check(self, permission, item=None):
|
||||
if permission not in "rw":
|
||||
raise ValueError("Invalid permission argument: %r" % permission)
|
||||
if not item:
|
||||
permissions = permission + permission.upper()
|
||||
parent_permissions = permission
|
||||
elif isinstance(item, storage.BaseCollection):
|
||||
if item.get_meta("tag"):
|
||||
permissions = permission
|
||||
else:
|
||||
permissions = permission.upper()
|
||||
parent_permissions = ""
|
||||
else:
|
||||
permissions = ""
|
||||
parent_permissions = permission
|
||||
return bool(rights.intersect(self.permissions, permissions) or (
|
||||
self.path != self.parent_path and
|
||||
rights.intersect(self.parent_permissions, parent_permissions)))
|
||||
|
|
145
radicale/app/base.py
Normal file
145
radicale/app/base.py
Normal file
|
@ -0,0 +1,145 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import io
|
||||
import logging
|
||||
import posixpath
|
||||
import sys
|
||||
import xml.etree.ElementTree as ET
|
||||
from typing import Optional
|
||||
|
||||
from radicale import (auth, config, hook, httputils, pathutils, rights,
|
||||
storage, types, web, xmlutils)
|
||||
from radicale.log import logger
|
||||
|
||||
# HACK: https://github.com/tiran/defusedxml/issues/54
|
||||
import defusedxml.ElementTree as DefusedET # isort:skip
|
||||
sys.modules["xml.etree"].ElementTree = ET # type:ignore[attr-defined]
|
||||
|
||||
|
||||
class ApplicationBase:
|
||||
|
||||
configuration: config.Configuration
|
||||
_auth: auth.BaseAuth
|
||||
_storage: storage.BaseStorage
|
||||
_rights: rights.BaseRights
|
||||
_web: web.BaseWeb
|
||||
_encoding: str
|
||||
_permit_delete_collection: bool
|
||||
_permit_overwrite_collection: bool
|
||||
_hook: hook.BaseHook
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
self.configuration = configuration
|
||||
self._auth = auth.load(configuration)
|
||||
self._storage = storage.load(configuration)
|
||||
self._rights = rights.load(configuration)
|
||||
self._web = web.load(configuration)
|
||||
self._encoding = configuration.get("encoding", "request")
|
||||
self._log_bad_put_request_content = configuration.get("logging", "bad_put_request_content")
|
||||
self._response_content_on_debug = configuration.get("logging", "response_content_on_debug")
|
||||
self._request_content_on_debug = configuration.get("logging", "request_content_on_debug")
|
||||
self._hook = hook.load(configuration)
|
||||
|
||||
def _read_xml_request_body(self, environ: types.WSGIEnviron
|
||||
) -> Optional[ET.Element]:
|
||||
content = httputils.decode_request(
|
||||
self.configuration, environ,
|
||||
httputils.read_raw_request_body(self.configuration, environ))
|
||||
if not content:
|
||||
return None
|
||||
try:
|
||||
xml_content = DefusedET.fromstring(content)
|
||||
except ET.ParseError as e:
|
||||
logger.debug("Request content (Invalid XML):\n%s", content)
|
||||
raise RuntimeError("Failed to parse XML: %s" % e) from e
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
if self._request_content_on_debug:
|
||||
logger.debug("Request content (XML):\n%s",
|
||||
xmlutils.pretty_xml(xml_content))
|
||||
else:
|
||||
logger.debug("Request content (XML): suppressed by config/option [logging] request_content_on_debug")
|
||||
return xml_content
|
||||
|
||||
def _xml_response(self, xml_content: ET.Element) -> bytes:
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
if self._response_content_on_debug:
|
||||
logger.debug("Response content (XML):\n%s",
|
||||
xmlutils.pretty_xml(xml_content))
|
||||
else:
|
||||
logger.debug("Response content (XML): suppressed by config/option [logging] response_content_on_debug")
|
||||
f = io.BytesIO()
|
||||
ET.ElementTree(xml_content).write(f, encoding=self._encoding,
|
||||
xml_declaration=True)
|
||||
return f.getvalue()
|
||||
|
||||
def _webdav_error_response(self, status: int, human_tag: str
|
||||
) -> types.WSGIResponse:
|
||||
"""Generate XML error response."""
|
||||
headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
|
||||
content = self._xml_response(xmlutils.webdav_error(human_tag))
|
||||
return status, headers, content
|
||||
|
||||
|
||||
class Access:
|
||||
"""Helper class to check access rights of an item"""
|
||||
|
||||
user: str
|
||||
path: str
|
||||
parent_path: str
|
||||
permissions: str
|
||||
_rights: rights.BaseRights
|
||||
_parent_permissions: Optional[str]
|
||||
|
||||
def __init__(self, rights: rights.BaseRights, user: str, path: str
|
||||
) -> None:
|
||||
self._rights = rights
|
||||
self.user = user
|
||||
self.path = path
|
||||
self.parent_path = pathutils.unstrip_path(
|
||||
posixpath.dirname(pathutils.strip_path(path)), True)
|
||||
self.permissions = self._rights.authorization(self.user, self.path)
|
||||
self._parent_permissions = None
|
||||
|
||||
@property
|
||||
def parent_permissions(self) -> str:
|
||||
if self.path == self.parent_path:
|
||||
return self.permissions
|
||||
if self._parent_permissions is None:
|
||||
self._parent_permissions = self._rights.authorization(
|
||||
self.user, self.parent_path)
|
||||
return self._parent_permissions
|
||||
|
||||
def check(self, permission: str,
|
||||
item: Optional[types.CollectionOrItem] = None) -> bool:
|
||||
if permission not in "rwdDoO":
|
||||
raise ValueError("Invalid permission argument: %r" % permission)
|
||||
if not item:
|
||||
permissions = permission + permission.upper()
|
||||
parent_permissions = permission
|
||||
elif isinstance(item, storage.BaseCollection):
|
||||
if item.tag:
|
||||
permissions = permission
|
||||
else:
|
||||
permissions = permission.upper()
|
||||
parent_permissions = ""
|
||||
else:
|
||||
permissions = ""
|
||||
parent_permissions = permission
|
||||
return bool(rights.intersect(self.permissions, permissions) or (
|
||||
self.path != self.parent_path and
|
||||
rights.intersect(self.parent_permissions, parent_permissions)))
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,27 +18,32 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import xml.etree.ElementTree as ET
|
||||
from http import client
|
||||
from xml.etree import ElementTree as ET
|
||||
from typing import Optional
|
||||
|
||||
from radicale import app, httputils, storage, xmlutils
|
||||
from radicale import httputils, storage, types, xmlutils
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.hook import HookNotificationItem, HookNotificationItemTypes
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def xml_delete(base_prefix, path, collection, href=None):
|
||||
def xml_delete(base_prefix: str, path: str, collection: storage.BaseCollection,
|
||||
item_href: Optional[str] = None) -> ET.Element:
|
||||
"""Read and answer DELETE requests.
|
||||
|
||||
Read rfc4918-9.6 for info.
|
||||
|
||||
"""
|
||||
collection.delete(href)
|
||||
collection.delete(item_href)
|
||||
|
||||
multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
|
||||
response = ET.Element(xmlutils.make_clark("D:response"))
|
||||
multistatus.append(response)
|
||||
|
||||
href = ET.Element(xmlutils.make_clark("D:href"))
|
||||
href.text = xmlutils.make_href(base_prefix, path)
|
||||
response.append(href)
|
||||
href_element = ET.Element(xmlutils.make_clark("D:href"))
|
||||
href_element.text = xmlutils.make_href(base_prefix, path)
|
||||
response.append(href_element)
|
||||
|
||||
status = ET.Element(xmlutils.make_clark("D:status"))
|
||||
status.text = xmlutils.make_response(200)
|
||||
|
@ -46,14 +52,16 @@ def xml_delete(base_prefix, path, collection, href=None):
|
|||
return multistatus
|
||||
|
||||
|
||||
class ApplicationDeleteMixin:
|
||||
def do_DELETE(self, environ, base_prefix, path, user):
|
||||
class ApplicationPartDelete(ApplicationBase):
|
||||
|
||||
def do_DELETE(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage DELETE request."""
|
||||
access = app.Access(self._rights, user, path)
|
||||
access = Access(self._rights, user, path)
|
||||
if not access.check("w"):
|
||||
return httputils.NOT_ALLOWED
|
||||
with self._storage.acquire_lock("w", user):
|
||||
item = next(self._storage.discover(path), None)
|
||||
item = next(iter(self._storage.discover(path)), None)
|
||||
if not item:
|
||||
return httputils.NOT_FOUND
|
||||
if not access.check("w", item):
|
||||
|
@ -62,10 +70,38 @@ class ApplicationDeleteMixin:
|
|||
if if_match not in ("*", item.etag):
|
||||
# ETag precondition not verified, do not delete item
|
||||
return httputils.PRECONDITION_FAILED
|
||||
hook_notification_item_list = []
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
if self._permit_delete_collection:
|
||||
if access.check("d", item):
|
||||
logger.info("delete of collection is permitted by config/option [rights] permit_delete_collection but explicit forbidden by permission 'd': %s", path)
|
||||
return httputils.NOT_ALLOWED
|
||||
else:
|
||||
if not access.check("D", item):
|
||||
logger.info("delete of collection is prevented by config/option [rights] permit_delete_collection and not explicit allowed by permission 'D': %s", path)
|
||||
return httputils.NOT_ALLOWED
|
||||
for i in item.get_all():
|
||||
hook_notification_item_list.append(
|
||||
HookNotificationItem(
|
||||
HookNotificationItemTypes.DELETE,
|
||||
access.path,
|
||||
i.uid
|
||||
)
|
||||
)
|
||||
xml_answer = xml_delete(base_prefix, path, item)
|
||||
else:
|
||||
assert item.collection is not None
|
||||
assert item.href is not None
|
||||
hook_notification_item_list.append(
|
||||
HookNotificationItem(
|
||||
HookNotificationItemTypes.DELETE,
|
||||
access.path,
|
||||
item.uid
|
||||
)
|
||||
)
|
||||
xml_answer = xml_delete(
|
||||
base_prefix, path, item.collection, item.href)
|
||||
for notification_item in hook_notification_item_list:
|
||||
self._hook.notify(notification_item)
|
||||
headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
|
||||
return client.OK, headers, self._write_xml_content(xml_answer)
|
||||
return client.OK, headers, self._xml_response(xml_answer)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
|
@ -21,17 +21,17 @@ import posixpath
|
|||
from http import client
|
||||
from urllib.parse import quote
|
||||
|
||||
from radicale import app, httputils, pathutils, storage, xmlutils
|
||||
from radicale import httputils, pathutils, storage, types, xmlutils
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def propose_filename(collection):
|
||||
def propose_filename(collection: storage.BaseCollection) -> str:
|
||||
"""Propose a filename for a collection."""
|
||||
tag = collection.get_meta("tag")
|
||||
if tag == "VADDRESSBOOK":
|
||||
if collection.tag == "VADDRESSBOOK":
|
||||
fallback_title = "Address book"
|
||||
suffix = ".vcf"
|
||||
elif tag == "VCALENDAR":
|
||||
elif collection.tag == "VCALENDAR":
|
||||
fallback_title = "Calendar"
|
||||
suffix = ".ics"
|
||||
else:
|
||||
|
@ -43,9 +43,10 @@ def propose_filename(collection):
|
|||
return title
|
||||
|
||||
|
||||
class ApplicationGetMixin:
|
||||
def _content_disposition_attachement(self, filename):
|
||||
value = "attachement"
|
||||
class ApplicationPartGet(ApplicationBase):
|
||||
|
||||
def _content_disposition_attachment(self, filename: str) -> str:
|
||||
value = "attachment"
|
||||
try:
|
||||
encoded_filename = quote(filename, encoding=self._encoding)
|
||||
except UnicodeEncodeError:
|
||||
|
@ -56,25 +57,29 @@ class ApplicationGetMixin:
|
|||
value += "; filename*=%s''%s" % (self._encoding, encoded_filename)
|
||||
return value
|
||||
|
||||
def do_GET(self, environ, base_prefix, path, user):
|
||||
def do_GET(self, environ: types.WSGIEnviron, base_prefix: str, path: str,
|
||||
user: str) -> types.WSGIResponse:
|
||||
"""Manage GET request."""
|
||||
# Redirect to .web if the root URL is requested
|
||||
# Redirect to /.web if the root path is requested
|
||||
if not pathutils.strip_path(path):
|
||||
web_path = ".web"
|
||||
if not environ.get("PATH_INFO"):
|
||||
web_path = posixpath.join(posixpath.basename(base_prefix),
|
||||
web_path)
|
||||
return (client.FOUND,
|
||||
{"Location": web_path, "Content-Type": "text/plain"},
|
||||
"Redirected to %s" % web_path)
|
||||
# Dispatch .web URL to web module
|
||||
return httputils.redirect(base_prefix + "/.web")
|
||||
if path == "/.web" or path.startswith("/.web/"):
|
||||
# Redirect to sanitized path for all subpaths of /.web
|
||||
unsafe_path = environ.get("PATH_INFO", "")
|
||||
if len(base_prefix) > 0:
|
||||
unsafe_path = unsafe_path.removeprefix(base_prefix)
|
||||
if unsafe_path != path:
|
||||
location = base_prefix + path
|
||||
logger.info("Redirecting to sanitized path: %r ==> %r",
|
||||
base_prefix + unsafe_path, location)
|
||||
return httputils.redirect(location, client.MOVED_PERMANENTLY)
|
||||
# Dispatch /.web path to web module
|
||||
return self._web.get(environ, base_prefix, path, user)
|
||||
access = app.Access(self._rights, user, path)
|
||||
access = Access(self._rights, user, path)
|
||||
if not access.check("r") and "i" not in access.permissions:
|
||||
return httputils.NOT_ALLOWED
|
||||
with self._storage.acquire_lock("r", user):
|
||||
item = next(self._storage.discover(path), None)
|
||||
item = next(iter(self._storage.discover(path)), None)
|
||||
if not item:
|
||||
return httputils.NOT_FOUND
|
||||
if access.check("r", item):
|
||||
|
@ -84,18 +89,18 @@ class ApplicationGetMixin:
|
|||
else:
|
||||
return httputils.NOT_ALLOWED
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
tag = item.get_meta("tag")
|
||||
if not tag:
|
||||
if not item.tag:
|
||||
return (httputils.NOT_ALLOWED if limited_access else
|
||||
httputils.DIRECTORY_LISTING)
|
||||
content_type = xmlutils.MIMETYPES[tag]
|
||||
content_disposition = self._content_disposition_attachement(
|
||||
content_type = xmlutils.MIMETYPES[item.tag]
|
||||
content_disposition = self._content_disposition_attachment(
|
||||
propose_filename(item))
|
||||
elif limited_access:
|
||||
return httputils.NOT_ALLOWED
|
||||
else:
|
||||
content_type = xmlutils.OBJECT_MIMETYPES[item.name]
|
||||
content_disposition = ""
|
||||
assert item.last_modified
|
||||
headers = {
|
||||
"Content-Type": content_type,
|
||||
"Last-Modified": item.last_modified,
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
|
@ -17,9 +17,15 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from radicale import types
|
||||
from radicale.app.base import ApplicationBase
|
||||
from radicale.app.get import ApplicationPartGet
|
||||
|
||||
class ApplicationHeadMixin:
|
||||
def do_HEAD(self, environ, base_prefix, path, user):
|
||||
|
||||
class ApplicationPartHead(ApplicationPartGet, ApplicationBase):
|
||||
|
||||
def do_HEAD(self, environ: types.WSGIEnviron, base_prefix: str, path: str,
|
||||
user: str) -> types.WSGIResponse:
|
||||
"""Manage HEAD request."""
|
||||
status, headers, _ = self.do_GET(environ, base_prefix, path, user)
|
||||
return status, headers, None
|
||||
# Body is dropped in `Application.__call__` for HEAD requests
|
||||
return self.do_GET(environ, base_prefix, path, user)
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,56 +18,75 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import posixpath
|
||||
import re
|
||||
import socket
|
||||
from http import client
|
||||
|
||||
from radicale import httputils
|
||||
from radicale import item as radicale_item
|
||||
from radicale import pathutils, storage, xmlutils
|
||||
import radicale.item as radicale_item
|
||||
from radicale import httputils, pathutils, storage, types, xmlutils
|
||||
from radicale.app.base import ApplicationBase
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class ApplicationMkcalendarMixin:
|
||||
def do_MKCALENDAR(self, environ, base_prefix, path, user):
|
||||
class ApplicationPartMkcalendar(ApplicationBase):
|
||||
|
||||
def do_MKCALENDAR(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage MKCALENDAR request."""
|
||||
if "w" not in self._rights.authorization(user, path):
|
||||
return httputils.NOT_ALLOWED
|
||||
try:
|
||||
xml_content = self._read_xml_content(environ)
|
||||
xml_content = self._read_xml_request_body(environ)
|
||||
except RuntimeError as e:
|
||||
logger.warning(
|
||||
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
except socket.timeout:
|
||||
logger.debug("client timed out", exc_info=True)
|
||||
logger.debug("Client timed out", exc_info=True)
|
||||
return httputils.REQUEST_TIMEOUT
|
||||
# Prepare before locking
|
||||
props = xmlutils.props_from_request(xml_content)
|
||||
props["tag"] = "VCALENDAR"
|
||||
# TODO: use this?
|
||||
# timezone = props.get("C:calendar-timezone")
|
||||
props_with_remove = xmlutils.props_from_request(xml_content)
|
||||
props_with_remove["tag"] = "VCALENDAR"
|
||||
try:
|
||||
radicale_item.check_and_sanitize_props(props)
|
||||
props = radicale_item.check_and_sanitize_props(props_with_remove)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
# TODO: use this?
|
||||
# timezone = props.get("C:calendar-timezone")
|
||||
with self._storage.acquire_lock("w", user):
|
||||
item = next(self._storage.discover(path), None)
|
||||
item = next(iter(self._storage.discover(path)), None)
|
||||
if item:
|
||||
return self._webdav_error_response("D:resource-must-be-null")
|
||||
return self._webdav_error_response(
|
||||
client.CONFLICT, "D:resource-must-be-null")
|
||||
parent_path = pathutils.unstrip_path(
|
||||
posixpath.dirname(pathutils.strip_path(path)), True)
|
||||
parent_item = next(self._storage.discover(parent_path), None)
|
||||
parent_item = next(iter(self._storage.discover(parent_path)), None)
|
||||
if not parent_item:
|
||||
return httputils.CONFLICT
|
||||
if (not isinstance(parent_item, storage.BaseCollection) or
|
||||
parent_item.get_meta("tag")):
|
||||
parent_item.tag):
|
||||
return httputils.FORBIDDEN
|
||||
try:
|
||||
self._storage.create_collection(path, props=props)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
# return better matching HTTP result in case errno is provided and catched
|
||||
errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
|
||||
if errno_match:
|
||||
logger.error(
|
||||
"Failed MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||
errno_e = int(errno_match.group(1))
|
||||
if errno_e == errno.ENOSPC:
|
||||
return httputils.INSUFFICIENT_STORAGE
|
||||
elif errno_e in [errno.EPERM, errno.EACCES]:
|
||||
return httputils.FORBIDDEN
|
||||
else:
|
||||
return httputils.INTERNAL_SERVER_ERROR
|
||||
else:
|
||||
logger.warning(
|
||||
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
return client.CREATED, {}, None
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,58 +18,80 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import posixpath
|
||||
import re
|
||||
import socket
|
||||
from http import client
|
||||
|
||||
from radicale import httputils
|
||||
from radicale import item as radicale_item
|
||||
from radicale import pathutils, rights, storage, xmlutils
|
||||
import radicale.item as radicale_item
|
||||
from radicale import httputils, pathutils, rights, storage, types, xmlutils
|
||||
from radicale.app.base import ApplicationBase
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class ApplicationMkcolMixin:
|
||||
def do_MKCOL(self, environ, base_prefix, path, user):
|
||||
class ApplicationPartMkcol(ApplicationBase):
|
||||
|
||||
def do_MKCOL(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage MKCOL request."""
|
||||
permissions = self._rights.authorization(user, path)
|
||||
if not rights.intersect(permissions, "Ww"):
|
||||
return httputils.NOT_ALLOWED
|
||||
try:
|
||||
xml_content = self._read_xml_content(environ)
|
||||
xml_content = self._read_xml_request_body(environ)
|
||||
except RuntimeError as e:
|
||||
logger.warning(
|
||||
"Bad MKCOL request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
except socket.timeout:
|
||||
logger.debug("client timed out", exc_info=True)
|
||||
logger.debug("Client timed out", exc_info=True)
|
||||
return httputils.REQUEST_TIMEOUT
|
||||
# Prepare before locking
|
||||
props = xmlutils.props_from_request(xml_content)
|
||||
props_with_remove = xmlutils.props_from_request(xml_content)
|
||||
try:
|
||||
radicale_item.check_and_sanitize_props(props)
|
||||
props = radicale_item.check_and_sanitize_props(props_with_remove)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad MKCOL request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
if (props.get("tag") and "w" not in permissions or
|
||||
not props.get("tag") and "W" not in permissions):
|
||||
collection_type = props.get("tag") or "UNKNOWN"
|
||||
if props.get("tag") and "w" not in permissions:
|
||||
logger.warning("MKCOL request %r (type:%s): %s", path, collection_type, "rejected because of missing rights 'w'")
|
||||
return httputils.NOT_ALLOWED
|
||||
if not props.get("tag") and "W" not in permissions:
|
||||
logger.warning("MKCOL request %r (type:%s): %s", path, collection_type, "rejected because of missing rights 'W'")
|
||||
return httputils.NOT_ALLOWED
|
||||
with self._storage.acquire_lock("w", user):
|
||||
item = next(self._storage.discover(path), None)
|
||||
item = next(iter(self._storage.discover(path)), None)
|
||||
if item:
|
||||
return httputils.METHOD_NOT_ALLOWED
|
||||
parent_path = pathutils.unstrip_path(
|
||||
posixpath.dirname(pathutils.strip_path(path)), True)
|
||||
parent_item = next(self._storage.discover(parent_path), None)
|
||||
parent_item = next(iter(self._storage.discover(parent_path)), None)
|
||||
if not parent_item:
|
||||
return httputils.CONFLICT
|
||||
if (not isinstance(parent_item, storage.BaseCollection) or
|
||||
parent_item.get_meta("tag")):
|
||||
parent_item.tag):
|
||||
return httputils.FORBIDDEN
|
||||
try:
|
||||
self._storage.create_collection(path, props=props)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad MKCOL request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
# return better matching HTTP result in case errno is provided and catched
|
||||
errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
|
||||
if errno_match:
|
||||
logger.error(
|
||||
"Failed MKCOL request on %r (type:%s): %s", path, collection_type, e, exc_info=True)
|
||||
errno_e = int(errno_match.group(1))
|
||||
if errno_e == errno.ENOSPC:
|
||||
return httputils.INSUFFICIENT_STORAGE
|
||||
elif errno_e in [errno.EPERM, errno.EACCES]:
|
||||
return httputils.FORBIDDEN
|
||||
else:
|
||||
return httputils.INTERNAL_SERVER_ERROR
|
||||
else:
|
||||
logger.warning(
|
||||
"Bad MKCOL request on %r (type:%s): %s", path, collection_type, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
logger.info("MKCOL request %r (type:%s): %s", path, collection_type, "successful")
|
||||
return client.CREATED, {}, None
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2023 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2023-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,24 +18,49 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import posixpath
|
||||
import re
|
||||
from http import client
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from radicale import app, httputils, pathutils, storage
|
||||
from radicale import httputils, pathutils, storage, types
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class ApplicationMoveMixin:
|
||||
def do_MOVE(self, environ, base_prefix, path, user):
|
||||
def get_server_netloc(environ: types.WSGIEnviron, force_port: bool = False):
|
||||
if environ.get("HTTP_X_FORWARDED_HOST"):
|
||||
host = environ["HTTP_X_FORWARDED_HOST"]
|
||||
proto = environ.get("HTTP_X_FORWARDED_PROTO") or "http"
|
||||
port = "443" if proto == "https" else "80"
|
||||
port = environ["HTTP_X_FORWARDED_PORT"] or port
|
||||
else:
|
||||
host = environ.get("HTTP_HOST") or environ["SERVER_NAME"]
|
||||
proto = environ["wsgi.url_scheme"]
|
||||
port = environ["SERVER_PORT"]
|
||||
if (not force_port and port == ("443" if proto == "https" else "80") or
|
||||
re.search(r":\d+$", host)):
|
||||
return host
|
||||
return host + ":" + port
|
||||
|
||||
|
||||
class ApplicationPartMove(ApplicationBase):
|
||||
|
||||
def do_MOVE(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage MOVE request."""
|
||||
raw_dest = environ.get("HTTP_DESTINATION", "")
|
||||
to_url = urlparse(raw_dest)
|
||||
if to_url.netloc != environ["HTTP_HOST"]:
|
||||
to_netloc_with_port = to_url.netloc
|
||||
if to_url.port is None:
|
||||
to_netloc_with_port += (":443" if to_url.scheme == "https"
|
||||
else ":80")
|
||||
if to_netloc_with_port != get_server_netloc(environ, force_port=True):
|
||||
logger.info("Unsupported destination address: %r", raw_dest)
|
||||
# Remote destination server, not supported
|
||||
return httputils.REMOTE_DESTINATION
|
||||
access = app.Access(self._rights, user, path)
|
||||
access = Access(self._rights, user, path)
|
||||
if not access.check("w"):
|
||||
return httputils.NOT_ALLOWED
|
||||
to_path = pathutils.sanitize_path(to_url.path)
|
||||
|
@ -43,12 +69,12 @@ class ApplicationMoveMixin:
|
|||
"start with base prefix", to_path, path)
|
||||
return httputils.NOT_ALLOWED
|
||||
to_path = to_path[len(base_prefix):]
|
||||
to_access = app.Access(self._rights, user, to_path)
|
||||
to_access = Access(self._rights, user, to_path)
|
||||
if not to_access.check("w"):
|
||||
return httputils.NOT_ALLOWED
|
||||
|
||||
with self._storage.acquire_lock("w", user):
|
||||
item = next(self._storage.discover(path), None)
|
||||
item = next(iter(self._storage.discover(path)), None)
|
||||
if not item:
|
||||
return httputils.NOT_FOUND
|
||||
if (not access.check("w", item) or
|
||||
|
@ -58,17 +84,19 @@ class ApplicationMoveMixin:
|
|||
# TODO: support moving collections
|
||||
return httputils.METHOD_NOT_ALLOWED
|
||||
|
||||
to_item = next(self._storage.discover(to_path), None)
|
||||
to_item = next(iter(self._storage.discover(to_path)), None)
|
||||
if isinstance(to_item, storage.BaseCollection):
|
||||
return httputils.FORBIDDEN
|
||||
to_parent_path = pathutils.unstrip_path(
|
||||
posixpath.dirname(pathutils.strip_path(to_path)), True)
|
||||
to_collection = next(
|
||||
self._storage.discover(to_parent_path), None)
|
||||
to_collection = next(iter(
|
||||
self._storage.discover(to_parent_path)), None)
|
||||
if not to_collection:
|
||||
return httputils.CONFLICT
|
||||
tag = item.collection.get_meta("tag")
|
||||
if not tag or tag != to_collection.get_meta("tag"):
|
||||
assert isinstance(to_collection, storage.BaseCollection)
|
||||
assert item.collection is not None
|
||||
collection_tag = item.collection.tag
|
||||
if not collection_tag or collection_tag != to_collection.tag:
|
||||
return httputils.FORBIDDEN
|
||||
if to_item and environ.get("HTTP_OVERWRITE", "F") != "T":
|
||||
return httputils.PRECONDITION_FAILED
|
||||
|
@ -76,13 +104,27 @@ class ApplicationMoveMixin:
|
|||
not to_item and
|
||||
to_collection.path != item.collection.path and
|
||||
to_collection.has_uid(item.uid)):
|
||||
return self._webdav_error_response("%s:no-uid-conflict" % (
|
||||
"C" if tag == "VCALENDAR" else "CR"))
|
||||
return self._webdav_error_response(
|
||||
client.CONFLICT, "%s:no-uid-conflict" % (
|
||||
"C" if collection_tag == "VCALENDAR" else "CR"))
|
||||
to_href = posixpath.basename(pathutils.strip_path(to_path))
|
||||
try:
|
||||
self._storage.move(item, to_collection, to_href)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad MOVE request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
# return better matching HTTP result in case errno is provided and catched
|
||||
errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
|
||||
if errno_match:
|
||||
logger.error(
|
||||
"Failed MOVE request on %r: %s", path, e, exc_info=True)
|
||||
errno_e = int(errno_match.group(1))
|
||||
if errno_e == errno.ENOSPC:
|
||||
return httputils.INSUFFICIENT_STORAGE
|
||||
elif errno_e in [errno.EPERM, errno.EACCES]:
|
||||
return httputils.FORBIDDEN
|
||||
else:
|
||||
return httputils.INTERNAL_SERVER_ERROR
|
||||
else:
|
||||
logger.warning(
|
||||
"Bad MOVE request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
return client.NO_CONTENT if to_item else client.CREATED, {}, None
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
|
@ -19,11 +19,14 @@
|
|||
|
||||
from http import client
|
||||
|
||||
from radicale import httputils
|
||||
from radicale import httputils, types
|
||||
from radicale.app.base import ApplicationBase
|
||||
|
||||
|
||||
class ApplicationOptionsMixin:
|
||||
def do_OPTIONS(self, environ, base_prefix, path, user):
|
||||
class ApplicationPartOptions(ApplicationBase):
|
||||
|
||||
def do_OPTIONS(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage OPTIONS request."""
|
||||
headers = {
|
||||
"Allow": ", ".join(
|
||||
|
|
32
radicale/app/post.py
Normal file
32
radicale/app/post.py
Normal file
|
@ -0,0 +1,32 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2020 Tom Hacohen <tom@stosb.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from radicale import httputils, types
|
||||
from radicale.app.base import ApplicationBase
|
||||
|
||||
|
||||
class ApplicationPartPost(ApplicationBase):
|
||||
|
||||
def do_POST(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage POST request."""
|
||||
if path == "/.web" or path.startswith("/.web/"):
|
||||
return self._web.post(environ, base_prefix, path, user)
|
||||
return httputils.METHOD_NOT_ALLOWED
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
|
@ -21,15 +21,19 @@ import collections
|
|||
import itertools
|
||||
import posixpath
|
||||
import socket
|
||||
import xml.etree.ElementTree as ET
|
||||
from http import client
|
||||
from xml.etree import ElementTree as ET
|
||||
from typing import Dict, Iterable, Iterator, List, Optional, Sequence, Tuple
|
||||
|
||||
from radicale import app, httputils, pathutils, rights, storage, xmlutils
|
||||
from radicale import httputils, pathutils, rights, storage, types, xmlutils
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def xml_propfind(base_prefix, path, xml_request, allowed_items, user,
|
||||
encoding):
|
||||
def xml_propfind(base_prefix: str, path: str,
|
||||
xml_request: Optional[ET.Element],
|
||||
allowed_items: Iterable[Tuple[types.CollectionOrItem, str]],
|
||||
user: str, encoding: str) -> Optional[ET.Element]:
|
||||
"""Read and answer PROPFIND requests.
|
||||
|
||||
Read rfc4918-9.1 for info.
|
||||
|
@ -40,24 +44,24 @@ def xml_propfind(base_prefix, path, xml_request, allowed_items, user,
|
|||
"""
|
||||
# A client may choose not to submit a request body. An empty PROPFIND
|
||||
# request body MUST be treated as if it were an 'allprop' request.
|
||||
top_tag = (xml_request[0] if xml_request is not None else
|
||||
ET.Element(xmlutils.make_clark("D:allprop")))
|
||||
top_element = (xml_request[0] if xml_request is not None else
|
||||
ET.Element(xmlutils.make_clark("D:allprop")))
|
||||
|
||||
props = ()
|
||||
props: List[str] = []
|
||||
allprop = False
|
||||
propname = False
|
||||
if top_tag.tag == xmlutils.make_clark("D:allprop"):
|
||||
if top_element.tag == xmlutils.make_clark("D:allprop"):
|
||||
allprop = True
|
||||
elif top_tag.tag == xmlutils.make_clark("D:propname"):
|
||||
elif top_element.tag == xmlutils.make_clark("D:propname"):
|
||||
propname = True
|
||||
elif top_tag.tag == xmlutils.make_clark("D:prop"):
|
||||
props = [prop.tag for prop in top_tag]
|
||||
elif top_element.tag == xmlutils.make_clark("D:prop"):
|
||||
props.extend(prop.tag for prop in top_element)
|
||||
|
||||
if xmlutils.make_clark("D:current-user-principal") in props and not user:
|
||||
# Ask for authentication
|
||||
# Returning the DAV:unauthenticated pseudo-principal as specified in
|
||||
# RFC 5397 doesn't seem to work with DAVdroid.
|
||||
return client.FORBIDDEN, None
|
||||
# RFC 5397 doesn't seem to work with DAVx5.
|
||||
return None
|
||||
|
||||
# Writing answer
|
||||
multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
|
||||
|
@ -68,29 +72,32 @@ def xml_propfind(base_prefix, path, xml_request, allowed_items, user,
|
|||
base_prefix, path, item, props, user, encoding, write=write,
|
||||
allprop=allprop, propname=propname))
|
||||
|
||||
return client.MULTI_STATUS, multistatus
|
||||
return multistatus
|
||||
|
||||
|
||||
def xml_propfind_response(base_prefix, path, item, props, user, encoding,
|
||||
write=False, propname=False, allprop=False):
|
||||
def xml_propfind_response(
|
||||
base_prefix: str, path: str, item: types.CollectionOrItem,
|
||||
props: Sequence[str], user: str, encoding: str, write: bool = False,
|
||||
propname: bool = False, allprop: bool = False) -> ET.Element:
|
||||
"""Build and return a PROPFIND response."""
|
||||
if propname and allprop or (props and (propname or allprop)):
|
||||
raise ValueError("Only use one of props, propname and allprops")
|
||||
is_collection = isinstance(item, storage.BaseCollection)
|
||||
if is_collection:
|
||||
is_leaf = item.get_meta("tag") in ("VADDRESSBOOK", "VCALENDAR")
|
||||
collection = item
|
||||
else:
|
||||
collection = item.collection
|
||||
|
||||
response = ET.Element(xmlutils.make_clark("D:response"))
|
||||
href = ET.Element(xmlutils.make_clark("D:href"))
|
||||
if is_collection:
|
||||
# Some clients expect collections to end with /
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
is_collection = True
|
||||
is_leaf = item.tag in ("VADDRESSBOOK", "VCALENDAR", "VSUBSCRIBED")
|
||||
collection = item
|
||||
# Some clients expect collections to end with `/`
|
||||
uri = pathutils.unstrip_path(item.path, True)
|
||||
else:
|
||||
uri = pathutils.unstrip_path(
|
||||
posixpath.join(collection.path, item.href))
|
||||
is_collection = is_leaf = False
|
||||
assert item.collection is not None
|
||||
assert item.href
|
||||
collection = item.collection
|
||||
uri = pathutils.unstrip_path(posixpath.join(
|
||||
collection.path, item.href))
|
||||
response = ET.Element(xmlutils.make_clark("D:response"))
|
||||
href = ET.Element(xmlutils.make_clark("D:href"))
|
||||
href.text = xmlutils.make_href(base_prefix, uri)
|
||||
response.append(href)
|
||||
|
||||
|
@ -120,12 +127,12 @@ def xml_propfind_response(base_prefix, path, item, props, user, encoding,
|
|||
if is_leaf:
|
||||
props.append(xmlutils.make_clark("D:displayname"))
|
||||
props.append(xmlutils.make_clark("D:sync-token"))
|
||||
if collection.get_meta("tag") == "VCALENDAR":
|
||||
if collection.tag == "VCALENDAR":
|
||||
props.append(xmlutils.make_clark("CS:getctag"))
|
||||
props.append(
|
||||
xmlutils.make_clark("C:supported-calendar-component-set"))
|
||||
|
||||
meta = item.get_meta()
|
||||
meta = collection.get_meta()
|
||||
for tag in meta:
|
||||
if tag == "tag":
|
||||
continue
|
||||
|
@ -133,11 +140,11 @@ def xml_propfind_response(base_prefix, path, item, props, user, encoding,
|
|||
if clark_tag not in props:
|
||||
props.append(clark_tag)
|
||||
|
||||
responses = collections.defaultdict(list)
|
||||
responses: Dict[int, List[ET.Element]] = collections.defaultdict(list)
|
||||
if propname:
|
||||
for tag in props:
|
||||
responses[200].append(ET.Element(tag))
|
||||
props = ()
|
||||
props = []
|
||||
for tag in props:
|
||||
element = ET.Element(tag)
|
||||
is404 = False
|
||||
|
@ -152,25 +159,25 @@ def xml_propfind_response(base_prefix, path, item, props, user, encoding,
|
|||
else:
|
||||
is404 = True
|
||||
elif tag == xmlutils.make_clark("D:principal-collection-set"):
|
||||
tag = ET.Element(xmlutils.make_clark("D:href"))
|
||||
tag.text = xmlutils.make_href(base_prefix, "/")
|
||||
element.append(tag)
|
||||
child_element = ET.Element(xmlutils.make_clark("D:href"))
|
||||
child_element.text = xmlutils.make_href(base_prefix, "/")
|
||||
element.append(child_element)
|
||||
elif (tag in (xmlutils.make_clark("C:calendar-user-address-set"),
|
||||
xmlutils.make_clark("D:principal-URL"),
|
||||
xmlutils.make_clark("CR:addressbook-home-set"),
|
||||
xmlutils.make_clark("C:calendar-home-set")) and
|
||||
collection.is_principal and is_collection):
|
||||
tag = ET.Element(xmlutils.make_clark("D:href"))
|
||||
tag.text = xmlutils.make_href(base_prefix, path)
|
||||
element.append(tag)
|
||||
is_collection and collection.is_principal):
|
||||
child_element = ET.Element(xmlutils.make_clark("D:href"))
|
||||
child_element.text = xmlutils.make_href(base_prefix, path)
|
||||
element.append(child_element)
|
||||
elif tag == xmlutils.make_clark("C:supported-calendar-component-set"):
|
||||
human_tag = xmlutils.make_human_tag(tag)
|
||||
if is_collection and is_leaf:
|
||||
meta = item.get_meta(human_tag)
|
||||
if meta:
|
||||
components = meta.split(",")
|
||||
components_text = collection.get_meta(human_tag)
|
||||
if components_text:
|
||||
components = components_text.split(",")
|
||||
else:
|
||||
components = ("VTODO", "VEVENT", "VJOURNAL")
|
||||
components = ["VTODO", "VEVENT", "VJOURNAL"]
|
||||
for component in components:
|
||||
comp = ET.Element(xmlutils.make_clark("C:comp"))
|
||||
comp.set("name", component)
|
||||
|
@ -179,9 +186,10 @@ def xml_propfind_response(base_prefix, path, item, props, user, encoding,
|
|||
is404 = True
|
||||
elif tag == xmlutils.make_clark("D:current-user-principal"):
|
||||
if user:
|
||||
tag = ET.Element(xmlutils.make_clark("D:href"))
|
||||
tag.text = xmlutils.make_href(base_prefix, "/%s/" % user)
|
||||
element.append(tag)
|
||||
child_element = ET.Element(xmlutils.make_clark("D:href"))
|
||||
child_element.text = xmlutils.make_href(
|
||||
base_prefix, "/%s/" % user)
|
||||
element.append(child_element)
|
||||
else:
|
||||
element.append(ET.Element(
|
||||
xmlutils.make_clark("D:unauthenticated")))
|
||||
|
@ -204,18 +212,19 @@ def xml_propfind_response(base_prefix, path, item, props, user, encoding,
|
|||
"D:principal-property-search"]
|
||||
if is_collection and is_leaf:
|
||||
reports.append("D:sync-collection")
|
||||
if item.get_meta("tag") == "VADDRESSBOOK":
|
||||
if collection.tag == "VADDRESSBOOK":
|
||||
reports.append("CR:addressbook-multiget")
|
||||
reports.append("CR:addressbook-query")
|
||||
elif item.get_meta("tag") == "VCALENDAR":
|
||||
elif collection.tag == "VCALENDAR":
|
||||
reports.append("C:calendar-multiget")
|
||||
reports.append("C:calendar-query")
|
||||
for human_tag in reports:
|
||||
supported_report = ET.Element(
|
||||
xmlutils.make_clark("D:supported-report"))
|
||||
report_tag = ET.Element(xmlutils.make_clark("D:report"))
|
||||
report_tag.append(ET.Element(xmlutils.make_clark(human_tag)))
|
||||
supported_report.append(report_tag)
|
||||
report_element = ET.Element(xmlutils.make_clark("D:report"))
|
||||
report_element.append(
|
||||
ET.Element(xmlutils.make_clark(human_tag)))
|
||||
supported_report.append(report_element)
|
||||
element.append(supported_report)
|
||||
elif tag == xmlutils.make_clark("D:getcontentlength"):
|
||||
if not is_collection or is_leaf:
|
||||
|
@ -225,64 +234,85 @@ def xml_propfind_response(base_prefix, path, item, props, user, encoding,
|
|||
elif tag == xmlutils.make_clark("D:owner"):
|
||||
# return empty elment, if no owner available (rfc3744-5.1)
|
||||
if collection.owner:
|
||||
tag = ET.Element(xmlutils.make_clark("D:href"))
|
||||
tag.text = xmlutils.make_href(
|
||||
child_element = ET.Element(xmlutils.make_clark("D:href"))
|
||||
child_element.text = xmlutils.make_href(
|
||||
base_prefix, "/%s/" % collection.owner)
|
||||
element.append(tag)
|
||||
element.append(child_element)
|
||||
elif is_collection:
|
||||
if tag == xmlutils.make_clark("D:getcontenttype"):
|
||||
if is_leaf:
|
||||
element.text = xmlutils.MIMETYPES[item.get_meta("tag")]
|
||||
element.text = xmlutils.MIMETYPES[
|
||||
collection.tag]
|
||||
else:
|
||||
is404 = True
|
||||
elif tag == xmlutils.make_clark("D:resourcetype"):
|
||||
if item.is_principal:
|
||||
tag = ET.Element(xmlutils.make_clark("D:principal"))
|
||||
element.append(tag)
|
||||
if collection.is_principal:
|
||||
child_element = ET.Element(
|
||||
xmlutils.make_clark("D:principal"))
|
||||
element.append(child_element)
|
||||
if is_leaf:
|
||||
if item.get_meta("tag") == "VADDRESSBOOK":
|
||||
tag = ET.Element(
|
||||
if collection.tag == "VADDRESSBOOK":
|
||||
child_element = ET.Element(
|
||||
xmlutils.make_clark("CR:addressbook"))
|
||||
element.append(tag)
|
||||
elif item.get_meta("tag") == "VCALENDAR":
|
||||
tag = ET.Element(xmlutils.make_clark("C:calendar"))
|
||||
element.append(tag)
|
||||
tag = ET.Element(xmlutils.make_clark("D:collection"))
|
||||
element.append(tag)
|
||||
element.append(child_element)
|
||||
elif collection.tag == "VCALENDAR":
|
||||
child_element = ET.Element(
|
||||
xmlutils.make_clark("C:calendar"))
|
||||
element.append(child_element)
|
||||
elif collection.tag == "VSUBSCRIBED":
|
||||
child_element = ET.Element(
|
||||
xmlutils.make_clark("CS:subscribed"))
|
||||
element.append(child_element)
|
||||
child_element = ET.Element(xmlutils.make_clark("D:collection"))
|
||||
element.append(child_element)
|
||||
elif tag == xmlutils.make_clark("RADICALE:displayname"):
|
||||
# Only for internal use by the web interface
|
||||
displayname = item.get_meta("D:displayname")
|
||||
displayname = collection.get_meta("D:displayname")
|
||||
if displayname is not None:
|
||||
element.text = displayname
|
||||
else:
|
||||
is404 = True
|
||||
elif tag == xmlutils.make_clark("RADICALE:getcontentcount"):
|
||||
# Only for internal use by the web interface
|
||||
if isinstance(item, storage.BaseCollection) and not collection.is_principal:
|
||||
element.text = str(sum(1 for x in item.get_all()))
|
||||
else:
|
||||
is404 = True
|
||||
elif tag == xmlutils.make_clark("D:displayname"):
|
||||
displayname = item.get_meta("D:displayname")
|
||||
displayname = collection.get_meta("D:displayname")
|
||||
if not displayname and is_leaf:
|
||||
displayname = item.path
|
||||
displayname = collection.path
|
||||
if displayname is not None:
|
||||
element.text = displayname
|
||||
else:
|
||||
is404 = True
|
||||
elif tag == xmlutils.make_clark("CS:getctag"):
|
||||
if is_leaf:
|
||||
element.text = item.etag
|
||||
element.text = collection.etag
|
||||
else:
|
||||
is404 = True
|
||||
elif tag == xmlutils.make_clark("D:sync-token"):
|
||||
if is_leaf:
|
||||
element.text, _ = item.sync()
|
||||
element.text, _ = collection.sync()
|
||||
else:
|
||||
is404 = True
|
||||
elif tag == xmlutils.make_clark("CS:source"):
|
||||
if is_leaf:
|
||||
child_element = ET.Element(xmlutils.make_clark("D:href"))
|
||||
child_element.text = collection.get_meta('CS:source')
|
||||
element.append(child_element)
|
||||
else:
|
||||
is404 = True
|
||||
else:
|
||||
human_tag = xmlutils.make_human_tag(tag)
|
||||
meta = item.get_meta(human_tag)
|
||||
if meta is not None:
|
||||
element.text = meta
|
||||
tag_text = collection.get_meta(human_tag)
|
||||
if tag_text is not None:
|
||||
element.text = tag_text
|
||||
else:
|
||||
is404 = True
|
||||
# Not for collections
|
||||
elif tag == xmlutils.make_clark("D:getcontenttype"):
|
||||
assert not isinstance(item, storage.BaseCollection)
|
||||
element.text = xmlutils.get_content_type(item, encoding)
|
||||
elif tag == xmlutils.make_clark("D:resourcetype"):
|
||||
# resourcetype must be returned empty for non-collection elements
|
||||
|
@ -292,13 +322,13 @@ def xml_propfind_response(base_prefix, path, item, props, user, encoding,
|
|||
|
||||
responses[404 if is404 else 200].append(element)
|
||||
|
||||
for status_code, childs in responses.items():
|
||||
if not childs:
|
||||
for status_code, children in responses.items():
|
||||
if not children:
|
||||
continue
|
||||
propstat = ET.Element(xmlutils.make_clark("D:propstat"))
|
||||
response.append(propstat)
|
||||
prop = ET.Element(xmlutils.make_clark("D:prop"))
|
||||
prop.extend(childs)
|
||||
prop.extend(children)
|
||||
propstat.append(prop)
|
||||
status = ET.Element(xmlutils.make_clark("D:status"))
|
||||
status.text = xmlutils.make_response(status_code)
|
||||
|
@ -307,13 +337,16 @@ def xml_propfind_response(base_prefix, path, item, props, user, encoding,
|
|||
return response
|
||||
|
||||
|
||||
class ApplicationPropfindMixin:
|
||||
def _collect_allowed_items(self, items, user):
|
||||
class ApplicationPartPropfind(ApplicationBase):
|
||||
|
||||
def _collect_allowed_items(
|
||||
self, items: Iterable[types.CollectionOrItem], user: str
|
||||
) -> Iterator[Tuple[types.CollectionOrItem, str]]:
|
||||
"""Get items from request that user is allowed to access."""
|
||||
for item in items:
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
path = pathutils.unstrip_path(item.path, True)
|
||||
if item.get_meta("tag"):
|
||||
if item.tag:
|
||||
permissions = rights.intersect(
|
||||
self._rights.authorization(user, path), "rw")
|
||||
target = "collection with tag %r" % item.path
|
||||
|
@ -322,6 +355,7 @@ class ApplicationPropfindMixin:
|
|||
self._rights.authorization(user, path), "RW")
|
||||
target = "collection %r" % item.path
|
||||
else:
|
||||
assert item.collection is not None
|
||||
path = pathutils.unstrip_path(item.collection.path, True)
|
||||
permissions = rights.intersect(
|
||||
self._rights.authorization(user, path), "rw")
|
||||
|
@ -341,37 +375,38 @@ class ApplicationPropfindMixin:
|
|||
if permission:
|
||||
yield item, permission
|
||||
|
||||
def do_PROPFIND(self, environ, base_prefix, path, user):
|
||||
def do_PROPFIND(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage PROPFIND request."""
|
||||
access = app.Access(self._rights, user, path)
|
||||
access = Access(self._rights, user, path)
|
||||
if not access.check("r"):
|
||||
return httputils.NOT_ALLOWED
|
||||
try:
|
||||
xml_content = self._read_xml_content(environ)
|
||||
xml_content = self._read_xml_request_body(environ)
|
||||
except RuntimeError as e:
|
||||
logger.warning(
|
||||
"Bad PROPFIND request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
except socket.timeout:
|
||||
logger.debug("client timed out", exc_info=True)
|
||||
logger.debug("Client timed out", exc_info=True)
|
||||
return httputils.REQUEST_TIMEOUT
|
||||
with self._storage.acquire_lock("r", user):
|
||||
items = self._storage.discover(
|
||||
path, environ.get("HTTP_DEPTH", "0"))
|
||||
items_iter = iter(self._storage.discover(
|
||||
path, environ.get("HTTP_DEPTH", "0"),
|
||||
None, self._rights._user_groups))
|
||||
# take root item for rights checking
|
||||
item = next(items, None)
|
||||
item = next(items_iter, None)
|
||||
if not item:
|
||||
return httputils.NOT_FOUND
|
||||
if not access.check("r", item):
|
||||
return httputils.NOT_ALLOWED
|
||||
# put item back
|
||||
items = itertools.chain([item], items)
|
||||
allowed_items = self._collect_allowed_items(items, user)
|
||||
items_iter = itertools.chain([item], items_iter)
|
||||
allowed_items = self._collect_allowed_items(items_iter, user)
|
||||
headers = {"DAV": httputils.DAV_HEADERS,
|
||||
"Content-Type": "text/xml; charset=%s" % self._encoding}
|
||||
status, xml_answer = xml_propfind(
|
||||
base_prefix, path, xml_content, allowed_items, user,
|
||||
self._encoding)
|
||||
if status == client.FORBIDDEN:
|
||||
xml_answer = xml_propfind(base_prefix, path, xml_content,
|
||||
allowed_items, user, self._encoding)
|
||||
if xml_answer is None:
|
||||
return httputils.NOT_ALLOWED
|
||||
return status, headers, self._write_xml_content(xml_answer)
|
||||
return client.MULTI_STATUS, headers, self._xml_response(xml_answer)
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2020-2020 Tuna Celik <tuna@jakpark.com>
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,90 +19,76 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import re
|
||||
import socket
|
||||
import xml.etree.ElementTree as ET
|
||||
from http import client
|
||||
from xml.etree import ElementTree as ET
|
||||
from typing import Dict, Optional, cast
|
||||
|
||||
from radicale import app, httputils
|
||||
from radicale import item as radicale_item
|
||||
from radicale import storage, xmlutils
|
||||
import defusedxml.ElementTree as DefusedET
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import httputils, storage, types, xmlutils
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.hook import HookNotificationItem, HookNotificationItemTypes
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def xml_add_propstat_to(element, tag, status_number):
|
||||
"""Add a PROPSTAT response structure to an element.
|
||||
|
||||
The PROPSTAT answer structure is defined in rfc4918-9.1. It is added to the
|
||||
given ``element``, for the following ``tag`` with the given
|
||||
``status_number``.
|
||||
|
||||
"""
|
||||
propstat = ET.Element(xmlutils.make_clark("D:propstat"))
|
||||
element.append(propstat)
|
||||
|
||||
prop = ET.Element(xmlutils.make_clark("D:prop"))
|
||||
propstat.append(prop)
|
||||
|
||||
clark_tag = xmlutils.make_clark(tag)
|
||||
prop_tag = ET.Element(clark_tag)
|
||||
prop.append(prop_tag)
|
||||
|
||||
status = ET.Element(xmlutils.make_clark("D:status"))
|
||||
status.text = xmlutils.make_response(status_number)
|
||||
propstat.append(status)
|
||||
|
||||
|
||||
def xml_proppatch(base_prefix, path, xml_request, collection):
|
||||
def xml_proppatch(base_prefix: str, path: str,
|
||||
xml_request: Optional[ET.Element],
|
||||
collection: storage.BaseCollection) -> ET.Element:
|
||||
"""Read and answer PROPPATCH requests.
|
||||
|
||||
Read rfc4918-9.2 for info.
|
||||
|
||||
"""
|
||||
props_to_set = xmlutils.props_from_request(xml_request, actions=("set",))
|
||||
props_to_remove = xmlutils.props_from_request(xml_request,
|
||||
actions=("remove",))
|
||||
|
||||
multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
|
||||
response = ET.Element(xmlutils.make_clark("D:response"))
|
||||
multistatus.append(response)
|
||||
|
||||
href = ET.Element(xmlutils.make_clark("D:href"))
|
||||
href.text = xmlutils.make_href(base_prefix, path)
|
||||
response.append(href)
|
||||
# Create D:propstat element for props with status 200 OK
|
||||
propstat = ET.Element(xmlutils.make_clark("D:propstat"))
|
||||
status = ET.Element(xmlutils.make_clark("D:status"))
|
||||
status.text = xmlutils.make_response(200)
|
||||
props_ok = ET.Element(xmlutils.make_clark("D:prop"))
|
||||
propstat.append(props_ok)
|
||||
propstat.append(status)
|
||||
response.append(propstat)
|
||||
|
||||
new_props = collection.get_meta()
|
||||
for short_name, value in props_to_set.items():
|
||||
new_props[short_name] = value
|
||||
xml_add_propstat_to(response, short_name, 200)
|
||||
for short_name in props_to_remove:
|
||||
try:
|
||||
del new_props[short_name]
|
||||
except KeyError:
|
||||
pass
|
||||
xml_add_propstat_to(response, short_name, 200)
|
||||
radicale_item.check_and_sanitize_props(new_props)
|
||||
collection.set_meta(new_props)
|
||||
props_with_remove = xmlutils.props_from_request(xml_request)
|
||||
all_props_with_remove = cast(Dict[str, Optional[str]],
|
||||
dict(collection.get_meta()))
|
||||
all_props_with_remove.update(props_with_remove)
|
||||
all_props = radicale_item.check_and_sanitize_props(all_props_with_remove)
|
||||
collection.set_meta(all_props)
|
||||
for short_name in props_with_remove:
|
||||
props_ok.append(ET.Element(xmlutils.make_clark(short_name)))
|
||||
|
||||
return multistatus
|
||||
|
||||
|
||||
class ApplicationProppatchMixin:
|
||||
def do_PROPPATCH(self, environ, base_prefix, path, user):
|
||||
class ApplicationPartProppatch(ApplicationBase):
|
||||
|
||||
def do_PROPPATCH(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage PROPPATCH request."""
|
||||
access = app.Access(self._rights, user, path)
|
||||
access = Access(self._rights, user, path)
|
||||
if not access.check("w"):
|
||||
return httputils.NOT_ALLOWED
|
||||
try:
|
||||
xml_content = self._read_xml_content(environ)
|
||||
xml_content = self._read_xml_request_body(environ)
|
||||
except RuntimeError as e:
|
||||
logger.warning(
|
||||
"Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
except socket.timeout:
|
||||
logger.debug("client timed out", exc_info=True)
|
||||
logger.debug("Client timed out", exc_info=True)
|
||||
return httputils.REQUEST_TIMEOUT
|
||||
with self._storage.acquire_lock("w", user):
|
||||
item = next(self._storage.discover(path), None)
|
||||
item = next(iter(self._storage.discover(path)), None)
|
||||
if not item:
|
||||
return httputils.NOT_FOUND
|
||||
if not access.check("w", item):
|
||||
|
@ -112,9 +100,31 @@ class ApplicationProppatchMixin:
|
|||
try:
|
||||
xml_answer = xml_proppatch(base_prefix, path, xml_content,
|
||||
item)
|
||||
if xml_content is not None:
|
||||
hook_notification_item = HookNotificationItem(
|
||||
HookNotificationItemTypes.CPATCH,
|
||||
access.path,
|
||||
DefusedET.tostring(
|
||||
xml_content,
|
||||
encoding=self._encoding
|
||||
).decode(encoding=self._encoding)
|
||||
)
|
||||
self._hook.notify(hook_notification_item)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
return (client.MULTI_STATUS, headers,
|
||||
self._write_xml_content(xml_answer))
|
||||
# return better matching HTTP result in case errno is provided and catched
|
||||
errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
|
||||
if errno_match:
|
||||
logger.error(
|
||||
"Failed PROPPATCH request on %r: %s", path, e, exc_info=True)
|
||||
errno_e = int(errno_match.group(1))
|
||||
if errno_e == errno.ENOSPC:
|
||||
return httputils.INSUFFICIENT_STORAGE
|
||||
elif errno_e in [errno.EPERM, errno.EACCES]:
|
||||
return httputils.FORBIDDEN
|
||||
else:
|
||||
return httputils.INTERNAL_SERVER_ERROR
|
||||
else:
|
||||
logger.warning(
|
||||
"Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
return client.MULTI_STATUS, headers, self._xml_response(xml_answer)
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2020-2023 Tuna Celik <tuna@jakpark.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,25 +19,41 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import itertools
|
||||
import posixpath
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
from http import client
|
||||
from types import TracebackType
|
||||
from typing import Iterator, List, Mapping, MutableMapping, Optional, Tuple
|
||||
|
||||
import vobject
|
||||
|
||||
from radicale import app, httputils
|
||||
from radicale import item as radicale_item
|
||||
from radicale import pathutils, rights, storage, xmlutils
|
||||
import radicale.item as radicale_item
|
||||
from radicale import (httputils, pathutils, rights, storage, types, utils,
|
||||
xmlutils)
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.hook import HookNotificationItem, HookNotificationItemTypes
|
||||
from radicale.log import logger
|
||||
|
||||
MIMETYPE_TAGS = {value: key for key, value in xmlutils.MIMETYPES.items()}
|
||||
MIMETYPE_TAGS: Mapping[str, str] = {value: key for key, value in
|
||||
xmlutils.MIMETYPES.items()}
|
||||
|
||||
PRODID = u"-//Radicale//NONSGML Version " + utils.package_version("radicale") + "//EN"
|
||||
|
||||
|
||||
def prepare(vobject_items, path, content_type, permissions, parent_permissions,
|
||||
tag=None, write_whole_collection=None):
|
||||
if (write_whole_collection or permissions and not parent_permissions):
|
||||
def prepare(vobject_items: List[vobject.base.Component], path: str,
|
||||
content_type: str, permission: bool, parent_permission: bool,
|
||||
tag: Optional[str] = None,
|
||||
write_whole_collection: Optional[bool] = None) -> Tuple[
|
||||
Iterator[radicale_item.Item], # items
|
||||
Optional[str], # tag
|
||||
Optional[bool], # write_whole_collection
|
||||
Optional[MutableMapping[str, str]], # props
|
||||
Optional[Tuple[type, BaseException, Optional[TracebackType]]]]:
|
||||
if (write_whole_collection or permission and not parent_permission):
|
||||
write_whole_collection = True
|
||||
tag = radicale_item.predict_tag_of_whole_collection(
|
||||
vobject_items, MIMETYPE_TAGS.get(content_type))
|
||||
|
@ -43,20 +61,20 @@ def prepare(vobject_items, path, content_type, permissions, parent_permissions,
|
|||
raise ValueError("Can't determine collection tag")
|
||||
collection_path = pathutils.strip_path(path)
|
||||
elif (write_whole_collection is not None and not write_whole_collection or
|
||||
not permissions and parent_permissions):
|
||||
not permission and parent_permission):
|
||||
write_whole_collection = False
|
||||
if tag is None:
|
||||
tag = radicale_item.predict_tag_of_parent_collection(vobject_items)
|
||||
collection_path = posixpath.dirname(pathutils.strip_path(path))
|
||||
props = None
|
||||
props: Optional[MutableMapping[str, str]] = None
|
||||
stored_exc_info = None
|
||||
items = []
|
||||
try:
|
||||
if tag:
|
||||
if tag and write_whole_collection is not None:
|
||||
radicale_item.check_and_sanitize_items(
|
||||
vobject_items, is_collection=write_whole_collection, tag=tag)
|
||||
if write_whole_collection and tag == "VCALENDAR":
|
||||
vobject_components = []
|
||||
vobject_components: List[vobject.base.Component] = []
|
||||
vobject_item, = vobject_items
|
||||
for content in ("vevent", "vtodo", "vjournal"):
|
||||
vobject_components.extend(
|
||||
|
@ -68,6 +86,7 @@ def prepare(vobject_items, path, content_type, permissions, parent_permissions,
|
|||
vobject_collection = vobject.iCalendar()
|
||||
for component in components:
|
||||
vobject_collection.add(component)
|
||||
vobject_collection.add(vobject.base.ContentLine("PRODID", [], PRODID))
|
||||
item = radicale_item.Item(collection_path=collection_path,
|
||||
vobject_item=vobject_collection)
|
||||
item.prepare()
|
||||
|
@ -98,40 +117,47 @@ def prepare(vobject_items, path, content_type, permissions, parent_permissions,
|
|||
caldesc = vobject_items[0].x_wr_caldesc.value
|
||||
if caldesc:
|
||||
props["C:calendar-description"] = caldesc
|
||||
radicale_item.check_and_sanitize_props(props)
|
||||
props = radicale_item.check_and_sanitize_props(props)
|
||||
except Exception:
|
||||
stored_exc_info = sys.exc_info()
|
||||
exc_info_or_none_tuple = sys.exc_info()
|
||||
assert exc_info_or_none_tuple[0] is not None
|
||||
stored_exc_info = exc_info_or_none_tuple
|
||||
|
||||
# Use generator for items and delete references to free memory
|
||||
# early
|
||||
def items_generator():
|
||||
# Use iterator for items and delete references to free memory early
|
||||
def items_iter() -> Iterator[radicale_item.Item]:
|
||||
while items:
|
||||
yield items.pop(0)
|
||||
return (items_generator(), tag, write_whole_collection, props,
|
||||
stored_exc_info)
|
||||
return items_iter(), tag, write_whole_collection, props, stored_exc_info
|
||||
|
||||
|
||||
class ApplicationPutMixin:
|
||||
def do_PUT(self, environ, base_prefix, path, user):
|
||||
class ApplicationPartPut(ApplicationBase):
|
||||
|
||||
def do_PUT(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage PUT request."""
|
||||
access = app.Access(self._rights, user, path)
|
||||
access = Access(self._rights, user, path)
|
||||
if not access.check("w"):
|
||||
return httputils.NOT_ALLOWED
|
||||
try:
|
||||
content = self._read_content(environ)
|
||||
content = httputils.read_request_body(self.configuration, environ)
|
||||
except RuntimeError as e:
|
||||
logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
|
||||
logger.warning("Bad PUT request on %r (read_request_body): %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
except socket.timeout:
|
||||
logger.debug("client timed out", exc_info=True)
|
||||
logger.debug("Client timed out", exc_info=True)
|
||||
return httputils.REQUEST_TIMEOUT
|
||||
# Prepare before locking
|
||||
content_type = environ.get("CONTENT_TYPE", "").split(";")[0]
|
||||
content_type = environ.get("CONTENT_TYPE", "").split(";",
|
||||
maxsplit=1)[0]
|
||||
try:
|
||||
vobject_items = tuple(vobject.readComponents(content or ""))
|
||||
vobject_items = radicale_item.read_components(content or "")
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Bad PUT request on %r: %s", path, e, exc_info=True)
|
||||
"Bad PUT request on %r (read_components): %s", path, e, exc_info=True)
|
||||
if self._log_bad_put_request_content:
|
||||
logger.warning("Bad PUT request content of %r:\n%s", path, content)
|
||||
else:
|
||||
logger.debug("Bad PUT request content: suppressed by config/option [logging] bad_put_request_content")
|
||||
return httputils.BAD_REQUEST
|
||||
(prepared_items, prepared_tag, prepared_write_whole_collection,
|
||||
prepared_props, prepared_exc_info) = prepare(
|
||||
|
@ -139,40 +165,57 @@ class ApplicationPutMixin:
|
|||
bool(rights.intersect(access.permissions, "Ww")),
|
||||
bool(rights.intersect(access.parent_permissions, "w")))
|
||||
|
||||
with self._storage.acquire_lock("w", user):
|
||||
item = next(self._storage.discover(path), None)
|
||||
parent_item = next(
|
||||
self._storage.discover(access.parent_path), None)
|
||||
if not parent_item:
|
||||
with self._storage.acquire_lock("w", user, path=path):
|
||||
item = next(iter(self._storage.discover(path)), None)
|
||||
parent_item = next(iter(
|
||||
self._storage.discover(access.parent_path)), None)
|
||||
if not isinstance(parent_item, storage.BaseCollection):
|
||||
return httputils.CONFLICT
|
||||
|
||||
write_whole_collection = (
|
||||
isinstance(item, storage.BaseCollection) or
|
||||
not parent_item.get_meta("tag"))
|
||||
not parent_item.tag)
|
||||
|
||||
if write_whole_collection:
|
||||
tag = prepared_tag
|
||||
else:
|
||||
tag = parent_item.get_meta("tag")
|
||||
tag = parent_item.tag
|
||||
|
||||
if write_whole_collection:
|
||||
if ("w" if tag else "W") not in access.permissions:
|
||||
if not parent_item.tag:
|
||||
logger.warning("Not a collection (check .Radicale.props): %r", parent_item.path)
|
||||
return httputils.NOT_ALLOWED
|
||||
if not self._permit_overwrite_collection:
|
||||
if ("O") not in access.permissions:
|
||||
logger.info("overwrite of collection is prevented by config/option [rights] permit_overwrite_collection and not explicit allowed by permssion 'O': %r", path)
|
||||
return httputils.NOT_ALLOWED
|
||||
else:
|
||||
if ("o") in access.permissions:
|
||||
logger.info("overwrite of collection is allowed by config/option [rights] permit_overwrite_collection but explicit forbidden by permission 'o': %r", path)
|
||||
return httputils.NOT_ALLOWED
|
||||
elif "w" not in access.parent_permissions:
|
||||
return httputils.NOT_ALLOWED
|
||||
|
||||
etag = environ.get("HTTP_IF_MATCH", "")
|
||||
if not item and etag:
|
||||
# Etag asked but no item found: item has been removed
|
||||
logger.warning("Precondition failed on PUT request for %r (HTTP_IF_MATCH: %s, item not existing)", path, etag)
|
||||
return httputils.PRECONDITION_FAILED
|
||||
if item and etag and item.etag != etag:
|
||||
# Etag asked but item not matching: item has changed
|
||||
logger.warning("Precondition failed on PUT request for %r (HTTP_IF_MATCH: %s, item has different etag: %s)", path, etag, item.etag)
|
||||
return httputils.PRECONDITION_FAILED
|
||||
if item and etag:
|
||||
logger.debug("Precondition passed on PUT request for %r (HTTP_IF_MATCH: %s, item has etag: %s)", path, etag, item.etag)
|
||||
|
||||
match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
|
||||
if item and match:
|
||||
# Creation asked but item found: item can't be replaced
|
||||
logger.warning("Precondition failed on PUT request for %r (HTTP_IF_NONE_MATCH: *, creation requested but item found with etag: %s)", path, item.etag)
|
||||
return httputils.PRECONDITION_FAILED
|
||||
if match:
|
||||
logger.debug("Precondition passed on PUT request for %r (HTTP_IF_NONE_MATCH: *)", path)
|
||||
|
||||
if (tag != prepared_tag or
|
||||
prepared_write_whole_collection != write_whole_collection):
|
||||
|
@ -185,7 +228,7 @@ class ApplicationPutMixin:
|
|||
props = prepared_props
|
||||
if prepared_exc_info:
|
||||
logger.warning(
|
||||
"Bad PUT request on %r: %s", path, prepared_exc_info[1],
|
||||
"Bad PUT request on %r (prepare): %s", path, prepared_exc_info[1],
|
||||
exc_info=prepared_exc_info)
|
||||
return httputils.BAD_REQUEST
|
||||
|
||||
|
@ -193,24 +236,52 @@ class ApplicationPutMixin:
|
|||
try:
|
||||
etag = self._storage.create_collection(
|
||||
path, prepared_items, props).etag
|
||||
for item in prepared_items:
|
||||
hook_notification_item = HookNotificationItem(
|
||||
HookNotificationItemTypes.UPSERT,
|
||||
access.path,
|
||||
item.serialize()
|
||||
)
|
||||
self._hook.notify(hook_notification_item)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad PUT request on %r: %s", path, e, exc_info=True)
|
||||
"Bad PUT request on %r (create_collection): %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
else:
|
||||
assert not isinstance(item, storage.BaseCollection)
|
||||
prepared_item, = prepared_items
|
||||
if (item and item.uid != prepared_item.uid or
|
||||
not item and parent_item.has_uid(prepared_item.uid)):
|
||||
return self._webdav_error_response("%s:no-uid-conflict" % (
|
||||
"C" if tag == "VCALENDAR" else "CR"))
|
||||
return self._webdav_error_response(
|
||||
client.CONFLICT, "%s:no-uid-conflict" % (
|
||||
"C" if tag == "VCALENDAR" else "CR"))
|
||||
|
||||
href = posixpath.basename(pathutils.strip_path(path))
|
||||
try:
|
||||
etag = parent_item.upload(href, prepared_item).etag
|
||||
hook_notification_item = HookNotificationItem(
|
||||
HookNotificationItemTypes.UPSERT,
|
||||
access.path,
|
||||
prepared_item.serialize()
|
||||
)
|
||||
self._hook.notify(hook_notification_item)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad PUT request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
# return better matching HTTP result in case errno is provided and catched
|
||||
errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
|
||||
if errno_match:
|
||||
logger.error(
|
||||
"Failed PUT request on %r (upload): %s", path, e, exc_info=True)
|
||||
errno_e = int(errno_match.group(1))
|
||||
if errno_e == errno.ENOSPC:
|
||||
return httputils.INSUFFICIENT_STORAGE
|
||||
elif errno_e in [errno.EPERM, errno.EACCES]:
|
||||
return httputils.FORBIDDEN
|
||||
else:
|
||||
return httputils.INTERNAL_SERVER_ERROR
|
||||
else:
|
||||
logger.warning(
|
||||
"Bad PUT request on %r (upload): %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
|
||||
headers = {"ETag": etag}
|
||||
return client.CREATED, headers, None
|
||||
|
|
|
@ -1,8 +1,12 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Pieter Hijma <pieterhijma@users.noreply.github.com>
|
||||
# Copyright © 2024-2024 Ray <ray@react0r.com>
|
||||
# Copyright © 2024-2024 Georgiy <metallerok@gmail.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -18,20 +22,131 @@
|
|||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import contextlib
|
||||
import copy
|
||||
import datetime
|
||||
import posixpath
|
||||
import socket
|
||||
import xml.etree.ElementTree as ET
|
||||
from http import client
|
||||
from typing import (Callable, Iterable, Iterator, List, Optional, Sequence,
|
||||
Tuple, Union)
|
||||
from urllib.parse import unquote, urlparse
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from radicale import app, httputils, pathutils, storage, xmlutils
|
||||
import vobject
|
||||
import vobject.base
|
||||
from vobject.base import ContentLine
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import httputils, pathutils, storage, types, xmlutils
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.item import filter as radicale_filter
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def xml_report(base_prefix, path, xml_request, collection, encoding,
|
||||
unlock_storage_fn):
|
||||
"""Read and answer REPORT requests.
|
||||
def free_busy_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
|
||||
collection: storage.BaseCollection, encoding: str,
|
||||
unlock_storage_fn: Callable[[], None],
|
||||
max_occurrence: int
|
||||
) -> Tuple[int, Union[ET.Element, str]]:
|
||||
# NOTE: this function returns both an Element and a string because
|
||||
# free-busy reports are an edge-case on the return type according
|
||||
# to the spec.
|
||||
|
||||
multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
|
||||
if xml_request is None:
|
||||
return client.MULTI_STATUS, multistatus
|
||||
root = xml_request
|
||||
if (root.tag == xmlutils.make_clark("C:free-busy-query") and
|
||||
collection.tag != "VCALENDAR"):
|
||||
logger.warning("Invalid REPORT method %r on %r requested",
|
||||
xmlutils.make_human_tag(root.tag), path)
|
||||
return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")
|
||||
|
||||
time_range_element = root.find(xmlutils.make_clark("C:time-range"))
|
||||
assert isinstance(time_range_element, ET.Element)
|
||||
|
||||
# Build a single filter from the free busy query for retrieval
|
||||
# TODO: filter for VFREEBUSY in additional to VEVENT but
|
||||
# test_filter doesn't support that yet.
|
||||
vevent_cf_element = ET.Element(xmlutils.make_clark("C:comp-filter"),
|
||||
attrib={'name': 'VEVENT'})
|
||||
vevent_cf_element.append(time_range_element)
|
||||
vcalendar_cf_element = ET.Element(xmlutils.make_clark("C:comp-filter"),
|
||||
attrib={'name': 'VCALENDAR'})
|
||||
vcalendar_cf_element.append(vevent_cf_element)
|
||||
filter_element = ET.Element(xmlutils.make_clark("C:filter"))
|
||||
filter_element.append(vcalendar_cf_element)
|
||||
filters = (filter_element,)
|
||||
|
||||
# First pull from storage
|
||||
retrieved_items = list(collection.get_filtered(filters))
|
||||
# !!! Don't access storage after this !!!
|
||||
unlock_storage_fn()
|
||||
|
||||
cal = vobject.iCalendar()
|
||||
collection_tag = collection.tag
|
||||
while retrieved_items:
|
||||
# Second filtering before evaluating occurrences.
|
||||
# ``item.vobject_item`` might be accessed during filtering.
|
||||
# Don't keep reference to ``item``, because VObject requires a lot of
|
||||
# memory.
|
||||
item, filter_matched = retrieved_items.pop(0)
|
||||
if not filter_matched:
|
||||
try:
|
||||
if not test_filter(collection_tag, item, filter_element):
|
||||
continue
|
||||
except ValueError as e:
|
||||
raise ValueError("Failed to free-busy filter item %r from %r: %s" %
|
||||
(item.href, collection.path, e)) from e
|
||||
except Exception as e:
|
||||
raise RuntimeError("Failed to free-busy filter item %r from %r: %s" %
|
||||
(item.href, collection.path, e)) from e
|
||||
|
||||
fbtype = None
|
||||
if item.component_name == 'VEVENT':
|
||||
transp = getattr(item.vobject_item.vevent, 'transp', None)
|
||||
if transp and transp.value != 'OPAQUE':
|
||||
continue
|
||||
|
||||
status = getattr(item.vobject_item.vevent, 'status', None)
|
||||
if not status or status.value == 'CONFIRMED':
|
||||
fbtype = 'BUSY'
|
||||
elif status.value == 'CANCELLED':
|
||||
fbtype = 'FREE'
|
||||
elif status.value == 'TENTATIVE':
|
||||
fbtype = 'BUSY-TENTATIVE'
|
||||
else:
|
||||
# Could do fbtype = status.value for x-name, I prefer this
|
||||
fbtype = 'BUSY'
|
||||
|
||||
# TODO: coalesce overlapping periods
|
||||
|
||||
if max_occurrence > 0:
|
||||
n_occurrences = max_occurrence+1
|
||||
else:
|
||||
n_occurrences = 0
|
||||
occurrences = radicale_filter.time_range_fill(item.vobject_item,
|
||||
time_range_element,
|
||||
"VEVENT",
|
||||
n=n_occurrences)
|
||||
if len(occurrences) >= max_occurrence:
|
||||
raise ValueError("FREEBUSY occurrences limit of {} hit"
|
||||
.format(max_occurrence))
|
||||
|
||||
for occurrence in occurrences:
|
||||
vfb = cal.add('vfreebusy')
|
||||
vfb.add('dtstamp').value = item.vobject_item.vevent.dtstamp.value
|
||||
vfb.add('dtstart').value, vfb.add('dtend').value = occurrence
|
||||
if fbtype:
|
||||
vfb.add('fbtype').value = fbtype
|
||||
return (client.OK, cal.serialize())
|
||||
|
||||
|
||||
def xml_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
|
||||
collection: storage.BaseCollection, encoding: str,
|
||||
unlock_storage_fn: Callable[[], None]
|
||||
) -> Tuple[int, ET.Element]:
|
||||
"""Read and answer REPORT requests that return XML.
|
||||
|
||||
Read rfc3253-3.6 for info.
|
||||
|
||||
|
@ -40,10 +155,9 @@ def xml_report(base_prefix, path, xml_request, collection, encoding,
|
|||
if xml_request is None:
|
||||
return client.MULTI_STATUS, multistatus
|
||||
root = xml_request
|
||||
if root.tag in (
|
||||
xmlutils.make_clark("D:principal-search-property-set"),
|
||||
xmlutils.make_clark("D:principal-property-search"),
|
||||
xmlutils.make_clark("D:expand-property")):
|
||||
if root.tag in (xmlutils.make_clark("D:principal-search-property-set"),
|
||||
xmlutils.make_clark("D:principal-property-search"),
|
||||
xmlutils.make_clark("D:expand-property")):
|
||||
# We don't support searching for principals or indirect retrieving of
|
||||
# properties, just return an empty result.
|
||||
# InfCloud asks for expand-property reports (even if we don't announce
|
||||
|
@ -52,28 +166,31 @@ def xml_report(base_prefix, path, xml_request, collection, encoding,
|
|||
xmlutils.make_human_tag(root.tag), path)
|
||||
return client.MULTI_STATUS, multistatus
|
||||
if (root.tag == xmlutils.make_clark("C:calendar-multiget") and
|
||||
collection.get_meta("tag") != "VCALENDAR" or
|
||||
collection.tag != "VCALENDAR" or
|
||||
root.tag == xmlutils.make_clark("CR:addressbook-multiget") and
|
||||
collection.get_meta("tag") != "VADDRESSBOOK" or
|
||||
collection.tag != "VADDRESSBOOK" or
|
||||
root.tag == xmlutils.make_clark("D:sync-collection") and
|
||||
collection.get_meta("tag") not in ("VADDRESSBOOK", "VCALENDAR")):
|
||||
collection.tag not in ("VADDRESSBOOK", "VCALENDAR")):
|
||||
logger.warning("Invalid REPORT method %r on %r requested",
|
||||
xmlutils.make_human_tag(root.tag), path)
|
||||
return (client.CONFLICT,
|
||||
xmlutils.webdav_error("D:supported-report"))
|
||||
prop_element = root.find(xmlutils.make_clark("D:prop"))
|
||||
props = (
|
||||
[prop.tag for prop in prop_element]
|
||||
if prop_element is not None else [])
|
||||
return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")
|
||||
|
||||
props: Union[ET.Element, List]
|
||||
if root.find(xmlutils.make_clark("D:prop")) is not None:
|
||||
props = root.find(xmlutils.make_clark("D:prop")) # type: ignore[assignment]
|
||||
else:
|
||||
props = []
|
||||
|
||||
hreferences: Iterable[str]
|
||||
if root.tag in (
|
||||
xmlutils.make_clark("C:calendar-multiget"),
|
||||
xmlutils.make_clark("CR:addressbook-multiget")):
|
||||
# Read rfc4791-7.9 for info
|
||||
hreferences = set()
|
||||
for href_element in root.findall(xmlutils.make_clark("D:href")):
|
||||
href_path = pathutils.sanitize_path(
|
||||
unquote(urlparse(href_element.text).path))
|
||||
temp_url_path = urlparse(href_element.text).path
|
||||
assert isinstance(temp_url_path, str)
|
||||
href_path = pathutils.sanitize_path(unquote(temp_url_path))
|
||||
if (href_path + "/").startswith(base_prefix + "/"):
|
||||
hreferences.add(href_path[len(base_prefix):])
|
||||
else:
|
||||
|
@ -92,7 +209,8 @@ def xml_report(base_prefix, path, xml_request, collection, encoding,
|
|||
# Invalid sync token
|
||||
logger.warning("Client provided invalid sync token %r: %s",
|
||||
old_sync_token, e, exc_info=True)
|
||||
return (client.CONFLICT,
|
||||
# client.CONFLICT doesn't work with some clients (e.g. InfCloud)
|
||||
return (client.FORBIDDEN,
|
||||
xmlutils.webdav_error("D:valid-sync-token"))
|
||||
hreferences = (pathutils.unstrip_path(
|
||||
posixpath.join(collection.path, n)) for n in names)
|
||||
|
@ -103,85 +221,16 @@ def xml_report(base_prefix, path, xml_request, collection, encoding,
|
|||
else:
|
||||
hreferences = (path,)
|
||||
filters = (
|
||||
root.findall("./%s" % xmlutils.make_clark("C:filter")) +
|
||||
root.findall("./%s" % xmlutils.make_clark("CR:filter")))
|
||||
|
||||
def retrieve_items(collection, hreferences, multistatus):
|
||||
"""Retrieves all items that are referenced in ``hreferences`` from
|
||||
``collection`` and adds 404 responses for missing and invalid items
|
||||
to ``multistatus``."""
|
||||
collection_requested = False
|
||||
|
||||
def get_names():
|
||||
"""Extracts all names from references in ``hreferences`` and adds
|
||||
404 responses for invalid references to ``multistatus``.
|
||||
If the whole collections is referenced ``collection_requested``
|
||||
gets set to ``True``."""
|
||||
nonlocal collection_requested
|
||||
for hreference in hreferences:
|
||||
try:
|
||||
name = pathutils.name_from_path(hreference, collection)
|
||||
except ValueError as e:
|
||||
logger.warning("Skipping invalid path %r in REPORT request"
|
||||
" on %r: %s", hreference, path, e)
|
||||
response = xml_item_response(base_prefix, hreference,
|
||||
found_item=False)
|
||||
multistatus.append(response)
|
||||
continue
|
||||
if name:
|
||||
# Reference is an item
|
||||
yield name
|
||||
else:
|
||||
# Reference is a collection
|
||||
collection_requested = True
|
||||
|
||||
for name, item in collection.get_multi(get_names()):
|
||||
if not item:
|
||||
uri = pathutils.unstrip_path(
|
||||
posixpath.join(collection.path, name))
|
||||
response = xml_item_response(base_prefix, uri,
|
||||
found_item=False)
|
||||
multistatus.append(response)
|
||||
else:
|
||||
yield item, False
|
||||
if collection_requested:
|
||||
yield from collection.get_filtered(filters)
|
||||
root.findall(xmlutils.make_clark("C:filter")) +
|
||||
root.findall(xmlutils.make_clark("CR:filter")))
|
||||
|
||||
# Retrieve everything required for finishing the request.
|
||||
retrieved_items = list(retrieve_items(collection, hreferences,
|
||||
multistatus))
|
||||
collection_tag = collection.get_meta("tag")
|
||||
# Don't access storage after this!
|
||||
retrieved_items = list(retrieve_items(
|
||||
base_prefix, path, collection, hreferences, filters, multistatus))
|
||||
collection_tag = collection.tag
|
||||
# !!! Don't access storage after this !!!
|
||||
unlock_storage_fn()
|
||||
|
||||
def match(item, filter_):
|
||||
tag = collection_tag
|
||||
if (tag == "VCALENDAR" and
|
||||
filter_.tag != xmlutils.make_clark("C:%s" % filter_)):
|
||||
if len(filter_) == 0:
|
||||
return True
|
||||
if len(filter_) > 1:
|
||||
raise ValueError("Filter with %d children" % len(filter_))
|
||||
if filter_[0].tag != xmlutils.make_clark("C:comp-filter"):
|
||||
raise ValueError("Unexpected %r in filter" % filter_[0].tag)
|
||||
return radicale_filter.comp_match(item, filter_[0])
|
||||
if (tag == "VADDRESSBOOK" and
|
||||
filter_.tag != xmlutils.make_clark("CR:%s" % filter_)):
|
||||
for child in filter_:
|
||||
if child.tag != xmlutils.make_clark("CR:prop-filter"):
|
||||
raise ValueError("Unexpected %r in filter" % child.tag)
|
||||
test = filter_.get("test", "anyof")
|
||||
if test == "anyof":
|
||||
return any(
|
||||
radicale_filter.prop_match(item.vobject_item, f, "CR")
|
||||
for f in filter_)
|
||||
if test == "allof":
|
||||
return all(
|
||||
radicale_filter.prop_match(item.vobject_item, f, "CR")
|
||||
for f in filter_)
|
||||
raise ValueError("Unsupported filter test: %r" % test)
|
||||
raise ValueError("unsupported filter %r for %r" % (filter_.tag, tag))
|
||||
|
||||
while retrieved_items:
|
||||
# ``item.vobject_item`` might be accessed during filtering.
|
||||
# Don't keep reference to ``item``, because VObject requires a lot of
|
||||
|
@ -189,7 +238,8 @@ def xml_report(base_prefix, path, xml_request, collection, encoding,
|
|||
item, filters_matched = retrieved_items.pop(0)
|
||||
if filters and not filters_matched:
|
||||
try:
|
||||
if not all(match(item, filter_) for filter_ in filters):
|
||||
if not all(test_filter(collection_tag, item, filter_)
|
||||
for filter_ in filters):
|
||||
continue
|
||||
except ValueError as e:
|
||||
raise ValueError("Failed to filter item %r from %r: %s" %
|
||||
|
@ -201,22 +251,44 @@ def xml_report(base_prefix, path, xml_request, collection, encoding,
|
|||
found_props = []
|
||||
not_found_props = []
|
||||
|
||||
for tag in props:
|
||||
element = ET.Element(tag)
|
||||
if tag == xmlutils.make_clark("D:getetag"):
|
||||
for prop in props:
|
||||
element = ET.Element(prop.tag)
|
||||
if prop.tag == xmlutils.make_clark("D:getetag"):
|
||||
element.text = item.etag
|
||||
found_props.append(element)
|
||||
elif tag == xmlutils.make_clark("D:getcontenttype"):
|
||||
elif prop.tag == xmlutils.make_clark("D:getcontenttype"):
|
||||
element.text = xmlutils.get_content_type(item, encoding)
|
||||
found_props.append(element)
|
||||
elif tag in (
|
||||
elif prop.tag in (
|
||||
xmlutils.make_clark("C:calendar-data"),
|
||||
xmlutils.make_clark("CR:address-data")):
|
||||
element.text = item.serialize()
|
||||
found_props.append(element)
|
||||
|
||||
expand = prop.find(xmlutils.make_clark("C:expand"))
|
||||
if expand is not None and item.component_name == 'VEVENT':
|
||||
start = expand.get('start')
|
||||
end = expand.get('end')
|
||||
|
||||
if (start is None) or (end is None):
|
||||
return client.FORBIDDEN, \
|
||||
xmlutils.webdav_error("C:expand")
|
||||
|
||||
start = datetime.datetime.strptime(
|
||||
start, '%Y%m%dT%H%M%SZ'
|
||||
).replace(tzinfo=datetime.timezone.utc)
|
||||
end = datetime.datetime.strptime(
|
||||
end, '%Y%m%dT%H%M%SZ'
|
||||
).replace(tzinfo=datetime.timezone.utc)
|
||||
|
||||
expanded_element = _expand(
|
||||
element, copy.copy(item), start, end)
|
||||
found_props.append(expanded_element)
|
||||
else:
|
||||
found_props.append(element)
|
||||
else:
|
||||
not_found_props.append(element)
|
||||
|
||||
assert item.href
|
||||
uri = pathutils.unstrip_path(
|
||||
posixpath.join(collection.path, item.href))
|
||||
multistatus.append(xml_item_response(
|
||||
|
@ -226,13 +298,201 @@ def xml_report(base_prefix, path, xml_request, collection, encoding,
|
|||
return client.MULTI_STATUS, multistatus
|
||||
|
||||
|
||||
def xml_item_response(base_prefix, href, found_props=(), not_found_props=(),
|
||||
found_item=True):
|
||||
def _expand(
|
||||
element: ET.Element,
|
||||
item: radicale_item.Item,
|
||||
start: datetime.datetime,
|
||||
end: datetime.datetime,
|
||||
) -> ET.Element:
|
||||
vevent_component: vobject.base.Component = copy.copy(item.vobject_item)
|
||||
|
||||
# Split the vevents included in the component into one that contains the
|
||||
# recurrence information and others that contain a recurrence id to
|
||||
# override instances.
|
||||
vevent_recurrence, vevents_overridden = _split_overridden_vevents(vevent_component)
|
||||
|
||||
dt_format = '%Y%m%dT%H%M%SZ'
|
||||
all_day_event = False
|
||||
|
||||
if type(vevent_recurrence.dtstart.value) is datetime.date:
|
||||
# If an event comes to us with a dtstart specified as a date
|
||||
# then in the response we return the date, not datetime
|
||||
dt_format = '%Y%m%d'
|
||||
all_day_event = True
|
||||
# In case of dates, we need to remove timezone information since
|
||||
# rruleset.between computes with datetimes without timezone information
|
||||
start = start.replace(tzinfo=None)
|
||||
end = end.replace(tzinfo=None)
|
||||
|
||||
for vevent in vevents_overridden:
|
||||
_strip_single_event(vevent, dt_format)
|
||||
|
||||
duration = None
|
||||
if hasattr(vevent_recurrence, "dtend"):
|
||||
duration = vevent_recurrence.dtend.value - vevent_recurrence.dtstart.value
|
||||
|
||||
rruleset = None
|
||||
if hasattr(vevent_recurrence, 'rrule'):
|
||||
rruleset = vevent_recurrence.getrruleset()
|
||||
|
||||
if rruleset:
|
||||
# This function uses datetimes internally without timezone info for dates
|
||||
recurrences = rruleset.between(start, end, inc=True)
|
||||
|
||||
_strip_component(vevent_component)
|
||||
_strip_single_event(vevent_recurrence, dt_format)
|
||||
|
||||
is_component_filled: bool = False
|
||||
i_overridden = 0
|
||||
|
||||
for recurrence_dt in recurrences:
|
||||
recurrence_utc = recurrence_dt.astimezone(datetime.timezone.utc)
|
||||
i_overridden, vevent = _find_overridden(i_overridden, vevents_overridden, recurrence_utc, dt_format)
|
||||
|
||||
if not vevent:
|
||||
# We did not find an overridden instance, so create a new one
|
||||
vevent = copy.deepcopy(vevent_recurrence)
|
||||
|
||||
# For all day events, the system timezone may influence the
|
||||
# results, so use recurrence_dt
|
||||
recurrence_id = recurrence_dt if all_day_event else recurrence_utc
|
||||
vevent.recurrence_id = ContentLine(
|
||||
name='RECURRENCE-ID',
|
||||
value=recurrence_id, params={}
|
||||
)
|
||||
_convert_to_utc(vevent, 'recurrence_id', dt_format)
|
||||
vevent.dtstart = ContentLine(
|
||||
name='DTSTART',
|
||||
value=recurrence_id.strftime(dt_format), params={}
|
||||
)
|
||||
if duration:
|
||||
vevent.dtend = ContentLine(
|
||||
name='DTEND',
|
||||
value=(recurrence_id + duration).strftime(dt_format), params={}
|
||||
)
|
||||
|
||||
if not is_component_filled:
|
||||
vevent_component.vevent = vevent
|
||||
is_component_filled = True
|
||||
else:
|
||||
vevent_component.add(vevent)
|
||||
|
||||
element.text = vevent_component.serialize()
|
||||
|
||||
return element
|
||||
|
||||
|
||||
def _convert_timezone(vevent: vobject.icalendar.RecurringComponent,
|
||||
name_prop: str,
|
||||
name_content_line: str):
|
||||
prop = getattr(vevent, name_prop, None)
|
||||
if prop:
|
||||
if type(prop.value) is datetime.date:
|
||||
date_time = datetime.datetime.fromordinal(
|
||||
prop.value.toordinal()
|
||||
).replace(tzinfo=datetime.timezone.utc)
|
||||
else:
|
||||
date_time = prop.value.astimezone(datetime.timezone.utc)
|
||||
|
||||
setattr(vevent, name_prop, ContentLine(name=name_content_line, value=date_time, params=[]))
|
||||
|
||||
|
||||
def _convert_to_utc(vevent: vobject.icalendar.RecurringComponent,
|
||||
name_prop: str,
|
||||
dt_format: str):
|
||||
prop = getattr(vevent, name_prop, None)
|
||||
if prop:
|
||||
setattr(vevent, name_prop, ContentLine(name=prop.name, value=prop.value.strftime(dt_format), params=[]))
|
||||
|
||||
|
||||
def _strip_single_event(vevent: vobject.icalendar.RecurringComponent, dt_format: str) -> None:
|
||||
_convert_timezone(vevent, 'dtstart', 'DTSTART')
|
||||
_convert_timezone(vevent, 'dtend', 'DTEND')
|
||||
_convert_timezone(vevent, 'recurrence_id', 'RECURRENCE-ID')
|
||||
|
||||
# There is something strange behaviour during serialization native datetime, so converting manually
|
||||
_convert_to_utc(vevent, 'dtstart', dt_format)
|
||||
_convert_to_utc(vevent, 'dtend', dt_format)
|
||||
_convert_to_utc(vevent, 'recurrence_id', dt_format)
|
||||
|
||||
try:
|
||||
delattr(vevent, 'rrule')
|
||||
delattr(vevent, 'exdate')
|
||||
delattr(vevent, 'exrule')
|
||||
delattr(vevent, 'rdate')
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
def _strip_component(vevent: vobject.base.Component) -> None:
|
||||
timezones_to_remove = []
|
||||
for component in vevent.components():
|
||||
if component.name == 'VTIMEZONE':
|
||||
timezones_to_remove.append(component)
|
||||
|
||||
for timezone in timezones_to_remove:
|
||||
vevent.remove(timezone)
|
||||
|
||||
|
||||
def _split_overridden_vevents(
|
||||
component: vobject.base.Component,
|
||||
) -> Tuple[
|
||||
vobject.icalendar.RecurringComponent,
|
||||
List[vobject.icalendar.RecurringComponent]
|
||||
]:
|
||||
vevent_recurrence = None
|
||||
vevents_overridden = []
|
||||
|
||||
for vevent in component.vevent_list:
|
||||
if hasattr(vevent, 'recurrence_id'):
|
||||
vevents_overridden += [vevent]
|
||||
elif vevent_recurrence:
|
||||
raise ValueError(
|
||||
f"component with UID {vevent.uid} "
|
||||
f"has more than one vevent with recurrence information"
|
||||
)
|
||||
else:
|
||||
vevent_recurrence = vevent
|
||||
|
||||
if vevent_recurrence:
|
||||
return (
|
||||
vevent_recurrence, sorted(
|
||||
vevents_overridden,
|
||||
key=lambda vevent: vevent.recurrence_id.value
|
||||
)
|
||||
)
|
||||
else:
|
||||
raise ValueError(
|
||||
f"component with UID {vevent.uid} "
|
||||
f"does not have a vevent without a recurrence_id"
|
||||
)
|
||||
|
||||
|
||||
def _find_overridden(
|
||||
start: int,
|
||||
vevents: List[vobject.icalendar.RecurringComponent],
|
||||
dt: datetime.datetime,
|
||||
dt_format: str
|
||||
) -> Tuple[int, Optional[vobject.icalendar.RecurringComponent]]:
|
||||
for i in range(start, len(vevents)):
|
||||
dt_event = datetime.datetime.strptime(
|
||||
vevents[i].recurrence_id.value,
|
||||
dt_format
|
||||
).replace(tzinfo=datetime.timezone.utc)
|
||||
if dt_event == dt:
|
||||
return (i + 1, vevents[i])
|
||||
return (start, None)
|
||||
|
||||
|
||||
def xml_item_response(base_prefix: str, href: str,
|
||||
found_props: Sequence[ET.Element] = (),
|
||||
not_found_props: Sequence[ET.Element] = (),
|
||||
found_item: bool = True) -> ET.Element:
|
||||
response = ET.Element(xmlutils.make_clark("D:response"))
|
||||
|
||||
href_tag = ET.Element(xmlutils.make_clark("D:href"))
|
||||
href_tag.text = xmlutils.make_href(base_prefix, href)
|
||||
response.append(href_tag)
|
||||
href_element = ET.Element(xmlutils.make_clark("D:href"))
|
||||
href_element.text = xmlutils.make_href(base_prefix, href)
|
||||
response.append(href_element)
|
||||
|
||||
if found_item:
|
||||
for code, props in ((200, found_props), (404, not_found_props)):
|
||||
|
@ -240,10 +500,10 @@ def xml_item_response(base_prefix, href, found_props=(), not_found_props=(),
|
|||
propstat = ET.Element(xmlutils.make_clark("D:propstat"))
|
||||
status = ET.Element(xmlutils.make_clark("D:status"))
|
||||
status.text = xmlutils.make_response(code)
|
||||
prop_tag = ET.Element(xmlutils.make_clark("D:prop"))
|
||||
prop_element = ET.Element(xmlutils.make_clark("D:prop"))
|
||||
for prop in props:
|
||||
prop_tag.append(prop)
|
||||
propstat.append(prop_tag)
|
||||
prop_element.append(prop)
|
||||
propstat.append(prop_element)
|
||||
propstat.append(status)
|
||||
response.append(propstat)
|
||||
else:
|
||||
|
@ -254,24 +514,98 @@ def xml_item_response(base_prefix, href, found_props=(), not_found_props=(),
|
|||
return response
|
||||
|
||||
|
||||
class ApplicationReportMixin:
|
||||
def do_REPORT(self, environ, base_prefix, path, user):
|
||||
def retrieve_items(
|
||||
base_prefix: str, path: str, collection: storage.BaseCollection,
|
||||
hreferences: Iterable[str], filters: Sequence[ET.Element],
|
||||
multistatus: ET.Element) -> Iterator[Tuple[radicale_item.Item, bool]]:
|
||||
"""Retrieves all items that are referenced in ``hreferences`` from
|
||||
``collection`` and adds 404 responses for missing and invalid items
|
||||
to ``multistatus``."""
|
||||
collection_requested = False
|
||||
|
||||
def get_names() -> Iterator[str]:
|
||||
"""Extracts all names from references in ``hreferences`` and adds
|
||||
404 responses for invalid references to ``multistatus``.
|
||||
If the whole collections is referenced ``collection_requested``
|
||||
gets set to ``True``."""
|
||||
nonlocal collection_requested
|
||||
for hreference in hreferences:
|
||||
try:
|
||||
name = pathutils.name_from_path(hreference, collection)
|
||||
except ValueError as e:
|
||||
logger.warning("Skipping invalid path %r in REPORT request on "
|
||||
"%r: %s", hreference, path, e)
|
||||
response = xml_item_response(base_prefix, hreference,
|
||||
found_item=False)
|
||||
multistatus.append(response)
|
||||
continue
|
||||
if name:
|
||||
# Reference is an item
|
||||
yield name
|
||||
else:
|
||||
# Reference is a collection
|
||||
collection_requested = True
|
||||
|
||||
for name, item in collection.get_multi(get_names()):
|
||||
if not item:
|
||||
uri = pathutils.unstrip_path(posixpath.join(collection.path, name))
|
||||
response = xml_item_response(base_prefix, uri, found_item=False)
|
||||
multistatus.append(response)
|
||||
else:
|
||||
yield item, False
|
||||
if collection_requested:
|
||||
yield from collection.get_filtered(filters)
|
||||
|
||||
|
||||
def test_filter(collection_tag: str, item: radicale_item.Item,
|
||||
filter_: ET.Element) -> bool:
|
||||
"""Match an item against a filter."""
|
||||
if (collection_tag == "VCALENDAR" and
|
||||
filter_.tag != xmlutils.make_clark("C:%s" % filter_)):
|
||||
if len(filter_) == 0:
|
||||
return True
|
||||
if len(filter_) > 1:
|
||||
raise ValueError("Filter with %d children" % len(filter_))
|
||||
if filter_[0].tag != xmlutils.make_clark("C:comp-filter"):
|
||||
raise ValueError("Unexpected %r in filter" % filter_[0].tag)
|
||||
return radicale_filter.comp_match(item, filter_[0])
|
||||
if (collection_tag == "VADDRESSBOOK" and
|
||||
filter_.tag != xmlutils.make_clark("CR:%s" % filter_)):
|
||||
for child in filter_:
|
||||
if child.tag != xmlutils.make_clark("CR:prop-filter"):
|
||||
raise ValueError("Unexpected %r in filter" % child.tag)
|
||||
test = filter_.get("test", "anyof")
|
||||
if test == "anyof":
|
||||
return any(radicale_filter.prop_match(item.vobject_item, f, "CR")
|
||||
for f in filter_)
|
||||
if test == "allof":
|
||||
return all(radicale_filter.prop_match(item.vobject_item, f, "CR")
|
||||
for f in filter_)
|
||||
raise ValueError("Unsupported filter test: %r" % test)
|
||||
raise ValueError("Unsupported filter %r for %r" %
|
||||
(filter_.tag, collection_tag))
|
||||
|
||||
|
||||
class ApplicationPartReport(ApplicationBase):
|
||||
|
||||
def do_REPORT(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage REPORT request."""
|
||||
access = app.Access(self._rights, user, path)
|
||||
access = Access(self._rights, user, path)
|
||||
if not access.check("r"):
|
||||
return httputils.NOT_ALLOWED
|
||||
try:
|
||||
xml_content = self._read_xml_content(environ)
|
||||
xml_content = self._read_xml_request_body(environ)
|
||||
except RuntimeError as e:
|
||||
logger.warning(
|
||||
"Bad REPORT request on %r: %s", path, e, exc_info=True)
|
||||
logger.warning("Bad REPORT request on %r: %s", path, e,
|
||||
exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
except socket.timeout:
|
||||
logger.debug("client timed out", exc_info=True)
|
||||
logger.debug("Client timed out", exc_info=True)
|
||||
return httputils.REQUEST_TIMEOUT
|
||||
with contextlib.ExitStack() as lock_stack:
|
||||
lock_stack.enter_context(self._storage.acquire_lock("r", user))
|
||||
item = next(self._storage.discover(path), None)
|
||||
item = next(iter(self._storage.discover(path)), None)
|
||||
if not item:
|
||||
return httputils.NOT_FOUND
|
||||
if not access.check("r", item):
|
||||
|
@ -279,14 +613,30 @@ class ApplicationReportMixin:
|
|||
if isinstance(item, storage.BaseCollection):
|
||||
collection = item
|
||||
else:
|
||||
assert item.collection is not None
|
||||
collection = item.collection
|
||||
headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
|
||||
try:
|
||||
status, xml_answer = xml_report(
|
||||
base_prefix, path, xml_content, collection, self._encoding,
|
||||
lock_stack.close)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad REPORT request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
return (status, headers, self._write_xml_content(xml_answer))
|
||||
|
||||
if xml_content is not None and \
|
||||
xml_content.tag == xmlutils.make_clark("C:free-busy-query"):
|
||||
max_occurrence = self.configuration.get("reporting", "max_freebusy_occurrence")
|
||||
try:
|
||||
status, body = free_busy_report(
|
||||
base_prefix, path, xml_content, collection, self._encoding,
|
||||
lock_stack.close, max_occurrence)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad REPORT request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
headers = {"Content-Type": "text/calendar; charset=%s" % self._encoding}
|
||||
return status, headers, str(body)
|
||||
else:
|
||||
try:
|
||||
status, xml_answer = xml_report(
|
||||
base_prefix, path, xml_content, collection, self._encoding,
|
||||
lock_stack.close)
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Bad REPORT request on %r: %s", path, e, exc_info=True)
|
||||
return httputils.BAD_REQUEST
|
||||
headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
|
||||
return status, headers, self._xml_response(xml_answer)
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -28,18 +29,85 @@ Take a look at the class ``BaseAuth`` if you want to implement your own.
|
|||
|
||||
"""
|
||||
|
||||
from radicale import utils
|
||||
import hashlib
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from typing import List, Sequence, Set, Tuple, Union, final
|
||||
|
||||
INTERNAL_TYPES = ("none", "remote_user", "http_x_remote_user", "htpasswd")
|
||||
from radicale import config, types, utils
|
||||
from radicale.log import logger
|
||||
|
||||
INTERNAL_TYPES: Sequence[str] = ("none", "remote_user", "http_x_remote_user",
|
||||
"denyall",
|
||||
"htpasswd",
|
||||
"ldap",
|
||||
"imap",
|
||||
"oauth2",
|
||||
"pam",
|
||||
"dovecot")
|
||||
|
||||
CACHE_LOGIN_TYPES: Sequence[str] = (
|
||||
"dovecot",
|
||||
"ldap",
|
||||
"htpasswd",
|
||||
"imap",
|
||||
"oauth2",
|
||||
"pam",
|
||||
)
|
||||
|
||||
INSECURE_IF_NO_LOOPBACK_TYPES: Sequence[str] = (
|
||||
"remote_user",
|
||||
"http_x_remote_user",
|
||||
)
|
||||
|
||||
AUTH_SOCKET_FAMILY: Sequence[str] = ("AF_UNIX", "AF_INET", "AF_INET6")
|
||||
|
||||
|
||||
def load(configuration):
|
||||
def load(configuration: "config.Configuration") -> "BaseAuth":
|
||||
"""Load the authentication module chosen in configuration."""
|
||||
return utils.load_plugin(INTERNAL_TYPES, "auth", "Auth", configuration)
|
||||
_type = configuration.get("auth", "type")
|
||||
if _type == "none":
|
||||
logger.warning("No user authentication is selected: '[auth] type=none' (INSECURE)")
|
||||
elif _type == "denyall":
|
||||
logger.warning("All user authentication is blocked by: '[auth] type=denyall'")
|
||||
elif _type in INSECURE_IF_NO_LOOPBACK_TYPES:
|
||||
sgi = os.environ.get('SERVER_GATEWAY_INTERFACE') or None
|
||||
if not sgi:
|
||||
hosts: List[Tuple[str, int]] = configuration.get("server", "hosts")
|
||||
localhost_only = True
|
||||
address_lo = []
|
||||
address = []
|
||||
for address_port in hosts:
|
||||
if address_port[0] in ["localhost", "localhost6", "127.0.0.1", "::1"]:
|
||||
address_lo.append(utils.format_address(address_port))
|
||||
else:
|
||||
address.append(utils.format_address(address_port))
|
||||
localhost_only = False
|
||||
if localhost_only is False:
|
||||
logger.warning("User authentication '[auth] type=%s' is selected but server is not only listen on loopback address (potentially INSECURE): %s", _type, " ".join(address))
|
||||
return utils.load_plugin(INTERNAL_TYPES, "auth", "Auth", BaseAuth,
|
||||
configuration)
|
||||
|
||||
|
||||
class BaseAuth:
|
||||
def __init__(self, configuration):
|
||||
|
||||
_ldap_groups: Set[str] = set([])
|
||||
_lc_username: bool
|
||||
_uc_username: bool
|
||||
_strip_domain: bool
|
||||
_auth_delay: float
|
||||
_failed_auth_delay: float
|
||||
_type: str
|
||||
_cache_logins: bool
|
||||
_cache_successful: dict # login -> (digest, time_ns)
|
||||
_cache_successful_logins_expiry: int
|
||||
_cache_failed: dict # digest_failed -> (time_ns, login)
|
||||
_cache_failed_logins_expiry: int
|
||||
_cache_failed_logins_salt_ns: int # persistent over runtime
|
||||
_lock: threading.Lock
|
||||
|
||||
def __init__(self, configuration: "config.Configuration") -> None:
|
||||
"""Initialize BaseAuth.
|
||||
|
||||
``configuration`` see ``radicale.config`` module.
|
||||
|
@ -48,8 +116,49 @@ class BaseAuth:
|
|||
|
||||
"""
|
||||
self.configuration = configuration
|
||||
self._lc_username = configuration.get("auth", "lc_username")
|
||||
self._uc_username = configuration.get("auth", "uc_username")
|
||||
self._strip_domain = configuration.get("auth", "strip_domain")
|
||||
logger.info("auth.strip_domain: %s", self._strip_domain)
|
||||
logger.info("auth.lc_username: %s", self._lc_username)
|
||||
logger.info("auth.uc_username: %s", self._uc_username)
|
||||
if self._lc_username is True and self._uc_username is True:
|
||||
raise RuntimeError("auth.lc_username and auth.uc_username cannot be enabled together")
|
||||
self._auth_delay = configuration.get("auth", "delay")
|
||||
logger.info("auth.delay: %f", self._auth_delay)
|
||||
self._failed_auth_delay = 0
|
||||
self._lock = threading.Lock()
|
||||
# cache_successful_logins
|
||||
self._cache_logins = configuration.get("auth", "cache_logins")
|
||||
self._type = configuration.get("auth", "type")
|
||||
if (self._type in CACHE_LOGIN_TYPES) or (self._cache_logins is False):
|
||||
logger.info("auth.cache_logins: %s", self._cache_logins)
|
||||
else:
|
||||
logger.info("auth.cache_logins: %s (but not required for type '%s' and disabled therefore)", self._cache_logins, self._type)
|
||||
self._cache_logins = False
|
||||
if self._cache_logins is True:
|
||||
self._cache_successful_logins_expiry = configuration.get("auth", "cache_successful_logins_expiry")
|
||||
if self._cache_successful_logins_expiry < 0:
|
||||
raise RuntimeError("self._cache_successful_logins_expiry cannot be < 0")
|
||||
self._cache_failed_logins_expiry = configuration.get("auth", "cache_failed_logins_expiry")
|
||||
if self._cache_failed_logins_expiry < 0:
|
||||
raise RuntimeError("self._cache_failed_logins_expiry cannot be < 0")
|
||||
logger.info("auth.cache_successful_logins_expiry: %s seconds", self._cache_successful_logins_expiry)
|
||||
logger.info("auth.cache_failed_logins_expiry: %s seconds", self._cache_failed_logins_expiry)
|
||||
# cache init
|
||||
self._cache_successful = dict()
|
||||
self._cache_failed = dict()
|
||||
self._cache_failed_logins_salt_ns = time.time_ns()
|
||||
|
||||
def get_external_login(self, environ):
|
||||
def _cache_digest(self, login: str, password: str, salt: str) -> str:
|
||||
h = hashlib.sha3_512()
|
||||
h.update(salt.encode())
|
||||
h.update(login.encode())
|
||||
h.update(password.encode())
|
||||
return str(h.digest())
|
||||
|
||||
def get_external_login(self, environ: types.WSGIEnviron) -> Union[
|
||||
Tuple[()], Tuple[str, str]]:
|
||||
"""Optionally provide the login and password externally.
|
||||
|
||||
``environ`` a dict with the WSGI environment
|
||||
|
@ -61,15 +170,145 @@ class BaseAuth:
|
|||
"""
|
||||
return ()
|
||||
|
||||
def login(self, login, password):
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
"""Check credentials and map login to internal user
|
||||
|
||||
``login`` the login name
|
||||
|
||||
``password`` the password
|
||||
|
||||
Returns the user name or ``""`` for invalid credentials.
|
||||
Returns the username or ``""`` for invalid credentials.
|
||||
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
def _sleep_for_constant_exec_time(self, time_ns_begin: int):
|
||||
"""Sleep some time to reach a constant execution time for failed logins
|
||||
|
||||
Independent of time required by external backend or used digest methods
|
||||
|
||||
Increase final execution time in case initial limit exceeded
|
||||
|
||||
See also issue 591
|
||||
|
||||
"""
|
||||
time_delta = (time.time_ns() - time_ns_begin) / 1000 / 1000 / 1000
|
||||
with self._lock:
|
||||
# avoid that another thread is changing global value at the same time
|
||||
failed_auth_delay = self._failed_auth_delay
|
||||
failed_auth_delay_old = failed_auth_delay
|
||||
if time_delta > failed_auth_delay:
|
||||
# set new
|
||||
failed_auth_delay = time_delta
|
||||
# store globally
|
||||
self._failed_auth_delay = failed_auth_delay
|
||||
if (failed_auth_delay_old != failed_auth_delay):
|
||||
logger.debug("Failed login constant execution time need increase of failed_auth_delay: %.9f -> %.9f sec", failed_auth_delay_old, failed_auth_delay)
|
||||
# sleep == 0
|
||||
else:
|
||||
sleep = failed_auth_delay - time_delta
|
||||
logger.debug("Failed login constant exection time alignment, sleeping: %.9f sec", sleep)
|
||||
time.sleep(sleep)
|
||||
|
||||
@final
|
||||
def login(self, login: str, password: str) -> Tuple[str, str]:
|
||||
time_ns_begin = time.time_ns()
|
||||
result_from_cache = False
|
||||
if self._lc_username:
|
||||
login = login.lower()
|
||||
if self._uc_username:
|
||||
login = login.upper()
|
||||
if self._strip_domain:
|
||||
login = login.split('@')[0]
|
||||
if self._cache_logins is True:
|
||||
# time_ns is also used as salt
|
||||
result = ""
|
||||
digest = ""
|
||||
time_ns = time.time_ns()
|
||||
# cleanup failed login cache to avoid out-of-memory
|
||||
cache_failed_entries = len(self._cache_failed)
|
||||
if cache_failed_entries > 0:
|
||||
logger.debug("Login failed cache investigation start (entries: %d)", cache_failed_entries)
|
||||
self._lock.acquire()
|
||||
cache_failed_cleanup = dict()
|
||||
for digest in self._cache_failed:
|
||||
(time_ns_cache, login_cache) = self._cache_failed[digest]
|
||||
age_failed = int((time_ns - time_ns_cache) / 1000 / 1000 / 1000)
|
||||
if age_failed > self._cache_failed_logins_expiry:
|
||||
cache_failed_cleanup[digest] = (login_cache, age_failed)
|
||||
cache_failed_cleanup_entries = len(cache_failed_cleanup)
|
||||
logger.debug("Login failed cache cleanup start (entries: %d)", cache_failed_cleanup_entries)
|
||||
if cache_failed_cleanup_entries > 0:
|
||||
for digest in cache_failed_cleanup:
|
||||
(login, age_failed) = cache_failed_cleanup[digest]
|
||||
logger.debug("Login failed cache entry for user+password expired: '%s' (age: %d > %d sec)", login_cache, age_failed, self._cache_failed_logins_expiry)
|
||||
del self._cache_failed[digest]
|
||||
self._lock.release()
|
||||
logger.debug("Login failed cache investigation finished")
|
||||
# check for cache failed login
|
||||
digest_failed = login + ":" + self._cache_digest(login, password, str(self._cache_failed_logins_salt_ns))
|
||||
if self._cache_failed.get(digest_failed):
|
||||
# login+password found in cache "failed" -> shortcut return
|
||||
(time_ns_cache, login_cache) = self._cache_failed[digest]
|
||||
age_failed = int((time_ns - time_ns_cache) / 1000 / 1000 / 1000)
|
||||
logger.debug("Login failed cache entry for user+password found: '%s' (age: %d sec)", login_cache, age_failed)
|
||||
self._sleep_for_constant_exec_time(time_ns_begin)
|
||||
return ("", self._type + " / cached")
|
||||
if self._cache_successful.get(login):
|
||||
# login found in cache "successful"
|
||||
(digest_cache, time_ns_cache) = self._cache_successful[login]
|
||||
digest = self._cache_digest(login, password, str(time_ns_cache))
|
||||
if digest == digest_cache:
|
||||
age_success = int((time_ns - time_ns_cache) / 1000 / 1000 / 1000)
|
||||
if age_success > self._cache_successful_logins_expiry:
|
||||
logger.debug("Login successful cache entry for user+password found but expired: '%s' (age: %d > %d sec)", login, age_success, self._cache_successful_logins_expiry)
|
||||
# delete expired success from cache
|
||||
del self._cache_successful[login]
|
||||
digest = ""
|
||||
else:
|
||||
logger.debug("Login successful cache entry for user+password found: '%s' (age: %d sec)", login, age_success)
|
||||
result = login
|
||||
result_from_cache = True
|
||||
else:
|
||||
logger.debug("Login successful cache entry for user+password not matching: '%s'", login)
|
||||
else:
|
||||
# login not found in cache, caculate always to avoid timing attacks
|
||||
digest = self._cache_digest(login, password, str(time_ns))
|
||||
if result == "":
|
||||
# verify login+password via configured backend
|
||||
logger.debug("Login verification for user+password via backend: '%s'", login)
|
||||
result = self._login(login, password)
|
||||
if result != "":
|
||||
logger.debug("Login successful for user+password via backend: '%s'", login)
|
||||
if digest == "":
|
||||
# successful login, but expired, digest must be recalculated
|
||||
digest = self._cache_digest(login, password, str(time_ns))
|
||||
# store successful login in cache
|
||||
self._lock.acquire()
|
||||
self._cache_successful[login] = (digest, time_ns)
|
||||
self._lock.release()
|
||||
logger.debug("Login successful cache for user set: '%s'", login)
|
||||
if self._cache_failed.get(digest_failed):
|
||||
logger.debug("Login failed cache for user cleared: '%s'", login)
|
||||
del self._cache_failed[digest_failed]
|
||||
else:
|
||||
logger.debug("Login failed for user+password via backend: '%s'", login)
|
||||
self._lock.acquire()
|
||||
self._cache_failed[digest_failed] = (time_ns, login)
|
||||
self._lock.release()
|
||||
logger.debug("Login failed cache for user set: '%s'", login)
|
||||
if result_from_cache is True:
|
||||
if result == "":
|
||||
self._sleep_for_constant_exec_time(time_ns_begin)
|
||||
return (result, self._type + " / cached")
|
||||
else:
|
||||
if result == "":
|
||||
self._sleep_for_constant_exec_time(time_ns_begin)
|
||||
return (result, self._type)
|
||||
else:
|
||||
# self._cache_logins is False
|
||||
result = self._login(login, password)
|
||||
if result == "":
|
||||
self._sleep_for_constant_exec_time(time_ns_begin)
|
||||
return (result, self._type)
|
||||
|
|
30
radicale/auth/denyall.py
Normal file
30
radicale/auth/denyall.py
Normal file
|
@ -0,0 +1,30 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
A dummy backend that denies any username and password.
|
||||
|
||||
Used as default for security reasons.
|
||||
|
||||
"""
|
||||
|
||||
from radicale import auth
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
return ""
|
192
radicale/auth/dovecot.py
Normal file
192
radicale/auth/dovecot.py
Normal file
|
@ -0,0 +1,192 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2014 Giel van Schijndel
|
||||
# Copyright © 2019 (GalaxyMaster)
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import itertools
|
||||
import os
|
||||
import socket
|
||||
from contextlib import closing
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
def __init__(self, configuration):
|
||||
super().__init__(configuration)
|
||||
self.timeout = 5
|
||||
self.request_id_gen = itertools.count(1)
|
||||
|
||||
config_family = configuration.get("auth", "dovecot_connection_type")
|
||||
if config_family == "AF_UNIX":
|
||||
self.family = socket.AF_UNIX
|
||||
self.address = configuration.get("auth", "dovecot_socket")
|
||||
logger.info("auth dovecot socket: %r", self.address)
|
||||
return
|
||||
|
||||
self.address = configuration.get("auth", "dovecot_host"), configuration.get("auth", "dovecot_port")
|
||||
logger.warning("auth dovecot address: %r (INSECURE, credentials are transmitted in clear text)", self.address)
|
||||
if config_family == "AF_INET":
|
||||
self.family = socket.AF_INET
|
||||
else:
|
||||
self.family = socket.AF_INET6
|
||||
|
||||
def _login(self, login, password):
|
||||
"""Validate credentials.
|
||||
|
||||
Check if the ``login``/``password`` pair is valid according to Dovecot.
|
||||
|
||||
This implementation communicates with a Dovecot server through the
|
||||
Dovecot Authentication Protocol v1.1.
|
||||
|
||||
https://dovecot.org/doc/auth-protocol.txt
|
||||
|
||||
"""
|
||||
|
||||
logger.info("Authentication request (dovecot): '{}'".format(login))
|
||||
if not login or not password:
|
||||
return ""
|
||||
|
||||
with closing(socket.socket(
|
||||
self.family,
|
||||
socket.SOCK_STREAM)
|
||||
) as sock:
|
||||
try:
|
||||
sock.settimeout(self.timeout)
|
||||
sock.connect(self.address)
|
||||
|
||||
buf = bytes()
|
||||
supported_mechs = []
|
||||
done = False
|
||||
seen_part = [0, 0, 0]
|
||||
# Upon the initial connection we only care about the
|
||||
# handshake, which is usually just around 100 bytes long,
|
||||
# e.g.
|
||||
#
|
||||
# VERSION 1 2
|
||||
# MECH PLAIN plaintext
|
||||
# SPID 22901
|
||||
# CUID 1
|
||||
# COOKIE 2dbe4116a30fb4b8a8719f4448420af7
|
||||
# DONE
|
||||
#
|
||||
# Hence, we try to read just once with a buffer big
|
||||
# enough to hold all of it.
|
||||
buf = sock.recv(1024)
|
||||
while b'\n' in buf and not done:
|
||||
line, buf = buf.split(b'\n', 1)
|
||||
parts = line.split(b'\t')
|
||||
first, parts = parts[0], parts[1:]
|
||||
|
||||
if first == b'VERSION':
|
||||
if seen_part[0]:
|
||||
logger.warning(
|
||||
"Server presented multiple VERSION "
|
||||
"tokens, ignoring"
|
||||
)
|
||||
continue
|
||||
version = parts
|
||||
logger.debug("Dovecot server version: '{}'".format(
|
||||
(b'.'.join(version)).decode()
|
||||
))
|
||||
if int(version[0]) != 1:
|
||||
logger.fatal(
|
||||
"Only Dovecot 1.x versions are supported!"
|
||||
)
|
||||
return ""
|
||||
seen_part[0] += 1
|
||||
elif first == b'MECH':
|
||||
supported_mechs.append(parts[0])
|
||||
seen_part[1] += 1
|
||||
elif first == b'DONE':
|
||||
seen_part[2] += 1
|
||||
if not (seen_part[0] and seen_part[1]):
|
||||
logger.fatal(
|
||||
"An unexpected end of the server "
|
||||
"handshake received!"
|
||||
)
|
||||
return ""
|
||||
done = True
|
||||
|
||||
if not done:
|
||||
logger.fatal("Encountered a broken server handshake!")
|
||||
return ""
|
||||
|
||||
logger.debug(
|
||||
"Supported auth methods: '{}'"
|
||||
.format((b"', '".join(supported_mechs)).decode())
|
||||
)
|
||||
if b'PLAIN' not in supported_mechs:
|
||||
logger.info(
|
||||
"Authentication method 'PLAIN' is not supported, "
|
||||
"but is required!"
|
||||
)
|
||||
return ""
|
||||
|
||||
# Handshake
|
||||
logger.debug("Sending auth handshake")
|
||||
sock.send(b'VERSION\t1\t1\n')
|
||||
sock.send(b'CPID\t%u\n' % os.getpid())
|
||||
|
||||
request_id = next(self.request_id_gen)
|
||||
logger.debug(
|
||||
"Authenticating with request id: '{}'"
|
||||
.format(request_id)
|
||||
)
|
||||
sock.send(
|
||||
b'AUTH\t%u\tPLAIN\tservice=radicale\tresp=%b\n' %
|
||||
(
|
||||
request_id, base64.b64encode(
|
||||
b'\0%b\0%b' %
|
||||
(login.encode(), password.encode())
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
logger.debug("Processing auth response")
|
||||
buf = sock.recv(1024)
|
||||
line = buf.split(b'\n', 1)[0]
|
||||
parts = line.split(b'\t')[:2]
|
||||
resp, reply_id, params = (
|
||||
parts[0], int(parts[1]),
|
||||
dict(part.split('=', 1) for part in parts[2:])
|
||||
)
|
||||
|
||||
logger.debug(
|
||||
"Auth response: result='{}', id='{}', parameters={}"
|
||||
.format(resp.decode(), reply_id, params)
|
||||
)
|
||||
if request_id != reply_id:
|
||||
logger.fatal(
|
||||
"Unexpected reply ID {} received (expected {})"
|
||||
.format(
|
||||
reply_id, request_id
|
||||
)
|
||||
)
|
||||
return ""
|
||||
|
||||
if resp == b'OK':
|
||||
return login
|
||||
|
||||
except socket.error as e:
|
||||
logger.fatal(
|
||||
"Failed to communicate with Dovecot: %s" %
|
||||
(e)
|
||||
)
|
||||
|
||||
return ""
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -22,12 +23,12 @@ Authentication backend that checks credentials with a htpasswd file.
|
|||
|
||||
Apache's htpasswd command (httpd.apache.org/docs/programs/htpasswd.html)
|
||||
manages a file for storing user credentials. It can encrypt passwords using
|
||||
different the methods BCRYPT or MD5-APR1 (a version of MD5 modified for
|
||||
Apache). MD5-APR1 provides medium security as of 2015. Only BCRYPT can be
|
||||
different the methods BCRYPT/SHA256/SHA512 or MD5-APR1 (a version of MD5 modified for
|
||||
Apache). MD5-APR1 provides medium security as of 2015. Only BCRYPT/SHA256/SHA512 can be
|
||||
considered secure by current standards.
|
||||
|
||||
MD5-APR1-encrypted credentials can be written by all versions of htpasswd (it
|
||||
is the default, in fact), whereas BCRYPT requires htpasswd 2.4.x or newer.
|
||||
is the default, in fact), whereas BCRYPT/SHA256/SHA512 requires htpasswd 2.4.x or newer.
|
||||
|
||||
The `is_authenticated(user, password)` function provided by this module
|
||||
verifies the user-given credentials by parsing the htpasswd credential file
|
||||
|
@ -35,94 +36,284 @@ pointed to by the ``htpasswd_filename`` configuration value while assuming
|
|||
the password encryption method specified via the ``htpasswd_encryption``
|
||||
configuration value.
|
||||
|
||||
The following htpasswd password encrpytion methods are supported by Radicale
|
||||
The following htpasswd password encryption methods are supported by Radicale
|
||||
out-of-the-box:
|
||||
- plain-text (created by htpasswd -p ...) -- INSECURE
|
||||
- MD5-APR1 (htpasswd -m ...) -- htpasswd's default method, INSECURE
|
||||
- SHA256 (htpasswd -2 ...)
|
||||
- SHA512 (htpasswd -5 ...)
|
||||
|
||||
- plain-text (created by htpasswd -p...) -- INSECURE
|
||||
- MD5-APR1 (htpasswd -m...) -- htpasswd's default method
|
||||
|
||||
When passlib[bcrypt] is installed:
|
||||
|
||||
- BCRYPT (htpasswd -B...) -- Requires htpasswd 2.4.x
|
||||
When bcrypt is installed:
|
||||
- BCRYPT (htpasswd -B ...) -- Requires htpasswd 2.4.x
|
||||
|
||||
"""
|
||||
|
||||
import functools
|
||||
import hmac
|
||||
import os
|
||||
import re
|
||||
import threading
|
||||
import time
|
||||
from typing import Any, Tuple
|
||||
|
||||
from passlib.hash import apr_md5_crypt
|
||||
from passlib.hash import apr_md5_crypt, sha256_crypt, sha512_crypt
|
||||
|
||||
from radicale import auth
|
||||
from radicale import auth, config, logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
def __init__(self, configuration):
|
||||
|
||||
_filename: str
|
||||
_encoding: str
|
||||
_htpasswd: dict # login -> digest
|
||||
_htpasswd_mtime_ns: int
|
||||
_htpasswd_size: int
|
||||
_htpasswd_ok: bool
|
||||
_htpasswd_not_ok_time: float
|
||||
_htpasswd_not_ok_reminder_seconds: int
|
||||
_htpasswd_bcrypt_use: int
|
||||
_htpasswd_cache: bool
|
||||
_has_bcrypt: bool
|
||||
_encryption: str
|
||||
_lock: threading.Lock
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
self._filename = configuration.get("auth", "htpasswd_filename")
|
||||
self._encoding = self.configuration.get("encoding", "stock")
|
||||
encryption = configuration.get("auth", "htpasswd_encryption")
|
||||
logger.info("auth htpasswd file: %r", self._filename)
|
||||
self._encoding = configuration.get("encoding", "stock")
|
||||
logger.info("auth htpasswd file encoding: %r", self._encoding)
|
||||
self._htpasswd_cache = configuration.get("auth", "htpasswd_cache")
|
||||
logger.info("auth htpasswd cache: %s", self._htpasswd_cache)
|
||||
self._encryption: str = configuration.get("auth", "htpasswd_encryption")
|
||||
logger.info("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s'", self._encryption)
|
||||
|
||||
if encryption == "plain":
|
||||
self._has_bcrypt = False
|
||||
self._htpasswd_ok = False
|
||||
self._htpasswd_not_ok_reminder_seconds = 60 # currently hardcoded
|
||||
(self._htpasswd_ok, self._htpasswd_bcrypt_use, self._htpasswd, self._htpasswd_size, self._htpasswd_mtime_ns) = self._read_htpasswd(True, False)
|
||||
self._lock = threading.Lock()
|
||||
|
||||
if self._encryption == "plain":
|
||||
self._verify = self._plain
|
||||
elif encryption == "md5":
|
||||
elif self._encryption == "md5":
|
||||
self._verify = self._md5apr1
|
||||
elif encryption == "bcrypt":
|
||||
elif self._encryption == "sha256":
|
||||
self._verify = self._sha256
|
||||
elif self._encryption == "sha512":
|
||||
self._verify = self._sha512
|
||||
elif self._encryption == "bcrypt" or self._encryption == "autodetect":
|
||||
try:
|
||||
from passlib.hash import bcrypt
|
||||
import bcrypt
|
||||
except ImportError as e:
|
||||
raise RuntimeError(
|
||||
"The htpasswd encryption method 'bcrypt' requires "
|
||||
"the passlib[bcrypt] module.") from e
|
||||
# A call to `encrypt` raises passlib.exc.MissingBackendError with a
|
||||
# good error message if bcrypt backend is not available. Trigger
|
||||
# this here.
|
||||
bcrypt.hash("test-bcrypt-backend")
|
||||
self._verify = functools.partial(self._bcrypt, bcrypt)
|
||||
if (self._encryption == "autodetect") and (self._htpasswd_bcrypt_use == 0):
|
||||
logger.warning("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s' which can require bycrypt module, but currently no entries found", self._encryption)
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"The htpasswd encryption method 'bcrypt' or 'autodetect' requires "
|
||||
"the bcrypt module (entries found: %d)." % self._htpasswd_bcrypt_use) from e
|
||||
else:
|
||||
self._has_bcrypt = True
|
||||
if self._encryption == "autodetect":
|
||||
if self._htpasswd_bcrypt_use == 0:
|
||||
logger.info("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s' and bycrypt module found, but currently not required", self._encryption)
|
||||
else:
|
||||
logger.info("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s' and bycrypt module found (bcrypt entries found: %d)", self._encryption, self._htpasswd_bcrypt_use)
|
||||
if self._encryption == "bcrypt":
|
||||
self._verify = functools.partial(self._bcrypt, bcrypt)
|
||||
else:
|
||||
self._verify = self._autodetect
|
||||
if self._htpasswd_bcrypt_use:
|
||||
self._verify_bcrypt = functools.partial(self._bcrypt, bcrypt)
|
||||
else:
|
||||
raise RuntimeError("The htpasswd encryption method %r is not "
|
||||
"supported." % encryption)
|
||||
"supported." % self._encryption)
|
||||
|
||||
def _plain(self, hash_value, password):
|
||||
def _plain(self, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
"""Check if ``hash_value`` and ``password`` match, plain method."""
|
||||
return hmac.compare_digest(hash_value.encode(), password.encode())
|
||||
return ("PLAIN", hmac.compare_digest(hash_value.encode(), password.encode()))
|
||||
|
||||
def _bcrypt(self, bcrypt, hash_value, password):
|
||||
return bcrypt.verify(password, hash_value.strip())
|
||||
def _plain_fallback(self, method_orig, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
"""Check if ``hash_value`` and ``password`` match, plain method / fallback in case of hash length is not matching on autodetection."""
|
||||
info = "PLAIN/fallback as hash length not matching for " + method_orig + ": " + str(len(hash_value))
|
||||
return (info, hmac.compare_digest(hash_value.encode(), password.encode()))
|
||||
|
||||
def _md5apr1(self, hash_value, password):
|
||||
return apr_md5_crypt.verify(password, hash_value.strip())
|
||||
def _bcrypt(self, bcrypt: Any, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
if self._encryption == "autodetect" and len(hash_value) != 60:
|
||||
return self._plain_fallback("BCRYPT", hash_value, password)
|
||||
else:
|
||||
return ("BCRYPT", bcrypt.checkpw(password=password.encode('utf-8'), hashed_password=hash_value.encode()))
|
||||
|
||||
def login(self, login, password):
|
||||
def _md5apr1(self, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
if self._encryption == "autodetect" and len(hash_value) != 37:
|
||||
return self._plain_fallback("MD5-APR1", hash_value, password)
|
||||
else:
|
||||
return ("MD5-APR1", apr_md5_crypt.verify(password, hash_value.strip()))
|
||||
|
||||
def _sha256(self, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
if self._encryption == "autodetect" and len(hash_value) != 63:
|
||||
return self._plain_fallback("SHA-256", hash_value, password)
|
||||
else:
|
||||
return ("SHA-256", sha256_crypt.verify(password, hash_value.strip()))
|
||||
|
||||
def _sha512(self, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
if self._encryption == "autodetect" and len(hash_value) != 106:
|
||||
return self._plain_fallback("SHA-512", hash_value, password)
|
||||
else:
|
||||
return ("SHA-512", sha512_crypt.verify(password, hash_value.strip()))
|
||||
|
||||
def _autodetect(self, hash_value: str, password: str) -> tuple[str, bool]:
|
||||
if hash_value.startswith("$apr1$", 0, 6):
|
||||
# MD5-APR1
|
||||
return self._md5apr1(hash_value, password)
|
||||
elif re.match(r"^\$2(a|b|x|y)?\$", hash_value):
|
||||
# BCRYPT
|
||||
return self._verify_bcrypt(hash_value, password)
|
||||
elif hash_value.startswith("$5$", 0, 3):
|
||||
# SHA-256
|
||||
return self._sha256(hash_value, password)
|
||||
elif hash_value.startswith("$6$", 0, 3):
|
||||
# SHA-512
|
||||
return self._sha512(hash_value, password)
|
||||
else:
|
||||
return self._plain(hash_value, password)
|
||||
|
||||
def _read_htpasswd(self, init: bool, suppress: bool) -> Tuple[bool, int, dict, int, int]:
|
||||
"""Read htpasswd file
|
||||
|
||||
init == True: stop on error
|
||||
init == False: warn/skip on error and set mark to log reminder every interval
|
||||
suppress == True: suppress warnings, change info to debug (used in non-caching mode)
|
||||
suppress == False: do not suppress warnings (used in caching mode)
|
||||
|
||||
"""
|
||||
htpasswd_ok = True
|
||||
bcrypt_use = 0
|
||||
if (init is True) or (suppress is True):
|
||||
info = "Read"
|
||||
else:
|
||||
info = "Re-read"
|
||||
if suppress is False:
|
||||
logger.info("%s content of htpasswd file start: %r", info, self._filename)
|
||||
else:
|
||||
logger.debug("%s content of htpasswd file start: %r", info, self._filename)
|
||||
htpasswd: dict[str, str] = dict()
|
||||
entries = 0
|
||||
duplicates = 0
|
||||
errors = 0
|
||||
try:
|
||||
with open(self._filename, encoding=self._encoding) as f:
|
||||
line_num = 0
|
||||
for line in f:
|
||||
line_num += 1
|
||||
line = line.rstrip("\n")
|
||||
if line.lstrip() and not line.lstrip().startswith("#"):
|
||||
try:
|
||||
login, digest = line.split(":", maxsplit=1)
|
||||
skip = False
|
||||
if login == "" or digest == "":
|
||||
if init is True:
|
||||
raise ValueError("htpasswd file contains problematic line not matching <login>:<digest> in line: %d" % line_num)
|
||||
else:
|
||||
errors += 1
|
||||
logger.warning("htpasswd file contains problematic line not matching <login>:<digest> in line: %d (ignored)", line_num)
|
||||
htpasswd_ok = False
|
||||
skip = True
|
||||
else:
|
||||
if htpasswd.get(login):
|
||||
duplicates += 1
|
||||
if init is True:
|
||||
raise ValueError("htpasswd file contains duplicate login: '%s'", login, line_num)
|
||||
else:
|
||||
logger.warning("htpasswd file contains duplicate login: '%s' (line: %d / ignored)", login, line_num)
|
||||
htpasswd_ok = False
|
||||
skip = True
|
||||
else:
|
||||
if re.match(r"^\$2(a|b|x|y)?\$", digest) and len(digest) == 60:
|
||||
if init is True:
|
||||
bcrypt_use += 1
|
||||
else:
|
||||
if self._has_bcrypt is False:
|
||||
logger.warning("htpasswd file contains bcrypt digest login: '%s' (line: %d / ignored because module is not loaded)", login, line_num)
|
||||
skip = True
|
||||
htpasswd_ok = False
|
||||
if skip is False:
|
||||
htpasswd[login] = digest
|
||||
entries += 1
|
||||
except ValueError as e:
|
||||
if init is True:
|
||||
raise RuntimeError("Invalid htpasswd file %r: %s" % (self._filename, e)) from e
|
||||
except OSError as e:
|
||||
if init is True:
|
||||
raise RuntimeError("Failed to load htpasswd file %r: %s" % (self._filename, e)) from e
|
||||
else:
|
||||
logger.warning("Failed to load htpasswd file on re-read: %r" % self._filename)
|
||||
htpasswd_ok = False
|
||||
htpasswd_size = os.stat(self._filename).st_size
|
||||
htpasswd_mtime_ns = os.stat(self._filename).st_mtime_ns
|
||||
if suppress is False:
|
||||
logger.info("%s content of htpasswd file done: %r (entries: %d, duplicates: %d, errors: %d)", info, self._filename, entries, duplicates, errors)
|
||||
else:
|
||||
logger.debug("%s content of htpasswd file done: %r (entries: %d, duplicates: %d, errors: %d)", info, self._filename, entries, duplicates, errors)
|
||||
if htpasswd_ok is True:
|
||||
self._htpasswd_not_ok_time = 0
|
||||
else:
|
||||
self._htpasswd_not_ok_time = time.time()
|
||||
return (htpasswd_ok, bcrypt_use, htpasswd, htpasswd_size, htpasswd_mtime_ns)
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
"""Validate credentials.
|
||||
|
||||
Iterate through htpasswd credential file until login matches, extract
|
||||
hash (encrypted password) and check hash against password,
|
||||
using the method specified in the Radicale config.
|
||||
|
||||
The content of the file is not cached because reading is generally a
|
||||
very cheap operation, and it's useful to get live updates of the
|
||||
htpasswd file.
|
||||
Optional: the content of the file is cached and live updates will be detected by
|
||||
comparing mtime_ns and size
|
||||
|
||||
"""
|
||||
try:
|
||||
with open(self._filename, encoding=self._encoding) as f:
|
||||
for line in f:
|
||||
line = line.rstrip("\n")
|
||||
if line.lstrip() and not line.lstrip().startswith("#"):
|
||||
try:
|
||||
hash_login, hash_value = line.split(
|
||||
":", maxsplit=1)
|
||||
# Always compare both login and password to avoid
|
||||
# timing attacks, see #591.
|
||||
login_ok = hmac.compare_digest(
|
||||
hash_login.encode(), login.encode())
|
||||
password_ok = self._verify(hash_value, password)
|
||||
if login_ok and password_ok:
|
||||
return login
|
||||
except ValueError as e:
|
||||
raise RuntimeError("Invalid htpasswd file %r: %s" %
|
||||
(self._filename, e)) from e
|
||||
except OSError as e:
|
||||
raise RuntimeError("Failed to load htpasswd file %r: %s" %
|
||||
(self._filename, e)) from e
|
||||
login_ok = False
|
||||
digest: str
|
||||
if self._htpasswd_cache is True:
|
||||
# check and re-read file if required
|
||||
with self._lock:
|
||||
htpasswd_size = os.stat(self._filename).st_size
|
||||
htpasswd_mtime_ns = os.stat(self._filename).st_mtime_ns
|
||||
if (htpasswd_size != self._htpasswd_size) or (htpasswd_mtime_ns != self._htpasswd_mtime_ns):
|
||||
(self._htpasswd_ok, self._htpasswd_bcrypt_use, self._htpasswd, self._htpasswd_size, self._htpasswd_mtime_ns) = self._read_htpasswd(False, False)
|
||||
self._htpasswd_not_ok_time = 0
|
||||
|
||||
# log reminder of problemantic file every interval
|
||||
current_time = time.time()
|
||||
if (self._htpasswd_ok is False):
|
||||
if (self._htpasswd_not_ok_time > 0):
|
||||
if (current_time - self._htpasswd_not_ok_time) > self._htpasswd_not_ok_reminder_seconds:
|
||||
logger.warning("htpasswd file still contains issues (REMINDER, check warnings in the past): %r" % self._filename)
|
||||
self._htpasswd_not_ok_time = current_time
|
||||
else:
|
||||
self._htpasswd_not_ok_time = current_time
|
||||
|
||||
if self._htpasswd.get(login):
|
||||
digest = self._htpasswd[login]
|
||||
login_ok = True
|
||||
else:
|
||||
# read file on every request
|
||||
(htpasswd_ok, htpasswd_bcrypt_use, htpasswd, htpasswd_size, htpasswd_mtime_ns) = self._read_htpasswd(False, True)
|
||||
if htpasswd.get(login):
|
||||
digest = htpasswd[login]
|
||||
login_ok = True
|
||||
|
||||
if login_ok is True:
|
||||
try:
|
||||
(method, password_ok) = self._verify(digest, password)
|
||||
except ValueError as e:
|
||||
logger.error("Login verification failed for user: '%s' (htpasswd/%s) with errror '%s'", login, self._encryption, e)
|
||||
return ""
|
||||
if password_ok:
|
||||
logger.debug("Login verification successful for user: '%s' (htpasswd/%s/%s)", login, self._encryption, method)
|
||||
return login
|
||||
else:
|
||||
logger.warning("Login verification failed for user: '%s' (htpasswd/%s/%s)", login, self._encryption, method)
|
||||
else:
|
||||
logger.warning("Login verification user not found (htpasswd): '%s'", login)
|
||||
return ""
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -26,9 +26,14 @@ if the reverse proxy is not configured properly.
|
|||
|
||||
"""
|
||||
|
||||
import radicale.auth.none as none
|
||||
from typing import Tuple, Union
|
||||
|
||||
from radicale import types
|
||||
from radicale.auth import none
|
||||
|
||||
|
||||
class Auth(none.Auth):
|
||||
def get_external_login(self, environ):
|
||||
|
||||
def get_external_login(self, environ: types.WSGIEnviron) -> Union[
|
||||
Tuple[()], Tuple[str, str]]:
|
||||
return environ.get("HTTP_X_REMOTE_USER", ""), ""
|
||||
|
|
73
radicale/auth/imap.py
Normal file
73
radicale/auth/imap.py
Normal file
|
@ -0,0 +1,73 @@
|
|||
# RadicaleIMAP IMAP authentication plugin for Radicale.
|
||||
# Copyright © 2017, 2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import imaplib
|
||||
import ssl
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
"""Authenticate user with IMAP."""
|
||||
|
||||
def __init__(self, configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
self._host, self._port = self.configuration.get("auth", "imap_host")
|
||||
logger.info("auth imap host: %r", self._host)
|
||||
self._security = self.configuration.get("auth", "imap_security")
|
||||
if self._security == "none":
|
||||
logger.warning("auth imap security: %s (INSECURE, credentials are transmitted in clear text)", self._security)
|
||||
else:
|
||||
logger.info("auth imap security: %s", self._security)
|
||||
if self._security == "tls":
|
||||
if self._port is None:
|
||||
self._port = 993
|
||||
logger.info("auth imap port (autoselected): %d", self._port)
|
||||
else:
|
||||
logger.info("auth imap port: %d", self._port)
|
||||
else:
|
||||
if self._port is None:
|
||||
self._port = 143
|
||||
logger.info("auth imap port (autoselected): %d", self._port)
|
||||
else:
|
||||
logger.info("auth imap port: %d", self._port)
|
||||
|
||||
def _login(self, login, password) -> str:
|
||||
try:
|
||||
connection: imaplib.IMAP4 | imaplib.IMAP4_SSL
|
||||
if self._security == "tls":
|
||||
connection = imaplib.IMAP4_SSL(
|
||||
host=self._host, port=self._port,
|
||||
ssl_context=ssl.create_default_context())
|
||||
else:
|
||||
connection = imaplib.IMAP4(host=self._host, port=self._port)
|
||||
if self._security == "starttls":
|
||||
connection.starttls(ssl.create_default_context())
|
||||
try:
|
||||
connection.authenticate(
|
||||
"PLAIN",
|
||||
lambda _: "{0}\x00{0}\x00{1}".format(login, password).encode(),
|
||||
)
|
||||
except imaplib.IMAP4.error as e:
|
||||
logger.warning("IMAP authentication failed for user %r: %s", login, e, exc_info=False)
|
||||
return ""
|
||||
connection.logout()
|
||||
return login
|
||||
except (OSError, imaplib.IMAP4.error) as e:
|
||||
logger.error("Failed to communicate with IMAP server %r: %s" % ("[%s]:%d" % (self._host, self._port), e))
|
||||
return ""
|
269
radicale/auth/ldap.py
Normal file
269
radicale/auth/ldap.py
Normal file
|
@ -0,0 +1,269 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2022-2024 Peter Varkoly
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
Authentication backend that checks credentials with a LDAP server.
|
||||
Following parameters are needed in the configuration:
|
||||
ldap_uri The LDAP URL to the server like ldap://localhost
|
||||
ldap_base The baseDN of the LDAP server
|
||||
ldap_reader_dn The DN of a LDAP user with read access to get the user accounts
|
||||
ldap_secret The password of the ldap_reader_dn
|
||||
ldap_secret_file The path of the file containing the password of the ldap_reader_dn
|
||||
ldap_filter The search filter to find the user to authenticate by the username
|
||||
ldap_user_attribute The attribute to be used as username after authentication
|
||||
ldap_groups_attribute The attribute containing group memberships in the LDAP user entry
|
||||
Following parameters controls SSL connections:
|
||||
ldap_use_ssl If the connection
|
||||
ldap_ssl_verify_mode The certificate verification mode. NONE, OPTIONAL, default is REQUIRED
|
||||
ldap_ssl_ca_file
|
||||
|
||||
"""
|
||||
import ssl
|
||||
|
||||
from radicale import auth, config
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
_ldap_uri: str
|
||||
_ldap_base: str
|
||||
_ldap_reader_dn: str
|
||||
_ldap_secret: str
|
||||
_ldap_filter: str
|
||||
_ldap_attributes: list[str] = []
|
||||
_ldap_user_attr: str
|
||||
_ldap_groups_attr: str
|
||||
_ldap_module_version: int = 3
|
||||
_ldap_use_ssl: bool = False
|
||||
_ldap_ssl_verify_mode: int = ssl.CERT_REQUIRED
|
||||
_ldap_ssl_ca_file: str = ""
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
try:
|
||||
import ldap3
|
||||
self.ldap3 = ldap3
|
||||
except ImportError:
|
||||
try:
|
||||
import ldap
|
||||
self._ldap_module_version = 2
|
||||
self.ldap = ldap
|
||||
except ImportError as e:
|
||||
raise RuntimeError("LDAP authentication requires the ldap3 module") from e
|
||||
|
||||
self._ldap_ignore_attribute_create_modify_timestamp = configuration.get("auth", "ldap_ignore_attribute_create_modify_timestamp")
|
||||
if self._ldap_ignore_attribute_create_modify_timestamp:
|
||||
self.ldap3.utils.config._ATTRIBUTES_EXCLUDED_FROM_CHECK.extend(['createTimestamp', 'modifyTimestamp'])
|
||||
logger.info("auth.ldap_ignore_attribute_create_modify_timestamp applied")
|
||||
|
||||
self._ldap_uri = configuration.get("auth", "ldap_uri")
|
||||
self._ldap_base = configuration.get("auth", "ldap_base")
|
||||
self._ldap_reader_dn = configuration.get("auth", "ldap_reader_dn")
|
||||
self._ldap_secret = configuration.get("auth", "ldap_secret")
|
||||
self._ldap_filter = configuration.get("auth", "ldap_filter")
|
||||
self._ldap_user_attr = configuration.get("auth", "ldap_user_attribute")
|
||||
self._ldap_groups_attr = configuration.get("auth", "ldap_groups_attribute")
|
||||
ldap_secret_file_path = configuration.get("auth", "ldap_secret_file")
|
||||
if ldap_secret_file_path:
|
||||
with open(ldap_secret_file_path, 'r') as file:
|
||||
self._ldap_secret = file.read().rstrip('\n')
|
||||
if self._ldap_module_version == 3:
|
||||
self._ldap_use_ssl = configuration.get("auth", "ldap_use_ssl")
|
||||
if self._ldap_use_ssl:
|
||||
self._ldap_ssl_ca_file = configuration.get("auth", "ldap_ssl_ca_file")
|
||||
tmp = configuration.get("auth", "ldap_ssl_verify_mode")
|
||||
if tmp == "NONE":
|
||||
self._ldap_ssl_verify_mode = ssl.CERT_NONE
|
||||
elif tmp == "OPTIONAL":
|
||||
self._ldap_ssl_verify_mode = ssl.CERT_OPTIONAL
|
||||
logger.info("auth.ldap_uri : %r" % self._ldap_uri)
|
||||
logger.info("auth.ldap_base : %r" % self._ldap_base)
|
||||
logger.info("auth.ldap_reader_dn : %r" % self._ldap_reader_dn)
|
||||
logger.info("auth.ldap_filter : %r" % self._ldap_filter)
|
||||
if self._ldap_user_attr:
|
||||
logger.info("auth.ldap_user_attribute : %r" % self._ldap_user_attr)
|
||||
else:
|
||||
logger.info("auth.ldap_user_attribute : (not provided)")
|
||||
if self._ldap_groups_attr:
|
||||
logger.info("auth.ldap_groups_attribute: %r" % self._ldap_groups_attr)
|
||||
else:
|
||||
logger.info("auth.ldap_groups_attribute: (not provided)")
|
||||
if ldap_secret_file_path:
|
||||
logger.info("auth.ldap_secret_file_path: %r" % ldap_secret_file_path)
|
||||
if self._ldap_secret:
|
||||
logger.info("auth.ldap_secret : (from file)")
|
||||
else:
|
||||
logger.info("auth.ldap_secret_file_path: (not provided)")
|
||||
if self._ldap_secret:
|
||||
logger.info("auth.ldap_secret : (from config)")
|
||||
if self._ldap_reader_dn and not self._ldap_secret:
|
||||
logger.error("auth.ldap_secret : (not provided)")
|
||||
raise RuntimeError("LDAP authentication requires ldap_secret for ldap_reader_dn")
|
||||
logger.info("auth.ldap_use_ssl : %s" % self._ldap_use_ssl)
|
||||
if self._ldap_use_ssl is True:
|
||||
logger.info("auth.ldap_ssl_verify_mode : %s" % self._ldap_ssl_verify_mode)
|
||||
if self._ldap_ssl_ca_file:
|
||||
logger.info("auth.ldap_ssl_ca_file : %r" % self._ldap_ssl_ca_file)
|
||||
else:
|
||||
logger.info("auth.ldap_ssl_ca_file : (not provided)")
|
||||
"""Extend attributes to to be returned in the user query"""
|
||||
if self._ldap_groups_attr:
|
||||
self._ldap_attributes.append(self._ldap_groups_attr)
|
||||
if self._ldap_user_attr:
|
||||
self._ldap_attributes.append(self._ldap_user_attr)
|
||||
logger.info("ldap_attributes : %r" % self._ldap_attributes)
|
||||
|
||||
def _login2(self, login: str, password: str) -> str:
|
||||
try:
|
||||
"""Bind as reader dn"""
|
||||
logger.debug(f"_login2 {self._ldap_uri}, {self._ldap_reader_dn}")
|
||||
conn = self.ldap.initialize(self._ldap_uri)
|
||||
conn.protocol_version = 3
|
||||
conn.set_option(self.ldap.OPT_REFERRALS, 0)
|
||||
conn.simple_bind_s(self._ldap_reader_dn, self._ldap_secret)
|
||||
"""Search for the dn of user to authenticate"""
|
||||
escaped_login = self.ldap.filter.escape_filter_chars(login)
|
||||
logger.debug(f"_login2 login escaped for LDAP filters: {escaped_login}")
|
||||
res = conn.search_s(
|
||||
self._ldap_base,
|
||||
self.ldap.SCOPE_SUBTREE,
|
||||
filterstr=self._ldap_filter.format(escaped_login),
|
||||
attrlist=self._ldap_attributes
|
||||
)
|
||||
if len(res) != 1:
|
||||
"""User could not be found unambiguously"""
|
||||
logger.debug(f"_login2 no unique DN found for '{login}'")
|
||||
return ""
|
||||
user_entry = res[0]
|
||||
user_dn = user_entry[0]
|
||||
logger.debug(f"_login2 found LDAP user DN {user_dn}")
|
||||
"""Close LDAP connection"""
|
||||
conn.unbind()
|
||||
except Exception as e:
|
||||
raise RuntimeError(f"Invalid LDAP configuration:{e}")
|
||||
|
||||
try:
|
||||
"""Bind as user to authenticate"""
|
||||
conn = self.ldap.initialize(self._ldap_uri)
|
||||
conn.protocol_version = 3
|
||||
conn.set_option(self.ldap.OPT_REFERRALS, 0)
|
||||
conn.simple_bind_s(user_dn, password)
|
||||
tmp: list[str] = []
|
||||
if self._ldap_groups_attr:
|
||||
tmp = []
|
||||
for g in user_entry[1][self._ldap_groups_attr]:
|
||||
"""Get group g's RDN's attribute value"""
|
||||
try:
|
||||
rdns = self.ldap.dn.explode_dn(g, notypes=True)
|
||||
tmp.append(rdns[0])
|
||||
except Exception:
|
||||
tmp.append(g.decode('utf8'))
|
||||
self._ldap_groups = set(tmp)
|
||||
logger.debug("_login2 LDAP groups of user: %s", ",".join(self._ldap_groups))
|
||||
if self._ldap_user_attr:
|
||||
if user_entry[1][self._ldap_user_attr]:
|
||||
tmplogin = user_entry[1][self._ldap_user_attr][0]
|
||||
login = tmplogin.decode('utf-8')
|
||||
logger.debug(f"_login2 user set to: '{login}'")
|
||||
conn.unbind()
|
||||
logger.debug(f"_login2 {login} successfully authenticated")
|
||||
return login
|
||||
except self.ldap.INVALID_CREDENTIALS:
|
||||
return ""
|
||||
|
||||
def _login3(self, login: str, password: str) -> str:
|
||||
"""Connect the server"""
|
||||
try:
|
||||
logger.debug(f"_login3 {self._ldap_uri}, {self._ldap_reader_dn}")
|
||||
if self._ldap_use_ssl:
|
||||
tls = self.ldap3.Tls(validate=self._ldap_ssl_verify_mode)
|
||||
if self._ldap_ssl_ca_file != "":
|
||||
tls = self.ldap3.Tls(
|
||||
validate=self._ldap_ssl_verify_mode,
|
||||
ca_certs_file=self._ldap_ssl_ca_file
|
||||
)
|
||||
server = self.ldap3.Server(self._ldap_uri, use_ssl=True, tls=tls)
|
||||
else:
|
||||
server = self.ldap3.Server(self._ldap_uri)
|
||||
conn = self.ldap3.Connection(server, self._ldap_reader_dn, password=self._ldap_secret)
|
||||
except self.ldap3.core.exceptions.LDAPSocketOpenError:
|
||||
raise RuntimeError("Unable to reach LDAP server")
|
||||
except Exception as e:
|
||||
logger.debug(f"_login3 error 1 {e}")
|
||||
pass
|
||||
|
||||
if not conn.bind():
|
||||
logger.debug("_login3 cannot bind")
|
||||
raise RuntimeError("Unable to read from LDAP server")
|
||||
|
||||
logger.debug(f"_login3 bind as {self._ldap_reader_dn}")
|
||||
"""Search the user dn"""
|
||||
escaped_login = self.ldap3.utils.conv.escape_filter_chars(login)
|
||||
logger.debug(f"_login3 login escaped for LDAP filters: {escaped_login}")
|
||||
conn.search(
|
||||
search_base=self._ldap_base,
|
||||
search_filter=self._ldap_filter.format(escaped_login),
|
||||
search_scope=self.ldap3.SUBTREE,
|
||||
attributes=self._ldap_attributes
|
||||
)
|
||||
if len(conn.entries) != 1:
|
||||
"""User could not be found unambiguously"""
|
||||
logger.debug(f"_login3 no unique DN found for '{login}'")
|
||||
return ""
|
||||
|
||||
user_entry = conn.response[0]
|
||||
conn.unbind()
|
||||
user_dn = user_entry['dn']
|
||||
logger.debug(f"_login3 found LDAP user DN {user_dn}")
|
||||
try:
|
||||
"""Try to bind as the user itself"""
|
||||
conn = self.ldap3.Connection(server, user_dn, password=password)
|
||||
if not conn.bind():
|
||||
logger.debug(f"_login3 user '{login}' cannot be found")
|
||||
return ""
|
||||
tmp: list[str] = []
|
||||
if self._ldap_groups_attr:
|
||||
tmp = []
|
||||
for g in user_entry['attributes'][self._ldap_groups_attr]:
|
||||
"""Get group g's RDN's attribute value"""
|
||||
try:
|
||||
rdns = self.ldap3.utils.dn.parse_dn(g)
|
||||
tmp.append(rdns[0][1])
|
||||
except Exception:
|
||||
tmp.append(g)
|
||||
self._ldap_groups = set(tmp)
|
||||
logger.debug("_login3 LDAP groups of user: %s", ",".join(self._ldap_groups))
|
||||
if self._ldap_user_attr:
|
||||
if user_entry['attributes'][self._ldap_user_attr]:
|
||||
login = user_entry['attributes'][self._ldap_user_attr]
|
||||
logger.debug(f"_login3 user set to: '{login}'")
|
||||
conn.unbind()
|
||||
logger.debug(f"_login3 {login} successfully authenticated")
|
||||
return login
|
||||
except Exception as e:
|
||||
logger.debug(f"_login3 error 2 {e}")
|
||||
pass
|
||||
return ""
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
"""Validate credentials.
|
||||
In first step we make a connection to the LDAP server with the ldap_reader_dn credential.
|
||||
In next step the DN of the user to authenticate will be searched.
|
||||
In the last step the authentication of the user will be proceeded.
|
||||
"""
|
||||
if self._ldap_module_version == 2:
|
||||
return self._login2(login, password)
|
||||
return self._login3(login, password)
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
|
@ -26,5 +26,6 @@ from radicale import auth
|
|||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
def login(self, login, password):
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
return login
|
||||
|
|
66
radicale/auth/oauth2.py
Normal file
66
radicale/auth/oauth2.py
Normal file
|
@ -0,0 +1,66 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
#
|
||||
# Original from https://gitlab.mim-libre.fr/alphabet/radicale_oauth/
|
||||
# Copyright © 2021-2022 Bruno Boiget
|
||||
# Copyright © 2022-2022 Daniel Dehennin
|
||||
#
|
||||
# Since migration into upstream
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Authentication backend that checks credentials against an oauth2 server auth endpoint
|
||||
"""
|
||||
|
||||
import requests
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
def __init__(self, configuration):
|
||||
super().__init__(configuration)
|
||||
self._endpoint = configuration.get("auth", "oauth2_token_endpoint")
|
||||
if not self._endpoint:
|
||||
logger.error("auth.oauth2_token_endpoint URL missing")
|
||||
raise RuntimeError("OAuth2 token endpoint URL is required")
|
||||
logger.info("auth OAuth2 token endpoint: %s" % (self._endpoint))
|
||||
|
||||
def _login(self, login, password):
|
||||
"""Validate credentials.
|
||||
Sends login credentials to oauth token endpoint and checks that a token is returned
|
||||
"""
|
||||
try:
|
||||
# authenticate to authentication endpoint and return login if ok, else ""
|
||||
req_params = {
|
||||
"username": login,
|
||||
"password": password,
|
||||
"grant_type": "password",
|
||||
"client_id": "radicale",
|
||||
}
|
||||
req_headers = {"Content-Type": "application/x-www-form-urlencoded"}
|
||||
response = requests.post(
|
||||
self._endpoint, data=req_params, headers=req_headers
|
||||
)
|
||||
if (
|
||||
response.status_code == requests.codes.ok
|
||||
and "access_token" in response.json()
|
||||
):
|
||||
return login
|
||||
except OSError as e:
|
||||
logger.critical("Failed to authenticate against OAuth2 server %s: %s" % (self._endpoint, e))
|
||||
logger.warning("User failed to authenticate using OAuth2: %r" % login)
|
||||
return ""
|
105
radicale/auth/pam.py
Normal file
105
radicale/auth/pam.py
Normal file
|
@ -0,0 +1,105 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2011 Henry-Nicolas Tourneur
|
||||
# Copyright © 2021-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
PAM authentication.
|
||||
|
||||
Authentication using the ``pam-python`` module.
|
||||
|
||||
Important: radicale user need access to /etc/shadow by e.g.
|
||||
chgrp radicale /etc/shadow
|
||||
chmod g+r
|
||||
"""
|
||||
|
||||
import grp
|
||||
import pwd
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
def __init__(self, configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
try:
|
||||
import pam
|
||||
self.pam = pam
|
||||
except ImportError as e:
|
||||
raise RuntimeError("PAM authentication requires the Python pam module") from e
|
||||
self._service = configuration.get("auth", "pam_service")
|
||||
logger.info("auth.pam_service: %s" % self._service)
|
||||
self._group_membership = configuration.get("auth", "pam_group_membership")
|
||||
if (self._group_membership):
|
||||
logger.info("auth.pam_group_membership: %s" % self._group_membership)
|
||||
else:
|
||||
logger.warning("auth.pam_group_membership: (empty, nothing to check / INSECURE)")
|
||||
|
||||
def pam_authenticate(self, *args, **kwargs):
|
||||
return self.pam.authenticate(*args, **kwargs)
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
"""Check if ``user``/``password`` couple is valid."""
|
||||
if login is None or password is None:
|
||||
return ""
|
||||
|
||||
# Check whether the user exists in the PAM system
|
||||
try:
|
||||
pwd.getpwnam(login).pw_uid
|
||||
except KeyError:
|
||||
logger.debug("PAM user not found: %r" % login)
|
||||
return ""
|
||||
else:
|
||||
logger.debug("PAM user found: %r" % login)
|
||||
|
||||
# Check whether the user has a primary group (mandatory)
|
||||
try:
|
||||
# Get user primary group
|
||||
primary_group = grp.getgrgid(pwd.getpwnam(login).pw_gid).gr_name
|
||||
logger.debug("PAM user %r has primary group: %r" % (login, primary_group))
|
||||
except KeyError:
|
||||
logger.debug("PAM user has no primary group: %r" % login)
|
||||
return ""
|
||||
|
||||
# Obtain supplementary groups
|
||||
members = []
|
||||
if (self._group_membership):
|
||||
try:
|
||||
members = grp.getgrnam(self._group_membership).gr_mem
|
||||
except KeyError:
|
||||
logger.debug(
|
||||
"PAM membership required group doesn't exist: %r" %
|
||||
self._group_membership)
|
||||
return ""
|
||||
|
||||
# Check whether the user belongs to the required group
|
||||
# (primary or supplementary)
|
||||
if (self._group_membership):
|
||||
if (primary_group != self._group_membership) and (login not in members):
|
||||
logger.warning("PAM user %r belongs not to the required group: %r" % (login, self._group_membership))
|
||||
return ""
|
||||
else:
|
||||
logger.debug("PAM user %r belongs to the required group: %r" % (login, self._group_membership))
|
||||
|
||||
# Check the password
|
||||
if self.pam_authenticate(login, password, service=self._service):
|
||||
return login
|
||||
else:
|
||||
logger.debug("PAM authentication not successful for user: %r (service %r)" % (login, self._service))
|
||||
return ""
|
|
@ -1,8 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -25,9 +25,14 @@ It's intended for use with an external WSGI server.
|
|||
|
||||
"""
|
||||
|
||||
import radicale.auth.none as none
|
||||
from typing import Tuple, Union
|
||||
|
||||
from radicale import types
|
||||
from radicale.auth import none
|
||||
|
||||
|
||||
class Auth(none.Auth):
|
||||
def get_external_login(self, environ):
|
||||
|
||||
def get_external_login(self, environ: types.WSGIEnviron
|
||||
) -> Union[Tuple[()], Tuple[str, str]]:
|
||||
return environ.get("REMOTE_USER", ""), ""
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -26,26 +27,32 @@ Use ``load()`` to obtain an instance of ``Configuration`` for use with
|
|||
"""
|
||||
|
||||
import contextlib
|
||||
import json
|
||||
import math
|
||||
import os
|
||||
import string
|
||||
import sys
|
||||
from collections import OrderedDict
|
||||
from configparser import RawConfigParser
|
||||
from typing import (Any, Callable, ClassVar, Iterable, List, Optional,
|
||||
Sequence, Tuple, TypeVar, Union)
|
||||
|
||||
from radicale import auth, rights, storage, web
|
||||
from radicale import auth, hook, rights, storage, types, web
|
||||
from radicale.item import check_and_sanitize_props
|
||||
|
||||
DEFAULT_CONFIG_PATH = os.pathsep.join([
|
||||
DEFAULT_CONFIG_PATH: str = os.pathsep.join([
|
||||
"?/etc/radicale/config",
|
||||
"?~/.config/radicale/config"])
|
||||
|
||||
|
||||
def positive_int(value):
|
||||
def positive_int(value: Any) -> int:
|
||||
value = int(value)
|
||||
if value < 0:
|
||||
raise ValueError("value is negative: %d" % value)
|
||||
return value
|
||||
|
||||
|
||||
def positive_float(value):
|
||||
def positive_float(value: Any) -> float:
|
||||
value = float(value)
|
||||
if not math.isfinite(value):
|
||||
raise ValueError("value is infinite")
|
||||
|
@ -56,55 +63,88 @@ def positive_float(value):
|
|||
return value
|
||||
|
||||
|
||||
def logging_level(value):
|
||||
def logging_level(value: Any) -> str:
|
||||
if value not in ("debug", "info", "warning", "error", "critical"):
|
||||
raise ValueError("unsupported level: %r" % value)
|
||||
return value
|
||||
|
||||
|
||||
def filepath(value):
|
||||
def filepath(value: Any) -> str:
|
||||
if not value:
|
||||
return ""
|
||||
value = os.path.expanduser(value)
|
||||
if os.name == "nt":
|
||||
if sys.platform == "win32":
|
||||
value = os.path.expandvars(value)
|
||||
return os.path.abspath(value)
|
||||
|
||||
|
||||
def list_of_ip_address(value):
|
||||
def list_of_ip_address(value: Any) -> List[Tuple[str, int]]:
|
||||
def ip_address(value):
|
||||
try:
|
||||
address, port = value.strip().rsplit(":", 1)
|
||||
return address.strip("[] "), int(port)
|
||||
address, port = value.rsplit(":", 1)
|
||||
return address.strip(string.whitespace + "[]"), int(port)
|
||||
except ValueError:
|
||||
raise ValueError("malformed IP address: %r" % value)
|
||||
return [ip_address(s.strip()) for s in value.split(",")]
|
||||
return [ip_address(s) for s in value.split(",")]
|
||||
|
||||
|
||||
def str_or_callable(value):
|
||||
def str_or_callable(value: Any) -> Union[str, Callable]:
|
||||
if callable(value):
|
||||
return value
|
||||
return str(value)
|
||||
|
||||
|
||||
def unspecified_type(value):
|
||||
def unspecified_type(value: Any) -> Any:
|
||||
return value
|
||||
|
||||
|
||||
def _convert_to_bool(value):
|
||||
def _convert_to_bool(value: Any) -> bool:
|
||||
if value.lower() not in RawConfigParser.BOOLEAN_STATES:
|
||||
raise ValueError("Not a boolean: %r" % value)
|
||||
raise ValueError("not a boolean: %r" % value)
|
||||
return RawConfigParser.BOOLEAN_STATES[value.lower()]
|
||||
|
||||
|
||||
INTERNAL_OPTIONS = ("_allow_extra",)
|
||||
def imap_address(value):
|
||||
if "]" in value:
|
||||
pre_address, pre_address_port = value.rsplit("]", 1)
|
||||
else:
|
||||
pre_address, pre_address_port = "", value
|
||||
if ":" in pre_address_port:
|
||||
pre_address2, port = pre_address_port.rsplit(":", 1)
|
||||
address = pre_address + pre_address2
|
||||
else:
|
||||
address, port = pre_address + pre_address_port, None
|
||||
try:
|
||||
return (address.strip(string.whitespace + "[]"),
|
||||
None if port is None else int(port))
|
||||
except ValueError:
|
||||
raise ValueError("malformed IMAP address: %r" % value)
|
||||
|
||||
|
||||
def imap_security(value):
|
||||
if value not in ("tls", "starttls", "none"):
|
||||
raise ValueError("unsupported IMAP security: %r" % value)
|
||||
return value
|
||||
|
||||
|
||||
def json_str(value: Any) -> dict:
|
||||
if not value:
|
||||
return {}
|
||||
ret = json.loads(value)
|
||||
for (name_coll, props) in ret.items():
|
||||
checked_props = check_and_sanitize_props(props)
|
||||
ret[name_coll] = checked_props
|
||||
return ret
|
||||
|
||||
|
||||
INTERNAL_OPTIONS: Sequence[str] = ("_allow_extra",)
|
||||
# Default configuration
|
||||
DEFAULT_CONFIG_SCHEMA = OrderedDict([
|
||||
DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
|
||||
("server", OrderedDict([
|
||||
("hosts", {
|
||||
"value": "localhost:5232",
|
||||
"help": "set server hostnames including ports",
|
||||
"aliases": ["-H", "--hosts"],
|
||||
"aliases": ("-H", "--hosts",),
|
||||
"type": list_of_ip_address}),
|
||||
("max_connections", {
|
||||
"value": "8",
|
||||
|
@ -117,28 +157,40 @@ DEFAULT_CONFIG_SCHEMA = OrderedDict([
|
|||
("timeout", {
|
||||
"value": "30",
|
||||
"help": "socket timeout",
|
||||
"type": positive_int}),
|
||||
"type": positive_float}),
|
||||
("ssl", {
|
||||
"value": "False",
|
||||
"help": "use SSL connection",
|
||||
"aliases": ["-s", "--ssl"],
|
||||
"opposite": ["-S", "--no-ssl"],
|
||||
"aliases": ("-s", "--ssl",),
|
||||
"opposite_aliases": ("-S", "--no-ssl",),
|
||||
"type": bool}),
|
||||
("protocol", {
|
||||
"value": "",
|
||||
"help": "SSL/TLS protocol (Apache SSLProtocol format)",
|
||||
"type": str}),
|
||||
("ciphersuite", {
|
||||
"value": "",
|
||||
"help": "SSL/TLS Cipher Suite (OpenSSL cipher list format)",
|
||||
"type": str}),
|
||||
("certificate", {
|
||||
"value": "/etc/ssl/radicale.cert.pem",
|
||||
"help": "set certificate file",
|
||||
"aliases": ["-c", "--certificate"],
|
||||
"aliases": ("-c", "--certificate",),
|
||||
"type": filepath}),
|
||||
("key", {
|
||||
"value": "/etc/ssl/radicale.key.pem",
|
||||
"help": "set private key file",
|
||||
"aliases": ["-k", "--key"],
|
||||
"aliases": ("-k", "--key",),
|
||||
"type": filepath}),
|
||||
("certificate_authority", {
|
||||
"value": "",
|
||||
"help": "set CA certificate for validating clients",
|
||||
"aliases": ["--certificate-authority"],
|
||||
"aliases": ("--certificate-authority",),
|
||||
"type": filepath}),
|
||||
("script_name", {
|
||||
"value": "",
|
||||
"help": "script name to strip from URI if called by reverse proxy (default taken from HTTP_X_SCRIPT_NAME or SCRIPT_NAME)",
|
||||
"type": str}),
|
||||
("_internal_server", {
|
||||
"value": "False",
|
||||
"help": "the internal server is used",
|
||||
|
@ -154,18 +206,51 @@ DEFAULT_CONFIG_SCHEMA = OrderedDict([
|
|||
"type": str})])),
|
||||
("auth", OrderedDict([
|
||||
("type", {
|
||||
"value": "none",
|
||||
"help": "authentication method",
|
||||
"value": "denyall",
|
||||
"help": "authentication method (" + "|".join(auth.INTERNAL_TYPES) + ")",
|
||||
"type": str_or_callable,
|
||||
"internal": auth.INTERNAL_TYPES}),
|
||||
("cache_logins", {
|
||||
"value": "false",
|
||||
"help": "cache successful/failed logins for until expiration time",
|
||||
"type": bool}),
|
||||
("cache_successful_logins_expiry", {
|
||||
"value": "15",
|
||||
"help": "expiration time for caching successful logins in seconds",
|
||||
"type": int}),
|
||||
("cache_failed_logins_expiry", {
|
||||
"value": "90",
|
||||
"help": "expiration time for caching failed logins in seconds",
|
||||
"type": int}),
|
||||
("htpasswd_filename", {
|
||||
"value": "/etc/radicale/users",
|
||||
"help": "htpasswd filename",
|
||||
"type": filepath}),
|
||||
("htpasswd_encryption", {
|
||||
"value": "md5",
|
||||
"value": "autodetect",
|
||||
"help": "htpasswd encryption method",
|
||||
"type": str}),
|
||||
("htpasswd_cache", {
|
||||
"value": "False",
|
||||
"help": "enable caching of htpasswd file",
|
||||
"type": bool}),
|
||||
("dovecot_connection_type", {
|
||||
"value": "AF_UNIX",
|
||||
"help": "Connection type for dovecot authentication",
|
||||
"type": str_or_callable,
|
||||
"internal": auth.AUTH_SOCKET_FAMILY}),
|
||||
("dovecot_socket", {
|
||||
"value": "/var/run/dovecot/auth-client",
|
||||
"help": "dovecot auth AF_UNIX socket",
|
||||
"type": str}),
|
||||
("dovecot_host", {
|
||||
"value": "localhost",
|
||||
"help": "dovecot auth AF_INET or AF_INET6 host",
|
||||
"type": str}),
|
||||
("dovecot_port", {
|
||||
"value": "12345",
|
||||
"help": "dovecot auth port",
|
||||
"type": int}),
|
||||
("realm", {
|
||||
"value": "Radicale - Password Required",
|
||||
"help": "message displayed when a password is needed",
|
||||
|
@ -173,13 +258,101 @@ DEFAULT_CONFIG_SCHEMA = OrderedDict([
|
|||
("delay", {
|
||||
"value": "1",
|
||||
"help": "incorrect authentication delay",
|
||||
"type": positive_float})])),
|
||||
"type": positive_float}),
|
||||
("ldap_ignore_attribute_create_modify_timestamp", {
|
||||
"value": "false",
|
||||
"help": "Ignore modifyTimestamp and createTimestamp attributes. Need if Authentik LDAP server is used.",
|
||||
"type": bool}),
|
||||
("ldap_uri", {
|
||||
"value": "ldap://localhost",
|
||||
"help": "URI to the ldap server",
|
||||
"type": str}),
|
||||
("ldap_base", {
|
||||
"value": "",
|
||||
"help": "LDAP base DN of the ldap server",
|
||||
"type": str}),
|
||||
("ldap_reader_dn", {
|
||||
"value": "",
|
||||
"help": "the DN of a ldap user with read access to get the user accounts",
|
||||
"type": str}),
|
||||
("ldap_secret", {
|
||||
"value": "",
|
||||
"help": "the password of the ldap_reader_dn",
|
||||
"type": str}),
|
||||
("ldap_secret_file", {
|
||||
"value": "",
|
||||
"help": "path of the file containing the password of the ldap_reader_dn",
|
||||
"type": str}),
|
||||
("ldap_filter", {
|
||||
"value": "(cn={0})",
|
||||
"help": "the search filter to find the user DN to authenticate by the username",
|
||||
"type": str}),
|
||||
("ldap_user_attribute", {
|
||||
"value": "",
|
||||
"help": "the attribute to be used as username after authentication",
|
||||
"type": str}),
|
||||
("ldap_groups_attribute", {
|
||||
"value": "",
|
||||
"help": "attribute to read the group memberships from",
|
||||
"type": str}),
|
||||
("ldap_use_ssl", {
|
||||
"value": "False",
|
||||
"help": "Use ssl on the ldap connection",
|
||||
"type": bool}),
|
||||
("ldap_ssl_verify_mode", {
|
||||
"value": "REQUIRED",
|
||||
"help": "The certificate verification mode. NONE, OPTIONAL, default is REQUIRED",
|
||||
"type": str}),
|
||||
("ldap_ssl_ca_file", {
|
||||
"value": "",
|
||||
"help": "The path to the CA file in pem format which is used to certificate the server certificate",
|
||||
"type": str}),
|
||||
("imap_host", {
|
||||
"value": "localhost",
|
||||
"help": "IMAP server hostname: address|address:port|[address]:port|*localhost*",
|
||||
"type": imap_address}),
|
||||
("imap_security", {
|
||||
"value": "tls",
|
||||
"help": "Secure the IMAP connection: *tls*|starttls|none",
|
||||
"type": imap_security}),
|
||||
("oauth2_token_endpoint", {
|
||||
"value": "",
|
||||
"help": "OAuth2 token endpoint URL",
|
||||
"type": str}),
|
||||
("pam_group_membership", {
|
||||
"value": "",
|
||||
"help": "PAM group user should be member of",
|
||||
"type": str}),
|
||||
("pam_service", {
|
||||
"value": "radicale",
|
||||
"help": "PAM service",
|
||||
"type": str}),
|
||||
("strip_domain", {
|
||||
"value": "False",
|
||||
"help": "strip domain from username",
|
||||
"type": bool}),
|
||||
("uc_username", {
|
||||
"value": "False",
|
||||
"help": "convert username to uppercase, must be true for case-insensitive auth providers",
|
||||
"type": bool}),
|
||||
("lc_username", {
|
||||
"value": "False",
|
||||
"help": "convert username to lowercase, must be true for case-insensitive auth providers",
|
||||
"type": bool})])),
|
||||
("rights", OrderedDict([
|
||||
("type", {
|
||||
"value": "owner_only",
|
||||
"help": "rights backend",
|
||||
"type": str_or_callable,
|
||||
"internal": rights.INTERNAL_TYPES}),
|
||||
("permit_delete_collection", {
|
||||
"value": "True",
|
||||
"help": "permit delete of a collection",
|
||||
"type": bool}),
|
||||
("permit_overwrite_collection", {
|
||||
"value": "True",
|
||||
"help": "permit overwrite of a collection",
|
||||
"type": bool}),
|
||||
("file", {
|
||||
"value": "/etc/radicale/rights",
|
||||
"help": "file for rights management from_file",
|
||||
|
@ -194,10 +367,38 @@ DEFAULT_CONFIG_SCHEMA = OrderedDict([
|
|||
"value": "/var/lib/radicale/collections",
|
||||
"help": "path where collections are stored",
|
||||
"type": filepath}),
|
||||
("filesystem_cache_folder", {
|
||||
"value": "",
|
||||
"help": "path where cache of collections is stored in case of use_cache_subfolder_* options are active",
|
||||
"type": filepath}),
|
||||
("use_cache_subfolder_for_item", {
|
||||
"value": "False",
|
||||
"help": "use subfolder 'collection-cache' for 'item' cache file structure instead of inside collection folder",
|
||||
"type": bool}),
|
||||
("use_cache_subfolder_for_history", {
|
||||
"value": "False",
|
||||
"help": "use subfolder 'collection-cache' for 'history' cache file structure instead of inside collection folder",
|
||||
"type": bool}),
|
||||
("use_cache_subfolder_for_synctoken", {
|
||||
"value": "False",
|
||||
"help": "use subfolder 'collection-cache' for 'sync-token' cache file structure instead of inside collection folder",
|
||||
"type": bool}),
|
||||
("use_mtime_and_size_for_item_cache", {
|
||||
"value": "False",
|
||||
"help": "use mtime and file size instead of SHA256 for 'item' cache (improves speed)",
|
||||
"type": bool}),
|
||||
("folder_umask", {
|
||||
"value": "",
|
||||
"help": "umask for folder creation (empty: system default)",
|
||||
"type": str}),
|
||||
("max_sync_token_age", {
|
||||
"value": "2592000", # 30 days
|
||||
"help": "delete sync token that are older",
|
||||
"type": positive_int}),
|
||||
("skip_broken_item", {
|
||||
"value": "True",
|
||||
"help": "skip broken item instead of triggering exception",
|
||||
"type": bool}),
|
||||
("hook", {
|
||||
"value": "",
|
||||
"help": "command that is run after changes to storage",
|
||||
|
@ -205,7 +406,29 @@ DEFAULT_CONFIG_SCHEMA = OrderedDict([
|
|||
("_filesystem_fsync", {
|
||||
"value": "True",
|
||||
"help": "sync all changes to filesystem during requests",
|
||||
"type": bool})])),
|
||||
"type": bool}),
|
||||
("predefined_collections", {
|
||||
"value": "",
|
||||
"help": "predefined user collections",
|
||||
"type": json_str})])),
|
||||
("hook", OrderedDict([
|
||||
("type", {
|
||||
"value": "none",
|
||||
"help": "hook backend",
|
||||
"type": str,
|
||||
"internal": hook.INTERNAL_TYPES}),
|
||||
("rabbitmq_endpoint", {
|
||||
"value": "",
|
||||
"help": "endpoint where rabbitmq server is running",
|
||||
"type": str}),
|
||||
("rabbitmq_topic", {
|
||||
"value": "",
|
||||
"help": "topic to declare queue",
|
||||
"type": str}),
|
||||
("rabbitmq_queue_type", {
|
||||
"value": "",
|
||||
"help": "queue type for topic declaration",
|
||||
"type": str})])),
|
||||
("web", OrderedDict([
|
||||
("type", {
|
||||
"value": "internal",
|
||||
|
@ -214,18 +437,53 @@ DEFAULT_CONFIG_SCHEMA = OrderedDict([
|
|||
"internal": web.INTERNAL_TYPES})])),
|
||||
("logging", OrderedDict([
|
||||
("level", {
|
||||
"value": "warning",
|
||||
"value": "info",
|
||||
"help": "threshold for the logger",
|
||||
"type": logging_level}),
|
||||
("bad_put_request_content", {
|
||||
"value": "False",
|
||||
"help": "log bad PUT request content",
|
||||
"type": bool}),
|
||||
("backtrace_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log backtrace on level=debug",
|
||||
"type": bool}),
|
||||
("request_header_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log request header on level=debug",
|
||||
"type": bool}),
|
||||
("request_content_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log request content on level=debug",
|
||||
"type": bool}),
|
||||
("response_content_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log response content on level=debug",
|
||||
"type": bool}),
|
||||
("rights_rule_doesnt_match_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log rights rules which doesn't match on level=debug",
|
||||
"type": bool}),
|
||||
("storage_cache_actions_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log storage cache action on level=debug",
|
||||
"type": bool}),
|
||||
("mask_passwords", {
|
||||
"value": "True",
|
||||
"help": "mask passwords in logs",
|
||||
"type": bool})])),
|
||||
("headers", OrderedDict([
|
||||
("_allow_extra", str)]))])
|
||||
("_allow_extra", str)])),
|
||||
("reporting", OrderedDict([
|
||||
("max_freebusy_occurrence", {
|
||||
"value": "10000",
|
||||
"help": "number of occurrences per event when reporting",
|
||||
"type": positive_int})]))
|
||||
])
|
||||
|
||||
|
||||
def parse_compound_paths(*compound_paths):
|
||||
def parse_compound_paths(*compound_paths: Optional[str]
|
||||
) -> List[Tuple[str, bool]]:
|
||||
"""Parse a compound path and return the individual paths.
|
||||
Paths in a compound path are joined by ``os.pathsep``. If a path starts
|
||||
with ``?`` the return value ``IGNORE_IF_MISSING`` is set.
|
||||
|
@ -251,7 +509,8 @@ def parse_compound_paths(*compound_paths):
|
|||
return paths
|
||||
|
||||
|
||||
def load(paths=()):
|
||||
def load(paths: Optional[Iterable[Tuple[str, bool]]] = None
|
||||
) -> "Configuration":
|
||||
"""
|
||||
Create instance of ``Configuration`` for use with
|
||||
``radicale.app.Application``.
|
||||
|
@ -264,29 +523,40 @@ def load(paths=()):
|
|||
The configuration can later be changed with ``Configuration.update()``.
|
||||
|
||||
"""
|
||||
if paths is None:
|
||||
paths = []
|
||||
configuration = Configuration(DEFAULT_CONFIG_SCHEMA)
|
||||
for path, ignore_if_missing in paths:
|
||||
parser = RawConfigParser()
|
||||
config_source = "config file %r" % path
|
||||
config: types.CONFIG
|
||||
try:
|
||||
if not parser.read(path):
|
||||
config = Configuration.SOURCE_MISSING
|
||||
if not ignore_if_missing:
|
||||
raise RuntimeError("No such file: %r" % path)
|
||||
else:
|
||||
with open(path) as f:
|
||||
parser.read_file(f)
|
||||
config = {s: {o: parser[s][o] for o in parser.options(s)}
|
||||
for s in parser.sections()}
|
||||
except Exception as e:
|
||||
raise RuntimeError(
|
||||
"Failed to load %s: %s" % (config_source, e)) from e
|
||||
if not (ignore_if_missing and isinstance(e, (
|
||||
FileNotFoundError, NotADirectoryError, PermissionError))):
|
||||
raise RuntimeError("Failed to load %s: %s" % (config_source, e)
|
||||
) from e
|
||||
config = Configuration.SOURCE_MISSING
|
||||
configuration.update(config, config_source)
|
||||
return configuration
|
||||
|
||||
|
||||
class Configuration:
|
||||
SOURCE_MISSING = {}
|
||||
_Self = TypeVar("_Self", bound="Configuration")
|
||||
|
||||
def __init__(self, schema):
|
||||
|
||||
class Configuration:
|
||||
|
||||
SOURCE_MISSING: ClassVar[types.CONFIG] = {}
|
||||
|
||||
_schema: types.CONFIG_SCHEMA
|
||||
_values: types.MUTABLE_CONFIG
|
||||
_configs: List[Tuple[types.CONFIG, str, bool]]
|
||||
|
||||
def __init__(self, schema: types.CONFIG_SCHEMA) -> None:
|
||||
"""Initialize configuration.
|
||||
|
||||
``schema`` a dict that describes the configuration format.
|
||||
|
@ -307,7 +577,8 @@ class Configuration:
|
|||
for section in self._schema}
|
||||
self.update(default, "default config", privileged=True)
|
||||
|
||||
def update(self, config, source=None, privileged=False):
|
||||
def update(self, config: types.CONFIG, source: Optional[str] = None,
|
||||
privileged: bool = False) -> None:
|
||||
"""Update the configuration.
|
||||
|
||||
``config`` a dict of the format {SECTION: {OPTION: VALUE, ...}, ...}.
|
||||
|
@ -321,8 +592,9 @@ class Configuration:
|
|||
``privileged`` allows updating sections and options starting with "_".
|
||||
|
||||
"""
|
||||
source = source or "unspecified config"
|
||||
new_values = {}
|
||||
if source is None:
|
||||
source = "unspecified config"
|
||||
new_values: types.MUTABLE_CONFIG = {}
|
||||
for section in config:
|
||||
if (section not in self._schema or
|
||||
section.startswith("_") and not privileged):
|
||||
|
@ -361,40 +633,41 @@ class Configuration:
|
|||
self._values[section] = self._values.get(section, {})
|
||||
self._values[section].update(new_values[section])
|
||||
|
||||
def get(self, section, option):
|
||||
def get(self, section: str, option: str) -> Any:
|
||||
"""Get the value of ``option`` in ``section``."""
|
||||
with contextlib.suppress(KeyError):
|
||||
return self._values[section][option]
|
||||
raise KeyError(section, option)
|
||||
|
||||
def get_raw(self, section, option):
|
||||
def get_raw(self, section: str, option: str) -> Any:
|
||||
"""Get the raw value of ``option`` in ``section``."""
|
||||
for config, _, _ in reversed(self._configs):
|
||||
if option in config.get(section, {}):
|
||||
return config[section][option]
|
||||
raise KeyError(section, option)
|
||||
|
||||
def get_source(self, section, option):
|
||||
def get_source(self, section: str, option: str) -> str:
|
||||
"""Get the source that provides ``option`` in ``section``."""
|
||||
for config, source, _ in reversed(self._configs):
|
||||
if option in config.get(section, {}):
|
||||
return source
|
||||
raise KeyError(section, option)
|
||||
|
||||
def sections(self):
|
||||
def sections(self) -> List[str]:
|
||||
"""List all sections."""
|
||||
return self._values.keys()
|
||||
return list(self._values.keys())
|
||||
|
||||
def options(self, section):
|
||||
def options(self, section: str) -> List[str]:
|
||||
"""List all options in ``section``"""
|
||||
return self._values[section].keys()
|
||||
return list(self._values[section].keys())
|
||||
|
||||
def sources(self):
|
||||
def sources(self) -> List[Tuple[str, bool]]:
|
||||
"""List all config sources."""
|
||||
return [(source, config is self.SOURCE_MISSING) for
|
||||
config, source, _ in self._configs]
|
||||
|
||||
def copy(self, plugin_schema=None):
|
||||
def copy(self: _Self, plugin_schema: Optional[types.CONFIG_SCHEMA] = None
|
||||
) -> _Self:
|
||||
"""Create a copy of the configuration
|
||||
|
||||
``plugin_schema`` is a optional dict that contains additional options
|
||||
|
@ -404,20 +677,23 @@ class Configuration:
|
|||
if plugin_schema is None:
|
||||
schema = self._schema
|
||||
else:
|
||||
schema = self._schema.copy()
|
||||
new_schema = dict(self._schema)
|
||||
for section, options in plugin_schema.items():
|
||||
if (section not in schema or "type" not in schema[section] or
|
||||
"internal" not in schema[section]["type"]):
|
||||
if (section not in new_schema or
|
||||
"type" not in new_schema[section] or
|
||||
"internal" not in new_schema[section]["type"]):
|
||||
raise ValueError("not a plugin section: %r" % section)
|
||||
schema[section] = schema[section].copy()
|
||||
schema[section]["type"] = schema[section]["type"].copy()
|
||||
schema[section]["type"]["internal"] = [
|
||||
self.get(section, "type")]
|
||||
new_section = dict(new_schema[section])
|
||||
new_type = dict(new_section["type"])
|
||||
new_type["internal"] = (self.get(section, "type"),)
|
||||
new_section["type"] = new_type
|
||||
for option, value in options.items():
|
||||
if option in schema[section]:
|
||||
raise ValueError("option already exists in %r: %r" % (
|
||||
section, option))
|
||||
schema[section][option] = value
|
||||
if option in new_section:
|
||||
raise ValueError("option already exists in %r: %r" %
|
||||
(section, option))
|
||||
new_section[option] = value
|
||||
new_schema[section] = new_section
|
||||
schema = new_schema
|
||||
copy = type(self)(schema)
|
||||
for config, source, privileged in self._configs:
|
||||
copy.update(config, source, privileged)
|
||||
|
|
69
radicale/hook/__init__.py
Normal file
69
radicale/hook/__init__.py
Normal file
|
@ -0,0 +1,69 @@
|
|||
import json
|
||||
from enum import Enum
|
||||
from typing import Sequence
|
||||
|
||||
from radicale import pathutils, utils
|
||||
from radicale.log import logger
|
||||
|
||||
INTERNAL_TYPES: Sequence[str] = ("none", "rabbitmq")
|
||||
|
||||
|
||||
def load(configuration):
|
||||
"""Load the storage module chosen in configuration."""
|
||||
try:
|
||||
return utils.load_plugin(
|
||||
INTERNAL_TYPES, "hook", "Hook", BaseHook, configuration)
|
||||
except Exception as e:
|
||||
logger.warning(e)
|
||||
logger.warning("Hook \"%s\" failed to load, falling back to \"none\"." % configuration.get("hook", "type"))
|
||||
configuration = configuration.copy()
|
||||
configuration.update({"hook": {"type": "none"}}, "hook", privileged=True)
|
||||
return utils.load_plugin(
|
||||
INTERNAL_TYPES, "hook", "Hook", BaseHook, configuration)
|
||||
|
||||
|
||||
class BaseHook:
|
||||
def __init__(self, configuration):
|
||||
"""Initialize BaseHook.
|
||||
|
||||
``configuration`` see ``radicale.config`` module.
|
||||
The ``configuration`` must not change during the lifetime of
|
||||
this object, it is kept as an internal reference.
|
||||
|
||||
"""
|
||||
self.configuration = configuration
|
||||
|
||||
def notify(self, notification_item):
|
||||
"""Upload a new or replace an existing item."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class HookNotificationItemTypes(Enum):
|
||||
CPATCH = "cpatch"
|
||||
UPSERT = "upsert"
|
||||
DELETE = "delete"
|
||||
|
||||
|
||||
def _cleanup(path):
|
||||
sane_path = pathutils.strip_path(path)
|
||||
attributes = sane_path.split("/") if sane_path else []
|
||||
|
||||
if len(attributes) < 2:
|
||||
return ""
|
||||
return attributes[0] + "/" + attributes[1]
|
||||
|
||||
|
||||
class HookNotificationItem:
|
||||
|
||||
def __init__(self, notification_item_type, path, content):
|
||||
self.type = notification_item_type.value
|
||||
self.point = _cleanup(path)
|
||||
self.content = content
|
||||
|
||||
def to_json(self):
|
||||
return json.dumps(
|
||||
self,
|
||||
default=lambda o: o.__dict__,
|
||||
sort_keys=True,
|
||||
indent=4
|
||||
)
|
6
radicale/hook/none.py
Normal file
6
radicale/hook/none.py
Normal file
|
@ -0,0 +1,6 @@
|
|||
from radicale import hook
|
||||
|
||||
|
||||
class Hook(hook.BaseHook):
|
||||
def notify(self, notification_item):
|
||||
"""Notify nothing. Empty hook."""
|
50
radicale/hook/rabbitmq/__init__.py
Normal file
50
radicale/hook/rabbitmq/__init__.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
import pika
|
||||
from pika.exceptions import ChannelWrongStateError, StreamLostError
|
||||
|
||||
from radicale import hook
|
||||
from radicale.hook import HookNotificationItem
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Hook(hook.BaseHook):
|
||||
|
||||
def __init__(self, configuration):
|
||||
super().__init__(configuration)
|
||||
self._endpoint = configuration.get("hook", "rabbitmq_endpoint")
|
||||
self._topic = configuration.get("hook", "rabbitmq_topic")
|
||||
self._queue_type = configuration.get("hook", "rabbitmq_queue_type")
|
||||
self._encoding = configuration.get("encoding", "stock")
|
||||
|
||||
self._make_connection_synced()
|
||||
self._make_declare_queue_synced()
|
||||
|
||||
def _make_connection_synced(self):
|
||||
parameters = pika.URLParameters(self._endpoint)
|
||||
connection = pika.BlockingConnection(parameters)
|
||||
self._channel = connection.channel()
|
||||
|
||||
def _make_declare_queue_synced(self):
|
||||
self._channel.queue_declare(queue=self._topic, durable=True, arguments={"x-queue-type": self._queue_type})
|
||||
|
||||
def notify(self, notification_item):
|
||||
if isinstance(notification_item, HookNotificationItem):
|
||||
self._notify(notification_item, True)
|
||||
|
||||
def _notify(self, notification_item, recall):
|
||||
try:
|
||||
self._channel.basic_publish(
|
||||
exchange='',
|
||||
routing_key=self._topic,
|
||||
body=notification_item.to_json().encode(
|
||||
encoding=self._encoding
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
if (isinstance(e, ChannelWrongStateError) or
|
||||
isinstance(e, StreamLostError)) and recall:
|
||||
self._make_connection_synced()
|
||||
self._notify(notification_item, False)
|
||||
return
|
||||
logger.error("An exception occurred during "
|
||||
"publishing hook notification item: %s",
|
||||
e, exc_info=True)
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -22,45 +23,220 @@ Helper functions for HTTP.
|
|||
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
import time
|
||||
from http import client
|
||||
from typing import List, Mapping, Union, cast
|
||||
|
||||
NOT_ALLOWED = (
|
||||
from radicale import config, pathutils, types
|
||||
from radicale.log import logger
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
import pkg_resources
|
||||
|
||||
_TRAVERSABLE_LIKE_TYPE = pathlib.Path
|
||||
else:
|
||||
import importlib.abc
|
||||
from importlib import resources
|
||||
|
||||
if sys.version_info < (3, 13):
|
||||
_TRAVERSABLE_LIKE_TYPE = Union[importlib.abc.Traversable, pathlib.Path]
|
||||
else:
|
||||
_TRAVERSABLE_LIKE_TYPE = Union[importlib.resources.abc.Traversable, pathlib.Path]
|
||||
|
||||
NOT_ALLOWED: types.WSGIResponse = (
|
||||
client.FORBIDDEN, (("Content-Type", "text/plain"),),
|
||||
"Access to the requested resource forbidden.")
|
||||
FORBIDDEN = (
|
||||
FORBIDDEN: types.WSGIResponse = (
|
||||
client.FORBIDDEN, (("Content-Type", "text/plain"),),
|
||||
"Action on the requested resource refused.")
|
||||
BAD_REQUEST = (
|
||||
BAD_REQUEST: types.WSGIResponse = (
|
||||
client.BAD_REQUEST, (("Content-Type", "text/plain"),), "Bad Request")
|
||||
NOT_FOUND = (
|
||||
NOT_FOUND: types.WSGIResponse = (
|
||||
client.NOT_FOUND, (("Content-Type", "text/plain"),),
|
||||
"The requested resource could not be found.")
|
||||
CONFLICT = (
|
||||
CONFLICT: types.WSGIResponse = (
|
||||
client.CONFLICT, (("Content-Type", "text/plain"),),
|
||||
"Conflict in the request.")
|
||||
WEBDAV_PRECONDITION_FAILED = (
|
||||
client.CONFLICT, (("Content-Type", "text/plain"),),
|
||||
"WebDAV precondition failed.")
|
||||
METHOD_NOT_ALLOWED = (
|
||||
METHOD_NOT_ALLOWED: types.WSGIResponse = (
|
||||
client.METHOD_NOT_ALLOWED, (("Content-Type", "text/plain"),),
|
||||
"The method is not allowed on the requested resource.")
|
||||
PRECONDITION_FAILED = (
|
||||
PRECONDITION_FAILED: types.WSGIResponse = (
|
||||
client.PRECONDITION_FAILED,
|
||||
(("Content-Type", "text/plain"),), "Precondition failed.")
|
||||
REQUEST_TIMEOUT = (
|
||||
REQUEST_TIMEOUT: types.WSGIResponse = (
|
||||
client.REQUEST_TIMEOUT, (("Content-Type", "text/plain"),),
|
||||
"Connection timed out.")
|
||||
REQUEST_ENTITY_TOO_LARGE = (
|
||||
REQUEST_ENTITY_TOO_LARGE: types.WSGIResponse = (
|
||||
client.REQUEST_ENTITY_TOO_LARGE, (("Content-Type", "text/plain"),),
|
||||
"Request body too large.")
|
||||
REMOTE_DESTINATION = (
|
||||
REMOTE_DESTINATION: types.WSGIResponse = (
|
||||
client.BAD_GATEWAY, (("Content-Type", "text/plain"),),
|
||||
"Remote destination not supported.")
|
||||
DIRECTORY_LISTING = (
|
||||
DIRECTORY_LISTING: types.WSGIResponse = (
|
||||
client.FORBIDDEN, (("Content-Type", "text/plain"),),
|
||||
"Directory listings are not supported.")
|
||||
INTERNAL_SERVER_ERROR = (
|
||||
INSUFFICIENT_STORAGE: types.WSGIResponse = (
|
||||
client.INSUFFICIENT_STORAGE, (("Content-Type", "text/plain"),),
|
||||
"Insufficient Storage. Please contact the administrator.")
|
||||
INTERNAL_SERVER_ERROR: types.WSGIResponse = (
|
||||
client.INTERNAL_SERVER_ERROR, (("Content-Type", "text/plain"),),
|
||||
"A server error occurred. Please contact the administrator.")
|
||||
|
||||
DAV_HEADERS = "1, 2, 3, calendar-access, addressbook, extended-mkcol"
|
||||
DAV_HEADERS: str = "1, 2, 3, calendar-access, addressbook, extended-mkcol"
|
||||
|
||||
MIMETYPES: Mapping[str, str] = {
|
||||
".css": "text/css",
|
||||
".eot": "application/vnd.ms-fontobject",
|
||||
".gif": "image/gif",
|
||||
".html": "text/html",
|
||||
".js": "application/javascript",
|
||||
".manifest": "text/cache-manifest",
|
||||
".png": "image/png",
|
||||
".svg": "image/svg+xml",
|
||||
".ttf": "application/font-sfnt",
|
||||
".txt": "text/plain",
|
||||
".woff": "application/font-woff",
|
||||
".woff2": "font/woff2",
|
||||
".xml": "text/xml"}
|
||||
FALLBACK_MIMETYPE: str = "application/octet-stream"
|
||||
|
||||
|
||||
def decode_request(configuration: "config.Configuration",
|
||||
environ: types.WSGIEnviron, text: bytes) -> str:
|
||||
"""Try to magically decode ``text`` according to given ``environ``."""
|
||||
# List of charsets to try
|
||||
charsets: List[str] = []
|
||||
|
||||
# First append content charset given in the request
|
||||
content_type = environ.get("CONTENT_TYPE")
|
||||
if content_type and "charset=" in content_type:
|
||||
charsets.append(
|
||||
content_type.split("charset=")[1].split(";")[0].strip())
|
||||
# Then append default Radicale charset
|
||||
charsets.append(cast(str, configuration.get("encoding", "request")))
|
||||
# Then append various fallbacks
|
||||
charsets.append("utf-8")
|
||||
charsets.append("iso8859-1")
|
||||
# Remove duplicates
|
||||
for i, s in reversed(list(enumerate(charsets))):
|
||||
if s in charsets[:i]:
|
||||
del charsets[i]
|
||||
|
||||
# Try to decode
|
||||
for charset in charsets:
|
||||
with contextlib.suppress(UnicodeDecodeError):
|
||||
return text.decode(charset)
|
||||
raise UnicodeDecodeError("decode_request", text, 0, len(text),
|
||||
"all codecs failed [%s]" % ", ".join(charsets))
|
||||
|
||||
|
||||
def read_raw_request_body(configuration: "config.Configuration",
|
||||
environ: types.WSGIEnviron) -> bytes:
|
||||
content_length = int(environ.get("CONTENT_LENGTH") or 0)
|
||||
if not content_length:
|
||||
return b""
|
||||
content = environ["wsgi.input"].read(content_length)
|
||||
if len(content) < content_length:
|
||||
raise RuntimeError("Request body too short: %d" % len(content))
|
||||
return content
|
||||
|
||||
|
||||
def read_request_body(configuration: "config.Configuration",
|
||||
environ: types.WSGIEnviron) -> str:
|
||||
content = decode_request(configuration, environ,
|
||||
read_raw_request_body(configuration, environ))
|
||||
if configuration.get("logging", "request_content_on_debug"):
|
||||
logger.debug("Request content:\n%s", content)
|
||||
else:
|
||||
logger.debug("Request content: suppressed by config/option [logging] request_content_on_debug")
|
||||
return content
|
||||
|
||||
|
||||
def redirect(location: str, status: int = client.FOUND) -> types.WSGIResponse:
|
||||
return (status,
|
||||
{"Location": location, "Content-Type": "text/plain"},
|
||||
"Redirected to %s" % location)
|
||||
|
||||
|
||||
def _serve_traversable(
        traversable: _TRAVERSABLE_LIKE_TYPE, base_prefix: str, path: str,
        path_prefix: str, index_file: str, mimetypes: Mapping[str, str],
        fallback_mimetype: str) -> types.WSGIResponse:
    """Serve static web content from ``traversable``.

    ``path`` must start with ``path_prefix``; the remainder is resolved
    inside ``traversable``.  Unsafe path components, missing entries and
    non-files yield ``NOT_FOUND``.  Directory requests without a trailing
    slash are redirected; directories are served via ``index_file`` (when
    set).  The Content-Type is looked up in ``mimetypes`` with
    ``fallback_mimetype`` as default.  A few served InfCloud files are
    patched on the fly (see inline comments).
    """
    if path != path_prefix and not path.startswith(path_prefix):
        raise ValueError("path must start with path_prefix: %r --> %r" %
                         (path_prefix, path))
    assert pathutils.sanitize_path(path) == path
    parts_path = path[len(path_prefix):].strip('/')
    parts = parts_path.split("/") if parts_path else []
    for part in parts:
        # Reject components like "..", "." or ones with path separators
        if not pathutils.is_safe_filesystem_path_component(part):
            logger.debug("Web content with unsafe path %r requested", path)
            return NOT_FOUND
        if (not traversable.is_dir() or
                all(part != entry.name for entry in traversable.iterdir())):
            return NOT_FOUND
        traversable = traversable.joinpath(part)
    if traversable.is_dir():
        if not path.endswith("/"):
            # Canonicalize directory URLs with a trailing slash
            return redirect(base_prefix + path + "/")
        if not index_file:
            return NOT_FOUND
        traversable = traversable.joinpath(index_file)
    if not traversable.is_file():
        return NOT_FOUND
    # Fix: honor the `mimetypes`/`fallback_mimetype` parameters instead of
    # silently using the module-level MIMETYPES/FALLBACK_MIMETYPE constants
    # (callers' defaults are those constants, so default behavior is
    # unchanged while custom mappings now take effect).
    content_type = mimetypes.get(
        os.path.splitext(traversable.name)[1].lower(), fallback_mimetype)
    headers = {"Content-Type": content_type}
    if isinstance(traversable, pathlib.Path):
        # Only real filesystem paths expose a modification time
        headers["Last-Modified"] = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT",
            time.gmtime(traversable.stat().st_mtime))
    answer = traversable.read_bytes()
    if path == "/.web/index.html" or path == "/.web/":
        # enable link on the fly in index.html if InfCloud index.html is existing
        # class="infcloudlink-hidden" -> class="infcloudlink"
        path_posix = str(traversable)
        path_posix_infcloud = path_posix.replace("/internal_data/index.html", "/internal_data/infcloud/index.html")
        if os.path.isfile(path_posix_infcloud):
            # logger.debug("Enable InfCloud link in served page: %r", path)
            answer = answer.replace(b"infcloudlink-hidden", b"infcloud")
    elif path == "/.web/infcloud/config.js":
        # adjust on the fly default config.js of InfCloud installation
        # logger.debug("Adjust on-the-fly default InfCloud config.js in served page: %r", path)
        answer = answer.replace(b"location.pathname.replace(RegExp('/+[^/]+/*(index\\.html)?$'),'')+", b"location.pathname.replace(RegExp('/\\.web\\.infcloud/(index\\.html)?$'),'')+")
        answer = answer.replace(b"'/caldav.php/',", b"'/',")
        answer = answer.replace(b"settingsAccount: true,", b"settingsAccount: false,")
    elif path == "/.web/infcloud/main.js":
        # adjust on the fly default main.js of InfCloud installation
        logger.debug("Adjust on-the-fly default InfCloud main.js in served page: %r", path)
        answer = answer.replace(b"'InfCloud - the open source CalDAV/CardDAV web client'", b"'InfCloud - the open source CalDAV/CardDAV web client - served through Radicale CalDAV/CardDAV server'")
    return client.OK, headers, answer
|
||||
|
||||
|
||||
def serve_resource(
        package: str, resource: str, base_prefix: str, path: str,
        path_prefix: str = "/.web", index_file: str = "index.html",
        mimetypes: Mapping[str, str] = MIMETYPES,
        fallback_mimetype: str = FALLBACK_MIMETYPE) -> types.WSGIResponse:
    """Serve ``resource`` bundled inside the Python package ``package``."""
    if sys.version_info < (3, 9):
        # importlib.resources.files is only available from Python 3.9 on,
        # fall back to the deprecated pkg_resources API
        resource_filename = pkg_resources.resource_filename(package, resource)
        traversable = pathlib.Path(resource_filename)
    else:
        traversable = resources.files(package).joinpath(resource)
    return _serve_traversable(traversable, base_prefix, path, path_prefix,
                              index_file, mimetypes, fallback_mimetype)
|
||||
|
||||
|
||||
def serve_folder(
        folder: str, base_prefix: str, path: str,
        path_prefix: str = "/.web", index_file: str = "index.html",
        mimetypes: Mapping[str, str] = MIMETYPES,
        fallback_mimetype: str = FALLBACK_MIMETYPE) -> types.WSGIResponse:
    """Serve static content from the filesystem directory ``folder``.

    Deprecated: use ``serve_resource`` instead.
    """
    return _serve_traversable(pathlib.Path(folder), base_prefix, path,
                              path_prefix, index_file, mimetypes,
                              fallback_mimetype)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
|
@ -24,30 +24,56 @@ Module for address books and calendar entries (see ``Item``).
|
|||
"""
|
||||
|
||||
import binascii
|
||||
import contextlib
|
||||
import math
|
||||
import os
|
||||
import sys
|
||||
from datetime import timedelta
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
from hashlib import sha256
|
||||
from itertools import chain
|
||||
from typing import (Any, Callable, List, MutableMapping, Optional, Sequence,
|
||||
Tuple)
|
||||
|
||||
import vobject
|
||||
|
||||
from radicale import storage # noqa:F401
|
||||
from radicale import pathutils
|
||||
from radicale.item import filter as radicale_filter
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def predict_tag_of_parent_collection(vobject_items):
|
||||
def read_components(s: str) -> List[vobject.base.Component]:
|
||||
"""Wrapper for vobject.readComponents"""
|
||||
# Workaround for bug in InfCloud
|
||||
# PHOTO is a data URI
|
||||
s = re.sub(r"^(PHOTO(?:;[^:\r\n]*)?;ENCODING=b(?:;[^:\r\n]*)?:)"
|
||||
r"data:[^;,\r\n]*;base64,", r"\1", s,
|
||||
flags=re.MULTILINE | re.IGNORECASE)
|
||||
# Workaround for bug with malformed ICS files containing control codes
|
||||
# Filter out all control codes except those we expect to find:
|
||||
# * 0x09 Horizontal Tab
|
||||
# * 0x0A Line Feed
|
||||
# * 0x0D Carriage Return
|
||||
s = re.sub(r'[\x00-\x08\x0B\x0C\x0E-\x1F]', '', s)
|
||||
return list(vobject.readComponents(s, allowQP=True))
|
||||
|
||||
|
||||
def predict_tag_of_parent_collection(
|
||||
vobject_items: Sequence[vobject.base.Component]) -> Optional[str]:
|
||||
"""Returns the predicted tag or `None`"""
|
||||
if len(vobject_items) != 1:
|
||||
return ""
|
||||
return None
|
||||
if vobject_items[0].name == "VCALENDAR":
|
||||
return "VCALENDAR"
|
||||
if vobject_items[0].name in ("VCARD", "VLIST"):
|
||||
return "VADDRESSBOOK"
|
||||
return ""
|
||||
return None
|
||||
|
||||
|
||||
def predict_tag_of_whole_collection(vobject_items, fallback_tag=None):
|
||||
def predict_tag_of_whole_collection(
|
||||
vobject_items: Sequence[vobject.base.Component],
|
||||
fallback_tag: Optional[str] = None) -> Optional[str]:
|
||||
"""Returns the predicted tag or `fallback_tag`"""
|
||||
if vobject_items and vobject_items[0].name == "VCALENDAR":
|
||||
return "VCALENDAR"
|
||||
if vobject_items and vobject_items[0].name in ("VCARD", "VLIST"):
|
||||
|
@ -58,16 +84,20 @@ def predict_tag_of_whole_collection(vobject_items, fallback_tag=None):
|
|||
return fallback_tag
|
||||
|
||||
|
||||
def check_and_sanitize_items(vobject_items, is_collection=False, tag=None):
|
||||
def check_and_sanitize_items(
|
||||
vobject_items: List[vobject.base.Component],
|
||||
is_collection: bool = False, tag: str = "") -> None:
|
||||
"""Check vobject items for common errors and add missing UIDs.
|
||||
|
||||
Modifies the list `vobject_items`.
|
||||
|
||||
``is_collection`` indicates that vobject_item contains unrelated
|
||||
components.
|
||||
|
||||
The ``tag`` of the collection.
|
||||
|
||||
"""
|
||||
if tag and tag not in ("VCALENDAR", "VADDRESSBOOK"):
|
||||
if tag and tag not in ("VCALENDAR", "VADDRESSBOOK", "VSUBSCRIBED"):
|
||||
raise ValueError("Unsupported collection tag: %r" % tag)
|
||||
if not is_collection and len(vobject_items) != 1:
|
||||
raise ValueError("Item contains %d components" % len(vobject_items))
|
||||
|
@ -130,12 +160,31 @@ def check_and_sanitize_items(vobject_items, is_collection=False, tag=None):
|
|||
logger.debug("Quirks: Removing zero duration from %s in "
|
||||
"object %r", component_name, component_uid)
|
||||
del component.duration
|
||||
# Workaround for Evolution
|
||||
# EXDATE has value DATE even if DTSTART/DTEND is DATE-TIME.
|
||||
# The RFC is vaguely formulated on the issue.
|
||||
# To resolve the issue convert EXDATE and RDATE to
|
||||
# the same type as DTDSTART
|
||||
if hasattr(component, "dtstart"):
|
||||
ref_date = component.dtstart.value
|
||||
ref_value_param = component.dtstart.params.get("VALUE")
|
||||
for dates in chain(component.contents.get("exdate", []),
|
||||
component.contents.get("rdate", [])):
|
||||
if all(type(d) is type(ref_date) for d in dates.value):
|
||||
continue
|
||||
for i, date in enumerate(dates.value):
|
||||
dates.value[i] = ref_date.replace(
|
||||
date.year, date.month, date.day)
|
||||
with contextlib.suppress(KeyError):
|
||||
del dates.params["VALUE"]
|
||||
if ref_value_param is not None:
|
||||
dates.params["VALUE"] = ref_value_param
|
||||
# vobject interprets recurrence rules on demand
|
||||
try:
|
||||
component.rruleset
|
||||
except Exception as e:
|
||||
raise ValueError("invalid recurrence rules in %s" %
|
||||
component.name) from e
|
||||
raise ValueError("Invalid recurrence rules in %s in object %r"
|
||||
% (component.name, component_uid)) from e
|
||||
elif tag == "VADDRESSBOOK":
|
||||
# https://tools.ietf.org/html/rfc6352#section-5.1
|
||||
object_uids = set()
|
||||
|
@ -164,19 +213,36 @@ def check_and_sanitize_items(vobject_items, is_collection=False, tag=None):
|
|||
else:
|
||||
vobject_item.add("UID").value = object_uid
|
||||
else:
|
||||
for i in vobject_items:
|
||||
for item in vobject_items:
|
||||
raise ValueError("Item type %r not supported in %s collection" %
|
||||
(i.name, repr(tag) if tag else "generic"))
|
||||
(item.name, repr(tag) if tag else "generic"))
|
||||
|
||||
|
||||
def check_and_sanitize_props(props):
|
||||
"""Check collection properties for common errors."""
|
||||
tag = props.get("tag")
|
||||
if tag and tag not in ("VCALENDAR", "VADDRESSBOOK"):
|
||||
raise ValueError("Unsupported collection tag: %r" % tag)
|
||||
def check_and_sanitize_props(props: MutableMapping[Any, Any]
|
||||
) -> MutableMapping[str, str]:
|
||||
"""Check collection properties for common errors.
|
||||
|
||||
Modifies the dict `props`.
|
||||
|
||||
"""
|
||||
for k, v in list(props.items()): # Make copy to be able to delete items
|
||||
if not isinstance(k, str):
|
||||
raise ValueError("Key must be %r not %r: %r" % (
|
||||
str.__name__, type(k).__name__, k))
|
||||
if not isinstance(v, str):
|
||||
if v is None:
|
||||
del props[k]
|
||||
continue
|
||||
raise ValueError("Value of %r must be %r not %r: %r" % (
|
||||
k, str.__name__, type(v).__name__, v))
|
||||
if k == "tag":
|
||||
if v not in ("", "VCALENDAR", "VADDRESSBOOK", "VSUBSCRIBED"):
|
||||
raise ValueError("Unsupported collection tag: %r" % v)
|
||||
return props
|
||||
|
||||
|
||||
def find_available_uid(exists_fn, suffix=""):
|
||||
def find_available_uid(exists_fn: Callable[[str], bool], suffix: str = ""
|
||||
) -> str:
|
||||
"""Generate a pseudo-random UID"""
|
||||
# Prevent infinite loop
|
||||
for _ in range(1000):
|
||||
|
@ -185,11 +251,11 @@ def find_available_uid(exists_fn, suffix=""):
|
|||
r[:8], r[8:12], r[12:16], r[16:20], r[20:], suffix)
|
||||
if not exists_fn(name):
|
||||
return name
|
||||
# something is wrong with the PRNG
|
||||
raise RuntimeError("No unique random sequence found")
|
||||
# Something is wrong with the PRNG or `exists_fn`
|
||||
raise RuntimeError("No available random UID found")
|
||||
|
||||
|
||||
def get_etag(text):
|
||||
def get_etag(text: str) -> str:
|
||||
"""Etag from collection or item.
|
||||
|
||||
Encoded as quoted-string (see RFC 2616).
|
||||
|
@ -200,13 +266,13 @@ def get_etag(text):
|
|||
return '"%s"' % etag.hexdigest()
|
||||
|
||||
|
||||
def get_uid(vobject_component):
|
||||
def get_uid(vobject_component: vobject.base.Component) -> str:
|
||||
"""UID value of an item if defined."""
|
||||
return (vobject_component.uid.value
|
||||
if hasattr(vobject_component, "uid") else None)
|
||||
return (vobject_component.uid.value or ""
|
||||
if hasattr(vobject_component, "uid") else "")
|
||||
|
||||
|
||||
def get_uid_from_object(vobject_item):
|
||||
def get_uid_from_object(vobject_item: vobject.base.Component) -> str:
|
||||
"""UID value of an calendar/addressbook object."""
|
||||
if vobject_item.name == "VCALENDAR":
|
||||
if hasattr(vobject_item, "vevent"):
|
||||
|
@ -217,10 +283,10 @@ def get_uid_from_object(vobject_item):
|
|||
return get_uid(vobject_item.vtodo)
|
||||
elif vobject_item.name == "VCARD":
|
||||
return get_uid(vobject_item)
|
||||
return None
|
||||
return ""
|
||||
|
||||
|
||||
def find_tag(vobject_item):
|
||||
def find_tag(vobject_item: vobject.base.Component) -> str:
|
||||
"""Find component name from ``vobject_item``."""
|
||||
if vobject_item.name == "VCALENDAR":
|
||||
for component in vobject_item.components():
|
||||
|
@ -229,22 +295,24 @@ def find_tag(vobject_item):
|
|||
return ""
|
||||
|
||||
|
||||
def find_tag_and_time_range(vobject_item):
|
||||
"""Find component name and enclosing time range from ``vobject item``.
|
||||
def find_time_range(vobject_item: vobject.base.Component, tag: str
|
||||
) -> Tuple[int, int]:
|
||||
"""Find enclosing time range from ``vobject item``.
|
||||
|
||||
Returns a tuple (``tag``, ``start``, ``end``) where ``tag`` is a string
|
||||
and ``start`` and ``end`` are POSIX timestamps (as int).
|
||||
``tag`` must be set to the return value of ``find_tag``.
|
||||
|
||||
This is intened to be used for matching against simplified prefilters.
|
||||
Returns a tuple (``start``, ``end``) where ``start`` and ``end`` are
|
||||
POSIX timestamps.
|
||||
|
||||
This is intended to be used for matching against simplified prefilters.
|
||||
|
||||
"""
|
||||
tag = find_tag(vobject_item)
|
||||
if not tag:
|
||||
return (
|
||||
tag, radicale_filter.TIMESTAMP_MIN, radicale_filter.TIMESTAMP_MAX)
|
||||
return radicale_filter.TIMESTAMP_MIN, radicale_filter.TIMESTAMP_MAX
|
||||
start = end = None
|
||||
|
||||
def range_fn(range_start, range_end, is_recurrence):
|
||||
def range_fn(range_start: datetime, range_end: datetime,
|
||||
is_recurrence: bool) -> bool:
|
||||
nonlocal start, end
|
||||
if start is None or range_start < start:
|
||||
start = range_start
|
||||
|
@ -252,7 +320,7 @@ def find_tag_and_time_range(vobject_item):
|
|||
end = range_end
|
||||
return False
|
||||
|
||||
def infinity_fn(range_start):
|
||||
def infinity_fn(range_start: datetime) -> bool:
|
||||
nonlocal start, end
|
||||
if start is None or range_start < start:
|
||||
start = range_start
|
||||
|
@ -264,22 +332,37 @@ def find_tag_and_time_range(vobject_item):
|
|||
start = radicale_filter.DATETIME_MIN
|
||||
if end is None:
|
||||
end = radicale_filter.DATETIME_MAX
|
||||
try:
|
||||
return tag, math.floor(start.timestamp()), math.ceil(end.timestamp())
|
||||
except ValueError as e:
|
||||
if str(e) == ("offset must be a timedelta representing a whole "
|
||||
"number of minutes") and sys.version_info < (3, 6):
|
||||
raise RuntimeError("Unsupported in Python < 3.6: %s" % e) from e
|
||||
raise
|
||||
return math.floor(start.timestamp()), math.ceil(end.timestamp())
|
||||
|
||||
|
||||
class Item:
|
||||
"""Class for address book and calendar entries."""
|
||||
|
||||
def __init__(self, collection_path=None, collection=None,
|
||||
vobject_item=None, href=None, last_modified=None, text=None,
|
||||
etag=None, uid=None, name=None, component_name=None,
|
||||
time_range=None):
|
||||
collection: Optional["storage.BaseCollection"]
|
||||
href: Optional[str]
|
||||
last_modified: Optional[str]
|
||||
|
||||
_collection_path: str
|
||||
_text: Optional[str]
|
||||
_vobject_item: Optional[vobject.base.Component]
|
||||
_etag: Optional[str]
|
||||
_uid: Optional[str]
|
||||
_name: Optional[str]
|
||||
_component_name: Optional[str]
|
||||
_time_range: Optional[Tuple[int, int]]
|
||||
|
||||
def __init__(self,
|
||||
collection_path: Optional[str] = None,
|
||||
collection: Optional["storage.BaseCollection"] = None,
|
||||
vobject_item: Optional[vobject.base.Component] = None,
|
||||
href: Optional[str] = None,
|
||||
last_modified: Optional[str] = None,
|
||||
text: Optional[str] = None,
|
||||
etag: Optional[str] = None,
|
||||
uid: Optional[str] = None,
|
||||
name: Optional[str] = None,
|
||||
component_name: Optional[str] = None,
|
||||
time_range: Optional[Tuple[int, int]] = None):
|
||||
"""Initialize an item.
|
||||
|
||||
``collection_path`` the path of the parent collection (optional if
|
||||
|
@ -305,16 +388,15 @@ class Item:
|
|||
``component_name`` the name of the primary component (optional).
|
||||
See ``find_tag``.
|
||||
|
||||
``time_range`` the enclosing time range.
|
||||
See ``find_tag_and_time_range``.
|
||||
``time_range`` the enclosing time range. See ``find_time_range``.
|
||||
|
||||
"""
|
||||
if text is None and vobject_item is None:
|
||||
raise ValueError(
|
||||
"at least one of 'text' or 'vobject_item' must be set")
|
||||
"At least one of 'text' or 'vobject_item' must be set")
|
||||
if collection_path is None:
|
||||
if collection is None:
|
||||
raise ValueError("at least one of 'collection_path' or "
|
||||
raise ValueError("At least one of 'collection_path' or "
|
||||
"'collection' must be set")
|
||||
collection_path = collection.path
|
||||
assert collection_path == pathutils.strip_path(
|
||||
|
@ -331,7 +413,7 @@ class Item:
|
|||
self._component_name = component_name
|
||||
self._time_range = time_range
|
||||
|
||||
def serialize(self):
|
||||
def serialize(self) -> str:
|
||||
if self._text is None:
|
||||
try:
|
||||
self._text = self.vobject_item.serialize()
|
||||
|
@ -353,38 +435,38 @@ class Item:
|
|||
return self._vobject_item
|
||||
|
||||
@property
|
||||
def etag(self):
|
||||
def etag(self) -> str:
|
||||
"""Encoded as quoted-string (see RFC 2616)."""
|
||||
if self._etag is None:
|
||||
self._etag = get_etag(self.serialize())
|
||||
return self._etag
|
||||
|
||||
@property
|
||||
def uid(self):
|
||||
def uid(self) -> str:
|
||||
if self._uid is None:
|
||||
self._uid = get_uid_from_object(self.vobject_item)
|
||||
return self._uid
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
def name(self) -> str:
|
||||
if self._name is None:
|
||||
self._name = self.vobject_item.name or ""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def component_name(self):
|
||||
if self._component_name is not None:
|
||||
return self._component_name
|
||||
return find_tag(self.vobject_item)
|
||||
def component_name(self) -> str:
|
||||
if self._component_name is None:
|
||||
self._component_name = find_tag(self.vobject_item)
|
||||
return self._component_name
|
||||
|
||||
@property
|
||||
def time_range(self):
|
||||
def time_range(self) -> Tuple[int, int]:
|
||||
if self._time_range is None:
|
||||
self._component_name, *self._time_range = (
|
||||
find_tag_and_time_range(self.vobject_item))
|
||||
self._time_range = find_time_range(
|
||||
self.vobject_item, self.component_name)
|
||||
return self._time_range
|
||||
|
||||
def prepare(self):
|
||||
def prepare(self) -> None:
|
||||
"""Fill cache with values."""
|
||||
orig_vobject_item = self._vobject_item
|
||||
self.serialize()
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2015 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -19,35 +20,64 @@
|
|||
|
||||
|
||||
import math
|
||||
import xml.etree.ElementTree as ET
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
from itertools import chain
|
||||
from typing import (Callable, Iterable, Iterator, List, Optional, Sequence,
|
||||
Tuple)
|
||||
|
||||
from radicale import xmlutils
|
||||
import vobject
|
||||
|
||||
from radicale import item, xmlutils
|
||||
from radicale.log import logger
|
||||
|
||||
DAY = timedelta(days=1)
|
||||
SECOND = timedelta(seconds=1)
|
||||
DATETIME_MIN = datetime.min.replace(tzinfo=timezone.utc)
|
||||
DATETIME_MAX = datetime.max.replace(tzinfo=timezone.utc)
|
||||
TIMESTAMP_MIN = math.floor(DATETIME_MIN.timestamp())
|
||||
TIMESTAMP_MAX = math.ceil(DATETIME_MAX.timestamp())
|
||||
DAY: timedelta = timedelta(days=1)
|
||||
SECOND: timedelta = timedelta(seconds=1)
|
||||
DATETIME_MIN: datetime = datetime.min.replace(tzinfo=timezone.utc)
|
||||
DATETIME_MAX: datetime = datetime.max.replace(tzinfo=timezone.utc)
|
||||
TIMESTAMP_MIN: int = math.floor(DATETIME_MIN.timestamp())
|
||||
TIMESTAMP_MAX: int = math.ceil(DATETIME_MAX.timestamp())
|
||||
|
||||
|
||||
def date_to_datetime(date_):
|
||||
"""Transform a date to a UTC datetime.
|
||||
def date_to_datetime(d: date) -> datetime:
|
||||
"""Transform any date to a UTC datetime.
|
||||
|
||||
If date_ is a datetime without timezone, return as UTC datetime. If date_
|
||||
If ``d`` is a datetime without timezone, return as UTC datetime. If ``d``
|
||||
is already a datetime with timezone, return as is.
|
||||
|
||||
"""
|
||||
if not isinstance(date_, datetime):
|
||||
date_ = datetime.combine(date_, datetime.min.time())
|
||||
if not date_.tzinfo:
|
||||
date_ = date_.replace(tzinfo=timezone.utc)
|
||||
return date_
|
||||
if not isinstance(d, datetime):
|
||||
d = datetime.combine(d, datetime.min.time())
|
||||
if not d.tzinfo:
|
||||
# NOTE: using vobject's UTC as it wasn't playing well with datetime's.
|
||||
d = d.replace(tzinfo=vobject.icalendar.utc)
|
||||
return d
|
||||
|
||||
|
||||
def comp_match(item, filter_, level=0):
|
||||
def parse_time_range(time_filter: ET.Element) -> Tuple[datetime, datetime]:
|
||||
start_text = time_filter.get("start")
|
||||
end_text = time_filter.get("end")
|
||||
if start_text:
|
||||
start = datetime.strptime(
|
||||
start_text, "%Y%m%dT%H%M%SZ").replace(
|
||||
tzinfo=timezone.utc)
|
||||
else:
|
||||
start = DATETIME_MIN
|
||||
if end_text:
|
||||
end = datetime.strptime(
|
||||
end_text, "%Y%m%dT%H%M%SZ").replace(
|
||||
tzinfo=timezone.utc)
|
||||
else:
|
||||
end = DATETIME_MAX
|
||||
return start, end
|
||||
|
||||
|
||||
def time_range_timestamps(time_filter: ET.Element) -> Tuple[int, int]:
|
||||
start, end = parse_time_range(time_filter)
|
||||
return (math.floor(start.timestamp()), math.ceil(end.timestamp()))
|
||||
|
||||
|
||||
def comp_match(item: "item.Item", filter_: ET.Element, level: int = 0) -> bool:
|
||||
"""Check whether the ``item`` matches the comp ``filter_``.
|
||||
|
||||
If ``level`` is ``0``, the filter is applied on the
|
||||
|
@ -70,7 +100,7 @@ def comp_match(item, filter_, level=0):
|
|||
return True
|
||||
if not tag:
|
||||
return False
|
||||
name = filter_.get("name").upper()
|
||||
name = filter_.get("name", "").upper()
|
||||
if len(filter_) == 0:
|
||||
# Point #1 of rfc4791-9.7.1
|
||||
return name == tag
|
||||
|
@ -104,18 +134,19 @@ def comp_match(item, filter_, level=0):
|
|||
return True
|
||||
|
||||
|
||||
def prop_match(vobject_item, filter_, ns):
|
||||
def prop_match(vobject_item: vobject.base.Component,
|
||||
filter_: ET.Element, ns: str) -> bool:
|
||||
"""Check whether the ``item`` matches the prop ``filter_``.
|
||||
|
||||
See rfc4791-9.7.2 and rfc6352-10.5.1.
|
||||
|
||||
"""
|
||||
name = filter_.get("name").lower()
|
||||
name = filter_.get("name", "").lower()
|
||||
if len(filter_) == 0:
|
||||
# Point #1 of rfc4791-9.7.2
|
||||
return name in vobject_item.contents
|
||||
if len(filter_) == 1:
|
||||
if filter_[0].tag == xmlutils.make_clark("C:is-not-defined"):
|
||||
if filter_[0].tag == xmlutils.make_clark("%s:is-not-defined" % ns):
|
||||
# Point #2 of rfc4791-9.7.2
|
||||
return name not in vobject_item.contents
|
||||
if name not in vobject_item.contents:
|
||||
|
@ -136,28 +167,19 @@ def prop_match(vobject_item, filter_, ns):
|
|||
return True
|
||||
|
||||
|
||||
def time_range_match(vobject_item, filter_, child_name):
|
||||
def time_range_match(vobject_item: vobject.base.Component,
|
||||
filter_: ET.Element, child_name: str) -> bool:
|
||||
"""Check whether the component/property ``child_name`` of
|
||||
``vobject_item`` matches the time-range ``filter_``."""
|
||||
|
||||
start = filter_.get("start")
|
||||
end = filter_.get("end")
|
||||
if not start and not end:
|
||||
if not filter_.get("start") and not filter_.get("end"):
|
||||
return False
|
||||
if start:
|
||||
start = datetime.strptime(start, "%Y%m%dT%H%M%SZ")
|
||||
else:
|
||||
start = datetime.min
|
||||
if end:
|
||||
end = datetime.strptime(end, "%Y%m%dT%H%M%SZ")
|
||||
else:
|
||||
end = datetime.max
|
||||
start = start.replace(tzinfo=timezone.utc)
|
||||
end = end.replace(tzinfo=timezone.utc)
|
||||
|
||||
start, end = parse_time_range(filter_)
|
||||
matched = False
|
||||
|
||||
def range_fn(range_start, range_end, is_recurrence):
|
||||
def range_fn(range_start: datetime, range_end: datetime,
|
||||
is_recurrence: bool) -> bool:
|
||||
nonlocal matched
|
||||
if start < range_end and range_start < end:
|
||||
matched = True
|
||||
|
@ -166,14 +188,45 @@ def time_range_match(vobject_item, filter_, child_name):
|
|||
return True
|
||||
return False
|
||||
|
||||
def infinity_fn(start):
|
||||
def infinity_fn(start: datetime) -> bool:
|
||||
return False
|
||||
|
||||
visit_time_ranges(vobject_item, child_name, range_fn, infinity_fn)
|
||||
return matched
|
||||
|
||||
|
||||
def visit_time_ranges(vobject_item, child_name, range_fn, infinity_fn):
|
||||
def time_range_fill(vobject_item: vobject.base.Component,
|
||||
filter_: ET.Element, child_name: str, n: int = 1
|
||||
) -> List[Tuple[datetime, datetime]]:
|
||||
"""Create a list of ``n`` occurances from the component/property ``child_name``
|
||||
of ``vobject_item``."""
|
||||
if not filter_.get("start") and not filter_.get("end"):
|
||||
return []
|
||||
|
||||
start, end = parse_time_range(filter_)
|
||||
ranges: List[Tuple[datetime, datetime]] = []
|
||||
|
||||
def range_fn(range_start: datetime, range_end: datetime,
|
||||
is_recurrence: bool) -> bool:
|
||||
nonlocal ranges
|
||||
if start < range_end and range_start < end:
|
||||
ranges.append((range_start, range_end))
|
||||
if n > 0 and len(ranges) >= n:
|
||||
return True
|
||||
if end < range_start and not is_recurrence:
|
||||
return True
|
||||
return False
|
||||
|
||||
def infinity_fn(range_start: datetime) -> bool:
|
||||
return False
|
||||
|
||||
visit_time_ranges(vobject_item, child_name, range_fn, infinity_fn)
|
||||
return ranges
|
||||
|
||||
|
||||
def visit_time_ranges(vobject_item: vobject.base.Component, child_name: str,
|
||||
range_fn: Callable[[datetime, datetime, bool], bool],
|
||||
infinity_fn: Callable[[datetime], bool]) -> None:
|
||||
"""Visit all time ranges in the component/property ``child_name`` of
|
||||
`vobject_item`` with visitors ``range_fn`` and ``infinity_fn``.
|
||||
|
||||
|
@ -181,7 +234,7 @@ def visit_time_ranges(vobject_item, child_name, range_fn, infinity_fn):
|
|||
datetimes and ``is_recurrence`` as arguments. If the function returns True,
|
||||
the operation is cancelled.
|
||||
|
||||
``infinity_fn`` gets called when an infiite recurrence rule is detected
|
||||
``infinity_fn`` gets called when an infinite recurrence rule is detected
|
||||
with ``start`` datetime as argument. If the function returns True, the
|
||||
operation is cancelled.
|
||||
|
||||
|
@ -189,15 +242,20 @@ def visit_time_ranges(vobject_item, child_name, range_fn, infinity_fn):
|
|||
|
||||
"""
|
||||
|
||||
# HACK: According to rfc5545-3.8.4.4 an recurrance that is resheduled
|
||||
# HACK: According to rfc5545-3.8.4.4 a recurrence that is rescheduled
|
||||
# with Recurrence ID affects the recurrence itself and all following
|
||||
# recurrences too. This is not respected and client don't seem to bother
|
||||
# either.
|
||||
|
||||
def getrruleset(child, ignore=()):
|
||||
if (hasattr(child, "rrule") and
|
||||
";UNTIL=" not in child.rrule.value.upper() and
|
||||
";COUNT=" not in child.rrule.value.upper()):
|
||||
def getrruleset(child: vobject.base.Component, ignore: Sequence[date]
|
||||
) -> Tuple[Iterable[date], bool]:
|
||||
infinite = False
|
||||
for rrule in child.contents.get("rrule", []):
|
||||
if (";UNTIL=" not in rrule.value.upper() and
|
||||
";COUNT=" not in rrule.value.upper()):
|
||||
infinite = True
|
||||
break
|
||||
if infinite:
|
||||
for dtstart in child.getrruleset(addRDate=True):
|
||||
if dtstart in ignore:
|
||||
continue
|
||||
|
@ -207,20 +265,28 @@ def visit_time_ranges(vobject_item, child_name, range_fn, infinity_fn):
|
|||
return filter(lambda dtstart: dtstart not in ignore,
|
||||
child.getrruleset(addRDate=True)), False
|
||||
|
||||
def get_children(components):
|
||||
def get_children(components: Iterable[vobject.base.Component]) -> Iterator[
|
||||
Tuple[vobject.base.Component, bool, List[date]]]:
|
||||
main = None
|
||||
rec_main = None
|
||||
recurrences = []
|
||||
for comp in components:
|
||||
if hasattr(comp, "recurrence_id") and comp.recurrence_id.value:
|
||||
recurrences.append(comp.recurrence_id.value)
|
||||
if comp.rruleset:
|
||||
# Prevent possible infinite loop
|
||||
raise ValueError("Overwritten recurrence with RRULESET")
|
||||
yield comp, True, ()
|
||||
if comp.rruleset._len is None:
|
||||
logger.warning("Ignore empty RRULESET in item at RECURRENCE-ID with value '%s' and UID '%s'", comp.recurrence_id.value, comp.uid.value)
|
||||
else:
|
||||
# Prevent possible infinite loop
|
||||
raise ValueError("Overwritten recurrence with RRULESET")
|
||||
rec_main = comp
|
||||
yield comp, True, []
|
||||
else:
|
||||
if main is not None:
|
||||
raise ValueError("Multiple main components")
|
||||
raise ValueError("Multiple main components. Got comp: {}".format(comp))
|
||||
main = comp
|
||||
if main is None and len(recurrences) == 1:
|
||||
main = rec_main
|
||||
if main is None:
|
||||
raise ValueError("Main component missing")
|
||||
yield main, False, recurrences
|
||||
|
@ -410,12 +476,17 @@ def visit_time_ranges(vobject_item, child_name, range_fn, infinity_fn):
|
|||
# Match a property
|
||||
child = getattr(vobject_item, child_name.lower())
|
||||
if isinstance(child, date):
|
||||
range_fn(child, child + DAY, False)
|
||||
elif isinstance(child, datetime):
|
||||
range_fn(child, child + SECOND, False)
|
||||
child_is_datetime = isinstance(child, datetime)
|
||||
child = date_to_datetime(child)
|
||||
if child_is_datetime:
|
||||
range_fn(child, child + SECOND, False)
|
||||
else:
|
||||
range_fn(child, child + DAY, False)
|
||||
|
||||
|
||||
def text_match(vobject_item, filter_, child_name, ns, attrib_name=None):
|
||||
def text_match(vobject_item: vobject.base.Component,
|
||||
filter_: ET.Element, child_name: str, ns: str,
|
||||
attrib_name: Optional[str] = None) -> bool:
|
||||
"""Check whether the ``item`` matches the text-match ``filter_``.
|
||||
|
||||
See rfc4791-9.7.5.
|
||||
|
@ -429,7 +500,7 @@ def text_match(vobject_item, filter_, child_name, ns, attrib_name=None):
|
|||
if ns == "CR":
|
||||
match_type = filter_.get("match-type", match_type)
|
||||
|
||||
def match(value):
|
||||
def match(value: str) -> bool:
|
||||
value = value.lower()
|
||||
if match_type == "equals":
|
||||
return value == text
|
||||
|
@ -442,24 +513,33 @@ def text_match(vobject_item, filter_, child_name, ns, attrib_name=None):
|
|||
raise ValueError("Unexpected text-match match-type: %r" % match_type)
|
||||
|
||||
children = getattr(vobject_item, "%s_list" % child_name, [])
|
||||
if attrib_name:
|
||||
if attrib_name is not None:
|
||||
condition = any(
|
||||
match(attrib) for child in children
|
||||
for attrib in child.params.get(attrib_name, []))
|
||||
else:
|
||||
condition = any(match(child.value) for child in children)
|
||||
res = []
|
||||
for child in children:
|
||||
# Some filters such as CATEGORIES provide a list in child.value
|
||||
if type(child.value) is list:
|
||||
for value in child.value:
|
||||
res.append(match(value))
|
||||
else:
|
||||
res.append(match(child.value))
|
||||
condition = any(res)
|
||||
if filter_.get("negate-condition") == "yes":
|
||||
return not condition
|
||||
return condition
|
||||
|
||||
|
||||
def param_filter_match(vobject_item, filter_, parent_name, ns):
|
||||
def param_filter_match(vobject_item: vobject.base.Component,
|
||||
filter_: ET.Element, parent_name: str, ns: str) -> bool:
|
||||
"""Check whether the ``item`` matches the param-filter ``filter_``.
|
||||
|
||||
See rfc4791-9.7.3.
|
||||
|
||||
"""
|
||||
name = filter_.get("name").upper()
|
||||
name = filter_.get("name", "").upper()
|
||||
children = getattr(vobject_item, "%s_list" % parent_name, [])
|
||||
condition = any(name in child.params for child in children)
|
||||
if len(filter_) > 0:
|
||||
|
@ -471,7 +551,8 @@ def param_filter_match(vobject_item, filter_, parent_name, ns):
|
|||
return condition
|
||||
|
||||
|
||||
def simplify_prefilters(filters, collection_tag="VCALENDAR"):
|
||||
def simplify_prefilters(filters: Iterable[ET.Element], collection_tag: str
|
||||
) -> Tuple[Optional[str], int, int, bool]:
|
||||
"""Creates a simplified condition from ``filters``.
|
||||
|
||||
Returns a tuple (``tag``, ``start``, ``end``, ``simple``) where ``tag`` is
|
||||
|
@ -480,14 +561,14 @@ def simplify_prefilters(filters, collection_tag="VCALENDAR"):
|
|||
and the simplified condition are identical.
|
||||
|
||||
"""
|
||||
flat_filters = tuple(chain.from_iterable(filters))
|
||||
flat_filters = list(chain.from_iterable(filters))
|
||||
simple = len(flat_filters) <= 1
|
||||
for col_filter in flat_filters:
|
||||
if collection_tag != "VCALENDAR":
|
||||
simple = False
|
||||
break
|
||||
if (col_filter.tag != xmlutils.make_clark("C:comp-filter") or
|
||||
col_filter.get("name").upper() != "VCALENDAR"):
|
||||
col_filter.get("name", "").upper() != "VCALENDAR"):
|
||||
simple = False
|
||||
continue
|
||||
simple &= len(col_filter) <= 1
|
||||
|
@ -495,7 +576,7 @@ def simplify_prefilters(filters, collection_tag="VCALENDAR"):
|
|||
if comp_filter.tag != xmlutils.make_clark("C:comp-filter"):
|
||||
simple = False
|
||||
continue
|
||||
tag = comp_filter.get("name").upper()
|
||||
tag = comp_filter.get("name", "").upper()
|
||||
if comp_filter.find(
|
||||
xmlutils.make_clark("C:is-not-defined")) is not None:
|
||||
simple = False
|
||||
|
@ -508,20 +589,7 @@ def simplify_prefilters(filters, collection_tag="VCALENDAR"):
|
|||
if time_filter.tag != xmlutils.make_clark("C:time-range"):
|
||||
simple = False
|
||||
continue
|
||||
start = time_filter.get("start")
|
||||
end = time_filter.get("end")
|
||||
if start:
|
||||
start = math.floor(datetime.strptime(
|
||||
start, "%Y%m%dT%H%M%SZ").replace(
|
||||
tzinfo=timezone.utc).timestamp())
|
||||
else:
|
||||
start = TIMESTAMP_MIN
|
||||
if end:
|
||||
end = math.ceil(datetime.strptime(
|
||||
end, "%Y%m%dT%H%M%SZ").replace(
|
||||
tzinfo=timezone.utc).timestamp())
|
||||
else:
|
||||
end = TIMESTAMP_MAX
|
||||
start, end = time_range_timestamps(time_filter)
|
||||
return tag, start, end, simple
|
||||
return tag, TIMESTAMP_MIN, TIMESTAMP_MAX, simple
|
||||
return None, TIMESTAMP_MIN, TIMESTAMP_MAX, simple
|
||||
|
|
204
radicale/log.py
204
radicale/log.py
|
@ -1,6 +1,7 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2011-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2023 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -26,41 +27,59 @@ Log messages are sent to the first available target of:
|
|||
"""
|
||||
|
||||
import contextlib
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import struct
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from typing import (Any, Callable, ClassVar, Dict, Iterator, Mapping, Optional,
|
||||
Tuple, Union, cast)
|
||||
|
||||
LOGGER_NAME = "radicale"
|
||||
LOGGER_FORMAT = "[%(asctime)s] [%(ident)s] [%(levelname)s] %(message)s"
|
||||
DATE_FORMAT = "%Y-%m-%d %H:%M:%S %z"
|
||||
from radicale import types
|
||||
|
||||
logger = logging.getLogger(LOGGER_NAME)
|
||||
LOGGER_NAME: str = "radicale"
|
||||
LOGGER_FORMATS: Mapping[str, str] = {
|
||||
"verbose": "[%(asctime)s] [%(ident)s] [%(levelname)s] %(message)s",
|
||||
"journal": "[%(ident)s] [%(levelname)s] %(message)s",
|
||||
}
|
||||
DATE_FORMAT: str = "%Y-%m-%d %H:%M:%S %z"
|
||||
|
||||
logger: logging.Logger = logging.getLogger(LOGGER_NAME)
|
||||
|
||||
|
||||
class RemoveTracebackFilter(logging.Filter):
|
||||
def filter(self, record):
|
||||
|
||||
def filter(self, record: logging.LogRecord) -> bool:
|
||||
record.exc_info = None
|
||||
return True
|
||||
|
||||
|
||||
REMOVE_TRACEBACK_FILTER = RemoveTracebackFilter()
|
||||
REMOVE_TRACEBACK_FILTER: logging.Filter = RemoveTracebackFilter()
|
||||
|
||||
|
||||
class IdentLogRecordFactory:
|
||||
"""LogRecordFactory that adds ``ident`` attribute."""
|
||||
|
||||
def __init__(self, upstream_factory):
|
||||
self.upstream_factory = upstream_factory
|
||||
def __init__(self, upstream_factory: Callable[..., logging.LogRecord]
|
||||
) -> None:
|
||||
self._upstream_factory = upstream_factory
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
record = self.upstream_factory(*args, **kwargs)
|
||||
ident = "%d" % os.getpid()
|
||||
main_thread = threading.main_thread()
|
||||
current_thread = threading.current_thread()
|
||||
if current_thread.name and main_thread != current_thread:
|
||||
ident += "/%s" % current_thread.name
|
||||
record.ident = ident
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> logging.LogRecord:
|
||||
record = self._upstream_factory(*args, **kwargs)
|
||||
ident = ("%d" % record.process if record.process is not None
|
||||
else record.processName or "unknown")
|
||||
tid = None
|
||||
if record.thread is not None:
|
||||
if record.thread != threading.main_thread().ident:
|
||||
ident += "/%s" % (record.threadName or "unknown")
|
||||
if (sys.version_info >= (3, 8) and
|
||||
record.thread == threading.get_ident()):
|
||||
tid = threading.get_native_id()
|
||||
record.ident = ident # type:ignore[attr-defined]
|
||||
record.tid = tid # type:ignore[attr-defined]
|
||||
return record
|
||||
|
||||
|
||||
|
@ -68,25 +87,110 @@ class ThreadedStreamHandler(logging.Handler):
|
|||
"""Sends logging output to the stream registered for the current thread or
|
||||
``sys.stderr`` when no stream was registered."""
|
||||
|
||||
terminator = "\n"
|
||||
terminator: ClassVar[str] = "\n"
|
||||
|
||||
def __init__(self):
|
||||
_streams: Dict[int, types.ErrorStream]
|
||||
_journal_stream_id: Optional[Tuple[int, int]]
|
||||
_journal_socket: Optional[socket.socket]
|
||||
_journal_socket_failed: bool
|
||||
_formatters: Mapping[str, logging.Formatter]
|
||||
_formatter: Optional[logging.Formatter]
|
||||
|
||||
def __init__(self, format_name: Optional[str] = None) -> None:
|
||||
super().__init__()
|
||||
self._streams = {}
|
||||
self._journal_stream_id = None
|
||||
with contextlib.suppress(TypeError, ValueError):
|
||||
dev, inode = os.environ.get("JOURNAL_STREAM", "").split(":", 1)
|
||||
self._journal_stream_id = (int(dev), int(inode))
|
||||
self._journal_socket = None
|
||||
self._journal_socket_failed = False
|
||||
self._formatters = {name: logging.Formatter(fmt, DATE_FORMAT)
|
||||
for name, fmt in LOGGER_FORMATS.items()}
|
||||
self._formatter = (self._formatters[format_name]
|
||||
if format_name is not None else None)
|
||||
|
||||
def emit(self, record):
|
||||
def _get_formatter(self, default_format_name: str) -> logging.Formatter:
|
||||
return self._formatter or self._formatters[default_format_name]
|
||||
|
||||
def _detect_journal(self, stream: types.ErrorStream) -> bool:
|
||||
if not self._journal_stream_id or not isinstance(stream, io.IOBase):
|
||||
return False
|
||||
try:
|
||||
stat = os.fstat(stream.fileno())
|
||||
except OSError:
|
||||
return False
|
||||
return self._journal_stream_id == (stat.st_dev, stat.st_ino)
|
||||
|
||||
@staticmethod
|
||||
def _encode_journal(data: Mapping[str, Optional[Union[str, int]]]
|
||||
) -> bytes:
|
||||
msg = b""
|
||||
for key, value in data.items():
|
||||
if value is None:
|
||||
continue
|
||||
keyb = key.encode()
|
||||
valueb = str(value).encode()
|
||||
if b"\n" in valueb:
|
||||
msg += (keyb + b"\n" +
|
||||
struct.pack("<Q", len(valueb)) + valueb + b"\n")
|
||||
else:
|
||||
msg += keyb + b"=" + valueb + b"\n"
|
||||
return msg
|
||||
|
||||
def _try_emit_journal(self, record: logging.LogRecord) -> bool:
|
||||
if not self._journal_socket:
|
||||
# Try to connect to systemd journal socket
|
||||
if self._journal_socket_failed or not hasattr(socket, "AF_UNIX"):
|
||||
return False
|
||||
journal_socket = None
|
||||
try:
|
||||
journal_socket = socket.socket(
|
||||
socket.AF_UNIX, socket.SOCK_DGRAM)
|
||||
journal_socket.connect("/run/systemd/journal/socket")
|
||||
except OSError as e:
|
||||
self._journal_socket_failed = True
|
||||
if journal_socket:
|
||||
journal_socket.close()
|
||||
# Log after setting `_journal_socket_failed` to prevent loop!
|
||||
logger.error("Failed to connect to systemd journal: %s",
|
||||
e, exc_info=True)
|
||||
return False
|
||||
self._journal_socket = journal_socket
|
||||
|
||||
priority = {"DEBUG": 7,
|
||||
"INFO": 6,
|
||||
"WARNING": 4,
|
||||
"ERROR": 3,
|
||||
"CRITICAL": 2}.get(record.levelname, 4)
|
||||
timestamp = time.strftime("%Y-%m-%dT%H:%M:%S.%%03dZ",
|
||||
time.gmtime(record.created)) % record.msecs
|
||||
data = {"PRIORITY": priority,
|
||||
"TID": cast(Optional[int], getattr(record, "tid", None)),
|
||||
"SYSLOG_IDENTIFIER": record.name,
|
||||
"SYSLOG_FACILITY": 1,
|
||||
"SYSLOG_PID": record.process,
|
||||
"SYSLOG_TIMESTAMP": timestamp,
|
||||
"CODE_FILE": record.pathname,
|
||||
"CODE_LINE": record.lineno,
|
||||
"CODE_FUNC": record.funcName,
|
||||
"MESSAGE": self._get_formatter("journal").format(record)}
|
||||
self._journal_socket.sendall(self._encode_journal(data))
|
||||
return True
|
||||
|
||||
def emit(self, record: logging.LogRecord) -> None:
|
||||
try:
|
||||
stream = self._streams.get(threading.get_ident(), sys.stderr)
|
||||
msg = self.format(record)
|
||||
stream.write(msg)
|
||||
stream.write(self.terminator)
|
||||
if hasattr(stream, "flush"):
|
||||
stream.flush()
|
||||
if self._detect_journal(stream) and self._try_emit_journal(record):
|
||||
return
|
||||
msg = self._get_formatter("verbose").format(record)
|
||||
stream.write(msg + self.terminator)
|
||||
stream.flush()
|
||||
except Exception:
|
||||
self.handleError(record)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def register_stream(self, stream):
|
||||
@types.contextmanager
|
||||
def register_stream(self, stream: types.ErrorStream) -> Iterator[None]:
|
||||
"""Register stream for logging output of the current thread."""
|
||||
key = threading.get_ident()
|
||||
self._streams[key] = stream
|
||||
|
@ -96,30 +200,52 @@ class ThreadedStreamHandler(logging.Handler):
|
|||
del self._streams[key]
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def register_stream(stream):
|
||||
@types.contextmanager
|
||||
def register_stream(stream: types.ErrorStream) -> Iterator[None]:
|
||||
"""Register stream for logging output of the current thread."""
|
||||
yield
|
||||
|
||||
|
||||
def setup():
|
||||
def setup() -> None:
|
||||
"""Set global logging up."""
|
||||
global register_stream
|
||||
handler = ThreadedStreamHandler()
|
||||
logging.basicConfig(format=LOGGER_FORMAT, datefmt=DATE_FORMAT,
|
||||
handlers=[handler])
|
||||
format_name = os.environ.get("RADICALE_LOG_FORMAT") or None
|
||||
sane_format_name = format_name if format_name in LOGGER_FORMATS else None
|
||||
handler = ThreadedStreamHandler(sane_format_name)
|
||||
logging.basicConfig(handlers=[handler])
|
||||
register_stream = handler.register_stream
|
||||
log_record_factory = IdentLogRecordFactory(logging.getLogRecordFactory())
|
||||
logging.setLogRecordFactory(log_record_factory)
|
||||
set_level(logging.WARNING)
|
||||
set_level(logging.INFO, True)
|
||||
if format_name != sane_format_name:
|
||||
logger.error("Invalid RADICALE_LOG_FORMAT: %r", format_name)
|
||||
|
||||
|
||||
def set_level(level):
|
||||
logger_display_backtrace_disabled: bool = False
|
||||
logger_display_backtrace_enabled: bool = False
|
||||
|
||||
|
||||
def set_level(level: Union[int, str], backtrace_on_debug: bool) -> None:
|
||||
"""Set logging level for global logger."""
|
||||
global logger_display_backtrace_disabled
|
||||
global logger_display_backtrace_enabled
|
||||
if isinstance(level, str):
|
||||
level = getattr(logging, level.upper())
|
||||
assert isinstance(level, int)
|
||||
logger.setLevel(level)
|
||||
if level == logging.DEBUG:
|
||||
logger.removeFilter(REMOVE_TRACEBACK_FILTER)
|
||||
else:
|
||||
if level > logging.DEBUG:
|
||||
if logger_display_backtrace_disabled is False:
|
||||
logger.info("Logging of backtrace is disabled in this loglevel")
|
||||
logger_display_backtrace_disabled = True
|
||||
logger.addFilter(REMOVE_TRACEBACK_FILTER)
|
||||
else:
|
||||
if not backtrace_on_debug:
|
||||
if logger_display_backtrace_disabled is False:
|
||||
logger.debug("Logging of backtrace is disabled by option in this loglevel")
|
||||
logger_display_backtrace_disabled = True
|
||||
logger.addFilter(REMOVE_TRACEBACK_FILTER)
|
||||
else:
|
||||
if logger_display_backtrace_enabled is False:
|
||||
logger.debug("Logging of backtrace is enabled by option in this loglevel")
|
||||
logger_display_backtrace_enabled = True
|
||||
logger.removeFilter(REMOVE_TRACEBACK_FILTER)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
|
@ -21,17 +21,23 @@ Helper functions for working with the file system.
|
|||
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import errno
|
||||
import os
|
||||
import posixpath
|
||||
import sys
|
||||
import threading
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Iterator, Type, Union
|
||||
|
||||
if os.name == "nt":
|
||||
from radicale import storage, types
|
||||
|
||||
if sys.platform == "win32":
|
||||
import ctypes
|
||||
import ctypes.wintypes
|
||||
import msvcrt
|
||||
|
||||
LOCKFILE_EXCLUSIVE_LOCK = 2
|
||||
LOCKFILE_EXCLUSIVE_LOCK: int = 2
|
||||
ULONG_PTR: Union[Type[ctypes.c_uint32], Type[ctypes.c_uint64]]
|
||||
if ctypes.sizeof(ctypes.c_void_p) == 4:
|
||||
ULONG_PTR = ctypes.c_uint32
|
||||
else:
|
||||
|
@ -45,7 +51,8 @@ if os.name == "nt":
|
|||
("offset_high", ctypes.wintypes.DWORD),
|
||||
("h_event", ctypes.wintypes.HANDLE)]
|
||||
|
||||
lock_file_ex = ctypes.windll.kernel32.LockFileEx
|
||||
kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)
|
||||
lock_file_ex = kernel32.LockFileEx
|
||||
lock_file_ex.argtypes = [
|
||||
ctypes.wintypes.HANDLE,
|
||||
ctypes.wintypes.DWORD,
|
||||
|
@ -54,7 +61,7 @@ if os.name == "nt":
|
|||
ctypes.wintypes.DWORD,
|
||||
ctypes.POINTER(Overlapped)]
|
||||
lock_file_ex.restype = ctypes.wintypes.BOOL
|
||||
unlock_file_ex = ctypes.windll.kernel32.UnlockFileEx
|
||||
unlock_file_ex = kernel32.UnlockFileEx
|
||||
unlock_file_ex.argtypes = [
|
||||
ctypes.wintypes.HANDLE,
|
||||
ctypes.wintypes.DWORD,
|
||||
|
@ -62,21 +69,46 @@ if os.name == "nt":
|
|||
ctypes.wintypes.DWORD,
|
||||
ctypes.POINTER(Overlapped)]
|
||||
unlock_file_ex.restype = ctypes.wintypes.BOOL
|
||||
elif os.name == "posix":
|
||||
else:
|
||||
import fcntl
|
||||
|
||||
if sys.platform == "linux":
|
||||
import ctypes
|
||||
|
||||
RENAME_EXCHANGE: int = 2
|
||||
renameat2 = None
|
||||
try:
|
||||
renameat2 = ctypes.CDLL(None, use_errno=True).renameat2
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
renameat2.argtypes = [
|
||||
ctypes.c_int, ctypes.c_char_p,
|
||||
ctypes.c_int, ctypes.c_char_p,
|
||||
ctypes.c_uint]
|
||||
renameat2.restype = ctypes.c_int
|
||||
|
||||
if sys.platform == "darwin":
|
||||
# Definition missing in PyPy
|
||||
F_FULLFSYNC: int = getattr(fcntl, "F_FULLFSYNC", 51)
|
||||
|
||||
|
||||
class RwLock:
|
||||
"""A readers-Writer lock that locks a file."""
|
||||
|
||||
def __init__(self, path):
|
||||
_path: str
|
||||
_readers: int
|
||||
_writer: bool
|
||||
_lock: threading.Lock
|
||||
|
||||
def __init__(self, path: str) -> None:
|
||||
self._path = path
|
||||
self._readers = 0
|
||||
self._writer = False
|
||||
self._lock = threading.Lock()
|
||||
|
||||
@property
|
||||
def locked(self):
|
||||
def locked(self) -> str:
|
||||
with self._lock:
|
||||
if self._readers > 0:
|
||||
return "r"
|
||||
|
@ -84,28 +116,28 @@ class RwLock:
|
|||
return "w"
|
||||
return ""
|
||||
|
||||
@contextlib.contextmanager
|
||||
def acquire(self, mode):
|
||||
@types.contextmanager
|
||||
def acquire(self, mode: str) -> Iterator[None]:
|
||||
if mode not in "rw":
|
||||
raise ValueError("Invalid mode: %r" % mode)
|
||||
with open(self._path, "w+") as lock_file:
|
||||
if os.name == "nt":
|
||||
if sys.platform == "win32":
|
||||
handle = msvcrt.get_osfhandle(lock_file.fileno())
|
||||
flags = LOCKFILE_EXCLUSIVE_LOCK if mode == "w" else 0
|
||||
overlapped = Overlapped()
|
||||
if not lock_file_ex(handle, flags, 0, 1, 0, overlapped):
|
||||
raise RuntimeError("Locking the storage failed: %s" %
|
||||
ctypes.FormatError())
|
||||
elif os.name == "posix":
|
||||
try:
|
||||
if not lock_file_ex(handle, flags, 0, 1, 0, overlapped):
|
||||
raise ctypes.WinError()
|
||||
except OSError as e:
|
||||
raise RuntimeError("Locking the storage failed: %s" % e
|
||||
) from e
|
||||
else:
|
||||
_cmd = fcntl.LOCK_EX if mode == "w" else fcntl.LOCK_SH
|
||||
try:
|
||||
fcntl.flock(lock_file.fileno(), _cmd)
|
||||
except OSError as e:
|
||||
raise RuntimeError("Locking the storage failed: %s" %
|
||||
e) from e
|
||||
else:
|
||||
raise RuntimeError("Locking the storage failed: "
|
||||
"Unsupported operating system")
|
||||
raise RuntimeError("Locking the storage failed: %s" % e
|
||||
) from e
|
||||
with self._lock:
|
||||
if self._writer or mode == "w" and self._readers != 0:
|
||||
raise RuntimeError("Locking the storage failed: "
|
||||
|
@ -123,19 +155,65 @@ class RwLock:
|
|||
self._writer = False
|
||||
|
||||
|
||||
def fsync(fd):
|
||||
if os.name == "posix" and hasattr(fcntl, "F_FULLFSYNC"):
|
||||
fcntl.fcntl(fd, fcntl.F_FULLFSYNC)
|
||||
else:
|
||||
os.fsync(fd)
|
||||
def rename_exchange(src: str, dst: str) -> None:
|
||||
"""Exchange the files or directories `src` and `dst`.
|
||||
|
||||
Both `src` and `dst` must exist but may be of different types.
|
||||
|
||||
On Linux with renameat2 the operation is atomic.
|
||||
On other platforms it's not atomic.
|
||||
|
||||
"""
|
||||
src_dir, src_base = os.path.split(src)
|
||||
dst_dir, dst_base = os.path.split(dst)
|
||||
src_dir = src_dir or os.curdir
|
||||
dst_dir = dst_dir or os.curdir
|
||||
if not src_base or not dst_base:
|
||||
raise ValueError("Invalid arguments: %r -> %r" % (src, dst))
|
||||
if sys.platform == "linux" and renameat2:
|
||||
src_base_bytes = os.fsencode(src_base)
|
||||
dst_base_bytes = os.fsencode(dst_base)
|
||||
src_dir_fd = os.open(src_dir, 0)
|
||||
try:
|
||||
dst_dir_fd = os.open(dst_dir, 0)
|
||||
try:
|
||||
if renameat2(src_dir_fd, src_base_bytes,
|
||||
dst_dir_fd, dst_base_bytes,
|
||||
RENAME_EXCHANGE) == 0:
|
||||
return
|
||||
errno_ = ctypes.get_errno()
|
||||
# Fallback if RENAME_EXCHANGE not supported by filesystem
|
||||
if errno_ != errno.EINVAL:
|
||||
raise OSError(errno_, os.strerror(errno_))
|
||||
finally:
|
||||
os.close(dst_dir_fd)
|
||||
finally:
|
||||
os.close(src_dir_fd)
|
||||
with TemporaryDirectory(prefix=".Radicale.tmp-", dir=src_dir
|
||||
) as tmp_dir:
|
||||
os.rename(dst, os.path.join(tmp_dir, "interim"))
|
||||
os.rename(src, dst)
|
||||
os.rename(os.path.join(tmp_dir, "interim"), src)
|
||||
|
||||
|
||||
def strip_path(path):
|
||||
def fsync(fd: int) -> None:
|
||||
if sys.platform == "darwin":
|
||||
try:
|
||||
fcntl.fcntl(fd, F_FULLFSYNC)
|
||||
return
|
||||
except OSError as e:
|
||||
# Fallback if F_FULLFSYNC not supported by filesystem
|
||||
if e.errno != errno.EINVAL:
|
||||
raise
|
||||
os.fsync(fd)
|
||||
|
||||
|
||||
def strip_path(path: str) -> str:
|
||||
assert sanitize_path(path) == path
|
||||
return path.strip("/")
|
||||
|
||||
|
||||
def unstrip_path(stripped_path, trailing_slash=False):
|
||||
def unstrip_path(stripped_path: str, trailing_slash: bool = False) -> str:
|
||||
assert strip_path(sanitize_path(stripped_path)) == stripped_path
|
||||
assert stripped_path or trailing_slash
|
||||
path = "/%s" % stripped_path
|
||||
|
@ -144,7 +222,7 @@ def unstrip_path(stripped_path, trailing_slash=False):
|
|||
return path
|
||||
|
||||
|
||||
def sanitize_path(path):
|
||||
def sanitize_path(path: str) -> str:
|
||||
"""Make path absolute with leading slash to prevent access to other data.
|
||||
|
||||
Preserve potential trailing slash.
|
||||
|
@ -161,16 +239,16 @@ def sanitize_path(path):
|
|||
return new_path + trailing_slash
|
||||
|
||||
|
||||
def is_safe_path_component(path):
|
||||
def is_safe_path_component(path: str) -> bool:
|
||||
"""Check if path is a single component of a path.
|
||||
|
||||
Check that the path is safe to join too.
|
||||
|
||||
"""
|
||||
return path and "/" not in path and path not in (".", "..")
|
||||
return bool(path) and "/" not in path and path not in (".", "..")
|
||||
|
||||
|
||||
def is_safe_filesystem_path_component(path):
|
||||
def is_safe_filesystem_path_component(path: str) -> bool:
|
||||
"""Check if path is a single component of a local and posix filesystem
|
||||
path.
|
||||
|
||||
|
@ -178,13 +256,14 @@ def is_safe_filesystem_path_component(path):
|
|||
|
||||
"""
|
||||
return (
|
||||
path and not os.path.splitdrive(path)[0] and
|
||||
bool(path) and not os.path.splitdrive(path)[0] and
|
||||
(sys.platform != "win32" or ":" not in path) and # Block NTFS-ADS
|
||||
not os.path.split(path)[0] and path not in (os.curdir, os.pardir) and
|
||||
not path.startswith(".") and not path.endswith("~") and
|
||||
is_safe_path_component(path))
|
||||
|
||||
|
||||
def path_to_filesystem(root, sane_path):
|
||||
def path_to_filesystem(root: str, sane_path: str) -> str:
|
||||
"""Convert `sane_path` to a local filesystem path relative to `root`.
|
||||
|
||||
`root` must be a secure filesystem path, it will be prepend to the path.
|
||||
|
@ -206,25 +285,25 @@ def path_to_filesystem(root, sane_path):
|
|||
# Check for conflicting files (e.g. case-insensitive file systems
|
||||
# or short names on Windows file systems)
|
||||
if (os.path.lexists(safe_path) and
|
||||
part not in (e.name for e in
|
||||
os.scandir(safe_path_parent))):
|
||||
part not in (e.name for e in os.scandir(safe_path_parent))):
|
||||
raise CollidingPathError(part)
|
||||
return safe_path
|
||||
|
||||
|
||||
class UnsafePathError(ValueError):
|
||||
def __init__(self, path):
|
||||
message = "Can't translate name safely to filesystem: %r" % path
|
||||
super().__init__(message)
|
||||
|
||||
def __init__(self, path: str) -> None:
|
||||
super().__init__("Can't translate name safely to filesystem: %r" %
|
||||
path)
|
||||
|
||||
|
||||
class CollidingPathError(ValueError):
|
||||
def __init__(self, path):
|
||||
message = "File name collision: %r" % path
|
||||
super().__init__(message)
|
||||
|
||||
def __init__(self, path: str) -> None:
|
||||
super().__init__("File name collision: %r" % path)
|
||||
|
||||
|
||||
def name_from_path(path, collection):
|
||||
def name_from_path(path: str, collection: "storage.BaseCollection") -> str:
|
||||
"""Return Radicale item name from ``path``."""
|
||||
assert sanitize_path(path) == path
|
||||
start = unstrip_path(collection.path, True)
|
||||
|
|
0
radicale/py.typed
Normal file
0
radicale/py.typed
Normal file
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
|
@ -32,17 +32,21 @@ Take a look at the class ``BaseRights`` if you want to implement your own.
|
|||
|
||||
"""
|
||||
|
||||
from radicale import utils
|
||||
from typing import Sequence, Set
|
||||
|
||||
INTERNAL_TYPES = ("authenticated", "owner_write", "owner_only", "from_file")
|
||||
from radicale import config, utils
|
||||
|
||||
INTERNAL_TYPES: Sequence[str] = ("authenticated", "owner_write", "owner_only",
|
||||
"from_file")
|
||||
|
||||
|
||||
def load(configuration):
|
||||
def load(configuration: "config.Configuration") -> "BaseRights":
|
||||
"""Load the rights module chosen in configuration."""
|
||||
return utils.load_plugin(INTERNAL_TYPES, "rights", "Rights", configuration)
|
||||
return utils.load_plugin(INTERNAL_TYPES, "rights", "Rights", BaseRights,
|
||||
configuration)
|
||||
|
||||
|
||||
def intersect(a, b):
|
||||
def intersect(a: str, b: str) -> str:
|
||||
"""Intersect two lists of rights.
|
||||
|
||||
Returns all rights that are both in ``a`` and ``b``.
|
||||
|
@ -52,7 +56,10 @@ def intersect(a, b):
|
|||
|
||||
|
||||
class BaseRights:
|
||||
def __init__(self, configuration):
|
||||
|
||||
_user_groups: Set[str] = set([])
|
||||
|
||||
def __init__(self, configuration: "config.Configuration") -> None:
|
||||
"""Initialize BaseRights.
|
||||
|
||||
``configuration`` see ``radicale.config`` module.
|
||||
|
@ -62,7 +69,7 @@ class BaseRights:
|
|||
"""
|
||||
self.configuration = configuration
|
||||
|
||||
def authorization(self, user, path):
|
||||
def authorization(self, user: str, path: str) -> str:
|
||||
"""Get granted rights of ``user`` for the collection ``path``.
|
||||
|
||||
If ``user`` is empty, check for anonymous rights.
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
|
@ -21,15 +21,16 @@ calendars and address books.
|
|||
|
||||
"""
|
||||
|
||||
from radicale import pathutils, rights
|
||||
from radicale import config, pathutils, rights
|
||||
|
||||
|
||||
class Rights(rights.BaseRights):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
self._verify_user = self.configuration.get("auth", "type") != "none"
|
||||
|
||||
def authorization(self, user, path):
|
||||
def authorization(self, user: str, path: str) -> str:
|
||||
if self._verify_user and not user:
|
||||
return ""
|
||||
sane_path = pathutils.strip_path(path)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -22,7 +23,7 @@ config (section "rights", key "file").
|
|||
The login is matched against the "user" key, and the collection path
|
||||
is matched against the "collection" key. In the "collection" regex you can use
|
||||
`{user}` and get groups from the "user" regex with `{0}`, `{1}`, etc.
|
||||
In consequence of the parameter subsitution you have to write `{{` and `}}`
|
||||
In consequence of the parameter substitution you have to write `{{` and `}}`
|
||||
if you want to use regular curly braces in the "user" and "collection" regexes.
|
||||
|
||||
For example, for the "user" key, ".+" means "authenticated user" and ".*"
|
||||
|
@ -37,48 +38,72 @@ Leading or ending slashes are trimmed from collection's path.
|
|||
import configparser
|
||||
import re
|
||||
|
||||
from radicale import pathutils, rights
|
||||
from radicale import config, pathutils, rights
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Rights(rights.BaseRights):
|
||||
def __init__(self, configuration):
|
||||
|
||||
_filename: str
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
self._filename = configuration.get("rights", "file")
|
||||
self._log_rights_rule_doesnt_match_on_debug = configuration.get("logging", "rights_rule_doesnt_match_on_debug")
|
||||
self._rights_config = configparser.ConfigParser()
|
||||
try:
|
||||
with open(self._filename, "r") as f:
|
||||
self._rights_config.read_file(f)
|
||||
logger.debug("Read rights file")
|
||||
except Exception as e:
|
||||
raise RuntimeError("Failed to load rights file %r: %s" %
|
||||
(self._filename, e)) from e
|
||||
|
||||
def authorization(self, user, path):
|
||||
def authorization(self, user: str, path: str) -> str:
|
||||
user = user or ""
|
||||
sane_path = pathutils.strip_path(path)
|
||||
# Prevent "regex injection"
|
||||
escaped_user = re.escape(user)
|
||||
rights_config = configparser.ConfigParser()
|
||||
try:
|
||||
if not rights_config.read(self._filename):
|
||||
raise RuntimeError("No such file: %r" %
|
||||
self._filename)
|
||||
except Exception as e:
|
||||
raise RuntimeError("Failed to load rights file %r: %s" %
|
||||
(self._filename, e)) from e
|
||||
for section in rights_config.sections():
|
||||
if not self._log_rights_rule_doesnt_match_on_debug:
|
||||
logger.debug("logging of rules which doesn't match suppressed by config/option [logging] rights_rule_doesnt_match_on_debug")
|
||||
for section in self._rights_config.sections():
|
||||
group_match = None
|
||||
user_match = None
|
||||
try:
|
||||
user_pattern = rights_config.get(section, "user")
|
||||
collection_pattern = rights_config.get(section, "collection")
|
||||
user_pattern = self._rights_config.get(section, "user", fallback="")
|
||||
collection_pattern = self._rights_config.get(section, "collection")
|
||||
allowed_groups = self._rights_config.get(section, "groups", fallback="").split(",")
|
||||
try:
|
||||
group_match = len(self._user_groups.intersection(allowed_groups)) > 0
|
||||
except Exception:
|
||||
pass
|
||||
# Use empty format() for harmonized handling of curly braces
|
||||
user_match = re.fullmatch(user_pattern.format(), user)
|
||||
collection_match = user_match and re.fullmatch(
|
||||
if user_pattern != "":
|
||||
user_match = re.fullmatch(user_pattern.format(), user)
|
||||
user_collection_match = user_match and re.fullmatch(
|
||||
collection_pattern.format(
|
||||
*map(re.escape, user_match.groups()),
|
||||
*(re.escape(s) for s in user_match.groups()),
|
||||
user=escaped_user), sane_path)
|
||||
group_collection_match = group_match and re.fullmatch(
|
||||
collection_pattern.format(user=escaped_user), sane_path)
|
||||
except Exception as e:
|
||||
raise RuntimeError("Error in section %r of rights file %r: "
|
||||
"%s" % (section, self._filename, e)) from e
|
||||
if user_match and collection_match:
|
||||
logger.debug("Rule %r:%r matches %r:%r from section %r",
|
||||
if user_match and user_collection_match:
|
||||
permission = self._rights_config.get(section, "permissions")
|
||||
logger.debug("Rule %r:%r matches %r:%r from section %r permission %r",
|
||||
user, sane_path, user_pattern,
|
||||
collection_pattern, section)
|
||||
return rights_config.get(section, "permissions")
|
||||
logger.debug("Rule %r:%r doesn't match %r:%r from section %r",
|
||||
user, sane_path, user_pattern, collection_pattern,
|
||||
section)
|
||||
logger.info("Rights: %r:%r doesn't match any section", user, sane_path)
|
||||
collection_pattern, section, permission)
|
||||
return permission
|
||||
if group_match and group_collection_match:
|
||||
permission = self._rights_config.get(section, "permissions")
|
||||
logger.debug("Rule %r:%r matches %r:%r from section %r permission %r by group membership",
|
||||
user, sane_path, user_pattern,
|
||||
collection_pattern, section, permission)
|
||||
return permission
|
||||
if self._log_rights_rule_doesnt_match_on_debug:
|
||||
logger.debug("Rule %r:%r doesn't match %r:%r from section %r",
|
||||
user, sane_path, user_pattern, collection_pattern,
|
||||
section)
|
||||
logger.debug("Rights: %r:%r doesn't match any section", user, sane_path)
|
||||
return ""
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
|
@ -26,7 +26,8 @@ from radicale import pathutils
|
|||
|
||||
|
||||
class Rights(authenticated.Rights):
|
||||
def authorization(self, user, path):
|
||||
|
||||
def authorization(self, user: str, path: str) -> str:
|
||||
if self._verify_user and not user:
|
||||
return ""
|
||||
sane_path = pathutils.strip_path(path)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
|
@ -26,7 +26,8 @@ from radicale import pathutils
|
|||
|
||||
|
||||
class Rights(authenticated.Rights):
|
||||
def authorization(self, user, path):
|
||||
|
||||
def authorization(self, user: str, path: str) -> str:
|
||||
if self._verify_user and not user:
|
||||
return ""
|
||||
sane_path = pathutils.strip_path(path)
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2023 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -22,82 +23,125 @@ Built-in WSGI server.
|
|||
|
||||
"""
|
||||
|
||||
import errno
|
||||
import os
|
||||
import http
|
||||
import select
|
||||
import socket
|
||||
import socketserver
|
||||
import ssl
|
||||
import sys
|
||||
import wsgiref.simple_server
|
||||
from typing import (Any, Callable, Dict, List, MutableMapping, Optional, Set,
|
||||
Tuple, Union)
|
||||
from urllib.parse import unquote
|
||||
|
||||
from radicale import Application, config
|
||||
from radicale import Application, config, utils
|
||||
from radicale.log import logger
|
||||
|
||||
COMPAT_EAI_ADDRFAMILY: int
|
||||
if hasattr(socket, "EAI_ADDRFAMILY"):
|
||||
COMPAT_EAI_ADDRFAMILY = socket.EAI_ADDRFAMILY
|
||||
COMPAT_EAI_ADDRFAMILY = socket.EAI_ADDRFAMILY # type:ignore[attr-defined]
|
||||
elif hasattr(socket, "EAI_NONAME"):
|
||||
# Windows and BSD don't have a special error code for this
|
||||
COMPAT_EAI_ADDRFAMILY = socket.EAI_NONAME
|
||||
COMPAT_EAI_NODATA: int
|
||||
if hasattr(socket, "EAI_NODATA"):
|
||||
COMPAT_EAI_NODATA = socket.EAI_NODATA
|
||||
elif hasattr(socket, "EAI_NONAME"):
|
||||
# Windows and BSD don't have a special error code for this
|
||||
COMPAT_EAI_NODATA = socket.EAI_NONAME
|
||||
COMPAT_IPPROTO_IPV6: int
|
||||
if hasattr(socket, "IPPROTO_IPV6"):
|
||||
COMPAT_IPPROTO_IPV6 = socket.IPPROTO_IPV6
|
||||
elif os.name == "nt":
|
||||
# Workaround: https://bugs.python.org/issue29515
|
||||
elif sys.platform == "win32":
|
||||
# HACK: https://bugs.python.org/issue29515
|
||||
COMPAT_IPPROTO_IPV6 = 41
|
||||
|
||||
|
||||
def format_address(address):
|
||||
return "[%s]:%d" % address[:2]
|
||||
# IPv4 (host, port) and IPv6 (host, port, flowinfo, scopeid)
|
||||
ADDRESS_TYPE = utils.ADDRESS_TYPE
|
||||
|
||||
|
||||
class ParallelHTTPServer(socketserver.ThreadingMixIn,
|
||||
wsgiref.simple_server.WSGIServer):
|
||||
|
||||
# We wait for child threads ourself
|
||||
block_on_close = False
|
||||
configuration: config.Configuration
|
||||
worker_sockets: Set[socket.socket]
|
||||
_timeout: float
|
||||
|
||||
def __init__(self, configuration, family, address, RequestHandlerClass):
|
||||
# We wait for child threads ourself (ThreadingMixIn)
|
||||
block_on_close: bool = False
|
||||
daemon_threads: bool = True
|
||||
|
||||
def __init__(self, configuration: config.Configuration, family: int,
|
||||
address: Tuple[str, int], RequestHandlerClass:
|
||||
Callable[..., http.server.BaseHTTPRequestHandler]) -> None:
|
||||
self.configuration = configuration
|
||||
self.address_family = family
|
||||
super().__init__(address, RequestHandlerClass)
|
||||
self.client_sockets = set()
|
||||
self.worker_sockets = set()
|
||||
self._timeout = configuration.get("server", "timeout")
|
||||
|
||||
def server_bind(self):
|
||||
def server_bind(self) -> None:
|
||||
if self.address_family == socket.AF_INET6:
|
||||
# Only allow IPv6 connections to the IPv6 socket
|
||||
self.socket.setsockopt(COMPAT_IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
|
||||
super().server_bind()
|
||||
|
||||
def get_request(self):
|
||||
def get_request( # type:ignore[override]
|
||||
self) -> Tuple[socket.socket, Tuple[ADDRESS_TYPE, socket.socket]]:
|
||||
# Set timeout for client
|
||||
request, client_address = super().get_request()
|
||||
timeout = self.configuration.get("server", "timeout")
|
||||
if timeout:
|
||||
request.settimeout(timeout)
|
||||
client_socket, client_socket_out = socket.socketpair()
|
||||
self.client_sockets.add(client_socket_out)
|
||||
return request, (*client_address, client_socket)
|
||||
request: socket.socket
|
||||
client_address: ADDRESS_TYPE
|
||||
request, client_address = super().get_request() # type:ignore[misc]
|
||||
if self._timeout > 0:
|
||||
request.settimeout(self._timeout)
|
||||
worker_socket, worker_socket_out = socket.socketpair()
|
||||
self.worker_sockets.add(worker_socket_out)
|
||||
# HACK: Forward `worker_socket` via `client_address` return value
|
||||
# to worker thread.
|
||||
# The super class calls `verify_request`, `process_request` and
|
||||
# `handle_error` with modified `client_address` value.
|
||||
return request, (client_address, worker_socket)
|
||||
|
||||
def finish_request_locked(self, request, client_address):
|
||||
return super().finish_request(request, client_address)
|
||||
def verify_request( # type:ignore[override]
|
||||
self, request: socket.socket, client_address_and_socket:
|
||||
Tuple[ADDRESS_TYPE, socket.socket]) -> bool:
|
||||
return True
|
||||
|
||||
def finish_request(self, request, client_address):
|
||||
*client_address, client_socket = client_address
|
||||
client_address = tuple(client_address)
|
||||
def process_request( # type:ignore[override]
|
||||
self, request: socket.socket, client_address_and_socket:
|
||||
Tuple[ADDRESS_TYPE, socket.socket]) -> None:
|
||||
# HACK: Super class calls `finish_request` in new thread with
|
||||
# `client_address_and_socket`
|
||||
return super().process_request(
|
||||
request, client_address_and_socket) # type:ignore[arg-type]
|
||||
|
||||
def finish_request( # type:ignore[override]
|
||||
self, request: socket.socket, client_address_and_socket:
|
||||
Tuple[ADDRESS_TYPE, socket.socket]) -> None:
|
||||
# HACK: Unpack `client_address_and_socket` and call super class
|
||||
# `finish_request` with original `client_address`
|
||||
client_address, worker_socket = client_address_and_socket
|
||||
try:
|
||||
return self.finish_request_locked(request, client_address)
|
||||
finally:
|
||||
client_socket.close()
|
||||
worker_socket.close()
|
||||
|
||||
def handle_error(self, request, client_address):
|
||||
if issubclass(sys.exc_info()[0], socket.timeout):
|
||||
logger.info("client timed out", exc_info=True)
|
||||
def finish_request_locked(self, request: socket.socket,
|
||||
client_address: ADDRESS_TYPE) -> None:
|
||||
return super().finish_request(
|
||||
request, client_address) # type:ignore[arg-type]
|
||||
|
||||
def handle_error( # type:ignore[override]
|
||||
self, request: socket.socket,
|
||||
client_address_or_client_address_and_socket:
|
||||
Union[ADDRESS_TYPE, Tuple[ADDRESS_TYPE, socket.socket]]) -> None:
|
||||
# HACK: This method can be called with the modified
|
||||
# `client_address_and_socket` or the original `client_address` value
|
||||
e = sys.exc_info()[1]
|
||||
assert e is not None
|
||||
if isinstance(e, socket.timeout):
|
||||
logger.info("Client timed out", exc_info=True)
|
||||
else:
|
||||
logger.error("An exception occurred during request: %s",
|
||||
sys.exc_info()[1], exc_info=True)
|
||||
|
@ -105,12 +149,14 @@ class ParallelHTTPServer(socketserver.ThreadingMixIn,
|
|||
|
||||
class ParallelHTTPSServer(ParallelHTTPServer):
|
||||
|
||||
def server_bind(self):
|
||||
def server_bind(self) -> None:
|
||||
super().server_bind()
|
||||
# Wrap the TCP socket in an SSL socket
|
||||
certfile = self.configuration.get("server", "certificate")
|
||||
keyfile = self.configuration.get("server", "key")
|
||||
cafile = self.configuration.get("server", "certificate_authority")
|
||||
certfile: str = self.configuration.get("server", "certificate")
|
||||
keyfile: str = self.configuration.get("server", "key")
|
||||
cafile: str = self.configuration.get("server", "certificate_authority")
|
||||
protocol: str = self.configuration.get("server", "protocol")
|
||||
ciphersuite: str = self.configuration.get("server", "ciphersuite")
|
||||
# Test if the files can be read
|
||||
for name, filename in [("certificate", certfile), ("key", keyfile),
|
||||
("certificate_authority", cafile)]:
|
||||
|
@ -120,33 +166,60 @@ class ParallelHTTPSServer(ParallelHTTPServer):
|
|||
if name == "certificate_authority" and not filename:
|
||||
continue
|
||||
try:
|
||||
open(filename, "r").close()
|
||||
open(filename).close()
|
||||
except OSError as e:
|
||||
raise RuntimeError(
|
||||
"Invalid %s value for option %r in section %r in %s: %r "
|
||||
"(%s)" % (type_name, name, "server", source, filename,
|
||||
e)) from e
|
||||
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
logger.info("SSL load files certificate='%s' key='%s'", certfile, keyfile)
|
||||
context.load_cert_chain(certfile=certfile, keyfile=keyfile)
|
||||
if protocol:
|
||||
logger.info("SSL set explicit protocols (maybe not all supported by underlying OpenSSL): '%s'", protocol)
|
||||
context.options = utils.ssl_context_options_by_protocol(protocol, context.options)
|
||||
context.minimum_version = utils.ssl_context_minimum_version_by_options(context.options)
|
||||
if (context.minimum_version == 0):
|
||||
raise RuntimeError("No SSL minimum protocol active")
|
||||
context.maximum_version = utils.ssl_context_maximum_version_by_options(context.options)
|
||||
if (context.maximum_version == 0):
|
||||
raise RuntimeError("No SSL maximum protocol active")
|
||||
else:
|
||||
logger.info("SSL active protocols: (system-default)")
|
||||
logger.debug("SSL minimum acceptable protocol: %s", context.minimum_version)
|
||||
logger.debug("SSL maximum acceptable protocol: %s", context.maximum_version)
|
||||
logger.info("SSL accepted protocols: %s", ' '.join(utils.ssl_get_protocols(context)))
|
||||
if ciphersuite:
|
||||
logger.info("SSL set explicit ciphersuite (maybe not all supported by underlying OpenSSL): '%s'", ciphersuite)
|
||||
context.set_ciphers(ciphersuite)
|
||||
else:
|
||||
logger.info("SSL active ciphersuite: (system-default)")
|
||||
cipherlist = []
|
||||
for entry in context.get_ciphers():
|
||||
cipherlist.append(entry["name"])
|
||||
logger.info("SSL accepted ciphers: %s", ' '.join(cipherlist))
|
||||
if cafile:
|
||||
logger.info("SSL enable mandatory client certificate verification using CA file='%s'", cafile)
|
||||
context.load_verify_locations(cafile=cafile)
|
||||
context.verify_mode = ssl.CERT_REQUIRED
|
||||
self.socket = context.wrap_socket(
|
||||
self.socket, server_side=True, do_handshake_on_connect=False)
|
||||
|
||||
def finish_request_locked(self, request, client_address):
|
||||
def finish_request_locked( # type:ignore[override]
|
||||
self, request: ssl.SSLSocket, client_address: ADDRESS_TYPE
|
||||
) -> None:
|
||||
try:
|
||||
try:
|
||||
request.do_handshake()
|
||||
except socket.timeout:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise RuntimeError("SSL handshake failed: %s" % e) from e
|
||||
raise RuntimeError("SSL handshake failed: %s client %s" % (e, str(client_address[0]))) from e
|
||||
except Exception:
|
||||
try:
|
||||
self.handle_error(request, client_address)
|
||||
finally:
|
||||
self.shutdown_request(request)
|
||||
self.shutdown_request(request) # type:ignore[attr-defined]
|
||||
return
|
||||
return super().finish_request_locked(request, client_address)
|
||||
|
||||
|
@ -154,32 +227,39 @@ class ParallelHTTPSServer(ParallelHTTPServer):
|
|||
class ServerHandler(wsgiref.simple_server.ServerHandler):
|
||||
|
||||
# Don't pollute WSGI environ with OS environment
|
||||
os_environ = {}
|
||||
os_environ: MutableMapping[str, str] = {}
|
||||
|
||||
def log_exception(self, exc_info):
|
||||
def log_exception(self, exc_info) -> None:
|
||||
logger.error("An exception occurred during request: %s",
|
||||
exc_info[1], exc_info=exc_info)
|
||||
exc_info[1], exc_info=exc_info) # type:ignore[arg-type]
|
||||
|
||||
|
||||
class RequestHandler(wsgiref.simple_server.WSGIRequestHandler):
|
||||
"""HTTP requests handler."""
|
||||
|
||||
def log_request(self, code="-", size="-"):
|
||||
# HACK: Assigned in `socketserver.StreamRequestHandler`
|
||||
connection: socket.socket
|
||||
|
||||
def log_request(self, code: Union[int, str] = "-",
|
||||
size: Union[int, str] = "-") -> None:
|
||||
pass # Disable request logging.
|
||||
|
||||
def log_error(self, format_, *args):
|
||||
def log_error(self, format_: str, *args: Any) -> None:
|
||||
logger.error("An error occurred during request: %s", format_ % args)
|
||||
|
||||
def get_environ(self):
|
||||
def get_environ(self) -> Dict[str, Any]:
|
||||
env = super().get_environ()
|
||||
if hasattr(self.connection, "getpeercert"):
|
||||
if isinstance(self.connection, ssl.SSLSocket):
|
||||
env["HTTPS"] = "on"
|
||||
env["SSL_CIPHER"] = self.request.cipher()[0]
|
||||
env["SSL_PROTOCOL"] = self.request.version()
|
||||
# The certificate can be evaluated by the auth module
|
||||
env["REMOTE_CERTIFICATE"] = self.connection.getpeercert()
|
||||
# Parent class only tries latin1 encoding
|
||||
env["PATH_INFO"] = unquote(self.path.split("?", 1)[0])
|
||||
return env
|
||||
|
||||
def handle(self):
|
||||
def handle(self) -> None:
|
||||
"""Copy of WSGIRequestHandler.handle with different ServerHandler"""
|
||||
|
||||
self.raw_requestline = self.rfile.readline(65537)
|
||||
|
@ -196,100 +276,96 @@ class RequestHandler(wsgiref.simple_server.WSGIRequestHandler):
|
|||
handler = ServerHandler(
|
||||
self.rfile, self.wfile, self.get_stderr(), self.get_environ()
|
||||
)
|
||||
handler.request_handler = self
|
||||
handler.run(self.server.get_app())
|
||||
handler.request_handler = self # type:ignore[attr-defined]
|
||||
app = self.server.get_app() # type:ignore[attr-defined]
|
||||
handler.run(app)
|
||||
|
||||
|
||||
def serve(configuration, shutdown_socket):
|
||||
"""Serve radicale from configuration."""
|
||||
logger.info("Starting Radicale")
|
||||
def serve(configuration: config.Configuration,
|
||||
shutdown_socket: Optional[socket.socket] = None) -> None:
|
||||
"""Serve radicale from configuration.
|
||||
|
||||
`shutdown_socket` can be used to gracefully shutdown the server.
|
||||
The socket can be created with `socket.socketpair()`, when the other socket
|
||||
gets closed the server stops accepting new requests by clients and the
|
||||
function returns after all active requests are finished.
|
||||
|
||||
"""
|
||||
|
||||
logger.info("Starting Radicale (%s)", utils.packages_version())
|
||||
# Copy configuration before modifying
|
||||
configuration = configuration.copy()
|
||||
configuration.update({"server": {"_internal_server": "True"}}, "server",
|
||||
privileged=True)
|
||||
|
||||
use_ssl = configuration.get("server", "ssl")
|
||||
use_ssl: bool = configuration.get("server", "ssl")
|
||||
server_class = ParallelHTTPSServer if use_ssl else ParallelHTTPServer
|
||||
application = Application(configuration)
|
||||
servers = {}
|
||||
try:
|
||||
for address in configuration.get("server", "hosts"):
|
||||
# Try to bind sockets for IPv4 and IPv6
|
||||
possible_families = (socket.AF_INET, socket.AF_INET6)
|
||||
bind_ok = False
|
||||
for i, family in enumerate(possible_families):
|
||||
is_last = i == len(possible_families) - 1
|
||||
hosts: List[Tuple[str, int]] = configuration.get("server", "hosts")
|
||||
for address_port in hosts:
|
||||
# retrieve IPv4/IPv6 address of address
|
||||
try:
|
||||
getaddrinfo = socket.getaddrinfo(address_port[0], address_port[1], 0, socket.SOCK_STREAM, socket.IPPROTO_TCP)
|
||||
except OSError as e:
|
||||
logger.warning("cannot retrieve IPv4 or IPv6 address of '%s': %s" % (utils.format_address(address_port), e))
|
||||
continue
|
||||
logger.debug("getaddrinfo of '%s': %s" % (utils.format_address(address_port), getaddrinfo))
|
||||
for (address_family, socket_kind, socket_proto, socket_flags, socket_address) in getaddrinfo:
|
||||
logger.debug("try to create server socket on '%s'" % (utils.format_address(socket_address)))
|
||||
try:
|
||||
server = server_class(configuration, family, address,
|
||||
RequestHandler)
|
||||
server = server_class(configuration, address_family, (socket_address[0], socket_address[1]), RequestHandler)
|
||||
except OSError as e:
|
||||
# Ignore unsupported families (only one must work)
|
||||
if ((bind_ok or not is_last) and (
|
||||
isinstance(e, socket.gaierror) and (
|
||||
# Hostname does not exist or doesn't have
|
||||
# address for address family
|
||||
# macOS: IPv6 address for INET address family
|
||||
e.errno == socket.EAI_NONAME or
|
||||
# Address not for address family
|
||||
e.errno == COMPAT_EAI_ADDRFAMILY or
|
||||
e.errno == COMPAT_EAI_NODATA) or
|
||||
# Workaround for PyPy
|
||||
str(e) == "address family mismatched" or
|
||||
# Address family not available (e.g. IPv6 disabled)
|
||||
# macOS: IPv4 address for INET6 address family with
|
||||
# IPV6_V6ONLY set
|
||||
e.errno == errno.EADDRNOTAVAIL)):
|
||||
continue
|
||||
raise RuntimeError("Failed to start server %r: %s" % (
|
||||
format_address(address), e)) from e
|
||||
logger.warning("cannot create server socket on '%s': %s" % (utils.format_address(socket_address), e))
|
||||
continue
|
||||
servers[server.socket] = server
|
||||
bind_ok = True
|
||||
server.set_app(application)
|
||||
logger.info("Listening on %r%s",
|
||||
format_address(server.server_address),
|
||||
utils.format_address(server.server_address),
|
||||
" with SSL" if use_ssl else "")
|
||||
assert servers, "no servers started"
|
||||
if not servers:
|
||||
raise RuntimeError("No servers started")
|
||||
|
||||
# Mainloop
|
||||
select_timeout = None
|
||||
if os.name == "nt":
|
||||
if sys.platform == "win32":
|
||||
# Fallback to busy waiting. (select(...) blocks SIGINT on Windows.)
|
||||
select_timeout = 1.0
|
||||
max_connections = configuration.get("server", "max_connections")
|
||||
max_connections: int = configuration.get("server", "max_connections")
|
||||
logger.info("Radicale server ready")
|
||||
while True:
|
||||
rlist = xlist = []
|
||||
rlist: List[socket.socket] = []
|
||||
# Wait for finished clients
|
||||
for server in servers.values():
|
||||
rlist.extend(server.client_sockets)
|
||||
rlist.extend(server.worker_sockets)
|
||||
# Accept new connections if max_connections is not reached
|
||||
if max_connections <= 0 or len(rlist) < max_connections:
|
||||
rlist.extend(servers)
|
||||
# Use socket to get notified of program shutdown
|
||||
rlist.append(shutdown_socket)
|
||||
rlist, _, xlist = select.select(rlist, [], xlist, select_timeout)
|
||||
if xlist:
|
||||
raise RuntimeError("unhandled socket error")
|
||||
rlist = set(rlist)
|
||||
if shutdown_socket in rlist:
|
||||
if shutdown_socket is not None:
|
||||
rlist.append(shutdown_socket)
|
||||
rlist, _, _ = select.select(rlist, [], [], select_timeout)
|
||||
rset = set(rlist)
|
||||
if shutdown_socket in rset:
|
||||
logger.info("Stopping Radicale")
|
||||
break
|
||||
for server in servers.values():
|
||||
finished_sockets = server.client_sockets.intersection(rlist)
|
||||
finished_sockets = server.worker_sockets.intersection(rset)
|
||||
for s in finished_sockets:
|
||||
s.close()
|
||||
server.client_sockets.remove(s)
|
||||
rlist.remove(s)
|
||||
server.worker_sockets.remove(s)
|
||||
rset.remove(s)
|
||||
if finished_sockets:
|
||||
server.service_actions()
|
||||
if rlist:
|
||||
server = servers.get(rlist.pop())
|
||||
if server:
|
||||
server.handle_request()
|
||||
if rset:
|
||||
active_server = servers.get(rset.pop())
|
||||
if active_server:
|
||||
active_server.handle_request()
|
||||
finally:
|
||||
# Wait for clients to finish and close servers
|
||||
for server in servers.values():
|
||||
for s in server.client_sockets:
|
||||
for s in server.worker_sockets:
|
||||
s.recv(1)
|
||||
s.close()
|
||||
server.server_close()
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -23,37 +24,45 @@ Take a look at the class ``BaseCollection`` if you want to implement your own.
|
|||
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import json
|
||||
import xml.etree.ElementTree as ET
|
||||
from hashlib import sha256
|
||||
from typing import (Callable, ContextManager, Iterable, Iterator, Mapping,
|
||||
Optional, Sequence, Set, Tuple, Union, overload)
|
||||
|
||||
import pkg_resources
|
||||
import vobject
|
||||
|
||||
from radicale import utils
|
||||
from radicale import config
|
||||
from radicale import item as radicale_item
|
||||
from radicale import types, utils
|
||||
from radicale.item import filter as radicale_filter
|
||||
from radicale.log import logger
|
||||
|
||||
INTERNAL_TYPES = ("multifilesystem",)
|
||||
INTERNAL_TYPES: Sequence[str] = ("multifilesystem", "multifilesystem_nolock",)
|
||||
|
||||
CACHE_DEPS = ("radicale", "vobject", "python-dateutil",)
|
||||
CACHE_VERSION = (";".join(pkg_resources.get_distribution(pkg).version
|
||||
for pkg in CACHE_DEPS) + ";").encode()
|
||||
# NOTE: change only if cache structure is modified to avoid cache invalidation on update
|
||||
CACHE_VERSION_RADICALE = "3.3.1"
|
||||
|
||||
CACHE_VERSION: bytes = ("%s=%s;%s=%s;" % ("radicale", CACHE_VERSION_RADICALE, "vobject", utils.package_version("vobject"))).encode()
|
||||
|
||||
|
||||
def load(configuration):
|
||||
def load(configuration: "config.Configuration") -> "BaseStorage":
|
||||
"""Load the storage module chosen in configuration."""
|
||||
return utils.load_plugin(
|
||||
INTERNAL_TYPES, "storage", "Storage", configuration)
|
||||
logger.debug("storage cache version: %r", str(CACHE_VERSION))
|
||||
return utils.load_plugin(INTERNAL_TYPES, "storage", "Storage", BaseStorage,
|
||||
configuration)
|
||||
|
||||
|
||||
class ComponentExistsError(ValueError):
|
||||
def __init__(self, path):
|
||||
|
||||
def __init__(self, path: str) -> None:
|
||||
message = "Component already exists: %r" % path
|
||||
super().__init__(message)
|
||||
|
||||
|
||||
class ComponentNotFoundError(ValueError):
|
||||
def __init__(self, path):
|
||||
|
||||
def __init__(self, path: str) -> None:
|
||||
message = "Component doesn't exist: %r" % path
|
||||
super().__init__(message)
|
||||
|
||||
|
@ -61,47 +70,58 @@ class ComponentNotFoundError(ValueError):
|
|||
class BaseCollection:
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
def path(self) -> str:
|
||||
"""The sanitized path of the collection without leading or
|
||||
trailing ``/``."""
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def owner(self):
|
||||
def owner(self) -> str:
|
||||
"""The owner of the collection."""
|
||||
return self.path.split("/", maxsplit=1)[0]
|
||||
|
||||
@property
|
||||
def is_principal(self):
|
||||
def is_principal(self) -> bool:
|
||||
"""Collection is a principal."""
|
||||
return bool(self.path) and "/" not in self.path
|
||||
|
||||
@property
|
||||
def etag(self):
|
||||
def etag(self) -> str:
|
||||
"""Encoded as quoted-string (see RFC 2616)."""
|
||||
etag = sha256()
|
||||
for item in self.get_all():
|
||||
assert item.href
|
||||
etag.update((item.href + "/" + item.etag).encode())
|
||||
etag.update(json.dumps(self.get_meta(), sort_keys=True).encode())
|
||||
return '"%s"' % etag.hexdigest()
|
||||
|
||||
def sync(self, old_token=None):
|
||||
@property
|
||||
def tag(self) -> str:
|
||||
"""The tag of the collection."""
|
||||
return self.get_meta("tag") or ""
|
||||
|
||||
def sync(self, old_token: str = "") -> Tuple[str, Iterable[str]]:
|
||||
"""Get the current sync token and changed items for synchronization.
|
||||
|
||||
``old_token`` an old sync token which is used as the base of the
|
||||
delta update. If sync token is missing, all items are returned.
|
||||
delta update. If sync token is empty, all items are returned.
|
||||
ValueError is raised for invalid or old tokens.
|
||||
|
||||
WARNING: This simple default implementation treats all sync-token as
|
||||
invalid.
|
||||
|
||||
"""
|
||||
def hrefs_iter() -> Iterator[str]:
|
||||
for item in self.get_all():
|
||||
assert item.href
|
||||
yield item.href
|
||||
token = "http://radicale.org/ns/sync/%s" % self.etag.strip("\"")
|
||||
if old_token:
|
||||
raise ValueError("Sync token are not supported")
|
||||
return token, (item.href for item in self.get_all())
|
||||
return token, hrefs_iter()
|
||||
|
||||
def get_multi(self, hrefs):
|
||||
def get_multi(self, hrefs: Iterable[str]
|
||||
) -> Iterable[Tuple[str, Optional["radicale_item.Item"]]]:
|
||||
"""Fetch multiple items.
|
||||
|
||||
It's not required to return the requested items in the correct order.
|
||||
|
@ -113,11 +133,12 @@ class BaseCollection:
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_all(self):
|
||||
def get_all(self) -> Iterable["radicale_item.Item"]:
|
||||
"""Fetch all items."""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_filtered(self, filters):
|
||||
def get_filtered(self, filters: Iterable[ET.Element]
|
||||
) -> Iterable[Tuple["radicale_item.Item", bool]]:
|
||||
"""Fetch all items with optional filtering.
|
||||
|
||||
This can largely improve performance of reports depending on
|
||||
|
@ -128,32 +149,31 @@ class BaseCollection:
|
|||
matched.
|
||||
|
||||
"""
|
||||
if not self.tag:
|
||||
return
|
||||
tag, start, end, simple = radicale_filter.simplify_prefilters(
|
||||
filters, collection_tag=self.get_meta("tag"))
|
||||
filters, self.tag)
|
||||
for item in self.get_all():
|
||||
if tag:
|
||||
if tag != item.component_name:
|
||||
continue
|
||||
istart, iend = item.time_range
|
||||
if istart >= end or iend <= start:
|
||||
continue
|
||||
item_simple = simple and (start <= istart or iend <= end)
|
||||
else:
|
||||
item_simple = simple
|
||||
yield item, item_simple
|
||||
if tag is not None and tag != item.component_name:
|
||||
continue
|
||||
istart, iend = item.time_range
|
||||
if istart >= end or iend <= start:
|
||||
continue
|
||||
yield item, simple and (start <= istart or iend <= end)
|
||||
|
||||
def has_uid(self, uid):
|
||||
def has_uid(self, uid: str) -> bool:
|
||||
"""Check if a UID exists in the collection."""
|
||||
for item in self.get_all():
|
||||
if item.uid == uid:
|
||||
return True
|
||||
return False
|
||||
|
||||
def upload(self, href, item):
|
||||
def upload(self, href: str, item: "radicale_item.Item") -> (
|
||||
"radicale_item.Item"):
|
||||
"""Upload a new or replace an existing item."""
|
||||
raise NotImplementedError
|
||||
|
||||
def delete(self, href=None):
|
||||
def delete(self, href: Optional[str] = None) -> None:
|
||||
"""Delete an item.
|
||||
|
||||
When ``href`` is ``None``, delete the collection.
|
||||
|
@ -161,7 +181,14 @@ class BaseCollection:
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_meta(self, key=None):
|
||||
@overload
|
||||
def get_meta(self, key: None = None) -> Mapping[str, str]: ...
|
||||
|
||||
@overload
|
||||
def get_meta(self, key: str) -> Optional[str]: ...
|
||||
|
||||
def get_meta(self, key: Optional[str] = None
|
||||
) -> Union[Mapping[str, str], Optional[str]]:
|
||||
"""Get metadata value for collection.
|
||||
|
||||
Return the value of the property ``key``. If ``key`` is ``None`` return
|
||||
|
@ -170,7 +197,7 @@ class BaseCollection:
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def set_meta(self, props):
|
||||
def set_meta(self, props: Mapping[str, str]) -> None:
|
||||
"""Set metadata values for collection.
|
||||
|
||||
``props`` a dict with values for properties.
|
||||
|
@ -179,23 +206,23 @@ class BaseCollection:
|
|||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def last_modified(self):
|
||||
def last_modified(self) -> str:
|
||||
"""Get the HTTP-datetime of when the collection was modified."""
|
||||
raise NotImplementedError
|
||||
|
||||
def serialize(self):
|
||||
def serialize(self) -> str:
|
||||
"""Get the unicode string representing the whole collection."""
|
||||
if self.get_meta("tag") == "VCALENDAR":
|
||||
if self.tag == "VCALENDAR":
|
||||
in_vcalendar = False
|
||||
vtimezones = ""
|
||||
included_tzids = set()
|
||||
included_tzids: Set[str] = set()
|
||||
vtimezone = []
|
||||
tzid = None
|
||||
components = ""
|
||||
# Concatenate all child elements of VCALENDAR from all items
|
||||
# together, while preventing duplicated VTIMEZONE entries.
|
||||
# VTIMEZONEs are only distinguished by their TZID, if different
|
||||
# timezones share the same TZID this produces errornous ouput.
|
||||
# timezones share the same TZID this produces erroneous output.
|
||||
# VObject fails at this too.
|
||||
for item in self.get_all():
|
||||
depth = 0
|
||||
|
@ -216,6 +243,7 @@ class BaseCollection:
|
|||
elif depth == 2 and line.startswith("END:"):
|
||||
if tzid is None or tzid not in included_tzids:
|
||||
vtimezones += "".join(vtimezone)
|
||||
if tzid is not None:
|
||||
included_tzids.add(tzid)
|
||||
vtimezone.clear()
|
||||
tzid = None
|
||||
|
@ -240,13 +268,14 @@ class BaseCollection:
|
|||
return (template[:template_insert_pos] +
|
||||
vtimezones + components +
|
||||
template[template_insert_pos:])
|
||||
if self.get_meta("tag") == "VADDRESSBOOK":
|
||||
if self.tag == "VADDRESSBOOK":
|
||||
return "".join((item.serialize() for item in self.get_all()))
|
||||
return ""
|
||||
|
||||
|
||||
class BaseStorage:
|
||||
def __init__(self, configuration):
|
||||
|
||||
def __init__(self, configuration: "config.Configuration") -> None:
|
||||
"""Initialize BaseStorage.
|
||||
|
||||
``configuration`` see ``radicale.config`` module.
|
||||
|
@ -256,7 +285,11 @@ class BaseStorage:
|
|||
"""
|
||||
self.configuration = configuration
|
||||
|
||||
def discover(self, path, depth="0"):
|
||||
def discover(
|
||||
self, path: str, depth: str = "0",
|
||||
child_context_manager: Optional[
|
||||
Callable[[str, Optional[str]], ContextManager[None]]] = None,
|
||||
user_groups: Set[str] = set([])) -> Iterable["types.CollectionOrItem"]:
|
||||
"""Discover a list of collections under the given ``path``.
|
||||
|
||||
``path`` is sanitized.
|
||||
|
@ -272,7 +305,8 @@ class BaseStorage:
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def move(self, item, to_collection, to_href):
|
||||
def move(self, item: "radicale_item.Item", to_collection: BaseCollection,
|
||||
to_href: str) -> None:
|
||||
"""Move an object.
|
||||
|
||||
``item`` is the item to move.
|
||||
|
@ -285,7 +319,10 @@ class BaseStorage:
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def create_collection(self, href, items=None, props=None):
|
||||
def create_collection(
|
||||
self, href: str,
|
||||
items: Optional[Iterable["radicale_item.Item"]] = None,
|
||||
props: Optional[Mapping[str, str]] = None) -> BaseCollection:
|
||||
"""Create a collection.
|
||||
|
||||
``href`` is the sanitized path.
|
||||
|
@ -298,15 +335,14 @@ class BaseStorage:
|
|||
|
||||
``props`` are metadata values for the collection.
|
||||
|
||||
``props["tag"]`` is the type of collection (VCALENDAR or
|
||||
VADDRESSBOOK). If the key ``tag`` is missing, it is guessed from the
|
||||
collection.
|
||||
``props["tag"]`` is the type of collection (VCALENDAR or VADDRESSBOOK).
|
||||
If the key ``tag`` is missing, ``items`` is ignored.
|
||||
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@contextlib.contextmanager
|
||||
def acquire_lock(self, mode, user=None):
|
||||
@types.contextmanager
|
||||
def acquire_lock(self, mode: str, user: str = "") -> Iterator[None]:
|
||||
"""Set a context manager to lock the whole storage.
|
||||
|
||||
``mode`` must either be "r" for shared access or "w" for exclusive
|
||||
|
@ -317,6 +353,6 @@ class BaseStorage:
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def verify(self):
|
||||
def verify(self) -> bool:
|
||||
"""Check the storage for errors."""
|
||||
raise NotImplementedError
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -23,75 +24,62 @@ Uses one folder per collection and one file per collection entry.
|
|||
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from itertools import chain
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import ClassVar, Iterator, Optional, Type
|
||||
|
||||
from radicale import pathutils, storage
|
||||
from radicale.storage.multifilesystem.cache import CollectionCacheMixin
|
||||
from radicale import config
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import CollectionBase, StorageBase
|
||||
from radicale.storage.multifilesystem.cache import CollectionPartCache
|
||||
from radicale.storage.multifilesystem.create_collection import \
|
||||
StorageCreateCollectionMixin
|
||||
from radicale.storage.multifilesystem.delete import CollectionDeleteMixin
|
||||
from radicale.storage.multifilesystem.discover import StorageDiscoverMixin
|
||||
from radicale.storage.multifilesystem.get import CollectionGetMixin
|
||||
from radicale.storage.multifilesystem.history import CollectionHistoryMixin
|
||||
from radicale.storage.multifilesystem.lock import (CollectionLockMixin,
|
||||
StorageLockMixin)
|
||||
from radicale.storage.multifilesystem.meta import CollectionMetaMixin
|
||||
from radicale.storage.multifilesystem.move import StorageMoveMixin
|
||||
from radicale.storage.multifilesystem.sync import CollectionSyncMixin
|
||||
from radicale.storage.multifilesystem.upload import CollectionUploadMixin
|
||||
from radicale.storage.multifilesystem.verify import StorageVerifyMixin
|
||||
StoragePartCreateCollection
|
||||
from radicale.storage.multifilesystem.delete import CollectionPartDelete
|
||||
from radicale.storage.multifilesystem.discover import StoragePartDiscover
|
||||
from radicale.storage.multifilesystem.get import CollectionPartGet
|
||||
from radicale.storage.multifilesystem.history import CollectionPartHistory
|
||||
from radicale.storage.multifilesystem.lock import (CollectionPartLock,
|
||||
StoragePartLock)
|
||||
from radicale.storage.multifilesystem.meta import CollectionPartMeta
|
||||
from radicale.storage.multifilesystem.move import StoragePartMove
|
||||
from radicale.storage.multifilesystem.sync import CollectionPartSync
|
||||
from radicale.storage.multifilesystem.upload import CollectionPartUpload
|
||||
from radicale.storage.multifilesystem.verify import StoragePartVerify
|
||||
|
||||
# 999 second, 999 ms, 999 us, 999 ns
|
||||
MTIME_NS_TEST: int = 999999999999
|
||||
|
||||
|
||||
class Collection(
|
||||
CollectionCacheMixin, CollectionDeleteMixin, CollectionGetMixin,
|
||||
CollectionHistoryMixin, CollectionLockMixin, CollectionMetaMixin,
|
||||
CollectionSyncMixin, CollectionUploadMixin, storage.BaseCollection):
|
||||
CollectionPartDelete, CollectionPartMeta, CollectionPartSync,
|
||||
CollectionPartUpload, CollectionPartGet, CollectionPartCache,
|
||||
CollectionPartLock, CollectionPartHistory, CollectionBase):
|
||||
|
||||
def __init__(self, storage_, path, filesystem_path=None):
|
||||
self._storage = storage_
|
||||
folder = self._storage._get_collection_root_folder()
|
||||
# Path should already be sanitized
|
||||
self._path = pathutils.strip_path(path)
|
||||
self._encoding = self._storage.configuration.get("encoding", "stock")
|
||||
if filesystem_path is None:
|
||||
filesystem_path = pathutils.path_to_filesystem(folder, self.path)
|
||||
self._filesystem_path = filesystem_path
|
||||
_etag_cache: Optional[str]
|
||||
|
||||
def __init__(self, storage_: "Storage", path: str,
|
||||
filesystem_path: Optional[str] = None) -> None:
|
||||
super().__init__(storage_, path, filesystem_path)
|
||||
self._etag_cache = None
|
||||
super().__init__()
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
def path(self) -> str:
|
||||
return self._path
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _atomic_write(self, path, mode="w", newline=None):
|
||||
parent_dir, name = os.path.split(path)
|
||||
# Do not use mkstemp because it creates with permissions 0o600
|
||||
with TemporaryDirectory(
|
||||
prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
|
||||
with open(os.path.join(tmp_dir, name), mode, newline=newline,
|
||||
encoding=None if "b" in mode else self._encoding) as tmp:
|
||||
yield tmp
|
||||
tmp.flush()
|
||||
self._storage._fsync(tmp)
|
||||
os.replace(os.path.join(tmp_dir, name), path)
|
||||
self._storage._sync_directory(parent_dir)
|
||||
|
||||
@property
|
||||
def last_modified(self):
|
||||
relevant_files = chain(
|
||||
(self._filesystem_path,),
|
||||
(self._props_path,) if os.path.exists(self._props_path) else (),
|
||||
(os.path.join(self._filesystem_path, h) for h in self._list()))
|
||||
last = max(map(os.path.getmtime, relevant_files))
|
||||
def last_modified(self) -> str:
|
||||
def relevant_files_iter() -> Iterator[str]:
|
||||
yield self._filesystem_path
|
||||
if os.path.exists(self._props_path):
|
||||
yield self._props_path
|
||||
for href in self._list():
|
||||
yield os.path.join(self._filesystem_path, href)
|
||||
last = max(map(os.path.getmtime, relevant_files_iter()))
|
||||
return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last))
|
||||
|
||||
@property
|
||||
def etag(self):
|
||||
def etag(self) -> str:
|
||||
# reuse cached value if the storage is read-only
|
||||
if self._storage._lock.locked == "w" or self._etag_cache is None:
|
||||
self._etag_cache = super().etag
|
||||
|
@ -99,61 +87,109 @@ class Collection(
|
|||
|
||||
|
||||
class Storage(
|
||||
StorageCreateCollectionMixin, StorageDiscoverMixin, StorageLockMixin,
|
||||
StorageMoveMixin, StorageVerifyMixin, storage.BaseStorage):
|
||||
StoragePartCreateCollection, StoragePartLock, StoragePartMove,
|
||||
StoragePartVerify, StoragePartDiscover, StorageBase):
|
||||
|
||||
_collection_class = Collection
|
||||
_collection_class: ClassVar[Type[Collection]] = Collection
|
||||
|
||||
def __init__(self, configuration):
|
||||
def _analyse_mtime(self):
|
||||
# calculate and display mtime resolution
|
||||
path = os.path.join(self._get_collection_root_folder(), ".Radicale.mtime_test")
|
||||
logger.debug("Storage item mtime resolution test with file: %r", path)
|
||||
try:
|
||||
with open(path, "w") as f:
|
||||
f.write("mtime_test")
|
||||
f.close
|
||||
except Exception as e:
|
||||
logger.warning("Storage item mtime resolution test not possible, cannot write file: %r (%s)", path, e)
|
||||
raise
|
||||
# set mtime_ns for tests
|
||||
try:
|
||||
os.utime(path, times=None, ns=(MTIME_NS_TEST, MTIME_NS_TEST))
|
||||
except Exception as e:
|
||||
logger.warning("Storage item mtime resolution test not possible, cannot set utime on file: %r (%s)", path, e)
|
||||
os.remove(path)
|
||||
raise
|
||||
logger.debug("Storage item mtime resoultion test set: %d" % MTIME_NS_TEST)
|
||||
mtime_ns = os.stat(path).st_mtime_ns
|
||||
logger.debug("Storage item mtime resoultion test get: %d" % mtime_ns)
|
||||
# start analysis
|
||||
precision = 1
|
||||
mtime_ns_test = MTIME_NS_TEST
|
||||
while mtime_ns > 0:
|
||||
if mtime_ns == mtime_ns_test:
|
||||
break
|
||||
factor = 2
|
||||
if int(mtime_ns / factor) == int(mtime_ns_test / factor):
|
||||
precision = precision * factor
|
||||
break
|
||||
factor = 5
|
||||
if int(mtime_ns / factor) == int(mtime_ns_test / factor):
|
||||
precision = precision * factor
|
||||
break
|
||||
precision = precision * 10
|
||||
mtime_ns = int(mtime_ns / 10)
|
||||
mtime_ns_test = int(mtime_ns_test / 10)
|
||||
unit = "ns"
|
||||
precision_unit = precision
|
||||
if precision >= 1000000000:
|
||||
precision_unit = int(precision / 1000000000)
|
||||
unit = "s"
|
||||
elif precision >= 1000000:
|
||||
precision_unit = int(precision / 1000000)
|
||||
unit = "ms"
|
||||
elif precision >= 1000:
|
||||
precision_unit = int(precision / 1000)
|
||||
unit = "us"
|
||||
os.remove(path)
|
||||
return (precision, precision_unit, unit)
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
folder = configuration.get("storage", "filesystem_folder")
|
||||
self._makedirs_synced(folder)
|
||||
|
||||
def _get_collection_root_folder(self):
|
||||
filesystem_folder = self.configuration.get(
|
||||
"storage", "filesystem_folder")
|
||||
return os.path.join(filesystem_folder, "collection-root")
|
||||
|
||||
def _fsync(self, f):
|
||||
if self.configuration.get("storage", "_filesystem_fsync"):
|
||||
try:
|
||||
pathutils.fsync(f.fileno())
|
||||
except OSError as e:
|
||||
raise RuntimeError("Fsync'ing file %r failed: %s" %
|
||||
(f.name, e)) from e
|
||||
|
||||
def _sync_directory(self, path):
|
||||
"""Sync directory to disk.
|
||||
|
||||
This only works on POSIX and does nothing on other systems.
|
||||
|
||||
"""
|
||||
if not self.configuration.get("storage", "_filesystem_fsync"):
|
||||
return
|
||||
if os.name == "posix":
|
||||
try:
|
||||
fd = os.open(path, 0)
|
||||
logger.info("Storage location: %r", self._filesystem_folder)
|
||||
if not os.path.exists(self._filesystem_folder):
|
||||
logger.warning("Storage location: %r not existing, create now", self._filesystem_folder)
|
||||
self._makedirs_synced(self._filesystem_folder)
|
||||
logger.info("Storage location subfolder: %r", self._get_collection_root_folder())
|
||||
if not os.path.exists(self._get_collection_root_folder()):
|
||||
logger.warning("Storage location subfolder: %r not existing, create now", self._get_collection_root_folder())
|
||||
self._makedirs_synced(self._get_collection_root_folder())
|
||||
logger.info("Storage cache subfolder usage for 'item': %s", self._use_cache_subfolder_for_item)
|
||||
logger.info("Storage cache subfolder usage for 'history': %s", self._use_cache_subfolder_for_history)
|
||||
logger.info("Storage cache subfolder usage for 'sync-token': %s", self._use_cache_subfolder_for_synctoken)
|
||||
logger.info("Storage cache use mtime and size for 'item': %s", self._use_mtime_and_size_for_item_cache)
|
||||
try:
|
||||
(precision, precision_unit, unit) = self._analyse_mtime()
|
||||
if precision >= 100000000:
|
||||
# >= 100 ms
|
||||
logger.warning("Storage item mtime resolution test result: %d %s (VERY RISKY ON PRODUCTION SYSTEMS)" % (precision_unit, unit))
|
||||
elif precision >= 10000000:
|
||||
# >= 10 ms
|
||||
logger.warning("Storage item mtime resolution test result: %d %s (RISKY ON PRODUCTION SYSTEMS)" % (precision_unit, unit))
|
||||
else:
|
||||
logger.info("Storage item mtime resolution test result: %d %s" % (precision_unit, unit))
|
||||
if self._use_mtime_and_size_for_item_cache is False:
|
||||
logger.info("Storage cache using mtime and size for 'item' may be an option in case of performance issues")
|
||||
except Exception:
|
||||
logger.warning("Storage item mtime resolution test result not successful")
|
||||
logger.debug("Storage cache action logging: %s", self._debug_cache_actions)
|
||||
if self._use_cache_subfolder_for_item is True or self._use_cache_subfolder_for_history is True or self._use_cache_subfolder_for_synctoken is True:
|
||||
logger.info("Storage cache subfolder: %r", self._get_collection_cache_folder())
|
||||
if not os.path.exists(self._get_collection_cache_folder()):
|
||||
logger.warning("Storage cache subfolder: %r not existing, create now", self._get_collection_cache_folder())
|
||||
self._makedirs_synced(self._get_collection_cache_folder())
|
||||
if sys.platform != "win32":
|
||||
if not self._folder_umask:
|
||||
# retrieve current umask by setting a dummy umask
|
||||
current_umask = os.umask(0o0022)
|
||||
logger.info("Storage folder umask (from system): '%04o'", current_umask)
|
||||
# reset to original
|
||||
os.umask(current_umask)
|
||||
else:
|
||||
try:
|
||||
pathutils.fsync(fd)
|
||||
finally:
|
||||
os.close(fd)
|
||||
except OSError as e:
|
||||
raise RuntimeError("Fsync'ing directory %r failed: %s" %
|
||||
(path, e)) from e
|
||||
|
||||
def _makedirs_synced(self, filesystem_path):
|
||||
"""Recursively create a directory and its parents in a sync'ed way.
|
||||
|
||||
This method acts silently when the folder already exists.
|
||||
|
||||
"""
|
||||
if os.path.isdir(filesystem_path):
|
||||
return
|
||||
parent_filesystem_path = os.path.dirname(filesystem_path)
|
||||
# Prevent infinite loop
|
||||
if filesystem_path != parent_filesystem_path:
|
||||
# Create parent dirs recursively
|
||||
self._makedirs_synced(parent_filesystem_path)
|
||||
# Possible race!
|
||||
os.makedirs(filesystem_path, exist_ok=True)
|
||||
self._sync_directory(parent_filesystem_path)
|
||||
config_umask = int(self._folder_umask, 8)
|
||||
except Exception:
|
||||
logger.critical("storage folder umask defined but invalid: '%s'", self._folder_umask)
|
||||
raise
|
||||
logger.info("storage folder umask defined: '%04o'", config_umask)
|
||||
self._config_umask = config_umask
|
||||
|
|
167
radicale/storage/multifilesystem/base.py
Normal file
167
radicale/storage/multifilesystem/base.py
Normal file
|
@ -0,0 +1,167 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import sys
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import IO, AnyStr, ClassVar, Iterator, Optional, Type
|
||||
|
||||
from radicale import config, pathutils, storage, types
|
||||
from radicale.storage import multifilesystem # noqa:F401
|
||||
|
||||
|
||||
class CollectionBase(storage.BaseCollection):
|
||||
|
||||
_storage: "multifilesystem.Storage"
|
||||
_path: str
|
||||
_encoding: str
|
||||
_filesystem_path: str
|
||||
|
||||
def __init__(self, storage_: "multifilesystem.Storage", path: str,
|
||||
filesystem_path: Optional[str] = None) -> None:
|
||||
super().__init__()
|
||||
self._storage = storage_
|
||||
folder = storage_._get_collection_root_folder()
|
||||
# Path should already be sanitized
|
||||
self._path = pathutils.strip_path(path)
|
||||
self._encoding = storage_.configuration.get("encoding", "stock")
|
||||
self._skip_broken_item = storage_.configuration.get("storage", "skip_broken_item")
|
||||
if filesystem_path is None:
|
||||
filesystem_path = pathutils.path_to_filesystem(folder, self.path)
|
||||
self._filesystem_path = filesystem_path
|
||||
|
||||
# TODO: better fix for "mypy"
|
||||
@types.contextmanager # type: ignore
|
||||
def _atomic_write(self, path: str, mode: str = "w",
|
||||
newline: Optional[str] = None) -> Iterator[IO[AnyStr]]:
|
||||
# TODO: Overload with Literal when dropping support for Python < 3.8
|
||||
parent_dir, name = os.path.split(path)
|
||||
# Do not use mkstemp because it creates with permissions 0o600
|
||||
with TemporaryDirectory(
|
||||
prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
|
||||
with open(os.path.join(tmp_dir, name), mode, newline=newline,
|
||||
encoding=None if "b" in mode else self._encoding) as tmp:
|
||||
yield tmp
|
||||
tmp.flush()
|
||||
self._storage._fsync(tmp)
|
||||
os.replace(os.path.join(tmp_dir, name), path)
|
||||
self._storage._sync_directory(parent_dir)
|
||||
|
||||
|
||||
class StorageBase(storage.BaseStorage):
|
||||
|
||||
_collection_class: ClassVar[Type["multifilesystem.Collection"]]
|
||||
|
||||
_filesystem_folder: str
|
||||
_filesystem_cache_folder: str
|
||||
_filesystem_fsync: bool
|
||||
_use_cache_subfolder_for_item: bool
|
||||
_use_cache_subfolder_for_history: bool
|
||||
_use_cache_subfolder_for_synctoken: bool
|
||||
_use_mtime_and_size_for_item_cache: bool
|
||||
_debug_cache_actions: bool
|
||||
_folder_umask: str
|
||||
_config_umask: int
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
self._filesystem_folder = configuration.get(
|
||||
"storage", "filesystem_folder")
|
||||
self._filesystem_fsync = configuration.get(
|
||||
"storage", "_filesystem_fsync")
|
||||
self._filesystem_cache_folder = configuration.get(
|
||||
"storage", "filesystem_cache_folder")
|
||||
self._use_cache_subfolder_for_item = configuration.get(
|
||||
"storage", "use_cache_subfolder_for_item")
|
||||
self._use_cache_subfolder_for_history = configuration.get(
|
||||
"storage", "use_cache_subfolder_for_history")
|
||||
self._use_cache_subfolder_for_synctoken = configuration.get(
|
||||
"storage", "use_cache_subfolder_for_synctoken")
|
||||
self._use_mtime_and_size_for_item_cache = configuration.get(
|
||||
"storage", "use_mtime_and_size_for_item_cache")
|
||||
self._folder_umask = configuration.get(
|
||||
"storage", "folder_umask")
|
||||
self._debug_cache_actions = configuration.get(
|
||||
"logging", "storage_cache_actions_on_debug")
|
||||
|
||||
def _get_collection_root_folder(self) -> str:
|
||||
return os.path.join(self._filesystem_folder, "collection-root")
|
||||
|
||||
def _get_collection_cache_folder(self) -> str:
|
||||
if self._filesystem_cache_folder:
|
||||
return os.path.join(self._filesystem_cache_folder, "collection-cache")
|
||||
else:
|
||||
return os.path.join(self._filesystem_folder, "collection-cache")
|
||||
|
||||
def _get_collection_cache_subfolder(self, path, folder, subfolder) -> str:
|
||||
if (self._use_cache_subfolder_for_item is True) and (subfolder == "item"):
|
||||
path = path.replace(self._get_collection_root_folder(), self._get_collection_cache_folder())
|
||||
elif (self._use_cache_subfolder_for_history is True) and (subfolder == "history"):
|
||||
path = path.replace(self._get_collection_root_folder(), self._get_collection_cache_folder())
|
||||
elif (self._use_cache_subfolder_for_synctoken is True) and (subfolder == "sync-token"):
|
||||
path = path.replace(self._get_collection_root_folder(), self._get_collection_cache_folder())
|
||||
return os.path.join(path, folder, subfolder)
|
||||
|
||||
def _fsync(self, f: IO[AnyStr]) -> None:
|
||||
if self._filesystem_fsync:
|
||||
try:
|
||||
pathutils.fsync(f.fileno())
|
||||
except OSError as e:
|
||||
raise RuntimeError("Fsync'ing file %r failed: %s" %
|
||||
(f.name, e)) from e
|
||||
|
||||
def _sync_directory(self, path: str) -> None:
|
||||
"""Sync directory to disk.
|
||||
|
||||
This only works on POSIX and does nothing on other systems.
|
||||
|
||||
"""
|
||||
if not self._filesystem_fsync:
|
||||
return
|
||||
if sys.platform != "win32":
|
||||
try:
|
||||
fd = os.open(path, 0)
|
||||
try:
|
||||
pathutils.fsync(fd)
|
||||
finally:
|
||||
os.close(fd)
|
||||
except OSError as e:
|
||||
raise RuntimeError("Fsync'ing directory %r failed: %s" %
|
||||
(path, e)) from e
|
||||
|
||||
def _makedirs_synced(self, filesystem_path: str) -> None:
|
||||
"""Recursively create a directory and its parents in a sync'ed way.
|
||||
|
||||
This method acts silently when the folder already exists.
|
||||
|
||||
"""
|
||||
if os.path.isdir(filesystem_path):
|
||||
return
|
||||
parent_filesystem_path = os.path.dirname(filesystem_path)
|
||||
if sys.platform != "win32" and self._folder_umask:
|
||||
oldmask = os.umask(self._config_umask)
|
||||
# Prevent infinite loop
|
||||
if filesystem_path != parent_filesystem_path:
|
||||
# Create parent dirs recursively
|
||||
self._makedirs_synced(parent_filesystem_path)
|
||||
# Possible race!
|
||||
os.makedirs(filesystem_path, exist_ok=True)
|
||||
self._sync_directory(parent_filesystem_path)
|
||||
if sys.platform != "win32" and self._folder_umask:
|
||||
os.umask(oldmask)
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -16,20 +17,32 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import pickle
|
||||
import time
|
||||
from hashlib import sha256
|
||||
from typing import BinaryIO, Iterable, NamedTuple, Optional, cast
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import pathutils, storage
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
|
||||
CacheContent = NamedTuple("CacheContent", [
|
||||
("uid", str), ("etag", str), ("text", str), ("name", str), ("tag", str),
|
||||
("start", int), ("end", int)])
|
||||
|
||||
|
||||
class CollectionCacheMixin:
|
||||
def _clean_cache(self, folder, names, max_age=None):
|
||||
class CollectionPartCache(CollectionBase):
|
||||
|
||||
def _clean_cache(self, folder: str, names: Iterable[str],
|
||||
max_age: int = 0) -> None:
|
||||
"""Delete all ``names`` in ``folder`` that are older than ``max_age``.
|
||||
"""
|
||||
age_limit = time.time() - max_age if max_age is not None else None
|
||||
age_limit: Optional[float] = None
|
||||
if max_age is not None and max_age > 0:
|
||||
age_limit = time.time() - max_age
|
||||
modified = False
|
||||
for name in names:
|
||||
if not pathutils.is_safe_filesystem_path_component(name):
|
||||
|
@ -54,53 +67,64 @@ class CollectionCacheMixin:
|
|||
self._storage._sync_directory(folder)
|
||||
|
||||
@staticmethod
|
||||
def _item_cache_hash(raw_text):
|
||||
def _item_cache_hash(raw_text: bytes) -> str:
|
||||
_hash = sha256()
|
||||
_hash.update(storage.CACHE_VERSION)
|
||||
_hash.update(raw_text)
|
||||
return _hash.hexdigest()
|
||||
|
||||
def _item_cache_content(self, item, cache_hash=None):
|
||||
text = item.serialize()
|
||||
if cache_hash is None:
|
||||
cache_hash = self._item_cache_hash(text.encode(self._encoding))
|
||||
return (cache_hash, item.uid, item.etag, text, item.name,
|
||||
item.component_name, *item.time_range)
|
||||
@staticmethod
|
||||
def _item_cache_mtime_and_size(size: int, raw_text: int) -> str:
|
||||
return str(storage.CACHE_VERSION.decode()) + "size=" + str(size) + ";mtime=" + str(raw_text)
|
||||
|
||||
def _store_item_cache(self, href, item, cache_hash=None):
|
||||
cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
|
||||
"item")
|
||||
content = self._item_cache_content(item, cache_hash)
|
||||
def _item_cache_content(self, item: radicale_item.Item) -> CacheContent:
|
||||
return CacheContent(item.uid, item.etag, item.serialize(), item.name,
|
||||
item.component_name, *item.time_range)
|
||||
|
||||
def _store_item_cache(self, href: str, item: radicale_item.Item,
|
||||
cache_hash: str = "") -> CacheContent:
|
||||
if not cache_hash:
|
||||
if self._storage._use_mtime_and_size_for_item_cache is True:
|
||||
raise RuntimeError("_store_item_cache called without cache_hash is not supported if [storage] use_mtime_and_size_for_item_cache is True")
|
||||
else:
|
||||
cache_hash = self._item_cache_hash(
|
||||
item.serialize().encode(self._encoding))
|
||||
cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
|
||||
content = self._item_cache_content(item)
|
||||
self._storage._makedirs_synced(cache_folder)
|
||||
try:
|
||||
# Race: Other processes might have created and locked the
|
||||
# file.
|
||||
with self._atomic_write(os.path.join(cache_folder, href),
|
||||
"wb") as f:
|
||||
pickle.dump(content, f)
|
||||
except PermissionError:
|
||||
pass
|
||||
# Race: Other processes might have created and locked the file.
|
||||
# TODO: better fix for "mypy"
|
||||
with contextlib.suppress(PermissionError), self._atomic_write( # type: ignore
|
||||
os.path.join(cache_folder, href), "wb") as fo:
|
||||
fb = cast(BinaryIO, fo)
|
||||
pickle.dump((cache_hash, *content), fb)
|
||||
return content
|
||||
|
||||
def _load_item_cache(self, href, input_hash):
|
||||
cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
|
||||
"item")
|
||||
cache_hash = uid = etag = text = name = tag = start = end = None
|
||||
def _load_item_cache(self, href: str, cache_hash: str
|
||||
) -> Optional[CacheContent]:
|
||||
cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
|
||||
path = os.path.join(cache_folder, href)
|
||||
try:
|
||||
with open(os.path.join(cache_folder, href), "rb") as f:
|
||||
cache_hash, *content = pickle.load(f)
|
||||
if cache_hash == input_hash:
|
||||
uid, etag, text, name, tag, start, end = content
|
||||
with open(path, "rb") as f:
|
||||
hash_, *remainder = pickle.load(f)
|
||||
if hash_ and hash_ == cache_hash:
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache match : %r with hash %r", path, cache_hash)
|
||||
return CacheContent(*remainder)
|
||||
else:
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache no match : %r with hash %r", path, cache_hash)
|
||||
except FileNotFoundError:
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache not found : %r with hash %r", path, cache_hash)
|
||||
pass
|
||||
except (pickle.UnpicklingError, ValueError) as e:
|
||||
logger.warning("Failed to load item cache entry %r in %r: %s",
|
||||
href, self.path, e, exc_info=True)
|
||||
return cache_hash, uid, etag, text, name, tag, start, end
|
||||
return None
|
||||
|
||||
def _clean_item_cache(self):
|
||||
cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
|
||||
"item")
|
||||
def _clean_item_cache(self) -> None:
|
||||
cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
|
||||
self._clean_cache(cache_folder, (
|
||||
e.name for e in os.scandir(cache_folder) if not
|
||||
os.path.isfile(os.path.join(self._filesystem_path, e.name))))
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -18,50 +19,63 @@
|
|||
|
||||
import os
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Iterable, Optional, cast
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import pathutils
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import StorageBase
|
||||
|
||||
|
||||
class StorageCreateCollectionMixin:
|
||||
class StoragePartCreateCollection(StorageBase):
|
||||
|
||||
def create_collection(self, href, items=None, props=None):
|
||||
def create_collection(self, href: str,
|
||||
items: Optional[Iterable[radicale_item.Item]] = None,
|
||||
props=None) -> "multifilesystem.Collection":
|
||||
folder = self._get_collection_root_folder()
|
||||
|
||||
# Path should already be sanitized
|
||||
sane_path = pathutils.strip_path(href)
|
||||
filesystem_path = pathutils.path_to_filesystem(folder, sane_path)
|
||||
logger.debug("Create collection: %r" % filesystem_path)
|
||||
|
||||
if not props:
|
||||
self._makedirs_synced(filesystem_path)
|
||||
return self._collection_class(
|
||||
self, pathutils.unstrip_path(sane_path, True))
|
||||
cast(multifilesystem.Storage, self),
|
||||
pathutils.unstrip_path(sane_path, True))
|
||||
|
||||
parent_dir = os.path.dirname(filesystem_path)
|
||||
self._makedirs_synced(parent_dir)
|
||||
|
||||
# Create a temporary directory with an unsafe name
|
||||
with TemporaryDirectory(
|
||||
prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
|
||||
# The temporary directory itself can't be renamed
|
||||
tmp_filesystem_path = os.path.join(tmp_dir, "collection")
|
||||
os.makedirs(tmp_filesystem_path)
|
||||
col = self._collection_class(
|
||||
self, pathutils.unstrip_path(sane_path, True),
|
||||
filesystem_path=tmp_filesystem_path)
|
||||
col.set_meta(props)
|
||||
if items is not None:
|
||||
if props.get("tag") == "VCALENDAR":
|
||||
col._upload_all_nonatomic(items, suffix=".ics")
|
||||
elif props.get("tag") == "VADDRESSBOOK":
|
||||
col._upload_all_nonatomic(items, suffix=".vcf")
|
||||
try:
|
||||
with TemporaryDirectory(prefix=".Radicale.tmp-", dir=parent_dir
|
||||
) as tmp_dir:
|
||||
# The temporary directory itself can't be renamed
|
||||
tmp_filesystem_path = os.path.join(tmp_dir, "collection")
|
||||
os.makedirs(tmp_filesystem_path)
|
||||
col = self._collection_class(
|
||||
cast(multifilesystem.Storage, self),
|
||||
pathutils.unstrip_path(sane_path, True),
|
||||
filesystem_path=tmp_filesystem_path)
|
||||
col.set_meta(props)
|
||||
if items is not None:
|
||||
if props.get("tag") == "VCALENDAR":
|
||||
col._upload_all_nonatomic(items, suffix=".ics")
|
||||
elif props.get("tag") == "VADDRESSBOOK":
|
||||
col._upload_all_nonatomic(items, suffix=".vcf")
|
||||
|
||||
# This operation is not atomic on the filesystem level but it's
|
||||
# very unlikely that one rename operations succeeds while the
|
||||
# other fails or that only one gets written to disk.
|
||||
if os.path.exists(filesystem_path):
|
||||
os.rename(filesystem_path, os.path.join(tmp_dir, "delete"))
|
||||
os.rename(tmp_filesystem_path, filesystem_path)
|
||||
self._sync_directory(parent_dir)
|
||||
if os.path.lexists(filesystem_path):
|
||||
pathutils.rename_exchange(tmp_filesystem_path, filesystem_path)
|
||||
else:
|
||||
os.rename(tmp_filesystem_path, filesystem_path)
|
||||
self._sync_directory(parent_dir)
|
||||
except Exception as e:
|
||||
raise ValueError("Failed to create collection %r as %r %s" %
|
||||
(href, filesystem_path, e)) from e
|
||||
|
||||
return self._collection_class(
|
||||
self, pathutils.unstrip_path(sane_path, True))
|
||||
cast(multifilesystem.Storage, self),
|
||||
pathutils.unstrip_path(sane_path, True))
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -18,20 +19,24 @@
|
|||
|
||||
import os
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Optional
|
||||
|
||||
from radicale import pathutils, storage
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
from radicale.storage.multifilesystem.history import CollectionPartHistory
|
||||
|
||||
|
||||
class CollectionDeleteMixin:
|
||||
def delete(self, href=None):
|
||||
class CollectionPartDelete(CollectionPartHistory, CollectionBase):
|
||||
|
||||
def delete(self, href: Optional[str] = None) -> None:
|
||||
if href is None:
|
||||
# Delete the collection
|
||||
parent_dir = os.path.dirname(self._filesystem_path)
|
||||
try:
|
||||
os.rmdir(self._filesystem_path)
|
||||
except OSError:
|
||||
with TemporaryDirectory(
|
||||
prefix=".Radicale.tmp-", dir=parent_dir) as tmp:
|
||||
with TemporaryDirectory(prefix=".Radicale.tmp-", dir=parent_dir
|
||||
) as tmp:
|
||||
os.rename(self._filesystem_path, os.path.join(
|
||||
tmp, os.path.basename(self._filesystem_path)))
|
||||
self._storage._sync_directory(parent_dir)
|
||||
|
@ -49,3 +54,9 @@ class CollectionDeleteMixin:
|
|||
# Track the change
|
||||
self._update_history_etag(href, None)
|
||||
self._clean_history()
|
||||
# Remove item from cache
|
||||
cache_folder = self._storage._get_collection_cache_subfolder(os.path.dirname(path), ".Radicale.cache", "item")
|
||||
cache_file = os.path.join(cache_folder, os.path.basename(path))
|
||||
if os.path.isfile(cache_file):
|
||||
os.remove(cache_file)
|
||||
self._storage._sync_directory(cache_folder)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
|
@ -16,18 +16,34 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import contextlib
|
||||
import base64
|
||||
import os
|
||||
import posixpath
|
||||
from typing import Callable, ContextManager, Iterator, Optional, Set, cast
|
||||
|
||||
from radicale import pathutils
|
||||
from radicale import pathutils, types
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import StorageBase
|
||||
|
||||
|
||||
class StorageDiscoverMixin:
|
||||
@types.contextmanager
|
||||
def _null_child_context_manager(path: str,
|
||||
href: Optional[str]) -> Iterator[None]:
|
||||
yield
|
||||
|
||||
def discover(self, path, depth="0", child_context_manager=(
|
||||
lambda path, href=None: contextlib.ExitStack())):
|
||||
|
||||
class StoragePartDiscover(StorageBase):
|
||||
|
||||
def discover(
|
||||
self, path: str, depth: str = "0",
|
||||
child_context_manager: Optional[
|
||||
Callable[[str, Optional[str]], ContextManager[None]]] = None,
|
||||
user_groups: Set[str] = set([])
|
||||
) -> Iterator[types.CollectionOrItem]:
|
||||
# assert isinstance(self, multifilesystem.Storage)
|
||||
if child_context_manager is None:
|
||||
child_context_manager = _null_child_context_manager
|
||||
# Path should already be sanitized
|
||||
sane_path = pathutils.strip_path(path)
|
||||
attributes = sane_path.split("/") if sane_path else []
|
||||
|
@ -44,6 +60,7 @@ class StorageDiscoverMixin:
|
|||
return
|
||||
|
||||
# Check if the path exists and if it leads to a collection or an item
|
||||
href: Optional[str]
|
||||
if not os.path.isdir(filesystem_path):
|
||||
if attributes and os.path.isfile(filesystem_path):
|
||||
href = attributes.pop()
|
||||
|
@ -54,10 +71,13 @@ class StorageDiscoverMixin:
|
|||
|
||||
sane_path = "/".join(attributes)
|
||||
collection = self._collection_class(
|
||||
self, pathutils.unstrip_path(sane_path, True))
|
||||
cast(multifilesystem.Storage, self),
|
||||
pathutils.unstrip_path(sane_path, True))
|
||||
|
||||
if href:
|
||||
yield collection._get(href)
|
||||
item = collection._get(href)
|
||||
if item is not None:
|
||||
yield item
|
||||
return
|
||||
|
||||
yield collection
|
||||
|
@ -67,7 +87,9 @@ class StorageDiscoverMixin:
|
|||
|
||||
for href in collection._list():
|
||||
with child_context_manager(sane_path, href):
|
||||
yield collection._get(href)
|
||||
item = collection._get(href)
|
||||
if item is not None:
|
||||
yield item
|
||||
|
||||
for entry in os.scandir(filesystem_path):
|
||||
if not entry.is_dir():
|
||||
|
@ -80,5 +102,16 @@ class StorageDiscoverMixin:
|
|||
continue
|
||||
sane_child_path = posixpath.join(sane_path, href)
|
||||
child_path = pathutils.unstrip_path(sane_child_path, True)
|
||||
with child_context_manager(sane_child_path):
|
||||
yield self._collection_class(self, child_path)
|
||||
with child_context_manager(sane_child_path, None):
|
||||
yield self._collection_class(
|
||||
cast(multifilesystem.Storage, self), child_path)
|
||||
for group in user_groups:
|
||||
href = base64.b64encode(group.encode('utf-8')).decode('ascii')
|
||||
logger.debug(f"searching for group calendar {group} {href}")
|
||||
sane_child_path = f"GROUPS/{href}"
|
||||
if not os.path.isdir(pathutils.path_to_filesystem(folder, sane_child_path)):
|
||||
continue
|
||||
child_path = f"/GROUPS/{href}/"
|
||||
with child_context_manager(sane_child_path, None):
|
||||
yield self._collection_class(
|
||||
cast(multifilesystem.Storage, self), child_path)
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,21 +18,30 @@
|
|||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from typing import Iterable, Iterator, Optional, Tuple
|
||||
|
||||
import vobject
|
||||
|
||||
from radicale import item as radicale_item
|
||||
import radicale.item as radicale_item
|
||||
from radicale import pathutils
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
from radicale.storage.multifilesystem.cache import CollectionPartCache
|
||||
from radicale.storage.multifilesystem.lock import CollectionPartLock
|
||||
|
||||
|
||||
class CollectionGetMixin:
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
class CollectionPartGet(CollectionPartCache, CollectionPartLock,
|
||||
CollectionBase):
|
||||
|
||||
_item_cache_cleaned: bool
|
||||
|
||||
def __init__(self, storage_: "multifilesystem.Storage", path: str,
|
||||
filesystem_path: Optional[str] = None) -> None:
|
||||
super().__init__(storage_, path, filesystem_path)
|
||||
self._item_cache_cleaned = False
|
||||
|
||||
def _list(self):
|
||||
def _list(self) -> Iterator[str]:
|
||||
for entry in os.scandir(self._filesystem_path):
|
||||
if not entry.is_file():
|
||||
continue
|
||||
|
@ -42,13 +52,14 @@ class CollectionGetMixin:
|
|||
continue
|
||||
yield href
|
||||
|
||||
def _get(self, href, verify_href=True):
|
||||
def _get(self, href: str, verify_href: bool = True
|
||||
) -> Optional[radicale_item.Item]:
|
||||
if verify_href:
|
||||
try:
|
||||
if not pathutils.is_safe_filesystem_path_component(href):
|
||||
raise pathutils.UnsafePathError(href)
|
||||
path = pathutils.path_to_filesystem(
|
||||
self._filesystem_path, href)
|
||||
path = pathutils.path_to_filesystem(self._filesystem_path,
|
||||
href)
|
||||
except ValueError as e:
|
||||
logger.debug(
|
||||
"Can't translate name %r safely to filesystem in %r: %s",
|
||||
|
@ -63,56 +74,74 @@ class CollectionGetMixin:
|
|||
return None
|
||||
except PermissionError:
|
||||
# Windows raises ``PermissionError`` when ``path`` is a directory
|
||||
if (os.name == "nt" and
|
||||
if (sys.platform == "win32" and
|
||||
os.path.isdir(path) and os.access(path, os.R_OK)):
|
||||
return None
|
||||
raise
|
||||
# The hash of the component in the file system. This is used to check,
|
||||
# if the entry in the cache is still valid.
|
||||
input_hash = self._item_cache_hash(raw_text)
|
||||
cache_hash, uid, etag, text, name, tag, start, end = \
|
||||
self._load_item_cache(href, input_hash)
|
||||
if input_hash != cache_hash:
|
||||
if self._storage._use_mtime_and_size_for_item_cache is True:
|
||||
cache_hash = self._item_cache_mtime_and_size(os.stat(path).st_size, os.stat(path).st_mtime_ns)
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache check for: %r with mtime and size %r", path, cache_hash)
|
||||
else:
|
||||
cache_hash = self._item_cache_hash(raw_text)
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache check for: %r with hash %r", path, cache_hash)
|
||||
cache_content = self._load_item_cache(href, cache_hash)
|
||||
if cache_content is None:
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache miss for: %r", path)
|
||||
with self._acquire_cache_lock("item"):
|
||||
# Lock the item cache to prevent multpile processes from
|
||||
# Lock the item cache to prevent multiple processes from
|
||||
# generating the same data in parallel.
|
||||
# This improves the performance for multiple requests.
|
||||
if self._storage._lock.locked == "r":
|
||||
# Check if another process created the file in the meantime
|
||||
cache_hash, uid, etag, text, name, tag, start, end = \
|
||||
self._load_item_cache(href, input_hash)
|
||||
if input_hash != cache_hash:
|
||||
cache_content = self._load_item_cache(href, cache_hash)
|
||||
if cache_content is None:
|
||||
try:
|
||||
vobject_items = tuple(vobject.readComponents(
|
||||
raw_text.decode(self._encoding)))
|
||||
vobject_items = radicale_item.read_components(
|
||||
raw_text.decode(self._encoding))
|
||||
radicale_item.check_and_sanitize_items(
|
||||
vobject_items, tag=self.get_meta("tag"))
|
||||
vobject_items, tag=self.tag)
|
||||
vobject_item, = vobject_items
|
||||
temp_item = radicale_item.Item(
|
||||
collection=self, vobject_item=vobject_item)
|
||||
cache_hash, uid, etag, text, name, tag, start, end = \
|
||||
self._store_item_cache(
|
||||
href, temp_item, input_hash)
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache store for: %r", path)
|
||||
cache_content = self._store_item_cache(
|
||||
href, temp_item, cache_hash)
|
||||
except Exception as e:
|
||||
raise RuntimeError("Failed to load item %r in %r: %s" %
|
||||
(href, self.path, e)) from e
|
||||
if self._skip_broken_item:
|
||||
logger.warning("Skip broken item %r in %r: %s", href, self.path, e)
|
||||
return None
|
||||
else:
|
||||
raise RuntimeError("Failed to load item %r in %r: %s" %
|
||||
(href, self.path, e)) from e
|
||||
# Clean cache entries once after the data in the file
|
||||
# system was edited externally.
|
||||
if not self._item_cache_cleaned:
|
||||
self._item_cache_cleaned = True
|
||||
self._clean_item_cache()
|
||||
else:
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache hit for: %r", path)
|
||||
last_modified = time.strftime(
|
||||
"%a, %d %b %Y %H:%M:%S GMT",
|
||||
time.gmtime(os.path.getmtime(path)))
|
||||
# Don't keep reference to ``vobject_item``, because it requires a lot
|
||||
# of memory.
|
||||
return radicale_item.Item(
|
||||
collection=self, href=href, last_modified=last_modified, etag=etag,
|
||||
text=text, uid=uid, name=name, component_name=tag,
|
||||
time_range=(start, end))
|
||||
collection=self, href=href, last_modified=last_modified,
|
||||
etag=cache_content.etag, text=cache_content.text,
|
||||
uid=cache_content.uid, name=cache_content.name,
|
||||
component_name=cache_content.tag,
|
||||
time_range=(cache_content.start, cache_content.end))
|
||||
|
||||
def get_multi(self, hrefs):
|
||||
# It's faster to check for file name collissions here, because
|
||||
def get_multi(self, hrefs: Iterable[str]
|
||||
) -> Iterator[Tuple[str, Optional[radicale_item.Item]]]:
|
||||
# It's faster to check for file name collisions here, because
|
||||
# we only need to call os.listdir once.
|
||||
files = None
|
||||
for href in hrefs:
|
||||
|
@ -123,13 +152,16 @@ class CollectionGetMixin:
|
|||
path = os.path.join(self._filesystem_path, href)
|
||||
if (not pathutils.is_safe_filesystem_path_component(href) or
|
||||
href not in files and os.path.lexists(path)):
|
||||
logger.debug(
|
||||
"Can't translate name safely to filesystem: %r", href)
|
||||
logger.debug("Can't translate name safely to filesystem: %r",
|
||||
href)
|
||||
yield (href, None)
|
||||
else:
|
||||
yield (href, self._get(href, verify_href=False))
|
||||
|
||||
def get_all(self):
|
||||
# We don't need to check for collissions, because the the file names
|
||||
# are from os.listdir.
|
||||
return (self._get(href, verify_href=False) for href in self._list())
|
||||
def get_all(self) -> Iterator[radicale_item.Item]:
|
||||
for href in self._list():
|
||||
# We don't need to check for collisions, because the file names
|
||||
# are from os.listdir.
|
||||
item = self._get(href, verify_href=False)
|
||||
if item is not None:
|
||||
yield item
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
|
@ -17,15 +17,28 @@
|
|||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import binascii
|
||||
import contextlib
|
||||
import os
|
||||
import pickle
|
||||
from typing import BinaryIO, Optional, cast
|
||||
|
||||
from radicale import item as radicale_item
|
||||
import radicale.item as radicale_item
|
||||
from radicale import pathutils
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
|
||||
|
||||
class CollectionHistoryMixin:
|
||||
class CollectionPartHistory(CollectionBase):
|
||||
|
||||
_max_sync_token_age: int
|
||||
|
||||
def __init__(self, storage_: "multifilesystem.Storage", path: str,
|
||||
filesystem_path: Optional[str] = None) -> None:
|
||||
super().__init__(storage_, path, filesystem_path)
|
||||
self._max_sync_token_age = storage_.configuration.get(
|
||||
"storage", "max_sync_token_age")
|
||||
|
||||
def _update_history_etag(self, href, item):
|
||||
"""Updates and retrieves the history etag from the history cache.
|
||||
|
||||
|
@ -34,8 +47,7 @@ class CollectionHistoryMixin:
|
|||
string for deleted items) and a history etag, which is a hash over
|
||||
the previous history etag and the etag separated by "/".
|
||||
"""
|
||||
history_folder = os.path.join(self._filesystem_path,
|
||||
".Radicale.cache", "history")
|
||||
history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
|
||||
try:
|
||||
with open(os.path.join(history_folder, href), "rb") as f:
|
||||
cache_etag, history_etag = pickle.load(f)
|
||||
|
@ -53,21 +65,18 @@ class CollectionHistoryMixin:
|
|||
self._storage._makedirs_synced(history_folder)
|
||||
history_etag = radicale_item.get_etag(
|
||||
history_etag + "/" + etag).strip("\"")
|
||||
try:
|
||||
# Race: Other processes might have created and locked the file.
|
||||
with self._atomic_write(os.path.join(history_folder, href),
|
||||
"wb") as f:
|
||||
pickle.dump([etag, history_etag], f)
|
||||
except PermissionError:
|
||||
pass
|
||||
# Race: Other processes might have created and locked the file.
|
||||
with contextlib.suppress(PermissionError), self._atomic_write(
|
||||
os.path.join(history_folder, href), "wb") as fo:
|
||||
fb = cast(BinaryIO, fo)
|
||||
pickle.dump([etag, history_etag], fb)
|
||||
return history_etag
|
||||
|
||||
def _get_deleted_history_hrefs(self):
|
||||
"""Returns the hrefs of all deleted items that are still in the
|
||||
history cache."""
|
||||
history_folder = os.path.join(self._filesystem_path,
|
||||
".Radicale.cache", "history")
|
||||
try:
|
||||
history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
|
||||
with contextlib.suppress(FileNotFoundError):
|
||||
for entry in os.scandir(history_folder):
|
||||
href = entry.name
|
||||
if not pathutils.is_safe_filesystem_path_component(href):
|
||||
|
@ -75,13 +84,9 @@ class CollectionHistoryMixin:
|
|||
if os.path.isfile(os.path.join(self._filesystem_path, href)):
|
||||
continue
|
||||
yield href
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
def _clean_history(self):
|
||||
# Delete all expired history entries of deleted items.
|
||||
history_folder = os.path.join(self._filesystem_path,
|
||||
".Radicale.cache", "history")
|
||||
history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
|
||||
self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
|
||||
max_age=self._storage.configuration.get(
|
||||
"storage", "max_sync_token_age"))
|
||||
max_age=self._max_sync_token_age)
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2023-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -20,52 +21,101 @@ import contextlib
|
|||
import logging
|
||||
import os
|
||||
import shlex
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Iterator
|
||||
|
||||
from radicale import pathutils
|
||||
from radicale import config, pathutils, types
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import CollectionBase, StorageBase
|
||||
|
||||
|
||||
class CollectionLockMixin:
|
||||
def _acquire_cache_lock(self, ns=""):
|
||||
class CollectionPartLock(CollectionBase):
|
||||
|
||||
@types.contextmanager
|
||||
def _acquire_cache_lock(self, ns: str = "") -> Iterator[None]:
|
||||
if self._storage._lock.locked == "w":
|
||||
return contextlib.ExitStack()
|
||||
cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache")
|
||||
yield
|
||||
return
|
||||
cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", ns)
|
||||
self._storage._makedirs_synced(cache_folder)
|
||||
lock_path = os.path.join(cache_folder,
|
||||
".Radicale.lock" + (".%s" % ns if ns else ""))
|
||||
logger.debug("Lock file (CollectionPartLock): %r" % lock_path)
|
||||
lock = pathutils.RwLock(lock_path)
|
||||
return lock.acquire("w")
|
||||
with lock.acquire("w"):
|
||||
yield
|
||||
|
||||
|
||||
class StorageLockMixin:
|
||||
class StoragePartLock(StorageBase):
|
||||
|
||||
def __init__(self, configuration):
|
||||
_lock: pathutils.RwLock
|
||||
_hook: str
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
folder = self.configuration.get("storage", "filesystem_folder")
|
||||
lock_path = os.path.join(folder, ".Radicale.lock")
|
||||
lock_path = os.path.join(self._filesystem_folder, ".Radicale.lock")
|
||||
logger.debug("Lock file (StoragePartLock): %r" % lock_path)
|
||||
self._lock = pathutils.RwLock(lock_path)
|
||||
self._hook = configuration.get("storage", "hook")
|
||||
|
||||
@contextlib.contextmanager
|
||||
def acquire_lock(self, mode, user=None):
|
||||
@types.contextmanager
|
||||
def acquire_lock(self, mode: str, user: str = "", *args, **kwargs) -> Iterator[None]:
|
||||
with self._lock.acquire(mode):
|
||||
yield
|
||||
# execute hook
|
||||
hook = self.configuration.get("storage", "hook")
|
||||
if mode == "w" and hook:
|
||||
folder = self.configuration.get("storage", "filesystem_folder")
|
||||
logger.debug("Running hook")
|
||||
if mode == "w" and self._hook:
|
||||
debug = logger.isEnabledFor(logging.DEBUG)
|
||||
p = subprocess.Popen(
|
||||
hook % {"user": shlex.quote(user or "Anonymous")},
|
||||
stdin=subprocess.DEVNULL,
|
||||
stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
|
||||
stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
|
||||
shell=True, universal_newlines=True, cwd=folder)
|
||||
stdout_data, stderr_data = p.communicate()
|
||||
# Use new process group for child to prevent terminals
|
||||
# from sending SIGINT etc.
|
||||
preexec_fn = None
|
||||
creationflags = 0
|
||||
if sys.platform == "win32":
|
||||
creationflags |= subprocess.CREATE_NEW_PROCESS_GROUP
|
||||
else:
|
||||
# Process group is also used to identify child processes
|
||||
preexec_fn = os.setpgrp
|
||||
# optional argument
|
||||
path = kwargs.get('path', "")
|
||||
try:
|
||||
command = self._hook % {
|
||||
"path": shlex.quote(self._get_collection_root_folder() + path),
|
||||
"cwd": shlex.quote(self._filesystem_folder),
|
||||
"user": shlex.quote(user or "Anonymous")}
|
||||
except KeyError as e:
|
||||
logger.error("Storage hook contains not supported placeholder %s (skip execution of: %r)" % (e, self._hook))
|
||||
return
|
||||
|
||||
logger.debug("Executing storage hook: '%s'" % command)
|
||||
try:
|
||||
p = subprocess.Popen(
|
||||
command, stdin=subprocess.DEVNULL,
|
||||
stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
|
||||
stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
|
||||
shell=True, universal_newlines=True, preexec_fn=preexec_fn,
|
||||
cwd=self._filesystem_folder, creationflags=creationflags)
|
||||
except Exception as e:
|
||||
logger.error("Execution of storage hook not successful on 'Popen': %s" % e)
|
||||
return
|
||||
logger.debug("Executing storage hook started 'Popen'")
|
||||
try:
|
||||
stdout_data, stderr_data = p.communicate()
|
||||
except BaseException as e: # e.g. KeyboardInterrupt or SystemExit
|
||||
logger.error("Execution of storage hook not successful on 'communicate': %s" % e)
|
||||
p.kill()
|
||||
p.wait()
|
||||
return
|
||||
finally:
|
||||
if sys.platform != "win32":
|
||||
# Kill remaining children identified by process group
|
||||
with contextlib.suppress(OSError):
|
||||
os.killpg(p.pid, signal.SIGKILL)
|
||||
logger.debug("Executing storage hook finished")
|
||||
if stdout_data:
|
||||
logger.debug("Captured stdout hook:\n%s", stdout_data)
|
||||
logger.debug("Captured stdout from storage hook:\n%s", stdout_data)
|
||||
if stderr_data:
|
||||
logger.debug("Captured stderr hook:\n%s", stderr_data)
|
||||
logger.debug("Captured stderr from storage hook:\n%s", stderr_data)
|
||||
if p.returncode != 0:
|
||||
raise subprocess.CalledProcessError(p.returncode, p.args)
|
||||
logger.error("Execution of storage hook not successful: %s" % subprocess.CalledProcessError(p.returncode, p.args))
|
||||
return
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -18,32 +19,53 @@
|
|||
|
||||
import json
|
||||
import os
|
||||
from typing import Mapping, Optional, TextIO, Union, cast, overload
|
||||
|
||||
from radicale import item as radicale_item
|
||||
import radicale.item as radicale_item
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
|
||||
|
||||
class CollectionMetaMixin:
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
class CollectionPartMeta(CollectionBase):
|
||||
|
||||
_meta_cache: Optional[Mapping[str, str]]
|
||||
_props_path: str
|
||||
|
||||
def __init__(self, storage_: "multifilesystem.Storage", path: str,
|
||||
filesystem_path: Optional[str] = None) -> None:
|
||||
super().__init__(storage_, path, filesystem_path)
|
||||
self._meta_cache = None
|
||||
self._props_path = os.path.join(
|
||||
self._filesystem_path, ".Radicale.props")
|
||||
|
||||
def get_meta(self, key=None):
|
||||
@overload
|
||||
def get_meta(self, key: None = None) -> Mapping[str, str]: ...
|
||||
|
||||
@overload
|
||||
def get_meta(self, key: str) -> Optional[str]: ...
|
||||
|
||||
def get_meta(self, key: Optional[str] = None) -> Union[Mapping[str, str],
|
||||
Optional[str]]:
|
||||
# reuse cached value if the storage is read-only
|
||||
if self._storage._lock.locked == "w" or self._meta_cache is None:
|
||||
try:
|
||||
try:
|
||||
with open(self._props_path, encoding=self._encoding) as f:
|
||||
self._meta_cache = json.load(f)
|
||||
temp_meta = json.load(f)
|
||||
except FileNotFoundError:
|
||||
self._meta_cache = {}
|
||||
radicale_item.check_and_sanitize_props(self._meta_cache)
|
||||
temp_meta = {}
|
||||
self._meta_cache = radicale_item.check_and_sanitize_props(
|
||||
temp_meta)
|
||||
except ValueError as e:
|
||||
raise RuntimeError("Failed to load properties of collection "
|
||||
"%r: %s" % (self.path, e)) from e
|
||||
return self._meta_cache.get(key) if key else self._meta_cache
|
||||
return self._meta_cache if key is None else self._meta_cache.get(key)
|
||||
|
||||
def set_meta(self, props):
|
||||
with self._atomic_write(self._props_path, "w") as f:
|
||||
json.dump(props, f, sort_keys=True)
|
||||
def set_meta(self, props: Mapping[str, str]) -> None:
|
||||
# TODO: better fix for "mypy"
|
||||
try:
|
||||
with self._atomic_write(self._props_path, "w") as fo: # type: ignore
|
||||
f = cast(TextIO, fo)
|
||||
json.dump(props, f, sort_keys=True)
|
||||
except OSError as e:
|
||||
raise ValueError("Failed to write meta data %r %s" % (self._props_path, e)) from e
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -18,33 +19,44 @@
|
|||
|
||||
import os
|
||||
|
||||
from radicale import pathutils
|
||||
from radicale import item as radicale_item
|
||||
from radicale import pathutils, storage
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import StorageBase
|
||||
|
||||
|
||||
class StorageMoveMixin:
|
||||
class StoragePartMove(StorageBase):
|
||||
|
||||
def move(self, item, to_collection, to_href):
|
||||
def move(self, item: radicale_item.Item,
|
||||
to_collection: storage.BaseCollection, to_href: str) -> None:
|
||||
if not pathutils.is_safe_filesystem_path_component(to_href):
|
||||
raise pathutils.UnsafePathError(to_href)
|
||||
os.replace(
|
||||
pathutils.path_to_filesystem(
|
||||
item.collection._filesystem_path, item.href),
|
||||
pathutils.path_to_filesystem(
|
||||
to_collection._filesystem_path, to_href))
|
||||
assert isinstance(to_collection, multifilesystem.Collection)
|
||||
assert isinstance(item.collection, multifilesystem.Collection)
|
||||
assert item.href
|
||||
move_from = pathutils.path_to_filesystem(item.collection._filesystem_path, item.href)
|
||||
move_to = pathutils.path_to_filesystem(to_collection._filesystem_path, to_href)
|
||||
try:
|
||||
os.replace(move_from, move_to)
|
||||
except OSError as e:
|
||||
raise ValueError("Failed to move file %r => %r %s" % (move_from, move_to, e)) from e
|
||||
self._sync_directory(to_collection._filesystem_path)
|
||||
if item.collection._filesystem_path != to_collection._filesystem_path:
|
||||
self._sync_directory(item.collection._filesystem_path)
|
||||
# Move the item cache entry
|
||||
cache_folder = os.path.join(item.collection._filesystem_path,
|
||||
".Radicale.cache", "item")
|
||||
to_cache_folder = os.path.join(to_collection._filesystem_path,
|
||||
".Radicale.cache", "item")
|
||||
cache_folder = self._get_collection_cache_subfolder(item.collection._filesystem_path, ".Radicale.cache", "item")
|
||||
to_cache_folder = self._get_collection_cache_subfolder(to_collection._filesystem_path, ".Radicale.cache", "item")
|
||||
self._makedirs_synced(to_cache_folder)
|
||||
move_from = os.path.join(cache_folder, item.href)
|
||||
move_to = os.path.join(to_cache_folder, to_href)
|
||||
try:
|
||||
os.replace(os.path.join(cache_folder, item.href),
|
||||
os.path.join(to_cache_folder, to_href))
|
||||
os.replace(move_from, move_to)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
except OSError as e:
|
||||
logger.error("Failed to move cache file %r => %r %s" % (move_from, move_to, e))
|
||||
pass
|
||||
else:
|
||||
self._makedirs_synced(to_cache_folder)
|
||||
if cache_folder != to_cache_folder:
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
|
@ -16,20 +16,27 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import contextlib
|
||||
import itertools
|
||||
import os
|
||||
import pickle
|
||||
from hashlib import sha256
|
||||
from typing import BinaryIO, Iterable, Tuple, cast
|
||||
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
from radicale.storage.multifilesystem.cache import CollectionPartCache
|
||||
from radicale.storage.multifilesystem.history import CollectionPartHistory
|
||||
|
||||
|
||||
class CollectionSyncMixin:
|
||||
def sync(self, old_token=None):
|
||||
class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
|
||||
CollectionBase):
|
||||
|
||||
def sync(self, old_token: str = "") -> Tuple[str, Iterable[str]]:
|
||||
# The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME
|
||||
# where TOKEN_NAME is the sha256 hash of all history etags of present
|
||||
# and past items of the collection.
|
||||
def check_token_name(token_name):
|
||||
def check_token_name(token_name: str) -> bool:
|
||||
if len(token_name) != 64:
|
||||
return False
|
||||
for c in token_name:
|
||||
|
@ -37,7 +44,7 @@ class CollectionSyncMixin:
|
|||
return False
|
||||
return True
|
||||
|
||||
old_token_name = None
|
||||
old_token_name = ""
|
||||
if old_token:
|
||||
# Extract the token name from the sync token
|
||||
if not old_token.startswith("http://radicale.org/ns/sync/"):
|
||||
|
@ -60,8 +67,7 @@ class CollectionSyncMixin:
|
|||
if token_name == old_token_name:
|
||||
# Nothing changed
|
||||
return token, ()
|
||||
token_folder = os.path.join(self._filesystem_path,
|
||||
".Radicale.cache", "sync-token")
|
||||
token_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "sync-token")
|
||||
token_path = os.path.join(token_folder, token_name)
|
||||
old_state = {}
|
||||
if old_token_name:
|
||||
|
@ -78,10 +84,9 @@ class CollectionSyncMixin:
|
|||
"Failed to load stored sync token %r in %r: %s",
|
||||
old_token_name, self.path, e, exc_info=True)
|
||||
# Delete the damaged file
|
||||
try:
|
||||
with contextlib.suppress(FileNotFoundError,
|
||||
PermissionError):
|
||||
os.remove(old_token_path)
|
||||
except (FileNotFoundError, PermissionError):
|
||||
pass
|
||||
raise ValueError("Token not found: %r" % old_token)
|
||||
# write the new token state or update the modification time of
|
||||
# existing token state
|
||||
|
@ -89,23 +94,22 @@ class CollectionSyncMixin:
|
|||
self._storage._makedirs_synced(token_folder)
|
||||
try:
|
||||
# Race: Other processes might have created and locked the file.
|
||||
with self._atomic_write(token_path, "wb") as f:
|
||||
pickle.dump(state, f)
|
||||
# TODO: better fix for "mypy"
|
||||
with self._atomic_write(token_path, "wb") as fo: # type: ignore
|
||||
fb = cast(BinaryIO, fo)
|
||||
pickle.dump(state, fb)
|
||||
except PermissionError:
|
||||
pass
|
||||
else:
|
||||
# clean up old sync tokens and item cache
|
||||
self._clean_cache(token_folder, os.listdir(token_folder),
|
||||
max_age=self._storage.configuration.get(
|
||||
"storage", "max_sync_token_age"))
|
||||
max_age=self._max_sync_token_age)
|
||||
self._clean_history()
|
||||
else:
|
||||
# Try to update the modification time
|
||||
try:
|
||||
with contextlib.suppress(FileNotFoundError):
|
||||
# Race: Another process might have deleted the file.
|
||||
os.utime(token_path)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
changes = []
|
||||
# Find all new, changed and deleted (that are still in the item cache)
|
||||
# items
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -16,89 +17,122 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import os
|
||||
import pickle
|
||||
import sys
|
||||
from typing import Iterable, Iterator, TextIO, cast
|
||||
|
||||
from radicale import item as radicale_item
|
||||
import radicale.item as radicale_item
|
||||
from radicale import pathutils
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
from radicale.storage.multifilesystem.cache import CollectionPartCache
|
||||
from radicale.storage.multifilesystem.get import CollectionPartGet
|
||||
from radicale.storage.multifilesystem.history import CollectionPartHistory
|
||||
|
||||
|
||||
class CollectionUploadMixin:
|
||||
def upload(self, href, item):
|
||||
class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
|
||||
CollectionPartHistory, CollectionBase):
|
||||
|
||||
def upload(self, href: str, item: radicale_item.Item
|
||||
) -> radicale_item.Item:
|
||||
if not pathutils.is_safe_filesystem_path_component(href):
|
||||
raise pathutils.UnsafePathError(href)
|
||||
path = pathutils.path_to_filesystem(self._filesystem_path, href)
|
||||
try:
|
||||
self._store_item_cache(href, item)
|
||||
with self._atomic_write(path, newline="") as fo: # type: ignore
|
||||
f = cast(TextIO, fo)
|
||||
f.write(item.serialize())
|
||||
except Exception as e:
|
||||
raise ValueError("Failed to store item %r in collection %r: %s" %
|
||||
(href, self.path, e)) from e
|
||||
path = pathutils.path_to_filesystem(self._filesystem_path, href)
|
||||
with self._atomic_write(path, newline="") as fd:
|
||||
fd.write(item.serialize())
|
||||
# Clean the cache after the actual item is stored, or the cache entry
|
||||
# will be removed again.
|
||||
self._clean_item_cache()
|
||||
# store cache file
|
||||
if self._storage._use_mtime_and_size_for_item_cache is True:
|
||||
cache_hash = self._item_cache_mtime_and_size(os.stat(path).st_size, os.stat(path).st_mtime_ns)
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache store for: %r with mtime and size %r", path, cache_hash)
|
||||
else:
|
||||
cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache store for: %r with hash %r", path, cache_hash)
|
||||
try:
|
||||
self._store_item_cache(href, item, cache_hash)
|
||||
except Exception as e:
|
||||
raise ValueError("Failed to store item cache of %r in collection %r: %s" %
|
||||
(href, self.path, e)) from e
|
||||
# Track the change
|
||||
self._update_history_etag(href, item)
|
||||
self._clean_history()
|
||||
return self._get(href, verify_href=False)
|
||||
uploaded_item = self._get(href, verify_href=False)
|
||||
if uploaded_item is None:
|
||||
raise RuntimeError("Storage modified externally")
|
||||
return uploaded_item
|
||||
|
||||
def _upload_all_nonatomic(self, items, suffix=""):
|
||||
"""Upload a new set of items.
|
||||
def _upload_all_nonatomic(self, items: Iterable[radicale_item.Item],
|
||||
suffix: str = "") -> None:
|
||||
"""Upload a new set of items non-atomic"""
|
||||
def is_safe_free_href(href: str) -> bool:
|
||||
return (pathutils.is_safe_filesystem_path_component(href) and
|
||||
not os.path.lexists(
|
||||
os.path.join(self._filesystem_path, href)))
|
||||
|
||||
This takes a list of vobject items and
|
||||
uploads them nonatomic and without existence checks.
|
||||
def get_safe_free_hrefs(uid: str) -> Iterator[str]:
|
||||
for href in [uid if uid.lower().endswith(suffix.lower())
|
||||
else uid + suffix,
|
||||
radicale_item.get_etag(uid).strip('"') + suffix]:
|
||||
if is_safe_free_href(href):
|
||||
yield href
|
||||
yield radicale_item.find_available_uid(
|
||||
lambda href: not is_safe_free_href(href), suffix)
|
||||
|
||||
"""
|
||||
cache_folder = os.path.join(self._filesystem_path,
|
||||
".Radicale.cache", "item")
|
||||
cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
|
||||
self._storage._makedirs_synced(cache_folder)
|
||||
hrefs = set()
|
||||
for item in items:
|
||||
uid = item.uid
|
||||
logger.debug("Store item from list with uid: '%s'" % uid)
|
||||
cache_content = self._item_cache_content(item)
|
||||
for href in get_safe_free_hrefs(uid):
|
||||
path = os.path.join(self._filesystem_path, href)
|
||||
try:
|
||||
f = open(path,
|
||||
"w", newline="", encoding=self._encoding)
|
||||
except OSError as e:
|
||||
if (sys.platform != "win32" and e.errno == errno.EINVAL or
|
||||
sys.platform == "win32" and e.errno == 123):
|
||||
# not a valid filename
|
||||
continue
|
||||
raise
|
||||
break
|
||||
else:
|
||||
raise RuntimeError("No href found for item %r in temporary "
|
||||
"collection %r" % (uid, self.path))
|
||||
|
||||
try:
|
||||
cache_content = self._item_cache_content(item)
|
||||
with f:
|
||||
f.write(item.serialize())
|
||||
f.flush()
|
||||
self._storage._fsync(f)
|
||||
except Exception as e:
|
||||
raise ValueError(
|
||||
"Failed to store item %r in temporary collection %r: %s" %
|
||||
(uid, self.path, e)) from e
|
||||
href_candidate_funtions = []
|
||||
if os.name in ("nt", "posix"):
|
||||
href_candidate_funtions.append(
|
||||
lambda: uid if uid.lower().endswith(suffix.lower())
|
||||
else uid + suffix)
|
||||
href_candidate_funtions.extend((
|
||||
lambda: radicale_item.get_etag(uid).strip('"') + suffix,
|
||||
lambda: radicale_item.find_available_uid(hrefs.__contains__,
|
||||
suffix)))
|
||||
href = f = None
|
||||
while href_candidate_funtions:
|
||||
href = href_candidate_funtions.pop(0)()
|
||||
if href in hrefs:
|
||||
continue
|
||||
if not pathutils.is_safe_filesystem_path_component(href):
|
||||
if not href_candidate_funtions:
|
||||
raise pathutils.UnsafePathError(href)
|
||||
continue
|
||||
try:
|
||||
f = open(pathutils.path_to_filesystem(
|
||||
self._filesystem_path, href),
|
||||
"w", newline="", encoding=self._encoding)
|
||||
break
|
||||
except OSError as e:
|
||||
if href_candidate_funtions and (
|
||||
os.name == "posix" and e.errno == 22 or
|
||||
os.name == "nt" and e.errno == 123):
|
||||
continue
|
||||
raise
|
||||
with f:
|
||||
f.write(item.serialize())
|
||||
f.flush()
|
||||
self._storage._fsync(f)
|
||||
hrefs.add(href)
|
||||
with open(os.path.join(cache_folder, href), "wb") as f:
|
||||
pickle.dump(cache_content, f)
|
||||
f.flush()
|
||||
self._storage._fsync(f)
|
||||
|
||||
# store cache file
|
||||
if self._storage._use_mtime_and_size_for_item_cache is True:
|
||||
cache_hash = self._item_cache_mtime_and_size(os.stat(path).st_size, os.stat(path).st_mtime_ns)
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache store for: %r with mtime and size %r", path, cache_hash)
|
||||
else:
|
||||
cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache store for: %r with hash %r", path, cache_hash)
|
||||
path_cache = os.path.join(cache_folder, href)
|
||||
if self._storage._debug_cache_actions is True:
|
||||
logger.debug("Item cache store into: %r", path_cache)
|
||||
with open(os.path.join(cache_folder, href), "wb") as fb:
|
||||
pickle.dump((cache_hash, *cache_content), fb)
|
||||
fb.flush()
|
||||
self._storage._fsync(fb)
|
||||
self._storage._sync_directory(cache_folder)
|
||||
self._storage._sync_directory(self._filesystem_path)
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -16,23 +17,29 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import contextlib
|
||||
from typing import Iterator, Optional, Set
|
||||
|
||||
from radicale import pathutils, storage
|
||||
from radicale import pathutils, storage, types
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import StorageBase
|
||||
from radicale.storage.multifilesystem.discover import StoragePartDiscover
|
||||
|
||||
|
||||
class StorageVerifyMixin:
|
||||
def verify(self):
|
||||
class StoragePartVerify(StoragePartDiscover, StorageBase):
|
||||
|
||||
def verify(self) -> bool:
|
||||
item_errors = collection_errors = 0
|
||||
logger.info("Disable fsync during storage verification")
|
||||
self._filesystem_fsync = False
|
||||
|
||||
@contextlib.contextmanager
|
||||
def exception_cm(sane_path, href=None):
|
||||
@types.contextmanager
|
||||
def exception_cm(sane_path: str, href: Optional[str]
|
||||
) -> Iterator[None]:
|
||||
nonlocal item_errors, collection_errors
|
||||
try:
|
||||
yield
|
||||
except Exception as e:
|
||||
if href:
|
||||
if href is not None:
|
||||
item_errors += 1
|
||||
name = "item %r in %r" % (href, sane_path)
|
||||
else:
|
||||
|
@ -44,16 +51,22 @@ class StorageVerifyMixin:
|
|||
while remaining_sane_paths:
|
||||
sane_path = remaining_sane_paths.pop(0)
|
||||
path = pathutils.unstrip_path(sane_path, True)
|
||||
logger.debug("Verifying collection %r", sane_path)
|
||||
with exception_cm(sane_path):
|
||||
logger.info("Verifying path %r", sane_path)
|
||||
count = 0
|
||||
is_collection = True
|
||||
with exception_cm(sane_path, None):
|
||||
saved_item_errors = item_errors
|
||||
collection = None
|
||||
uids = set()
|
||||
collection: Optional[storage.BaseCollection] = None
|
||||
uids: Set[str] = set()
|
||||
has_child_collections = False
|
||||
for item in self.discover(path, "1", exception_cm):
|
||||
if not collection:
|
||||
assert isinstance(item, storage.BaseCollection)
|
||||
collection = item
|
||||
collection.get_meta()
|
||||
if not collection.tag:
|
||||
is_collection = False
|
||||
logger.info("Skip !collection %r", sane_path)
|
||||
continue
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
has_child_collections = True
|
||||
|
@ -63,12 +76,17 @@ class StorageVerifyMixin:
|
|||
item.href, sane_path, item.uid)
|
||||
else:
|
||||
uids.add(item.uid)
|
||||
logger.debug("Verified item %r in %r",
|
||||
item.href, sane_path)
|
||||
count += 1
|
||||
logger.debug("Verified in %r item %r",
|
||||
sane_path, item.href)
|
||||
assert collection
|
||||
if item_errors == saved_item_errors:
|
||||
collection.sync()
|
||||
if has_child_collections and collection.get_meta("tag"):
|
||||
if is_collection:
|
||||
collection.sync()
|
||||
if has_child_collections and collection.tag:
|
||||
logger.error("Invalid collection %r: %r must not have "
|
||||
"child collections", sane_path,
|
||||
collection.get_meta("tag"))
|
||||
collection.tag)
|
||||
if is_collection:
|
||||
logger.info("Verified collect %r (items: %d)", sane_path, count)
|
||||
return item_errors == 0 and collection_errors == 0
|
||||
|
|
114
radicale/storage/multifilesystem_nolock.py
Normal file
114
radicale/storage/multifilesystem_nolock.py
Normal file
|
@ -0,0 +1,114 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2021 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
The multifilesystem backend without file-based locking.
|
||||
"""
|
||||
|
||||
import threading
|
||||
from collections import deque
|
||||
from typing import ClassVar, Deque, Dict, Hashable, Iterator, Type
|
||||
|
||||
from radicale import config, pathutils, types
|
||||
from radicale.storage import multifilesystem
|
||||
|
||||
|
||||
class RwLock(pathutils.RwLock):
|
||||
|
||||
_cond: threading.Condition
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__("")
|
||||
self._cond = threading.Condition(self._lock)
|
||||
|
||||
@types.contextmanager
|
||||
def acquire(self, mode: str, user: str = "") -> Iterator[None]:
|
||||
if mode not in "rw":
|
||||
raise ValueError("Invalid mode: %r" % mode)
|
||||
with self._cond:
|
||||
self._cond.wait_for(lambda: not self._writer and (
|
||||
mode == "r" or self._readers == 0))
|
||||
if mode == "r":
|
||||
self._readers += 1
|
||||
else:
|
||||
self._writer = True
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
with self._cond:
|
||||
if mode == "r":
|
||||
self._readers -= 1
|
||||
self._writer = False
|
||||
if self._readers == 0:
|
||||
self._cond.notify_all()
|
||||
|
||||
|
||||
class LockDict:
|
||||
|
||||
_lock: threading.Lock
|
||||
_dict: Dict[Hashable, Deque[threading.Lock]]
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._lock = threading.Lock()
|
||||
self._dict = {}
|
||||
|
||||
@types.contextmanager
|
||||
def acquire(self, key: Hashable) -> Iterator[None]:
|
||||
with self._lock:
|
||||
waiters = self._dict.get(key)
|
||||
if waiters is None:
|
||||
self._dict[key] = waiters = deque()
|
||||
wait = bool(waiters)
|
||||
waiter = threading.Lock()
|
||||
waiter.acquire()
|
||||
waiters.append(waiter)
|
||||
if wait:
|
||||
waiter.acquire()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
with self._lock:
|
||||
assert waiters[0] is waiter and self._dict[key] is waiters
|
||||
del waiters[0]
|
||||
if waiters:
|
||||
waiters[0].release()
|
||||
else:
|
||||
del self._dict[key]
|
||||
|
||||
|
||||
class Collection(multifilesystem.Collection):
|
||||
|
||||
_storage: "Storage"
|
||||
|
||||
@types.contextmanager
|
||||
def _acquire_cache_lock(self, ns: str = "") -> Iterator[None]:
|
||||
if self._storage._lock.locked == "w":
|
||||
yield
|
||||
return
|
||||
with self._storage._cache_lock.acquire((self.path, ns)):
|
||||
yield
|
||||
|
||||
|
||||
class Storage(multifilesystem.Storage):
|
||||
|
||||
_collection_class: ClassVar[Type[Collection]] = Collection
|
||||
|
||||
_cache_lock: LockDict
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
self._lock = RwLock()
|
||||
self._cache_lock = LockDict()
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
|
@ -22,13 +22,22 @@ Tests for Radicale.
|
|||
|
||||
import base64
|
||||
import logging
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import wsgiref.util
|
||||
import xml.etree.ElementTree as ET
|
||||
from io import BytesIO
|
||||
from typing import Any, Dict, List, Optional, Tuple, Union
|
||||
from urllib.parse import quote
|
||||
|
||||
import defusedxml.ElementTree as DefusedET
|
||||
import vobject
|
||||
|
||||
import radicale
|
||||
from radicale import xmlutils
|
||||
from radicale import app, config, types, xmlutils
|
||||
|
||||
RESPONSES = Dict[str, Union[int, Dict[str, Tuple[int, ET.Element]], vobject.base.Component]]
|
||||
|
||||
# Enable debug output
|
||||
radicale.log.logger.setLevel(logging.DEBUG)
|
||||
|
@ -37,120 +46,183 @@ radicale.log.logger.setLevel(logging.DEBUG)
|
|||
class BaseTest:
|
||||
"""Base class for tests."""
|
||||
|
||||
def request(self, method, path, data=None, login=None, **args):
|
||||
colpath: str
|
||||
configuration: config.Configuration
|
||||
application: app.Application
|
||||
|
||||
def setup_method(self) -> None:
|
||||
self.configuration = config.load()
|
||||
self.colpath = tempfile.mkdtemp()
|
||||
self.configure({
|
||||
"storage": {"filesystem_folder": self.colpath,
|
||||
# Disable syncing to disk for better performance
|
||||
"_filesystem_fsync": "False"},
|
||||
# Set incorrect authentication delay to a short duration
|
||||
"auth": {"delay": "0.001"}})
|
||||
|
||||
def configure(self, config_: types.CONFIG) -> None:
|
||||
self.configuration.update(config_, "test", privileged=True)
|
||||
self.application = app.Application(self.configuration)
|
||||
|
||||
def teardown_method(self) -> None:
|
||||
shutil.rmtree(self.colpath)
|
||||
|
||||
def request(self, method: str, path: str, data: Optional[str] = None,
|
||||
check: Optional[int] = None, **kwargs
|
||||
) -> Tuple[int, Dict[str, str], str]:
|
||||
"""Send a request."""
|
||||
for key in args:
|
||||
args[key.upper()] = args[key]
|
||||
login = kwargs.pop("login", None)
|
||||
if login is not None and not isinstance(login, str):
|
||||
raise TypeError("login argument must be %r, not %r" %
|
||||
(str, type(login)))
|
||||
environ: Dict[str, Any] = {k.upper(): v for k, v in kwargs.items()}
|
||||
for k, v in environ.items():
|
||||
if not isinstance(v, str):
|
||||
raise TypeError("type of %r is %r, expected %r" %
|
||||
(k, type(v), str))
|
||||
encoding: str = self.configuration.get("encoding", "request")
|
||||
if login:
|
||||
args["HTTP_AUTHORIZATION"] = "Basic " + base64.b64encode(
|
||||
login.encode()).decode()
|
||||
args["REQUEST_METHOD"] = method.upper()
|
||||
args["PATH_INFO"] = path
|
||||
if data:
|
||||
data = data.encode()
|
||||
args["wsgi.input"] = BytesIO(data)
|
||||
args["CONTENT_LENGTH"] = str(len(data))
|
||||
args["wsgi.errors"] = sys.stderr
|
||||
environ["HTTP_AUTHORIZATION"] = "Basic " + base64.b64encode(
|
||||
login.encode(encoding)).decode()
|
||||
environ["REQUEST_METHOD"] = method.upper()
|
||||
environ["PATH_INFO"] = path
|
||||
if data is not None:
|
||||
data_bytes = data.encode(encoding)
|
||||
environ["wsgi.input"] = BytesIO(data_bytes)
|
||||
environ["CONTENT_LENGTH"] = str(len(data_bytes))
|
||||
environ["wsgi.errors"] = sys.stderr
|
||||
wsgiref.util.setup_testing_defaults(environ)
|
||||
status = headers = None
|
||||
|
||||
def start_response(status_, headers_):
|
||||
def start_response(status_: str, headers_: List[Tuple[str, str]]
|
||||
) -> None:
|
||||
nonlocal status, headers
|
||||
status = status_
|
||||
headers = headers_
|
||||
answer = self.application(args, start_response)
|
||||
status = int(status_.split()[0])
|
||||
headers = dict(headers_)
|
||||
answers = list(self.application(environ, start_response))
|
||||
assert status is not None and headers is not None
|
||||
assert check is None or status == check, "%d != %d" % (status, check)
|
||||
|
||||
return (int(status.split()[0]), dict(headers),
|
||||
answer[0].decode() if answer else None)
|
||||
return status, headers, answers[0].decode() if answers else ""
|
||||
|
||||
@staticmethod
|
||||
def parse_responses(text):
|
||||
def parse_responses(text: str) -> RESPONSES:
|
||||
xml = DefusedET.fromstring(text)
|
||||
assert xml.tag == xmlutils.make_clark("D:multistatus")
|
||||
path_responses = {}
|
||||
path_responses: RESPONSES = {}
|
||||
for response in xml.findall(xmlutils.make_clark("D:response")):
|
||||
href = response.find(xmlutils.make_clark("D:href"))
|
||||
assert href.text not in path_responses
|
||||
prop_respones = {}
|
||||
prop_responses: Dict[str, Tuple[int, ET.Element]] = {}
|
||||
for propstat in response.findall(
|
||||
xmlutils.make_clark("D:propstat")):
|
||||
status = propstat.find(xmlutils.make_clark("D:status"))
|
||||
assert status.text.startswith("HTTP/1.1 ")
|
||||
status_code = int(status.text.split(" ")[1])
|
||||
for prop in propstat.findall(xmlutils.make_clark("D:prop")):
|
||||
for element in prop:
|
||||
human_tag = xmlutils.make_human_tag(element.tag)
|
||||
assert human_tag not in prop_respones
|
||||
prop_respones[human_tag] = (status_code, element)
|
||||
for element in propstat.findall(
|
||||
"./%s/*" % xmlutils.make_clark("D:prop")):
|
||||
human_tag = xmlutils.make_human_tag(element.tag)
|
||||
assert human_tag not in prop_responses
|
||||
prop_responses[human_tag] = (status_code, element)
|
||||
status = response.find(xmlutils.make_clark("D:status"))
|
||||
if status is not None:
|
||||
assert not prop_respones
|
||||
assert not prop_responses
|
||||
assert status.text.startswith("HTTP/1.1 ")
|
||||
status_code = int(status.text.split(" ")[1])
|
||||
path_responses[href.text] = status_code
|
||||
else:
|
||||
path_responses[href.text] = prop_respones
|
||||
path_responses[href.text] = prop_responses
|
||||
return path_responses
|
||||
|
||||
@staticmethod
|
||||
def _check_status(status, good_status, check=True):
|
||||
if check is True:
|
||||
assert status == good_status
|
||||
elif check is not False:
|
||||
assert status == check
|
||||
return status == good_status
|
||||
def parse_free_busy(text: str) -> RESPONSES:
|
||||
path_responses: RESPONSES = {}
|
||||
path_responses[""] = vobject.readOne(text)
|
||||
return path_responses
|
||||
|
||||
def get(self, path, check=True, **args):
|
||||
status, _, answer = self.request("GET", path, **args)
|
||||
self._check_status(status, 200, check)
|
||||
def get(self, path: str, check: Optional[int] = 200, **kwargs
|
||||
) -> Tuple[int, str]:
|
||||
assert "data" not in kwargs
|
||||
status, _, answer = self.request("GET", path, check=check, **kwargs)
|
||||
return status, answer
|
||||
|
||||
def put(self, path, data, check=True, **args):
|
||||
status, _, answer = self.request("PUT", path, data, **args)
|
||||
self._check_status(status, 201, check)
|
||||
return status
|
||||
def post(self, path: str, data: Optional[str] = None,
|
||||
check: Optional[int] = 200, **kwargs) -> Tuple[int, str]:
|
||||
status, _, answer = self.request("POST", path, data, check=check,
|
||||
**kwargs)
|
||||
return status, answer
|
||||
|
||||
def propfind(self, path, data=None, check=True, **args):
|
||||
status, _, answer = self.request("PROPFIND", path, data, **args)
|
||||
if not self._check_status(status, 207, check):
|
||||
return status, None
|
||||
def put(self, path: str, data: str, check: Optional[int] = 201,
|
||||
**kwargs) -> Tuple[int, str]:
|
||||
status, _, answer = self.request("PUT", path, data, check=check,
|
||||
**kwargs)
|
||||
return status, answer
|
||||
|
||||
def propfind(self, path: str, data: Optional[str] = None,
|
||||
check: Optional[int] = 207, **kwargs
|
||||
) -> Tuple[int, RESPONSES]:
|
||||
status, _, answer = self.request("PROPFIND", path, data, check=check,
|
||||
**kwargs)
|
||||
if status < 200 or 300 <= status:
|
||||
return status, {}
|
||||
assert answer is not None
|
||||
responses = self.parse_responses(answer)
|
||||
if args.get("HTTP_DEPTH", 0) == 0:
|
||||
assert len(responses) == 1 and path in responses
|
||||
if kwargs.get("HTTP_DEPTH", "0") == "0":
|
||||
assert len(responses) == 1 and quote(path) in responses
|
||||
return status, responses
|
||||
|
||||
def proppatch(self, path, data=None, check=True, **args):
|
||||
status, _, answer = self.request("PROPPATCH", path, data, **args)
|
||||
if not self._check_status(status, 207, check):
|
||||
return status, None
|
||||
def proppatch(self, path: str, data: Optional[str] = None,
|
||||
check: Optional[int] = 207, **kwargs
|
||||
) -> Tuple[int, RESPONSES]:
|
||||
status, _, answer = self.request("PROPPATCH", path, data, check=check,
|
||||
**kwargs)
|
||||
if status < 200 or 300 <= status:
|
||||
return status, {}
|
||||
assert answer is not None
|
||||
responses = self.parse_responses(answer)
|
||||
assert len(responses) == 1 and path in responses
|
||||
return status, responses
|
||||
|
||||
def report(self, path, data, check=True, **args):
|
||||
status, _, answer = self.request("REPORT", path, data, **args)
|
||||
if not self._check_status(status, 207, check):
|
||||
return status, None
|
||||
return status, self.parse_responses(answer)
|
||||
def report(self, path: str, data: str, check: Optional[int] = 207,
|
||||
is_xml: Optional[bool] = True,
|
||||
**kwargs) -> Tuple[int, RESPONSES]:
|
||||
status, _, answer = self.request("REPORT", path, data, check=check,
|
||||
**kwargs)
|
||||
if status < 200 or 300 <= status:
|
||||
return status, {}
|
||||
assert answer is not None
|
||||
if is_xml:
|
||||
parsed = self.parse_responses(answer)
|
||||
else:
|
||||
parsed = self.parse_free_busy(answer)
|
||||
return status, parsed
|
||||
|
||||
def delete(self, path, check=True, **args):
|
||||
status, _, answer = self.request("DELETE", path, **args)
|
||||
if not self._check_status(status, 200, check):
|
||||
return status, None
|
||||
def delete(self, path: str, check: Optional[int] = 200, **kwargs
|
||||
) -> Tuple[int, RESPONSES]:
|
||||
assert "data" not in kwargs
|
||||
status, _, answer = self.request("DELETE", path, check=check, **kwargs)
|
||||
if status < 200 or 300 <= status:
|
||||
return status, {}
|
||||
assert answer is not None
|
||||
responses = self.parse_responses(answer)
|
||||
assert len(responses) == 1 and path in responses
|
||||
return status, responses
|
||||
|
||||
def mkcalendar(self, path, data=None, check=True, **args):
|
||||
status, _, _ = self.request("MKCALENDAR", path, data, **args)
|
||||
self._check_status(status, 201, check)
|
||||
def mkcalendar(self, path: str, data: Optional[str] = None,
|
||||
check: Optional[int] = 201, **kwargs
|
||||
) -> Tuple[int, str]:
|
||||
status, _, answer = self.request("MKCALENDAR", path, data, check=check,
|
||||
**kwargs)
|
||||
return status, answer
|
||||
|
||||
def mkcol(self, path: str, data: Optional[str] = None,
|
||||
check: Optional[int] = 201, **kwargs) -> int:
|
||||
status, _, _ = self.request("MKCOL", path, data, check=check, **kwargs)
|
||||
return status
|
||||
|
||||
def mkcol(self, path, data=None, check=True, **args):
|
||||
status, _, _ = self.request("MKCOL", path, data, **args)
|
||||
self._check_status(status, 201, check)
|
||||
return status
|
||||
|
||||
def create_addressbook(self, path, check=True, **args):
|
||||
def create_addressbook(self, path: str, check: Optional[int] = 201,
|
||||
**kwargs) -> int:
|
||||
assert "data" not in kwargs
|
||||
return self.mkcol(path, """\
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<create xmlns="DAV:" xmlns:CR="urn:ietf:params:xml:ns:carddav">
|
||||
|
@ -162,4 +234,4 @@ class BaseTest:
|
|||
</resourcetype>
|
||||
</prop>
|
||||
</set>
|
||||
</create>""", check=check, **args)
|
||||
</create>""", check=check, **kwargs)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
|
@ -28,7 +28,8 @@ from radicale import auth
|
|||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
def login(self, login, password):
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
if login == "tmp":
|
||||
return login
|
||||
return ""
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
|
@ -23,7 +23,8 @@ from radicale import pathutils, rights
|
|||
|
||||
|
||||
class Rights(rights.BaseRights):
|
||||
def authorization(self, user, path):
|
||||
|
||||
def authorization(self, user: str, path: str) -> str:
|
||||
sane_path = pathutils.strip_path(path)
|
||||
if sane_path not in ("tmp", "other"):
|
||||
return ""
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
|
@ -27,8 +27,10 @@ from radicale.storage import BaseCollection, multifilesystem
|
|||
|
||||
|
||||
class Collection(multifilesystem.Collection):
|
||||
|
||||
sync = BaseCollection.sync
|
||||
|
||||
|
||||
class Storage(multifilesystem.Storage):
|
||||
|
||||
_collection_class = Collection
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
|
@ -21,9 +21,16 @@ Custom web plugin.
|
|||
|
||||
from http import client
|
||||
|
||||
from radicale import web
|
||||
from radicale import httputils, types, web
|
||||
|
||||
|
||||
class Web(web.BaseWeb):
|
||||
def get(self, environ, base_prefix, path, user):
|
||||
|
||||
def get(self, environ: types.WSGIEnviron, base_prefix: str, path: str,
|
||||
user: str) -> types.WSGIResponse:
|
||||
return client.OK, {"Content-Type": "text/plain"}, "custom"
|
||||
|
||||
def post(self, environ: types.WSGIEnviron, base_prefix: str, path: str,
|
||||
user: str) -> types.WSGIResponse:
|
||||
content = httputils.read_request_body(self.configuration, environ)
|
||||
return client.OK, {"Content-Type": "text/plain"}, "echo:" + content
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
|
@ -26,22 +26,21 @@ This module offers helpers to use in tests.
|
|||
|
||||
import os
|
||||
|
||||
EXAMPLES_FOLDER = os.path.join(os.path.dirname(__file__), "static")
|
||||
from radicale import config, types
|
||||
|
||||
EXAMPLES_FOLDER: str = os.path.join(os.path.dirname(__file__), "static")
|
||||
|
||||
|
||||
def get_file_path(file_name):
|
||||
def get_file_path(file_name: str) -> str:
|
||||
return os.path.join(EXAMPLES_FOLDER, file_name)
|
||||
|
||||
|
||||
def get_file_content(file_name):
|
||||
try:
|
||||
with open(get_file_path(file_name), encoding="utf-8") as fd:
|
||||
return fd.read()
|
||||
except IOError:
|
||||
print("Couldn't open the file %s" % file_name)
|
||||
def get_file_content(file_name: str) -> str:
|
||||
with open(get_file_path(file_name), encoding="utf-8") as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def configuration_to_dict(configuration):
|
||||
def configuration_to_dict(configuration: config.Configuration) -> types.CONFIG:
|
||||
"""Convert configuration to a dict with raw values."""
|
||||
return {section: {option: configuration.get_raw(section, option)
|
||||
for option in configuration.options(section)
|
||||
|
|
8
radicale/tests/static/contact_photo_with_data_uri.vcf
Normal file
8
radicale/tests/static/contact_photo_with_data_uri.vcf
Normal file
|
@ -0,0 +1,8 @@
|
|||
BEGIN:VCARD
|
||||
VERSION:3.0
|
||||
UID:contact
|
||||
N:Contact;;;;
|
||||
FN:Contact
|
||||
NICKNAME:test
|
||||
PHOTO;ENCODING=b;TYPE=png:data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAAD0lEQVQIHQEEAPv/AP///wX+Av4DfRnGAAAAAElFTkSuQmCC
|
||||
END:VCARD
|
|
@ -25,6 +25,7 @@ LAST-MODIFIED:20130902T150158Z
|
|||
DTSTAMP:20130902T150158Z
|
||||
UID:event1
|
||||
SUMMARY:Event
|
||||
CATEGORIES:some_category1,another_category2
|
||||
ORGANIZER:mailto:unclesam@example.com
|
||||
ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=TENTATIVE;CN=Jane Doe:MAILTO:janedoe@example.com
|
||||
ATTENDEE;ROLE=REQ-PARTICIPANT;DELEGATED-FROM="MAILTO:bob@host.com";PARTSTAT=ACCEPTED;CN=John Doe:MAILTO:johndoe@example.com
|
||||
|
|
|
@ -23,12 +23,14 @@ BEGIN:VEVENT
|
|||
CREATED:20130902T150157Z
|
||||
LAST-MODIFIED:20130902T150158Z
|
||||
DTSTAMP:20130902T150158Z
|
||||
UID:event1
|
||||
UID:event10
|
||||
SUMMARY:Event
|
||||
CATEGORIES:some_category1,another_category2
|
||||
ORGANIZER:mailto:unclesam@example.com
|
||||
ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=TENTATIVE;CN=Jane Doe:MAILTO:janedoe@example.com
|
||||
ATTENDEE;ROLE=REQ-PARTICIPANT;DELEGATED-FROM="MAILTO:bob@host.com";PARTSTAT=ACCEPTED;CN=John Doe:MAILTO:johndoe@example.com
|
||||
DTSTART;TZID=Europe/Paris:20140901T180000
|
||||
DTEND;TZID=Europe/Paris:20140901T210000
|
||||
DTSTART;TZID=Europe/Paris:20130901T180000
|
||||
DTEND;TZID=Europe/Paris:20130901T190000
|
||||
STATUS:CANCELLED
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
28
radicale/tests/static/event_daily_rrule.ics
Normal file
28
radicale/tests/static/event_daily_rrule.ics
Normal file
|
@ -0,0 +1,28 @@
|
|||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
BEGIN:VTIMEZONE
|
||||
LAST-MODIFIED:20040110T032845Z
|
||||
TZID:US/Eastern
|
||||
BEGIN:DAYLIGHT
|
||||
DTSTART:20000404T020000
|
||||
RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4
|
||||
TZNAME:EDT
|
||||
TZOFFSETFROM:-0500
|
||||
TZOFFSETTO:-0400
|
||||
END:DAYLIGHT
|
||||
BEGIN:STANDARD
|
||||
DTSTART:20001026T020000
|
||||
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
|
||||
TZNAME:EST
|
||||
TZOFFSETFROM:-0400
|
||||
TZOFFSETTO:-0500
|
||||
END:STANDARD
|
||||
END:VTIMEZONE
|
||||
BEGIN:VEVENT
|
||||
DTSTART;TZID=US/Eastern:20060102T120000
|
||||
DURATION:PT1H
|
||||
RRULE:FREQ=DAILY;COUNT=5
|
||||
SUMMARY:Recurring event
|
||||
UID:event_daily_rrule
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
35
radicale/tests/static/event_daily_rrule_overridden.ics
Normal file
35
radicale/tests/static/event_daily_rrule_overridden.ics
Normal file
|
@ -0,0 +1,35 @@
|
|||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
BEGIN:VTIMEZONE
|
||||
LAST-MODIFIED:20040110T032845Z
|
||||
TZID:US/Eastern
|
||||
BEGIN:DAYLIGHT
|
||||
DTSTART:20000404T020000
|
||||
RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4
|
||||
TZNAME:EDT
|
||||
TZOFFSETFROM:-0500
|
||||
TZOFFSETTO:-0400
|
||||
END:DAYLIGHT
|
||||
BEGIN:STANDARD
|
||||
DTSTART:20001026T020000
|
||||
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
|
||||
TZNAME:EST
|
||||
TZOFFSETFROM:-0400
|
||||
TZOFFSETTO:-0500
|
||||
END:STANDARD
|
||||
END:VTIMEZONE
|
||||
BEGIN:VEVENT
|
||||
DTSTART;TZID=US/Eastern:20060102T120000
|
||||
DURATION:PT1H
|
||||
RRULE:FREQ=DAILY;COUNT=5
|
||||
SUMMARY:Event #2
|
||||
UID:event_daily_rrule_overridden
|
||||
END:VEVENT
|
||||
BEGIN:VEVENT
|
||||
DTSTART;TZID=US/Eastern:20060104T140000
|
||||
DURATION:PT1H
|
||||
RECURRENCE-ID;TZID=US/Eastern:20060104T120000
|
||||
SUMMARY:Event #2 bis
|
||||
UID:event_daily_rrule_overridden
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
55
radicale/tests/static/event_exdate_without_rrule.ics
Normal file
55
radicale/tests/static/event_exdate_without_rrule.ics
Normal file
|
@ -0,0 +1,55 @@
|
|||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID:DAVx5/4.4.3.2-ose ical4j/3.2.19
|
||||
BEGIN:VEVENT
|
||||
DTSTAMP:20241125T195941Z
|
||||
UID:9fb6578a-07a6-4c61-8406-69229713d40e
|
||||
SEQUENCE:3
|
||||
SUMMARY:Escalade
|
||||
DTSTART;TZID=Europe/Paris:20240606T193000
|
||||
DTEND;TZID=Europe/Paris:20240606T203000
|
||||
RRULE:FREQ=WEEKLY;WKST=MO;BYDAY=TH
|
||||
EXDATE;TZID=Europe/Paris:20240704T193000
|
||||
CLASS:PUBLIC
|
||||
STATUS:CONFIRMED
|
||||
BEGIN:VALARM
|
||||
TRIGGER:-P1D
|
||||
ACTION:DISPLAY
|
||||
DESCRIPTION:Escalade
|
||||
END:VALARM
|
||||
END:VEVENT
|
||||
BEGIN:VEVENT
|
||||
DTSTAMP:20241125T195941Z
|
||||
UID:9fb6578a-07a6-4c61-8406-69229713d40e
|
||||
RECURRENCE-ID;TZID=Europe/Paris:20241128T193000
|
||||
SEQUENCE:1
|
||||
SUMMARY:Escalade avec Romain
|
||||
DTSTART;TZID=Europe/Paris:20241128T193000
|
||||
DTEND;TZID=Europe/Paris:20241128T203000
|
||||
EXDATE;TZID=Europe/Paris:20240704T193000
|
||||
CLASS:PUBLIC
|
||||
STATUS:CONFIRMED
|
||||
BEGIN:VALARM
|
||||
TRIGGER:-P1D
|
||||
ACTION:DISPLAY
|
||||
DESCRIPTION:Escalade avec Romain
|
||||
END:VALARM
|
||||
END:VEVENT
|
||||
BEGIN:VTIMEZONE
|
||||
TZID:Europe/Paris
|
||||
BEGIN:STANDARD
|
||||
TZNAME:CET
|
||||
TZOFFSETFROM:+0200
|
||||
TZOFFSETTO:+0100
|
||||
DTSTART:19961027T030000
|
||||
RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU
|
||||
END:STANDARD
|
||||
BEGIN:DAYLIGHT
|
||||
TZNAME:CEST
|
||||
TZOFFSETFROM:+0100
|
||||
TZOFFSETTO:+0200
|
||||
DTSTART:19810329T020000
|
||||
RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=-1SU
|
||||
END:DAYLIGHT
|
||||
END:VTIMEZONE
|
||||
END:VCALENDAR
|
31
radicale/tests/static/event_full_day_rrule.ics
Normal file
31
radicale/tests/static/event_full_day_rrule.ics
Normal file
|
@ -0,0 +1,31 @@
|
|||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
|
||||
BEGIN:VTIMEZONE
|
||||
LAST-MODIFIED:20040110T032845Z
|
||||
TZID:US/Eastern
|
||||
BEGIN:DAYLIGHT
|
||||
DTSTART:20000404T020000
|
||||
RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4
|
||||
TZNAME:EDT
|
||||
TZOFFSETFROM:-0500
|
||||
TZOFFSETTO:-0400
|
||||
END:DAYLIGHT
|
||||
BEGIN:STANDARD
|
||||
DTSTART:20001026T020000
|
||||
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
|
||||
TZNAME:EST
|
||||
TZOFFSETFROM:-0400
|
||||
TZOFFSETTO:-0500
|
||||
END:STANDARD
|
||||
END:VTIMEZONE
|
||||
BEGIN:VEVENT
|
||||
DTSTART;TZID=US/Eastern:20060102
|
||||
DTEND;TZID=US/Eastern:20060103
|
||||
RRULE:FREQ=DAILY;COUNT=5
|
||||
SUMMARY:Recurring event
|
||||
UID:event_full_day_rrule
|
||||
DTSTAMP:20060102T094829Z
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
|
33
radicale/tests/static/event_mixed_datetime_and_date.ics
Normal file
33
radicale/tests/static/event_mixed_datetime_and_date.ics
Normal file
|
@ -0,0 +1,33 @@
|
|||
BEGIN:VCALENDAR
|
||||
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
|
||||
VERSION:2.0
|
||||
BEGIN:VTIMEZONE
|
||||
TZID:Europe/Paris
|
||||
X-LIC-LOCATION:Europe/Paris
|
||||
BEGIN:DAYLIGHT
|
||||
TZOFFSETFROM:+0100
|
||||
TZOFFSETTO:+0200
|
||||
TZNAME:CEST
|
||||
DTSTART:19700329T020000
|
||||
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
|
||||
END:DAYLIGHT
|
||||
BEGIN:STANDARD
|
||||
TZOFFSETFROM:+0200
|
||||
TZOFFSETTO:+0100
|
||||
TZNAME:CET
|
||||
DTSTART:19701025T030000
|
||||
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
|
||||
END:STANDARD
|
||||
END:VTIMEZONE
|
||||
BEGIN:VEVENT
|
||||
CREATED:20130902T150157Z
|
||||
LAST-MODIFIED:20130902T150158Z
|
||||
DTSTAMP:20130902T150158Z
|
||||
UID:event_mixed_datetime_and_date
|
||||
SUMMARY:Event
|
||||
DTSTART;TZID=Europe/Paris:20130901T180000
|
||||
DTEND;TZID=Europe/Paris:20130901T190000
|
||||
RRULE:FREQ=DAILY;COUNT=3
|
||||
EXDATE;VALUE=DATE:20130902
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
16
radicale/tests/static/event_multiple_case_sensitive_uids.ics
Normal file
16
radicale/tests/static/event_multiple_case_sensitive_uids.ics
Normal file
|
@ -0,0 +1,16 @@
|
|||
BEGIN:VCALENDAR
|
||||
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
|
||||
VERSION:2.0
|
||||
BEGIN:VEVENT
|
||||
UID:event
|
||||
SUMMARY:Event 1
|
||||
DTSTART:20130901T190000
|
||||
DTEND:20130901T200000
|
||||
END:VEVENT
|
||||
BEGIN:VEVENT
|
||||
UID:EVENT
|
||||
SUMMARY:Event 2
|
||||
DTSTART:20130901T200000
|
||||
DTEND:20130901T210000
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue