mirror of
https://github.com/Kozea/Radicale.git
synced 2025-04-04 05:37:37 +03:00
Compare commits
1510 commits
Author | SHA1 | Date | |
---|---|---|---|
|
8cdf262560 | ||
|
69587d3f5c | ||
|
f41533cca7 | ||
|
393a26814b | ||
|
3bdcbbdc56 | ||
|
9ca82a8aa2 | ||
|
ffe5fcc6f3 | ||
|
ecaed3188c | ||
|
c23821ad0c | ||
|
b744e9658c | ||
|
3ee5433397 | ||
|
29915b20c8 | ||
|
c91b8e49d5 | ||
|
14fb50954c | ||
|
312e26977b | ||
|
3bdc438283 | ||
|
3eb61a82a6 | ||
|
fb986ea02e | ||
|
af09d532c3 | ||
|
70b66ddfe2 | ||
|
6b83c409d4 | ||
|
7fcf473662 | ||
|
d25786c190 | ||
|
5d5b12c124 | ||
|
23387fa2f3 | ||
|
e0a24b14b4 | ||
|
2439266d0e | ||
|
9f7941d428 | ||
|
3af690fcb6 | ||
|
0d1dcec61a | ||
|
98152062df | ||
|
bcbf0918a9 | ||
|
f40c4d6e9b | ||
|
633dfbc875 | ||
|
34f51033b7 | ||
|
94ad295124 | ||
|
7399286ec9 | ||
|
7d351d6692 | ||
|
d4e23e6731 | ||
|
de527632e0 | ||
|
217978e9d5 | ||
|
2772305dde | ||
|
2ef99e5e85 | ||
|
26eab43f40 | ||
|
a3880480a9 | ||
|
9f8ac21130 | ||
|
e8c974a72a | ||
|
be43ce5161 | ||
|
7bb4beeae2 | ||
|
c9ffde27d8 | ||
|
dc56d67c33 | ||
|
081b8a7fcc | ||
|
76753d271a | ||
|
69f85a0bdf | ||
|
820691ca53 | ||
|
358ae55540 | ||
|
e22fbe282b | ||
|
b0d649f8b9 | ||
|
8f2099baf8 | ||
|
3a13ffbc51 | ||
|
0f67336987 | ||
|
cf727101f8 | ||
|
9f0385fd67 | ||
|
3963bb4d82 | ||
|
cffb2aaae3 | ||
|
4f0e607583 | ||
|
2f1db01083 | ||
|
95a8899002 | ||
|
41ab96e142 | ||
|
a284d18c16 | ||
|
30664f9346 | ||
|
36aba7a8b9 | ||
|
914320826f | ||
|
9372344bb1 | ||
|
c4a48828d3 | ||
|
ebe0418a4c | ||
|
c3c78db8ae | ||
|
0fa50210c9 | ||
|
25402ab641 | ||
|
76281ad1ff | ||
|
1d0ff9e84a | ||
|
e52056dea3 | ||
|
75711b46dc | ||
|
45df5a3b94 | ||
|
2ae1762daa | ||
|
7839ac5783 | ||
|
4086665d16 | ||
|
78dccbdc92 | ||
|
63b98913e0 | ||
|
b729a4c192 | ||
|
a3eb754967 | ||
|
d89ada0c17 | ||
|
7afff7ad2b | ||
|
451712d01d | ||
|
d7013ce726 | ||
|
280968e694 | ||
|
7b4da3a128 | ||
|
c6bd129fa2 | ||
|
bc2444bb9a | ||
|
dc35d4d0ad | ||
|
68f0eafe7d | ||
|
aa248f2b97 | ||
|
a2cd430f64 | ||
|
36e33ffee1 | ||
|
b8c2bc29ec | ||
|
65ce0c57e5 | ||
|
2958201454 | ||
|
73681a7767 | ||
|
cdbad007b6 | ||
|
78b94b1d4d | ||
|
e3ae7b3ab5 | ||
|
4419aa2285 | ||
|
eb8dc61952 | ||
|
3a4ec11733 | ||
|
7318f592c8 | ||
|
3910457a8d | ||
|
fcaee51ceb | ||
|
c2013ec901 | ||
|
29b1da4652 | ||
|
36a0501484 | ||
|
0b5dd82109 | ||
|
9b671beceb | ||
|
50f5d2e5ef | ||
|
8218081f58 | ||
|
16ece44faf | ||
|
5302863f53 | ||
|
6518f1b63a | ||
|
7f3fedc048 | ||
|
0759673e67 | ||
|
855e3743ca | ||
|
c8f650bc2c | ||
|
046d39b1bd | ||
|
954ddea006 | ||
|
6683775c81 | ||
|
9791a4db0f | ||
|
970d4ba468 | ||
|
809e35689b | ||
|
c3c61c692e | ||
|
53251231d4 | ||
|
63e414850e | ||
|
18338b3c6e | ||
|
d5cb05f817 | ||
|
4ab1cedee3 | ||
|
13a78d7365 | ||
|
93970a1001 | ||
|
c60627141f | ||
|
f6b5cb8a1e | ||
|
3914735ec0 | ||
|
48a634af9f | ||
|
3d50ae4a70 | ||
|
018978edd8 | ||
|
aa35c678ce | ||
|
19a47158bd | ||
|
a62da71aa2 | ||
|
67bbc9a31b | ||
|
dc83c6d7d0 | ||
|
484616f363 | ||
|
718089e3bf | ||
|
b078a8f002 | ||
|
fde0ecb9b2 | ||
|
803763729a | ||
|
37b18cf5a2 | ||
|
cd51581f38 | ||
|
88accdb672 | ||
|
c157dd7d19 | ||
|
605fc65584 | ||
|
f0d06cbc7d | ||
|
77f69f2b1e | ||
|
b011fa4e61 | ||
|
dcaec20681 | ||
|
d79abc2b7a | ||
|
938f6a97fd | ||
|
c2def71ce6 | ||
|
6f68a64855 | ||
|
f3a7641baa | ||
|
cfcfbbd231 | ||
|
e0d20edbcd | ||
|
d2be086cd1 | ||
|
7b6146405f | ||
|
04523e5087 | ||
|
23a68b2fb1 | ||
|
87dc5538d2 | ||
|
e28b719233 | ||
|
937acf38f7 | ||
|
063883797c | ||
|
30389f4525 | ||
|
780aaa7e3e | ||
|
98e65d88a4 | ||
|
10a79b9483 | ||
|
26637a1240 | ||
|
f9457f00f7 | ||
|
3df5d28432 | ||
|
e80bf58901 | ||
|
bc939522dc | ||
|
50b76f7114 | ||
|
72c7d32e44 | ||
|
c24659c5ec | ||
|
3e18644423 | ||
|
a93af6f177 | ||
|
ed6a5a834e | ||
|
dd9bb2beff | ||
|
0713041929 | ||
![]() |
3f04914de4 | ||
|
1c77fd819f | ||
|
08a35b19c8 | ||
|
1634ce9498 | ||
|
be64e57ae8 | ||
|
8172b87077 | ||
|
c853ec4a74 | ||
|
5ebaf4ef1c | ||
|
d6c4e6487a | ||
|
f9dd3efc3a | ||
|
6c1445d8db | ||
|
1ca41e2128 | ||
|
607b3af67b | ||
|
841df09312 | ||
|
c81e19616c | ||
|
b0d56f898b | ||
|
73f8f950d0 | ||
|
976dfe4a3f | ||
|
b122002077 | ||
|
ad94acddf1 | ||
|
2442a794ae | ||
|
a9f2e6fe7b | ||
|
5a00baab3f | ||
|
cf914450ee | ||
|
0d43a49ffb | ||
|
234be74b87 | ||
|
45f2a4cc0e | ||
|
532fad9ba6 | ||
|
99f5ec389d | ||
|
0253682c00 | ||
|
8c2feb4726 | ||
|
c243ae4ebf | ||
|
6f82333ff7 | ||
|
6f0ac545f0 | ||
|
70c4a34eb8 | ||
|
3763f28ae4 | ||
|
0a5ae5b0b4 | ||
|
5d48ba5d1e | ||
|
5a591b6471 | ||
|
8604dacad0 | ||
|
ca665c4849 | ||
|
8fdbd0dbf6 | ||
|
46fe98f60b | ||
|
c10ce7ae46 | ||
|
6ebca08423 | ||
|
c1be04abd1 | ||
|
c00ab76c83 | ||
|
5357e692d9 | ||
|
9cac3008b7 | ||
|
2489356dda | ||
|
5ce0cee8bf | ||
|
79ba07e16b | ||
|
c0acbd4402 | ||
|
b75e303556 | ||
|
a794a51885 | ||
|
4f2990342d | ||
|
ac8abbd12c | ||
|
9af15e6656 | ||
|
30e2ab490e | ||
|
ddd099accd | ||
|
8e97b709bf | ||
|
74311560c9 | ||
|
b22038c746 | ||
|
c2b2274dad | ||
|
2674f9a382 | ||
|
51960bcab8 | ||
|
a5dd4d8a7d | ||
|
94898ef6c1 | ||
|
7df2fb35a7 | ||
|
a4266c9690 | ||
|
1e8d9eda50 | ||
|
0b00218d75 | ||
|
7e23c603c1 | ||
|
6569e481df | ||
|
b19418f43c | ||
|
e2934a12c0 | ||
|
c8010fa4be | ||
|
b784f476b4 | ||
|
335584a6b7 | ||
|
9e9d036387 | ||
|
006c2d2bc0 | ||
|
b356edd6be | ||
|
59450e8c2d | ||
|
1a76e1ad50 | ||
|
6ebe9aee76 | ||
|
6214111f4f | ||
|
0f6dcb7192 | ||
|
4b1183ae00 | ||
|
c1c8ab2887 | ||
|
836827ac8f | ||
|
3d4cd7f034 | ||
|
a606477e3f | ||
|
c33e96c5a3 | ||
|
dc51a74e5a | ||
|
5f79b089c8 | ||
|
fc7c50b4cb | ||
|
11dad85404 | ||
|
dc20f518dd | ||
|
62bdfeab40 | ||
|
ff3f2fc3de | ||
|
4bb00e6070 | ||
|
b7ae6b378b | ||
|
7597c7d4a5 | ||
|
855e983ae2 | ||
|
0a5773a844 | ||
|
f1d007a51e | ||
|
4d04c85f2d | ||
|
f7d6f6442f | ||
|
a7ce8f032c | ||
|
05b8172f8f | ||
|
3ebe51a4cb | ||
|
0d29de6db9 | ||
|
886f4ee8d0 | ||
|
46acbfd987 | ||
|
0e0592e3b8 | ||
|
be5eab8671 | ||
|
9787f87cc7 | ||
|
1e318c81cf | ||
|
119cefce34 | ||
|
3983b5c887 | ||
|
778f56cc4d | ||
|
2bb2d6385b | ||
|
b3d0c16407 | ||
|
e1ee3d4529 | ||
|
644548c866 | ||
|
05d4e91856 | ||
|
99b6889d91 | ||
|
2d8903dc44 | ||
|
5681b45298 | ||
|
5515d1e790 | ||
|
eef33f76d1 | ||
|
05c349a15f | ||
|
916c9db3c8 | ||
|
ff5fae1663 | ||
|
d9e15dd7c6 | ||
|
675c5ce8cf | ||
|
b85c0758d8 | ||
|
3232b34392 | ||
|
873bf80131 | ||
|
38c236aa02 | ||
|
f725ee780f | ||
|
804170a4d5 | ||
|
2a5b12e21c | ||
|
6943eb659f | ||
|
24f5f9b98e | ||
|
edd6d0a513 | ||
|
92ce13e348 | ||
|
0fe53e62db | ||
|
f754f28518 | ||
|
1d241d9e2f | ||
|
d6bacc9047 | ||
|
43466078e7 | ||
|
8f80e0eb92 | ||
|
a54fb10e17 | ||
|
166d4ed27b | ||
|
2c234b97d1 | ||
|
64acfe27f4 | ||
|
48bab4b033 | ||
|
e07a248451 | ||
|
62e6aad2d2 | ||
|
37f7df2786 | ||
|
f26facba3e | ||
|
4696d252f4 | ||
|
287c0e7171 | ||
|
fbb6b1684a | ||
|
82064f823a | ||
|
19f5aa0edd | ||
|
92e5032278 | ||
|
6fa15dae4a | ||
|
5b64ef9fe7 | ||
|
69780dd0ee | ||
|
4781b48a1c | ||
|
6f2c1037d5 | ||
|
e4daddc186 | ||
|
f7e46ebf39 | ||
|
1ea782e3b2 | ||
|
c13e0e60fd | ||
|
8fea1f907e | ||
|
a6b1e000e7 | ||
|
a64f0e1093 | ||
|
18e8ab1ccc | ||
|
7b0d3ed29d | ||
|
0baf67147e | ||
|
0f9bf4c063 | ||
|
df5ca97442 | ||
|
416081a81f | ||
|
07b7d28323 | ||
|
5380629bda | ||
|
243b888c8e | ||
|
9ecb95ce37 | ||
|
6929f3d0b3 | ||
|
00dac0c030 | ||
|
fb904320d2 | ||
|
1d07d72946 | ||
|
d7840b8bff | ||
|
cfc1e94ad8 | ||
|
bf77844d34 | ||
|
6a6fec5bdd | ||
|
b0d1ccc0f6 | ||
|
2d5dc5186b | ||
|
36ef753b0e | ||
|
74f4412761 | ||
|
ae274911d5 | ||
|
1ee93f32b2 | ||
|
ee2af306d7 | ||
|
687624a403 | ||
|
19cca41a43 | ||
|
56c375fca2 | ||
|
a1b8c65def | ||
|
c6cc7f3486 | ||
|
652e768650 | ||
|
f25a5fbc79 | ||
|
bf4f5834af | ||
|
f7c731e189 | ||
|
059afef35d | ||
|
e0c04f2ae3 | ||
|
5cafd29d7f | ||
|
0badab86a6 | ||
|
b6fa3c47c3 | ||
|
c63d00a550 | ||
|
8bfed78926 | ||
|
1670e4a793 | ||
|
8e9fdf391a | ||
|
ccddf877ee | ||
|
48e4203856 | ||
|
bd001fe1d5 | ||
|
dbc939aff2 | ||
|
5ec34ed163 | ||
|
372e62bb54 | ||
|
59c638461b | ||
|
a8baea9b19 | ||
|
c438ccb215 | ||
|
d7c09e218f | ||
|
37148b7124 | ||
|
2c15b1b8f4 | ||
|
6e103b9c7e | ||
|
a78e32de4d | ||
|
9faf89880b | ||
|
a01e53616e | ||
|
e59e4d3aff | ||
|
67362189f5 | ||
|
ba9776d688 | ||
|
0505b7b603 | ||
|
eed6bcee01 | ||
|
110ee9d247 | ||
|
457af284e1 | ||
|
e0594d5b33 | ||
|
d41aa60d61 | ||
|
973b26b2e9 | ||
|
bfe0ccc463 | ||
|
77749cbbb9 | ||
|
fc77cf9d66 | ||
|
06a9cf2886 | ||
|
53bc6167d3 | ||
|
72e4c4fadd | ||
|
3e478ee6da | ||
|
0ab99d4e8f | ||
|
4ef5cad20f | ||
|
a449d8774b | ||
|
0f87897eb7 | ||
|
40c8b3d038 | ||
|
d15e836079 | ||
|
fce3f0b1df | ||
|
499b37fd2f | ||
|
e887b06d21 | ||
|
b1c682de57 | ||
|
c000408429 | ||
|
0feca04086 | ||
|
fdb014d068 | ||
|
ccb59444c3 | ||
|
97479190e8 | ||
|
d1ceb620e4 | ||
|
040a433696 | ||
|
187886e797 | ||
|
3cb9b73a16 | ||
|
a272d3039e | ||
|
98c5ffdc87 | ||
|
9945a9f65a | ||
|
7fbc0e70e9 | ||
|
15ed41fa09 | ||
|
a92a621b9b | ||
|
645619bac8 | ||
|
b30cdbbabf | ||
|
b081b3ea06 | ||
|
da844f48e6 | ||
|
a7f33c8795 | ||
|
b47c76e9ca | ||
|
da04d95b75 | ||
|
e05fbeb950 | ||
|
d75b071fec | ||
|
5cb16a3a2d | ||
|
606bd30514 | ||
|
6a78466af4 | ||
|
c63dee71ec | ||
|
b1ce69882c | ||
|
e70486900d | ||
|
368c43137a | ||
|
3f62982e1d | ||
|
a79c2ad83e | ||
|
6d11738243 | ||
|
e852c887d7 | ||
|
107fe1bc53 | ||
|
4f1e8ce889 | ||
|
39662fc680 | ||
|
336972316e | ||
|
7da46f392e | ||
|
d7fa90a976 | ||
|
13d56f0918 | ||
|
8b8d7729a2 | ||
|
5167f12624 | ||
|
19e5972b4f | ||
|
bd66d58540 | ||
|
408a03a3c0 | ||
|
3cba4b32a3 | ||
|
906d391fe3 | ||
|
d6c0a05771 | ||
|
29b7cd8d54 | ||
|
204623d656 | ||
|
b0f131cac2 | ||
|
4c1d295e81 | ||
|
7b0d88ff0d | ||
|
2d0496b888 | ||
|
773f09fe74 | ||
|
34b449f27f | ||
|
45f0b8809b | ||
|
7388a095f5 | ||
|
5ffaf6e837 | ||
|
0f505222d9 | ||
|
b1cf1f2e28 | ||
|
01d4851581 | ||
|
5019a3e974 | ||
|
c046c6ae34 | ||
|
897a679c1c | ||
|
b47a253ccb | ||
|
c499c313c2 | ||
|
1dceaf5385 | ||
|
47bc966a13 | ||
|
61be51e9f3 | ||
|
e5096d31af | ||
|
e5e80ebbe6 | ||
|
13b1aaed39 | ||
|
f117fd06af | ||
|
055489f79c | ||
|
53befe72db | ||
|
7fd7ec7f7a | ||
|
9809fbcba4 | ||
|
fe3d9d3f48 | ||
|
bb112784fd | ||
|
f1d84cea35 | ||
|
fe33d79eb1 | ||
|
dd8b62eef5 | ||
|
3094bc3936 | ||
|
6b34323c1e | ||
|
6de06fd75f | ||
|
b015f9dc16 | ||
|
bb203812e6 | ||
|
e24702a65b | ||
|
55f181da65 | ||
|
51a7136b93 | ||
|
bbe7088561 | ||
|
6b65800770 | ||
|
defa767c8a | ||
|
d83885d108 | ||
|
6eb6ff44d0 | ||
|
258b1024b3 | ||
|
54dfbf15d2 | ||
|
e9c7d4a671 | ||
|
b4967f8e26 | ||
|
ac5e33c723 | ||
|
02019e73e6 | ||
|
1acfa480fa | ||
|
addf5a25c8 | ||
|
fe630b46ba | ||
|
0cf8ede6c7 | ||
|
5b5273abbf | ||
|
59bd8e8330 | ||
|
e02a31af89 | ||
|
a70c69ee28 | ||
|
fc7b50d69f | ||
|
518de6b360 | ||
|
695c5d8416 | ||
|
ad596002f3 | ||
|
ac14b01dda | ||
|
a07b39eaad | ||
|
b603acf89f | ||
|
e8c092bd2d | ||
|
ce32134452 | ||
|
d3bfa968f8 | ||
|
bf112d6b5f | ||
|
9c338b34eb | ||
|
ad3a8d9370 | ||
|
27dfaa8663 | ||
|
c7c3119267 | ||
|
28fa28aaff | ||
|
e4949a1f2f | ||
|
d8cbe0e206 | ||
|
5dd27d3c80 | ||
|
eb577422f6 | ||
|
e9d92f10f1 | ||
|
e124e9d8c9 | ||
|
7c54d8a96c | ||
|
71ab791935 | ||
|
19f3c3edfb | ||
|
2a07c7d230 | ||
|
63db0483d0 | ||
|
138317e6fd | ||
|
9179550162 | ||
|
68551d2321 | ||
|
c6d01b7874 | ||
|
b1ae3edea8 | ||
|
2a35d349b8 | ||
|
86a69b431a | ||
|
d1e01aadb5 | ||
|
7d39354c37 | ||
|
acf65e9d6a | ||
|
fb7630f9eb | ||
|
496b9f1d7c | ||
|
2c0da6f37c | ||
|
4678612194 | ||
|
96b63ed65f | ||
|
c11a08cf88 | ||
|
61fef9c9df | ||
|
2296f4952b | ||
|
b78730d570 | ||
|
6f7abbcba5 | ||
|
f3f3995b01 | ||
|
27e1b04529 | ||
|
515afb52ed | ||
|
3e4dbc5d79 | ||
|
8db31b49fe | ||
|
2dec736fdf | ||
|
211972bd09 | ||
|
b0345a424d | ||
|
db87eba400 | ||
|
2354b56578 | ||
|
a26ec29899 | ||
|
a6368d8e66 | ||
|
a009bb562f | ||
|
706e663486 | ||
|
a7e631668f | ||
|
9e5f6db84b | ||
|
16eadd11b5 | ||
|
f7e01d55ed | ||
|
7340ddc9d2 | ||
|
a7882b98bd | ||
|
239e17d735 | ||
|
76dc9dce0d | ||
|
f0e21b14c2 | ||
|
06a95d7597 | ||
|
f610384374 | ||
|
d228892b7c | ||
|
a8bc232883 | ||
|
4e4af2aca5 | ||
|
aac5188fc0 | ||
|
2741d73d68 | ||
|
d1da63569b | ||
|
513415d201 | ||
|
df874a273f | ||
|
b8f401056a | ||
|
2ce5ba1e38 | ||
|
9d91564d10 | ||
|
85e1f46383 | ||
|
53fdf08a17 | ||
|
56a0752429 | ||
|
353ad7a9b3 | ||
|
ca99016200 | ||
|
e6429b4bfd | ||
|
150efe2a0c | ||
|
17169bbfdb | ||
|
95cd6938d9 | ||
|
a49454d36e | ||
|
f0f4213760 | ||
|
aadcc42eb3 | ||
|
9457699a32 | ||
|
5d143ca0e8 | ||
|
421cc2b1ca | ||
|
df0e97fab9 | ||
|
88a2ae71b6 | ||
|
dfa21a57ce | ||
|
ffebbf1928 | ||
|
84fd30f357 | ||
|
4e99105225 | ||
|
d05d726dc2 | ||
|
e66055de08 | ||
|
a24a97f880 | ||
|
ff4f8bfaf4 | ||
|
be53538738 | ||
|
922de6ce14 | ||
|
9a2d42afab | ||
|
46b258b1bc | ||
|
a65e995c53 | ||
|
4c55b6db28 | ||
|
546086af2e | ||
|
e85ec93291 | ||
|
b1336c2f1f | ||
|
0b9a53c73d | ||
|
e0a22074b4 | ||
|
10dd5aff3c | ||
|
beb33fec02 | ||
|
2ce7c2a45a | ||
|
02d157269e | ||
|
61f3557e78 | ||
|
020fd560a3 | ||
|
b16bc212f6 | ||
|
efd562b38d | ||
|
50e8f1e28a | ||
|
f508af580c | ||
|
0e1d502d0a | ||
|
7d27b4eacc | ||
|
f4e0825aec | ||
|
e76011ecc6 | ||
|
9c9be73093 | ||
|
17e6269400 | ||
|
bb6dcb6172 | ||
|
98bac38006 | ||
|
8891f1ab89 | ||
|
e4842ef7df | ||
|
5b99b5a88d | ||
|
74be6168cb | ||
|
8b4e28a179 | ||
|
a8a7e23a37 | ||
|
c128b0d773 | ||
|
ed6432706f | ||
|
d6e295c272 | ||
|
0750108152 | ||
|
b945749d1b | ||
|
7927b0c935 | ||
|
eac1722393 | ||
|
39c339638d | ||
|
3c81f43404 | ||
|
825464f102 | ||
|
2c13b8d2e0 | ||
|
ee2fc74bc0 | ||
|
dc21aa1b4f | ||
|
36285143ce | ||
|
bb185a941d | ||
|
80bf824b91 | ||
|
401b68fe08 | ||
|
b5d022fe08 | ||
|
caefa489f9 | ||
|
213ab0fcfa | ||
|
dba399bce3 | ||
|
29a2a80bfd | ||
|
acc06587ce | ||
|
947cb08bb9 | ||
|
1f25d6e27d | ||
|
5ec9aaec07 | ||
|
760b01ee25 | ||
|
37c975d938 | ||
|
48910bf3a9 | ||
|
6f5ee56c2d | ||
|
f05753be71 | ||
|
bf824838d8 | ||
|
32d303805a | ||
|
1afc34c2bb | ||
|
9d30cbc5c2 | ||
|
2bb811b3fc | ||
|
7bf2c18887 | ||
|
8fc5352e27 | ||
|
08a4c792b1 | ||
|
ab28d65343 | ||
|
1faa7bd4ba | ||
|
7b87a598ac | ||
|
d9be20539f | ||
|
49d0ad5b18 | ||
|
b4d7eb5f04 | ||
|
44cfd38263 | ||
|
21ebbca2d9 | ||
|
1c64fdc5b1 | ||
|
0f355114ae | ||
|
3ee6e55d4a | ||
|
f407915227 | ||
|
66b374bc28 | ||
|
1c32919739 | ||
|
3a04b2247e | ||
|
d8ab8aa42c | ||
|
c4d80fd385 | ||
|
dfaef5da75 | ||
|
1593742ce2 | ||
|
438d5f1735 | ||
|
34612c71f0 | ||
|
96a49274f4 | ||
|
72aab54636 | ||
|
5b0cc60cb9 | ||
|
3fdc15fccd | ||
|
1e684ef699 | ||
|
71fd91631e | ||
|
8baf4b7e3f | ||
|
78a5813831 | ||
|
363be35e61 | ||
|
e7b9ec3549 | ||
|
80d91a8987 | ||
|
6474f8f31c | ||
|
d387491fb6 | ||
|
eb67c57ce2 | ||
|
b45c97d5a5 | ||
|
32050ef117 | ||
|
94a5ff0d68 | ||
|
afff2731e1 | ||
|
7936e714d4 | ||
|
c5b48c1ee4 | ||
|
989cbefc64 | ||
|
a72964ab3f | ||
|
b24eae8369 | ||
|
1485777bc6 | ||
|
50140a54f5 | ||
|
682c048569 | ||
|
22c843c49c | ||
|
b7272be481 | ||
|
88f65671ce | ||
|
913635a17e | ||
|
551b5c2272 | ||
|
cc2e1553d3 | ||
|
5678453b95 | ||
|
76e06ea3fc | ||
|
8e3f3b5bf2 | ||
|
93e5dd4a70 | ||
|
9f1e243f00 | ||
|
e6d4611980 | ||
|
d42e9edfd0 | ||
|
2f97fc5b88 | ||
|
668ad03fa3 | ||
|
2b6626d053 | ||
|
998e2f96bd | ||
|
b892379a8d | ||
|
86f37e0250 | ||
|
dcd6456339 | ||
|
f64488b918 | ||
|
f57e738156 | ||
|
2dd7328859 | ||
|
5b0830ea08 | ||
|
1c82eb5e05 | ||
|
cb5b92cb7a | ||
|
4cac895901 | ||
|
02c949d5d3 | ||
|
8e8c652225 | ||
|
d3b78e0246 | ||
|
110ec3a788 | ||
|
fa6ec95e8c | ||
|
701a9794bc | ||
|
3fd3bf5192 | ||
|
a58e68ea37 | ||
|
3e95c0ab0e | ||
|
7c25c7715f | ||
|
fadf281734 | ||
|
a3aa0ce7d9 | ||
|
ecafa1d32b | ||
|
d7ce2f0b98 | ||
|
513e04e636 | ||
|
72103c30c2 | ||
|
a07813ecc9 | ||
|
ae731290c1 | ||
|
6ae831a324 | ||
|
8efb942892 | ||
|
a2be03fdaf | ||
|
77626e5aed | ||
|
390240c35a | ||
|
5070533a0b | ||
|
e23f0283b0 | ||
|
e8e709191a | ||
|
9276c65462 | ||
|
360484e2d5 | ||
|
1a78114a56 | ||
|
7d4a0fe70e | ||
|
7b98a0028b | ||
|
11a2b43b60 | ||
|
6a96b1f5a7 | ||
|
794c1f84fb | ||
|
f8e28f6b6e | ||
|
526d835b59 | ||
|
3e6d8db98d | ||
|
4a0b2e8791 | ||
|
22731f3d26 | ||
|
dd723dae5d | ||
|
cf81d1f9a7 | ||
|
cdb5160c3e | ||
|
f4a87afab7 | ||
|
6a56a6026f | ||
|
c2a159a6cd | ||
|
ad5ce94817 | ||
|
865e0dd629 | ||
|
9bed0af669 | ||
|
cd6ebaae1a | ||
|
f8f6e47081 | ||
|
5aa2f59b38 | ||
|
156ce91f35 | ||
|
3594217570 | ||
|
d8604becd0 | ||
|
f7fd323dea | ||
|
f9d9b88a77 | ||
|
86ada4cf97 | ||
|
c589c9fc0d | ||
|
14a3c3d763 | ||
|
abcc0c2ef6 | ||
|
f08912ace1 | ||
|
9aae5655cf | ||
|
413c74c27c | ||
|
a2ceaa41a4 | ||
|
515b196fda | ||
|
6d8976795c | ||
|
395f53b3d5 | ||
|
8caa90f4be | ||
|
497b5141b0 | ||
|
a5716a7d84 | ||
|
f06af066f9 | ||
|
e96277e671 | ||
|
c14defcba8 | ||
|
2b8f4b9419 | ||
|
a97093d001 | ||
|
b64c9baa5f | ||
|
ed8a2284a4 | ||
|
3c218ecd9c | ||
|
0baf1dc908 | ||
|
b0f8d37294 | ||
|
c5b5910de4 | ||
|
8d19fd7a64 | ||
|
eda8309a04 | ||
|
2dc0fd29dc | ||
|
47e42a46c1 | ||
|
a7c4a00eb6 | ||
|
129ebf7b86 | ||
|
730332d680 | ||
|
b87d1c8038 | ||
|
c155e2a351 | ||
|
11dd0e9380 | ||
|
4d4c3bda75 | ||
|
7e29d9b5c3 | ||
|
8c69bb71aa | ||
|
b2b4651fc4 | ||
|
cd5bc3590f | ||
|
e42b46c722 | ||
|
580b97fa0f | ||
|
0221fc357b | ||
|
523960bc9f | ||
|
3779d749cd | ||
|
b610c3214d | ||
|
c3d22e680f | ||
|
af6c6b96b9 | ||
|
45ff34f6c3 | ||
|
47f3a6d684 | ||
|
2cbbd4dc9c | ||
|
e4cc73098a | ||
|
10d2571d89 | ||
|
17a5e5b6e0 | ||
|
340582f84c | ||
|
3763ed46c4 | ||
|
7fde7d5005 | ||
|
0b7e9d73c9 | ||
|
c96e5b6667 | ||
|
4822807c4d | ||
|
da8475908e | ||
|
4224c60e9b | ||
|
59e4f2d594 | ||
|
81106fa647 | ||
|
33fcda7c32 | ||
|
555e4ccc51 | ||
|
685a91bfe6 | ||
|
22fc38850c | ||
|
4ed77cabc6 | ||
|
d1532aa466 | ||
|
1336c02079 | ||
|
e0adecf30c | ||
|
402bd3580e | ||
|
9c0b6cdaeb | ||
|
75df1093be | ||
|
4a0bcde7a3 | ||
|
b93842b10c | ||
|
6dee974b74 | ||
|
e3a982dbce | ||
|
4c44940ec1 | ||
|
8fa4345b6f | ||
|
cfba4c17b6 | ||
|
d3f99d349d | ||
|
bd0a95c098 | ||
|
7bfb6c0132 | ||
|
4564de9f9d | ||
|
f75671354c | ||
|
2cd0a3189e | ||
|
eac460d4d9 | ||
|
fb3de73d1c | ||
|
06f93a032b | ||
|
c4745680e0 | ||
|
11fd29a2d1 | ||
|
d90369b67c | ||
|
ec19a1a12c | ||
|
481bd4e4b9 | ||
|
a20791e0c3 | ||
|
537737da32 | ||
|
fb9cfeb81e | ||
|
bfba027446 | ||
|
83f53cb5cb | ||
|
4252747646 | ||
|
34771f6850 | ||
|
24ee523cc8 | ||
|
e1e563cc28 | ||
|
f25d7eebb8 | ||
|
e0f7fe6526 | ||
|
90bd33f466 | ||
|
91c06041f8 | ||
|
4b5165dc42 | ||
|
208ae11683 | ||
|
1234802f51 | ||
|
e176567ad0 | ||
|
e38ae96227 | ||
|
bbaf0ebd8c | ||
|
f14e1de071 | ||
|
e629e9a2e1 | ||
|
98b49ac2b6 | ||
|
08e789d993 | ||
|
b23aa4629c | ||
|
dba6338968 | ||
|
7c9c873b13 | ||
|
f72b344981 | ||
|
35e7ee5a08 | ||
|
056ce5b69f | ||
|
cd3f834a27 | ||
|
f921e48648 | ||
|
111a79f082 | ||
|
574e6f8c7b | ||
|
60f25bf19a | ||
|
698ae875ce | ||
|
cecb17df03 | ||
|
12fe5ce637 | ||
|
73e42f8101 | ||
|
34bec01c9b | ||
|
8f9734d797 | ||
|
b8848348d6 | ||
|
398e93e215 | ||
|
21099f2240 | ||
|
9997a32629 | ||
|
0cabc64584 | ||
|
66f14ee91c | ||
|
5333751e45 | ||
|
742a067171 | ||
|
2aafcd5df5 | ||
|
f05251bd01 | ||
|
b4c76c94ad | ||
|
4d4b040b81 | ||
|
9909454761 | ||
|
571567a4ec | ||
|
0e8949ff71 | ||
|
c8b31637ef | ||
|
b14889e170 | ||
|
0b3e4204a5 | ||
|
18f21e26d5 | ||
|
a9804dd550 | ||
|
7096ab74e8 | ||
|
236eedb555 | ||
|
d48bacc8e3 | ||
|
ca27156605 | ||
|
10dafde32d | ||
|
1fe011020b | ||
|
7642d72919 | ||
|
d31eaf79ec | ||
|
dd30aea7a5 | ||
|
81218906c6 | ||
|
a3ca887a37 | ||
|
0ce90d6b34 | ||
|
42ad18bc84 | ||
|
80e8750c8a | ||
|
d3bb19800c | ||
|
6091bd46a3 | ||
|
9d25cc6c0a | ||
|
41bccb265a | ||
|
2851525e15 | ||
|
1e011e7011 | ||
|
d4af2cd1a6 | ||
|
dbe95641c0 | ||
|
30c9c55358 | ||
|
46c39b28d6 | ||
|
cd3fe3e73c | ||
|
ea6649b365 | ||
|
9b3bb2de2b | ||
|
90f10f2c4a | ||
|
3214c498d1 | ||
|
06fbac67a1 | ||
|
896963dd3c | ||
|
b98cd98c4c | ||
|
46d1a31441 | ||
|
95eb44a87f | ||
|
b8af0c7490 | ||
|
e38b88f9f7 | ||
|
896642b374 | ||
|
bf5272e83d | ||
|
da31f80ba5 | ||
|
d3d0437bce | ||
|
ecff5fac82 | ||
|
3882cf2bc8 | ||
|
b2fc8bbb0c | ||
|
df80a7f6ef | ||
|
3d8f1b3b08 | ||
|
fbe2024342 | ||
|
69314f3cde | ||
|
7cc9db0d90 | ||
|
d19c16c8d3 | ||
|
1289003da1 | ||
|
b2a0067a57 | ||
|
2e93c012bf | ||
|
dc92a88584 | ||
|
bfe4332ac5 | ||
|
389a6b9906 | ||
|
5253a464ab | ||
|
f950ce98ab | ||
|
03e7e209da | ||
|
69b1a4ea77 | ||
|
58bcedde98 | ||
|
f3b6b4869d | ||
|
c6f6f2c1f4 | ||
|
e824a2587c | ||
|
c8c330d481 | ||
|
ac0cfeabb9 | ||
|
1edfb16143 | ||
|
53c1648738 | ||
|
f780853d8f | ||
|
e07f047fad | ||
|
b87ac43952 | ||
|
e0247f8f92 | ||
|
7ed5122636 | ||
|
e63a6e0c85 | ||
|
d3b90506f5 | ||
|
b9bb017edf | ||
|
6ec63ccc9b | ||
|
70a8d632fb | ||
|
14c8d99547 | ||
|
7c4409f93d | ||
|
8914567e58 | ||
|
83c02a64b9 | ||
|
10aee24056 | ||
|
3be9a22a91 | ||
|
8740357eb2 | ||
|
150dd0c4cd | ||
|
593f9b688c | ||
|
e077bb5a18 | ||
|
969502cd7b | ||
|
2909cae817 | ||
|
b700b5ed44 | ||
|
d26ee9e7ed | ||
|
4d632a97f3 | ||
|
f598271583 | ||
|
630d49b7cf | ||
|
63e00ca677 | ||
|
7b79c00ae2 | ||
|
b3dd881277 | ||
|
05b0819474 | ||
|
2b60f8ca2a | ||
|
fbf631ef60 | ||
|
0c42930633 | ||
|
3c778ef95e | ||
|
8de3a234c4 | ||
|
1e6c89d11e | ||
|
9a5ba5aa1b | ||
|
82f05b58f3 | ||
|
2da55baa5a | ||
|
d8450f666a | ||
|
7f2d5cea62 | ||
|
9bd852ba5e | ||
|
d73a308294 | ||
|
d5f5eeeddf | ||
|
aef58bd55c | ||
|
99adeb19c1 | ||
|
215d2c4cd3 | ||
|
49aa033b1b | ||
|
7375063cec | ||
|
1e294b7147 | ||
|
7b64bf8e1e | ||
|
27ac0ed025 | ||
|
d6d5e512dc | ||
|
bb292c4faa | ||
|
7457fbf3a9 | ||
|
461afbc04e | ||
|
f6a3a19680 | ||
|
8ca01a4989 | ||
|
9c622b57d5 | ||
|
72f8b29190 | ||
|
ea4adff108 | ||
|
849b84180c | ||
|
28a4238605 | ||
|
ed6bd5a373 | ||
|
53b1021cf7 | ||
|
645d244c2d | ||
|
61b7928758 | ||
|
1e3afcfe62 | ||
|
74abe02b70 | ||
|
e747e9a34d | ||
|
3d357de70b | ||
|
cf1c9ed648 | ||
|
c38b38eba0 | ||
|
cfbed8377a | ||
|
5b685d376f | ||
|
5af2c6e69a | ||
|
5c4989f26c | ||
|
daced78962 | ||
|
63167300e6 | ||
|
bd8393585e | ||
|
bd71c04973 | ||
|
8373a8961e | ||
|
f46a3e3d24 | ||
|
ee984b49d4 | ||
|
363dd79833 | ||
|
d3b632e123 | ||
|
8890a4c030 | ||
|
9603aa3496 | ||
|
941bb157cc | ||
|
d8f1565f5b | ||
|
6b46b01fcb | ||
|
180e96b332 | ||
|
8e3465b5d4 | ||
|
5371be2b39 | ||
|
0bda1f4c16 | ||
|
66fabbead9 | ||
|
698980d7be | ||
|
3b99d64935 | ||
|
cc22927353 | ||
|
36483670d4 | ||
|
a872b633fb | ||
|
11ea3cc7a4 | ||
|
4a43b17840 | ||
|
db7587c593 | ||
|
fc180266d5 | ||
|
a03911f954 | ||
|
143a404168 | ||
|
d3776e55fb | ||
|
262d76cc87 | ||
|
e11661ff3e | ||
|
562d3aacec | ||
|
6108d8d759 | ||
|
0a5fd94577 | ||
|
866aa34f54 | ||
|
e07df9fd1d | ||
|
4f6a342211 | ||
|
0fb02cd026 | ||
|
7aca052859 | ||
|
41f8368c7e | ||
|
b4230c4249 | ||
|
e2787d8c2a | ||
|
8f55ab858c | ||
|
f844bbacda | ||
|
1ef067c7e9 | ||
|
d7c7d694e0 | ||
|
1e27581afd | ||
|
60de01870e | ||
|
8b7a680a1e | ||
|
ed72e697de | ||
|
428e4be0b8 | ||
|
c99a1f53df | ||
|
6697a6c8c4 | ||
|
1bd93a2947 | ||
|
41a91f7da1 | ||
|
514cd2b7cd | ||
|
0cd95f8a39 | ||
|
a8a1fc470b | ||
|
ea3dd622b6 | ||
|
8543f3ea1d | ||
|
dcca9bb6f3 | ||
|
040d8c0fff | ||
|
1453c0b72c | ||
|
9b51e495ea | ||
|
2e4924a0da | ||
|
3d7757fa38 | ||
|
a709e9d3a8 | ||
|
88a0af8ba1 | ||
|
6202257fc2 | ||
|
d3d11d0ec8 | ||
|
967b6a463d | ||
|
caf5ff1080 | ||
|
a7f4ffa7d4 | ||
|
b962bed9d1 | ||
|
b7590f8c84 | ||
|
63e6d091b9 | ||
|
ec63ec8760 | ||
|
e25f5bb4ae | ||
|
970efe3647 | ||
|
581774774f | ||
|
eed3d97458 | ||
|
14b46c04bf | ||
|
7527998135 | ||
|
27185f7291 | ||
|
2a6d2dcf94 | ||
|
c688b65e17 | ||
|
55cd363f10 | ||
|
edc20ed510 | ||
|
e5c4373606 | ||
|
818c9e273e | ||
|
73338ac20b | ||
|
6e19ed893a | ||
|
8c8886a808 | ||
|
6b9fc870c2 | ||
|
93c1581e21 | ||
|
f1de843188 | ||
|
171651e205 | ||
|
a146521500 | ||
|
515a5c9d18 | ||
|
ea7916e0fa | ||
|
95d8c273c4 | ||
|
e4ee569bd2 | ||
|
ae99584a7b | ||
|
7002d06f4d | ||
|
a5ebf6daef | ||
|
7efc3203fc | ||
|
aad7906090 | ||
|
39815b3833 | ||
|
e55d188b27 | ||
|
4fb851c80e | ||
|
db8a7302a0 | ||
|
1a26df865c | ||
|
94bb4fbdae | ||
|
7760f4edf0 | ||
|
8fe2d987e0 | ||
|
44d84baa5a | ||
|
9d72341df2 | ||
|
9b029ac084 | ||
|
dec2ad8bea | ||
|
2275ba4f93 | ||
|
f1a587e9d7 | ||
|
e0eeae02dd | ||
|
9c802e0f57 | ||
|
901c5d059f | ||
|
bfa711b939 | ||
|
35484c9b79 | ||
|
f2d63ad7f7 | ||
|
979414ce85 | ||
|
5e0a387ed9 | ||
|
5a433f5476 | ||
|
49d35cf618 | ||
|
17127d97a6 | ||
|
6fc69b480f | ||
|
aec2a62931 | ||
|
a8c587f984 | ||
|
8a817cf402 | ||
|
df5eb1bfcc | ||
|
e5de5f3c87 | ||
|
924ae17e7c | ||
|
5429f5c1a9 | ||
|
c08754cf92 | ||
|
6c12b13ec1 | ||
|
8869b34470 | ||
|
1bdc47bf44 | ||
|
2cb7060539 | ||
|
75c1168f54 | ||
|
e098046ad3 | ||
|
0a492a00b1 | ||
|
72501c6e23 | ||
|
09644414b5 | ||
|
aec94cc5b3 | ||
|
464e4ca920 | ||
|
c845c5a372 | ||
|
f2389a1e53 | ||
|
5f8b9e5672 | ||
|
af831ff13f | ||
|
b8627c33fb | ||
|
76f7adfed4 | ||
|
53b064bc2f | ||
|
30a9ecc06b | ||
|
ddd99a5329 | ||
|
5d969ff65c | ||
|
f708a7b2b6 | ||
|
24f835a2a1 | ||
|
8281769edf | ||
|
4282ea46e4 | ||
|
59f7104dce | ||
|
a5fa35e785 | ||
|
59d10ef9f7 | ||
|
6b281e1726 | ||
|
6c12bba8a8 | ||
|
3d77238a4b | ||
|
6f15cddfbc | ||
|
e5e13faa7c | ||
|
c7d1936cb6 | ||
|
e96410c6e7 | ||
|
c7e65fbb7f | ||
|
7964d288a5 | ||
|
d2e6566147 | ||
|
c657dda753 | ||
|
d7255df768 | ||
|
bd52dcd590 | ||
|
24815255be | ||
|
54b9995e22 | ||
|
6c9299cf16 | ||
|
3455ab4ba9 | ||
|
22ddfc6663 | ||
|
67bdbd8530 | ||
|
68740d10f3 | ||
|
546a52e34a | ||
|
b0a0debf17 | ||
|
ec82997487 | ||
|
f815f1be7f | ||
|
a6b1219ba5 | ||
|
ca6b958c16 | ||
|
e73270bbe5 | ||
|
e96fa64fa6 | ||
|
ad7c93628b | ||
|
ff287c2e5d | ||
|
6e73a1bebf | ||
|
ae4c8a0bd1 | ||
|
d2811b7fa5 | ||
|
5adcab77a3 | ||
|
3d2ffe0c8e | ||
|
b25e1f23c4 | ||
|
42276c0177 | ||
|
e62aa4f767 | ||
|
87a68a6ea8 | ||
|
d37a533423 | ||
|
e7e2c569b3 | ||
|
18e4677cb7 | ||
|
233a54f73b | ||
|
dc7ce824da | ||
|
5cd43acb3c | ||
|
126a31c82a | ||
|
735bec6be2 | ||
|
ae8e10d900 | ||
|
3e02134547 | ||
|
17d0b79069 | ||
|
dfc00b26a7 | ||
|
7173ab3ca1 | ||
|
70e7363be6 | ||
|
e6e3db6e05 | ||
|
16efa357a2 | ||
|
8b1547cbe5 | ||
|
3f9c34c8b1 | ||
|
b41d33bcdf | ||
|
b16fac2cad | ||
|
fe1cfad4d3 | ||
|
9b6860fa1f | ||
|
f6081bb07e | ||
|
478a9cb251 | ||
|
00de7d1aa3 | ||
|
b7099f154e | ||
|
2e9600c71f | ||
|
50d3d01ea3 | ||
|
db2e7f9568 | ||
|
062e85366d | ||
|
0ea72368b0 | ||
|
dca25b9e90 | ||
|
2b1d3ce3d2 | ||
|
bf272a99db | ||
|
cc89fc6f79 | ||
|
1dcc78ea36 | ||
|
cf30860136 | ||
|
3471561a9f | ||
|
4176ef44de | ||
|
ed6335aa15 | ||
|
21f5f5d70b | ||
|
b0ef194a4b | ||
|
24b5e98922 | ||
|
f72b065d8a | ||
|
99133a3476 | ||
|
152f9ea444 | ||
|
f146be3d6c | ||
|
8551319682 | ||
|
d6d2c006ad | ||
|
a025a46acd | ||
|
8be792280a | ||
|
106aeae781 | ||
|
d9aafd154e | ||
|
2c0669046c | ||
|
a9e5e00624 | ||
|
1065c0b359 | ||
|
c9a78908e8 | ||
|
1bd4e6abbe | ||
|
282a759174 | ||
|
ee3fb71f7d | ||
|
9ced675423 | ||
|
d0891d17b9 | ||
|
85d80fe9fc | ||
|
81c596eb1d | ||
|
b04c5e81ca | ||
|
8e53434912 | ||
|
f3c368e547 | ||
|
cfb9b1fc0a | ||
|
773e4cfca4 | ||
|
5ce67320e1 | ||
|
b0452b3014 | ||
|
f7d942bfeb | ||
|
ad0b4e5e85 | ||
|
31a0f65a7e | ||
|
c9177e2338 | ||
|
71e5dcf414 | ||
|
31ba4eb876 | ||
|
963e28d661 | ||
|
642b4236fe | ||
|
f37b22b682 | ||
|
a2a046f35f | ||
|
c1d279e63f | ||
|
bdb03b9665 | ||
|
d96faa35dd | ||
|
2df009fac8 | ||
|
36bca799e1 | ||
|
4cfe92cf50 | ||
|
31564c7cf2 | ||
|
574961417d | ||
|
f912642c20 | ||
|
73038e518a | ||
|
a73a7ab193 | ||
|
f87c16a42b | ||
|
86631e4ddd | ||
|
1821b872d2 | ||
|
59eded976b | ||
|
8e88a47978 | ||
|
c6ca643bcf | ||
|
5c15b30058 | ||
|
f91b2bc17e | ||
|
73fb669fd8 | ||
|
a6663f19f0 | ||
|
ae897c1cd3 | ||
|
748519e94d | ||
|
7678da7926 | ||
|
e060bb3d8b | ||
|
ba5fe590c9 |
161 changed files with 21019 additions and 8363 deletions
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
|
@@ -0,0 +1 @@
|
|||
custom: https://github.com/Kozea/Radicale/wiki/Donations
|
15
.github/workflows/generate-documentation.yml
vendored
Normal file
15
.github/workflows/generate-documentation.yml
vendored
Normal file
|
@@ -0,0 +1,15 @@
|
|||
name: Generate documentation
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- DOCUMENTATION.md
|
||||
|
||||
jobs:
|
||||
generate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: gh-pages
|
||||
- name: Run generator
|
||||
run: documentation-generator/run.py
|
21
.github/workflows/pypi-publish.yml
vendored
Normal file
21
.github/workflows/pypi-publish.yml
vendored
Normal file
|
@@ -0,0 +1,21 @@
|
|||
name: PyPI publish
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
permissions:
|
||||
id-token: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.x
|
||||
- name: Install Build dependencies
|
||||
run: pip install build
|
||||
- name: Build
|
||||
run: python -m build --sdist --wheel
|
||||
- name: Publish to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
58
.github/workflows/test.yml
vendored
Normal file
58
.github/workflows/test.yml
vendored
Normal file
|
@@ -0,0 +1,58 @@
|
|||
name: Test
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
python-version: ['3.9', '3.10', '3.11', '3.12.3', '3.13.0', pypy-3.9]
|
||||
exclude:
|
||||
- os: windows-latest
|
||||
python-version: pypy-3.9
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Test dependencies
|
||||
run: pip install tox
|
||||
- name: Test
|
||||
run: tox -e py
|
||||
- name: Install Coveralls
|
||||
if: github.event_name == 'push'
|
||||
run: pip install coveralls
|
||||
- name: Upload coverage to Coveralls
|
||||
if: github.event_name == 'push'
|
||||
env:
|
||||
COVERALLS_PARALLEL: true
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: coveralls --service=github
|
||||
|
||||
coveralls-finish:
|
||||
needs: test
|
||||
if: github.event_name == 'push'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.x
|
||||
- name: Install Coveralls
|
||||
run: pip install coveralls
|
||||
- name: Finish Coveralls parallel builds
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: coveralls --service=github --finish
|
||||
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
- name: Install tox
|
||||
run: pip install tox
|
||||
- name: Lint
|
||||
run: tox -e flake8,mypy,isort
|
19
.gitignore
vendored
19
.gitignore
vendored
|
@@ -1,16 +1,25 @@
|
|||
*~
|
||||
.*.swp
|
||||
*.pyc
|
||||
__pycache__
|
||||
|
||||
MANIFEST
|
||||
|
||||
build
|
||||
dist
|
||||
Radicale.egg-info
|
||||
/MANIFEST
|
||||
/build
|
||||
/dist
|
||||
/*.egg-info
|
||||
/_site
|
||||
|
||||
coverage.xml
|
||||
.pytest_cache
|
||||
.cache
|
||||
.coverage
|
||||
.coverage.*
|
||||
.eggs
|
||||
.mypy_cache
|
||||
.project
|
||||
.pydevproject
|
||||
.settings
|
||||
.tox
|
||||
.vscode
|
||||
.sass-cache
|
||||
Gemfile.lock
|
||||
|
|
4
.mdl.style
Normal file
4
.mdl.style
Normal file
|
@@ -0,0 +1,4 @@
|
|||
all
|
||||
rule 'MD026', :punctuation => '.,;:!'
|
||||
exclude_rule 'MD001'
|
||||
exclude_rule 'MD024'
|
1
.mdlrc
Normal file
1
.mdlrc
Normal file
|
@@ -0,0 +1 @@
|
|||
style File.join(File.dirname(__FILE__), '.mdl.style')
|
25
.travis.yml
25
.travis.yml
|
@@ -1,25 +0,0 @@
|
|||
language: python
|
||||
sudo: false
|
||||
|
||||
matrix:
|
||||
include:
|
||||
- os: linux
|
||||
python: 3.3
|
||||
- os: linux
|
||||
python: 3.4
|
||||
- os: linux
|
||||
python: 3.5
|
||||
- os: linux
|
||||
python: 3.6
|
||||
- os: osx
|
||||
language: generic
|
||||
|
||||
before_install:
|
||||
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install python3; fi
|
||||
|
||||
install:
|
||||
- pip3 install --upgrade passlib bcrypt
|
||||
- pip3 install --upgrade --editable .[test]
|
||||
|
||||
script:
|
||||
- python3 setup.py test
|
692
CHANGELOG.md
Normal file
692
CHANGELOG.md
Normal file
|
@ -0,0 +1,692 @@
|
|||
# Changelog
|
||||
|
||||
## 3.5.1.dev
|
||||
|
||||
* Fix: auth/htpasswd related to detection and use of bcrypt
|
||||
* Add: option [auth] ldap_ignore_attribute_create_modify_timestamp for support of Authentik LDAP server
|
||||
* Extend: [storage] hook supports now placeholder for "cwd" and "path" (and catches unsupported placeholders)
|
||||
* Fix: location of lock file in case a dedicated cache folder is activated
|
||||
* Extend: log and create base folders if not existing during startup
|
||||
|
||||
## 3.5.0
|
||||
|
||||
* Add: option [auth] type oauth2 by code migration from https://gitlab.mim-libre.fr/alphabet/radicale_oauth/-/blob/dev/oauth2/
|
||||
* Fix: catch OS errors on PUT MKCOL MKCALENDAR MOVE PROPPATCH (insufficient storage, access denied, internal server error)
|
||||
* Test: skip bcrypt related tests if module is missing
|
||||
* Improve: relax mtime check on storage filesystem, change test file location to "collection-root" directory
|
||||
* Add: option [auth] type pam by code migration from v1, add new option pam_service
|
||||
* Cosmetics: extend list of used modules with their version on startup
|
||||
* Improve: WebUI
|
||||
* Add: option [server] script_name for reverse proxy base_prefix handling
|
||||
* Fix: proper base_prefix stripping if running behind reverse proxy
|
||||
* Review: Apache reverse proxy config example
|
||||
* Add: on-the-fly link activation and default content adjustment in case of bundled InfCloud (tested with 0.13.1)
|
||||
* Adjust: [auth] imap: use AUTHENTICATE PLAIN instead of LOGIN towards remote IMAP server
|
||||
* Improve: log client IP on SSL error and SSL protocol+cipher if successful
|
||||
* Improve: catch htpasswd hash verification errors
|
||||
* Improve: add support for more bcrypt algos on autodetection, extend logging for autodetection fallback to PLAIN in case of hash length is not matching
|
||||
* Add: warning in case of started standalone and not listen on loopback interface but trusting external authentication
|
||||
* Adjust: Change default [auth] type from "none" to "denyall" for secure-by-default
|
||||
|
||||
## 3.4.1
|
||||
* Add: option [auth] dovecot_connection_type / dovecot_host / dovecot_port
|
||||
* Add: option [auth] type imap by code migration from https://github.com/Unrud/RadicaleIMAP/
|
||||
|
||||
## 3.4.0
|
||||
* Add: option [auth] cache_logins/cache_successful_logins_expiry/cache_failed_logins for caching logins
|
||||
* Improve: [auth] log used hash method and result on debug for htpasswd authentication
|
||||
* Improve: [auth] htpasswd file now read and verified on start
|
||||
* Add: option [auth] htpasswd_cache to automatic re-read triggered on change (mtime or size) instead reading on each request
|
||||
* Improve: [auth] htpasswd: module 'bcrypt' is no longer mandatory in case digest method not used in file
|
||||
* Improve: [auth] successful/failed login logs now type and whether result was taken from cache
|
||||
* Improve: [auth] constant execution time for failed logins independent of external backend or by htpasswd used digest method
|
||||
* Drop: support for Python 3.8
|
||||
* Add: option [auth] ldap_user_attribute
|
||||
* Add: option [auth] ldap_groups_attribute as a more flexible replacement of removed ldap_load_groups
|
||||
|
||||
## 3.3.3
|
||||
* Add: display mtime_ns precision of storage folder with conditional warning if too low
|
||||
* Improve: disable fsync during storage verification
|
||||
* Improve: suppress duplicate log lines on startup
|
||||
* Contrib: logwatch config and script
|
||||
* Improve: log precondition result on PUT request
|
||||
|
||||
## 3.3.2
|
||||
* Fix: debug logging in rights/from_file
|
||||
* Add: option [storage] use_cache_subfolder_for_item for storing 'item' cache outside collection-root
|
||||
* Fix: ignore empty RRULESET in item
|
||||
* Add: option [storage] filesystem_cache_folder for defining location of cache outside collection-root
|
||||
* Add: option [storage] use_cache_subfolder_for_history for storing 'history' cache outside collection-root
|
||||
* Add: option [storage] use_cache_subfolder_for_synctoken for storing 'sync-token' cache outside collection-root
|
||||
* Add: option [storage] folder_umask for configuration of umask (overwrite system-default)
|
||||
* Fix: also remove 'item' from cache on delete
|
||||
* Improve: avoid automatically invalid cache on upgrade in case no change on cache structure
|
||||
* Improve: log important module versions on startup
|
||||
* Improve: auth.ldap config shown on startup, terminate in case no password is supplied for bind user
|
||||
* Add: option [auth] uc_username for uppercase conversion (similar to existing lc_username)
|
||||
* Add: option [logging] storage_cache_action_on_debug for conditional logging
|
||||
* Fix: set PRODID on collection upload (instead of vobject inserting a default one)
|
||||
* Add: option [storage] use_mtime_and_size_for_item_cache for changing cache lookup from SHA256 to mtime_ns + size
|
||||
* Fix: buggy cache file content creation on collection upload
|
||||
|
||||
## 3.3.1
|
||||
|
||||
* Add: option [auth] type=dovecot
|
||||
* Enhancement: log content in case of multiple main components error
|
||||
* Fix: expand does not take timezones into account
|
||||
* Fix: expand does not support overridden recurring events
|
||||
* Fix: expand does not honor start and end times
|
||||
* Add: option [server] protocol + ciphersuite for optional restrictions on SSL socket
|
||||
* Enhancement: [storage] hook documentation, logging, error behavior (no longer throwing an exception)
|
||||
|
||||
## 3.3.0
|
||||
|
||||
* Adjustment: option [auth] htpasswd_encryption change default from "md5" to "autodetect"
|
||||
* Add: option [auth] type=ldap with (group) rights management via LDAP/LDAPS
|
||||
* Enhancement: permit_delete_collection can be now controlled also per collection by rights 'D' or 'd'
|
||||
* Add: option [rights] permit_overwrite_collection (default=True) which can be also controlled per collection by rights 'O' or 'o'
|
||||
* Fix: only expand VEVENT on REPORT request containing 'expand'
|
||||
* Adjustment: switch from setup.py to pyproject.toml (but keep files for legacy packaging)
|
||||
* Adjustment: 'rights' file is now read only during startup
|
||||
* Cleanup: Python 3.7 leftovers
|
||||
|
||||
## 3.2.3
|
||||
* Add: support for Python 3.13
|
||||
* Fix: Using icalendar's tzinfo on created datetime to fix issue with icalendar
|
||||
* Fix: typos in code
|
||||
* Enhancement: Added free-busy report
|
||||
* Enhancement: Added 'max_freebusy_occurrences` setting to avoid potential DOS on reports
|
||||
* Enhancement: remove unexpected control codes from uploaded items
|
||||
* Enhancement: add 'strip_domain' setting for username handling
|
||||
* Enhancement: add option to toggle debug log of rights rules which don't match
|
||||
* Drop: remove unused requirement "typeguard"
|
||||
* Improve: Refactored some date parsing code
|
||||
|
||||
## 3.2.2
|
||||
* Enhancement: add support for auth.type=denyall (will be default for security reasons in upcoming releases)
|
||||
* Enhancement: display warning in case only default config is active
|
||||
* Enhancement: display warning in case no user authentication is active
|
||||
* Enhancement: add option to skip broken item to avoid triggering exception (default: enabled)
|
||||
* Enhancement: add support for predefined collections for new users
|
||||
* Enhancement: add options to enable several parts in debug log like backtrace, request_header, request_content, response_content (default: disabled)
|
||||
* Enhancement: rights/from_file: display resulting permission of a match in debug log
|
||||
* Enhancement: add Apache config file example (see contrib directory)
|
||||
* Fix: "verify-collection" skips non-collection directories, logging improved
|
||||
|
||||
## 3.2.1
|
||||
|
||||
* Enhancement: add option for logging bad PUT request content
|
||||
* Enhancement: extend logging with step where bad PUT request failed
|
||||
* Fix: support for recurrence "full day"
|
||||
* Fix: list of web_files related to HTML pages
|
||||
* Test: update/adjustments for workflows (pytest>=7, typeguard<4.3)
|
||||
|
||||
## 3.2.0
|
||||
|
||||
* Enhancement: add hook support for event changes+deletion hooks (initial support: "rabbitmq")
|
||||
* Dependency: pika >= 1.1.0
|
||||
* Enhancement: add support for webcal subscriptions
|
||||
* Enhancement: major update of WebUI (design+features)
|
||||
* Adjust: change default loglevel to "info"
|
||||
* Enhancement: support "expand-property" on REPORT request
|
||||
* Drop: support for Python 3.7 (EOSL, can't be tested anymore)
|
||||
* Fix: allow quoted-printable encoding for vObjects
|
||||
|
||||
## 3.1.9
|
||||
|
||||
* Add: support for Python 3.11 + 3.12
|
||||
* Drop: support for Python 3.6
|
||||
* Fix: MOVE in case listen on non-standard ports or behind reverse proxy
|
||||
* Fix: stricter requirements of Python 3.11
|
||||
* Fix: HTML pages
|
||||
* Fix: Main Component is missing when only recurrence id exists
|
||||
* Fix: passlib don't support bcrypt>=4.1
|
||||
* Fix: web login now proper encodes passwords containing %XX (hexdigits)
|
||||
* Enhancement: user-selectable log formats
|
||||
* Enhancement: autodetect logging to systemd journal
|
||||
* Enhancement: test code
|
||||
* Enhancement: option for global permit to delete collection
|
||||
* Enhancement: auth type 'htpasswd' supports now 'htpasswd_encryption' sha256/sha512 and "autodetect" for smooth transition
|
||||
* Improve: Dockerfiles
|
||||
* Improve: server socket listen code + address format in log
|
||||
* Update: documentations + examples
|
||||
* Dependency: limit typeguard version < 3
|
||||
* General: code cosmetics
|
||||
|
||||
## 3.1.8
|
||||
|
||||
* Fix setuptools requirement if installing wheel
|
||||
* Tests: Switch from `python setup.py test` to `tox`
|
||||
* Small changes to build system configuration and tests
|
||||
|
||||
## 3.1.7
|
||||
|
||||
* Fix random href fallback
|
||||
|
||||
## 3.1.6
|
||||
|
||||
* Ignore `Not a directory` error for optional config paths
|
||||
* Fix upload of whole address book/calendar with UIDs that collide on
|
||||
case-insensitive filesystem
|
||||
* Remove runtime dependency on setuptools for Python>=3.9
|
||||
* Windows: Block ADS paths
|
||||
|
||||
## 3.1.5
|
||||
|
||||
* Ignore configuration file if access is denied
|
||||
* Use F_FULLFSYNC with PyPy on MacOS
|
||||
* Fallback if F_FULLFSYNC is not supported by the filesystem
|
||||
|
||||
## 3.1.4
|
||||
|
||||
* Fallback if RENAME_EXCHANGE is not supported by the filesystem
|
||||
* Assume POSIX compatibility if `sys.platform` is not `win32`
|
||||
|
||||
## 3.1.3
|
||||
|
||||
* Redirect '…/.well-known/caldav' and '…/.well-known/carddav' to base prefix
|
||||
* Warning instead of error when base prefix ends with '/'
|
||||
|
||||
## 3.1.2
|
||||
|
||||
* Verify that base prefix starts with '/' but doesn't end with '/'
|
||||
* Improve base prefix log message
|
||||
* Never send body for HEAD requests (again)
|
||||
|
||||
## 3.1.1
|
||||
|
||||
* Workaround for contact photo bug in InfCloud
|
||||
* Redirect GET and HEAD requests under `/.web` to sanitized path
|
||||
* Set `Content-Length` header for HEAD requests
|
||||
* Never send body for HEAD requests
|
||||
* Improve error messages for `from_file` rights backend
|
||||
* Don't sanitize WSGI script name
|
||||
|
||||
## 3.1.0
|
||||
|
||||
* Single `<D:propstat>` element in PROPPATCH response
|
||||
* Allow multiple `<D:set>` and `<D:remove>` elements
|
||||
* Improve log messages
|
||||
* Fix date filter
|
||||
* Improve sanitization of collection properties
|
||||
* Cancel mkcalendar request on error
|
||||
* Use **renameat2** on Linux for atomic overwriting of collections
|
||||
* Command Line Parser
|
||||
* Disallow abbreviated arguments
|
||||
* Support backend specific options and HTTP headers
|
||||
* Optional argument for boolean options
|
||||
* Load no config file for `--config` without argument
|
||||
* Allow float for server->timeout setting
|
||||
* Fix **is-not-defined** filter in **addressbook-query** report
|
||||
* Add python type hints
|
||||
* Add **multifilesystem_nolock** storage
|
||||
* Add support for Python 3.9 and 3.10
|
||||
* Drop support for Python 3.5
|
||||
* Fix compatibility with Evolution (Exceptions from recurrence rules)
|
||||
|
||||
## 3.0.6
|
||||
|
||||
* Allow web plugins to handle POST requests
|
||||
|
||||
## 3.0.5
|
||||
|
||||
* Start storage hook in own process group
|
||||
* Kill storage hook on error or exit
|
||||
* Try to kill child processes of storage hook
|
||||
* Internal Server: Exit immediately when signal is received
|
||||
(do not wait for clients or storage hook to finish)
|
||||
|
||||
## 3.0.4
|
||||
|
||||
* Fix internal server on FreeBSD
|
||||
|
||||
## 3.0.3
|
||||
|
||||
* Fix internal server on OpenBSD
|
||||
|
||||
## 3.0.2
|
||||
|
||||
* Use 403 response for supported-report and valid-sync-token errors
|
||||
* Internal server: Handle missing IPv6 support
|
||||
|
||||
## 3.0.1
|
||||
|
||||
* Fix XML error messages
|
||||
|
||||
## 3.0.0
|
||||
|
||||
This release is incompatible with previous releases.
|
||||
See the upgrade checklist below.
|
||||
|
||||
* Parallel write requests
|
||||
* Support PyPy
|
||||
* Protect against XML denial-of-service attacks
|
||||
* Check for duplicated UIDs in calendars/address books
|
||||
* Only add missing UIDs for uploaded whole calendars/address books
|
||||
* Switch from md5 to sha256 for UIDs and tokens
|
||||
* Code cleanup:
|
||||
* All plugin interfaces were simplified and are incompatible with
|
||||
old plugins
|
||||
* Major refactor
|
||||
* Never sanitize paths multiple times (check if they are sanitized)
|
||||
* Config
|
||||
* Multiple configuration files separated by `:` (resp. `;`
|
||||
on Windows)
|
||||
* Optional configuration files by prepending file path with `?`
|
||||
* Check validity of every configuration file and command line
|
||||
arguments separately
|
||||
* Report the source of invalid configuration parameters in
|
||||
error messages
|
||||
* Code cleanup:
|
||||
* Store configuration as parsed values
|
||||
* Use Schema that describes configuration and allow plugins to apply
|
||||
their own schemas
|
||||
* Mark internal settings with `_`
|
||||
* Internal server
|
||||
* Bind to IPv4 and IPv6 address, when both are available for hostname
|
||||
* Set default address to `localhost:5232`
|
||||
* Remove settings for SSL ciphers and protocol versions (enforce safe
|
||||
defaults instead)
|
||||
* Remove settings for file locking because they are of little use
|
||||
* Remove daemonization (should be handled by service managers)
|
||||
* Logging
|
||||
* Replace complex Python logger configuration with simple
|
||||
`logging.level` setting
|
||||
* Write PID and `threadName` instead of cryptic id's in log messages
|
||||
* Use `wsgi.errors` for logging (as required by the WSGI spec)
|
||||
* Code cleanup:
|
||||
* Don't pass logger object around (use `logging.getLogger()`
|
||||
instead)
|
||||
* Auth
|
||||
* Use `md5` as default for `htpasswd_encryption` setting
|
||||
* Move setting `realm` from section `server` to `auth`
|
||||
* Rights
|
||||
* Use permissions `RW` for non-leaf collections and `rw` for
|
||||
address books/calendars
|
||||
* New permission `i` that only allows access with HTTP method GET
|
||||
(CalDAV/CardDAV is susceptible to expensive search requests)
|
||||
* Web
|
||||
* Add upload dialog for calendars/address books from file
|
||||
* Show startup loading message
|
||||
* Show warning if JavaScript is disabled
|
||||
* Pass HTML Validator
|
||||
* Storage
|
||||
* Check for missing UIDs in items
|
||||
* Check for child collections in address books and calendars
|
||||
* Code cleanup:
|
||||
* Split BaseCollection in BaseStorage and BaseCollection
|
||||
|
||||
## Upgrade checklist
|
||||
|
||||
* Config
|
||||
* Some settings were removed
|
||||
* The default of `auth.htpasswd_encryption` changed to `md5`
|
||||
* The setting `server.realm` moved to `auth.realm`
|
||||
* The setting `logging.debug` was replaced by `logging.level`
|
||||
* The format of the `rights.file` configuration file changed:
|
||||
* Permission `r` replaced by `Rr`
|
||||
* Permission `w` replaced by `Ww`
|
||||
* New permission `i` added as subset of `r`
|
||||
* Replaced variable `%(login)s` by `{user}`
|
||||
* Removed variable `%(path)s`
|
||||
* `{` must be escaped as `{{` and `}` as `}}` in regexes
|
||||
* File system storage
|
||||
* The storage format is compatible with Radicale 2.x.x
|
||||
* Run `radicale --verify-storage` to check for errors
|
||||
* Custom plugins:
|
||||
* `auth` and `web` plugins require minor adjustments
|
||||
* `rights` plugins must be adapted to the new permission model
|
||||
* `storage` plugins require major changes
|
||||
|
||||
## 2.1.10 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Update required versions for dependencies
|
||||
* Get `RADICALE_CONFIG` from WSGI environ
|
||||
* Improve HTTP status codes
|
||||
* Fix race condition in storage lock creation
|
||||
* Raise default limits for content length and timeout
|
||||
* Log output from hook
|
||||
|
||||
## 2.1.9 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Specify versions for dependencies
|
||||
* Move WSGI initialization into module
|
||||
* Check if `REPORT` method is actually supported
|
||||
* Include `rights` file in source distribution
|
||||
* Specify `md5` and `bcrypt` as extras
|
||||
* Improve logging messages
|
||||
* Windows: Fix crash when item path is a directory
|
||||
|
||||
## 2.1.8 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Flush files before fsync'ing
|
||||
|
||||
## 2.1.7 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Don't print warning when cache format changes
|
||||
* Add documentation for `BaseAuth`
|
||||
* Add `is_authenticated2(login, user, password)` to `BaseAuth`
|
||||
* Fix names of custom properties in PROPFIND requests with
|
||||
`D:propname` or `D:allprop`
|
||||
* Return all properties in PROPFIND requests with `D:propname` or
|
||||
`D:allprop`
|
||||
* Allow `D:displayname` property on all collections
|
||||
* Answer with `D:unauthenticated` for `D:current-user-principal` property
|
||||
when not logged in
|
||||
* Remove non-existing `ICAL:calendar-color` and `C:calendar-timezone`
|
||||
properties from PROPFIND requests with `D:propname` or `D:allprop`
|
||||
* Add `D:owner` property to calendar and address book objects
|
||||
* Remove `D:getetag` and `D:getlastmodified` properties from regular
|
||||
collections
|
||||
|
||||
## 2.1.6 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Fix content-type of VLIST
|
||||
* Specify correct COMPONENT in content-type of VCALENDAR
|
||||
* Cache COMPONENT of calendar objects (improves speed with some clients)
|
||||
* Stricter parsing of filters
|
||||
* Improve support for CardDAV filter
|
||||
* Fix some smaller bugs in CalDAV filter
|
||||
* Add X-WR-CALNAME and X-WR-CALDESC to calendars downloaded via HTTP/WebDAV
|
||||
* Use X-WR-CALNAME and X-WR-CALDESC from calendars published via WebDAV
|
||||
|
||||
## 2.1.5 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Add `--verify-storage` command-line argument
|
||||
* Allow comments in the htpasswd file
|
||||
* Don't strip whitespaces from user names and passwords in the htpasswd file
|
||||
* Remove cookies from logging output
|
||||
* Allow uploads of whole collections with many components
|
||||
* Show warning message if server.timeout is used with Python < 3.5.2
|
||||
|
||||
## 2.1.4 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Fix incorrect time range matching and calculation for some edge-cases with
|
||||
rescheduled recurrences
|
||||
* Fix owner property
|
||||
|
||||
## 2.1.3 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Enable timeout for SSL handshakes and move them out of the main thread
|
||||
* Create cache entries during upload of items
|
||||
* Stop built-in server on Windows when Ctrl+C is pressed
|
||||
* Prevent slow down when multiple requests hit a collection during cache warm-up
|
||||
|
||||
## 2.1.2 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Remove workarounds for bugs in VObject < 0.9.5
|
||||
* Error checking of collection tags and associated components
|
||||
* Improve error checking of uploaded collections and components
|
||||
* Don't delete empty collection properties implicitly
|
||||
* Improve logging of VObject serialization
|
||||
|
||||
## 2.1.1 - Wild Radish Again
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Add missing UIDs instead of failing
|
||||
* Improve error checking of calendar and address book objects
|
||||
* Fix upload of whole address books
|
||||
|
||||
## 2.1.0 - Wild Radish
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Built-in web interface for creating and managing address books and calendars
|
||||
* can be extended with web plugins
|
||||
* Much faster storage backend
|
||||
* Significant reduction in memory usage
|
||||
* Improved logging
|
||||
* Include paths (of invalid items / requests) in log messages
|
||||
* Include configuration values causing problems in log messages
|
||||
* Log warning message for invalid requests by clients
|
||||
* Log error message for invalid files in the storage backend
|
||||
* No stack traces unless debugging is enabled
|
||||
* Time range filter also regards overwritten recurrences
|
||||
* Items that couldn't be filtered because of bugs in VObject are always
|
||||
returned (and a warning message is logged)
|
||||
* Basic error checking of configuration files
|
||||
* File system locking isn't disabled implicitly anymore, instead a new
|
||||
configuration option gets introduced
|
||||
* The permissions of the lock file are not changed anymore
|
||||
* Support for sync-token
|
||||
* Support for client-side SSL certificates
|
||||
* Rights plugins can decide if access to an item is granted explicitly
|
||||
* Respond with 403 instead of 404 for principal collections of non-existing
|
||||
users when `owner_only` plugin is used (information leakage)
|
||||
* Authentication plugins can provide the login and password from the
|
||||
environment
|
||||
* new `remote_user` plugin, that gets the login from the `REMOTE_USER`
|
||||
environment variable (for WSGI server)
|
||||
* new `http_x_remote_user` plugin, that gets the login from the
|
||||
`X-Remote-User` HTTP header (for reverse proxies)
|
||||
|
||||
## 2.0.0 - Little Big Radish
|
||||
|
||||
This feature is not compatible with the 1.x.x versions. Follow our
|
||||
[migration guide](https://radicale.org/2.1.html#documentation/migration-from-1xx-to-2xx)
|
||||
if you want to switch from 1.x.x to 2.0.0.
|
||||
|
||||
* Support Python 3.3+ only, Python 2 is not supported anymore
|
||||
* Keep only one simple filesystem-based storage system
|
||||
* Remove built-in Git support
|
||||
* Remove built-in authentication modules
|
||||
* Keep the WSGI interface, use Python HTTP server by default
|
||||
* Use a real iCal parser, rely on the "vobject" external module
|
||||
* Add a solid calendar discovery
|
||||
* Respect the difference between "files" and "folders", don't rely on slashes
|
||||
* Remove the calendar creation with GET requests
|
||||
* Be stateless
|
||||
* Use a file locker
|
||||
* Add threading
|
||||
* Get atomic writes
|
||||
* Support new filters
|
||||
* Support read-only permissions
|
||||
* Allow External plugins for authentication, rights management, storage and
|
||||
version control
|
||||
|
||||
## 1.1.4 - Fifth Law of Nature
|
||||
|
||||
* Use `shutil.move` for `--export-storage`
|
||||
|
||||
## 1.1.3 - Fourth Law of Nature
|
||||
|
||||
* Add a `--export-storage=FOLDER` command-line argument (by Unrud, see #606)
|
||||
|
||||
## 1.1.2 - Third Law of Nature
|
||||
|
||||
* **Security fix**: Add a random timer to avoid timing oracles and simple
|
||||
bruteforce attacks when using the htpasswd authentication method.
|
||||
* Various minor fixes.
|
||||
|
||||
## 1.1.1 - Second Law of Nature
|
||||
|
||||
* Fix the owner_write rights rule
|
||||
|
||||
## 1.1 - Law of Nature
|
||||
|
||||
One feature in this release is **not backward compatible**:
|
||||
|
||||
* Use the first matching section for rights (inspired from daald)
|
||||
|
||||
Now, the first section matching the path and current user in your custom rights
|
||||
file is used. In the previous versions, the most permissive rights of all the
|
||||
matching sections were applied. This new behaviour gives a simple way to make
|
||||
specific rules at the top of the file independent from the generic ones.
|
||||
|
||||
Many **improvements in this release are related to security**, you should
|
||||
upgrade Radicale as soon as possible:
|
||||
|
||||
* Improve the regex used for well-known URIs (by Unrud)
|
||||
* Prevent regex injection in rights management (by Unrud)
|
||||
* Prevent crafted HTTP request from calling arbitrary functions (by Unrud)
|
||||
* Improve URI sanitation and conversion to filesystem path (by Unrud)
|
||||
* Decouple the daemon from its parent environment (by Unrud)
|
||||
|
||||
Some bugs have been fixed and little enhancements have been added:
|
||||
|
||||
* Assign new items to correct key (by Unrud)
|
||||
* Avoid race condition in PID file creation (by Unrud)
|
||||
* Improve the docker version (by cdpb)
|
||||
* Encode message and committer for git commits
|
||||
* Test with Python 3.5
|
||||
|
||||
## 1.0.1 - Sunflower Again
|
||||
|
||||
* Update the version because of a **stupid** "feature"™ of PyPI
|
||||
|
||||
## 1.0 - Sunflower
|
||||
|
||||
* Enhanced performances (by Mathieu Dupuy)
|
||||
* Add MD5-APR1 and BCRYPT for htpasswd-based authentication (by Jan-Philip Gehrcke)
|
||||
* Use PAM service (by Stephen Paul Weber)
|
||||
* Don't discard PROPPATCH on empty collections (by Markus Unterwaditzer)
|
||||
* Write the path of the collection in the git message (by Matthew Monaco)
|
||||
* Tests launched on Travis
|
||||
|
||||
## 0.10 - Lovely Endless Grass
|
||||
|
||||
* Support well-known URLs (by Mathieu Dupuy)
|
||||
* Fix collection discovery (by Markus Unterwaditzer)
|
||||
* Reload logger config on SIGHUP (by Élie Bouttier)
|
||||
* Remove props files when deleting a collection (by Vincent Untz)
|
||||
* Support salted SHA1 passwords (by Marc Kleine-Budde)
|
||||
* Don't spam the logs about non-SSL IMAP connections to localhost (by Giel van Schijndel)
|
||||
|
||||
## 0.9 - Rivers
|
||||
|
||||
* Custom handlers for auth, storage and rights (by Sergey Fursov)
|
||||
* 1-file-per-event storage (by Jean-Marc Martins)
|
||||
* Git support for filesystem storages (by Jean-Marc Martins)
|
||||
* DB storage working with PostgreSQL, MariaDB and SQLite (by Jean-Marc Martins)
|
||||
* Clean rights manager based on regular expressions (by Sweil)
|
||||
* Support of contacts for Apple's clients
|
||||
* Support colors (by Jochen Sprickerhof)
|
||||
* Decode URLs in XML (by Jean-Marc Martins)
|
||||
* Fix PAM authentication (by Stepan Henek)
|
||||
* Use consistent etags (by 9m66p93w)
|
||||
* Use consistent sorting order (by Daniel Danner)
|
||||
* Return 401 on unauthorized DELETE requests (by Eduard Braun)
|
||||
* Move pid file creation in child process (by Mathieu Dupuy)
|
||||
* Allow requests without base_prefix (by jheidemann)
|
||||
|
||||
## 0.8 - Rainbow
|
||||
|
||||
* New authentication and rights management modules (by Matthias Jordan)
|
||||
* Experimental database storage
|
||||
* Command-line option for custom configuration file (by Mark Adams)
|
||||
* Root URL not at the root of a domain (by Clint Adams, Fabrice Bellet, Vincent Untz)
|
||||
* Improved support for iCal, CalDAVSync, CardDAVSync, CalDavZAP and CardDavMATE
|
||||
* Empty PROPFIND requests handled (by Christoph Polcin)
|
||||
* Colon allowed in passwords
|
||||
* Configurable realm message
|
||||
|
||||
## 0.7.1 - Waterfalls
|
||||
|
||||
* Many address books fixes
|
||||
* New IMAP ACL (by Daniel Aleksandersen)
|
||||
* PAM ACL fixed (by Daniel Aleksandersen)
|
||||
* Courier ACL fixed (by Benjamin Frank)
|
||||
* Always set display name to collections (by Oskari Timperi)
|
||||
* Various DELETE responses fixed
|
||||
|
||||
## 0.7 - Eternal Sunshine
|
||||
|
||||
* Repeating events
|
||||
* Collection deletion
|
||||
* Courier and PAM authentication methods
|
||||
* CardDAV support
|
||||
* Custom LDAP filters supported
|
||||
|
||||
## 0.6.4 - Tulips
|
||||
|
||||
* Fix the installation with Python 3.1
|
||||
|
||||
## 0.6.3 - Red Roses
|
||||
|
||||
* MOVE requests fixed
|
||||
* Faster REPORT answers
|
||||
* Executable script moved into the package
|
||||
|
||||
## 0.6.2 - Seeds
|
||||
|
||||
* iPhone and iPad support fixed
|
||||
* Backslashes replaced by slashes in PROPFIND answers on Windows
|
||||
* PyPI archive set as default download URL
|
||||
|
||||
## 0.6.1 - Growing Up
|
||||
|
||||
* Example files included in the tarball
|
||||
* htpasswd support fixed
|
||||
* Redirection loop bug fixed
|
||||
* Testing message on GET requests
|
||||
|
||||
## 0.6 - Sapling
|
||||
|
||||
* WSGI support
|
||||
* IPv6 support
|
||||
* Smart, verbose and configurable logs
|
||||
* Apple iCal 4 and iPhone support (by Łukasz Langa)
|
||||
* KDE KOrganizer support
|
||||
* LDAP auth backend (by Corentin Le Bail)
|
||||
* Public and private calendars (by René Neumann)
|
||||
* PID file
|
||||
* MOVE requests management
|
||||
* Journal entries support
|
||||
* Drop Python 2.5 support
|
||||
|
||||
## 0.5 - Historical Artifacts
|
||||
|
||||
* Calendar depth
|
||||
* MacOS and Windows support
|
||||
* HEAD requests management
|
||||
* htpasswd user from calendar path
|
||||
|
||||
## 0.4 - Hot Days Back
|
||||
|
||||
* Personal calendars
|
||||
* Last-Modified HTTP header
|
||||
* `no-ssl` and `foreground` options
|
||||
* Default configuration file
|
||||
|
||||
## 0.3 - Dancing Flowers
|
||||
|
||||
* Evolution support
|
||||
* Version management
|
||||
|
||||
## 0.2 - Snowflakes
|
||||
|
||||
* Sunbird pre-1.0 support
|
||||
* SSL connection
|
||||
* Htpasswd authentication
|
||||
* Daemon mode
|
||||
* User configuration
|
||||
* Twisted dependency removed
|
||||
* Python 3 support
|
||||
* Real URLs for PUT and DELETE
|
||||
* Concurrent modification reported to users
|
||||
* Many bugs fixed (by Roger Wenham)
|
||||
|
||||
## 0.1 - Crazy Vegetables
|
||||
|
||||
* First release
|
||||
* Lightning/Sunbird 0.9 compatibility
|
||||
* Easy installer
|
674
COPYING
674
COPYING
|
@ -1,674 +0,0 @@
|
|||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
675
COPYING.md
Normal file
675
COPYING.md
Normal file
|
@ -0,0 +1,675 @@
|
|||
### GNU GENERAL PUBLIC LICENSE
|
||||
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc.
|
||||
<https://fsf.org/>
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed.
|
||||
|
||||
### Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom
|
||||
to share and change all versions of a program--to make sure it remains
|
||||
free software for all its users. We, the Free Software Foundation, use
|
||||
the GNU General Public License for most of our software; it applies
|
||||
also to any other work released this way by its authors. You can apply
|
||||
it to your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you
|
||||
have certain responsibilities if you distribute copies of the
|
||||
software, or if you modify it: responsibilities to respect the freedom
|
||||
of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the
|
||||
manufacturer can do so. This is fundamentally incompatible with the
|
||||
aim of protecting users' freedom to change the software. The
|
||||
systematic pattern of such abuse occurs in the area of products for
|
||||
individuals to use, which is precisely where it is most unacceptable.
|
||||
Therefore, we have designed this version of the GPL to prohibit the
|
||||
practice for those products. If such problems arise substantially in
|
||||
other domains, we stand ready to extend this provision to those
|
||||
domains in future versions of the GPL, as needed to protect the
|
||||
freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish
|
||||
to avoid the special danger that patents applied to a free program
|
||||
could make it effectively proprietary. To prevent this, the GPL
|
||||
assures that patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
### TERMS AND CONDITIONS
|
||||
|
||||
#### 0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds
|
||||
of works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of
|
||||
an exact copy. The resulting work is called a "modified version" of
|
||||
the earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user
|
||||
through a computer network, with no transfer of a copy, is not
|
||||
conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices" to
|
||||
the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
#### 1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work for
|
||||
making modifications to it. "Object code" means any non-source form of
|
||||
a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users can
|
||||
regenerate automatically from other parts of the Corresponding Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that same
|
||||
work.
|
||||
|
||||
#### 2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not convey,
|
||||
without conditions so long as your license otherwise remains in force.
|
||||
You may convey covered works to others for the sole purpose of having
|
||||
them make modifications exclusively for you, or provide you with
|
||||
facilities for running those works, provided that you comply with the
|
||||
terms of this License in conveying all material for which you do not
|
||||
control copyright. Those thus making or running the covered works for
|
||||
you must do so exclusively on your behalf, under your direction and
|
||||
control, on terms that prohibit them from making any copies of your
|
||||
copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under the
|
||||
conditions stated below. Sublicensing is not allowed; section 10 makes
|
||||
it unnecessary.
|
||||
|
||||
#### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such
|
||||
circumvention is effected by exercising rights under this License with
|
||||
respect to the covered work, and you disclaim any intention to limit
|
||||
operation or modification of the work as a means of enforcing, against
|
||||
the work's users, your or third parties' legal rights to forbid
|
||||
circumvention of technological measures.
|
||||
|
||||
#### 4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
#### 5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these
|
||||
conditions:
|
||||
|
||||
- a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
- b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under
|
||||
section 7. This requirement modifies the requirement in section 4
|
||||
to "keep intact all notices".
|
||||
- c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
- d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
#### 6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms of
|
||||
sections 4 and 5, provided that you also convey the machine-readable
|
||||
Corresponding Source under the terms of this License, in one of these
|
||||
ways:
|
||||
|
||||
- a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
- b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the Corresponding
|
||||
Source from a network server at no charge.
|
||||
- c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
- d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
- e) Convey the object code using peer-to-peer transmission,
|
||||
provided you inform other peers where the object code and
|
||||
Corresponding Source of the work are being offered to the general
|
||||
public at no charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal,
|
||||
family, or household purposes, or (2) anything designed or sold for
|
||||
incorporation into a dwelling. In determining whether a product is a
|
||||
consumer product, doubtful cases shall be resolved in favor of
|
||||
coverage. For a particular product received by a particular user,
|
||||
"normally used" refers to a typical or common use of that class of
|
||||
product, regardless of the status of the particular user or of the way
|
||||
in which the particular user actually uses, or expects or is expected
|
||||
to use, the product. A product is a consumer product regardless of
|
||||
whether the product has substantial commercial, industrial or
|
||||
non-consumer uses, unless such uses represent the only significant
|
||||
mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to
|
||||
install and execute modified versions of a covered work in that User
|
||||
Product from a modified version of its Corresponding Source. The
|
||||
information must suffice to ensure that the continued functioning of
|
||||
the modified object code is in no case prevented or interfered with
|
||||
solely because modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or
|
||||
updates for a work that has been modified or installed by the
|
||||
recipient, or for the User Product in which it has been modified or
|
||||
installed. Access to a network may be denied when the modification
|
||||
itself materially and adversely affects the operation of the network
|
||||
or violates the rules and protocols for communication across the
|
||||
network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
#### 7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders
|
||||
of that material) supplement the terms of this License with terms:
|
||||
|
||||
- a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
- b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
- c) Prohibiting misrepresentation of the origin of that material,
|
||||
or requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
- d) Limiting the use for publicity purposes of names of licensors
|
||||
or authors of the material; or
|
||||
- e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
- f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions
|
||||
of it) with contractual assumptions of liability to the recipient,
|
||||
for any liability that these contractual assumptions directly
|
||||
impose on those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions; the
|
||||
above requirements apply either way.
|
||||
|
||||
#### 8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your license
|
||||
from a particular copyright holder is reinstated (a) provisionally,
|
||||
unless and until the copyright holder explicitly and finally
|
||||
terminates your license, and (b) permanently, if the copyright holder
|
||||
fails to notify you of the violation by some reasonable means prior to
|
||||
60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
#### 9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or run
|
||||
a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
#### 10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
#### 11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims owned
|
||||
or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within the
|
||||
scope of its coverage, prohibits the exercise of, or is conditioned on
|
||||
the non-exercise of one or more of the rights that are specifically
|
||||
granted under this License. You may not convey a covered work if you
|
||||
are a party to an arrangement with a third party that is in the
|
||||
business of distributing software, under which you make payment to the
|
||||
third party based on the extent of your activity of conveying the
|
||||
work, and under which the third party grants, to any of the parties
|
||||
who would receive the covered work from you, a discriminatory patent
|
||||
license (a) in connection with copies of the covered work conveyed by
|
||||
you (or copies made from those copies), or (b) primarily for and in
|
||||
connection with specific products or compilations that contain the
|
||||
covered work, unless you entered into that arrangement, or that patent
|
||||
license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
#### 12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under
|
||||
this License and any other pertinent obligations, then as a
|
||||
consequence you may not convey it at all. For example, if you agree to
|
||||
terms that obligate you to collect a royalty for further conveying
|
||||
from those to whom you convey the Program, the only way you could
|
||||
satisfy both those terms and this License would be to refrain entirely
|
||||
from conveying the Program.
|
||||
|
||||
#### 13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
#### 14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions
|
||||
of the GNU General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in
|
||||
detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
specifies that a certain numbered version of the GNU General Public
|
||||
License "or any later version" applies to it, you have the option of
|
||||
following the terms and conditions either of that numbered version or
|
||||
of any later version published by the Free Software Foundation. If the
|
||||
Program does not specify a version number of the GNU General Public
|
||||
License, you may choose any version ever published by the Free
|
||||
Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future versions
|
||||
of the GNU General Public License can be used, that proxy's public
|
||||
statement of acceptance of a version permanently authorizes you to
|
||||
choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
#### 15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
|
||||
WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
|
||||
PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
|
||||
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
|
||||
CORRECTION.
|
||||
|
||||
#### 16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
|
||||
CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
|
||||
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
|
||||
NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
|
||||
LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
|
||||
TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
|
||||
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
#### 17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
### How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these
|
||||
terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest to
|
||||
attach them to the start of each source file to most effectively state
|
||||
the exclusion of warranty; and each file should have at least the
|
||||
"copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper
|
||||
mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the
|
||||
appropriate parts of the General Public License. Of course, your
|
||||
program's commands might be different; for a GUI interface, you would
|
||||
use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or
|
||||
school, if any, to sign a "copyright disclaimer" for the program, if
|
||||
necessary. For more information on this, and how to apply and follow
|
||||
the GNU GPL, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your
|
||||
program into proprietary programs. If your program is a subroutine
|
||||
library, you may consider it more useful to permit linking proprietary
|
||||
applications with the library. If this is what you want to do, use the
|
||||
GNU Lesser General Public License instead of this License. But first,
|
||||
please read <https://www.gnu.org/licenses/why-not-lgpl.html>.
|
2215
DOCUMENTATION.md
Normal file
2215
DOCUMENTATION.md
Normal file
File diff suppressed because it is too large
Load diff
55
Dockerfile
55
Dockerfile
|
@ -1,31 +1,34 @@
|
|||
FROM alpine:latest
|
||||
# This file is intended to be used apart from the containing source code tree.
|
||||
|
||||
# Version of Radicale (e.g. 2.0.0)
|
||||
FROM python:3-alpine AS builder
|
||||
|
||||
# Version of Radicale (e.g. v3)
|
||||
ARG VERSION=master
|
||||
|
||||
# Install dependencies
|
||||
RUN apk add --no-cache \
|
||||
python3 \
|
||||
python3-dev \
|
||||
build-base \
|
||||
libffi-dev \
|
||||
ca-certificates \
|
||||
openssl && \
|
||||
python3 -m pip install passlib bcrypt && \
|
||||
apk del \
|
||||
python3-dev \
|
||||
build-base \
|
||||
libffi-dev
|
||||
# Install Radicale
|
||||
RUN wget --quiet https://github.com/Kozea/Radicale/archive/${VERSION}.tar.gz --output-document=radicale.tar.gz && \
|
||||
tar xzf radicale.tar.gz && \
|
||||
python3 -m pip install ./Radicale-${VERSION} && \
|
||||
rm -r radicale.tar.gz Radicale-${VERSION}
|
||||
# Persistent storage for data (Mount it somewhere on the host!)
|
||||
# Optional dependencies (e.g. bcrypt or ldap)
|
||||
ARG DEPENDENCIES=bcrypt
|
||||
|
||||
RUN apk add --no-cache --virtual gcc libffi-dev musl-dev \
|
||||
&& python -m venv /app/venv \
|
||||
&& /app/venv/bin/pip install --no-cache-dir "Radicale[${DEPENDENCIES}] @ https://github.com/Kozea/Radicale/archive/${VERSION}.tar.gz"
|
||||
|
||||
|
||||
FROM python:3-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN addgroup -g 1000 radicale \
|
||||
&& adduser radicale --home /var/lib/radicale --system --uid 1000 --disabled-password -G radicale \
|
||||
&& apk add --no-cache ca-certificates openssl
|
||||
|
||||
COPY --chown=radicale:radicale --from=builder /app/venv /app
|
||||
|
||||
# Persistent storage for data
|
||||
VOLUME /var/lib/radicale
|
||||
# Configuration data (Put the "config" file here!)
|
||||
VOLUME /etc/radicale
|
||||
# TCP port of Radicale (Publish it on a host interface!)
|
||||
# TCP port of Radicale
|
||||
EXPOSE 5232
|
||||
# Run Radicale (Configure it here or provide a "config" file!)
|
||||
CMD ["radicale", "--hosts", "0.0.0.0:5232"]
|
||||
# Run Radicale
|
||||
ENTRYPOINT [ "/app/bin/python", "/app/bin/radicale"]
|
||||
CMD ["--hosts", "0.0.0.0:5232,[::]:5232"]
|
||||
|
||||
USER radicale
|
||||
|
|
32
Dockerfile.dev
Normal file
32
Dockerfile.dev
Normal file
|
@ -0,0 +1,32 @@
|
|||
FROM python:3-alpine AS builder
|
||||
|
||||
# Optional dependencies (e.g. bcrypt or ldap)
|
||||
ARG DEPENDENCIES=bcrypt
|
||||
|
||||
COPY . /app
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apk add --no-cache --virtual gcc libffi-dev musl-dev \
|
||||
&& python -m venv /app/venv \
|
||||
&& /app/venv/bin/pip install --no-cache-dir .[${DEPENDENCIES}]
|
||||
|
||||
FROM python:3-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN addgroup -g 1000 radicale \
|
||||
&& adduser radicale --home /var/lib/radicale --system --uid 1000 --disabled-password -G radicale \
|
||||
&& apk add --no-cache ca-certificates openssl
|
||||
|
||||
COPY --chown=radicale:radicale --from=builder /app/venv /app
|
||||
|
||||
# Persistent storage for data
|
||||
VOLUME /var/lib/radicale
|
||||
# TCP port of Radicale
|
||||
EXPOSE 5232
|
||||
# Run Radicale
|
||||
ENTRYPOINT [ "/app/bin/python", "/app/bin/radicale"]
|
||||
CMD ["--hosts", "0.0.0.0:5232"]
|
||||
|
||||
USER radicale
|
|
@ -1 +1,3 @@
|
|||
include COPYING NEWS.md README.md config logging radicale.py radicale.fcgi radicale.wsgi
|
||||
include CHANGELOG.md COPYING.md DOCUMENTATION.md README.md
|
||||
include config rights
|
||||
include radicale.wsgi
|
||||
|
|
328
NEWS.md
328
NEWS.md
|
@ -1,328 +0,0 @@
|
|||
News
|
||||
====
|
||||
|
||||
2.1.3 - Wild Radish
|
||||
-------------------
|
||||
|
||||
This release is not compatible with the 1.x.x versions. See
|
||||
http://radicale.org/1to2/ if you want to switch from 1.x.x to
|
||||
2.x.x.
|
||||
|
||||
* Enable timeout for SSL handshakes and move them out of the main thread
|
||||
* Create cache entries during upload of items
|
||||
* Stop built-in server on Windows when Ctrl+C is pressed
|
||||
* Prevent slow down when multiple requests hit a collection during cache warm-up
|
||||
|
||||
2.1.2 - Wild Radish
|
||||
-------------------
|
||||
|
||||
This release is not compatible with the 1.x.x versions. See
|
||||
http://radicale.org/1to2/ if you want to switch from 1.x.x to
|
||||
2.x.x.
|
||||
|
||||
* Remove workarounds for bugs in VObject < 0.9.5
|
||||
* Error checking of collection tags and associated components
|
||||
* Improve error checking of uploaded collections and components
|
||||
* Don't delete empty collection properties implicitly
|
||||
* Improve logging of VObject serialization
|
||||
|
||||
2.1.1 - Wild Radish Again
|
||||
-------------------
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Add missing UIDs instead of failing
|
||||
* Improve error checking of calendar and address book objects
|
||||
* Fix upload of whole address books
|
||||
|
||||
2.1.0 - Wild Radish
|
||||
-------------------
|
||||
|
||||
This release is compatible with version 2.0.0.
|
||||
|
||||
* Built-in web interface for creating and managing address books and calendars
|
||||
* can be extended with web plugins
|
||||
* Much faster storage backend
|
||||
* Significant reduction in memory usage
|
||||
* Improved logging
|
||||
* Include paths (of invalid items / requests) in log messages
|
||||
* Include configuration values causing problems in log messages
|
||||
* Log warning message for invalid requests by clients
|
||||
* Log error message for invalid files in the storage backend
|
||||
* No stack traces unless debugging is enabled
|
||||
* Time range filter also regards overwritten recurrences
|
||||
* Items that couldn't be filtered because of bugs in VObject are always
|
||||
returned (and a warning message is logged)
|
||||
* Basic error checking of configuration files
|
||||
* File system locking isn't disabled implicitly anymore, instead a new
|
||||
configuration option gets introduced
|
||||
* The permissions of the lock file are not changed anymore
|
||||
* Support for sync-token
|
||||
* Support for client-side SSL certificates
|
||||
* Rights plugins can decide if access to an item is granted explicitly
|
||||
* Respond with 403 instead of 404 for principal collections of non-existing
|
||||
users when ``owner_only`` plugin is used (information leakage)
|
||||
* Authentication plugins can provide the login and password from the
|
||||
environment
|
||||
* new ``remote_user`` plugin, that gets the login from the ``REMOTE_USER``
|
||||
environment variable (for WSGI server)
|
||||
* new ``http_x_remote_user`` plugin, that gets the login from the
|
||||
``X-Remote-User`` HTTP header (for reverse proxies)
|
||||
|
||||
|
||||
2.0.0 - Little Big Radish
|
||||
-------------------------
|
||||
|
||||
This release is not compatible with the 1.x.x versions. See
|
||||
http://radicale.org/1to2/ if you want to switch from 1.x.x to
|
||||
2.0.0.
|
||||
|
||||
* Support Python 3.3+ only, Python 2 is not supported anymore
|
||||
* Keep only one simple filesystem-based storage system
|
||||
* Remove built-in Git support
|
||||
* Remove built-in authentication modules
|
||||
* Keep the WSGI interface, use Python HTTP server by default
|
||||
* Use a real iCal parser, rely on the "vobject" external module
|
||||
* Add a solid calendar discovery
|
||||
* Respect the difference between "files" and "folders", don't rely on slashes
|
||||
* Remove the calendar creation with GET requests
|
||||
* Be stateless
|
||||
* Use a file locker
|
||||
* Add threading
|
||||
* Get atomic writes
|
||||
* Support new filters
|
||||
* Support read-only permissions
|
||||
* Allow External plugins for authentication, rights management, storage and
|
||||
version control
|
||||
|
||||
|
||||
1.1.4 - Fifth Law of Nature
|
||||
---------------------------
|
||||
|
||||
* Use ``shutil.move`` for ``--export-storage``
|
||||
|
||||
|
||||
1.1.3 - Fourth Law of Nature
|
||||
----------------------------
|
||||
|
||||
* Add a ``--export-storage=FOLDER`` command-line argument (by Unrud, see #606)
|
||||
|
||||
|
||||
1.1.2 - Third Law of Nature
|
||||
---------------------------
|
||||
|
||||
* **Security fix**: Add a random timer to avoid timing oracles and simple
|
||||
bruteforce attacks when using the htpasswd authentication method.
|
||||
* Various minor fixes.
|
||||
|
||||
|
||||
1.1.1 - Second Law of Nature
|
||||
----------------------------
|
||||
|
||||
* Fix the owner_write rights rule
|
||||
|
||||
|
||||
1.1 - Law of Nature
|
||||
-------------------
|
||||
|
||||
One feature in this release is **not backward compatible**:
|
||||
|
||||
* Use the first matching section for rights (inspired from daald)
|
||||
|
||||
Now, the first section matching the path and current user in your custom rights
|
||||
file is used. In the previous versions, the most permissive rights of all the
|
||||
matching sections were applied. This new behaviour gives a simple way to make
|
||||
specific rules at the top of the file independent from the generic ones.
|
||||
|
||||
Many **improvements in this release are related to security**, you should
|
||||
upgrade Radicale as soon as possible:
|
||||
|
||||
* Improve the regex used for well-known URIs (by Unrud)
|
||||
* Prevent regex injection in rights management (by Unrud)
|
||||
* Prevent crafted HTTP request from calling arbitrary functions (by Unrud)
|
||||
* Improve URI sanitation and conversion to filesystem path (by Unrud)
|
||||
* Decouple the daemon from its parent environment (by Unrud)
|
||||
|
||||
Some bugs have been fixed and little enhancements have been added:
|
||||
|
||||
* Assign new items to correct key (by Unrud)
|
||||
* Avoid race condition in PID file creation (by Unrud)
|
||||
* Improve the docker version (by cdpb)
|
||||
* Encode message and committer for git commits
|
||||
* Test with Python 3.5
|
||||
|
||||
|
||||
1.0.1 - Sunflower Again
|
||||
-----------------------
|
||||
|
||||
* Update the version because of a **stupid** "feature"™ of PyPI
|
||||
|
||||
|
||||
1.0 - Sunflower
|
||||
---------------
|
||||
|
||||
* Enhanced performances (by Mathieu Dupuy)
|
||||
* Add MD5-APR1 and BCRYPT for htpasswd-based authentication (by Jan-Philip Gehrcke)
|
||||
* Use PAM service (by Stephen Paul Weber)
|
||||
* Don't discard PROPPATCH on empty collections (by Markus Unterwaditzer)
|
||||
* Write the path of the collection in the git message (by Matthew Monaco)
|
||||
* Tests launched on Travis
|
||||
|
||||
|
||||
0.10 - Lovely Endless Grass
|
||||
---------------------------
|
||||
|
||||
* Support well-known URLs (by Mathieu Dupuy)
|
||||
* Fix collection discovery (by Markus Unterwaditzer)
|
||||
* Reload logger config on SIGHUP (by Élie Bouttier)
|
||||
* Remove props files when deleting a collection (by Vincent Untz)
|
||||
* Support salted SHA1 passwords (by Marc Kleine-Budde)
|
||||
* Don't spam the logs about non-SSL IMAP connections to localhost (by Giel van Schijndel)
|
||||
|
||||
|
||||
0.9 - Rivers
|
||||
------------
|
||||
|
||||
* Custom handlers for auth, storage and rights (by Sergey Fursov)
|
||||
* 1-file-per-event storage (by Jean-Marc Martins)
|
||||
* Git support for filesystem storages (by Jean-Marc Martins)
|
||||
* DB storage working with PostgreSQL, MariaDB and SQLite (by Jean-Marc Martins)
|
||||
* Clean rights manager based on regular expressions (by Sweil)
|
||||
* Support of contacts for Apple's clients
|
||||
* Support colors (by Jochen Sprickerhof)
|
||||
* Decode URLs in XML (by Jean-Marc Martins)
|
||||
* Fix PAM authentication (by Stepan Henek)
|
||||
* Use consistent etags (by 9m66p93w)
|
||||
* Use consistent sorting order (by Daniel Danner)
|
||||
* Return 401 on unauthorized DELETE requests (by Eduard Braun)
|
||||
* Move pid file creation in child process (by Mathieu Dupuy)
|
||||
* Allow requests without base_prefix (by jheidemann)
|
||||
|
||||
|
||||
0.8 - Rainbow
|
||||
-------------
|
||||
|
||||
* New authentication and rights management modules (by Matthias Jordan)
|
||||
* Experimental database storage
|
||||
* Command-line option for custom configuration file (by Mark Adams)
|
||||
* Root URL not at the root of a domain (by Clint Adams, Fabrice Bellet, Vincent Untz)
|
||||
* Improved support for iCal, CalDAVSync, CardDAVSync, CalDavZAP and CardDavMATE
|
||||
* Empty PROPFIND requests handled (by Christoph Polcin)
|
||||
* Colon allowed in passwords
|
||||
* Configurable realm message
|
||||
|
||||
|
||||
0.7.1 - Waterfalls
|
||||
------------------
|
||||
|
||||
* Many address books fixes
|
||||
* New IMAP ACL (by Daniel Aleksandersen)
|
||||
* PAM ACL fixed (by Daniel Aleksandersen)
|
||||
* Courier ACL fixed (by Benjamin Frank)
|
||||
* Always set display name to collections (by Oskari Timperi)
|
||||
* Various DELETE responses fixed
|
||||
|
||||
|
||||
0.7 - Eternal Sunshine
|
||||
----------------------
|
||||
|
||||
* Repeating events
|
||||
* Collection deletion
|
||||
* Courier and PAM authentication methods
|
||||
* CardDAV support
|
||||
* Custom LDAP filters supported
|
||||
|
||||
|
||||
0.6.4 - Tulips
|
||||
--------------
|
||||
|
||||
* Fix the installation with Python 3.1
|
||||
|
||||
|
||||
0.6.3 - Red Roses
|
||||
-----------------
|
||||
|
||||
* MOVE requests fixed
|
||||
* Faster REPORT answers
|
||||
* Executable script moved into the package
|
||||
|
||||
|
||||
0.6.2 - Seeds
|
||||
-------------
|
||||
|
||||
* iPhone and iPad support fixed
|
||||
* Backslashes replaced by slashes in PROPFIND answers on Windows
|
||||
* PyPI archive set as default download URL
|
||||
|
||||
|
||||
0.6.1 - Growing Up
|
||||
------------------
|
||||
|
||||
* Example files included in the tarball
|
||||
* htpasswd support fixed
|
||||
* Redirection loop bug fixed
|
||||
* Testing message on GET requests
|
||||
|
||||
|
||||
0.6 - Sapling
|
||||
-------------
|
||||
|
||||
* WSGI support
|
||||
* IPv6 support
|
||||
* Smart, verbose and configurable logs
|
||||
* Apple iCal 4 and iPhone support (by Łukasz Langa)
|
||||
* KDE KOrganizer support
|
||||
* LDAP auth backend (by Corentin Le Bail)
|
||||
* Public and private calendars (by René Neumann)
|
||||
* PID file
|
||||
* MOVE requests management
|
||||
* Journal entries support
|
||||
* Drop Python 2.5 support
|
||||
|
||||
|
||||
0.5 - Historical Artifacts
|
||||
--------------------------
|
||||
|
||||
* Calendar depth
|
||||
* MacOS and Windows support
|
||||
* HEAD requests management
|
||||
* htpasswd user from calendar path
|
||||
|
||||
|
||||
0.4 - Hot Days Back
|
||||
-------------------
|
||||
|
||||
* Personal calendars
|
||||
* Last-Modified HTTP header
|
||||
* ``no-ssl`` and ``foreground`` options
|
||||
* Default configuration file
|
||||
|
||||
|
||||
0.3 - Dancing Flowers
|
||||
---------------------
|
||||
|
||||
* Evolution support
|
||||
* Version management
|
||||
|
||||
|
||||
0.2 - Snowflakes
|
||||
----------------
|
||||
|
||||
* Sunbird pre-1.0 support
|
||||
* SSL connection
|
||||
* Htpasswd authentication
|
||||
* Daemon mode
|
||||
* User configuration
|
||||
* Twisted dependency removed
|
||||
* Python 3 support
|
||||
* Real URLs for PUT and DELETE
|
||||
* Concurrent modification reported to users
|
||||
* Many bugs fixed (by Roger Wenham)
|
||||
|
||||
|
||||
0.1 - Crazy Vegetables
|
||||
----------------------
|
||||
|
||||
* First release
|
||||
* Lightning/Sunbird 0.9 compatibility
|
||||
* Easy installer
|
31
README.md
31
README.md
|
@ -1,7 +1,28 @@
|
|||
Read Me
|
||||
=======
|
||||
# Radicale
|
||||
|
||||
Radicale is a free and open-source CalDAV and CardDAV server.
|
||||
[](https://github.com/Kozea/Radicale/actions/workflows/test.yml)
|
||||
[](https://coveralls.io/github/Kozea/Radicale?branch=master)
|
||||
|
||||
For complete documentation, please visit the
|
||||
[Radicale online documentation](http://www.radicale.org/documentation)
|
||||
Radicale is a small but powerful CalDAV (calendars, to-do lists) and CardDAV
|
||||
(contacts) server, that:
|
||||
|
||||
* Shares calendars and contact lists through CalDAV, CardDAV and HTTP.
|
||||
* Supports events, todos, journal entries and business cards.
|
||||
* Works out-of-the-box, no complicated setup or configuration required.
|
||||
* Can limit access by authentication.
|
||||
* Can secure connections with TLS.
|
||||
* Works with many CalDAV and CardDAV clients
|
||||
* Stores all data on the file system in a simple folder structure.
|
||||
* Can be extended with plugins.
|
||||
* Is GPLv3-licensed free software.
|
||||
|
||||
For the complete documentation, please visit
|
||||
[Radicale master Documentation](https://radicale.org/master.html).
|
||||
|
||||
Additional hints can be found
|
||||
* [Radicale Wiki](https://github.com/Kozea/Radicale/wiki)
|
||||
* [Radicale Issues](https://github.com/Kozea/Radicale/issues)
|
||||
* [Radicale Discussions](https://github.com/Kozea/Radicale/discussions)
|
||||
|
||||
Before reporting an issue, please check
|
||||
* [Radicale Wiki / Reporting Issues](https://github.com/Kozea/Radicale/wiki/Reporting-Issues)
|
||||
|
|
257
config
257
config
|
@ -14,23 +14,18 @@
|
|||
# CalDAV server hostnames separated by a comma
|
||||
# IPv4 syntax: address:port
|
||||
# IPv6 syntax: [address]:port
|
||||
# For example: 0.0.0.0:9999, [::]:9999
|
||||
#hosts = 127.0.0.1:5232
|
||||
|
||||
# Daemon flag
|
||||
#daemon = False
|
||||
|
||||
# File storing the PID in daemon mode
|
||||
#pid =
|
||||
# Hostname syntax (using "getaddrinfo" to resolve to IPv4/IPv6 adress(es)): hostname:port
|
||||
# For example: 0.0.0.0:9999, [::]:9999, localhost:9999
|
||||
#hosts = localhost:5232
|
||||
|
||||
# Max parallel connections
|
||||
#max_connections = 20
|
||||
#max_connections = 8
|
||||
|
||||
# Max size of request body (bytes)
|
||||
#max_content_length = 10000000
|
||||
#max_content_length = 100000000
|
||||
|
||||
# Socket timeout (seconds)
|
||||
#timeout = 10
|
||||
#timeout = 30
|
||||
|
||||
# SSL flag, enable HTTPS protocol
|
||||
#ssl = False
|
||||
|
@ -45,17 +40,14 @@
|
|||
# TCP traffic between Radicale and a reverse proxy
|
||||
#certificate_authority =
|
||||
|
||||
# SSL Protocol used. See python's ssl module for available values
|
||||
#protocol = PROTOCOL_TLSv1_2
|
||||
# SSL protocol, secure configuration: ALL -SSLv3 -TLSv1 -TLSv1.1
|
||||
#protocol = (default)
|
||||
|
||||
# Available ciphers. See python's ssl module for available ciphers
|
||||
#ciphers =
|
||||
# SSL ciphersuite, secure configuration: DHE:ECDHE:-NULL:-SHA (see also "man openssl-ciphers")
|
||||
#ciphersuite = (default)
|
||||
|
||||
# Reverse DNS to resolve client address in logs
|
||||
#dns_lookup = True
|
||||
|
||||
# Message displayed in the client when a password is needed
|
||||
#realm = Radicale - Password Required
|
||||
# script name to strip from URI if called by reverse proxy
|
||||
#script_name = (default taken from HTTP_X_SCRIPT_NAME or SCRIPT_NAME)
|
||||
|
||||
|
||||
[encoding]
|
||||
|
@ -70,62 +62,194 @@
|
|||
[auth]
|
||||
|
||||
# Authentication method
|
||||
# Value: none | htpasswd | remote_user | http_x_remote_user
|
||||
#type = none
|
||||
# Value: none | htpasswd | remote_user | http_x_remote_user | dovecot | ldap | oauth2 | pam | denyall
|
||||
#type = denyall
|
||||
|
||||
# Cache logins for until expiration time
|
||||
#cache_logins = false
|
||||
|
||||
# Expiration time for caching successful logins in seconds
|
||||
#cache_successful_logins_expiry = 15
|
||||
|
||||
## Expiration time of caching failed logins in seconds
|
||||
#cache_failed_logins_expiry = 90
|
||||
|
||||
# Ignore modifyTimestamp and createTimestamp attributes. Required e.g. for Authentik LDAP server
|
||||
#ldap_ignore_attribute_create_modify_timestamp = false
|
||||
|
||||
# URI to the LDAP server
|
||||
#ldap_uri = ldap://localhost
|
||||
|
||||
# The base DN where the user accounts have to be searched
|
||||
#ldap_base = ##BASE_DN##
|
||||
|
||||
# The reader DN of the LDAP server
|
||||
#ldap_reader_dn = CN=ldapreader,CN=Users,##BASE_DN##
|
||||
|
||||
# Password of the reader DN
|
||||
#ldap_secret = ldapreader-secret
|
||||
|
||||
# Path of the file containing password of the reader DN
|
||||
#ldap_secret_file = /run/secrets/ldap_password
|
||||
|
||||
# the attribute to read the group memberships from in the user's LDAP entry (default: not set)
|
||||
#ldap_groups_attribute = memberOf
|
||||
|
||||
# The filter to find the DN of the user. This filter must contain a python-style placeholder for the login
|
||||
#ldap_filter = (&(objectClass=person)(uid={0}))
|
||||
|
||||
# the attribute holding the value to be used as username after authentication
|
||||
#ldap_user_attribute = cn
|
||||
|
||||
# Use ssl on the ldap connection
|
||||
#ldap_use_ssl = False
|
||||
|
||||
# The certificate verification mode. NONE, OPTIONAL, default is REQUIRED
|
||||
#ldap_ssl_verify_mode = REQUIRED
|
||||
|
||||
# The path to the CA file in pem format which is used to certificate the server certificate
|
||||
#ldap_ssl_ca_file =
|
||||
|
||||
# Connection type for dovecot authentication (AF_UNIX|AF_INET|AF_INET6)
|
||||
# Note: credentials are transmitted in cleartext
|
||||
#dovecot_connection_type = AF_UNIX
|
||||
|
||||
# The path to the Dovecot client authentication socket (eg. /run/dovecot/auth-client on Fedora). Radicale must have read / write access to the socket.
|
||||
#dovecot_socket = /var/run/dovecot/auth-client
|
||||
|
||||
# Host of via network exposed dovecot socket
|
||||
#dovecot_host = localhost
|
||||
|
||||
# Port of via network exposed dovecot socket
|
||||
#dovecot_port = 12345
|
||||
|
||||
# IMAP server hostname
|
||||
# Syntax: address | address:port | [address]:port | imap.server.tld
|
||||
#imap_host = localhost
|
||||
|
||||
# Secure the IMAP connection
|
||||
# Value: tls | starttls | none
|
||||
#imap_security = tls
|
||||
|
||||
# OAuth2 token endpoint URL
|
||||
#oauth2_token_endpoint = <URL>
|
||||
|
||||
# PAM service
|
||||
#pam_serivce = radicale
|
||||
|
||||
# PAM group user should be member of
|
||||
#pam_group_membership =
|
||||
|
||||
# Htpasswd filename
|
||||
#htpasswd_filename = /etc/radicale/users
|
||||
|
||||
# Htpasswd encryption method
|
||||
# Value: plain | sha1 | ssha | crypt | bcrypt | md5
|
||||
# Only bcrypt can be considered secure.
|
||||
# bcrypt and md5 require the passlib library to be installed.
|
||||
#htpasswd_encryption = bcrypt
|
||||
# Value: plain | bcrypt | md5 | sha256 | sha512 | autodetect
|
||||
# bcrypt requires the installation of 'bcrypt' module.
|
||||
#htpasswd_encryption = autodetect
|
||||
|
||||
# Enable caching of htpasswd file based on size and mtime_ns
|
||||
#htpasswd_cache = False
|
||||
|
||||
# Incorrect authentication delay (seconds)
|
||||
#delay = 1
|
||||
|
||||
# Message displayed in the client when a password is needed
|
||||
#realm = Radicale - Password Required
|
||||
|
||||
# Convert username to lowercase, must be true for case-insensitive auth providers
|
||||
#lc_username = False
|
||||
|
||||
# Strip domain name from username
|
||||
#strip_domain = False
|
||||
|
||||
|
||||
[rights]
|
||||
|
||||
# Rights backend
|
||||
# Value: none | authenticated | owner_only | owner_write | from_file
|
||||
# Value: authenticated | owner_only | owner_write | from_file
|
||||
#type = owner_only
|
||||
|
||||
# File for rights management from_file
|
||||
#file = /etc/radicale/rights
|
||||
|
||||
# Permit delete of a collection (global)
|
||||
#permit_delete_collection = True
|
||||
|
||||
# Permit overwrite of a collection (global)
|
||||
#permit_overwrite_collection = True
|
||||
|
||||
|
||||
[storage]
|
||||
|
||||
# Storage backend
|
||||
# Value: multifilesystem
|
||||
# Value: multifilesystem | multifilesystem_nolock
|
||||
#type = multifilesystem
|
||||
|
||||
# Folder for storing local collections, created if not present
|
||||
#filesystem_folder = /var/lib/radicale/collections
|
||||
|
||||
# Lock the storage. Never start multiple instances of Radicale or edit the
|
||||
# storage externally while Radicale is running if disabled.
|
||||
#filesystem_locking = True
|
||||
# Folder for storing cache of local collections, created if not present
|
||||
# Note: only used in case of use_cache_subfolder_* options are active
|
||||
# Note: can be used on multi-instance setup to cache files on local node (see below)
|
||||
#filesystem_cache_folder = (filesystem_folder)
|
||||
|
||||
# Sync all changes to disk during requests. (This can impair performance.)
|
||||
# Disabling it increases the risk of data loss, when the system crashes or
|
||||
# power fails!
|
||||
#filesystem_fsync = True
|
||||
# Use subfolder 'collection-cache' for 'item' cache file structure instead of inside collection folder
|
||||
# Note: can be used on multi-instance setup to cache 'item' on local node
|
||||
#use_cache_subfolder_for_item = False
|
||||
|
||||
# Use subfolder 'collection-cache' for 'history' cache file structure instead of inside collection folder
|
||||
# Note: use only on single-instance setup, will break consistency with client in multi-instance setup
|
||||
#use_cache_subfolder_for_history = False
|
||||
|
||||
# Use subfolder 'collection-cache' for 'sync-token' cache file structure instead of inside collection folder
|
||||
# Note: use only on single-instance setup, will break consistency with client in multi-instance setup
|
||||
#use_cache_subfolder_for_synctoken = False
|
||||
|
||||
# Use last modifiction time (nanoseconds) and size (bytes) for 'item' cache instead of SHA256 (improves speed)
|
||||
# Note: check used filesystem mtime precision before enabling
|
||||
# Note: conversion is done on access, bulk conversion can be done offline using storage verification option: radicale --verify-storage
|
||||
#use_mtime_and_size_for_item_cache = False
|
||||
|
||||
# Use configured umask for folder creation (not applicable for OS Windows)
|
||||
# Useful value: 0077 | 0027 | 0007 | 0022
|
||||
#folder_umask = (system default, usual 0022)
|
||||
|
||||
# Delete sync token that are older (seconds)
|
||||
#max_sync_token_age = 2592000
|
||||
|
||||
# Close the lock file when no more clients are waiting.
|
||||
# This option is not very useful in general, but on Windows files that are
|
||||
# opened cannot be deleted.
|
||||
#filesystem_close_lock_file = False
|
||||
# Skip broken item instead of triggering an exception
|
||||
#skip_broken_item = True
|
||||
|
||||
# Command that is run after changes to storage
|
||||
# Example: ([ -d .git ] || git init) && git add -A && (git diff --cached --quiet || git commit -m "Changes by "%(user)s)
|
||||
# Command that is run after changes to storage, default is emtpy
|
||||
# Supported placeholders:
|
||||
# %(user)s: logged-in user
|
||||
# %(cwd)s : current working directory
|
||||
# %(path)s: full path of item
|
||||
# Command will be executed with base directory defined in filesystem_folder
|
||||
# For "git" check DOCUMENTATION.md for bootstrap instructions
|
||||
# Example(test): echo \"user=%(user)s path=%(path)s cwd=%(cwd)s\"
|
||||
# Example(git): git add -A && (git diff --cached --quiet || git commit -m "Changes by \"%(user)s\"")
|
||||
#hook =
|
||||
|
||||
# Create predefined user collections
|
||||
#
|
||||
# json format:
|
||||
#
|
||||
# {
|
||||
# "def-addressbook": {
|
||||
# "D:displayname": "Personal Address Book",
|
||||
# "tag": "VADDRESSBOOK"
|
||||
# },
|
||||
# "def-calendar": {
|
||||
# "C:supported-calendar-component-set": "VEVENT,VJOURNAL,VTODO",
|
||||
# "D:displayname": "Personal Calendar",
|
||||
# "tag": "VCALENDAR"
|
||||
# }
|
||||
# }
|
||||
#
|
||||
#predefined_collections =
|
||||
|
||||
|
||||
[web]
|
||||
|
||||
|
@ -136,23 +260,52 @@
|
|||
|
||||
[logging]
|
||||
|
||||
# Logging configuration file
|
||||
# If no config is given, simple information is printed on the standard output
|
||||
# For more information about the syntax of the configuration file, see:
|
||||
# http://docs.python.org/library/logging.config.html
|
||||
#config =
|
||||
|
||||
# Set the default logging level to debug
|
||||
#debug = False
|
||||
|
||||
# Store all environment variables (including those set in the shell)
|
||||
#full_environment = False
|
||||
# Threshold for the logger
|
||||
# Value: debug | info | warning | error | critical
|
||||
#level = info
|
||||
|
||||
# Don't include passwords in logs
|
||||
#mask_passwords = True
|
||||
|
||||
# Log bad PUT request content
|
||||
#bad_put_request_content = False
|
||||
|
||||
# Log backtrace on level=debug
|
||||
#backtrace_on_debug = False
|
||||
|
||||
# Log request header on level=debug
|
||||
#request_header_on_debug = False
|
||||
|
||||
# Log request content on level=debug
|
||||
#request_content_on_debug = False
|
||||
|
||||
# Log response content on level=debug
|
||||
#response_content_on_debug = False
|
||||
|
||||
# Log rights rule which doesn't match on level=debug
|
||||
#rights_rule_doesnt_match_on_debug = False
|
||||
|
||||
# Log storage cache actions on level=debug
|
||||
#storage_cache_actions_on_debug = False
|
||||
|
||||
[headers]
|
||||
|
||||
# Additional HTTP headers
|
||||
#Access-Control-Allow-Origin = *
|
||||
|
||||
|
||||
[hook]
|
||||
|
||||
# Hook types
|
||||
# Value: none | rabbitmq
|
||||
#type = none
|
||||
#rabbitmq_endpoint =
|
||||
#rabbitmq_topic =
|
||||
#rabbitmq_queue_type = classic
|
||||
|
||||
|
||||
[reporting]
|
||||
|
||||
# When returning a free-busy report, limit the number of returned
|
||||
# occurences per event to prevent DOS attacks.
|
||||
#max_freebusy_occurrence = 10000
|
||||
|
|
318
contrib/apache/radicale.conf
Normal file
318
contrib/apache/radicale.conf
Normal file
|
@ -0,0 +1,318 @@
|
|||
### Define how Apache should serve "radicale"
|
||||
## !!! Do not enable both at the same time !!!
|
||||
|
||||
## Apache acting as reverse proxy and forward requests via ProxyPass to a running "radicale" server
|
||||
# SELinux WARNING: To use this correctly, you will need to set:
|
||||
# setsebool -P httpd_can_network_connect=1
|
||||
# URI prefix: /radicale
|
||||
#Define RADICALE_SERVER_REVERSE_PROXY
|
||||
|
||||
|
||||
## Apache starting WSGI server running with "radicale" application
|
||||
# MAY CONFLICT with other WSG servers on same system -> use then inside a VirtualHost
|
||||
# SELinux WARNING: To use this correctly, you will need to set:
|
||||
# setsebool -P httpd_can_read_write_radicale=1
|
||||
# URI prefix: /radicale
|
||||
#Define RADICALE_SERVER_WSGI
|
||||
|
||||
|
||||
### Extra options
|
||||
## Apache starting a dedicated VHOST with SSL without "/radicale" prefix in URI on port 8443
|
||||
#Define RADICALE_SERVER_VHOST_SSL
|
||||
|
||||
|
||||
### permit public access to "radicale"
|
||||
#Define RADICALE_PERMIT_PUBLIC_ACCESS
|
||||
|
||||
|
||||
### enforce SSL on default host
|
||||
#Define RADICALE_ENFORCE_SSL
|
||||
|
||||
|
||||
### enable authentication by web server (config: [auth] type = http_x_remote_user)
|
||||
#Define RADICALE_SERVER_USER_AUTHENTICATION
|
||||
|
||||
|
||||
### Particular configuration EXAMPLES, adjust/extend/override to your needs
|
||||
|
||||
|
||||
##########################
|
||||
### default host
|
||||
##########################
|
||||
<IfDefine !RADICALE_SERVER_VHOST_SSL>
|
||||
|
||||
## RADICALE_SERVER_REVERSE_PROXY
|
||||
<IfDefine RADICALE_SERVER_REVERSE_PROXY>
|
||||
RewriteEngine On
|
||||
|
||||
RewriteRule ^/radicale$ /radicale/ [R,L]
|
||||
|
||||
RewriteCond %{REQUEST_METHOD} GET
|
||||
RewriteRule ^/radicale/$ /radicale/.web/ [R,L]
|
||||
|
||||
<LocationMatch "^/radicale/\.web.*>
|
||||
# Internal WebUI does not need authentication at all
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
|
||||
<LocationMatch "^/radicale(?!/\.web)">
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_ENFORCE_SSL>
|
||||
<IfModule !ssl_module>
|
||||
Error "RADICALE_ENFORCE_SSL selected but ssl module not loaded/enabled"
|
||||
</IfModule>
|
||||
SSLRequireSSL
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
</IfDefine>
|
||||
|
||||
|
||||
## RADICALE_SERVER_WSGI
|
||||
# For more information, visit:
|
||||
# http://radicale.org/user_documentation/#idapache-and-mod-wsgi
|
||||
<IfDefine RADICALE_SERVER_WSGI>
|
||||
<IfModule wsgi_module>
|
||||
|
||||
<Files /usr/share/radicale/radicale.wsgi>
|
||||
SetHandler wsgi-script
|
||||
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</Files>
|
||||
|
||||
WSGIDaemonProcess radicale user=radicale group=radicale threads=1 umask=0027
|
||||
WSGIProcessGroup radicale
|
||||
WSGIApplicationGroup %{GLOBAL}
|
||||
WSGIPassAuthorization On
|
||||
|
||||
WSGIScriptAlias /radicale /usr/share/radicale/radicale.wsgi
|
||||
|
||||
# Internal WebUI does not need authentication at all
|
||||
<LocationMatch "^/radicale/\.web.*>
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
|
||||
<LocationMatch "^/radicale(?!/\.web)">
|
||||
RequestHeader set X-Script-Name /radicale
|
||||
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_ENFORCE_SSL>
|
||||
<IfModule !ssl_module>
|
||||
Error "RADICALE_ENFORCE_SSL selected but ssl module not loaded/enabled"
|
||||
</IfModule>
|
||||
SSLRequireSSL
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
</IfModule>
|
||||
<IfModule !wsgi_module>
|
||||
Error "RADICALE_SERVER_WSGI selected but wsgi module not loaded/enabled"
|
||||
</IfModule>
|
||||
</IfDefine>
|
||||
|
||||
</IfDefine>
|
||||
|
||||
|
||||
##########################
|
||||
### VHOST with SSL
|
||||
##########################
|
||||
<IfDefine RADICALE_SERVER_VHOST_SSL>
|
||||
|
||||
<IfModule ssl_module>
|
||||
Listen 8443 https
|
||||
|
||||
<VirtualHost _default_:8443>
|
||||
## taken from ssl.conf
|
||||
|
||||
#ServerName www.example.com:443
|
||||
ErrorLog logs/ssl_error_log
|
||||
TransferLog logs/ssl_access_log
|
||||
LogLevel warn
|
||||
SSLEngine on
|
||||
SSLProtocol all -SSLv3 -TLSv1 -TLSv1.1
|
||||
SSLProxyProtocol all -SSLv3 -TLSv1 -TLSv1.1
|
||||
SSLHonorCipherOrder on
|
||||
SSLCipherSuite PROFILE=SYSTEM
|
||||
SSLProxyCipherSuite PROFILE=SYSTEM
|
||||
SSLCertificateFile /etc/pki/tls/certs/localhost.crt
|
||||
SSLCertificateKeyFile /etc/pki/tls/private/localhost.key
|
||||
#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt
|
||||
#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt
|
||||
#SSLVerifyClient require
|
||||
#SSLVerifyDepth 10
|
||||
#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire
|
||||
BrowserMatch "MSIE [2-5]" \ nokeepalive ssl-unclean-shutdown \ downgrade-1.0 force-response-1.0
|
||||
CustomLog logs/ssl_request_log "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
|
||||
|
||||
|
||||
## RADICALE_SERVER_REVERSE_PROXY
|
||||
<IfDefine RADICALE_SERVER_REVERSE_PROXY>
|
||||
RewriteEngine On
|
||||
|
||||
RewriteCond %{REQUEST_METHOD} GET
|
||||
RewriteRule ^/$ /.web/ [R,L]
|
||||
|
||||
<LocationMatch "^/\.web.*>
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
|
||||
<LocationMatch "^(?!/\.web)">
|
||||
RequestHeader set X-Forwarded-Port "%{SERVER_PORT}s"
|
||||
RequestHeader set X-Forwarded-Proto expr=%{REQUEST_SCHEME}
|
||||
|
||||
ProxyPass http://localhost:5232/ retry=0
|
||||
ProxyPassReverse http://localhost:5232/
|
||||
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
</IfDefine>
|
||||
|
||||
|
||||
## RADICALE_SERVER_WSGI
|
||||
# For more information, visit:
|
||||
# http://radicale.org/user_documentation/#idapache-and-mod-wsgi
|
||||
<IfDefine RADICALE_SERVER_WSGI>
|
||||
<IfModule wsgi_module>
|
||||
|
||||
<Files /usr/share/radicale/radicale.wsgi>
|
||||
SetHandler wsgi-script
|
||||
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</Files>
|
||||
|
||||
WSGIDaemonProcess radicale user=radicale group=radicale threads=1 umask=0027
|
||||
WSGIProcessGroup radicale
|
||||
WSGIApplicationGroup %{GLOBAL}
|
||||
WSGIPassAuthorization On
|
||||
|
||||
WSGIScriptAlias / /usr/share/radicale/radicale.wsgi
|
||||
|
||||
<LocationMatch "^/(?!/\.web)">
|
||||
<IfDefine !RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## User authentication handled by "radicale"
|
||||
Require local
|
||||
<IfDefine RADICALE_PERMIT_PUBLIC_ACCESS>
|
||||
Require all granted
|
||||
</IfDefine>
|
||||
</IfDefine>
|
||||
|
||||
<IfDefine RADICALE_SERVER_USER_AUTHENTICATION>
|
||||
## You may want to use apache's authentication (config: [auth] type = http_x_remote_user)
|
||||
## e.g. create a new file with a testuser: htpasswd -c -B /etc/httpd/conf/htpasswd-radicale testuser
|
||||
AuthBasicProvider file
|
||||
AuthType Basic
|
||||
AuthName "Enter your credentials"
|
||||
AuthUserFile /etc/httpd/conf/htpasswd-radicale
|
||||
AuthGroupFile /dev/null
|
||||
Require valid-user
|
||||
RequestHeader set X-Remote-User expr=%{REMOTE_USER}
|
||||
</IfDefine>
|
||||
</LocationMatch>
|
||||
</IfModule>
|
||||
<IfModule !wsgi_module>
|
||||
Error "RADICALE_SERVER_WSGI selected but wsgi module not loaded/enabled"
|
||||
</IfModule>
|
||||
</IfDefine>
|
||||
|
||||
|
||||
</VirtualHost>
|
||||
</IfModule>
|
||||
|
||||
<IfModule !ssl_module>
|
||||
Error "RADICALE_SERVER_VHOST_SSL selected but ssl module not loaded/enabled"
|
||||
</IfModule>
|
||||
|
||||
</IfDefine>
|
193
contrib/logwatch/radicale
Normal file
193
contrib/logwatch/radicale
Normal file
|
@ -0,0 +1,193 @@
|
|||
# This file is related to Radicale - CalDAV and CardDAV server
|
||||
# for logwatch (script)
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# Detail levels
|
||||
# >= 5: Logins
|
||||
# >= 10: ResponseTimes
|
||||
|
||||
$Detail = $ENV{'LOGWATCH_DETAIL_LEVEL'} || 0;
|
||||
|
||||
my %ResponseTimes;
|
||||
my %Responses;
|
||||
my %Requests;
|
||||
my %Logins;
|
||||
my %Loglevel;
|
||||
my %OtherEvents;
|
||||
|
||||
my $sum;
|
||||
my $length;
|
||||
|
||||
sub ResponseTimesMinMaxSum($$) {
|
||||
my $req = $_[0];
|
||||
my $time = $_[1];
|
||||
|
||||
$ResponseTimes{$req}->{'cnt'}++;
|
||||
|
||||
if (! defined $ResponseTimes{$req}->{'min'}) {
|
||||
$ResponseTimes{$req}->{'min'} = $time;
|
||||
} elsif ($ResponseTimes->{$req}->{'min'} > $time) {
|
||||
$ResponseTimes{$req}->{'min'} = $time;
|
||||
}
|
||||
|
||||
if (! defined $ResponseTimes{$req}->{'max'}) {
|
||||
$ResponseTimes{$req}{'max'} = $time;
|
||||
} elsif ($ResponseTimes{$req}->{'max'} < $time) {
|
||||
$ResponseTimes{$req}{'max'} = $time;
|
||||
}
|
||||
|
||||
$ResponseTimes{$req}->{'sum'} += $time;
|
||||
}
|
||||
|
||||
sub Sum($) {
|
||||
my $phash = $_[0];
|
||||
my $sum = 0;
|
||||
foreach my $entry (keys %$phash) {
|
||||
$sum += $phash->{$entry};
|
||||
}
|
||||
return $sum;
|
||||
}
|
||||
|
||||
sub MaxLength($) {
|
||||
my $phash = $_[0];
|
||||
my $length = 0;
|
||||
foreach my $entry (keys %$phash) {
|
||||
$length = length($entry) if (length($entry) > $length);
|
||||
}
|
||||
return $length;
|
||||
}
|
||||
|
||||
while (defined($ThisLine = <STDIN>)) {
|
||||
# count loglevel
|
||||
if ( $ThisLine =~ /\[(DEBUG|INFO|WARNING|ERROR|CRITICAL)\] /o ) {
|
||||
$Loglevel{$1}++
|
||||
}
|
||||
|
||||
# parse log for events
|
||||
if ( $ThisLine =~ /Radicale server ready/o ) {
|
||||
$OtherEvents{"Radicale server started"}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ /Stopping Radicale/o ) {
|
||||
$OtherEvents{"Radicale server stopped"}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (\S+) response status/o ) {
|
||||
my $req = $1;
|
||||
if ( $ThisLine =~ / \S+ response status for .* with depth '(\d)' in ([0-9.]+) seconds: (\d+)/o ) {
|
||||
$req .= ":D=" . $1 . ":R=" . $3;
|
||||
ResponseTimesMinMaxSum($req, $2) if ($Detail >= 10);
|
||||
} elsif ( $ThisLine =~ / \S+ response status for .* in ([0-9.]+) seconds: (\d+)/ ) {
|
||||
$req .= ":R=" . $2;
|
||||
ResponseTimesMinMaxSum($req, $1) if ($Detail >= 10);
|
||||
}
|
||||
$Responses{$req}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (\S+) request for/o ) {
|
||||
my $req = $1;
|
||||
if ( $ThisLine =~ / \S+ request for .* with depth '(\d)' received/o ) {
|
||||
$req .= ":D=" . $1;
|
||||
}
|
||||
$Requests{$req}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (Successful login): '([^']+)'/o ) {
|
||||
$Logins{$2}++ if ($Detail >= 5);
|
||||
$OtherEvents{$1}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ / (Failed login attempt) /o ) {
|
||||
$OtherEvents{$1}++;
|
||||
}
|
||||
elsif ( $ThisLine =~ /\[(DEBUG|INFO)\] /o ) {
|
||||
# skip if DEBUG+INFO
|
||||
}
|
||||
else {
|
||||
# Report any unmatched entries...
|
||||
$ThisLine =~ s/^\[\d+(\/Thread-\d+)?\] //; # remove process/Thread ID
|
||||
chomp($ThisLine);
|
||||
$OtherList{$ThisLine}++;
|
||||
}
|
||||
}
|
||||
|
||||
if ($Started) {
|
||||
print "\nStatistics:\n";
|
||||
print " Radicale started: $Started Time(s)\n";
|
||||
}
|
||||
|
||||
if (keys %Loglevel) {
|
||||
$sum = Sum(\%Loglevel);
|
||||
print "\n**Loglevel counters**\n";
|
||||
printf "%-18s | %7s | %5s |\n", "Loglevel", "cnt", "ratio";
|
||||
print "-" x38 . "\n";
|
||||
foreach my $level (sort keys %Loglevel) {
|
||||
printf "%-18s | %7d | %3d%% |\n", $level, $Loglevel{$level}, int(($Loglevel{$level} * 100) / $sum);
|
||||
}
|
||||
print "-" x38 . "\n";
|
||||
printf "%-18s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %Requests) {
|
||||
$sum = Sum(\%Requests);
|
||||
print "\n**Request counters (D=<depth>)**\n";
|
||||
printf "%-18s | %7s | %5s |\n", "Request", "cnt", "ratio";
|
||||
print "-" x38 . "\n";
|
||||
foreach my $req (sort keys %Requests) {
|
||||
printf "%-18s | %7d | %3d%% |\n", $req, $Requests{$req}, int(($Requests{$req} * 100) / $sum);
|
||||
}
|
||||
print "-" x38 . "\n";
|
||||
printf "%-18s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %Responses) {
|
||||
$sum = Sum(\%Responses);
|
||||
print "\n**Response result counters ((D=<depth> R=<result>)**\n";
|
||||
printf "%-18s | %7s | %5s |\n", "Response", "cnt", "ratio";
|
||||
print "-" x38 . "\n";
|
||||
foreach my $req (sort keys %Responses) {
|
||||
printf "%-18s | %7d | %3d%% |\n", $req, $Responses{$req}, int(($Responses{$req} * 100) / $sum);
|
||||
}
|
||||
print "-" x38 . "\n";
|
||||
printf "%-18s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %Logins) {
|
||||
$sum = Sum(\%Logins);
|
||||
$length = MaxLength(\%Logins);
|
||||
print "\n**Successful login counters**\n";
|
||||
printf "%-" . $length . "s | %7s | %5s |\n", "Login", "cnt", "ratio";
|
||||
print "-" x($length + 20) . "\n";
|
||||
foreach my $login (sort keys %Logins) {
|
||||
printf "%-" . $length . "s | %7d | %3d%% |\n", $login, $Logins{$login}, int(($Logins{$login} * 100) / $sum);
|
||||
}
|
||||
print "-" x($length + 20) . "\n";
|
||||
printf "%-" . $length . "s | %7d | %3d%% |\n", "", $sum, 100;
|
||||
}
|
||||
|
||||
if (keys %ResponseTimes) {
|
||||
print "\n**Response timings (counts, seconds) (D=<depth> R=<result>)**\n";
|
||||
printf "%-18s | %7s | %7s | %7s | %7s |\n", "Response", "cnt", "min", "max", "avg";
|
||||
print "-" x60 . "\n";
|
||||
foreach my $req (sort keys %ResponseTimes) {
|
||||
printf "%-18s | %7d | %7.3f | %7.3f | %7.3f |\n", $req
|
||||
, $ResponseTimes{$req}->{'cnt'}
|
||||
, $ResponseTimes{$req}->{'min'}
|
||||
, $ResponseTimes{$req}->{'max'}
|
||||
, $ResponseTimes{$req}->{'sum'} / $ResponseTimes{$req}->{'cnt'};
|
||||
}
|
||||
print "-" x60 . "\n";
|
||||
}
|
||||
|
||||
if (keys %OtherEvents) {
|
||||
print "\n**Other Events**\n";
|
||||
foreach $ThisOne (sort keys %OtherEvents) {
|
||||
print "$ThisOne: $OtherEvents{$ThisOne} Time(s)\n";
|
||||
}
|
||||
}
|
||||
|
||||
if (keys %OtherList) {
|
||||
print "\n**Unmatched Entries**\n";
|
||||
foreach $ThisOne (sort keys %OtherList) {
|
||||
print "$ThisOne: $OtherList{$ThisOne} Time(s)\n";
|
||||
}
|
||||
}
|
||||
|
||||
exit(0);
|
||||
|
||||
# vim: shiftwidth=3 tabstop=3 syntax=perl et smartindent
|
11
contrib/logwatch/radicale-journald.conf
Normal file
11
contrib/logwatch/radicale-journald.conf
Normal file
|
@ -0,0 +1,11 @@
|
|||
# This file is related to Radicale - CalDAV and CardDAV server
|
||||
# for logwatch (config) - input from journald
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
|
||||
Title = "Radicale"
|
||||
|
||||
LogFile = none
|
||||
|
||||
*JournalCtl = "--output=cat --unit=radicale.service"
|
||||
|
||||
# vi: shiftwidth=3 tabstop=3 et
|
13
contrib/logwatch/radicale-syslog.conf
Normal file
13
contrib/logwatch/radicale-syslog.conf
Normal file
|
@ -0,0 +1,13 @@
|
|||
# This file is related to Radicale - CalDAV and CardDAV server
|
||||
# for logwatch (config) - input from syslog file
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
|
||||
Title = "Radicale"
|
||||
|
||||
LogFile = messages
|
||||
|
||||
*OnlyService = radicale
|
||||
|
||||
*RemoveHeaders
|
||||
|
||||
# vi: shiftwidth=3 tabstop=3 et
|
31
contrib/nginx/radicale.conf
Normal file
31
contrib/nginx/radicale.conf
Normal file
|
@ -0,0 +1,31 @@
|
|||
### Proxy Forward to local running "radicale" server
|
||||
###
|
||||
### Usual configuration file location: /etc/nginx/default.d/
|
||||
|
||||
## "well-known" redirect at least for Apple devices
|
||||
rewrite ^/.well-known/carddav /radicale/ redirect;
|
||||
rewrite ^/.well-known/caldav /radicale/ redirect;
|
||||
|
||||
## Base URI: /radicale/
|
||||
location /radicale/ {
|
||||
proxy_pass http://localhost:5232/;
|
||||
proxy_set_header X-Script-Name /radicale;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_pass_header Authorization;
|
||||
}
|
||||
|
||||
## Base URI: /
|
||||
#location / {
|
||||
# proxy_pass http://localhost:5232/;
|
||||
# proxy_set_header X-Script-Name /radicale;
|
||||
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# proxy_set_header X-Forwarded-Host $host;
|
||||
# proxy_set_header X-Forwarded-Port $server_port;
|
||||
# proxy_set_header X-Forwarded-Proto $scheme;
|
||||
# proxy_set_header Host $http_host;
|
||||
# proxy_pass_header Authorization;
|
||||
#}
|
52
logging
52
logging
|
@ -1,52 +0,0 @@
|
|||
# -*- mode: conf -*-
|
||||
# vim:ft=cfg
|
||||
|
||||
# Logging config file for Radicale - A simple calendar server
|
||||
#
|
||||
# The recommended path for this file is /etc/radicale/logging
|
||||
# The path must be specified in the logging section of the configuration file
|
||||
#
|
||||
# Some examples are included in Radicale's documentation, see:
|
||||
# http://radicale.org/logging/
|
||||
#
|
||||
# Other handlers are available. For more information, see:
|
||||
# http://docs.python.org/library/logging.config.html
|
||||
|
||||
|
||||
# Loggers, handlers and formatters keys
|
||||
|
||||
[loggers]
|
||||
# Loggers names, main configuration slots
|
||||
keys = root
|
||||
|
||||
[handlers]
|
||||
# Logging handlers, defining logging output methods
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
# Logging formatters
|
||||
keys = simple
|
||||
|
||||
|
||||
# Loggers
|
||||
|
||||
[logger_root]
|
||||
# Root logger
|
||||
level = WARNING
|
||||
handlers = console
|
||||
|
||||
|
||||
# Handlers
|
||||
|
||||
[handler_console]
|
||||
# Console handler
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
formatter = simple
|
||||
|
||||
|
||||
# Formatters
|
||||
|
||||
[formatter_simple]
|
||||
# Simple output format
|
||||
format = [%(thread)x] %(levelname)s: %(message)s
|
128
pyproject.toml
Normal file
128
pyproject.toml
Normal file
|
@ -0,0 +1,128 @@
|
|||
[project]
|
||||
name = "Radicale"
|
||||
# When the version is updated, a new section in the CHANGELOG.md file must be
|
||||
# added too.
|
||||
readme = "README.md"
|
||||
version = "3.5.1.dev"
|
||||
authors = [{name = "Guillaume Ayoub", email = "guillaume.ayoub@kozea.fr"}, {name = "Unrud", email = "unrud@outlook.com"}, {name = "Peter Bieringer", email = "pb@bieringer.de"}]
|
||||
license = {text = "GNU GPL v3"}
|
||||
description = "CalDAV and CardDAV Server"
|
||||
keywords = ["calendar", "addressbook", "CalDAV", "CardDAV"]
|
||||
classifiers = [
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Environment :: Console",
|
||||
"Environment :: Web Environment",
|
||||
"Intended Audience :: End Users/Desktop",
|
||||
"Intended Audience :: Information Technology",
|
||||
"License :: OSI Approved :: GNU General Public License (GPL)",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Programming Language :: Python :: Implementation :: PyPy",
|
||||
"Topic :: Office/Business :: Groupware",
|
||||
]
|
||||
urls = {Homepage = "https://radicale.org/"}
|
||||
requires-python = ">=3.9.0"
|
||||
dependencies = [
|
||||
"defusedxml",
|
||||
"passlib",
|
||||
"vobject>=0.9.6",
|
||||
"pika>=1.1.0",
|
||||
"requests",
|
||||
]
|
||||
|
||||
|
||||
[project.optional-dependencies]
|
||||
test = ["pytest>=7", "waitress", "bcrypt"]
|
||||
bcrypt = ["bcrypt"]
|
||||
ldap = ["ldap3"]
|
||||
|
||||
[project.scripts]
|
||||
radicale = "radicale.__main__:run"
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.2"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[tool.tox]
|
||||
min_version = "4.0"
|
||||
envlist = ["py", "flake8", "isort", "mypy"]
|
||||
|
||||
[tool.tox.env.py]
|
||||
extras = ["test"]
|
||||
deps = [
|
||||
"pytest",
|
||||
"pytest-cov"
|
||||
]
|
||||
commands = [["pytest", "-r", "s", "--cov", "--cov-report=term", "--cov-report=xml", "."]]
|
||||
|
||||
[tool.tox.env.flake8]
|
||||
deps = ["flake8==7.1.0"]
|
||||
commands = [["flake8", "."]]
|
||||
skip_install = true
|
||||
|
||||
[tool.tox.env.isort]
|
||||
deps = ["isort==5.13.2"]
|
||||
commands = [["isort", "--check", "--diff", "."]]
|
||||
skip_install = true
|
||||
|
||||
[tool.tox.env.mypy]
|
||||
deps = ["mypy==1.11.0"]
|
||||
commands = [["mypy", "--install-types", "--non-interactive", "."]]
|
||||
skip_install = true
|
||||
|
||||
|
||||
[tool.setuptools]
|
||||
platforms = ["Any"]
|
||||
include-package-data = false
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
exclude = ["*.tests"] # *.tests.*; tests.*; tests
|
||||
namespaces = false
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
radicale = [
|
||||
"web/internal_data/css/icon.png",
|
||||
"web/internal_data/css/loading.svg",
|
||||
"web/internal_data/css/logo.svg",
|
||||
"web/internal_data/css/main.css",
|
||||
"web/internal_data/css/icons/delete.svg",
|
||||
"web/internal_data/css/icons/download.svg",
|
||||
"web/internal_data/css/icons/edit.svg",
|
||||
"web/internal_data/css/icons/new.svg",
|
||||
"web/internal_data/css/icons/upload.svg",
|
||||
"web/internal_data/fn.js",
|
||||
"web/internal_data/index.html",
|
||||
"py.typed",
|
||||
]
|
||||
|
||||
[tool.isort]
|
||||
known_standard_library = "_dummy_thread,_thread,abc,aifc,argparse,array,ast,asynchat,asyncio,asyncore,atexit,audioop,base64,bdb,binascii,binhex,bisect,builtins,bz2,cProfile,calendar,cgi,cgitb,chunk,cmath,cmd,code,codecs,codeop,collections,colorsys,compileall,concurrent,configparser,contextlib,contextvars,copy,copyreg,crypt,csv,ctypes,curses,dataclasses,datetime,dbm,decimal,difflib,dis,distutils,doctest,dummy_threading,email,encodings,ensurepip,enum,errno,faulthandler,fcntl,filecmp,fileinput,fnmatch,formatter,fpectl,fractions,ftplib,functools,gc,getopt,getpass,gettext,glob,grp,gzip,hashlib,heapq,hmac,html,http,imaplib,imghdr,imp,importlib,inspect,io,ipaddress,itertools,json,keyword,lib2to3,linecache,locale,logging,lzma,macpath,mailbox,mailcap,marshal,math,mimetypes,mmap,modulefinder,msilib,msvcrt,multiprocessing,netrc,nis,nntplib,ntpath,numbers,operator,optparse,os,ossaudiodev,parser,pathlib,pdb,pickle,pickletools,pipes,pkgutil,platform,plistlib,poplib,posix,posixpath,pprint,profile,pstats,pty,pwd,py_compile,pyclbr,pydoc,queue,quopri,random,re,readline,reprlib,resource,rlcompleter,runpy,sched,secrets,select,selectors,shelve,shlex,shutil,signal,site,smtpd,smtplib,sndhdr,socket,socketserver,spwd,sqlite3,sre,sre_compile,sre_constants,sre_parse,ssl,stat,statistics,string,stringprep,struct,subprocess,sunau,symbol,symtable,sys,sysconfig,syslog,tabnanny,tarfile,telnetlib,tempfile,termios,test,textwrap,threading,time,timeit,tkinter,token,tokenize,trace,traceback,tracemalloc,tty,turtle,turtledemo,types,typing,unicodedata,unittest,urllib,uu,uuid,venv,warnings,wave,weakref,webbrowser,winreg,winsound,wsgiref,xdrlib,xml,xmlrpc,zipapp,zipfile,zipimport,zlib"
|
||||
known_third_party = "defusedxml,passlib,pkg_resources,pytest,vobject"
|
||||
|
||||
[tool.mypy]
|
||||
ignore_missing_imports = true
|
||||
show_error_codes = true
|
||||
exclude = "(^|/)build($|/)"
|
||||
|
||||
[tool.coverage.run]
|
||||
branch = true
|
||||
source = ["radicale"]
|
||||
omit = ["tests/*", "*/tests/*"]
|
||||
|
||||
[tool.coverage.report]
|
||||
# Regexes for lines to exclude from consideration
|
||||
exclude_lines = [
|
||||
# Have to re-enable the standard pragma
|
||||
"pragma: no cover",
|
||||
# Don't complain if tests don't hit defensive assertion code:
|
||||
"raise AssertionError",
|
||||
"raise NotImplementedError",
|
||||
# Don't complain if non-runnable code isn't run:
|
||||
"if __name__ == .__main__.:",
|
||||
]
|
|
@ -1,41 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2011-2017 Guillaume Ayoub
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Radicale FastCGI Example.
|
||||
|
||||
Launch a Radicale FastCGI server according to configuration.
|
||||
|
||||
This script relies on flup but can be easily adapted to use another
|
||||
WSGI-to-FastCGI mapper.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
from flup.server.fcgi import WSGIServer
|
||||
from radicale import Application, config, log
|
||||
|
||||
|
||||
config_paths = []
|
||||
if os.environ.get("RADICALE_CONFIG"):
|
||||
config_paths.append(os.environ["RADICALE_CONFIG"])
|
||||
configuration = config.load(config_paths, ignore_missing_paths=False)
|
||||
filename = os.path.expanduser(configuration.get("logging", "config"))
|
||||
debug = configuration.getboolean("logging", "debug")
|
||||
logger = log.start("radicale", filename, debug)
|
||||
WSGIServer(Application(configuration, logger)).run()
|
31
radicale.wsgi
Executable file → Normal file
31
radicale.wsgi
Executable file → Normal file
|
@ -1,35 +1,10 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2011-2017 Guillaume Ayoub
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Radicale WSGI file (mod_wsgi and uWSGI compliant).
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
from radicale import Application, config, log
|
||||
from radicale import application
|
||||
|
||||
|
||||
config_paths = []
|
||||
if os.environ.get("RADICALE_CONFIG"):
|
||||
config_paths.append(os.environ["RADICALE_CONFIG"])
|
||||
configuration = config.load(config_paths, ignore_missing_paths=False)
|
||||
filename = os.path.expanduser(configuration.get("logging", "config"))
|
||||
debug = configuration.getboolean("logging", "debug")
|
||||
logger = log.start("radicale", filename, debug)
|
||||
application = Application(configuration, logger)
|
||||
# set an environment variable
|
||||
os.environ.setdefault('SERVER_GATEWAY_INTERFACE', 'Web')
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,908 +19,60 @@
|
|||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Radicale Server module.
|
||||
Entry point for external WSGI servers (like uWSGI or Gunicorn).
|
||||
|
||||
This module offers a WSGI application class.
|
||||
|
||||
To use this module, you should take a look at the file ``radicale.py`` that
|
||||
should have been included in this package.
|
||||
Configuration files can be specified in the environment variable
|
||||
``RADICALE_CONFIG``.
|
||||
|
||||
"""
|
||||
|
||||
import base64
|
||||
import contextlib
|
||||
import datetime
|
||||
import io
|
||||
import itertools
|
||||
import logging
|
||||
import os
|
||||
import posixpath
|
||||
import pprint
|
||||
import random
|
||||
import socket
|
||||
import socketserver
|
||||
import ssl
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import wsgiref.simple_server
|
||||
import zlib
|
||||
from http import client
|
||||
from urllib.parse import unquote, urlparse
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
import vobject
|
||||
|
||||
from . import auth, rights, storage, web, xmlutils
|
||||
|
||||
VERSION = "2.1.3"
|
||||
|
||||
NOT_ALLOWED = (
|
||||
client.FORBIDDEN, (("Content-Type", "text/plain"),),
|
||||
"Access to the requested resource forbidden.")
|
||||
BAD_REQUEST = (
|
||||
client.BAD_REQUEST, (("Content-Type", "text/plain"),), "Bad Request")
|
||||
NOT_FOUND = (
|
||||
client.NOT_FOUND, (("Content-Type", "text/plain"),),
|
||||
"The requested resource could not be found.")
|
||||
WEBDAV_PRECONDITION_FAILED = (
|
||||
client.CONFLICT, (("Content-Type", "text/plain"),),
|
||||
"WebDAV precondition failed.")
|
||||
PRECONDITION_FAILED = (
|
||||
client.PRECONDITION_FAILED,
|
||||
(("Content-Type", "text/plain"),), "Precondition failed.")
|
||||
REQUEST_TIMEOUT = (
|
||||
client.REQUEST_TIMEOUT, (("Content-Type", "text/plain"),),
|
||||
"Connection timed out.")
|
||||
REQUEST_ENTITY_TOO_LARGE = (
|
||||
client.REQUEST_ENTITY_TOO_LARGE, (("Content-Type", "text/plain"),),
|
||||
"Request body too large.")
|
||||
REMOTE_DESTINATION = (
|
||||
client.BAD_GATEWAY, (("Content-Type", "text/plain"),),
|
||||
"Remote destination not supported.")
|
||||
DIRECTORY_LISTING = (
|
||||
client.FORBIDDEN, (("Content-Type", "text/plain"),),
|
||||
"Directory listings are not supported.")
|
||||
INTERNAL_SERVER_ERROR = (
|
||||
client.INTERNAL_SERVER_ERROR, (("Content-Type", "text/plain"),),
|
||||
"A server error occurred. Please contact the administrator.")
|
||||
|
||||
DAV_HEADERS = "1, 2, 3, calendar-access, addressbook, extended-mkcol"
|
||||
|
||||
|
||||
class HTTPServer(wsgiref.simple_server.WSGIServer):
|
||||
"""HTTP server."""
|
||||
|
||||
# These class attributes must be set before creating instance
|
||||
client_timeout = None
|
||||
max_connections = None
|
||||
logger = None
|
||||
|
||||
def __init__(self, address, handler, bind_and_activate=True):
|
||||
"""Create server."""
|
||||
ipv6 = ":" in address[0]
|
||||
|
||||
if ipv6:
|
||||
self.address_family = socket.AF_INET6
|
||||
|
||||
# Do not bind and activate, as we might change socket options
|
||||
super().__init__(address, handler, False)
|
||||
|
||||
if ipv6:
|
||||
# Only allow IPv6 connections to the IPv6 socket
|
||||
self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
|
||||
|
||||
if bind_and_activate:
|
||||
self.server_bind()
|
||||
self.server_activate()
|
||||
|
||||
if self.max_connections:
|
||||
self.connections_guard = threading.BoundedSemaphore(
|
||||
self.max_connections)
|
||||
else:
|
||||
# use dummy context manager
|
||||
self.connections_guard = contextlib.ExitStack()
|
||||
|
||||
def get_request(self):
|
||||
# Set timeout for client
|
||||
_socket, address = super().get_request()
|
||||
if self.client_timeout:
|
||||
_socket.settimeout(self.client_timeout)
|
||||
return _socket, address
|
||||
|
||||
def handle_error(self, request, client_address):
|
||||
if issubclass(sys.exc_info()[0], socket.timeout):
|
||||
self.logger.info("client timed out", exc_info=True)
|
||||
else:
|
||||
self.logger.error("An exception occurred during request: %s",
|
||||
sys.exc_info()[1], exc_info=True)
|
||||
|
||||
|
||||
class HTTPSServer(HTTPServer):
|
||||
"""HTTPS server."""
|
||||
|
||||
# These class attributes must be set before creating instance
|
||||
certificate = None
|
||||
key = None
|
||||
protocol = None
|
||||
ciphers = None
|
||||
certificate_authority = None
|
||||
|
||||
def __init__(self, address, handler):
|
||||
"""Create server by wrapping HTTP socket in an SSL socket."""
|
||||
super().__init__(address, handler, bind_and_activate=False)
|
||||
|
||||
self.socket = ssl.wrap_socket(
|
||||
self.socket, self.key, self.certificate, server_side=True,
|
||||
cert_reqs=ssl.CERT_REQUIRED if self.certificate_authority else
|
||||
ssl.CERT_NONE,
|
||||
ca_certs=self.certificate_authority or None,
|
||||
ssl_version=self.protocol, ciphers=self.ciphers,
|
||||
do_handshake_on_connect=False)
|
||||
|
||||
self.server_bind()
|
||||
self.server_activate()
|
||||
|
||||
|
||||
class ThreadedHTTPServer(socketserver.ThreadingMixIn, HTTPServer):
|
||||
def process_request_thread(self, request, client_address):
|
||||
with self.connections_guard:
|
||||
return super().process_request_thread(request, client_address)
|
||||
|
||||
|
||||
class ThreadedHTTPSServer(socketserver.ThreadingMixIn, HTTPSServer):
|
||||
def process_request_thread(self, request, client_address):
|
||||
try:
|
||||
try:
|
||||
request.do_handshake()
|
||||
except socket.timeout:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise RuntimeError("SSL handshake failed: %s" % e) from e
|
||||
except Exception:
|
||||
try:
|
||||
self.handle_error(request, client_address)
|
||||
finally:
|
||||
self.shutdown_request(request)
|
||||
return
|
||||
with self.connections_guard:
|
||||
return super().process_request_thread(request, client_address)
|
||||
|
||||
|
||||
class RequestHandler(wsgiref.simple_server.WSGIRequestHandler):
|
||||
"""HTTP requests handler."""
|
||||
|
||||
# These class attributes must be set before creating instance
|
||||
logger = None
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Store exception for logging
|
||||
self.error_stream = io.StringIO()
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def get_stderr(self):
|
||||
return self.error_stream
|
||||
|
||||
def log_message(self, *args, **kwargs):
|
||||
"""Disable inner logging management."""
|
||||
|
||||
def get_environ(self):
|
||||
env = super().get_environ()
|
||||
if hasattr(self.connection, "getpeercert"):
|
||||
# The certificate can be evaluated by the auth module
|
||||
env["REMOTE_CERTIFICATE"] = self.connection.getpeercert()
|
||||
# Parent class only tries latin1 encoding
|
||||
env["PATH_INFO"] = unquote(self.path.split("?", 1)[0])
|
||||
return env
|
||||
|
||||
def handle(self):
|
||||
super().handle()
|
||||
# Log exception
|
||||
error = self.error_stream.getvalue().strip("\n")
|
||||
if error:
|
||||
self.logger.error(
|
||||
"An unhandled exception occurred during request:\n%s" % error)
|
||||
|
||||
|
||||
class Application:
|
||||
"""WSGI application managing collections."""
|
||||
|
||||
def __init__(self, configuration, logger):
|
||||
"""Initialize application."""
|
||||
super().__init__()
|
||||
self.configuration = configuration
|
||||
self.logger = logger
|
||||
self.Auth = auth.load(configuration, logger)
|
||||
self.Collection = storage.load(configuration, logger)
|
||||
self.Rights = rights.load(configuration, logger)
|
||||
self.Web = web.load(configuration, logger)
|
||||
self.encoding = configuration.get("encoding", "request")
|
||||
|
||||
def headers_log(self, environ):
|
||||
"""Sanitize headers for logging."""
|
||||
request_environ = dict(environ)
|
||||
|
||||
# Remove environment variables
|
||||
if not self.configuration.getboolean("logging", "full_environment"):
|
||||
for shell_variable in os.environ:
|
||||
request_environ.pop(shell_variable, None)
|
||||
|
||||
# Mask passwords
|
||||
mask_passwords = self.configuration.getboolean(
|
||||
"logging", "mask_passwords")
|
||||
authorization = request_environ.get(
|
||||
"HTTP_AUTHORIZATION", "").startswith("Basic")
|
||||
if mask_passwords and authorization:
|
||||
request_environ["HTTP_AUTHORIZATION"] = "Basic **masked**"
|
||||
|
||||
return request_environ
|
||||
|
||||
def decode(self, text, environ):
|
||||
"""Try to magically decode ``text`` according to given ``environ``."""
|
||||
# List of charsets to try
|
||||
charsets = []
|
||||
|
||||
# First append content charset given in the request
|
||||
content_type = environ.get("CONTENT_TYPE")
|
||||
if content_type and "charset=" in content_type:
|
||||
charsets.append(
|
||||
content_type.split("charset=")[1].split(";")[0].strip())
|
||||
# Then append default Radicale charset
|
||||
charsets.append(self.encoding)
|
||||
# Then append various fallbacks
|
||||
charsets.append("utf-8")
|
||||
charsets.append("iso8859-1")
|
||||
|
||||
# Try to decode
|
||||
for charset in charsets:
|
||||
try:
|
||||
return text.decode(charset)
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
raise UnicodeDecodeError
|
||||
|
||||
def collect_allowed_items(self, items, user):
|
||||
"""Get items from request that user is allowed to access."""
|
||||
read_allowed_items = []
|
||||
write_allowed_items = []
|
||||
for item in items:
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
path = storage.sanitize_path("/%s/" % item.path)
|
||||
can_read = self.Rights.authorized(user, path, "r")
|
||||
can_write = self.Rights.authorized(user, path, "w")
|
||||
target = "collection %r" % item.path
|
||||
else:
|
||||
path = storage.sanitize_path("/%s/%s" % (item.collection.path,
|
||||
item.href))
|
||||
can_read = self.Rights.authorized_item(user, path, "r")
|
||||
can_write = self.Rights.authorized_item(user, path, "w")
|
||||
target = "item %r from %r" % (item.href, item.collection.path)
|
||||
text_status = []
|
||||
if can_read:
|
||||
text_status.append("read")
|
||||
read_allowed_items.append(item)
|
||||
if can_write:
|
||||
text_status.append("write")
|
||||
write_allowed_items.append(item)
|
||||
self.logger.debug(
|
||||
"%s has %s access to %s",
|
||||
repr(user) if user else "anonymous user",
|
||||
" and ".join(text_status) if text_status else "NO", target)
|
||||
return read_allowed_items, write_allowed_items
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
try:
|
||||
status, headers, answers = self._handle_request(environ)
|
||||
except Exception as e:
|
||||
try:
|
||||
method = str(environ["REQUEST_METHOD"])
|
||||
except Exception:
|
||||
method = "unknown"
|
||||
try:
|
||||
path = str(environ.get("PATH_INFO", ""))
|
||||
except Exception:
|
||||
path = ""
|
||||
self.logger.error("An exception occurred during %s request on %r: "
|
||||
"%s", method, path, e, exc_info=True)
|
||||
status, headers, answer = INTERNAL_SERVER_ERROR
|
||||
status = "%d %s" % (
|
||||
status, client.responses.get(status, "Unknown"))
|
||||
headers = [("Content-Length", str(len(answer)))] + list(headers)
|
||||
answers = [answer.encode("ascii")]
|
||||
start_response(status, headers)
|
||||
return answers
|
||||
|
||||
def _handle_request(self, environ):
|
||||
"""Manage a request."""
|
||||
def response(status, headers=(), answer=None):
|
||||
headers = dict(headers)
|
||||
# Set content length
|
||||
if answer:
|
||||
if hasattr(answer, "encode"):
|
||||
self.logger.debug("Response content:\n%s", answer)
|
||||
headers["Content-Type"] += "; charset=%s" % self.encoding
|
||||
answer = answer.encode(self.encoding)
|
||||
accept_encoding = [
|
||||
encoding.strip() for encoding in
|
||||
environ.get("HTTP_ACCEPT_ENCODING", "").split(",")
|
||||
if encoding.strip()]
|
||||
|
||||
if "gzip" in accept_encoding:
|
||||
zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
|
||||
answer = zcomp.compress(answer) + zcomp.flush()
|
||||
headers["Content-Encoding"] = "gzip"
|
||||
|
||||
headers["Content-Length"] = str(len(answer))
|
||||
|
||||
# Add extra headers set in configuration
|
||||
if self.configuration.has_section("headers"):
|
||||
for key in self.configuration.options("headers"):
|
||||
headers[key] = self.configuration.get("headers", key)
|
||||
|
||||
# Start response
|
||||
time_end = datetime.datetime.now()
|
||||
status = "%d %s" % (
|
||||
status, client.responses.get(status, "Unknown"))
|
||||
self.logger.info(
|
||||
"%s response status for %r%s in %.3f seconds: %s",
|
||||
environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""),
|
||||
depthinfo, (time_end - time_begin).total_seconds(), status)
|
||||
# Return response content
|
||||
return status, list(headers.items()), [answer] if answer else []
|
||||
|
||||
remote_host = "unknown"
|
||||
if environ.get("REMOTE_HOST"):
|
||||
remote_host = repr(environ["REMOTE_HOST"])
|
||||
elif environ.get("REMOTE_ADDR"):
|
||||
remote_host = environ["REMOTE_ADDR"]
|
||||
if environ.get("HTTP_X_FORWARDED_FOR"):
|
||||
remote_host = "%r (forwarded by %s)" % (
|
||||
environ["HTTP_X_FORWARDED_FOR"], remote_host)
|
||||
remote_useragent = ""
|
||||
if environ.get("HTTP_USER_AGENT"):
|
||||
remote_useragent = " using %r" % environ["HTTP_USER_AGENT"]
|
||||
depthinfo = ""
|
||||
if environ.get("HTTP_DEPTH"):
|
||||
depthinfo = " with depth %r" % environ["HTTP_DEPTH"]
|
||||
time_begin = datetime.datetime.now()
|
||||
self.logger.info(
|
||||
"%s request for %r%s received from %s%s",
|
||||
environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""), depthinfo,
|
||||
remote_host, remote_useragent)
|
||||
headers = pprint.pformat(self.headers_log(environ))
|
||||
self.logger.debug("Request headers:\n%s", headers)
|
||||
|
||||
# Let reverse proxies overwrite SCRIPT_NAME
|
||||
if "HTTP_X_SCRIPT_NAME" in environ:
|
||||
# script_name must be removed from PATH_INFO by the client.
|
||||
unsafe_base_prefix = environ["HTTP_X_SCRIPT_NAME"]
|
||||
self.logger.debug("Script name overwritten by client: %r",
|
||||
unsafe_base_prefix)
|
||||
else:
|
||||
# SCRIPT_NAME is already removed from PATH_INFO, according to the
|
||||
# WSGI specification.
|
||||
unsafe_base_prefix = environ.get("SCRIPT_NAME", "")
|
||||
# Sanitize base prefix
|
||||
base_prefix = storage.sanitize_path(unsafe_base_prefix).rstrip("/")
|
||||
self.logger.debug("Sanitized script name: %r", base_prefix)
|
||||
# Sanitize request URI (a WSGI server indicates with an empty path,
|
||||
# that the URL targets the application root without a trailing slash)
|
||||
path = storage.sanitize_path(environ.get("PATH_INFO", ""))
|
||||
self.logger.debug("Sanitized path: %r", path)
|
||||
|
||||
# Get function corresponding to method
|
||||
function = getattr(self, "do_%s" % environ["REQUEST_METHOD"].upper())
|
||||
|
||||
# Ask authentication backend to check rights
|
||||
external_login = self.Auth.get_external_login(environ)
|
||||
authorization = environ.get("HTTP_AUTHORIZATION", "")
|
||||
if external_login:
|
||||
login, password = external_login
|
||||
elif authorization.startswith("Basic"):
|
||||
authorization = authorization[len("Basic"):].strip()
|
||||
login, password = self.decode(base64.b64decode(
|
||||
authorization.encode("ascii")), environ).split(":", 1)
|
||||
else:
|
||||
# DEPRECATED: use remote_user backend instead
|
||||
login = environ.get("REMOTE_USER", "")
|
||||
password = ""
|
||||
user = self.Auth.map_login_to_user(login)
|
||||
|
||||
# If "/.well-known" is not available, clients query "/"
|
||||
if path == "/.well-known" or path.startswith("/.well-known/"):
|
||||
return response(*NOT_FOUND)
|
||||
|
||||
if not user:
|
||||
is_authenticated = True
|
||||
elif not storage.is_safe_path_component(user):
|
||||
# Prevent usernames like "user/calendar.ics"
|
||||
self.logger.info("Refused unsafe username: %r", user)
|
||||
is_authenticated = False
|
||||
else:
|
||||
is_authenticated = self.Auth.is_authenticated(user, password)
|
||||
if not is_authenticated:
|
||||
self.logger.info("Failed login attempt: %r", user)
|
||||
# Random delay to avoid timing oracles and bruteforce attacks
|
||||
delay = self.configuration.getfloat("auth", "delay")
|
||||
if delay > 0:
|
||||
random_delay = delay * (0.5 + random.random())
|
||||
self.logger.debug("Sleeping %.3f seconds", random_delay)
|
||||
time.sleep(random_delay)
|
||||
else:
|
||||
self.logger.info("Successful login: %r", user)
|
||||
|
||||
# Create principal collection
|
||||
if user and is_authenticated:
|
||||
principal_path = "/%s/" % user
|
||||
if self.Rights.authorized(user, principal_path, "w"):
|
||||
with self.Collection.acquire_lock("r", user):
|
||||
principal = next(
|
||||
self.Collection.discover(principal_path, depth="1"),
|
||||
None)
|
||||
if not principal:
|
||||
with self.Collection.acquire_lock("w", user):
|
||||
try:
|
||||
self.Collection.create_collection(principal_path)
|
||||
except ValueError as e:
|
||||
self.logger.warning("Failed to create principal "
|
||||
"collection %r: %s", user, e)
|
||||
is_authenticated = False
|
||||
else:
|
||||
self.logger.warning("Access to principal path %r denied by "
|
||||
"rights backend", principal_path)
|
||||
|
||||
# Verify content length
|
||||
content_length = int(environ.get("CONTENT_LENGTH") or 0)
|
||||
if content_length:
|
||||
max_content_length = self.configuration.getint(
|
||||
"server", "max_content_length")
|
||||
if max_content_length and content_length > max_content_length:
|
||||
self.logger.info(
|
||||
"Request body too large: %d", content_length)
|
||||
return response(*REQUEST_ENTITY_TOO_LARGE)
|
||||
|
||||
if is_authenticated:
|
||||
status, headers, answer = function(
|
||||
environ, base_prefix, path, user)
|
||||
if (status, headers, answer) == NOT_ALLOWED:
|
||||
self.logger.info("Access to %r denied for %s", path,
|
||||
repr(user) if user else "anonymous user")
|
||||
else:
|
||||
status, headers, answer = NOT_ALLOWED
|
||||
|
||||
if (status, headers, answer) == NOT_ALLOWED and not (
|
||||
user and is_authenticated) and not external_login:
|
||||
# Unknown or unauthorized user
|
||||
self.logger.debug("Asking client for authentication")
|
||||
status = client.UNAUTHORIZED
|
||||
realm = self.configuration.get("server", "realm")
|
||||
headers = dict(headers)
|
||||
headers.update({
|
||||
"WWW-Authenticate":
|
||||
"Basic realm=\"%s\"" % realm})
|
||||
|
||||
return response(status, headers, answer)
|
||||
|
||||
def _access(self, user, path, permission, item=None):
|
||||
"""Check if ``user`` can access ``path`` or the parent collection.
|
||||
|
||||
``permission`` must either be "r" or "w".
|
||||
|
||||
If ``item`` is given, only access to that class of item is checked.
|
||||
|
||||
"""
|
||||
allowed = False
|
||||
if not item or isinstance(item, storage.BaseCollection):
|
||||
allowed |= self.Rights.authorized(user, path, permission)
|
||||
if not item or not isinstance(item, storage.BaseCollection):
|
||||
allowed |= self.Rights.authorized_item(user, path, permission)
|
||||
return allowed
|
||||
|
||||
def _read_raw_content(self, environ):
|
||||
content_length = int(environ.get("CONTENT_LENGTH") or 0)
|
||||
if not content_length:
|
||||
return b""
|
||||
content = environ["wsgi.input"].read(content_length)
|
||||
if len(content) < content_length:
|
||||
raise RuntimeError("Request body too short: %d" % len(content))
|
||||
return content
|
||||
|
||||
def _read_content(self, environ):
|
||||
content = self.decode(self._read_raw_content(environ), environ)
|
||||
self.logger.debug("Request content:\n%s", content)
|
||||
return content
|
||||
|
||||
def _read_xml_content(self, environ):
|
||||
content = self.decode(self._read_raw_content(environ), environ)
|
||||
if not content:
|
||||
return None
|
||||
try:
|
||||
xml_content = ET.fromstring(content)
|
||||
except ET.ParseError as e:
|
||||
self.logger.debug("Request content (Invalid XML):\n%s", content)
|
||||
raise RuntimeError("Failed to parse XML: %s" % e) from e
|
||||
if self.logger.isEnabledFor(logging.DEBUG):
|
||||
self.logger.debug("Request content:\n%s",
|
||||
xmlutils.pretty_xml(xml_content))
|
||||
return xml_content
|
||||
|
||||
def _write_xml_content(self, xml_content):
|
||||
if self.logger.isEnabledFor(logging.DEBUG):
|
||||
self.logger.debug("Response content:\n%s",
|
||||
xmlutils.pretty_xml(xml_content))
|
||||
f = io.BytesIO()
|
||||
ET.ElementTree(xml_content).write(f, encoding=self.encoding,
|
||||
xml_declaration=True)
|
||||
return f.getvalue()
|
||||
|
||||
def do_DELETE(self, environ, base_prefix, path, user):
|
||||
"""Manage DELETE request."""
|
||||
if not self._access(user, path, "w"):
|
||||
return NOT_ALLOWED
|
||||
with self.Collection.acquire_lock("w", user):
|
||||
item = next(self.Collection.discover(path), None)
|
||||
if not self._access(user, path, "w", item):
|
||||
return NOT_ALLOWED
|
||||
if not item:
|
||||
return NOT_FOUND
|
||||
if_match = environ.get("HTTP_IF_MATCH", "*")
|
||||
if if_match not in ("*", item.etag):
|
||||
# ETag precondition not verified, do not delete item
|
||||
return PRECONDITION_FAILED
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
xml_answer = xmlutils.delete(base_prefix, path, item)
|
||||
else:
|
||||
xml_answer = xmlutils.delete(
|
||||
base_prefix, path, item.collection, item.href)
|
||||
headers = {"Content-Type": "text/xml; charset=%s" % self.encoding}
|
||||
return client.OK, headers, self._write_xml_content(xml_answer)
|
||||
|
||||
def do_GET(self, environ, base_prefix, path, user):
|
||||
"""Manage GET request."""
|
||||
# Redirect to .web if the root URL is requested
|
||||
if not path.strip("/"):
|
||||
web_path = ".web"
|
||||
if not environ.get("PATH_INFO"):
|
||||
web_path = posixpath.join(posixpath.basename(base_prefix),
|
||||
web_path)
|
||||
return (client.FOUND,
|
||||
{"Location": web_path, "Content-Type": "text/plain"},
|
||||
"Redirected to %s" % web_path)
|
||||
# Dispatch .web URL to web module
|
||||
if path == "/.web" or path.startswith("/.web/"):
|
||||
return self.Web.get(environ, base_prefix, path, user)
|
||||
if not self._access(user, path, "r"):
|
||||
return NOT_ALLOWED
|
||||
with self.Collection.acquire_lock("r", user):
|
||||
item = next(self.Collection.discover(path), None)
|
||||
if not self._access(user, path, "r", item):
|
||||
return NOT_ALLOWED
|
||||
if not item:
|
||||
return NOT_FOUND
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
collection = item
|
||||
if collection.get_meta("tag") not in (
|
||||
"VADDRESSBOOK", "VCALENDAR"):
|
||||
return DIRECTORY_LISTING
|
||||
else:
|
||||
collection = item.collection
|
||||
content_type = xmlutils.MIMETYPES.get(
|
||||
collection.get_meta("tag"), "text/plain")
|
||||
headers = {
|
||||
"Content-Type": content_type,
|
||||
"Last-Modified": item.last_modified,
|
||||
"ETag": item.etag}
|
||||
answer = item.serialize()
|
||||
return client.OK, headers, answer
|
||||
|
||||
def do_HEAD(self, environ, base_prefix, path, user):
|
||||
"""Manage HEAD request."""
|
||||
status, headers, answer = self.do_GET(
|
||||
environ, base_prefix, path, user)
|
||||
return status, headers, None
|
||||
|
||||
def do_MKCALENDAR(self, environ, base_prefix, path, user):
|
||||
"""Manage MKCALENDAR request."""
|
||||
if not self.Rights.authorized(user, path, "w"):
|
||||
return NOT_ALLOWED
|
||||
try:
|
||||
xml_content = self._read_xml_content(environ)
|
||||
except RuntimeError as e:
|
||||
self.logger.warning(
|
||||
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
except socket.timeout as e:
|
||||
self.logger.debug("client timed out", exc_info=True)
|
||||
return REQUEST_TIMEOUT
|
||||
with self.Collection.acquire_lock("w", user):
|
||||
item = next(self.Collection.discover(path), None)
|
||||
if item:
|
||||
return WEBDAV_PRECONDITION_FAILED
|
||||
props = xmlutils.props_from_request(xml_content)
|
||||
props["tag"] = "VCALENDAR"
|
||||
# TODO: use this?
|
||||
# timezone = props.get("C:calendar-timezone")
|
||||
try:
|
||||
storage.check_and_sanitize_props(props)
|
||||
self.Collection.create_collection(path, props=props)
|
||||
except ValueError as e:
|
||||
self.logger.warning(
|
||||
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
return client.CREATED, {}, None
|
||||
|
||||
def do_MKCOL(self, environ, base_prefix, path, user):
|
||||
"""Manage MKCOL request."""
|
||||
if not self.Rights.authorized(user, path, "w"):
|
||||
return NOT_ALLOWED
|
||||
try:
|
||||
xml_content = self._read_xml_content(environ)
|
||||
except RuntimeError as e:
|
||||
self.logger.warning(
|
||||
"Bad MKCOL request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
except socket.timeout as e:
|
||||
self.logger.debug("client timed out", exc_info=True)
|
||||
return REQUEST_TIMEOUT
|
||||
with self.Collection.acquire_lock("w", user):
|
||||
item = next(self.Collection.discover(path), None)
|
||||
if item:
|
||||
return WEBDAV_PRECONDITION_FAILED
|
||||
props = xmlutils.props_from_request(xml_content)
|
||||
try:
|
||||
storage.check_and_sanitize_props(props)
|
||||
self.Collection.create_collection(path, props=props)
|
||||
except ValueError as e:
|
||||
self.logger.warning(
|
||||
"Bad MKCOL request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
return client.CREATED, {}, None
|
||||
|
||||
def do_MOVE(self, environ, base_prefix, path, user):
|
||||
"""Manage MOVE request."""
|
||||
raw_dest = environ.get("HTTP_DESTINATION", "")
|
||||
to_url = urlparse(raw_dest)
|
||||
if to_url.netloc != environ["HTTP_HOST"]:
|
||||
self.logger.info("Unsupported destination address: %r", raw_dest)
|
||||
# Remote destination server, not supported
|
||||
return REMOTE_DESTINATION
|
||||
if not self._access(user, path, "w"):
|
||||
return NOT_ALLOWED
|
||||
to_path = storage.sanitize_path(to_url.path)
|
||||
if not (to_path + "/").startswith(base_prefix + "/"):
|
||||
self.logger.warning("Destination %r from MOVE request on %r does"
|
||||
"n't start with base prefix", to_path, path)
|
||||
return NOT_ALLOWED
|
||||
to_path = to_path[len(base_prefix):]
|
||||
if not self._access(user, to_path, "w"):
|
||||
return NOT_ALLOWED
|
||||
|
||||
with self.Collection.acquire_lock("w", user):
|
||||
item = next(self.Collection.discover(path), None)
|
||||
if not self._access(user, path, "w", item):
|
||||
return NOT_ALLOWED
|
||||
if not self._access(user, to_path, "w", item):
|
||||
return NOT_ALLOWED
|
||||
if not item:
|
||||
return NOT_FOUND
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
return WEBDAV_PRECONDITION_FAILED
|
||||
|
||||
to_item = next(self.Collection.discover(to_path), None)
|
||||
if (isinstance(to_item, storage.BaseCollection) or
|
||||
to_item and environ.get("HTTP_OVERWRITE", "F") != "T"):
|
||||
return WEBDAV_PRECONDITION_FAILED
|
||||
to_parent_path = storage.sanitize_path(
|
||||
"/%s/" % posixpath.dirname(to_path.strip("/")))
|
||||
to_collection = next(
|
||||
self.Collection.discover(to_parent_path), None)
|
||||
if not to_collection:
|
||||
return WEBDAV_PRECONDITION_FAILED
|
||||
to_href = posixpath.basename(to_path.strip("/"))
|
||||
try:
|
||||
self.Collection.move(item, to_collection, to_href)
|
||||
except ValueError as e:
|
||||
self.logger.warning(
|
||||
"Bad MOVE request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
return client.CREATED, {}, None
|
||||
|
||||
def do_OPTIONS(self, environ, base_prefix, path, user):
|
||||
"""Manage OPTIONS request."""
|
||||
headers = {
|
||||
"Allow": ", ".join(
|
||||
name[3:] for name in dir(self) if name.startswith("do_")),
|
||||
"DAV": DAV_HEADERS}
|
||||
return client.OK, headers, None
|
||||
|
||||
def do_PROPFIND(self, environ, base_prefix, path, user):
|
||||
"""Manage PROPFIND request."""
|
||||
if not self._access(user, path, "r"):
|
||||
return NOT_ALLOWED
|
||||
try:
|
||||
xml_content = self._read_xml_content(environ)
|
||||
except RuntimeError as e:
|
||||
self.logger.warning(
|
||||
"Bad PROPFIND request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
except socket.timeout as e:
|
||||
self.logger.debug("client timed out", exc_info=True)
|
||||
return REQUEST_TIMEOUT
|
||||
with self.Collection.acquire_lock("r", user):
|
||||
items = self.Collection.discover(
|
||||
path, environ.get("HTTP_DEPTH", "0"))
|
||||
# take root item for rights checking
|
||||
item = next(items, None)
|
||||
if not self._access(user, path, "r", item):
|
||||
return NOT_ALLOWED
|
||||
if not item:
|
||||
return NOT_FOUND
|
||||
# put item back
|
||||
items = itertools.chain([item], items)
|
||||
read_items, write_items = self.collect_allowed_items(items, user)
|
||||
headers = {"DAV": DAV_HEADERS,
|
||||
"Content-Type": "text/xml; charset=%s" % self.encoding}
|
||||
status, xml_answer = xmlutils.propfind(
|
||||
base_prefix, path, xml_content, read_items, write_items, user)
|
||||
if status == client.FORBIDDEN:
|
||||
return NOT_ALLOWED
|
||||
else:
|
||||
return status, headers, self._write_xml_content(xml_answer)
|
||||
|
||||
def do_PROPPATCH(self, environ, base_prefix, path, user):
|
||||
"""Manage PROPPATCH request."""
|
||||
if not self.Rights.authorized(user, path, "w"):
|
||||
return NOT_ALLOWED
|
||||
try:
|
||||
xml_content = self._read_xml_content(environ)
|
||||
except RuntimeError as e:
|
||||
self.logger.warning(
|
||||
"Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
except socket.timeout as e:
|
||||
self.logger.debug("client timed out", exc_info=True)
|
||||
return REQUEST_TIMEOUT
|
||||
with self.Collection.acquire_lock("w", user):
|
||||
item = next(self.Collection.discover(path), None)
|
||||
if not isinstance(item, storage.BaseCollection):
|
||||
return WEBDAV_PRECONDITION_FAILED
|
||||
headers = {"DAV": DAV_HEADERS,
|
||||
"Content-Type": "text/xml; charset=%s" % self.encoding}
|
||||
try:
|
||||
xml_answer = xmlutils.proppatch(base_prefix, path, xml_content,
|
||||
item)
|
||||
except ValueError as e:
|
||||
self.logger.warning(
|
||||
"Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
return (client.MULTI_STATUS, headers,
|
||||
self._write_xml_content(xml_answer))
|
||||
|
||||
def do_PUT(self, environ, base_prefix, path, user):
|
||||
"""Manage PUT request."""
|
||||
if not self._access(user, path, "w"):
|
||||
return NOT_ALLOWED
|
||||
try:
|
||||
content = self._read_content(environ)
|
||||
except RuntimeError as e:
|
||||
self.logger.warning(
|
||||
"Bad PUT request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
except socket.timeout as e:
|
||||
self.logger.debug("client timed out", exc_info=True)
|
||||
return REQUEST_TIMEOUT
|
||||
with self.Collection.acquire_lock("w", user):
|
||||
parent_path = storage.sanitize_path(
|
||||
"/%s/" % posixpath.dirname(path.strip("/")))
|
||||
item = next(self.Collection.discover(path), None)
|
||||
parent_item = next(self.Collection.discover(parent_path), None)
|
||||
|
||||
write_whole_collection = (
|
||||
isinstance(item, storage.BaseCollection) or
|
||||
not parent_item or (
|
||||
not next(parent_item.list(), None) and
|
||||
parent_item.get_meta("tag") not in (
|
||||
"VADDRESSBOOK", "VCALENDAR")))
|
||||
if write_whole_collection:
|
||||
if not self.Rights.authorized(user, path, "w"):
|
||||
return NOT_ALLOWED
|
||||
elif not self.Rights.authorized_item(user, path, "w"):
|
||||
return NOT_ALLOWED
|
||||
|
||||
etag = environ.get("HTTP_IF_MATCH", "")
|
||||
if not item and etag:
|
||||
# Etag asked but no item found: item has been removed
|
||||
return PRECONDITION_FAILED
|
||||
if item and etag and item.etag != etag:
|
||||
# Etag asked but item not matching: item has changed
|
||||
return PRECONDITION_FAILED
|
||||
|
||||
match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
|
||||
if item and match:
|
||||
# Creation asked but item found: item can't be replaced
|
||||
return PRECONDITION_FAILED
|
||||
|
||||
try:
|
||||
items = tuple(vobject.readComponents(content or ""))
|
||||
if not write_whole_collection and len(items) != 1:
|
||||
raise RuntimeError(
|
||||
"Item contains %d components" % len(items))
|
||||
if write_whole_collection or not parent_item.get_meta("tag"):
|
||||
content_type = environ.get("CONTENT_TYPE",
|
||||
"").split(";")[0]
|
||||
tags = {value: key
|
||||
for key, value in xmlutils.MIMETYPES.items()}
|
||||
tag = tags.get(content_type)
|
||||
if items and items[0].name == "VCALENDAR":
|
||||
tag = "VCALENDAR"
|
||||
elif items and items[0].name in ("VCARD", "VLIST"):
|
||||
tag = "VADDRESSBOOK"
|
||||
else:
|
||||
tag = parent_item.get_meta("tag")
|
||||
if tag == "VCALENDAR" and len(items) > 1:
|
||||
raise RuntimeError("VCALENDAR collection contains %d "
|
||||
"components" % len(items))
|
||||
for i in items:
|
||||
storage.check_and_sanitize_item(
|
||||
i, is_collection=write_whole_collection, uid=item.uid
|
||||
if not write_whole_collection and item else None,
|
||||
tag=tag)
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
"Bad PUT request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
|
||||
if write_whole_collection:
|
||||
props = {"tag": tag} if tag else {}
|
||||
try:
|
||||
storage.check_and_sanitize_props(props)
|
||||
new_item = self.Collection.create_collection(
|
||||
path, items, props)
|
||||
except ValueError as e:
|
||||
self.logger.warning(
|
||||
"Bad PUT request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
else:
|
||||
href = posixpath.basename(path.strip("/"))
|
||||
try:
|
||||
if tag and not parent_item.get_meta("tag"):
|
||||
new_props = parent_item.get_meta()
|
||||
new_props["tag"] = tag
|
||||
storage.check_and_sanitize_props(new_props)
|
||||
parent_item.set_meta_all(new_props)
|
||||
new_item = parent_item.upload(href, items[0])
|
||||
except ValueError as e:
|
||||
self.logger.warning(
|
||||
"Bad PUT request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
headers = {"ETag": new_item.etag}
|
||||
return client.CREATED, headers, None
|
||||
|
||||
def do_REPORT(self, environ, base_prefix, path, user):
|
||||
"""Manage REPORT request."""
|
||||
if not self._access(user, path, "r"):
|
||||
return NOT_ALLOWED
|
||||
try:
|
||||
xml_content = self._read_xml_content(environ)
|
||||
except RuntimeError as e:
|
||||
self.logger.warning(
|
||||
"Bad REPORT request on %r: %s", path, e, exc_info=True)
|
||||
return BAD_REQUEST
|
||||
except socket.timeout as e:
|
||||
self.logger.debug("client timed out", exc_info=True)
|
||||
return REQUEST_TIMEOUT
|
||||
with self.Collection.acquire_lock("r", user):
|
||||
item = next(self.Collection.discover(path), None)
|
||||
if not self._access(user, path, "r", item):
|
||||
return NOT_ALLOWED
|
||||
if not item:
|
||||
return NOT_FOUND
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
collection = item
|
||||
else:
|
||||
collection = item.collection
|
||||
headers = {"Content-Type": "text/xml; charset=%s" % self.encoding}
|
||||
status, xml_answer = xmlutils.report(
|
||||
base_prefix, path, xml_content, collection)
|
||||
return (status, headers, self._write_xml_content(xml_answer))
|
||||
from typing import Iterable, Optional, cast
|
||||
|
||||
from radicale import config, log, types, utils
|
||||
from radicale.app import Application
|
||||
from radicale.log import logger
|
||||
|
||||
VERSION: str = utils.package_version("radicale")
|
||||
|
||||
_application_instance: Optional[Application] = None
|
||||
_application_config_path: Optional[str] = None
|
||||
_application_lock = threading.Lock()
|
||||
|
||||
|
||||
def _get_application_instance(config_path: str, wsgi_errors: types.ErrorStream
|
||||
) -> Application:
|
||||
global _application_instance, _application_config_path
|
||||
with _application_lock:
|
||||
if _application_instance is None:
|
||||
log.setup()
|
||||
with log.register_stream(wsgi_errors):
|
||||
_application_config_path = config_path
|
||||
configuration = config.load(config.parse_compound_paths(
|
||||
config.DEFAULT_CONFIG_PATH,
|
||||
config_path))
|
||||
log.set_level(cast(str, configuration.get("logging", "level")), configuration.get("logging", "backtrace_on_debug"))
|
||||
# Log configuration after logger is configured
|
||||
default_config_active = True
|
||||
for source, miss in configuration.sources():
|
||||
logger.info("%s %s", "Skipped missing/unreadable" if miss
|
||||
else "Loaded", source)
|
||||
if not miss and source != "default config":
|
||||
default_config_active = False
|
||||
if default_config_active:
|
||||
logger.warning("%s", "No config file found/readable - only default config is active")
|
||||
_application_instance = Application(configuration)
|
||||
if _application_config_path != config_path:
|
||||
raise ValueError("RADICALE_CONFIG must not change: %r != %r" %
|
||||
(config_path, _application_config_path))
|
||||
return _application_instance
|
||||
|
||||
|
||||
def application(environ: types.WSGIEnviron,
|
||||
start_response: types.WSGIStartResponse) -> Iterable[bytes]:
|
||||
"""Entry point for external WSGI servers."""
|
||||
config_path = environ.get("RADICALE_CONFIG",
|
||||
os.environ.get("RADICALE_CONFIG"))
|
||||
app = _get_application_instance(config_path, environ["wsgi.errors"])
|
||||
return app(environ, start_response)
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2011-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,254 +19,194 @@
|
|||
"""
|
||||
Radicale executable module.
|
||||
|
||||
This module can be executed from a command line with ``$python -m radicale`` or
|
||||
from a python programme with ``radicale.__main__.run()``.
|
||||
This module can be executed from a command line with ``$python -m radicale``.
|
||||
Uses the built-in WSGI server.
|
||||
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import atexit
|
||||
import contextlib
|
||||
import os
|
||||
import select
|
||||
import signal
|
||||
import socket
|
||||
import ssl
|
||||
import sys
|
||||
from wsgiref.simple_server import make_server
|
||||
from types import FrameType
|
||||
from typing import List, Optional, cast
|
||||
|
||||
from . import (VERSION, Application, RequestHandler, ThreadedHTTPServer,
|
||||
ThreadedHTTPSServer, config, log)
|
||||
from radicale import VERSION, config, log, server, storage, types
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def run():
|
||||
def run() -> None:
|
||||
"""Run Radicale as a standalone server."""
|
||||
exit_signal_numbers = [signal.SIGTERM, signal.SIGINT]
|
||||
if sys.platform == "win32":
|
||||
exit_signal_numbers.append(signal.SIGBREAK)
|
||||
else:
|
||||
exit_signal_numbers.append(signal.SIGHUP)
|
||||
exit_signal_numbers.append(signal.SIGQUIT)
|
||||
|
||||
# Raise SystemExit when signal arrives to run cleanup code
|
||||
# (like destructors, try-finish etc.), otherwise the process exits
|
||||
# without running any of them
|
||||
def exit_signal_handler(signal_number: int,
|
||||
stack_frame: Optional[FrameType]) -> None:
|
||||
sys.exit(1)
|
||||
for signal_number in exit_signal_numbers:
|
||||
signal.signal(signal_number, exit_signal_handler)
|
||||
|
||||
log.setup()
|
||||
|
||||
# Get command-line arguments
|
||||
parser = argparse.ArgumentParser(usage="radicale [OPTIONS]")
|
||||
# Configuration options are stored in dest with format "c:SECTION:OPTION"
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="radicale", usage="%(prog)s [OPTIONS]", allow_abbrev=False)
|
||||
|
||||
parser.add_argument("--version", action="version", version=VERSION)
|
||||
parser.add_argument(
|
||||
"-C", "--config", help="use a specific configuration file")
|
||||
parser.add_argument("--verify-storage", action="store_true",
|
||||
help="check the storage for errors and exit")
|
||||
parser.add_argument("-C", "--config",
|
||||
help="use specific configuration files", nargs="*")
|
||||
parser.add_argument("-D", "--debug", action="store_const", const="debug",
|
||||
dest="c:logging:level", default=argparse.SUPPRESS,
|
||||
help="print debug information")
|
||||
|
||||
groups = {}
|
||||
for section, values in config.INITIAL_CONFIG.items():
|
||||
group = parser.add_argument_group(section)
|
||||
groups[group] = []
|
||||
for option, data in values.items():
|
||||
for section, section_data in config.DEFAULT_CONFIG_SCHEMA.items():
|
||||
if section.startswith("_"):
|
||||
continue
|
||||
assert ":" not in section # check field separator
|
||||
assert "-" not in section and "_" not in section # not implemented
|
||||
group_description = None
|
||||
if section_data.get("_allow_extra"):
|
||||
group_description = "additional options allowed"
|
||||
if section == "headers":
|
||||
group_description += " (e.g. --headers-Pragma=no-cache)"
|
||||
elif "type" in section_data:
|
||||
group_description = "backend specific options omitted"
|
||||
group = parser.add_argument_group(section, group_description)
|
||||
for option, data in section_data.items():
|
||||
if option.startswith("_"):
|
||||
continue
|
||||
kwargs = data.copy()
|
||||
long_name = "--{0}-{1}".format(
|
||||
section, option.replace("_", "-"))
|
||||
args = kwargs.pop("aliases", [])
|
||||
long_name = "--%s-%s" % (section, option.replace("_", "-"))
|
||||
args: List[str] = list(kwargs.pop("aliases", ()))
|
||||
args.append(long_name)
|
||||
kwargs["dest"] = "{0}_{1}".format(section, option)
|
||||
groups[group].append(kwargs["dest"])
|
||||
kwargs["dest"] = "c:%s:%s" % (section, option)
|
||||
kwargs["metavar"] = "VALUE"
|
||||
kwargs["default"] = argparse.SUPPRESS
|
||||
del kwargs["value"]
|
||||
if "internal" in kwargs:
|
||||
with contextlib.suppress(KeyError):
|
||||
del kwargs["internal"]
|
||||
|
||||
if kwargs["type"] == bool:
|
||||
del kwargs["type"]
|
||||
kwargs["action"] = "store_const"
|
||||
kwargs["const"] = "True"
|
||||
opposite_args = kwargs.pop("opposite", [])
|
||||
opposite_args.append("--no{0}".format(long_name[1:]))
|
||||
group.add_argument(*args, **kwargs)
|
||||
|
||||
kwargs["const"] = "False"
|
||||
kwargs["help"] = "do not {0} (opposite of {1})".format(
|
||||
opposite_args = list(kwargs.pop("opposite_aliases", ()))
|
||||
opposite_args.append("--no%s" % long_name[1:])
|
||||
group.add_argument(*args, nargs="?", const="True", **kwargs)
|
||||
# Opposite argument
|
||||
kwargs["help"] = "do not %s (opposite of %s)" % (
|
||||
kwargs["help"], long_name)
|
||||
group.add_argument(*opposite_args, **kwargs)
|
||||
group.add_argument(*opposite_args, action="store_const",
|
||||
const="False", **kwargs)
|
||||
else:
|
||||
del kwargs["type"]
|
||||
group.add_argument(*args, **kwargs)
|
||||
|
||||
args = parser.parse_args()
|
||||
if args.config is not None:
|
||||
config_paths = [args.config] if args.config else []
|
||||
ignore_missing_paths = False
|
||||
else:
|
||||
config_paths = ["/etc/radicale/config",
|
||||
os.path.expanduser("~/.config/radicale/config")]
|
||||
if "RADICALE_CONFIG" in os.environ:
|
||||
config_paths.append(os.environ["RADICALE_CONFIG"])
|
||||
ignore_missing_paths = True
|
||||
try:
|
||||
configuration = config.load(config_paths,
|
||||
ignore_missing_paths=ignore_missing_paths)
|
||||
except Exception as e:
|
||||
print("ERROR: Invalid configuration: %s" % e, file=sys.stderr)
|
||||
if args.logging_debug:
|
||||
raise
|
||||
exit(1)
|
||||
args_ns, remaining_args = parser.parse_known_args()
|
||||
unrecognized_args = []
|
||||
while remaining_args:
|
||||
arg = remaining_args.pop(0)
|
||||
for section, data in config.DEFAULT_CONFIG_SCHEMA.items():
|
||||
if "type" not in data and not data.get("_allow_extra"):
|
||||
continue
|
||||
prefix = "--%s-" % section
|
||||
if arg.startswith(prefix):
|
||||
arg = arg[len(prefix):]
|
||||
break
|
||||
else:
|
||||
unrecognized_args.append(arg)
|
||||
continue
|
||||
value = ""
|
||||
if "=" in arg:
|
||||
arg, value = arg.split("=", maxsplit=1)
|
||||
elif remaining_args and not remaining_args[0].startswith("-"):
|
||||
value = remaining_args.pop(0)
|
||||
option = arg
|
||||
if not data.get("_allow_extra"): # preserve dash in HTTP header names
|
||||
option = option.replace("-", "_")
|
||||
vars(args_ns)["c:%s:%s" % (section, option)] = value
|
||||
if unrecognized_args:
|
||||
parser.error("unrecognized arguments: %s" %
|
||||
" ".join(unrecognized_args))
|
||||
|
||||
# Preliminary configure logging
|
||||
with contextlib.suppress(ValueError):
|
||||
log.set_level(config.DEFAULT_CONFIG_SCHEMA["logging"]["level"]["type"](
|
||||
vars(args_ns).get("c:logging:level", "")), True)
|
||||
|
||||
# Update Radicale configuration according to arguments
|
||||
for group, actions in groups.items():
|
||||
section = group.title
|
||||
for action in actions:
|
||||
value = getattr(args, action)
|
||||
if value is not None:
|
||||
configuration.set(section, action.split('_', 1)[1], value)
|
||||
|
||||
# Start logging
|
||||
filename = os.path.expanduser(configuration.get("logging", "config"))
|
||||
debug = configuration.getboolean("logging", "debug")
|
||||
try:
|
||||
logger = log.start("radicale", filename, debug)
|
||||
except Exception as e:
|
||||
print("ERROR: Failed to start logger: %s" % e, file=sys.stderr)
|
||||
if debug:
|
||||
raise
|
||||
exit(1)
|
||||
arguments_config: types.MUTABLE_CONFIG = {}
|
||||
for key, value in vars(args_ns).items():
|
||||
if key.startswith("c:"):
|
||||
_, section, option = key.split(":", maxsplit=2)
|
||||
arguments_config[section] = arguments_config.get(section, {})
|
||||
arguments_config[section][option] = value
|
||||
|
||||
try:
|
||||
serve(configuration, logger)
|
||||
configuration = config.load(config.parse_compound_paths(
|
||||
config.DEFAULT_CONFIG_PATH,
|
||||
os.environ.get("RADICALE_CONFIG"),
|
||||
os.pathsep.join(args_ns.config) if args_ns.config is not None
|
||||
else None))
|
||||
if arguments_config:
|
||||
configuration.update(arguments_config, "command line arguments")
|
||||
except Exception as e:
|
||||
logger.error("An exception occurred during server startup: %s", e,
|
||||
exc_info=True)
|
||||
exit(1)
|
||||
logger.critical("Invalid configuration: %s", e, exc_info=True)
|
||||
sys.exit(1)
|
||||
|
||||
# Configure logging
|
||||
log.set_level(cast(str, configuration.get("logging", "level")), configuration.get("logging", "backtrace_on_debug"))
|
||||
|
||||
def daemonize(configuration, logger):
|
||||
"""Fork and decouple if Radicale is configured as daemon."""
|
||||
# Check and create PID file in a race-free manner
|
||||
if configuration.get("server", "pid"):
|
||||
# Log configuration after logger is configured
|
||||
default_config_active = True
|
||||
for source, miss in configuration.sources():
|
||||
logger.info("%s %s", "Skipped missing/unreadable" if miss else "Loaded", source)
|
||||
if not miss and source != "default config":
|
||||
default_config_active = False
|
||||
|
||||
if default_config_active:
|
||||
logger.warning("%s", "No config file found/readable - only default config is active")
|
||||
|
||||
if args_ns.verify_storage:
|
||||
logger.info("Verifying storage")
|
||||
try:
|
||||
pid_path = os.path.abspath(os.path.expanduser(
|
||||
configuration.get("server", "pid")))
|
||||
pid_fd = os.open(
|
||||
pid_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
|
||||
except OSError as e:
|
||||
raise OSError("PID file exists: %r" %
|
||||
configuration.get("server", "pid")) from e
|
||||
pid = os.fork()
|
||||
if pid:
|
||||
# Write PID
|
||||
if configuration.get("server", "pid"):
|
||||
with os.fdopen(pid_fd, "w") as pid_file:
|
||||
pid_file.write(str(pid))
|
||||
sys.exit()
|
||||
if configuration.get("server", "pid"):
|
||||
os.close(pid_fd)
|
||||
storage_ = storage.load(configuration)
|
||||
with storage_.acquire_lock("r"):
|
||||
if not storage_.verify():
|
||||
logger.critical("Storage verification failed")
|
||||
sys.exit(1)
|
||||
except Exception as e:
|
||||
logger.critical("An exception occurred during storage "
|
||||
"verification: %s", e, exc_info=True)
|
||||
sys.exit(1)
|
||||
return
|
||||
|
||||
# Register exit function
|
||||
def cleanup():
|
||||
"""Remove the PID files."""
|
||||
logger.debug("Cleaning up")
|
||||
# Remove PID file
|
||||
os.unlink(pid_path)
|
||||
atexit.register(cleanup)
|
||||
# Decouple environment
|
||||
os.chdir("/")
|
||||
os.setsid()
|
||||
with open(os.devnull, "r") as null_in:
|
||||
os.dup2(null_in.fileno(), sys.stdin.fileno())
|
||||
with open(os.devnull, "w") as null_out:
|
||||
os.dup2(null_out.fileno(), sys.stdout.fileno())
|
||||
os.dup2(null_out.fileno(), sys.stderr.fileno())
|
||||
# Create a socket pair to notify the server of program shutdown
|
||||
shutdown_socket, shutdown_socket_out = socket.socketpair()
|
||||
|
||||
# Shutdown server when signal arrives
|
||||
def shutdown_signal_handler(signal_number: int,
|
||||
stack_frame: Optional[FrameType]) -> None:
|
||||
shutdown_socket.close()
|
||||
for signal_number in exit_signal_numbers:
|
||||
signal.signal(signal_number, shutdown_signal_handler)
|
||||
|
||||
def serve(configuration, logger):
|
||||
"""Serve radicale from configuration."""
|
||||
logger.info("Starting Radicale")
|
||||
|
||||
# Create collection servers
|
||||
servers = {}
|
||||
if configuration.getboolean("server", "ssl"):
|
||||
server_class = ThreadedHTTPSServer
|
||||
server_class.certificate = configuration.get("server", "certificate")
|
||||
server_class.key = configuration.get("server", "key")
|
||||
server_class.certificate_authority = configuration.get(
|
||||
"server", "certificate_authority")
|
||||
server_class.ciphers = configuration.get("server", "ciphers")
|
||||
server_class.protocol = getattr(
|
||||
ssl, configuration.get("server", "protocol"), ssl.PROTOCOL_SSLv23)
|
||||
# Test if the SSL files can be read
|
||||
for name in ["certificate", "key"] + (
|
||||
["certificate_authority"]
|
||||
if server_class.certificate_authority else []):
|
||||
filename = getattr(server_class, name)
|
||||
try:
|
||||
open(filename, "r").close()
|
||||
except OSError as e:
|
||||
raise RuntimeError("Failed to read SSL %s %r: %s" %
|
||||
(name, filename, e)) from e
|
||||
else:
|
||||
server_class = ThreadedHTTPServer
|
||||
server_class.client_timeout = configuration.getint("server", "timeout")
|
||||
server_class.max_connections = configuration.getint(
|
||||
"server", "max_connections")
|
||||
server_class.logger = logger
|
||||
|
||||
RequestHandler.logger = logger
|
||||
if not configuration.getboolean("server", "dns_lookup"):
|
||||
RequestHandler.address_string = lambda self: self.client_address[0]
|
||||
|
||||
shutdown_program = False
|
||||
|
||||
for host in configuration.get("server", "hosts").split(","):
|
||||
try:
|
||||
address, port = host.strip().rsplit(":", 1)
|
||||
address, port = address.strip("[] "), int(port)
|
||||
except ValueError as e:
|
||||
raise RuntimeError(
|
||||
"Failed to parse address %r: %s" % (host, e)) from e
|
||||
application = Application(configuration, logger)
|
||||
try:
|
||||
server = make_server(
|
||||
address, port, application, server_class, RequestHandler)
|
||||
except OSError as e:
|
||||
raise RuntimeError(
|
||||
"Failed to start server %r: %s" % (host, e)) from e
|
||||
servers[server.socket] = server
|
||||
logger.info("Listening to %r on port %d%s",
|
||||
server.server_name, server.server_port, " using SSL"
|
||||
if configuration.getboolean("server", "ssl") else "")
|
||||
|
||||
# Create a socket pair to notify the select syscall of program shutdown
|
||||
# This is not available in python < 3.5 on Windows
|
||||
if hasattr(socket, "socketpair"):
|
||||
shutdown_program_socket_in, shutdown_program_socket_out = (
|
||||
socket.socketpair())
|
||||
else:
|
||||
shutdown_program_socket_in, shutdown_program_socket_out = None, None
|
||||
|
||||
# SIGTERM and SIGINT (aka KeyboardInterrupt) should just mark this for
|
||||
# shutdown
|
||||
def shutdown(*args):
|
||||
nonlocal shutdown_program
|
||||
if shutdown_program:
|
||||
# Ignore following signals
|
||||
return
|
||||
logger.info("Stopping Radicale")
|
||||
shutdown_program = True
|
||||
if shutdown_program_socket_in:
|
||||
shutdown_program_socket_in.sendall(b"goodbye")
|
||||
signal.signal(signal.SIGTERM, shutdown)
|
||||
signal.signal(signal.SIGINT, shutdown)
|
||||
|
||||
# Main loop: wait for requests on any of the servers or program shutdown
|
||||
sockets = list(servers.keys())
|
||||
if shutdown_program_socket_out:
|
||||
# Use socket pair to get notified of program shutdown
|
||||
sockets.append(shutdown_program_socket_out)
|
||||
select_timeout = None
|
||||
if not shutdown_program_socket_out or os.name == "nt":
|
||||
# Fallback to busy waiting. (select.select blocks SIGINT on Windows.)
|
||||
select_timeout = 1.0
|
||||
if configuration.getboolean("server", "daemon"):
|
||||
daemonize(configuration, logger)
|
||||
logger.info("Radicale server ready")
|
||||
while not shutdown_program:
|
||||
try:
|
||||
rlist, _, xlist = select.select(
|
||||
sockets, [], sockets, select_timeout)
|
||||
except (KeyboardInterrupt, select.error):
|
||||
# SIGINT is handled by signal handler above
|
||||
rlist, xlist = [], []
|
||||
if xlist:
|
||||
raise RuntimeError("unhandled socket error")
|
||||
if rlist:
|
||||
server = servers.get(rlist[0])
|
||||
if server:
|
||||
server.handle_request()
|
||||
try:
|
||||
server.serve(configuration, shutdown_socket_out)
|
||||
except Exception as e:
|
||||
logger.critical("An exception occurred during server startup: %s", e,
|
||||
exc_info=False)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
370
radicale/app/__init__.py
Normal file
370
radicale/app/__init__.py
Normal file
|
@ -0,0 +1,370 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Radicale WSGI application.
|
||||
|
||||
Can be used with an external WSGI server (see ``radicale.application()``) or
|
||||
the built-in server (see ``radicale.server`` module).
|
||||
|
||||
"""
|
||||
|
||||
import base64
|
||||
import datetime
|
||||
import pprint
|
||||
import random
|
||||
import time
|
||||
import zlib
|
||||
from http import client
|
||||
from typing import Iterable, List, Mapping, Tuple, Union
|
||||
|
||||
from radicale import config, httputils, log, pathutils, types
|
||||
from radicale.app.base import ApplicationBase
|
||||
from radicale.app.delete import ApplicationPartDelete
|
||||
from radicale.app.get import ApplicationPartGet
|
||||
from radicale.app.head import ApplicationPartHead
|
||||
from radicale.app.mkcalendar import ApplicationPartMkcalendar
|
||||
from radicale.app.mkcol import ApplicationPartMkcol
|
||||
from radicale.app.move import ApplicationPartMove
|
||||
from radicale.app.options import ApplicationPartOptions
|
||||
from radicale.app.post import ApplicationPartPost
|
||||
from radicale.app.propfind import ApplicationPartPropfind
|
||||
from radicale.app.proppatch import ApplicationPartProppatch
|
||||
from radicale.app.put import ApplicationPartPut
|
||||
from radicale.app.report import ApplicationPartReport
|
||||
from radicale.log import logger
|
||||
|
||||
# Combination of types.WSGIStartResponse and WSGI application return value
|
||||
_IntermediateResponse = Tuple[str, List[Tuple[str, str]], Iterable[bytes]]
|
||||
|
||||
|
||||
class Application(ApplicationPartDelete, ApplicationPartHead,
|
||||
ApplicationPartGet, ApplicationPartMkcalendar,
|
||||
ApplicationPartMkcol, ApplicationPartMove,
|
||||
ApplicationPartOptions, ApplicationPartPropfind,
|
||||
ApplicationPartProppatch, ApplicationPartPost,
|
||||
ApplicationPartPut, ApplicationPartReport, ApplicationBase):
|
||||
"""WSGI application."""
|
||||
|
||||
_mask_passwords: bool
|
||||
_auth_delay: float
|
||||
_internal_server: bool
|
||||
_max_content_length: int
|
||||
_auth_realm: str
|
||||
_script_name: str
|
||||
_extra_headers: Mapping[str, str]
|
||||
_permit_delete_collection: bool
|
||||
_permit_overwrite_collection: bool
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
"""Initialize Application.
|
||||
|
||||
``configuration`` see ``radicale.config`` module.
|
||||
The ``configuration`` must not change during the lifetime of
|
||||
this object, it is kept as an internal reference.
|
||||
|
||||
"""
|
||||
super().__init__(configuration)
|
||||
self._mask_passwords = configuration.get("logging", "mask_passwords")
|
||||
self._bad_put_request_content = configuration.get("logging", "bad_put_request_content")
|
||||
self._request_header_on_debug = configuration.get("logging", "request_header_on_debug")
|
||||
self._response_content_on_debug = configuration.get("logging", "response_content_on_debug")
|
||||
self._auth_delay = configuration.get("auth", "delay")
|
||||
self._internal_server = configuration.get("server", "_internal_server")
|
||||
self._script_name = configuration.get("server", "script_name")
|
||||
if self._script_name:
|
||||
if self._script_name[0] != "/":
|
||||
logger.error("server.script_name must start with '/': %r", self._script_name)
|
||||
raise RuntimeError("server.script_name option has to start with '/'")
|
||||
else:
|
||||
if self._script_name.endswith("/"):
|
||||
logger.error("server.script_name must not end with '/': %r", self._script_name)
|
||||
raise RuntimeError("server.script_name option must not end with '/'")
|
||||
else:
|
||||
logger.info("Provided script name to strip from URI if called by reverse proxy: %r", self._script_name)
|
||||
else:
|
||||
logger.info("Default script name to strip from URI if called by reverse proxy is taken from HTTP_X_SCRIPT_NAME or SCRIPT_NAME")
|
||||
self._max_content_length = configuration.get(
|
||||
"server", "max_content_length")
|
||||
self._auth_realm = configuration.get("auth", "realm")
|
||||
self._permit_delete_collection = configuration.get("rights", "permit_delete_collection")
|
||||
logger.info("permit delete of collection: %s", self._permit_delete_collection)
|
||||
self._permit_overwrite_collection = configuration.get("rights", "permit_overwrite_collection")
|
||||
logger.info("permit overwrite of collection: %s", self._permit_overwrite_collection)
|
||||
self._extra_headers = dict()
|
||||
for key in self.configuration.options("headers"):
|
||||
self._extra_headers[key] = configuration.get("headers", key)
|
||||
|
||||
def _scrub_headers(self, environ: types.WSGIEnviron) -> types.WSGIEnviron:
|
||||
"""Mask passwords and cookies."""
|
||||
headers = dict(environ)
|
||||
if (self._mask_passwords and
|
||||
headers.get("HTTP_AUTHORIZATION", "").startswith("Basic")):
|
||||
headers["HTTP_AUTHORIZATION"] = "Basic **masked**"
|
||||
if headers.get("HTTP_COOKIE"):
|
||||
headers["HTTP_COOKIE"] = "**masked**"
|
||||
return headers
|
||||
|
||||
def __call__(self, environ: types.WSGIEnviron, start_response:
|
||||
types.WSGIStartResponse) -> Iterable[bytes]:
|
||||
with log.register_stream(environ["wsgi.errors"]):
|
||||
try:
|
||||
status_text, headers, answers = self._handle_request(environ)
|
||||
except Exception as e:
|
||||
logger.error("An exception occurred during %s request on %r: "
|
||||
"%s", environ.get("REQUEST_METHOD", "unknown"),
|
||||
environ.get("PATH_INFO", ""), e, exc_info=True)
|
||||
# Make minimal response
|
||||
status, raw_headers, raw_answer = (
|
||||
httputils.INTERNAL_SERVER_ERROR)
|
||||
assert isinstance(raw_answer, str)
|
||||
answer = raw_answer.encode("ascii")
|
||||
status_text = "%d %s" % (
|
||||
status, client.responses.get(status, "Unknown"))
|
||||
headers = [*raw_headers, ("Content-Length", str(len(answer)))]
|
||||
answers = [answer]
|
||||
start_response(status_text, headers)
|
||||
if environ.get("REQUEST_METHOD") == "HEAD":
|
||||
return []
|
||||
return answers
|
||||
|
||||
def _handle_request(self, environ: types.WSGIEnviron
|
||||
) -> _IntermediateResponse:
|
||||
time_begin = datetime.datetime.now()
|
||||
request_method = environ["REQUEST_METHOD"].upper()
|
||||
unsafe_path = environ.get("PATH_INFO", "")
|
||||
https = environ.get("HTTPS", "")
|
||||
|
||||
"""Manage a request."""
|
||||
def response(status: int, headers: types.WSGIResponseHeaders,
|
||||
answer: Union[None, str, bytes]) -> _IntermediateResponse:
|
||||
"""Helper to create response from internal types.WSGIResponse"""
|
||||
headers = dict(headers)
|
||||
# Set content length
|
||||
answers = []
|
||||
if answer is not None:
|
||||
if isinstance(answer, str):
|
||||
if self._response_content_on_debug:
|
||||
logger.debug("Response content:\n%s", answer)
|
||||
else:
|
||||
logger.debug("Response content: suppressed by config/option [logging] response_content_on_debug")
|
||||
headers["Content-Type"] += "; charset=%s" % self._encoding
|
||||
answer = answer.encode(self._encoding)
|
||||
accept_encoding = [
|
||||
encoding.strip() for encoding in
|
||||
environ.get("HTTP_ACCEPT_ENCODING", "").split(",")
|
||||
if encoding.strip()]
|
||||
|
||||
if "gzip" in accept_encoding:
|
||||
zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
|
||||
answer = zcomp.compress(answer) + zcomp.flush()
|
||||
headers["Content-Encoding"] = "gzip"
|
||||
|
||||
headers["Content-Length"] = str(len(answer))
|
||||
answers.append(answer)
|
||||
|
||||
# Add extra headers set in configuration
|
||||
headers.update(self._extra_headers)
|
||||
|
||||
# Start response
|
||||
time_end = datetime.datetime.now()
|
||||
status_text = "%d %s" % (
|
||||
status, client.responses.get(status, "Unknown"))
|
||||
logger.info("%s response status for %r%s in %.3f seconds: %s",
|
||||
request_method, unsafe_path, depthinfo,
|
||||
(time_end - time_begin).total_seconds(), status_text)
|
||||
# Return response content
|
||||
return status_text, list(headers.items()), answers
|
||||
|
||||
reverse_proxy = False
|
||||
remote_host = "unknown"
|
||||
if environ.get("REMOTE_HOST"):
|
||||
remote_host = repr(environ["REMOTE_HOST"])
|
||||
elif environ.get("REMOTE_ADDR"):
|
||||
remote_host = environ["REMOTE_ADDR"]
|
||||
if environ.get("HTTP_X_FORWARDED_FOR"):
|
||||
reverse_proxy = True
|
||||
remote_host = "%s (forwarded for %r)" % (
|
||||
remote_host, environ["HTTP_X_FORWARDED_FOR"])
|
||||
if environ.get("HTTP_X_FORWARDED_HOST") or environ.get("HTTP_X_FORWARDED_PROTO") or environ.get("HTTP_X_FORWARDED_SERVER"):
|
||||
reverse_proxy = True
|
||||
remote_useragent = ""
|
||||
if environ.get("HTTP_USER_AGENT"):
|
||||
remote_useragent = " using %r" % environ["HTTP_USER_AGENT"]
|
||||
depthinfo = ""
|
||||
if environ.get("HTTP_DEPTH"):
|
||||
depthinfo = " with depth %r" % environ["HTTP_DEPTH"]
|
||||
if https:
|
||||
https_info = " " + environ.get("SSL_PROTOCOL", "") + " " + environ.get("SSL_CIPHER", "")
|
||||
else:
|
||||
https_info = ""
|
||||
logger.info("%s request for %r%s received from %s%s%s",
|
||||
request_method, unsafe_path, depthinfo,
|
||||
remote_host, remote_useragent, https_info)
|
||||
if self._request_header_on_debug:
|
||||
logger.debug("Request header:\n%s",
|
||||
pprint.pformat(self._scrub_headers(environ)))
|
||||
else:
|
||||
logger.debug("Request header: suppressed by config/option [logging] request_header_on_debug")
|
||||
|
||||
# SCRIPT_NAME is already removed from PATH_INFO, according to the
|
||||
# WSGI specification.
|
||||
# Reverse proxies can overwrite SCRIPT_NAME with X-SCRIPT-NAME header
|
||||
if self._script_name and (reverse_proxy is True):
|
||||
base_prefix_src = "config"
|
||||
base_prefix = self._script_name
|
||||
else:
|
||||
base_prefix_src = ("HTTP_X_SCRIPT_NAME" if "HTTP_X_SCRIPT_NAME" in
|
||||
environ else "SCRIPT_NAME")
|
||||
base_prefix = environ.get(base_prefix_src, "")
|
||||
if base_prefix and base_prefix[0] != "/":
|
||||
logger.error("Base prefix (from %s) must start with '/': %r",
|
||||
base_prefix_src, base_prefix)
|
||||
if base_prefix_src == "HTTP_X_SCRIPT_NAME":
|
||||
return response(*httputils.BAD_REQUEST)
|
||||
return response(*httputils.INTERNAL_SERVER_ERROR)
|
||||
if base_prefix.endswith("/"):
|
||||
logger.warning("Base prefix (from %s) must not end with '/': %r",
|
||||
base_prefix_src, base_prefix)
|
||||
base_prefix = base_prefix.rstrip("/")
|
||||
if base_prefix:
|
||||
logger.debug("Base prefix (from %s): %r", base_prefix_src, base_prefix)
|
||||
|
||||
# Sanitize request URI (a WSGI server indicates with an empty path,
|
||||
# that the URL targets the application root without a trailing slash)
|
||||
path = pathutils.sanitize_path(unsafe_path)
|
||||
logger.debug("Sanitized path: %r", path)
|
||||
if (reverse_proxy is True) and (len(base_prefix) > 0):
|
||||
if path.startswith(base_prefix):
|
||||
path_new = path.removeprefix(base_prefix)
|
||||
logger.debug("Called by reverse proxy, remove base prefix %r from path: %r => %r", base_prefix, path, path_new)
|
||||
path = path_new
|
||||
else:
|
||||
logger.warning("Called by reverse proxy, cannot removed base prefix %r from path: %r as not matching", base_prefix, path)
|
||||
|
||||
# Get function corresponding to method
|
||||
function = getattr(self, "do_%s" % request_method, None)
|
||||
if not function:
|
||||
return response(*httputils.METHOD_NOT_ALLOWED)
|
||||
|
||||
# Redirect all "…/.well-known/{caldav,carddav}" paths to "/".
|
||||
# This shouldn't be necessary but some clients like TbSync require it.
|
||||
# Status must be MOVED PERMANENTLY using FOUND causes problems
|
||||
if (path.rstrip("/").endswith("/.well-known/caldav") or
|
||||
path.rstrip("/").endswith("/.well-known/carddav")):
|
||||
return response(*httputils.redirect(
|
||||
base_prefix + "/", client.MOVED_PERMANENTLY))
|
||||
# Return NOT FOUND for all other paths containing ".well-known"
|
||||
if path.endswith("/.well-known") or "/.well-known/" in path:
|
||||
return response(*httputils.NOT_FOUND)
|
||||
|
||||
# Ask authentication backend to check rights
|
||||
login = password = ""
|
||||
external_login = self._auth.get_external_login(environ)
|
||||
authorization = environ.get("HTTP_AUTHORIZATION", "")
|
||||
if external_login:
|
||||
login, password = external_login
|
||||
login, password = login or "", password or ""
|
||||
elif authorization.startswith("Basic"):
|
||||
authorization = authorization[len("Basic"):].strip()
|
||||
login, password = httputils.decode_request(
|
||||
self.configuration, environ, base64.b64decode(
|
||||
authorization.encode("ascii"))).split(":", 1)
|
||||
|
||||
(user, info) = self._auth.login(login, password) or ("", "") if login else ("", "")
|
||||
if self.configuration.get("auth", "type") == "ldap":
|
||||
try:
|
||||
logger.debug("Groups %r", ",".join(self._auth._ldap_groups))
|
||||
self._rights._user_groups = self._auth._ldap_groups
|
||||
except AttributeError:
|
||||
pass
|
||||
if user and login == user:
|
||||
logger.info("Successful login: %r (%s)", user, info)
|
||||
elif user:
|
||||
logger.info("Successful login: %r -> %r (%s)", login, user, info)
|
||||
elif login:
|
||||
logger.warning("Failed login attempt from %s: %r (%s)",
|
||||
remote_host, login, info)
|
||||
# Random delay to avoid timing oracles and bruteforce attacks
|
||||
if self._auth_delay > 0:
|
||||
random_delay = self._auth_delay * (0.5 + random.random())
|
||||
logger.debug("Failed login, sleeping random: %.3f sec", random_delay)
|
||||
time.sleep(random_delay)
|
||||
|
||||
if user and not pathutils.is_safe_path_component(user):
|
||||
# Prevent usernames like "user/calendar.ics"
|
||||
logger.info("Refused unsafe username: %r", user)
|
||||
user = ""
|
||||
|
||||
# Create principal collection
|
||||
if user:
|
||||
principal_path = "/%s/" % user
|
||||
with self._storage.acquire_lock("r", user):
|
||||
principal = next(iter(self._storage.discover(
|
||||
principal_path, depth="1")), None)
|
||||
if not principal:
|
||||
if "W" in self._rights.authorization(user, principal_path):
|
||||
with self._storage.acquire_lock("w", user):
|
||||
try:
|
||||
new_coll = self._storage.create_collection(principal_path)
|
||||
if new_coll:
|
||||
jsn_coll = self.configuration.get("storage", "predefined_collections")
|
||||
for (name_coll, props) in jsn_coll.items():
|
||||
try:
|
||||
self._storage.create_collection(principal_path + name_coll, props=props)
|
||||
except ValueError as e:
|
||||
logger.warning("Failed to create predefined collection %r: %s", name_coll, e)
|
||||
except ValueError as e:
|
||||
logger.warning("Failed to create principal "
|
||||
"collection %r: %s", user, e)
|
||||
user = ""
|
||||
else:
|
||||
logger.warning("Access to principal path %r denied by "
|
||||
"rights backend", principal_path)
|
||||
|
||||
if self._internal_server:
|
||||
# Verify content length
|
||||
content_length = int(environ.get("CONTENT_LENGTH") or 0)
|
||||
if content_length:
|
||||
if (self._max_content_length > 0 and
|
||||
content_length > self._max_content_length):
|
||||
logger.info("Request body too large: %d", content_length)
|
||||
return response(*httputils.REQUEST_ENTITY_TOO_LARGE)
|
||||
|
||||
if not login or user:
|
||||
status, headers, answer = function(
|
||||
environ, base_prefix, path, user)
|
||||
if (status, headers, answer) == httputils.NOT_ALLOWED:
|
||||
logger.info("Access to %r denied for %s", path,
|
||||
repr(user) if user else "anonymous user")
|
||||
else:
|
||||
status, headers, answer = httputils.NOT_ALLOWED
|
||||
|
||||
if ((status, headers, answer) == httputils.NOT_ALLOWED and not user and
|
||||
not external_login):
|
||||
# Unknown or unauthorized user
|
||||
logger.debug("Asking client for authentication")
|
||||
status = client.UNAUTHORIZED
|
||||
headers = dict(headers)
|
||||
headers.update({
|
||||
"WWW-Authenticate":
|
||||
"Basic realm=\"%s\"" % self._auth_realm})
|
||||
|
||||
return response(status, headers, answer)
|
145
radicale/app/base.py
Normal file
145
radicale/app/base.py
Normal file
|
@ -0,0 +1,145 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import io
|
||||
import logging
|
||||
import posixpath
|
||||
import sys
|
||||
import xml.etree.ElementTree as ET
|
||||
from typing import Optional
|
||||
|
||||
from radicale import (auth, config, hook, httputils, pathutils, rights,
|
||||
storage, types, web, xmlutils)
|
||||
from radicale.log import logger
|
||||
|
||||
# HACK: https://github.com/tiran/defusedxml/issues/54
|
||||
import defusedxml.ElementTree as DefusedET # isort:skip
|
||||
sys.modules["xml.etree"].ElementTree = ET # type:ignore[attr-defined]
|
||||
|
||||
|
||||
class ApplicationBase:
|
||||
|
||||
configuration: config.Configuration
|
||||
_auth: auth.BaseAuth
|
||||
_storage: storage.BaseStorage
|
||||
_rights: rights.BaseRights
|
||||
_web: web.BaseWeb
|
||||
_encoding: str
|
||||
_permit_delete_collection: bool
|
||||
_permit_overwrite_collection: bool
|
||||
_hook: hook.BaseHook
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
self.configuration = configuration
|
||||
self._auth = auth.load(configuration)
|
||||
self._storage = storage.load(configuration)
|
||||
self._rights = rights.load(configuration)
|
||||
self._web = web.load(configuration)
|
||||
self._encoding = configuration.get("encoding", "request")
|
||||
self._log_bad_put_request_content = configuration.get("logging", "bad_put_request_content")
|
||||
self._response_content_on_debug = configuration.get("logging", "response_content_on_debug")
|
||||
self._request_content_on_debug = configuration.get("logging", "request_content_on_debug")
|
||||
self._hook = hook.load(configuration)
|
||||
|
||||
def _read_xml_request_body(self, environ: types.WSGIEnviron
|
||||
) -> Optional[ET.Element]:
|
||||
content = httputils.decode_request(
|
||||
self.configuration, environ,
|
||||
httputils.read_raw_request_body(self.configuration, environ))
|
||||
if not content:
|
||||
return None
|
||||
try:
|
||||
xml_content = DefusedET.fromstring(content)
|
||||
except ET.ParseError as e:
|
||||
logger.debug("Request content (Invalid XML):\n%s", content)
|
||||
raise RuntimeError("Failed to parse XML: %s" % e) from e
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
if self._request_content_on_debug:
|
||||
logger.debug("Request content (XML):\n%s",
|
||||
xmlutils.pretty_xml(xml_content))
|
||||
else:
|
||||
logger.debug("Request content (XML): suppressed by config/option [logging] request_content_on_debug")
|
||||
return xml_content
|
||||
|
||||
def _xml_response(self, xml_content: ET.Element) -> bytes:
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
if self._response_content_on_debug:
|
||||
logger.debug("Response content (XML):\n%s",
|
||||
xmlutils.pretty_xml(xml_content))
|
||||
else:
|
||||
logger.debug("Response content (XML): suppressed by config/option [logging] response_content_on_debug")
|
||||
f = io.BytesIO()
|
||||
ET.ElementTree(xml_content).write(f, encoding=self._encoding,
|
||||
xml_declaration=True)
|
||||
return f.getvalue()
|
||||
|
||||
def _webdav_error_response(self, status: int, human_tag: str
|
||||
) -> types.WSGIResponse:
|
||||
"""Generate XML error response."""
|
||||
headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
|
||||
content = self._xml_response(xmlutils.webdav_error(human_tag))
|
||||
return status, headers, content
|
||||
|
||||
|
||||
class Access:
|
||||
"""Helper class to check access rights of an item"""
|
||||
|
||||
user: str
|
||||
path: str
|
||||
parent_path: str
|
||||
permissions: str
|
||||
_rights: rights.BaseRights
|
||||
_parent_permissions: Optional[str]
|
||||
|
||||
def __init__(self, rights: rights.BaseRights, user: str, path: str
|
||||
) -> None:
|
||||
self._rights = rights
|
||||
self.user = user
|
||||
self.path = path
|
||||
self.parent_path = pathutils.unstrip_path(
|
||||
posixpath.dirname(pathutils.strip_path(path)), True)
|
||||
self.permissions = self._rights.authorization(self.user, self.path)
|
||||
self._parent_permissions = None
|
||||
|
||||
@property
|
||||
def parent_permissions(self) -> str:
|
||||
if self.path == self.parent_path:
|
||||
return self.permissions
|
||||
if self._parent_permissions is None:
|
||||
self._parent_permissions = self._rights.authorization(
|
||||
self.user, self.parent_path)
|
||||
return self._parent_permissions
|
||||
|
||||
def check(self, permission: str,
|
||||
item: Optional[types.CollectionOrItem] = None) -> bool:
|
||||
if permission not in "rwdDoO":
|
||||
raise ValueError("Invalid permission argument: %r" % permission)
|
||||
if not item:
|
||||
permissions = permission + permission.upper()
|
||||
parent_permissions = permission
|
||||
elif isinstance(item, storage.BaseCollection):
|
||||
if item.tag:
|
||||
permissions = permission
|
||||
else:
|
||||
permissions = permission.upper()
|
||||
parent_permissions = ""
|
||||
else:
|
||||
permissions = ""
|
||||
parent_permissions = permission
|
||||
return bool(rights.intersect(self.permissions, permissions) or (
|
||||
self.path != self.parent_path and
|
||||
rights.intersect(self.parent_permissions, parent_permissions)))
|
107
radicale/app/delete.py
Normal file
107
radicale/app/delete.py
Normal file
|
@ -0,0 +1,107 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import xml.etree.ElementTree as ET
|
||||
from http import client
|
||||
from typing import Optional
|
||||
|
||||
from radicale import httputils, storage, types, xmlutils
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.hook import HookNotificationItem, HookNotificationItemTypes
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def xml_delete(base_prefix: str, path: str, collection: storage.BaseCollection,
|
||||
item_href: Optional[str] = None) -> ET.Element:
|
||||
"""Read and answer DELETE requests.
|
||||
|
||||
Read rfc4918-9.6 for info.
|
||||
|
||||
"""
|
||||
collection.delete(item_href)
|
||||
|
||||
multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
|
||||
response = ET.Element(xmlutils.make_clark("D:response"))
|
||||
multistatus.append(response)
|
||||
|
||||
href_element = ET.Element(xmlutils.make_clark("D:href"))
|
||||
href_element.text = xmlutils.make_href(base_prefix, path)
|
||||
response.append(href_element)
|
||||
|
||||
status = ET.Element(xmlutils.make_clark("D:status"))
|
||||
status.text = xmlutils.make_response(200)
|
||||
response.append(status)
|
||||
|
||||
return multistatus
|
||||
|
||||
|
||||
class ApplicationPartDelete(ApplicationBase):
|
||||
|
||||
def do_DELETE(self, environ: types.WSGIEnviron, base_prefix: str,
|
||||
path: str, user: str) -> types.WSGIResponse:
|
||||
"""Manage DELETE request."""
|
||||
access = Access(self._rights, user, path)
|
||||
if not access.check("w"):
|
||||
return httputils.NOT_ALLOWED
|
||||
with self._storage.acquire_lock("w", user):
|
||||
item = next(iter(self._storage.discover(path)), None)
|
||||
if not item:
|
||||
return httputils.NOT_FOUND
|
||||
if not access.check("w", item):
|
||||
return httputils.NOT_ALLOWED
|
||||
if_match = environ.get("HTTP_IF_MATCH", "*")
|
||||
if if_match not in ("*", item.etag):
|
||||
# ETag precondition not verified, do not delete item
|
||||
return httputils.PRECONDITION_FAILED
|
||||
hook_notification_item_list = []
|
||||
if isinstance(item, storage.BaseCollection):
|
||||
if self._permit_delete_collection:
|
||||
if access.check("d", item):
|
||||
logger.info("delete of collection is permitted by config/option [rights] permit_delete_collection but explicit forbidden by permission 'd': %s", path)
|
||||
return httputils.NOT_ALLOWED
|
||||
else:
|
||||
if not access.check("D", item):
|
||||
logger.info("delete of collection is prevented by config/option [rights] permit_delete_collection and not explicit allowed by permission 'D': %s", path)
|
||||
return httputils.NOT_ALLOWED
|
||||
for i in item.get_all():
|
||||
hook_notification_item_list.append(
|
||||
HookNotificationItem(
|
||||
HookNotificationItemTypes.DELETE,
|
||||
access.path,
|
||||
i.uid
|
||||
)
|
||||
)
|
||||
xml_answer = xml_delete(base_prefix, path, item)
|
||||
else:
|
||||
assert item.collection is not None
|
||||
assert item.href is not None
|
||||
hook_notification_item_list.append(
|
||||
HookNotificationItem(
|
||||
HookNotificationItemTypes.DELETE,
|
||||
access.path,
|
||||
item.uid
|
||||
)
|
||||
)
|
||||
xml_answer = xml_delete(
|
||||
base_prefix, path, item.collection, item.href)
|
||||
for notification_item in hook_notification_item_list:
|
||||
self._hook.notify(notification_item)
|
||||
headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
|
||||
return client.OK, headers, self._xml_response(xml_answer)
|
111
radicale/app/get.py
Normal file
111
radicale/app/get.py
Normal file
|
@ -0,0 +1,111 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import posixpath
|
||||
from http import client
|
||||
from urllib.parse import quote
|
||||
|
||||
from radicale import httputils, pathutils, storage, types, xmlutils
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def propose_filename(collection: storage.BaseCollection) -> str:
|
||||
"""Propose a filename for a collection."""
|
||||
if collection.tag == "VADDRESSBOOK":
|
||||
fallback_title = "Address book"
|
||||
suffix = ".vcf"
|
||||
elif collection.tag == "VCALENDAR":
|
||||
fallback_title = "Calendar"
|
||||
suffix = ".ics"
|
||||
else:
|
||||
fallback_title = posixpath.basename(collection.path)
|
||||
suffix = ""
|
||||
title = collection.get_meta("D:displayname") or fallback_title
|
||||
if title and not title.lower().endswith(suffix.lower()):
|
||||
title += suffix
|
||||
return title
|
||||
|
||||
|
||||
class ApplicationPartGet(ApplicationBase):
    """GET handler: serves the web interface, collections and single items."""

    def _content_disposition_attachment(self, filename: str) -> str:
        # Build a Content-Disposition header value.  The filename is
        # percent-encoded and emitted via the RFC 5987 "filename*"
        # extended parameter so non-ASCII names survive transport.
        value = "attachment"
        try:
            encoded_filename = quote(filename, encoding=self._encoding)
        except UnicodeEncodeError:
            # Fall back to a bare "attachment" disposition when the name
            # cannot be represented in the configured encoding.
            logger.warning("Failed to encode filename: %r", filename,
                           exc_info=True)
            encoded_filename = ""
        if encoded_filename:
            value += "; filename*=%s''%s" % (self._encoding, encoded_filename)
        return value

    def do_GET(self, environ: types.WSGIEnviron, base_prefix: str, path: str,
               user: str) -> types.WSGIResponse:
        """Manage GET request.

        Serves either the built-in web interface (paths under /.web), a
        whole tagged collection as one .ics/.vcf download, or a single
        stored item.  Untagged collections are not downloadable.
        """
        # Redirect to /.web if the root path is requested
        if not pathutils.strip_path(path):
            return httputils.redirect(base_prefix + "/.web")
        if path == "/.web" or path.startswith("/.web/"):
            # Redirect to sanitized path for all subpaths of /.web
            unsafe_path = environ.get("PATH_INFO", "")
            if len(base_prefix) > 0:
                unsafe_path = unsafe_path.removeprefix(base_prefix)
            if unsafe_path != path:
                location = base_prefix + path
                logger.info("Redirecting to sanitized path: %r ==> %r",
                            base_prefix + unsafe_path, location)
                return httputils.redirect(location, client.MOVED_PERMANENTLY)
            # Dispatch /.web path to web module
            return self._web.get(environ, base_prefix, path, user)
        access = Access(self._rights, user, path)
        # "i" permission grants limited access: the item itself may still be
        # readable even when the full read right "r" on the path is missing.
        if not access.check("r") and "i" not in access.permissions:
            return httputils.NOT_ALLOWED
        with self._storage.acquire_lock("r", user):
            item = next(iter(self._storage.discover(path)), None)
            if not item:
                return httputils.NOT_FOUND
            if access.check("r", item):
                limited_access = False
            elif "i" in access.permissions:
                limited_access = True
            else:
                return httputils.NOT_ALLOWED
            if isinstance(item, storage.BaseCollection):
                if not item.tag:
                    # Untagged collection: no serialized form exists; only
                    # fully authorized users get the directory listing page.
                    return (httputils.NOT_ALLOWED if limited_access else
                            httputils.DIRECTORY_LISTING)
                content_type = xmlutils.MIMETYPES[item.tag]
                content_disposition = self._content_disposition_attachment(
                    propose_filename(item))
            elif limited_access:
                return httputils.NOT_ALLOWED
            else:
                content_type = xmlutils.OBJECT_MIMETYPES[item.name]
                content_disposition = ""
            assert item.last_modified
            headers = {
                "Content-Type": content_type,
                "Last-Modified": item.last_modified,
                "ETag": item.etag}
            if content_disposition:
                headers["Content-Disposition"] = content_disposition
            answer = item.serialize()
            return client.OK, headers, answer
|
31
radicale/app/head.py
Normal file
31
radicale/app/head.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from radicale import types
|
||||
from radicale.app.base import ApplicationBase
|
||||
from radicale.app.get import ApplicationPartGet
|
||||
|
||||
|
||||
class ApplicationPartHead(ApplicationPartGet, ApplicationBase):
    """HEAD handler, implemented on top of the GET handler."""

    def do_HEAD(self, environ: types.WSGIEnviron, base_prefix: str, path: str,
                user: str) -> types.WSGIResponse:
        """Manage HEAD request.

        Delegates to ``do_GET`` so status and headers match the
        corresponding GET exactly; the body is dropped later in
        ``Application.__call__`` for HEAD requests.
        """
        response = self.do_GET(environ, base_prefix, path, user)
        return response
|
92
radicale/app/mkcalendar.py
Normal file
92
radicale/app/mkcalendar.py
Normal file
|
@ -0,0 +1,92 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import posixpath
|
||||
import re
|
||||
import socket
|
||||
from http import client
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import httputils, pathutils, storage, types, xmlutils
|
||||
from radicale.app.base import ApplicationBase
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class ApplicationPartMkcalendar(ApplicationBase):
    """MKCALENDAR handler (CalDAV calendar creation)."""

    def do_MKCALENDAR(self, environ: types.WSGIEnviron, base_prefix: str,
                      path: str, user: str) -> types.WSGIResponse:
        """Manage MKCALENDAR request."""
        # Creating a calendar requires the "w" right on the target path.
        if "w" not in self._rights.authorization(user, path):
            return httputils.NOT_ALLOWED
        try:
            xml_content = self._read_xml_request_body(environ)
        except RuntimeError as e:
            logger.warning(
                "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        except socket.timeout:
            logger.debug("Client timed out", exc_info=True)
            return httputils.REQUEST_TIMEOUT
        # Prepare before locking
        props_with_remove = xmlutils.props_from_request(xml_content)
        # The created collection is always a calendar.
        props_with_remove["tag"] = "VCALENDAR"
        try:
            props = radicale_item.check_and_sanitize_props(props_with_remove)
        except ValueError as e:
            logger.warning(
                "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        # TODO: use this?
        # timezone = props.get("C:calendar-timezone")
        with self._storage.acquire_lock("w", user):
            item = next(iter(self._storage.discover(path)), None)
            if item:
                # Target must not exist; answer with the WebDAV
                # resource-must-be-null precondition error.
                return self._webdav_error_response(
                    client.CONFLICT, "D:resource-must-be-null")
            parent_path = pathutils.unstrip_path(
                posixpath.dirname(pathutils.strip_path(path)), True)
            parent_item = next(iter(self._storage.discover(parent_path)), None)
            if not parent_item:
                # Missing intermediate collection
                return httputils.CONFLICT
            if (not isinstance(parent_item, storage.BaseCollection) or
                    parent_item.tag):
                # Calendars can only be created inside untagged collections.
                return httputils.FORBIDDEN
            try:
                self._storage.create_collection(path, props=props)
            except ValueError as e:
                # Return a better matching HTTP result in case an errno is
                # embedded in the storage error message.
                errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
                if errno_match:
                    logger.error(
                        "Failed MKCALENDAR request on %r: %s", path, e, exc_info=True)
                    errno_e = int(errno_match.group(1))
                    if errno_e == errno.ENOSPC:
                        return httputils.INSUFFICIENT_STORAGE
                    elif errno_e in [errno.EPERM, errno.EACCES]:
                        return httputils.FORBIDDEN
                    else:
                        return httputils.INTERNAL_SERVER_ERROR
                else:
                    logger.warning(
                        "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
        return client.CREATED, {}, None
|
97
radicale/app/mkcol.py
Normal file
97
radicale/app/mkcol.py
Normal file
|
@ -0,0 +1,97 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import posixpath
|
||||
import re
|
||||
import socket
|
||||
from http import client
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import httputils, pathutils, rights, storage, types, xmlutils
|
||||
from radicale.app.base import ApplicationBase
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class ApplicationPartMkcol(ApplicationBase):
    """MKCOL handler (WebDAV collection creation)."""

    def do_MKCOL(self, environ: types.WSGIEnviron, base_prefix: str,
                 path: str, user: str) -> types.WSGIResponse:
        """Manage MKCOL request."""
        permissions = self._rights.authorization(user, path)
        # Either right suffices for the cheap pre-check; the exact right
        # ("w" for tagged, "W" for untagged) is verified after parsing.
        if not rights.intersect(permissions, "Ww"):
            return httputils.NOT_ALLOWED
        try:
            xml_content = self._read_xml_request_body(environ)
        except RuntimeError as e:
            logger.warning(
                "Bad MKCOL request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        except socket.timeout:
            logger.debug("Client timed out", exc_info=True)
            return httputils.REQUEST_TIMEOUT
        # Prepare before locking
        props_with_remove = xmlutils.props_from_request(xml_content)
        try:
            props = radicale_item.check_and_sanitize_props(props_with_remove)
        except ValueError as e:
            logger.warning(
                "Bad MKCOL request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        collection_type = props.get("tag") or "UNKNOWN"
        # Tagged collections (calendars / address books) need "w";
        # plain untagged collections need "W".
        if props.get("tag") and "w" not in permissions:
            logger.warning("MKCOL request %r (type:%s): %s", path, collection_type, "rejected because of missing rights 'w'")
            return httputils.NOT_ALLOWED
        if not props.get("tag") and "W" not in permissions:
            logger.warning("MKCOL request %r (type:%s): %s", path, collection_type, "rejected because of missing rights 'W'")
            return httputils.NOT_ALLOWED
        with self._storage.acquire_lock("w", user):
            item = next(iter(self._storage.discover(path)), None)
            if item:
                # Target already exists
                return httputils.METHOD_NOT_ALLOWED
            parent_path = pathutils.unstrip_path(
                posixpath.dirname(pathutils.strip_path(path)), True)
            parent_item = next(iter(self._storage.discover(parent_path)), None)
            if not parent_item:
                # Missing intermediate collection
                return httputils.CONFLICT
            if (not isinstance(parent_item, storage.BaseCollection) or
                    parent_item.tag):
                # New collections may only be created inside untagged ones.
                return httputils.FORBIDDEN
            try:
                self._storage.create_collection(path, props=props)
            except ValueError as e:
                # Return a better matching HTTP result in case an errno is
                # embedded in the storage error message.
                errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
                if errno_match:
                    logger.error(
                        "Failed MKCOL request on %r (type:%s): %s", path, collection_type, e, exc_info=True)
                    errno_e = int(errno_match.group(1))
                    if errno_e == errno.ENOSPC:
                        return httputils.INSUFFICIENT_STORAGE
                    elif errno_e in [errno.EPERM, errno.EACCES]:
                        return httputils.FORBIDDEN
                    else:
                        return httputils.INTERNAL_SERVER_ERROR
                else:
                    logger.warning(
                        "Bad MKCOL request on %r (type:%s): %s", path, collection_type, e, exc_info=True)
                    return httputils.BAD_REQUEST
        logger.info("MKCOL request %r (type:%s): %s", path, collection_type, "successful")
        return client.CREATED, {}, None
|
130
radicale/app/move.py
Normal file
130
radicale/app/move.py
Normal file
|
@ -0,0 +1,130 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2023 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2023-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import posixpath
|
||||
import re
|
||||
from http import client
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from radicale import httputils, pathutils, storage, types
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def get_server_netloc(environ: types.WSGIEnviron,
                      force_port: bool = False) -> str:
    """Return the server's network location ("host" or "host:port").

    Prefers the reverse-proxy headers ``X-Forwarded-Host`` /
    ``X-Forwarded-Proto`` / ``X-Forwarded-Port`` when present, otherwise
    falls back to the WSGI ``HTTP_HOST`` / ``SERVER_NAME`` /
    ``SERVER_PORT`` variables.

    The port is omitted when it is the default for the scheme (80/443)
    and ``force_port`` is false, or when the host already carries an
    explicit ``:port`` suffix.
    """
    if environ.get("HTTP_X_FORWARDED_HOST"):
        host = environ["HTTP_X_FORWARDED_HOST"]
        proto = environ.get("HTTP_X_FORWARDED_PROTO") or "http"
        port = "443" if proto == "https" else "80"
        # BUGFIX: a proxy may set X-Forwarded-Host without X-Forwarded-Port;
        # use .get() so a missing header falls back to the scheme default
        # instead of raising KeyError.
        port = environ.get("HTTP_X_FORWARDED_PORT") or port
    else:
        host = environ.get("HTTP_HOST") or environ["SERVER_NAME"]
        proto = environ["wsgi.url_scheme"]
        port = environ["SERVER_PORT"]
    if (not force_port and port == ("443" if proto == "https" else "80") or
            re.search(r":\d+$", host)):
        return host
    return host + ":" + port
|
||||
|
||||
|
||||
class ApplicationPartMove(ApplicationBase):
    """MOVE handler (WebDAV rename/move of single items)."""

    def do_MOVE(self, environ: types.WSGIEnviron, base_prefix: str,
                path: str, user: str) -> types.WSGIResponse:
        """Manage MOVE request."""
        raw_dest = environ.get("HTTP_DESTINATION", "")
        to_url = urlparse(raw_dest)
        # Normalize the destination netloc to always carry a port so it can
        # be compared against our own address.
        to_netloc_with_port = to_url.netloc
        if to_url.port is None:
            to_netloc_with_port += (":443" if to_url.scheme == "https"
                                    else ":80")
        if to_netloc_with_port != get_server_netloc(environ, force_port=True):
            logger.info("Unsupported destination address: %r", raw_dest)
            # Remote destination server, not supported
            return httputils.REMOTE_DESTINATION
        access = Access(self._rights, user, path)
        if not access.check("w"):
            return httputils.NOT_ALLOWED
        to_path = pathutils.sanitize_path(to_url.path)
        if not (to_path + "/").startswith(base_prefix + "/"):
            logger.warning("Destination %r from MOVE request on %r doesn't "
                           "start with base prefix", to_path, path)
            return httputils.NOT_ALLOWED
        to_path = to_path[len(base_prefix):]
        # Write access is required on both source and destination.
        to_access = Access(self._rights, user, to_path)
        if not to_access.check("w"):
            return httputils.NOT_ALLOWED

        with self._storage.acquire_lock("w", user):
            item = next(iter(self._storage.discover(path)), None)
            if not item:
                return httputils.NOT_FOUND
            if (not access.check("w", item) or
                    not to_access.check("w", item)):
                return httputils.NOT_ALLOWED
            if isinstance(item, storage.BaseCollection):
                # TODO: support moving collections
                return httputils.METHOD_NOT_ALLOWED

            to_item = next(iter(self._storage.discover(to_path)), None)
            if isinstance(to_item, storage.BaseCollection):
                # An item must not overwrite a collection.
                return httputils.FORBIDDEN
            to_parent_path = pathutils.unstrip_path(
                posixpath.dirname(pathutils.strip_path(to_path)), True)
            to_collection = next(iter(
                self._storage.discover(to_parent_path)), None)
            if not to_collection:
                return httputils.CONFLICT
            assert isinstance(to_collection, storage.BaseCollection)
            assert item.collection is not None
            collection_tag = item.collection.tag
            # Items can only move between collections of the same tag
            # (calendar to calendar, address book to address book).
            if not collection_tag or collection_tag != to_collection.tag:
                return httputils.FORBIDDEN
            # Overwriting an existing target requires the Overwrite: T header.
            if to_item and environ.get("HTTP_OVERWRITE", "F") != "T":
                return httputils.PRECONDITION_FAILED
            # Refuse moves that would duplicate or change a UID within the
            # destination collection.
            if (to_item and item.uid != to_item.uid or
                    not to_item and
                    to_collection.path != item.collection.path and
                    to_collection.has_uid(item.uid)):
                return self._webdav_error_response(
                    client.CONFLICT, "%s:no-uid-conflict" % (
                        "C" if collection_tag == "VCALENDAR" else "CR"))
            to_href = posixpath.basename(pathutils.strip_path(to_path))
            try:
                self._storage.move(item, to_collection, to_href)
            except ValueError as e:
                # Return a better matching HTTP result in case an errno is
                # embedded in the storage error message.
                errno_match = re.search("\\[Errno ([0-9]+)\\]", str(e))
                if errno_match:
                    logger.error(
                        "Failed MOVE request on %r: %s", path, e, exc_info=True)
                    errno_e = int(errno_match.group(1))
                    if errno_e == errno.ENOSPC:
                        return httputils.INSUFFICIENT_STORAGE
                    elif errno_e in [errno.EPERM, errno.EACCES]:
                        return httputils.FORBIDDEN
                    else:
                        return httputils.INTERNAL_SERVER_ERROR
                else:
                    logger.warning(
                        "Bad MOVE request on %r: %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
            # 204 when an existing target was replaced, 201 when created.
            return client.NO_CONTENT if to_item else client.CREATED, {}, None
|
35
radicale/app/options.py
Normal file
35
radicale/app/options.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from http import client
|
||||
|
||||
from radicale import httputils, types
|
||||
from radicale.app.base import ApplicationBase
|
||||
|
||||
|
||||
class ApplicationPartOptions(ApplicationBase):
    """OPTIONS handler: advertises supported methods and DAV features."""

    def do_OPTIONS(self, environ: types.WSGIEnviron, base_prefix: str,
                   path: str, user: str) -> types.WSGIResponse:
        """Manage OPTIONS request."""
        # Every implemented handler is named do_<METHOD>; strip the prefix
        # to build the Allow header.
        methods = (attr[3:] for attr in dir(self) if attr.startswith("do_"))
        headers = {
            "Allow": ", ".join(methods),
            "DAV": httputils.DAV_HEADERS,
        }
        return client.OK, headers, None
|
32
radicale/app/post.py
Normal file
32
radicale/app/post.py
Normal file
|
@ -0,0 +1,32 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2020 Tom Hacohen <tom@stosb.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from radicale import httputils, types
|
||||
from radicale.app.base import ApplicationBase
|
||||
|
||||
|
||||
class ApplicationPartPost(ApplicationBase):
    """POST handler: only the web interface accepts POST requests."""

    def do_POST(self, environ: types.WSGIEnviron, base_prefix: str,
                path: str, user: str) -> types.WSGIResponse:
        """Manage POST request."""
        # Everything outside /.web is rejected.
        if not (path == "/.web" or path.startswith("/.web/")):
            return httputils.METHOD_NOT_ALLOWED
        return self._web.post(environ, base_prefix, path, user)
|
412
radicale/app/propfind.py
Normal file
412
radicale/app/propfind.py
Normal file
|
@ -0,0 +1,412 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import collections
|
||||
import itertools
|
||||
import posixpath
|
||||
import socket
|
||||
import xml.etree.ElementTree as ET
|
||||
from http import client
|
||||
from typing import Dict, Iterable, Iterator, List, Optional, Sequence, Tuple
|
||||
|
||||
from radicale import httputils, pathutils, rights, storage, types, xmlutils
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def xml_propfind(base_prefix: str, path: str,
                 xml_request: Optional[ET.Element],
                 allowed_items: Iterable[Tuple[types.CollectionOrItem, str]],
                 user: str, encoding: str) -> Optional[ET.Element]:
    """Read and answer PROPFIND requests.

    Read rfc4918-9.1 for info.

    ``allowed_items`` yields the collections/items to include in the
    multistatus answer, each paired with its permission ("r" or "w").
    Returns ``None`` when authentication should be requested instead.
    """
    # A client may choose not to submit a request body. An empty PROPFIND
    # request body MUST be treated as if it were an 'allprop' request.
    if xml_request is None:
        request_element = ET.Element(xmlutils.make_clark("D:allprop"))
    else:
        request_element = xml_request[0]

    requested_props: List[str] = []
    request_tag = request_element.tag
    is_allprop = request_tag == xmlutils.make_clark("D:allprop")
    is_propname = request_tag == xmlutils.make_clark("D:propname")
    if request_tag == xmlutils.make_clark("D:prop"):
        requested_props.extend(child.tag for child in request_element)

    if not user and xmlutils.make_clark(
            "D:current-user-principal") in requested_props:
        # Ask for authentication.
        # Returning the DAV:unauthenticated pseudo-principal as specified in
        # RFC 5397 doesn't seem to work with DAVx5.
        return None

    # Writing answer
    multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
    for item, permission in allowed_items:
        multistatus.append(xml_propfind_response(
            base_prefix, path, item, requested_props, user, encoding,
            write=(permission == "w"), allprop=is_allprop,
            propname=is_propname))
    return multistatus
|
||||
|
||||
|
||||
def xml_propfind_response(
        base_prefix: str, path: str, item: types.CollectionOrItem,
        props: Sequence[str], user: str, encoding: str, write: bool = False,
        propname: bool = False, allprop: bool = False) -> ET.Element:
    """Build and return a PROPFIND response.

    Exactly one of ``props``, ``propname`` and ``allprop`` selects the
    request mode (rfc4918-9.1).  Each requested property is answered in a
    200 propstat when available, otherwise in a 404 propstat.
    """
    if propname and allprop or (props and (propname or allprop)):
        raise ValueError("Only use one of props, propname and allprops")

    if isinstance(item, storage.BaseCollection):
        is_collection = True
        # A "leaf" collection is an actual calendar/address book/subscription
        is_leaf = item.tag in ("VADDRESSBOOK", "VCALENDAR", "VSUBSCRIBED")
        collection = item
        # Some clients expect collections to end with `/`
        uri = pathutils.unstrip_path(item.path, True)
    else:
        is_collection = is_leaf = False
        assert item.collection is not None
        assert item.href
        collection = item.collection
        uri = pathutils.unstrip_path(posixpath.join(
            collection.path, item.href))
    response = ET.Element(xmlutils.make_clark("D:response"))
    href = ET.Element(xmlutils.make_clark("D:href"))
    href.text = xmlutils.make_href(base_prefix, uri)
    response.append(href)

    if propname or allprop:
        props = []
        # Should list all properties that can be retrieved by the code below
        props.append(xmlutils.make_clark("D:principal-collection-set"))
        props.append(xmlutils.make_clark("D:current-user-principal"))
        props.append(xmlutils.make_clark("D:current-user-privilege-set"))
        props.append(xmlutils.make_clark("D:supported-report-set"))
        props.append(xmlutils.make_clark("D:resourcetype"))
        props.append(xmlutils.make_clark("D:owner"))

        if is_collection and collection.is_principal:
            props.append(xmlutils.make_clark("C:calendar-user-address-set"))
            props.append(xmlutils.make_clark("D:principal-URL"))
            props.append(xmlutils.make_clark("CR:addressbook-home-set"))
            props.append(xmlutils.make_clark("C:calendar-home-set"))

        if not is_collection or is_leaf:
            props.append(xmlutils.make_clark("D:getetag"))
            props.append(xmlutils.make_clark("D:getlastmodified"))
            props.append(xmlutils.make_clark("D:getcontenttype"))
            props.append(xmlutils.make_clark("D:getcontentlength"))

        if is_collection:
            if is_leaf:
                props.append(xmlutils.make_clark("D:displayname"))
                props.append(xmlutils.make_clark("D:sync-token"))
            if collection.tag == "VCALENDAR":
                props.append(xmlutils.make_clark("CS:getctag"))
                props.append(
                    xmlutils.make_clark("C:supported-calendar-component-set"))

            # Also include every stored metadata property (except the
            # internal "tag" marker).
            meta = collection.get_meta()
            for tag in meta:
                if tag == "tag":
                    continue
                clark_tag = xmlutils.make_clark(tag)
                if clark_tag not in props:
                    props.append(clark_tag)

    # Collect property elements grouped by their HTTP status code.
    responses: Dict[int, List[ET.Element]] = collections.defaultdict(list)
    if propname:
        # propname mode: only report the names, never the values.
        for tag in props:
            responses[200].append(ET.Element(tag))
        props = []
    for tag in props:
        element = ET.Element(tag)
        is404 = False
        if tag == xmlutils.make_clark("D:getetag"):
            if not is_collection or is_leaf:
                element.text = item.etag
            else:
                is404 = True
        elif tag == xmlutils.make_clark("D:getlastmodified"):
            if not is_collection or is_leaf:
                element.text = item.last_modified
            else:
                is404 = True
        elif tag == xmlutils.make_clark("D:principal-collection-set"):
            child_element = ET.Element(xmlutils.make_clark("D:href"))
            child_element.text = xmlutils.make_href(base_prefix, "/")
            element.append(child_element)
        elif (tag in (xmlutils.make_clark("C:calendar-user-address-set"),
                      xmlutils.make_clark("D:principal-URL"),
                      xmlutils.make_clark("CR:addressbook-home-set"),
                      xmlutils.make_clark("C:calendar-home-set")) and
                is_collection and collection.is_principal):
            child_element = ET.Element(xmlutils.make_clark("D:href"))
            child_element.text = xmlutils.make_href(base_prefix, path)
            element.append(child_element)
        elif tag == xmlutils.make_clark("C:supported-calendar-component-set"):
            human_tag = xmlutils.make_human_tag(tag)
            if is_collection and is_leaf:
                components_text = collection.get_meta(human_tag)
                if components_text:
                    components = components_text.split(",")
                else:
                    # Default when the collection has no explicit setting
                    components = ["VTODO", "VEVENT", "VJOURNAL"]
                for component in components:
                    comp = ET.Element(xmlutils.make_clark("C:comp"))
                    comp.set("name", component)
                    element.append(comp)
            else:
                is404 = True
        elif tag == xmlutils.make_clark("D:current-user-principal"):
            if user:
                child_element = ET.Element(xmlutils.make_clark("D:href"))
                child_element.text = xmlutils.make_href(
                    base_prefix, "/%s/" % user)
                element.append(child_element)
            else:
                element.append(ET.Element(
                    xmlutils.make_clark("D:unauthenticated")))
        elif tag == xmlutils.make_clark("D:current-user-privilege-set"):
            privileges = ["D:read"]
            if write:
                privileges.append("D:all")
                privileges.append("D:write")
                privileges.append("D:write-properties")
                privileges.append("D:write-content")
            for human_tag in privileges:
                privilege = ET.Element(xmlutils.make_clark("D:privilege"))
                privilege.append(ET.Element(
                    xmlutils.make_clark(human_tag)))
                element.append(privilege)
        elif tag == xmlutils.make_clark("D:supported-report-set"):
            # These 3 reports are not implemented
            reports = ["D:expand-property",
                       "D:principal-search-property-set",
                       "D:principal-property-search"]
            if is_collection and is_leaf:
                reports.append("D:sync-collection")
                if collection.tag == "VADDRESSBOOK":
                    reports.append("CR:addressbook-multiget")
                    reports.append("CR:addressbook-query")
                elif collection.tag == "VCALENDAR":
                    reports.append("C:calendar-multiget")
                    reports.append("C:calendar-query")
            for human_tag in reports:
                supported_report = ET.Element(
                    xmlutils.make_clark("D:supported-report"))
                report_element = ET.Element(xmlutils.make_clark("D:report"))
                report_element.append(
                    ET.Element(xmlutils.make_clark(human_tag)))
                supported_report.append(report_element)
                element.append(supported_report)
        elif tag == xmlutils.make_clark("D:getcontentlength"):
            if not is_collection or is_leaf:
                element.text = str(len(item.serialize().encode(encoding)))
            else:
                is404 = True
        elif tag == xmlutils.make_clark("D:owner"):
            # return empty element, if no owner available (rfc3744-5.1)
            if collection.owner:
                child_element = ET.Element(xmlutils.make_clark("D:href"))
                child_element.text = xmlutils.make_href(
                    base_prefix, "/%s/" % collection.owner)
                element.append(child_element)
        elif is_collection:
            if tag == xmlutils.make_clark("D:getcontenttype"):
                if is_leaf:
                    element.text = xmlutils.MIMETYPES[
                        collection.tag]
                else:
                    is404 = True
            elif tag == xmlutils.make_clark("D:resourcetype"):
                if collection.is_principal:
                    child_element = ET.Element(
                        xmlutils.make_clark("D:principal"))
                    element.append(child_element)
                if is_leaf:
                    if collection.tag == "VADDRESSBOOK":
                        child_element = ET.Element(
                            xmlutils.make_clark("CR:addressbook"))
                        element.append(child_element)
                    elif collection.tag == "VCALENDAR":
                        child_element = ET.Element(
                            xmlutils.make_clark("C:calendar"))
                        element.append(child_element)
                    elif collection.tag == "VSUBSCRIBED":
                        child_element = ET.Element(
                            xmlutils.make_clark("CS:subscribed"))
                        element.append(child_element)
                child_element = ET.Element(xmlutils.make_clark("D:collection"))
                element.append(child_element)
            elif tag == xmlutils.make_clark("RADICALE:displayname"):
                # Only for internal use by the web interface
                displayname = collection.get_meta("D:displayname")
                if displayname is not None:
                    element.text = displayname
                else:
                    is404 = True
            elif tag == xmlutils.make_clark("RADICALE:getcontentcount"):
                # Only for internal use by the web interface
                if isinstance(item, storage.BaseCollection) and not collection.is_principal:
                    element.text = str(sum(1 for x in item.get_all()))
                else:
                    is404 = True
            elif tag == xmlutils.make_clark("D:displayname"):
                displayname = collection.get_meta("D:displayname")
                if not displayname and is_leaf:
                    # Fall back to the collection path for leaf collections
                    displayname = collection.path
                if displayname is not None:
                    element.text = displayname
                else:
                    is404 = True
            elif tag == xmlutils.make_clark("CS:getctag"):
                if is_leaf:
                    element.text = collection.etag
                else:
                    is404 = True
            elif tag == xmlutils.make_clark("D:sync-token"):
                if is_leaf:
                    element.text, _ = collection.sync()
                else:
                    is404 = True
            elif tag == xmlutils.make_clark("CS:source"):
                if is_leaf:
                    child_element = ET.Element(xmlutils.make_clark("D:href"))
                    child_element.text = collection.get_meta('CS:source')
                    element.append(child_element)
                else:
                    is404 = True
            else:
                # Any other property: answer from the stored metadata
                human_tag = xmlutils.make_human_tag(tag)
                tag_text = collection.get_meta(human_tag)
                if tag_text is not None:
                    element.text = tag_text
                else:
                    is404 = True
        # Not for collections
        elif tag == xmlutils.make_clark("D:getcontenttype"):
            assert not isinstance(item, storage.BaseCollection)
            element.text = xmlutils.get_content_type(item, encoding)
        elif tag == xmlutils.make_clark("D:resourcetype"):
            # resourcetype must be returned empty for non-collection elements
            pass
        else:
            is404 = True

        responses[404 if is404 else 200].append(element)

    # Emit one propstat per status code that actually has properties.
    for status_code, children in responses.items():
        if not children:
            continue
        propstat = ET.Element(xmlutils.make_clark("D:propstat"))
        response.append(propstat)
        prop = ET.Element(xmlutils.make_clark("D:prop"))
        prop.extend(children)
        propstat.append(prop)
        status = ET.Element(xmlutils.make_clark("D:status"))
        status.text = xmlutils.make_response(status_code)
        propstat.append(status)

    return response
|
||||
|
||||
|
||||
class ApplicationPartPropfind(ApplicationBase):

    def _collect_allowed_items(
            self, items: Iterable[types.CollectionOrItem], user: str
            ) -> Iterator[Tuple[types.CollectionOrItem, str]]:
        """Get items from request that user is allowed to access."""
        for item in items:
            if isinstance(item, storage.BaseCollection):
                sane_path = pathutils.unstrip_path(item.path, True)
                if item.tag:
                    # Tagged collection (calendar/address book):
                    # lower-case right letters apply.
                    granted = rights.intersect(
                        self._rights.authorization(user, sane_path), "rw")
                    target = "collection with tag %r" % item.path
                else:
                    # Untagged folder: upper-case right letters apply.
                    granted = rights.intersect(
                        self._rights.authorization(user, sane_path), "RW")
                    target = "collection %r" % item.path
            else:
                # Single item: rights of the containing collection decide.
                assert item.collection is not None
                sane_path = pathutils.unstrip_path(item.collection.path, True)
                granted = rights.intersect(
                    self._rights.authorization(user, sane_path), "rw")
                target = "item %r from %r" % (item.href, item.collection.path)
            if rights.intersect(granted, "Ww"):
                permission, status = "w", "write"
            elif rights.intersect(granted, "Rr"):
                permission, status = "r", "read"
            else:
                permission, status = "", "NO"
            logger.debug(
                "%s has %s access to %s",
                repr(user) if user else "anonymous user", status, target)
            # Inaccessible items are silently dropped from the response.
            if permission:
                yield item, permission

    def do_PROPFIND(self, environ: types.WSGIEnviron, base_prefix: str,
                    path: str, user: str) -> types.WSGIResponse:
        """Manage PROPFIND request."""
        access = Access(self._rights, user, path)
        if not access.check("r"):
            return httputils.NOT_ALLOWED
        # Parse the body before taking the storage lock.
        try:
            xml_content = self._read_xml_request_body(environ)
        except RuntimeError as e:
            logger.warning(
                "Bad PROPFIND request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        except socket.timeout:
            logger.debug("Client timed out", exc_info=True)
            return httputils.REQUEST_TIMEOUT
        with self._storage.acquire_lock("r", user):
            items_iter = iter(self._storage.discover(
                path, environ.get("HTTP_DEPTH", "0"),
                None, self._rights._user_groups))
            # Peek at the root item to check rights on it.
            first_item = next(items_iter, None)
            if not first_item:
                return httputils.NOT_FOUND
            if not access.check("r", first_item):
                return httputils.NOT_ALLOWED
            # Re-attach the peeked item in front of the remaining ones.
            items_iter = itertools.chain([first_item], items_iter)
            allowed_items = self._collect_allowed_items(items_iter, user)
            headers = {"DAV": httputils.DAV_HEADERS,
                       "Content-Type": "text/xml; charset=%s" % self._encoding}
            xml_answer = xml_propfind(base_prefix, path, xml_content,
                                      allowed_items, user, self._encoding)
            if xml_answer is None:
                return httputils.NOT_ALLOWED
            return client.MULTI_STATUS, headers, self._xml_response(xml_answer)
|
130
radicale/app/proppatch.py
Normal file
130
radicale/app/proppatch.py
Normal file
|
@ -0,0 +1,130 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2020-2020 Tuna Celik <tuna@jakpark.com>
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import re
|
||||
import socket
|
||||
import xml.etree.ElementTree as ET
|
||||
from http import client
|
||||
from typing import Dict, Optional, cast
|
||||
|
||||
import defusedxml.ElementTree as DefusedET
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import httputils, storage, types, xmlutils
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.hook import HookNotificationItem, HookNotificationItemTypes
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def xml_proppatch(base_prefix: str, path: str,
                  xml_request: Optional[ET.Element],
                  collection: storage.BaseCollection) -> ET.Element:
    """Read and answer PROPPATCH requests.

    Read rfc4918-9.2 for info.

    """
    # Response skeleton: multistatus > response > (href, propstat).
    multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
    response = ET.SubElement(multistatus, xmlutils.make_clark("D:response"))
    href = ET.SubElement(response, xmlutils.make_clark("D:href"))
    href.text = xmlutils.make_href(base_prefix, path)
    # propstat for all properties that were applied (status 200 OK).
    propstat = ET.SubElement(response, xmlutils.make_clark("D:propstat"))
    props_ok = ET.SubElement(propstat, xmlutils.make_clark("D:prop"))
    status = ET.SubElement(propstat, xmlutils.make_clark("D:status"))
    status.text = xmlutils.make_response(200)

    # Merge the requested changes (None values mean removal) into the
    # existing metadata, sanitize and store the result.
    props_with_remove = xmlutils.props_from_request(xml_request)
    all_props_with_remove = cast(Dict[str, Optional[str]],
                                 dict(collection.get_meta()))
    all_props_with_remove.update(props_with_remove)
    all_props = radicale_item.check_and_sanitize_props(all_props_with_remove)
    collection.set_meta(all_props)
    # Report every touched property as successful.
    for short_name in props_with_remove:
        props_ok.append(ET.Element(xmlutils.make_clark(short_name)))

    return multistatus
|
||||
|
||||
|
||||
class ApplicationPartProppatch(ApplicationBase):

    def do_PROPPATCH(self, environ: types.WSGIEnviron, base_prefix: str,
                     path: str, user: str) -> types.WSGIResponse:
        """Manage PROPPATCH request."""
        access = Access(self._rights, user, path)
        if not access.check("w"):
            return httputils.NOT_ALLOWED
        # Parse the request body before acquiring the storage lock.
        try:
            xml_content = self._read_xml_request_body(environ)
        except RuntimeError as e:
            logger.warning(
                "Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        except socket.timeout:
            logger.debug("Client timed out", exc_info=True)
            return httputils.REQUEST_TIMEOUT
        with self._storage.acquire_lock("w", user):
            item = next(iter(self._storage.discover(path)), None)
            if not item:
                return httputils.NOT_FOUND
            if not access.check("w", item):
                return httputils.NOT_ALLOWED
            # Properties can only be patched on collections.
            if not isinstance(item, storage.BaseCollection):
                return httputils.FORBIDDEN
            headers = {"DAV": httputils.DAV_HEADERS,
                       "Content-Type": "text/xml; charset=%s" % self._encoding}
            try:
                xml_answer = xml_proppatch(base_prefix, path, xml_content,
                                           item)
                if xml_content is not None:
                    # Notify hooks with the serialized request body.
                    self._hook.notify(HookNotificationItem(
                        HookNotificationItemTypes.CPATCH,
                        access.path,
                        DefusedET.tostring(
                            xml_content,
                            encoding=self._encoding
                        ).decode(encoding=self._encoding)))
            except ValueError as e:
                # If the message carries an errno, map it to a more precise
                # HTTP status than a generic 400.
                m = re.search("\\[Errno ([0-9]+)\\]", str(e))
                if m:
                    logger.error(
                        "Failed PROPPATCH request on %r: %s", path, e, exc_info=True)
                    code = int(m.group(1))
                    if code == errno.ENOSPC:
                        return httputils.INSUFFICIENT_STORAGE
                    if code in (errno.EPERM, errno.EACCES):
                        return httputils.FORBIDDEN
                    return httputils.INTERNAL_SERVER_ERROR
                logger.warning(
                    "Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
                return httputils.BAD_REQUEST
            return client.MULTI_STATUS, headers, self._xml_response(xml_answer)
|
287
radicale/app/put.py
Normal file
287
radicale/app/put.py
Normal file
|
@ -0,0 +1,287 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2020-2023 Tuna Celik <tuna@jakpark.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import itertools
|
||||
import posixpath
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
from http import client
|
||||
from types import TracebackType
|
||||
from typing import Iterator, List, Mapping, MutableMapping, Optional, Tuple
|
||||
|
||||
import vobject
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import (httputils, pathutils, rights, storage, types, utils,
|
||||
xmlutils)
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.hook import HookNotificationItem, HookNotificationItemTypes
|
||||
from radicale.log import logger
|
||||
|
||||
# Reverse lookup of xmlutils.MIMETYPES: MIME type -> collection tag.
MIMETYPE_TAGS: Mapping[str, str] = {
    mimetype: tag for tag, mimetype in xmlutils.MIMETYPES.items()}

# PRODID advertised in iCalendar data generated by this server.
PRODID = u"-//Radicale//NONSGML Version " + utils.package_version("radicale") + "//EN"
|
||||
|
||||
|
||||
def prepare(vobject_items: List[vobject.base.Component], path: str,
            content_type: str, permission: bool, parent_permission: bool,
            tag: Optional[str] = None,
            write_whole_collection: Optional[bool] = None) -> Tuple[
        Iterator[radicale_item.Item],  # items
        Optional[str],  # tag
        Optional[bool],  # write_whole_collection
        Optional[MutableMapping[str, str]],  # props
        Optional[Tuple[type, BaseException, Optional[TracebackType]]]]:
    """Turn parsed vobject components into storable items for a PUT.

    Decides (or honours the caller's decision) whether the request
    replaces a whole collection or a single item, sanitizes the
    components, and prebuilds collection properties.  Exceptions raised
    during preparation are captured and returned as ``exc_info`` instead
    of propagating, so the caller can report them after taking the lock.
    """
    # Decide the write mode unless the caller pinned it down already.
    if write_whole_collection or permission and not parent_permission:
        write_whole_collection = True
        tag = radicale_item.predict_tag_of_whole_collection(
            vobject_items, MIMETYPE_TAGS.get(content_type))
        if not tag:
            raise ValueError("Can't determine collection tag")
        collection_path = pathutils.strip_path(path)
    elif (write_whole_collection is not None and not write_whole_collection or
            not permission and parent_permission):
        write_whole_collection = False
        if tag is None:
            tag = radicale_item.predict_tag_of_parent_collection(vobject_items)
        collection_path = posixpath.dirname(pathutils.strip_path(path))
    props: Optional[MutableMapping[str, str]] = None
    stored_exc_info = None
    items = []
    try:
        if tag and write_whole_collection is not None:
            radicale_item.check_and_sanitize_items(
                vobject_items, is_collection=write_whole_collection, tag=tag)
            if write_whole_collection and tag == "VCALENDAR":
                # A calendar upload arrives as one VCALENDAR; split it
                # into one item per UID (VEVENT/VTODO/VJOURNAL group).
                vobject_components: List[vobject.base.Component] = []
                vobject_item, = vobject_items
                for content in ("vevent", "vtodo", "vjournal"):
                    vobject_components.extend(
                        getattr(vobject_item, "%s_list" % content, []))
                vobject_components_by_uid = itertools.groupby(
                    sorted(vobject_components, key=radicale_item.get_uid),
                    radicale_item.get_uid)
                for _, components in vobject_components_by_uid:
                    vobject_collection = vobject.iCalendar()
                    for component in components:
                        vobject_collection.add(component)
                    vobject_collection.add(vobject.base.ContentLine("PRODID", [], PRODID))
                    item = radicale_item.Item(collection_path=collection_path,
                                              vobject_item=vobject_collection)
                    item.prepare()
                    items.append(item)
            elif write_whole_collection and tag == "VADDRESSBOOK":
                # An address book upload is a flat list of VCARDs.
                for vobject_item in vobject_items:
                    item = radicale_item.Item(collection_path=collection_path,
                                              vobject_item=vobject_item)
                    item.prepare()
                    items.append(item)
            elif not write_whole_collection:
                # Single-item upload: exactly one component expected.
                vobject_item, = vobject_items
                item = radicale_item.Item(collection_path=collection_path,
                                          vobject_item=vobject_item)
                item.prepare()
                items.append(item)

        if write_whole_collection:
            # Derive collection properties from the uploaded data.
            props = {}
            if tag:
                props["tag"] = tag
            if tag == "VCALENDAR" and vobject_items:
                if hasattr(vobject_items[0], "x_wr_calname"):
                    calname = vobject_items[0].x_wr_calname.value
                    if calname:
                        props["D:displayname"] = calname
                if hasattr(vobject_items[0], "x_wr_caldesc"):
                    caldesc = vobject_items[0].x_wr_caldesc.value
                    if caldesc:
                        props["C:calendar-description"] = caldesc
            props = radicale_item.check_and_sanitize_props(props)
    except Exception:
        # Defer the failure to the caller via the returned exc_info.
        exc_info_or_none_tuple = sys.exc_info()
        assert exc_info_or_none_tuple[0] is not None
        stored_exc_info = exc_info_or_none_tuple

    # Use iterator for items and delete references to free memory early
    def items_iter() -> Iterator[radicale_item.Item]:
        while items:
            yield items.pop(0)
    return items_iter(), tag, write_whole_collection, props, stored_exc_info
|
||||
|
||||
|
||||
class ApplicationPartPut(ApplicationBase):

    def do_PUT(self, environ: types.WSGIEnviron, base_prefix: str,
               path: str, user: str) -> types.WSGIResponse:
        """Manage PUT request."""
        access = Access(self._rights, user, path)
        if not access.check("w"):
            return httputils.NOT_ALLOWED
        try:
            content = httputils.read_request_body(self.configuration, environ)
        except RuntimeError as e:
            logger.warning("Bad PUT request on %r (read_request_body): %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        except socket.timeout:
            logger.debug("Client timed out", exc_info=True)
            return httputils.REQUEST_TIMEOUT
        # Prepare before locking
        content_type = environ.get("CONTENT_TYPE", "").split(";",
                                                             maxsplit=1)[0]
        try:
            vobject_items = radicale_item.read_components(content or "")
        except Exception as e:
            logger.warning(
                "Bad PUT request on %r (read_components): %s", path, e, exc_info=True)
            if self._log_bad_put_request_content:
                logger.warning("Bad PUT request content of %r:\n%s", path, content)
            else:
                logger.debug("Bad PUT request content: suppressed by config/option [logging] bad_put_request_content")
            return httputils.BAD_REQUEST
        (prepared_items, prepared_tag, prepared_write_whole_collection,
         prepared_props, prepared_exc_info) = prepare(
            vobject_items, path, content_type,
            bool(rights.intersect(access.permissions, "Ww")),
            bool(rights.intersect(access.parent_permissions, "w")))

        with self._storage.acquire_lock("w", user, path=path):
            item = next(iter(self._storage.discover(path)), None)
            parent_item = next(iter(
                self._storage.discover(access.parent_path)), None)
            if not isinstance(parent_item, storage.BaseCollection):
                return httputils.CONFLICT

            # Replacing a collection (or creating one in an untagged
            # folder) vs. uploading a single item into a collection.
            write_whole_collection = (
                isinstance(item, storage.BaseCollection) or
                not parent_item.tag)
            tag = prepared_tag if write_whole_collection else parent_item.tag

            if write_whole_collection:
                if ("w" if tag else "W") not in access.permissions:
                    if not parent_item.tag:
                        logger.warning("Not a collection (check .Radicale.props): %r", parent_item.path)
                    return httputils.NOT_ALLOWED
                # Collection overwrite is governed by config plus the
                # explicit 'O'/'o' permission letters.
                if not self._permit_overwrite_collection:
                    if "O" not in access.permissions:
                        logger.info("overwrite of collection is prevented by config/option [rights] permit_overwrite_collection and not explicit allowed by permssion 'O': %r", path)
                        return httputils.NOT_ALLOWED
                else:
                    if "o" in access.permissions:
                        logger.info("overwrite of collection is allowed by config/option [rights] permit_overwrite_collection but explicit forbidden by permission 'o': %r", path)
                        return httputils.NOT_ALLOWED
            elif "w" not in access.parent_permissions:
                return httputils.NOT_ALLOWED

            # Conditional request handling (If-Match).
            etag = environ.get("HTTP_IF_MATCH", "")
            if not item and etag:
                # Etag asked but no item found: item has been removed
                logger.warning("Precondition failed on PUT request for %r (HTTP_IF_MATCH: %s, item not existing)", path, etag)
                return httputils.PRECONDITION_FAILED
            if item and etag and item.etag != etag:
                # Etag asked but item not matching: item has changed
                logger.warning("Precondition failed on PUT request for %r (HTTP_IF_MATCH: %s, item has different etag: %s)", path, etag, item.etag)
                return httputils.PRECONDITION_FAILED
            if item and etag:
                logger.debug("Precondition passed on PUT request for %r (HTTP_IF_MATCH: %s, item has etag: %s)", path, etag, item.etag)

            # Conditional request handling (If-None-Match: *).
            match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
            if item and match:
                logger.warning("Precondition failed on PUT request for %r (HTTP_IF_NONE_MATCH: *, creation requested but item found with etag: %s)", path, item.etag)
                return httputils.PRECONDITION_FAILED
            if match:
                logger.debug("Precondition passed on PUT request for %r (HTTP_IF_NONE_MATCH: *)", path)

            # Re-run preparation if the pre-lock guess about the write
            # mode or tag turned out to be wrong.
            if (tag != prepared_tag or
                    prepared_write_whole_collection != write_whole_collection):
                (prepared_items, prepared_tag, prepared_write_whole_collection,
                 prepared_props, prepared_exc_info) = prepare(
                    vobject_items, path, content_type,
                    bool(rights.intersect(access.permissions, "Ww")),
                    bool(rights.intersect(access.parent_permissions, "w")),
                    tag, write_whole_collection)
            props = prepared_props
            if prepared_exc_info:
                logger.warning(
                    "Bad PUT request on %r (prepare): %s", path, prepared_exc_info[1],
                    exc_info=prepared_exc_info)
                return httputils.BAD_REQUEST

            if write_whole_collection:
                try:
                    etag = self._storage.create_collection(
                        path, prepared_items, props).etag
                    for item in prepared_items:
                        self._hook.notify(HookNotificationItem(
                            HookNotificationItemTypes.UPSERT,
                            access.path,
                            item.serialize()))
                except ValueError as e:
                    logger.warning(
                        "Bad PUT request on %r (create_collection): %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
            else:
                assert not isinstance(item, storage.BaseCollection)
                prepared_item, = prepared_items
                # Reject UID collisions with a WebDAV no-uid-conflict error.
                if (item and item.uid != prepared_item.uid or
                        not item and parent_item.has_uid(prepared_item.uid)):
                    return self._webdav_error_response(
                        client.CONFLICT, "%s:no-uid-conflict" % (
                            "C" if tag == "VCALENDAR" else "CR"))

                href = posixpath.basename(pathutils.strip_path(path))
                try:
                    etag = parent_item.upload(href, prepared_item).etag
                    self._hook.notify(HookNotificationItem(
                        HookNotificationItemTypes.UPSERT,
                        access.path,
                        prepared_item.serialize()))
                except ValueError as e:
                    # If the message carries an errno, map it to a more
                    # precise HTTP status than a generic 400.
                    m = re.search("\\[Errno ([0-9]+)\\]", str(e))
                    if m:
                        logger.error(
                            "Failed PUT request on %r (upload): %s", path, e, exc_info=True)
                        errno_code = int(m.group(1))
                        if errno_code == errno.ENOSPC:
                            return httputils.INSUFFICIENT_STORAGE
                        elif errno_code in [errno.EPERM, errno.EACCES]:
                            return httputils.FORBIDDEN
                        else:
                            return httputils.INTERNAL_SERVER_ERROR
                    else:
                        logger.warning(
                            "Bad PUT request on %r (upload): %s", path, e, exc_info=True)
                        return httputils.BAD_REQUEST

            headers = {"ETag": etag}
            return client.CREATED, headers, None
|
642
radicale/app/report.py
Normal file
642
radicale/app/report.py
Normal file
|
@ -0,0 +1,642 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Pieter Hijma <pieterhijma@users.noreply.github.com>
|
||||
# Copyright © 2024-2024 Ray <ray@react0r.com>
|
||||
# Copyright © 2024-2024 Georgiy <metallerok@gmail.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import contextlib
|
||||
import copy
|
||||
import datetime
|
||||
import posixpath
|
||||
import socket
|
||||
import xml.etree.ElementTree as ET
|
||||
from http import client
|
||||
from typing import (Callable, Iterable, Iterator, List, Optional, Sequence,
|
||||
Tuple, Union)
|
||||
from urllib.parse import unquote, urlparse
|
||||
|
||||
import vobject
|
||||
import vobject.base
|
||||
from vobject.base import ContentLine
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import httputils, pathutils, storage, types, xmlutils
|
||||
from radicale.app.base import Access, ApplicationBase
|
||||
from radicale.item import filter as radicale_filter
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def free_busy_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
                     collection: storage.BaseCollection, encoding: str,
                     unlock_storage_fn: Callable[[], None],
                     max_occurrence: int
                     ) -> Tuple[int, Union[ET.Element, str]]:
    """Answer a CalDAV free-busy-query REPORT (rfc4791-7.10).

    Returns (status, body) where body is an Element for error/empty
    responses but a serialized iCalendar string on success — free-busy
    reports are an edge-case on the return type according to the spec.

    ``max_occurrence`` limits how many occurrences are expanded;
    a value <= 0 means unlimited.
    """
    multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
    if xml_request is None:
        return client.MULTI_STATUS, multistatus
    root = xml_request
    if (root.tag == xmlutils.make_clark("C:free-busy-query") and
            collection.tag != "VCALENDAR"):
        logger.warning("Invalid REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")

    time_range_element = root.find(xmlutils.make_clark("C:time-range"))
    assert isinstance(time_range_element, ET.Element)

    # Build a single filter from the free busy query for retrieval
    # TODO: filter for VFREEBUSY in additional to VEVENT but
    # test_filter doesn't support that yet.
    vevent_cf_element = ET.Element(xmlutils.make_clark("C:comp-filter"),
                                   attrib={'name': 'VEVENT'})
    vevent_cf_element.append(time_range_element)
    vcalendar_cf_element = ET.Element(xmlutils.make_clark("C:comp-filter"),
                                      attrib={'name': 'VCALENDAR'})
    vcalendar_cf_element.append(vevent_cf_element)
    filter_element = ET.Element(xmlutils.make_clark("C:filter"))
    filter_element.append(vcalendar_cf_element)
    filters = (filter_element,)

    # First pull from storage
    retrieved_items = list(collection.get_filtered(filters))
    # !!! Don't access storage after this !!!
    unlock_storage_fn()

    cal = vobject.iCalendar()
    collection_tag = collection.tag
    while retrieved_items:
        # Second filtering before evaluating occurrences.
        # ``item.vobject_item`` might be accessed during filtering.
        # Don't keep reference to ``item``, because VObject requires a lot of
        # memory.
        item, filter_matched = retrieved_items.pop(0)
        if not filter_matched:
            try:
                if not test_filter(collection_tag, item, filter_element):
                    continue
            except ValueError as e:
                raise ValueError("Failed to free-busy filter item %r from %r: %s" %
                                 (item.href, collection.path, e)) from e
            except Exception as e:
                raise RuntimeError("Failed to free-busy filter item %r from %r: %s" %
                                   (item.href, collection.path, e)) from e

        fbtype = None
        if item.component_name == 'VEVENT':
            # Transparent events don't block time.
            transp = getattr(item.vobject_item.vevent, 'transp', None)
            if transp and transp.value != 'OPAQUE':
                continue

            status = getattr(item.vobject_item.vevent, 'status', None)
            if not status or status.value == 'CONFIRMED':
                fbtype = 'BUSY'
            elif status.value == 'CANCELLED':
                fbtype = 'FREE'
            elif status.value == 'TENTATIVE':
                fbtype = 'BUSY-TENTATIVE'
            else:
                # Could do fbtype = status.value for x-name, I prefer this
                fbtype = 'BUSY'

        # TODO: coalesce overlapping periods

        # Request one occurrence more than the limit so that hitting the
        # limit is detectable; 0 means "no limit" for time_range_fill.
        if max_occurrence > 0:
            n_occurrences = max_occurrence+1
        else:
            n_occurrences = 0
        occurrences = radicale_filter.time_range_fill(item.vobject_item,
                                                      time_range_element,
                                                      "VEVENT",
                                                      n=n_occurrences)
        # BUG FIX: only enforce the limit when one is configured.
        # Previously ``len(occurrences) >= max_occurrence`` raised for
        # every item when max_occurrence == 0 (unlimited), because
        # ``len(...) >= 0`` is always true.
        if max_occurrence > 0 and len(occurrences) >= max_occurrence:
            raise ValueError("FREEBUSY occurrences limit of {} hit"
                             .format(max_occurrence))

        for occurrence in occurrences:
            vfb = cal.add('vfreebusy')
            vfb.add('dtstamp').value = item.vobject_item.vevent.dtstamp.value
            vfb.add('dtstart').value, vfb.add('dtend').value = occurrence
            if fbtype:
                vfb.add('fbtype').value = fbtype
    return (client.OK, cal.serialize())
|
||||
|
||||
|
||||
def xml_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
               collection: storage.BaseCollection, encoding: str,
               unlock_storage_fn: Callable[[], None]
               ) -> Tuple[int, ET.Element]:
    """Read and answer REPORT requests that return XML.

    Read rfc3253-3.6 for info.

    """
    multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
    if xml_request is None:
        return client.MULTI_STATUS, multistatus
    root = xml_request
    if root.tag in (xmlutils.make_clark("D:principal-search-property-set"),
                    xmlutils.make_clark("D:principal-property-search"),
                    xmlutils.make_clark("D:expand-property")):
        # We don't support searching for principals or indirect retrieving of
        # properties, just return an empty result.
        # InfCloud asks for expand-property reports (even if we don't announce
        # support for them) and stops working if an error code is returned.
        logger.warning("Unsupported REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return client.MULTI_STATUS, multistatus
    if (root.tag == xmlutils.make_clark("C:calendar-multiget") and
            collection.tag != "VCALENDAR" or
            root.tag == xmlutils.make_clark("CR:addressbook-multiget") and
            collection.tag != "VADDRESSBOOK" or
            root.tag == xmlutils.make_clark("D:sync-collection") and
            collection.tag not in ("VADDRESSBOOK", "VCALENDAR")):
        logger.warning("Invalid REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")

    # Requested properties (empty list when the request names none).
    prop_element = root.find(xmlutils.make_clark("D:prop"))
    props: Union[ET.Element, List] = (
        prop_element if prop_element is not None else [])

    # Determine which hrefs the report applies to.
    hreferences: Iterable[str]
    if root.tag in (
            xmlutils.make_clark("C:calendar-multiget"),
            xmlutils.make_clark("CR:addressbook-multiget")):
        # Read rfc4791-7.9 for info
        hreferences = set()
        for href_element in root.findall(xmlutils.make_clark("D:href")):
            temp_url_path = urlparse(href_element.text).path
            assert isinstance(temp_url_path, str)
            href_path = pathutils.sanitize_path(unquote(temp_url_path))
            if (href_path + "/").startswith(base_prefix + "/"):
                hreferences.add(href_path[len(base_prefix):])
            else:
                logger.warning("Skipping invalid path %r in REPORT request on "
                               "%r", href_path, path)
    elif root.tag == xmlutils.make_clark("D:sync-collection"):
        old_sync_token_element = root.find(
            xmlutils.make_clark("D:sync-token"))
        old_sync_token = ""
        if old_sync_token_element is not None and old_sync_token_element.text:
            old_sync_token = old_sync_token_element.text.strip()
        logger.debug("Client provided sync token: %r", old_sync_token)
        try:
            sync_token, names = collection.sync(old_sync_token)
        except ValueError as e:
            # Invalid sync token
            logger.warning("Client provided invalid sync token %r: %s",
                           old_sync_token, e, exc_info=True)
            # client.CONFLICT doesn't work with some clients (e.g. InfCloud)
            return (client.FORBIDDEN,
                    xmlutils.webdav_error("D:valid-sync-token"))
        hreferences = (pathutils.unstrip_path(
            posixpath.join(collection.path, n)) for n in names)
        # Append current sync token to response
        sync_token_element = ET.Element(xmlutils.make_clark("D:sync-token"))
        sync_token_element.text = sync_token
        multistatus.append(sync_token_element)
    else:
        hreferences = (path,)
    filters = (
        root.findall(xmlutils.make_clark("C:filter")) +
        root.findall(xmlutils.make_clark("CR:filter")))

    # Retrieve everything required for finishing the request.
    retrieved_items = list(retrieve_items(
        base_prefix, path, collection, hreferences, filters, multistatus))
    collection_tag = collection.tag
    # !!! Don't access storage after this !!!
    unlock_storage_fn()

    while retrieved_items:
        # ``item.vobject_item`` might be accessed during filtering.
        # Don't keep reference to ``item``, because VObject requires a lot of
        # memory.
        item, filters_matched = retrieved_items.pop(0)
        if filters and not filters_matched:
            try:
                if not all(test_filter(collection_tag, item, filter_)
                           for filter_ in filters):
                    continue
            except ValueError as e:
                raise ValueError("Failed to filter item %r from %r: %s" %
                                 (item.href, collection.path, e)) from e
            except Exception as e:
                raise RuntimeError("Failed to filter item %r from %r: %s" %
                                   (item.href, collection.path, e)) from e

        found_props = []
        not_found_props = []

        for prop in props:
            element = ET.Element(prop.tag)
            if prop.tag == xmlutils.make_clark("D:getetag"):
                element.text = item.etag
                found_props.append(element)
            elif prop.tag == xmlutils.make_clark("D:getcontenttype"):
                element.text = xmlutils.get_content_type(item, encoding)
                found_props.append(element)
            elif prop.tag in (
                    xmlutils.make_clark("C:calendar-data"),
                    xmlutils.make_clark("CR:address-data")):
                element.text = item.serialize()

                # Optional recurrence expansion (rfc4791-9.6.5).
                expand = prop.find(xmlutils.make_clark("C:expand"))
                if expand is not None and item.component_name == 'VEVENT':
                    start = expand.get('start')
                    end = expand.get('end')

                    if (start is None) or (end is None):
                        return client.FORBIDDEN, \
                            xmlutils.webdav_error("C:expand")

                    start = datetime.datetime.strptime(
                        start, '%Y%m%dT%H%M%SZ'
                    ).replace(tzinfo=datetime.timezone.utc)
                    end = datetime.datetime.strptime(
                        end, '%Y%m%dT%H%M%SZ'
                    ).replace(tzinfo=datetime.timezone.utc)

                    expanded_element = _expand(
                        element, copy.copy(item), start, end)
                    found_props.append(expanded_element)
                else:
                    found_props.append(element)
            else:
                not_found_props.append(element)

        assert item.href
        uri = pathutils.unstrip_path(
            posixpath.join(collection.path, item.href))
        multistatus.append(xml_item_response(
            base_prefix, uri, found_props=found_props,
            not_found_props=not_found_props, found_item=True))

    return client.MULTI_STATUS, multistatus
|
||||
|
||||
|
||||
def _expand(
        element: ET.Element,
        item: radicale_item.Item,
        start: datetime.datetime,
        end: datetime.datetime,
) -> ET.Element:
    """Expand a recurring VEVENT into individual instances between ``start``
    and ``end`` (inclusive) and store the serialized result in
    ``element.text``.

    ``element`` the XML element (e.g. C:calendar-data) to fill.
    ``item`` the collection item holding the vobject component.
    ``start``/``end`` UTC datetimes bounding the expansion window.

    Returns ``element`` with its text set to the expanded iCalendar data.
    """
    vevent_component: vobject.base.Component = copy.copy(item.vobject_item)

    # Split the vevents included in the component into one that contains the
    # recurrence information and others that contain a recurrence id to
    # override instances.
    vevent_recurrence, vevents_overridden = _split_overridden_vevents(vevent_component)

    dt_format = '%Y%m%dT%H%M%SZ'
    all_day_event = False

    if type(vevent_recurrence.dtstart.value) is datetime.date:
        # If an event comes to us with a dtstart specified as a date
        # then in the response we return the date, not datetime
        dt_format = '%Y%m%d'
        all_day_event = True
        # In case of dates, we need to remove timezone information since
        # rruleset.between computes with datetimes without timezone information
        start = start.replace(tzinfo=None)
        end = end.replace(tzinfo=None)

    # Normalize every overridden instance to the chosen datetime format.
    for vevent in vevents_overridden:
        _strip_single_event(vevent, dt_format)

    # Event length; used to synthesize DTEND for generated instances.
    duration = None
    if hasattr(vevent_recurrence, "dtend"):
        duration = vevent_recurrence.dtend.value - vevent_recurrence.dtstart.value

    rruleset = None
    if hasattr(vevent_recurrence, 'rrule'):
        rruleset = vevent_recurrence.getrruleset()

    if rruleset:
        # This function uses datetimes internally without timezone info for dates
        recurrences = rruleset.between(start, end, inc=True)

        _strip_component(vevent_component)
        _strip_single_event(vevent_recurrence, dt_format)

        # The first generated instance replaces the master VEVENT; further
        # instances are added as siblings.
        is_component_filled: bool = False
        # Cursor into the (sorted) overridden instances; advanced by
        # _find_overridden so each override is consumed at most once.
        i_overridden = 0

        for recurrence_dt in recurrences:
            recurrence_utc = recurrence_dt.astimezone(datetime.timezone.utc)
            i_overridden, vevent = _find_overridden(i_overridden, vevents_overridden, recurrence_utc, dt_format)

            if not vevent:
                # We did not find an overridden instance, so create a new one
                vevent = copy.deepcopy(vevent_recurrence)

                # For all day events, the system timezone may influence the
                # results, so use recurrence_dt
                recurrence_id = recurrence_dt if all_day_event else recurrence_utc
                vevent.recurrence_id = ContentLine(
                    name='RECURRENCE-ID',
                    value=recurrence_id, params={}
                )
                _convert_to_utc(vevent, 'recurrence_id', dt_format)
                vevent.dtstart = ContentLine(
                    name='DTSTART',
                    value=recurrence_id.strftime(dt_format), params={}
                )
                if duration:
                    vevent.dtend = ContentLine(
                        name='DTEND',
                        value=(recurrence_id + duration).strftime(dt_format), params={}
                    )

            if not is_component_filled:
                vevent_component.vevent = vevent
                is_component_filled = True
            else:
                vevent_component.add(vevent)

    element.text = vevent_component.serialize()

    return element
|
||||
|
||||
|
||||
def _convert_timezone(vevent: vobject.icalendar.RecurringComponent,
                      name_prop: str,
                      name_content_line: str):
    """Rewrite the property ``name_prop`` of ``vevent`` (if present) as a
    ContentLine named ``name_content_line`` holding a UTC datetime."""
    prop = getattr(vevent, name_prop, None)
    if not prop:
        return
    value = prop.value
    if type(value) is datetime.date:
        # A plain date becomes a UTC datetime at midnight of that day.
        date_time = datetime.datetime.fromordinal(
            value.toordinal()
        ).replace(tzinfo=datetime.timezone.utc)
    else:
        date_time = value.astimezone(datetime.timezone.utc)

    setattr(vevent, name_prop, ContentLine(name=name_content_line, value=date_time, params=[]))
|
||||
|
||||
|
||||
def _convert_to_utc(vevent: vobject.icalendar.RecurringComponent,
                    name_prop: str,
                    dt_format: str):
    """Render the datetime value of ``name_prop`` (if present) as a string
    formatted with ``dt_format``, keeping the original property name."""
    prop = getattr(vevent, name_prop, None)
    if not prop:
        return
    formatted = prop.value.strftime(dt_format)
    setattr(vevent, name_prop, ContentLine(name=prop.name, value=formatted, params=[]))
|
||||
|
||||
|
||||
def _strip_single_event(vevent: vobject.icalendar.RecurringComponent, dt_format: str) -> None:
    """Normalize a single VEVENT for expansion output.

    Converts DTSTART/DTEND/RECURRENCE-ID to UTC content lines, renders them
    as strings in ``dt_format`` and drops recurrence-defining properties
    (the expanded instances replace them).
    """
    _convert_timezone(vevent, 'dtstart', 'DTSTART')
    _convert_timezone(vevent, 'dtend', 'DTEND')
    _convert_timezone(vevent, 'recurrence_id', 'RECURRENCE-ID')

    # There is some strange behaviour during serialization of native
    # datetimes, so convert to formatted strings manually.
    _convert_to_utc(vevent, 'dtstart', dt_format)
    _convert_to_utc(vevent, 'dtend', dt_format)
    _convert_to_utc(vevent, 'recurrence_id', dt_format)

    # Delete each property independently.  The previous code wrapped all
    # four delattr calls in one try-block, so the first missing attribute
    # aborted the removal of the remaining ones (e.g. an EXDATE survived
    # whenever no RRULE was present).
    for attr in ('rrule', 'exdate', 'exrule', 'rdate'):
        try:
            delattr(vevent, attr)
        except AttributeError:
            pass
|
||||
|
||||
|
||||
def _strip_component(vevent: vobject.base.Component) -> None:
    """Remove all VTIMEZONE sub-components from ``vevent`` in place.

    Collects the components first so removal does not disturb iteration.
    """
    timezones = [component for component in vevent.components()
                 if component.name == 'VTIMEZONE']
    for timezone in timezones:
        vevent.remove(timezone)
|
||||
|
||||
|
||||
def _split_overridden_vevents(
        component: vobject.base.Component,
) -> Tuple[
    vobject.icalendar.RecurringComponent,
    List[vobject.icalendar.RecurringComponent]
]:
    """Partition the VEVENTs of ``component``.

    Returns a tuple ``(master, overrides)`` where ``master`` is the single
    VEVENT carrying the recurrence definition (no RECURRENCE-ID) and
    ``overrides`` are the RECURRENCE-ID instances, sorted by their
    recurrence-id value.

    Raises ``ValueError`` when there is more than one master VEVENT or none
    at all.
    """
    vevent_recurrence = None
    vevents_overridden = []

    for vevent in component.vevent_list:
        if hasattr(vevent, 'recurrence_id'):
            vevents_overridden.append(vevent)
        elif vevent_recurrence is None:
            vevent_recurrence = vevent
        else:
            raise ValueError(
                f"component with UID {vevent.uid} "
                f"has more than one vevent with recurrence information"
            )

    if vevent_recurrence is None:
        raise ValueError(
            f"component with UID {vevent.uid} "
            f"does not have a vevent without a recurrence_id"
        )

    vevents_overridden.sort(key=lambda vevent: vevent.recurrence_id.value)
    return (vevent_recurrence, vevents_overridden)
|
||||
|
||||
|
||||
def _find_overridden(
        start: int,
        vevents: List[vobject.icalendar.RecurringComponent],
        dt: datetime.datetime,
        dt_format: str
) -> Tuple[int, Optional[vobject.icalendar.RecurringComponent]]:
    """Scan ``vevents[start:]`` for an overridden instance whose
    RECURRENCE-ID (parsed with ``dt_format``, interpreted as UTC) equals
    ``dt``.

    Returns ``(next_start, match)``: on success ``next_start`` is the index
    just past the match so subsequent searches skip consumed overrides; on
    failure ``(start, None)``.
    """
    for index, candidate in enumerate(vevents[start:], start):
        recurrence_id = datetime.datetime.strptime(
            candidate.recurrence_id.value, dt_format
        ).replace(tzinfo=datetime.timezone.utc)
        if recurrence_id == dt:
            return (index + 1, candidate)
    return (start, None)
|
||||
|
||||
|
||||
def xml_item_response(base_prefix: str, href: str,
                      found_props: Sequence[ET.Element] = (),
                      not_found_props: Sequence[ET.Element] = (),
                      found_item: bool = True) -> ET.Element:
    """Build a WebDAV ``D:response`` element for a single ``href``.

    ``found_props`` are reported with status 200, ``not_found_props`` with
    status 404.  When ``found_item`` is false the response is a bare 404
    status without any propstat.
    """
    response = ET.Element(xmlutils.make_clark("D:response"))

    href_element = ET.SubElement(response, xmlutils.make_clark("D:href"))
    href_element.text = xmlutils.make_href(base_prefix, href)

    if not found_item:
        # Missing item: answer with a plain 404 status.
        status = ET.SubElement(response, xmlutils.make_clark("D:status"))
        status.text = xmlutils.make_response(404)
        return response

    for code, props in ((200, found_props), (404, not_found_props)):
        if not props:
            continue
        propstat = ET.SubElement(response, xmlutils.make_clark("D:propstat"))
        prop_element = ET.SubElement(propstat, xmlutils.make_clark("D:prop"))
        prop_element.extend(props)
        status = ET.SubElement(propstat, xmlutils.make_clark("D:status"))
        status.text = xmlutils.make_response(code)

    return response
|
||||
|
||||
|
||||
def retrieve_items(
        base_prefix: str, path: str, collection: storage.BaseCollection,
        hreferences: Iterable[str], filters: Sequence[ET.Element],
        multistatus: ET.Element) -> Iterator[Tuple[radicale_item.Item, bool]]:
    """Retrieves all items that are referenced in ``hreferences`` from
    ``collection`` and adds 404 responses for missing and invalid items
    to ``multistatus``.

    Yields ``(item, filters_matched)`` tuples; items fetched by direct
    reference are yielded with ``False`` (filters not applied yet), while
    items from ``collection.get_filtered`` carry the flag produced by the
    storage backend — NOTE(review): presumably True when the backend already
    applied ``filters``; confirm against callers.
    """
    # Set by get_names() while it is consumed by collection.get_multi below.
    collection_requested = False

    def get_names() -> Iterator[str]:
        """Extracts all names from references in ``hreferences`` and adds
        404 responses for invalid references to ``multistatus``.
        If the whole collections is referenced ``collection_requested``
        gets set to ``True``."""
        nonlocal collection_requested
        for hreference in hreferences:
            try:
                name = pathutils.name_from_path(hreference, collection)
            except ValueError as e:
                logger.warning("Skipping invalid path %r in REPORT request on "
                               "%r: %s", hreference, path, e)
                response = xml_item_response(base_prefix, hreference,
                                             found_item=False)
                multistatus.append(response)
                continue
            if name:
                # Reference is an item
                yield name
            else:
                # Reference is a collection
                collection_requested = True

    for name, item in collection.get_multi(get_names()):
        if not item:
            # Referenced item does not exist: report 404 for its URI.
            uri = pathutils.unstrip_path(posixpath.join(collection.path, name))
            response = xml_item_response(base_prefix, uri, found_item=False)
            multistatus.append(response)
        else:
            yield item, False
    if collection_requested:
        # The whole collection was referenced: defer to backend filtering.
        yield from collection.get_filtered(filters)
|
||||
|
||||
|
||||
def test_filter(collection_tag: str, item: radicale_item.Item,
                filter_: ET.Element) -> bool:
    """Match an item against a filter.

    ``collection_tag`` is ``"VCALENDAR"`` or ``"VADDRESSBOOK"``;
    ``filter_`` is the C:filter / CR:filter XML element.

    Raises ``ValueError`` for malformed or unsupported filters.
    """
    # NOTE(review): "C:%s" % filter_ formats the Element object's repr, not
    # its tag, so this inequality can never be False and the branch is in
    # practice gated only by collection_tag — confirm whether "C:filter"
    # was intended here (same applies to the CR branch below).
    if (collection_tag == "VCALENDAR" and
            filter_.tag != xmlutils.make_clark("C:%s" % filter_)):
        if len(filter_) == 0:
            # An empty filter matches everything.
            return True
        if len(filter_) > 1:
            raise ValueError("Filter with %d children" % len(filter_))
        if filter_[0].tag != xmlutils.make_clark("C:comp-filter"):
            raise ValueError("Unexpected %r in filter" % filter_[0].tag)
        return radicale_filter.comp_match(item, filter_[0])
    if (collection_tag == "VADDRESSBOOK" and
            filter_.tag != xmlutils.make_clark("CR:%s" % filter_)):
        for child in filter_:
            if child.tag != xmlutils.make_clark("CR:prop-filter"):
                raise ValueError("Unexpected %r in filter" % child.tag)
        # RFC 6352: "test" attribute selects any-of vs all-of semantics.
        test = filter_.get("test", "anyof")
        if test == "anyof":
            return any(radicale_filter.prop_match(item.vobject_item, f, "CR")
                       for f in filter_)
        if test == "allof":
            return all(radicale_filter.prop_match(item.vobject_item, f, "CR")
                       for f in filter_)
        raise ValueError("Unsupported filter test: %r" % test)
    raise ValueError("Unsupported filter %r for %r" %
                     (filter_.tag, collection_tag))
|
||||
|
||||
|
||||
class ApplicationPartReport(ApplicationBase):
    """Mixin implementing the WebDAV REPORT method."""

    def do_REPORT(self, environ: types.WSGIEnviron, base_prefix: str,
                  path: str, user: str) -> types.WSGIResponse:
        """Manage REPORT request."""
        access = Access(self._rights, user, path)
        if not access.check("r"):
            return httputils.NOT_ALLOWED
        try:
            xml_content = self._read_xml_request_body(environ)
        except RuntimeError as e:
            # Malformed XML body.
            logger.warning("Bad REPORT request on %r: %s", path, e,
                           exc_info=True)
            return httputils.BAD_REQUEST
        except socket.timeout:
            logger.debug("Client timed out", exc_info=True)
            return httputils.REQUEST_TIMEOUT
        # The storage lock is kept on an ExitStack so the report functions
        # can release it early via the lock_stack.close callback.
        with contextlib.ExitStack() as lock_stack:
            lock_stack.enter_context(self._storage.acquire_lock("r", user))
            item = next(iter(self._storage.discover(path)), None)
            if not item:
                return httputils.NOT_FOUND
            # Re-check rights against the discovered item itself.
            if not access.check("r", item):
                return httputils.NOT_ALLOWED
            if isinstance(item, storage.BaseCollection):
                collection = item
            else:
                assert item.collection is not None
                collection = item.collection

            if xml_content is not None and \
                    xml_content.tag == xmlutils.make_clark("C:free-busy-query"):
                # CalDAV free-busy report: answers with text/calendar.
                max_occurrence = self.configuration.get("reporting", "max_freebusy_occurrence")
                try:
                    status, body = free_busy_report(
                        base_prefix, path, xml_content, collection, self._encoding,
                        lock_stack.close, max_occurrence)
                except ValueError as e:
                    logger.warning(
                        "Bad REPORT request on %r: %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
                headers = {"Content-Type": "text/calendar; charset=%s" % self._encoding}
                return status, headers, str(body)
            else:
                # All other reports produce a multistatus XML answer.
                try:
                    status, xml_answer = xml_report(
                        base_prefix, path, xml_content, collection, self._encoding,
                        lock_stack.close)
                except ValueError as e:
                    logger.warning(
                        "Bad REPORT request on %r: %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
                headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
                return status, headers, self._xml_response(xml_answer)
|
240
radicale/auth.py
240
radicale/auth.py
|
@ -1,240 +0,0 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Authentication management.
|
||||
|
||||
Default is htpasswd authentication.
|
||||
|
||||
Apache's htpasswd command (httpd.apache.org/docs/programs/htpasswd.html)
|
||||
manages a file for storing user credentials. It can encrypt passwords using
|
||||
different methods, e.g. BCRYPT, MD5-APR1 (a version of MD5 modified for
|
||||
Apache), SHA1, or by using the system's CRYPT routine. The CRYPT and SHA1
|
||||
encryption methods implemented by htpasswd are considered as insecure. MD5-APR1
|
||||
provides medium security as of 2015. Only BCRYPT can be considered secure by
|
||||
current standards.
|
||||
|
||||
MD5-APR1-encrypted credentials can be written by all versions of htpasswd (it
|
||||
is the default, in fact), whereas BCRYPT requires htpasswd 2.4.x or newer.
|
||||
|
||||
The `is_authenticated(user, password)` function provided by this module
|
||||
verifies the user-given credentials by parsing the htpasswd credential file
|
||||
pointed to by the ``htpasswd_filename`` configuration value while assuming
|
||||
the password encryption method specified via the ``htpasswd_encryption``
|
||||
configuration value.
|
||||
|
||||
The following htpasswd password encryption methods are supported by Radicale
|
||||
out-of-the-box:
|
||||
|
||||
- plain-text (created by htpasswd -p...) -- INSECURE
|
||||
- CRYPT (created by htpasswd -d...) -- INSECURE
|
||||
- SHA1 (created by htpasswd -s...) -- INSECURE
|
||||
|
||||
When passlib (https://pypi.python.org/pypi/passlib) is importable, the
|
||||
following significantly more secure schemes are parsable by Radicale:
|
||||
|
||||
- MD5-APR1 (htpasswd -m...) -- htpasswd's default method
|
||||
- BCRYPT (htpasswd -B...) -- Requires htpasswd 2.4.x
|
||||
|
||||
"""
|
||||
|
||||
import base64
|
||||
import functools
|
||||
import hashlib
|
||||
import hmac
|
||||
import os
|
||||
from importlib import import_module
|
||||
|
||||
INTERNAL_TYPES = ("None", "none", "remote_user", "http_x_remote_user",
|
||||
"htpasswd")
|
||||
|
||||
|
||||
def load(configuration, logger):
    """Load the authentication manager chosen in configuration."""
    auth_type = configuration.get("auth", "type")
    # Built-in backends; "None" is kept for backwards compatibility
    # (DEPRECATED: use "none").
    builtin_types = {
        "None": NoneAuth,
        "none": NoneAuth,
        "remote_user": RemoteUserAuth,
        "http_x_remote_user": HttpXRemoteUserAuth,
        "htpasswd": Auth,
    }
    class_ = builtin_types.get(auth_type)
    if class_ is None:
        # Anything else is treated as a module path to a plugin backend.
        try:
            class_ = import_module(auth_type).Auth
        except Exception as e:
            raise RuntimeError("Failed to load authentication module %r: %s" %
                               (auth_type, e)) from e
    logger.info("Authentication type is %r", auth_type)
    return class_(configuration, logger)
|
||||
|
||||
|
||||
class BaseAuth:
    """Base class for authentication backends."""

    def __init__(self, configuration, logger):
        # Keep references for subclasses; configuration must not change
        # during the lifetime of this object.
        self.configuration = configuration
        self.logger = logger

    def get_external_login(self, environ):
        """Optionally provide the login and password externally.

        Returns a tuple (login, password) or ().

        """
        return ()

    def is_authenticated(self, user, password):
        """Validate credentials.

        Subclasses check the user-given password against the stored
        credential (e.g. an htpasswd file entry) using the method
        configured for Radicale.

        """
        raise NotImplementedError

    def map_login_to_user(self, login):
        """Map login to internal username."""
        return login
|
||||
|
||||
|
||||
class NoneAuth(BaseAuth):
    """Authentication backend that accepts every login attempt."""

    def is_authenticated(self, user, password):
        # Unconditionally accept any user/password combination.
        return True
|
||||
|
||||
|
||||
class Auth(BaseAuth):
    """htpasswd authentication backend.

    Verifies credentials against the file named by the
    ``auth.htpasswd_filename`` configuration value, using the hash scheme
    named by ``auth.htpasswd_encryption``.
    """

    def __init__(self, configuration, logger):
        super().__init__(configuration, logger)
        self.filename = os.path.expanduser(
            configuration.get("auth", "htpasswd_filename"))
        self.encryption = configuration.get("auth", "htpasswd_encryption")

        # Bind self.verify once, based on the configured encryption method;
        # methods needing passlib/crypt fail fast here if unavailable.
        if self.encryption == "ssha":
            self.verify = self._ssha
        elif self.encryption == "sha1":
            self.verify = self._sha1
        elif self.encryption == "plain":
            self.verify = self._plain
        elif self.encryption == "md5":
            try:
                from passlib.hash import apr_md5_crypt
            except ImportError as e:
                raise RuntimeError(
                    "The htpasswd encryption method 'md5' requires "
                    "the passlib module.") from e
            self.verify = functools.partial(self._md5apr1, apr_md5_crypt)
        elif self.encryption == "bcrypt":
            try:
                from passlib.hash import bcrypt
            except ImportError as e:
                raise RuntimeError(
                    "The htpasswd encryption method 'bcrypt' requires "
                    "the passlib module with bcrypt support.") from e
            # A call to `encrypt` raises passlib.exc.MissingBackendError with a
            # good error message if bcrypt backend is not available. Trigger
            # this here.
            bcrypt.encrypt("test-bcrypt-backend")
            self.verify = functools.partial(self._bcrypt, bcrypt)
        elif self.encryption == "crypt":
            try:
                import crypt
            except ImportError as e:
                raise RuntimeError(
                    "The htpasswd encryption method 'crypt' requires "
                    "the crypt() system support.") from e
            self.verify = functools.partial(self._crypt, crypt)
        else:
            raise RuntimeError(
                "The htpasswd encryption method %r is not "
                "supported." % self.encryption)

    def _plain(self, hash_value, password):
        """Check if ``hash_value`` and ``password`` match, plain method."""
        # compare_digest keeps the comparison constant-time.
        return hmac.compare_digest(hash_value, password)

    def _crypt(self, crypt, hash_value, password):
        """Check if ``hash_value`` and ``password`` match, crypt method."""
        return hmac.compare_digest(crypt.crypt(password, hash_value),
                                   hash_value)

    def _sha1(self, hash_value, password):
        """Check if ``hash_value`` and ``password`` match, sha1 method."""
        # Stored form is "{SHA}" + base64(sha1(password)).
        hash_value = hash_value.replace("{SHA}", "").encode("ascii")
        password = password.encode(self.configuration.get("encoding", "stock"))
        sha1 = hashlib.sha1()
        sha1.update(password)
        return hmac.compare_digest(sha1.digest(), base64.b64decode(hash_value))

    def _ssha(self, hash_value, password):
        """Check if ``hash_value`` and ``password`` match, salted sha1 method.

        This method is not directly supported by htpasswd, but it can be
        written with e.g. openssl, and nginx can parse it.

        """
        # Stored form is "{SSHA}" + base64(sha1(password + salt) + salt).
        hash_value = base64.b64decode(hash_value.replace(
            "{SSHA}", "").encode("ascii"))
        password = password.encode(self.configuration.get("encoding", "stock"))
        # SHA-1 digests are 20 bytes; everything after that is the salt.
        salt_value = hash_value[20:]
        hash_value = hash_value[:20]
        sha1 = hashlib.sha1()
        sha1.update(password)
        sha1.update(salt_value)
        return hmac.compare_digest(sha1.digest(), hash_value)

    def _bcrypt(self, bcrypt, hash_value, password):
        # Delegate to passlib's bcrypt verifier.
        return bcrypt.verify(password, hash_value)

    def _md5apr1(self, md5_apr1, hash_value, password):
        # Delegate to passlib's Apache MD5-APR1 verifier.
        return md5_apr1.verify(password, hash_value)

    def is_authenticated(self, user, password):
        """Check ``user``/``password`` against the htpasswd file.

        Returns True on a match; raises ``RuntimeError`` when the file
        cannot be read or contains a malformed line.
        """
        # The content of the file is not cached because reading is generally a
        # very cheap operation, and it's useful to get live updates of the
        # htpasswd file.
        try:
            with open(self.filename) as fd:
                for line in fd:
                    line = line.strip()
                    if line:
                        try:
                            login, hash_value = line.split(":")
                            # Always compare both login and password to avoid
                            # timing attacks, see #591.
                            login_ok = hmac.compare_digest(login, user)
                            password_ok = self.verify(hash_value, password)
                            # Bitwise & (not `and`) so both checks always
                            # execute, avoiding a short-circuit timing leak.
                            if login_ok & password_ok:
                                return True
                        except ValueError as e:
                            raise RuntimeError("Invalid htpasswd file %r: %s" %
                                               (self.filename, e)) from e
        except OSError as e:
            raise RuntimeError("Failed to load htpasswd file %r: %s" %
                               (self.filename, e)) from e
        return False
|
||||
|
||||
|
||||
class RemoteUserAuth(NoneAuth):
    """Take the username from the WSGI ``REMOTE_USER`` variable.

    Inherits ``is_authenticated`` from ``NoneAuth`` (always true): the WSGI
    server is assumed to have performed the authentication already.
    """

    def get_external_login(self, environ):
        # The password is irrelevant here and left empty.
        return environ.get("REMOTE_USER", ""), ""
|
||||
|
||||
|
||||
class HttpXRemoteUserAuth(NoneAuth):
    """Take the username from the ``X-Remote-User`` HTTP header.

    Inherits ``is_authenticated`` from ``NoneAuth`` (always true): a reverse
    proxy is assumed to have performed the authentication and set the header.
    """

    def get_external_login(self, environ):
        # The password is irrelevant here and left empty.
        return environ.get("HTTP_X_REMOTE_USER", ""), ""
|
314
radicale/auth/__init__.py
Normal file
314
radicale/auth/__init__.py
Normal file
|
@ -0,0 +1,314 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Authentication module.
|
||||
|
||||
Authentication is based on usernames and passwords. If something more
|
||||
advanced is needed an external WSGI server or reverse proxy can be used
|
||||
(see ``remote_user`` or ``http_x_remote_user`` backend).
|
||||
|
||||
Take a look at the class ``BaseAuth`` if you want to implement your own.
|
||||
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from typing import List, Sequence, Set, Tuple, Union, final
|
||||
|
||||
from radicale import config, types, utils
|
||||
from radicale.log import logger
|
||||
|
||||
INTERNAL_TYPES: Sequence[str] = ("none", "remote_user", "http_x_remote_user",
|
||||
"denyall",
|
||||
"htpasswd",
|
||||
"ldap",
|
||||
"imap",
|
||||
"oauth2",
|
||||
"pam",
|
||||
"dovecot")
|
||||
|
||||
CACHE_LOGIN_TYPES: Sequence[str] = (
|
||||
"dovecot",
|
||||
"ldap",
|
||||
"htpasswd",
|
||||
"imap",
|
||||
"oauth2",
|
||||
"pam",
|
||||
)
|
||||
|
||||
INSECURE_IF_NO_LOOPBACK_TYPES: Sequence[str] = (
|
||||
"remote_user",
|
||||
"http_x_remote_user",
|
||||
)
|
||||
|
||||
AUTH_SOCKET_FAMILY: Sequence[str] = ("AF_UNIX", "AF_INET", "AF_INET6")
|
||||
|
||||
|
||||
def load(configuration: "config.Configuration") -> "BaseAuth":
    """Load the authentication module chosen in configuration.

    Logs a warning for insecure or unusual choices:
    - ``none``: no authentication at all.
    - ``denyall``: every login is rejected.
    - ``remote_user``/``http_x_remote_user``: trusting these variables is
      only safe behind a reverse proxy, so warn when the builtin server
      listens on a non-loopback address and no external SGI environment is
      detected.
    """
    _type = configuration.get("auth", "type")
    if _type == "none":
        logger.warning("No user authentication is selected: '[auth] type=none' (INSECURE)")
    elif _type == "denyall":
        logger.warning("All user authentication is blocked by: '[auth] type=denyall'")
    elif _type in INSECURE_IF_NO_LOOPBACK_TYPES:
        sgi = os.environ.get('SERVER_GATEWAY_INTERFACE') or None
        if not sgi:
            hosts: List[Tuple[str, int]] = configuration.get("server", "hosts")
            # Collect only the non-loopback listen addresses; the original
            # code also accumulated loopback addresses into an unused list.
            address = [utils.format_address(address_port)
                       for address_port in hosts
                       if address_port[0] not in
                       ["localhost", "localhost6", "127.0.0.1", "::1"]]
            if address:
                logger.warning("User authentication '[auth] type=%s' is selected but server is not only listen on loopback address (potentially INSECURE): %s", _type, " ".join(address))
    return utils.load_plugin(INTERNAL_TYPES, "auth", "Auth", BaseAuth,
                             configuration)
|
||||
|
||||
|
||||
class BaseAuth:
|
||||
|
||||
_ldap_groups: Set[str] = set([])
|
||||
_lc_username: bool
|
||||
_uc_username: bool
|
||||
_strip_domain: bool
|
||||
_auth_delay: float
|
||||
_failed_auth_delay: float
|
||||
_type: str
|
||||
_cache_logins: bool
|
||||
_cache_successful: dict # login -> (digest, time_ns)
|
||||
_cache_successful_logins_expiry: int
|
||||
_cache_failed: dict # digest_failed -> (time_ns, login)
|
||||
_cache_failed_logins_expiry: int
|
||||
_cache_failed_logins_salt_ns: int # persistent over runtime
|
||||
_lock: threading.Lock
|
||||
|
||||
def __init__(self, configuration: "config.Configuration") -> None:
|
||||
"""Initialize BaseAuth.
|
||||
|
||||
``configuration`` see ``radicale.config`` module.
|
||||
The ``configuration`` must not change during the lifetime of
|
||||
this object, it is kept as an internal reference.
|
||||
|
||||
"""
|
||||
self.configuration = configuration
|
||||
self._lc_username = configuration.get("auth", "lc_username")
|
||||
self._uc_username = configuration.get("auth", "uc_username")
|
||||
self._strip_domain = configuration.get("auth", "strip_domain")
|
||||
logger.info("auth.strip_domain: %s", self._strip_domain)
|
||||
logger.info("auth.lc_username: %s", self._lc_username)
|
||||
logger.info("auth.uc_username: %s", self._uc_username)
|
||||
if self._lc_username is True and self._uc_username is True:
|
||||
raise RuntimeError("auth.lc_username and auth.uc_username cannot be enabled together")
|
||||
self._auth_delay = configuration.get("auth", "delay")
|
||||
logger.info("auth.delay: %f", self._auth_delay)
|
||||
self._failed_auth_delay = 0
|
||||
self._lock = threading.Lock()
|
||||
# cache_successful_logins
|
||||
self._cache_logins = configuration.get("auth", "cache_logins")
|
||||
self._type = configuration.get("auth", "type")
|
||||
if (self._type in CACHE_LOGIN_TYPES) or (self._cache_logins is False):
|
||||
logger.info("auth.cache_logins: %s", self._cache_logins)
|
||||
else:
|
||||
logger.info("auth.cache_logins: %s (but not required for type '%s' and disabled therefore)", self._cache_logins, self._type)
|
||||
self._cache_logins = False
|
||||
if self._cache_logins is True:
|
||||
self._cache_successful_logins_expiry = configuration.get("auth", "cache_successful_logins_expiry")
|
||||
if self._cache_successful_logins_expiry < 0:
|
||||
raise RuntimeError("self._cache_successful_logins_expiry cannot be < 0")
|
||||
self._cache_failed_logins_expiry = configuration.get("auth", "cache_failed_logins_expiry")
|
||||
if self._cache_failed_logins_expiry < 0:
|
||||
raise RuntimeError("self._cache_failed_logins_expiry cannot be < 0")
|
||||
logger.info("auth.cache_successful_logins_expiry: %s seconds", self._cache_successful_logins_expiry)
|
||||
logger.info("auth.cache_failed_logins_expiry: %s seconds", self._cache_failed_logins_expiry)
|
||||
# cache init
|
||||
self._cache_successful = dict()
|
||||
self._cache_failed = dict()
|
||||
self._cache_failed_logins_salt_ns = time.time_ns()
|
||||
|
||||
def _cache_digest(self, login: str, password: str, salt: str) -> str:
|
||||
h = hashlib.sha3_512()
|
||||
h.update(salt.encode())
|
||||
h.update(login.encode())
|
||||
h.update(password.encode())
|
||||
return str(h.digest())
|
||||
|
||||
def get_external_login(self, environ: types.WSGIEnviron) -> Union[
|
||||
Tuple[()], Tuple[str, str]]:
|
||||
"""Optionally provide the login and password externally.
|
||||
|
||||
``environ`` a dict with the WSGI environment
|
||||
|
||||
If ``()`` is returned, Radicale handles HTTP authentication.
|
||||
Otherwise, returns a tuple ``(login, password)``. For anonymous users
|
||||
``login`` must be ``""``.
|
||||
|
||||
"""
|
||||
return ()
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
"""Check credentials and map login to internal user
|
||||
|
||||
``login`` the login name
|
||||
|
||||
``password`` the password
|
||||
|
||||
Returns the username or ``""`` for invalid credentials.
|
||||
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
def _sleep_for_constant_exec_time(self, time_ns_begin: int):
|
||||
"""Sleep some time to reach a constant execution time for failed logins
|
||||
|
||||
Independent of time required by external backend or used digest methods
|
||||
|
||||
Increase final execution time in case initial limit exceeded
|
||||
|
||||
See also issue 591
|
||||
|
||||
"""
|
||||
time_delta = (time.time_ns() - time_ns_begin) / 1000 / 1000 / 1000
|
||||
with self._lock:
|
||||
# avoid that another thread is changing global value at the same time
|
||||
failed_auth_delay = self._failed_auth_delay
|
||||
failed_auth_delay_old = failed_auth_delay
|
||||
if time_delta > failed_auth_delay:
|
||||
# set new
|
||||
failed_auth_delay = time_delta
|
||||
# store globally
|
||||
self._failed_auth_delay = failed_auth_delay
|
||||
if (failed_auth_delay_old != failed_auth_delay):
|
||||
logger.debug("Failed login constant execution time need increase of failed_auth_delay: %.9f -> %.9f sec", failed_auth_delay_old, failed_auth_delay)
|
||||
# sleep == 0
|
||||
else:
|
||||
sleep = failed_auth_delay - time_delta
|
||||
logger.debug("Failed login constant exection time alignment, sleeping: %.9f sec", sleep)
|
||||
time.sleep(sleep)
|
||||
|
||||
@final
def login(self, login: str, password: str) -> Tuple[str, str]:
    """Authenticate ``login``/``password``, optionally using the cache.

    Returns a tuple ``(user, info)`` where ``user`` is the internal
    username or ``""`` for invalid credentials and ``info`` names the
    backend type (suffixed with " / cached" when the answer came from
    the login cache).  Failed logins are padded to a constant
    execution time via ``_sleep_for_constant_exec_time``.
    """
    time_ns_begin = time.time_ns()
    result_from_cache = False
    if self._lc_username:
        login = login.lower()
    if self._uc_username:
        login = login.upper()
    if self._strip_domain:
        login = login.split('@')[0]
    if self._cache_logins is True:
        # time_ns is also used as salt
        result = ""
        digest = ""
        time_ns = time.time_ns()
        # cleanup failed login cache to avoid out-of-memory
        cache_failed_entries = len(self._cache_failed)
        if cache_failed_entries > 0:
            logger.debug("Login failed cache investigation start (entries: %d)", cache_failed_entries)
            self._lock.acquire()
            cache_failed_cleanup = dict()
            for digest in self._cache_failed:
                (time_ns_cache, login_cache) = self._cache_failed[digest]
                age_failed = int((time_ns - time_ns_cache) / 1000 / 1000 / 1000)
                if age_failed > self._cache_failed_logins_expiry:
                    cache_failed_cleanup[digest] = (login_cache, age_failed)
            cache_failed_cleanup_entries = len(cache_failed_cleanup)
            logger.debug("Login failed cache cleanup start (entries: %d)", cache_failed_cleanup_entries)
            if cache_failed_cleanup_entries > 0:
                for digest in cache_failed_cleanup:
                    # BUGFIX: unpack into ``login_cache`` instead of
                    # clobbering the ``login`` currently being checked;
                    # the log line now also reports the correct entry.
                    (login_cache, age_failed) = cache_failed_cleanup[digest]
                    logger.debug("Login failed cache entry for user+password expired: '%s' (age: %d > %d sec)", login_cache, age_failed, self._cache_failed_logins_expiry)
                    del self._cache_failed[digest]
            self._lock.release()
            logger.debug("Login failed cache investigation finished")
        # check for cache failed login
        digest_failed = login + ":" + self._cache_digest(login, password, str(self._cache_failed_logins_salt_ns))
        if self._cache_failed.get(digest_failed):
            # login+password found in cache "failed" -> shortcut return
            # BUGFIX: look up ``digest_failed`` (the key just probed),
            # not the stale cleanup-loop variable ``digest``.
            (time_ns_cache, login_cache) = self._cache_failed[digest_failed]
            age_failed = int((time_ns - time_ns_cache) / 1000 / 1000 / 1000)
            logger.debug("Login failed cache entry for user+password found: '%s' (age: %d sec)", login_cache, age_failed)
            self._sleep_for_constant_exec_time(time_ns_begin)
            return ("", self._type + " / cached")
        if self._cache_successful.get(login):
            # login found in cache "successful"
            (digest_cache, time_ns_cache) = self._cache_successful[login]
            digest = self._cache_digest(login, password, str(time_ns_cache))
            if digest == digest_cache:
                age_success = int((time_ns - time_ns_cache) / 1000 / 1000 / 1000)
                if age_success > self._cache_successful_logins_expiry:
                    logger.debug("Login successful cache entry for user+password found but expired: '%s' (age: %d > %d sec)", login, age_success, self._cache_successful_logins_expiry)
                    # delete expired success from cache
                    del self._cache_successful[login]
                    digest = ""
                else:
                    logger.debug("Login successful cache entry for user+password found: '%s' (age: %d sec)", login, age_success)
                    result = login
                    result_from_cache = True
            else:
                logger.debug("Login successful cache entry for user+password not matching: '%s'", login)
        else:
            # login not found in cache, calculate always to avoid timing attacks
            digest = self._cache_digest(login, password, str(time_ns))
        if result == "":
            # verify login+password via configured backend
            logger.debug("Login verification for user+password via backend: '%s'", login)
            result = self._login(login, password)
            if result != "":
                logger.debug("Login successful for user+password via backend: '%s'", login)
                if digest == "":
                    # successful login, but expired, digest must be recalculated
                    digest = self._cache_digest(login, password, str(time_ns))
                # store successful login in cache
                self._lock.acquire()
                self._cache_successful[login] = (digest, time_ns)
                self._lock.release()
                logger.debug("Login successful cache for user set: '%s'", login)
                if self._cache_failed.get(digest_failed):
                    logger.debug("Login failed cache for user cleared: '%s'", login)
                    del self._cache_failed[digest_failed]
            else:
                logger.debug("Login failed for user+password via backend: '%s'", login)
                self._lock.acquire()
                self._cache_failed[digest_failed] = (time_ns, login)
                self._lock.release()
                logger.debug("Login failed cache for user set: '%s'", login)
        if result_from_cache is True:
            if result == "":
                self._sleep_for_constant_exec_time(time_ns_begin)
            return (result, self._type + " / cached")
        else:
            if result == "":
                self._sleep_for_constant_exec_time(time_ns_begin)
            return (result, self._type)
    else:
        # self._cache_logins is False
        result = self._login(login, password)
        if result == "":
            self._sleep_for_constant_exec_time(time_ns_begin)
        return (result, self._type)
20
radicale.py → radicale/auth/denyall.py
Executable file → Normal file
20
radicale.py → radicale/auth/denyall.py
Executable file → Normal file
|
@ -1,9 +1,5 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -19,12 +15,16 @@
|
|||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Radicale CalDAV Server.
|
||||
A dummy backend that denies any username and password.
|
||||
|
||||
Launch the server according to configuration and command-line options.
|
||||
Used as default for security reasons.
|
||||
|
||||
"""
|
||||
|
||||
import radicale.__main__
|
||||
from radicale import auth
|
||||
|
||||
radicale.__main__.run()
|
||||
|
||||
class Auth(auth.BaseAuth):
    """Deny-all authentication backend (the secure default)."""

    def _login(self, login: str, password: str) -> str:
        # Reject unconditionally: "" means invalid credentials.
        return ""
192
radicale/auth/dovecot.py
Normal file
192
radicale/auth/dovecot.py
Normal file
|
@ -0,0 +1,192 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2014 Giel van Schijndel
|
||||
# Copyright © 2019 (GalaxyMaster)
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import itertools
|
||||
import os
|
||||
import socket
|
||||
from contextlib import closing
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
    """Authentication backend using a Dovecot authentication socket.

    Talks the Dovecot Authentication Protocol v1.1 over a UNIX or
    TCP socket configured via ``dovecot_connection_type`` /
    ``dovecot_socket`` / ``dovecot_host`` / ``dovecot_port``.
    """

    def __init__(self, configuration):
        super().__init__(configuration)
        self.timeout = 5  # seconds for each socket operation
        self.request_id_gen = itertools.count(1)

        config_family = configuration.get("auth", "dovecot_connection_type")
        if config_family == "AF_UNIX":
            self.family = socket.AF_UNIX
            self.address = configuration.get("auth", "dovecot_socket")
            logger.info("auth dovecot socket: %r", self.address)
            return

        self.address = configuration.get("auth", "dovecot_host"), configuration.get("auth", "dovecot_port")
        logger.warning("auth dovecot address: %r (INSECURE, credentials are transmitted in clear text)", self.address)
        if config_family == "AF_INET":
            self.family = socket.AF_INET
        else:
            self.family = socket.AF_INET6

    def _login(self, login, password):
        """Validate credentials.

        Check if the ``login``/``password`` pair is valid according to Dovecot.

        This implementation communicates with a Dovecot server through the
        Dovecot Authentication Protocol v1.1.

        https://dovecot.org/doc/auth-protocol.txt

        """

        logger.info("Authentication request (dovecot): '{}'".format(login))
        if not login or not password:
            return ""

        with closing(socket.socket(
                self.family,
                socket.SOCK_STREAM)
        ) as sock:
            try:
                sock.settimeout(self.timeout)
                sock.connect(self.address)

                buf = bytes()
                supported_mechs = []
                done = False
                seen_part = [0, 0, 0]  # counts of VERSION / MECH / DONE lines
                # Upon the initial connection we only care about the
                # handshake, which is usually just around 100 bytes long,
                # e.g.
                #
                #   VERSION 1       2
                #   MECH    PLAIN   plaintext
                #   SPID    22901
                #   CUID    1
                #   COOKIE  2dbe4116a30fb4b8a8719f4448420af7
                #   DONE
                #
                # Hence, we try to read just once with a buffer big
                # enough to hold all of it.
                buf = sock.recv(1024)
                while b'\n' in buf and not done:
                    line, buf = buf.split(b'\n', 1)
                    parts = line.split(b'\t')
                    first, parts = parts[0], parts[1:]

                    if first == b'VERSION':
                        if seen_part[0]:
                            logger.warning(
                                "Server presented multiple VERSION "
                                "tokens, ignoring"
                            )
                            continue
                        version = parts
                        logger.debug("Dovecot server version: '{}'".format(
                            (b'.'.join(version)).decode()
                        ))
                        if int(version[0]) != 1:
                            logger.fatal(
                                "Only Dovecot 1.x versions are supported!"
                            )
                            return ""
                        seen_part[0] += 1
                    elif first == b'MECH':
                        supported_mechs.append(parts[0])
                        seen_part[1] += 1
                    elif first == b'DONE':
                        seen_part[2] += 1
                        if not (seen_part[0] and seen_part[1]):
                            logger.fatal(
                                "An unexpected end of the server "
                                "handshake received!"
                            )
                            return ""
                        done = True

                if not done:
                    logger.fatal("Encountered a broken server handshake!")
                    return ""

                logger.debug(
                    "Supported auth methods: '{}'"
                    .format((b"', '".join(supported_mechs)).decode())
                )
                if b'PLAIN' not in supported_mechs:
                    logger.info(
                        "Authentication method 'PLAIN' is not supported, "
                        "but is required!"
                    )
                    return ""

                # Handshake
                logger.debug("Sending auth handshake")
                sock.send(b'VERSION\t1\t1\n')
                sock.send(b'CPID\t%u\n' % os.getpid())

                request_id = next(self.request_id_gen)
                logger.debug(
                    "Authenticating with request id: '{}'"
                    .format(request_id)
                )
                sock.send(
                    b'AUTH\t%u\tPLAIN\tservice=radicale\tresp=%b\n' %
                    (
                        request_id, base64.b64encode(
                            b'\0%b\0%b' %
                            (login.encode(), password.encode())
                        )
                    )
                )

                logger.debug("Processing auth response")
                buf = sock.recv(1024)
                line = buf.split(b'\n', 1)[0]
                # BUGFIX: do not truncate to the first two fields with
                # "[:2]" — that made ``params`` always empty; also split
                # the byte parts with a bytes separator (the former str
                # '=' would raise TypeError if any parameter existed).
                parts = line.split(b'\t')
                resp, reply_id, params = (
                    parts[0], int(parts[1]),
                    dict(part.split(b'=', 1) for part in parts[2:])
                )

                logger.debug(
                    "Auth response: result='{}', id='{}', parameters={}"
                    .format(resp.decode(), reply_id, params)
                )
                if request_id != reply_id:
                    logger.fatal(
                        "Unexpected reply ID {} received (expected {})"
                        .format(
                            reply_id, request_id
                        )
                    )
                    return ""

                if resp == b'OK':
                    return login

            except socket.error as e:
                logger.fatal(
                    "Failed to communicate with Dovecot: %s" %
                    (e)
                )

        return ""
319
radicale/auth/htpasswd.py
Normal file
319
radicale/auth/htpasswd.py
Normal file
|
@ -0,0 +1,319 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Authentication backend that checks credentials with a htpasswd file.
|
||||
|
||||
Apache's htpasswd command (httpd.apache.org/docs/programs/htpasswd.html)
|
||||
manages a file for storing user credentials. It can encrypt passwords using
|
||||
different methods: BCRYPT/SHA256/SHA512 or MD5-APR1 (a version of MD5 modified for
|
||||
Apache). MD5-APR1 provides medium security as of 2015. Only BCRYPT/SHA256/SHA512 can be
|
||||
considered secure by current standards.
|
||||
|
||||
MD5-APR1-encrypted credentials can be written by all versions of htpasswd (it
|
||||
is the default, in fact), whereas BCRYPT/SHA256/SHA512 requires htpasswd 2.4.x or newer.
|
||||
|
||||
The `is_authenticated(user, password)` function provided by this module
|
||||
verifies the user-given credentials by parsing the htpasswd credential file
|
||||
pointed to by the ``htpasswd_filename`` configuration value while assuming
|
||||
the password encryption method specified via the ``htpasswd_encryption``
|
||||
configuration value.
|
||||
|
||||
The following htpasswd password encryption methods are supported by Radicale
|
||||
out-of-the-box:
|
||||
- plain-text (created by htpasswd -p ...) -- INSECURE
|
||||
- MD5-APR1 (htpasswd -m ...) -- htpasswd's default method, INSECURE
|
||||
- SHA256 (htpasswd -2 ...)
|
||||
- SHA512 (htpasswd -5 ...)
|
||||
|
||||
When bcrypt is installed:
|
||||
- BCRYPT (htpasswd -B ...) -- Requires htpasswd 2.4.x
|
||||
|
||||
"""
|
||||
|
||||
import functools
|
||||
import hmac
|
||||
import os
|
||||
import re
|
||||
import threading
|
||||
import time
|
||||
from typing import Any, Tuple
|
||||
|
||||
from passlib.hash import apr_md5_crypt, sha256_crypt, sha512_crypt
|
||||
|
||||
from radicale import auth, config, logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
    """Authentication backend validating credentials against an Apache
    htpasswd file, with optional in-memory caching of the file content.
    """

    _filename: str                          # path of the htpasswd file
    _encoding: str                          # text encoding used to read it
    _htpasswd: dict                         # login -> digest
    _htpasswd_mtime_ns: int                 # mtime of last read (change detection)
    _htpasswd_size: int                     # size of last read (change detection)
    _htpasswd_ok: bool                      # False if the file had problems
    _htpasswd_not_ok_time: float            # time of last "not ok" reminder
    _htpasswd_not_ok_reminder_seconds: int  # reminder log interval
    _htpasswd_bcrypt_use: int               # number of bcrypt digests found
    _htpasswd_cache: bool                   # cache file content between requests
    _has_bcrypt: bool                       # bcrypt module importable
    _encryption: str                        # configured encryption method
    _lock: threading.Lock

    def __init__(self, configuration: config.Configuration) -> None:
        super().__init__(configuration)
        self._filename = configuration.get("auth", "htpasswd_filename")
        logger.info("auth htpasswd file: %r", self._filename)
        self._encoding = configuration.get("encoding", "stock")
        logger.info("auth htpasswd file encoding: %r", self._encoding)
        self._htpasswd_cache = configuration.get("auth", "htpasswd_cache")
        logger.info("auth htpasswd cache: %s", self._htpasswd_cache)
        self._encryption: str = configuration.get("auth", "htpasswd_encryption")
        logger.info("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s'", self._encryption)

        self._has_bcrypt = False
        self._htpasswd_ok = False
        self._htpasswd_not_ok_reminder_seconds = 60  # currently hardcoded
        (self._htpasswd_ok, self._htpasswd_bcrypt_use, self._htpasswd, self._htpasswd_size, self._htpasswd_mtime_ns) = self._read_htpasswd(True, False)
        self._lock = threading.Lock()

        # Select the verification routine matching the configured method.
        if self._encryption == "plain":
            self._verify = self._plain
        elif self._encryption == "md5":
            self._verify = self._md5apr1
        elif self._encryption == "sha256":
            self._verify = self._sha256
        elif self._encryption == "sha512":
            self._verify = self._sha512
        elif self._encryption == "bcrypt" or self._encryption == "autodetect":
            try:
                import bcrypt
            except ImportError as e:
                # "autodetect" without bcrypt entries can work without the module.
                if (self._encryption == "autodetect") and (self._htpasswd_bcrypt_use == 0):
                    logger.warning("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s' which can require bycrypt module, but currently no entries found", self._encryption)
                else:
                    raise RuntimeError(
                        "The htpasswd encryption method 'bcrypt' or 'autodetect' requires "
                        "the bcrypt module (entries found: %d)." % self._htpasswd_bcrypt_use) from e
            else:
                self._has_bcrypt = True
                if self._encryption == "autodetect":
                    if self._htpasswd_bcrypt_use == 0:
                        logger.info("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s' and bycrypt module found, but currently not required", self._encryption)
                    else:
                        logger.info("auth htpasswd encryption is 'radicale.auth.htpasswd_encryption.%s' and bycrypt module found (bcrypt entries found: %d)", self._encryption, self._htpasswd_bcrypt_use)
            if self._encryption == "bcrypt":
                self._verify = functools.partial(self._bcrypt, bcrypt)
            else:
                self._verify = self._autodetect
                if self._htpasswd_bcrypt_use:
                    self._verify_bcrypt = functools.partial(self._bcrypt, bcrypt)
        else:
            raise RuntimeError("The htpasswd encryption method %r is not "
                               "supported." % self._encryption)

    def _plain(self, hash_value: str, password: str) -> tuple[str, bool]:
        """Check if ``hash_value`` and ``password`` match, plain method."""
        return ("PLAIN", hmac.compare_digest(hash_value.encode(), password.encode()))

    def _plain_fallback(self, method_orig, hash_value: str, password: str) -> tuple[str, bool]:
        """Check if ``hash_value`` and ``password`` match, plain method / fallback in case of hash length is not matching on autodetection."""
        info = "PLAIN/fallback as hash length not matching for " + method_orig + ": " + str(len(hash_value))
        return (info, hmac.compare_digest(hash_value.encode(), password.encode()))

    def _bcrypt(self, bcrypt: Any, hash_value: str, password: str) -> tuple[str, bool]:
        """Verify a bcrypt digest (length 60); falls back to plain on autodetect."""
        if self._encryption == "autodetect" and len(hash_value) != 60:
            return self._plain_fallback("BCRYPT", hash_value, password)
        else:
            return ("BCRYPT", bcrypt.checkpw(password=password.encode('utf-8'), hashed_password=hash_value.encode()))

    def _md5apr1(self, hash_value: str, password: str) -> tuple[str, bool]:
        """Verify an Apache MD5-APR1 digest (length 37); falls back to plain on autodetect."""
        if self._encryption == "autodetect" and len(hash_value) != 37:
            return self._plain_fallback("MD5-APR1", hash_value, password)
        else:
            return ("MD5-APR1", apr_md5_crypt.verify(password, hash_value.strip()))

    def _sha256(self, hash_value: str, password: str) -> tuple[str, bool]:
        """Verify a SHA-256 crypt digest (length 63); falls back to plain on autodetect."""
        if self._encryption == "autodetect" and len(hash_value) != 63:
            return self._plain_fallback("SHA-256", hash_value, password)
        else:
            return ("SHA-256", sha256_crypt.verify(password, hash_value.strip()))

    def _sha512(self, hash_value: str, password: str) -> tuple[str, bool]:
        """Verify a SHA-512 crypt digest (length 106); falls back to plain on autodetect."""
        if self._encryption == "autodetect" and len(hash_value) != 106:
            return self._plain_fallback("SHA-512", hash_value, password)
        else:
            return ("SHA-512", sha512_crypt.verify(password, hash_value.strip()))

    def _autodetect(self, hash_value: str, password: str) -> tuple[str, bool]:
        """Dispatch to the verification method matching the digest prefix."""
        if hash_value.startswith("$apr1$", 0, 6):
            # MD5-APR1
            return self._md5apr1(hash_value, password)
        elif re.match(r"^\$2(a|b|x|y)?\$", hash_value):
            # BCRYPT
            return self._verify_bcrypt(hash_value, password)
        elif hash_value.startswith("$5$", 0, 3):
            # SHA-256
            return self._sha256(hash_value, password)
        elif hash_value.startswith("$6$", 0, 3):
            # SHA-512
            return self._sha512(hash_value, password)
        else:
            return self._plain(hash_value, password)

    def _read_htpasswd(self, init: bool, suppress: bool) -> Tuple[bool, int, dict, int, int]:
        """Read htpasswd file

        init == True: stop on error
        init == False: warn/skip on error and set mark to log reminder every interval
        suppress == True: suppress warnings, change info to debug (used in non-caching mode)
        suppress == False: do not suppress warnings (used in caching mode)

        """
        htpasswd_ok = True
        bcrypt_use = 0
        if (init is True) or (suppress is True):
            info = "Read"
        else:
            info = "Re-read"
        if suppress is False:
            logger.info("%s content of htpasswd file start: %r", info, self._filename)
        else:
            logger.debug("%s content of htpasswd file start: %r", info, self._filename)
        htpasswd: dict[str, str] = dict()
        entries = 0
        duplicates = 0
        errors = 0
        try:
            with open(self._filename, encoding=self._encoding) as f:
                line_num = 0
                for line in f:
                    line_num += 1
                    line = line.rstrip("\n")
                    if line.lstrip() and not line.lstrip().startswith("#"):
                        try:
                            login, digest = line.split(":", maxsplit=1)
                            skip = False
                            if login == "" or digest == "":
                                if init is True:
                                    raise ValueError("htpasswd file contains problematic line not matching <login>:<digest> in line: %d" % line_num)
                                else:
                                    errors += 1
                                    logger.warning("htpasswd file contains problematic line not matching <login>:<digest> in line: %d (ignored)", line_num)
                                    htpasswd_ok = False
                                    skip = True
                            else:
                                if htpasswd.get(login):
                                    duplicates += 1
                                    if init is True:
                                        # BUGFIX: interpolate the message instead of
                                        # passing extra args to ValueError (the old
                                        # form never formatted '%s' at all)
                                        raise ValueError("htpasswd file contains duplicate login: '%s' (line: %d)" % (login, line_num))
                                    else:
                                        logger.warning("htpasswd file contains duplicate login: '%s' (line: %d / ignored)", login, line_num)
                                        htpasswd_ok = False
                                        skip = True
                                else:
                                    if re.match(r"^\$2(a|b|x|y)?\$", digest) and len(digest) == 60:
                                        if init is True:
                                            bcrypt_use += 1
                                        else:
                                            if self._has_bcrypt is False:
                                                logger.warning("htpasswd file contains bcrypt digest login: '%s' (line: %d / ignored because module is not loaded)", login, line_num)
                                                skip = True
                                                htpasswd_ok = False
                            if skip is False:
                                htpasswd[login] = digest
                                entries += 1
                        except ValueError as e:
                            if init is True:
                                raise RuntimeError("Invalid htpasswd file %r: %s" % (self._filename, e)) from e
        except OSError as e:
            if init is True:
                raise RuntimeError("Failed to load htpasswd file %r: %s" % (self._filename, e)) from e
            else:
                logger.warning("Failed to load htpasswd file on re-read: %r" % self._filename)
                htpasswd_ok = False
        htpasswd_size = os.stat(self._filename).st_size
        htpasswd_mtime_ns = os.stat(self._filename).st_mtime_ns
        if suppress is False:
            logger.info("%s content of htpasswd file done: %r (entries: %d, duplicates: %d, errors: %d)", info, self._filename, entries, duplicates, errors)
        else:
            logger.debug("%s content of htpasswd file done: %r (entries: %d, duplicates: %d, errors: %d)", info, self._filename, entries, duplicates, errors)
        if htpasswd_ok is True:
            self._htpasswd_not_ok_time = 0
        else:
            self._htpasswd_not_ok_time = time.time()
        return (htpasswd_ok, bcrypt_use, htpasswd, htpasswd_size, htpasswd_mtime_ns)

    def _login(self, login: str, password: str) -> str:
        """Validate credentials.

        Iterate through htpasswd credential file until login matches, extract
        hash (encrypted password) and check hash against password,
        using the method specified in the Radicale config.

        Optional: the content of the file is cached and live updates will be detected by
        comparing mtime_ns and size

        """
        login_ok = False
        digest: str
        if self._htpasswd_cache is True:
            # check and re-read file if required
            with self._lock:
                htpasswd_size = os.stat(self._filename).st_size
                htpasswd_mtime_ns = os.stat(self._filename).st_mtime_ns
                if (htpasswd_size != self._htpasswd_size) or (htpasswd_mtime_ns != self._htpasswd_mtime_ns):
                    (self._htpasswd_ok, self._htpasswd_bcrypt_use, self._htpasswd, self._htpasswd_size, self._htpasswd_mtime_ns) = self._read_htpasswd(False, False)
                    self._htpasswd_not_ok_time = 0

            # log reminder of problematic file every interval
            current_time = time.time()
            if (self._htpasswd_ok is False):
                if (self._htpasswd_not_ok_time > 0):
                    if (current_time - self._htpasswd_not_ok_time) > self._htpasswd_not_ok_reminder_seconds:
                        logger.warning("htpasswd file still contains issues (REMINDER, check warnings in the past): %r" % self._filename)
                        self._htpasswd_not_ok_time = current_time
                else:
                    self._htpasswd_not_ok_time = current_time

            if self._htpasswd.get(login):
                digest = self._htpasswd[login]
                login_ok = True
        else:
            # read file on every request
            (htpasswd_ok, htpasswd_bcrypt_use, htpasswd, htpasswd_size, htpasswd_mtime_ns) = self._read_htpasswd(False, True)
            if htpasswd.get(login):
                digest = htpasswd[login]
                login_ok = True

        if login_ok is True:
            try:
                (method, password_ok) = self._verify(digest, password)
            except ValueError as e:
                logger.error("Login verification failed for user: '%s' (htpasswd/%s) with errror '%s'", login, self._encryption, e)
                return ""
            if password_ok:
                logger.debug("Login verification successful for user: '%s' (htpasswd/%s/%s)", login, self._encryption, method)
                return login
            else:
                logger.warning("Login verification failed for user: '%s' (htpasswd/%s/%s)", login, self._encryption, method)
        else:
            logger.warning("Login verification user not found (htpasswd): '%s'", login)
        return ""
39
radicale/auth/http_x_remote_user.py
Normal file
39
radicale/auth/http_x_remote_user.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Authentication backend that takes the username from the
|
||||
``HTTP_X_REMOTE_USER`` header.
|
||||
|
||||
It's intended for use with a reverse proxy. Be aware as this will be insecure
|
||||
if the reverse proxy is not configured properly.
|
||||
|
||||
"""
|
||||
|
||||
from typing import Tuple, Union
|
||||
|
||||
from radicale import types
|
||||
from radicale.auth import none
|
||||
|
||||
|
||||
class Auth(none.Auth):
    """Take the username from the ``X-Remote-User`` HTTP header.

    Intended for use behind a reverse proxy; insecure if the proxy does
    not strip/control this header.
    """

    def get_external_login(self, environ: types.WSGIEnviron) -> Union[
            Tuple[()], Tuple[str, str]]:
        # A missing header yields ("", "") -> treated as anonymous.
        return environ.get("HTTP_X_REMOTE_USER", ""), ""
73
radicale/auth/imap.py
Normal file
73
radicale/auth/imap.py
Normal file
|
@ -0,0 +1,73 @@
|
|||
# RadicaleIMAP IMAP authentication plugin for Radicale.
|
||||
# Copyright © 2017, 2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import imaplib
|
||||
import ssl
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
    """Authenticate user with IMAP."""

    def __init__(self, configuration) -> None:
        super().__init__(configuration)
        self._host, self._port = self.configuration.get("auth", "imap_host")
        logger.info("auth imap host: %r", self._host)
        self._security = self.configuration.get("auth", "imap_security")
        if self._security == "none":
            logger.warning("auth imap security: %s (INSECURE, credentials are transmitted in clear text)", self._security)
        else:
            logger.info("auth imap security: %s", self._security)
        # Pick the protocol default port when none was configured.
        default_port = 993 if self._security == "tls" else 143
        if self._port is None:
            self._port = default_port
            logger.info("auth imap port (autoselected): %d", self._port)
        else:
            logger.info("auth imap port: %d", self._port)

    def _login(self, login, password) -> str:
        """Try a PLAIN IMAP authentication; return ``login`` on success."""
        try:
            conn: imaplib.IMAP4 | imaplib.IMAP4_SSL
            if self._security == "tls":
                conn = imaplib.IMAP4_SSL(
                    host=self._host, port=self._port,
                    ssl_context=ssl.create_default_context())
            else:
                conn = imaplib.IMAP4(host=self._host, port=self._port)
                if self._security == "starttls":
                    conn.starttls(ssl.create_default_context())
            try:
                conn.authenticate(
                    "PLAIN",
                    lambda _: "{0}\x00{0}\x00{1}".format(login, password).encode(),
                )
            except imaplib.IMAP4.error as e:
                # Bad credentials (or server-side rejection) -> invalid login.
                logger.warning("IMAP authentication failed for user %r: %s", login, e, exc_info=False)
                return ""
            conn.logout()
            return login
        except (OSError, imaplib.IMAP4.error) as e:
            logger.error("Failed to communicate with IMAP server %r: %s" % ("[%s]:%d" % (self._host, self._port), e))
            return ""
269
radicale/auth/ldap.py
Normal file
269
radicale/auth/ldap.py
Normal file
|
@ -0,0 +1,269 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2022-2024 Peter Varkoly
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
Authentication backend that checks credentials with a LDAP server.
|
||||
Following parameters are needed in the configuration:
|
||||
ldap_uri The LDAP URL to the server like ldap://localhost
|
||||
ldap_base The baseDN of the LDAP server
|
||||
ldap_reader_dn The DN of a LDAP user with read access to get the user accounts
|
||||
ldap_secret The password of the ldap_reader_dn
|
||||
ldap_secret_file The path of the file containing the password of the ldap_reader_dn
|
||||
ldap_filter The search filter to find the user to authenticate by the username
|
||||
ldap_user_attribute The attribute to be used as username after authentication
|
||||
ldap_groups_attribute The attribute containing group memberships in the LDAP user entry
|
||||
Following parameters controls SSL connections:
|
||||
ldap_use_ssl If the connection
|
||||
ldap_ssl_verify_mode The certificate verification mode. NONE, OPTIONAL, default is REQUIRED
|
||||
ldap_ssl_ca_file
|
||||
|
||||
"""
|
||||
import ssl
|
||||
|
||||
from radicale import auth, config
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
_ldap_uri: str
|
||||
_ldap_base: str
|
||||
_ldap_reader_dn: str
|
||||
_ldap_secret: str
|
||||
_ldap_filter: str
|
||||
_ldap_attributes: list[str] = []
|
||||
_ldap_user_attr: str
|
||||
_ldap_groups_attr: str
|
||||
_ldap_module_version: int = 3
|
||||
_ldap_use_ssl: bool = False
|
||||
_ldap_ssl_verify_mode: int = ssl.CERT_REQUIRED
|
||||
_ldap_ssl_ca_file: str = ""
|
||||
|
||||
def __init__(self, configuration: config.Configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
try:
|
||||
import ldap3
|
||||
self.ldap3 = ldap3
|
||||
except ImportError:
|
||||
try:
|
||||
import ldap
|
||||
self._ldap_module_version = 2
|
||||
self.ldap = ldap
|
||||
except ImportError as e:
|
||||
raise RuntimeError("LDAP authentication requires the ldap3 module") from e
|
||||
|
||||
self._ldap_ignore_attribute_create_modify_timestamp = configuration.get("auth", "ldap_ignore_attribute_create_modify_timestamp")
|
||||
if self._ldap_ignore_attribute_create_modify_timestamp:
|
||||
self.ldap3.utils.config._ATTRIBUTES_EXCLUDED_FROM_CHECK.extend(['createTimestamp', 'modifyTimestamp'])
|
||||
logger.info("auth.ldap_ignore_attribute_create_modify_timestamp applied")
|
||||
|
||||
self._ldap_uri = configuration.get("auth", "ldap_uri")
|
||||
self._ldap_base = configuration.get("auth", "ldap_base")
|
||||
self._ldap_reader_dn = configuration.get("auth", "ldap_reader_dn")
|
||||
self._ldap_secret = configuration.get("auth", "ldap_secret")
|
||||
self._ldap_filter = configuration.get("auth", "ldap_filter")
|
||||
self._ldap_user_attr = configuration.get("auth", "ldap_user_attribute")
|
||||
self._ldap_groups_attr = configuration.get("auth", "ldap_groups_attribute")
|
||||
ldap_secret_file_path = configuration.get("auth", "ldap_secret_file")
|
||||
if ldap_secret_file_path:
|
||||
with open(ldap_secret_file_path, 'r') as file:
|
||||
self._ldap_secret = file.read().rstrip('\n')
|
||||
if self._ldap_module_version == 3:
|
||||
self._ldap_use_ssl = configuration.get("auth", "ldap_use_ssl")
|
||||
if self._ldap_use_ssl:
|
||||
self._ldap_ssl_ca_file = configuration.get("auth", "ldap_ssl_ca_file")
|
||||
tmp = configuration.get("auth", "ldap_ssl_verify_mode")
|
||||
if tmp == "NONE":
|
||||
self._ldap_ssl_verify_mode = ssl.CERT_NONE
|
||||
elif tmp == "OPTIONAL":
|
||||
self._ldap_ssl_verify_mode = ssl.CERT_OPTIONAL
|
||||
logger.info("auth.ldap_uri : %r" % self._ldap_uri)
|
||||
logger.info("auth.ldap_base : %r" % self._ldap_base)
|
||||
logger.info("auth.ldap_reader_dn : %r" % self._ldap_reader_dn)
|
||||
logger.info("auth.ldap_filter : %r" % self._ldap_filter)
|
||||
if self._ldap_user_attr:
|
||||
logger.info("auth.ldap_user_attribute : %r" % self._ldap_user_attr)
|
||||
else:
|
||||
logger.info("auth.ldap_user_attribute : (not provided)")
|
||||
if self._ldap_groups_attr:
|
||||
logger.info("auth.ldap_groups_attribute: %r" % self._ldap_groups_attr)
|
||||
else:
|
||||
logger.info("auth.ldap_groups_attribute: (not provided)")
|
||||
if ldap_secret_file_path:
|
||||
logger.info("auth.ldap_secret_file_path: %r" % ldap_secret_file_path)
|
||||
if self._ldap_secret:
|
||||
logger.info("auth.ldap_secret : (from file)")
|
||||
else:
|
||||
logger.info("auth.ldap_secret_file_path: (not provided)")
|
||||
if self._ldap_secret:
|
||||
logger.info("auth.ldap_secret : (from config)")
|
||||
if self._ldap_reader_dn and not self._ldap_secret:
|
||||
logger.error("auth.ldap_secret : (not provided)")
|
||||
raise RuntimeError("LDAP authentication requires ldap_secret for ldap_reader_dn")
|
||||
logger.info("auth.ldap_use_ssl : %s" % self._ldap_use_ssl)
|
||||
if self._ldap_use_ssl is True:
|
||||
logger.info("auth.ldap_ssl_verify_mode : %s" % self._ldap_ssl_verify_mode)
|
||||
if self._ldap_ssl_ca_file:
|
||||
logger.info("auth.ldap_ssl_ca_file : %r" % self._ldap_ssl_ca_file)
|
||||
else:
|
||||
logger.info("auth.ldap_ssl_ca_file : (not provided)")
|
||||
"""Extend attributes to to be returned in the user query"""
|
||||
if self._ldap_groups_attr:
|
||||
self._ldap_attributes.append(self._ldap_groups_attr)
|
||||
if self._ldap_user_attr:
|
||||
self._ldap_attributes.append(self._ldap_user_attr)
|
||||
logger.info("ldap_attributes : %r" % self._ldap_attributes)
|
||||
|
||||
def _login2(self, login: str, password: str) -> str:
|
||||
try:
|
||||
"""Bind as reader dn"""
|
||||
logger.debug(f"_login2 {self._ldap_uri}, {self._ldap_reader_dn}")
|
||||
conn = self.ldap.initialize(self._ldap_uri)
|
||||
conn.protocol_version = 3
|
||||
conn.set_option(self.ldap.OPT_REFERRALS, 0)
|
||||
conn.simple_bind_s(self._ldap_reader_dn, self._ldap_secret)
|
||||
"""Search for the dn of user to authenticate"""
|
||||
escaped_login = self.ldap.filter.escape_filter_chars(login)
|
||||
logger.debug(f"_login2 login escaped for LDAP filters: {escaped_login}")
|
||||
res = conn.search_s(
|
||||
self._ldap_base,
|
||||
self.ldap.SCOPE_SUBTREE,
|
||||
filterstr=self._ldap_filter.format(escaped_login),
|
||||
attrlist=self._ldap_attributes
|
||||
)
|
||||
if len(res) != 1:
|
||||
"""User could not be found unambiguously"""
|
||||
logger.debug(f"_login2 no unique DN found for '{login}'")
|
||||
return ""
|
||||
user_entry = res[0]
|
||||
user_dn = user_entry[0]
|
||||
logger.debug(f"_login2 found LDAP user DN {user_dn}")
|
||||
"""Close LDAP connection"""
|
||||
conn.unbind()
|
||||
except Exception as e:
|
||||
raise RuntimeError(f"Invalid LDAP configuration:{e}")
|
||||
|
||||
try:
|
||||
"""Bind as user to authenticate"""
|
||||
conn = self.ldap.initialize(self._ldap_uri)
|
||||
conn.protocol_version = 3
|
||||
conn.set_option(self.ldap.OPT_REFERRALS, 0)
|
||||
conn.simple_bind_s(user_dn, password)
|
||||
tmp: list[str] = []
|
||||
if self._ldap_groups_attr:
|
||||
tmp = []
|
||||
for g in user_entry[1][self._ldap_groups_attr]:
|
||||
"""Get group g's RDN's attribute value"""
|
||||
try:
|
||||
rdns = self.ldap.dn.explode_dn(g, notypes=True)
|
||||
tmp.append(rdns[0])
|
||||
except Exception:
|
||||
tmp.append(g.decode('utf8'))
|
||||
self._ldap_groups = set(tmp)
|
||||
logger.debug("_login2 LDAP groups of user: %s", ",".join(self._ldap_groups))
|
||||
if self._ldap_user_attr:
|
||||
if user_entry[1][self._ldap_user_attr]:
|
||||
tmplogin = user_entry[1][self._ldap_user_attr][0]
|
||||
login = tmplogin.decode('utf-8')
|
||||
logger.debug(f"_login2 user set to: '{login}'")
|
||||
conn.unbind()
|
||||
logger.debug(f"_login2 {login} successfully authenticated")
|
||||
return login
|
||||
except self.ldap.INVALID_CREDENTIALS:
|
||||
return ""
|
||||
|
||||
def _login3(self, login: str, password: str) -> str:
|
||||
"""Connect the server"""
|
||||
try:
|
||||
logger.debug(f"_login3 {self._ldap_uri}, {self._ldap_reader_dn}")
|
||||
if self._ldap_use_ssl:
|
||||
tls = self.ldap3.Tls(validate=self._ldap_ssl_verify_mode)
|
||||
if self._ldap_ssl_ca_file != "":
|
||||
tls = self.ldap3.Tls(
|
||||
validate=self._ldap_ssl_verify_mode,
|
||||
ca_certs_file=self._ldap_ssl_ca_file
|
||||
)
|
||||
server = self.ldap3.Server(self._ldap_uri, use_ssl=True, tls=tls)
|
||||
else:
|
||||
server = self.ldap3.Server(self._ldap_uri)
|
||||
conn = self.ldap3.Connection(server, self._ldap_reader_dn, password=self._ldap_secret)
|
||||
except self.ldap3.core.exceptions.LDAPSocketOpenError:
|
||||
raise RuntimeError("Unable to reach LDAP server")
|
||||
except Exception as e:
|
||||
logger.debug(f"_login3 error 1 {e}")
|
||||
pass
|
||||
|
||||
if not conn.bind():
|
||||
logger.debug("_login3 cannot bind")
|
||||
raise RuntimeError("Unable to read from LDAP server")
|
||||
|
||||
logger.debug(f"_login3 bind as {self._ldap_reader_dn}")
|
||||
"""Search the user dn"""
|
||||
escaped_login = self.ldap3.utils.conv.escape_filter_chars(login)
|
||||
logger.debug(f"_login3 login escaped for LDAP filters: {escaped_login}")
|
||||
conn.search(
|
||||
search_base=self._ldap_base,
|
||||
search_filter=self._ldap_filter.format(escaped_login),
|
||||
search_scope=self.ldap3.SUBTREE,
|
||||
attributes=self._ldap_attributes
|
||||
)
|
||||
if len(conn.entries) != 1:
|
||||
"""User could not be found unambiguously"""
|
||||
logger.debug(f"_login3 no unique DN found for '{login}'")
|
||||
return ""
|
||||
|
||||
user_entry = conn.response[0]
|
||||
conn.unbind()
|
||||
user_dn = user_entry['dn']
|
||||
logger.debug(f"_login3 found LDAP user DN {user_dn}")
|
||||
try:
|
||||
"""Try to bind as the user itself"""
|
||||
conn = self.ldap3.Connection(server, user_dn, password=password)
|
||||
if not conn.bind():
|
||||
logger.debug(f"_login3 user '{login}' cannot be found")
|
||||
return ""
|
||||
tmp: list[str] = []
|
||||
if self._ldap_groups_attr:
|
||||
tmp = []
|
||||
for g in user_entry['attributes'][self._ldap_groups_attr]:
|
||||
"""Get group g's RDN's attribute value"""
|
||||
try:
|
||||
rdns = self.ldap3.utils.dn.parse_dn(g)
|
||||
tmp.append(rdns[0][1])
|
||||
except Exception:
|
||||
tmp.append(g)
|
||||
self._ldap_groups = set(tmp)
|
||||
logger.debug("_login3 LDAP groups of user: %s", ",".join(self._ldap_groups))
|
||||
if self._ldap_user_attr:
|
||||
if user_entry['attributes'][self._ldap_user_attr]:
|
||||
login = user_entry['attributes'][self._ldap_user_attr]
|
||||
logger.debug(f"_login3 user set to: '{login}'")
|
||||
conn.unbind()
|
||||
logger.debug(f"_login3 {login} successfully authenticated")
|
||||
return login
|
||||
except Exception as e:
|
||||
logger.debug(f"_login3 error 2 {e}")
|
||||
pass
|
||||
return ""
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
"""Validate credentials.
|
||||
In first step we make a connection to the LDAP server with the ldap_reader_dn credential.
|
||||
In next step the DN of the user to authenticate will be searched.
|
||||
In the last step the authentication of the user will be proceeded.
|
||||
"""
|
||||
if self._ldap_module_version == 2:
|
||||
return self._login2(login, password)
|
||||
return self._login3(login, password)
|
16
bin/radicale → radicale/auth/none.py
Executable file → Normal file
16
bin/radicale → radicale/auth/none.py
Executable file → Normal file
|
@ -1,9 +1,8 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -19,13 +18,14 @@
|
|||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Radicale CalDAV Server.
|
||||
|
||||
Launch the server according to configuration and command-line options.
|
||||
A dummy backend that accepts any username and password.
|
||||
|
||||
"""
|
||||
|
||||
import radicale.__main__
|
||||
from radicale import auth
|
||||
|
||||
|
||||
radicale.__main__.run()
|
||||
class Auth(auth.BaseAuth):
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
return login
|
66
radicale/auth/oauth2.py
Normal file
66
radicale/auth/oauth2.py
Normal file
|
@ -0,0 +1,66 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
#
|
||||
# Original from https://gitlab.mim-libre.fr/alphabet/radicale_oauth/
|
||||
# Copyright © 2021-2022 Bruno Boiget
|
||||
# Copyright © 2022-2022 Daniel Dehennin
|
||||
#
|
||||
# Since migration into upstream
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Authentication backend that checks credentials against an oauth2 server auth endpoint
|
||||
"""
|
||||
|
||||
import requests
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
def __init__(self, configuration):
|
||||
super().__init__(configuration)
|
||||
self._endpoint = configuration.get("auth", "oauth2_token_endpoint")
|
||||
if not self._endpoint:
|
||||
logger.error("auth.oauth2_token_endpoint URL missing")
|
||||
raise RuntimeError("OAuth2 token endpoint URL is required")
|
||||
logger.info("auth OAuth2 token endpoint: %s" % (self._endpoint))
|
||||
|
||||
def _login(self, login, password):
|
||||
"""Validate credentials.
|
||||
Sends login credentials to oauth token endpoint and checks that a token is returned
|
||||
"""
|
||||
try:
|
||||
# authenticate to authentication endpoint and return login if ok, else ""
|
||||
req_params = {
|
||||
"username": login,
|
||||
"password": password,
|
||||
"grant_type": "password",
|
||||
"client_id": "radicale",
|
||||
}
|
||||
req_headers = {"Content-Type": "application/x-www-form-urlencoded"}
|
||||
response = requests.post(
|
||||
self._endpoint, data=req_params, headers=req_headers
|
||||
)
|
||||
if (
|
||||
response.status_code == requests.codes.ok
|
||||
and "access_token" in response.json()
|
||||
):
|
||||
return login
|
||||
except OSError as e:
|
||||
logger.critical("Failed to authenticate against OAuth2 server %s: %s" % (self._endpoint, e))
|
||||
logger.warning("User failed to authenticate using OAuth2: %r" % login)
|
||||
return ""
|
105
radicale/auth/pam.py
Normal file
105
radicale/auth/pam.py
Normal file
|
@ -0,0 +1,105 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2011 Henry-Nicolas Tourneur
|
||||
# Copyright © 2021-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2025-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
PAM authentication.
|
||||
|
||||
Authentication using the ``pam-python`` module.
|
||||
|
||||
Important: radicale user need access to /etc/shadow by e.g.
|
||||
chgrp radicale /etc/shadow
|
||||
chmod g+r
|
||||
"""
|
||||
|
||||
import grp
|
||||
import pwd
|
||||
|
||||
from radicale import auth
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
def __init__(self, configuration) -> None:
|
||||
super().__init__(configuration)
|
||||
try:
|
||||
import pam
|
||||
self.pam = pam
|
||||
except ImportError as e:
|
||||
raise RuntimeError("PAM authentication requires the Python pam module") from e
|
||||
self._service = configuration.get("auth", "pam_service")
|
||||
logger.info("auth.pam_service: %s" % self._service)
|
||||
self._group_membership = configuration.get("auth", "pam_group_membership")
|
||||
if (self._group_membership):
|
||||
logger.info("auth.pam_group_membership: %s" % self._group_membership)
|
||||
else:
|
||||
logger.warning("auth.pam_group_membership: (empty, nothing to check / INSECURE)")
|
||||
|
||||
def pam_authenticate(self, *args, **kwargs):
|
||||
return self.pam.authenticate(*args, **kwargs)
|
||||
|
||||
def _login(self, login: str, password: str) -> str:
|
||||
"""Check if ``user``/``password`` couple is valid."""
|
||||
if login is None or password is None:
|
||||
return ""
|
||||
|
||||
# Check whether the user exists in the PAM system
|
||||
try:
|
||||
pwd.getpwnam(login).pw_uid
|
||||
except KeyError:
|
||||
logger.debug("PAM user not found: %r" % login)
|
||||
return ""
|
||||
else:
|
||||
logger.debug("PAM user found: %r" % login)
|
||||
|
||||
# Check whether the user has a primary group (mandatory)
|
||||
try:
|
||||
# Get user primary group
|
||||
primary_group = grp.getgrgid(pwd.getpwnam(login).pw_gid).gr_name
|
||||
logger.debug("PAM user %r has primary group: %r" % (login, primary_group))
|
||||
except KeyError:
|
||||
logger.debug("PAM user has no primary group: %r" % login)
|
||||
return ""
|
||||
|
||||
# Obtain supplementary groups
|
||||
members = []
|
||||
if (self._group_membership):
|
||||
try:
|
||||
members = grp.getgrnam(self._group_membership).gr_mem
|
||||
except KeyError:
|
||||
logger.debug(
|
||||
"PAM membership required group doesn't exist: %r" %
|
||||
self._group_membership)
|
||||
return ""
|
||||
|
||||
# Check whether the user belongs to the required group
|
||||
# (primary or supplementary)
|
||||
if (self._group_membership):
|
||||
if (primary_group != self._group_membership) and (login not in members):
|
||||
logger.warning("PAM user %r belongs not to the required group: %r" % (login, self._group_membership))
|
||||
return ""
|
||||
else:
|
||||
logger.debug("PAM user %r belongs to the required group: %r" % (login, self._group_membership))
|
||||
|
||||
# Check the password
|
||||
if self.pam_authenticate(login, password, service=self._service):
|
||||
return login
|
||||
else:
|
||||
logger.debug("PAM authentication not successful for user: %r (service %r)" % (login, self._service))
|
||||
return ""
|
38
radicale/auth/remote_user.py
Normal file
38
radicale/auth/remote_user.py
Normal file
|
@ -0,0 +1,38 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Authentication backend that takes the username from the ``REMOTE_USER``
|
||||
WSGI environment variable.
|
||||
|
||||
It's intended for use with an external WSGI server.
|
||||
|
||||
"""
|
||||
|
||||
from typing import Tuple, Union
|
||||
|
||||
from radicale import types
|
||||
from radicale.auth import none
|
||||
|
||||
|
||||
class Auth(none.Auth):
|
||||
|
||||
def get_external_login(self, environ: types.WSGIEnviron
|
||||
) -> Union[Tuple[()], Tuple[str, str]]:
|
||||
return environ.get("REMOTE_USER", ""), ""
|
|
@ -1,7 +1,9 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2017-2020 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,28 +19,40 @@
|
|||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Radicale configuration module.
|
||||
Configuration module
|
||||
|
||||
Give a configparser-like interface to read and write configuration.
|
||||
Use ``load()`` to obtain an instance of ``Configuration`` for use with
|
||||
``radicale.app.Application``.
|
||||
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import json
|
||||
import math
|
||||
import os
|
||||
import string
|
||||
import sys
|
||||
from collections import OrderedDict
|
||||
from configparser import RawConfigParser as ConfigParser
|
||||
from configparser import RawConfigParser
|
||||
from typing import (Any, Callable, ClassVar, Iterable, List, Optional,
|
||||
Sequence, Tuple, TypeVar, Union)
|
||||
|
||||
from . import auth, rights, storage, web
|
||||
from radicale import auth, hook, rights, storage, types, web
|
||||
from radicale.item import check_and_sanitize_props
|
||||
|
||||
DEFAULT_CONFIG_PATH: str = os.pathsep.join([
|
||||
"?/etc/radicale/config",
|
||||
"?~/.config/radicale/config"])
|
||||
|
||||
|
||||
def positive_int(value):
|
||||
def positive_int(value: Any) -> int:
|
||||
value = int(value)
|
||||
if value < 0:
|
||||
raise ValueError("value is negative: %d" % value)
|
||||
return value
|
||||
|
||||
|
||||
def positive_float(value):
|
||||
def positive_float(value: Any) -> float:
|
||||
value = float(value)
|
||||
if not math.isfinite(value):
|
||||
raise ValueError("value is infinite")
|
||||
|
@ -49,74 +63,138 @@ def positive_float(value):
|
|||
return value
|
||||
|
||||
|
||||
def logging_level(value: Any) -> str:
|
||||
if value not in ("debug", "info", "warning", "error", "critical"):
|
||||
raise ValueError("unsupported level: %r" % value)
|
||||
return value
|
||||
|
||||
|
||||
def filepath(value: Any) -> str:
|
||||
if not value:
|
||||
return ""
|
||||
value = os.path.expanduser(value)
|
||||
if sys.platform == "win32":
|
||||
value = os.path.expandvars(value)
|
||||
return os.path.abspath(value)
|
||||
|
||||
|
||||
def list_of_ip_address(value: Any) -> List[Tuple[str, int]]:
|
||||
def ip_address(value):
|
||||
try:
|
||||
address, port = value.rsplit(":", 1)
|
||||
return address.strip(string.whitespace + "[]"), int(port)
|
||||
except ValueError:
|
||||
raise ValueError("malformed IP address: %r" % value)
|
||||
return [ip_address(s) for s in value.split(",")]
|
||||
|
||||
|
||||
def str_or_callable(value: Any) -> Union[str, Callable]:
|
||||
if callable(value):
|
||||
return value
|
||||
return str(value)
|
||||
|
||||
|
||||
def unspecified_type(value: Any) -> Any:
|
||||
return value
|
||||
|
||||
|
||||
def _convert_to_bool(value: Any) -> bool:
|
||||
if value.lower() not in RawConfigParser.BOOLEAN_STATES:
|
||||
raise ValueError("not a boolean: %r" % value)
|
||||
return RawConfigParser.BOOLEAN_STATES[value.lower()]
|
||||
|
||||
|
||||
def imap_address(value):
|
||||
if "]" in value:
|
||||
pre_address, pre_address_port = value.rsplit("]", 1)
|
||||
else:
|
||||
pre_address, pre_address_port = "", value
|
||||
if ":" in pre_address_port:
|
||||
pre_address2, port = pre_address_port.rsplit(":", 1)
|
||||
address = pre_address + pre_address2
|
||||
else:
|
||||
address, port = pre_address + pre_address_port, None
|
||||
try:
|
||||
return (address.strip(string.whitespace + "[]"),
|
||||
None if port is None else int(port))
|
||||
except ValueError:
|
||||
raise ValueError("malformed IMAP address: %r" % value)
|
||||
|
||||
|
||||
def imap_security(value):
|
||||
if value not in ("tls", "starttls", "none"):
|
||||
raise ValueError("unsupported IMAP security: %r" % value)
|
||||
return value
|
||||
|
||||
|
||||
def json_str(value: Any) -> dict:
|
||||
if not value:
|
||||
return {}
|
||||
ret = json.loads(value)
|
||||
for (name_coll, props) in ret.items():
|
||||
checked_props = check_and_sanitize_props(props)
|
||||
ret[name_coll] = checked_props
|
||||
return ret
|
||||
|
||||
|
||||
INTERNAL_OPTIONS: Sequence[str] = ("_allow_extra",)
|
||||
# Default configuration
|
||||
INITIAL_CONFIG = OrderedDict([
|
||||
DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
|
||||
("server", OrderedDict([
|
||||
("hosts", {
|
||||
"value": "127.0.0.1:5232",
|
||||
"value": "localhost:5232",
|
||||
"help": "set server hostnames including ports",
|
||||
"aliases": ["-H", "--hosts"],
|
||||
"type": str}),
|
||||
("daemon", {
|
||||
"value": "False",
|
||||
"help": "launch as daemon",
|
||||
"aliases": ["-d", "--daemon"],
|
||||
"opposite": ["-f", "--foreground"],
|
||||
"type": bool}),
|
||||
("pid", {
|
||||
"value": "",
|
||||
"help": "set PID filename for daemon mode",
|
||||
"aliases": ["-p", "--pid"],
|
||||
"type": str}),
|
||||
"aliases": ("-H", "--hosts",),
|
||||
"type": list_of_ip_address}),
|
||||
("max_connections", {
|
||||
"value": "20",
|
||||
"value": "8",
|
||||
"help": "maximum number of parallel connections",
|
||||
"type": positive_int}),
|
||||
("max_content_length", {
|
||||
"value": "10000000",
|
||||
"value": "100000000",
|
||||
"help": "maximum size of request body in bytes",
|
||||
"type": positive_int}),
|
||||
("timeout", {
|
||||
"value": "10",
|
||||
"value": "30",
|
||||
"help": "socket timeout",
|
||||
"type": positive_int}),
|
||||
"type": positive_float}),
|
||||
("ssl", {
|
||||
"value": "False",
|
||||
"help": "use SSL connection",
|
||||
"aliases": ["-s", "--ssl"],
|
||||
"opposite": ["-S", "--no-ssl"],
|
||||
"aliases": ("-s", "--ssl",),
|
||||
"opposite_aliases": ("-S", "--no-ssl",),
|
||||
"type": bool}),
|
||||
("protocol", {
|
||||
"value": "",
|
||||
"help": "SSL/TLS protocol (Apache SSLProtocol format)",
|
||||
"type": str}),
|
||||
("ciphersuite", {
|
||||
"value": "",
|
||||
"help": "SSL/TLS Cipher Suite (OpenSSL cipher list format)",
|
||||
"type": str}),
|
||||
("certificate", {
|
||||
"value": "/etc/ssl/radicale.cert.pem",
|
||||
"help": "set certificate file",
|
||||
"aliases": ["-c", "--certificate"],
|
||||
"type": str}),
|
||||
"aliases": ("-c", "--certificate",),
|
||||
"type": filepath}),
|
||||
("key", {
|
||||
"value": "/etc/ssl/radicale.key.pem",
|
||||
"help": "set private key file",
|
||||
"aliases": ["-k", "--key"],
|
||||
"type": str}),
|
||||
"aliases": ("-k", "--key",),
|
||||
"type": filepath}),
|
||||
("certificate_authority", {
|
||||
"value": "",
|
||||
"help": "set CA certificate for validating clients",
|
||||
"aliases": ["--certificate-authority"],
|
||||
"type": str}),
|
||||
("protocol", {
|
||||
"value": "PROTOCOL_TLSv1_2",
|
||||
"help": "SSL protocol used",
|
||||
"type": str}),
|
||||
("ciphers", {
|
||||
"aliases": ("--certificate-authority",),
|
||||
"type": filepath}),
|
||||
("script_name", {
|
||||
"value": "",
|
||||
"help": "available ciphers",
|
||||
"help": "script name to strip from URI if called by reverse proxy (default taken from HTTP_X_SCRIPT_NAME or SCRIPT_NAME)",
|
||||
"type": str}),
|
||||
("dns_lookup", {
|
||||
"value": "True",
|
||||
"help": "use reverse DNS to resolve client address in logs",
|
||||
"type": bool}),
|
||||
("realm", {
|
||||
"value": "Radicale - Password Required",
|
||||
"help": "message displayed when a password is needed",
|
||||
"type": str})])),
|
||||
("_internal_server", {
|
||||
"value": "False",
|
||||
"help": "the internal server is used",
|
||||
"type": bool})])),
|
||||
("encoding", OrderedDict([
|
||||
("request", {
|
||||
"value": "utf-8",
|
||||
|
@ -128,132 +206,495 @@ INITIAL_CONFIG = OrderedDict([
|
|||
"type": str})])),
|
||||
("auth", OrderedDict([
|
||||
("type", {
|
||||
"value": "none",
|
||||
"help": "authentication method",
|
||||
"type": str,
|
||||
"value": "denyall",
|
||||
"help": "authentication method (" + "|".join(auth.INTERNAL_TYPES) + ")",
|
||||
"type": str_or_callable,
|
||||
"internal": auth.INTERNAL_TYPES}),
|
||||
("cache_logins", {
|
||||
"value": "false",
|
||||
"help": "cache successful/failed logins for until expiration time",
|
||||
"type": bool}),
|
||||
("cache_successful_logins_expiry", {
|
||||
"value": "15",
|
||||
"help": "expiration time for caching successful logins in seconds",
|
||||
"type": int}),
|
||||
("cache_failed_logins_expiry", {
|
||||
"value": "90",
|
||||
"help": "expiration time for caching failed logins in seconds",
|
||||
"type": int}),
|
||||
("htpasswd_filename", {
|
||||
"value": "/etc/radicale/users",
|
||||
"help": "htpasswd filename",
|
||||
"type": str}),
|
||||
"type": filepath}),
|
||||
("htpasswd_encryption", {
|
||||
"value": "bcrypt",
|
||||
"value": "autodetect",
|
||||
"help": "htpasswd encryption method",
|
||||
"type": str}),
|
||||
("htpasswd_cache", {
|
||||
"value": "False",
|
||||
"help": "enable caching of htpasswd file",
|
||||
"type": bool}),
|
||||
("dovecot_connection_type", {
|
||||
"value": "AF_UNIX",
|
||||
"help": "Connection type for dovecot authentication",
|
||||
"type": str_or_callable,
|
||||
"internal": auth.AUTH_SOCKET_FAMILY}),
|
||||
("dovecot_socket", {
|
||||
"value": "/var/run/dovecot/auth-client",
|
||||
"help": "dovecot auth AF_UNIX socket",
|
||||
"type": str}),
|
||||
("dovecot_host", {
|
||||
"value": "localhost",
|
||||
"help": "dovecot auth AF_INET or AF_INET6 host",
|
||||
"type": str}),
|
||||
("dovecot_port", {
|
||||
"value": "12345",
|
||||
"help": "dovecot auth port",
|
||||
"type": int}),
|
||||
("realm", {
|
||||
"value": "Radicale - Password Required",
|
||||
"help": "message displayed when a password is needed",
|
||||
"type": str}),
|
||||
("delay", {
|
||||
"value": "1",
|
||||
"help": "incorrect authentication delay",
|
||||
"type": positive_float})])),
|
||||
"type": positive_float}),
|
||||
("ldap_ignore_attribute_create_modify_timestamp", {
|
||||
"value": "false",
|
||||
"help": "Ignore modifyTimestamp and createTimestamp attributes. Need if Authentik LDAP server is used.",
|
||||
"type": bool}),
|
||||
("ldap_uri", {
|
||||
"value": "ldap://localhost",
|
||||
"help": "URI to the ldap server",
|
||||
"type": str}),
|
||||
("ldap_base", {
|
||||
"value": "",
|
||||
"help": "LDAP base DN of the ldap server",
|
||||
"type": str}),
|
||||
("ldap_reader_dn", {
|
||||
"value": "",
|
||||
"help": "the DN of a ldap user with read access to get the user accounts",
|
||||
"type": str}),
|
||||
("ldap_secret", {
|
||||
"value": "",
|
||||
"help": "the password of the ldap_reader_dn",
|
||||
"type": str}),
|
||||
("ldap_secret_file", {
|
||||
"value": "",
|
||||
"help": "path of the file containing the password of the ldap_reader_dn",
|
||||
"type": str}),
|
||||
("ldap_filter", {
|
||||
"value": "(cn={0})",
|
||||
"help": "the search filter to find the user DN to authenticate by the username",
|
||||
"type": str}),
|
||||
("ldap_user_attribute", {
|
||||
"value": "",
|
||||
"help": "the attribute to be used as username after authentication",
|
||||
"type": str}),
|
||||
("ldap_groups_attribute", {
|
||||
"value": "",
|
||||
"help": "attribute to read the group memberships from",
|
||||
"type": str}),
|
||||
("ldap_use_ssl", {
|
||||
"value": "False",
|
||||
"help": "Use ssl on the ldap connection",
|
||||
"type": bool}),
|
||||
("ldap_ssl_verify_mode", {
|
||||
"value": "REQUIRED",
|
||||
"help": "The certificate verification mode. NONE, OPTIONAL, default is REQUIRED",
|
||||
"type": str}),
|
||||
("ldap_ssl_ca_file", {
|
||||
"value": "",
|
||||
"help": "The path to the CA file in pem format which is used to certificate the server certificate",
|
||||
"type": str}),
|
||||
("imap_host", {
|
||||
"value": "localhost",
|
||||
"help": "IMAP server hostname: address|address:port|[address]:port|*localhost*",
|
||||
"type": imap_address}),
|
||||
("imap_security", {
|
||||
"value": "tls",
|
||||
"help": "Secure the IMAP connection: *tls*|starttls|none",
|
||||
"type": imap_security}),
|
||||
("oauth2_token_endpoint", {
|
||||
"value": "",
|
||||
"help": "OAuth2 token endpoint URL",
|
||||
"type": str}),
|
||||
("pam_group_membership", {
|
||||
"value": "",
|
||||
"help": "PAM group user should be member of",
|
||||
"type": str}),
|
||||
("pam_service", {
|
||||
"value": "radicale",
|
||||
"help": "PAM service",
|
||||
"type": str}),
|
||||
("strip_domain", {
|
||||
"value": "False",
|
||||
"help": "strip domain from username",
|
||||
"type": bool}),
|
||||
("uc_username", {
|
||||
"value": "False",
|
||||
"help": "convert username to uppercase, must be true for case-insensitive auth providers",
|
||||
"type": bool}),
|
||||
("lc_username", {
|
||||
"value": "False",
|
||||
"help": "convert username to lowercase, must be true for case-insensitive auth providers",
|
||||
"type": bool})])),
|
||||
("rights", OrderedDict([
|
||||
("type", {
|
||||
"value": "owner_only",
|
||||
"help": "rights backend",
|
||||
"type": str,
|
||||
"type": str_or_callable,
|
||||
"internal": rights.INTERNAL_TYPES}),
|
||||
("permit_delete_collection", {
|
||||
"value": "True",
|
||||
"help": "permit delete of a collection",
|
||||
"type": bool}),
|
||||
("permit_overwrite_collection", {
|
||||
"value": "True",
|
||||
"help": "permit overwrite of a collection",
|
||||
"type": bool}),
|
||||
("file", {
|
||||
"value": "/etc/radicale/rights",
|
||||
"help": "file for rights management from_file",
|
||||
"type": str})])),
|
||||
"type": filepath})])),
|
||||
("storage", OrderedDict([
|
||||
("type", {
|
||||
"value": "multifilesystem",
|
||||
"help": "storage backend",
|
||||
"type": str,
|
||||
"type": str_or_callable,
|
||||
"internal": storage.INTERNAL_TYPES}),
|
||||
("filesystem_folder", {
|
||||
"value": os.path.expanduser(
|
||||
"/var/lib/radicale/collections"),
|
||||
"value": "/var/lib/radicale/collections",
|
||||
"help": "path where collections are stored",
|
||||
"type": filepath}),
|
||||
("filesystem_cache_folder", {
|
||||
"value": "",
|
||||
"help": "path where cache of collections is stored in case of use_cache_subfolder_* options are active",
|
||||
"type": filepath}),
|
||||
("use_cache_subfolder_for_item", {
|
||||
"value": "False",
|
||||
"help": "use subfolder 'collection-cache' for 'item' cache file structure instead of inside collection folder",
|
||||
"type": bool}),
|
||||
("use_cache_subfolder_for_history", {
|
||||
"value": "False",
|
||||
"help": "use subfolder 'collection-cache' for 'history' cache file structure instead of inside collection folder",
|
||||
"type": bool}),
|
||||
("use_cache_subfolder_for_synctoken", {
|
||||
"value": "False",
|
||||
"help": "use subfolder 'collection-cache' for 'sync-token' cache file structure instead of inside collection folder",
|
||||
"type": bool}),
|
||||
("use_mtime_and_size_for_item_cache", {
|
||||
"value": "False",
|
||||
"help": "use mtime and file size instead of SHA256 for 'item' cache (improves speed)",
|
||||
"type": bool}),
|
||||
("folder_umask", {
|
||||
"value": "",
|
||||
"help": "umask for folder creation (empty: system default)",
|
||||
"type": str}),
|
||||
("max_sync_token_age", {
|
||||
"value": 2592000, # 30 days
|
||||
"value": "2592000", # 30 days
|
||||
"help": "delete sync token that are older",
|
||||
"type": int}),
|
||||
("filesystem_fsync", {
|
||||
"type": positive_int}),
|
||||
("skip_broken_item", {
|
||||
"value": "True",
|
||||
"help": "sync all changes to filesystem during requests",
|
||||
"type": bool}),
|
||||
("filesystem_locking", {
|
||||
"value": "True",
|
||||
"help": "lock the storage while accessing it",
|
||||
"type": bool}),
|
||||
("filesystem_close_lock_file", {
|
||||
"value": "False",
|
||||
"help": "close the lock file when no more clients are waiting",
|
||||
"help": "skip broken item instead of triggering exception",
|
||||
"type": bool}),
|
||||
("hook", {
|
||||
"value": "",
|
||||
"help": "command that is run after changes to storage",
|
||||
"type": str}),
|
||||
("_filesystem_fsync", {
|
||||
"value": "True",
|
||||
"help": "sync all changes to filesystem during requests",
|
||||
"type": bool}),
|
||||
("predefined_collections", {
|
||||
"value": "",
|
||||
"help": "predefined user collections",
|
||||
"type": json_str})])),
|
||||
("hook", OrderedDict([
|
||||
("type", {
|
||||
"value": "none",
|
||||
"help": "hook backend",
|
||||
"type": str,
|
||||
"internal": hook.INTERNAL_TYPES}),
|
||||
("rabbitmq_endpoint", {
|
||||
"value": "",
|
||||
"help": "endpoint where rabbitmq server is running",
|
||||
"type": str}),
|
||||
("rabbitmq_topic", {
|
||||
"value": "",
|
||||
"help": "topic to declare queue",
|
||||
"type": str}),
|
||||
("rabbitmq_queue_type", {
|
||||
"value": "",
|
||||
"help": "queue type for topic declaration",
|
||||
"type": str})])),
|
||||
("web", OrderedDict([
|
||||
("type", {
|
||||
"value": "internal",
|
||||
"help": "web interface backend",
|
||||
"type": str,
|
||||
"type": str_or_callable,
|
||||
"internal": web.INTERNAL_TYPES})])),
|
||||
("logging", OrderedDict([
|
||||
("config", {
|
||||
"value": "",
|
||||
"help": "logging configuration file",
|
||||
"type": str}),
|
||||
("debug", {
|
||||
("level", {
|
||||
"value": "info",
|
||||
"help": "threshold for the logger",
|
||||
"type": logging_level}),
|
||||
("bad_put_request_content", {
|
||||
"value": "False",
|
||||
"help": "print debug information",
|
||||
"aliases": ["-D", "--debug"],
|
||||
"help": "log bad PUT request content",
|
||||
"type": bool}),
|
||||
("full_environment", {
|
||||
("backtrace_on_debug", {
|
||||
"value": "False",
|
||||
"help": "store all environment variables",
|
||||
"help": "log backtrace on level=debug",
|
||||
"type": bool}),
|
||||
("request_header_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log request header on level=debug",
|
||||
"type": bool}),
|
||||
("request_content_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log request content on level=debug",
|
||||
"type": bool}),
|
||||
("response_content_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log response content on level=debug",
|
||||
"type": bool}),
|
||||
("rights_rule_doesnt_match_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log rights rules which doesn't match on level=debug",
|
||||
"type": bool}),
|
||||
("storage_cache_actions_on_debug", {
|
||||
"value": "False",
|
||||
"help": "log storage cache action on level=debug",
|
||||
"type": bool}),
|
||||
("mask_passwords", {
|
||||
"value": "True",
|
||||
"help": "mask passwords in logs",
|
||||
"type": bool})]))])
|
||||
"type": bool})])),
|
||||
("headers", OrderedDict([
|
||||
("_allow_extra", str)])),
|
||||
("reporting", OrderedDict([
|
||||
("max_freebusy_occurrence", {
|
||||
"value": "10000",
|
||||
"help": "number of occurrences per event when reporting",
|
||||
"type": positive_int})]))
|
||||
])
|
||||
|
||||
|
||||
def load(paths=(), extra_config=None, ignore_missing_paths=True):
|
||||
config = ConfigParser()
|
||||
for section, values in INITIAL_CONFIG.items():
|
||||
config.add_section(section)
|
||||
for key, data in values.items():
|
||||
config.set(section, key, data["value"])
|
||||
if extra_config:
|
||||
for section, values in extra_config.items():
|
||||
for key, value in values.items():
|
||||
config.set(section, key, value)
|
||||
for path in paths:
|
||||
if path or not ignore_missing_paths:
|
||||
try:
|
||||
if not config.read(path) and not ignore_missing_paths:
|
||||
raise RuntimeError("No such file: %r" % path)
|
||||
except Exception as e:
|
||||
raise RuntimeError(
|
||||
"Failed to load config file %r: %s" % (path, e)) from e
|
||||
# Check the configuration
|
||||
for section in config.sections():
|
||||
if section == "headers":
|
||||
continue
|
||||
if section not in INITIAL_CONFIG:
|
||||
raise RuntimeError("Invalid section %r in config" % section)
|
||||
allow_extra_options = ("type" in INITIAL_CONFIG[section] and
|
||||
config.get(section, "type") not in
|
||||
INITIAL_CONFIG[section]["type"].get("internal",
|
||||
()))
|
||||
for option in config[section]:
|
||||
if option not in INITIAL_CONFIG[section]:
|
||||
if allow_extra_options:
|
||||
continue
|
||||
raise RuntimeError("Invalid option %r in section %r in "
|
||||
"config" % (option, section))
|
||||
type_ = INITIAL_CONFIG[section][option]["type"]
|
||||
try:
|
||||
if type_ == bool:
|
||||
config.getboolean(section, option)
|
||||
def parse_compound_paths(*compound_paths: Optional[str]
|
||||
) -> List[Tuple[str, bool]]:
|
||||
"""Parse a compound path and return the individual paths.
|
||||
Paths in a compound path are joined by ``os.pathsep``. If a path starts
|
||||
with ``?`` the return value ``IGNORE_IF_MISSING`` is set.
|
||||
|
||||
When multiple ``compound_paths`` are passed, the last argument that is
|
||||
not ``None`` is used.
|
||||
|
||||
Returns a dict of the format ``[(PATH, IGNORE_IF_MISSING), ...]``
|
||||
|
||||
"""
|
||||
compound_path = ""
|
||||
for p in compound_paths:
|
||||
if p is not None:
|
||||
compound_path = p
|
||||
paths = []
|
||||
for path in compound_path.split(os.pathsep):
|
||||
ignore_if_missing = path.startswith("?")
|
||||
if ignore_if_missing:
|
||||
path = path[1:]
|
||||
path = filepath(path)
|
||||
if path:
|
||||
paths.append((path, ignore_if_missing))
|
||||
return paths
|
||||
|
||||
|
||||
def load(paths: Optional[Iterable[Tuple[str, bool]]] = None
|
||||
) -> "Configuration":
|
||||
"""
|
||||
Create instance of ``Configuration`` for use with
|
||||
``radicale.app.Application``.
|
||||
|
||||
``paths`` a list of configuration files with the format
|
||||
``[(PATH, IGNORE_IF_MISSING), ...]``.
|
||||
If a configuration file is missing and IGNORE_IF_MISSING is set, the
|
||||
config is set to ``Configuration.SOURCE_MISSING``.
|
||||
|
||||
The configuration can later be changed with ``Configuration.update()``.
|
||||
|
||||
"""
|
||||
if paths is None:
|
||||
paths = []
|
||||
configuration = Configuration(DEFAULT_CONFIG_SCHEMA)
|
||||
for path, ignore_if_missing in paths:
|
||||
parser = RawConfigParser()
|
||||
config_source = "config file %r" % path
|
||||
config: types.CONFIG
|
||||
try:
|
||||
with open(path) as f:
|
||||
parser.read_file(f)
|
||||
config = {s: {o: parser[s][o] for o in parser.options(s)}
|
||||
for s in parser.sections()}
|
||||
except Exception as e:
|
||||
if not (ignore_if_missing and isinstance(e, (
|
||||
FileNotFoundError, NotADirectoryError, PermissionError))):
|
||||
raise RuntimeError("Failed to load %s: %s" % (config_source, e)
|
||||
) from e
|
||||
config = Configuration.SOURCE_MISSING
|
||||
configuration.update(config, config_source)
|
||||
return configuration
|
||||
|
||||
|
||||
_Self = TypeVar("_Self", bound="Configuration")
|
||||
|
||||
|
||||
class Configuration:
|
||||
|
||||
SOURCE_MISSING: ClassVar[types.CONFIG] = {}
|
||||
|
||||
_schema: types.CONFIG_SCHEMA
|
||||
_values: types.MUTABLE_CONFIG
|
||||
_configs: List[Tuple[types.CONFIG, str, bool]]
|
||||
|
||||
def __init__(self, schema: types.CONFIG_SCHEMA) -> None:
|
||||
"""Initialize configuration.
|
||||
|
||||
``schema`` a dict that describes the configuration format.
|
||||
See ``DEFAULT_CONFIG_SCHEMA``.
|
||||
The content of ``schema`` must not change afterwards, it is kept
|
||||
as an internal reference.
|
||||
|
||||
Use ``load()`` to create an instance for use with
|
||||
``radicale.app.Application``.
|
||||
|
||||
"""
|
||||
self._schema = schema
|
||||
self._values = {}
|
||||
self._configs = []
|
||||
default = {section: {option: self._schema[section][option]["value"]
|
||||
for option in self._schema[section]
|
||||
if option not in INTERNAL_OPTIONS}
|
||||
for section in self._schema}
|
||||
self.update(default, "default config", privileged=True)
|
||||
|
||||
def update(self, config: types.CONFIG, source: Optional[str] = None,
|
||||
privileged: bool = False) -> None:
|
||||
"""Update the configuration.
|
||||
|
||||
``config`` a dict of the format {SECTION: {OPTION: VALUE, ...}, ...}.
|
||||
The configuration is checked for errors according to the config schema.
|
||||
The content of ``config`` must not change afterwards, it is kept
|
||||
as an internal reference.
|
||||
|
||||
``source`` a description of the configuration source (used in error
|
||||
messages).
|
||||
|
||||
``privileged`` allows updating sections and options starting with "_".
|
||||
|
||||
"""
|
||||
if source is None:
|
||||
source = "unspecified config"
|
||||
new_values: types.MUTABLE_CONFIG = {}
|
||||
for section in config:
|
||||
if (section not in self._schema or
|
||||
section.startswith("_") and not privileged):
|
||||
raise ValueError(
|
||||
"Invalid section %r in %s" % (section, source))
|
||||
new_values[section] = {}
|
||||
extra_type = None
|
||||
extra_type = self._schema[section].get("_allow_extra")
|
||||
if "type" in self._schema[section]:
|
||||
if "type" in config[section]:
|
||||
plugin = config[section]["type"]
|
||||
else:
|
||||
type_(config.get(section, option))
|
||||
except Exception as e:
|
||||
raise RuntimeError(
|
||||
"Invalid %s value for option %r in section %r in config: "
|
||||
"%r" % (type_.__name__, option, section,
|
||||
config.get(section, option))) from e
|
||||
return config
|
||||
plugin = self.get(section, "type")
|
||||
if plugin not in self._schema[section]["type"]["internal"]:
|
||||
extra_type = unspecified_type
|
||||
for option in config[section]:
|
||||
type_ = extra_type
|
||||
if option in self._schema[section]:
|
||||
type_ = self._schema[section][option]["type"]
|
||||
if (not type_ or option in INTERNAL_OPTIONS or
|
||||
option.startswith("_") and not privileged):
|
||||
raise RuntimeError("Invalid option %r in section %r in "
|
||||
"%s" % (option, section, source))
|
||||
raw_value = config[section][option]
|
||||
try:
|
||||
if type_ == bool and not isinstance(raw_value, bool):
|
||||
raw_value = _convert_to_bool(raw_value)
|
||||
new_values[section][option] = type_(raw_value)
|
||||
except Exception as e:
|
||||
raise RuntimeError(
|
||||
"Invalid %s value for option %r in section %r in %s: "
|
||||
"%r" % (type_.__name__, option, section, source,
|
||||
raw_value)) from e
|
||||
self._configs.append((config, source, bool(privileged)))
|
||||
for section in new_values:
|
||||
self._values[section] = self._values.get(section, {})
|
||||
self._values[section].update(new_values[section])
|
||||
|
||||
def get(self, section: str, option: str) -> Any:
|
||||
"""Get the value of ``option`` in ``section``."""
|
||||
with contextlib.suppress(KeyError):
|
||||
return self._values[section][option]
|
||||
raise KeyError(section, option)
|
||||
|
||||
def get_raw(self, section: str, option: str) -> Any:
|
||||
"""Get the raw value of ``option`` in ``section``."""
|
||||
for config, _, _ in reversed(self._configs):
|
||||
if option in config.get(section, {}):
|
||||
return config[section][option]
|
||||
raise KeyError(section, option)
|
||||
|
||||
def get_source(self, section: str, option: str) -> str:
|
||||
"""Get the source that provides ``option`` in ``section``."""
|
||||
for config, source, _ in reversed(self._configs):
|
||||
if option in config.get(section, {}):
|
||||
return source
|
||||
raise KeyError(section, option)
|
||||
|
||||
def sections(self) -> List[str]:
|
||||
"""List all sections."""
|
||||
return list(self._values.keys())
|
||||
|
||||
def options(self, section: str) -> List[str]:
|
||||
"""List all options in ``section``"""
|
||||
return list(self._values[section].keys())
|
||||
|
||||
def sources(self) -> List[Tuple[str, bool]]:
|
||||
"""List all config sources."""
|
||||
return [(source, config is self.SOURCE_MISSING) for
|
||||
config, source, _ in self._configs]
|
||||
|
||||
def copy(self: _Self, plugin_schema: Optional[types.CONFIG_SCHEMA] = None
|
||||
) -> _Self:
|
||||
"""Create a copy of the configuration
|
||||
|
||||
``plugin_schema`` is a optional dict that contains additional options
|
||||
for usage with a plugin. See ``DEFAULT_CONFIG_SCHEMA``.
|
||||
|
||||
"""
|
||||
if plugin_schema is None:
|
||||
schema = self._schema
|
||||
else:
|
||||
new_schema = dict(self._schema)
|
||||
for section, options in plugin_schema.items():
|
||||
if (section not in new_schema or
|
||||
"type" not in new_schema[section] or
|
||||
"internal" not in new_schema[section]["type"]):
|
||||
raise ValueError("not a plugin section: %r" % section)
|
||||
new_section = dict(new_schema[section])
|
||||
new_type = dict(new_section["type"])
|
||||
new_type["internal"] = (self.get(section, "type"),)
|
||||
new_section["type"] = new_type
|
||||
for option, value in options.items():
|
||||
if option in new_section:
|
||||
raise ValueError("option already exists in %r: %r" %
|
||||
(section, option))
|
||||
new_section[option] = value
|
||||
new_schema[section] = new_section
|
||||
schema = new_schema
|
||||
copy = type(self)(schema)
|
||||
for config, source, privileged in self._configs:
|
||||
copy.update(config, source, privileged)
|
||||
return copy
|
||||
|
|
69
radicale/hook/__init__.py
Normal file
69
radicale/hook/__init__.py
Normal file
|
@ -0,0 +1,69 @@
|
|||
import json
|
||||
from enum import Enum
|
||||
from typing import Sequence
|
||||
|
||||
from radicale import pathutils, utils
|
||||
from radicale.log import logger
|
||||
|
||||
INTERNAL_TYPES: Sequence[str] = ("none", "rabbitmq")
|
||||
|
||||
|
||||
def load(configuration):
|
||||
"""Load the storage module chosen in configuration."""
|
||||
try:
|
||||
return utils.load_plugin(
|
||||
INTERNAL_TYPES, "hook", "Hook", BaseHook, configuration)
|
||||
except Exception as e:
|
||||
logger.warning(e)
|
||||
logger.warning("Hook \"%s\" failed to load, falling back to \"none\"." % configuration.get("hook", "type"))
|
||||
configuration = configuration.copy()
|
||||
configuration.update({"hook": {"type": "none"}}, "hook", privileged=True)
|
||||
return utils.load_plugin(
|
||||
INTERNAL_TYPES, "hook", "Hook", BaseHook, configuration)
|
||||
|
||||
|
||||
class BaseHook:
|
||||
def __init__(self, configuration):
|
||||
"""Initialize BaseHook.
|
||||
|
||||
``configuration`` see ``radicale.config`` module.
|
||||
The ``configuration`` must not change during the lifetime of
|
||||
this object, it is kept as an internal reference.
|
||||
|
||||
"""
|
||||
self.configuration = configuration
|
||||
|
||||
def notify(self, notification_item):
|
||||
"""Upload a new or replace an existing item."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class HookNotificationItemTypes(Enum):
|
||||
CPATCH = "cpatch"
|
||||
UPSERT = "upsert"
|
||||
DELETE = "delete"
|
||||
|
||||
|
||||
def _cleanup(path):
|
||||
sane_path = pathutils.strip_path(path)
|
||||
attributes = sane_path.split("/") if sane_path else []
|
||||
|
||||
if len(attributes) < 2:
|
||||
return ""
|
||||
return attributes[0] + "/" + attributes[1]
|
||||
|
||||
|
||||
class HookNotificationItem:
|
||||
|
||||
def __init__(self, notification_item_type, path, content):
|
||||
self.type = notification_item_type.value
|
||||
self.point = _cleanup(path)
|
||||
self.content = content
|
||||
|
||||
def to_json(self):
|
||||
return json.dumps(
|
||||
self,
|
||||
default=lambda o: o.__dict__,
|
||||
sort_keys=True,
|
||||
indent=4
|
||||
)
|
6
radicale/hook/none.py
Normal file
6
radicale/hook/none.py
Normal file
|
@ -0,0 +1,6 @@
|
|||
from radicale import hook
|
||||
|
||||
|
||||
class Hook(hook.BaseHook):
|
||||
def notify(self, notification_item):
|
||||
"""Notify nothing. Empty hook."""
|
50
radicale/hook/rabbitmq/__init__.py
Normal file
50
radicale/hook/rabbitmq/__init__.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
import pika
|
||||
from pika.exceptions import ChannelWrongStateError, StreamLostError
|
||||
|
||||
from radicale import hook
|
||||
from radicale.hook import HookNotificationItem
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Hook(hook.BaseHook):
|
||||
|
||||
def __init__(self, configuration):
|
||||
super().__init__(configuration)
|
||||
self._endpoint = configuration.get("hook", "rabbitmq_endpoint")
|
||||
self._topic = configuration.get("hook", "rabbitmq_topic")
|
||||
self._queue_type = configuration.get("hook", "rabbitmq_queue_type")
|
||||
self._encoding = configuration.get("encoding", "stock")
|
||||
|
||||
self._make_connection_synced()
|
||||
self._make_declare_queue_synced()
|
||||
|
||||
def _make_connection_synced(self):
|
||||
parameters = pika.URLParameters(self._endpoint)
|
||||
connection = pika.BlockingConnection(parameters)
|
||||
self._channel = connection.channel()
|
||||
|
||||
def _make_declare_queue_synced(self):
|
||||
self._channel.queue_declare(queue=self._topic, durable=True, arguments={"x-queue-type": self._queue_type})
|
||||
|
||||
def notify(self, notification_item):
|
||||
if isinstance(notification_item, HookNotificationItem):
|
||||
self._notify(notification_item, True)
|
||||
|
||||
def _notify(self, notification_item, recall):
|
||||
try:
|
||||
self._channel.basic_publish(
|
||||
exchange='',
|
||||
routing_key=self._topic,
|
||||
body=notification_item.to_json().encode(
|
||||
encoding=self._encoding
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
if (isinstance(e, ChannelWrongStateError) or
|
||||
isinstance(e, StreamLostError)) and recall:
|
||||
self._make_connection_synced()
|
||||
self._notify(notification_item, False)
|
||||
return
|
||||
logger.error("An exception occurred during "
|
||||
"publishing hook notification item: %s",
|
||||
e, exc_info=True)
|
242
radicale/httputils.py
Normal file
242
radicale/httputils.py
Normal file
|
@ -0,0 +1,242 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Helper functions for HTTP.
|
||||
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
import time
|
||||
from http import client
|
||||
from typing import List, Mapping, Union, cast
|
||||
|
||||
from radicale import config, pathutils, types
|
||||
from radicale.log import logger
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
import pkg_resources
|
||||
|
||||
_TRAVERSABLE_LIKE_TYPE = pathlib.Path
|
||||
else:
|
||||
import importlib.abc
|
||||
from importlib import resources
|
||||
|
||||
if sys.version_info < (3, 13):
|
||||
_TRAVERSABLE_LIKE_TYPE = Union[importlib.abc.Traversable, pathlib.Path]
|
||||
else:
|
||||
_TRAVERSABLE_LIKE_TYPE = Union[importlib.resources.abc.Traversable, pathlib.Path]
|
||||
|
||||
NOT_ALLOWED: types.WSGIResponse = (
|
||||
client.FORBIDDEN, (("Content-Type", "text/plain"),),
|
||||
"Access to the requested resource forbidden.")
|
||||
FORBIDDEN: types.WSGIResponse = (
|
||||
client.FORBIDDEN, (("Content-Type", "text/plain"),),
|
||||
"Action on the requested resource refused.")
|
||||
BAD_REQUEST: types.WSGIResponse = (
|
||||
client.BAD_REQUEST, (("Content-Type", "text/plain"),), "Bad Request")
|
||||
NOT_FOUND: types.WSGIResponse = (
|
||||
client.NOT_FOUND, (("Content-Type", "text/plain"),),
|
||||
"The requested resource could not be found.")
|
||||
CONFLICT: types.WSGIResponse = (
|
||||
client.CONFLICT, (("Content-Type", "text/plain"),),
|
||||
"Conflict in the request.")
|
||||
METHOD_NOT_ALLOWED: types.WSGIResponse = (
|
||||
client.METHOD_NOT_ALLOWED, (("Content-Type", "text/plain"),),
|
||||
"The method is not allowed on the requested resource.")
|
||||
PRECONDITION_FAILED: types.WSGIResponse = (
|
||||
client.PRECONDITION_FAILED,
|
||||
(("Content-Type", "text/plain"),), "Precondition failed.")
|
||||
REQUEST_TIMEOUT: types.WSGIResponse = (
|
||||
client.REQUEST_TIMEOUT, (("Content-Type", "text/plain"),),
|
||||
"Connection timed out.")
|
||||
REQUEST_ENTITY_TOO_LARGE: types.WSGIResponse = (
|
||||
client.REQUEST_ENTITY_TOO_LARGE, (("Content-Type", "text/plain"),),
|
||||
"Request body too large.")
|
||||
REMOTE_DESTINATION: types.WSGIResponse = (
|
||||
client.BAD_GATEWAY, (("Content-Type", "text/plain"),),
|
||||
"Remote destination not supported.")
|
||||
DIRECTORY_LISTING: types.WSGIResponse = (
|
||||
client.FORBIDDEN, (("Content-Type", "text/plain"),),
|
||||
"Directory listings are not supported.")
|
||||
INSUFFICIENT_STORAGE: types.WSGIResponse = (
|
||||
client.INSUFFICIENT_STORAGE, (("Content-Type", "text/plain"),),
|
||||
"Insufficient Storage. Please contact the administrator.")
|
||||
INTERNAL_SERVER_ERROR: types.WSGIResponse = (
|
||||
client.INTERNAL_SERVER_ERROR, (("Content-Type", "text/plain"),),
|
||||
"A server error occurred. Please contact the administrator.")
|
||||
|
||||
DAV_HEADERS: str = "1, 2, 3, calendar-access, addressbook, extended-mkcol"
|
||||
|
||||
MIMETYPES: Mapping[str, str] = {
|
||||
".css": "text/css",
|
||||
".eot": "application/vnd.ms-fontobject",
|
||||
".gif": "image/gif",
|
||||
".html": "text/html",
|
||||
".js": "application/javascript",
|
||||
".manifest": "text/cache-manifest",
|
||||
".png": "image/png",
|
||||
".svg": "image/svg+xml",
|
||||
".ttf": "application/font-sfnt",
|
||||
".txt": "text/plain",
|
||||
".woff": "application/font-woff",
|
||||
".woff2": "font/woff2",
|
||||
".xml": "text/xml"}
|
||||
FALLBACK_MIMETYPE: str = "application/octet-stream"
|
||||
|
||||
|
||||
def decode_request(configuration: "config.Configuration",
|
||||
environ: types.WSGIEnviron, text: bytes) -> str:
|
||||
"""Try to magically decode ``text`` according to given ``environ``."""
|
||||
# List of charsets to try
|
||||
charsets: List[str] = []
|
||||
|
||||
# First append content charset given in the request
|
||||
content_type = environ.get("CONTENT_TYPE")
|
||||
if content_type and "charset=" in content_type:
|
||||
charsets.append(
|
||||
content_type.split("charset=")[1].split(";")[0].strip())
|
||||
# Then append default Radicale charset
|
||||
charsets.append(cast(str, configuration.get("encoding", "request")))
|
||||
# Then append various fallbacks
|
||||
charsets.append("utf-8")
|
||||
charsets.append("iso8859-1")
|
||||
# Remove duplicates
|
||||
for i, s in reversed(list(enumerate(charsets))):
|
||||
if s in charsets[:i]:
|
||||
del charsets[i]
|
||||
|
||||
# Try to decode
|
||||
for charset in charsets:
|
||||
with contextlib.suppress(UnicodeDecodeError):
|
||||
return text.decode(charset)
|
||||
raise UnicodeDecodeError("decode_request", text, 0, len(text),
|
||||
"all codecs failed [%s]" % ", ".join(charsets))
|
||||
|
||||
|
||||
def read_raw_request_body(configuration: "config.Configuration",
|
||||
environ: types.WSGIEnviron) -> bytes:
|
||||
content_length = int(environ.get("CONTENT_LENGTH") or 0)
|
||||
if not content_length:
|
||||
return b""
|
||||
content = environ["wsgi.input"].read(content_length)
|
||||
if len(content) < content_length:
|
||||
raise RuntimeError("Request body too short: %d" % len(content))
|
||||
return content
|
||||
|
||||
|
||||
def read_request_body(configuration: "config.Configuration",
                      environ: types.WSGIEnviron) -> str:
    """Read the request body and decode it to text."""
    raw = read_raw_request_body(configuration, environ)
    content = decode_request(configuration, environ, raw)
    # Logging the full body is opt-in, as it may contain private data.
    if configuration.get("logging", "request_content_on_debug"):
        logger.debug("Request content:\n%s", content)
    else:
        logger.debug("Request content: suppressed by config/option [logging] request_content_on_debug")
    return content
|
||||
|
||||
|
||||
def redirect(location: str, status: int = client.FOUND) -> types.WSGIResponse:
    """Build a WSGI response that redirects the client to ``location``."""
    headers = {"Location": location, "Content-Type": "text/plain"}
    return status, headers, "Redirected to %s" % location
|
||||
|
||||
|
||||
def _serve_traversable(
        traversable: _TRAVERSABLE_LIKE_TYPE, base_prefix: str, path: str,
        path_prefix: str, index_file: str, mimetypes: Mapping[str, str],
        fallback_mimetype: str) -> types.WSGIResponse:
    """Serve a static file or directory tree rooted at ``traversable``.

    ``path`` must lie under ``path_prefix``; the remainder is resolved one
    component at a time against ``traversable``. Directories are served via
    ``index_file`` (after redirecting to add a trailing slash). The
    Content-Type is looked up in ``mimetypes`` with ``fallback_mimetype``
    as the default. Unsafe or missing paths yield ``NOT_FOUND``.
    """
    if path != path_prefix and not path.startswith(path_prefix):
        raise ValueError("path must start with path_prefix: %r --> %r" %
                         (path_prefix, path))
    assert pathutils.sanitize_path(path) == path
    parts_path = path[len(path_prefix):].strip('/')
    parts = parts_path.split("/") if parts_path else []
    for part in parts:
        # Reject components that could escape the served tree ("..", etc.).
        if not pathutils.is_safe_filesystem_path_component(part):
            logger.debug("Web content with unsafe path %r requested", path)
            return NOT_FOUND
        if (not traversable.is_dir() or
                all(part != entry.name for entry in traversable.iterdir())):
            return NOT_FOUND
        traversable = traversable.joinpath(part)
    if traversable.is_dir():
        if not path.endswith("/"):
            # Canonicalize directory URLs with a trailing slash.
            return redirect(base_prefix + path + "/")
        if not index_file:
            return NOT_FOUND
        traversable = traversable.joinpath(index_file)
    if not traversable.is_file():
        return NOT_FOUND
    # BUGFIX: honor the ``mimetypes``/``fallback_mimetype`` parameters;
    # previously the module-level MIMETYPES/FALLBACK_MIMETYPE were always
    # used, silently ignoring caller-supplied overrides.
    content_type = mimetypes.get(
        os.path.splitext(traversable.name)[1].lower(), fallback_mimetype)
    headers = {"Content-Type": content_type}
    if isinstance(traversable, pathlib.Path):
        # Only real filesystem paths expose a modification time.
        headers["Last-Modified"] = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT",
            time.gmtime(traversable.stat().st_mtime))
    answer = traversable.read_bytes()
    if path == "/.web/index.html" or path == "/.web/":
        # enable link on the fly in index.html if InfCloud index.html is existing
        # class="infcloudlink-hidden" -> class="infcloudlink"
        path_posix = str(traversable)
        path_posix_infcloud = path_posix.replace("/internal_data/index.html", "/internal_data/infcloud/index.html")
        if os.path.isfile(path_posix_infcloud):
            # logger.debug("Enable InfCloud link in served page: %r", path)
            answer = answer.replace(b"infcloudlink-hidden", b"infcloud")
    elif path == "/.web/infcloud/config.js":
        # adjust on the fly default config.js of InfCloud installation
        # logger.debug("Adjust on-the-fly default InfCloud config.js in served page: %r", path)
        answer = answer.replace(b"location.pathname.replace(RegExp('/+[^/]+/*(index\\.html)?$'),'')+", b"location.pathname.replace(RegExp('/\\.web\\.infcloud/(index\\.html)?$'),'')+")
        answer = answer.replace(b"'/caldav.php/',", b"'/',")
        answer = answer.replace(b"settingsAccount: true,", b"settingsAccount: false,")
    elif path == "/.web/infcloud/main.js":
        # adjust on the fly default main.js of InfCloud installation
        logger.debug("Adjust on-the-fly default InfCloud main.js in served page: %r", path)
        answer = answer.replace(b"'InfCloud - the open source CalDAV/CardDAV web client'", b"'InfCloud - the open source CalDAV/CardDAV web client - served through Radicale CalDAV/CardDAV server'")
    return client.OK, headers, answer
|
||||
|
||||
|
||||
def serve_resource(
        package: str, resource: str, base_prefix: str, path: str,
        path_prefix: str = "/.web", index_file: str = "index.html",
        mimetypes: Mapping[str, str] = MIMETYPES,
        fallback_mimetype: str = FALLBACK_MIMETYPE) -> types.WSGIResponse:
    """Serve a file or directory shipped inside Python package ``package``."""
    if sys.version_info >= (3, 9):
        traversable = resources.files(package).joinpath(resource)
    else:
        # ``importlib.resources.files`` is unavailable before Python 3.9;
        # fall back to pkg_resources from setuptools.
        traversable = pathlib.Path(
            pkg_resources.resource_filename(package, resource))
    return _serve_traversable(traversable, base_prefix, path, path_prefix,
                              index_file, mimetypes, fallback_mimetype)
|
||||
|
||||
|
||||
def serve_folder(
        folder: str, base_prefix: str, path: str,
        path_prefix: str = "/.web", index_file: str = "index.html",
        mimetypes: Mapping[str, str] = MIMETYPES,
        fallback_mimetype: str = FALLBACK_MIMETYPE) -> types.WSGIResponse:
    """Serve files below the filesystem ``folder``.

    Deprecated: use `serve_resource` instead.
    """
    root = pathlib.Path(folder)
    return _serve_traversable(root, base_prefix, path, path_prefix,
                              index_file, mimetypes, fallback_mimetype)
|
478
radicale/item/__init__.py
Normal file
478
radicale/item/__init__.py
Normal file
|
@ -0,0 +1,478 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Module for address books and calendar entries (see ``Item``).
|
||||
|
||||
"""
|
||||
|
||||
import binascii
|
||||
import contextlib
|
||||
import math
|
||||
import os
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
from hashlib import sha256
|
||||
from itertools import chain
|
||||
from typing import (Any, Callable, List, MutableMapping, Optional, Sequence,
|
||||
Tuple)
|
||||
|
||||
import vobject
|
||||
|
||||
from radicale import storage # noqa:F401
|
||||
from radicale import pathutils
|
||||
from radicale.item import filter as radicale_filter
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
def read_components(s: str) -> List[vobject.base.Component]:
    """Wrapper for vobject.readComponents"""
    # Workaround for a bug in InfCloud: PHOTO properties are stored as data
    # URIs; strip the "data:...;base64," prefix so vobject accepts them.
    photo_fix = re.compile(
        r"^(PHOTO(?:;[^:\r\n]*)?;ENCODING=b(?:;[^:\r\n]*)?:)"
        r"data:[^;,\r\n]*;base64,",
        flags=re.MULTILINE | re.IGNORECASE)
    s = photo_fix.sub(r"\1", s)
    # Workaround for malformed ICS files containing control codes: drop every
    # control character except the expected ones:
    # * 0x09 Horizontal Tab
    # * 0x0A Line Feed
    # * 0x0D Carriage Return
    s = re.sub(r'[\x00-\x08\x0B\x0C\x0E-\x1F]', '', s)
    return list(vobject.readComponents(s, allowQP=True))
|
||||
|
||||
|
||||
def predict_tag_of_parent_collection(
        vobject_items: Sequence[vobject.base.Component]) -> Optional[str]:
    """Returns the predicted tag or `None`"""
    if len(vobject_items) != 1:
        # Ambiguous: exactly one item is required for a prediction.
        return None
    item_name = vobject_items[0].name
    if item_name == "VCALENDAR":
        return "VCALENDAR"
    if item_name in ("VCARD", "VLIST"):
        return "VADDRESSBOOK"
    return None
|
||||
|
||||
|
||||
def predict_tag_of_whole_collection(
        vobject_items: Sequence[vobject.base.Component],
        fallback_tag: Optional[str] = None) -> Optional[str]:
    """Returns the predicted tag or `fallback_tag`"""
    if vobject_items:
        first_name = vobject_items[0].name
        if first_name == "VCALENDAR":
            return "VCALENDAR"
        if first_name in ("VCARD", "VLIST"):
            return "VADDRESSBOOK"
    elif not fallback_tag:
        # Maybe an empty address book
        return "VADDRESSBOOK"
    return fallback_tag
|
||||
|
||||
|
||||
def check_and_sanitize_items(
        vobject_items: List[vobject.base.Component],
        is_collection: bool = False, tag: str = "") -> None:
    """Check vobject items for common errors and add missing UIDs.

    Modifies the list `vobject_items`.

    ``is_collection`` indicates that vobject_item contains unrelated
    components.

    The ``tag`` of the collection.

    """
    if tag and tag not in ("VCALENDAR", "VADDRESSBOOK", "VSUBSCRIBED"):
        raise ValueError("Unsupported collection tag: %r" % tag)
    # A single item must consist of exactly one component.
    if not is_collection and len(vobject_items) != 1:
        raise ValueError("Item contains %d components" % len(vobject_items))
    if tag == "VCALENDAR":
        if len(vobject_items) > 1:
            raise RuntimeError("VCALENDAR collection contains %d "
                               "components" % len(vobject_items))
        vobject_item = vobject_items[0]
        if vobject_item.name != "VCALENDAR":
            raise ValueError("Item type %r not supported in %r "
                             "collection" % (vobject_item.name, tag))
        # First pass: collect all existing UIDs so that any UIDs we generate
        # below cannot collide with them.
        component_uids = set()
        for component in vobject_item.components():
            if component.name in ("VTODO", "VEVENT", "VJOURNAL"):
                component_uid = get_uid(component)
                if component_uid:
                    component_uids.add(component_uid)
        # Second pass: validate component types/UIDs and apply client quirks.
        component_name = None
        object_uid = None
        object_uid_set = False
        for component in vobject_item.components():
            # https://tools.ietf.org/html/rfc4791#section-4.1
            if component.name == "VTIMEZONE":
                continue
            if component_name is None or is_collection:
                component_name = component.name
            elif component_name != component.name:
                raise ValueError("Multiple component types in object: %r, %r" %
                                 (component_name, component.name))
            if component_name not in ("VTODO", "VEVENT", "VJOURNAL"):
                continue
            component_uid = get_uid(component)
            if not object_uid_set or is_collection:
                object_uid_set = True
                object_uid = component_uid
                if not component_uid:
                    if not is_collection:
                        raise ValueError("%s component without UID in object" %
                                         component_name)
                    # In a whole collection a missing UID can be generated.
                    component_uid = find_available_uid(
                        component_uids.__contains__)
                    component_uids.add(component_uid)
                    if hasattr(component, "uid"):
                        component.uid.value = component_uid
                    else:
                        component.add("UID").value = component_uid
            elif not object_uid or not component_uid:
                raise ValueError("Multiple %s components without UID in "
                                 "object" % component_name)
            elif object_uid != component_uid:
                raise ValueError(
                    "Multiple %s components with different UIDs in object: "
                    "%r, %r" % (component_name, object_uid, component_uid))
            # Workaround for bug in Lightning (Thunderbird)
            # Rescheduling a single occurrence from a repeating event creates
            # an event with DTEND and DURATION:PT0S
            if (hasattr(component, "dtend") and
                    hasattr(component, "duration") and
                    component.duration.value == timedelta(0)):
                logger.debug("Quirks: Removing zero duration from %s in "
                             "object %r", component_name, component_uid)
                del component.duration
            # Workaround for Evolution
            # EXDATE has value DATE even if DTSTART/DTEND is DATE-TIME.
            # The RFC is vaguely formulated on the issue.
            # To resolve the issue convert EXDATE and RDATE to
            # the same type as DTDSTART
            if hasattr(component, "dtstart"):
                ref_date = component.dtstart.value
                ref_value_param = component.dtstart.params.get("VALUE")
                for dates in chain(component.contents.get("exdate", []),
                                   component.contents.get("rdate", [])):
                    # Skip when all dates already share DTSTART's type.
                    if all(type(d) is type(ref_date) for d in dates.value):
                        continue
                    for i, date in enumerate(dates.value):
                        dates.value[i] = ref_date.replace(
                            date.year, date.month, date.day)
                    with contextlib.suppress(KeyError):
                        del dates.params["VALUE"]
                    if ref_value_param is not None:
                        dates.params["VALUE"] = ref_value_param
            # vobject interprets recurrence rules on demand
            try:
                component.rruleset
            except Exception as e:
                raise ValueError("Invalid recurrence rules in %s in object %r"
                                 % (component.name, component_uid)) from e
    elif tag == "VADDRESSBOOK":
        # https://tools.ietf.org/html/rfc6352#section-5.1
        # First pass: collect existing UIDs (same rationale as above).
        object_uids = set()
        for vobject_item in vobject_items:
            if vobject_item.name == "VCARD":
                object_uid = get_uid(vobject_item)
                if object_uid:
                    object_uids.add(object_uid)
        for vobject_item in vobject_items:
            if vobject_item.name == "VLIST":
                # Custom format used by SOGo Connector to store lists of
                # contacts
                continue
            if vobject_item.name != "VCARD":
                raise ValueError("Item type %r not supported in %r "
                                 "collection" % (vobject_item.name, tag))
            object_uid = get_uid(vobject_item)
            if not object_uid:
                if not is_collection:
                    raise ValueError("%s object without UID" %
                                     vobject_item.name)
                object_uid = find_available_uid(object_uids.__contains__)
                object_uids.add(object_uid)
                if hasattr(vobject_item, "uid"):
                    vobject_item.uid.value = object_uid
                else:
                    vobject_item.add("UID").value = object_uid
    else:
        # No (or unsupported) collection tag: any item is an error.
        for item in vobject_items:
            raise ValueError("Item type %r not supported in %s collection" %
                             (item.name, repr(tag) if tag else "generic"))
|
||||
|
||||
|
||||
def check_and_sanitize_props(props: MutableMapping[Any, Any]
                             ) -> MutableMapping[str, str]:
    """Check collection properties for common errors.

    Modifies the dict `props`.

    """
    # Iterate over a snapshot so entries can be deleted while looping.
    for key, value in list(props.items()):
        if not isinstance(key, str):
            raise ValueError("Key must be %r not %r: %r" % (
                str.__name__, type(key).__name__, key))
        if value is None:
            # ``None`` means "remove this property".
            del props[key]
            continue
        if not isinstance(value, str):
            raise ValueError("Value of %r must be %r not %r: %r" % (
                key, str.__name__, type(value).__name__, value))
        if key == "tag" and value not in (
                "", "VCALENDAR", "VADDRESSBOOK", "VSUBSCRIBED"):
            raise ValueError("Unsupported collection tag: %r" % value)
    return props
|
||||
|
||||
|
||||
def find_available_uid(exists_fn: Callable[[str], bool], suffix: str = ""
                       ) -> str:
    """Generate a pseudo-random UID"""
    # Bounded number of attempts to prevent an infinite loop.
    for _attempt in range(1000):
        hexdigits = binascii.hexlify(os.urandom(16)).decode("ascii")
        groups = (hexdigits[:8], hexdigits[8:12], hexdigits[12:16],
                  hexdigits[16:20], hexdigits[20:])
        candidate = "-".join(groups) + suffix
        if not exists_fn(candidate):
            return candidate
    # Something is wrong with the PRNG or `exists_fn`
    raise RuntimeError("No available random UID found")
|
||||
|
||||
|
||||
def get_etag(text: str) -> str:
    """Etag from collection or item.

    Encoded as quoted-string (see RFC 2616).

    """
    digest = sha256(text.encode()).hexdigest()
    return '"%s"' % digest
|
||||
|
||||
|
||||
def get_uid(vobject_component: vobject.base.Component) -> str:
    """UID value of an item if defined."""
    if not hasattr(vobject_component, "uid"):
        return ""
    # ``value`` may be ``None``; normalize to an empty string.
    return vobject_component.uid.value or ""
|
||||
|
||||
|
||||
def get_uid_from_object(vobject_item: vobject.base.Component) -> str:
    """UID value of an calendar/addressbook object."""
    if vobject_item.name == "VCALENDAR":
        # Take the UID of the first primary component that is present.
        for attr in ("vevent", "vjournal", "vtodo"):
            if hasattr(vobject_item, attr):
                return get_uid(getattr(vobject_item, attr))
        return ""
    if vobject_item.name == "VCARD":
        return get_uid(vobject_item)
    return ""
|
||||
|
||||
|
||||
def find_tag(vobject_item: vobject.base.Component) -> str:
    """Find component name from ``vobject_item``."""
    if vobject_item.name != "VCALENDAR":
        return ""
    # The first sub-component that is not a timezone definition wins.
    for subcomponent in vobject_item.components():
        if subcomponent.name != "VTIMEZONE":
            return subcomponent.name or ""
    return ""
|
||||
|
||||
|
||||
def find_time_range(vobject_item: vobject.base.Component, tag: str
                    ) -> Tuple[int, int]:
    """Find enclosing time range from ``vobject item``.

    ``tag`` must be set to the return value of ``find_tag``.

    Returns a tuple (``start``, ``end``) where ``start`` and ``end`` are
    POSIX timestamps.

    This is intended to be used for matching against simplified prefilters.

    """
    if not tag:
        # Untagged items cannot be narrowed down; cover all of time.
        return radicale_filter.TIMESTAMP_MIN, radicale_filter.TIMESTAMP_MAX
    earliest = latest = None

    def on_range(range_start: datetime, range_end: datetime,
                 is_recurrence: bool) -> bool:
        # Widen the enclosing range; never abort the visit.
        nonlocal earliest, latest
        if earliest is None or range_start < earliest:
            earliest = range_start
        if latest is None or latest < range_end:
            latest = range_end
        return False

    def on_infinity(range_start: datetime) -> bool:
        # An infinite recurrence pins the end to the maximum; abort early.
        nonlocal earliest, latest
        if earliest is None or range_start < earliest:
            earliest = range_start
        latest = radicale_filter.DATETIME_MAX
        return True

    radicale_filter.visit_time_ranges(vobject_item, tag, on_range, on_infinity)
    if earliest is None:
        earliest = radicale_filter.DATETIME_MIN
    if latest is None:
        latest = radicale_filter.DATETIME_MAX
    return math.floor(earliest.timestamp()), math.ceil(latest.timestamp())
|
||||
|
||||
|
||||
class Item:
    """Class for address book and calendar entries."""

    # Parent collection (may be unset when only the path is known).
    collection: Optional["storage.BaseCollection"]
    # href of the item within its collection.
    href: Optional[str]
    # HTTP-datetime of the last modification.
    last_modified: Optional[str]

    # Sanitized, stripped path of the parent collection.
    _collection_path: str
    # Lazily-computed caches; ``None`` means "not computed yet".
    _text: Optional[str]
    _vobject_item: Optional[vobject.base.Component]
    _etag: Optional[str]
    _uid: Optional[str]
    _name: Optional[str]
    _component_name: Optional[str]
    _time_range: Optional[Tuple[int, int]]

    def __init__(self,
                 collection_path: Optional[str] = None,
                 collection: Optional["storage.BaseCollection"] = None,
                 vobject_item: Optional[vobject.base.Component] = None,
                 href: Optional[str] = None,
                 last_modified: Optional[str] = None,
                 text: Optional[str] = None,
                 etag: Optional[str] = None,
                 uid: Optional[str] = None,
                 name: Optional[str] = None,
                 component_name: Optional[str] = None,
                 time_range: Optional[Tuple[int, int]] = None):
        """Initialize an item.

        ``collection_path`` the path of the parent collection (optional if
        ``collection`` is set).

        ``collection`` the parent collection (optional).

        ``href`` the href of the item.

        ``last_modified`` the HTTP-datetime of when the item was modified.

        ``text`` the text representation of the item (optional if
        ``vobject_item`` is set).

        ``vobject_item`` the vobject item (optional if ``text`` is set).

        ``etag`` the etag of the item (optional). See ``get_etag``.

        ``uid`` the UID of the object (optional). See ``get_uid_from_object``.

        ``name`` the name of the item (optional). See ``vobject_item.name``.

        ``component_name`` the name of the primary component (optional).
        See ``find_tag``.

        ``time_range`` the enclosing time range. See ``find_time_range``.

        """
        if text is None and vobject_item is None:
            raise ValueError(
                "At least one of 'text' or 'vobject_item' must be set")
        if collection_path is None:
            if collection is None:
                raise ValueError("At least one of 'collection_path' or "
                                 "'collection' must be set")
            collection_path = collection.path
        assert collection_path == pathutils.strip_path(
            pathutils.sanitize_path(collection_path))
        self._collection_path = collection_path
        self.collection = collection
        self.href = href
        self.last_modified = last_modified
        self._text = text
        self._vobject_item = vobject_item
        self._etag = etag
        self._uid = uid
        self._name = name
        self._component_name = component_name
        self._time_range = time_range

    def serialize(self) -> str:
        """Text representation of the item (lazily serialized and cached)."""
        if self._text is None:
            try:
                self._text = self.vobject_item.serialize()
            except Exception as e:
                raise RuntimeError("Failed to serialize item %r from %r: %s" %
                                   (self.href, self._collection_path,
                                    e)) from e
        return self._text

    @property
    def vobject_item(self):
        """The parsed vobject item (lazily parsed from text and cached)."""
        if self._vobject_item is None:
            try:
                self._vobject_item = vobject.readOne(self._text)
            except Exception as e:
                raise RuntimeError("Failed to parse item %r from %r: %s" %
                                   (self.href, self._collection_path,
                                    e)) from e
        return self._vobject_item

    @property
    def etag(self) -> str:
        """Encoded as quoted-string (see RFC 2616)."""
        if self._etag is None:
            self._etag = get_etag(self.serialize())
        return self._etag

    @property
    def uid(self) -> str:
        """UID of the object. See ``get_uid_from_object``."""
        if self._uid is None:
            self._uid = get_uid_from_object(self.vobject_item)
        return self._uid

    @property
    def name(self) -> str:
        """Top-level component name (e.g. "VCALENDAR" or "VCARD")."""
        if self._name is None:
            self._name = self.vobject_item.name or ""
        return self._name

    @property
    def component_name(self) -> str:
        """Name of the primary component. See ``find_tag``."""
        if self._component_name is None:
            self._component_name = find_tag(self.vobject_item)
        return self._component_name

    @property
    def time_range(self) -> Tuple[int, int]:
        """Enclosing (start, end) POSIX timestamps. See ``find_time_range``."""
        if self._time_range is None:
            self._time_range = find_time_range(
                self.vobject_item, self.component_name)
        return self._time_range

    def prepare(self) -> None:
        """Fill cache with values."""
        # Compute and cache all derived values, but restore the original
        # (possibly unset) vobject item afterwards so a parsed copy isn't
        # kept in memory unnecessarily.
        orig_vobject_item = self._vobject_item
        self.serialize()
        self.etag
        self.uid
        self.name
        self.time_range
        self.component_name
        self._vobject_item = orig_vobject_item
|
595
radicale/item/filter.py
Normal file
595
radicale/item/filter.py
Normal file
|
@ -0,0 +1,595 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2015 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import math
|
||||
import xml.etree.ElementTree as ET
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
from itertools import chain
|
||||
from typing import (Callable, Iterable, Iterator, List, Optional, Sequence,
|
||||
Tuple)
|
||||
|
||||
import vobject
|
||||
|
||||
from radicale import item, xmlutils
|
||||
from radicale.log import logger
|
||||
|
||||
# Convenience granularities for date arithmetic.
DAY: timedelta = timedelta(days=1)
SECOND: timedelta = timedelta(seconds=1)
# Extreme UTC datetimes used as open bounds for time-range filters.
DATETIME_MIN: datetime = datetime.min.replace(tzinfo=timezone.utc)
DATETIME_MAX: datetime = datetime.max.replace(tzinfo=timezone.utc)
# The same bounds expressed as POSIX timestamps.
TIMESTAMP_MIN: int = math.floor(DATETIME_MIN.timestamp())
TIMESTAMP_MAX: int = math.ceil(DATETIME_MAX.timestamp())
|
||||
|
||||
|
||||
def date_to_datetime(d: date) -> datetime:
    """Transform any date to a UTC datetime.

    If ``d`` is a datetime without timezone, return as UTC datetime. If ``d``
    is already a datetime with timezone, return as is.

    """
    if not isinstance(d, datetime):
        # Plain date: promote to midnight of that day.
        d = datetime.combine(d, datetime.min.time())
    if not d.tzinfo:
        # NOTE: using vobject's UTC as it wasn't playing well with datetime's.
        d = d.replace(tzinfo=vobject.icalendar.utc)
    return d
|
||||
|
||||
|
||||
def parse_time_range(time_filter: ET.Element) -> Tuple[datetime, datetime]:
    """Extract ``(start, end)`` UTC datetimes from a time-range element.

    Missing bounds default to ``DATETIME_MIN``/``DATETIME_MAX``.
    """
    def parse(text):
        # Timestamps are in the basic iCalendar UTC format.
        return datetime.strptime(text, "%Y%m%dT%H%M%SZ").replace(
            tzinfo=timezone.utc)

    start_text = time_filter.get("start")
    end_text = time_filter.get("end")
    start = parse(start_text) if start_text else DATETIME_MIN
    end = parse(end_text) if end_text else DATETIME_MAX
    return start, end
|
||||
|
||||
|
||||
def time_range_timestamps(time_filter: ET.Element) -> Tuple[int, int]:
    """Like ``parse_time_range`` but returning POSIX timestamps."""
    start, end = parse_time_range(time_filter)
    start_ts = math.floor(start.timestamp())
    end_ts = math.ceil(end.timestamp())
    return (start_ts, end_ts)
|
||||
|
||||
|
||||
def comp_match(item: "item.Item", filter_: ET.Element, level: int = 0) -> bool:
    """Check whether the ``item`` matches the comp ``filter_``.

    If ``level`` is ``0``, the filter is applied on the
    item's collection. Otherwise, it's applied on the item.

    See rfc4791-9.7.1.

    """

    # TODO: Filtering VALARM and VFREEBUSY is not implemented
    # HACK: the filters are tested separately against all components

    if level == 0:
        tag = item.name
    elif level == 1:
        tag = item.component_name
    else:
        logger.warning(
            "Filters with three levels of comp-filter are not supported")
        return True
    if not tag:
        return False
    name = filter_.get("name", "").upper()
    if len(filter_) == 0:
        # Point #1 of rfc4791-9.7.1
        return name == tag
    if len(filter_) == 1:
        if filter_[0].tag == xmlutils.make_clark("C:is-not-defined"):
            # Point #2 of rfc4791-9.7.1
            return name != tag
    if name != tag:
        return False
    if (level == 0 and name != "VCALENDAR" or
            level == 1 and name not in ("VTODO", "VEVENT", "VJOURNAL")):
        logger.warning("Filtering %s is not supported", name)
        return True
    # Point #3 and #4 of rfc4791-9.7.1
    components = ([item.vobject_item] if level == 0
                  else list(getattr(item.vobject_item,
                                    "%s_list" % tag.lower())))
    for child in filter_:
        if child.tag == xmlutils.make_clark("C:prop-filter"):
            if not any(prop_match(comp, child, "C")
                       for comp in components):
                return False
        elif child.tag == xmlutils.make_clark("C:time-range"):
            # BUGFIX: match against the current ``child`` element; the
            # previous code passed ``filter_[0]``, which is only correct
            # when the time-range element happens to be the first child
            # (the RFC 4791 DTD puts it first, but this is more robust).
            if not time_range_match(item.vobject_item, child, tag):
                return False
        elif child.tag == xmlutils.make_clark("C:comp-filter"):
            if not comp_match(item, child, level=level + 1):
                return False
        else:
            raise ValueError("Unexpected %r in comp-filter" % child.tag)
    return True
|
||||
|
||||
|
||||
def prop_match(vobject_item: vobject.base.Component,
               filter_: ET.Element, ns: str) -> bool:
    """Check whether the ``item`` matches the prop ``filter_``.

    See rfc4791-9.7.2 and rfc6352-10.5.1.

    """
    prop_name = filter_.get("name", "").lower()
    is_defined = prop_name in vobject_item.contents
    if len(filter_) == 0:
        # Point #1 of rfc4791-9.7.2
        return is_defined
    if (len(filter_) == 1 and
            filter_[0].tag == xmlutils.make_clark("%s:is-not-defined" % ns)):
        # Point #2 of rfc4791-9.7.2
        return not is_defined
    if not is_defined:
        return False
    # Point #3 and #4 of rfc4791-9.7.2: every sub-filter must match.
    for child in filter_:
        if ns == "C" and child.tag == xmlutils.make_clark("C:time-range"):
            if not time_range_match(vobject_item, child, prop_name):
                return False
        elif child.tag == xmlutils.make_clark("%s:text-match" % ns):
            if not text_match(vobject_item, child, prop_name, ns):
                return False
        elif child.tag == xmlutils.make_clark("%s:param-filter" % ns):
            if not param_filter_match(vobject_item, child, prop_name, ns):
                return False
        else:
            raise ValueError("Unexpected %r in prop-filter" % child.tag)
    return True
|
||||
|
||||
|
||||
def time_range_match(vobject_item: vobject.base.Component,
                     filter_: ET.Element, child_name: str) -> bool:
    """Check whether the component/property ``child_name`` of
    ``vobject_item`` matches the time-range ``filter_``."""

    if not filter_.get("start") and not filter_.get("end"):
        # A time-range filter without any bound never matches.
        return False

    start, end = parse_time_range(filter_)
    found = False

    def check_range(range_start: datetime, range_end: datetime,
                    is_recurrence: bool) -> bool:
        nonlocal found
        # Overlap test between the occurrence and the filter window.
        if start < range_end and range_start < end:
            found = True
            return True  # stop visiting
        # Occurrence is past the window; later single events can't match.
        if end < range_start and not is_recurrence:
            return True
        return False

    def check_infinity(start: datetime) -> bool:
        # Infinite recurrences are expanded by the visitor; keep going.
        return False

    visit_time_ranges(vobject_item, child_name, check_range, check_infinity)
    return found
|
||||
|
||||
|
||||
def time_range_fill(vobject_item: vobject.base.Component,
                    filter_: ET.Element, child_name: str, n: int = 1
                    ) -> List[Tuple[datetime, datetime]]:
    """Create a list of ``n`` occurances from the component/property ``child_name``
    of ``vobject_item``."""
    if not filter_.get("start") and not filter_.get("end"):
        return []

    start, end = parse_time_range(filter_)
    occurrences: List[Tuple[datetime, datetime]] = []

    def collect_range(range_start: datetime, range_end: datetime,
                      is_recurrence: bool) -> bool:
        if start < range_end and range_start < end:
            occurrences.append((range_start, range_end))
            # Stop once enough occurrences were collected (n <= 0: no limit).
            if n > 0 and len(occurrences) >= n:
                return True
        if end < range_start and not is_recurrence:
            return True
        return False

    def collect_infinity(range_start: datetime) -> bool:
        return False

    visit_time_ranges(vobject_item, child_name, collect_range,
                      collect_infinity)
    return occurrences
|
||||
|
||||
|
||||
def visit_time_ranges(vobject_item: vobject.base.Component, child_name: str,
|
||||
range_fn: Callable[[datetime, datetime, bool], bool],
|
||||
infinity_fn: Callable[[datetime], bool]) -> None:
|
||||
"""Visit all time ranges in the component/property ``child_name`` of
|
||||
`vobject_item`` with visitors ``range_fn`` and ``infinity_fn``.
|
||||
|
||||
``range_fn`` gets called for every time_range with ``start`` and ``end``
|
||||
datetimes and ``is_recurrence`` as arguments. If the function returns True,
|
||||
the operation is cancelled.
|
||||
|
||||
``infinity_fn`` gets called when an infinite recurrence rule is detected
|
||||
with ``start`` datetime as argument. If the function returns True, the
|
||||
operation is cancelled.
|
||||
|
||||
See rfc4791-9.9.
|
||||
|
||||
"""
|
||||
|
||||
# HACK: According to rfc5545-3.8.4.4 a recurrence that is rescheduled
|
||||
# with Recurrence ID affects the recurrence itself and all following
|
||||
# recurrences too. This is not respected and client don't seem to bother
|
||||
# either.
|
||||
|
||||
def getrruleset(child: vobject.base.Component, ignore: Sequence[date]
|
||||
) -> Tuple[Iterable[date], bool]:
|
||||
infinite = False
|
||||
for rrule in child.contents.get("rrule", []):
|
||||
if (";UNTIL=" not in rrule.value.upper() and
|
||||
";COUNT=" not in rrule.value.upper()):
|
||||
infinite = True
|
||||
break
|
||||
if infinite:
|
||||
for dtstart in child.getrruleset(addRDate=True):
|
||||
if dtstart in ignore:
|
||||
continue
|
||||
if infinity_fn(date_to_datetime(dtstart)):
|
||||
return (), True
|
||||
break
|
||||
return filter(lambda dtstart: dtstart not in ignore,
|
||||
child.getrruleset(addRDate=True)), False
|
||||
|
||||
def get_children(components: Iterable[vobject.base.Component]) -> Iterator[
|
||||
Tuple[vobject.base.Component, bool, List[date]]]:
|
||||
main = None
|
||||
rec_main = None
|
||||
recurrences = []
|
||||
for comp in components:
|
||||
if hasattr(comp, "recurrence_id") and comp.recurrence_id.value:
|
||||
recurrences.append(comp.recurrence_id.value)
|
||||
if comp.rruleset:
|
||||
if comp.rruleset._len is None:
|
||||
logger.warning("Ignore empty RRULESET in item at RECURRENCE-ID with value '%s' and UID '%s'", comp.recurrence_id.value, comp.uid.value)
|
||||
else:
|
||||
# Prevent possible infinite loop
|
||||
raise ValueError("Overwritten recurrence with RRULESET")
|
||||
rec_main = comp
|
||||
yield comp, True, []
|
||||
else:
|
||||
if main is not None:
|
||||
raise ValueError("Multiple main components. Got comp: {}".format(comp))
|
||||
main = comp
|
||||
if main is None and len(recurrences) == 1:
|
||||
main = rec_main
|
||||
if main is None:
|
||||
raise ValueError("Main component missing")
|
||||
yield main, False, recurrences
|
||||
|
||||
# Comments give the lines in the tables of the specification
|
||||
if child_name == "VEVENT":
|
||||
for child, is_recurrence, recurrences in get_children(
|
||||
vobject_item.vevent_list):
|
||||
# TODO: check if there's a timezone
|
||||
dtstart = child.dtstart.value
|
||||
|
||||
if child.rruleset:
|
||||
dtstarts, infinity = getrruleset(child, recurrences)
|
||||
if infinity:
|
||||
return
|
||||
else:
|
||||
dtstarts = (dtstart,)
|
||||
|
||||
dtend = getattr(child, "dtend", None)
|
||||
if dtend is not None:
|
||||
dtend = dtend.value
|
||||
original_duration = (dtend - dtstart).total_seconds()
|
||||
dtend = date_to_datetime(dtend)
|
||||
|
||||
duration = getattr(child, "duration", None)
|
||||
if duration is not None:
|
||||
original_duration = duration = duration.value
|
||||
|
||||
for dtstart in dtstarts:
|
||||
dtstart_is_datetime = isinstance(dtstart, datetime)
|
||||
dtstart = date_to_datetime(dtstart)
|
||||
|
||||
if dtend is not None:
|
||||
# Line 1
|
||||
dtend = dtstart + timedelta(seconds=original_duration)
|
||||
if range_fn(dtstart, dtend, is_recurrence):
|
||||
return
|
||||
elif duration is not None:
|
||||
if original_duration is None:
|
||||
original_duration = duration.seconds
|
||||
if duration.seconds > 0:
|
||||
# Line 2
|
||||
if range_fn(dtstart, dtstart + duration,
|
||||
is_recurrence):
|
||||
return
|
||||
else:
|
||||
# Line 3
|
||||
if range_fn(dtstart, dtstart + SECOND, is_recurrence):
|
||||
return
|
||||
elif dtstart_is_datetime:
|
||||
# Line 4
|
||||
if range_fn(dtstart, dtstart + SECOND, is_recurrence):
|
||||
return
|
||||
else:
|
||||
# Line 5
|
||||
if range_fn(dtstart, dtstart + DAY, is_recurrence):
|
||||
return
|
||||
|
||||
elif child_name == "VTODO":
|
||||
for child, is_recurrence, recurrences in get_children(
|
||||
vobject_item.vtodo_list):
|
||||
dtstart = getattr(child, "dtstart", None)
|
||||
duration = getattr(child, "duration", None)
|
||||
due = getattr(child, "due", None)
|
||||
completed = getattr(child, "completed", None)
|
||||
created = getattr(child, "created", None)
|
||||
|
||||
if dtstart is not None:
|
||||
dtstart = date_to_datetime(dtstart.value)
|
||||
if duration is not None:
|
||||
duration = duration.value
|
||||
if due is not None:
|
||||
due = date_to_datetime(due.value)
|
||||
if dtstart is not None:
|
||||
original_duration = (due - dtstart).total_seconds()
|
||||
if completed is not None:
|
||||
completed = date_to_datetime(completed.value)
|
||||
if created is not None:
|
||||
created = date_to_datetime(created.value)
|
||||
original_duration = (completed - created).total_seconds()
|
||||
elif created is not None:
|
||||
created = date_to_datetime(created.value)
|
||||
|
||||
if child.rruleset:
|
||||
reference_dates, infinity = getrruleset(child, recurrences)
|
||||
if infinity:
|
||||
return
|
||||
else:
|
||||
if dtstart is not None:
|
||||
reference_dates = (dtstart,)
|
||||
elif due is not None:
|
||||
reference_dates = (due,)
|
||||
elif completed is not None:
|
||||
reference_dates = (completed,)
|
||||
elif created is not None:
|
||||
reference_dates = (created,)
|
||||
else:
|
||||
# Line 8
|
||||
if range_fn(DATETIME_MIN, DATETIME_MAX, is_recurrence):
|
||||
return
|
||||
reference_dates = ()
|
||||
|
||||
for reference_date in reference_dates:
|
||||
reference_date = date_to_datetime(reference_date)
|
||||
|
||||
if dtstart is not None and duration is not None:
|
||||
# Line 1
|
||||
if range_fn(reference_date,
|
||||
reference_date + duration + SECOND,
|
||||
is_recurrence):
|
||||
return
|
||||
if range_fn(reference_date + duration - SECOND,
|
||||
reference_date + duration + SECOND,
|
||||
is_recurrence):
|
||||
return
|
||||
elif dtstart is not None and due is not None:
|
||||
# Line 2
|
||||
due = reference_date + timedelta(seconds=original_duration)
|
||||
if (range_fn(reference_date, due, is_recurrence) or
|
||||
range_fn(reference_date,
|
||||
reference_date + SECOND, is_recurrence) or
|
||||
range_fn(due - SECOND, due, is_recurrence) or
|
||||
range_fn(due - SECOND, reference_date + SECOND,
|
||||
is_recurrence)):
|
||||
return
|
||||
elif dtstart is not None:
|
||||
if range_fn(reference_date, reference_date + SECOND,
|
||||
is_recurrence):
|
||||
return
|
||||
elif due is not None:
|
||||
# Line 4
|
||||
if range_fn(reference_date - SECOND, reference_date,
|
||||
is_recurrence):
|
||||
return
|
||||
elif completed is not None and created is not None:
|
||||
# Line 5
|
||||
completed = reference_date + timedelta(
|
||||
seconds=original_duration)
|
||||
if (range_fn(reference_date - SECOND,
|
||||
reference_date + SECOND,
|
||||
is_recurrence) or
|
||||
range_fn(completed - SECOND, completed + SECOND,
|
||||
is_recurrence) or
|
||||
range_fn(reference_date - SECOND,
|
||||
reference_date + SECOND, is_recurrence) or
|
||||
range_fn(completed - SECOND, completed + SECOND,
|
||||
is_recurrence)):
|
||||
return
|
||||
elif completed is not None:
|
||||
# Line 6
|
||||
if range_fn(reference_date - SECOND,
|
||||
reference_date + SECOND, is_recurrence):
|
||||
return
|
||||
elif created is not None:
|
||||
# Line 7
|
||||
if range_fn(reference_date, DATETIME_MAX, is_recurrence):
|
||||
return
|
||||
|
||||
elif child_name == "VJOURNAL":
|
||||
for child, is_recurrence, recurrences in get_children(
|
||||
vobject_item.vjournal_list):
|
||||
dtstart = getattr(child, "dtstart", None)
|
||||
|
||||
if dtstart is not None:
|
||||
dtstart = dtstart.value
|
||||
if child.rruleset:
|
||||
dtstarts, infinity = getrruleset(child, recurrences)
|
||||
if infinity:
|
||||
return
|
||||
else:
|
||||
dtstarts = (dtstart,)
|
||||
|
||||
for dtstart in dtstarts:
|
||||
dtstart_is_datetime = isinstance(dtstart, datetime)
|
||||
dtstart = date_to_datetime(dtstart)
|
||||
|
||||
if dtstart_is_datetime:
|
||||
# Line 1
|
||||
if range_fn(dtstart, dtstart + SECOND, is_recurrence):
|
||||
return
|
||||
else:
|
||||
# Line 2
|
||||
if range_fn(dtstart, dtstart + DAY, is_recurrence):
|
||||
return
|
||||
|
||||
else:
|
||||
# Match a property
|
||||
child = getattr(vobject_item, child_name.lower())
|
||||
if isinstance(child, date):
|
||||
child_is_datetime = isinstance(child, datetime)
|
||||
child = date_to_datetime(child)
|
||||
if child_is_datetime:
|
||||
range_fn(child, child + SECOND, False)
|
||||
else:
|
||||
range_fn(child, child + DAY, False)
|
||||
|
||||
|
||||
def text_match(vobject_item: vobject.base.Component,
|
||||
filter_: ET.Element, child_name: str, ns: str,
|
||||
attrib_name: Optional[str] = None) -> bool:
|
||||
"""Check whether the ``item`` matches the text-match ``filter_``.
|
||||
|
||||
See rfc4791-9.7.5.
|
||||
|
||||
"""
|
||||
# TODO: collations are not supported, but the default ones needed
|
||||
# for DAV servers are actually pretty useless. Texts are lowered to
|
||||
# be case-insensitive, almost as the "i;ascii-casemap" value.
|
||||
text = next(filter_.itertext()).lower()
|
||||
match_type = "contains"
|
||||
if ns == "CR":
|
||||
match_type = filter_.get("match-type", match_type)
|
||||
|
||||
def match(value: str) -> bool:
|
||||
value = value.lower()
|
||||
if match_type == "equals":
|
||||
return value == text
|
||||
if match_type == "contains":
|
||||
return text in value
|
||||
if match_type == "starts-with":
|
||||
return value.startswith(text)
|
||||
if match_type == "ends-with":
|
||||
return value.endswith(text)
|
||||
raise ValueError("Unexpected text-match match-type: %r" % match_type)
|
||||
|
||||
children = getattr(vobject_item, "%s_list" % child_name, [])
|
||||
if attrib_name is not None:
|
||||
condition = any(
|
||||
match(attrib) for child in children
|
||||
for attrib in child.params.get(attrib_name, []))
|
||||
else:
|
||||
res = []
|
||||
for child in children:
|
||||
# Some filters such as CATEGORIES provide a list in child.value
|
||||
if type(child.value) is list:
|
||||
for value in child.value:
|
||||
res.append(match(value))
|
||||
else:
|
||||
res.append(match(child.value))
|
||||
condition = any(res)
|
||||
if filter_.get("negate-condition") == "yes":
|
||||
return not condition
|
||||
return condition
|
||||
|
||||
|
||||
def param_filter_match(vobject_item: vobject.base.Component,
|
||||
filter_: ET.Element, parent_name: str, ns: str) -> bool:
|
||||
"""Check whether the ``item`` matches the param-filter ``filter_``.
|
||||
|
||||
See rfc4791-9.7.3.
|
||||
|
||||
"""
|
||||
name = filter_.get("name", "").upper()
|
||||
children = getattr(vobject_item, "%s_list" % parent_name, [])
|
||||
condition = any(name in child.params for child in children)
|
||||
if len(filter_) > 0:
|
||||
if filter_[0].tag == xmlutils.make_clark("%s:text-match" % ns):
|
||||
return condition and text_match(
|
||||
vobject_item, filter_[0], parent_name, ns, name)
|
||||
if filter_[0].tag == xmlutils.make_clark("%s:is-not-defined" % ns):
|
||||
return not condition
|
||||
return condition
|
||||
|
||||
|
||||
def simplify_prefilters(filters: Iterable[ET.Element], collection_tag: str
|
||||
) -> Tuple[Optional[str], int, int, bool]:
|
||||
"""Creates a simplified condition from ``filters``.
|
||||
|
||||
Returns a tuple (``tag``, ``start``, ``end``, ``simple``) where ``tag`` is
|
||||
a string or None (match all) and ``start`` and ``end`` are POSIX
|
||||
timestamps (as int). ``simple`` is a bool that indicates that ``filters``
|
||||
and the simplified condition are identical.
|
||||
|
||||
"""
|
||||
flat_filters = list(chain.from_iterable(filters))
|
||||
simple = len(flat_filters) <= 1
|
||||
for col_filter in flat_filters:
|
||||
if collection_tag != "VCALENDAR":
|
||||
simple = False
|
||||
break
|
||||
if (col_filter.tag != xmlutils.make_clark("C:comp-filter") or
|
||||
col_filter.get("name", "").upper() != "VCALENDAR"):
|
||||
simple = False
|
||||
continue
|
||||
simple &= len(col_filter) <= 1
|
||||
for comp_filter in col_filter:
|
||||
if comp_filter.tag != xmlutils.make_clark("C:comp-filter"):
|
||||
simple = False
|
||||
continue
|
||||
tag = comp_filter.get("name", "").upper()
|
||||
if comp_filter.find(
|
||||
xmlutils.make_clark("C:is-not-defined")) is not None:
|
||||
simple = False
|
||||
continue
|
||||
simple &= len(comp_filter) <= 1
|
||||
for time_filter in comp_filter:
|
||||
if tag not in ("VTODO", "VEVENT", "VJOURNAL"):
|
||||
simple = False
|
||||
break
|
||||
if time_filter.tag != xmlutils.make_clark("C:time-range"):
|
||||
simple = False
|
||||
continue
|
||||
start, end = time_range_timestamps(time_filter)
|
||||
return tag, start, end, simple
|
||||
return tag, TIMESTAMP_MIN, TIMESTAMP_MAX, simple
|
||||
return None, TIMESTAMP_MIN, TIMESTAMP_MAX, simple
|
260
radicale/log.py
260
radicale/log.py
|
@ -1,5 +1,7 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2011-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2023 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -15,61 +17,235 @@
|
|||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Radicale logging module.
|
||||
Functions to set up Python's logging facility for Radicale's WSGI application.
|
||||
|
||||
Manage logging from a configuration file. For more information, see:
|
||||
http://docs.python.org/library/logging.config.html
|
||||
Log messages are sent to the first available target of:
|
||||
|
||||
- Error stream specified by the WSGI server in "wsgi.errors"
|
||||
- ``sys.stderr``
|
||||
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import io
|
||||
import logging
|
||||
import logging.config
|
||||
import signal
|
||||
import os
|
||||
import socket
|
||||
import struct
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from typing import (Any, Callable, ClassVar, Dict, Iterator, Mapping, Optional,
|
||||
Tuple, Union, cast)
|
||||
|
||||
from radicale import types
|
||||
|
||||
def configure_from_file(logger, filename, debug):
|
||||
logging.config.fileConfig(filename, disable_existing_loggers=False)
|
||||
if debug:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
for handler in logger.handlers:
|
||||
handler.setLevel(logging.DEBUG)
|
||||
return logger
|
||||
LOGGER_NAME: str = "radicale"
|
||||
LOGGER_FORMATS: Mapping[str, str] = {
|
||||
"verbose": "[%(asctime)s] [%(ident)s] [%(levelname)s] %(message)s",
|
||||
"journal": "[%(ident)s] [%(levelname)s] %(message)s",
|
||||
}
|
||||
DATE_FORMAT: str = "%Y-%m-%d %H:%M:%S %z"
|
||||
|
||||
logger: logging.Logger = logging.getLogger(LOGGER_NAME)
|
||||
|
||||
|
||||
class RemoveTracebackFilter(logging.Filter):
|
||||
def filter(self, record):
|
||||
|
||||
def filter(self, record: logging.LogRecord) -> bool:
|
||||
record.exc_info = None
|
||||
return True
|
||||
|
||||
|
||||
def start(name="radicale", filename=None, debug=False):
|
||||
"""Start the logging according to the configuration."""
|
||||
logger = logging.getLogger(name)
|
||||
if debug:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
else:
|
||||
logger.addFilter(RemoveTracebackFilter())
|
||||
if filename:
|
||||
# Configuration taken from file
|
||||
REMOVE_TRACEBACK_FILTER: logging.Filter = RemoveTracebackFilter()
|
||||
|
||||
|
||||
class IdentLogRecordFactory:
|
||||
"""LogRecordFactory that adds ``ident`` attribute."""
|
||||
|
||||
def __init__(self, upstream_factory: Callable[..., logging.LogRecord]
|
||||
) -> None:
|
||||
self._upstream_factory = upstream_factory
|
||||
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> logging.LogRecord:
|
||||
record = self._upstream_factory(*args, **kwargs)
|
||||
ident = ("%d" % record.process if record.process is not None
|
||||
else record.processName or "unknown")
|
||||
tid = None
|
||||
if record.thread is not None:
|
||||
if record.thread != threading.main_thread().ident:
|
||||
ident += "/%s" % (record.threadName or "unknown")
|
||||
if (sys.version_info >= (3, 8) and
|
||||
record.thread == threading.get_ident()):
|
||||
tid = threading.get_native_id()
|
||||
record.ident = ident # type:ignore[attr-defined]
|
||||
record.tid = tid # type:ignore[attr-defined]
|
||||
return record
|
||||
|
||||
|
||||
class ThreadedStreamHandler(logging.Handler):
|
||||
"""Sends logging output to the stream registered for the current thread or
|
||||
``sys.stderr`` when no stream was registered."""
|
||||
|
||||
terminator: ClassVar[str] = "\n"
|
||||
|
||||
_streams: Dict[int, types.ErrorStream]
|
||||
_journal_stream_id: Optional[Tuple[int, int]]
|
||||
_journal_socket: Optional[socket.socket]
|
||||
_journal_socket_failed: bool
|
||||
_formatters: Mapping[str, logging.Formatter]
|
||||
_formatter: Optional[logging.Formatter]
|
||||
|
||||
def __init__(self, format_name: Optional[str] = None) -> None:
|
||||
super().__init__()
|
||||
self._streams = {}
|
||||
self._journal_stream_id = None
|
||||
with contextlib.suppress(TypeError, ValueError):
|
||||
dev, inode = os.environ.get("JOURNAL_STREAM", "").split(":", 1)
|
||||
self._journal_stream_id = (int(dev), int(inode))
|
||||
self._journal_socket = None
|
||||
self._journal_socket_failed = False
|
||||
self._formatters = {name: logging.Formatter(fmt, DATE_FORMAT)
|
||||
for name, fmt in LOGGER_FORMATS.items()}
|
||||
self._formatter = (self._formatters[format_name]
|
||||
if format_name is not None else None)
|
||||
|
||||
def _get_formatter(self, default_format_name: str) -> logging.Formatter:
|
||||
return self._formatter or self._formatters[default_format_name]
|
||||
|
||||
def _detect_journal(self, stream: types.ErrorStream) -> bool:
|
||||
if not self._journal_stream_id or not isinstance(stream, io.IOBase):
|
||||
return False
|
||||
try:
|
||||
configure_from_file(logger, filename, debug)
|
||||
except Exception as e:
|
||||
raise RuntimeError("Failed to load logging configuration file %r: "
|
||||
"%s" % (filename, e)) from e
|
||||
# Reload config on SIGHUP (UNIX only)
|
||||
if hasattr(signal, "SIGHUP"):
|
||||
def handler(signum, frame):
|
||||
try:
|
||||
configure_from_file(logger, filename, debug)
|
||||
except Exception as e:
|
||||
logger.error("Failed to reload logging configuration file "
|
||||
"%r: %s", filename, e, exc_info=True)
|
||||
signal.signal(signal.SIGHUP, handler)
|
||||
stat = os.fstat(stream.fileno())
|
||||
except OSError:
|
||||
return False
|
||||
return self._journal_stream_id == (stat.st_dev, stat.st_ino)
|
||||
|
||||
@staticmethod
|
||||
def _encode_journal(data: Mapping[str, Optional[Union[str, int]]]
|
||||
) -> bytes:
|
||||
msg = b""
|
||||
for key, value in data.items():
|
||||
if value is None:
|
||||
continue
|
||||
keyb = key.encode()
|
||||
valueb = str(value).encode()
|
||||
if b"\n" in valueb:
|
||||
msg += (keyb + b"\n" +
|
||||
struct.pack("<Q", len(valueb)) + valueb + b"\n")
|
||||
else:
|
||||
msg += keyb + b"=" + valueb + b"\n"
|
||||
return msg
|
||||
|
||||
def _try_emit_journal(self, record: logging.LogRecord) -> bool:
|
||||
if not self._journal_socket:
|
||||
# Try to connect to systemd journal socket
|
||||
if self._journal_socket_failed or not hasattr(socket, "AF_UNIX"):
|
||||
return False
|
||||
journal_socket = None
|
||||
try:
|
||||
journal_socket = socket.socket(
|
||||
socket.AF_UNIX, socket.SOCK_DGRAM)
|
||||
journal_socket.connect("/run/systemd/journal/socket")
|
||||
except OSError as e:
|
||||
self._journal_socket_failed = True
|
||||
if journal_socket:
|
||||
journal_socket.close()
|
||||
# Log after setting `_journal_socket_failed` to prevent loop!
|
||||
logger.error("Failed to connect to systemd journal: %s",
|
||||
e, exc_info=True)
|
||||
return False
|
||||
self._journal_socket = journal_socket
|
||||
|
||||
priority = {"DEBUG": 7,
|
||||
"INFO": 6,
|
||||
"WARNING": 4,
|
||||
"ERROR": 3,
|
||||
"CRITICAL": 2}.get(record.levelname, 4)
|
||||
timestamp = time.strftime("%Y-%m-%dT%H:%M:%S.%%03dZ",
|
||||
time.gmtime(record.created)) % record.msecs
|
||||
data = {"PRIORITY": priority,
|
||||
"TID": cast(Optional[int], getattr(record, "tid", None)),
|
||||
"SYSLOG_IDENTIFIER": record.name,
|
||||
"SYSLOG_FACILITY": 1,
|
||||
"SYSLOG_PID": record.process,
|
||||
"SYSLOG_TIMESTAMP": timestamp,
|
||||
"CODE_FILE": record.pathname,
|
||||
"CODE_LINE": record.lineno,
|
||||
"CODE_FUNC": record.funcName,
|
||||
"MESSAGE": self._get_formatter("journal").format(record)}
|
||||
self._journal_socket.sendall(self._encode_journal(data))
|
||||
return True
|
||||
|
||||
def emit(self, record: logging.LogRecord) -> None:
|
||||
try:
|
||||
stream = self._streams.get(threading.get_ident(), sys.stderr)
|
||||
if self._detect_journal(stream) and self._try_emit_journal(record):
|
||||
return
|
||||
msg = self._get_formatter("verbose").format(record)
|
||||
stream.write(msg + self.terminator)
|
||||
stream.flush()
|
||||
except Exception:
|
||||
self.handleError(record)
|
||||
|
||||
@types.contextmanager
|
||||
def register_stream(self, stream: types.ErrorStream) -> Iterator[None]:
|
||||
"""Register stream for logging output of the current thread."""
|
||||
key = threading.get_ident()
|
||||
self._streams[key] = stream
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
del self._streams[key]
|
||||
|
||||
|
||||
@types.contextmanager
|
||||
def register_stream(stream: types.ErrorStream) -> Iterator[None]:
|
||||
"""Register stream for logging output of the current thread."""
|
||||
yield
|
||||
|
||||
|
||||
def setup() -> None:
|
||||
"""Set global logging up."""
|
||||
global register_stream
|
||||
format_name = os.environ.get("RADICALE_LOG_FORMAT") or None
|
||||
sane_format_name = format_name if format_name in LOGGER_FORMATS else None
|
||||
handler = ThreadedStreamHandler(sane_format_name)
|
||||
logging.basicConfig(handlers=[handler])
|
||||
register_stream = handler.register_stream
|
||||
log_record_factory = IdentLogRecordFactory(logging.getLogRecordFactory())
|
||||
logging.setLogRecordFactory(log_record_factory)
|
||||
set_level(logging.INFO, True)
|
||||
if format_name != sane_format_name:
|
||||
logger.error("Invalid RADICALE_LOG_FORMAT: %r", format_name)
|
||||
|
||||
|
||||
logger_display_backtrace_disabled: bool = False
|
||||
logger_display_backtrace_enabled: bool = False
|
||||
|
||||
|
||||
def set_level(level: Union[int, str], backtrace_on_debug: bool) -> None:
|
||||
"""Set logging level for global logger."""
|
||||
global logger_display_backtrace_disabled
|
||||
global logger_display_backtrace_enabled
|
||||
if isinstance(level, str):
|
||||
level = getattr(logging, level.upper())
|
||||
assert isinstance(level, int)
|
||||
logger.setLevel(level)
|
||||
if level > logging.DEBUG:
|
||||
if logger_display_backtrace_disabled is False:
|
||||
logger.info("Logging of backtrace is disabled in this loglevel")
|
||||
logger_display_backtrace_disabled = True
|
||||
logger.addFilter(REMOVE_TRACEBACK_FILTER)
|
||||
else:
|
||||
# Default configuration, standard output
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter("[%(thread)x] %(levelname)s: %(message)s"))
|
||||
logger.addHandler(handler)
|
||||
return logger
|
||||
if not backtrace_on_debug:
|
||||
if logger_display_backtrace_disabled is False:
|
||||
logger.debug("Logging of backtrace is disabled by option in this loglevel")
|
||||
logger_display_backtrace_disabled = True
|
||||
logger.addFilter(REMOVE_TRACEBACK_FILTER)
|
||||
else:
|
||||
if logger_display_backtrace_enabled is False:
|
||||
logger.debug("Logging of backtrace is enabled by option in this loglevel")
|
||||
logger_display_backtrace_enabled = True
|
||||
logger.removeFilter(REMOVE_TRACEBACK_FILTER)
|
||||
|
|
316
radicale/pathutils.py
Normal file
316
radicale/pathutils.py
Normal file
|
@ -0,0 +1,316 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Helper functions for working with the file system.
|
||||
|
||||
"""
|
||||
|
||||
import errno
|
||||
import os
|
||||
import posixpath
|
||||
import sys
|
||||
import threading
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Iterator, Type, Union
|
||||
|
||||
from radicale import storage, types
|
||||
|
||||
if sys.platform == "win32":
|
||||
import ctypes
|
||||
import ctypes.wintypes
|
||||
import msvcrt
|
||||
|
||||
LOCKFILE_EXCLUSIVE_LOCK: int = 2
|
||||
ULONG_PTR: Union[Type[ctypes.c_uint32], Type[ctypes.c_uint64]]
|
||||
if ctypes.sizeof(ctypes.c_void_p) == 4:
|
||||
ULONG_PTR = ctypes.c_uint32
|
||||
else:
|
||||
ULONG_PTR = ctypes.c_uint64
|
||||
|
||||
class Overlapped(ctypes.Structure):
|
||||
_fields_ = [
|
||||
("internal", ULONG_PTR),
|
||||
("internal_high", ULONG_PTR),
|
||||
("offset", ctypes.wintypes.DWORD),
|
||||
("offset_high", ctypes.wintypes.DWORD),
|
||||
("h_event", ctypes.wintypes.HANDLE)]
|
||||
|
||||
kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)
|
||||
lock_file_ex = kernel32.LockFileEx
|
||||
lock_file_ex.argtypes = [
|
||||
ctypes.wintypes.HANDLE,
|
||||
ctypes.wintypes.DWORD,
|
||||
ctypes.wintypes.DWORD,
|
||||
ctypes.wintypes.DWORD,
|
||||
ctypes.wintypes.DWORD,
|
||||
ctypes.POINTER(Overlapped)]
|
||||
lock_file_ex.restype = ctypes.wintypes.BOOL
|
||||
unlock_file_ex = kernel32.UnlockFileEx
|
||||
unlock_file_ex.argtypes = [
|
||||
ctypes.wintypes.HANDLE,
|
||||
ctypes.wintypes.DWORD,
|
||||
ctypes.wintypes.DWORD,
|
||||
ctypes.wintypes.DWORD,
|
||||
ctypes.POINTER(Overlapped)]
|
||||
unlock_file_ex.restype = ctypes.wintypes.BOOL
|
||||
else:
|
||||
import fcntl
|
||||
|
||||
if sys.platform == "linux":
|
||||
import ctypes
|
||||
|
||||
RENAME_EXCHANGE: int = 2
|
||||
renameat2 = None
|
||||
try:
|
||||
renameat2 = ctypes.CDLL(None, use_errno=True).renameat2
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
renameat2.argtypes = [
|
||||
ctypes.c_int, ctypes.c_char_p,
|
||||
ctypes.c_int, ctypes.c_char_p,
|
||||
ctypes.c_uint]
|
||||
renameat2.restype = ctypes.c_int
|
||||
|
||||
if sys.platform == "darwin":
|
||||
# Definition missing in PyPy
|
||||
F_FULLFSYNC: int = getattr(fcntl, "F_FULLFSYNC", 51)
|
||||
|
||||
|
||||
class RwLock:
|
||||
"""A readers-Writer lock that locks a file."""
|
||||
|
||||
_path: str
|
||||
_readers: int
|
||||
_writer: bool
|
||||
_lock: threading.Lock
|
||||
|
||||
def __init__(self, path: str) -> None:
|
||||
self._path = path
|
||||
self._readers = 0
|
||||
self._writer = False
|
||||
self._lock = threading.Lock()
|
||||
|
||||
@property
|
||||
def locked(self) -> str:
|
||||
with self._lock:
|
||||
if self._readers > 0:
|
||||
return "r"
|
||||
if self._writer:
|
||||
return "w"
|
||||
return ""
|
||||
|
||||
@types.contextmanager
|
||||
def acquire(self, mode: str) -> Iterator[None]:
|
||||
if mode not in "rw":
|
||||
raise ValueError("Invalid mode: %r" % mode)
|
||||
with open(self._path, "w+") as lock_file:
|
||||
if sys.platform == "win32":
|
||||
handle = msvcrt.get_osfhandle(lock_file.fileno())
|
||||
flags = LOCKFILE_EXCLUSIVE_LOCK if mode == "w" else 0
|
||||
overlapped = Overlapped()
|
||||
try:
|
||||
if not lock_file_ex(handle, flags, 0, 1, 0, overlapped):
|
||||
raise ctypes.WinError()
|
||||
except OSError as e:
|
||||
raise RuntimeError("Locking the storage failed: %s" % e
|
||||
) from e
|
||||
else:
|
||||
_cmd = fcntl.LOCK_EX if mode == "w" else fcntl.LOCK_SH
|
||||
try:
|
||||
fcntl.flock(lock_file.fileno(), _cmd)
|
||||
except OSError as e:
|
||||
raise RuntimeError("Locking the storage failed: %s" % e
|
||||
) from e
|
||||
with self._lock:
|
||||
if self._writer or mode == "w" and self._readers != 0:
|
||||
raise RuntimeError("Locking the storage failed: "
|
||||
"Guarantees failed")
|
||||
if mode == "r":
|
||||
self._readers += 1
|
||||
else:
|
||||
self._writer = True
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
with self._lock:
|
||||
if mode == "r":
|
||||
self._readers -= 1
|
||||
self._writer = False
|
||||
|
||||
|
||||
def rename_exchange(src: str, dst: str) -> None:
|
||||
"""Exchange the files or directories `src` and `dst`.
|
||||
|
||||
Both `src` and `dst` must exist but may be of different types.
|
||||
|
||||
On Linux with renameat2 the operation is atomic.
|
||||
On other platforms it's not atomic.
|
||||
|
||||
"""
|
||||
src_dir, src_base = os.path.split(src)
|
||||
dst_dir, dst_base = os.path.split(dst)
|
||||
src_dir = src_dir or os.curdir
|
||||
dst_dir = dst_dir or os.curdir
|
||||
if not src_base or not dst_base:
|
||||
raise ValueError("Invalid arguments: %r -> %r" % (src, dst))
|
||||
if sys.platform == "linux" and renameat2:
|
||||
src_base_bytes = os.fsencode(src_base)
|
||||
dst_base_bytes = os.fsencode(dst_base)
|
||||
src_dir_fd = os.open(src_dir, 0)
|
||||
try:
|
||||
dst_dir_fd = os.open(dst_dir, 0)
|
||||
try:
|
||||
if renameat2(src_dir_fd, src_base_bytes,
|
||||
dst_dir_fd, dst_base_bytes,
|
||||
RENAME_EXCHANGE) == 0:
|
||||
return
|
||||
errno_ = ctypes.get_errno()
|
||||
# Fallback if RENAME_EXCHANGE not supported by filesystem
|
||||
if errno_ != errno.EINVAL:
|
||||
raise OSError(errno_, os.strerror(errno_))
|
||||
finally:
|
||||
os.close(dst_dir_fd)
|
||||
finally:
|
||||
os.close(src_dir_fd)
|
||||
with TemporaryDirectory(prefix=".Radicale.tmp-", dir=src_dir
|
||||
) as tmp_dir:
|
||||
os.rename(dst, os.path.join(tmp_dir, "interim"))
|
||||
os.rename(src, dst)
|
||||
os.rename(os.path.join(tmp_dir, "interim"), src)
|
||||
|
||||
|
||||
def fsync(fd: int) -> None:
|
||||
if sys.platform == "darwin":
|
||||
try:
|
||||
fcntl.fcntl(fd, F_FULLFSYNC)
|
||||
return
|
||||
except OSError as e:
|
||||
# Fallback if F_FULLFSYNC not supported by filesystem
|
||||
if e.errno != errno.EINVAL:
|
||||
raise
|
||||
os.fsync(fd)
|
||||
|
||||
|
||||
def strip_path(path: str) -> str:
|
||||
assert sanitize_path(path) == path
|
||||
return path.strip("/")
|
||||
|
||||
|
||||
def unstrip_path(stripped_path: str, trailing_slash: bool = False) -> str:
|
||||
assert strip_path(sanitize_path(stripped_path)) == stripped_path
|
||||
assert stripped_path or trailing_slash
|
||||
path = "/%s" % stripped_path
|
||||
if trailing_slash and not path.endswith("/"):
|
||||
path += "/"
|
||||
return path
|
||||
|
||||
|
||||
def sanitize_path(path: str) -> str:
    """Make ``path`` absolute with a leading slash to prevent access to
    other data.

    A potential trailing slash is preserved.
    """
    had_trailing_slash = path.endswith("/")
    normalized = posixpath.normpath(path)
    # Drop every component that is empty, "." , ".." or otherwise unsafe.
    safe_parts = [part for part in normalized.split("/")
                  if is_safe_path_component(part)]
    new_path = posixpath.join("/", *safe_parts)
    if had_trailing_slash and not new_path.endswith("/"):
        new_path += "/"
    return new_path
|
||||
|
||||
|
||||
def is_safe_path_component(path: str) -> bool:
    """Check that ``path`` is a single path component.

    It must be non-empty, contain no separator and not be a relative
    reference, so it is safe to join.
    """
    if not path:
        return False
    if "/" in path:
        return False
    return path not in (".", "..")
|
||||
|
||||
|
||||
def is_safe_filesystem_path_component(path: str) -> bool:
    """Check that ``path`` is a single component of a local and posix
    filesystem path and safe to join.
    """
    if not path:
        return False
    if os.path.splitdrive(path)[0]:
        return False
    if sys.platform == "win32" and ":" in path:
        # Block NTFS alternate data streams
        return False
    if os.path.split(path)[0]:
        return False
    if path in (os.curdir, os.pardir):
        return False
    if path.startswith(".") or path.endswith("~"):
        return False
    return is_safe_path_component(path)
|
||||
|
||||
|
||||
def path_to_filesystem(root: str, sane_path: str) -> str:
    """Convert `sane_path` to a local filesystem path relative to `root`.

    `root` must be a secure filesystem path, it will be prepend to the path.

    `sane_path` must be a sanitized path without leading or trailing ``/``.

    Conversion of `sane_path` is done in a secure manner,
    or raises ``ValueError``.

    """
    assert sane_path == strip_path(sanitize_path(sane_path))
    safe_path = root
    # An empty ``sane_path`` maps to ``root`` itself.
    parts = sane_path.split("/") if sane_path else []
    for part in parts:
        if not is_safe_filesystem_path_component(part):
            raise UnsafePathError(part)
        safe_path_parent = safe_path
        safe_path = os.path.join(safe_path, part)
        # Check for conflicting files (e.g. case-insensitive file systems
        # or short names on Windows file systems)
        if (os.path.lexists(safe_path) and
                part not in (e.name for e in os.scandir(safe_path_parent))):
            raise CollidingPathError(part)
    return safe_path
|
||||
|
||||
|
||||
class UnsafePathError(ValueError):
    """Raised when a name cannot be translated safely to a filesystem path."""

    def __init__(self, path: str) -> None:
        message = "Can't translate name safely to filesystem: %r" % path
        super().__init__(message)
|
||||
|
||||
|
||||
class CollidingPathError(ValueError):
    """Raised when a name collides with an existing file name."""

    def __init__(self, path: str) -> None:
        message = "File name collision: %r" % path
        super().__init__(message)
|
||||
|
||||
|
||||
def name_from_path(path: str, collection: "storage.BaseCollection") -> str:
    """Return Radicale item name from ``path``.

    Raises ``ValueError`` when ``path`` is not inside ``collection`` or
    the remainder is not a single safe path component.
    """
    assert sanitize_path(path) == path
    # Collection path with leading and trailing slash.
    start = unstrip_path(collection.path, True)
    if not (path + "/").startswith(start):
        raise ValueError("%r doesn't start with %r" % (path, start))
    # Remainder after the collection prefix; empty when ``path`` equals
    # the collection path itself.
    name = path[len(start):]
    if name and not is_safe_path_component(name):
        raise ValueError("%r is not a component in collection %r" %
                         (name, collection.path))
    return name
|
0
radicale/py.typed
Normal file
0
radicale/py.typed
Normal file
|
@ -1,176 +0,0 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Rights backends.
|
||||
|
||||
This module loads the rights backend, according to the rights
|
||||
configuration.
|
||||
|
||||
Default rights are based on a regex-based file whose name is specified in the
|
||||
config (section "right", key "file").
|
||||
|
||||
Authentication login is matched against the "user" key, and collection's path
|
||||
is matched against the "collection" key. You can use Python's ConfigParser
|
||||
interpolation values %(login)s and %(path)s. You can also get groups from the
|
||||
user regex in the collection with {0}, {1}, etc.
|
||||
|
||||
For example, for the "user" key, ".+" means "authenticated user" and ".*"
|
||||
means "anybody" (including anonymous users).
|
||||
|
||||
Section names are only used for naming the rule.
|
||||
|
||||
Leading or ending slashes are trimmed from collection's path.
|
||||
|
||||
"""
|
||||
|
||||
import configparser
|
||||
import os.path
|
||||
import posixpath
|
||||
import re
|
||||
from importlib import import_module
|
||||
|
||||
from . import storage
|
||||
|
||||
INTERNAL_TYPES = ("None", "none", "authenticated", "owner_write", "owner_only",
|
||||
"from_file")
|
||||
|
||||
|
||||
def load(configuration, logger):
    """Load the rights manager chosen in configuration."""
    rights_type = configuration.get("rights", "type")
    if configuration.get("auth", "type") in ("None", "none"):  # DEPRECATED
        # Without authentication fall back to granting everything.
        rights_type = "None"
    if rights_type in ("None", "none"):  # DEPRECATED: use "none"
        rights_class = NoneRights
    elif rights_type == "authenticated":
        rights_class = AuthenticatedRights
    elif rights_type == "owner_write":
        rights_class = OwnerWriteRights
    elif rights_type == "owner_only":
        rights_class = OwnerOnlyRights
    elif rights_type == "from_file":
        rights_class = Rights
    else:
        # Any other value is treated as the name of an external module
        # that provides a ``Rights`` class.
        try:
            rights_class = import_module(rights_type).Rights
        except Exception as e:
            raise RuntimeError("Failed to load rights module %r: %s" %
                               (rights_type, e)) from e
    logger.info("Rights type is %r", rights_type)
    return rights_class(configuration, logger)
|
||||
|
||||
|
||||
class BaseRights:
    """Abstract base class for rights backends."""

    def __init__(self, configuration, logger):
        # Keep references for use by subclasses.
        self.configuration = configuration
        self.logger = logger

    def authorized(self, user, path, permission):
        """Check if the user is allowed to read or write the collection.

        If ``user`` is empty, check for anonymous rights.

        ``path`` is sanitized.

        ``permission`` is "r" or "w".

        """
        raise NotImplementedError

    def authorized_item(self, user, path, permission):
        """Check if the user is allowed to read or write the item."""
        path = storage.sanitize_path(path)
        # An item inherits the rights of its parent collection.
        parent_path = storage.sanitize_path(
            "/%s/" % posixpath.dirname(path.strip("/")))
        return self.authorized(user, parent_path, permission)
|
||||
|
||||
|
||||
class NoneRights(BaseRights):
    """Rights backend that grants every request, including anonymous ones."""

    def authorized(self, user, path, permission):
        # No restrictions at all.
        return True
|
||||
|
||||
|
||||
class AuthenticatedRights(BaseRights):
    """Rights backend that grants everything to any authenticated user."""

    def authorized(self, user, path, permission):
        # A non-empty login counts as authenticated.
        return bool(user)
|
||||
|
||||
|
||||
class OwnerWriteRights(BaseRights):
    """Authenticated users may read everything; only owners may write."""

    def authorized(self, user, path, permission):
        if not user:
            return False
        if permission == "r":
            return True
        # The first path component is the owner of the collection.
        owner = storage.sanitize_path(path).strip("/").split(
            "/", maxsplit=1)[0]
        return user == owner
|
||||
|
||||
|
||||
class OwnerOnlyRights(BaseRights):
    """Only the owner may access a collection; the root is readable."""

    def authorized(self, user, path, permission):
        sane_path = storage.sanitize_path(path).strip("/")
        if not user:
            return False
        if permission == "r" and not sane_path:
            # The empty root path is readable by any authenticated user.
            return True
        return user == sane_path.split("/", maxsplit=1)[0]

    def authorized_item(self, user, path, permission):
        sane_path = storage.sanitize_path(path).strip("/")
        # Items always live below a principal collection; a path without
        # a separator cannot contain an item.
        if "/" not in sane_path:
            return False
        return super().authorized_item(user, path, permission)
|
||||
|
||||
|
||||
class Rights(BaseRights):
    """Rights backend based on a regex rules file (section "rights", key
    "file" in the configuration)."""

    def __init__(self, configuration, logger):
        super().__init__(configuration, logger)
        self.filename = os.path.expanduser(configuration.get("rights", "file"))

    def authorized(self, user, path, permission):
        user = user or ""
        sane_path = storage.sanitize_path(path).strip("/")
        # Prevent "regex injection"
        user_escaped = re.escape(user)
        sane_path_escaped = re.escape(sane_path)
        # %(login)s and %(path)s are available in the rights file via
        # ConfigParser interpolation.
        regex = configparser.ConfigParser(
            {"login": user_escaped, "path": sane_path_escaped})
        try:
            # The rights file is re-read on every check; ``read`` returns
            # the list of successfully parsed files.
            if not regex.read(self.filename):
                raise RuntimeError("No such file: %r" %
                                   self.filename)
        except Exception as e:
            raise RuntimeError("Failed to load rights file %r: %s" %
                               (self.filename, e)) from e
        for section in regex.sections():
            try:
                re_user_pattern = regex.get(section, "user")
                re_collection_pattern = regex.get(section, "collection")
                # Emulate fullmatch
                user_match = re.match(r"(?:%s)\Z" % re_user_pattern, user)
                # Groups captured by the user regex can be referenced in
                # the collection pattern as {0}, {1}, ...
                collection_match = user_match and re.match(
                    r"(?:%s)\Z" % re_collection_pattern.format(
                        *map(re.escape, user_match.groups())), sane_path)
            except Exception as e:
                raise RuntimeError("Error in section %r of rights file %r: "
                                   "%s" % (section, self.filename, e)) from e
            if user_match and collection_match:
                self.logger.debug("Rule %r:%r matches %r:%r from section %r",
                                  user, sane_path, re_user_pattern,
                                  re_collection_pattern, section)
                # The first matching section decides.
                return permission in regex.get(section, "permission")
            else:
                self.logger.debug("Rule %r:%r doesn't match %r:%r from section"
                                  " %r", user, sane_path, re_user_pattern,
                                  re_collection_pattern, section)
        self.logger.info(
            "Rights: %r:%r doesn't match any section", user, sane_path)
        return False
|
82
radicale/rights/__init__.py
Normal file
82
radicale/rights/__init__.py
Normal file
|
@ -0,0 +1,82 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
The rights module used to determine if a user can read and/or write
|
||||
collections and entries.
|
||||
|
||||
Permissions:
|
||||
|
||||
- R: read collections (excluding address books and calendars)
|
||||
- r: read address book and calendar collections
|
||||
- i: subset of **r** that only allows direct access via HTTP method GET
|
||||
(CalDAV/CardDAV is susceptible to expensive search requests)
|
||||
- W: write collections (excluding address books and calendars)
|
||||
- w: write address book and calendar collections
|
||||
|
||||
Take a look at the class ``BaseRights`` if you want to implement your own.
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Set
|
||||
|
||||
from radicale import config, utils
|
||||
|
||||
INTERNAL_TYPES: Sequence[str] = ("authenticated", "owner_write", "owner_only",
|
||||
"from_file")
|
||||
|
||||
|
||||
def load(configuration: "config.Configuration") -> "BaseRights":
    """Load the rights module chosen in configuration.

    Delegates to ``utils.load_plugin`` with the internal backend names;
    the returned object is an instance of a ``BaseRights`` subclass.
    """
    return utils.load_plugin(INTERNAL_TYPES, "rights", "Rights", BaseRights,
                             configuration)
|
||||
|
||||
|
||||
def intersect(a: str, b: str) -> str:
    """Intersect two lists of rights.

    Returns all rights that are both in ``a`` and ``b``.

    Each common right appears exactly once, in the order of its first
    appearance in ``a``.  (The previous ``set``-based implementation
    returned the characters in nondeterministic order because of string
    hash randomization; callers only test membership, so the fixed order
    is backward-compatible.)
    """
    b_chars = set(b)
    # dict.fromkeys deduplicates while preserving insertion order.
    return "".join(right for right in dict.fromkeys(a) if right in b_chars)
|
||||
|
||||
|
||||
class BaseRights:
    """Base class for rights backends."""

    # Groups of the current user; default is empty.
    # NOTE(review): presumably filled in by the authentication layer —
    # confirm against the auth module.
    _user_groups: Set[str] = set()

    def __init__(self, configuration: "config.Configuration") -> None:
        """Initialize BaseRights.

        ``configuration`` see ``radicale.config`` module.
        The ``configuration`` must not change during the lifetime of
        this object, it is kept as an internal reference.

        """
        self.configuration = configuration

    def authorization(self, user: str, path: str) -> str:
        """Get granted rights of ``user`` for the collection ``path``.

        If ``user`` is empty, check for anonymous rights.

        ``path`` is sanitized.

        Returns granted rights (e.g. ``"RW"``).

        """
        raise NotImplementedError
|
41
radicale/rights/authenticated.py
Normal file
41
radicale/rights/authenticated.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Rights backend that allows authenticated users to read and write all
|
||||
calendars and address books.
|
||||
|
||||
"""
|
||||
|
||||
from radicale import config, pathutils, rights
|
||||
|
||||
|
||||
class Rights(rights.BaseRights):
    """Grant authenticated users full access (see module docstring)."""

    def __init__(self, configuration: config.Configuration) -> None:
        super().__init__(configuration)
        # Skip the user check entirely when authentication is disabled.
        self._verify_user = self.configuration.get("auth", "type") != "none"

    def authorization(self, user: str, path: str) -> str:
        if self._verify_user and not user:
            return ""
        depth = pathutils.strip_path(path).count("/")
        if depth == 0:
            return "RW"
        if depth == 1:
            return "rw"
        return ""
|
109
radicale/rights/from_file.py
Normal file
109
radicale/rights/from_file.py
Normal file
|
@ -0,0 +1,109 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Rights backend based on a regex-based file whose name is specified in the
|
||||
config (section "rights", key "file").
|
||||
|
||||
The login is matched against the "user" key, and the collection path
|
||||
is matched against the "collection" key. In the "collection" regex you can use
|
||||
`{user}` and get groups from the "user" regex with `{0}`, `{1}`, etc.
|
||||
In consequence of the parameter substitution you have to write `{{` and `}}`
|
||||
if you want to use regular curly braces in the "user" and "collection" regexes.
|
||||
|
||||
For example, for the "user" key, ".+" means "authenticated user" and ".*"
|
||||
means "anybody" (including anonymous users).
|
||||
|
||||
Section names are only used for naming the rule.
|
||||
|
||||
Leading or ending slashes are trimmed from collection's path.
|
||||
|
||||
"""
|
||||
|
||||
import configparser
|
||||
import re
|
||||
|
||||
from radicale import config, pathutils, rights
|
||||
from radicale.log import logger
|
||||
|
||||
|
||||
class Rights(rights.BaseRights):
    """Rights backend based on a regex rules file (section "rights",
    key "file" in the configuration)."""

    # Path of the rights file.
    _filename: str

    def __init__(self, configuration: config.Configuration) -> None:
        super().__init__(configuration)
        self._filename = configuration.get("rights", "file")
        self._log_rights_rule_doesnt_match_on_debug = configuration.get("logging", "rights_rule_doesnt_match_on_debug")
        # The file is parsed once at startup, not per request.
        self._rights_config = configparser.ConfigParser()
        try:
            with open(self._filename, "r") as f:
                self._rights_config.read_file(f)
            logger.debug("Read rights file")
        except Exception as e:
            raise RuntimeError("Failed to load rights file %r: %s" %
                               (self._filename, e)) from e

    def authorization(self, user: str, path: str) -> str:
        """Return the permission string of the first matching section,
        or "" when no section matches."""
        user = user or ""
        sane_path = pathutils.strip_path(path)
        # Prevent "regex injection"
        escaped_user = re.escape(user)
        if not self._log_rights_rule_doesnt_match_on_debug:
            logger.debug("logging of rules which doesn't match suppressed by config/option [logging] rights_rule_doesnt_match_on_debug")
        for section in self._rights_config.sections():
            # ``None`` means "no match"; both stay None when the section
            # defines no user pattern / no applicable groups.
            group_match = None
            user_match = None
            try:
                user_pattern = self._rights_config.get(section, "user", fallback="")
                collection_pattern = self._rights_config.get(section, "collection")
                allowed_groups = self._rights_config.get(section, "groups", fallback="").split(",")
                try:
                    # True when the user belongs to at least one listed group.
                    group_match = len(self._user_groups.intersection(allowed_groups)) > 0
                except Exception:
                    pass
                # Use empty format() for harmonized handling of curly braces
                if user_pattern != "":
                    user_match = re.fullmatch(user_pattern.format(), user)
                # Groups captured by the user regex are available in the
                # collection pattern as {0}, {1}, ... and the escaped login
                # as {user}.  Short-circuits to None when user_match is None.
                user_collection_match = user_match and re.fullmatch(
                    collection_pattern.format(
                        *(re.escape(s) for s in user_match.groups()),
                        user=escaped_user), sane_path)
                group_collection_match = group_match and re.fullmatch(
                    collection_pattern.format(user=escaped_user), sane_path)
            except Exception as e:
                raise RuntimeError("Error in section %r of rights file %r: "
                                   "%s" % (section, self._filename, e)) from e
            if user_match and user_collection_match:
                permission = self._rights_config.get(section, "permissions")
                logger.debug("Rule %r:%r matches %r:%r from section %r permission %r",
                             user, sane_path, user_pattern,
                             collection_pattern, section, permission)
                return permission
            if group_match and group_collection_match:
                permission = self._rights_config.get(section, "permissions")
                logger.debug("Rule %r:%r matches %r:%r from section %r permission %r by group membership",
                             user, sane_path, user_pattern,
                             collection_pattern, section, permission)
                return permission
            if self._log_rights_rule_doesnt_match_on_debug:
                logger.debug("Rule %r:%r doesn't match %r:%r from section %r",
                             user, sane_path, user_pattern, collection_pattern,
                             section)
        logger.debug("Rights: %r:%r doesn't match any section", user, sane_path)
        return ""
|
42
radicale/rights/owner_only.py
Normal file
42
radicale/rights/owner_only.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Rights backend that allows authenticated users to read and write their own
|
||||
calendars and address books.
|
||||
|
||||
"""
|
||||
|
||||
import radicale.rights.authenticated as authenticated
|
||||
from radicale import pathutils
|
||||
|
||||
|
||||
class Rights(authenticated.Rights):
    """Restrict access to the owner; the root collection stays readable."""

    def authorization(self, user: str, path: str) -> str:
        if self._verify_user and not user:
            return ""
        sane_path = pathutils.strip_path(path)
        if not sane_path:
            # Root collection: read access only.
            return "R"
        owner = sane_path.split("/", maxsplit=1)[0]
        if self._verify_user and user != owner:
            return ""
        depth = sane_path.count("/")
        if depth == 0:
            return "RW"
        if depth == 1:
            return "rw"
        return ""
|
44
radicale/rights/owner_write.py
Normal file
44
radicale/rights/owner_write.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Rights backend that allows authenticated users to read all calendars and
|
||||
address books but only grants write access to their own.
|
||||
|
||||
"""
|
||||
|
||||
import radicale.rights.authenticated as authenticated
|
||||
from radicale import pathutils
|
||||
|
||||
|
||||
class Rights(authenticated.Rights):
    """Read access for authenticated users, write access for owners only."""

    def authorization(self, user: str, path: str) -> str:
        if self._verify_user and not user:
            return ""
        sane_path = pathutils.strip_path(path)
        if not sane_path:
            # Root collection: read access only.
            return "R"
        owned = (not self._verify_user or
                 user == sane_path.split("/", maxsplit=1)[0])
        depth = sane_path.count("/")
        if depth == 0:
            return "RW" if owned else "R"
        if depth == 1:
            return "rw" if owned else "r"
        return ""
|
371
radicale/server.py
Normal file
371
radicale/server.py
Normal file
|
@ -0,0 +1,371 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2023 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Built-in WSGI server.
|
||||
|
||||
"""
|
||||
|
||||
import http
|
||||
import select
|
||||
import socket
|
||||
import socketserver
|
||||
import ssl
|
||||
import sys
|
||||
import wsgiref.simple_server
|
||||
from typing import (Any, Callable, Dict, List, MutableMapping, Optional, Set,
|
||||
Tuple, Union)
|
||||
from urllib.parse import unquote
|
||||
|
||||
from radicale import Application, config, utils
|
||||
from radicale.log import logger
|
||||
|
||||
# Compatibility aliases for getaddrinfo error codes and socket options
# that are missing on some platforms.
COMPAT_EAI_ADDRFAMILY: int
if hasattr(socket, "EAI_ADDRFAMILY"):
    COMPAT_EAI_ADDRFAMILY = socket.EAI_ADDRFAMILY  # type:ignore[attr-defined]
elif hasattr(socket, "EAI_NONAME"):
    # Windows and BSD don't have a special error code for this
    COMPAT_EAI_ADDRFAMILY = socket.EAI_NONAME
COMPAT_EAI_NODATA: int
if hasattr(socket, "EAI_NODATA"):
    COMPAT_EAI_NODATA = socket.EAI_NODATA
elif hasattr(socket, "EAI_NONAME"):
    # Windows and BSD don't have a special error code for this
    COMPAT_EAI_NODATA = socket.EAI_NONAME
COMPAT_IPPROTO_IPV6: int
if hasattr(socket, "IPPROTO_IPV6"):
    COMPAT_IPPROTO_IPV6 = socket.IPPROTO_IPV6
elif sys.platform == "win32":
    # HACK: https://bugs.python.org/issue29515
    COMPAT_IPPROTO_IPV6 = 41


# IPv4 (host, port) and IPv6 (host, port, flowinfo, scopeid)
ADDRESS_TYPE = utils.ADDRESS_TYPE
|
||||
|
||||
|
||||
class ParallelHTTPServer(socketserver.ThreadingMixIn,
                         wsgiref.simple_server.WSGIServer):
    """WSGI server that handles every request in its own daemon thread."""

    configuration: config.Configuration
    # One socketpair endpoint per active worker thread; the matching end is
    # closed when the worker finishes.
    # NOTE(review): presumably the serve loop selects on these to track
    # worker completion — confirm against the caller.
    worker_sockets: Set[socket.socket]
    # Client socket timeout in seconds; <= 0 disables the timeout.
    _timeout: float

    # We wait for child threads ourself (ThreadingMixIn)
    block_on_close: bool = False
    daemon_threads: bool = True

    def __init__(self, configuration: config.Configuration, family: int,
                 address: Tuple[str, int], RequestHandlerClass:
                 Callable[..., http.server.BaseHTTPRequestHandler]) -> None:
        self.configuration = configuration
        # Must be set before the base class binds the socket.
        self.address_family = family
        super().__init__(address, RequestHandlerClass)
        self.worker_sockets = set()
        self._timeout = configuration.get("server", "timeout")

    def server_bind(self) -> None:
        if self.address_family == socket.AF_INET6:
            # Only allow IPv6 connections to the IPv6 socket
            self.socket.setsockopt(COMPAT_IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
        super().server_bind()

    def get_request(  # type:ignore[override]
            self) -> Tuple[socket.socket, Tuple[ADDRESS_TYPE, socket.socket]]:
        # Set timeout for client
        request: socket.socket
        client_address: ADDRESS_TYPE
        request, client_address = super().get_request()  # type:ignore[misc]
        if self._timeout > 0:
            request.settimeout(self._timeout)
        worker_socket, worker_socket_out = socket.socketpair()
        self.worker_sockets.add(worker_socket_out)
        # HACK: Forward `worker_socket` via `client_address` return value
        # to worker thread.
        # The super class calls `verify_request`, `process_request` and
        # `handle_error` with modified `client_address` value.
        return request, (client_address, worker_socket)

    def verify_request(  # type:ignore[override]
            self, request: socket.socket, client_address_and_socket:
            Tuple[ADDRESS_TYPE, socket.socket]) -> bool:
        # Never reject a connection at socket level.
        return True

    def process_request(  # type:ignore[override]
            self, request: socket.socket, client_address_and_socket:
            Tuple[ADDRESS_TYPE, socket.socket]) -> None:
        # HACK: Super class calls `finish_request` in new thread with
        # `client_address_and_socket`
        return super().process_request(
            request, client_address_and_socket)  # type:ignore[arg-type]

    def finish_request(  # type:ignore[override]
            self, request: socket.socket, client_address_and_socket:
            Tuple[ADDRESS_TYPE, socket.socket]) -> None:
        # HACK: Unpack `client_address_and_socket` and call super class
        # `finish_request` with original `client_address`
        client_address, worker_socket = client_address_and_socket
        try:
            return self.finish_request_locked(request, client_address)
        finally:
            # Closing our end signals that this worker is done.
            worker_socket.close()

    def finish_request_locked(self, request: socket.socket,
                              client_address: ADDRESS_TYPE) -> None:
        # Hook point for subclasses (e.g. the TLS handshake in
        # ParallelHTTPSServer).
        return super().finish_request(
            request, client_address)  # type:ignore[arg-type]

    def handle_error(  # type:ignore[override]
            self, request: socket.socket,
            client_address_or_client_address_and_socket:
            Union[ADDRESS_TYPE, Tuple[ADDRESS_TYPE, socket.socket]]) -> None:
        # HACK: This method can be called with the modified
        # `client_address_and_socket` or the original `client_address` value
        e = sys.exc_info()[1]
        assert e is not None
        if isinstance(e, socket.timeout):
            logger.info("Client timed out", exc_info=True)
        else:
            logger.error("An exception occurred during request: %s",
                         sys.exc_info()[1], exc_info=True)
|
||||
|
||||
|
||||
class ParallelHTTPSServer(ParallelHTTPServer):
|
||||
|
||||
def server_bind(self) -> None:
|
||||
super().server_bind()
|
||||
# Wrap the TCP socket in an SSL socket
|
||||
certfile: str = self.configuration.get("server", "certificate")
|
||||
keyfile: str = self.configuration.get("server", "key")
|
||||
cafile: str = self.configuration.get("server", "certificate_authority")
|
||||
protocol: str = self.configuration.get("server", "protocol")
|
||||
ciphersuite: str = self.configuration.get("server", "ciphersuite")
|
||||
# Test if the files can be read
|
||||
for name, filename in [("certificate", certfile), ("key", keyfile),
|
||||
("certificate_authority", cafile)]:
|
||||
type_name = config.DEFAULT_CONFIG_SCHEMA["server"][name][
|
||||
"type"].__name__
|
||||
source = self.configuration.get_source("server", name)
|
||||
if name == "certificate_authority" and not filename:
|
||||
continue
|
||||
try:
|
||||
open(filename).close()
|
||||
except OSError as e:
|
||||
raise RuntimeError(
|
||||
"Invalid %s value for option %r in section %r in %s: %r "
|
||||
"(%s)" % (type_name, name, "server", source, filename,
|
||||
e)) from e
|
||||
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
logger.info("SSL load files certificate='%s' key='%s'", certfile, keyfile)
|
||||
context.load_cert_chain(certfile=certfile, keyfile=keyfile)
|
||||
if protocol:
|
||||
logger.info("SSL set explicit protocols (maybe not all supported by underlying OpenSSL): '%s'", protocol)
|
||||
context.options = utils.ssl_context_options_by_protocol(protocol, context.options)
|
||||
context.minimum_version = utils.ssl_context_minimum_version_by_options(context.options)
|
||||
if (context.minimum_version == 0):
|
||||
raise RuntimeError("No SSL minimum protocol active")
|
||||
context.maximum_version = utils.ssl_context_maximum_version_by_options(context.options)
|
||||
if (context.maximum_version == 0):
|
||||
raise RuntimeError("No SSL maximum protocol active")
|
||||
else:
|
||||
logger.info("SSL active protocols: (system-default)")
|
||||
logger.debug("SSL minimum acceptable protocol: %s", context.minimum_version)
|
||||
logger.debug("SSL maximum acceptable protocol: %s", context.maximum_version)
|
||||
logger.info("SSL accepted protocols: %s", ' '.join(utils.ssl_get_protocols(context)))
|
||||
if ciphersuite:
|
||||
logger.info("SSL set explicit ciphersuite (maybe not all supported by underlying OpenSSL): '%s'", ciphersuite)
|
||||
context.set_ciphers(ciphersuite)
|
||||
else:
|
||||
logger.info("SSL active ciphersuite: (system-default)")
|
||||
cipherlist = []
|
||||
for entry in context.get_ciphers():
|
||||
cipherlist.append(entry["name"])
|
||||
logger.info("SSL accepted ciphers: %s", ' '.join(cipherlist))
|
||||
if cafile:
|
||||
logger.info("SSL enable mandatory client certificate verification using CA file='%s'", cafile)
|
||||
context.load_verify_locations(cafile=cafile)
|
||||
context.verify_mode = ssl.CERT_REQUIRED
|
||||
self.socket = context.wrap_socket(
|
||||
self.socket, server_side=True, do_handshake_on_connect=False)
|
||||
|
||||
def finish_request_locked(  # type:ignore[override]
        self, request: ssl.SSLSocket, client_address: ADDRESS_TYPE
        ) -> None:
    """Complete the TLS handshake, then dispatch the request.

    ``wrap_socket`` was called with ``do_handshake_on_connect=False``
    (see the socket setup above), so the handshake is performed here
    explicitly before the request is handed to the parent class.
    On handshake failure the error is reported and the connection is
    shut down without processing the request.
    """
    try:
        try:
            request.do_handshake()
        except socket.timeout:
            # Propagate timeouts unchanged (still handled by the outer
            # except below, but without wrapping them in RuntimeError).
            raise
        except Exception as e:
            # Wrap other handshake errors with the client address for
            # diagnostics; keep the original exception chained.
            raise RuntimeError("SSL handshake failed: %s client %s" % (e, str(client_address[0]))) from e
    except Exception:
        # Report the failure via the server's error hook, then make sure
        # the connection is closed even if handle_error itself fails.
        try:
            self.handle_error(request, client_address)
        finally:
            self.shutdown_request(request)  # type:ignore[attr-defined]
        return
    return super().finish_request_locked(request, client_address)
|
||||
|
||||
|
||||
class ServerHandler(wsgiref.simple_server.ServerHandler):
    """wsgiref server handler that keeps the OS environment out of requests."""

    # Leaving this mapping empty prevents wsgiref from merging os.environ
    # into every request's WSGI environ.
    os_environ: MutableMapping[str, str] = {}

    def log_exception(self, exc_info) -> None:
        """Log an unhandled exception raised while running the application."""
        exc_value = exc_info[1]
        logger.error("An exception occurred during request: %s",
                     exc_value, exc_info=exc_info)  # type:ignore[arg-type]
|
||||
|
||||
|
||||
class RequestHandler(wsgiref.simple_server.WSGIRequestHandler):
    """HTTP requests handler."""

    # HACK: Assigned in `socketserver.StreamRequestHandler`
    connection: socket.socket

    def log_request(self, code: Union[int, str] = "-",
                    size: Union[int, str] = "-") -> None:
        """Override of BaseHTTPRequestHandler.log_request."""
        pass  # Disable request logging.

    def log_error(self, format_: str, *args: Any) -> None:
        """Route request errors through the application logger."""
        logger.error("An error occurred during request: %s", format_ % args)

    def get_environ(self) -> Dict[str, Any]:
        """Build the WSGI environ, adding SSL details for TLS connections."""
        env = super().get_environ()
        if isinstance(self.connection, ssl.SSLSocket):
            # Expose TLS connection details in the CGI-style variables
            # commonly set by web servers.
            env["HTTPS"] = "on"
            env["SSL_CIPHER"] = self.request.cipher()[0]
            env["SSL_PROTOCOL"] = self.request.version()
            # The certificate can be evaluated by the auth module
            env["REMOTE_CERTIFICATE"] = self.connection.getpeercert()
        # Parent class only tries latin1 encoding
        env["PATH_INFO"] = unquote(self.path.split("?", 1)[0])
        return env

    def handle(self) -> None:
        """Copy of WSGIRequestHandler.handle with different ServerHandler"""

        # Read one extra byte so an overlong request line can be detected.
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ""
            self.request_version = ""
            self.command = ""
            self.send_error(414)  # 414 Request-URI Too Long
            return

        if not self.parse_request():
            return

        # Same flow as the parent class, but using our ServerHandler
        # (which keeps os.environ out of the WSGI environ).
        handler = ServerHandler(
            self.rfile, self.wfile, self.get_stderr(), self.get_environ()
        )
        handler.request_handler = self  # type:ignore[attr-defined]
        app = self.server.get_app()  # type:ignore[attr-defined]
        handler.run(app)
|
||||
|
||||
|
||||
def serve(configuration: config.Configuration,
          shutdown_socket: Optional[socket.socket] = None) -> None:
    """Serve radicale from configuration.

    `shutdown_socket` can be used to gracefully shutdown the server.
    The socket can be created with `socket.socketpair()`, when the other socket
    gets closed the server stops accepting new requests by clients and the
    function returns after all active requests are finished.

    """

    logger.info("Starting Radicale (%s)", utils.packages_version())
    # Copy configuration before modifying
    configuration = configuration.copy()
    configuration.update({"server": {"_internal_server": "True"}}, "server",
                         privileged=True)

    use_ssl: bool = configuration.get("server", "ssl")
    server_class = ParallelHTTPSServer if use_ssl else ParallelHTTPServer
    application = Application(configuration)
    # Maps listening socket -> server instance; the keys are also used
    # directly in the select() read list below.
    servers = {}
    try:
        hosts: List[Tuple[str, int]] = configuration.get("server", "hosts")
        for address_port in hosts:
            # retrieve IPv4/IPv6 address of address
            try:
                getaddrinfo = socket.getaddrinfo(address_port[0], address_port[1], 0, socket.SOCK_STREAM, socket.IPPROTO_TCP)
            except OSError as e:
                # A failing name lookup is non-fatal; other hosts may
                # still be usable.
                logger.warning("cannot retrieve IPv4 or IPv6 address of '%s': %s" % (utils.format_address(address_port), e))
                continue
            logger.debug("getaddrinfo of '%s': %s" % (utils.format_address(address_port), getaddrinfo))
            # One server per resolved address (e.g. both IPv4 and IPv6).
            for (address_family, socket_kind, socket_proto, socket_flags, socket_address) in getaddrinfo:
                logger.debug("try to create server socket on '%s'" % (utils.format_address(socket_address)))
                try:
                    server = server_class(configuration, address_family, (socket_address[0], socket_address[1]), RequestHandler)
                except OSError as e:
                    logger.warning("cannot create server socket on '%s': %s" % (utils.format_address(socket_address), e))
                    continue
                servers[server.socket] = server
                server.set_app(application)
                logger.info("Listening on %r%s",
                            utils.format_address(server.server_address),
                            " with SSL" if use_ssl else "")
        if not servers:
            raise RuntimeError("No servers started")

        # Mainloop
        select_timeout = None
        if sys.platform == "win32":
            # Fallback to busy waiting. (select(...) blocks SIGINT on Windows.)
            select_timeout = 1.0
        max_connections: int = configuration.get("server", "max_connections")
        logger.info("Radicale server ready")
        while True:
            rlist: List[socket.socket] = []
            # Wait for finished clients
            for server in servers.values():
                rlist.extend(server.worker_sockets)
            # Accept new connections if max_connections is not reached
            if max_connections <= 0 or len(rlist) < max_connections:
                # Iterating the dict yields the listening sockets (keys).
                rlist.extend(servers)
            # Use socket to get notified of program shutdown
            if shutdown_socket is not None:
                rlist.append(shutdown_socket)
            rlist, _, _ = select.select(rlist, [], [], select_timeout)
            rset = set(rlist)
            if shutdown_socket in rset:
                logger.info("Stopping Radicale")
                break
            # Reap worker sockets whose request handling has finished.
            for server in servers.values():
                finished_sockets = server.worker_sockets.intersection(rset)
                for s in finished_sockets:
                    s.close()
                    server.worker_sockets.remove(s)
                    rset.remove(s)
                if finished_sockets:
                    server.service_actions()
            # Anything left in rset is a readable listening socket:
            # accept one new connection per loop iteration.
            if rset:
                active_server = servers.get(rset.pop())
                if active_server:
                    active_server.handle_request()
    finally:
        # Wait for clients to finish and close servers
        for server in servers.values():
            for s in server.worker_sockets:
                # Blocks until the worker signals completion by writing
                # (or closing) its end of the socket pair.
                s.recv(1)
                s.close()
            server.server_close()
|
1595
radicale/storage.py
1595
radicale/storage.py
File diff suppressed because it is too large
Load diff
358
radicale/storage/__init__.py
Normal file
358
radicale/storage/__init__.py
Normal file
|
@ -0,0 +1,358 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
The storage module that stores calendars and address books.
|
||||
|
||||
Take a look at the class ``BaseCollection`` if you want to implement your own.
|
||||
|
||||
"""
|
||||
|
||||
import json
|
||||
import xml.etree.ElementTree as ET
|
||||
from hashlib import sha256
|
||||
from typing import (Callable, ContextManager, Iterable, Iterator, Mapping,
|
||||
Optional, Sequence, Set, Tuple, Union, overload)
|
||||
|
||||
import vobject
|
||||
|
||||
from radicale import config
|
||||
from radicale import item as radicale_item
|
||||
from radicale import types, utils
|
||||
from radicale.item import filter as radicale_filter
|
||||
from radicale.log import logger
|
||||
|
||||
# Storage backends shipped with Radicale that may be selected by name.
INTERNAL_TYPES: Sequence[str] = ("multifilesystem", "multifilesystem_nolock",)

# NOTE: change only if cache structure is modified to avoid cache invalidation on update
CACHE_VERSION_RADICALE = "3.3.1"

# Combined cache tag: embeds both the Radicale cache-structure version and
# the installed vobject version, so caches are invalidated when either the
# structure or the serialization library changes.
CACHE_VERSION: bytes = ("%s=%s;%s=%s;" % ("radicale", CACHE_VERSION_RADICALE, "vobject", utils.package_version("vobject"))).encode()
|
||||
|
||||
|
||||
def load(configuration: "config.Configuration") -> "BaseStorage":
    """Instantiate the storage backend selected in ``configuration``."""
    logger.debug("storage cache version: %r", str(CACHE_VERSION))
    backend = utils.load_plugin(
        INTERNAL_TYPES, "storage", "Storage", BaseStorage, configuration)
    return backend
|
||||
|
||||
|
||||
class ComponentExistsError(ValueError):
    """Raised when a component is created at a path that is already taken."""

    def __init__(self, path: str) -> None:
        super().__init__("Component already exists: %r" % path)
|
||||
|
||||
|
||||
class ComponentNotFoundError(ValueError):
    """Raised when a requested component does not exist at the given path."""

    def __init__(self, path: str) -> None:
        super().__init__("Component doesn't exist: %r" % path)
|
||||
|
||||
|
||||
class BaseCollection:
    """Abstract interface of a calendar or address book collection.

    Backends subclass this and implement at least ``path``, ``get_all``,
    ``get_multi``, ``upload``, ``delete``, ``get_meta``, ``set_meta`` and
    ``last_modified``; the remaining methods have default implementations
    built on top of those.
    """

    @property
    def path(self) -> str:
        """The sanitized path of the collection without leading or
        trailing ``/``."""
        raise NotImplementedError

    @property
    def owner(self) -> str:
        """The owner of the collection (first path segment)."""
        return self.path.split("/", maxsplit=1)[0]

    @property
    def is_principal(self) -> bool:
        """Collection is a principal (non-empty path with one segment)."""
        return bool(self.path) and "/" not in self.path

    @property
    def etag(self) -> str:
        """Encoded as quoted-string (see RFC 2616).

        Hash over every item's href and etag plus the collection metadata,
        so any item or property change yields a new collection etag.
        """
        etag = sha256()
        for item in self.get_all():
            assert item.href
            etag.update((item.href + "/" + item.etag).encode())
        # sort_keys makes the metadata serialization deterministic.
        etag.update(json.dumps(self.get_meta(), sort_keys=True).encode())
        return '"%s"' % etag.hexdigest()

    @property
    def tag(self) -> str:
        """The tag of the collection (e.g. VCALENDAR/VADDRESSBOOK) or ""."""
        return self.get_meta("tag") or ""

    def sync(self, old_token: str = "") -> Tuple[str, Iterable[str]]:
        """Get the current sync token and changed items for synchronization.

        ``old_token`` an old sync token which is used as the base of the
        delta update. If sync token is empty, all items are returned.
        ValueError is raised for invalid or old tokens.

        WARNING: This simple default implementation treats all sync-token as
        invalid.

        """
        def hrefs_iter() -> Iterator[str]:
            for item in self.get_all():
                assert item.href
                yield item.href
        # Derive the token from the collection etag (without the quotes).
        token = "http://radicale.org/ns/sync/%s" % self.etag.strip("\"")
        if old_token:
            raise ValueError("Sync token are not supported")
        return token, hrefs_iter()

    def get_multi(self, hrefs: Iterable[str]
                  ) -> Iterable[Tuple[str, Optional["radicale_item.Item"]]]:
        """Fetch multiple items.

        It's not required to return the requested items in the correct order.
        Duplicated hrefs can be ignored.

        Returns tuples with the href and the item or None if the item doesn't
        exist.

        """
        raise NotImplementedError

    def get_all(self) -> Iterable["radicale_item.Item"]:
        """Fetch all items."""
        raise NotImplementedError

    def get_filtered(self, filters: Iterable[ET.Element]
                     ) -> Iterable[Tuple["radicale_item.Item", bool]]:
        """Fetch all items with optional filtering.

        This can largely improve performance of reports depending on
        the filters and this implementation.

        Returns tuples in the form ``(item, filters_matched)``.
        ``filters_matched`` is a bool that indicates if ``filters`` are fully
        matched.

        """
        if not self.tag:
            return
        # Reduce the XML filters to a cheap prefilter: component tag and
        # time range. ``simple`` tells whether the prefilter is exhaustive.
        tag, start, end, simple = radicale_filter.simplify_prefilters(
            filters, self.tag)
        for item in self.get_all():
            if tag is not None and tag != item.component_name:
                continue
            istart, iend = item.time_range
            # Skip items entirely outside the requested time range.
            if istart >= end or iend <= start:
                continue
            yield item, simple and (start <= istart or iend <= end)

    def has_uid(self, uid: str) -> bool:
        """Check if a UID exists in the collection."""
        for item in self.get_all():
            if item.uid == uid:
                return True
        return False

    def upload(self, href: str, item: "radicale_item.Item") -> (
            "radicale_item.Item"):
        """Upload a new or replace an existing item."""
        raise NotImplementedError

    def delete(self, href: Optional[str] = None) -> None:
        """Delete an item.

        When ``href`` is ``None``, delete the collection.

        """
        raise NotImplementedError

    @overload
    def get_meta(self, key: None = None) -> Mapping[str, str]: ...

    @overload
    def get_meta(self, key: str) -> Optional[str]: ...

    def get_meta(self, key: Optional[str] = None
                 ) -> Union[Mapping[str, str], Optional[str]]:
        """Get metadata value for collection.

        Return the value of the property ``key``. If ``key`` is ``None`` return
        a dict with all properties

        """
        raise NotImplementedError

    def set_meta(self, props: Mapping[str, str]) -> None:
        """Set metadata values for collection.

        ``props`` a dict with values for properties.

        """
        raise NotImplementedError

    @property
    def last_modified(self) -> str:
        """Get the HTTP-datetime of when the collection was modified."""
        raise NotImplementedError

    def serialize(self) -> str:
        """Get the unicode string representing the whole collection."""
        if self.tag == "VCALENDAR":
            in_vcalendar = False
            vtimezones = ""
            included_tzids: Set[str] = set()
            vtimezone = []
            tzid = None
            components = ""
            # Concatenate all child elements of VCALENDAR from all items
            # together, while preventing duplicated VTIMEZONE entries.
            # VTIMEZONEs are only distinguished by their TZID, if different
            # timezones share the same TZID this produces erroneous output.
            # VObject fails at this too.
            for item in self.get_all():
                # depth tracks BEGIN:/END: nesting within the serialized item.
                depth = 0
                for line in item.serialize().split("\r\n"):
                    if line.startswith("BEGIN:"):
                        depth += 1
                    if depth == 1 and line == "BEGIN:VCALENDAR":
                        in_vcalendar = True
                    elif in_vcalendar:
                        if depth == 1 and line.startswith("END:"):
                            in_vcalendar = False
                        if depth == 2 and line == "BEGIN:VTIMEZONE":
                            vtimezone.append(line + "\r\n")
                        elif vtimezone:
                            # Currently buffering a VTIMEZONE component.
                            vtimezone.append(line + "\r\n")
                            if depth == 2 and line.startswith("TZID:"):
                                tzid = line[len("TZID:"):]
                            elif depth == 2 and line.startswith("END:"):
                                # Emit the buffered VTIMEZONE only once per TZID.
                                if tzid is None or tzid not in included_tzids:
                                    vtimezones += "".join(vtimezone)
                                if tzid is not None:
                                    included_tzids.add(tzid)
                                vtimezone.clear()
                                tzid = None
                        elif depth >= 2:
                            components += line + "\r\n"
                    if line.startswith("END:"):
                        depth -= 1
            # Build an empty VCALENDAR carrying the collection's display
            # properties, then splice the collected content before its END.
            template = vobject.iCalendar()
            displayname = self.get_meta("D:displayname")
            if displayname:
                template.add("X-WR-CALNAME")
                template.x_wr_calname.value_param = "TEXT"
                template.x_wr_calname.value = displayname
            description = self.get_meta("C:calendar-description")
            if description:
                template.add("X-WR-CALDESC")
                template.x_wr_caldesc.value_param = "TEXT"
                template.x_wr_caldesc.value = description
            template = template.serialize()
            template_insert_pos = template.find("\r\nEND:VCALENDAR\r\n") + 2
            assert template_insert_pos != -1
            return (template[:template_insert_pos] +
                    vtimezones + components +
                    template[template_insert_pos:])
        if self.tag == "VADDRESSBOOK":
            return "".join((item.serialize() for item in self.get_all()))
        return ""
|
||||
|
||||
|
||||
class BaseStorage:
    """Abstract interface of a storage backend.

    Backends subclass this and implement ``discover``, ``move``,
    ``create_collection``, ``acquire_lock`` and ``verify``.
    """

    def __init__(self, configuration: "config.Configuration") -> None:
        """Initialize BaseStorage.

        ``configuration`` see ``radicale.config`` module.
        The ``configuration`` must not change during the lifetime of
        this object, it is kept as an internal reference.

        """
        self.configuration = configuration

    # NOTE(review): the mutable default ``set([])`` is shared between calls;
    # harmless here since this stub only raises, but overriding
    # implementations should not rely on mutating it.
    def discover(
            self, path: str, depth: str = "0",
            child_context_manager: Optional[
                Callable[[str, Optional[str]], ContextManager[None]]] = None,
            user_groups: Set[str] = set([])) -> Iterable["types.CollectionOrItem"]:
        """Discover a list of collections under the given ``path``.

        ``path`` is sanitized.

        If ``depth`` is "0", only the actual object under ``path`` is
        returned.

        If ``depth`` is anything but "0", it is considered as "1" and direct
        children are included in the result.

        The root collection "/" must always exist.

        """
        raise NotImplementedError

    def move(self, item: "radicale_item.Item", to_collection: BaseCollection,
             to_href: str) -> None:
        """Move an object.

        ``item`` is the item to move.

        ``to_collection`` is the target collection.

        ``to_href`` is the target name in ``to_collection``. An item with the
        same name might already exist.

        """
        raise NotImplementedError

    def create_collection(
            self, href: str,
            items: Optional[Iterable["radicale_item.Item"]] = None,
            props: Optional[Mapping[str, str]] = None) -> BaseCollection:
        """Create a collection.

        ``href`` is the sanitized path.

        If the collection already exists and neither ``collection`` nor
        ``props`` are set, this method shouldn't do anything. Otherwise the
        existing collection must be replaced.

        ``collection`` is a list of vobject components.

        ``props`` are metadata values for the collection.

        ``props["tag"]`` is the type of collection (VCALENDAR or VADDRESSBOOK).
        If the key ``tag`` is missing, ``items`` is ignored.

        """
        raise NotImplementedError

    @types.contextmanager
    def acquire_lock(self, mode: str, user: str = "") -> Iterator[None]:
        """Set a context manager to lock the whole storage.

        ``mode`` must either be "r" for shared access or "w" for exclusive
        access.

        ``user`` is the name of the logged in user or empty.

        """
        raise NotImplementedError

    def verify(self) -> bool:
        """Check the storage for errors."""
        raise NotImplementedError
|
195
radicale/storage/multifilesystem/__init__.py
Normal file
195
radicale/storage/multifilesystem/__init__.py
Normal file
|
@ -0,0 +1,195 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Storage backend that stores data in the file system.
|
||||
|
||||
Uses one folder per collection and one file per collection entry.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from typing import ClassVar, Iterator, Optional, Type
|
||||
|
||||
from radicale import config
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import CollectionBase, StorageBase
|
||||
from radicale.storage.multifilesystem.cache import CollectionPartCache
|
||||
from radicale.storage.multifilesystem.create_collection import \
|
||||
StoragePartCreateCollection
|
||||
from radicale.storage.multifilesystem.delete import CollectionPartDelete
|
||||
from radicale.storage.multifilesystem.discover import StoragePartDiscover
|
||||
from radicale.storage.multifilesystem.get import CollectionPartGet
|
||||
from radicale.storage.multifilesystem.history import CollectionPartHistory
|
||||
from radicale.storage.multifilesystem.lock import (CollectionPartLock,
|
||||
StoragePartLock)
|
||||
from radicale.storage.multifilesystem.meta import CollectionPartMeta
|
||||
from radicale.storage.multifilesystem.move import StoragePartMove
|
||||
from radicale.storage.multifilesystem.sync import CollectionPartSync
|
||||
from radicale.storage.multifilesystem.upload import CollectionPartUpload
|
||||
from radicale.storage.multifilesystem.verify import StoragePartVerify
|
||||
|
||||
# 999 second, 999 ms, 999 us, 999 ns
# Sentinel mtime (in nanoseconds) with a non-zero digit in every decimal
# group, used by ``Storage._analyse_mtime`` to probe the filesystem's
# effective mtime resolution.
MTIME_NS_TEST: int = 999999999999
|
||||
|
||||
|
||||
class Collection(
        CollectionPartDelete, CollectionPartMeta, CollectionPartSync,
        CollectionPartUpload, CollectionPartGet, CollectionPartCache,
        CollectionPartLock, CollectionPartHistory, CollectionBase):
    """Filesystem-backed collection assembled from the multifilesystem
    mixin parts; each mixin contributes one aspect (get, upload, sync,
    locking, ...)."""

    # Cached collection etag, invalidated lazily (see ``etag`` below).
    _etag_cache: Optional[str]

    def __init__(self, storage_: "Storage", path: str,
                 filesystem_path: Optional[str] = None) -> None:
        super().__init__(storage_, path, filesystem_path)
        self._etag_cache = None

    @property
    def path(self) -> str:
        """The sanitized collection path (see ``BaseCollection.path``)."""
        return self._path

    @property
    def last_modified(self) -> str:
        """HTTP-datetime of the newest mtime among the collection folder,
        its props file and all item files."""
        def relevant_files_iter() -> Iterator[str]:
            yield self._filesystem_path
            if os.path.exists(self._props_path):
                yield self._props_path
            for href in self._list():
                yield os.path.join(self._filesystem_path, href)
        last = max(map(os.path.getmtime, relevant_files_iter()))
        return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last))

    @property
    def etag(self) -> str:
        # reuse cached value if the storage is read-only
        # (recompute when the storage lock is held for writing, since the
        # collection may have been modified under that lock)
        if self._storage._lock.locked == "w" or self._etag_cache is None:
            self._etag_cache = super().etag
        return self._etag_cache
|
||||
|
||||
|
||||
class Storage(
        StoragePartCreateCollection, StoragePartLock, StoragePartMove,
        StoragePartVerify, StoragePartDiscover, StorageBase):
    """Filesystem storage backend assembled from the multifilesystem
    storage mixin parts."""

    _collection_class: ClassVar[Type[Collection]] = Collection

    def _analyse_mtime(self):
        """Measure the effective mtime resolution of the storage filesystem.

        Writes a probe file, sets its mtime to ``MTIME_NS_TEST`` and reads
        it back; the number of trailing digits lost tells the resolution.

        Returns a tuple ``(precision, precision_unit, unit)`` where
        ``precision`` is in nanoseconds and ``precision_unit``/``unit`` are
        a human-readable rendering (e.g. ``(1000000, 1, "ms")``).
        Raises if the probe file cannot be written or touched.
        """
        # calculate and display mtime resolution
        path = os.path.join(self._get_collection_root_folder(), ".Radicale.mtime_test")
        logger.debug("Storage item mtime resolution test with file: %r", path)
        try:
            with open(path, "w") as f:
                f.write("mtime_test")
                # BUGFIX: the original had a bare `f.close` here (attribute
                # access without a call, a no-op); the `with` block closes
                # the file, so the statement was removed.
        except Exception as e:
            logger.warning("Storage item mtime resolution test not possible, cannot write file: %r (%s)", path, e)
            raise
        # set mtime_ns for tests
        try:
            os.utime(path, times=None, ns=(MTIME_NS_TEST, MTIME_NS_TEST))
        except Exception as e:
            logger.warning("Storage item mtime resolution test not possible, cannot set utime on file: %r (%s)", path, e)
            os.remove(path)
            raise
        logger.debug("Storage item mtime resolution test set: %d" % MTIME_NS_TEST)
        mtime_ns = os.stat(path).st_mtime_ns
        logger.debug("Storage item mtime resolution test get: %d" % mtime_ns)
        # start analysis: strip decimal digits from both values until they
        # agree; the number of digits stripped gives the resolution.
        precision = 1
        mtime_ns_test = MTIME_NS_TEST
        while mtime_ns > 0:
            if mtime_ns == mtime_ns_test:
                break
            # Check for resolutions that are not a power of ten (2 ns, 5 ns).
            factor = 2
            if int(mtime_ns / factor) == int(mtime_ns_test / factor):
                precision = precision * factor
                break
            factor = 5
            if int(mtime_ns / factor) == int(mtime_ns_test / factor):
                precision = precision * factor
                break
            precision = precision * 10
            mtime_ns = int(mtime_ns / 10)
            mtime_ns_test = int(mtime_ns_test / 10)
        # Render the nanosecond precision with a human-friendly unit.
        unit = "ns"
        precision_unit = precision
        if precision >= 1000000000:
            precision_unit = int(precision / 1000000000)
            unit = "s"
        elif precision >= 1000000:
            precision_unit = int(precision / 1000000)
            unit = "ms"
        elif precision >= 1000:
            precision_unit = int(precision / 1000)
            unit = "us"
        os.remove(path)
        return (precision, precision_unit, unit)

    def __init__(self, configuration: config.Configuration) -> None:
        """Initialize the filesystem storage.

        Creates the storage folders if missing, logs the cache
        configuration, probes the filesystem's mtime resolution and
        prepares the folder umask.

        ``configuration`` see ``radicale.config`` module.
        """
        super().__init__(configuration)
        logger.info("Storage location: %r", self._filesystem_folder)
        if not os.path.exists(self._filesystem_folder):
            logger.warning("Storage location: %r not existing, create now", self._filesystem_folder)
            self._makedirs_synced(self._filesystem_folder)
        logger.info("Storage location subfolder: %r", self._get_collection_root_folder())
        if not os.path.exists(self._get_collection_root_folder()):
            logger.warning("Storage location subfolder: %r not existing, create now", self._get_collection_root_folder())
            self._makedirs_synced(self._get_collection_root_folder())
        logger.info("Storage cache subfolder usage for 'item': %s", self._use_cache_subfolder_for_item)
        logger.info("Storage cache subfolder usage for 'history': %s", self._use_cache_subfolder_for_history)
        logger.info("Storage cache subfolder usage for 'sync-token': %s", self._use_cache_subfolder_for_synctoken)
        logger.info("Storage cache use mtime and size for 'item': %s", self._use_mtime_and_size_for_item_cache)
        # Warn loudly on filesystems with coarse mtime resolution: the item
        # cache relies on mtime changes to detect modifications.
        try:
            (precision, precision_unit, unit) = self._analyse_mtime()
            if precision >= 100000000:
                # >= 100 ms
                logger.warning("Storage item mtime resolution test result: %d %s (VERY RISKY ON PRODUCTION SYSTEMS)" % (precision_unit, unit))
            elif precision >= 10000000:
                # >= 10 ms
                logger.warning("Storage item mtime resolution test result: %d %s (RISKY ON PRODUCTION SYSTEMS)" % (precision_unit, unit))
            else:
                logger.info("Storage item mtime resolution test result: %d %s" % (precision_unit, unit))
            if self._use_mtime_and_size_for_item_cache is False:
                logger.info("Storage cache using mtime and size for 'item' may be an option in case of performance issues")
        except Exception:
            # Best-effort probe only; storage remains usable without it.
            logger.warning("Storage item mtime resolution test result not successful")
        logger.debug("Storage cache action logging: %s", self._debug_cache_actions)
        if self._use_cache_subfolder_for_item is True or self._use_cache_subfolder_for_history is True or self._use_cache_subfolder_for_synctoken is True:
            logger.info("Storage cache subfolder: %r", self._get_collection_cache_folder())
            if not os.path.exists(self._get_collection_cache_folder()):
                logger.warning("Storage cache subfolder: %r not existing, create now", self._get_collection_cache_folder())
                self._makedirs_synced(self._get_collection_cache_folder())
        if sys.platform != "win32":
            if not self._folder_umask:
                # retrieve current umask by setting a dummy umask
                current_umask = os.umask(0o0022)
                logger.info("Storage folder umask (from system): '%04o'", current_umask)
                # reset to original
                os.umask(current_umask)
            else:
                try:
                    config_umask = int(self._folder_umask, 8)
                except Exception:
                    logger.critical("storage folder umask defined but invalid: '%s'", self._folder_umask)
                    raise
                logger.info("storage folder umask defined: '%04o'", config_umask)
                self._config_umask = config_umask
|
167
radicale/storage/multifilesystem/base.py
Normal file
167
radicale/storage/multifilesystem/base.py
Normal file
|
@ -0,0 +1,167 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import sys
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import IO, AnyStr, ClassVar, Iterator, Optional, Type
|
||||
|
||||
from radicale import config, pathutils, storage, types
|
||||
from radicale.storage import multifilesystem # noqa:F401
|
||||
|
||||
|
||||
class CollectionBase(storage.BaseCollection):
    """Common state and helpers shared by all multifilesystem collection
    mixin parts."""

    _storage: "multifilesystem.Storage"
    _path: str
    _encoding: str
    _filesystem_path: str

    def __init__(self, storage_: "multifilesystem.Storage", path: str,
                 filesystem_path: Optional[str] = None) -> None:
        """Bind the collection to ``storage_`` and resolve its on-disk path.

        ``filesystem_path`` overrides the default mapping of ``path`` into
        the collection root folder when given.
        """
        super().__init__()
        self._storage = storage_
        folder = storage_._get_collection_root_folder()
        # Path should already be sanitized
        self._path = pathutils.strip_path(path)
        self._encoding = storage_.configuration.get("encoding", "stock")
        self._skip_broken_item = storage_.configuration.get("storage", "skip_broken_item")
        if filesystem_path is None:
            filesystem_path = pathutils.path_to_filesystem(folder, self.path)
        self._filesystem_path = filesystem_path

    # TODO: better fix for "mypy"
    @types.contextmanager  # type: ignore
    def _atomic_write(self, path: str, mode: str = "w",
                      newline: Optional[str] = None) -> Iterator[IO[AnyStr]]:
        """Context manager yielding a file that atomically replaces ``path``.

        The file is written in a temporary directory next to the target,
        flushed and fsynced, then moved into place with ``os.replace`` and
        the parent directory is synced — so ``path`` either keeps its old
        content or gets the complete new content, never a partial write.
        """
        # TODO: Overload with Literal when dropping support for Python < 3.8
        parent_dir, name = os.path.split(path)
        # Do not use mkstemp because it creates with permissions 0o600
        with TemporaryDirectory(
                prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
            with open(os.path.join(tmp_dir, name), mode, newline=newline,
                      encoding=None if "b" in mode else self._encoding) as tmp:
                yield tmp
                # Runs after the caller's writes: persist before replacing.
                tmp.flush()
                self._storage._fsync(tmp)
            os.replace(os.path.join(tmp_dir, name), path)
            self._storage._sync_directory(parent_dir)
|
||||
|
||||
|
||||
class StorageBase(storage.BaseStorage):
    """Base of the multifilesystem storage backend.

    Reads the filesystem-related settings from the configuration and
    provides the low-level helpers (fsync, directory syncing, cache-folder
    layout, synced makedirs) shared by the storage mixins.
    """

    _collection_class: ClassVar[Type["multifilesystem.Collection"]]

    _filesystem_folder: str                    # root folder of the storage
    _filesystem_cache_folder: str              # optional separate cache root
    _filesystem_fsync: bool                    # fsync files/dirs after writes
    _use_cache_subfolder_for_item: bool        # redirect "item" cache to cache root
    _use_cache_subfolder_for_history: bool     # redirect "history" cache to cache root
    _use_cache_subfolder_for_synctoken: bool   # redirect "sync-token" cache to cache root
    _use_mtime_and_size_for_item_cache: bool   # cache key from stat instead of content hash
    _debug_cache_actions: bool                 # verbose cache logging
    _folder_umask: str                         # configured umask (string, may be empty)
    _config_umask: int                         # parsed octal umask; set elsewhere -- TODO confirm

    def __init__(self, configuration: config.Configuration) -> None:
        """Read all filesystem-storage options from ``configuration``."""
        super().__init__(configuration)
        self._filesystem_folder = configuration.get(
            "storage", "filesystem_folder")
        # Note: "_filesystem_fsync" (leading underscore) is the internal
        # option name used by Radicale's config schema.
        self._filesystem_fsync = configuration.get(
            "storage", "_filesystem_fsync")
        self._filesystem_cache_folder = configuration.get(
            "storage", "filesystem_cache_folder")
        self._use_cache_subfolder_for_item = configuration.get(
            "storage", "use_cache_subfolder_for_item")
        self._use_cache_subfolder_for_history = configuration.get(
            "storage", "use_cache_subfolder_for_history")
        self._use_cache_subfolder_for_synctoken = configuration.get(
            "storage", "use_cache_subfolder_for_synctoken")
        self._use_mtime_and_size_for_item_cache = configuration.get(
            "storage", "use_mtime_and_size_for_item_cache")
        self._folder_umask = configuration.get(
            "storage", "folder_umask")
        self._debug_cache_actions = configuration.get(
            "logging", "storage_cache_actions_on_debug")

    def _get_collection_root_folder(self) -> str:
        """Return the folder that contains all collections."""
        return os.path.join(self._filesystem_folder, "collection-root")

    def _get_collection_cache_folder(self) -> str:
        """Return the cache root, honoring a separate cache folder if set."""
        if self._filesystem_cache_folder:
            return os.path.join(self._filesystem_cache_folder, "collection-cache")
        else:
            return os.path.join(self._filesystem_folder, "collection-cache")

    def _get_collection_cache_subfolder(self, path: str, folder: str,
                                        subfolder: str) -> str:
        """Return the cache subfolder for ``path``.

        When the corresponding ``use_cache_subfolder_for_*`` option is
        enabled, the collection-root prefix of ``path`` is rewritten to the
        (possibly separate) cache root.
        """
        if (self._use_cache_subfolder_for_item is True) and (subfolder == "item"):
            path = path.replace(self._get_collection_root_folder(), self._get_collection_cache_folder())
        elif (self._use_cache_subfolder_for_history is True) and (subfolder == "history"):
            path = path.replace(self._get_collection_root_folder(), self._get_collection_cache_folder())
        elif (self._use_cache_subfolder_for_synctoken is True) and (subfolder == "sync-token"):
            path = path.replace(self._get_collection_root_folder(), self._get_collection_cache_folder())
        return os.path.join(path, folder, subfolder)

    def _fsync(self, f: IO[AnyStr]) -> None:
        """Fsync an open file if fsync'ing is enabled.

        Raises RuntimeError on OS-level failure.
        """
        if self._filesystem_fsync:
            try:
                pathutils.fsync(f.fileno())
            except OSError as e:
                raise RuntimeError("Fsync'ing file %r failed: %s" %
                                   (f.name, e)) from e

    def _sync_directory(self, path: str) -> None:
        """Sync directory to disk.

        This only works on POSIX and does nothing on other systems.

        """
        if not self._filesystem_fsync:
            return
        if sys.platform != "win32":
            try:
                fd = os.open(path, 0)
                try:
                    pathutils.fsync(fd)
                finally:
                    os.close(fd)
            except OSError as e:
                raise RuntimeError("Fsync'ing directory %r failed: %s" %
                                   (path, e)) from e

    def _makedirs_synced(self, filesystem_path: str) -> None:
        """Recursively create a directory and its parents in a sync'ed way.

        This method acts silently when the folder already exists.

        """
        if os.path.isdir(filesystem_path):
            return
        parent_filesystem_path = os.path.dirname(filesystem_path)
        old_umask: Optional[int] = None
        if sys.platform != "win32" and self._folder_umask:
            old_umask = os.umask(self._config_umask)
        try:
            # Prevent infinite loop
            if filesystem_path != parent_filesystem_path:
                # Create parent dirs recursively
                self._makedirs_synced(parent_filesystem_path)
            # Possible race!
            os.makedirs(filesystem_path, exist_ok=True)
            self._sync_directory(parent_filesystem_path)
        finally:
            # Fix: restore the process-wide umask even when directory
            # creation raises; the original only restored it on the
            # success path, leaking the modified umask to the whole
            # process on errors.
            if old_umask is not None:
                os.umask(old_umask)
|
130
radicale/storage/multifilesystem/cache.py
Normal file
130
radicale/storage/multifilesystem/cache.py
Normal file
|
@ -0,0 +1,130 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import pickle
|
||||
import time
|
||||
from hashlib import sha256
|
||||
from typing import BinaryIO, Iterable, NamedTuple, Optional, cast
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import pathutils, storage
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
|
||||
class CacheContent(NamedTuple):
    """Deserialized payload of one item-cache entry.

    Field order is significant: entries are pickled as plain tuples and
    rebuilt positionally.
    """
    uid: str
    etag: str
    text: str
    name: str
    tag: str
    start: int
    end: int
|
||||
|
||||
|
||||
class CollectionPartCache(CollectionBase):
    """Mixin implementing the per-collection item cache.

    Cache entries live under ``<cache root>/.Radicale.cache/item`` and hold
    a pickled ``(cache_hash, *CacheContent)`` tuple per item href.
    """

    def _clean_cache(self, folder: str, names: Iterable[str],
                     max_age: int = 0) -> None:
        """Delete all ``names`` in ``folder`` that are older than ``max_age``.
        """
        # With the default max_age=0 no age limit applies and every listed
        # name is removed unconditionally.
        age_limit: Optional[float] = None
        if max_age is not None and max_age > 0:
            age_limit = time.time() - max_age
        modified = False
        for name in names:
            if not pathutils.is_safe_filesystem_path_component(name):
                continue
            if age_limit is not None:
                try:
                    # Race: Another process might have deleted the file.
                    mtime = os.path.getmtime(os.path.join(folder, name))
                except FileNotFoundError:
                    continue
                if mtime > age_limit:
                    continue
            logger.debug("Found expired item in cache: %r", name)
            # Race: Another process might have deleted or locked the
            # file.
            try:
                os.remove(os.path.join(folder, name))
            except (FileNotFoundError, PermissionError):
                continue
            modified = True
        if modified:
            self._storage._sync_directory(folder)

    @staticmethod
    def _item_cache_hash(raw_text: bytes) -> str:
        """Return the content-based cache key: SHA-256 over the cache
        format version plus the raw item bytes."""
        _hash = sha256()
        _hash.update(storage.CACHE_VERSION)
        _hash.update(raw_text)
        return _hash.hexdigest()

    @staticmethod
    def _item_cache_mtime_and_size(size: int, raw_text: int) -> str:
        # Stat-based cache key (used instead of hashing when
        # use_mtime_and_size_for_item_cache is enabled).
        # NOTE(review): despite its name, ``raw_text`` here is the file's
        # mtime value (the caller passes st_mtime_ns) -- a misleading
        # parameter name worth renaming in a follow-up.
        return str(storage.CACHE_VERSION.decode()) + "size=" + str(size) + ";mtime=" + str(raw_text)

    def _item_cache_content(self, item: radicale_item.Item) -> CacheContent:
        """Build the CacheContent tuple that gets pickled for ``item``."""
        return CacheContent(item.uid, item.etag, item.serialize(), item.name,
                            item.component_name, *item.time_range)

    def _store_item_cache(self, href: str, item: radicale_item.Item,
                          cache_hash: str = "") -> CacheContent:
        """Write the cache entry for ``href`` atomically and return it.

        When ``cache_hash`` is empty it is computed from the serialized
        item; that is forbidden in mtime+size mode, where the hash can
        only be derived from the file on disk.
        """
        if not cache_hash:
            if self._storage._use_mtime_and_size_for_item_cache is True:
                raise RuntimeError("_store_item_cache called without cache_hash is not supported if [storage] use_mtime_and_size_for_item_cache is True")
            else:
                cache_hash = self._item_cache_hash(
                    item.serialize().encode(self._encoding))
        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
        content = self._item_cache_content(item)
        self._storage._makedirs_synced(cache_folder)
        # Race: Other processes might have created and locked the file.
        # TODO: better fix for "mypy"
        with contextlib.suppress(PermissionError), self._atomic_write(  # type: ignore
                os.path.join(cache_folder, href), "wb") as fo:
            fb = cast(BinaryIO, fo)
            pickle.dump((cache_hash, *content), fb)
        return content

    def _load_item_cache(self, href: str, cache_hash: str
                         ) -> Optional[CacheContent]:
        """Load the cache entry for ``href``.

        Returns the CacheContent only if the stored hash equals
        ``cache_hash``; returns None on miss, stale entry, or a missing or
        unreadable cache file.
        """
        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
        path = os.path.join(cache_folder, href)
        try:
            with open(path, "rb") as f:
                hash_, *remainder = pickle.load(f)
                if hash_ and hash_ == cache_hash:
                    if self._storage._debug_cache_actions is True:
                        logger.debug("Item cache match : %r with hash %r", path, cache_hash)
                    return CacheContent(*remainder)
                else:
                    if self._storage._debug_cache_actions is True:
                        logger.debug("Item cache no match : %r with hash %r", path, cache_hash)
        except FileNotFoundError:
            # No cache entry yet -- a normal miss, not an error.
            if self._storage._debug_cache_actions is True:
                logger.debug("Item cache not found : %r with hash %r", path, cache_hash)
            pass
        except (pickle.UnpicklingError, ValueError) as e:
            # Corrupt entry: warn and fall through to a miss so the caller
            # regenerates it.
            logger.warning("Failed to load item cache entry %r in %r: %s",
                           href, self.path, e, exc_info=True)
        return None

    def _clean_item_cache(self) -> None:
        """Remove cache entries whose item file no longer exists in the
        collection folder."""
        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
        self._clean_cache(cache_folder, (
            e.name for e in os.scandir(cache_folder) if not
            os.path.isfile(os.path.join(self._filesystem_path, e.name))))
|
81
radicale/storage/multifilesystem/create_collection.py
Normal file
81
radicale/storage/multifilesystem/create_collection.py
Normal file
|
@ -0,0 +1,81 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Iterable, Optional, cast
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import pathutils
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import StorageBase
|
||||
|
||||
|
||||
class StoragePartCreateCollection(StorageBase):
    """Mixin implementing collection creation for the multifilesystem
    storage."""

    def create_collection(self, href: str,
                          items: Optional[Iterable[radicale_item.Item]] = None,
                          props=None) -> "multifilesystem.Collection":
        """Create (or atomically replace) the collection at ``href``.

        Without ``props`` this just creates the folder. With ``props`` the
        collection is fully built in a temporary directory (metadata plus
        optional ``items``) and then swapped into place, so a concurrent
        reader never sees a half-built collection.

        Raises ValueError if building or swapping the collection fails.
        """
        folder = self._get_collection_root_folder()

        # Path should already be sanitized
        sane_path = pathutils.strip_path(href)
        filesystem_path = pathutils.path_to_filesystem(folder, sane_path)
        # Fix: use logging's lazy argument formatting instead of eager
        # "%"-interpolation, consistent with the other logger calls in
        # this package (formatting is skipped when debug is disabled).
        logger.debug("Create collection: %r", filesystem_path)

        if not props:
            # A collection without properties is just its folder.
            self._makedirs_synced(filesystem_path)
            return self._collection_class(
                cast(multifilesystem.Storage, self),
                pathutils.unstrip_path(sane_path, True))

        parent_dir = os.path.dirname(filesystem_path)
        self._makedirs_synced(parent_dir)

        # Create a temporary directory with an unsafe name
        try:
            with TemporaryDirectory(prefix=".Radicale.tmp-", dir=parent_dir
                                    ) as tmp_dir:
                # The temporary directory itself can't be renamed
                tmp_filesystem_path = os.path.join(tmp_dir, "collection")
                os.makedirs(tmp_filesystem_path)
                col = self._collection_class(
                    cast(multifilesystem.Storage, self),
                    pathutils.unstrip_path(sane_path, True),
                    filesystem_path=tmp_filesystem_path)
                col.set_meta(props)
                if items is not None:
                    # The "tag" property decides the item file suffix.
                    if props.get("tag") == "VCALENDAR":
                        col._upload_all_nonatomic(items, suffix=".ics")
                    elif props.get("tag") == "VADDRESSBOOK":
                        col._upload_all_nonatomic(items, suffix=".vcf")

                if os.path.lexists(filesystem_path):
                    # Replace an existing collection atomically.
                    pathutils.rename_exchange(tmp_filesystem_path, filesystem_path)
                else:
                    os.rename(tmp_filesystem_path, filesystem_path)
                self._sync_directory(parent_dir)
        except Exception as e:
            raise ValueError("Failed to create collection %r as %r %s" %
                             (href, filesystem_path, e)) from e

        return self._collection_class(
            cast(multifilesystem.Storage, self),
            pathutils.unstrip_path(sane_path, True))
|
62
radicale/storage/multifilesystem/delete.py
Normal file
62
radicale/storage/multifilesystem/delete.py
Normal file
|
@ -0,0 +1,62 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Optional
|
||||
|
||||
from radicale import pathutils, storage
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
from radicale.storage.multifilesystem.history import CollectionPartHistory
|
||||
|
||||
|
||||
class CollectionPartDelete(CollectionPartHistory, CollectionBase):
    """Mixin implementing deletion of whole collections and single items."""

    def delete(self, href: Optional[str] = None) -> None:
        """Delete the whole collection (``href is None``) or the item
        ``href``.

        Item deletion also records the change in the history cache and
        drops the item's cache entry.

        Raises UnsafePathError for unsafe hrefs and ComponentNotFoundError
        if the item file does not exist.
        """
        if href is None:
            # Delete the collection
            parent_dir = os.path.dirname(self._filesystem_path)
            try:
                os.rmdir(self._filesystem_path)
            except OSError:
                # Folder not empty: move it into a sibling temporary
                # directory so TemporaryDirectory's cleanup removes the
                # contents, keeping the disappearance atomic.
                with TemporaryDirectory(prefix=".Radicale.tmp-", dir=parent_dir
                                        ) as tmp:
                    os.rename(self._filesystem_path, os.path.join(
                        tmp, os.path.basename(self._filesystem_path)))
                    self._storage._sync_directory(parent_dir)
            else:
                self._storage._sync_directory(parent_dir)
        else:
            # Delete an item
            if not pathutils.is_safe_filesystem_path_component(href):
                raise pathutils.UnsafePathError(href)
            path = pathutils.path_to_filesystem(self._filesystem_path, href)
            if not os.path.isfile(path):
                raise storage.ComponentNotFoundError(href)
            os.remove(path)
            self._storage._sync_directory(os.path.dirname(path))
            # Track the change
            self._update_history_etag(href, None)
            self._clean_history()
            # Remove item from cache
            cache_folder = self._storage._get_collection_cache_subfolder(os.path.dirname(path), ".Radicale.cache", "item")
            cache_file = os.path.join(cache_folder, os.path.basename(path))
            if os.path.isfile(cache_file):
                os.remove(cache_file)
                self._storage._sync_directory(cache_folder)
|
117
radicale/storage/multifilesystem/discover.py
Normal file
117
radicale/storage/multifilesystem/discover.py
Normal file
|
@ -0,0 +1,117 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import os
|
||||
import posixpath
|
||||
from typing import Callable, ContextManager, Iterator, Optional, Set, cast
|
||||
|
||||
from radicale import pathutils, types
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import StorageBase
|
||||
|
||||
|
||||
@types.contextmanager
def _null_child_context_manager(path: str,
                                href: Optional[str]) -> Iterator[None]:
    """No-op context manager used as the default ``child_context_manager``
    in ``discover``; both arguments are accepted but ignored."""
    yield
|
||||
|
||||
|
||||
class StoragePartDiscover(StorageBase):
    """Mixin implementing path discovery for the multifilesystem
    storage."""

    def discover(
            self, path: str, depth: str = "0",
            child_context_manager: Optional[
                Callable[[str, Optional[str]], ContextManager[None]]] = None,
            user_groups: Set[str] = set([])
            ) -> Iterator[types.CollectionOrItem]:
        """Yield the collection or item at ``path``.

        With ``depth != "0"`` also yield the collection's items and child
        collections, plus any ``GROUPS/<b64(group)>`` collections for
        ``user_groups``. Unsafe paths yield nothing. Each child is yielded
        inside ``child_context_manager(sane_path, href)``.

        NOTE(review): the mutable default ``user_groups=set([])`` is shared
        across calls; it is never mutated here, but a None sentinel would
        be safer if that ever changes.
        """
        # assert isinstance(self, multifilesystem.Storage)
        if child_context_manager is None:
            child_context_manager = _null_child_context_manager
        # Path should already be sanitized
        sane_path = pathutils.strip_path(path)
        attributes = sane_path.split("/") if sane_path else []

        folder = self._get_collection_root_folder()
        # Create the root collection
        self._makedirs_synced(folder)
        try:
            filesystem_path = pathutils.path_to_filesystem(folder, sane_path)
        except ValueError as e:
            # Path is unsafe
            logger.debug("Unsafe path %r requested from storage: %s",
                         sane_path, e, exc_info=True)
            return

        # Check if the path exists and if it leads to a collection or an item
        href: Optional[str]
        if not os.path.isdir(filesystem_path):
            if attributes and os.path.isfile(filesystem_path):
                # A regular file: the last path segment is the item href.
                href = attributes.pop()
            else:
                return
        else:
            href = None

        sane_path = "/".join(attributes)
        collection = self._collection_class(
            cast(multifilesystem.Storage, self),
            pathutils.unstrip_path(sane_path, True))

        if href:
            item = collection._get(href)
            if item is not None:
                yield item
            return

        yield collection

        if depth == "0":
            return

        # Depth > 0: first the collection's items ...
        for href in collection._list():
            with child_context_manager(sane_path, href):
                item = collection._get(href)
                if item is not None:
                    yield item

        # ... then its child collections.
        for entry in os.scandir(filesystem_path):
            if not entry.is_dir():
                continue
            href = entry.name
            if not pathutils.is_safe_filesystem_path_component(href):
                if not href.startswith(".Radicale"):
                    logger.debug("Skipping collection %r in %r",
                                 href, sane_path)
                continue
            sane_child_path = posixpath.join(sane_path, href)
            child_path = pathutils.unstrip_path(sane_child_path, True)
            with child_context_manager(sane_child_path, None):
                yield self._collection_class(
                    cast(multifilesystem.Storage, self), child_path)
        for group in user_groups:
            href = base64.b64encode(group.encode('utf-8')).decode('ascii')
            # Fix: lazy logging arguments instead of an f-string, so the
            # message is only formatted when debug logging is enabled
            # (consistent with every other logger call in this module).
            logger.debug("searching for group calendar %s %s", group, href)
            sane_child_path = f"GROUPS/{href}"
            if not os.path.isdir(pathutils.path_to_filesystem(folder, sane_child_path)):
                continue
            child_path = f"/GROUPS/{href}/"
            with child_context_manager(sane_child_path, None):
                yield self._collection_class(
                    cast(multifilesystem.Storage, self), child_path)
|
167
radicale/storage/multifilesystem/get.py
Normal file
167
radicale/storage/multifilesystem/get.py
Normal file
|
@ -0,0 +1,167 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from typing import Iterable, Iterator, Optional, Tuple
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import pathutils
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
from radicale.storage.multifilesystem.cache import CollectionPartCache
|
||||
from radicale.storage.multifilesystem.lock import CollectionPartLock
|
||||
|
||||
|
||||
class CollectionPartGet(CollectionPartCache, CollectionPartLock,
                        CollectionBase):
    """Mixin implementing item listing and retrieval, backed by the item
    cache."""

    # True once the item cache has been cleaned for this collection
    # instance (done lazily after the first cache miss).
    _item_cache_cleaned: bool

    def __init__(self, storage_: "multifilesystem.Storage", path: str,
                 filesystem_path: Optional[str] = None) -> None:
        super().__init__(storage_, path, filesystem_path)
        self._item_cache_cleaned = False

    def _list(self) -> Iterator[str]:
        """Yield the hrefs of all items (regular files with safe names) in
        this collection's folder."""
        for entry in os.scandir(self._filesystem_path):
            if not entry.is_file():
                continue
            href = entry.name
            if not pathutils.is_safe_filesystem_path_component(href):
                if not href.startswith(".Radicale"):
                    logger.debug("Skipping item %r in %r", href, self.path)
                continue
            yield href

    def _get(self, href: str, verify_href: bool = True
             ) -> Optional[radicale_item.Item]:
        """Load one item, preferring the item cache.

        On a cache miss the raw file is parsed, sanitized and stored back
        into the cache under an item-cache lock. Returns None for missing
        files, unsafe hrefs, or (when skip_broken_item is set) unparsable
        items.
        """
        if verify_href:
            try:
                if not pathutils.is_safe_filesystem_path_component(href):
                    raise pathutils.UnsafePathError(href)
                path = pathutils.path_to_filesystem(self._filesystem_path,
                                                    href)
            except ValueError as e:
                logger.debug(
                    "Can't translate name %r safely to filesystem in %r: %s",
                    href, self.path, e, exc_info=True)
                return None
        else:
            path = os.path.join(self._filesystem_path, href)
        try:
            with open(path, "rb") as f:
                raw_text = f.read()
        except (FileNotFoundError, IsADirectoryError):
            return None
        except PermissionError:
            # Windows raises ``PermissionError`` when ``path`` is a directory
            if (sys.platform == "win32" and
                    os.path.isdir(path) and os.access(path, os.R_OK)):
                return None
            raise
        # The hash of the component in the file system. This is used to check,
        # if the entry in the cache is still valid.
        if self._storage._use_mtime_and_size_for_item_cache is True:
            # Fix: one stat() call instead of two, so size and mtime are
            # guaranteed to describe the same state of the file (two
            # separate os.stat calls could observe different versions if
            # the file changed in between, producing an inconsistent key).
            stat_result = os.stat(path)
            cache_hash = self._item_cache_mtime_and_size(
                stat_result.st_size, stat_result.st_mtime_ns)
            if self._storage._debug_cache_actions is True:
                logger.debug("Item cache check for: %r with mtime and size %r", path, cache_hash)
        else:
            cache_hash = self._item_cache_hash(raw_text)
            if self._storage._debug_cache_actions is True:
                logger.debug("Item cache check for: %r with hash %r", path, cache_hash)
        cache_content = self._load_item_cache(href, cache_hash)
        if cache_content is None:
            if self._storage._debug_cache_actions is True:
                logger.debug("Item cache miss for: %r", path)
            with self._acquire_cache_lock("item"):
                # Lock the item cache to prevent multiple processes from
                # generating the same data in parallel.
                # This improves the performance for multiple requests.
                if self._storage._lock.locked == "r":
                    # Check if another process created the file in the meantime
                    cache_content = self._load_item_cache(href, cache_hash)
                if cache_content is None:
                    try:
                        vobject_items = radicale_item.read_components(
                            raw_text.decode(self._encoding))
                        radicale_item.check_and_sanitize_items(
                            vobject_items, tag=self.tag)
                        vobject_item, = vobject_items
                        temp_item = radicale_item.Item(
                            collection=self, vobject_item=vobject_item)
                        if self._storage._debug_cache_actions is True:
                            logger.debug("Item cache store for: %r", path)
                        cache_content = self._store_item_cache(
                            href, temp_item, cache_hash)
                    except Exception as e:
                        if self._skip_broken_item:
                            logger.warning("Skip broken item %r in %r: %s", href, self.path, e)
                            return None
                        else:
                            raise RuntimeError("Failed to load item %r in %r: %s" %
                                               (href, self.path, e)) from e
                    # Clean cache entries once after the data in the file
                    # system was edited externally.
                    if not self._item_cache_cleaned:
                        self._item_cache_cleaned = True
                        self._clean_item_cache()
        else:
            if self._storage._debug_cache_actions is True:
                logger.debug("Item cache hit for: %r", path)
        last_modified = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT",
            time.gmtime(os.path.getmtime(path)))
        # Don't keep reference to ``vobject_item``, because it requires a lot
        # of memory.
        return radicale_item.Item(
            collection=self, href=href, last_modified=last_modified,
            etag=cache_content.etag, text=cache_content.text,
            uid=cache_content.uid, name=cache_content.name,
            component_name=cache_content.tag,
            time_range=(cache_content.start, cache_content.end))

    def get_multi(self, hrefs: Iterable[str]
                  ) -> Iterator[Tuple[str, Optional[radicale_item.Item]]]:
        """Yield ``(href, item-or-None)`` for each requested href."""
        # It's faster to check for file name collisions here, because
        # we only need to call os.listdir once.
        files = None
        for href in hrefs:
            if files is None:
                # List dir after hrefs returned one item, the iterator may be
                # empty and the for-loop is never executed.
                files = os.listdir(self._filesystem_path)
            path = os.path.join(self._filesystem_path, href)
            if (not pathutils.is_safe_filesystem_path_component(href) or
                    href not in files and os.path.lexists(path)):
                logger.debug("Can't translate name safely to filesystem: %r",
                             href)
                yield (href, None)
            else:
                yield (href, self._get(href, verify_href=False))

    def get_all(self) -> Iterator[radicale_item.Item]:
        """Yield every loadable item of the collection."""
        for href in self._list():
            # We don't need to check for collisions, because the file names
            # are from os.listdir.
            item = self._get(href, verify_href=False)
            if item is not None:
                yield item
|
92
radicale/storage/multifilesystem/history.py
Normal file
92
radicale/storage/multifilesystem/history.py
Normal file
|
@ -0,0 +1,92 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import binascii
|
||||
import contextlib
|
||||
import os
|
||||
import pickle
|
||||
from typing import BinaryIO, Optional, cast
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import pathutils
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
|
||||
|
||||
class CollectionPartHistory(CollectionBase):
|
||||
|
||||
_max_sync_token_age: int
|
||||
|
||||
def __init__(self, storage_: "multifilesystem.Storage", path: str,
|
||||
filesystem_path: Optional[str] = None) -> None:
|
||||
super().__init__(storage_, path, filesystem_path)
|
||||
self._max_sync_token_age = storage_.configuration.get(
|
||||
"storage", "max_sync_token_age")
|
||||
|
||||
def _update_history_etag(self, href, item):
|
||||
"""Updates and retrieves the history etag from the history cache.
|
||||
|
||||
The history cache contains a file for each current and deleted item
|
||||
of the collection. These files contain the etag of the item (empty
|
||||
string for deleted items) and a history etag, which is a hash over
|
||||
the previous history etag and the etag separated by "/".
|
||||
"""
|
||||
history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
|
||||
try:
|
||||
with open(os.path.join(history_folder, href), "rb") as f:
|
||||
cache_etag, history_etag = pickle.load(f)
|
||||
except (FileNotFoundError, pickle.UnpicklingError, ValueError) as e:
|
||||
if isinstance(e, (pickle.UnpicklingError, ValueError)):
|
||||
logger.warning(
|
||||
"Failed to load history cache entry %r in %r: %s",
|
||||
href, self.path, e, exc_info=True)
|
||||
cache_etag = ""
|
||||
# Initialize with random data to prevent collisions with cleaned
|
||||
# expired items.
|
||||
history_etag = binascii.hexlify(os.urandom(16)).decode("ascii")
|
||||
etag = item.etag if item else ""
|
||||
if etag != cache_etag:
|
||||
self._storage._makedirs_synced(history_folder)
|
||||
history_etag = radicale_item.get_etag(
|
||||
history_etag + "/" + etag).strip("\"")
|
||||
# Race: Other processes might have created and locked the file.
|
||||
with contextlib.suppress(PermissionError), self._atomic_write(
|
||||
os.path.join(history_folder, href), "wb") as fo:
|
||||
fb = cast(BinaryIO, fo)
|
||||
pickle.dump([etag, history_etag], fb)
|
||||
return history_etag
|
||||
|
||||
def _get_deleted_history_hrefs(self):
    """Iterate over the hrefs of deleted items that still have an entry
    in the history cache."""
    history_folder = self._storage._get_collection_cache_subfolder(
        self._filesystem_path, ".Radicale.cache", "history")
    # The history folder may not exist yet; treat that as "no entries".
    with contextlib.suppress(FileNotFoundError):
        for cache_entry in os.scandir(history_folder):
            name = cache_entry.name
            # Skip unsafe names and hrefs whose item file still exists
            # (those are not deleted).
            if (pathutils.is_safe_filesystem_path_component(name) and
                    not os.path.isfile(
                        os.path.join(self._filesystem_path, name))):
                yield name
|
||||
|
||||
def _clean_history(self):
    """Remove expired history cache entries of deleted items.

    Entries older than ``max_sync_token_age`` are eligible for removal;
    the actual deletion is delegated to ``_clean_cache``.
    """
    # Delete all expired history entries of deleted items.
    history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
    self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
                      max_age=self._max_sync_token_age)
|
121
radicale/storage/multifilesystem/lock.py
Normal file
121
radicale/storage/multifilesystem/lock.py
Normal file
|
@ -0,0 +1,121 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2023-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import contextlib
|
||||
import logging
|
||||
import os
|
||||
import shlex
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Iterator
|
||||
|
||||
from radicale import config, pathutils, types
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import CollectionBase, StorageBase
|
||||
|
||||
|
||||
class CollectionPartLock(CollectionBase):
    """Per-collection cache locking for the multifilesystem backend."""

    @types.contextmanager
    def _acquire_cache_lock(self, ns: str = "") -> Iterator[None]:
        """Acquire a write lock on the cache namespace ``ns``.

        If the whole storage is already locked for writing, no extra
        file lock is needed and the context is entered immediately.
        """
        if self._storage._lock.locked == "w":
            yield
            return
        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", ns)
        self._storage._makedirs_synced(cache_folder)
        # One lock file per namespace, e.g. ".Radicale.lock.item".
        lock_path = os.path.join(cache_folder,
                                 ".Radicale.lock" + (".%s" % ns if ns else ""))
        logger.debug("Lock file (CollectionPartLock): %r" % lock_path)
        lock = pathutils.RwLock(lock_path)
        with lock.acquire("w"):
            yield
|
||||
|
||||
|
||||
class StoragePartLock(StorageBase):
    """Storage-wide locking plus optional post-write hook execution."""

    _lock: pathutils.RwLock    # file-based read/write lock for the storage
    _hook: str                 # shell command template from [storage] hook

    def __init__(self, configuration: config.Configuration) -> None:
        super().__init__(configuration)
        lock_path = os.path.join(self._filesystem_folder, ".Radicale.lock")
        logger.debug("Lock file (StoragePartLock): %r" % lock_path)
        self._lock = pathutils.RwLock(lock_path)
        self._hook = configuration.get("storage", "hook")

    @types.contextmanager
    def acquire_lock(self, mode: str, user: str = "", *args, **kwargs) -> Iterator[None]:
        """Acquire the storage lock in ``mode`` ("r" or "w").

        After a write-locked section finishes, the configured storage
        hook (if any) is executed while the lock is still held, so the
        hook sees a consistent storage state.
        """
        with self._lock.acquire(mode):
            yield
            # execute hook
            if mode == "w" and self._hook:
                debug = logger.isEnabledFor(logging.DEBUG)
                # Use new process group for child to prevent terminals
                # from sending SIGINT etc.
                preexec_fn = None
                creationflags = 0
                if sys.platform == "win32":
                    creationflags |= subprocess.CREATE_NEW_PROCESS_GROUP
                else:
                    # Process group is also used to identify child processes
                    preexec_fn = os.setpgrp
                # optional argument
                path = kwargs.get('path', "")
                try:
                    # Expand the %(path)s / %(cwd)s / %(user)s placeholders;
                    # values are shell-quoted because the hook runs via shell.
                    command = self._hook % {
                        "path": shlex.quote(self._get_collection_root_folder() + path),
                        "cwd": shlex.quote(self._filesystem_folder),
                        "user": shlex.quote(user or "Anonymous")}
                except KeyError as e:
                    logger.error("Storage hook contains not supported placeholder %s (skip execution of: %r)" % (e, self._hook))
                    return

                logger.debug("Executing storage hook: '%s'" % command)
                try:
                    p = subprocess.Popen(
                        command, stdin=subprocess.DEVNULL,
                        stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
                        stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
                        shell=True, universal_newlines=True, preexec_fn=preexec_fn,
                        cwd=self._filesystem_folder, creationflags=creationflags)
                except Exception as e:
                    logger.error("Execution of storage hook not successful on 'Popen': %s" % e)
                    return
                logger.debug("Executing storage hook started 'Popen'")
                try:
                    stdout_data, stderr_data = p.communicate()
                except BaseException as e:  # e.g. KeyboardInterrupt or SystemExit
                    logger.error("Execution of storage hook not successful on 'communicate': %s" % e)
                    p.kill()
                    p.wait()
                    return
                finally:
                    if sys.platform != "win32":
                        # Kill remaining children identified by process group
                        with contextlib.suppress(OSError):
                            os.killpg(p.pid, signal.SIGKILL)
                logger.debug("Executing storage hook finished")
                if stdout_data:
                    logger.debug("Captured stdout from storage hook:\n%s", stdout_data)
                if stderr_data:
                    logger.debug("Captured stderr from storage hook:\n%s", stderr_data)
                if p.returncode != 0:
                    logger.error("Execution of storage hook not successful: %s" % subprocess.CalledProcessError(p.returncode, p.args))
                    return
|
71
radicale/storage/multifilesystem/meta.py
Normal file
71
radicale/storage/multifilesystem/meta.py
Normal file
|
@ -0,0 +1,71 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import os
|
||||
from typing import Mapping, Optional, TextIO, Union, cast, overload
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
|
||||
|
||||
class CollectionPartMeta(CollectionBase):
    """Collection properties (meta data) stored in ``.Radicale.props``."""

    _meta_cache: Optional[Mapping[str, str]]   # cached, sanitized props
    _props_path: str                           # path of the JSON props file

    def __init__(self, storage_: "multifilesystem.Storage", path: str,
                 filesystem_path: Optional[str] = None) -> None:
        super().__init__(storage_, path, filesystem_path)
        self._meta_cache = None
        self._props_path = os.path.join(
            self._filesystem_path, ".Radicale.props")

    @overload
    def get_meta(self, key: None = None) -> Mapping[str, str]: ...

    @overload
    def get_meta(self, key: str) -> Optional[str]: ...

    def get_meta(self, key: Optional[str] = None) -> Union[Mapping[str, str],
                                                           Optional[str]]:
        """Return all properties (``key is None``) or a single value.

        Raises RuntimeError if the props file cannot be parsed or fails
        sanitization.
        """
        # reuse cached value if the storage is read-only
        # (while write-locked the file may change, so always re-read then)
        if self._storage._lock.locked == "w" or self._meta_cache is None:
            try:
                try:
                    with open(self._props_path, encoding=self._encoding) as f:
                        temp_meta = json.load(f)
                except FileNotFoundError:
                    # No props file yet: treat as an empty property set.
                    temp_meta = {}
                self._meta_cache = radicale_item.check_and_sanitize_props(
                    temp_meta)
            except ValueError as e:
                raise RuntimeError("Failed to load properties of collection "
                                   "%r: %s" % (self.path, e)) from e
        return self._meta_cache if key is None else self._meta_cache.get(key)

    def set_meta(self, props: Mapping[str, str]) -> None:
        """Atomically replace the collection's properties with ``props``.

        Raises ValueError if writing the props file fails.
        """
        # TODO: better fix for "mypy"
        try:
            with self._atomic_write(self._props_path, "w") as fo:  # type: ignore
                f = cast(TextIO, fo)
                json.dump(props, f, sort_keys=True)
        except OSError as e:
            raise ValueError("Failed to write meta data %r %s" % (self._props_path, e)) from e
|
69
radicale/storage/multifilesystem/move.py
Normal file
69
radicale/storage/multifilesystem/move.py
Normal file
|
@ -0,0 +1,69 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
|
||||
from radicale import item as radicale_item
|
||||
from radicale import pathutils, storage
|
||||
from radicale.log import logger
|
||||
from radicale.storage import multifilesystem
|
||||
from radicale.storage.multifilesystem.base import StorageBase
|
||||
|
||||
|
||||
class StoragePartMove(StorageBase):
    """Move an item between collections in the multifilesystem backend."""

    def move(self, item: radicale_item.Item,
             to_collection: storage.BaseCollection, to_href: str) -> None:
        """Move ``item`` to ``to_collection`` under the name ``to_href``.

        Moves the item file, moves its item-cache entry, and records the
        change in the history cache of both collections.
        Raises ValueError if the filesystem move fails.
        """
        if not pathutils.is_safe_filesystem_path_component(to_href):
            raise pathutils.UnsafePathError(to_href)
        assert isinstance(to_collection, multifilesystem.Collection)
        assert isinstance(item.collection, multifilesystem.Collection)
        assert item.href
        move_from = pathutils.path_to_filesystem(item.collection._filesystem_path, item.href)
        move_to = pathutils.path_to_filesystem(to_collection._filesystem_path, to_href)
        try:
            os.replace(move_from, move_to)
        except OSError as e:
            raise ValueError("Failed to move file %r => %r %s" % (move_from, move_to, e)) from e
        # Durability: fsync the affected directory/directories.
        self._sync_directory(to_collection._filesystem_path)
        if item.collection._filesystem_path != to_collection._filesystem_path:
            self._sync_directory(item.collection._filesystem_path)
        # Move the item cache entry
        cache_folder = self._get_collection_cache_subfolder(item.collection._filesystem_path, ".Radicale.cache", "item")
        to_cache_folder = self._get_collection_cache_subfolder(to_collection._filesystem_path, ".Radicale.cache", "item")
        self._makedirs_synced(to_cache_folder)
        move_from = os.path.join(cache_folder, item.href)
        move_to = os.path.join(to_cache_folder, to_href)
        try:
            os.replace(move_from, move_to)
        except FileNotFoundError:
            # No cache entry for the item: nothing to move.
            pass
        except OSError as e:
            # Best effort: a lost cache entry is regenerated later.
            logger.error("Failed to move cache file %r => %r %s" % (move_from, move_to, e))
            pass
        else:
            # NOTE(review): this calls _makedirs_synced on folders that
            # already exist — presumably for its directory-sync side
            # effect after the rename; confirm _sync_directory was not
            # intended here.
            self._makedirs_synced(to_cache_folder)
            if cache_folder != to_cache_folder:
                self._makedirs_synced(cache_folder)
        # Track the change
        to_collection._update_history_etag(to_href, item)
        item.collection._update_history_etag(item.href, None)
        to_collection._clean_history()
        if item.collection._filesystem_path != to_collection._filesystem_path:
            item.collection._clean_history()
|
123
radicale/storage/multifilesystem/sync.py
Normal file
123
radicale/storage/multifilesystem/sync.py
Normal file
|
@ -0,0 +1,123 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import contextlib
|
||||
import itertools
|
||||
import os
|
||||
import pickle
|
||||
from hashlib import sha256
|
||||
from typing import BinaryIO, Iterable, Tuple, cast
|
||||
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
from radicale.storage.multifilesystem.cache import CollectionPartCache
|
||||
from radicale.storage.multifilesystem.history import CollectionPartHistory
|
||||
|
||||
|
||||
class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
                         CollectionBase):
    """RFC 6578 sync-collection support for the multifilesystem backend."""

    def sync(self, old_token: str = "") -> Tuple[str, Iterable[str]]:
        """Return ``(new_token, changed_hrefs)`` relative to ``old_token``.

        Raises ValueError for malformed or unknown (expired) tokens.
        """
        # The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME
        # where TOKEN_NAME is the sha256 hash of all history etags of present
        # and past items of the collection.
        def check_token_name(token_name: str) -> bool:
            # Valid token names are exactly 64 lowercase hex characters.
            if len(token_name) != 64:
                return False
            for c in token_name:
                if c not in "0123456789abcdef":
                    return False
            return True

        old_token_name = ""
        if old_token:
            # Extract the token name from the sync token
            if not old_token.startswith("http://radicale.org/ns/sync/"):
                raise ValueError("Malformed token: %r" % old_token)
            old_token_name = old_token[len("http://radicale.org/ns/sync/"):]
            if not check_token_name(old_token_name):
                raise ValueError("Malformed token: %r" % old_token)
        # Get the current state and sync-token of the collection.
        state = {}
        token_name_hash = sha256()
        # Find the history of all existing and deleted items
        for href, item in itertools.chain(
                ((item.href, item) for item in self.get_all()),
                ((href, None) for href in self._get_deleted_history_hrefs())):
            history_etag = self._update_history_etag(href, item)
            state[href] = history_etag
            token_name_hash.update((href + "/" + history_etag).encode())
        token_name = token_name_hash.hexdigest()
        token = "http://radicale.org/ns/sync/%s" % token_name
        if token_name == old_token_name:
            # Nothing changed
            return token, ()
        token_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "sync-token")
        token_path = os.path.join(token_folder, token_name)
        old_state = {}
        if old_token_name:
            # load the old token state
            old_token_path = os.path.join(token_folder, old_token_name)
            try:
                # Race: Another process might have deleted the file.
                with open(old_token_path, "rb") as f:
                    old_state = pickle.load(f)
            except (FileNotFoundError, pickle.UnpicklingError,
                    ValueError) as e:
                if isinstance(e, (pickle.UnpicklingError, ValueError)):
                    logger.warning(
                        "Failed to load stored sync token %r in %r: %s",
                        old_token_name, self.path, e, exc_info=True)
                    # Delete the damaged file
                    with contextlib.suppress(FileNotFoundError,
                                             PermissionError):
                        os.remove(old_token_path)
                # Unknown/expired token: client must do a full resync.
                raise ValueError("Token not found: %r" % old_token)
        # write the new token state or update the modification time of
        # existing token state
        if not os.path.exists(token_path):
            self._storage._makedirs_synced(token_folder)
            try:
                # Race: Other processes might have created and locked the file.
                # TODO: better fix for "mypy"
                with self._atomic_write(token_path, "wb") as fo:  # type: ignore
                    fb = cast(BinaryIO, fo)
                    pickle.dump(state, fb)
            except PermissionError:
                pass
            else:
                # clean up old sync tokens and item cache
                self._clean_cache(token_folder, os.listdir(token_folder),
                                  max_age=self._max_sync_token_age)
                self._clean_history()
        else:
            # Try to update the modification time
            with contextlib.suppress(FileNotFoundError):
                # Race: Another process might have deleted the file.
                os.utime(token_path)
        changes = []
        # Find all new, changed and deleted (that are still in the item cache)
        # items
        for href, history_etag in state.items():
            if history_etag != old_state.get(href):
                changes.append(href)
        # Find all deleted items that are no longer in the item cache
        for href, history_etag in old_state.items():
            if href not in state:
                changes.append(href)
        return token, changes
|
138
radicale/storage/multifilesystem/upload.py
Normal file
138
radicale/storage/multifilesystem/upload.py
Normal file
|
@ -0,0 +1,138 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2022 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import errno
|
||||
import os
|
||||
import pickle
|
||||
import sys
|
||||
from typing import Iterable, Iterator, TextIO, cast
|
||||
|
||||
import radicale.item as radicale_item
|
||||
from radicale import pathutils
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import CollectionBase
|
||||
from radicale.storage.multifilesystem.cache import CollectionPartCache
|
||||
from radicale.storage.multifilesystem.get import CollectionPartGet
|
||||
from radicale.storage.multifilesystem.history import CollectionPartHistory
|
||||
|
||||
|
||||
class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
                           CollectionPartHistory, CollectionBase):
    """Item upload support for the multifilesystem backend."""

    def upload(self, href: str, item: radicale_item.Item
               ) -> radicale_item.Item:
        """Atomically store ``item`` under ``href`` and return the stored
        item as re-read from storage.

        Also stores the item-cache entry and records the change in the
        history cache. Raises ValueError on write failures and
        RuntimeError if the item disappears immediately after storing.
        """
        if not pathutils.is_safe_filesystem_path_component(href):
            raise pathutils.UnsafePathError(href)
        path = pathutils.path_to_filesystem(self._filesystem_path, href)
        try:
            with self._atomic_write(path, newline="") as fo:  # type: ignore
                f = cast(TextIO, fo)
                f.write(item.serialize())
        except Exception as e:
            raise ValueError("Failed to store item %r in collection %r: %s" %
                             (href, self.path, e)) from e
        # store cache file
        if self._storage._use_mtime_and_size_for_item_cache is True:
            # Fix: stat the file once so size and mtime come from the same
            # snapshot; two separate os.stat() calls could observe
            # different states under concurrent modification.
            st = os.stat(path)
            cache_hash = self._item_cache_mtime_and_size(st.st_size, st.st_mtime_ns)
            if self._storage._debug_cache_actions is True:
                logger.debug("Item cache store for: %r with mtime and size %r", path, cache_hash)
        else:
            cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
            if self._storage._debug_cache_actions is True:
                logger.debug("Item cache store for: %r with hash %r", path, cache_hash)
        try:
            self._store_item_cache(href, item, cache_hash)
        except Exception as e:
            raise ValueError("Failed to store item cache of %r in collection %r: %s" %
                             (href, self.path, e)) from e
        # Track the change
        self._update_history_etag(href, item)
        self._clean_history()
        # Re-read to return the canonical stored representation.
        uploaded_item = self._get(href, verify_href=False)
        if uploaded_item is None:
            raise RuntimeError("Storage modified externally")
        return uploaded_item

    def _upload_all_nonatomic(self, items: Iterable[radicale_item.Item],
                              suffix: str = "") -> None:
        """Upload a new set of items non-atomic"""
        def is_safe_free_href(href: str) -> bool:
            # Safe path component and not colliding with an existing
            # entry (lexists also catches dangling symlinks).
            return (pathutils.is_safe_filesystem_path_component(href) and
                    not os.path.lexists(
                        os.path.join(self._filesystem_path, href)))

        def get_safe_free_hrefs(uid: str) -> Iterator[str]:
            # Candidate names: the UID (with suffix), the UID's etag, and
            # finally a generated free name as a guaranteed fallback.
            for href in [uid if uid.lower().endswith(suffix.lower())
                         else uid + suffix,
                         radicale_item.get_etag(uid).strip('"') + suffix]:
                if is_safe_free_href(href):
                    yield href
            yield radicale_item.find_available_uid(
                lambda href: not is_safe_free_href(href), suffix)

        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
        self._storage._makedirs_synced(cache_folder)
        for item in items:
            uid = item.uid
            logger.debug("Store item from list with uid: '%s'" % uid)
            cache_content = self._item_cache_content(item)
            for href in get_safe_free_hrefs(uid):
                path = os.path.join(self._filesystem_path, href)
                try:
                    f = open(path,
                             "w", newline="", encoding=self._encoding)
                except OSError as e:
                    # EINVAL (POSIX) / winerror 123: invalid filename,
                    # try the next candidate href.
                    if (sys.platform != "win32" and e.errno == errno.EINVAL or
                            sys.platform == "win32" and e.errno == 123):
                        # not a valid filename
                        continue
                    raise
                break
            else:
                raise RuntimeError("No href found for item %r in temporary "
                                   "collection %r" % (uid, self.path))

            try:
                with f:
                    f.write(item.serialize())
                    f.flush()
                    self._storage._fsync(f)
            except Exception as e:
                raise ValueError(
                    "Failed to store item %r in temporary collection %r: %s" %
                    (uid, self.path, e)) from e

            # store cache file
            if self._storage._use_mtime_and_size_for_item_cache is True:
                # Fix: single stat for a consistent size/mtime snapshot
                # (the original called os.stat() twice).
                st = os.stat(path)
                cache_hash = self._item_cache_mtime_and_size(st.st_size, st.st_mtime_ns)
                if self._storage._debug_cache_actions is True:
                    logger.debug("Item cache store for: %r with mtime and size %r", path, cache_hash)
            else:
                cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
                if self._storage._debug_cache_actions is True:
                    logger.debug("Item cache store for: %r with hash %r", path, cache_hash)
            path_cache = os.path.join(cache_folder, href)
            if self._storage._debug_cache_actions is True:
                logger.debug("Item cache store into: %r", path_cache)
            with open(os.path.join(cache_folder, href), "wb") as fb:
                pickle.dump((cache_hash, *cache_content), fb)
                fb.flush()
                self._storage._fsync(fb)
        self._storage._sync_directory(cache_folder)
        self._storage._sync_directory(self._filesystem_path)
|
92
radicale/storage/multifilesystem/verify.py
Normal file
92
radicale/storage/multifilesystem/verify.py
Normal file
|
@ -0,0 +1,92 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2014 Jean-Marc Martins
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2021 Unrud <unrud@outlook.com>
|
||||
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from typing import Iterator, Optional, Set
|
||||
|
||||
from radicale import pathutils, storage, types
|
||||
from radicale.log import logger
|
||||
from radicale.storage.multifilesystem.base import StorageBase
|
||||
from radicale.storage.multifilesystem.discover import StoragePartDiscover
|
||||
|
||||
|
||||
class StoragePartVerify(StoragePartDiscover, StorageBase):
    """Consistency check over all collections of the storage."""

    def verify(self) -> bool:
        """Walk the whole storage and verify every collection and item.

        Returns True when no item or collection errors were found.
        fsync is disabled for the duration of the check (read-mostly).
        """
        item_errors = collection_errors = 0
        logger.info("Disable fsync during storage verification")
        self._filesystem_fsync = False

        @types.contextmanager
        def exception_cm(sane_path: str, href: Optional[str]
                         ) -> Iterator[None]:
            # Count and log errors instead of aborting the whole scan;
            # href is None for collection-level contexts.
            nonlocal item_errors, collection_errors
            try:
                yield
            except Exception as e:
                if href is not None:
                    item_errors += 1
                    name = "item %r in %r" % (href, sane_path)
                else:
                    collection_errors += 1
                    name = "collection %r" % sane_path
                logger.error("Invalid %s: %s", name, e, exc_info=True)

        # Breadth-first walk starting at the storage root.
        remaining_sane_paths = [""]
        while remaining_sane_paths:
            sane_path = remaining_sane_paths.pop(0)
            path = pathutils.unstrip_path(sane_path, True)
            logger.info("Verifying path %r", sane_path)
            count = 0
            is_collection = True
            with exception_cm(sane_path, None):
                saved_item_errors = item_errors
                collection: Optional[storage.BaseCollection] = None
                uids: Set[str] = set()
                has_child_collections = False
                for item in self.discover(path, "1", exception_cm):
                    if not collection:
                        # First result is the collection itself.
                        assert isinstance(item, storage.BaseCollection)
                        collection = item
                        collection.get_meta()
                        if not collection.tag:
                            # Untagged folder: not a calendar/addressbook.
                            is_collection = False
                            logger.info("Skip !collection %r", sane_path)
                        continue
                    if isinstance(item, storage.BaseCollection):
                        has_child_collections = True
                        remaining_sane_paths.append(item.path)
                    elif item.uid in uids:
                        logger.error("Invalid item %r in %r: UID conflict %r",
                                     item.href, sane_path, item.uid)
                    else:
                        uids.add(item.uid)
                        count += 1
                        logger.debug("Verified in %r item %r",
                                     sane_path, item.href)
                assert collection
                if item_errors == saved_item_errors:
                    # Only exercise sync() if all items were valid.
                    if is_collection:
                        collection.sync()
                if has_child_collections and collection.tag:
                    # Tagged collections must be leaves.
                    logger.error("Invalid collection %r: %r must not have "
                                 "child collections", sane_path,
                                 collection.tag)
            if is_collection:
                # NOTE(review): "collect" looks like a typo for "collection".
                logger.info("Verified collect %r (items: %d)", sane_path, count)
        return item_errors == 0 and collection_errors == 0
|
114
radicale/storage/multifilesystem_nolock.py
Normal file
114
radicale/storage/multifilesystem_nolock.py
Normal file
|
@ -0,0 +1,114 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2021 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
The multifilesystem backend without file-based locking.
|
||||
"""
|
||||
|
||||
import threading
|
||||
from collections import deque
|
||||
from typing import ClassVar, Deque, Dict, Hashable, Iterator, Type
|
||||
|
||||
from radicale import config, pathutils, types
|
||||
from radicale.storage import multifilesystem
|
||||
|
||||
|
||||
class RwLock(pathutils.RwLock):
    """In-process readers-writer lock (no lock file).

    Replaces the file-based lock of ``pathutils.RwLock`` with a
    ``threading.Condition`` built on the inherited ``self._lock``.
    """

    _cond: threading.Condition

    def __init__(self) -> None:
        # Empty path: the file-based machinery of the base class is unused.
        super().__init__("")
        self._cond = threading.Condition(self._lock)

    @types.contextmanager
    def acquire(self, mode: str, user: str = "") -> Iterator[None]:
        """Acquire in mode "r" (shared) or "w" (exclusive)."""
        if mode not in "rw":
            raise ValueError("Invalid mode: %r" % mode)
        with self._cond:
            # Readers wait only for an active writer; a writer also waits
            # until all readers are gone.
            self._cond.wait_for(lambda: not self._writer and (
                mode == "r" or self._readers == 0))
            if mode == "r":
                self._readers += 1
            else:
                self._writer = True
        try:
            yield
        finally:
            with self._cond:
                if mode == "r":
                    self._readers -= 1
                # Clearing _writer unconditionally is safe: in "w" mode no
                # readers exist, in "r" mode no writer was set.
                self._writer = False
                if self._readers == 0:
                    self._cond.notify_all()
|
||||
|
||||
|
||||
class LockDict:
    """FIFO mutex per hashable key, created and destroyed on demand."""

    _lock: threading.Lock                          # guards _dict
    _dict: Dict[Hashable, Deque[threading.Lock]]   # per-key waiter queues

    def __init__(self) -> None:
        self._lock = threading.Lock()
        self._dict = {}

    @types.contextmanager
    def acquire(self, key: Hashable) -> Iterator[None]:
        """Hold the lock for ``key`` for the duration of the context.

        Waiters are queued per key; each holder releases the next queued
        waiter on exit, giving FIFO fairness.
        """
        with self._lock:
            waiters = self._dict.get(key)
            if waiters is None:
                self._dict[key] = waiters = deque()
            # Queue non-empty means someone currently holds the key.
            wait = bool(waiters)
            # Pre-acquired lock used as a one-shot wait handle.
            waiter = threading.Lock()
            waiter.acquire()
            waiters.append(waiter)
        if wait:
            # Blocks until the previous holder releases our waiter.
            waiter.acquire()
        try:
            yield
        finally:
            with self._lock:
                assert waiters[0] is waiter and self._dict[key] is waiters
                del waiters[0]
                if waiters:
                    # Wake the next queued waiter.
                    waiters[0].release()
                else:
                    # Last one out removes the queue for this key.
                    del self._dict[key]
|
||||
|
||||
|
||||
class Collection(multifilesystem.Collection):
    """Multifilesystem collection using in-process cache locks."""

    _storage: "Storage"

    @types.contextmanager
    def _acquire_cache_lock(self, ns: str = "") -> Iterator[None]:
        """Acquire the cache lock for namespace ``ns`` of this collection.

        No extra lock is needed while the storage is write-locked.
        """
        if self._storage._lock.locked == "w":
            yield
            return
        with self._storage._cache_lock.acquire((self.path, ns)):
            yield
|
||||
|
||||
|
||||
class Storage(multifilesystem.Storage):
    """Storage backend that replaces file-based locking with in-memory
    locks (usable from a single process only)."""

    _collection_class: ClassVar[Type[Collection]] = Collection

    # One FIFO lock per (collection path, namespace) cache key.
    _cache_lock: LockDict

    def __init__(self, configuration: config.Configuration) -> None:
        super().__init__(configuration)
        # Replace the file-based lock installed by the base class.
        self._lock = RwLock()
        self._cache_lock = LockDict()
|
|
@ -1,5 +1,6 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -19,48 +20,218 @@ Tests for Radicale.
|
|||
|
||||
"""
|
||||
|
||||
import base64
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import wsgiref.util
|
||||
import xml.etree.ElementTree as ET
|
||||
from io import BytesIO
|
||||
from typing import Any, Dict, List, Optional, Tuple, Union
|
||||
from urllib.parse import quote
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
|
||||
import defusedxml.ElementTree as DefusedET
|
||||
import vobject
|
||||
|
||||
logger = logging.getLogger("radicale_test")
|
||||
if not logger.hasHandlers():
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
|
||||
logger.addHandler(handler)
|
||||
logger.setLevel(logging.DEBUG)
|
||||
import radicale
|
||||
from radicale import app, config, types, xmlutils
|
||||
|
||||
RESPONSES = Dict[str, Union[int, Dict[str, Tuple[int, ET.Element]], vobject.base.Component]]
|
||||
|
||||
# Enable debug output
|
||||
radicale.log.logger.setLevel(logging.DEBUG)
|
||||
|
||||
|
||||
class BaseTest:
|
||||
"""Base class for tests."""
|
||||
logger = logger
|
||||
|
||||
def request(self, method, path, data=None, **args):
|
||||
colpath: str
|
||||
configuration: config.Configuration
|
||||
application: app.Application
|
||||
|
||||
    def setup_method(self) -> None:
        """Create a temporary storage folder and a test configuration."""
        self.configuration = config.load()
        self.colpath = tempfile.mkdtemp()
        self.configure({
            "storage": {"filesystem_folder": self.colpath,
                        # Disable syncing to disk for better performance
                        "_filesystem_fsync": "False"},
            # Set incorrect authentication delay to a short duration
            "auth": {"delay": "0.001"}})
|
||||
|
||||
    def configure(self, config_: types.CONFIG) -> None:
        """Apply ``config_`` on top of the current configuration and
        rebuild the WSGI application with it."""
        self.configuration.update(config_, "test", privileged=True)
        self.application = app.Application(self.configuration)
|
||||
|
||||
    def teardown_method(self) -> None:
        """Remove the temporary storage folder created in setup."""
        shutil.rmtree(self.colpath)
|
||||
|
||||
def request(self, method: str, path: str, data: Optional[str] = None,
|
||||
check: Optional[int] = None, **kwargs
|
||||
) -> Tuple[int, Dict[str, str], str]:
|
||||
"""Send a request."""
|
||||
self.application._status = None
|
||||
self.application._headers = None
|
||||
self.application._answer = None
|
||||
login = kwargs.pop("login", None)
|
||||
if login is not None and not isinstance(login, str):
|
||||
raise TypeError("login argument must be %r, not %r" %
|
||||
(str, type(login)))
|
||||
environ: Dict[str, Any] = {k.upper(): v for k, v in kwargs.items()}
|
||||
for k, v in environ.items():
|
||||
if not isinstance(v, str):
|
||||
raise TypeError("type of %r is %r, expected %r" %
|
||||
(k, type(v), str))
|
||||
encoding: str = self.configuration.get("encoding", "request")
|
||||
if login:
|
||||
environ["HTTP_AUTHORIZATION"] = "Basic " + base64.b64encode(
|
||||
login.encode(encoding)).decode()
|
||||
environ["REQUEST_METHOD"] = method.upper()
|
||||
environ["PATH_INFO"] = path
|
||||
if data is not None:
|
||||
data_bytes = data.encode(encoding)
|
||||
environ["wsgi.input"] = BytesIO(data_bytes)
|
||||
environ["CONTENT_LENGTH"] = str(len(data_bytes))
|
||||
environ["wsgi.errors"] = sys.stderr
|
||||
wsgiref.util.setup_testing_defaults(environ)
|
||||
status = headers = None
|
||||
|
||||
for key in args:
|
||||
args[key.upper()] = args[key]
|
||||
args["REQUEST_METHOD"] = method.upper()
|
||||
args["PATH_INFO"] = path
|
||||
if data:
|
||||
data = data.encode("utf-8")
|
||||
args["wsgi.input"] = BytesIO(data)
|
||||
args["CONTENT_LENGTH"] = str(len(data))
|
||||
self.application._answer = self.application(args, self.start_response)
|
||||
def start_response(status_: str, headers_: List[Tuple[str, str]]
|
||||
) -> None:
|
||||
nonlocal status, headers
|
||||
status = int(status_.split()[0])
|
||||
headers = dict(headers_)
|
||||
answers = list(self.application(environ, start_response))
|
||||
assert status is not None and headers is not None
|
||||
assert check is None or status == check, "%d != %d" % (status, check)
|
||||
|
||||
return (
|
||||
int(self.application._status.split()[0]),
|
||||
dict(self.application._headers),
|
||||
self.application._answer[0].decode("utf-8")
|
||||
if self.application._answer else None)
|
||||
return status, headers, answers[0].decode() if answers else ""
|
||||
|
||||
def start_response(self, status, headers):
|
||||
"""Put the response values into the current application."""
|
||||
self.application._status = status
|
||||
self.application._headers = headers
|
||||
    @staticmethod
    def parse_responses(text: str) -> RESPONSES:
        """Parse a WebDAV multistatus XML answer.

        Returns a dict mapping each ``D:href`` path either to an int
        status code (when the response carries a plain ``D:status``) or
        to a dict mapping human-readable property tags to
        ``(status_code, element)`` tuples from the propstats.
        """
        xml = DefusedET.fromstring(text)
        assert xml.tag == xmlutils.make_clark("D:multistatus")
        path_responses: RESPONSES = {}
        for response in xml.findall(xmlutils.make_clark("D:response")):
            href = response.find(xmlutils.make_clark("D:href"))
            # Each path may only be answered once per multistatus.
            assert href.text not in path_responses
            prop_responses: Dict[str, Tuple[int, ET.Element]] = {}
            for propstat in response.findall(
                    xmlutils.make_clark("D:propstat")):
                status = propstat.find(xmlutils.make_clark("D:status"))
                assert status.text.startswith("HTTP/1.1 ")
                status_code = int(status.text.split(" ")[1])
                for element in propstat.findall(
                        "./%s/*" % xmlutils.make_clark("D:prop")):
                    human_tag = xmlutils.make_human_tag(element.tag)
                    assert human_tag not in prop_responses
                    prop_responses[human_tag] = (status_code, element)
            status = response.find(xmlutils.make_clark("D:status"))
            if status is not None:
                # A plain status response and propstat responses are
                # mutually exclusive for one href.
                assert not prop_responses
                assert status.text.startswith("HTTP/1.1 ")
                status_code = int(status.text.split(" ")[1])
                path_responses[href.text] = status_code
            else:
                path_responses[href.text] = prop_responses
        return path_responses
|
||||
|
||||
    @staticmethod
    def parse_free_busy(text: str) -> RESPONSES:
        """Parse a free-busy REPORT answer (iCalendar, not XML).

        The single parsed component is stored under the empty path.
        """
        path_responses: RESPONSES = {}
        path_responses[""] = vobject.readOne(text)
        return path_responses
|
||||
|
||||
    def get(self, path: str, check: Optional[int] = 200, **kwargs
            ) -> Tuple[int, str]:
        """Send a GET request and return ``(status, body)``."""
        assert "data" not in kwargs
        status, _, answer = self.request("GET", path, check=check, **kwargs)
        return status, answer
|
||||
|
||||
    def post(self, path: str, data: Optional[str] = None,
             check: Optional[int] = 200, **kwargs) -> Tuple[int, str]:
        """Send a POST request and return ``(status, body)``."""
        status, _, answer = self.request("POST", path, data, check=check,
                                         **kwargs)
        return status, answer
|
||||
|
||||
    def put(self, path: str, data: str, check: Optional[int] = 201,
            **kwargs) -> Tuple[int, str]:
        """Send a PUT request and return ``(status, body)``."""
        status, _, answer = self.request("PUT", path, data, check=check,
                                         **kwargs)
        return status, answer
|
||||
|
||||
    def propfind(self, path: str, data: Optional[str] = None,
                 check: Optional[int] = 207, **kwargs
                 ) -> Tuple[int, RESPONSES]:
        """Send a PROPFIND request and return ``(status, parsed responses)``.

        On a non-2xx status an empty responses dict is returned.
        """
        status, _, answer = self.request("PROPFIND", path, data, check=check,
                                         **kwargs)
        if status < 200 or 300 <= status:
            return status, {}
        assert answer is not None
        responses = self.parse_responses(answer)
        if kwargs.get("HTTP_DEPTH", "0") == "0":
            # With depth 0, only the requested resource itself may answer.
            assert len(responses) == 1 and quote(path) in responses
        return status, responses
|
||||
|
||||
    def proppatch(self, path: str, data: Optional[str] = None,
                  check: Optional[int] = 207, **kwargs
                  ) -> Tuple[int, RESPONSES]:
        """Send a PROPPATCH request and return ``(status, parsed responses)``.

        On a non-2xx status an empty responses dict is returned.
        """
        status, _, answer = self.request("PROPPATCH", path, data, check=check,
                                         **kwargs)
        if status < 200 or 300 <= status:
            return status, {}
        assert answer is not None
        responses = self.parse_responses(answer)
        # Only the patched resource itself may appear in the answer.
        assert len(responses) == 1 and path in responses
        return status, responses
|
||||
|
||||
    def report(self, path: str, data: str, check: Optional[int] = 207,
               is_xml: Optional[bool] = True,
               **kwargs) -> Tuple[int, RESPONSES]:
        """Send a REPORT request and return ``(status, parsed responses)``.

        ``is_xml`` selects between multistatus XML parsing and
        free-busy (iCalendar) parsing of the answer.
        """
        status, _, answer = self.request("REPORT", path, data, check=check,
                                         **kwargs)
        if status < 200 or 300 <= status:
            return status, {}
        assert answer is not None
        if is_xml:
            parsed = self.parse_responses(answer)
        else:
            parsed = self.parse_free_busy(answer)
        return status, parsed
|
||||
|
||||
    def delete(self, path: str, check: Optional[int] = 200, **kwargs
               ) -> Tuple[int, RESPONSES]:
        """Send a DELETE request and return ``(status, parsed responses)``."""
        assert "data" not in kwargs
        status, _, answer = self.request("DELETE", path, check=check, **kwargs)
        if status < 200 or 300 <= status:
            return status, {}
        assert answer is not None
        responses = self.parse_responses(answer)
        # Only the deleted resource itself may appear in the answer.
        assert len(responses) == 1 and path in responses
        return status, responses
|
||||
|
||||
    def mkcalendar(self, path: str, data: Optional[str] = None,
                   check: Optional[int] = 201, **kwargs
                   ) -> Tuple[int, str]:
        """Send a MKCALENDAR request and return ``(status, body)``."""
        status, _, answer = self.request("MKCALENDAR", path, data, check=check,
                                         **kwargs)
        return status, answer
|
||||
|
||||
    def mkcol(self, path: str, data: Optional[str] = None,
              check: Optional[int] = 201, **kwargs) -> int:
        """Send a MKCOL request and return the status code."""
        status, _, _ = self.request("MKCOL", path, data, check=check, **kwargs)
        return status
|
||||
|
||||
    def create_addressbook(self, path: str, check: Optional[int] = 201,
                           **kwargs) -> int:
        """Create an addressbook collection via extended MKCOL and
        return the status code."""
        assert "data" not in kwargs
        return self.mkcol(path, """\
<?xml version="1.0" encoding="UTF-8" ?>
<create xmlns="DAV:" xmlns:CR="urn:ietf:params:xml:ns:carddav">
  <set>
    <prop>
      <resourcetype>
        <collection />
        <CR:addressbook />
      </resourcetype>
    </prop>
  </set>
</create>""", check=check, **kwargs)
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -27,5 +28,8 @@ from radicale import auth
|
|||
|
||||
|
||||
class Auth(auth.BaseAuth):
|
||||
def is_authenticated(self, user, password):
|
||||
return user == "tmp"
|
||||
|
||||
    def _login(self, login: str, password: str) -> str:
        """Accept only the user "tmp" (any password); return "" to
        reject everyone else."""
        if login == "tmp":
            return login
        return ""
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# Copyright (C) 2017 Unrud <unrud@openaliasbox.org>
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -19,9 +19,13 @@ Custom rights management.
|
|||
|
||||
"""
|
||||
|
||||
from radicale import rights
|
||||
from radicale import pathutils, rights
|
||||
|
||||
|
||||
class Rights(rights.BaseRights):
|
||||
def authorized(self, user, path, permission):
|
||||
return path.strip("/") in ("tmp", "other")
|
||||
|
||||
    def authorization(self, user: str, path: str) -> str:
        """Grant full rights ("RrWw") on the collections "tmp" and
        "other"; deny access everywhere else."""
        sane_path = pathutils.strip_path(path)
        if sane_path not in ("tmp", "other"):
            return ""
        return "RrWw"
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2012-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -17,15 +18,19 @@
|
|||
"""
|
||||
Custom storage backend.
|
||||
|
||||
Copy of filesystem storage backend for testing
|
||||
Copy of multifilesystem storage backend that uses the default ``sync``
|
||||
implementation for testing.
|
||||
|
||||
"""
|
||||
|
||||
from radicale import storage
|
||||
from radicale.storage import BaseCollection, multifilesystem
|
||||
|
||||
|
||||
# TODO: make something more in this collection (and test it)
|
||||
class Collection(storage.Collection):
|
||||
"""Collection stored in a folder."""
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
class Collection(multifilesystem.Collection):
    """Multifilesystem collection that uses the generic ``sync``."""

    # Override the multifilesystem sync with the default BaseCollection
    # implementation so tests exercise the generic code path.
    sync = BaseCollection.sync
|
||||
|
||||
|
||||
class Storage(multifilesystem.Storage):
    """Storage whose collections use the default ``sync``
    implementation instead of the multifilesystem one."""

    _collection_class = Collection
|
36
radicale/tests/custom/web.py
Normal file
36
radicale/tests/custom/web.py
Normal file
|
@ -0,0 +1,36 @@
|
|||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Custom web plugin.
|
||||
|
||||
"""
|
||||
|
||||
from http import client
|
||||
|
||||
from radicale import httputils, types, web
|
||||
|
||||
|
||||
class Web(web.BaseWeb):
|
||||
|
||||
def get(self, environ: types.WSGIEnviron, base_prefix: str, path: str,
|
||||
user: str) -> types.WSGIResponse:
|
||||
return client.OK, {"Content-Type": "text/plain"}, "custom"
|
||||
|
||||
def post(self, environ: types.WSGIEnviron, base_prefix: str, path: str,
|
||||
user: str) -> types.WSGIResponse:
|
||||
content = httputils.read_request_body(self.configuration, environ)
|
||||
return client.OK, {"Content-Type": "text/plain"}, "echo:" + content
|
|
@ -1,7 +1,8 @@
|
|||
# This file is part of Radicale Server - Calendar Server
|
||||
# This file is part of Radicale - CalDAV and CardDAV server
|
||||
# Copyright © 2008 Nicolas Kandel
|
||||
# Copyright © 2008 Pascal Halter
|
||||
# Copyright © 2008-2017 Guillaume Ayoub
|
||||
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -25,12 +26,24 @@ This module offers helpers to use in tests.
|
|||
|
||||
import os
|
||||
|
||||
EXAMPLES_FOLDER = os.path.join(os.path.dirname(__file__), "static")
|
||||
from radicale import config, types
|
||||
|
||||
EXAMPLES_FOLDER: str = os.path.join(os.path.dirname(__file__), "static")
|
||||
|
||||
|
||||
def get_file_content(file_name):
|
||||
try:
|
||||
with open(os.path.join(EXAMPLES_FOLDER, file_name)) as fd:
|
||||
return fd.read()
|
||||
except IOError:
|
||||
print("Couldn't open the file %s" % file_name)
|
||||
def get_file_path(file_name: str) -> str:
    """Return the path of a test fixture in the static folder."""
    return os.path.join(EXAMPLES_FOLDER, file_name)
|
||||
|
||||
|
||||
def get_file_content(file_name: str) -> str:
    """Read a test fixture from the static folder as UTF-8 text."""
    with open(get_file_path(file_name), encoding="utf-8") as f:
        return f.read()
|
||||
|
||||
|
||||
def configuration_to_dict(configuration: config.Configuration) -> types.CONFIG:
    """Convert configuration to a dict with raw values.

    Sections and options whose names start with "_" are internal and
    are left out.
    """
    result = {}
    for section in configuration.sections():
        if section.startswith("_"):
            continue
        options = {}
        for option in configuration.options(section):
            if not option.startswith("_"):
                options[option] = configuration.get_raw(section, option)
        result[section] = options
    return result
|
||||
|
|
4
radicale/tests/static/allprop.xml
Normal file
4
radicale/tests/static/allprop.xml
Normal file
|
@ -0,0 +1,4 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<D:propfind xmlns:D="DAV:">
|
||||
<D:allprop />
|
||||
</D:propfind>
|
20
radicale/tests/static/cert.pem
Normal file
20
radicale/tests/static/cert.pem
Normal file
|
@ -0,0 +1,20 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIDXDCCAkSgAwIBAgIJAKBsA+sXwPtuMA0GCSqGSIb3DQEBCwUAMEIxCzAJBgNV
|
||||
BAYTAlhYMRUwEwYDVQQHDAxEZWZhdWx0IENpdHkxHDAaBgNVBAoME0RlZmF1bHQg
|
||||
Q29tcGFueSBMdGQwIBcNMTgwOTAzMjAyNDE2WhgPMjExODA4MTAyMDI0MTZaMEIx
|
||||
CzAJBgNVBAYTAlhYMRUwEwYDVQQHDAxEZWZhdWx0IENpdHkxHDAaBgNVBAoME0Rl
|
||||
ZmF1bHQgQ29tcGFueSBMdGQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
|
||||
AQDMEBfr6oEk/t1Op9fSRRRrReQOZqx+gC1jHONSDXudDyfZBFSQx1QY9EtFqMUr
|
||||
lvY3uI+rohujMTfXih6AEXTHHJmRIk80hDR/ovDMDiC5+z6EuKwbKPtjDMKqn7Hb
|
||||
YoA4pyRWwzPydrZRVeG9+z4YY5uMRCmpzLqWcm04kgCEeJqKpb9ZQMKL/8fq8a9p
|
||||
v5rfOXqtneje4yJAOF/L2EXk/MjdqvYR/cu2kTP8IDocTYZj6xjA9GVb37Xga+YG
|
||||
u/SbGSU9vU8rmXJqqAFR/im97bz960Q/Q2VN2y9nTLEPCjGeyxcatxDw6vc1s2GE
|
||||
5ttuu6aPmRc392T3kFV9ZnYdAgMBAAGjUzBRMB0GA1UdDgQWBBRKPvGgdpsYK/ma
|
||||
3l+FMUIngO9xGTAfBgNVHSMEGDAWgBRKPvGgdpsYK/ma3l+FMUIngO9xGTAPBgNV
|
||||
HRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCID4FTrX6DJKQzvDTg6ejP
|
||||
ziSeoea7+nqtVogEBfmzm8YY4pu6qbNM8EHwbP9cnbZ6V48PmZUV4hQibGy33C6E
|
||||
EIvqNBHcO/WqjbL2IWKcuZH7pMQVedR3GAV8sJMMwBOTtdopcTbnYFRZYwXV2dKe
|
||||
reo5ukDZo8KyQHS9lloi5IPhsTufPBK3n9EtMa/Ch7bqmXEiSkKFU04o2kuj0Urk
|
||||
hG8lnX1Ff2xWjG5N9Hp7xaEWk3LO/nDxlF/AmF3pDuWkZXpzNpUk70KlNx8xSKYR
|
||||
cHmp2Z1hrA7PvUrG46I2dwC+y09hRXFSqYBT2po9Uzwj8aSNXGr1vKBzebqi9Sxc
|
||||
-----END CERTIFICATE-----
|
|
@ -3,4 +3,5 @@ VERSION:3.0
|
|||
UID:contact1
|
||||
N:Contact;;;;
|
||||
FN:Contact
|
||||
NICKNAME:test
|
||||
END:VCARD
|
||||
|
|
8
radicale/tests/static/contact_photo_with_data_uri.vcf
Normal file
8
radicale/tests/static/contact_photo_with_data_uri.vcf
Normal file
|
@ -0,0 +1,8 @@
|
|||
BEGIN:VCARD
|
||||
VERSION:3.0
|
||||
UID:contact
|
||||
N:Contact;;;;
|
||||
FN:Contact
|
||||
NICKNAME:test
|
||||
PHOTO;ENCODING=b;TYPE=png:data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAAD0lEQVQIHQEEAPv/AP///wX+Av4DfRnGAAAAAElFTkSuQmCC
|
||||
END:VCARD
|
|
@ -25,6 +25,7 @@ LAST-MODIFIED:20130902T150158Z
|
|||
DTSTAMP:20130902T150158Z
|
||||
UID:event1
|
||||
SUMMARY:Event
|
||||
CATEGORIES:some_category1,another_category2
|
||||
ORGANIZER:mailto:unclesam@example.com
|
||||
ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=TENTATIVE;CN=Jane Doe:MAILTO:janedoe@example.com
|
||||
ATTENDEE;ROLE=REQ-PARTICIPANT;DELEGATED-FROM="MAILTO:bob@host.com";PARTSTAT=ACCEPTED;CN=John Doe:MAILTO:johndoe@example.com
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue