Merge pull request #1648 from pbiering/cache-extension-umask-fixes

Cache extension umask fixes
Peter Bieringer 2024-12-10 08:11:04 +00:00 committed by GitHub
commit 778f56cc4d
12 changed files with 149 additions and 22 deletions


@@ -2,8 +2,13 @@
## 3.3.2.dev
* Fix: debug logging in rights/from_file
* Add: option [storage] use_cache_subfolder_for_item for storing item cache outside collection-root
* Add: option [storage] use_cache_subfolder_for_item for storing 'item' cache outside collection-root
* Fix: ignore empty RRULESET in item
* Add: option [storage] filesystem_cache_folder for defining location of cache outside collection-root
* Add: option [storage] use_cache_subfolder_for_history for storing 'history' cache outside collection-root
* Add: option [storage] use_cache_subfolder_for_synctoken for storing 'sync-token' cache outside collection-root
* Add: option [storage] folder_umask for configuration of umask (overwrite system-default)
* Fix: also remove 'item' from cache on delete
## 3.3.1


@@ -1005,12 +1005,48 @@ Folder for storing local collections, created if not present.
Default: `/var/lib/radicale/collections`
##### filesystem_cache_folder
Folder for storing cache of local collections, created if not present
Default: (filesystem_folder)
Note: only used when one of the use_cache_subfolder_* options is active
Note: can be used on multi-instance setup to cache files on local node (see below)
##### use_cache_subfolder_for_item
Use subfolder `collection-cache` for cache file structure of item instead of inside collection folders, created if not present
Use subfolder `collection-cache` for cache file structure of 'item' instead of inside collection folders, created if not present
Default: `False`
Note: can be used on multi-instance setup to cache 'item' on local node
##### use_cache_subfolder_for_history
Use subfolder `collection-cache` for cache file structure of 'history' instead of inside collection folders, created if not present
Default: `False`
Note: use only on a single-instance setup; breaks consistency with clients in a multi-instance setup
##### use_cache_subfolder_for_synctoken
Use subfolder `collection-cache` for cache file structure of 'sync-token' instead of inside collection folders, created if not present
Default: `False`
Note: use only on a single-instance setup; breaks consistency with clients in a multi-instance setup
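For illustration (not part of the committed change), a `[storage]` excerpt that keeps only the 'item' cache on a node-local path could look like this; the cache path below is a placeholder, not a default:

```ini
[storage]
filesystem_folder = /var/lib/radicale/collections
# placeholder node-local path; only honored while a use_cache_subfolder_* option is active
filesystem_cache_folder = /var/cache/radicale
# safe to relocate on a multi-instance setup
use_cache_subfolder_for_item = True
# keep these disabled unless running a single instance
use_cache_subfolder_for_history = False
use_cache_subfolder_for_synctoken = False
```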
##### folder_umask
Use the configured umask for folder creation (not applicable on Windows)
Default: (system default, usually `0022`)
Useful values: `0077` (user:rwx group:- other:-), `0027` (user:rwx group:r-x other:-), `0007` (user:rwx group:rwx other:-) or `0022` (user:rwx group:r-x other:r-x)
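As a quick check of what a umask means for newly created folders, a minimal Python sketch (illustrative only, not Radicale code):

```python
import os
import stat
import tempfile

# With umask 0027, os.makedirs() (default mode 0o777) creates directories
# with mode 0750: rwx for the owner, r-x for the group, nothing for others.
old_umask = os.umask(0o027)
try:
    path = os.path.join(tempfile.mkdtemp(), "demo")
    os.makedirs(path)
    print(oct(stat.S_IMODE(os.stat(path).st_mode)))  # 0o750 on POSIX systems
finally:
    os.umask(old_umask)  # restore the previous process-wide umask
```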
##### max_sync_token_age
Delete sync-tokens that are older than the specified time (seconds).

config

@@ -138,9 +138,27 @@
# Folder for storing local collections, created if not present
#filesystem_folder = /var/lib/radicale/collections
# Use subfolder 'collection-cache' for item cache file structure instead of inside collection folder
# Folder for storing cache of local collections, created if not present
# Note: only used when one of the use_cache_subfolder_* options is active
# Note: can be used on multi-instance setup to cache files on local node (see below)
#filesystem_cache_folder = (filesystem_folder)
# Use subfolder 'collection-cache' for 'item' cache file structure instead of inside collection folder
# Note: can be used on multi-instance setup to cache 'item' on local node
#use_cache_subfolder_for_item = False
# Use subfolder 'collection-cache' for 'history' cache file structure instead of inside collection folder
# Note: use only on a single-instance setup; breaks consistency with clients in a multi-instance setup
#use_cache_subfolder_for_history = False
# Use subfolder 'collection-cache' for 'sync-token' cache file structure instead of inside collection folder
# Note: use only on a single-instance setup; breaks consistency with clients in a multi-instance setup
#use_cache_subfolder_for_synctoken = False
# Use the configured umask for folder creation (not applicable on Windows)
# Useful values: 0077 | 0027 | 0007 | 0022
#folder_umask = (system default, usually 0022)
# Delete sync tokens that are older (seconds)
#max_sync_token_age = 2592000


@@ -279,10 +279,26 @@ DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
"value": "/var/lib/radicale/collections",
"help": "path where collections are stored",
"type": filepath}),
("filesystem_cache_folder", {
"value": "",
"help": "path where cache of collections is stored in case of use_cache_subfolder_* options are active",
"type": filepath}),
("use_cache_subfolder_for_item", {
"value": "False",
"help": "use subfolder 'collection-cache' for item cache file structure instead of inside collection folder",
"help": "use subfolder 'collection-cache' for 'item' cache file structure instead of inside collection folder",
"type": bool}),
("use_cache_subfolder_for_history", {
"value": "False",
"help": "use subfolder 'collection-cache' for 'history' cache file structure instead of inside collection folder",
"type": bool}),
("use_cache_subfolder_for_synctoken", {
"value": "False",
"help": "use subfolder 'collection-cache' for 'sync-token' cache file structure instead of inside collection folder",
"type": bool}),
("folder_umask", {
"value": "",
"help": "umask for folder creation (empty: system default)",
"type": str}),
("max_sync_token_age", {
"value": "2592000", # 30 days
"help": "delete sync token that are older",


@@ -25,6 +25,7 @@ Uses one folder per collection and one file per collection entry.
"""
import os
import sys
import time
from typing import ClassVar, Iterator, Optional, Type
@@ -90,6 +91,27 @@ class Storage(
def __init__(self, configuration: config.Configuration) -> None:
super().__init__(configuration)
self._makedirs_synced(self._filesystem_folder)
logger.info("storage location: %r", self._filesystem_folder)
logger.info("storage cache subfolder usage for item: %s", self._use_cache_subfolder_for_item)
self._makedirs_synced(self._filesystem_folder)
logger.info("storage location subfolder: %r", self._get_collection_root_folder())
logger.info("storage cache subfolder usage for 'item': %s", self._use_cache_subfolder_for_item)
logger.info("storage cache subfolder usage for 'history': %s", self._use_cache_subfolder_for_history)
logger.info("storage cache subfolder usage for 'sync-token': %s", self._use_cache_subfolder_for_synctoken)
if self._use_cache_subfolder_for_item is True or self._use_cache_subfolder_for_history is True or self._use_cache_subfolder_for_synctoken is True:
logger.info("storage cache subfolder: %r", self._get_collection_cache_folder())
self._makedirs_synced(self._get_collection_cache_folder())
if sys.platform != "win32":
if not self._folder_umask:
# retrieve current umask by setting a dummy umask
current_umask = os.umask(0o0022)
logger.info("storage folder umask (from system): '%04o'", current_umask)
# reset to original
os.umask(current_umask)
else:
try:
config_umask = int(self._folder_umask, 8)
except Exception:
logger.critical("storage folder umask defined but invalid: '%s'", self._folder_umask)
raise
logger.info("storage folder umask defined: '%04o'", config_umask)
self._config_umask = config_umask
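The read-back above works because `os.umask()` returns the previous mask while setting a new one; Python offers no separate getter. The same trick as a standalone sketch (hypothetical helper, not part of this change):

```python
import os

def get_current_umask() -> int:
    # os.umask() only sets and returns the old value, so set a throwaway
    # mask, remember the returned value, and immediately restore it.
    previous = os.umask(0o022)
    os.umask(previous)
    return previous

print(f"current umask: {get_current_umask():04o}")
```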


@@ -69,8 +69,13 @@ class StorageBase(storage.BaseStorage):
_collection_class: ClassVar[Type["multifilesystem.Collection"]]
_filesystem_folder: str
_filesystem_cache_folder: str
_filesystem_fsync: bool
_use_cache_subfolder_for_item: bool
_use_cache_subfolder_for_history: bool
_use_cache_subfolder_for_synctoken: bool
_folder_umask: str
_config_umask: int
def __init__(self, configuration: config.Configuration) -> None:
super().__init__(configuration)
@@ -78,15 +83,33 @@ class StorageBase(storage.BaseStorage):
"storage", "filesystem_folder")
self._filesystem_fsync = configuration.get(
"storage", "_filesystem_fsync")
self._filesystem_cache_folder = configuration.get(
"storage", "filesystem_cache_folder")
self._use_cache_subfolder_for_item = configuration.get(
"storage", "use_cache_subfolder_for_item")
self._use_cache_subfolder_for_history = configuration.get(
"storage", "use_cache_subfolder_for_history")
self._use_cache_subfolder_for_synctoken = configuration.get(
"storage", "use_cache_subfolder_for_synctoken")
self._folder_umask = configuration.get(
"storage", "folder_umask")
def _get_collection_root_folder(self) -> str:
return os.path.join(self._filesystem_folder, "collection-root")
def _get_collection_cache_folder(self, path, folder, subfolder) -> str:
def _get_collection_cache_folder(self) -> str:
if self._filesystem_cache_folder:
return os.path.join(self._filesystem_cache_folder, "collection-cache")
else:
return os.path.join(self._filesystem_folder, "collection-cache")
def _get_collection_cache_subfolder(self, path, folder, subfolder) -> str:
if (self._use_cache_subfolder_for_item is True) and (subfolder == "item"):
path = path.replace(os.path.join(self._filesystem_folder, "collection-root"), os.path.join(self._filesystem_folder, "collection-cache"))
path = path.replace(self._get_collection_root_folder(), self._get_collection_cache_folder())
elif (self._use_cache_subfolder_for_history is True) and (subfolder == "history"):
path = path.replace(self._get_collection_root_folder(), self._get_collection_cache_folder())
elif (self._use_cache_subfolder_for_synctoken is True) and (subfolder == "sync-token"):
path = path.replace(self._get_collection_root_folder(), self._get_collection_cache_folder())
return os.path.join(path, folder, subfolder)
def _fsync(self, f: IO[AnyStr]) -> None:
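To make the path rewriting above concrete: with the default `filesystem_folder`, `use_cache_subfolder_for_item = True` and no `filesystem_cache_folder` set, the 'item' cache of a hypothetical collection `alice/calendar` is redirected from the collection tree into the cache tree:

```
/var/lib/radicale/collections/collection-root/alice/calendar/.Radicale.cache/item
  -> /var/lib/radicale/collections/collection-cache/alice/calendar/.Radicale.cache/item
```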
@@ -125,6 +148,8 @@ class StorageBase(storage.BaseStorage):
if os.path.isdir(filesystem_path):
return
parent_filesystem_path = os.path.dirname(filesystem_path)
if sys.platform != "win32" and self._folder_umask:
oldmask = os.umask(self._config_umask)
# Prevent infinite loop
if filesystem_path != parent_filesystem_path:
# Create parent dirs recursively
@@ -132,3 +157,5 @@ class StorageBase(storage.BaseStorage):
# Possible race!
os.makedirs(filesystem_path, exist_ok=True)
self._sync_directory(parent_filesystem_path)
if sys.platform != "win32" and self._folder_umask:
os.umask(oldmask)
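Because `os.umask()` is a process-wide setting, the save-and-restore pair above is the usual pattern. A sketch of the same idea with `try`/`finally`, so the previous mask is restored even if directory creation raises (hypothetical helper, not the committed code):

```python
import os
import sys

def makedirs_with_umask(path: str, umask: int) -> None:
    # Apply the umask only for the duration of the mkdir calls,
    # then restore whatever mask the process had before.
    if sys.platform == "win32":
        os.makedirs(path, exist_ok=True)  # umask handling is skipped on Windows
        return
    old_mask = os.umask(umask)
    try:
        os.makedirs(path, exist_ok=True)
    finally:
        os.umask(old_mask)
```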


@@ -82,7 +82,7 @@ class CollectionPartCache(CollectionBase):
if not cache_hash:
cache_hash = self._item_cache_hash(
item.serialize().encode(self._encoding))
cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
content = self._item_cache_content(item)
self._storage._makedirs_synced(cache_folder)
# Race: Other processes might have created and locked the file.
@@ -95,7 +95,7 @@ class CollectionPartCache(CollectionBase):
def _load_item_cache(self, href: str, cache_hash: str
) -> Optional[CacheContent]:
cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
try:
with open(os.path.join(cache_folder, href), "rb") as f:
hash_, *remainder = pickle.load(f)
@@ -109,7 +109,7 @@ class CollectionPartCache(CollectionBase):
return None
def _clean_item_cache(self) -> None:
cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
self._clean_cache(cache_folder, (
e.name for e in os.scandir(cache_folder) if not
os.path.isfile(os.path.join(self._filesystem_path, e.name))))
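As background for `_item_cache_hash` used above: each cache entry is keyed by a hash of the serialized item, so an edited item no longer matches its stale cache entry. A rough sketch of that idea (the hash algorithm here is a placeholder, not necessarily the one Radicale uses):

```python
import hashlib

def item_cache_hash(raw_item: bytes) -> str:
    # Key cached metadata by the serialized item's content, so any
    # change to the item invalidates the previously cached entry.
    return hashlib.sha256(raw_item).hexdigest()
```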


@@ -2,6 +2,7 @@
# Copyright © 2014 Jean-Marc Martins
# Copyright © 2012-2017 Guillaume Ayoub
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -53,3 +54,9 @@ class CollectionPartDelete(CollectionPartHistory, CollectionBase):
# Track the change
self._update_history_etag(href, None)
self._clean_history()
# Remove item from cache
cache_folder = self._storage._get_collection_cache_subfolder(os.path.dirname(path), ".Radicale.cache", "item")
cache_file = os.path.join(cache_folder, os.path.basename(path))
if os.path.isfile(cache_file):
os.remove(cache_file)
self._storage._sync_directory(cache_folder)
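A possible variation on the removal above (a sketch, not the committed code): tolerate a concurrent delete with `contextlib.suppress` instead of a separate `isfile` check, avoiding the small race between checking and removing:

```python
import contextlib
import os

def remove_cache_entry(cache_folder: str, href: str) -> None:
    # Same intent as above: drop the cached 'item' entry if present,
    # without failing if another process removed it first.
    with contextlib.suppress(FileNotFoundError):
        os.remove(os.path.join(cache_folder, href))
```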


@@ -47,8 +47,7 @@ class CollectionPartHistory(CollectionBase):
string for deleted items) and a history etag, which is a hash over
the previous history etag and the etag separated by "/".
"""
history_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "history")
history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
try:
with open(os.path.join(history_folder, href), "rb") as f:
cache_etag, history_etag = pickle.load(f)
@@ -76,8 +75,7 @@ class CollectionPartHistory(CollectionBase):
def _get_deleted_history_hrefs(self):
"""Returns the hrefs of all deleted items that are still in the
history cache."""
history_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "history")
history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
with contextlib.suppress(FileNotFoundError):
for entry in os.scandir(history_folder):
href = entry.name
@@ -89,7 +87,6 @@ class CollectionPartHistory(CollectionBase):
def _clean_history(self):
# Delete all expired history entries of deleted items.
history_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "history")
history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
max_age=self._max_sync_token_age)
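For context on `_update_history_etag` (described in the docstring above): each history etag is derived from the previous history etag and the item's etag, joined by "/", so the values form a per-href hash chain. A rough sketch of that chaining idea (hash function and formatting are placeholders, not Radicale's actual implementation):

```python
import hashlib

def next_history_etag(previous_history_etag: str, etag: str) -> str:
    # Chain the previous history etag with the current item etag,
    # separated by "/", so every change extends the chain with a new value.
    combined = (previous_history_etag + "/" + etag).encode("utf-8")
    return hashlib.sha256(combined).hexdigest()
```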


@@ -42,8 +42,8 @@ class StoragePartMove(StorageBase):
if item.collection._filesystem_path != to_collection._filesystem_path:
self._sync_directory(item.collection._filesystem_path)
# Move the item cache entry
cache_folder = self._get_collection_cache_folder(item.collection._filesystem_path, ".Radicale.cache", "item")
to_cache_folder = self._get_collection_cache_folder(to_collection._filesystem_path, ".Radicale.cache", "item")
cache_folder = self._get_collection_cache_subfolder(item.collection._filesystem_path, ".Radicale.cache", "item")
to_cache_folder = self._get_collection_cache_subfolder(to_collection._filesystem_path, ".Radicale.cache", "item")
self._makedirs_synced(to_cache_folder)
try:
os.replace(os.path.join(cache_folder, item.href),


@@ -67,8 +67,7 @@ class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
if token_name == old_token_name:
# Nothing changed
return token, ()
token_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "sync-token")
token_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "sync-token")
token_path = os.path.join(token_folder, token_name)
old_state = {}
if old_token_name:


@@ -76,7 +76,7 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
yield radicale_item.find_available_uid(
lambda href: not is_safe_free_href(href), suffix)
cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
self._storage._makedirs_synced(cache_folder)
for item in items:
uid = item.uid