mirror of https://github.com/Kozea/Radicale.git (synced 2025-04-03 21:27:36 +03:00)
commit 644548c866
parent 05d4e91856

    rename function

5 changed files with 10 additions and 14 deletions
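The change is mechanical: the storage helper previously named _get_collection_cache_folder is renamed to _get_collection_cache_subfolder, and the remaining call sites that still built cache paths inline with os.path.join are switched over to the helper. For orientation, here is a minimal sketch of what the helper presumably does, inferred purely from its call sites in the hunks below (the actual implementation lives in Radicale's storage base class and may differ):

    # sketch.py -- hypothetical stand-in for Radicale's StorageBase helper
    import os

    class StorageBase:
        def _get_collection_cache_subfolder(self, path: str, folder: str,
                                            subfolder: str) -> str:
            # Join the collection path with the cache folder and subfolder,
            # e.g. ("/col", ".Radicale.cache", "item")
            #   -> "/col/.Radicale.cache/item"
            return os.path.join(path, folder, subfolder)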
@@ -82,7 +82,7 @@ class CollectionPartCache(CollectionBase):
         if not cache_hash:
             cache_hash = self._item_cache_hash(
                 item.serialize().encode(self._encoding))
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         content = self._item_cache_content(item)
         self._storage._makedirs_synced(cache_folder)
         # Race: Other processes might have created and locked the file.
@@ -95,7 +95,7 @@ class CollectionPartCache(CollectionBase):
 
     def _load_item_cache(self, href: str, cache_hash: str
                          ) -> Optional[CacheContent]:
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         try:
             with open(os.path.join(cache_folder, href), "rb") as f:
                 hash_, *remainder = pickle.load(f)
@@ -109,7 +109,7 @@ class CollectionPartCache(CollectionBase):
         return None
 
     def _clean_item_cache(self) -> None:
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         self._clean_cache(cache_folder, (
             e.name for e in os.scandir(cache_folder) if not
             os.path.isfile(os.path.join(self._filesystem_path, e.name))))
@@ -47,8 +47,7 @@ class CollectionPartHistory(CollectionBase):
         string for deleted items) and a history etag, which is a hash over
         the previous history etag and the etag separated by "/".
         """
-        history_folder = os.path.join(self._filesystem_path,
-                                      ".Radicale.cache", "history")
+        history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
         try:
             with open(os.path.join(history_folder, href), "rb") as f:
                 cache_etag, history_etag = pickle.load(f)
@@ -76,8 +75,7 @@ class CollectionPartHistory(CollectionBase):
     def _get_deleted_history_hrefs(self):
         """Returns the hrefs of all deleted items that are still in the
         history cache."""
-        history_folder = os.path.join(self._filesystem_path,
-                                      ".Radicale.cache", "history")
+        history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
         with contextlib.suppress(FileNotFoundError):
             for entry in os.scandir(history_folder):
                 href = entry.name
@@ -89,7 +87,6 @@ class CollectionPartHistory(CollectionBase):
 
     def _clean_history(self):
         # Delete all expired history entries of deleted items.
-        history_folder = os.path.join(self._filesystem_path,
-                                      ".Radicale.cache", "history")
+        history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
         self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
                           max_age=self._max_sync_token_age)
@@ -42,8 +42,8 @@ class StoragePartMove(StorageBase):
         if item.collection._filesystem_path != to_collection._filesystem_path:
             self._sync_directory(item.collection._filesystem_path)
         # Move the item cache entry
-        cache_folder = self._get_collection_cache_folder(item.collection._filesystem_path, ".Radicale.cache", "item")
-        to_cache_folder = self._get_collection_cache_folder(to_collection._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._get_collection_cache_subfolder(item.collection._filesystem_path, ".Radicale.cache", "item")
+        to_cache_folder = self._get_collection_cache_subfolder(to_collection._filesystem_path, ".Radicale.cache", "item")
         self._makedirs_synced(to_cache_folder)
         try:
             os.replace(os.path.join(cache_folder, item.href),
@@ -67,8 +67,7 @@ class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
         if token_name == old_token_name:
             # Nothing changed
             return token, ()
-        token_folder = os.path.join(self._filesystem_path,
-                                    ".Radicale.cache", "sync-token")
+        token_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "sync-token")
         token_path = os.path.join(token_folder, token_name)
         old_state = {}
         if old_token_name:
@@ -76,7 +76,7 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
             yield radicale_item.find_available_uid(
                 lambda href: not is_safe_free_href(href), suffix)
 
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         self._storage._makedirs_synced(cache_folder)
         for item in items:
             uid = item.uid
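A quick check that the refactor is behavior-preserving for the inlined os.path.join call sites it replaces (this reuses the hypothetical StorageBase sketch from above; fs_path is a made-up example path):

    import os

    fs_path = "/var/lib/radicale/collection"  # hypothetical collection path
    # The old inline construction and the helper yield the same path.
    assert os.path.join(fs_path, ".Radicale.cache", "history") == \
        StorageBase()._get_collection_cache_subfolder(
            fs_path, ".Radicale.cache", "history")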