From 6fdfb95f5337888233fa2a770980afdb1404d218 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 9 Oct 2016 12:00:46 +0200 Subject: [PATCH 001/101] fix broken merge --- radicale/storage.py | 30 +----------------------------- 1 file changed, 1 insertion(+), 29 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 4c2bcda0a..69b0d7f3c 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -631,6 +631,7 @@ def get(self, href): "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(os.path.getmtime(path))) try: + self.logger.debug("Read object: %s", path) item = vobject.readOne(text) except Exception: self.logger.exception("Object broken (skip 'get'): %s", path) @@ -697,38 +698,9 @@ def last_modified(self): def serialize(self): items = [] -<<<<<<< HEAD time_begin = datetime.datetime.now() - for href in os.listdir(self._filesystem_path): - if not is_safe_filesystem_path_component(href): - self.logger.debug("Skipping component: %s", href) - continue - path = os.path.join(self._filesystem_path, href) - if os.path.isfile(path): - self.logger.debug("Read object: %s", path) - with open(path, encoding=self.encoding, newline="") as fd: - try: - # check whether vobject liks the item - item = vobject.readOne(fd.read()) - except: - self.logger.exception("Object broken (skip 'list'): %s", path) - continue - - if self.get_meta("tag") == "VADDRESSBOOK": - try: - # check whether vobject liks the VCARD item - item.serialize() - except: - self.logger.exception("Object broken (skip 'read'): %s", path) - self.logger.error("Broken VCARD content: %s", item) - continue - items.append(item) - time_end = datetime.datetime.now() - self.logger.info("Collection read %d items in %s sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) -======= for href in self.list(): items.append(self.get(href).item) ->>>>>>> 03fbb1e68ebb2dcc953826ed4542326e20fdf758 if self.get_meta("tag") == "VCALENDAR": collection = vobject.iCalendar() for item in items: 
From 35946fd14aa1bddf6876c427c5ff1cdd199257d8 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 9 Oct 2016 11:12:48 +0000 Subject: [PATCH 002/101] improve logging and reimplement VCARD check --- radicale/storage.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 69b0d7f3c..6a319c8eb 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -633,9 +633,16 @@ def get(self, href): try: self.logger.debug("Read object: %s", path) item = vobject.readOne(text) - except Exception: - self.logger.exception("Object broken (skip 'get'): %s", path) + except Exception as e: + self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) return None; + if self.get_meta("tag") == "VADDRESSBOOK": + # check whether vobject likes the VCARD item + try: + item.serialize() + except Exception as e: + self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) + return None; return Item(self, item, href, last_modified) def upload(self, href, vobject_item): @@ -700,7 +707,8 @@ def serialize(self): items = [] time_begin = datetime.datetime.now() for href in self.list(): - items.append(self.get(href).item) + if hasattr(self.get(href),'item'): + items.append(self.get(href).item) if self.get_meta("tag") == "VCALENDAR": collection = vobject.iCalendar() for item in items: From f2f8ffeb4d896127eb33e403bc596f063a113e99 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 9 Oct 2016 11:17:49 +0000 Subject: [PATCH 003/101] remove no longer required exception handling (catched in storage.py now proper) --- radicale/xmlutils.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/radicale/xmlutils.py b/radicale/xmlutils.py index 738bf50f5..89c1ba683 100644 --- a/radicale/xmlutils.py +++ b/radicale/xmlutils.py @@ -28,7 +28,6 @@ import posixpath import re import xml.etree.ElementTree as ET -from pprint import pprint from collections import OrderedDict from datetime 
import datetime, timedelta, timezone from http import client @@ -572,12 +571,7 @@ def _propfind_response(path, item, props, user, write=False): element = ET.Element(tag) is404 = False if tag == _tag("D", "getetag"): - try: - element.text = item.etag - except: - print("Object broken (skip)") - pprint(vars(item)) - return None + element.text = item.etag elif tag == _tag("D", "getlastmodified"): element.text = item.last_modified elif tag == _tag("D", "principal-collection-set"): From b128691b238edccfb6c779620f2d3a34fcfe8bd3 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 9 Oct 2016 12:03:05 +0000 Subject: [PATCH 004/101] re-add lost collection read performance logging --- radicale/storage.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/radicale/storage.py b/radicale/storage.py index 6a319c8eb..237f4ac27 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -709,6 +709,8 @@ def serialize(self): for href in self.list(): if hasattr(self.get(href),'item'): items.append(self.get(href).item) + time_end = datetime.datetime.now() + self.logger.info("Collection read %d items in %s sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) if self.get_meta("tag") == "VCALENDAR": collection = vobject.iCalendar() for item in items: From 68877ef1ed5b8f46a6e9cc7f1dc74710298c0baa Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 9 Oct 2016 14:32:43 +0000 Subject: [PATCH 005/101] cosmetics --- radicale/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 7fec1e837..e44503535 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -301,7 +301,7 @@ def response(status, headers=(), answer=None): time_end = datetime.datetime.now() status = "%i %s" % ( status, client.responses.get(status, "Unknown")) - self.logger.info("%s answer status for %s in %s sec: %s", environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, (time_end - 
time_begin).total_seconds(), status) + self.logger.info("%s response status for %s in %s sec: %s", environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, (time_end - time_begin).total_seconds(), status) start_response(status, list(headers.items())) # Return response content return [answer] if answer else [] From 96ff1d6c7d6c7863c73bcc7e98460d868c04b366 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 9 Oct 2016 16:04:10 +0000 Subject: [PATCH 006/101] extended response logging --- radicale/__init__.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index e44503535..b5db6fd3b 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -298,11 +298,20 @@ def response(status, headers=(), answer=None): headers[key] = self.configuration.get("headers", key) # Start response - time_end = datetime.datetime.now() status = "%i %s" % ( status, client.responses.get(status, "Unknown")) - self.logger.info("%s response status for %s in %s sec: %s", environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, (time_end - time_begin).total_seconds(), status) start_response(status, list(headers.items())) + time_end = datetime.datetime.now() + sizeinfo = "" + if answer: + sizeinfo = sizeinfo + str(len(answer)) + " bytes" + if "Content-Encoding" in headers: + if len(sizeinfo) > 0: + sizeinfo = sizeinfo + " " + sizeinfo = sizeinfo + headers["Content-Encoding"] + if len(sizeinfo) > 0: + sizeinfo = " (" + sizeinfo + ")" + self.logger.info("%s response status for %s in %s sec: %s", environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) # Return response content return [answer] if answer else [] From 405f2a2f05aab8449fb503992ff9a593803745ec Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 11 Oct 2016 05:32:29 +0000 Subject: [PATCH 007/101] minor logging improvement --- radicale/__init__.py | 6 +++--- radicale/storage.py | 
2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index b5db6fd3b..bd2dcf450 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -304,14 +304,14 @@ def response(status, headers=(), answer=None): time_end = datetime.datetime.now() sizeinfo = "" if answer: - sizeinfo = sizeinfo + str(len(answer)) + " bytes" + sizeinfo = sizeinfo + str(len(str(answer))) + " bytes" if "Content-Encoding" in headers: if len(sizeinfo) > 0: sizeinfo = sizeinfo + " " sizeinfo = sizeinfo + headers["Content-Encoding"] if len(sizeinfo) > 0: sizeinfo = " (" + sizeinfo + ")" - self.logger.info("%s response status for %s in %s sec: %s", environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) + self.logger.info("%s response for %s in %s sec status: %s", environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) # Return response content return [answer] if answer else [] @@ -331,7 +331,7 @@ def response(status, headers=(), answer=None): if environ["HTTP_DEPTH"]: depthinfo = " with depth " + environ["HTTP_DEPTH"] time_begin = datetime.datetime.now() - self.logger.info("%s request for %s received from %s using \"%s\"", + self.logger.info("%s request for %s received from %s using \"%s\"", environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) headers = pprint.pformat(self.headers_log(environ)) self.logger.debug("Request headers:\n%s", headers) diff --git a/radicale/storage.py b/radicale/storage.py index 237f4ac27..7ff26adf2 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -631,7 +631,7 @@ def get(self, href): "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(os.path.getmtime(path))) try: - self.logger.debug("Read object: %s", path) + self.logger.debug("Read object ('get'): %s", path) item = vobject.readOne(text) except Exception as e: self.logger.error("Object 
broken (skip 'get'): %s (%s)", path, e) From 3bec989bc6fb1bdbeed2b62d7f3d079681326ebb Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 11 Oct 2016 10:44:13 +0000 Subject: [PATCH 008/101] add Item caching (default: enabled, can be disabled by config option) --- config | 3 + radicale/config.py | 1 + radicale/storage.py | 131 +++++++++++++++++++++++++++++++++++++++----- 3 files changed, 121 insertions(+), 14 deletions(-) diff --git a/config b/config index 1cdfb75ae..4b6c58a50 100644 --- a/config +++ b/config @@ -112,6 +112,9 @@ #hook = # Example: git add -A && (git diff --cached --quiet || git commit -m "Changes by "%(user)s) +# Caching (of Items) in memory +cache = True + [logging] diff --git a/radicale/config.py b/radicale/config.py index 5076ffa3d..20023a68b 100644 --- a/radicale/config.py +++ b/radicale/config.py @@ -59,6 +59,7 @@ "filesystem_folder": os.path.expanduser( "~/.config/radicale/collections"), "fsync": "True", + "cache": "True", "hook": "", "close_lock_file": "False"}, "logging": { diff --git a/radicale/storage.py b/radicale/storage.py index 7ff26adf2..165e6351f 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -35,6 +35,7 @@ import threading import time import datetime +import copy from contextlib import contextmanager from hashlib import md5 from importlib import import_module @@ -210,6 +211,59 @@ def etag(self): return get_etag(self.serialize()) +## Item caching +# cache counter +class Item_cache_counter: + def __init__(self, lookup, hit, miss, dirty): + self.lookup = 0 + self.hit = 0 + self.miss = 0 + self.dirty = 0 + + def string(self): + if (self.lookup > 0): + message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%)" % ( + self.lookup, + self.hit, + self.hit * 100 / self.lookup, + self.miss, + self.miss * 100 / self.lookup, + self.dirty, + self.dirty * 100 / self.lookup + ) + else: + message = "no cache lookups so far" + return(message) + + def string_delta(self, stamp): + delta_lookup = self.lookup - 
stamp.lookup + if (delta_lookup > 0): + message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%)" % ( + delta_lookup, + self.hit - stamp.hit, + (self.hit - stamp.hit) * 100 / delta_lookup, + self.miss - stamp.miss, + (self.miss - stamp.miss) * 100 / delta_lookup, + self.dirty - stamp.dirty, + (self.dirty - stamp.dirty) * 100 / delta_lookup + ) + else: + message = "no cache lookups so far" + return(message) + +# cache entry +class Item_cache_entry: + def __init__(self, Item, last_modified_time, last_used_time): + self.Item = Item + self.last_modified_time = last_modified_time + self.last_used_time = last_used_time + +# cache initialization +Item_cache_data = {} +Item_cache_counter = Item_cache_counter(0, 0, 0, 0) +Item_cache_active = 0 + + class BaseCollection: # Overriden on copy by the "load" function @@ -366,6 +420,7 @@ class Collection(BaseCollection): """Collection stored in several files per calendar.""" def __init__(self, path, principal=False, folder=None): + global Item_cache_active if not folder: folder = self._get_collection_root_folder() # Path should already be sanitized @@ -377,6 +432,10 @@ def __init__(self, path, principal=False, folder=None): split_path = self.path.split("/") self.owner = split_path[0] if len(split_path) > 1 else None self.is_principal = principal + if self.configuration.getboolean("storage", "cache"): + if Item_cache_active == 0: + self.logger.info("Item cache enabled") + Item_cache_active = 1 @classmethod def _get_collection_root_folder(cls): @@ -606,6 +665,11 @@ def move(cls, item, to_collection, to_href): cls._sync_directory(item.collection._filesystem_path) def list(self): + global Item_cache_data + global Item_cache_counter + global Item_cache_active + if Item_cache_active == 1: + Item_cache_counter_stamp = copy.deepcopy(Item_cache_counter) for href in os.listdir(self._filesystem_path): if not is_safe_filesystem_path_component(href): if not href.startswith(".Radicale"): @@ -614,8 +678,14 @@ def list(self): 
path = os.path.join(self._filesystem_path, href) if os.path.isfile(path): yield href + if Item_cache_active == 1: + self.logger.info("Cache current statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) + self.logger.info("Cache global statistics: %s", Item_cache_counter.string()) def get(self, href): + global Item_cache_data + global Item_cache_counter + global Item_cache_active if not href: return None if not is_safe_filesystem_path_component(href): @@ -625,25 +695,51 @@ def get(self, href): path = path_to_filesystem(self._filesystem_path, href) if not os.path.isfile(path): return None - with open(path, encoding=self.encoding, newline="") as f: - text = f.read() + last_modified_time = os.path.getmtime(path) last_modified = time.strftime( "%a, %d %b %Y %H:%M:%S GMT", - time.gmtime(os.path.getmtime(path))) - try: - self.logger.debug("Read object ('get'): %s", path) - item = vobject.readOne(text) - except Exception as e: - self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) - return None; - if self.get_meta("tag") == "VADDRESSBOOK": - # check whether vobject likes the VCARD item + time.gmtime(last_modified_time)) + + Item_cache_hit = 0 + if Item_cache_active == 1: + # Item cache lookup + Item_cache_counter.lookup += 1 + if path in Item_cache_data: + if Item_cache_data[path].last_modified_time == last_modified_time: + Item_cache_counter.hit += 1 + Item_cache_hit = 1 + else: + Item_cache_counter.dirty += 1 + else: + Item_cache_counter.miss += 1 + + if Item_cache_hit == 0 or Item_cache_active == 0: + with open(path, encoding=self.encoding, newline="") as f: + text = f.read() try: - item.serialize() + self.logger.debug("Read object ('get'): %s", path) + item = vobject.readOne(text) except Exception as e: - self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) + self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) return None; - return Item(self, item, href, last_modified) + if self.get_meta("tag") == 
"VADDRESSBOOK": + # check whether vobject likes the VCARD item + try: + item.serialize() + except Exception as e: + self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) + return None; + Item_entry = Item(self, item, href, last_modified) + + if Item_cache_active == 1: + # cache handling + self.logger.debug("Store item in cache: %s", path) + Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time, datetime.datetime.now()) + + return Item_entry + else: + self.logger.debug("Retrieve from cache: %s", path) + return Item_cache_data[path].Item def upload(self, href, vobject_item): if not is_safe_filesystem_path_component(href): @@ -655,6 +751,8 @@ def upload(self, href, vobject_item): return item def delete(self, href=None): + global Item_cache_data + global Item_cache_active if href is None: # Delete the collection parent_dir = os.path.dirname(self._filesystem_path) @@ -677,6 +775,11 @@ def delete(self, href=None): raise ComponentNotFoundError(href) os.remove(path) self._sync_directory(os.path.dirname(path)) + if Item_cache_active == 1: + # remove from cache, if existing + if path in Item_cache_data: + self.logger.debug("Delete from cache: %s", path) + del Item_cache_data[path] def get_meta(self, key): if os.path.exists(self._props_path): From 9f4adb645fb3eba9773b589333fcd4cf69481824 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 13 Oct 2016 05:30:26 +0000 Subject: [PATCH 009/101] cosmetic renaming of cache stats logging --- radicale/storage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 165e6351f..8c73fdce7 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -679,8 +679,8 @@ def list(self): if os.path.isfile(path): yield href if Item_cache_active == 1: - self.logger.info("Cache current statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) - self.logger.info("Cache global statistics: %s", Item_cache_counter.string()) + 
self.logger.info("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) + self.logger.info("Cache overall statistics: %s", Item_cache_counter.string()) def get(self, href): global Item_cache_data From 355be4395215139feb8d090b047615c6d9545a88 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 13 Oct 2016 05:55:20 +0000 Subject: [PATCH 010/101] make cache/collection performance logging optional --- config | 3 +++ radicale/config.py | 1 + radicale/storage.py | 26 ++++++++++++++++++++------ 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/config b/config index 4b6c58a50..bd4561256 100644 --- a/config +++ b/config @@ -131,6 +131,9 @@ cache = True # Don't include passwords in logs #mask_passwords = True +# Include cache/collections performance statistics in logs on info level +performance = True + [headers] diff --git a/radicale/config.py b/radicale/config.py index 20023a68b..d2b2c9a28 100644 --- a/radicale/config.py +++ b/radicale/config.py @@ -66,6 +66,7 @@ "config": "/etc/radicale/logging", "debug": "False", "full_environment": "False", + "performance": "False", "mask_passwords": "True"}} diff --git a/radicale/storage.py b/radicale/storage.py index 8c73fdce7..c8e75fe53 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -212,6 +212,7 @@ def etag(self): ## Item caching +# TODO: catch all potential race conditions during add/delete # cache counter class Item_cache_counter: def __init__(self, lookup, hit, miss, dirty): @@ -248,7 +249,7 @@ def string_delta(self, stamp): (self.dirty - stamp.dirty) * 100 / delta_lookup ) else: - message = "no cache lookups so far" + message = "no cache lookup so far" return(message) # cache entry @@ -256,7 +257,7 @@ class Item_cache_entry: def __init__(self, Item, last_modified_time, last_used_time): self.Item = Item self.last_modified_time = last_modified_time - self.last_used_time = last_used_time + self.last_used_time = last_used_time # TODO: implement cleanup job of old 
entries # cache initialization Item_cache_data = {} @@ -432,9 +433,15 @@ def __init__(self, path, principal=False, folder=None): split_path = self.path.split("/") self.owner = split_path[0] if len(split_path) > 1 else None self.is_principal = principal + self.logging_performance = 0 + if self.configuration.getboolean("logging", "performance"): + self.logging_performance = 1 if self.configuration.getboolean("storage", "cache"): if Item_cache_active == 0: - self.logger.info("Item cache enabled") + if self.logging_performance == 1: + self.logger.info("Item cache enabled (performance log on info level)") + else: + self.logger.info("Item cache enabled (cache performance log only on debug level)") Item_cache_active = 1 @classmethod @@ -679,8 +686,12 @@ def list(self): if os.path.isfile(path): yield href if Item_cache_active == 1: - self.logger.info("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) - self.logger.info("Cache overall statistics: %s", Item_cache_counter.string()) + if self.logging_performance == 1: + self.logger.info("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) + self.logger.info("Cache overall statistics: %s", Item_cache_counter.string()) + else: + self.logger.debug("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) + self.logger.debug("Cache overall statistics: %s", Item_cache_counter.string()) def get(self, href): global Item_cache_data @@ -813,7 +824,10 @@ def serialize(self): if hasattr(self.get(href),'item'): items.append(self.get(href).item) time_end = datetime.datetime.now() - self.logger.info("Collection read %d items in %s sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) + if self.logging_performance == 1: + self.logger.info("Collection read %d items in %s sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) + else: + self.logger.debug("Collection 
read %d items in %s sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) if self.get_meta("tag") == "VCALENDAR": collection = vobject.iCalendar() for item in items: From 97dca7a8264ab0c176ff082ae2c6b4d7ee3a6b09 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 13 Oct 2016 20:07:53 +0000 Subject: [PATCH 011/101] adjust some startup log levels --- radicale/__main__.py | 6 +++--- radicale/auth.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/radicale/__main__.py b/radicale/__main__.py index 8a50075d1..2d6084ef5 100644 --- a/radicale/__main__.py +++ b/radicale/__main__.py @@ -153,7 +153,7 @@ def cleanup(): atexit.register(cleanup) logger.info("Starting Radicale") - logger.debug( + logger.info( "Base URL prefix: %s", configuration.get("server", "base_prefix")) # Create collection servers @@ -192,7 +192,7 @@ def cleanup(): server = make_server( address, port, application, server_class, RequestHandler) servers[server.socket] = server - logger.debug("Listening to %s port %s", + logger.info("Listening to %s port %s", server.server_name, server.server_port) if configuration.getboolean("server", "ssl"): logger.debug("Using SSL") @@ -228,7 +228,7 @@ def shutdown(*args): else: # Fallback to busy waiting select_timeout = 1.0 - logger.debug("Radicale server ready") + logger.info("Radicale server ready") while not shutdown_program: try: rlist, _, xlist = select.select( diff --git a/radicale/auth.py b/radicale/auth.py index 2fa2807ee..30422ec96 100644 --- a/radicale/auth.py +++ b/radicale/auth.py @@ -63,7 +63,7 @@ def load(configuration, logger): """Load the authentication manager chosen in configuration.""" auth_type = configuration.get("auth", "type") - logger.debug("Authentication type is %s", auth_type) + logger.info("Authentication type is %s", auth_type) if auth_type == "None": class_ = NoneAuth elif auth_type == "htpasswd": From 1972a29616a89b25d0fba6783d326bc7a9bae9bf Mon Sep 17 00:00:00 2001 From: Peter 
Bieringer Date: Thu, 13 Oct 2016 20:10:24 +0000 Subject: [PATCH 012/101] change shutdown loglevel --- radicale/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/radicale/__main__.py b/radicale/__main__.py index 2d6084ef5..dbbcbf88f 100644 --- a/radicale/__main__.py +++ b/radicale/__main__.py @@ -144,7 +144,7 @@ def serve(configuration, logger): # Register exit function def cleanup(): """Remove the PID files.""" - logger.debug("Cleaning up") + logger.info("Cleaning up") # Remove PID file if (configuration.get("server", "pid") and configuration.getboolean("server", "daemon")): From 0325f71a3eca18409e353e09c2c1d8d57d93463c Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Fri, 14 Oct 2016 05:46:11 +0000 Subject: [PATCH 013/101] log cache statistics on exit --- radicale/__main__.py | 5 +++-- radicale/storage.py | 6 ++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/radicale/__main__.py b/radicale/__main__.py index dbbcbf88f..dc3a3684a 100644 --- a/radicale/__main__.py +++ b/radicale/__main__.py @@ -34,7 +34,7 @@ from . 
import ( VERSION, Application, RequestHandler, ThreadedHTTPServer, - ThreadedHTTPSServer, config, log) + ThreadedHTTPSServer, config, log, storage) def run(): @@ -144,13 +144,14 @@ def serve(configuration, logger): # Register exit function def cleanup(): """Remove the PID files.""" - logger.info("Cleaning up") + logger.info("Cleaning up 'main'") # Remove PID file if (configuration.get("server", "pid") and configuration.getboolean("server", "daemon")): os.unlink(configuration.get("server", "pid")) atexit.register(cleanup) + atexit.register(storage.cleanup, logger) logger.info("Starting Radicale") logger.info( diff --git a/radicale/storage.py b/radicale/storage.py index c8e75fe53..ba255bc4c 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -101,6 +101,12 @@ class CollectionCopy(collection_class): return CollectionCopy +def cleanup(logger): + """Print cache statistics.""" + logger.info("Cleaning up 'storage'") + logger.info("Cache overall statistics: %s", Item_cache_counter.string()) + + def get_etag(text): """Etag from collection or item. From c088393b99d5383b2af3603df6ba33be3f17529a Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Fri, 14 Oct 2016 19:02:49 +0000 Subject: [PATCH 014/101] add option for regular cache statistics log --- config | 5 +++++ radicale/config.py | 1 + radicale/storage.py | 30 +++++++++++++++++++++++------- 3 files changed, 29 insertions(+), 7 deletions(-) diff --git a/config b/config index bd4561256..729c8ecd1 100644 --- a/config +++ b/config @@ -134,6 +134,11 @@ cache = True # Include cache/collections performance statistics in logs on info level performance = True +# Include regular cache performance statistics in logs on info level +# 0: on each request (incl. 
current request) +# >0: minimum interval in seconds (only global) +cache_statistics_interval = 60 + [headers] diff --git a/radicale/config.py b/radicale/config.py index d2b2c9a28..a457c9d17 100644 --- a/radicale/config.py +++ b/radicale/config.py @@ -67,6 +67,7 @@ "debug": "False", "full_environment": "False", "performance": "False", + "cache_statistics_interval": "60", "mask_passwords": "True"}} diff --git a/radicale/storage.py b/radicale/storage.py index ba255bc4c..f6c283e97 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -217,9 +217,9 @@ def etag(self): return get_etag(self.serialize()) -## Item caching +### Item caching # TODO: catch all potential race conditions during add/delete -# cache counter +## cache counter class Item_cache_counter: def __init__(self, lookup, hit, miss, dirty): self.lookup = 0 @@ -258,18 +258,25 @@ def string_delta(self, stamp): message = "no cache lookup so far" return(message) -# cache entry +## cache entry class Item_cache_entry: def __init__(self, Item, last_modified_time, last_used_time): self.Item = Item self.last_modified_time = last_modified_time self.last_used_time = last_used_time # TODO: implement cleanup job of old entries -# cache initialization +## cache initialization Item_cache_data = {} Item_cache_counter = Item_cache_counter(0, 0, 0, 0) Item_cache_active = 0 +## cache regular statistics logging on info level +# 0: on each request (incl. current request) +# >0: after at least every given seconds (exc. 
current request) +Item_cache_statistics_log_interval = datetime.timedelta(seconds=60) +# init timestamp +Item_cache_statistics_log_last_time = datetime.datetime.now() + class BaseCollection: @@ -428,6 +435,7 @@ class Collection(BaseCollection): def __init__(self, path, principal=False, folder=None): global Item_cache_active + global Item_cache_statistics_log_interval if not folder: folder = self._get_collection_root_folder() # Path should already be sanitized @@ -442,12 +450,16 @@ def __init__(self, path, principal=False, folder=None): self.logging_performance = 0 if self.configuration.getboolean("logging", "performance"): self.logging_performance = 1 + Item_cache_statistics_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) if self.configuration.getboolean("storage", "cache"): if Item_cache_active == 0: if self.logging_performance == 1: self.logger.info("Item cache enabled (performance log on info level)") else: - self.logger.info("Item cache enabled (cache performance log only on debug level)") + if (Item_cache_statistics_log_interval.total_seconds() > 0): + self.logger.info("Item cache enabled (regular statistics log on info level with minimum interval %d sec)", Item_cache_statistics_log_interval.total_seconds()) + else: + self.logger.info("Item cache enabled (statistics log only on debug level)") Item_cache_active = 1 @classmethod @@ -681,6 +693,8 @@ def list(self): global Item_cache_data global Item_cache_counter global Item_cache_active + global Item_cache_statistics_log_interval + global Item_cache_statistics_log_last_time if Item_cache_active == 1: Item_cache_counter_stamp = copy.deepcopy(Item_cache_counter) for href in os.listdir(self._filesystem_path): @@ -692,9 +706,11 @@ def list(self): if os.path.isfile(path): yield href if Item_cache_active == 1: - if self.logging_performance == 1: - self.logger.info("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) + if 
(self.logging_performance == 1) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): + if (self.logging_performance == 1) or (Item_cache_statistics_log_interval == 0): + self.logger.info("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) self.logger.info("Cache overall statistics: %s", Item_cache_counter.string()) + Item_cache_statistics_log_last_time = datetime.datetime.now() else: self.logger.debug("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) self.logger.debug("Cache overall statistics: %s", Item_cache_counter.string()) From ec82abf9f764a0b61cc086d0faf4fd03d42fa3fa Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Fri, 14 Oct 2016 19:25:19 +0000 Subject: [PATCH 015/101] move cache statistics logging into class function --- radicale/storage.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index f6c283e97..65fbc5081 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -258,6 +258,18 @@ def string_delta(self, stamp): message = "no cache lookup so far" return(message) + def log(self, logger, stamp, logging_performance): + global Item_cache_statistics_log_interval + global Item_cache_statistics_log_last_time + if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): + if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0): + logger.info("Cache request statistics: %s", self.string_delta(stamp)) + logger.info("Cache overall statistics: %s", self.string()) + Item_cache_statistics_log_last_time = datetime.datetime.now() + else: + logger.debug("Cache request statistics: %s", self.string_delta(stamp)) + logger.debug("Cache overall statistics: %s", 
self.string()) + ## cache entry class Item_cache_entry: def __init__(self, Item, last_modified_time, last_used_time): @@ -693,8 +705,6 @@ def list(self): global Item_cache_data global Item_cache_counter global Item_cache_active - global Item_cache_statistics_log_interval - global Item_cache_statistics_log_last_time if Item_cache_active == 1: Item_cache_counter_stamp = copy.deepcopy(Item_cache_counter) for href in os.listdir(self._filesystem_path): @@ -706,14 +716,7 @@ def list(self): if os.path.isfile(path): yield href if Item_cache_active == 1: - if (self.logging_performance == 1) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): - if (self.logging_performance == 1) or (Item_cache_statistics_log_interval == 0): - self.logger.info("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) - self.logger.info("Cache overall statistics: %s", Item_cache_counter.string()) - Item_cache_statistics_log_last_time = datetime.datetime.now() - else: - self.logger.debug("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) - self.logger.debug("Cache overall statistics: %s", Item_cache_counter.string()) + Item_cache_counter.log(self.logger, Item_cache_counter_stamp, self.logging_performance) def get(self, href): global Item_cache_data From ecb63192dab5e069d487f056729d9bad29792d45 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 05:30:56 +0000 Subject: [PATCH 016/101] log cache request statisics unconditionally on debug level --- radicale/storage.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/radicale/storage.py b/radicale/storage.py index 65fbc5081..495af404f 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -264,6 +264,8 @@ def log(self, logger, stamp, logging_performance): if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0) or 
(datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0): logger.info("Cache request statistics: %s", self.string_delta(stamp)) + else: + logger.debug("Cache request statistics: %s", self.string_delta(stamp)) logger.info("Cache overall statistics: %s", self.string()) Item_cache_statistics_log_last_time = datetime.datetime.now() else: From 3736a434607523572ad8c8d4ff9b7b23183c2df8 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 05:32:15 +0000 Subject: [PATCH 017/101] add optional dedicated debug log with instructions --- logging | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/logging b/logging index 250359c46..80fcd3fbf 100644 --- a/logging +++ b/logging @@ -22,7 +22,7 @@ keys = console,file [formatters] # Logging formatters -keys = simple,full +keys = simple,full,filedebug # Loggers @@ -31,6 +31,8 @@ keys = simple,full # Root logger level = DEBUG handlers = console,file +# Disable line above and enable following line for dedicated additional debug log +#handlers = console,file,filedebug # Handlers @@ -45,9 +47,17 @@ formatter = simple [handler_file] # File handler class = FileHandler +# Enable following line in case dedicated debug log is activated +#level = INFO args = ('/var/log/radicale',) formatter = full +[handler_filedebug] +# File handler (for dedicated debug log) +class = FileHandler +args = ('/var/log/radicale/radicale-debug.log',) +formatter = full + # Formatters From 72afdc3755e70d61b0169b791b64ddc09f1327b5 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 05:33:43 +0000 Subject: [PATCH 018/101] change log level for SSL info and extend startup log message in case 'debug' is enabled --- radicale/__main__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/radicale/__main__.py b/radicale/__main__.py index dc3a3684a..2999fecdb 100644 --- 
a/radicale/__main__.py +++ b/radicale/__main__.py @@ -196,7 +196,7 @@ def cleanup(): logger.info("Listening to %s port %s", server.server_name, server.server_port) if configuration.getboolean("server", "ssl"): - logger.debug("Using SSL") + logger.info("Using SSL") # Create a socket pair to notify the select syscall of program shutdown # This is not available in python < 3.5 on Windows @@ -229,7 +229,10 @@ def shutdown(*args): else: # Fallback to busy waiting select_timeout = 1.0 - logger.info("Radicale server ready") + if configuration.getboolean("logging", "debug"): + logger.info("Radicale server ready (with 'debug' enabled)") + else: + logger.info("Radicale server ready") while not shutdown_program: try: rlist, _, xlist = select.select( From 5c358c1bd084c34d76cdba57c1836c32c77061c0 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 05:35:52 +0000 Subject: [PATCH 019/101] enable request/response logging only if 'debug' was explicitly selected by config/command line option --- radicale/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index bd2dcf450..4355afc9e 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -196,6 +196,7 @@ def __init__(self, configuration, logger): self.Collection = storage.load(configuration, logger) self.authorized = rights.load(configuration, logger) self.encoding = configuration.get("encoding", "request") + self.debug = configuration.getboolean("logging", "debug") def headers_log(self, environ): """Sanitize headers for logging.""" @@ -278,7 +279,8 @@ def response(status, headers=(), answer=None): headers = dict(headers) # Set content length if answer: - self.logger.debug("Response content:\n%s", answer) + if self.debug: + self.logger.debug("Response content:\n%s", answer) answer = answer.encode(self.encoding) accept_encoding = [ encoding.strip() for encoding in @@ -447,7 +449,8 @@ def _read_content(self, environ): if 
content_length > 0: content = self.decode( environ["wsgi.input"].read(content_length), environ) - self.logger.debug("Request content:\n%s", content.strip()) + if self.debug: + self.logger.debug("Request content:\n%s", content.strip()) else: content = None return content From 6face6c8a799b8aaa582030c8a123f31f1a13fc4 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 05:38:11 +0000 Subject: [PATCH 020/101] enable request header logging only if 'debug' was explicitly selected by config/command line option --- radicale/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 4355afc9e..32648dfed 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -336,7 +336,8 @@ def response(status, headers=(), answer=None): self.logger.info("%s request for %s received from %s using \"%s\"", environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) headers = pprint.pformat(self.headers_log(environ)) - self.logger.debug("Request headers:\n%s", headers) + if self.debug: + self.logger.debug("Request headers:\n%s", headers) # Strip base_prefix from request URI base_prefix = self.configuration.get("server", "base_prefix") From 6852db8f9dcf69e2d7de5cdf867c635bfdd0171e Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 10:29:04 +0000 Subject: [PATCH 021/101] add examples for optional usage of TimedRotatingFileHandler --- logging | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/logging b/logging index 80fcd3fbf..ad831fd13 100644 --- a/logging +++ b/logging @@ -45,18 +45,27 @@ args = (sys.stderr,) formatter = simple [handler_file] +formatter = full +# Enable following line in case dedicated debug log (see below) is activated +#level = INFO # File handler class = FileHandler -# Enable following line in case dedicated debug log is activated -#level = INFO args = ('/var/log/radicale',) -formatter = full +# 
Time Rotating File Handler (class+args: disable above and enable below) +# (filename, when, interval, backupCount, ...) [see https://docs.python.org/3/library/logging.handlers.html#logging.handlers.TimedRotatingFileHandler] +# example: rollover at midnight every day, keep 90 files (means 90 days) +#class = handlers.TimedRotatingFileHandler +#args = ('/var/log/radicale/radicale.log', 'midnight', 1, 90) [handler_filedebug] +formatter = full # File handler (for dedicated debug log) class = FileHandler args = ('/var/log/radicale/radicale-debug.log',) -formatter = full +# Time Rotating File Handler (class+args: disable above and enable below) +# example: rollover every hour, keep 24 files (means 1 days) +#class = handlers.TimedRotatingFileHandler +#args = ('/var/log/radicale/radicale-debug.log', 'H', 1, 24) # Formatters From 61aed6e68fbc5636e1e679b0afd7a3c55b8c41c3 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 10:30:25 +0000 Subject: [PATCH 022/101] change default of cache_statistics_interval to 300 sec --- config | 2 +- radicale/config.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/config b/config index 729c8ecd1..769c81317 100644 --- a/config +++ b/config @@ -137,7 +137,7 @@ performance = True # Include regular cache performance statistics in logs on info level # 0: on each request (incl. 
current request) # >0: minimum interval in seconds (only global) -cache_statistics_interval = 60 +cache_statistics_interval = 300 [headers] diff --git a/radicale/config.py b/radicale/config.py index a457c9d17..6da3449f1 100644 --- a/radicale/config.py +++ b/radicale/config.py @@ -67,7 +67,7 @@ "debug": "False", "full_environment": "False", "performance": "False", - "cache_statistics_interval": "60", + "cache_statistics_interval": "300", "mask_passwords": "True"}} From fba056d4c1453ba3bfdc6d9b8147d5c247653945 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 16 Oct 2016 17:06:09 +0000 Subject: [PATCH 023/101] unconditionally execute a serialize check for items read --- radicale/storage.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 495af404f..be9f73a5e 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -760,13 +760,12 @@ def get(self, href): except Exception as e: self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) return None; - if self.get_meta("tag") == "VADDRESSBOOK": - # check whether vobject likes the VCARD item - try: - item.serialize() - except Exception as e: - self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) - return None; + # check whether vobject likes the VCARD item + try: + item.serialize() + except Exception as e: + self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) + return None; Item_entry = Item(self, item, href, last_modified) if Item_cache_active == 1: From 41acb74b0168a1068a656a84614b7118301db861 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 16 Oct 2016 20:05:07 +0000 Subject: [PATCH 024/101] vobject parser to detect/analyze broken vcard/vcalender files --- radicale-storage-check.py | 58 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100755 radicale-storage-check.py diff --git a/radicale-storage-check.py b/radicale-storage-check.py new 
file mode 100755 index 000000000..5a68d1d8a --- /dev/null +++ b/radicale-storage-check.py @@ -0,0 +1,58 @@ +#!/usr/bin/python3.4 -s +# +# This file is a tool for the Radicale Server - Calendar Server +# Copyright © 2014 Jean-Marc Martins +# Copyright © 2012-2016 Guillaume Ayoub +# +# This library is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Radicale. If not, see . + +import sys +import traceback +import vobject + +## Main +if len(sys.argv) < 2: + print ('Missing file names as arguments') + +i = 1 +while i < len(sys.argv): + path = sys.argv[i] + print ('Analyze file:', path) + i += 1 + with open(path, encoding="utf-8", newline="") as f: + print ('Read file:', path) + text = f.read() + print ("Parse file contents:", path) + try: + item = vobject.readOne(text) + print ("Parse file contents successful:", path) + except Exception as e: + print ("Parse file contents ERROR", path) + print ("Unexpected error:", e) + traceback.print_exc(file=sys.stdout) + exit(1); + + # check whether vobject likes the VCARD item + try: + print ("Serialize file contents:", path) + item_ser = item.serialize() + print ("Serialize file contents successful:", path) + print ("=== CONTENTS BEGIN ===") + print (str(item_ser)) + print ("=== CONTENTS END ===") + except Exception as e: + print ("Serialize file contents ERROR", path) + print ("Unexpected error:", e) + traceback.print_exc(file=sys.stdout) + exit(1); From 7eac250eb0cdb532dd7e3bb77442cc92a7b0fde8 Mon Sep 17 00:00:00 2001 
From: Peter Bieringer Date: Mon, 17 Oct 2016 20:02:21 +0000 Subject: [PATCH 025/101] remove dirty entries from cache implement counters for cached entries and memory add option for exception logging minor internal fixes --- config | 7 ++++-- radicale/config.py | 1 + radicale/storage.py | 58 ++++++++++++++++++++++++++++++--------------- 3 files changed, 45 insertions(+), 21 deletions(-) diff --git a/config b/config index 769c81317..bbfc81f58 100644 --- a/config +++ b/config @@ -123,7 +123,7 @@ cache = True # For more information about the syntax of the configuration file, see: # http://docs.python.org/library/logging.config.html #config = /etc/radicale/logging -# Set the default logging level to debug +# Set the default logging level to debug (this enables also request/response logging) #debug = False # Store all environment variables (including those set in the shell) #full_environment = False @@ -132,7 +132,10 @@ cache = True #mask_passwords = True # Include cache/collections performance statistics in logs on info level -performance = True +performance = False + +# Include exceptions in logs +exceptions = False # Include regular cache performance statistics in logs on info level # 0: on each request (incl. 
current request) diff --git a/radicale/config.py b/radicale/config.py index 6da3449f1..d87dc9e23 100644 --- a/radicale/config.py +++ b/radicale/config.py @@ -67,6 +67,7 @@ "debug": "False", "full_environment": "False", "performance": "False", + "exceptions": "False", "cache_statistics_interval": "300", "mask_passwords": "True"}} diff --git a/radicale/storage.py b/radicale/storage.py index be9f73a5e..334314812 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -221,25 +221,30 @@ def etag(self): # TODO: catch all potential race conditions during add/delete ## cache counter class Item_cache_counter: - def __init__(self, lookup, hit, miss, dirty): + def __init__(self): self.lookup = 0 self.hit = 0 self.miss = 0 self.dirty = 0 + self.entries= 0 + self.size = 0 def string(self): - if (self.lookup > 0): - message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%)" % ( + global Item_cache_data + if (self.entries > 0): + message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%) entries=%d memoryKiB=%.3f" % ( self.lookup, self.hit, self.hit * 100 / self.lookup, self.miss, self.miss * 100 / self.lookup, self.dirty, - self.dirty * 100 / self.lookup + self.dirty * 100 / self.lookup, + self.entries, + self.size / 1024 ) else: - message = "no cache lookups so far" + message = "no cache entries" return(message) def string_delta(self, stamp): @@ -261,8 +266,8 @@ def string_delta(self, stamp): def log(self, logger, stamp, logging_performance): global Item_cache_statistics_log_interval global Item_cache_statistics_log_last_time - if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): - if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0): + if (logging_performance) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > 
Item_cache_statistics_log_interval): + if (logging_performance) or (Item_cache_statistics_log_interval == 0): logger.info("Cache request statistics: %s", self.string_delta(stamp)) else: logger.debug("Cache request statistics: %s", self.string_delta(stamp)) @@ -281,7 +286,7 @@ def __init__(self, Item, last_modified_time, last_used_time): ## cache initialization Item_cache_data = {} -Item_cache_counter = Item_cache_counter(0, 0, 0, 0) +Item_cache_counter = Item_cache_counter() Item_cache_active = 0 ## cache regular statistics logging on info level @@ -461,13 +466,16 @@ def __init__(self, path, principal=False, folder=None): split_path = self.path.split("/") self.owner = split_path[0] if len(split_path) > 1 else None self.is_principal = principal - self.logging_performance = 0 + self.logging_performance = False + self.logging_exceptions = False if self.configuration.getboolean("logging", "performance"): - self.logging_performance = 1 + self.logging_performance = True + if self.configuration.getboolean("logging", "exceptions"): + self.logging_exceptions = True Item_cache_statistics_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) if self.configuration.getboolean("storage", "cache"): if Item_cache_active == 0: - if self.logging_performance == 1: + if self.logging_performance: self.logger.info("Item cache enabled (performance log on info level)") else: if (Item_cache_statistics_log_interval.total_seconds() > 0): @@ -748,6 +756,11 @@ def get(self, href): Item_cache_hit = 1 else: Item_cache_counter.dirty += 1 + # remove from cache + self.logger.debug("Delete from cache (dirty): %s", path) + Item_cache_counter.entries -= 1 + Item_cache_counter.size -= len(str(Item_cache_data[path].Item.item)) + del Item_cache_data[path] else: Item_cache_counter.miss += 1 @@ -758,13 +771,16 @@ def get(self, href): self.logger.debug("Read object ('get'): %s", path) item = vobject.readOne(text) except Exception as e: - 
self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) + self.logger.error("Object broken on read (skip 'get'): %s (%s)", path, e) + if self.logging_exceptions: + self.logger.exception("Exception details:") return None; - # check whether vobject likes the VCARD item try: item.serialize() except Exception as e: - self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) + self.logger.error("Object broken on serialize (skip 'get'): %s (%s)", path, e) + if self.logging_exceptions: + self.logger.exception("Exception details:") return None; Item_entry = Item(self, item, href, last_modified) @@ -772,6 +788,8 @@ def get(self, href): # cache handling self.logger.debug("Store item in cache: %s", path) Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time, datetime.datetime.now()) + Item_cache_counter.size += len(str(Item_entry.item)) + Item_cache_counter.entries += 1 return Item_entry else: @@ -813,10 +831,12 @@ def delete(self, href=None): os.remove(path) self._sync_directory(os.path.dirname(path)) if Item_cache_active == 1: - # remove from cache, if existing - if path in Item_cache_data: - self.logger.debug("Delete from cache: %s", path) - del Item_cache_data[path] + # remove from cache, if existing + if path in Item_cache_data: + self.logger.debug("Delete from cache (delete): %s", path) + Item_cache_counter.entries -= 1 + Item_cache_counter.size -= len(str(Item_cache_data[path].Item.item)) + del Item_cache_data[path] def get_meta(self, key): if os.path.exists(self._props_path): @@ -850,7 +870,7 @@ def serialize(self): if hasattr(self.get(href),'item'): items.append(self.get(href).item) time_end = datetime.datetime.now() - if self.logging_performance == 1: + if self.logging_performance: self.logger.info("Collection read %d items in %s sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) else: self.logger.debug("Collection read %d items in %s sec from %s", len(items),(time_end - 
time_begin).total_seconds(), self._filesystem_path) From 3a39fc1bc67c6b7b0951e918f153254abd5cd704 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Mon, 17 Oct 2016 20:48:17 +0000 Subject: [PATCH 026/101] delete timestamp object after usage moved next internal variable to boolean --- radicale/storage.py | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 334314812..c828377b2 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -268,14 +268,14 @@ def log(self, logger, stamp, logging_performance): global Item_cache_statistics_log_last_time if (logging_performance) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): if (logging_performance) or (Item_cache_statistics_log_interval == 0): - logger.info("Cache request statistics: %s", self.string_delta(stamp)) + logger.info("Item cache request statistics: %s", self.string_delta(stamp)) else: - logger.debug("Cache request statistics: %s", self.string_delta(stamp)) - logger.info("Cache overall statistics: %s", self.string()) + logger.debug("Item cache request statistics: %s", self.string_delta(stamp)) + logger.info("Item cache overall statistics: %s", self.string()) Item_cache_statistics_log_last_time = datetime.datetime.now() else: - logger.debug("Cache request statistics: %s", self.string_delta(stamp)) - logger.debug("Cache overall statistics: %s", self.string()) + logger.debug("Item cache request statistics: %s", self.string_delta(stamp)) + logger.debug("Item cache overall statistics: %s", self.string()) ## cache entry class Item_cache_entry: @@ -287,7 +287,7 @@ def __init__(self, Item, last_modified_time, last_used_time): ## cache initialization Item_cache_data = {} Item_cache_counter = Item_cache_counter() -Item_cache_active = 0 +Item_cache_active = False ## cache regular statistics logging on info level # 0: on each 
request (incl. current request) @@ -474,7 +474,7 @@ def __init__(self, path, principal=False, folder=None): self.logging_exceptions = True Item_cache_statistics_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) if self.configuration.getboolean("storage", "cache"): - if Item_cache_active == 0: + if not Item_cache_active: if self.logging_performance: self.logger.info("Item cache enabled (performance log on info level)") else: @@ -482,7 +482,7 @@ def __init__(self, path, principal=False, folder=None): self.logger.info("Item cache enabled (regular statistics log on info level with minimum interval %d sec)", Item_cache_statistics_log_interval.total_seconds()) else: self.logger.info("Item cache enabled (statistics log only on debug level)") - Item_cache_active = 1 + Item_cache_active = True @classmethod def _get_collection_root_folder(cls): @@ -715,7 +715,7 @@ def list(self): global Item_cache_data global Item_cache_counter global Item_cache_active - if Item_cache_active == 1: + if Item_cache_active: Item_cache_counter_stamp = copy.deepcopy(Item_cache_counter) for href in os.listdir(self._filesystem_path): if not is_safe_filesystem_path_component(href): @@ -725,8 +725,9 @@ def list(self): path = os.path.join(self._filesystem_path, href) if os.path.isfile(path): yield href - if Item_cache_active == 1: + if Item_cache_active: Item_cache_counter.log(self.logger, Item_cache_counter_stamp, self.logging_performance) + del Item_cache_counter_stamp def get(self, href): global Item_cache_data @@ -747,7 +748,7 @@ def get(self, href): time.gmtime(last_modified_time)) Item_cache_hit = 0 - if Item_cache_active == 1: + if Item_cache_active: # Item cache lookup Item_cache_counter.lookup += 1 if path in Item_cache_data: @@ -764,7 +765,7 @@ def get(self, href): else: Item_cache_counter.miss += 1 - if Item_cache_hit == 0 or Item_cache_active == 0: + if Item_cache_hit == 0 or Item_cache_active == False: with open(path, 
encoding=self.encoding, newline="") as f: text = f.read() try: @@ -784,7 +785,7 @@ def get(self, href): return None; Item_entry = Item(self, item, href, last_modified) - if Item_cache_active == 1: + if Item_cache_active: # cache handling self.logger.debug("Store item in cache: %s", path) Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time, datetime.datetime.now()) @@ -830,7 +831,7 @@ def delete(self, href=None): raise ComponentNotFoundError(href) os.remove(path) self._sync_directory(os.path.dirname(path)) - if Item_cache_active == 1: + if Item_cache_active: # remove from cache, if existing if path in Item_cache_data: self.logger.debug("Delete from cache (delete): %s", path) From 6d5f6dd900d23e20f8b14c874e76b48c731b663e Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 18 Oct 2016 20:37:14 +0000 Subject: [PATCH 027/101] add props cache, add selective debug option, some changes in caching code --- radicale/__init__.py | 3 + radicale/config.py | 1 + radicale/storage.py | 184 ++++++++++++++++++++++++++++++++++--------- 3 files changed, 150 insertions(+), 38 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 32648dfed..5314b7b91 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -502,6 +502,7 @@ def do_GET(self, environ, path, user): "Last-Modified": collection.last_modified, "ETag": item.etag} answer = item.serialize() + storage.log_cache_statistics(self) return client.OK, headers, answer def do_HEAD(self, environ, path, user): @@ -603,6 +604,7 @@ def do_PROPFIND(self, environ, path, user): headers = {"DAV": DAV_HEADERS, "Content-Type": "text/xml"} status, answer = xmlutils.propfind( path, content, read_items, write_items, user) + storage.log_cache_statistics(self) if status == client.FORBIDDEN: return NOT_ALLOWED else: @@ -690,4 +692,5 @@ def do_REPORT(self, environ, path, user): collection = item.collection headers = {"Content-Type": "text/xml"} answer = xmlutils.report(path, content, collection) + 
storage.log_cache_statistics(self) return client.MULTI_STATUS, headers, answer diff --git a/radicale/config.py b/radicale/config.py index d87dc9e23..28e52d803 100644 --- a/radicale/config.py +++ b/radicale/config.py @@ -65,6 +65,7 @@ "logging": { "config": "/etc/radicale/logging", "debug": "False", + "debug_filter": "0xffff", "full_environment": "False", "performance": "False", "exceptions": "False", diff --git a/radicale/storage.py b/radicale/storage.py index c828377b2..85e7c4419 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -35,7 +35,6 @@ import threading import time import datetime -import copy from contextlib import contextmanager from hashlib import md5 from importlib import import_module @@ -104,7 +103,10 @@ class CollectionCopy(collection_class): def cleanup(logger): """Print cache statistics.""" logger.info("Cleaning up 'storage'") - logger.info("Cache overall statistics: %s", Item_cache_counter.string()) + if Item_cache_active: + logger.info("Items cache overall statistics: %s", Item_cache_counter.string()) + if Props_cache_active: + logger.info("Props cache overall statistics: %s", Props_cache_counter.string()) def get_etag(text): @@ -183,6 +185,17 @@ def path_to_filesystem(root, *paths): return safe_path +def log_cache_statistics(self): + global Item_cache_active + global Item_cache_counter + global Props_cache_active + global Props_cache_counter + if Item_cache_active: + Item_cache_counter.log("Items", self.logger) + if Props_cache_active: + Props_cache_counter.log("Props", self.logger) + + class UnsafePathError(ValueError): def __init__(self, path): message = "Can't translate name safely to filesystem: %s" % path @@ -219,8 +232,8 @@ def etag(self): ### Item caching # TODO: catch all potential race conditions during add/delete -## cache counter -class Item_cache_counter: +## cache counter statistics +class Cache_counter: def __init__(self): self.lookup = 0 self.hit = 0 @@ -228,6 +241,18 @@ def __init__(self): self.dirty = 0 self.entries= 
0 self.size = 0 + self.perflog= False + self.lastlog= datetime.datetime.now() + + def copy(self, counter): + self.lookup = counter.lookup + self.hit = counter.hit + self.miss = counter.miss + self.dirty = counter.dirty + self.entries= counter.entries + self.size = counter.size + self.perflog= counter.perflog + self.lastlog= counter.lastlog def string(self): global Item_cache_data @@ -263,38 +288,47 @@ def string_delta(self, stamp): message = "no cache lookup so far" return(message) - def log(self, logger, stamp, logging_performance): - global Item_cache_statistics_log_interval - global Item_cache_statistics_log_last_time - if (logging_performance) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): - if (logging_performance) or (Item_cache_statistics_log_interval == 0): - logger.info("Item cache request statistics: %s", self.string_delta(stamp)) - else: - logger.debug("Item cache request statistics: %s", self.string_delta(stamp)) - logger.info("Item cache overall statistics: %s", self.string()) - Item_cache_statistics_log_last_time = datetime.datetime.now() + def log_request(self, token, logger, stamp, logging_performance): + if (logging_performance) or (Cache_counter_log_interval == 0): + logger.info("%s cache request statistics: %s", token, self.string_delta(stamp)) + else: + logger.debug("%s cache request statistics: %s", token, self.string_delta(stamp)) + + def log(self, token, logger): + global Cache_counter_log_interval + if (self.perflog) or (Cache_counter_log_interval == 0) or (datetime.datetime.now() - self.lastlog > Cache_counter_log_interval): + logger.info("%s cache overall statistics: %s", token, self.string()) + self.lastlog = datetime.datetime.now() else: - logger.debug("Item cache request statistics: %s", self.string_delta(stamp)) - logger.debug("Item cache overall statistics: %s", self.string()) + logger.debug("%s cache overall statistics: %s", token, 
self.string()) + ## cache entry class Item_cache_entry: - def __init__(self, Item, last_modified_time, last_used_time): + def __init__(self, Item, last_modified_time): self.Item = Item self.last_modified_time = last_modified_time - self.last_used_time = last_used_time # TODO: implement cleanup job of old entries + +## cache entry +class Props_cache_entry: + def __init__(self, props_contents, last_modified_time): + self.props_contents = props_contents + self.last_modified_time = last_modified_time ## cache initialization Item_cache_data = {} -Item_cache_counter = Item_cache_counter() +Item_cache_counter = Cache_counter() Item_cache_active = False +Props_cache_data = {} +Props_cache_counter = Cache_counter() +Props_cache_active = False + + ## cache regular statistics logging on info level # 0: on each request (incl. current request) # >0: after at least every given seconds (exc. current request) -Item_cache_statistics_log_interval = datetime.timedelta(seconds=60) -# init timestamp -Item_cache_statistics_log_last_time = datetime.datetime.now() +Cache_counter_log_interval = datetime.timedelta(seconds=60) class BaseCollection: @@ -454,7 +488,8 @@ class Collection(BaseCollection): def __init__(self, path, principal=False, folder=None): global Item_cache_active - global Item_cache_statistics_log_interval + global Props_cache_active + global Cache_counter_log_interval if not folder: folder = self._get_collection_root_folder() # Path should already be sanitized @@ -472,17 +507,30 @@ def __init__(self, path, principal=False, folder=None): self.logging_performance = True if self.configuration.getboolean("logging", "exceptions"): self.logging_exceptions = True - Item_cache_statistics_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) + self.logging_filter = int(self.configuration.get("logging", "debug_filter"), 0) + self.logger.debug("debug filter active with: 0x%x", self.logging_filter) + Cache_counter_log_interval = 
datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) if self.configuration.getboolean("storage", "cache"): if not Item_cache_active: if self.logging_performance: - self.logger.info("Item cache enabled (performance log on info level)") + self.logger.info("Items cache enabled (performance log on info level)") + Item_cache_counter.perflog = True else: - if (Item_cache_statistics_log_interval.total_seconds() > 0): - self.logger.info("Item cache enabled (regular statistics log on info level with minimum interval %d sec)", Item_cache_statistics_log_interval.total_seconds()) + if (Cache_counter_log_interval.total_seconds() > 0): + self.logger.info("Items cache enabled (regular statistics log on info level with minimum interval %d sec)", Cache_counter_log_interval.total_seconds()) else: - self.logger.info("Item cache enabled (statistics log only on debug level)") + self.logger.info("Items cache enabled (statistics log only on debug level)") Item_cache_active = True + if not Props_cache_active: + if self.logging_performance: + self.logger.info("Props cache enabled (performance log on info level)") + Props_cache_counter.perflog = True + else: + if (Cache_counter_log_interval.total_seconds() > 0): + self.logger.info("Props cache enabled (regular statistics log on info level with minimum interval %d sec)", Cache_counter_log_interval.total_seconds()) + else: + self.logger.info("Props cache enabled (statistics log only on debug level)") + Props_cache_active = True @classmethod def _get_collection_root_folder(cls): @@ -712,11 +760,19 @@ def move(cls, item, to_collection, to_href): cls._sync_directory(item.collection._filesystem_path) def list(self): + if not self.logging_filter & 0x0010: + self.logger.debug("function storage/list called") global Item_cache_data global Item_cache_counter global Item_cache_active + global Props_cache_counter + global Props_cache_active if Item_cache_active: - Item_cache_counter_stamp = 
copy.deepcopy(Item_cache_counter) + Item_cache_counter_stamp = Cache_counter() + Item_cache_counter_stamp.copy(Item_cache_counter) + if Props_cache_active: + Props_cache_counter_stamp = Cache_counter() + Props_cache_counter_stamp.copy(Props_cache_counter) for href in os.listdir(self._filesystem_path): if not is_safe_filesystem_path_component(href): if not href.startswith(".Radicale"): @@ -726,10 +782,13 @@ def list(self): if os.path.isfile(path): yield href if Item_cache_active: - Item_cache_counter.log(self.logger, Item_cache_counter_stamp, self.logging_performance) - del Item_cache_counter_stamp + Item_cache_counter.log_request("Items", self.logger, Item_cache_counter_stamp, self.logging_performance) + if Props_cache_active: + Props_cache_counter.log_request("Props", self.logger, Props_cache_counter_stamp, self.logging_performance) def get(self, href): + if not self.logging_filter & 0x0010: + self.logger.debug("function storage/get called") global Item_cache_data global Item_cache_counter global Item_cache_active @@ -758,7 +817,7 @@ def get(self, href): else: Item_cache_counter.dirty += 1 # remove from cache - self.logger.debug("Delete from cache (dirty): %s", path) + self.logger.debug("Item delete from cache (dirty): %s", path) Item_cache_counter.entries -= 1 Item_cache_counter.size -= len(str(Item_cache_data[path].Item.item)) del Item_cache_data[path] @@ -769,7 +828,8 @@ def get(self, href): with open(path, encoding=self.encoding, newline="") as f: text = f.read() try: - self.logger.debug("Read object ('get'): %s", path) + if not self.logging_filter & 0x0100: + self.logger.debug("Item read ('get'): %s", path) item = vobject.readOne(text) except Exception as e: self.logger.error("Object broken on read (skip 'get'): %s (%s)", path, e) @@ -787,14 +847,16 @@ def get(self, href): if Item_cache_active: # cache handling - self.logger.debug("Store item in cache: %s", path) - Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time, 
datetime.datetime.now()) + if not self.logging_filter & 0x1000: + self.logger.debug("Item store in cache: %s", path) + Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time) Item_cache_counter.size += len(str(Item_entry.item)) Item_cache_counter.entries += 1 return Item_entry else: - self.logger.debug("Retrieve from cache: %s", path) + if not self.logging_filter & 0x2000: + self.logger.debug("Item retrieve from cache: %s", path) return Item_cache_data[path].Item def upload(self, href, vobject_item): @@ -840,13 +902,57 @@ def delete(self, href=None): del Item_cache_data[path] def get_meta(self, key): + global Props_cache_active + global Props_cache_counter if os.path.exists(self._props_path): - with open(self._props_path, encoding=self.encoding) as f: - return json.load(f).get(key) + Props_cache_hit = 0 + if Props_cache_active: + last_modified_time = os.path.getmtime(self._props_path) + # Props cache lookup + Props_cache_counter.lookup += 1 + if self._props_path in Props_cache_data: + if Props_cache_data[self._props_path].last_modified_time == last_modified_time: + Props_cache_counter.hit += 1 + Props_cache_hit = 1 + else: + Props_cache_counter.dirty += 1 + # remove from cache + self.logger.debug("Props delete from cache (dirty): %s", path) + Props_cache_counter.entries -= 1 + del Props_cache_data[self._props_path] + else: + Props_cache_counter.miss += 1 + + if Props_cache_hit == 0 or Props_cache_active == False: + with open(self._props_path, encoding=self.encoding) as f: + if not self.logging_filter & 0x0400: + self.logger.debug("Props read ('get_meta') : %s", self._props_path) + props_contents = json.load(f) + + if Props_cache_active: + # cache handling + if not self.logging_filter & 0x4000: + self.logger.debug("Props store in cache : %s", self._props_path) + Props_cache_data[self._props_path] = Props_cache_entry(props_contents, last_modified_time) + Props_cache_counter.entries += 1 + + return props_contents.get(key) + else: + if not 
self.logging_filter & 0x8000: + self.logger.debug("Props retrieve from cache: %s", self._props_path) + return Props_cache_data[self._props_path].props_contents.get(key) def set_meta(self, props): + global Props_cache_active + global Props_cache_counter if os.path.exists(self._props_path): + if Props_cache_active: + if self._props_path in Props_cache_data: + self.logger.debug("Props delete from cache ('set_meta'): %s", path) + Props_cache_counter.entries -= 1 + del Props_cache_data[self._props_path] with open(self._props_path, encoding=self.encoding) as f: + self.logger.debug("Write props ('set_meta'): %s", self._props_path) old_props = json.load(f) old_props.update(props) props = old_props @@ -865,6 +971,8 @@ def last_modified(self): return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last)) def serialize(self): + if not self.logging_filter & 0x0010: + self.logger.debug("function storage/serialize called") items = [] time_begin = datetime.datetime.now() for href in self.list(): From 6103d7ff5431bad044aff5f67c2cc5e46d73d896 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 18 Oct 2016 22:22:17 +0000 Subject: [PATCH 028/101] reorg cache init and logging --- radicale/__init__.py | 5 +- radicale/storage.py | 315 +++++++++++++++++++++++-------------------- 2 files changed, 169 insertions(+), 151 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 5314b7b91..e142813bd 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -304,6 +304,8 @@ def response(status, headers=(), answer=None): status, client.responses.get(status, "Unknown")) start_response(status, list(headers.items())) time_end = datetime.datetime.now() + if self.configuration.getboolean("storage", "cache"): + storage.cache_log_statistics_overall(self) sizeinfo = "" if answer: sizeinfo = sizeinfo + str(len(str(answer))) + " bytes" @@ -502,7 +504,6 @@ def do_GET(self, environ, path, user): "Last-Modified": collection.last_modified, "ETag": item.etag} answer = 
item.serialize() - storage.log_cache_statistics(self) return client.OK, headers, answer def do_HEAD(self, environ, path, user): @@ -604,7 +605,6 @@ def do_PROPFIND(self, environ, path, user): headers = {"DAV": DAV_HEADERS, "Content-Type": "text/xml"} status, answer = xmlutils.propfind( path, content, read_items, write_items, user) - storage.log_cache_statistics(self) if status == client.FORBIDDEN: return NOT_ALLOWED else: @@ -692,5 +692,4 @@ def do_REPORT(self, environ, path, user): collection = item.collection headers = {"Content-Type": "text/xml"} answer = xmlutils.report(path, content, collection) - storage.log_cache_statistics(self) return client.MULTI_STATUS, headers, answer diff --git a/radicale/storage.py b/radicale/storage.py index 85e7c4419..ff383d527 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -84,14 +84,48 @@ class Overlapped(ctypes.Structure): elif os.name == "posix": import fcntl +logging_filter = 0xffff def load(configuration, logger): + global logging_filter """Load the storage manager chosen in configuration.""" storage_type = configuration.get("storage", "type") + logger.info("Radicale storage manager loading: %s", storage_type) + logging_filter = int(configuration.get("logging", "debug_filter"), 0) + logger.debug("debug filter active with: 0x%x", logging_filter) + if storage_type == "multifilesystem": collection_class = Collection + ## initialize cache + global Items_cache_active + global Props_cache_active + global Items_cache_counter + global Props_cache_counter + if configuration.getboolean("storage", "cache"): + Items_cache_active = True + Props_cache_active = True + Items_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") + Props_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") + if configuration.getboolean("logging", "performance"): + if Items_cache_active: + logger.info("Items cache enabled (performance log on info level)") + 
Items_cache_counter.perflog = True + if Props_cache_active: + logger.info("Props cache enabled (performance log on info level)") + Props_cache_counter.perflog = True + else: + if (Items_cache_counter.loginterval > 0): + logger.info("Items cache enabled (regular statistics log on info level with minimum interval %d sec)", Items_cache_counter.loginterval) + else: + logger.info("Items cache enabled (statistics log only on debug level)") + + if (Props_cache_counter.loginterval > 0): + logger.info("Props cache enabled (regular statistics log on info level with minimum interval %d sec)", Props_cache_counter.loginterval) + else: + logger.info("Items cache enabled (statistics log only on debug level)") else: collection_class = import_module(storage_type).Collection + logger.info("Radicale storage manager successfully loaded: %s", storage_type) class CollectionCopy(collection_class): """Collection copy, avoids overriding the original class attributes.""" @@ -103,8 +137,8 @@ class CollectionCopy(collection_class): def cleanup(logger): """Print cache statistics.""" logger.info("Cleaning up 'storage'") - if Item_cache_active: - logger.info("Items cache overall statistics: %s", Item_cache_counter.string()) + if Items_cache_active: + logger.info("Items cache overall statistics: %s", Items_cache_counter.string()) if Props_cache_active: logger.info("Props cache overall statistics: %s", Props_cache_counter.string()) @@ -185,52 +219,7 @@ def path_to_filesystem(root, *paths): return safe_path -def log_cache_statistics(self): - global Item_cache_active - global Item_cache_counter - global Props_cache_active - global Props_cache_counter - if Item_cache_active: - Item_cache_counter.log("Items", self.logger) - if Props_cache_active: - Props_cache_counter.log("Props", self.logger) - - -class UnsafePathError(ValueError): - def __init__(self, path): - message = "Can't translate name safely to filesystem: %s" % path - super().__init__(message) - - -class ComponentExistsError(ValueError): - 
def __init__(self, path): - message = "Component already exists: %s" % path - super().__init__(message) - - -class ComponentNotFoundError(ValueError): - def __init__(self, path): - message = "Component doesn't exist: %s" % path - super().__init__(message) - - -class Item: - def __init__(self, collection, item, href, last_modified=None): - self.collection = collection - self.item = item - self.href = href - self.last_modified = last_modified - - def __getattr__(self, attr): - return getattr(self.item, attr) - - @property - def etag(self): - """Encoded as quoted-string (see RFC 2616).""" - return get_etag(self.serialize()) - - -### Item caching +### BEGIN Items/Props caching # TODO: catch all potential race conditions during add/delete ## cache counter statistics class Cache_counter: @@ -242,7 +231,11 @@ def __init__(self): self.entries= 0 self.size = 0 self.perflog= False + ## cache statistics logging on info level + # 0: on each request (incl. current request) + # >0: after at least every given loginterval (excl. 
current request) self.lastlog= datetime.datetime.now() + self.loginterval = datetime.timedelta(seconds=60) # default def copy(self, counter): self.lookup = counter.lookup @@ -253,9 +246,9 @@ def copy(self, counter): self.size = counter.size self.perflog= counter.perflog self.lastlog= counter.lastlog + self.loginterval= counter.loginterval def string(self): - global Item_cache_data if (self.entries > 0): message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%) entries=%d memoryKiB=%.3f" % ( self.lookup, @@ -288,15 +281,14 @@ def string_delta(self, stamp): message = "no cache lookup so far" return(message) - def log_request(self, token, logger, stamp, logging_performance): - if (logging_performance) or (Cache_counter_log_interval == 0): + def log_request(self, token, logger, stamp): + if (self.perflog) or (self.loginterval == 0): logger.info("%s cache request statistics: %s", token, self.string_delta(stamp)) else: logger.debug("%s cache request statistics: %s", token, self.string_delta(stamp)) - def log(self, token, logger): - global Cache_counter_log_interval - if (self.perflog) or (Cache_counter_log_interval == 0) or (datetime.datetime.now() - self.lastlog > Cache_counter_log_interval): + def log_overall(self, token, logger): + if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > self.loginterval): logger.info("%s cache overall statistics: %s", token, self.string()) self.lastlog = datetime.datetime.now() else: @@ -305,30 +297,83 @@ def log(self, token, logger): ## cache entry class Item_cache_entry: - def __init__(self, Item, last_modified_time): + def __init__(self, Item, size, last_modified_time): self.Item = Item + self.size = size self.last_modified_time = last_modified_time -## cache entry class Props_cache_entry: - def __init__(self, props_contents, last_modified_time): + def __init__(self, props_contents, size, last_modified_time): self.props_contents = props_contents + self.size = size 
self.last_modified_time = last_modified_time ## cache initialization -Item_cache_data = {} -Item_cache_counter = Cache_counter() -Item_cache_active = False +Items_cache_data = {} +Items_cache_counter = Cache_counter() +Items_cache_active = False Props_cache_data = {} Props_cache_counter = Cache_counter() Props_cache_active = False +## global functions to be called also from other modules +def cache_log_statistics_overall(self): + global Items_cache_active + global Items_cache_counter + global Props_cache_active + global Props_cache_counter + if Items_cache_active: + Items_cache_counter.log_overall("Items", self.logger) + if Props_cache_active: + Props_cache_counter.log_overall("Props", self.logger) + +def cache_log_statistics_request(self, stamp): + global Items_cache_active + global Items_cache_counter + global Props_cache_active + global Props_cache_counter + if Items_cache_active: + Items_cache_counter.log_request("Items", self.logger, stamp) + if Props_cache_active: + Props_cache_counter.log_request("Props", self.logger, stamp) + +### END Items/Props caching + + +class UnsafePathError(ValueError): + def __init__(self, path): + message = "Can't translate name safely to filesystem: %s" % path + super().__init__(message) + + +class ComponentExistsError(ValueError): + def __init__(self, path): + message = "Component already exists: %s" % path + super().__init__(message) + + +class ComponentNotFoundError(ValueError): + def __init__(self, path): + message = "Component doesn't exist: %s" % path + super().__init__(message) + + +class Item: + def __init__(self, collection, item, href, last_modified=None): + self.collection = collection + self.item = item + self.href = href + self.last_modified = last_modified + + def __getattr__(self, attr): + return getattr(self.item, attr) + + @property + def etag(self): + """Encoded as quoted-string (see RFC 2616).""" + return get_etag(self.serialize()) -## cache regular statistics logging on info level -# 0: on each request (incl. 
current request) -# >0: after at least every given seconds (exc. current request) -Cache_counter_log_interval = datetime.timedelta(seconds=60) class BaseCollection: @@ -487,9 +532,6 @@ class Collection(BaseCollection): """Collection stored in several files per calendar.""" def __init__(self, path, principal=False, folder=None): - global Item_cache_active - global Props_cache_active - global Cache_counter_log_interval if not folder: folder = self._get_collection_root_folder() # Path should already be sanitized @@ -501,36 +543,6 @@ def __init__(self, path, principal=False, folder=None): split_path = self.path.split("/") self.owner = split_path[0] if len(split_path) > 1 else None self.is_principal = principal - self.logging_performance = False - self.logging_exceptions = False - if self.configuration.getboolean("logging", "performance"): - self.logging_performance = True - if self.configuration.getboolean("logging", "exceptions"): - self.logging_exceptions = True - self.logging_filter = int(self.configuration.get("logging", "debug_filter"), 0) - self.logger.debug("debug filter active with: 0x%x", self.logging_filter) - Cache_counter_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) - if self.configuration.getboolean("storage", "cache"): - if not Item_cache_active: - if self.logging_performance: - self.logger.info("Items cache enabled (performance log on info level)") - Item_cache_counter.perflog = True - else: - if (Cache_counter_log_interval.total_seconds() > 0): - self.logger.info("Items cache enabled (regular statistics log on info level with minimum interval %d sec)", Cache_counter_log_interval.total_seconds()) - else: - self.logger.info("Items cache enabled (statistics log only on debug level)") - Item_cache_active = True - if not Props_cache_active: - if self.logging_performance: - self.logger.info("Props cache enabled (performance log on info level)") - Props_cache_counter.perflog = True - else: - if 
(Cache_counter_log_interval.total_seconds() > 0): - self.logger.info("Props cache enabled (regular statistics log on info level with minimum interval %d sec)", Cache_counter_log_interval.total_seconds()) - else: - self.logger.info("Props cache enabled (statistics log only on debug level)") - Props_cache_active = True @classmethod def _get_collection_root_folder(cls): @@ -760,16 +772,17 @@ def move(cls, item, to_collection, to_href): cls._sync_directory(item.collection._filesystem_path) def list(self): - if not self.logging_filter & 0x0010: + global logging_filter + if not logging_filter & 0x0010: self.logger.debug("function storage/list called") - global Item_cache_data - global Item_cache_counter - global Item_cache_active + global Items_cache_data + global Items_cache_counter + global Items_cache_active global Props_cache_counter global Props_cache_active - if Item_cache_active: - Item_cache_counter_stamp = Cache_counter() - Item_cache_counter_stamp.copy(Item_cache_counter) + if Items_cache_active: + Items_cache_counter_stamp = Cache_counter() + Items_cache_counter_stamp.copy(Items_cache_counter) if Props_cache_active: Props_cache_counter_stamp = Cache_counter() Props_cache_counter_stamp.copy(Props_cache_counter) @@ -781,17 +794,18 @@ def list(self): path = os.path.join(self._filesystem_path, href) if os.path.isfile(path): yield href - if Item_cache_active: - Item_cache_counter.log_request("Items", self.logger, Item_cache_counter_stamp, self.logging_performance) + if Items_cache_active: + Items_cache_counter.log_request("Items", self.logger, Items_cache_counter_stamp) if Props_cache_active: - Props_cache_counter.log_request("Props", self.logger, Props_cache_counter_stamp, self.logging_performance) + Props_cache_counter.log_request("Props", self.logger, Props_cache_counter_stamp) def get(self, href): - if not self.logging_filter & 0x0010: + global logging_filter + if not logging_filter & 0x0010: self.logger.debug("function storage/get called") - global 
Item_cache_data - global Item_cache_counter - global Item_cache_active + global Items_cache_data + global Items_cache_counter + global Items_cache_active if not href: return None if not is_safe_filesystem_path_component(href): @@ -807,57 +821,57 @@ def get(self, href): time.gmtime(last_modified_time)) Item_cache_hit = 0 - if Item_cache_active: + if Items_cache_active: # Item cache lookup - Item_cache_counter.lookup += 1 - if path in Item_cache_data: - if Item_cache_data[path].last_modified_time == last_modified_time: - Item_cache_counter.hit += 1 + Items_cache_counter.lookup += 1 + if path in Items_cache_data: + if Items_cache_data[path].last_modified_time == last_modified_time: + Items_cache_counter.hit += 1 Item_cache_hit = 1 else: - Item_cache_counter.dirty += 1 + Items_cache_counter.dirty += 1 # remove from cache self.logger.debug("Item delete from cache (dirty): %s", path) - Item_cache_counter.entries -= 1 - Item_cache_counter.size -= len(str(Item_cache_data[path].Item.item)) - del Item_cache_data[path] + Items_cache_counter.entries -= 1 + Items_cache_counter.size -= Items_cache_data[path].size + del Items_cache_data[path] else: - Item_cache_counter.miss += 1 + Items_cache_counter.miss += 1 - if Item_cache_hit == 0 or Item_cache_active == False: + if Item_cache_hit == 0 or Items_cache_active == False: with open(path, encoding=self.encoding, newline="") as f: text = f.read() try: - if not self.logging_filter & 0x0100: + if not logging_filter & 0x0100: self.logger.debug("Item read ('get'): %s", path) item = vobject.readOne(text) except Exception as e: self.logger.error("Object broken on read (skip 'get'): %s (%s)", path, e) - if self.logging_exceptions: + if self.configuration.getboolean("logging", "exceptions"): self.logger.exception("Exception details:") return None; try: item.serialize() except Exception as e: self.logger.error("Object broken on serialize (skip 'get'): %s (%s)", path, e) - if self.logging_exceptions: + if 
self.configuration.getboolean("logging", "exceptions"): self.logger.exception("Exception details:") return None; Item_entry = Item(self, item, href, last_modified) - if Item_cache_active: + if Items_cache_active: # cache handling - if not self.logging_filter & 0x1000: + if not logging_filter & 0x1000: self.logger.debug("Item store in cache: %s", path) - Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time) - Item_cache_counter.size += len(str(Item_entry.item)) - Item_cache_counter.entries += 1 + Items_cache_data[path] = Item_cache_entry(Item_entry, len(str(Item_entry.item)), last_modified_time) + Items_cache_counter.size += Items_cache_data[path].size + Items_cache_counter.entries += 1 return Item_entry else: - if not self.logging_filter & 0x2000: + if not logging_filter & 0x2000: self.logger.debug("Item retrieve from cache: %s", path) - return Item_cache_data[path].Item + return Items_cache_data[path].Item def upload(self, href, vobject_item): if not is_safe_filesystem_path_component(href): @@ -869,8 +883,8 @@ def upload(self, href, vobject_item): return item def delete(self, href=None): - global Item_cache_data - global Item_cache_active + global Items_cache_data + global Items_cache_active if href is None: # Delete the collection parent_dir = os.path.dirname(self._filesystem_path) @@ -893,15 +907,16 @@ def delete(self, href=None): raise ComponentNotFoundError(href) os.remove(path) self._sync_directory(os.path.dirname(path)) - if Item_cache_active: + if Items_cache_active: # remove from cache, if existing - if path in Item_cache_data: - self.logger.debug("Delete from cache (delete): %s", path) - Item_cache_counter.entries -= 1 - Item_cache_counter.size -= len(str(Item_cache_data[path].Item.item)) - del Item_cache_data[path] + if path in Items_cache_data: + self.logger.debug("Item delete from cache ('delete'): %s", path) + Items_cache_counter.entries -= 1 + Items_cache_counter.size -= len(str(Items_cache_data[path].Item.item)) + del 
Items_cache_data[path] def get_meta(self, key): + global logging_filter global Props_cache_active global Props_cache_counter if os.path.exists(self._props_path): @@ -917,7 +932,8 @@ def get_meta(self, key): else: Props_cache_counter.dirty += 1 # remove from cache - self.logger.debug("Props delete from cache (dirty): %s", path) + self.logger.debug("Props delete from cache (dirty): %s", self._props_path) + Props_cache_counter.size -= Props_cache_data[self._props_path].size Props_cache_counter.entries -= 1 del Props_cache_data[self._props_path] else: @@ -925,20 +941,21 @@ def get_meta(self, key): if Props_cache_hit == 0 or Props_cache_active == False: with open(self._props_path, encoding=self.encoding) as f: - if not self.logging_filter & 0x0400: + if not logging_filter & 0x0400: self.logger.debug("Props read ('get_meta') : %s", self._props_path) - props_contents = json.load(f) + props_contents = json.load(f) if Props_cache_active: # cache handling - if not self.logging_filter & 0x4000: + if not logging_filter & 0x4000: self.logger.debug("Props store in cache : %s", self._props_path) - Props_cache_data[self._props_path] = Props_cache_entry(props_contents, last_modified_time) + Props_cache_data[self._props_path] = Props_cache_entry(props_contents, len(str(props_contents)), last_modified_time) + Props_cache_counter.size += Props_cache_data[self._props_path].size Props_cache_counter.entries += 1 return props_contents.get(key) else: - if not self.logging_filter & 0x8000: + if not logging_filter & 0x8000: self.logger.debug("Props retrieve from cache: %s", self._props_path) return Props_cache_data[self._props_path].props_contents.get(key) @@ -949,6 +966,7 @@ def set_meta(self, props): if Props_cache_active: if self._props_path in Props_cache_data: self.logger.debug("Props delete from cache ('set_meta'): %s", path) + Props_cache_counter.size -= Props_cache_data[self._props_path].size Props_cache_counter.entries -= 1 del Props_cache_data[self._props_path] with 
open(self._props_path, encoding=self.encoding) as f: @@ -971,7 +989,8 @@ def last_modified(self): return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last)) def serialize(self): - if not self.logging_filter & 0x0010: + global logging_filter + if not logging_filter & 0x0010: self.logger.debug("function storage/serialize called") items = [] time_begin = datetime.datetime.now() @@ -979,7 +998,7 @@ def serialize(self): if hasattr(self.get(href),'item'): items.append(self.get(href).item) time_end = datetime.datetime.now() - if self.logging_performance: + if self.configuration.getboolean("logging", "performance"): self.logger.info("Collection read %d items in %s sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) else: self.logger.debug("Collection read %d items in %s sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) From cf31736dac4e79de63d5785aeff8355a33204a41 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 18 Oct 2016 22:28:34 +0000 Subject: [PATCH 029/101] remove an accidental inserted newline --- radicale/storage.py | 1 - 1 file changed, 1 deletion(-) diff --git a/radicale/storage.py b/radicale/storage.py index ff383d527..2c5f2d317 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -375,7 +375,6 @@ def etag(self): return get_etag(self.serialize()) - class BaseCollection: # Overriden on copy by the "load" function From e888c866cf6f00d6c9eb278aadde025dbd2c63a3 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 00:59:40 +0000 Subject: [PATCH 030/101] remove request cache statistics logging (not thread-safe) implement locking --- radicale/storage.py | 132 ++++++++++++++++---------------------------- 1 file changed, 48 insertions(+), 84 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 2c5f2d317..902ae520b 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -92,7 +92,8 @@ def load(configuration, logger): 
storage_type = configuration.get("storage", "type") logger.info("Radicale storage manager loading: %s", storage_type) logging_filter = int(configuration.get("logging", "debug_filter"), 0) - logger.debug("debug filter active with: 0x%x", logging_filter) + if (logging_filter > 0): + logger.info("debug filter active with: 0x%x", logging_filter) if storage_type == "multifilesystem": collection_class = Collection @@ -138,9 +139,9 @@ def cleanup(logger): """Print cache statistics.""" logger.info("Cleaning up 'storage'") if Items_cache_active: - logger.info("Items cache overall statistics: %s", Items_cache_counter.string()) + logger.info("Items cache overall statistics: %s", Items_cache_counter.string_overall()) if Props_cache_active: - logger.info("Props cache overall statistics: %s", Props_cache_counter.string()) + logger.info("Props cache overall statistics: %s", Props_cache_counter.string_overall()) def get_etag(text): @@ -220,7 +221,6 @@ def path_to_filesystem(root, *paths): ### BEGIN Items/Props caching -# TODO: catch all potential race conditions during add/delete ## cache counter statistics class Cache_counter: def __init__(self): @@ -237,18 +237,7 @@ def __init__(self): self.lastlog= datetime.datetime.now() self.loginterval = datetime.timedelta(seconds=60) # default - def copy(self, counter): - self.lookup = counter.lookup - self.hit = counter.hit - self.miss = counter.miss - self.dirty = counter.dirty - self.entries= counter.entries - self.size = counter.size - self.perflog= counter.perflog - self.lastlog= counter.lastlog - self.loginterval= counter.loginterval - - def string(self): + def string_overall(self): if (self.entries > 0): message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%) entries=%d memoryKiB=%.3f" % ( self.lookup, @@ -265,31 +254,9 @@ def string(self): message = "no cache entries" return(message) - def string_delta(self, stamp): - delta_lookup = self.lookup - stamp.lookup - if (delta_lookup > 0): - message = "lookup=%d hit=%d 
(%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%)" % ( - delta_lookup, - self.hit - stamp.hit, - (self.hit - stamp.hit) * 100 / delta_lookup, - self.miss - stamp.miss, - (self.miss - stamp.miss) * 100 / delta_lookup, - self.dirty - stamp.dirty, - (self.dirty - stamp.dirty) * 100 / delta_lookup - ) - else: - message = "no cache lookup so far" - return(message) - - def log_request(self, token, logger, stamp): - if (self.perflog) or (self.loginterval == 0): - logger.info("%s cache request statistics: %s", token, self.string_delta(stamp)) - else: - logger.debug("%s cache request statistics: %s", token, self.string_delta(stamp)) - def log_overall(self, token, logger): if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > self.loginterval): - logger.info("%s cache overall statistics: %s", token, self.string()) + logger.info("%s cache overall statistics: %s", token, self.string_overall()) self.lastlog = datetime.datetime.now() else: logger.debug("%s cache overall statistics: %s", token, self.string()) @@ -309,10 +276,12 @@ def __init__(self, props_contents, size, last_modified_time): self.last_modified_time = last_modified_time ## cache initialization +Items_cache_lock = threading.Lock() Items_cache_data = {} Items_cache_counter = Cache_counter() Items_cache_active = False +Props_cache_lock = threading.Lock() Props_cache_data = {} Props_cache_counter = Cache_counter() Props_cache_active = False @@ -328,16 +297,6 @@ def cache_log_statistics_overall(self): if Props_cache_active: Props_cache_counter.log_overall("Props", self.logger) -def cache_log_statistics_request(self, stamp): - global Items_cache_active - global Items_cache_counter - global Props_cache_active - global Props_cache_counter - if Items_cache_active: - Items_cache_counter.log_request("Items", self.logger, stamp) - if Props_cache_active: - Props_cache_counter.log_request("Props", self.logger, stamp) - ### END Items/Props caching @@ -771,20 +730,11 @@ def move(cls, item, 
to_collection, to_href): cls._sync_directory(item.collection._filesystem_path) def list(self): - global logging_filter - if not logging_filter & 0x0010: - self.logger.debug("function storage/list called") global Items_cache_data global Items_cache_counter global Items_cache_active global Props_cache_counter global Props_cache_active - if Items_cache_active: - Items_cache_counter_stamp = Cache_counter() - Items_cache_counter_stamp.copy(Items_cache_counter) - if Props_cache_active: - Props_cache_counter_stamp = Cache_counter() - Props_cache_counter_stamp.copy(Props_cache_counter) for href in os.listdir(self._filesystem_path): if not is_safe_filesystem_path_component(href): if not href.startswith(".Radicale"): @@ -793,15 +743,8 @@ def list(self): path = os.path.join(self._filesystem_path, href) if os.path.isfile(path): yield href - if Items_cache_active: - Items_cache_counter.log_request("Items", self.logger, Items_cache_counter_stamp) - if Props_cache_active: - Props_cache_counter.log_request("Props", self.logger, Props_cache_counter_stamp) def get(self, href): - global logging_filter - if not logging_filter & 0x0010: - self.logger.debug("function storage/get called") global Items_cache_data global Items_cache_counter global Items_cache_active @@ -821,6 +764,8 @@ def get(self, href): Item_cache_hit = 0 if Items_cache_active: + Items_cache_lock.acquire() + # Item cache lookup Items_cache_counter.lookup += 1 if path in Items_cache_data: @@ -829,6 +774,7 @@ def get(self, href): Item_cache_hit = 1 else: Items_cache_counter.dirty += 1 + self.Cache_counter_Items.dirty += 1 # remove from cache self.logger.debug("Item delete from cache (dirty): %s", path) Items_cache_counter.entries -= 1 @@ -840,6 +786,7 @@ def get(self, href): if Item_cache_hit == 0 or Items_cache_active == False: with open(path, encoding=self.encoding, newline="") as f: text = f.read() + try: if not logging_filter & 0x0100: self.logger.debug("Item read ('get'): %s", path) @@ -848,29 +795,41 @@ def 
get(self, href): self.logger.error("Object broken on read (skip 'get'): %s (%s)", path, e) if self.configuration.getboolean("logging", "exceptions"): self.logger.exception("Exception details:") + if Items_cache_active: + Items_cache_lock.release() return None; + try: - item.serialize() + # test whether object is parseable + item_serialized = item.serialize() except Exception as e: self.logger.error("Object broken on serialize (skip 'get'): %s (%s)", path, e) if self.configuration.getboolean("logging", "exceptions"): self.logger.exception("Exception details:") + if Items_cache_active: + Items_cache_lock.release() return None; + + # temp object not required + del item_serialized + # retrieve from cache Item_entry = Item(self, item, href, last_modified) if Items_cache_active: - # cache handling + # store in cache if not logging_filter & 0x1000: self.logger.debug("Item store in cache: %s", path) - Items_cache_data[path] = Item_cache_entry(Item_entry, len(str(Item_entry.item)), last_modified_time) + Items_cache_data[path] = Item_cache_entry(Item_entry, len(str(Item_entry.item)) + len(path), last_modified_time) Items_cache_counter.size += Items_cache_data[path].size Items_cache_counter.entries += 1 - - return Item_entry else: if not logging_filter & 0x2000: self.logger.debug("Item retrieve from cache: %s", path) - return Items_cache_data[path].Item + Item_entry = Items_cache_data[path].Item + + if Items_cache_active: + Items_cache_lock.release() + return Item_entry def upload(self, href, vobject_item): if not is_safe_filesystem_path_component(href): @@ -907,12 +866,14 @@ def delete(self, href=None): os.remove(path) self._sync_directory(os.path.dirname(path)) if Items_cache_active: + Props_cache_lock.acquire() # remove from cache, if existing if path in Items_cache_data: self.logger.debug("Item delete from cache ('delete'): %s", path) Items_cache_counter.entries -= 1 - Items_cache_counter.size -= len(str(Items_cache_data[path].Item.item)) + Items_cache_counter.size -= 
Items_cache_data[path].size del Items_cache_data[path] + Props_cache_lock.release() def get_meta(self, key): global logging_filter @@ -921,6 +882,7 @@ def get_meta(self, key): if os.path.exists(self._props_path): Props_cache_hit = 0 if Props_cache_active: + Props_cache_lock.acquire() last_modified_time = os.path.getmtime(self._props_path) # Props cache lookup Props_cache_counter.lookup += 1 @@ -948,26 +910,31 @@ def get_meta(self, key): # cache handling if not logging_filter & 0x4000: self.logger.debug("Props store in cache : %s", self._props_path) - Props_cache_data[self._props_path] = Props_cache_entry(props_contents, len(str(props_contents)), last_modified_time) + Props_cache_data[self._props_path] = Props_cache_entry(props_contents, len(str(props_contents)) + len(self._props_path), last_modified_time) Props_cache_counter.size += Props_cache_data[self._props_path].size Props_cache_counter.entries += 1 - return props_contents.get(key) + meta = props_contents.get(key) else: if not logging_filter & 0x8000: self.logger.debug("Props retrieve from cache: %s", self._props_path) - return Props_cache_data[self._props_path].props_contents.get(key) + meta = Props_cache_data[self._props_path].props_contents.get(key) + + if Props_cache_active: + Props_cache_lock.release() + return meta def set_meta(self, props): global Props_cache_active global Props_cache_counter if os.path.exists(self._props_path): - if Props_cache_active: - if self._props_path in Props_cache_data: - self.logger.debug("Props delete from cache ('set_meta'): %s", path) - Props_cache_counter.size -= Props_cache_data[self._props_path].size - Props_cache_counter.entries -= 1 - del Props_cache_data[self._props_path] + with Props_cache_lock: + if Props_cache_active: + if self._props_path in Props_cache_data: + self.logger.debug("Props delete from cache ('set_meta'): %s", path) + Props_cache_counter.size -= Props_cache_data[self._props_path].size + Props_cache_counter.entries -= 1 + del 
Props_cache_data[self._props_path] with open(self._props_path, encoding=self.encoding) as f: self.logger.debug("Write props ('set_meta'): %s", self._props_path) old_props = json.load(f) @@ -988,9 +955,6 @@ def last_modified(self): return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last)) def serialize(self): - global logging_filter - if not logging_filter & 0x0010: - self.logger.debug("function storage/serialize called") items = [] time_begin = datetime.datetime.now() for href in self.list(): From 8ecff7bf0e19be35b45b736d65e4233795b1d284 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 01:03:34 +0000 Subject: [PATCH 031/101] add explanation for debug_filter --- config | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/config b/config index bbfc81f58..4177aa7e9 100644 --- a/config +++ b/config @@ -125,6 +125,16 @@ cache = True #config = /etc/radicale/logging # Set the default logging level to debug (this enables also request/response logging) #debug = False + +# suppress selectable debug messages (to keep debug log cleaner) +debug_filter = 0xffff +# 0x0100: suppress "read object" +# 0x0400: suppress "read props" +# 0x1000: suppress "store object in cache" +# 0x2000: suppress "retrieve object from cache" +# 0x4000: suppress "store props in cache" +# 0x8000: suppress "retrieve props from cache" + # Store all environment variables (including those set in the shell) #full_environment = False @@ -138,8 +148,8 @@ performance = False exceptions = False # Include regular cache performance statistics in logs on info level -# 0: on each request (incl. 
current request) -# >0: minimum interval in seconds (only global) +# 0: on each request +# >0: minimum interval in seconds cache_statistics_interval = 300 From 4be9172e463b0526f53e7c7a256c2e0c3c858d05 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 01:14:55 +0000 Subject: [PATCH 032/101] fix time delta handling --- radicale/storage.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 902ae520b..a4ffbafae 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -105,7 +105,7 @@ def load(configuration, logger): if configuration.getboolean("storage", "cache"): Items_cache_active = True Props_cache_active = True - Items_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") + Items_cache_counter.loginterval = seconds=configuration.getint("logging", "cache_statistics_interval") Props_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") if configuration.getboolean("logging", "performance"): if Items_cache_active: @@ -235,7 +235,7 @@ def __init__(self): # 0: on each request (incl. current request) # >0: after at least every given loginterval (excl. 
current request) self.lastlog= datetime.datetime.now() - self.loginterval = datetime.timedelta(seconds=60) # default + self.loginterval = 60 # default def string_overall(self): if (self.entries > 0): @@ -255,11 +255,11 @@ def string_overall(self): return(message) def log_overall(self, token, logger): - if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > self.loginterval): + if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > datetime.timedelta(seconds=self.loginterval)): logger.info("%s cache overall statistics: %s", token, self.string_overall()) self.lastlog = datetime.datetime.now() else: - logger.debug("%s cache overall statistics: %s", token, self.string()) + logger.debug("%s cache overall statistics: %s", token, self.string_overall()) ## cache entry From 5a7164c1ee9e07a0fb1680d5cbffe47135248d04 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 01:24:30 +0000 Subject: [PATCH 033/101] extend description --- config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config b/config index 4177aa7e9..09b408a0c 100644 --- a/config +++ b/config @@ -112,7 +112,7 @@ #hook = # Example: git add -A && (git diff --cached --quiet || git commit -m "Changes by "%(user)s) -# Caching (of Items) in memory +# Caching (of Items and Props files) in memory cache = True From f20cd102ff0dfbb50f056504d662cb2a274a6f7d Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 01:27:33 +0000 Subject: [PATCH 034/101] remove obsolete code --- radicale/storage.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index a4ffbafae..221539d75 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -730,11 +730,6 @@ def move(cls, item, to_collection, to_href): cls._sync_directory(item.collection._filesystem_path) def list(self): - global Items_cache_data - global Items_cache_counter - global Items_cache_active - global 
Props_cache_counter - global Props_cache_active for href in os.listdir(self._filesystem_path): if not is_safe_filesystem_path_component(href): if not href.startswith(".Radicale"): From 2f848d5b495786a220e3c6f71932053afacd4344 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 02:17:35 +0000 Subject: [PATCH 035/101] add also support for debug_filter --- config | 3 +++ radicale/rights.py | 17 +++++++++++------ 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/config b/config index 09b408a0c..2ec443312 100644 --- a/config +++ b/config @@ -128,8 +128,11 @@ cache = True # suppress selectable debug messages (to keep debug log cleaner) debug_filter = 0xffff +# 0x0080: suppress 'rights' "Test if" + "Section" # 0x0100: suppress "read object" +# 0x0200: suppress "delete object from cache" # 0x0400: suppress "read props" +# 0x0800: suppress "delete props from cache" # 0x1000: suppress "store object in cache" # 0x2000: suppress "retrieve object from cache" # 0x4000: suppress "store props in cache" diff --git a/radicale/rights.py b/radicale/rights.py index 00928435a..2d382f78a 100644 --- a/radicale/rights.py +++ b/radicale/rights.py @@ -91,6 +91,7 @@ class BaseRights: def __init__(self, configuration, logger): self.configuration = configuration self.logger = logger + self.debug_filter = int(configuration.get("logging", "debug_filter"), 0) def authorized(self, user, collection, permission): """Check if the user is allowed to read or write the collection. 
@@ -122,7 +123,8 @@ def authorized(self, user, path, permission): self.logger.debug("Rights type '%s'", self.rights_type) regex.readfp(StringIO(DEFINED_RIGHTS[self.rights_type])) else: - self.logger.debug("Reading rights from file '%s'", self.filename) + if not self.debug_filter & 0x0080: + self.logger.debug("Reading rights from file '%s'", self.filename) if not regex.read(self.filename): self.logger.error( "File '%s' not found for rights", self.filename) @@ -131,17 +133,20 @@ def authorized(self, user, path, permission): for section in regex.sections(): re_user = regex.get(section, "user") re_collection = regex.get(section, "collection") - self.logger.debug( - "Test if '%s:%s' matches against '%s:%s' from section '%s'", - user, sane_path, re_user, re_collection, section) + if not self.debug_filter & 0x0080: + self.logger.debug( + "Test if '%s:%s' matches against '%s:%s' from section '%s'", + user, sane_path, re_user, re_collection, section) # Emulate fullmatch user_match = re.match(r"(?:%s)\Z" % re_user, user) if user_match: re_collection = re_collection.format(*user_match.groups()) # Emulate fullmatch if re.match(r"(?:%s)\Z" % re_collection, sane_path): - self.logger.debug("Section '%s' matches", section) + if not self.debug_filter & 0x0080: + self.logger.debug("Section '%s' matches", section) return permission in regex.get(section, "permission") else: - self.logger.debug("Section '%s' does not match", section) + if not self.debug_filter & 0x0080: + self.logger.debug("Section '%s' does not match", section) return False From 078f56ffbc999589382ad49d359b9ed18331383f Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 02:17:44 +0000 Subject: [PATCH 036/101] review debug_filter and locking and global cache --- radicale/storage.py | 60 +++++++++++++++++++++------------------------ 1 file changed, 28 insertions(+), 32 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 221539d75..97a992e11 100644 --- a/radicale/storage.py +++ 
b/radicale/storage.py @@ -84,16 +84,13 @@ class Overlapped(ctypes.Structure): elif os.name == "posix": import fcntl -logging_filter = 0xffff - def load(configuration, logger): - global logging_filter """Load the storage manager chosen in configuration.""" storage_type = configuration.get("storage", "type") logger.info("Radicale storage manager loading: %s", storage_type) - logging_filter = int(configuration.get("logging", "debug_filter"), 0) - if (logging_filter > 0): - logger.info("debug filter active with: 0x%x", logging_filter) + debug_filter = int(configuration.get("logging", "debug_filter"), 0) + if (debug_filter > 0): + logger.info("debug filter active with: 0x%x", debug_filter) if storage_type == "multifilesystem": collection_class = Collection @@ -124,6 +121,9 @@ def load(configuration, logger): logger.info("Props cache enabled (regular statistics log on info level with minimum interval %d sec)", Props_cache_counter.loginterval) else: logger.info("Items cache enabled (statistics log only on debug level)") + else: + logger.info("Items cache disabled") + logger.info("Props cache disabled") else: collection_class = import_module(storage_type).Collection logger.info("Radicale storage manager successfully loaded: %s", storage_type) @@ -288,9 +288,7 @@ def __init__(self, props_contents, size, last_modified_time): ## global functions to be called also from other modules def cache_log_statistics_overall(self): - global Items_cache_active global Items_cache_counter - global Props_cache_active global Props_cache_counter if Items_cache_active: Items_cache_counter.log_overall("Items", self.logger) @@ -501,6 +499,7 @@ def __init__(self, path, principal=False, folder=None): split_path = self.path.split("/") self.owner = split_path[0] if len(split_path) > 1 else None self.is_principal = principal + self.debug_filter = int(self.configuration.get("logging", "debug_filter"), 0) @classmethod def _get_collection_root_folder(cls): @@ -743,6 +742,7 @@ def get(self, href): global 
Items_cache_data global Items_cache_counter global Items_cache_active + global Items_cache_lock if not href: return None if not is_safe_filesystem_path_component(href): @@ -769,9 +769,9 @@ def get(self, href): Item_cache_hit = 1 else: Items_cache_counter.dirty += 1 - self.Cache_counter_Items.dirty += 1 # remove from cache - self.logger.debug("Item delete from cache (dirty): %s", path) + if not self.debug_filter & 0x0200: + self.logger.debug("Item delete from cache (dirty): %s", path) Items_cache_counter.entries -= 1 Items_cache_counter.size -= Items_cache_data[path].size del Items_cache_data[path] @@ -783,7 +783,7 @@ def get(self, href): text = f.read() try: - if not logging_filter & 0x0100: + if not self.debug_filter & 0x0100: self.logger.debug("Item read ('get'): %s", path) item = vobject.readOne(text) except Exception as e: @@ -812,13 +812,13 @@ def get(self, href): if Items_cache_active: # store in cache - if not logging_filter & 0x1000: + if not self.debug_filter & 0x1000: self.logger.debug("Item store in cache: %s", path) Items_cache_data[path] = Item_cache_entry(Item_entry, len(str(Item_entry.item)) + len(path), last_modified_time) Items_cache_counter.size += Items_cache_data[path].size Items_cache_counter.entries += 1 else: - if not logging_filter & 0x2000: + if not self.debug_filter & 0x2000: self.logger.debug("Item retrieve from cache: %s", path) Item_entry = Items_cache_data[path].Item @@ -837,7 +837,9 @@ def upload(self, href, vobject_item): def delete(self, href=None): global Items_cache_data + global Items_cache_counter global Items_cache_active + global Items_cache_lock if href is None: # Delete the collection parent_dir = os.path.dirname(self._filesystem_path) @@ -858,22 +860,24 @@ def delete(self, href=None): path = path_to_filesystem(self._filesystem_path, href) if not os.path.isfile(path): raise ComponentNotFoundError(href) - os.remove(path) - self._sync_directory(os.path.dirname(path)) if Items_cache_active: - Props_cache_lock.acquire() - # 
remove from cache, if existing + Items_cache_lock.acquire() if path in Items_cache_data: + # remove from cache, if existing self.logger.debug("Item delete from cache ('delete'): %s", path) Items_cache_counter.entries -= 1 Items_cache_counter.size -= Items_cache_data[path].size del Items_cache_data[path] - Props_cache_lock.release() + os.remove(path) + if Items_cache_active: + Items_cache_lock.release() + self._sync_directory(os.path.dirname(path)) def get_meta(self, key): - global logging_filter + global Props_cache_data global Props_cache_active global Props_cache_counter + global Props_cache_lock if os.path.exists(self._props_path): Props_cache_hit = 0 if Props_cache_active: @@ -888,7 +892,8 @@ def get_meta(self, key): else: Props_cache_counter.dirty += 1 # remove from cache - self.logger.debug("Props delete from cache (dirty): %s", self._props_path) + if not self.debug_filter & 0x0800: + self.logger.debug("Props delete from cache (dirty): %s", self._props_path) Props_cache_counter.size -= Props_cache_data[self._props_path].size Props_cache_counter.entries -= 1 del Props_cache_data[self._props_path] @@ -897,13 +902,13 @@ def get_meta(self, key): if Props_cache_hit == 0 or Props_cache_active == False: with open(self._props_path, encoding=self.encoding) as f: - if not logging_filter & 0x0400: + if not self.debug_filter & 0x0400: self.logger.debug("Props read ('get_meta') : %s", self._props_path) props_contents = json.load(f) if Props_cache_active: # cache handling - if not logging_filter & 0x4000: + if not self.debug_filter & 0x4000: self.logger.debug("Props store in cache : %s", self._props_path) Props_cache_data[self._props_path] = Props_cache_entry(props_contents, len(str(props_contents)) + len(self._props_path), last_modified_time) Props_cache_counter.size += Props_cache_data[self._props_path].size @@ -911,7 +916,7 @@ def get_meta(self, key): meta = props_contents.get(key) else: - if not logging_filter & 0x8000: + if not self.debug_filter & 0x8000: 
self.logger.debug("Props retrieve from cache: %s", self._props_path) meta = Props_cache_data[self._props_path].props_contents.get(key) @@ -920,16 +925,7 @@ def get_meta(self, key): return meta def set_meta(self, props): - global Props_cache_active - global Props_cache_counter if os.path.exists(self._props_path): - with Props_cache_lock: - if Props_cache_active: - if self._props_path in Props_cache_data: - self.logger.debug("Props delete from cache ('set_meta'): %s", path) - Props_cache_counter.size -= Props_cache_data[self._props_path].size - Props_cache_counter.entries -= 1 - del Props_cache_data[self._props_path] with open(self._props_path, encoding=self.encoding) as f: self.logger.debug("Write props ('set_meta'): %s", self._props_path) old_props = json.load(f) From 0781a24afd215246933622604b1b868d3a57002e Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 03:25:35 +0000 Subject: [PATCH 037/101] implement a request token to be able to match concurrent requests in (debug) log --- radicale/__init__.py | 38 +++++++++++++++++++++++--------------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index e142813bd..a947f7a4a 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -41,6 +41,7 @@ import wsgiref.simple_server import zlib import datetime +from hashlib import md5 from http import client from urllib.parse import unquote, urlparse @@ -197,6 +198,7 @@ def __init__(self, configuration, logger): self.authorized = rights.load(configuration, logger) self.encoding = configuration.get("encoding", "request") self.debug = configuration.getboolean("logging", "debug") + self.debug_filter = int(configuration.get("logging", "debug_filter"), 0) def headers_log(self, environ): """Sanitize headers for logging.""" @@ -253,18 +255,20 @@ def collect_allowed_items(self, items, user): else: path = item.collection.path if self.authorized(user, path, "r"): - self.logger.debug( - "%s has read access 
to collection %s", - user or "Anonymous", path or "/") + if not self.debug_filter & 0x0080: + self.logger.debug( + "%s has read access to collection %s", + user or "Anonymous", path or "/") read_allowed_items.append(item) else: self.logger.debug( - "%s has NO read access to collection %s", - user or "Anonymous", path or "/") + "%s has NO read access to collection %s", + user or "Anonymous", path or "/") if self.authorized(user, path, "w"): - self.logger.debug( - "%s has write access to collection %s", - user or "Anonymous", path or "/") + if not self.debug_filter & 0x0080: + self.logger.debug( + "%s has write access to collection %s", + user or "Anonymous", path or "/") write_allowed_items.append(item) else: self.logger.debug( @@ -280,7 +284,7 @@ def response(status, headers=(), answer=None): # Set content length if answer: if self.debug: - self.logger.debug("Response content:\n%s", answer) + self.logger.debug("Response content [%s]:\n%s", self.request_token, answer) answer = answer.encode(self.encoding) accept_encoding = [ encoding.strip() for encoding in @@ -315,7 +319,7 @@ def response(status, headers=(), answer=None): sizeinfo = sizeinfo + headers["Content-Encoding"] if len(sizeinfo) > 0: sizeinfo = " (" + sizeinfo + ")" - self.logger.info("%s response for %s in %s sec status: %s", environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) + self.logger.info("[%s] %s response for %s in %s sec status: %s", self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) # Return response content return [answer] if answer else [] @@ -335,11 +339,15 @@ def response(status, headers=(), answer=None): if environ["HTTP_DEPTH"]: depthinfo = " with depth " + environ["HTTP_DEPTH"] time_begin = datetime.datetime.now() - self.logger.info("%s request for %s received from %s using \"%s\"", - environ["REQUEST_METHOD"], environ["PATH_INFO"] 
+ depthinfo, remote_host, remote_useragent) - headers = pprint.pformat(self.headers_log(environ)) + + # Create an unique request token + self.request_token = md5((environ["PATH_INFO"] + depthinfo + remote_host + remote_useragent +str(time_begin)).encode('utf-8')).hexdigest()[1:8] + + self.logger.info("[%s] %s request for %s received from %s using \"%s\"", + self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) if self.debug: - self.logger.debug("Request headers:\n%s", headers) + headers = pprint.pformat(self.headers_log(environ)) + self.logger.debug("Request headers [%s]:\n%s", self.request_token, headers) # Strip base_prefix from request URI base_prefix = self.configuration.get("server", "base_prefix") @@ -453,7 +461,7 @@ def _read_content(self, environ): content = self.decode( environ["wsgi.input"].read(content_length), environ) if self.debug: - self.logger.debug("Request content:\n%s", content.strip()) + self.logger.debug("Request content [%s]:\n%s", self.request_token, content.strip()) else: content = None return content From 1a0390fa805847fddd697843560e4656211d8ba2 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 03:48:59 +0000 Subject: [PATCH 038/101] bugfix token handling --- radicale/__init__.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index a947f7a4a..3fd0be8b3 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -319,7 +319,7 @@ def response(status, headers=(), answer=None): sizeinfo = sizeinfo + headers["Content-Encoding"] if len(sizeinfo) > 0: sizeinfo = " (" + sizeinfo + ")" - self.logger.info("[%s] %s response for %s in %s sec status: %s", self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) + self.logger.info("[%s] %s response for %s in %s sec status: %s", request_token, environ["REQUEST_METHOD"], 
environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) # Return response content return [answer] if answer else [] @@ -341,10 +341,12 @@ def response(status, headers=(), answer=None): time_begin = datetime.datetime.now() # Create an unique request token - self.request_token = md5((environ["PATH_INFO"] + depthinfo + remote_host + remote_useragent +str(time_begin)).encode('utf-8')).hexdigest()[1:8] + request_token = md5((environ["PATH_INFO"] + depthinfo + remote_host + remote_useragent +str(time_begin)).encode('utf-8')).hexdigest()[1:8] + # store token for header/request/response logging + self.request_token = request_token self.logger.info("[%s] %s request for %s received from %s using \"%s\"", - self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) + request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) if self.debug: headers = pprint.pformat(self.headers_log(environ)) self.logger.debug("Request headers [%s]:\n%s", self.request_token, headers) From 8d1b34ef984be4b7d5ef43a02e2fca99a429377a Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 03:57:04 +0000 Subject: [PATCH 039/101] shift cache logging below response logging --- radicale/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 3fd0be8b3..ce9f93f6e 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -308,8 +308,6 @@ def response(status, headers=(), answer=None): status, client.responses.get(status, "Unknown")) start_response(status, list(headers.items())) time_end = datetime.datetime.now() - if self.configuration.getboolean("storage", "cache"): - storage.cache_log_statistics_overall(self) sizeinfo = "" if answer: sizeinfo = sizeinfo + str(len(str(answer))) + " bytes" @@ -320,6 +318,8 @@ def response(status, headers=(), answer=None): if len(sizeinfo) > 
0: sizeinfo = " (" + sizeinfo + ")" self.logger.info("[%s] %s response for %s in %s sec status: %s", request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) + if self.configuration.getboolean("storage", "cache"): + storage.cache_log_statistics_overall(self) # Return response content return [answer] if answer else [] From a4a6ccb30bccee01cf4c28bf5619ef47b05163e1 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 25 Oct 2016 07:27:14 +0200 Subject: [PATCH 040/101] implement separate config options for items/props cache --- config | 6 ++++-- radicale/config.py | 3 ++- radicale/storage.py | 21 ++++++++++++--------- 3 files changed, 18 insertions(+), 12 deletions(-) diff --git a/config b/config index 2ec443312..ebc9ab0e2 100644 --- a/config +++ b/config @@ -112,8 +112,10 @@ #hook = # Example: git add -A && (git diff --cached --quiet || git commit -m "Changes by "%(user)s) -# Caching (of Items and Props files) in memory -cache = True +# Caching of Items files in memory +cache_items = False +# Caching of Props files in memory +cache_props = False [logging] diff --git a/radicale/config.py b/radicale/config.py index 28e52d803..1f6f8fddb 100644 --- a/radicale/config.py +++ b/radicale/config.py @@ -59,7 +59,8 @@ "filesystem_folder": os.path.expanduser( "~/.config/radicale/collections"), "fsync": "True", - "cache": "True", + "cache_items": "false", + "cache_props": "false", "hook": "", "close_lock_file": "False"}, "logging": { diff --git a/radicale/storage.py b/radicale/storage.py index 97a992e11..26857c49b 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -99,30 +99,33 @@ def load(configuration, logger): global Props_cache_active global Items_cache_counter global Props_cache_counter - if configuration.getboolean("storage", "cache"): + if configuration.getboolean("storage", "cache_items"): Items_cache_active = True - Props_cache_active = True Items_cache_counter.loginterval = 
seconds=configuration.getint("logging", "cache_statistics_interval") - Props_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") if configuration.getboolean("logging", "performance"): if Items_cache_active: logger.info("Items cache enabled (performance log on info level)") Items_cache_counter.perflog = True - if Props_cache_active: - logger.info("Props cache enabled (performance log on info level)") - Props_cache_counter.perflog = True else: if (Items_cache_counter.loginterval > 0): logger.info("Items cache enabled (regular statistics log on info level with minimum interval %d sec)", Items_cache_counter.loginterval) else: logger.info("Items cache enabled (statistics log only on debug level)") - + else: + logger.info("Items cache disabled") + if configuration.getboolean("storage", "cache_props"): + Props_cache_active = True + Props_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") + if configuration.getboolean("logging", "performance"): + if Props_cache_active: + logger.info("Props cache enabled (performance log on info level)") + Props_cache_counter.perflog = True + else: if (Props_cache_counter.loginterval > 0): logger.info("Props cache enabled (regular statistics log on info level with minimum interval %d sec)", Props_cache_counter.loginterval) else: - logger.info("Items cache enabled (statistics log only on debug level)") + logger.info("Props cache enabled (statistics log only on debug level)") else: - logger.info("Items cache disabled") logger.info("Props cache disabled") else: collection_class = import_module(storage_type).Collection From 5640650ad3d52ade8e7f7fe2d053ac463dddd1c4 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 25 Oct 2016 05:33:49 +0000 Subject: [PATCH 041/101] missing adjustments regaring cache config --- radicale/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 
ce9f93f6e..e5298bf9d 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -318,7 +318,7 @@ def response(status, headers=(), answer=None): if len(sizeinfo) > 0: sizeinfo = " (" + sizeinfo + ")" self.logger.info("[%s] %s response for %s in %s sec status: %s", request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) - if self.configuration.getboolean("storage", "cache"): + if self.configuration.getboolean("storage", "cache_items") or self.configuration.getboolean("storage", "cache_props"): storage.cache_log_statistics_overall(self) # Return response content return [answer] if answer else [] From 5f5f9b3dcaef831c599a573be6dbd281b5a32835 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 25 Oct 2016 20:44:44 +0000 Subject: [PATCH 042/101] implement debug_filter for header,request,response --- config | 6 +++++- radicale/__init__.py | 14 +++++++------- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/config b/config index ebc9ab0e2..c7e27062d 100644 --- a/config +++ b/config @@ -130,7 +130,11 @@ cache_props = False # suppress selectable debug messages (to keep debug log cleaner) debug_filter = 0xffff -# 0x0080: suppress 'rights' "Test if" + "Section" +# 0x0001: suppress "header content" +# 0x0002: suppress "request content" +# 0x0004: suppress "response content" +# 0x0040: suppress "sanitized path" +# 0x0080: suppress "rights" "Test if" + "Section" # 0x0100: suppress "read object" # 0x0200: suppress "delete object from cache" # 0x0400: suppress "read props" diff --git a/radicale/__init__.py b/radicale/__init__.py index e5298bf9d..23c0c432c 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -283,7 +283,7 @@ def response(status, headers=(), answer=None): headers = dict(headers) # Set content length if answer: - if self.debug: + if self.debug and not (self.debug_filter & 0x0004): self.logger.debug("Response content [%s]:\n%s", self.request_token, 
answer) answer = answer.encode(self.encoding) accept_encoding = [ @@ -317,9 +317,8 @@ def response(status, headers=(), answer=None): sizeinfo = sizeinfo + headers["Content-Encoding"] if len(sizeinfo) > 0: sizeinfo = " (" + sizeinfo + ")" - self.logger.info("[%s] %s response for %s in %s sec status: %s", request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) - if self.configuration.getboolean("storage", "cache_items") or self.configuration.getboolean("storage", "cache_props"): - storage.cache_log_statistics_overall(self) + self.logger.info("[%s] %s response for %s in %.3f sec status: %s", request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) + storage.cache_log_statistics_overall(self) # Return response content return [answer] if answer else [] @@ -347,7 +346,7 @@ def response(status, headers=(), answer=None): self.logger.info("[%s] %s request for %s received from %s using \"%s\"", request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) - if self.debug: + if self.debug and not (self.debug_filter & 0x0001): headers = pprint.pformat(self.headers_log(environ)) self.logger.debug("Request headers [%s]:\n%s", self.request_token, headers) @@ -367,7 +366,8 @@ def response(status, headers=(), answer=None): # Sanitize request URI environ["PATH_INFO"] = storage.sanitize_path( unquote(environ["PATH_INFO"])) - self.logger.debug("Sanitized path: %s", environ["PATH_INFO"]) + if not (self.debug_filter & 0x0040): + self.logger.debug("Sanitized path: %s", environ["PATH_INFO"]) path = environ["PATH_INFO"] # Get function corresponding to method @@ -462,7 +462,7 @@ def _read_content(self, environ): if content_length > 0: content = self.decode( environ["wsgi.input"].read(content_length), environ) - if self.debug: + if self.debug and not (self.debug_filter & 0x0002): 
self.logger.debug("Request content [%s]:\n%s", self.request_token, content.strip()) else: content = None From d20c86fdee2b8637504edfb06c9f71a5b4fc3173 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 25 Oct 2016 20:45:32 +0000 Subject: [PATCH 043/101] extend logging on debug level with resource and garbage collector, add try/except around collection serialization to trace down an exception found on productive system --- radicale/storage.py | 53 ++++++++++++++++++++++++++++++++++++--------- 1 file changed, 43 insertions(+), 10 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 26857c49b..9acb764bf 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -35,6 +35,8 @@ import threading import time import datetime +import resource +import gc from contextlib import contextmanager from hashlib import md5 from importlib import import_module @@ -127,6 +129,8 @@ def load(configuration, logger): logger.info("Props cache enabled (statistics log only on debug level)") else: logger.info("Props cache disabled") + if configuration.getboolean("logging", "performance"): + logger.info("General performance log on debug level enabled") else: collection_class = import_module(storage_type).Collection logger.info("Radicale storage manager successfully loaded: %s", storage_type) @@ -257,12 +261,12 @@ def string_overall(self): message = "no cache entries" return(message) - def log_overall(self, token, logger): + def log_overall(self, token, logger, request_token): if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > datetime.timedelta(seconds=self.loginterval)): - logger.info("%s cache overall statistics: %s", token, self.string_overall()) + logger.info("[%s] %s cache overall statistics: %s", request_token, token, self.string_overall()) self.lastlog = datetime.datetime.now() else: - logger.debug("%s cache overall statistics: %s", token, self.string_overall()) + logger.debug("[%s] %s cache overall statistics: %s", 
request_token, token, self.string_overall()) ## cache entry @@ -294,9 +298,27 @@ def cache_log_statistics_overall(self): global Items_cache_counter global Props_cache_counter if Items_cache_active: - Items_cache_counter.log_overall("Items", self.logger) + Items_cache_counter.log_overall("Items", self.logger, self.request_token) if Props_cache_active: - Props_cache_counter.log_overall("Props", self.logger) + Props_cache_counter.log_overall("Props", self.logger, self.request_token) + if self.configuration.getboolean("logging", "performance"): + # log process statistics + rusage = resource.getrusage(resource.RUSAGE_THREAD) + self.logger.debug("[%s] ru_utime=%.3f ru_stime=%.3f ru_maxrss=%s ru_inblock=%s ru_oublock=%s", self.request_token, + rusage.ru_utime, + rusage.ru_stime, + rusage.ru_maxrss, + rusage.ru_inblock, + rusage.ru_oublock) + # log garbage collector statistics + self.logger.debug("[%s] gc_count=%s gc_threshold=%s gc.unreachable=%d gc.stats.0=%s gc.stats.1=%s gc.stats.2=%s", self.request_token, + gc.get_count(), + gc.get_threshold(), + len(gc.garbage), + str(gc.get_stats()[0]), + str(gc.get_stats()[1]), + str(gc.get_stats()[2]), + ) ### END Items/Props caching @@ -956,9 +978,10 @@ def serialize(self): items.append(self.get(href).item) time_end = datetime.datetime.now() if self.configuration.getboolean("logging", "performance"): - self.logger.info("Collection read %d items in %s sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) + self.logger.info("Collection read %d items in %.3f sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) else: - self.logger.debug("Collection read %d items in %s sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) + self.logger.debug("Collection read %d items in %.3f sec from %s", len(items),(time_end - time_begin).total_seconds(), self._filesystem_path) + result = "" if self.get_meta("tag") == "VCALENDAR": collection = 
vobject.iCalendar() for item in items: @@ -967,10 +990,20 @@ def serialize(self): for item_part in getattr(item, "%s_list" % content): collection.add(item_part) break - return collection.serialize() + try: + result = collection.serialize() + except: + self.logger.error("VCALENDAR collection serializing broken: %s (%s)", self._filesystem_path, e) + if self.configuration.getboolean("logging", "exceptions"): + self.logger.exception("Exception details:") elif self.get_meta("tag") == "VADDRESSBOOK": - return "".join([item.serialize() for item in items]) - return "" + try: + result = "".join([item.serialize() for item in items]) + except: + self.logger.error("VADDRESSBOOK collection serializing broken: %s (%s)", self._filesystem_path, e) + if self.configuration.getboolean("logging", "exceptions"): + self.logger.exception("Exception details:") + return result _lock = threading.Lock() _waiters = [] From 79b74acb051f044ebc5a207858aa3da98d415f13 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 27 Oct 2016 06:40:15 +0000 Subject: [PATCH 044/101] optionally log item content after read, fix log lines --- config | 1 + radicale/storage.py | 14 ++++++++------ 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/config b/config index c7e27062d..980f29fc6 100644 --- a/config +++ b/config @@ -133,6 +133,7 @@ debug_filter = 0xffff # 0x0001: suppress "header content" # 0x0002: suppress "request content" # 0x0004: suppress "response content" +# 0x0008: suppress "item content (on read)" # 0x0040: suppress "sanitized path" # 0x0080: suppress "rights" "Test if" + "Section" # 0x0100: suppress "read object" diff --git a/radicale/storage.py b/radicale/storage.py index 9acb764bf..9f8892fb2 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -259,14 +259,14 @@ def string_overall(self): ) else: message = "no cache entries" - return(message) + return message - def log_overall(self, token, logger, request_token): + def log_overall(self, token, logger): if (self.perflog) 
or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > datetime.timedelta(seconds=self.loginterval)): - logger.info("[%s] %s cache overall statistics: %s", request_token, token, self.string_overall()) + logger.info("%s cache overall statistics: %s", token, self.string_overall()) self.lastlog = datetime.datetime.now() else: - logger.debug("[%s] %s cache overall statistics: %s", request_token, token, self.string_overall()) + logger.debug("%s cache overall statistics: %s", token, self.string_overall()) ## cache entry @@ -298,9 +298,9 @@ def cache_log_statistics_overall(self): global Items_cache_counter global Props_cache_counter if Items_cache_active: - Items_cache_counter.log_overall("Items", self.logger, self.request_token) + Items_cache_counter.log_overall("Items", self.logger) if Props_cache_active: - Props_cache_counter.log_overall("Props", self.logger, self.request_token) + Props_cache_counter.log_overall("Props", self.logger) if self.configuration.getboolean("logging", "performance"): # log process statistics rusage = resource.getrusage(resource.RUSAGE_THREAD) @@ -811,6 +811,8 @@ def get(self, href): if not self.debug_filter & 0x0100: self.logger.debug("Item read ('get'): %s", path) item = vobject.readOne(text) + if not self.debug_filter & 0x0008: + self.logger.debug("Item content ('get'): %s:\n%s", path, str(item)) except Exception as e: self.logger.error("Object broken on read (skip 'get'): %s (%s)", path, e) if self.configuration.getboolean("logging", "exceptions"): From 90bb5456e6ebad3df63cce6050b65eb8e8066976 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 27 Oct 2016 22:12:10 +0200 Subject: [PATCH 045/101] fix copyright --- radicale-storage-check.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/radicale-storage-check.py b/radicale-storage-check.py index 5a68d1d8a..13e5af90e 100755 --- a/radicale-storage-check.py +++ b/radicale-storage-check.py @@ -1,8 +1,7 @@ #!/usr/bin/python3.4 -s # # This file is a tool 
for the Radicale Server - Calendar Server -# Copyright © 2014 Jean-Marc Martins -# Copyright © 2012-2016 Guillaume Ayoub +# Copyright © 2016 Peter Bieringer # # This library is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by From 2bd1b4f71a9264e01600a70b15e8e79d57d52493 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 13 Nov 2016 11:08:17 +0000 Subject: [PATCH 046/101] remove not implemented offering of "sync-collection" on REPORT request preventing at least SOGo connector to start using "sync-token" queries, which are not supported. See also: - https://github.com/Kozea/Radicale/issues/306 - https://sogo.nu/bugs/view.php?id=3896 --- radicale/xmlutils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/radicale/xmlutils.py b/radicale/xmlutils.py index 89c1ba683..39591d4e6 100644 --- a/radicale/xmlutils.py +++ b/radicale/xmlutils.py @@ -615,7 +615,7 @@ def _propfind_response(path, item, props, user, write=False): element.append(privilege) elif tag == _tag("D", "supported-report-set"): for report_name in ( - "principal-property-search", "sync-collection", + "principal-property-search", "expand-property", "principal-search-property-set"): supported = ET.Element(_tag("D", "supported-report")) report_tag = ET.Element(_tag("D", "report")) From db03bbe69577baa936abbf91193ba6535ab60aa5 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 15 Nov 2016 22:15:12 +0100 Subject: [PATCH 047/101] catch broken items during put --- radicale/__init__.py | 3 +++ radicale/storage.py | 10 +++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 23c0c432c..fd71162e9 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -682,6 +682,9 @@ def do_PUT(self, environ, path, user): parent_item.set_meta({"tag": tag}) href = posixpath.basename(path.strip("/")) new_item = parent_item.upload(href, items[0]) + if not 
new_item: + # upload not possible because of error + return PRECONDITION_FAILED headers = {"ETag": new_item.etag} return client.CREATED, headers, None diff --git a/radicale/storage.py b/radicale/storage.py index 9f8892fb2..509685d4a 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -859,7 +859,15 @@ def upload(self, href, vobject_item): path = path_to_filesystem(self._filesystem_path, href) item = Item(self, vobject_item, href) with self._atomic_write(path, newline="") as fd: - fd.write(item.serialize()) + try: + item_serialized = item.serialize() + except Exception as e: + self.logger.error("Object broken on serialize (skip 'upload'): %s (%s)", href, e) + self.logger.error("Item content ('upload'): %s:\n%s", href, str(vobject_item)) + if self.configuration.getboolean("logging", "exceptions"): + self.logger.exception("Exception details:") + return None; + fd.write(item_serialized) return item def delete(self, href=None): From c84f2f9b2857bd4f19bc645339eb9e52119c0ce3 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 15 Nov 2016 22:31:33 +0100 Subject: [PATCH 048/101] detect empty files (created by older put which broke on item.serialize) --- radicale/storage.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/radicale/storage.py b/radicale/storage.py index 509685d4a..71d095034 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -806,6 +806,12 @@ def get(self, href): if Item_cache_hit == 0 or Items_cache_active == False: with open(path, encoding=self.encoding, newline="") as f: text = f.read() + if len(text) == 0: + # empty file + self.logger.error("Object empty (skip 'get'): %s", path) + if Items_cache_active: + Items_cache_lock.release() + return None; try: if not self.debug_filter & 0x0100: From e8d1d6bf10b14d690d3984b0692da523f3634d32 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 15 Nov 2016 22:35:08 +0100 Subject: [PATCH 049/101] check item.serialize before creating file to avoid empty file --- radicale/storage.py 
| 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 71d095034..1d7f03fcf 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -864,15 +864,15 @@ def upload(self, href, vobject_item): raise UnsafePathError(href) path = path_to_filesystem(self._filesystem_path, href) item = Item(self, vobject_item, href) + try: + item_serialized = item.serialize() + except Exception as e: + self.logger.error("Object broken on serialize (skip 'upload'): %s (%s)", href, e) + self.logger.error("Item content ('upload'): %s:\n%s", href, str(vobject_item)) + if self.configuration.getboolean("logging", "exceptions"): + self.logger.exception("Exception details:") + return None; with self._atomic_write(path, newline="") as fd: - try: - item_serialized = item.serialize() - except Exception as e: - self.logger.error("Object broken on serialize (skip 'upload'): %s (%s)", href, e) - self.logger.error("Item content ('upload'): %s:\n%s", href, str(vobject_item)) - if self.configuration.getboolean("logging", "exceptions"): - self.logger.exception("Exception details:") - return None; fd.write(item_serialized) return item From b4fc36fb9abd3fb083710bdaa1b4ba707bf7ca2d Mon Sep 17 00:00:00 2001 From: Tim Woodcock Date: Tue, 29 Nov 2016 16:32:27 -0800 Subject: [PATCH 050/101] remove umask(0) in daemon mode --- radicale/__main__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/radicale/__main__.py b/radicale/__main__.py index 2999fecdb..c572dc682 100644 --- a/radicale/__main__.py +++ b/radicale/__main__.py @@ -132,7 +132,6 @@ def serve(configuration, logger): with os.fdopen(pid_fd, "w") as pid_file: pid_file.write(str(os.getpid())) # Decouple environment - os.umask(0) os.chdir("/") os.setsid() with open(os.devnull, "r") as null_in: From 1beef09df9474d5681cb3bd8be7595ddcb4799c9 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 9 Oct 2016 12:00:46 +0200 Subject: [PATCH 051/101] fix broken merge --- 
radicale/storage.py | 1 + 1 file changed, 1 insertion(+) diff --git a/radicale/storage.py b/radicale/storage.py index 89e06dc9c..4e2d9a527 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -648,6 +648,7 @@ def get(self, href): "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(os.path.getmtime(path))) try: + self.logger.debug("Read object: %s", path) item = vobject.readOne(text) except Exception: self.logger.error("Failed to parse component: %s", href) From 12a49ffc23e6975c3cfca992256f6fab551882ff Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 9 Oct 2016 11:12:48 +0000 Subject: [PATCH 052/101] improve logging and reimplement VCARD check --- radicale/storage.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 4e2d9a527..db9a2196a 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -650,9 +650,16 @@ def get(self, href): try: self.logger.debug("Read object: %s", path) item = vobject.readOne(text) - except Exception: - self.logger.error("Failed to parse component: %s", href) - raise + except Exception as e: + self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) + return None; + if self.get_meta("tag") == "VADDRESSBOOK": + # check whether vobject likes the VCARD item + try: + item.serialize() + except Exception as e: + self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) + return None; return Item(self, item, href, last_modified) def upload(self, href, vobject_item): @@ -718,7 +725,8 @@ def serialize(self): items = [] time_begin = datetime.datetime.now() for href in self.list(): - items.append(self.get(href).item) + if hasattr(self.get(href),'item'): + items.append(self.get(href).item) time_end = datetime.datetime.now() self.logger.info( "Collection read %d items in %s sec from %s", len(items), From 86dd3f4383118b5aa7c1f2db63b7616f834d275f Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 9 Oct 2016 16:04:10 +0000 Subject: 
[PATCH 053/101] extended response logging --- radicale/__init__.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 09f3d551e..33c5dfb38 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -308,14 +308,23 @@ def response(status, headers=(), answer=None): headers[key] = self.configuration.get("headers", key) # Start response - time_end = datetime.datetime.now() status = "%i %s" % ( status, client.responses.get(status, "Unknown")) + start_response(status, list(headers.items())) + time_end = datetime.datetime.now() + sizeinfo = "" + if answer: + sizeinfo = sizeinfo + str(len(answer)) + " bytes" + if "Content-Encoding" in headers: + if len(sizeinfo) > 0: + sizeinfo = sizeinfo + " " + sizeinfo = sizeinfo + headers["Content-Encoding"] + if len(sizeinfo) > 0: + sizeinfo = " (" + sizeinfo + ")" self.logger.info( - "%s answer status for %s in %s sec: %s", - environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, + "%s response status for %s in %s sec: %s", + environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) - start_response(status, list(headers.items())) # Return response content return [answer] if answer else [] From 43a34b537a9c46d67b2049bf86e58c7714191117 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 11 Oct 2016 05:32:29 +0000 Subject: [PATCH 054/101] minor logging improvement --- radicale/__init__.py | 2 +- radicale/storage.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 33c5dfb38..50a7843f0 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -314,7 +314,7 @@ def response(status, headers=(), answer=None): time_end = datetime.datetime.now() sizeinfo = "" if answer: - sizeinfo = sizeinfo + str(len(answer)) + " bytes" + sizeinfo = sizeinfo + str(len(str(answer))) + " bytes" if "Content-Encoding" in 
headers: if len(sizeinfo) > 0: sizeinfo = sizeinfo + " " diff --git a/radicale/storage.py b/radicale/storage.py index db9a2196a..6c0e40f80 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -648,7 +648,7 @@ def get(self, href): "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(os.path.getmtime(path))) try: - self.logger.debug("Read object: %s", path) + self.logger.debug("Read object ('get'): %s", path) item = vobject.readOne(text) except Exception as e: self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) From f92bbb75e630d92153a8d1c4d231af58910029e0 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 11 Oct 2016 10:44:13 +0000 Subject: [PATCH 055/101] add Item caching (default: enabled, can be disabled by config option) --- config | 3 + radicale/config.py | 18 ++++++ radicale/storage.py | 131 +++++++++++++++++++++++++++++++++++++++----- 3 files changed, 138 insertions(+), 14 deletions(-) diff --git a/config b/config index 8855c1184..65a011c0f 100644 --- a/config +++ b/config @@ -111,6 +111,9 @@ # Example: git add -A && (git diff --cached --quiet || git commit -m "Changes by "%(user)s) #hook = +# Caching (of Items) in memory +cache = True + [logging] diff --git a/radicale/config.py b/radicale/config.py index c4c1b38c2..c7943a7bb 100644 --- a/radicale/config.py +++ b/radicale/config.py @@ -118,6 +118,12 @@ ("hook", { "value": "", "help": "command that is run after changes to storage"})])), + ("cache_items", { + "value": "False", + "help": "enable item file caching in memory"}), + ("cache_props", { + "value": "False", + "help": "enable props file caching in memory"}), ("logging", OrderedDict([ ("config", { "value": "/etc/radicale/logging", @@ -126,12 +132,24 @@ "value": "False", "help": "print debug information", "aliases": ["-D", "--debug"]}), + ("debug_filter", { + "value": "0xffff", + "help": "filter debug log (binary mask)") + ("exceptions": { + "value" : "False", + "help": "include exceptions in logs") ("full_environment", { "value": 
"False", "help": "store all environment variables"}), ("mask_passwords", { "value": "True", "help": "mask passwords in logs"})]))]) + ("cache_statistics_interval", { + "value": "300", + "help": "log interval of item/props cache statistics on info level") + ("performance": { + "value": "False", + "help": "include cache/collections performance statistics in logs on info level") def load(paths=(), extra_config=None): diff --git a/radicale/storage.py b/radicale/storage.py index 6c0e40f80..ce4ac26b2 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -36,6 +36,7 @@ import threading import time import datetime +import copy from contextlib import contextmanager from hashlib import md5 from importlib import import_module @@ -223,6 +224,59 @@ def etag(self): return get_etag(self.serialize()) +## Item caching +# cache counter +class Item_cache_counter: + def __init__(self, lookup, hit, miss, dirty): + self.lookup = 0 + self.hit = 0 + self.miss = 0 + self.dirty = 0 + + def string(self): + if (self.lookup > 0): + message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%)" % ( + self.lookup, + self.hit, + self.hit * 100 / self.lookup, + self.miss, + self.miss * 100 / self.lookup, + self.dirty, + self.dirty * 100 / self.lookup + ) + else: + message = "no cache lookups so far" + return(message) + + def string_delta(self, stamp): + delta_lookup = self.lookup - stamp.lookup + if (delta_lookup > 0): + message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%)" % ( + delta_lookup, + self.hit - stamp.hit, + (self.hit - stamp.hit) * 100 / delta_lookup, + self.miss - stamp.miss, + (self.miss - stamp.miss) * 100 / delta_lookup, + self.dirty - stamp.dirty, + (self.dirty - stamp.dirty) * 100 / delta_lookup + ) + else: + message = "no cache lookups so far" + return(message) + +# cache entry +class Item_cache_entry: + def __init__(self, Item, last_modified_time, last_used_time): + self.Item = Item + self.last_modified_time = last_modified_time + 
self.last_used_time = last_used_time + +# cache initialization +Item_cache_data = {} +Item_cache_counter = Item_cache_counter(0, 0, 0, 0) +Item_cache_active = 0 + + class BaseCollection: # Overriden on copy by the "load" function @@ -379,6 +433,7 @@ class Collection(BaseCollection): """Collection stored in several files per calendar.""" def __init__(self, path, principal=False, folder=None): + global Item_cache_active if not folder: folder = self._get_collection_root_folder() # Path should already be sanitized @@ -390,6 +445,10 @@ def __init__(self, path, principal=False, folder=None): split_path = self.path.split("/") self.owner = split_path[0] if len(split_path) > 1 else None self.is_principal = principal + if self.configuration.getboolean("storage", "cache"): + if Item_cache_active == 0: + self.logger.info("Item cache enabled") + Item_cache_active = 1 @classmethod def _get_collection_root_folder(cls): @@ -623,6 +682,11 @@ def move(cls, item, to_collection, to_href): cls._sync_directory(item.collection._filesystem_path) def list(self): + global Item_cache_data + global Item_cache_counter + global Item_cache_active + if Item_cache_active == 1: + Item_cache_counter_stamp = copy.deepcopy(Item_cache_counter) for href in os.listdir(self._filesystem_path): if not is_safe_filesystem_path_component(href): if not href.startswith(".Radicale"): @@ -631,8 +695,14 @@ def list(self): path = os.path.join(self._filesystem_path, href) if os.path.isfile(path): yield href + if Item_cache_active == 1: + self.logger.info("Cache current statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) + self.logger.info("Cache global statistics: %s", Item_cache_counter.string()) def get(self, href): + global Item_cache_data + global Item_cache_counter + global Item_cache_active if not href: return None if not is_safe_filesystem_path_component(href): @@ -642,25 +712,51 @@ def get(self, href): path = path_to_filesystem(self._filesystem_path, href) if not os.path.isfile(path): 
return None - with open(path, encoding=self.encoding, newline="") as f: - text = f.read() + last_modified_time = os.path.getmtime(path) last_modified = time.strftime( "%a, %d %b %Y %H:%M:%S GMT", - time.gmtime(os.path.getmtime(path))) - try: - self.logger.debug("Read object ('get'): %s", path) - item = vobject.readOne(text) - except Exception as e: - self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) - return None; - if self.get_meta("tag") == "VADDRESSBOOK": - # check whether vobject likes the VCARD item + time.gmtime(last_modified_time)) + + Item_cache_hit = 0 + if Item_cache_active == 1: + # Item cache lookup + Item_cache_counter.lookup += 1 + if path in Item_cache_data: + if Item_cache_data[path].last_modified_time == last_modified_time: + Item_cache_counter.hit += 1 + Item_cache_hit = 1 + else: + Item_cache_counter.dirty += 1 + else: + Item_cache_counter.miss += 1 + + if Item_cache_hit == 0 or Item_cache_active == 0: + with open(path, encoding=self.encoding, newline="") as f: + text = f.read() try: - item.serialize() + self.logger.debug("Read object ('get'): %s", path) + item = vobject.readOne(text) except Exception as e: - self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) + self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) return None; - return Item(self, item, href, last_modified) + if self.get_meta("tag") == "VADDRESSBOOK": + # check whether vobject likes the VCARD item + try: + item.serialize() + except Exception as e: + self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) + return None; + Item_entry = Item(self, item, href, last_modified) + + if Item_cache_active == 1: + # cache handling + self.logger.debug("Store item in cache: %s", path) + Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time, datetime.datetime.now()) + + return Item_entry + else: + self.logger.debug("Retrieve from cache: %s", path) + return Item_cache_data[path].Item def upload(self, href, 
vobject_item): if not is_safe_filesystem_path_component(href): @@ -672,6 +768,8 @@ def upload(self, href, vobject_item): return item def delete(self, href=None): + global Item_cache_data + global Item_cache_active if href is None: # Delete the collection parent_dir = os.path.dirname(self._filesystem_path) @@ -694,6 +792,11 @@ def delete(self, href=None): raise ComponentNotFoundError(href) os.remove(path) self._sync_directory(os.path.dirname(path)) + if Item_cache_active == 1: + # remove from cache, if existing + if path in Item_cache_data: + self.logger.debug("Delete from cache: %s", path) + del Item_cache_data[path] def get_meta(self, key=None): if os.path.exists(self._props_path): From 3be24cc8cbe5b1070263efa30ce948438b39bf40 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 13 Oct 2016 05:30:26 +0000 Subject: [PATCH 056/101] cosmetic renaming of cache stats logging --- radicale/storage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index ce4ac26b2..729516229 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -696,8 +696,8 @@ def list(self): if os.path.isfile(path): yield href if Item_cache_active == 1: - self.logger.info("Cache current statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) - self.logger.info("Cache global statistics: %s", Item_cache_counter.string()) + self.logger.info("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) + self.logger.info("Cache overall statistics: %s", Item_cache_counter.string()) def get(self, href): global Item_cache_data From 7ebbe9580b438e3a799dd41916a977f44d63595a Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 13 Oct 2016 05:55:20 +0000 Subject: [PATCH 057/101] make cache/collection performance logging optional --- radicale/storage.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 
729516229..19c65429f 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -831,9 +831,14 @@ def serialize(self): if hasattr(self.get(href),'item'): items.append(self.get(href).item) time_end = datetime.datetime.now() - self.logger.info( - "Collection read %d items in %s sec from %s", len(items), - (time_end - time_begin).total_seconds(), self._filesystem_path) + if self.logging_performance == 1: + self.logger.info( + "Collection read %d items in %s sec from %s", len(items), + (time_end - time_begin).total_seconds(), self._filesystem_path) + else: + self.logger.debug( + "Collection read %d items in %s sec from %s", len(items), + (time_end - time_begin).total_seconds(), self._filesystem_path) if self.get_meta("tag") == "VCALENDAR": collection = vobject.iCalendar() for item in items: From 978655994865c7840f74f999dcc3d3c1a15f4f52 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 13 Oct 2016 20:07:53 +0000 Subject: [PATCH 058/101] adjust some startup log levels --- radicale/__main__.py | 4 ++-- radicale/auth.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/radicale/__main__.py b/radicale/__main__.py index 79b410a0c..782dbb22e 100644 --- a/radicale/__main__.py +++ b/radicale/__main__.py @@ -188,7 +188,7 @@ def cleanup(): server = make_server( address, port, application, server_class, RequestHandler) servers[server.socket] = server - logger.debug("Listening to %s port %s", + logger.info("Listening to %s port %s", server.server_name, server.server_port) if configuration.getboolean("server", "ssl"): logger.debug("Using SSL") @@ -224,7 +224,7 @@ def shutdown(*args): else: # Fallback to busy waiting select_timeout = 1.0 - logger.debug("Radicale server ready") + logger.info("Radicale server ready") while not shutdown_program: try: rlist, _, xlist = select.select( diff --git a/radicale/auth.py b/radicale/auth.py index 2fa2807ee..30422ec96 100644 --- a/radicale/auth.py +++ b/radicale/auth.py @@ -63,7 +63,7 @@ def load(configuration, 
logger): """Load the authentication manager chosen in configuration.""" auth_type = configuration.get("auth", "type") - logger.debug("Authentication type is %s", auth_type) + logger.info("Authentication type is %s", auth_type) if auth_type == "None": class_ = NoneAuth elif auth_type == "htpasswd": From 2289293d9ba5cda036283639f9c1ab14053e7af7 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 13 Oct 2016 20:10:24 +0000 Subject: [PATCH 059/101] change shutdown loglevel --- radicale/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/radicale/__main__.py b/radicale/__main__.py index 782dbb22e..197f678c0 100644 --- a/radicale/__main__.py +++ b/radicale/__main__.py @@ -142,7 +142,7 @@ def serve(configuration, logger): # Register exit function def cleanup(): """Remove the PID files.""" - logger.debug("Cleaning up") + logger.info("Cleaning up") # Remove PID file if (configuration.get("server", "pid") and configuration.getboolean("server", "daemon")): From 4ff7d7b10a09691811fb17202e5425d550791c9a Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Fri, 14 Oct 2016 05:46:11 +0000 Subject: [PATCH 060/101] log cache statistics on exit --- radicale/__main__.py | 5 +++-- radicale/storage.py | 6 ++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/radicale/__main__.py b/radicale/__main__.py index 197f678c0..fcaec7e5e 100644 --- a/radicale/__main__.py +++ b/radicale/__main__.py @@ -34,7 +34,7 @@ from . 
import ( VERSION, Application, RequestHandler, ThreadedHTTPServer, - ThreadedHTTPSServer, config, log) + ThreadedHTTPSServer, config, log, storage) def run(): @@ -142,13 +142,14 @@ def serve(configuration, logger): # Register exit function def cleanup(): """Remove the PID files.""" - logger.info("Cleaning up") + logger.info("Cleaning up 'main'") # Remove PID file if (configuration.get("server", "pid") and configuration.getboolean("server", "daemon")): os.unlink(configuration.get("server", "pid")) atexit.register(cleanup) + atexit.register(storage.cleanup, logger) logger.info("Starting Radicale") # Create collection servers diff --git a/radicale/storage.py b/radicale/storage.py index 19c65429f..30ae6405d 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -102,6 +102,12 @@ class CollectionCopy(collection_class): return CollectionCopy +def cleanup(logger): + """Print cache statistics.""" + logger.info("Cleaning up 'storage'") + logger.info("Cache overall statistics: %s", Item_cache_counter.string()) + + def get_etag(text): """Etag from collection or item. From c64db7fe39a29002272b17477a16bb60b602e841 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Fri, 14 Oct 2016 19:02:49 +0000 Subject: [PATCH 061/101] add option for regular cache statistics log --- config | 8 ++++++++ radicale/storage.py | 32 +++++++++++++++++++++++++++++--- 2 files changed, 37 insertions(+), 3 deletions(-) diff --git a/config b/config index 65a011c0f..77b4a5c11 100644 --- a/config +++ b/config @@ -132,6 +132,14 @@ cache = True # Don't include passwords in logs #mask_passwords = True +# Include cache/collections performance statistics in logs on info level +performance = True + +# Include regular cache performance statistics in logs on info level +# 0: on each request (incl. 
current request) +# >0: minimum interval in seconds (only global) +cache_statistics_interval = 60 + [headers] diff --git a/radicale/storage.py b/radicale/storage.py index 30ae6405d..6c59a1bef 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -282,6 +282,13 @@ def __init__(self, Item, last_modified_time, last_used_time): Item_cache_counter = Item_cache_counter(0, 0, 0, 0) Item_cache_active = 0 +## cache regular statistics logging on info level +# 0: on each request (incl. current request) +# >0: after at least every given seconds (exc. current request) +Item_cache_statistics_log_interval = datetime.timedelta(seconds=60) +# init timestamp +Item_cache_statistics_log_last_time = datetime.datetime.now() + class BaseCollection: @@ -440,6 +447,7 @@ class Collection(BaseCollection): def __init__(self, path, principal=False, folder=None): global Item_cache_active + global Item_cache_statistics_log_interval if not folder: folder = self._get_collection_root_folder() # Path should already be sanitized @@ -451,9 +459,19 @@ def __init__(self, path, principal=False, folder=None): split_path = self.path.split("/") self.owner = split_path[0] if len(split_path) > 1 else None self.is_principal = principal + self.logging_performance = 0 + if self.configuration.getboolean("logging", "performance"): + self.logging_performance = 1 + Item_cache_statistics_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) if self.configuration.getboolean("storage", "cache"): if Item_cache_active == 0: - self.logger.info("Item cache enabled") + if self.logging_performance == 1: + self.logger.info("Item cache enabled (performance log on info level)") + else: + if (Item_cache_statistics_log_interval.total_seconds() > 0): + self.logger.info("Item cache enabled (regular statistics log on info level with minimum interval %d sec)", Item_cache_statistics_log_interval.total_seconds()) + else: + self.logger.info("Item cache enabled (statistics 
log only on debug level)") Item_cache_active = 1 @classmethod @@ -691,6 +709,8 @@ def list(self): global Item_cache_data global Item_cache_counter global Item_cache_active + global Item_cache_statistics_log_interval + global Item_cache_statistics_log_last_time if Item_cache_active == 1: Item_cache_counter_stamp = copy.deepcopy(Item_cache_counter) for href in os.listdir(self._filesystem_path): @@ -702,8 +722,14 @@ def list(self): if os.path.isfile(path): yield href if Item_cache_active == 1: - self.logger.info("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) - self.logger.info("Cache overall statistics: %s", Item_cache_counter.string()) + if (self.logging_performance == 1) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): + if (self.logging_performance == 1) or (Item_cache_statistics_log_interval == 0): + self.logger.info("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) + self.logger.info("Cache overall statistics: %s", Item_cache_counter.string()) + Item_cache_statistics_log_last_time = datetime.datetime.now() + else: + self.logger.debug("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) + self.logger.debug("Cache overall statistics: %s", Item_cache_counter.string()) def get(self, href): global Item_cache_data From cafb7ed9a2e59e5d33df1872c6855313251c29bf Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Fri, 14 Oct 2016 19:25:19 +0000 Subject: [PATCH 062/101] move cache statistics logging into class function --- radicale/storage.py | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 6c59a1bef..24e3f0473 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -270,7 +270,19 @@ def string_delta(self, stamp): message = "no cache lookups so far" 
return(message) -# cache entry + def log(self, logger, stamp, logging_performance): + global Item_cache_statistics_log_interval + global Item_cache_statistics_log_last_time + if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): + if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0): + logger.info("Cache request statistics: %s", self.string_delta(stamp)) + logger.info("Cache overall statistics: %s", self.string()) + Item_cache_statistics_log_last_time = datetime.datetime.now() + else: + logger.debug("Cache request statistics: %s", self.string_delta(stamp)) + logger.debug("Cache overall statistics: %s", self.string()) + +## cache entry class Item_cache_entry: def __init__(self, Item, last_modified_time, last_used_time): self.Item = Item @@ -709,8 +721,6 @@ def list(self): global Item_cache_data global Item_cache_counter global Item_cache_active - global Item_cache_statistics_log_interval - global Item_cache_statistics_log_last_time if Item_cache_active == 1: Item_cache_counter_stamp = copy.deepcopy(Item_cache_counter) for href in os.listdir(self._filesystem_path): @@ -722,14 +732,7 @@ def list(self): if os.path.isfile(path): yield href if Item_cache_active == 1: - if (self.logging_performance == 1) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): - if (self.logging_performance == 1) or (Item_cache_statistics_log_interval == 0): - self.logger.info("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) - self.logger.info("Cache overall statistics: %s", Item_cache_counter.string()) - Item_cache_statistics_log_last_time = datetime.datetime.now() - else: - self.logger.debug("Cache request statistics: %s", Item_cache_counter.string_delta(Item_cache_counter_stamp)) - self.logger.debug("Cache 
overall statistics: %s", Item_cache_counter.string()) + Item_cache_counter.log(self.logger, Item_cache_counter_stamp, self.logging_performance) def get(self, href): global Item_cache_data From 9e12481a8609b8f5a06990e00eb53e501aa7def8 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 05:30:56 +0000 Subject: [PATCH 063/101] log cache request statisics unconditionally on debug level --- radicale/storage.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/radicale/storage.py b/radicale/storage.py index 24e3f0473..9c7cc8d23 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -276,6 +276,8 @@ def log(self, logger, stamp, logging_performance): if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0): logger.info("Cache request statistics: %s", self.string_delta(stamp)) + else: + logger.debug("Cache request statistics: %s", self.string_delta(stamp)) logger.info("Cache overall statistics: %s", self.string()) Item_cache_statistics_log_last_time = datetime.datetime.now() else: From 1bc81d1da2d339020f93313af0f1ef4ba16cfbc9 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 05:32:15 +0000 Subject: [PATCH 064/101] add optional dedicated debug log with instructions --- logging | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/logging b/logging index 250359c46..80fcd3fbf 100644 --- a/logging +++ b/logging @@ -22,7 +22,7 @@ keys = console,file [formatters] # Logging formatters -keys = simple,full +keys = simple,full,filedebug # Loggers @@ -31,6 +31,8 @@ keys = simple,full # Root logger level = DEBUG handlers = console,file +# Disable line above and enable following line for dedicated additional debug log +#handlers = console,file,filedebug # Handlers @@ -45,9 +47,17 @@ formatter = simple [handler_file] # 
File handler class = FileHandler +# Enable following line in case dedicated debug log is activated +#level = INFO args = ('/var/log/radicale',) formatter = full +[handler_filedebug] +# File handler (for dedicated debug log) +class = FileHandler +args = ('/var/log/radicale/radicale-debug.log',) +formatter = full + # Formatters From 29ac974b27af8901721a57e9a44648fd6db611cb Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 05:33:43 +0000 Subject: [PATCH 065/101] change log level for SSL info and extend startup log message in case 'debug' is enabled --- radicale/__main__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/radicale/__main__.py b/radicale/__main__.py index fcaec7e5e..3ba18fee4 100644 --- a/radicale/__main__.py +++ b/radicale/__main__.py @@ -192,7 +192,7 @@ def cleanup(): logger.info("Listening to %s port %s", server.server_name, server.server_port) if configuration.getboolean("server", "ssl"): - logger.debug("Using SSL") + logger.info("Using SSL") # Create a socket pair to notify the select syscall of program shutdown # This is not available in python < 3.5 on Windows @@ -225,7 +225,10 @@ def shutdown(*args): else: # Fallback to busy waiting select_timeout = 1.0 - logger.info("Radicale server ready") + if configuration.getboolean("logging", "debug"): + logger.info("Radicale server ready (with 'debug' enabled)") + else: + logger.info("Radicale server ready") while not shutdown_program: try: rlist, _, xlist = select.select( From 495ea7de8ac0f69172ce3ef6379146c5fe77be07 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 05:35:52 +0000 Subject: [PATCH 066/101] enable request/response logging only if 'debug' was explicitly selected by config/command line option --- radicale/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 50a7843f0..ba21322a6 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -205,6 
+205,7 @@ def __init__(self, configuration, logger): self.Collection = storage.load(configuration, logger) self.authorized = rights.load(configuration, logger) self.encoding = configuration.get("encoding", "request") + self.debug = configuration.getboolean("logging", "debug") def headers_log(self, environ): """Sanitize headers for logging.""" @@ -287,7 +288,8 @@ def response(status, headers=(), answer=None): headers = dict(headers) # Set content length if answer: - self.logger.debug("Response content:\n%s", answer) + if self.debug: + self.logger.debug("Response content:\n%s", answer) answer = answer.encode(self.encoding) accept_encoding = [ encoding.strip() for encoding in @@ -459,7 +461,8 @@ def _read_content(self, environ): if content_length > 0: content = self.decode( environ["wsgi.input"].read(content_length), environ) - self.logger.debug("Request content:\n%s", content.strip()) + if self.debug: + self.logger.debug("Request content:\n%s", content.strip()) else: content = None return content From 2e74d6d079706dc87c6f3bcf22c35d29c9edf6cf Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 05:38:11 +0000 Subject: [PATCH 067/101] enable request header logging only if 'debug' was explicitly selected by config/command line option --- radicale/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index ba21322a6..0369f07b7 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -350,7 +350,8 @@ def response(status, headers=(), answer=None): environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) headers = pprint.pformat(self.headers_log(environ)) - self.logger.debug("Request headers:\n%s", headers) + if self.debug: + self.logger.debug("Request headers:\n%s", headers) # Let reverse proxies overwrite SCRIPT_NAME if "HTTP_X_SCRIPT_NAME" in environ: From 41e1325248a65ee740e45f713d2719d3c3d208eb Mon Sep 17 00:00:00 2001 From: Peter Bieringer 
Date: Sat, 15 Oct 2016 10:29:04 +0000 Subject: [PATCH 068/101] add examples for optional usage of TimedRotatingFileHandler --- logging | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/logging b/logging index 80fcd3fbf..ad831fd13 100644 --- a/logging +++ b/logging @@ -45,18 +45,27 @@ args = (sys.stderr,) formatter = simple [handler_file] +formatter = full +# Enable following line in case dedicated debug log (see below) is activated +#level = INFO # File handler class = FileHandler -# Enable following line in case dedicated debug log is activated -#level = INFO args = ('/var/log/radicale',) -formatter = full +# Time Rotating File Handler (class+args: disable above and enable below) +# (filename, when, interval, backupCount, ...) [see https://docs.python.org/3/library/logging.handlers.html#logging.handlers.TimedRotatingFileHandler] +# example: rollover at midnight every day, keep 90 files (means 90 days) +#class = handlers.TimedRotatingFileHandler +#args = ('/var/log/radicale/radicale.log', 'midnight', 1, 90) [handler_filedebug] +formatter = full # File handler (for dedicated debug log) class = FileHandler args = ('/var/log/radicale/radicale-debug.log',) -formatter = full +# Time Rotating File Handler (class+args: disable above and enable below) +# example: rollover every hour, keep 24 files (means 1 days) +#class = handlers.TimedRotatingFileHandler +#args = ('/var/log/radicale/radicale-debug.log', 'H', 1, 24) # Formatters From 3a43bf0cddb0ee012a17298f674867db21c3b346 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sat, 15 Oct 2016 10:30:25 +0000 Subject: [PATCH 069/101] change default of cache_statistics_interval to 300 sec --- config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config b/config index 77b4a5c11..048356a75 100644 --- a/config +++ b/config @@ -138,7 +138,7 @@ performance = True # Include regular cache performance statistics in logs on info level # 0: on each request (incl. 
current request) # >0: minimum interval in seconds (only global) -cache_statistics_interval = 60 +cache_statistics_interval = 300 [headers] From ddef45b0115d8abd68a24959b2c540f82659a14f Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 16 Oct 2016 17:06:09 +0000 Subject: [PATCH 070/101] unconditionally execute a serialize check for items read --- radicale/storage.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 9c7cc8d23..d76d9984b 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -776,13 +776,12 @@ def get(self, href): except Exception as e: self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) return None; - if self.get_meta("tag") == "VADDRESSBOOK": - # check whether vobject likes the VCARD item - try: - item.serialize() - except Exception as e: - self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) - return None; + # check whether vobject likes the VCARD item + try: + item.serialize() + except Exception as e: + self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) + return None; Item_entry = Item(self, item, href, last_modified) if Item_cache_active == 1: From c5016982b8dbc1f3a5a4e72e4ab969f0e89d49f2 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 16 Oct 2016 20:05:07 +0000 Subject: [PATCH 071/101] vobject parser to detect/analyze broken vcard/vcalender files --- radicale-storage-check.py | 58 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100755 radicale-storage-check.py diff --git a/radicale-storage-check.py b/radicale-storage-check.py new file mode 100755 index 000000000..5a68d1d8a --- /dev/null +++ b/radicale-storage-check.py @@ -0,0 +1,58 @@ +#!/usr/bin/python3.4 -s +# +# This file is a tool for the Radicale Server - Calendar Server +# Copyright © 2014 Jean-Marc Martins +# Copyright © 2012-2016 Guillaume Ayoub +# +# This library is free software: you can 
redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Radicale. If not, see . + +import sys +import traceback +import vobject + +## Main +if len(sys.argv) < 2: + print ('Missing file names as arguments') + +i = 1 +while i < len(sys.argv): + path = sys.argv[i] + print ('Analyze file:', path) + i += 1 + with open(path, encoding="utf-8", newline="") as f: + print ('Read file:', path) + text = f.read() + print ("Parse file contents:", path) + try: + item = vobject.readOne(text) + print ("Parse file contents successful:", path) + except Exception as e: + print ("Parse file contents ERROR", path) + print ("Unexpected error:", e) + traceback.print_exc(file=sys.stdout) + exit(1); + + # check whether vobject likes the VCARD item + try: + print ("Serialize file contents:", path) + item_ser = item.serialize() + print ("Serialize file contents successful:", path) + print ("=== CONTENTS BEGIN ===") + print (str(item_ser)) + print ("=== CONTENTS END ===") + except Exception as e: + print ("Serialize file contents ERROR", path) + print ("Unexpected error:", e) + traceback.print_exc(file=sys.stdout) + exit(1); From e6ef8e5c7ac944e9c56387848673ced82c90d3ed Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Mon, 17 Oct 2016 20:02:21 +0000 Subject: [PATCH 072/101] remove dirty entries from cache implement counters for cached entries and memory add option for exception logging minor internal fixes --- config | 7 ++++-- radicale/storage.py | 58 ++++++++++++++++++++++++++++++--------------- 2 files 
changed, 44 insertions(+), 21 deletions(-) diff --git a/config b/config index 048356a75..0695baaaa 100644 --- a/config +++ b/config @@ -123,7 +123,7 @@ cache = True # http://docs.python.org/library/logging.config.html #config = /etc/radicale/logging -# Set the default logging level to debug +# Set the default logging level to debug (this enables also request/response logging) #debug = False # Store all environment variables (including those set in the shell) @@ -133,7 +133,10 @@ cache = True #mask_passwords = True # Include cache/collections performance statistics in logs on info level -performance = True +performance = False + +# Include exceptions in logs +exceptions = False # Include regular cache performance statistics in logs on info level # 0: on each request (incl. current request) diff --git a/radicale/storage.py b/radicale/storage.py index d76d9984b..165fda626 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -233,25 +233,30 @@ def etag(self): ## Item caching # cache counter class Item_cache_counter: - def __init__(self, lookup, hit, miss, dirty): + def __init__(self): self.lookup = 0 self.hit = 0 self.miss = 0 self.dirty = 0 + self.entries= 0 + self.size = 0 def string(self): - if (self.lookup > 0): - message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%)" % ( + global Item_cache_data + if (self.entries > 0): + message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%) entries=%d memoryKiB=%.3f" % ( self.lookup, self.hit, self.hit * 100 / self.lookup, self.miss, self.miss * 100 / self.lookup, self.dirty, - self.dirty * 100 / self.lookup + self.dirty * 100 / self.lookup, + self.entries, + self.size / 1024 ) else: - message = "no cache lookups so far" + message = "no cache entries" return(message) def string_delta(self, stamp): @@ -273,8 +278,8 @@ def string_delta(self, stamp): def log(self, logger, stamp, logging_performance): global Item_cache_statistics_log_interval global 
Item_cache_statistics_log_last_time - if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): - if (logging_performance == 1) or (Item_cache_statistics_log_interval == 0): + if (logging_performance) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): + if (logging_performance) or (Item_cache_statistics_log_interval == 0): logger.info("Cache request statistics: %s", self.string_delta(stamp)) else: logger.debug("Cache request statistics: %s", self.string_delta(stamp)) @@ -293,7 +298,7 @@ def __init__(self, Item, last_modified_time, last_used_time): # cache initialization Item_cache_data = {} -Item_cache_counter = Item_cache_counter(0, 0, 0, 0) +Item_cache_counter = Item_cache_counter() Item_cache_active = 0 ## cache regular statistics logging on info level @@ -473,13 +478,16 @@ def __init__(self, path, principal=False, folder=None): split_path = self.path.split("/") self.owner = split_path[0] if len(split_path) > 1 else None self.is_principal = principal - self.logging_performance = 0 + self.logging_performance = False + self.logging_exceptions = False if self.configuration.getboolean("logging", "performance"): - self.logging_performance = 1 + self.logging_performance = True + if self.configuration.getboolean("logging", "exceptions"): + self.logging_exceptions = True Item_cache_statistics_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) if self.configuration.getboolean("storage", "cache"): if Item_cache_active == 0: - if self.logging_performance == 1: + if self.logging_performance: self.logger.info("Item cache enabled (performance log on info level)") else: if (Item_cache_statistics_log_interval.total_seconds() > 0): @@ -764,6 +772,11 @@ def get(self, href): Item_cache_hit = 1 else: 
Item_cache_counter.dirty += 1 + # remove from cache + self.logger.debug("Delete from cache (dirty): %s", path) + Item_cache_counter.entries -= 1 + Item_cache_counter.size -= len(str(Item_cache_data[path].Item.item)) + del Item_cache_data[path] else: Item_cache_counter.miss += 1 @@ -774,13 +787,16 @@ def get(self, href): self.logger.debug("Read object ('get'): %s", path) item = vobject.readOne(text) except Exception as e: - self.logger.error("Object broken (skip 'get'): %s (%s)", path, e) + self.logger.error("Object broken on read (skip 'get'): %s (%s)", path, e) + if self.logging_exceptions: + self.logger.exception("Exception details:") return None; - # check whether vobject likes the VCARD item try: item.serialize() except Exception as e: - self.logger.error("VCARD object broken (skip 'get'): %s (%s)", path, e) + self.logger.error("Object broken on serialize (skip 'get'): %s (%s)", path, e) + if self.logging_exceptions: + self.logger.exception("Exception details:") return None; Item_entry = Item(self, item, href, last_modified) @@ -788,6 +804,8 @@ def get(self, href): # cache handling self.logger.debug("Store item in cache: %s", path) Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time, datetime.datetime.now()) + Item_cache_counter.size += len(str(Item_entry.item)) + Item_cache_counter.entries += 1 return Item_entry else: @@ -829,10 +847,12 @@ def delete(self, href=None): os.remove(path) self._sync_directory(os.path.dirname(path)) if Item_cache_active == 1: - # remove from cache, if existing - if path in Item_cache_data: - self.logger.debug("Delete from cache: %s", path) - del Item_cache_data[path] + # remove from cache, if existing + if path in Item_cache_data: + self.logger.debug("Delete from cache (delete): %s", path) + Item_cache_counter.entries -= 1 + Item_cache_counter.size -= len(str(Item_cache_data[path].Item.item)) + del Item_cache_data[path] def get_meta(self, key=None): if os.path.exists(self._props_path): @@ -867,7 +887,7 @@ def 
serialize(self): if hasattr(self.get(href),'item'): items.append(self.get(href).item) time_end = datetime.datetime.now() - if self.logging_performance == 1: + if self.logging_performance: self.logger.info( "Collection read %d items in %s sec from %s", len(items), (time_end - time_begin).total_seconds(), self._filesystem_path) From 0835b8d08aed76659508a41ed6e558b4422287bc Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Mon, 17 Oct 2016 20:48:17 +0000 Subject: [PATCH 073/101] delete timestamp object after usage moved next internal variable to boolean --- radicale/storage.py | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 165fda626..d32dc8bd0 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -280,14 +280,14 @@ def log(self, logger, stamp, logging_performance): global Item_cache_statistics_log_last_time if (logging_performance) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): if (logging_performance) or (Item_cache_statistics_log_interval == 0): - logger.info("Cache request statistics: %s", self.string_delta(stamp)) + logger.info("Item cache request statistics: %s", self.string_delta(stamp)) else: - logger.debug("Cache request statistics: %s", self.string_delta(stamp)) - logger.info("Cache overall statistics: %s", self.string()) + logger.debug("Item cache request statistics: %s", self.string_delta(stamp)) + logger.info("Item cache overall statistics: %s", self.string()) Item_cache_statistics_log_last_time = datetime.datetime.now() else: - logger.debug("Cache request statistics: %s", self.string_delta(stamp)) - logger.debug("Cache overall statistics: %s", self.string()) + logger.debug("Item cache request statistics: %s", self.string_delta(stamp)) + logger.debug("Item cache overall statistics: %s", self.string()) ## cache entry class Item_cache_entry: @@ 
-299,7 +299,7 @@ def __init__(self, Item, last_modified_time, last_used_time): # cache initialization Item_cache_data = {} Item_cache_counter = Item_cache_counter() -Item_cache_active = 0 +Item_cache_active = False ## cache regular statistics logging on info level # 0: on each request (incl. current request) @@ -486,7 +486,7 @@ def __init__(self, path, principal=False, folder=None): self.logging_exceptions = True Item_cache_statistics_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) if self.configuration.getboolean("storage", "cache"): - if Item_cache_active == 0: + if not Item_cache_active: if self.logging_performance: self.logger.info("Item cache enabled (performance log on info level)") else: @@ -494,7 +494,7 @@ def __init__(self, path, principal=False, folder=None): self.logger.info("Item cache enabled (regular statistics log on info level with minimum interval %d sec)", Item_cache_statistics_log_interval.total_seconds()) else: self.logger.info("Item cache enabled (statistics log only on debug level)") - Item_cache_active = 1 + Item_cache_active = True @classmethod def _get_collection_root_folder(cls): @@ -731,7 +731,7 @@ def list(self): global Item_cache_data global Item_cache_counter global Item_cache_active - if Item_cache_active == 1: + if Item_cache_active: Item_cache_counter_stamp = copy.deepcopy(Item_cache_counter) for href in os.listdir(self._filesystem_path): if not is_safe_filesystem_path_component(href): @@ -741,8 +741,9 @@ def list(self): path = os.path.join(self._filesystem_path, href) if os.path.isfile(path): yield href - if Item_cache_active == 1: + if Item_cache_active: Item_cache_counter.log(self.logger, Item_cache_counter_stamp, self.logging_performance) + del Item_cache_counter_stamp def get(self, href): global Item_cache_data @@ -763,7 +764,7 @@ def get(self, href): time.gmtime(last_modified_time)) Item_cache_hit = 0 - if Item_cache_active == 1: + if Item_cache_active: # Item cache 
lookup Item_cache_counter.lookup += 1 if path in Item_cache_data: @@ -780,7 +781,7 @@ def get(self, href): else: Item_cache_counter.miss += 1 - if Item_cache_hit == 0 or Item_cache_active == 0: + if Item_cache_hit == 0 or Item_cache_active == False: with open(path, encoding=self.encoding, newline="") as f: text = f.read() try: @@ -800,7 +801,7 @@ def get(self, href): return None; Item_entry = Item(self, item, href, last_modified) - if Item_cache_active == 1: + if Item_cache_active: # cache handling self.logger.debug("Store item in cache: %s", path) Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time, datetime.datetime.now()) @@ -846,7 +847,7 @@ def delete(self, href=None): raise ComponentNotFoundError(href) os.remove(path) self._sync_directory(os.path.dirname(path)) - if Item_cache_active == 1: + if Item_cache_active: # remove from cache, if existing if path in Item_cache_data: self.logger.debug("Delete from cache (delete): %s", path) From d88432342ae7c9b242aac6b179c2ddcb10c581f8 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 18 Oct 2016 20:37:14 +0000 Subject: [PATCH 074/101] add props cache, add selective debug option, some changes in caching code --- radicale/__init__.py | 3 + radicale/storage.py | 189 ++++++++++++++++++++++++++++++++++--------- 2 files changed, 152 insertions(+), 40 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 0369f07b7..e98116001 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -516,6 +516,7 @@ def do_GET(self, environ, base_prefix, path, user): "Last-Modified": collection.last_modified, "ETag": item.etag} answer = item.serialize() + storage.log_cache_statistics(self) return client.OK, headers, answer def do_HEAD(self, environ, base_prefix, path, user): @@ -618,6 +619,7 @@ def do_PROPFIND(self, environ, base_prefix, path, user): headers = {"DAV": DAV_HEADERS, "Content-Type": "text/xml"} status, answer = xmlutils.propfind( base_prefix, path, content, read_items, 
write_items, user) + storage.log_cache_statistics(self) if status == client.FORBIDDEN: return NOT_ALLOWED else: @@ -705,4 +707,5 @@ def do_REPORT(self, environ, base_prefix, path, user): collection = item.collection headers = {"Content-Type": "text/xml"} answer = xmlutils.report(base_prefix, path, content, collection) + storage.log_cache_statistics(self) return client.MULTI_STATUS, headers, answer diff --git a/radicale/storage.py b/radicale/storage.py index d32dc8bd0..585575b44 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -36,7 +36,6 @@ import threading import time import datetime -import copy from contextlib import contextmanager from hashlib import md5 from importlib import import_module @@ -105,7 +104,10 @@ class CollectionCopy(collection_class): def cleanup(logger): """Print cache statistics.""" logger.info("Cleaning up 'storage'") - logger.info("Cache overall statistics: %s", Item_cache_counter.string()) + if Item_cache_active: + logger.info("Items cache overall statistics: %s", Item_cache_counter.string()) + if Props_cache_active: + logger.info("Props cache overall statistics: %s", Props_cache_counter.string()) def get_etag(text): @@ -190,6 +192,17 @@ def path_to_filesystem(root, *paths): return safe_path +def log_cache_statistics(self): + global Item_cache_active + global Item_cache_counter + global Props_cache_active + global Props_cache_counter + if Item_cache_active: + Item_cache_counter.log("Items", self.logger) + if Props_cache_active: + Props_cache_counter.log("Props", self.logger) + + class UnsafePathError(ValueError): def __init__(self, path): message = "Can't translate name safely to filesystem: %s" % path @@ -230,9 +243,10 @@ def etag(self): return get_etag(self.serialize()) -## Item caching -# cache counter -class Item_cache_counter: +### Item caching +# TODO: catch all potential race conditions during add/delete +## cache counter statistics +class Cache_counter: def __init__(self): self.lookup = 0 self.hit = 0 @@ -240,6 +254,18 
@@ def __init__(self): self.dirty = 0 self.entries= 0 self.size = 0 + self.perflog= False + self.lastlog= datetime.datetime.now() + + def copy(self, counter): + self.lookup = counter.lookup + self.hit = counter.hit + self.miss = counter.miss + self.dirty = counter.dirty + self.entries= counter.entries + self.size = counter.size + self.perflog= counter.perflog + self.lastlog= counter.lastlog def string(self): global Item_cache_data @@ -275,38 +301,47 @@ def string_delta(self, stamp): message = "no cache lookups so far" return(message) - def log(self, logger, stamp, logging_performance): - global Item_cache_statistics_log_interval - global Item_cache_statistics_log_last_time - if (logging_performance) or (Item_cache_statistics_log_interval == 0) or (datetime.datetime.now() - Item_cache_statistics_log_last_time > Item_cache_statistics_log_interval): - if (logging_performance) or (Item_cache_statistics_log_interval == 0): - logger.info("Item cache request statistics: %s", self.string_delta(stamp)) - else: - logger.debug("Item cache request statistics: %s", self.string_delta(stamp)) - logger.info("Item cache overall statistics: %s", self.string()) - Item_cache_statistics_log_last_time = datetime.datetime.now() + def log_request(self, token, logger, stamp, logging_performance): + if (logging_performance) or (Cache_counter_log_interval == 0): + logger.info("%s cache request statistics: %s", token, self.string_delta(stamp)) + else: + logger.debug("%s cache request statistics: %s", token, self.string_delta(stamp)) + + def log(self, token, logger): + global Cache_counter_log_interval + if (self.perflog) or (Cache_counter_log_interval == 0) or (datetime.datetime.now() - self.lastlog > Cache_counter_log_interval): + logger.info("%s cache overall statistics: %s", token, self.string()) + self.lastlog = datetime.datetime.now() else: - logger.debug("Item cache request statistics: %s", self.string_delta(stamp)) - logger.debug("Item cache overall statistics: %s", self.string()) + 
logger.debug("%s cache overall statistics: %s", token, self.string()) + ## cache entry class Item_cache_entry: - def __init__(self, Item, last_modified_time, last_used_time): + def __init__(self, Item, last_modified_time): self.Item = Item self.last_modified_time = last_modified_time - self.last_used_time = last_used_time + +## cache entry +class Props_cache_entry: + def __init__(self, props_contents, last_modified_time): + self.props_contents = props_contents + self.last_modified_time = last_modified_time # cache initialization Item_cache_data = {} -Item_cache_counter = Item_cache_counter() +Item_cache_counter = Cache_counter() Item_cache_active = False +Props_cache_data = {} +Props_cache_counter = Cache_counter() +Props_cache_active = False + + ## cache regular statistics logging on info level # 0: on each request (incl. current request) # >0: after at least every given seconds (exc. current request) -Item_cache_statistics_log_interval = datetime.timedelta(seconds=60) -# init timestamp -Item_cache_statistics_log_last_time = datetime.datetime.now() +Cache_counter_log_interval = datetime.timedelta(seconds=60) class BaseCollection: @@ -466,7 +501,8 @@ class Collection(BaseCollection): def __init__(self, path, principal=False, folder=None): global Item_cache_active - global Item_cache_statistics_log_interval + global Props_cache_active + global Cache_counter_log_interval if not folder: folder = self._get_collection_root_folder() # Path should already be sanitized @@ -484,17 +520,30 @@ def __init__(self, path, principal=False, folder=None): self.logging_performance = True if self.configuration.getboolean("logging", "exceptions"): self.logging_exceptions = True - Item_cache_statistics_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) + self.logging_filter = int(self.configuration.get("logging", "debug_filter"), 0) + self.logger.debug("debug filter active with: 0x%x", self.logging_filter) + 
Cache_counter_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) if self.configuration.getboolean("storage", "cache"): if not Item_cache_active: if self.logging_performance: - self.logger.info("Item cache enabled (performance log on info level)") + self.logger.info("Items cache enabled (performance log on info level)") + Item_cache_counter.perflog = True else: - if (Item_cache_statistics_log_interval.total_seconds() > 0): - self.logger.info("Item cache enabled (regular statistics log on info level with minimum interval %d sec)", Item_cache_statistics_log_interval.total_seconds()) + if (Cache_counter_log_interval.total_seconds() > 0): + self.logger.info("Items cache enabled (regular statistics log on info level with minimum interval %d sec)", Cache_counter_log_interval.total_seconds()) else: - self.logger.info("Item cache enabled (statistics log only on debug level)") + self.logger.info("Items cache enabled (statistics log only on debug level)") Item_cache_active = True + if not Props_cache_active: + if self.logging_performance: + self.logger.info("Props cache enabled (performance log on info level)") + Props_cache_counter.perflog = True + else: + if (Cache_counter_log_interval.total_seconds() > 0): + self.logger.info("Props cache enabled (regular statistics log on info level with minimum interval %d sec)", Cache_counter_log_interval.total_seconds()) + else: + self.logger.info("Props cache enabled (statistics log only on debug level)") + Props_cache_active = True @classmethod def _get_collection_root_folder(cls): @@ -728,11 +777,19 @@ def move(cls, item, to_collection, to_href): cls._sync_directory(item.collection._filesystem_path) def list(self): + if not self.logging_filter & 0x0010: + self.logger.debug("function storage/list called") global Item_cache_data global Item_cache_counter global Item_cache_active + global Props_cache_counter + global Props_cache_active if Item_cache_active: - 
Item_cache_counter_stamp = copy.deepcopy(Item_cache_counter) + Item_cache_counter_stamp = Cache_counter() + Item_cache_counter_stamp.copy(Item_cache_counter) + if Props_cache_active: + Props_cache_counter_stamp = Cache_counter() + Props_cache_counter_stamp.copy(Props_cache_counter) for href in os.listdir(self._filesystem_path): if not is_safe_filesystem_path_component(href): if not href.startswith(".Radicale"): @@ -742,10 +799,13 @@ def list(self): if os.path.isfile(path): yield href if Item_cache_active: - Item_cache_counter.log(self.logger, Item_cache_counter_stamp, self.logging_performance) - del Item_cache_counter_stamp + Item_cache_counter.log_request("Items", self.logger, Item_cache_counter_stamp, self.logging_performance) + if Props_cache_active: + Props_cache_counter.log_request("Props", self.logger, Props_cache_counter_stamp, self.logging_performance) def get(self, href): + if not self.logging_filter & 0x0010: + self.logger.debug("function storage/get called") global Item_cache_data global Item_cache_counter global Item_cache_active @@ -774,7 +834,7 @@ def get(self, href): else: Item_cache_counter.dirty += 1 # remove from cache - self.logger.debug("Delete from cache (dirty): %s", path) + self.logger.debug("Item delete from cache (dirty): %s", path) Item_cache_counter.entries -= 1 Item_cache_counter.size -= len(str(Item_cache_data[path].Item.item)) del Item_cache_data[path] @@ -785,7 +845,8 @@ def get(self, href): with open(path, encoding=self.encoding, newline="") as f: text = f.read() try: - self.logger.debug("Read object ('get'): %s", path) + if not self.logging_filter & 0x0100: + self.logger.debug("Item read ('get'): %s", path) item = vobject.readOne(text) except Exception as e: self.logger.error("Object broken on read (skip 'get'): %s (%s)", path, e) @@ -803,14 +864,16 @@ def get(self, href): if Item_cache_active: # cache handling - self.logger.debug("Store item in cache: %s", path) - Item_cache_data[path] = Item_cache_entry(Item_entry, 
last_modified_time, datetime.datetime.now()) + if not self.logging_filter & 0x1000: + self.logger.debug("Item store in cache: %s", path) + Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time) Item_cache_counter.size += len(str(Item_entry.item)) Item_cache_counter.entries += 1 return Item_entry else: - self.logger.debug("Retrieve from cache: %s", path) + if not self.logging_filter & 0x2000: + self.logger.debug("Item retrieve from cache: %s", path) return Item_cache_data[path].Item def upload(self, href, vobject_item): @@ -856,14 +919,58 @@ def delete(self, href=None): del Item_cache_data[path] def get_meta(self, key=None): + def get_meta(self, key): + global Props_cache_active + global Props_cache_counter if os.path.exists(self._props_path): - with open(self._props_path, encoding=self.encoding) as f: - meta = json.load(f) - return meta.get(key) if key else meta + Props_cache_hit = 0 + if Props_cache_active: + last_modified_time = os.path.getmtime(self._props_path) + # Props cache lookup + Props_cache_counter.lookup += 1 + if self._props_path in Props_cache_data: + if Props_cache_data[self._props_path].last_modified_time == last_modified_time: + Props_cache_counter.hit += 1 + Props_cache_hit = 1 + else: + Props_cache_counter.dirty += 1 + # remove from cache + self.logger.debug("Props delete from cache (dirty): %s", path) + Props_cache_counter.entries -= 1 + del Props_cache_data[self._props_path] + else: + Props_cache_counter.miss += 1 + + if Props_cache_hit == 0 or Props_cache_active == False: + with open(self._props_path, encoding=self.encoding) as f: + if not self.logging_filter & 0x0400: + self.logger.debug("Props read ('get_meta') : %s", self._props_path) + props_contents = json.load(f) + + if Props_cache_active: + # cache handling + if not self.logging_filter & 0x4000: + self.logger.debug("Props store in cache : %s", self._props_path) + Props_cache_data[self._props_path] = Props_cache_entry(props_contents, last_modified_time) + 
Props_cache_counter.entries += 1 + + return props_contents.get(key) if key else props_contents + else: + if not self.logging_filter & 0x8000: + self.logger.debug("Props retrieve from cache: %s", self._props_path) + return Props_cache_data[self._props_path].props_contents.get(key) if key else Props_cache_data[self._props_path].props_contents def set_meta(self, props): + global Props_cache_active + global Props_cache_counter if os.path.exists(self._props_path): + if Props_cache_active: + if self._props_path in Props_cache_data: + self.logger.debug("Props delete from cache ('set_meta'): %s", path) + Props_cache_counter.entries -= 1 + del Props_cache_data[self._props_path] with open(self._props_path, encoding=self.encoding) as f: + self.logger.debug("Write props ('set_meta'): %s", self._props_path) old_props = json.load(f) old_props.update(props) props = old_props @@ -882,6 +989,8 @@ def last_modified(self): return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last)) def serialize(self): + if not self.logging_filter & 0x0010: + self.logger.debug("function storage/serialize called") items = [] time_begin = datetime.datetime.now() for href in self.list(): From a99ef7d5f263681490a3387c0e946bbea1f25f90 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 18 Oct 2016 22:22:17 +0000 Subject: [PATCH 075/101] reorg cache init and logging --- radicale/__init__.py | 3 +- radicale/storage.py | 272 ++++++++++++++++++++++++++----------------- 2 files changed, 170 insertions(+), 105 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index e98116001..eea5fc517 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -314,6 +314,8 @@ def response(status, headers=(), answer=None): status, client.responses.get(status, "Unknown")) start_response(status, list(headers.items())) time_end = datetime.datetime.now() + if self.configuration.getboolean("storage", "cache"): + storage.cache_log_statistics_overall(self) sizeinfo = "" if answer: sizeinfo = 
sizeinfo + str(len(str(answer))) + " bytes" @@ -516,7 +518,6 @@ def do_GET(self, environ, base_prefix, path, user): "Last-Modified": collection.last_modified, "ETag": item.etag} answer = item.serialize() - storage.log_cache_statistics(self) return client.OK, headers, answer def do_HEAD(self, environ, base_prefix, path, user): diff --git a/radicale/storage.py b/radicale/storage.py index 585575b44..6d97a15bf 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -85,14 +85,48 @@ class Overlapped(ctypes.Structure): elif os.name == "posix": import fcntl +logging_filter = 0xffff def load(configuration, logger): + global logging_filter """Load the storage manager chosen in configuration.""" storage_type = configuration.get("storage", "type") + logger.info("Radicale storage manager loading: %s", storage_type) + logging_filter = int(configuration.get("logging", "debug_filter"), 0) + logger.debug("debug filter active with: 0x%x", logging_filter) + if storage_type == "multifilesystem": collection_class = Collection + ## initialize cache + global Items_cache_active + global Props_cache_active + global Items_cache_counter + global Props_cache_counter + if configuration.getboolean("storage", "cache"): + Items_cache_active = True + Props_cache_active = True + Items_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") + Props_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") + if configuration.getboolean("logging", "performance"): + if Items_cache_active: + logger.info("Items cache enabled (performance log on info level)") + Items_cache_counter.perflog = True + if Props_cache_active: + logger.info("Props cache enabled (performance log on info level)") + Props_cache_counter.perflog = True + else: + if (Items_cache_counter.loginterval > 0): + logger.info("Items cache enabled (regular statistics log on info level with minimum interval %d sec)", Items_cache_counter.loginterval) + else: + 
logger.info("Items cache enabled (statistics log only on debug level)") + + if (Props_cache_counter.loginterval > 0): + logger.info("Props cache enabled (regular statistics log on info level with minimum interval %d sec)", Props_cache_counter.loginterval) + else: + logger.info("Items cache enabled (statistics log only on debug level)") else: collection_class = import_module(storage_type).Collection + logger.info("Radicale storage manager successfully loaded: %s", storage_type) class CollectionCopy(collection_class): """Collection copy, avoids overriding the original class attributes.""" @@ -104,8 +138,8 @@ class CollectionCopy(collection_class): def cleanup(logger): """Print cache statistics.""" logger.info("Cleaning up 'storage'") - if Item_cache_active: - logger.info("Items cache overall statistics: %s", Item_cache_counter.string()) + if Items_cache_active: + logger.info("Items cache overall statistics: %s", Items_cache_counter.string()) if Props_cache_active: logger.info("Props cache overall statistics: %s", Props_cache_counter.string()) @@ -243,7 +277,7 @@ def etag(self): return get_etag(self.serialize()) -### Item caching +### BEGIN Items/Props caching # TODO: catch all potential race conditions during add/delete ## cache counter statistics class Cache_counter: @@ -255,7 +289,11 @@ def __init__(self): self.entries= 0 self.size = 0 self.perflog= False + ## cache statistics logging on info level + # 0: on each request (incl. current request) + # >0: after at least every given loginterval (excl. 
current request) self.lastlog= datetime.datetime.now() + self.loginterval = datetime.timedelta(seconds=60) # default def copy(self, counter): self.lookup = counter.lookup @@ -266,9 +304,9 @@ def copy(self, counter): self.size = counter.size self.perflog= counter.perflog self.lastlog= counter.lastlog + self.loginterval= counter.loginterval def string(self): - global Item_cache_data if (self.entries > 0): message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%) entries=%d memoryKiB=%.3f" % ( self.lookup, @@ -301,15 +339,14 @@ def string_delta(self, stamp): message = "no cache lookups so far" return(message) - def log_request(self, token, logger, stamp, logging_performance): - if (logging_performance) or (Cache_counter_log_interval == 0): + def log_request(self, token, logger, stamp): + if (self.perflog) or (self.loginterval == 0): logger.info("%s cache request statistics: %s", token, self.string_delta(stamp)) else: logger.debug("%s cache request statistics: %s", token, self.string_delta(stamp)) - def log(self, token, logger): - global Cache_counter_log_interval - if (self.perflog) or (Cache_counter_log_interval == 0) or (datetime.datetime.now() - self.lastlog > Cache_counter_log_interval): + def log_overall(self, token, logger): + if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > self.loginterval): logger.info("%s cache overall statistics: %s", token, self.string()) self.lastlog = datetime.datetime.now() else: @@ -318,30 +355,83 @@ def log(self, token, logger): ## cache entry class Item_cache_entry: - def __init__(self, Item, last_modified_time): + def __init__(self, Item, size, last_modified_time): self.Item = Item + self.size = size self.last_modified_time = last_modified_time -## cache entry class Props_cache_entry: - def __init__(self, props_contents, last_modified_time): + def __init__(self, props_contents, size, last_modified_time): self.props_contents = props_contents + self.size = size 
self.last_modified_time = last_modified_time -# cache initialization -Item_cache_data = {} -Item_cache_counter = Cache_counter() -Item_cache_active = False +## cache initialization +Items_cache_data = {} +Items_cache_counter = Cache_counter() +Items_cache_active = False Props_cache_data = {} Props_cache_counter = Cache_counter() Props_cache_active = False +## global functions to be called also from other modules +def cache_log_statistics_overall(self): + global Items_cache_active + global Items_cache_counter + global Props_cache_active + global Props_cache_counter + if Items_cache_active: + Items_cache_counter.log_overall("Items", self.logger) + if Props_cache_active: + Props_cache_counter.log_overall("Props", self.logger) + +def cache_log_statistics_request(self, stamp): + global Items_cache_active + global Items_cache_counter + global Props_cache_active + global Props_cache_counter + if Items_cache_active: + Items_cache_counter.log_request("Items", self.logger, stamp) + if Props_cache_active: + Props_cache_counter.log_request("Props", self.logger, stamp) + +### END Items/Props caching + + +class UnsafePathError(ValueError): + def __init__(self, path): + message = "Can't translate name safely to filesystem: %s" % path + super().__init__(message) + + +class ComponentExistsError(ValueError): + def __init__(self, path): + message = "Component already exists: %s" % path + super().__init__(message) + + +class ComponentNotFoundError(ValueError): + def __init__(self, path): + message = "Component doesn't exist: %s" % path + super().__init__(message) + + +class Item: + def __init__(self, collection, item, href, last_modified=None): + self.collection = collection + self.item = item + self.href = href + self.last_modified = last_modified + + def __getattr__(self, attr): + return getattr(self.item, attr) + + @property + def etag(self): + """Encoded as quoted-string (see RFC 2616).""" + return get_etag(self.serialize()) -## cache regular statistics logging on info level -# 0: 
on each request (incl. current request) -# >0: after at least every given seconds (exc. current request) -Cache_counter_log_interval = datetime.timedelta(seconds=60) class BaseCollection: @@ -500,9 +590,6 @@ class Collection(BaseCollection): """Collection stored in several files per calendar.""" def __init__(self, path, principal=False, folder=None): - global Item_cache_active - global Props_cache_active - global Cache_counter_log_interval if not folder: folder = self._get_collection_root_folder() # Path should already be sanitized @@ -514,36 +601,6 @@ def __init__(self, path, principal=False, folder=None): split_path = self.path.split("/") self.owner = split_path[0] if len(split_path) > 1 else None self.is_principal = principal - self.logging_performance = False - self.logging_exceptions = False - if self.configuration.getboolean("logging", "performance"): - self.logging_performance = True - if self.configuration.getboolean("logging", "exceptions"): - self.logging_exceptions = True - self.logging_filter = int(self.configuration.get("logging", "debug_filter"), 0) - self.logger.debug("debug filter active with: 0x%x", self.logging_filter) - Cache_counter_log_interval = datetime.timedelta(seconds=self.configuration.getint("logging", "cache_statistics_interval")) - if self.configuration.getboolean("storage", "cache"): - if not Item_cache_active: - if self.logging_performance: - self.logger.info("Items cache enabled (performance log on info level)") - Item_cache_counter.perflog = True - else: - if (Cache_counter_log_interval.total_seconds() > 0): - self.logger.info("Items cache enabled (regular statistics log on info level with minimum interval %d sec)", Cache_counter_log_interval.total_seconds()) - else: - self.logger.info("Items cache enabled (statistics log only on debug level)") - Item_cache_active = True - if not Props_cache_active: - if self.logging_performance: - self.logger.info("Props cache enabled (performance log on info level)") - Props_cache_counter.perflog 
= True - else: - if (Cache_counter_log_interval.total_seconds() > 0): - self.logger.info("Props cache enabled (regular statistics log on info level with minimum interval %d sec)", Cache_counter_log_interval.total_seconds()) - else: - self.logger.info("Props cache enabled (statistics log only on debug level)") - Props_cache_active = True @classmethod def _get_collection_root_folder(cls): @@ -777,16 +834,17 @@ def move(cls, item, to_collection, to_href): cls._sync_directory(item.collection._filesystem_path) def list(self): - if not self.logging_filter & 0x0010: + global logging_filter + if not logging_filter & 0x0010: self.logger.debug("function storage/list called") - global Item_cache_data - global Item_cache_counter - global Item_cache_active + global Items_cache_data + global Items_cache_counter + global Items_cache_active global Props_cache_counter global Props_cache_active - if Item_cache_active: - Item_cache_counter_stamp = Cache_counter() - Item_cache_counter_stamp.copy(Item_cache_counter) + if Items_cache_active: + Items_cache_counter_stamp = Cache_counter() + Items_cache_counter_stamp.copy(Items_cache_counter) if Props_cache_active: Props_cache_counter_stamp = Cache_counter() Props_cache_counter_stamp.copy(Props_cache_counter) @@ -798,17 +856,18 @@ def list(self): path = os.path.join(self._filesystem_path, href) if os.path.isfile(path): yield href - if Item_cache_active: - Item_cache_counter.log_request("Items", self.logger, Item_cache_counter_stamp, self.logging_performance) + if Items_cache_active: + Items_cache_counter.log_request("Items", self.logger, Items_cache_counter_stamp) if Props_cache_active: - Props_cache_counter.log_request("Props", self.logger, Props_cache_counter_stamp, self.logging_performance) + Props_cache_counter.log_request("Props", self.logger, Props_cache_counter_stamp) def get(self, href): - if not self.logging_filter & 0x0010: + global logging_filter + if not logging_filter & 0x0010: self.logger.debug("function storage/get called") 
- global Item_cache_data - global Item_cache_counter - global Item_cache_active + global Items_cache_data + global Items_cache_counter + global Items_cache_active if not href: return None if not is_safe_filesystem_path_component(href): @@ -824,57 +883,57 @@ def get(self, href): time.gmtime(last_modified_time)) Item_cache_hit = 0 - if Item_cache_active: + if Items_cache_active: # Item cache lookup - Item_cache_counter.lookup += 1 - if path in Item_cache_data: - if Item_cache_data[path].last_modified_time == last_modified_time: - Item_cache_counter.hit += 1 + Items_cache_counter.lookup += 1 + if path in Items_cache_data: + if Items_cache_data[path].last_modified_time == last_modified_time: + Items_cache_counter.hit += 1 Item_cache_hit = 1 else: - Item_cache_counter.dirty += 1 + Items_cache_counter.dirty += 1 # remove from cache self.logger.debug("Item delete from cache (dirty): %s", path) - Item_cache_counter.entries -= 1 - Item_cache_counter.size -= len(str(Item_cache_data[path].Item.item)) - del Item_cache_data[path] + Items_cache_counter.entries -= 1 + Items_cache_counter.size -= Items_cache_data[path].size + del Items_cache_data[path] else: - Item_cache_counter.miss += 1 + Items_cache_counter.miss += 1 - if Item_cache_hit == 0 or Item_cache_active == False: + if Item_cache_hit == 0 or Items_cache_active == False: with open(path, encoding=self.encoding, newline="") as f: text = f.read() try: - if not self.logging_filter & 0x0100: + if not logging_filter & 0x0100: self.logger.debug("Item read ('get'): %s", path) item = vobject.readOne(text) except Exception as e: self.logger.error("Object broken on read (skip 'get'): %s (%s)", path, e) - if self.logging_exceptions: + if self.configuration.getboolean("logging", "exceptions"): self.logger.exception("Exception details:") return None; try: item.serialize() except Exception as e: self.logger.error("Object broken on serialize (skip 'get'): %s (%s)", path, e) - if self.logging_exceptions: + if 
self.configuration.getboolean("logging", "exceptions"): self.logger.exception("Exception details:") return None; Item_entry = Item(self, item, href, last_modified) - if Item_cache_active: + if Items_cache_active: # cache handling - if not self.logging_filter & 0x1000: + if not logging_filter & 0x1000: self.logger.debug("Item store in cache: %s", path) - Item_cache_data[path] = Item_cache_entry(Item_entry, last_modified_time) - Item_cache_counter.size += len(str(Item_entry.item)) - Item_cache_counter.entries += 1 + Items_cache_data[path] = Item_cache_entry(Item_entry, len(str(Item_entry.item)), last_modified_time) + Items_cache_counter.size += Items_cache_data[path].size + Items_cache_counter.entries += 1 return Item_entry else: - if not self.logging_filter & 0x2000: + if not logging_filter & 0x2000: self.logger.debug("Item retrieve from cache: %s", path) - return Item_cache_data[path].Item + return Items_cache_data[path].Item def upload(self, href, vobject_item): if not is_safe_filesystem_path_component(href): @@ -886,8 +945,8 @@ def upload(self, href, vobject_item): return item def delete(self, href=None): - global Item_cache_data - global Item_cache_active + global Items_cache_data + global Items_cache_active if href is None: # Delete the collection parent_dir = os.path.dirname(self._filesystem_path) @@ -910,16 +969,17 @@ def delete(self, href=None): raise ComponentNotFoundError(href) os.remove(path) self._sync_directory(os.path.dirname(path)) - if Item_cache_active: + if Items_cache_active: # remove from cache, if existing - if path in Item_cache_data: - self.logger.debug("Delete from cache (delete): %s", path) - Item_cache_counter.entries -= 1 - Item_cache_counter.size -= len(str(Item_cache_data[path].Item.item)) - del Item_cache_data[path] + if path in Items_cache_data: + self.logger.debug("Item delete from cache ('delete'): %s", path) + Items_cache_counter.entries -= 1 + Items_cache_counter.size -= len(str(Items_cache_data[path].Item.item)) + del 
Items_cache_data[path] def get_meta(self, key=None): def get_meta(self, key): + global logging_filter global Props_cache_active global Props_cache_counter if os.path.exists(self._props_path): @@ -935,7 +995,8 @@ def get_meta(self, key): else: Props_cache_counter.dirty += 1 # remove from cache - self.logger.debug("Props delete from cache (dirty): %s", path) + self.logger.debug("Props delete from cache (dirty): %s", self._props_path) + Props_cache_counter.size -= Props_cache_data[self._props_path].size Props_cache_counter.entries -= 1 del Props_cache_data[self._props_path] else: @@ -943,20 +1004,21 @@ def get_meta(self, key): if Props_cache_hit == 0 or Props_cache_active == False: with open(self._props_path, encoding=self.encoding) as f: - if not self.logging_filter & 0x0400: + if not logging_filter & 0x0400: self.logger.debug("Props read ('get_meta') : %s", self._props_path) - props_contents = json.load(f) + props_contents = json.load(f) if Props_cache_active: # cache handling - if not self.logging_filter & 0x4000: + if not logging_filter & 0x4000: self.logger.debug("Props store in cache : %s", self._props_path) - Props_cache_data[self._props_path] = Props_cache_entry(props_contents, last_modified_time) + Props_cache_data[self._props_path] = Props_cache_entry(props_contents, len(str(props_contents)), last_modified_time) + Props_cache_counter.size += Props_cache_data[self._props_path].size Props_cache_counter.entries += 1 return props_contents.get(key) if key else props_contents else: - if not self.logging_filter & 0x8000: + if not logging_filter & 0x8000: self.logger.debug("Props retrieve from cache: %s", self._props_path) return Props_cache_data[self._props_path].props_contents.get(key) if key else Props_cache_data[self._props_path].props_contents @@ -967,6 +1029,7 @@ def set_meta(self, props): if Props_cache_active: if self._props_path in Props_cache_data: self.logger.debug("Props delete from cache ('set_meta'): %s", path) + Props_cache_counter.size -= 
Props_cache_data[self._props_path].size Props_cache_counter.entries -= 1 del Props_cache_data[self._props_path] with open(self._props_path, encoding=self.encoding) as f: @@ -989,7 +1052,8 @@ def last_modified(self): return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last)) def serialize(self): - if not self.logging_filter & 0x0010: + global logging_filter + if not logging_filter & 0x0010: self.logger.debug("function storage/serialize called") items = [] time_begin = datetime.datetime.now() @@ -997,7 +1061,7 @@ def serialize(self): if hasattr(self.get(href),'item'): items.append(self.get(href).item) time_end = datetime.datetime.now() - if self.logging_performance: + if self.configuration.getboolean("logging", "performance"): self.logger.info( "Collection read %d items in %s sec from %s", len(items), (time_end - time_begin).total_seconds(), self._filesystem_path) From 96cd7e817fba83597404407c6051b0677e89dc64 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 18 Oct 2016 22:28:34 +0000 Subject: [PATCH 076/101] remove an accidental inserted newline --- radicale/storage.py | 1 - 1 file changed, 1 deletion(-) diff --git a/radicale/storage.py b/radicale/storage.py index 6d97a15bf..3d639ffab 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -433,7 +433,6 @@ def etag(self): return get_etag(self.serialize()) - class BaseCollection: # Overriden on copy by the "load" function From 35c88d6d068761f55e8c5bcff1b74a7d01e193af Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 00:59:40 +0000 Subject: [PATCH 077/101] remove request cache statistics logging (not thread-safe) implement locking --- radicale/storage.py | 132 ++++++++++++++++---------------------------- 1 file changed, 48 insertions(+), 84 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 3d639ffab..bc11290c4 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -93,7 +93,8 @@ def load(configuration, logger): storage_type = 
configuration.get("storage", "type") logger.info("Radicale storage manager loading: %s", storage_type) logging_filter = int(configuration.get("logging", "debug_filter"), 0) - logger.debug("debug filter active with: 0x%x", logging_filter) + if (logging_filter > 0): + logger.info("debug filter active with: 0x%x", logging_filter) if storage_type == "multifilesystem": collection_class = Collection @@ -139,9 +140,9 @@ def cleanup(logger): """Print cache statistics.""" logger.info("Cleaning up 'storage'") if Items_cache_active: - logger.info("Items cache overall statistics: %s", Items_cache_counter.string()) + logger.info("Items cache overall statistics: %s", Items_cache_counter.string_overall()) if Props_cache_active: - logger.info("Props cache overall statistics: %s", Props_cache_counter.string()) + logger.info("Props cache overall statistics: %s", Props_cache_counter.string_overall()) def get_etag(text): @@ -278,7 +279,6 @@ def etag(self): ### BEGIN Items/Props caching -# TODO: catch all potential race conditions during add/delete ## cache counter statistics class Cache_counter: def __init__(self): @@ -295,18 +295,7 @@ def __init__(self): self.lastlog= datetime.datetime.now() self.loginterval = datetime.timedelta(seconds=60) # default - def copy(self, counter): - self.lookup = counter.lookup - self.hit = counter.hit - self.miss = counter.miss - self.dirty = counter.dirty - self.entries= counter.entries - self.size = counter.size - self.perflog= counter.perflog - self.lastlog= counter.lastlog - self.loginterval= counter.loginterval - - def string(self): + def string_overall(self): if (self.entries > 0): message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d (%3.2f%%) entries=%d memoryKiB=%.3f" % ( self.lookup, @@ -323,31 +312,9 @@ def string(self): message = "no cache entries" return(message) - def string_delta(self, stamp): - delta_lookup = self.lookup - stamp.lookup - if (delta_lookup > 0): - message = "lookup=%d hit=%d (%3.2f%%) miss=%d (%3.2f%%) dirty=%d 
(%3.2f%%)" % ( - delta_lookup, - self.hit - stamp.hit, - (self.hit - stamp.hit) * 100 / delta_lookup, - self.miss - stamp.miss, - (self.miss - stamp.miss) * 100 / delta_lookup, - self.dirty - stamp.dirty, - (self.dirty - stamp.dirty) * 100 / delta_lookup - ) - else: - message = "no cache lookups so far" - return(message) - - def log_request(self, token, logger, stamp): - if (self.perflog) or (self.loginterval == 0): - logger.info("%s cache request statistics: %s", token, self.string_delta(stamp)) - else: - logger.debug("%s cache request statistics: %s", token, self.string_delta(stamp)) - def log_overall(self, token, logger): if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > self.loginterval): - logger.info("%s cache overall statistics: %s", token, self.string()) + logger.info("%s cache overall statistics: %s", token, self.string_overall()) self.lastlog = datetime.datetime.now() else: logger.debug("%s cache overall statistics: %s", token, self.string()) @@ -367,10 +334,12 @@ def __init__(self, props_contents, size, last_modified_time): self.last_modified_time = last_modified_time ## cache initialization +Items_cache_lock = threading.Lock() Items_cache_data = {} Items_cache_counter = Cache_counter() Items_cache_active = False +Props_cache_lock = threading.Lock() Props_cache_data = {} Props_cache_counter = Cache_counter() Props_cache_active = False @@ -386,16 +355,6 @@ def cache_log_statistics_overall(self): if Props_cache_active: Props_cache_counter.log_overall("Props", self.logger) -def cache_log_statistics_request(self, stamp): - global Items_cache_active - global Items_cache_counter - global Props_cache_active - global Props_cache_counter - if Items_cache_active: - Items_cache_counter.log_request("Items", self.logger, stamp) - if Props_cache_active: - Props_cache_counter.log_request("Props", self.logger, stamp) - ### END Items/Props caching @@ -833,20 +792,11 @@ def move(cls, item, to_collection, to_href): 
cls._sync_directory(item.collection._filesystem_path) def list(self): - global logging_filter - if not logging_filter & 0x0010: - self.logger.debug("function storage/list called") global Items_cache_data global Items_cache_counter global Items_cache_active global Props_cache_counter global Props_cache_active - if Items_cache_active: - Items_cache_counter_stamp = Cache_counter() - Items_cache_counter_stamp.copy(Items_cache_counter) - if Props_cache_active: - Props_cache_counter_stamp = Cache_counter() - Props_cache_counter_stamp.copy(Props_cache_counter) for href in os.listdir(self._filesystem_path): if not is_safe_filesystem_path_component(href): if not href.startswith(".Radicale"): @@ -855,15 +805,8 @@ def list(self): path = os.path.join(self._filesystem_path, href) if os.path.isfile(path): yield href - if Items_cache_active: - Items_cache_counter.log_request("Items", self.logger, Items_cache_counter_stamp) - if Props_cache_active: - Props_cache_counter.log_request("Props", self.logger, Props_cache_counter_stamp) def get(self, href): - global logging_filter - if not logging_filter & 0x0010: - self.logger.debug("function storage/get called") global Items_cache_data global Items_cache_counter global Items_cache_active @@ -883,6 +826,8 @@ def get(self, href): Item_cache_hit = 0 if Items_cache_active: + Items_cache_lock.acquire() + # Item cache lookup Items_cache_counter.lookup += 1 if path in Items_cache_data: @@ -891,6 +836,7 @@ def get(self, href): Item_cache_hit = 1 else: Items_cache_counter.dirty += 1 + self.Cache_counter_Items.dirty += 1 # remove from cache self.logger.debug("Item delete from cache (dirty): %s", path) Items_cache_counter.entries -= 1 @@ -902,6 +848,7 @@ def get(self, href): if Item_cache_hit == 0 or Items_cache_active == False: with open(path, encoding=self.encoding, newline="") as f: text = f.read() + try: if not logging_filter & 0x0100: self.logger.debug("Item read ('get'): %s", path) @@ -910,29 +857,41 @@ def get(self, href): 
self.logger.error("Object broken on read (skip 'get'): %s (%s)", path, e) if self.configuration.getboolean("logging", "exceptions"): self.logger.exception("Exception details:") + if Items_cache_active: + Items_cache_lock.release() return None; + try: - item.serialize() + # test whether object is parseable + item_serialized = item.serialize() except Exception as e: self.logger.error("Object broken on serialize (skip 'get'): %s (%s)", path, e) if self.configuration.getboolean("logging", "exceptions"): self.logger.exception("Exception details:") + if Items_cache_active: + Items_cache_lock.release() return None; + + # temp object not required + del item_serialized + # retrieve from cache Item_entry = Item(self, item, href, last_modified) if Items_cache_active: - # cache handling + # store in cache if not logging_filter & 0x1000: self.logger.debug("Item store in cache: %s", path) - Items_cache_data[path] = Item_cache_entry(Item_entry, len(str(Item_entry.item)), last_modified_time) + Items_cache_data[path] = Item_cache_entry(Item_entry, len(str(Item_entry.item)) + len(path), last_modified_time) Items_cache_counter.size += Items_cache_data[path].size Items_cache_counter.entries += 1 - - return Item_entry else: if not logging_filter & 0x2000: self.logger.debug("Item retrieve from cache: %s", path) - return Items_cache_data[path].Item + Item_entry = Items_cache_data[path].Item + + if Items_cache_active: + Items_cache_lock.release() + return Item_entry def upload(self, href, vobject_item): if not is_safe_filesystem_path_component(href): @@ -969,12 +928,14 @@ def delete(self, href=None): os.remove(path) self._sync_directory(os.path.dirname(path)) if Items_cache_active: + Props_cache_lock.acquire() # remove from cache, if existing if path in Items_cache_data: self.logger.debug("Item delete from cache ('delete'): %s", path) Items_cache_counter.entries -= 1 - Items_cache_counter.size -= len(str(Items_cache_data[path].Item.item)) + Items_cache_counter.size -= 
Items_cache_data[path].size del Items_cache_data[path] + Props_cache_lock.release() def get_meta(self, key=None): def get_meta(self, key): @@ -984,6 +945,7 @@ def get_meta(self, key): if os.path.exists(self._props_path): Props_cache_hit = 0 if Props_cache_active: + Props_cache_lock.acquire() last_modified_time = os.path.getmtime(self._props_path) # Props cache lookup Props_cache_counter.lookup += 1 @@ -1011,26 +973,31 @@ def get_meta(self, key): # cache handling if not logging_filter & 0x4000: self.logger.debug("Props store in cache : %s", self._props_path) - Props_cache_data[self._props_path] = Props_cache_entry(props_contents, len(str(props_contents)), last_modified_time) + Props_cache_data[self._props_path] = Props_cache_entry(props_contents, len(str(props_contents)) + len(self._props_path), last_modified_time) Props_cache_counter.size += Props_cache_data[self._props_path].size Props_cache_counter.entries += 1 - return props_contents.get(key) if key else props_contents + meta = props_contents.get(key) if key else props_contents else: if not logging_filter & 0x8000: self.logger.debug("Props retrieve from cache: %s", self._props_path) - return Props_cache_data[self._props_path].props_contents.get(key) if key else Props_cache_data[self._props_path].props_contents + meta = Props_cache_data[self._props_path].props_contents.get(key) if key else Props_cache_data[self._props_path].props_contents + + if Props_cache_active: + Props_cache_lock.release() + return meta def set_meta(self, props): global Props_cache_active global Props_cache_counter if os.path.exists(self._props_path): - if Props_cache_active: - if self._props_path in Props_cache_data: - self.logger.debug("Props delete from cache ('set_meta'): %s", path) - Props_cache_counter.size -= Props_cache_data[self._props_path].size - Props_cache_counter.entries -= 1 - del Props_cache_data[self._props_path] + with Props_cache_lock: + if Props_cache_active: + if self._props_path in Props_cache_data: + 
self.logger.debug("Props delete from cache ('set_meta'): %s", path) + Props_cache_counter.size -= Props_cache_data[self._props_path].size + Props_cache_counter.entries -= 1 + del Props_cache_data[self._props_path] with open(self._props_path, encoding=self.encoding) as f: self.logger.debug("Write props ('set_meta'): %s", self._props_path) old_props = json.load(f) @@ -1051,9 +1018,6 @@ def last_modified(self): return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last)) def serialize(self): - global logging_filter - if not logging_filter & 0x0010: - self.logger.debug("function storage/serialize called") items = [] time_begin = datetime.datetime.now() for href in self.list(): From cba424f91c55569aa7eac3bd8e070bd031f37c0f Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 01:03:34 +0000 Subject: [PATCH 078/101] add explanation for debug_filter --- config | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/config b/config index 0695baaaa..467dcc82f 100644 --- a/config +++ b/config @@ -126,6 +126,15 @@ cache = True # Set the default logging level to debug (this enables also request/response logging) #debug = False +# suppress selectable debug messages (to keep debug log cleaner) +debug_filter = 0xffff +# 0x0100: suppress "read object" +# 0x0400: suppress "read props" +# 0x1000: suppress "store object in cache" +# 0x2000: suppress "retrieve object from cache" +# 0x4000: suppress "store props in cache" +# 0x8000: suppress "retrieve props from cache" + # Store all environment variables (including those set in the shell) #full_environment = False @@ -139,8 +148,8 @@ performance = False exceptions = False # Include regular cache performance statistics in logs on info level -# 0: on each request (incl. 
current request) -# >0: minimum interval in seconds (only global) +# 0: on each request +# >0: minimum interval in seconds cache_statistics_interval = 300 From d0bf6708b0b98665335cb43ea380ca42be8f8ed9 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 01:14:55 +0000 Subject: [PATCH 079/101] fix time delta handling --- radicale/storage.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index bc11290c4..47ef70d8f 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -106,7 +106,7 @@ def load(configuration, logger): if configuration.getboolean("storage", "cache"): Items_cache_active = True Props_cache_active = True - Items_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") + Items_cache_counter.loginterval = seconds=configuration.getint("logging", "cache_statistics_interval") Props_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") if configuration.getboolean("logging", "performance"): if Items_cache_active: @@ -293,7 +293,7 @@ def __init__(self): # 0: on each request (incl. current request) # >0: after at least every given loginterval (excl. 
current request) self.lastlog= datetime.datetime.now() - self.loginterval = datetime.timedelta(seconds=60) # default + self.loginterval = 60 # default def string_overall(self): if (self.entries > 0): @@ -313,11 +313,11 @@ def string_overall(self): return(message) def log_overall(self, token, logger): - if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > self.loginterval): + if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > datetime.timedelta(seconds=self.loginterval)): logger.info("%s cache overall statistics: %s", token, self.string_overall()) self.lastlog = datetime.datetime.now() else: - logger.debug("%s cache overall statistics: %s", token, self.string()) + logger.debug("%s cache overall statistics: %s", token, self.string_overall()) ## cache entry From e9a1af0ffe2230d489d4f42e8b9f54fe3f6eb87d Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 01:24:30 +0000 Subject: [PATCH 080/101] extend description --- config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config b/config index 467dcc82f..5393d5570 100644 --- a/config +++ b/config @@ -111,7 +111,7 @@ # Example: git add -A && (git diff --cached --quiet || git commit -m "Changes by "%(user)s) #hook = -# Caching (of Items) in memory +# Caching (of Items and Props files) in memory cache = True From 9be525ec13c22cf08de0520753f655ec941f78b0 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 01:27:33 +0000 Subject: [PATCH 081/101] remove obsolete code --- radicale/storage.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 47ef70d8f..614a13c47 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -792,11 +792,6 @@ def move(cls, item, to_collection, to_href): cls._sync_directory(item.collection._filesystem_path) def list(self): - global Items_cache_data - global Items_cache_counter - global Items_cache_active - global 
Props_cache_counter - global Props_cache_active for href in os.listdir(self._filesystem_path): if not is_safe_filesystem_path_component(href): if not href.startswith(".Radicale"): From 0621ca21911dd443a9582cdc4fde45d0d4a3d583 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 02:17:35 +0000 Subject: [PATCH 082/101] add also support for debug_filter --- config | 3 +++ radicale/rights.py | 17 +++++++++++------ 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/config b/config index 5393d5570..bfb0b6e16 100644 --- a/config +++ b/config @@ -128,8 +128,11 @@ cache = True # suppress selectable debug messages (to keep debug log cleaner) debug_filter = 0xffff +# 0x0080: suppress 'rights' "Test if" + "Section" # 0x0100: suppress "read object" +# 0x0200: suppress "delete object from cache" # 0x0400: suppress "read props" +# 0x0800: suppress "delete props from cache" # 0x1000: suppress "store object in cache" # 0x2000: suppress "retrieve object from cache" # 0x4000: suppress "store props in cache" diff --git a/radicale/rights.py b/radicale/rights.py index 00928435a..2d382f78a 100644 --- a/radicale/rights.py +++ b/radicale/rights.py @@ -91,6 +91,7 @@ class BaseRights: def __init__(self, configuration, logger): self.configuration = configuration self.logger = logger + self.debug_filter = int(configuration.get("logging", "debug_filter"), 0) def authorized(self, user, collection, permission): """Check if the user is allowed to read or write the collection. 
@@ -122,7 +123,8 @@ def authorized(self, user, path, permission): self.logger.debug("Rights type '%s'", self.rights_type) regex.readfp(StringIO(DEFINED_RIGHTS[self.rights_type])) else: - self.logger.debug("Reading rights from file '%s'", self.filename) + if not self.debug_filter & 0x0080: + self.logger.debug("Reading rights from file '%s'", self.filename) if not regex.read(self.filename): self.logger.error( "File '%s' not found for rights", self.filename) @@ -131,17 +133,20 @@ def authorized(self, user, path, permission): for section in regex.sections(): re_user = regex.get(section, "user") re_collection = regex.get(section, "collection") - self.logger.debug( - "Test if '%s:%s' matches against '%s:%s' from section '%s'", - user, sane_path, re_user, re_collection, section) + if not self.debug_filter & 0x0080: + self.logger.debug( + "Test if '%s:%s' matches against '%s:%s' from section '%s'", + user, sane_path, re_user, re_collection, section) # Emulate fullmatch user_match = re.match(r"(?:%s)\Z" % re_user, user) if user_match: re_collection = re_collection.format(*user_match.groups()) # Emulate fullmatch if re.match(r"(?:%s)\Z" % re_collection, sane_path): - self.logger.debug("Section '%s' matches", section) + if not self.debug_filter & 0x0080: + self.logger.debug("Section '%s' matches", section) return permission in regex.get(section, "permission") else: - self.logger.debug("Section '%s' does not match", section) + if not self.debug_filter & 0x0080: + self.logger.debug("Section '%s' does not match", section) return False From b64e597180c12867139122e9eb4cd38c3ad109b5 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 02:17:44 +0000 Subject: [PATCH 083/101] review debug_filter and locking and global cache --- radicale/storage.py | 60 +++++++++++++++++++++------------------------ 1 file changed, 28 insertions(+), 32 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 614a13c47..05a8a3bde 100644 --- a/radicale/storage.py +++ 
b/radicale/storage.py @@ -85,16 +85,13 @@ class Overlapped(ctypes.Structure): elif os.name == "posix": import fcntl -logging_filter = 0xffff - def load(configuration, logger): - global logging_filter """Load the storage manager chosen in configuration.""" storage_type = configuration.get("storage", "type") logger.info("Radicale storage manager loading: %s", storage_type) - logging_filter = int(configuration.get("logging", "debug_filter"), 0) - if (logging_filter > 0): - logger.info("debug filter active with: 0x%x", logging_filter) + debug_filter = int(configuration.get("logging", "debug_filter"), 0) + if (debug_filter > 0): + logger.info("debug filter active with: 0x%x", debug_filter) if storage_type == "multifilesystem": collection_class = Collection @@ -125,6 +122,9 @@ def load(configuration, logger): logger.info("Props cache enabled (regular statistics log on info level with minimum interval %d sec)", Props_cache_counter.loginterval) else: logger.info("Items cache enabled (statistics log only on debug level)") + else: + logger.info("Items cache disabled") + logger.info("Props cache disabled") else: collection_class = import_module(storage_type).Collection logger.info("Radicale storage manager successfully loaded: %s", storage_type) @@ -346,9 +346,7 @@ def __init__(self, props_contents, size, last_modified_time): ## global functions to be called also from other modules def cache_log_statistics_overall(self): - global Items_cache_active global Items_cache_counter - global Props_cache_active global Props_cache_counter if Items_cache_active: Items_cache_counter.log_overall("Items", self.logger) @@ -559,6 +557,7 @@ def __init__(self, path, principal=False, folder=None): split_path = self.path.split("/") self.owner = split_path[0] if len(split_path) > 1 else None self.is_principal = principal + self.debug_filter = int(self.configuration.get("logging", "debug_filter"), 0) @classmethod def _get_collection_root_folder(cls): @@ -805,6 +804,7 @@ def get(self, href): global 
Items_cache_data global Items_cache_counter global Items_cache_active + global Items_cache_lock if not href: return None if not is_safe_filesystem_path_component(href): @@ -831,9 +831,9 @@ def get(self, href): Item_cache_hit = 1 else: Items_cache_counter.dirty += 1 - self.Cache_counter_Items.dirty += 1 # remove from cache - self.logger.debug("Item delete from cache (dirty): %s", path) + if not self.debug_filter & 0x0200: + self.logger.debug("Item delete from cache (dirty): %s", path) Items_cache_counter.entries -= 1 Items_cache_counter.size -= Items_cache_data[path].size del Items_cache_data[path] @@ -845,7 +845,7 @@ def get(self, href): text = f.read() try: - if not logging_filter & 0x0100: + if not self.debug_filter & 0x0100: self.logger.debug("Item read ('get'): %s", path) item = vobject.readOne(text) except Exception as e: @@ -874,13 +874,13 @@ def get(self, href): if Items_cache_active: # store in cache - if not logging_filter & 0x1000: + if not self.debug_filter & 0x1000: self.logger.debug("Item store in cache: %s", path) Items_cache_data[path] = Item_cache_entry(Item_entry, len(str(Item_entry.item)) + len(path), last_modified_time) Items_cache_counter.size += Items_cache_data[path].size Items_cache_counter.entries += 1 else: - if not logging_filter & 0x2000: + if not self.debug_filter & 0x2000: self.logger.debug("Item retrieve from cache: %s", path) Item_entry = Items_cache_data[path].Item @@ -899,7 +899,9 @@ def upload(self, href, vobject_item): def delete(self, href=None): global Items_cache_data + global Items_cache_counter global Items_cache_active + global Items_cache_lock if href is None: # Delete the collection parent_dir = os.path.dirname(self._filesystem_path) @@ -920,23 +922,25 @@ def delete(self, href=None): path = path_to_filesystem(self._filesystem_path, href) if not os.path.isfile(path): raise ComponentNotFoundError(href) - os.remove(path) - self._sync_directory(os.path.dirname(path)) if Items_cache_active: - Props_cache_lock.acquire() - # 
remove from cache, if existing + Items_cache_lock.acquire() if path in Items_cache_data: + # remove from cache, if existing self.logger.debug("Item delete from cache ('delete'): %s", path) Items_cache_counter.entries -= 1 Items_cache_counter.size -= Items_cache_data[path].size del Items_cache_data[path] - Props_cache_lock.release() + os.remove(path) + if Items_cache_active: + Items_cache_lock.release() + self._sync_directory(os.path.dirname(path)) def get_meta(self, key=None): def get_meta(self, key): - global logging_filter + global Props_cache_data global Props_cache_active global Props_cache_counter + global Props_cache_lock if os.path.exists(self._props_path): Props_cache_hit = 0 if Props_cache_active: @@ -951,7 +955,8 @@ def get_meta(self, key): else: Props_cache_counter.dirty += 1 # remove from cache - self.logger.debug("Props delete from cache (dirty): %s", self._props_path) + if not self.debug_filter & 0x0800: + self.logger.debug("Props delete from cache (dirty): %s", self._props_path) Props_cache_counter.size -= Props_cache_data[self._props_path].size Props_cache_counter.entries -= 1 del Props_cache_data[self._props_path] @@ -960,13 +965,13 @@ def get_meta(self, key): if Props_cache_hit == 0 or Props_cache_active == False: with open(self._props_path, encoding=self.encoding) as f: - if not logging_filter & 0x0400: + if not self.debug_filter & 0x0400: self.logger.debug("Props read ('get_meta') : %s", self._props_path) props_contents = json.load(f) if Props_cache_active: # cache handling - if not logging_filter & 0x4000: + if not self.debug_filter & 0x4000: self.logger.debug("Props store in cache : %s", self._props_path) Props_cache_data[self._props_path] = Props_cache_entry(props_contents, len(str(props_contents)) + len(self._props_path), last_modified_time) Props_cache_counter.size += Props_cache_data[self._props_path].size @@ -974,7 +979,7 @@ def get_meta(self, key): meta = props_contents.get(key) if key else props_contents else: - if not logging_filter & 
0x8000: + if not self.debug_filter & 0x8000: self.logger.debug("Props retrieve from cache: %s", self._props_path) meta = Props_cache_data[self._props_path].props_contents.get(key) if key else Props_cache_data[self._props_path].props_contents @@ -983,16 +988,7 @@ def get_meta(self, key): return meta def set_meta(self, props): - global Props_cache_active - global Props_cache_counter if os.path.exists(self._props_path): - with Props_cache_lock: - if Props_cache_active: - if self._props_path in Props_cache_data: - self.logger.debug("Props delete from cache ('set_meta'): %s", path) - Props_cache_counter.size -= Props_cache_data[self._props_path].size - Props_cache_counter.entries -= 1 - del Props_cache_data[self._props_path] with open(self._props_path, encoding=self.encoding) as f: self.logger.debug("Write props ('set_meta'): %s", self._props_path) old_props = json.load(f) From c671d16dea4d40ff21d744a6a01463b04aa53679 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 03:25:35 +0000 Subject: [PATCH 084/101] implement a request token to be able to match concurrent requests in (debug) log --- radicale/__init__.py | 39 ++++++++++++++++++++++++--------------- 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index eea5fc517..7b39bcef2 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -42,6 +42,7 @@ import wsgiref.simple_server import zlib import datetime +from hashlib import md5 from http import client from urllib.parse import unquote, urlparse @@ -206,6 +207,7 @@ def __init__(self, configuration, logger): self.authorized = rights.load(configuration, logger) self.encoding = configuration.get("encoding", "request") self.debug = configuration.getboolean("logging", "debug") + self.debug_filter = int(configuration.get("logging", "debug_filter"), 0) def headers_log(self, environ): """Sanitize headers for logging.""" @@ -262,18 +264,20 @@ def collect_allowed_items(self, items, user): else: 
path = item.collection.path if self.authorized(user, path, "r"): - self.logger.debug( - "%s has read access to collection %s", - user or "Anonymous", path or "/") + if not self.debug_filter & 0x0080: + self.logger.debug( + "%s has read access to collection %s", + user or "Anonymous", path or "/") read_allowed_items.append(item) else: self.logger.debug( - "%s has NO read access to collection %s", - user or "Anonymous", path or "/") + "%s has NO read access to collection %s", + user or "Anonymous", path or "/") if self.authorized(user, path, "w"): - self.logger.debug( - "%s has write access to collection %s", - user or "Anonymous", path or "/") + if not self.debug_filter & 0x0080: + self.logger.debug( + "%s has write access to collection %s", + user or "Anonymous", path or "/") write_allowed_items.append(item) else: self.logger.debug( @@ -289,7 +293,7 @@ def response(status, headers=(), answer=None): # Set content length if answer: if self.debug: - self.logger.debug("Response content:\n%s", answer) + self.logger.debug("Response content [%s]:\n%s", self.request_token, answer) answer = answer.encode(self.encoding) accept_encoding = [ encoding.strip() for encoding in @@ -326,7 +330,8 @@ def response(status, headers=(), answer=None): if len(sizeinfo) > 0: sizeinfo = " (" + sizeinfo + ")" self.logger.info( - "%s response status for %s in %s sec: %s", + "[%s] %s response status for %s in %s sec: %s", + self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) # Return response content @@ -347,13 +352,17 @@ def response(status, headers=(), answer=None): if environ.get("HTTP_DEPTH"): depthinfo = " with depth " + environ["HTTP_DEPTH"] time_begin = datetime.datetime.now() - self.logger.info( - "%s request for %s received from %s using \"%s\"", + + # Create an unique request token + self.request_token = md5((environ["PATH_INFO"] + depthinfo + remote_host + remote_useragent 
+str(time_begin)).encode('utf-8')).hexdigest()[1:8] + + self.logger.info("[%s] %s request for %s received from %s using \"%s\"", + self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) - headers = pprint.pformat(self.headers_log(environ)) if self.debug: - self.logger.debug("Request headers:\n%s", headers) + headers = pprint.pformat(self.headers_log(environ)) + self.logger.debug("Request headers [%s]:\n%s", self.request_token, headers) # Let reverse proxies overwrite SCRIPT_NAME if "HTTP_X_SCRIPT_NAME" in environ: @@ -465,7 +474,7 @@ def _read_content(self, environ): content = self.decode( environ["wsgi.input"].read(content_length), environ) if self.debug: - self.logger.debug("Request content:\n%s", content.strip()) + self.logger.debug("Request content [%s]:\n%s", self.request_token, content.strip()) else: content = None return content From 214f5dc8e9a1d1983563b6265649d13500de85ca Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Wed, 19 Oct 2016 03:48:59 +0000 Subject: [PATCH 085/101] bugfix token handling --- radicale/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 7b39bcef2..1f7aea28d 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -354,7 +354,9 @@ def response(status, headers=(), answer=None): time_begin = datetime.datetime.now() # Create an unique request token - self.request_token = md5((environ["PATH_INFO"] + depthinfo + remote_host + remote_useragent +str(time_begin)).encode('utf-8')).hexdigest()[1:8] + request_token = md5((environ["PATH_INFO"] + depthinfo + remote_host + remote_useragent +str(time_begin)).encode('utf-8')).hexdigest()[1:8] + # store token for header/request/response logging + self.request_token = request_token self.logger.info("[%s] %s request for %s received from %s using \"%s\"", self.request_token, From 0208a74146c2bb5a0dbf3ddf496b8d13d2a959bc Mon Sep 17 00:00:00 2001 From: Peter 
Bieringer Date: Wed, 19 Oct 2016 03:57:04 +0000 Subject: [PATCH 086/101] shift cache logging below response logging --- radicale/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 1f7aea28d..28cd70244 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -318,8 +318,6 @@ def response(status, headers=(), answer=None): status, client.responses.get(status, "Unknown")) start_response(status, list(headers.items())) time_end = datetime.datetime.now() - if self.configuration.getboolean("storage", "cache"): - storage.cache_log_statistics_overall(self) sizeinfo = "" if answer: sizeinfo = sizeinfo + str(len(str(answer))) + " bytes" @@ -334,6 +332,8 @@ def response(status, headers=(), answer=None): self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) + if self.configuration.getboolean("storage", "cache"): + storage.cache_log_statistics_overall(self) # Return response content return [answer] if answer else [] From e9e638f5f7c33500b65488fc4698de3544de910f Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 25 Oct 2016 07:27:14 +0200 Subject: [PATCH 087/101] implement separate config options for items/props cache --- config | 6 ++++-- radicale/storage.py | 21 ++++++++++++--------- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/config b/config index bfb0b6e16..60dd33e6b 100644 --- a/config +++ b/config @@ -111,8 +111,10 @@ # Example: git add -A && (git diff --cached --quiet || git commit -m "Changes by "%(user)s) #hook = -# Caching (of Items and Props files) in memory -cache = True +# Caching of Items files in memory +cache_items = False +# Caching of Props files in memory +cache_props = False [logging] diff --git a/radicale/storage.py b/radicale/storage.py index 05a8a3bde..67d091b42 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -100,30 +100,33 @@ def 
load(configuration, logger): global Props_cache_active global Items_cache_counter global Props_cache_counter - if configuration.getboolean("storage", "cache"): + if configuration.getboolean("storage", "cache_items"): Items_cache_active = True - Props_cache_active = True Items_cache_counter.loginterval = seconds=configuration.getint("logging", "cache_statistics_interval") - Props_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") if configuration.getboolean("logging", "performance"): if Items_cache_active: logger.info("Items cache enabled (performance log on info level)") Items_cache_counter.perflog = True - if Props_cache_active: - logger.info("Props cache enabled (performance log on info level)") - Props_cache_counter.perflog = True else: if (Items_cache_counter.loginterval > 0): logger.info("Items cache enabled (regular statistics log on info level with minimum interval %d sec)", Items_cache_counter.loginterval) else: logger.info("Items cache enabled (statistics log only on debug level)") - + else: + logger.info("Items cache disabled") + if configuration.getboolean("storage", "cache_props"): + Props_cache_active = True + Props_cache_counter.loginterval = configuration.getint("logging", "cache_statistics_interval") + if configuration.getboolean("logging", "performance"): + if Props_cache_active: + logger.info("Props cache enabled (performance log on info level)") + Props_cache_counter.perflog = True + else: if (Props_cache_counter.loginterval > 0): logger.info("Props cache enabled (regular statistics log on info level with minimum interval %d sec)", Props_cache_counter.loginterval) else: - logger.info("Items cache enabled (statistics log only on debug level)") + logger.info("Props cache enabled (statistics log only on debug level)") else: - logger.info("Items cache disabled") logger.info("Props cache disabled") else: collection_class = import_module(storage_type).Collection From c06cf84a70398692bc51460fb1387b3d70ff2ba6 Mon Sep 
17 00:00:00 2001 From: Peter Bieringer Date: Tue, 25 Oct 2016 05:33:49 +0000 Subject: [PATCH 088/101] missing adjustments regarding cache config --- radicale/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 28cd70244..e534ed6ff 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -332,7 +332,7 @@ def response(status, headers=(), answer=None): self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) - if self.configuration.getboolean("storage", "cache"): + if self.configuration.getboolean("storage", "cache_items") or self.configuration.getboolean("storage", "cache_props"): storage.cache_log_statistics_overall(self) # Return response content return [answer] if answer else [] From 2bf70180861bdcb5da916a028442a8609d1dab79 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 25 Oct 2016 20:44:44 +0000 Subject: [PATCH 089/101] implement debug_filter for header,request,response --- config | 6 +++++- radicale/__init__.py | 14 +++++++------- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/config b/config index 60dd33e6b..311fcba58 100644 --- a/config +++ b/config @@ -130,7 +130,11 @@ cache_props = False # suppress selectable debug messages (to keep debug log cleaner) debug_filter = 0xffff -# 0x0080: suppress 'rights' "Test if" + "Section" +# 0x0001: suppress "header content" +# 0x0002: suppress "request content" +# 0x0004: suppress "response content" +# 0x0040: suppress "sanitized path" +# 0x0080: suppress "rights" "Test if" + "Section" # 0x0100: suppress "read object" # 0x0200: suppress "delete object from cache" # 0x0400: suppress "read props" diff --git a/radicale/__init__.py b/radicale/__init__.py index e534ed6ff..555a9e1d1 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -292,7 +292,7 @@ def response(status, headers=(), answer=None): headers = dict(headers) # Set 
content length if answer: - if self.debug: + if self.debug and not (self.debug_filter & 0x0004): self.logger.debug("Response content [%s]:\n%s", self.request_token, answer) answer = answer.encode(self.encoding) accept_encoding = [ @@ -328,12 +328,11 @@ def response(status, headers=(), answer=None): if len(sizeinfo) > 0: sizeinfo = " (" + sizeinfo + ")" self.logger.info( - "[%s] %s response status for %s in %s sec: %s", + "[%s] %s response status for %s in %.3f sec status: %s", self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo + sizeinfo, (time_end - time_begin).total_seconds(), status) - if self.configuration.getboolean("storage", "cache_items") or self.configuration.getboolean("storage", "cache_props"): - storage.cache_log_statistics_overall(self) + storage.cache_log_statistics_overall(self) # Return response content return [answer] if answer else [] @@ -362,7 +361,7 @@ def response(status, headers=(), answer=None): self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) - if self.debug: + if self.debug and not (self.debug_filter & 0x0001): headers = pprint.pformat(self.headers_log(environ)) self.logger.debug("Request headers [%s]:\n%s", self.request_token, headers) @@ -379,7 +378,8 @@ def response(status, headers=(), answer=None): base_prefix = environ["SCRIPT_NAME"] # Sanitize request URI environ["PATH_INFO"] = storage.sanitize_path(environ["PATH_INFO"]) - self.logger.debug("Sanitized path: %s", environ["PATH_INFO"]) + if not (self.debug_filter & 0x0040): + self.logger.debug("Sanitized path: %s", environ["PATH_INFO"]) path = environ["PATH_INFO"] # Get function corresponding to method @@ -475,7 +475,7 @@ def _read_content(self, environ): if content_length > 0: content = self.decode( environ["wsgi.input"].read(content_length), environ) - if self.debug: + if self.debug and not (self.debug_filter & 0x0002): self.logger.debug("Request content [%s]:\n%s", self.request_token, 
content.strip()) else: content = None From c47dbb60779f66a65f4dafa29148576f387753d9 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 25 Oct 2016 20:45:32 +0000 Subject: [PATCH 090/101] extend logging on debug level with resource and garbage collector, add try/except around collection serialization to trace down an exception found on productive system --- radicale/storage.py | 53 ++++++++++++++++++++++++++++++++++++--------- 1 file changed, 43 insertions(+), 10 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 67d091b42..ba02d98de 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -36,6 +36,8 @@ import threading import time import datetime +import resource +import gc from contextlib import contextmanager from hashlib import md5 from importlib import import_module @@ -128,6 +130,8 @@ def load(configuration, logger): logger.info("Props cache enabled (statistics log only on debug level)") else: logger.info("Props cache disabled") + if configuration.getboolean("logging", "performance"): + logger.info("General performance log on debug level enabled") else: collection_class = import_module(storage_type).Collection logger.info("Radicale storage manager successfully loaded: %s", storage_type) @@ -315,12 +319,12 @@ def string_overall(self): message = "no cache entries" return(message) - def log_overall(self, token, logger): + def log_overall(self, token, logger, request_token): if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > datetime.timedelta(seconds=self.loginterval)): - logger.info("%s cache overall statistics: %s", token, self.string_overall()) + logger.info("[%s] %s cache overall statistics: %s", request_token, token, self.string_overall()) self.lastlog = datetime.datetime.now() else: - logger.debug("%s cache overall statistics: %s", token, self.string_overall()) + logger.debug("[%s] %s cache overall statistics: %s", request_token, token, self.string_overall()) ## cache entry @@ -352,9 
+356,27 @@ def cache_log_statistics_overall(self): global Items_cache_counter global Props_cache_counter if Items_cache_active: - Items_cache_counter.log_overall("Items", self.logger) + Items_cache_counter.log_overall("Items", self.logger, self.request_token) if Props_cache_active: - Props_cache_counter.log_overall("Props", self.logger) + Props_cache_counter.log_overall("Props", self.logger, self.request_token) + if self.configuration.getboolean("logging", "performance"): + # log process statistics + rusage = resource.getrusage(resource.RUSAGE_THREAD) + self.logger.debug("[%s] ru_utime=%.3f ru_stime=%.3f ru_maxrss=%s ru_inblock=%s ru_oublock=%s", self.request_token, + rusage.ru_utime, + rusage.ru_stime, + rusage.ru_maxrss, + rusage.ru_inblock, + rusage.ru_oublock) + # log garbage collector statistics + self.logger.debug("[%s] gc_count=%s gc_threshold=%s gc.unreachable=%d gc.stats.0=%s gc.stats.1=%s gc.stats.2=%s", self.request_token, + gc.get_count(), + gc.get_threshold(), + len(gc.garbage), + str(gc.get_stats()[0]), + str(gc.get_stats()[1]), + str(gc.get_stats()[2]), + ) ### END Items/Props caching @@ -1020,12 +1042,13 @@ def serialize(self): time_end = datetime.datetime.now() if self.configuration.getboolean("logging", "performance"): self.logger.info( - "Collection read %d items in %s sec from %s", len(items), + "Collection read %d items in %.3f sec from %s", len(items), (time_end - time_begin).total_seconds(), self._filesystem_path) else: self.logger.debug( - "Collection read %d items in %s sec from %s", len(items), + "Collection read %d items in %.3f sec from %s", len(items), (time_end - time_begin).total_seconds(), self._filesystem_path) + result = "" if self.get_meta("tag") == "VCALENDAR": collection = vobject.iCalendar() for item in items: @@ -1034,10 +1057,20 @@ def serialize(self): for item_part in getattr(item, "%s_list" % content): collection.add(item_part) break - return collection.serialize() + try: + result = collection.serialize() + except Exception as e: + 
self.logger.error("VCALENDAR collection serializing broken: %s (%s)", self._filesystem_path, e) + if self.configuration.getboolean("logging", "exceptions"): + self.logger.exception("Exception details:") elif self.get_meta("tag") == "VADDRESSBOOK": - return "".join([item.serialize() for item in items]) - return "" + try: + result = "".join([item.serialize() for item in items]) + except Exception as e: + self.logger.error("VADDRESSBOOK collection serializing broken: %s (%s)", self._filesystem_path, e) + if self.configuration.getboolean("logging", "exceptions"): + self.logger.exception("Exception details:") + return result _lock = threading.Lock() _waiters = [] From ada5176fd9f163a8294337c476680efc1ab0e324 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 27 Oct 2016 06:40:15 +0000 Subject: [PATCH 091/101] optionally log item content after read, fix log lines --- config | 1 + radicale/storage.py | 14 ++++++++------ 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/config b/config index 311fcba58..9bd0128c7 100644 --- a/config +++ b/config @@ -133,6 +133,7 @@ debug_filter = 0xffff # 0x0001: suppress "header content" # 0x0002: suppress "request content" # 0x0004: suppress "response content" +# 0x0008: suppress "item content (on read)" # 0x0040: suppress "sanitized path" # 0x0080: suppress "rights" "Test if" + "Section" # 0x0100: suppress "read object" diff --git a/radicale/storage.py b/radicale/storage.py index ba02d98de..6aa293cc1 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -317,14 +317,14 @@ def string_overall(self): ) else: message = "no cache entries" - return(message) + return message - def log_overall(self, token, logger, request_token): + def log_overall(self, token, logger): if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > datetime.timedelta(seconds=self.loginterval)): - logger.info("[%s] %s cache overall statistics: %s", request_token, token, self.string_overall()) + logger.info("%s cache overall 
statistics: %s", token, self.string_overall()) self.lastlog = datetime.datetime.now() else: - logger.debug("[%s] %s cache overall statistics: %s", request_token, token, self.string_overall()) + logger.debug("%s cache overall statistics: %s", token, self.string_overall()) ## cache entry @@ -356,9 +356,9 @@ def cache_log_statistics_overall(self): global Items_cache_counter global Props_cache_counter if Items_cache_active: - Items_cache_counter.log_overall("Items", self.logger, self.request_token) + Items_cache_counter.log_overall("Items", self.logger) if Props_cache_active: - Props_cache_counter.log_overall("Props", self.logger, self.request_token) + Props_cache_counter.log_overall("Props", self.logger) if self.configuration.getboolean("logging", "performance"): # log process statistics rusage = resource.getrusage(resource.RUSAGE_THREAD) @@ -873,6 +873,8 @@ def get(self, href): if not self.debug_filter & 0x0100: self.logger.debug("Item read ('get'): %s", path) item = vobject.readOne(text) + if not self.debug_filter & 0x0008: + self.logger.debug("Item content ('get'): %s:\n%s", path, str(item)) except Exception as e: self.logger.error("Object broken on read (skip 'get'): %s (%s)", path, e) if self.configuration.getboolean("logging", "exceptions"): From 547b379dc53d647ff5b043125b70779dedf8c4e8 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Thu, 27 Oct 2016 22:12:10 +0200 Subject: [PATCH 092/101] fix copyright --- radicale-storage-check.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/radicale-storage-check.py b/radicale-storage-check.py index 5a68d1d8a..13e5af90e 100755 --- a/radicale-storage-check.py +++ b/radicale-storage-check.py @@ -1,8 +1,7 @@ #!/usr/bin/python3.4 -s # # This file is a tool for the Radicale Server - Calendar Server -# Copyright © 2014 Jean-Marc Martins -# Copyright © 2012-2016 Guillaume Ayoub +# Copyright © 2016 Peter Bieringer # # This library is free software: you can redistribute it and/or modify # it under the terms 
of the GNU General Public License as published by From fd186dc9ff6a1f2513d1ca398c0594ddf6b43d3e Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 13 Nov 2016 11:08:17 +0000 Subject: [PATCH 093/101] remove not implemented offering of "sync-collection" on REPORT request preventing at least SOGo connector to start using "sync-token" queries, which are not supported. See also: - https://github.com/Kozea/Radicale/issues/306 - https://sogo.nu/bugs/view.php?id=3896 --- radicale/xmlutils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/radicale/xmlutils.py b/radicale/xmlutils.py index 375117c13..79316178d 100644 --- a/radicale/xmlutils.py +++ b/radicale/xmlutils.py @@ -679,7 +679,7 @@ def _propfind_response(base_prefix, path, item, props, user, write=False, element.append(privilege) elif tag == _tag("D", "supported-report-set"): for report_name in ( - "principal-property-search", "sync-collection", + "principal-property-search", "expand-property", "principal-search-property-set"): supported = ET.Element(_tag("D", "supported-report")) report_tag = ET.Element(_tag("D", "report")) From 0edecfc3447a0e98fa2679a4444d000899beb8df Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 15 Nov 2016 22:15:12 +0100 Subject: [PATCH 094/101] catch broken items during put --- radicale/__init__.py | 3 +++ radicale/storage.py | 10 +++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 555a9e1d1..1574ff35f 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -699,6 +699,9 @@ def do_PUT(self, environ, base_prefix, path, user): parent_item.set_meta({"tag": tag}) href = posixpath.basename(path.strip("/")) new_item = parent_item.upload(href, items[0]) + if not new_item: + # upload not possible because of error + return PRECONDITION_FAILED headers = {"ETag": new_item.etag} return client.CREATED, headers, None diff --git a/radicale/storage.py b/radicale/storage.py index 
6aa293cc1..881deaf7b 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -921,7 +921,15 @@ def upload(self, href, vobject_item): path = path_to_filesystem(self._filesystem_path, href) item = Item(self, vobject_item, href) with self._atomic_write(path, newline="") as fd: - fd.write(item.serialize()) + try: + item_serialized = item.serialize() + except Exception as e: + self.logger.error("Object broken on serialize (skip 'upload'): %s (%s)", href, e) + self.logger.error("Item content ('upload'): %s:\n%s", href, str(vobject_item)) + if self.configuration.getboolean("logging", "exceptions"): + self.logger.exception("Exception details:") + return None; + fd.write(item_serialized) return item def delete(self, href=None): From cb1f9a6de026cd66c07de39d230f957b9b2664da Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 15 Nov 2016 22:31:33 +0100 Subject: [PATCH 095/101] detect empty files (created by older put which broke on item.serialize) --- radicale/storage.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/radicale/storage.py b/radicale/storage.py index 881deaf7b..f2e96014c 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -868,6 +868,12 @@ def get(self, href): if Item_cache_hit == 0 or Items_cache_active == False: with open(path, encoding=self.encoding, newline="") as f: text = f.read() + if len(text) == 0: + # empty file + self.logger.error("Object empty (skip 'get'): %s", path) + if Items_cache_active: + Items_cache_lock.release() + return None; try: if not self.debug_filter & 0x0100: From 3047bba4be19446e70ff2a814bd99653539900d2 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Tue, 15 Nov 2016 22:35:08 +0100 Subject: [PATCH 096/101] check item.serialize before creating file to avoid empty file --- radicale/storage.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index f2e96014c..0f2f4efa1 100644 --- a/radicale/storage.py +++ b/radicale/storage.py 
@@ -926,15 +926,15 @@ def upload(self, href, vobject_item): raise UnsafePathError(href) path = path_to_filesystem(self._filesystem_path, href) item = Item(self, vobject_item, href) + try: + item_serialized = item.serialize() + except Exception as e: + self.logger.error("Object broken on serialize (skip 'upload'): %s (%s)", href, e) + self.logger.error("Item content ('upload'): %s:\n%s", href, str(vobject_item)) + if self.configuration.getboolean("logging", "exceptions"): + self.logger.exception("Exception details:") + return None; with self._atomic_write(path, newline="") as fd: - try: - item_serialized = item.serialize() - except Exception as e: - self.logger.error("Object broken on serialize (skip 'upload'): %s (%s)", href, e) - self.logger.error("Item content ('upload'): %s:\n%s", href, str(vobject_item)) - if self.configuration.getboolean("logging", "exceptions"): - self.logger.exception("Exception details:") - return None; fd.write(item_serialized) return item From 4566e35196bc1c6b328c226840de4718f5f8f389 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 12 Mar 2017 11:02:49 +0100 Subject: [PATCH 097/101] fix rebase merging leftovers --- radicale/__init__.py | 5 ++--- radicale/storage.py | 1 - 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 1574ff35f..a0c66d195 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -357,7 +357,8 @@ def response(status, headers=(), answer=None): # store token for header/request/response logging self.request_token = request_token - self.logger.info("[%s] %s request for %s received from %s using \"%s\"", + self.logger.info( + "[%s] %s request for %s received from %s using \"%s\"", self.request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) @@ -631,7 +632,6 @@ def do_PROPFIND(self, environ, base_prefix, path, user): headers = {"DAV": DAV_HEADERS, "Content-Type": "text/xml"} status, answer = 
xmlutils.propfind( base_prefix, path, content, read_items, write_items, user) - storage.log_cache_statistics(self) if status == client.FORBIDDEN: return NOT_ALLOWED else: @@ -722,5 +722,4 @@ def do_REPORT(self, environ, base_prefix, path, user): collection = item.collection headers = {"Content-Type": "text/xml"} answer = xmlutils.report(base_prefix, path, content, collection) - storage.log_cache_statistics(self) return client.MULTI_STATUS, headers, answer diff --git a/radicale/storage.py b/radicale/storage.py index 0f2f4efa1..9c1697694 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -977,7 +977,6 @@ def delete(self, href=None): self._sync_directory(os.path.dirname(path)) def get_meta(self, key=None): - def get_meta(self, key): global Props_cache_data global Props_cache_active global Props_cache_counter From c83f39fc6c934a972b328af5daa35c7af13be15e Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 12 Mar 2017 11:06:47 +0100 Subject: [PATCH 098/101] fix merge/rebase leftovers --- radicale/storage.py | 59 ++++++++++----------------------------------- 1 file changed, 13 insertions(+), 46 deletions(-) diff --git a/radicale/storage.py b/radicale/storage.py index 9c1697694..371f6874a 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -87,6 +87,7 @@ class Overlapped(ctypes.Structure): elif os.name == "posix": import fcntl + def load(configuration, logger): """Load the storage manager chosen in configuration.""" storage_type = configuration.get("storage", "type") @@ -234,17 +235,6 @@ def path_to_filesystem(root, *paths): return safe_path -def log_cache_statistics(self): - global Item_cache_active - global Item_cache_counter - global Props_cache_active - global Props_cache_counter - if Item_cache_active: - Item_cache_counter.log("Items", self.logger) - if Props_cache_active: - Props_cache_counter.log("Props", self.logger) - - class UnsafePathError(ValueError): def __init__(self, path): message = "Can't translate name safely to filesystem: %s" 
% path @@ -287,6 +277,17 @@ def etag(self): ### BEGIN Items/Props caching ## cache counter statistics +def log_cache_statistics(self): + global Item_cache_active + global Item_cache_counter + global Props_cache_active + global Props_cache_counter + if Item_cache_active: + Item_cache_counter.log("Items", self.logger) + if Props_cache_active: + Props_cache_counter.log("Props", self.logger) + + class Cache_counter: def __init__(self): self.lookup = 0 @@ -317,7 +318,7 @@ def string_overall(self): ) else: message = "no cache entries" - return message + return message def log_overall(self, token, logger): if (self.perflog) or (self.loginterval == 0) or (datetime.datetime.now() - self.lastlog > datetime.timedelta(seconds=self.loginterval)): @@ -381,40 +382,6 @@ def cache_log_statistics_overall(self): ### END Items/Props caching -class UnsafePathError(ValueError): - def __init__(self, path): - message = "Can't translate name safely to filesystem: %s" % path - super().__init__(message) - - -class ComponentExistsError(ValueError): - def __init__(self, path): - message = "Component already exists: %s" % path - super().__init__(message) - - -class ComponentNotFoundError(ValueError): - def __init__(self, path): - message = "Component doesn't exist: %s" % path - super().__init__(message) - - -class Item: - def __init__(self, collection, item, href, last_modified=None): - self.collection = collection - self.item = item - self.href = href - self.last_modified = last_modified - - def __getattr__(self, attr): - return getattr(self.item, attr) - - @property - def etag(self): - """Encoded as quoted-string (see RFC 2616).""" - return get_etag(self.serialize()) - - class BaseCollection: # Overriden on copy by the "load" function From 150f6300004e92d2e17287e19697b2abc0a41d75 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 12 Mar 2017 11:49:31 +0000 Subject: [PATCH 099/101] fixing improper extensions of config --- radicale/config.py | 24 ++++++++++++------------ 1 file changed, 
12 insertions(+), 12 deletions(-) diff --git a/radicale/config.py b/radicale/config.py index c7943a7bb..692c35146 100644 --- a/radicale/config.py +++ b/radicale/config.py @@ -115,15 +115,15 @@ ("filesystem_close_lock_file", { "value": "False", "help": "close the lock file when no more clients are waiting"}), - ("hook", { - "value": "", - "help": "command that is run after changes to storage"})])), ("cache_items", { "value": "False", "help": "enable item file caching in memory"}), ("cache_props", { "value": "False", "help": "enable props file caching in memory"}), + ("hook", { + "value": "", + "help": "command that is run after changes to storage"})])), ("logging", OrderedDict([ ("config", { "value": "/etc/radicale/logging", @@ -134,22 +134,22 @@ "aliases": ["-D", "--debug"]}), ("debug_filter", { "value": "0xffff", - "help": "filter debug log (binary mask)") - ("exceptions": { + "help": "filter debug log (binary mask)"}), + ("exceptions", { "value" : "False", - "help": "include exceptions in logs") + "help": "include exceptions in logs"}), ("full_environment", { "value": "False", "help": "store all environment variables"}), - ("mask_passwords", { - "value": "True", - "help": "mask passwords in logs"})]))]) ("cache_statistics_interval", { "value": "300", - "help": "log interval of item/props cache statistics on info level") - ("performance": { + "help": "log interval of item/props cache statistics on info level"}), + ("performance", { "value": "False", - "help": "include cache/collections performance statistics in logs on info level") + "help": "include cache/collections performance statistics in logs on info level"}), + ("mask_passwords", { + "value": "True", + "help": "mask passwords in logs"})]))]) def load(paths=(), extra_config=None): From 2aeff8adc42d36ae3d03199c414861adc39686cb Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 12 Mar 2017 12:00:42 +0000 Subject: [PATCH 100/101] remove duplicate code (merge issue) --- radicale/__init__.py | 24 
------------------------ 1 file changed, 24 deletions(-) diff --git a/radicale/__init__.py b/radicale/__init__.py index 7b640da5b..a0c66d195 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -357,30 +357,6 @@ def response(status, headers=(), answer=None): # store token for header/request/response logging self.request_token = request_token - self.logger.info("[%s] %s request for %s received from %s using \"%s\"", - request_token, environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo, remote_host, remote_useragent) - if self.debug and not (self.debug_filter & 0x0001): - headers = pprint.pformat(self.headers_log(environ)) - self.logger.debug("Request headers [%s]:\n%s", self.request_token, headers) - - # Strip base_prefix from request URI - base_prefix = self.configuration.get("server", "base_prefix") - if environ["PATH_INFO"].startswith(base_prefix): - environ["PATH_INFO"] = environ["PATH_INFO"][len(base_prefix):] - elif self.configuration.get("server", "can_skip_base_prefix"): - self.logger.debug( - "Prefix already stripped from path: %s", environ["PATH_INFO"]) - else: - # Request path not starting with base_prefix, not allowed - self.logger.debug( - "Path not starting with prefix: %s", environ["PATH_INFO"]) - return response(*NOT_ALLOWED) - - # Create an unique request token - request_token = md5((environ["PATH_INFO"] + depthinfo + remote_host + remote_useragent +str(time_begin)).encode('utf-8')).hexdigest()[1:8] - # store token for header/request/response logging - self.request_token = request_token - self.logger.info( "[%s] %s request for %s received from %s using \"%s\"", self.request_token, From ba3f72e4231f9d1d77c42a213444cfd385370a21 Mon Sep 17 00:00:00 2001 From: Peter Bieringer Date: Sun, 12 Mar 2017 22:24:13 +0100 Subject: [PATCH 101/101] cosmetics --- config | 2 +- radicale/__init__.py | 4 ++-- radicale/config.py | 6 +++--- radicale/storage.py | 1 + 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/config b/config index 
9bd0128c7..99220b478 100644 --- a/config +++ b/config @@ -151,7 +151,7 @@ debug_filter = 0xffff # Don't include passwords in logs #mask_passwords = True -# Include cache/collections performance statistics in logs on info level +# Include performance statistics in logs on info level performance = False # Include exceptions in logs diff --git a/radicale/__init__.py b/radicale/__init__.py index a0c66d195..4ba2866ca 100644 --- a/radicale/__init__.py +++ b/radicale/__init__.py @@ -271,8 +271,8 @@ def collect_allowed_items(self, items, user): read_allowed_items.append(item) else: self.logger.debug( - "%s has NO read access to collection %s", - user or "Anonymous", path or "/") + "%s has NO read access to collection %s", + user or "Anonymous", path or "/") if self.authorized(user, path, "w"): if not self.debug_filter & 0x0080: self.logger.debug( diff --git a/radicale/config.py b/radicale/config.py index 692c35146..45527c6de 100644 --- a/radicale/config.py +++ b/radicale/config.py @@ -138,15 +138,15 @@ ("exceptions", { "value" : "False", "help": "include exceptions in logs"}), + ("performance", { + "value": "False", + "help": "include performance statistics in logs on info level"}), ("full_environment", { "value": "False", "help": "store all environment variables"}), ("cache_statistics_interval", { "value": "300", "help": "log interval of item/props cache statistics on info level"}), - ("performance", { - "value": "False", - "help": "include cache/collections performance statistics in logs on info level"}), ("mask_passwords", { "value": "True", "help": "mask passwords in logs"})]))]) diff --git a/radicale/storage.py b/radicale/storage.py index d25e3fde6..618c915b2 100644 --- a/radicale/storage.py +++ b/radicale/storage.py @@ -87,6 +87,7 @@ class Overlapped(ctypes.Structure): elif os.name == "posix": import fcntl + def load(configuration, logger): """Load the storage manager chosen in configuration.""" storage_type = configuration.get("storage", "type")