changeset 3725:9b45f0f168cf

tools (common): new `async_utils` module with an async version of `lru_cache`
author Goffi <goffi@goffi.org>
date Tue, 25 Jan 2022 17:25:10 +0100
parents a0c08fcfe11e
children 33d75cd3c371
files sat/tools/common/async_utils.py
diffstat 1 files changed, 48 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sat/tools/common/async_utils.py	Tue Jan 25 17:25:10 2022 +0100
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+
+
+# Libervia: an XMPP client
+# Copyright (C) 2009-2021 Jérôme Poisson (goffi@goffi.org)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""tools to launch process in a async way (using Twisted)"""
+
+from collections import OrderedDict
+from typing import Optional, Callable, Awaitable
+from sat.core.log import getLogger
+
+
+log = getLogger(__name__)
+
+
+def async_lru(maxsize: Optional[int] = None) -> Callable:
+    """Decorator to cache async function results using LRU algorithm"""
+    def decorator(func: Callable[..., Awaitable]) -> Callable:
+        cache = OrderedDict()
+        async def wrapper(*args):
+            if args in cache:
+                log.debug(f"using result in cache for {args}")
+                cache.move_to_end(args)
+                result = cache[args]
+                return result
+            log.debug(f"caching result for {args}")
+            result = await func(*args)
+            cache[args] = result
+            if maxsize is not None and len(cache) > maxsize:
+                expired = cache.popitem(last=False)
+                log.debug(f"Removing LRU entry: {expired}")
+            return result
+        return wrapper
+    return decorator
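Usage sketch (illustration only, not part of the changeset): the decorator caches a coroutine's result keyed on its tuple of positional arguments, so the arguments must be hashable and keyword arguments are not supported. The example assumes the sat package is importable and drives the coroutine with plain asyncio for brevity, even though Libervia itself runs on Twisted; slow_square and the chosen maxsize are hypothetical.

    import asyncio
    from sat.tools.common.async_utils import async_lru

    @async_lru(maxsize=2)
    async def slow_square(n: int) -> int:
        # stand-in for an expensive asynchronous operation (network call, disk read, etc.)
        await asyncio.sleep(0.1)
        return n * n

    async def main():
        print(await slow_square(3))  # computed, then stored in the cache
        print(await slow_square(3))  # cache hit: returned without re-running the coroutine
        print(await slow_square(4))
        print(await slow_square(5))  # cache already holds 2 entries: the LRU one, (3,), is evicted

    asyncio.run(main())

With maxsize=None (the default here) the cache grows without bound, behaving like functools.lru_cache(maxsize=None).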