Mercurial > libervia-backend
view libervia/backend/tools/common/async_utils.py @ 4095:684ba556a617
core (memory/sqla_mapping): fix legacy pickled values:
following packages refactoring, legacy pickled values could not be unpickled (due to use of
old classes). This temporary workaround fixes it, but the right thing to do will be to move
from pickle to JSON at some point.
author | Goffi <goffi@goffi.org> |
---|---|
date | Mon, 12 Jun 2023 14:57:27 +0200 |
parents | 4b842c1fb686 |
children | 0d7bb4df2343 |
line wrap: on
line source
#!/usr/bin/env python3

# Libervia: an XMPP client
# Copyright (C) 2009-2021 Jérôme Poisson (goffi@goffi.org)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""tools to launch process in an async way (using Twisted)"""

from collections import OrderedDict
from functools import wraps
from typing import Any, Callable, Optional

from libervia.backend.core.log import getLogger

log = getLogger(__name__)


def async_lru(maxsize: Optional[int] = 50) -> Callable:
    """Decorator to cache async function results using LRU algorithm

    Positional arguments are used as the cache key, so they must be
    hashable; keyword arguments are not supported by the cache.

    @param maxsize: maximum number of items to keep in cache.
        None to have no limit.
    """

    def decorator(func: Callable) -> Callable:
        # insertion-ordered mapping: oldest (least recently used) entry first
        cache: OrderedDict = OrderedDict()

        @wraps(func)  # preserve the wrapped coroutine's name/docstring
        async def wrapper(*args) -> Any:
            if args in cache:
                log.debug(f"using result in cache for {args}")
                # mark this entry as most recently used
                cache.move_to_end(args)
                result = cache[args]
                return result
            log.debug(f"caching result for {args}")
            result = await func(*args)
            cache[args] = result
            if maxsize is not None and len(cache) > maxsize:
                # evict the least recently used entry (front of the OrderedDict)
                value = cache.popitem(False)
                log.debug(f"Removing LRU value: {value}")
            return result

        return wrapper

    return decorator