Mercurial > libervia-backend
view libervia/backend/tools/common/async_utils.py @ 4242:8acf46ed7f36
frontends: remote control implementation:
This is the frontends' common part of the remote control implementation. It handles
the creation of WebRTC sessions and the management of inputs. For now, reception uses
the freedesktop.org Desktop portal and works mostly with Wayland-based desktop environments.
rel 436
author | Goffi <goffi@goffi.org> |
---|---|
date | Sat, 11 May 2024 13:52:43 +0200 |
parents | 4b842c1fb686 |
children | 0d7bb4df2343 |
line wrap: on
line source
#!/usr/bin/env python3

# Libervia: an XMPP client
# Copyright (C) 2009-2021 Jérôme Poisson (goffi@goffi.org)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Tools to launch processes in an async way (using Twisted)."""

from collections import OrderedDict
from functools import wraps
from typing import Optional, Callable, Awaitable

from libervia.backend.core.log import getLogger

log = getLogger(__name__)


def async_lru(maxsize: Optional[int] = 50) -> Callable:
    """Decorator to cache async function results using an LRU algorithm.

    The cache key is the tuple of positional arguments, so all arguments
    must be hashable and the decorated coroutine must be called with
    positional arguments only.

    @param maxsize: maximum number of items to keep in cache.
        None to have no limit.
    """

    def decorator(func: Callable) -> Callable:
        # OrderedDict insertion order is used to track recency:
        # most recently used entries are kept at the end.
        cache: OrderedDict = OrderedDict()

        # functools.wraps preserves the wrapped coroutine's
        # __name__/__doc__/__qualname__ for introspection and logging.
        @wraps(func)
        async def wrapper(*args) -> Awaitable:
            if args in cache:
                log.debug(f"using result in cache for {args}")
                # refresh recency: move the hit to the MRU end
                cache.move_to_end(args)
                result = cache[args]
                return result
            log.debug(f"caching result for {args}")
            result = await func(*args)
            cache[args] = result
            if maxsize is not None and len(cache) > maxsize:
                # evict the least recently used entry (front of the dict)
                value = cache.popitem(False)
                log.debug(f"Removing LRU value: {value}")
            return result

        return wrapper

    return decorator