#!/usr/bin/env python3


# Libervia: an XMPP client
# Copyright (C) 2009-2021 Jérôme Poisson (goffi@goffi.org)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""tools to launch process in a async way (using Twisted)"""

from collections import OrderedDict
from typing import Any, Callable, Optional
from libervia.backend.core.log import getLogger


log = getLogger(__name__)


def async_lru(maxsize: Optional[int] = 50) -> Callable:
    """Decorator to cache async function results using LRU algorithm

    @param maxsize: maximum number of items to keep in cache.
        None to have no limit

    """

    def decorator(func: Callable) -> Callable:
        # Cache keyed by the tuple of positional arguments; insertion order
        # tracks recency, with the most recently used entry at the end.
        cache = OrderedDict()

        async def wrapper(*args) -> Any:
            if args in cache:
                log.debug(f"using result in cache for {args}")
                # Mark the entry as most recently used.
                cache.move_to_end(args)
                result = cache[args]
                return result
            log.debug(f"caching result for {args}")
            result = await func(*args)
            cache[args] = result
            if maxsize is not None and len(cache) > maxsize:
                # Evict the least recently used entry (the oldest one).
                value = cache.popitem(last=False)
                log.debug(f"Removing LRU value: {value}")
            return result

        return wrapper

    return decorator
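

# The block below is a minimal usage sketch, not part of Libervia's API: the
# `slow_square` coroutine and the values used are purely illustrative, and
# asyncio is used here only to drive the example coroutines (the decorator
# itself wraps plain native coroutines and is not tied to a specific event
# loop). It runs only when this module is executed directly.

if __name__ == "__main__":
    import asyncio

    @async_lru(maxsize=2)
    async def slow_square(n: int) -> int:
        # Stand-in for an expensive asynchronous operation.
        await asyncio.sleep(0.1)
        return n * n

    async def demo() -> None:
        print(await slow_square(3))  # computed, then cached
        print(await slow_square(3))  # served from the cache
        print(await slow_square(4))  # computed; cache holds (3,) and (4,)
        print(await slow_square(5))  # computed; evicts (3,), the LRU entry

    asyncio.run(demo())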