diff libervia/backend/memory/memory.py @ 4270:0d7bb4df2343

Reformatted code base using black.
author Goffi <goffi@goffi.org>
date Wed, 19 Jun 2024 18:44:57 +0200
parents 64a85ce8be70
children
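For context, the changes below are black's output rather than hand edits. The exact command and options used are not recorded in the changeset, so the following is a minimal sketch, assuming black's default mode (88-column line limit), that reproduces the signature collapsing and union-type spacing visible in the first hunks:

    # Minimal sketch (assumed invocation, not recorded in this changeset):
    # reproduce the reformatting with black's Python API.
    import black

    src = (
        "def config_get(\n"
        "    self,\n"
        "    section: str|None,\n"
        "    name: str,\n"
        "    default: Any = None\n"
        ") -> str|list|dict:\n"
        "    ...\n"
    )
    # With no magic trailing comma and the default 88-column limit, black
    # collapses the parameter list onto one line and spaces out the unions:
    #   def config_get(
    #       self, section: str | None, name: str, default: Any = None
    #   ) -> str | list | dict:
    print(black.format_str(src, mode=black.Mode()))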
--- a/libervia/backend/memory/memory.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/memory.py	Wed Jun 19 18:44:57 2024 +0200
@@ -155,7 +155,7 @@
         raise NotImplementedError("You need to use new_session to create a session")
 
     def __delitem__(self, session_id):
-        """ delete the session data """
+        """delete the session data"""
         self._purge_session(session_id)
 
     def keys(self):
@@ -253,7 +253,6 @@
         self.admin_jids = set()
         self._file_path_lock = defer.DeferredLock()
 
-
     async def initialise(self):
         self.storage = Storage()
         await self.storage.initialise()
@@ -281,15 +280,11 @@
                     else:
                         self.admin_jids.add(admin_jid)
 
-
     ## Configuration ##
 
     def config_get(
-        self,
-        section: str|None,
-        name: str,
-        default: Any = None
-    ) -> str|list|dict:
+        self, section: str | None, name: str, default: Any = None
+    ) -> str | list | dict:
         """Get the main configuration option
 
         @param section: section of the config file (None or '' for DEFAULT)
@@ -348,7 +343,7 @@
     ## Profiles/Sessions management ##
 
     def start_session(self, password, profile):
-        """"Iniatialise session for a profile
+        """ "Iniatialise session for a profile
 
         @param password(unicode): profile session password
             or empty string if no password is set
@@ -375,8 +370,8 @@
                     session_d = self._entities_cache[profile]
                 except KeyError:
                     # else we do request the params
-                    session_d = self._entities_cache[profile] = self.load_individual_params(
-                        profile
+                    session_d = self._entities_cache[profile] = (
+                        self.load_individual_params(profile)
                     )
                     session_d.addCallback(create_session)
                 finally:
@@ -434,8 +429,9 @@
         )
         valid = PasswordHasher.verify(password, sat_cipher)
         if not valid:
-            log.warning(_("Authentication failure of profile {profile}").format(
-                profile=profile))
+            log.warning(
+                _("Authentication failure of profile {profile}").format(profile=profile)
+            )
             raise exceptions.PasswordError("The provided profile password doesn't match.")
         return await self.new_auth_session(password, profile)
 
@@ -467,9 +463,7 @@
             del self._entities_cache[profile]
         except KeyError:
             log.error(
-                _(
-                    "Trying to purge roster status cache for a profile not in memory: [%s]"
-                )
+                _("Trying to purge roster status cache for a profile not in memory: [%s]")
                 % profile
             )
 
@@ -554,9 +548,7 @@
             # be sure to call this after checking that the profile doesn't exist yet
 
             # generated once and for all, then saved in a PersistentDict
-            personal_key = BlockCipher.get_random_key(
-                base64=True
-            ).decode('utf-8')
+            personal_key = BlockCipher.get_random_key(base64=True).decode("utf-8")
             self.auth_sessions.new_session(
                 {C.MEMORY_CRYPTO_KEY: personal_key}, profile=name
             )  # will be encrypted by param_set
@@ -626,29 +618,42 @@
 
     def _history_get_serialise(self, history_data):
         return [
-            (uid, timestamp, from_jid, to_jid, message, subject, mess_type,
-             data_format.serialise(extra)) for uid, timestamp, from_jid, to_jid, message,
-            subject, mess_type, extra in history_data
+            (
+                uid,
+                timestamp,
+                from_jid,
+                to_jid,
+                message,
+                subject,
+                mess_type,
+                data_format.serialise(extra),
+            )
+            for uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra in history_data
         ]
 
-    def _history_get(self, from_jid_s, to_jid_s, limit=C.HISTORY_LIMIT_NONE, between=True,
-                    filters=None, profile=C.PROF_KEY_NONE):
+    def _history_get(
+        self,
+        from_jid_s,
+        to_jid_s,
+        limit=C.HISTORY_LIMIT_NONE,
+        between=True,
+        filters=None,
+        profile=C.PROF_KEY_NONE,
+    ):
         from_jid = jid.JID(from_jid_s) if from_jid_s else None
         to_jid = jid.JID(to_jid_s) if to_jid_s else None
-        d = self.history_get(
-            from_jid, to_jid, limit, between, filters, profile
-        )
+        d = self.history_get(from_jid, to_jid, limit, between, filters, profile)
         d.addCallback(self._history_get_serialise)
         return d
 
     def history_get(
         self,
-        from_jid: jid.JID|None,
-        to_jid: jid.JID|None,
+        from_jid: jid.JID | None,
+        to_jid: jid.JID | None,
         limit: int = C.HISTORY_LIMIT_NONE,
         between: bool = True,
-        filters: dict[str, str]|None = None,
-        profile: str = C.PROF_KEY_NONE
+        filters: dict[str, str] | None = None,
+        profile: str = C.PROF_KEY_NONE,
     ) -> defer.Deferred[list]:
         """Retrieve messages in history
 
@@ -671,7 +676,9 @@
             limit = None
         if limit == 0:
             return defer.succeed([])
-        return self.storage.history_get(from_jid, to_jid, limit, between, filters, profile)
+        return self.storage.history_get(
+            from_jid, to_jid, limit, between, filters, profile
+        )
 
     ## Statuses ##
 
@@ -714,9 +721,7 @@
         """
         client = self.host.get_client(profile_key)
         presence_data = PresenceTuple(show, priority, statuses)
-        self.update_entity_data(
-            client, entity_jid, "presence", presence_data
-        )
+        self.update_entity_data(client, entity_jid, "presence", presence_data)
         if entity_jid.resource and show != C.PRESENCE_UNAVAILABLE:
             # If a resource is available, bare jid should not have presence information
             try:
@@ -743,7 +748,9 @@
         # FIXME: is there a need to keep cache data for resources which are not connected anymore?
         if entity_jid.resource:
             raise ValueError(
-                "get_all_resources must be used with a bare jid (got {})".format(entity_jid)
+                "get_all_resources must be used with a bare jid (got {})".format(
+                    entity_jid
+                )
             )
         profile_cache = self._get_profile_cache(client)
         try:
@@ -791,7 +798,9 @@
         """
         if entity_jid.resource:
             raise ValueError(
-                "main_resource_get must be used with a bare jid (got {})".format(entity_jid)
+                "main_resource_get must be used with a bare jid (got {})".format(
+                    entity_jid
+                )
             )
         try:
             if self.host.plugins["XEP-0045"].is_joined_room(client, entity_jid):
@@ -857,9 +866,7 @@
                 full_jid.resource = resource
                 yield full_jid
 
-    def update_entity_data(
-        self, client, entity_jid, key, value, silent=False
-    ):
+    def update_entity_data(self, client, entity_jid, key, value, silent=False):
         """Set a misc data for an entity
 
         If key was registered with set_signal_on_update, a signal will be sent to frontends
@@ -884,10 +891,7 @@
             entity_data[key] = value
             if key in self._key_signals and not silent:
                 self.host.bridge.entity_data_updated(
-                    jid_.full(),
-                    key,
-                    data_format.serialise(value),
-                    client.profile
+                    jid_.full(), key, data_format.serialise(value), client.profile
                 )
 
     def del_entity_datum(self, client, entity_jid, key):
@@ -910,9 +914,7 @@
             try:
                 entity_data = profile_cache[jid_.userhostJID()][jid_.resource]
             except KeyError:
-                raise exceptions.UnknownEntityError(
-                    "Entity {} not in cache".format(jid_)
-                )
+                raise exceptions.UnknownEntityError("Entity {} not in cache".format(jid_))
             try:
                 del entity_data[key]
             except KeyError as e:
@@ -927,7 +929,7 @@
             client, [jid.JID(jid_) for jid_ in entities_jids], keys_list
         )
         return {
-            jid_.full(): {k: data_format.serialise(v) for k,v in data.items()}
+            jid_.full(): {k: data_format.serialise(v) for k, v in data.items()}
             for jid_, data in ret.items()
         }
 
@@ -980,7 +982,8 @@
 
     def _get_entity_data(self, entity_jid_s, keys_list=None, profile=C.PROF_KEY_NONE):
         return self.entity_data_get(
-            self.host.get_client(profile), jid.JID(entity_jid_s), keys_list)
+            self.host.get_client(profile), jid.JID(entity_jid_s), keys_list
+        )
 
     def entity_data_get(self, client, entity_jid, keys_list=None):
         """Get a list of cached values for entity
@@ -999,9 +1002,7 @@
             entity_data = profile_cache[entity_jid.userhostJID()][entity_jid.resource]
         except KeyError:
             raise exceptions.UnknownEntityError(
-                "Entity {} not in cache (was requesting {})".format(
-                    entity_jid, keys_list
-                )
+                "Entity {} not in cache (was requesting {})".format(entity_jid, keys_list)
             )
         if keys_list is None:
             return entity_data
@@ -1146,7 +1147,9 @@
 
     ## Parameters ##
 
-    def get_string_param_a(self, name, category, attr="value", profile_key=C.PROF_KEY_NONE):
+    def get_string_param_a(
+        self, name, category, attr="value", profile_key=C.PROF_KEY_NONE
+    ):
         return self.params.get_string_param_a(name, category, attr, profile_key)
 
     def param_get_a(self, name, category, attr="value", profile_key=C.PROF_KEY_NONE):
@@ -1172,13 +1175,20 @@
         )
 
     def async_get_string_param_a(
-        self, name, category, attribute="value", security_limit=C.NO_SECURITY_LIMIT,
-        profile_key=C.PROF_KEY_NONE):
+        self,
+        name,
+        category,
+        attribute="value",
+        security_limit=C.NO_SECURITY_LIMIT,
+        profile_key=C.PROF_KEY_NONE,
+    ):
 
         profile = self.get_profile_name(profile_key)
-        return defer.ensureDeferred(self.params.async_get_string_param_a(
-            name, category, attribute, security_limit, profile
-        ))
+        return defer.ensureDeferred(
+            self.params.async_get_string_param_a(
+                name, category, attribute, security_limit, profile
+            )
+        )
 
     def _get_params_ui(self, security_limit, app, extra_s, profile_key):
         return self.params._get_params_ui(security_limit, app, extra_s, profile_key)
@@ -1211,31 +1221,38 @@
         client = self.host.get_client(profile_key)
         # we accept any type
         data = data_format.deserialise(data_s, type_check=None)
-        return defer.ensureDeferred(self.storage.set_private_value(
-            namespace, key, data, binary=True, profile=client.profile))
+        return defer.ensureDeferred(
+            self.storage.set_private_value(
+                namespace, key, data, binary=True, profile=client.profile
+            )
+        )
 
     def _private_data_get(self, namespace, key, profile_key):
         client = self.host.get_client(profile_key)
         d = defer.ensureDeferred(
             self.storage.get_privates(
-                namespace, [key], binary=True, profile=client.profile)
+                namespace, [key], binary=True, profile=client.profile
+            )
         )
         d.addCallback(lambda data_dict: data_format.serialise(data_dict.get(key)))
         return d
 
     def _private_data_delete(self, namespace, key, profile_key):
         client = self.host.get_client(profile_key)
-        return defer.ensureDeferred(self.storage.del_private_value(
-            namespace, key, binary=True, profile=client.profile))
+        return defer.ensureDeferred(
+            self.storage.del_private_value(
+                namespace, key, binary=True, profile=client.profile
+            )
+        )
 
     ## Files ##
 
     def check_file_permission(
-            self,
-            file_data: dict,
-            peer_jid: Optional[jid.JID],
-            perms_to_check: Optional[Tuple[str]],
-            set_affiliation: bool = False
+        self,
+        file_data: dict,
+        peer_jid: Optional[jid.JID],
+        perms_to_check: Optional[Tuple[str]],
+        set_affiliation: bool = False,
     ) -> None:
         """Check that an entity has the right permission on a file
 
@@ -1256,7 +1273,7 @@
         peer_jid = peer_jid.userhostJID()
         if peer_jid == file_data["owner"]:
             if set_affiliation:
-                file_data['affiliation'] = 'owner'
+                file_data["affiliation"] = "owner"
             # the owner has all rights, nothing to check
             return
         if not C.ACCESS_PERMS.issuperset(perms_to_check):
@@ -1274,7 +1291,7 @@
                 # otherwise, we use public permission, as the parent directory will
                 # block anyway; this avoids having to recursively change permissions
                 # for all sub directories/files when modifying a permission
-                if not file_data.get('parent'):
+                if not file_data.get("parent"):
                     raise exceptions.PermissionError()
                 else:
                     perm_type = C.ACCESS_TYPE_PUBLIC
@@ -1358,44 +1375,41 @@
     def get_file_affiliations(self, file_data: dict) -> Dict[jid.JID, str]:
         """Convert file access to pubsub like affiliations"""
         affiliations = {}
-        access_data = file_data['access']
+        access_data = file_data["access"]
 
         read_data = access_data.get(C.ACCESS_PERM_READ, {})
-        if read_data.get('type') == C.ACCESS_TYPE_WHITELIST:
-            for entity_jid_s in read_data['jids']:
+        if read_data.get("type") == C.ACCESS_TYPE_WHITELIST:
+            for entity_jid_s in read_data["jids"]:
                 entity_jid = jid.JID(entity_jid_s)
-                affiliations[entity_jid] = 'member'
+                affiliations[entity_jid] = "member"
 
         write_data = access_data.get(C.ACCESS_PERM_WRITE, {})
-        if write_data.get('type') == C.ACCESS_TYPE_WHITELIST:
-            for entity_jid_s in write_data['jids']:
+        if write_data.get("type") == C.ACCESS_TYPE_WHITELIST:
+            for entity_jid_s in write_data["jids"]:
                 entity_jid = jid.JID(entity_jid_s)
-                affiliations[entity_jid] = 'publisher'
+                affiliations[entity_jid] = "publisher"
 
-        owner = file_data.get('owner')
+        owner = file_data.get("owner")
         if owner:
-            affiliations[owner] = 'owner'
+            affiliations[owner] = "owner"
 
         return affiliations
 
     def _set_file_affiliations_update(
-        self,
-        access: dict,
-        file_data: dict,
-        affiliations: Dict[jid.JID, str]
+        self, access: dict, file_data: dict, affiliations: Dict[jid.JID, str]
     ) -> None:
         read_data = access.setdefault(C.ACCESS_PERM_READ, {})
-        if read_data.get('type') != C.ACCESS_TYPE_WHITELIST:
-            read_data['type'] = C.ACCESS_TYPE_WHITELIST
-            if 'jids' not in read_data:
-                read_data['jids'] = []
-        read_whitelist = read_data['jids']
+        if read_data.get("type") != C.ACCESS_TYPE_WHITELIST:
+            read_data["type"] = C.ACCESS_TYPE_WHITELIST
+            if "jids" not in read_data:
+                read_data["jids"] = []
+        read_whitelist = read_data["jids"]
         write_data = access.setdefault(C.ACCESS_PERM_WRITE, {})
-        if write_data.get('type') != C.ACCESS_TYPE_WHITELIST:
-            write_data['type'] = C.ACCESS_TYPE_WHITELIST
-            if 'jids' not in write_data:
-                write_data['jids'] = []
-        write_whitelist = write_data['jids']
+        if write_data.get("type") != C.ACCESS_TYPE_WHITELIST:
+            write_data["type"] = C.ACCESS_TYPE_WHITELIST
+            if "jids" not in write_data:
+                write_data["jids"] = []
+        write_whitelist = write_data["jids"]
         for entity_jid, affiliation in affiliations.items():
             entity_jid_s = entity_jid.full()
             if affiliation == "none":
@@ -1428,10 +1442,7 @@
                 raise ValueError(f"unknown affiliation: {affiliation!r}")
 
     async def set_file_affiliations(
-        self,
-        client,
-        file_data: dict,
-        affiliations: Dict[jid.JID, str]
+        self, client, file_data: dict, affiliations: Dict[jid.JID, str]
     ) -> None:
         """Apply pubsub like affiliation to file_data
 
@@ -1442,22 +1453,19 @@
             - "member" gives read permission only
             - "none" removes both read and write permissions
         """
-        file_id = file_data['id']
+        file_id = file_data["id"]
         await self.file_update(
             file_id,
-            'access',
+            "access",
             update_cb=partial(
                 self._set_file_affiliations_update,
                 file_data=file_data,
-                affiliations=affiliations
+                affiliations=affiliations,
             ),
         )
 
     def _set_file_access_model_update(
-        self,
-        access: dict,
-        file_data: dict,
-        access_model: str
+        self, access: dict, file_data: dict, access_model: str
     ) -> None:
         read_data = access.setdefault(C.ACCESS_PERM_READ, {})
         if access_model == "open":
@@ -1467,9 +1475,9 @@
         else:
             raise ValueError(f"unknown access model: {access_model}")
 
-        read_data['type'] = requested_type
-        if requested_type == C.ACCESS_TYPE_WHITELIST and 'jids' not in read_data:
-            read_data['jids'] = []
+        read_data["type"] = requested_type
+        if requested_type == C.ACCESS_TYPE_WHITELIST and "jids" not in read_data:
+            read_data["jids"] = []
 
     async def set_file_access_model(
         self,
@@ -1483,24 +1491,24 @@
             - "open": set public access to file/dir
             - "whitelist": set whitelist to file/dir
         """
-        file_id = file_data['id']
+        file_id = file_data["id"]
         await self.file_update(
             file_id,
-            'access',
+            "access",
             update_cb=partial(
                 self._set_file_access_model_update,
                 file_data=file_data,
-                access_model=access_model
+                access_model=access_model,
             ),
         )
 
     def get_files_owner(
-            self,
-            client,
-            owner: Optional[jid.JID],
-            peer_jid: Optional[jid.JID],
-            file_id: Optional[str] = None,
-            parent: Optional[str] = None
+        self,
+        client,
+        owner: Optional[jid.JID],
+        peer_jid: Optional[jid.JID],
+        file_id: Optional[str] = None,
+        parent: Optional[str] = None,
     ) -> jid.JID:
         """Get owner to use for a file operation
 
@@ -1526,10 +1534,26 @@
         return peer_jid.userhostJID()
 
     async def get_files(
-        self, client, peer_jid, file_id=None, version=None, parent=None, path=None,
-        type_=None, file_hash=None, hash_algo=None, name=None, namespace=None,
-        mime_type=None, public_id=None, owner=None, access=None, projection=None,
-        unique=False, perms_to_check=(C.ACCESS_PERM_READ,)):
+        self,
+        client,
+        peer_jid,
+        file_id=None,
+        version=None,
+        parent=None,
+        path=None,
+        type_=None,
+        file_hash=None,
+        hash_algo=None,
+        name=None,
+        namespace=None,
+        mime_type=None,
+        public_id=None,
+        owner=None,
+        access=None,
+        projection=None,
+        unique=False,
+        perms_to_check=(C.ACCESS_PERM_READ,),
+    ):
         """Retrieve files with with given filters
 
         @param peer_jid(jid.JID, None): jid trying to access the file
@@ -1628,11 +1652,27 @@
         return files
 
     async def set_file(
-        self, client, name, file_id=None, version="", parent=None, path=None,
-        type_=C.FILE_TYPE_FILE, file_hash=None, hash_algo=None, size=None,
-        namespace=None, mime_type=None, public_id=None, created=None, modified=None,
-        owner=None, access=None, extra=None, peer_jid=None,
-        perms_to_check=(C.ACCESS_PERM_WRITE,)
+        self,
+        client,
+        name,
+        file_id=None,
+        version="",
+        parent=None,
+        path=None,
+        type_=C.FILE_TYPE_FILE,
+        file_hash=None,
+        hash_algo=None,
+        size=None,
+        namespace=None,
+        mime_type=None,
+        public_id=None,
+        created=None,
+        modified=None,
+        owner=None,
+        access=None,
+        extra=None,
+        peer_jid=None,
+        perms_to_check=(C.ACCESS_PERM_WRITE,),
     ):
         """Set a file metadata
 
@@ -1694,7 +1734,7 @@
         else:
             mime_type = mime_type.lower()
         if public_id is not None:
-            assert len(public_id)>0
+            assert len(public_id) > 0
         if created is None:
             created = time.time()
         if namespace is not None:
@@ -1761,10 +1801,7 @@
         )
 
     async def file_get_used_space(
-        self,
-        client,
-        peer_jid: jid.JID,
-        owner: Optional[jid.JID] = None
+        self, client, peer_jid: jid.JID, owner: Optional[jid.JID] = None
     ) -> int:
         """Get space taken by all files owned by an entity
 
@@ -1797,7 +1834,7 @@
         peer_jid: jid.JID,
         recursive: bool,
         files_path: Path,
-        file_data: dict
+        file_data: dict,
     ):
         """Internal method to delete files/directories recursively
 
@@ -1807,43 +1844,56 @@
         @param files_path(unicode): path of the directory containing the actual files
         @param file_data(dict): data of the file to delete
         """
-        if file_data['owner'] != peer_jid:
+        if file_data["owner"] != peer_jid:
             raise exceptions.PermissionError(
-                "file {file_name} can't be deleted, {peer_jid} is not the owner"
-                .format(file_name=file_data['name'], peer_jid=peer_jid.full()))
-        if file_data['type'] == C.FILE_TYPE_DIRECTORY:
-            sub_files = yield self.get_files(client, peer_jid, parent=file_data['id'])
+                "file {file_name} can't be deleted, {peer_jid} is not the owner".format(
+                    file_name=file_data["name"], peer_jid=peer_jid.full()
+                )
+            )
+        if file_data["type"] == C.FILE_TYPE_DIRECTORY:
+            sub_files = yield self.get_files(client, peer_jid, parent=file_data["id"])
             if sub_files and not recursive:
                 raise exceptions.DataError(_("Can't delete directory, it is not empty"))
             # we first delete the sub-files
             for sub_file_data in sub_files:
-                if sub_file_data['type'] == C.FILE_TYPE_DIRECTORY:
-                    sub_file_path = files_path / sub_file_data['name']
+                if sub_file_data["type"] == C.FILE_TYPE_DIRECTORY:
+                    sub_file_path = files_path / sub_file_data["name"]
                 else:
                     sub_file_path = files_path
                 yield self._delete_file(
-                    client, peer_jid, recursive, sub_file_path, sub_file_data)
+                    client, peer_jid, recursive, sub_file_path, sub_file_data
+                )
             # then the directory itself
-            yield self.storage.file_delete(file_data['id'])
-        elif file_data['type'] == C.FILE_TYPE_FILE:
-            log.info(_("deleting file {name} with hash {file_hash}").format(
-                name=file_data['name'], file_hash=file_data['file_hash']))
-            yield self.storage.file_delete(file_data['id'])
+            yield self.storage.file_delete(file_data["id"])
+        elif file_data["type"] == C.FILE_TYPE_FILE:
+            log.info(
+                _("deleting file {name} with hash {file_hash}").format(
+                    name=file_data["name"], file_hash=file_data["file_hash"]
+                )
+            )
+            yield self.storage.file_delete(file_data["id"])
             references = yield self.get_files(
-                client, peer_jid, file_hash=file_data['file_hash'])
+                client, peer_jid, file_hash=file_data["file_hash"]
+            )
             if references:
                 log.debug("there are still references to the file, we keep it")
             else:
-                file_path = os.path.join(files_path, file_data['file_hash'])
-                log.info(_("no reference left to {file_path}, deleting").format(
-                    file_path=file_path))
+                file_path = os.path.join(files_path, file_data["file_hash"])
+                log.info(
+                    _("no reference left to {file_path}, deleting").format(
+                        file_path=file_path
+                    )
+                )
                 try:
                     os.unlink(file_path)
                 except FileNotFoundError:
-                    log.error(f"file at {file_path!r} doesn't exist but it was referenced in files database")
+                    log.error(
+                        f"file at {file_path!r} doesn't exist but it was referenced in files database"
+                    )
         else:
-            raise exceptions.InternalError('Unexpected file type: {file_type}'
-                .format(file_type=file_data['type']))
+            raise exceptions.InternalError(
+                "Unexpected file type: {file_type}".format(file_type=file_data["type"])
+            )
 
     async def file_delete(self, client, peer_jid, file_id, recursive=False):
         """Delete a single file or a directory and all its sub-files
@@ -1857,8 +1907,9 @@
         #        should be checked too
         files_data = await self.get_files(client, peer_jid, file_id)
         if not files_data:
-            raise exceptions.NotFound("Can't find the file with id {file_id}".format(
-                file_id=file_id))
+            raise exceptions.NotFound(
+                "Can't find the file with id {file_id}".format(file_id=file_id)
+            )
         file_data = files_data[0]
         if file_data["type"] != C.FILE_TYPE_DIRECTORY and recursive:
             raise ValueError("recursive can only be set for directories")
@@ -1879,12 +1930,11 @@
         return Path(
             self._cache_path,
             regex.path_escape(namespace),
-            *(regex.path_escape(a) for a in args)
+            *(regex.path_escape(a) for a in args),
         )
 
     ## Notifications ##
 
-
     def _add_notification(
         self,
         type_: str,
@@ -1896,7 +1946,7 @@
         priority: str,
         expire_at: float,
         extra_s: str,
-        profile_key: str
+        profile_key: str,
     ) -> defer.Deferred:
         client = self.host.get_client(profile_key)
 
@@ -1907,9 +1957,7 @@
             notification_type = NotificationType[type_]
             notification_priority = NotificationPriority[priority]
         except KeyError as e:
-            raise exceptions.DataError(
-                f"invalid notification type or priority data: {e}"
-            )
+            raise exceptions.DataError(f"invalid notification type or priority data: {e}")
 
         return defer.ensureDeferred(
             self.add_notification(
@@ -1922,7 +1970,7 @@
                 requires_action,
                 notification_priority,
                 expire_at or None,
-                data_format.deserialise(extra_s)
+                data_format.deserialise(extra_s),
             )
         )
 
@@ -1955,21 +2003,28 @@
         @param extra: additional data.
         """
         notification = await self.storage.add_notification(
-            None if is_global else client, type_, body_plain, body_rich, title,
-            requires_action, priority, expire_at, extra
+            None if is_global else client,
+            type_,
+            body_plain,
+            body_rich,
+            title,
+            requires_action,
+            priority,
+            expire_at,
+            extra,
         )
         self.host.bridge.notification_new(
             str(notification.id),
             notification.timestamp,
             type_.value,
             body_plain,
-            body_rich or '',
-            title or '',
+            body_rich or "",
+            title or "",
             requires_action,
             priority.value,
             expire_at or 0,
-            data_format.serialise(extra) if extra else '',
-            C.PROF_KEY_ALL if is_global else client.profile
+            data_format.serialise(extra) if extra else "",
+            C.PROF_KEY_ALL if is_global else client.profile,
         )
 
     def _get_notifications(self, filters_s: str, profile_key: str) -> defer.Deferred:
@@ -1992,12 +2047,14 @@
         filters = data_format.deserialise(filters_s)
 
         try:
-            if 'type' in filters:
-                filters['type_'] = NotificationType[filters.pop('type')]
-            if 'status' in filters:
-                filters['status'] = NotificationStatus[filters['status']]
-            if 'min_priority' in filters:
-                filters['min_priority'] = NotificationPriority[filters['min_priority']].value
+            if "type" in filters:
+                filters["type_"] = NotificationType[filters.pop("type")]
+            if "status" in filters:
+                filters["status"] = NotificationStatus[filters["status"]]
+            if "min_priority" in filters:
+                filters["min_priority"] = NotificationPriority[
+                    filters["min_priority"]
+                ].value
         except KeyError as e:
             raise exceptions.DataError(f"invalid filter data: {e}")
 
@@ -2010,10 +2067,7 @@
         return d
 
     def _delete_notification(
-        self,
-        id_: str,
-        is_global: bool,
-        profile_key: str
+        self, id_: str, is_global: bool, profile_key: str
     ) -> defer.Deferred:
         client = self.host.get_client(profile_key)
         if is_global and not client.is_admin:
@@ -2023,10 +2077,7 @@
         return defer.ensureDeferred(self.delete_notification(client, id_, is_global))
 
     async def delete_notification(
-        self,
-        client: SatXMPPEntity,
-        id_: str,
-        is_global: bool=False
+        self, client: SatXMPPEntity, id_: str, is_global: bool = False
     ) -> None:
         """Delete a notification
 
@@ -2036,8 +2087,7 @@
         """
         await self.storage.delete_notification(None if is_global else client, id_)
         self.host.bridge.notification_deleted(
-            id_,
-            C.PROF_KEY_ALL if is_global else client.profile
+            id_, C.PROF_KEY_ALL if is_global else client.profile
         )
 
     def _notifications_expired_clean(
@@ -2050,12 +2100,10 @@
 
         return defer.ensureDeferred(
             self.storage.clean_expired_notifications(
-                client,
-                None if limit_timestamp == -1.0 else limit_timestamp
+                client, None if limit_timestamp == -1.0 else limit_timestamp
             )
         )
 
-
     ## Misc ##
 
     def is_entity_available(self, client, entity_jid):