changeset 4270:0d7bb4df2343

Reformatted code base using black.
author Goffi <goffi@goffi.org>
date Wed, 19 Jun 2024 18:44:57 +0200
parents 64a85ce8be70
children a5d27f69eedb
files libervia/backend/bridge/bridge_constructor/base_constructor.py libervia/backend/bridge/bridge_constructor/constructors/dbus/constructor.py libervia/backend/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py libervia/backend/bridge/bridge_constructor/constructors/dbus/dbus_frontend_template.py libervia/backend/bridge/bridge_constructor/constructors/embedded/constructor.py libervia/backend/bridge/bridge_constructor/constructors/embedded/embedded_template.py libervia/backend/bridge/bridge_constructor/constructors/mediawiki/constructor.py libervia/backend/bridge/bridge_constructor/constructors/pb/constructor.py libervia/backend/bridge/bridge_constructor/constructors/pb/pb_core_template.py libervia/backend/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py libervia/backend/bridge/dbus_bridge.py libervia/backend/bridge/pb.py libervia/backend/core/constants.py libervia/backend/core/core_types.py libervia/backend/core/exceptions.py libervia/backend/core/i18n.py libervia/backend/core/launcher.py libervia/backend/core/log.py libervia/backend/core/log_config.py libervia/backend/core/main.py libervia/backend/core/xmpp.py libervia/backend/memory/crypto.py libervia/backend/memory/disco.py libervia/backend/memory/encryption.py libervia/backend/memory/memory.py libervia/backend/memory/migration/env.py libervia/backend/memory/migration/versions/129ac51807e4_create_virtual_table_for_full_text_.py libervia/backend/memory/migration/versions/2ab01aa1f686_create_table_for_notifications.py libervia/backend/memory/migration/versions/4b002773cf92_add_origin_id_column_to_history_and_.py libervia/backend/memory/migration/versions/602caf848068_drop_message_types_table_fix_nullable.py libervia/backend/memory/migration/versions/610345f77e75_add_version_id_to_history.py libervia/backend/memory/migration/versions/79e5f3313fa4_create_table_for_pubsub_subscriptions.py libervia/backend/memory/migration/versions/8974efc51d22_create_tables_for_pubsub_caching.py 
libervia/backend/memory/migration/versions/fe3a02cb4bec_convert_legacypickle_columns_to_json.py libervia/backend/memory/params.py libervia/backend/memory/persistent.py libervia/backend/memory/sqla.py libervia/backend/memory/sqla_mapping.py libervia/backend/models/core.py libervia/backend/plugins/plugin_adhoc_dbus.py libervia/backend/plugins/plugin_app_manager_docker/__init__.py libervia/backend/plugins/plugin_blog_import_dokuwiki.py libervia/backend/plugins/plugin_comp_ap_gateway/__init__.py libervia/backend/plugins/plugin_comp_ap_gateway/ad_hoc.py libervia/backend/plugins/plugin_comp_ap_gateway/constants.py libervia/backend/plugins/plugin_comp_ap_gateway/events.py libervia/backend/plugins/plugin_comp_ap_gateway/http_server.py libervia/backend/plugins/plugin_comp_ap_gateway/pubsub_service.py libervia/backend/plugins/plugin_comp_ap_gateway/regex.py libervia/backend/plugins/plugin_comp_file_sharing.py libervia/backend/plugins/plugin_comp_file_sharing_management.py libervia/backend/plugins/plugin_dbg_manhole.py libervia/backend/plugins/plugin_exp_command_export.py libervia/backend/plugins/plugin_exp_invitation.py libervia/backend/plugins/plugin_exp_invitation_file.py libervia/backend/plugins/plugin_exp_invitation_pubsub.py libervia/backend/plugins/plugin_exp_jingle_stream.py libervia/backend/plugins/plugin_exp_lang_detect.py libervia/backend/plugins/plugin_exp_list_of_interest.py libervia/backend/plugins/plugin_exp_parrot.py libervia/backend/plugins/plugin_exp_pubsub_admin.py libervia/backend/plugins/plugin_exp_pubsub_hook.py libervia/backend/plugins/plugin_import.py libervia/backend/plugins/plugin_merge_req_mercurial.py libervia/backend/plugins/plugin_misc_account.py libervia/backend/plugins/plugin_misc_android.py libervia/backend/plugins/plugin_misc_app_manager/__init__.py libervia/backend/plugins/plugin_misc_app_manager/models.py libervia/backend/plugins/plugin_misc_attach.py libervia/backend/plugins/plugin_misc_download.py 
libervia/backend/plugins/plugin_misc_email_invitation.py libervia/backend/plugins/plugin_misc_extra_pep.py libervia/backend/plugins/plugin_misc_file.py libervia/backend/plugins/plugin_misc_groupblog.py libervia/backend/plugins/plugin_misc_identity.py libervia/backend/plugins/plugin_misc_ip.py libervia/backend/plugins/plugin_misc_jid_search.py libervia/backend/plugins/plugin_misc_lists.py libervia/backend/plugins/plugin_misc_merge_requests.py libervia/backend/plugins/plugin_misc_nat_port.py libervia/backend/plugins/plugin_misc_radiocol.py libervia/backend/plugins/plugin_misc_remote_control.py libervia/backend/plugins/plugin_misc_room_game.py libervia/backend/plugins/plugin_misc_tarot.py libervia/backend/plugins/plugin_misc_text_commands.py libervia/backend/plugins/plugin_misc_text_syntaxes.py libervia/backend/plugins/plugin_misc_upload.py libervia/backend/plugins/plugin_misc_uri_finder.py libervia/backend/plugins/plugin_misc_url_preview.py libervia/backend/plugins/plugin_misc_watched.py libervia/backend/plugins/plugin_pubsub_cache.py libervia/backend/plugins/plugin_sec_aesgcm.py libervia/backend/plugins/plugin_sec_otr.py libervia/backend/plugins/plugin_sec_oxps.py libervia/backend/plugins/plugin_sec_pte.py libervia/backend/plugins/plugin_sec_pubsub_signing.py libervia/backend/plugins/plugin_syntax_wiki_dotclear.py libervia/backend/plugins/plugin_xep_0020.py libervia/backend/plugins/plugin_xep_0033.py libervia/backend/plugins/plugin_xep_0045.py libervia/backend/plugins/plugin_xep_0047.py libervia/backend/plugins/plugin_xep_0048.py libervia/backend/plugins/plugin_xep_0054.py libervia/backend/plugins/plugin_xep_0055.py libervia/backend/plugins/plugin_xep_0059.py libervia/backend/plugins/plugin_xep_0060.py libervia/backend/plugins/plugin_xep_0065.py libervia/backend/plugins/plugin_xep_0070.py libervia/backend/plugins/plugin_xep_0071.py libervia/backend/plugins/plugin_xep_0077.py libervia/backend/plugins/plugin_xep_0080.py libervia/backend/plugins/plugin_xep_0082.py 
libervia/backend/plugins/plugin_xep_0084.py libervia/backend/plugins/plugin_xep_0085.py libervia/backend/plugins/plugin_xep_0096.py libervia/backend/plugins/plugin_xep_0100.py libervia/backend/plugins/plugin_xep_0103.py libervia/backend/plugins/plugin_xep_0106.py libervia/backend/plugins/plugin_xep_0115.py libervia/backend/plugins/plugin_xep_0163.py libervia/backend/plugins/plugin_xep_0166/__init__.py libervia/backend/plugins/plugin_xep_0166/models.py libervia/backend/plugins/plugin_xep_0167/__init__.py libervia/backend/plugins/plugin_xep_0167/mapping.py libervia/backend/plugins/plugin_xep_0176.py libervia/backend/plugins/plugin_xep_0184.py libervia/backend/plugins/plugin_xep_0191.py libervia/backend/plugins/plugin_xep_0198.py libervia/backend/plugins/plugin_xep_0199.py libervia/backend/plugins/plugin_xep_0231.py libervia/backend/plugins/plugin_xep_0234.py libervia/backend/plugins/plugin_xep_0249.py libervia/backend/plugins/plugin_xep_0260.py libervia/backend/plugins/plugin_xep_0261.py libervia/backend/plugins/plugin_xep_0264.py libervia/backend/plugins/plugin_xep_0272.py libervia/backend/plugins/plugin_xep_0277.py libervia/backend/plugins/plugin_xep_0280.py libervia/backend/plugins/plugin_xep_0292.py libervia/backend/plugins/plugin_xep_0300.py libervia/backend/plugins/plugin_xep_0308.py libervia/backend/plugins/plugin_xep_0313.py libervia/backend/plugins/plugin_xep_0329.py libervia/backend/plugins/plugin_xep_0334.py libervia/backend/plugins/plugin_xep_0338.py libervia/backend/plugins/plugin_xep_0339.py libervia/backend/plugins/plugin_xep_0343.py libervia/backend/plugins/plugin_xep_0346.py libervia/backend/plugins/plugin_xep_0352.py libervia/backend/plugins/plugin_xep_0353.py libervia/backend/plugins/plugin_xep_0359.py libervia/backend/plugins/plugin_xep_0363.py libervia/backend/plugins/plugin_xep_0372.py libervia/backend/plugins/plugin_xep_0373.py libervia/backend/plugins/plugin_xep_0374.py libervia/backend/plugins/plugin_xep_0376.py 
libervia/backend/plugins/plugin_xep_0380.py libervia/backend/plugins/plugin_xep_0384.py libervia/backend/plugins/plugin_xep_0391.py libervia/backend/plugins/plugin_xep_0420.py libervia/backend/plugins/plugin_xep_0422.py libervia/backend/plugins/plugin_xep_0428.py libervia/backend/plugins/plugin_xep_0446.py libervia/backend/plugins/plugin_xep_0447.py libervia/backend/plugins/plugin_xep_0448.py libervia/backend/plugins/plugin_xep_0465.py libervia/backend/plugins/plugin_xep_0470.py libervia/backend/plugins/plugin_xep_0471.py libervia/backend/test/helpers.py libervia/backend/test/helpers_plugins.py libervia/backend/test/test_core_xmpp.py libervia/backend/test/test_memory.py libervia/backend/test/test_plugin_misc_groupblog.py libervia/backend/test/test_plugin_misc_radiocol.py libervia/backend/test/test_plugin_misc_room_game.py libervia/backend/test/test_plugin_misc_text_syntaxes.py libervia/backend/test/test_plugin_xep_0033.py libervia/backend/test/test_plugin_xep_0277.py libervia/backend/test/test_plugin_xep_0334.py libervia/backend/tools/async_trigger.py libervia/backend/tools/common/async_process.py libervia/backend/tools/common/async_utils.py libervia/backend/tools/common/data_format.py libervia/backend/tools/common/data_objects.py libervia/backend/tools/common/date_utils.py libervia/backend/tools/common/email.py libervia/backend/tools/common/regex.py libervia/backend/tools/common/template.py libervia/backend/tools/common/template_xmlui.py libervia/backend/tools/common/tls.py libervia/backend/tools/common/utils.py libervia/backend/tools/config.py libervia/backend/tools/image.py libervia/backend/tools/sat_defer.py libervia/backend/tools/stream.py libervia/backend/tools/trigger.py libervia/backend/tools/utils.py libervia/backend/tools/video.py libervia/backend/tools/web.py libervia/backend/tools/xml_tools.py libervia/backend/tools/xmpp_datetime.py libervia/cli/arg_tools.py libervia/cli/base.py libervia/cli/call_gui.py libervia/cli/call_simple.py 
libervia/cli/call_tui.py libervia/cli/cmd_account.py libervia/cli/cmd_adhoc.py libervia/cli/cmd_application.py libervia/cli/cmd_avatar.py libervia/cli/cmd_blocking.py libervia/cli/cmd_blog.py libervia/cli/cmd_call.py libervia/cli/cmd_encryption.py libervia/cli/cmd_event.py libervia/cli/cmd_file.py libervia/cli/cmd_identity.py libervia/cli/cmd_info.py libervia/cli/cmd_input.py libervia/cli/cmd_message.py libervia/cli/cmd_notifications.py libervia/cli/cmd_param.py libervia/cli/cmd_pipe.py libervia/cli/cmd_profile.py libervia/cli/cmd_pubsub.py libervia/cli/cmd_remote_control.py libervia/cli/cmd_roster.py libervia/cli/cmd_shell.py libervia/cli/cmd_uri.py libervia/cli/common.py libervia/cli/constants.py libervia/cli/loops.py libervia/cli/output_std.py libervia/cli/output_template.py libervia/cli/output_xmlui.py libervia/cli/xml_tools.py libervia/frontends/bridge/dbus_bridge.py libervia/frontends/bridge/pb.py libervia/frontends/quick_frontend/quick_app.py libervia/frontends/quick_frontend/quick_blog.py libervia/frontends/quick_frontend/quick_chat.py libervia/frontends/quick_frontend/quick_contact_list.py libervia/frontends/quick_frontend/quick_game_tarot.py libervia/frontends/quick_frontend/quick_list_manager.py libervia/frontends/quick_frontend/quick_profile_manager.py libervia/frontends/quick_frontend/quick_utils.py libervia/frontends/quick_frontend/quick_widgets.py libervia/frontends/tools/portal_desktop.py libervia/frontends/tools/strings.py libervia/frontends/tools/webrtc.py libervia/frontends/tools/webrtc_file.py libervia/frontends/tools/webrtc_models.py libervia/frontends/tools/webrtc_remote_control.py libervia/frontends/tools/xmltools.py libervia/frontends/tools/xmlui.py libervia/tui/base.py libervia/tui/chat.py libervia/tui/contact_list.py libervia/tui/game_tarot.py libervia/tui/progress.py libervia/tui/status.py libervia/tui/xmlui.py
diffstat 262 files changed, 12597 insertions(+), 8959 deletions(-) [+]
line wrap: on
line diff
--- a/libervia/backend/bridge/bridge_constructor/base_constructor.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/bridge_constructor/base_constructor.py	Wed Jun 19 18:44:57 2024 +0200
@@ -77,7 +77,8 @@
     def get_default(self, name):
         """Return default values of a function in a dict
         @param name: Name of the function to get
-        @return: dict, each key is the integer param number (no key if no default value)"""
+        @return: dict, each key is the integer param number (no key if no default value)
+        """
         default_dict = {}
         def_re = re.compile(r"param_(\d+)_default")
 
@@ -109,7 +110,8 @@
     def get_arguments_doc(self, name):
         """Return documentation of arguments
         @param name: Name of the function to get
-        @return: dict, each key is the integer param number (no key if no argument doc), value is a tuple (name, doc)"""
+        @return: dict, each key is the integer param number (no key if no argument doc), value is a tuple (name, doc)
+        """
         doc_dict = {}
         option_re = re.compile(r"doc_param_(\d+)")
         value_re = re.compile(r"^(\w+): (.*)$", re.MULTILINE | re.DOTALL)
@@ -145,8 +147,19 @@
         i = 0
 
         while i < len(signature):
-            if signature[i] not in ["b", "y", "n", "i", "x", "q", "u", "t", "d", "s",
-                                    "a"]:
+            if signature[i] not in [
+                "b",
+                "y",
+                "n",
+                "i",
+                "x",
+                "q",
+                "u",
+                "t",
+                "d",
+                "s",
+                "a",
+            ]:
                 raise ParseError("Unmanaged attribute type [%c]" % signature[i])
 
             if signature[i] == "a":
@@ -293,8 +306,9 @@
             extend_method(completion, function, default, arg_doc, async_)
 
             for part, fmt in FORMATS.items():
-                if (part.startswith(function["type"])
-                    or part.startswith(f"async_{function['type']}")):
+                if part.startswith(function["type"]) or part.startswith(
+                    f"async_{function['type']}"
+                ):
                     parts[part.upper()].append(fmt.format(**completion))
 
         # at this point, signals_part, methods_part and direct_calls should be filled,
@@ -350,10 +364,12 @@
                 os.mkdir(self.args.dest_dir)
             full_path = os.path.join(self.args.dest_dir, filename)
             if os.path.exists(full_path) and not self.args.force:
-                print((
-                    "The destination file [%s] already exists ! Use --force to overwrite it"
-                    % full_path
-                ))
+                print(
+                    (
+                        "The destination file [%s] already exists ! Use --force to overwrite it"
+                        % full_path
+                    )
+                )
             try:
                 with open(full_path, "w") as dest_file:
                     dest_file.write("\n".join(file_buf))
--- a/libervia/backend/bridge/bridge_constructor/constructors/dbus/constructor.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/bridge_constructor/constructors/dbus/constructor.py	Wed Jun 19 18:44:57 2024 +0200
@@ -27,14 +27,11 @@
     CORE_FORMATS = {
         "methods_declarations": """\
         Method('{name}', arguments='{sig_in}', returns='{sig_out}'),""",
-
         "methods": """\
     def dbus_{name}(self, {args}):
         {debug}return self._callback("{name}", {args_no_default})\n""",
-
         "signals_declarations": """\
         Signal('{name}', '{sig_in}'),""",
-
         "signals": """\
     def {name}(self, {args}):
         self._obj.emitSignal("{name}", {args})\n""",
@@ -68,7 +65,8 @@
         completion.update(
             {
                 "debug": (
-                    "" if not self.args.debug
+                    ""
+                    if not self.args.debug
                     else f'log.debug ("{completion["name"]}")\n{8 * " "}'
                 )
             }
@@ -78,9 +76,11 @@
         completion.update(
             {
                 # XXX: we can manage blocking call in the same way as async one: if callback is None the call will be blocking
-                "debug": ""
-                if not self.args.debug
-                else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " "),
+                "debug": (
+                    ""
+                    if not self.args.debug
+                    else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " ")
+                ),
                 "args_result": self.get_arguments(function["sig_in"], name=arg_doc),
                 "async_args": "callback=None, errback=None",
                 "async_comma": ", " if function["sig_in"] else "",
@@ -95,9 +95,9 @@
         )
         if async_:
             completion["blocking_call"] = ""
-            completion[
-                "async_args_result"
-            ] = "timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler"
+            completion["async_args_result"] = (
+                "timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler"
+            )
         else:
             # XXX: To have a blocking call, we must have not reply_handler, so we test if callback exists, and add reply_handler only in this case
             completion[
--- a/libervia/backend/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py	Wed Jun 19 18:44:57 2024 +0200
@@ -31,10 +31,8 @@
 
 # Interface prefix
 const_INT_PREFIX = config.config_get(
-    config.parse_main_conf(),
-    "",
-    "bridge_dbus_int_prefix",
-    "org.libervia.Libervia")
+    config.parse_main_conf(), "", "bridge_dbus_int_prefix", "org.libervia.Libervia"
+)
 const_ERROR_PREFIX = const_INT_PREFIX + ".error"
 const_OBJ_PATH = "/org/libervia/Libervia/bridge"
 const_CORE_SUFFIX = ".core"
@@ -88,12 +86,10 @@
 
     core_iface = DBusInterface(
         const_INT_PREFIX + const_CORE_SUFFIX,
-##METHODS_DECLARATIONS_PART##
-##SIGNALS_DECLARATIONS_PART##
+        ##METHODS_DECLARATIONS_PART##
+        ##SIGNALS_DECLARATIONS_PART##
     )
-    plugin_iface = DBusInterface(
-        const_INT_PREFIX + const_PLUGIN_SUFFIX
-    )
+    plugin_iface = DBusInterface(const_INT_PREFIX + const_PLUGIN_SUFFIX)
 
     dbusInterfaces = [core_iface, plugin_iface]
 
@@ -116,8 +112,10 @@
             d.addErrback(GenericException.create_and_raise)
             return d
 
+
 ##METHODS_PART##
 
+
 class bridge:
 
     def __init__(self):
@@ -140,7 +138,7 @@
         conn.exportObject(self._obj)
         await conn.requestBusName(const_INT_PREFIX)
 
-##SIGNALS_PART##
+    ##SIGNALS_PART##
     def register_method(self, name, callback):
         log.debug(f"registering DBus bridge method [{name}]")
         self._obj.register_method(name, callback)
@@ -149,7 +147,7 @@
         self._obj.emitSignal(name, *args)
 
     def add_method(
-            self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}
+        self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}
     ):
         """Dynamically add a method to D-Bus bridge"""
         # FIXME: doc parameter is kept only temporary, the time to remove it from calls
@@ -157,10 +155,12 @@
         self._obj.plugin_iface.addMethod(
             Method(name, arguments=in_sign, returns=out_sign)
         )
+
         # we have to create a method here instead of using partialmethod, because txdbus
         # uses __func__ which doesn't work with partialmethod
         def caller(self_, *args, **kwargs):
             return self_._callback(name, *args, **kwargs)
+
         setattr(self._obj, f"dbus_{name}", MethodType(caller, self._obj))
         self.register_method(name, method)
 
--- a/libervia/backend/bridge/bridge_constructor/constructors/dbus/dbus_frontend_template.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/bridge_constructor/constructors/dbus/dbus_frontend_template.py	Wed Jun 19 18:44:57 2024 +0200
@@ -33,12 +33,10 @@
 
 # Interface prefix
 const_INT_PREFIX = config.config_get(
-    config.parse_main_conf(),
-    "",
-    "bridge_dbus_int_prefix",
-    "org.libervia.Libervia")
+    config.parse_main_conf(), "", "bridge_dbus_int_prefix", "org.libervia.Libervia"
+)
 const_ERROR_PREFIX = const_INT_PREFIX + ".error"
-const_OBJ_PATH = '/org/libervia/Libervia/bridge'
+const_OBJ_PATH = "/org/libervia/Libervia/bridge"
 const_CORE_SUFFIX = ".core"
 const_PLUGIN_SUFFIX = ".plugin"
 const_TIMEOUT = 120
@@ -52,7 +50,7 @@
     """
     full_name = dbus_e.get_dbus_name()
     if full_name.startswith(const_ERROR_PREFIX):
-        name = dbus_e.get_dbus_name()[len(const_ERROR_PREFIX) + 1:]
+        name = dbus_e.get_dbus_name()[len(const_ERROR_PREFIX) + 1 :]
     else:
         name = full_name
     # XXX: dbus_e.args doesn't contain the original DBusException args, but we
@@ -62,7 +60,7 @@
     try:
         message, condition = ast.literal_eval(message)
     except (SyntaxError, ValueError, TypeError):
-        condition = ''
+        condition = ""
     return BridgeException(name, message, condition)
 
 
@@ -71,24 +69,33 @@
     def bridge_connect(self, callback, errback):
         try:
             self.sessions_bus = dbus.SessionBus()
-            self.db_object = self.sessions_bus.get_object(const_INT_PREFIX,
-                                                          const_OBJ_PATH)
-            self.db_core_iface = dbus.Interface(self.db_object,
-                                                dbus_interface=const_INT_PREFIX + const_CORE_SUFFIX)
-            self.db_plugin_iface = dbus.Interface(self.db_object,
-                                                  dbus_interface=const_INT_PREFIX + const_PLUGIN_SUFFIX)
+            self.db_object = self.sessions_bus.get_object(
+                const_INT_PREFIX, const_OBJ_PATH
+            )
+            self.db_core_iface = dbus.Interface(
+                self.db_object, dbus_interface=const_INT_PREFIX + const_CORE_SUFFIX
+            )
+            self.db_plugin_iface = dbus.Interface(
+                self.db_object, dbus_interface=const_INT_PREFIX + const_PLUGIN_SUFFIX
+            )
         except dbus.exceptions.DBusException as e:
-            if e._dbus_error_name in ('org.freedesktop.DBus.Error.ServiceUnknown',
-                                      'org.freedesktop.DBus.Error.Spawn.ExecFailed'):
+            if e._dbus_error_name in (
+                "org.freedesktop.DBus.Error.ServiceUnknown",
+                "org.freedesktop.DBus.Error.Spawn.ExecFailed",
+            ):
                 errback(BridgeExceptionNoService())
-            elif e._dbus_error_name == 'org.freedesktop.DBus.Error.NotSupported':
-                log.error(_("D-Bus is not launched, please see README to see instructions on how to launch it"))
+            elif e._dbus_error_name == "org.freedesktop.DBus.Error.NotSupported":
+                log.error(
+                    _(
+                        "D-Bus is not launched, please see README to see instructions on how to launch it"
+                    )
+                )
                 errback(BridgeInitError)
             else:
                 errback(e)
         else:
             callback()
-        #props = self.db_core_iface.getProperties()
+        # props = self.db_core_iface.getProperties()
 
     def register_signal(self, functionName, handler, iface="core"):
         if iface == "core":
@@ -96,10 +103,10 @@
         elif iface == "plugin":
             self.db_plugin_iface.connect_to_signal(functionName, handler)
         else:
-            log.error(_('Unknown interface'))
+            log.error(_("Unknown interface"))
 
     def __getattribute__(self, name):
-        """ usual __getattribute__ if the method exists, else try to find a plugin method """
+        """usual __getattribute__ if the method exists, else try to find a plugin method"""
         try:
             return object.__getattribute__(self, name)
         except AttributeError:
@@ -114,20 +121,26 @@
                 args = list(args)
 
                 if kwargs:
-                    if 'callback' in kwargs:
+                    if "callback" in kwargs:
                         async_ = True
-                        _callback = kwargs.pop('callback')
-                        _errback = kwargs.pop('errback', lambda failure: log.error(str(failure)))
+                        _callback = kwargs.pop("callback")
+                        _errback = kwargs.pop(
+                            "errback", lambda failure: log.error(str(failure))
+                        )
                     try:
-                        args.append(kwargs.pop('profile'))
+                        args.append(kwargs.pop("profile"))
                     except KeyError:
                         try:
-                            args.append(kwargs.pop('profile_key'))
+                            args.append(kwargs.pop("profile_key"))
                         except KeyError:
                             pass
                     # at this point, kwargs should be empty
                     if kwargs:
-                        log.warning("unexpected keyword arguments, they will be ignored: {}".format(kwargs))
+                        log.warning(
+                            "unexpected keyword arguments, they will be ignored: {}".format(
+                                kwargs
+                            )
+                        )
                 elif len(args) >= 2 and callable(args[-1]) and callable(args[-2]):
                     async_ = True
                     _errback = args.pop()
@@ -136,9 +149,11 @@
                 method = getattr(self.db_plugin_iface, name)
 
                 if async_:
-                    kwargs['timeout'] = const_TIMEOUT
-                    kwargs['reply_handler'] = _callback
-                    kwargs['error_handler'] = lambda err: _errback(dbus_to_bridge_exception(err))
+                    kwargs["timeout"] = const_TIMEOUT
+                    kwargs["reply_handler"] = _callback
+                    kwargs["error_handler"] = lambda err: _errback(
+                        dbus_to_bridge_exception(err)
+                    )
 
                 try:
                     return method(*args, **kwargs)
@@ -158,17 +173,21 @@
 
             return get_plugin_method
 
+
 ##METHODS_PART##
 
+
 class AIOBridge(bridge):
 
     def register_signal(self, functionName, handler, iface="core"):
         loop = asyncio.get_running_loop()
-        async_handler = lambda *args: asyncio.run_coroutine_threadsafe(handler(*args), loop)
+        async_handler = lambda *args: asyncio.run_coroutine_threadsafe(
+            handler(*args), loop
+        )
         return super().register_signal(functionName, async_handler, iface)
 
     def __getattribute__(self, name):
-        """ usual __getattribute__ if the method exists, else try to find a plugin method """
+        """usual __getattribute__ if the method exists, else try to find a plugin method"""
         try:
             return object.__getattribute__(self, name)
         except AttributeError:
@@ -178,16 +197,18 @@
                 fut = loop.create_future()
                 method = getattr(self.db_plugin_iface, name)
                 reply_handler = lambda ret=None: loop.call_soon_threadsafe(
-                    fut.set_result, ret)
+                    fut.set_result, ret
+                )
                 error_handler = lambda err: loop.call_soon_threadsafe(
-                    fut.set_exception, dbus_to_bridge_exception(err))
+                    fut.set_exception, dbus_to_bridge_exception(err)
+                )
                 try:
                     method(
                         *args,
                         **kwargs,
                         timeout=const_TIMEOUT,
                         reply_handler=reply_handler,
-                        error_handler=error_handler
+                        error_handler=error_handler,
                     )
                 except ValueError as e:
                     if e.args[0].startswith("Unable to guess signature"):
@@ -202,7 +223,7 @@
                             **kwargs,
                             timeout=const_TIMEOUT,
                             reply_handler=reply_handler,
-                            error_handler=error_handler
+                            error_handler=error_handler,
                         )
 
                     else:
@@ -216,8 +237,9 @@
         fut = loop.create_future()
         super().bridge_connect(
             callback=lambda: loop.call_soon_threadsafe(fut.set_result, None),
-            errback=lambda e: loop.call_soon_threadsafe(fut.set_exception, e)
+            errback=lambda e: loop.call_soon_threadsafe(fut.set_exception, e),
         )
         return fut
 
+
 ##ASYNC_METHODS_PART##
--- a/libervia/backend/bridge/bridge_constructor/constructors/embedded/constructor.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/bridge_constructor/constructors/embedded/constructor.py	Wed Jun 19 18:44:57 2024 +0200
@@ -48,9 +48,11 @@
     def core_completion_method(self, completion, function, default, arg_doc, async_):
         completion.update(
             {
-                "debug": ""
-                if not self.args.debug
-                else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " "),
+                "debug": (
+                    ""
+                    if not self.args.debug
+                    else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " ")
+                ),
                 "args_result": self.get_arguments(function["sig_in"], name=arg_doc),
                 "args_comma": ", " if function["sig_in"] else "",
             }
@@ -60,9 +62,8 @@
             completion["cb_or_lambda"] = (
                 "callback" if function["sig_out"] else "lambda __: callback()"
             )
-            completion[
-                "ret_routine"
-            ] = """\
+            completion["ret_routine"] = (
+                """\
         d = self._methods_cbs["{name}"]({args_result})
         if callback is not None:
             d.addCallback({cb_or_lambda})
@@ -72,13 +73,13 @@
             d.addErrback(errback)
         return d
         """.format(
-                **completion
+                    **completion
+                )
             )
         else:
             completion["ret_or_nothing"] = "ret" if function["sig_out"] else ""
-            completion[
-                "ret_routine"
-            ] = """\
+            completion["ret_routine"] = (
+                """\
         try:
             ret = self._methods_cbs["{name}"]({args_result})
         except Exception as e:
@@ -91,7 +92,8 @@
                 return ret
             else:
                 callback({ret_or_nothing})""".format(
-                **completion
+                    **completion
+                )
             )
 
     def core_completion_signal(self, completion, function, default, arg_doc, async_):
--- a/libervia/backend/bridge/bridge_constructor/constructors/embedded/embedded_template.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/bridge_constructor/constructors/embedded/embedded_template.py	Wed Jun 19 18:44:57 2024 +0200
@@ -85,7 +85,9 @@
         else:
             cb(*args, **kwargs)
 
-    def add_method(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}):
+    def add_method(
+        self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}
+    ):
         # FIXME: doc parameter is kept only temporary, the time to remove it from calls
         log.debug("Adding method [{}] to embedded bridge".format(name))
         self.register_method(name, method)
--- a/libervia/backend/bridge/bridge_constructor/constructors/mediawiki/constructor.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/bridge_constructor/constructors/mediawiki/constructor.py	Wed Jun 19 18:44:57 2024 +0200
@@ -108,21 +108,21 @@
                 function["sig_out"],
             )
             completion = {
-                "signature": signature_signal
-                if function["type"] == "signal"
-                else signature_method,
+                "signature": (
+                    signature_signal if function["type"] == "signal" else signature_method
+                ),
                 "sig_out": function["sig_out"] or "",
                 "category": function["category"],
                 "name": section,
                 "doc": self.get_doc(section) or "FIXME: No description available",
                 "async": async_msg if "async" in self.getFlags(section) else "",
-                "deprecated": deprecated_msg
-                if "deprecated" in self.getFlags(section)
-                else "",
+                "deprecated": (
+                    deprecated_msg if "deprecated" in self.getFlags(section) else ""
+                ),
                 "parameters": self._wiki_parameter(section, function["sig_in"]),
-                "return": self._wiki_return(section)
-                if function["type"] == "method"
-                else "",
+                "return": (
+                    self._wiki_return(section) if function["type"] == "method" else ""
+                ),
             }
 
             dest = signals_part if function["type"] == "signal" else methods_part
--- a/libervia/backend/bridge/bridge_constructor/constructors/pb/constructor.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/bridge_constructor/constructors/pb/constructor.py	Wed Jun 19 18:44:57 2024 +0200
@@ -61,11 +61,13 @@
             {
                 "args_comma": ", " if function["sig_in"] else "",
                 "args_no_def": self.get_arguments(function["sig_in"], name=arg_doc),
-                "callback": "callback"
-                if function["sig_out"]
-                else "lambda __: callback()",
-                "debug": ""
-                if not self.args.debug
-                else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " "),
+                "callback": (
+                    "callback" if function["sig_out"] else "lambda __: callback()"
+                ),
+                "debug": (
+                    ""
+                    if not self.args.debug
+                    else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " ")
+                ),
             }
         )
--- a/libervia/backend/bridge/bridge_constructor/constructors/pb/pb_core_template.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/bridge_constructor/constructors/pb/pb_core_template.py	Wed Jun 19 18:44:57 2024 +0200
@@ -99,6 +99,7 @@
             del self.signals_paused
             log.debug("bridge signals have been reactivated")
 
+
 ##METHODS_PART##
 
 
@@ -136,7 +137,7 @@
         #  self.root.register_method(name, callback)
 
     def add_method(
-            self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}
+        self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}
     ):
         """Dynamically add a method to PB bridge"""
         # FIXME: doc parameter is kept only temporary, the time to remove it from calls
@@ -163,4 +164,5 @@
         """
         self.root._bridge_reactivate_signals()
 
+
 ##SIGNALS_PART##
--- a/libervia/backend/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py	Wed Jun 19 18:44:57 2024 +0200
@@ -48,9 +48,7 @@
             pass
         else:
             raise exceptions.InternalError(
-                "{name} signal handler has been registered twice".format(
-                    name=method_name
-                )
+                "{name} signal handler has been registered twice".format(name=method_name)
             )
         setattr(self, method_name, handler)
 
@@ -70,8 +68,7 @@
         """Convert Failure to BridgeException"""
         ori_errback(
             BridgeException(
-                name=failure_.type.decode('utf-8'),
-                message=str(failure_.value)
+                name=failure_.type.decode("utf-8"), message=str(failure_.value)
             )
         )
 
@@ -167,11 +164,13 @@
 
 ##METHODS_PART##
 
+
 class AIOSignalsHandler(SignalsHandler):
 
     def register_signal(self, name, handler, iface="core"):
         async_handler = lambda *args, **kwargs: defer.Deferred.fromFuture(
-            asyncio.ensure_future(handler(*args, **kwargs)))
+            asyncio.ensure_future(handler(*args, **kwargs))
+        )
         return super().register_signal(name, async_handler, iface)
 
 
@@ -183,9 +182,8 @@
     def _errback(self, failure_):
         """Convert Failure to BridgeException"""
         raise BridgeException(
-            name=failure_.type.decode('utf-8'),
-            message=str(failure_.value)
-            )
+            name=failure_.type.decode("utf-8"), message=str(failure_.value)
+        )
 
     def call(self, name, *args, **kwargs):
         d = self.root.callRemote(name, *args, *kwargs)
@@ -196,4 +194,5 @@
         d = super().bridge_connect(callback=None, errback=None)
         return await d.asFuture(asyncio.get_event_loop())
 
+
 ##ASYNC_METHODS_PART##
--- a/libervia/backend/bridge/dbus_bridge.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/dbus_bridge.py	Wed Jun 19 18:44:57 2024 +0200
@@ -31,10 +31,8 @@
 
 # Interface prefix
 const_INT_PREFIX = config.config_get(
-    config.parse_main_conf(),
-    "",
-    "bridge_dbus_int_prefix",
-    "org.libervia.Libervia")
+    config.parse_main_conf(), "", "bridge_dbus_int_prefix", "org.libervia.Libervia"
+)
 const_ERROR_PREFIX = const_INT_PREFIX + ".error"
 const_OBJ_PATH = "/org/libervia/Libervia/bridge"
 const_CORE_SUFFIX = ".core"
@@ -88,100 +86,104 @@
 
     core_iface = DBusInterface(
         const_INT_PREFIX + const_CORE_SUFFIX,
-        Method('action_launch', arguments='sss', returns='s'),
-        Method('actions_get', arguments='s', returns='a(ssi)'),
-        Method('config_get', arguments='ss', returns='s'),
-        Method('connect', arguments='ssa{ss}', returns='b'),
-        Method('contact_add', arguments='ss', returns=''),
-        Method('contact_del', arguments='ss', returns=''),
-        Method('contact_get', arguments='ss', returns='(a{ss}as)'),
-        Method('contact_update', arguments='ssass', returns=''),
-        Method('contacts_get', arguments='s', returns='a(sa{ss}as)'),
-        Method('contacts_get_from_group', arguments='ss', returns='as'),
-        Method('devices_infos_get', arguments='ss', returns='s'),
-        Method('disco_find_by_features', arguments='asa(ss)bbbbbs', returns='(a{sa(sss)}a{sa(sss)}a{sa(sss)})'),
-        Method('disco_infos', arguments='ssbs', returns='(asa(sss)a{sa(a{ss}as)})'),
-        Method('disco_items', arguments='ssbs', returns='a(sss)'),
-        Method('disconnect', arguments='s', returns=''),
-        Method('encryption_namespace_get', arguments='s', returns='s'),
-        Method('encryption_plugins_get', arguments='', returns='s'),
-        Method('encryption_trust_ui_get', arguments='sss', returns='s'),
-        Method('entities_data_get', arguments='asass', returns='a{sa{ss}}'),
-        Method('entity_data_get', arguments='sass', returns='a{ss}'),
-        Method('features_get', arguments='s', returns='a{sa{ss}}'),
-        Method('history_get', arguments='ssiba{ss}s', returns='a(sdssa{ss}a{ss}ss)'),
-        Method('image_check', arguments='s', returns='s'),
-        Method('image_convert', arguments='ssss', returns='s'),
-        Method('image_generate_preview', arguments='ss', returns='s'),
-        Method('image_resize', arguments='sii', returns='s'),
-        Method('init_pre_script', arguments='', returns=''),
-        Method('is_connected', arguments='s', returns='b'),
-        Method('main_resource_get', arguments='ss', returns='s'),
-        Method('menu_help_get', arguments='ss', returns='s'),
-        Method('menu_launch', arguments='sasa{ss}is', returns='a{ss}'),
-        Method('menus_get', arguments='si', returns='a(ssasasa{ss})'),
-        Method('message_encryption_get', arguments='ss', returns='s'),
-        Method('message_encryption_start', arguments='ssbs', returns=''),
-        Method('message_encryption_stop', arguments='ss', returns=''),
-        Method('message_send', arguments='sa{ss}a{ss}sss', returns=''),
-        Method('namespaces_get', arguments='', returns='a{ss}'),
-        Method('notification_add', arguments='ssssbbsdss', returns=''),
-        Method('notification_delete', arguments='sbs', returns=''),
-        Method('notifications_expired_clean', arguments='ds', returns=''),
-        Method('notifications_get', arguments='ss', returns='s'),
-        Method('param_get_a', arguments='ssss', returns='s'),
-        Method('param_get_a_async', arguments='sssis', returns='s'),
-        Method('param_set', arguments='sssis', returns=''),
-        Method('param_ui_get', arguments='isss', returns='s'),
-        Method('params_categories_get', arguments='', returns='as'),
-        Method('params_register_app', arguments='sis', returns=''),
-        Method('params_template_load', arguments='s', returns='b'),
-        Method('params_template_save', arguments='s', returns='b'),
-        Method('params_values_from_category_get_async', arguments='sisss', returns='a{ss}'),
-        Method('presence_set', arguments='ssa{ss}s', returns=''),
-        Method('presence_statuses_get', arguments='s', returns='a{sa{s(sia{ss})}}'),
-        Method('private_data_delete', arguments='sss', returns=''),
-        Method('private_data_get', arguments='sss', returns='s'),
-        Method('private_data_set', arguments='ssss', returns=''),
-        Method('profile_create', arguments='sss', returns=''),
-        Method('profile_delete_async', arguments='s', returns=''),
-        Method('profile_is_session_started', arguments='s', returns='b'),
-        Method('profile_name_get', arguments='s', returns='s'),
-        Method('profile_set_default', arguments='s', returns=''),
-        Method('profile_start_session', arguments='ss', returns='b'),
-        Method('profiles_list_get', arguments='bb', returns='as'),
-        Method('progress_get', arguments='ss', returns='a{ss}'),
-        Method('progress_get_all', arguments='s', returns='a{sa{sa{ss}}}'),
-        Method('progress_get_all_metadata', arguments='s', returns='a{sa{sa{ss}}}'),
-        Method('ready_get', arguments='', returns=''),
-        Method('roster_resync', arguments='s', returns=''),
-        Method('session_infos_get', arguments='s', returns='a{ss}'),
-        Method('sub_waiting_get', arguments='s', returns='a{ss}'),
-        Method('subscription', arguments='sss', returns=''),
-        Method('version_get', arguments='', returns='s'),
-        Signal('_debug', 'sa{ss}s'),
-        Signal('action_new', 'ssis'),
-        Signal('connected', 'ss'),
-        Signal('contact_deleted', 'ss'),
-        Signal('contact_new', 'sa{ss}ass'),
-        Signal('disconnected', 's'),
-        Signal('entity_data_updated', 'ssss'),
-        Signal('message_encryption_started', 'sss'),
-        Signal('message_encryption_stopped', 'sa{ss}s'),
-        Signal('message_new', 'sdssa{ss}a{ss}sss'),
-        Signal('message_update', 'ssss'),
-        Signal('notification_deleted', 'ss'),
-        Signal('notification_new', 'sdssssbidss'),
-        Signal('param_update', 'ssss'),
-        Signal('presence_update', 'ssia{ss}s'),
-        Signal('progress_error', 'sss'),
-        Signal('progress_finished', 'sa{ss}s'),
-        Signal('progress_started', 'sa{ss}s'),
-        Signal('subscribe', 'sss'),
+        Method("action_launch", arguments="sss", returns="s"),
+        Method("actions_get", arguments="s", returns="a(ssi)"),
+        Method("config_get", arguments="ss", returns="s"),
+        Method("connect", arguments="ssa{ss}", returns="b"),
+        Method("contact_add", arguments="ss", returns=""),
+        Method("contact_del", arguments="ss", returns=""),
+        Method("contact_get", arguments="ss", returns="(a{ss}as)"),
+        Method("contact_update", arguments="ssass", returns=""),
+        Method("contacts_get", arguments="s", returns="a(sa{ss}as)"),
+        Method("contacts_get_from_group", arguments="ss", returns="as"),
+        Method("devices_infos_get", arguments="ss", returns="s"),
+        Method(
+            "disco_find_by_features",
+            arguments="asa(ss)bbbbbs",
+            returns="(a{sa(sss)}a{sa(sss)}a{sa(sss)})",
+        ),
+        Method("disco_infos", arguments="ssbs", returns="(asa(sss)a{sa(a{ss}as)})"),
+        Method("disco_items", arguments="ssbs", returns="a(sss)"),
+        Method("disconnect", arguments="s", returns=""),
+        Method("encryption_namespace_get", arguments="s", returns="s"),
+        Method("encryption_plugins_get", arguments="", returns="s"),
+        Method("encryption_trust_ui_get", arguments="sss", returns="s"),
+        Method("entities_data_get", arguments="asass", returns="a{sa{ss}}"),
+        Method("entity_data_get", arguments="sass", returns="a{ss}"),
+        Method("features_get", arguments="s", returns="a{sa{ss}}"),
+        Method("history_get", arguments="ssiba{ss}s", returns="a(sdssa{ss}a{ss}ss)"),
+        Method("image_check", arguments="s", returns="s"),
+        Method("image_convert", arguments="ssss", returns="s"),
+        Method("image_generate_preview", arguments="ss", returns="s"),
+        Method("image_resize", arguments="sii", returns="s"),
+        Method("init_pre_script", arguments="", returns=""),
+        Method("is_connected", arguments="s", returns="b"),
+        Method("main_resource_get", arguments="ss", returns="s"),
+        Method("menu_help_get", arguments="ss", returns="s"),
+        Method("menu_launch", arguments="sasa{ss}is", returns="a{ss}"),
+        Method("menus_get", arguments="si", returns="a(ssasasa{ss})"),
+        Method("message_encryption_get", arguments="ss", returns="s"),
+        Method("message_encryption_start", arguments="ssbs", returns=""),
+        Method("message_encryption_stop", arguments="ss", returns=""),
+        Method("message_send", arguments="sa{ss}a{ss}sss", returns=""),
+        Method("namespaces_get", arguments="", returns="a{ss}"),
+        Method("notification_add", arguments="ssssbbsdss", returns=""),
+        Method("notification_delete", arguments="sbs", returns=""),
+        Method("notifications_expired_clean", arguments="ds", returns=""),
+        Method("notifications_get", arguments="ss", returns="s"),
+        Method("param_get_a", arguments="ssss", returns="s"),
+        Method("param_get_a_async", arguments="sssis", returns="s"),
+        Method("param_set", arguments="sssis", returns=""),
+        Method("param_ui_get", arguments="isss", returns="s"),
+        Method("params_categories_get", arguments="", returns="as"),
+        Method("params_register_app", arguments="sis", returns=""),
+        Method("params_template_load", arguments="s", returns="b"),
+        Method("params_template_save", arguments="s", returns="b"),
+        Method(
+            "params_values_from_category_get_async", arguments="sisss", returns="a{ss}"
+        ),
+        Method("presence_set", arguments="ssa{ss}s", returns=""),
+        Method("presence_statuses_get", arguments="s", returns="a{sa{s(sia{ss})}}"),
+        Method("private_data_delete", arguments="sss", returns=""),
+        Method("private_data_get", arguments="sss", returns="s"),
+        Method("private_data_set", arguments="ssss", returns=""),
+        Method("profile_create", arguments="sss", returns=""),
+        Method("profile_delete_async", arguments="s", returns=""),
+        Method("profile_is_session_started", arguments="s", returns="b"),
+        Method("profile_name_get", arguments="s", returns="s"),
+        Method("profile_set_default", arguments="s", returns=""),
+        Method("profile_start_session", arguments="ss", returns="b"),
+        Method("profiles_list_get", arguments="bb", returns="as"),
+        Method("progress_get", arguments="ss", returns="a{ss}"),
+        Method("progress_get_all", arguments="s", returns="a{sa{sa{ss}}}"),
+        Method("progress_get_all_metadata", arguments="s", returns="a{sa{sa{ss}}}"),
+        Method("ready_get", arguments="", returns=""),
+        Method("roster_resync", arguments="s", returns=""),
+        Method("session_infos_get", arguments="s", returns="a{ss}"),
+        Method("sub_waiting_get", arguments="s", returns="a{ss}"),
+        Method("subscription", arguments="sss", returns=""),
+        Method("version_get", arguments="", returns="s"),
+        Signal("_debug", "sa{ss}s"),
+        Signal("action_new", "ssis"),
+        Signal("connected", "ss"),
+        Signal("contact_deleted", "ss"),
+        Signal("contact_new", "sa{ss}ass"),
+        Signal("disconnected", "s"),
+        Signal("entity_data_updated", "ssss"),
+        Signal("message_encryption_started", "sss"),
+        Signal("message_encryption_stopped", "sa{ss}s"),
+        Signal("message_new", "sdssa{ss}a{ss}sss"),
+        Signal("message_update", "ssss"),
+        Signal("notification_deleted", "ss"),
+        Signal("notification_new", "sdssssbidss"),
+        Signal("param_update", "ssss"),
+        Signal("presence_update", "ssia{ss}s"),
+        Signal("progress_error", "sss"),
+        Signal("progress_finished", "sa{ss}s"),
+        Signal("progress_started", "sa{ss}s"),
+        Signal("subscribe", "sss"),
     )
-    plugin_iface = DBusInterface(
-        const_INT_PREFIX + const_PLUGIN_SUFFIX
-    )
+    plugin_iface = DBusInterface(const_INT_PREFIX + const_PLUGIN_SUFFIX)
 
     dbusInterfaces = [core_iface, plugin_iface]
 
@@ -213,7 +215,7 @@
     def dbus_config_get(self, section, name):
         return self._callback("config_get", section, name)
 
-    def dbus_connect(self, profile_key="@DEFAULT@", password='', options={}):
+    def dbus_connect(self, profile_key="@DEFAULT@", password="", options={}):
         return self._callback("connect", profile_key, password, options)
 
     def dbus_contact_add(self, entity_jid, profile_key="@DEFAULT@"):
@@ -237,13 +239,37 @@
     def dbus_devices_infos_get(self, bare_jid, profile_key):
         return self._callback("devices_infos_get", bare_jid, profile_key)
 
-    def dbus_disco_find_by_features(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key="@DEFAULT@"):
-        return self._callback("disco_find_by_features", namespaces, identities, bare_jid, service, roster, own_jid, local_device, profile_key)
+    def dbus_disco_find_by_features(
+        self,
+        namespaces,
+        identities,
+        bare_jid=False,
+        service=True,
+        roster=True,
+        own_jid=True,
+        local_device=False,
+        profile_key="@DEFAULT@",
+    ):
+        return self._callback(
+            "disco_find_by_features",
+            namespaces,
+            identities,
+            bare_jid,
+            service,
+            roster,
+            own_jid,
+            local_device,
+            profile_key,
+        )
 
-    def dbus_disco_infos(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@"):
+    def dbus_disco_infos(
+        self, entity_jid, node="", use_cache=True, profile_key="@DEFAULT@"
+    ):
         return self._callback("disco_infos", entity_jid, node, use_cache, profile_key)
 
-    def dbus_disco_items(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@"):
+    def dbus_disco_items(
+        self, entity_jid, node="", use_cache=True, profile_key="@DEFAULT@"
+    ):
         return self._callback("disco_items", entity_jid, node, use_cache, profile_key)
 
     def dbus_disconnect(self, profile_key="@DEFAULT@"):
@@ -252,8 +278,12 @@
     def dbus_encryption_namespace_get(self, arg_0):
         return self._callback("encryption_namespace_get", arg_0)
 
-    def dbus_encryption_plugins_get(self, ):
-        return self._callback("encryption_plugins_get", )
+    def dbus_encryption_plugins_get(
+        self,
+    ):
+        return self._callback(
+            "encryption_plugins_get",
+        )
 
     def dbus_encryption_trust_ui_get(self, to_jid, namespace, profile_key):
         return self._callback("encryption_trust_ui_get", to_jid, namespace, profile_key)
@@ -267,8 +297,12 @@
     def dbus_features_get(self, profile_key):
         return self._callback("features_get", profile_key)
 
-    def dbus_history_get(self, from_jid, to_jid, limit, between=True, filters='', profile="@NONE@"):
-        return self._callback("history_get", from_jid, to_jid, limit, between, filters, profile)
+    def dbus_history_get(
+        self, from_jid, to_jid, limit, between=True, filters="", profile="@NONE@"
+    ):
+        return self._callback(
+            "history_get", from_jid, to_jid, limit, between, filters, profile
+        )
 
     def dbus_image_check(self, arg_0):
         return self._callback("image_check", arg_0)
@@ -282,8 +316,12 @@
     def dbus_image_resize(self, image_path, width, height):
         return self._callback("image_resize", image_path, width, height)
 
-    def dbus_init_pre_script(self, ):
-        return self._callback("init_pre_script", )
+    def dbus_init_pre_script(
+        self,
+    ):
+        return self._callback(
+            "init_pre_script",
+        )
 
     def dbus_is_connected(self, profile_key="@DEFAULT@"):
         return self._callback("is_connected", profile_key)
@@ -295,7 +333,9 @@
         return self._callback("menu_help_get", menu_id, language)
 
     def dbus_menu_launch(self, menu_type, path, data, security_limit, profile_key):
-        return self._callback("menu_launch", menu_type, path, data, security_limit, profile_key)
+        return self._callback(
+            "menu_launch", menu_type, path, data, security_limit, profile_key
+        )
 
     def dbus_menus_get(self, language, security_limit):
         return self._callback("menus_get", language, security_limit)
@@ -303,20 +343,62 @@
     def dbus_message_encryption_get(self, to_jid, profile_key):
         return self._callback("message_encryption_get", to_jid, profile_key)
 
-    def dbus_message_encryption_start(self, to_jid, namespace='', replace=False, profile_key="@NONE@"):
-        return self._callback("message_encryption_start", to_jid, namespace, replace, profile_key)
+    def dbus_message_encryption_start(
+        self, to_jid, namespace="", replace=False, profile_key="@NONE@"
+    ):
+        return self._callback(
+            "message_encryption_start", to_jid, namespace, replace, profile_key
+        )
 
     def dbus_message_encryption_stop(self, to_jid, profile_key):
         return self._callback("message_encryption_stop", to_jid, profile_key)
 
-    def dbus_message_send(self, to_jid, message, subject={}, mess_type="auto", extra={}, profile_key="@NONE@"):
-        return self._callback("message_send", to_jid, message, subject, mess_type, extra, profile_key)
+    def dbus_message_send(
+        self,
+        to_jid,
+        message,
+        subject={},
+        mess_type="auto",
+        extra={},
+        profile_key="@NONE@",
+    ):
+        return self._callback(
+            "message_send", to_jid, message, subject, mess_type, extra, profile_key
+        )
+
+    def dbus_namespaces_get(
+        self,
+    ):
+        return self._callback(
+            "namespaces_get",
+        )
 
-    def dbus_namespaces_get(self, ):
-        return self._callback("namespaces_get", )
-
-    def dbus_notification_add(self, type_, body_plain, body_rich, title, is_global, requires_action, arg_6, priority, expire_at, extra):
-        return self._callback("notification_add", type_, body_plain, body_rich, title, is_global, requires_action, arg_6, priority, expire_at, extra)
+    def dbus_notification_add(
+        self,
+        type_,
+        body_plain,
+        body_rich,
+        title,
+        is_global,
+        requires_action,
+        arg_6,
+        priority,
+        expire_at,
+        extra,
+    ):
+        return self._callback(
+            "notification_add",
+            type_,
+            body_plain,
+            body_rich,
+            title,
+            is_global,
+            requires_action,
+            arg_6,
+            priority,
+            expire_at,
+            extra,
+        )
 
     def dbus_notification_delete(self, id_, is_global, profile_key):
         return self._callback("notification_delete", id_, is_global, profile_key)
@@ -327,22 +409,43 @@
     def dbus_notifications_get(self, filters, profile_key):
         return self._callback("notifications_get", filters, profile_key)
 
-    def dbus_param_get_a(self, name, category, attribute="value", profile_key="@DEFAULT@"):
+    def dbus_param_get_a(
+        self, name, category, attribute="value", profile_key="@DEFAULT@"
+    ):
         return self._callback("param_get_a", name, category, attribute, profile_key)
 
-    def dbus_param_get_a_async(self, name, category, attribute="value", security_limit=-1, profile_key="@DEFAULT@"):
-        return self._callback("param_get_a_async", name, category, attribute, security_limit, profile_key)
+    def dbus_param_get_a_async(
+        self,
+        name,
+        category,
+        attribute="value",
+        security_limit=-1,
+        profile_key="@DEFAULT@",
+    ):
+        return self._callback(
+            "param_get_a_async", name, category, attribute, security_limit, profile_key
+        )
 
-    def dbus_param_set(self, name, value, category, security_limit=-1, profile_key="@DEFAULT@"):
-        return self._callback("param_set", name, value, category, security_limit, profile_key)
+    def dbus_param_set(
+        self, name, value, category, security_limit=-1, profile_key="@DEFAULT@"
+    ):
+        return self._callback(
+            "param_set", name, value, category, security_limit, profile_key
+        )
 
-    def dbus_param_ui_get(self, security_limit=-1, app='', extra='', profile_key="@DEFAULT@"):
+    def dbus_param_ui_get(
+        self, security_limit=-1, app="", extra="", profile_key="@DEFAULT@"
+    ):
         return self._callback("param_ui_get", security_limit, app, extra, profile_key)
 
-    def dbus_params_categories_get(self, ):
-        return self._callback("params_categories_get", )
+    def dbus_params_categories_get(
+        self,
+    ):
+        return self._callback(
+            "params_categories_get",
+        )
 
-    def dbus_params_register_app(self, xml, security_limit=-1, app=''):
+    def dbus_params_register_app(self, xml, security_limit=-1, app=""):
         return self._callback("params_register_app", xml, security_limit, app)
 
     def dbus_params_template_load(self, filename):
@@ -351,10 +454,19 @@
     def dbus_params_template_save(self, filename):
         return self._callback("params_template_save", filename)
 
-    def dbus_params_values_from_category_get_async(self, category, security_limit=-1, app="", extra="", profile_key="@DEFAULT@"):
-        return self._callback("params_values_from_category_get_async", category, security_limit, app, extra, profile_key)
+    def dbus_params_values_from_category_get_async(
+        self, category, security_limit=-1, app="", extra="", profile_key="@DEFAULT@"
+    ):
+        return self._callback(
+            "params_values_from_category_get_async",
+            category,
+            security_limit,
+            app,
+            extra,
+            profile_key,
+        )
 
-    def dbus_presence_set(self, to_jid='', show='', statuses={}, profile_key="@DEFAULT@"):
+    def dbus_presence_set(self, to_jid="", show="", statuses={}, profile_key="@DEFAULT@"):
         return self._callback("presence_set", to_jid, show, statuses, profile_key)
 
     def dbus_presence_statuses_get(self, profile_key="@DEFAULT@"):
@@ -369,7 +481,7 @@
     def dbus_private_data_set(self, namespace, key, data, profile_key):
         return self._callback("private_data_set", namespace, key, data, profile_key)
 
-    def dbus_profile_create(self, profile, password='', component=''):
+    def dbus_profile_create(self, profile, password="", component=""):
         return self._callback("profile_create", profile, password, component)
 
     def dbus_profile_delete_async(self, profile):
@@ -384,7 +496,7 @@
     def dbus_profile_set_default(self, profile):
         return self._callback("profile_set_default", profile)
 
-    def dbus_profile_start_session(self, password='', profile_key="@DEFAULT@"):
+    def dbus_profile_start_session(self, password="", profile_key="@DEFAULT@"):
         return self._callback("profile_start_session", password, profile_key)
 
     def dbus_profiles_list_get(self, clients=True, components=False):
@@ -399,8 +511,12 @@
     def dbus_progress_get_all_metadata(self, profile):
         return self._callback("progress_get_all_metadata", profile)
 
-    def dbus_ready_get(self, ):
-        return self._callback("ready_get", )
+    def dbus_ready_get(
+        self,
+    ):
+        return self._callback(
+            "ready_get",
+        )
 
     def dbus_roster_resync(self, profile_key="@DEFAULT@"):
         return self._callback("roster_resync", profile_key)
@@ -414,8 +530,12 @@
     def dbus_subscription(self, sub_type, entity, profile_key="@DEFAULT@"):
         return self._callback("subscription", sub_type, entity, profile_key)
 
-    def dbus_version_get(self, ):
-        return self._callback("version_get", )
+    def dbus_version_get(
+        self,
+    ):
+        return self._callback(
+            "version_get",
+        )
 
 
 class bridge:
@@ -462,13 +582,39 @@
         self._obj.emitSignal("entity_data_updated", jid, name, value, profile)
 
     def message_encryption_started(self, to_jid, encryption_data, profile_key):
-        self._obj.emitSignal("message_encryption_started", to_jid, encryption_data, profile_key)
+        self._obj.emitSignal(
+            "message_encryption_started", to_jid, encryption_data, profile_key
+        )
 
     def message_encryption_stopped(self, to_jid, encryption_data, profile_key):
-        self._obj.emitSignal("message_encryption_stopped", to_jid, encryption_data, profile_key)
+        self._obj.emitSignal(
+            "message_encryption_stopped", to_jid, encryption_data, profile_key
+        )
 
-    def message_new(self, uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra, profile):
-        self._obj.emitSignal("message_new", uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra, profile)
+    def message_new(
+        self,
+        uid,
+        timestamp,
+        from_jid,
+        to_jid,
+        message,
+        subject,
+        mess_type,
+        extra,
+        profile,
+    ):
+        self._obj.emitSignal(
+            "message_new",
+            uid,
+            timestamp,
+            from_jid,
+            to_jid,
+            message,
+            subject,
+            mess_type,
+            extra,
+            profile,
+        )
 
     def message_update(self, uid, message_type, message_data, profile):
         self._obj.emitSignal("message_update", uid, message_type, message_data, profile)
@@ -476,14 +622,42 @@
     def notification_deleted(self, id, profile):
         self._obj.emitSignal("notification_deleted", id, profile)
 
-    def notification_new(self, id, timestamp, type, body_plain, body_rich, title, requires_action, priority, expire_at, extra, profile):
-        self._obj.emitSignal("notification_new", id, timestamp, type, body_plain, body_rich, title, requires_action, priority, expire_at, extra, profile)
+    def notification_new(
+        self,
+        id,
+        timestamp,
+        type,
+        body_plain,
+        body_rich,
+        title,
+        requires_action,
+        priority,
+        expire_at,
+        extra,
+        profile,
+    ):
+        self._obj.emitSignal(
+            "notification_new",
+            id,
+            timestamp,
+            type,
+            body_plain,
+            body_rich,
+            title,
+            requires_action,
+            priority,
+            expire_at,
+            extra,
+            profile,
+        )
 
     def param_update(self, name, value, category, profile):
         self._obj.emitSignal("param_update", name, value, category, profile)
 
     def presence_update(self, entity_jid, show, priority, statuses, profile):
-        self._obj.emitSignal("presence_update", entity_jid, show, priority, statuses, profile)
+        self._obj.emitSignal(
+            "presence_update", entity_jid, show, priority, statuses, profile
+        )
 
     def progress_error(self, id, error, profile):
         self._obj.emitSignal("progress_error", id, error, profile)
@@ -505,7 +679,7 @@
         self._obj.emitSignal(name, *args)
 
     def add_method(
-            self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}
+        self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}
     ):
         """Dynamically add a method to D-Bus bridge"""
         # FIXME: doc parameter is kept only temporary, the time to remove it from calls
@@ -513,10 +687,12 @@
         self._obj.plugin_iface.addMethod(
             Method(name, arguments=in_sign, returns=out_sign)
         )
+
         # we have to create a method here instead of using partialmethod, because txdbus
         # uses __func__ which doesn't work with partialmethod
         def caller(self_, *args, **kwargs):
             return self_._callback(name, *args, **kwargs)
+
         setattr(self._obj, f"dbus_{name}", MethodType(caller, self._obj))
         self.register_method(name, method)
 
@@ -524,4 +700,4 @@
         """Dynamically add a signal to D-Bus bridge"""
         log.debug(f"Adding signal {name!r} to D-Bus bridge")
         self._obj.plugin_iface.addSignal(Signal(name, signature))
-        setattr(bridge, name, partialmethod(bridge.emit_signal, name))
\ No newline at end of file
+        setattr(bridge, name, partialmethod(bridge.emit_signal, name))
--- a/libervia/backend/bridge/pb.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/bridge/pb.py	Wed Jun 19 18:44:57 2024 +0200
@@ -99,6 +99,7 @@
             del self.signals_paused
             log.debug("bridge signals have been reactivated")
 
+
 ##METHODS_PART##
 
 
@@ -136,7 +137,7 @@
         #  self.root.register_method(name, callback)
 
     def add_method(
-            self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}
+        self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}
     ):
         """Dynamically add a method to PB bridge"""
         # FIXME: doc parameter is kept only temporary, the time to remove it from calls
@@ -185,13 +186,39 @@
         self.send_signal("entity_data_updated", jid, name, value, profile)
 
     def message_encryption_started(self, to_jid, encryption_data, profile_key):
-        self.send_signal("message_encryption_started", to_jid, encryption_data, profile_key)
+        self.send_signal(
+            "message_encryption_started", to_jid, encryption_data, profile_key
+        )
 
     def message_encryption_stopped(self, to_jid, encryption_data, profile_key):
-        self.send_signal("message_encryption_stopped", to_jid, encryption_data, profile_key)
+        self.send_signal(
+            "message_encryption_stopped", to_jid, encryption_data, profile_key
+        )
 
-    def message_new(self, uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra, profile):
-        self.send_signal("message_new", uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra, profile)
+    def message_new(
+        self,
+        uid,
+        timestamp,
+        from_jid,
+        to_jid,
+        message,
+        subject,
+        mess_type,
+        extra,
+        profile,
+    ):
+        self.send_signal(
+            "message_new",
+            uid,
+            timestamp,
+            from_jid,
+            to_jid,
+            message,
+            subject,
+            mess_type,
+            extra,
+            profile,
+        )
 
     def message_update(self, uid, message_type, message_data, profile):
         self.send_signal("message_update", uid, message_type, message_data, profile)
@@ -199,8 +226,34 @@
     def notification_deleted(self, id, profile):
         self.send_signal("notification_deleted", id, profile)
 
-    def notification_new(self, id, timestamp, type, body_plain, body_rich, title, requires_action, priority, expire_at, extra, profile):
-        self.send_signal("notification_new", id, timestamp, type, body_plain, body_rich, title, requires_action, priority, expire_at, extra, profile)
+    def notification_new(
+        self,
+        id,
+        timestamp,
+        type,
+        body_plain,
+        body_rich,
+        title,
+        requires_action,
+        priority,
+        expire_at,
+        extra,
+        profile,
+    ):
+        self.send_signal(
+            "notification_new",
+            id,
+            timestamp,
+            type,
+            body_plain,
+            body_rich,
+            title,
+            requires_action,
+            priority,
+            expire_at,
+            extra,
+            profile,
+        )
 
     def param_update(self, name, value, category, profile):
         self.send_signal("param_update", name, value, category, profile)
--- a/libervia/backend/core/constants.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/core/constants.py	Wed Jun 19 18:44:57 2024 +0200
@@ -46,9 +46,7 @@
     APP_NAME_ALT = "Libervia"
     APP_NAME_FILE = "libervia"
     APP_NAME_FULL = f"{APP_NAME} ({APP_COMPONENT})"
-    APP_VERSION = (
-        backend.__version__
-    )
+    APP_VERSION = backend.__version__
     APP_RELEASE_NAME = "La Ruche"
     APP_URL = "https://libervia.org"
 
@@ -117,8 +115,8 @@
     ## Roster jids selection ##
     PUBLIC = "PUBLIC"
     ALL = (
-        "ALL"
-    )  # ALL means all known contacts, while PUBLIC means everybody, known or not
+        "ALL"  # ALL means all known contacts, while PUBLIC means everybody, known or not
+    )
     GROUP = "GROUP"
     JID = "JID"
 
@@ -226,9 +224,7 @@
     PI_IMPORT_NAME = "import_name"
     PI_MAIN = "main"
     PI_HANDLER = "handler"
-    PI_TYPE = (
-        "type"
-    )  #  FIXME: should be types, and should handle single unicode type or tuple of types (e.g. "blog" and "import")
+    PI_TYPE = "type"  #  FIXME: should be types, and should handle single unicode type or tuple of types (e.g. "blog" and "import")
     PI_MODES = "modes"
     PI_PROTOCOLS = "protocols"
     PI_DEPENDENCIES = "dependencies"
@@ -405,14 +401,13 @@
     KEY_ATTACHMENTS_PREVIEW = "preview"
     KEY_ATTACHMENTS_RESIZE = "resize"
 
-
     ## Common extra keys/values ##
     KEY_ORDER_BY = "order_by"
     KEY_USE_CACHE = "use_cache"
     KEY_DECRYPT = "decrypt"
 
-    ORDER_BY_CREATION = 'creation'
-    ORDER_BY_MODIFICATION = 'modification'
+    ORDER_BY_CREATION = "creation"
+    ORDER_BY_MODIFICATION = "modification"
 
     # internationalisation
     DEFAULT_LOCALE = "en_GB"
@@ -432,9 +427,7 @@
     EXIT_CONFLICT = 19  # an item already exists
     EXIT_USER_CANCELLED = 20  # user cancelled action
     EXIT_INTERNAL_ERROR = 111  # unexpected error
-    EXIT_FILE_NOT_EXE = (
-        126
-    )  # a file to be executed was found, but it was not an executable utility (cf. man 1 exit)
+    EXIT_FILE_NOT_EXE = 126  # a file to be executed was found, but it was not an executable utility (cf. man 1 exit)
     EXIT_CMD_NOT_FOUND = 127  # a utility to be executed was not found (cf. man 1 exit)
     EXIT_CMD_ERROR = 127  # a utility to be executed returned an error exit code
     EXIT_SIGNAL_INT = 128  # a command was interrupted by a signal (cf. man 1 exit)
@@ -492,8 +485,9 @@
 
         # we don't want the very verbose jnius log when we are in DEBUG level
         import logging
-        logging.getLogger('jnius').setLevel(logging.WARNING)
-        logging.getLogger('jnius.reflect').setLevel(logging.WARNING)
+
+        logging.getLogger("jnius").setLevel(logging.WARNING)
+        logging.getLogger("jnius.reflect").setLevel(logging.WARNING)
 
         Environment = autoclass("android.os.Environment")
 
@@ -518,9 +512,10 @@
         ]
     else:
         import os
+
         # we use parent of "sat" module dir as last config path, this is useful for
         # per instance configurations (e.g. a dev instance and a main instance)
-        root_dir = dirname(dirname(backend.__file__)) + '/'
+        root_dir = dirname(dirname(backend.__file__)) + "/"
         Const.CONFIG_PATHS = (
             # /etc/_sat.conf is used for system-related settings (e.g. when media_dir
             # is set by the distribution and has not reason to change, or in a Docker
@@ -541,10 +536,10 @@
         # on recent versions of Flatpak, FLATPAK_ID is set at run time
         # it seems that this is not the case on older versions,
         # but FLATPAK_SANDBOX_DIR seems set then
-        if os.getenv('FLATPAK_ID') or os.getenv('FLATPAK_SANDBOX_DIR'):
+        if os.getenv("FLATPAK_ID") or os.getenv("FLATPAK_SANDBOX_DIR"):
             # for Flatpak, the conf can't be set in /etc or $HOME, so we have
             # to add /app
-            Const.CONFIG_PATHS.append('/app/')
+            Const.CONFIG_PATHS.append("/app/")
 
         ## Configuration ##
         Const.DEFAULT_CONFIG = {
@@ -564,4 +559,3 @@
             realpath(expanduser(path) + "sat.conf")
             for path in Const.CONFIG_PATHS
         ]
-
--- a/libervia/backend/core/core_types.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/core/core_types.py	Wed Jun 19 18:44:57 2024 +0200
@@ -34,11 +34,10 @@
     server_jid: t_jid.JID
     IQ: Callable[[Optional[str], Optional[int]], xmlstream.IQ]
 
-EncryptionPlugin = namedtuple("EncryptionPlugin", ("instance",
-                                                   "name",
-                                                   "namespace",
-                                                   "priority",
-                                                   "directed"))
+
+EncryptionPlugin = namedtuple(
+    "EncryptionPlugin", ("instance", "name", "namespace", "priority", "directed")
+)
 
 
 class EncryptionSession(TypedDict):
@@ -47,21 +46,23 @@
 
 # Incomplete types built through observation rather than code inspection.
 MessageDataExtra = TypedDict(
-    "MessageDataExtra",
-    { "encrypted": bool, "origin_id": str },
-    total=False
+    "MessageDataExtra", {"encrypted": bool, "origin_id": str}, total=False
 )
 
 
-MessageData = TypedDict("MessageData", {
-    "from": t_jid.JID,
-    "to": t_jid.JID,
-    "uid": str,
-    "message": Dict[str, str],
-    "subject": Dict[str, str],
-    "type": str,
-    "timestamp": float,
-    "extra": MessageDataExtra,
-    "ENCRYPTION": EncryptionSession,
-    "xml": domish.Element
-}, total=False)
+MessageData = TypedDict(
+    "MessageData",
+    {
+        "from": t_jid.JID,
+        "to": t_jid.JID,
+        "uid": str,
+        "message": Dict[str, str],
+        "subject": Dict[str, str],
+        "type": str,
+        "timestamp": float,
+        "extra": MessageDataExtra,
+        "ENCRYPTION": EncryptionSession,
+        "xml": domish.Element,
+    },
+    total=False,
+)
--- a/libervia/backend/core/exceptions.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/core/exceptions.py	Wed Jun 19 18:44:57 2024 +0200
@@ -62,6 +62,7 @@
 
 class MissingPlugin(Exception):
     """A SàT plugin needed for a feature/method is missing"""
+
     pass
 
 
@@ -129,6 +130,7 @@
 
 class EncryptionError(Exception):
     """Invalid encryption"""
+
     pass
 
 
@@ -143,6 +145,7 @@
 
 class InvalidCertificate(Exception):
     """A TLS certificate is not valid"""
+
     pass
 
 
--- a/libervia/backend/core/i18n.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/core/i18n.py	Wed Jun 19 18:44:57 2024 +0200
@@ -37,7 +37,6 @@
             )
         _translators[lang].install()
 
-
 except ImportError:
 
     log.warning("gettext support disabled")
--- a/libervia/backend/core/launcher.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/core/launcher.py	Wed Jun 19 18:44:57 2024 +0200
@@ -201,11 +201,7 @@
         return pid_dir / f"{self.APP_NAME_FILE}.pid"
 
     def wait_for_service(
-        self,
-        service_host: str,
-        service_port: int,
-        timeout: int,
-        service_name: str
+        self, service_host: str, service_port: int, timeout: int, service_name: str
     ) -> None:
         """Waits for a network service to become available.
 
--- a/libervia/backend/core/log.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/core/log.py	Wed Jun 19 18:44:57 2024 +0200
@@ -36,8 +36,8 @@
 backend = None
 _loggers: Dict[str, "Logger"] = {}
 handlers = {}
-COLOR_START = '%(color_start)s'
-COLOR_END = '%(color_end)s'
+COLOR_START = "%(color_start)s"
+COLOR_END = "%(color_end)s"
 
 
 class Filtered(Exception):
@@ -46,8 +46,9 @@
 
 class Logger:
     """High level logging class"""
-    fmt = None # format option as given by user (e.g. SAT_LOG_LOGGER)
-    filter_name = None # filter to call
+
+    fmt = None  # format option as given by user (e.g. SAT_LOG_LOGGER)
+    filter_name = None  # filter to call
     post_treat = None
 
     def __init__(self, name):
@@ -72,7 +73,7 @@
         message: object,
         level: Optional[str] = None,
         exc_info: _ExcInfoType = False,
-        **kwargs
+        **kwargs,
     ) -> None:
         """Actually log the message
 
@@ -83,11 +84,7 @@
         print(message)
 
     def log(
-        self,
-        level: str,
-        message: object,
-        exc_info: _ExcInfoType = False,
-        **kwargs
+        self, level: str, message: object, exc_info: _ExcInfoType = False, **kwargs
     ) -> None:
         """Print message
 
@@ -116,14 +113,18 @@
         """
         if self.fmt is None and self.filter_name is None:
             return message
-        record = {'name': self._name,
-                  'message': message,
-                  'levelname': level,
-                 }
+        record = {
+            "name": self._name,
+            "message": message,
+            "levelname": level,
+        }
         try:
             if not self.filter_name.dict_filter(record):
                 raise Filtered
-        except (AttributeError, TypeError): # XXX: TypeError is here because of a pyjamas bug which need to be fixed (TypeError is raised instead of AttributeError)
+        except (
+            AttributeError,
+            TypeError,
+        ):  # XXX: TypeError is here because of a pyjamas bug which need to be fixed (TypeError is raised instead of AttributeError)
             if self.filter_name is not None:
                 raise ValueError("Bad filter: filters must have a .filter method")
         try:
@@ -131,9 +132,9 @@
         except TypeError:
             return message
         except KeyError as e:
-            if e.args[0] == 'profile':
+            if e.args[0] == "profile":
                 # XXX: %(profile)s use some magic with introspection, for debugging purpose only *DO NOT* use in production
-                record['profile'] = configure_cls[backend].get_profile()
+                record["profile"] = configure_cls[backend].get_profile()
                 return self.fmt % record
             else:
                 raise e
@@ -167,6 +168,7 @@
         """
         assert name_re
         import re
+
         self.name_re = re.compile(name_re)
 
     def filter(self, record):
@@ -180,10 +182,12 @@
         @param dict_record: dictionary with at list a key "name" with logger name
         @return: True if message should be logged
         """
+
         class LogRecord(object):
             pass
+
         log_record = LogRecord()
-        log_record.name = dict_record['name']
+        log_record.name = dict_record["name"]
         return self.filter(log_record) == 1
 
 
@@ -192,8 +196,17 @@
     # True if color location is specified in fmt (with COLOR_START)
     _color_location = False
 
-    def __init__(self, level=None, fmt=None, output=None, logger=None, colors=False,
-                 levels_taints_dict=None, force_colors=False, backend_data=None):
+    def __init__(
+        self,
+        level=None,
+        fmt=None,
+        output=None,
+        logger=None,
+        colors=False,
+        levels_taints_dict=None,
+        force_colors=False,
+        backend_data=None,
+    ):
         """Configure a backend
 
         @param level: one of C.LOG_LEVELS
@@ -231,19 +244,21 @@
         if level is not None:
             # we deactivate methods below level
             level_idx = C.LOG_LEVELS.index(level)
+
             def dev_null(self, msg):
                 pass
+
             for _level in C.LOG_LEVELS[:level_idx]:
                 setattr(Logger, _level.lower(), dev_null)
 
     def configure_format(self, fmt):
         if fmt is not None:
-            if fmt != '%(message)s': # %(message)s is the same as None
+            if fmt != "%(message)s":  # %(message)s is the same as None
                 Logger.fmt = fmt
             if COLOR_START in fmt:
                 ConfigureBase._color_location = True
-                if fmt.find(COLOR_END,fmt.rfind(COLOR_START))<0:
-                   # color_start not followed by an end, we add it
+                if fmt.find(COLOR_END, fmt.rfind(COLOR_START)) < 0:
+                    # color_start not followed by an end, we add it
                     Logger.fmt += COLOR_END
 
     def configure_output(self, output):
@@ -265,12 +280,14 @@
             taints = self.__class__.taints = {}
             for level in C.LOG_LEVELS:
                 # we want use values and use constant value as default
-                taint_list = levels_taints_dict.get(level, C.LOG_OPT_TAINTS_DICT[1][level])
+                taint_list = levels_taints_dict.get(
+                    level, C.LOG_OPT_TAINTS_DICT[1][level]
+                )
                 ansi_list = []
                 for elt in taint_list:
                     elt = elt.upper()
                     try:
-                        ansi = getattr(A, 'FG_{}'.format(elt))
+                        ansi = getattr(A, "FG_{}".format(elt))
                     except AttributeError:
                         try:
                             ansi = getattr(A, elt)
@@ -278,13 +295,13 @@
                             # we use raw string if element is unknown
                             ansi = elt
                     ansi_list.append(ansi)
-                taints[level] = ''.join(ansi_list)
+                taints[level] = "".join(ansi_list)
 
     def post_treatment(self):
         pass
 
     def manage_outputs(self, outputs_raw):
-        """ Parse output option in a backend agnostic way, and fill handlers consequently
+        """Parse output option in a backend agnostic way, and fill handlers consequently
 
         @param outputs_raw: output option as enterred in environment variable or in configuration
         """
@@ -298,15 +315,19 @@
         for output in outputs:
             if not output:
                 continue
-            if output[-1] == ')':
+            if output[-1] == ")":
                 # we have options
-                opt_begin = output.rfind('(')
-                options = output[opt_begin+1:-1]
+                opt_begin = output.rfind("(")
+                options = output[opt_begin + 1 : -1]
                 output = output[:opt_begin]
             else:
                 options = None
 
-            if output not in (C.LOG_OPT_OUTPUT_DEFAULT, C.LOG_OPT_OUTPUT_FILE, C.LOG_OPT_OUTPUT_MEMORY):
+            if output not in (
+                C.LOG_OPT_OUTPUT_DEFAULT,
+                C.LOG_OPT_OUTPUT_FILE,
+                C.LOG_OPT_OUTPUT_MEMORY,
+            ):
                 raise ValueError("Invalid output [%s]" % output)
 
             if output == C.LOG_OPT_OUTPUT_DEFAULT:
@@ -314,20 +335,26 @@
                 handlers[output] = None
             elif output == C.LOG_OPT_OUTPUT_FILE:
                 if not options:
-                    ValueError("{handler} output need a path as option" .format(handle=output))
+                    ValueError(
+                        "{handler} output need a path as option".format(handle=output)
+                    )
                 handlers.setdefault(output, []).append(options)
-                options = None # option are parsed, we can empty them
+                options = None  # option are parsed, we can empty them
             elif output == C.LOG_OPT_OUTPUT_MEMORY:
                 # we have memory handler, option can be the len limit or None
                 try:
                     limit = int(options)
-                    options = None # option are parsed, we can empty them
+                    options = None  # option are parsed, we can empty them
                 except (TypeError, ValueError):
                     limit = C.LOG_OPT_OUTPUT_MEMORY_LIMIT
                 handlers[output] = limit
 
-            if options: # we should not have unparsed options
-                raise ValueError("options [{options}] are not supported for {handler} output".format(options=options, handler=output))
+            if options:  # we should not have unparsed options
+                raise ValueError(
+                    "options [{options}] are not supported for {handler} output".format(
+                        options=options, handler=output
+                    )
+                )
 
     @staticmethod
     def memory_get(size=None):
@@ -349,13 +376,12 @@
         try:
             start = cls.taints[level]
         except KeyError:
-            start = ''
+            start = ""
 
         if cls._color_location:
-            return message % {'color_start': start,
-                              'color_end': A.RESET}
+            return message % {"color_start": start, "color_end": A.RESET}
         else:
-            return '%s%s%s' % (start, message, A.RESET)
+            return "%s%s%s" % (start, message, A.RESET)
 
     @staticmethod
     def get_profile():
@@ -370,9 +396,10 @@
         ConfigureCustom.LOGGER_CLASS = logger_class
 
 
-configure_cls = { None: ConfigureBase,
-                   C.LOG_BACKEND_CUSTOM: ConfigureCustom
-                 }  # XXX: (key: backend, value: Configure subclass) must be filled when new backend are added
+configure_cls = {
+    None: ConfigureBase,
+    C.LOG_BACKEND_CUSTOM: ConfigureCustom,
+}  # XXX: (key: backend, value: Configure subclass) must be filled when new backend are added
 
 
 def configure(backend_, **options):
@@ -391,36 +418,46 @@
     except KeyError:
         raise ValueError("unknown backend [{}]".format(backend))
     if backend == C.LOG_BACKEND_CUSTOM:
-        logger_class = options.pop('logger_class')
+        logger_class = options.pop("logger_class")
         configure_class(logger_class, **options)
     else:
         configure_class(**options)
 
+
 def memory_get(size=None):
     if not C.LOG_OPT_OUTPUT_MEMORY in handlers:
-        raise ValueError('memory output is not used')
+        raise ValueError("memory output is not used")
     return configure_cls[backend].memory_get(size)
 
+
 def getLogger(name=C.LOG_BASE_LOGGER) -> Logger:
     try:
         logger_class = configure_cls[backend].LOGGER_CLASS
     except KeyError:
-        raise ValueError("This method should not be called with backend [{}]".format(backend))
+        raise ValueError(
+            "This method should not be called with backend [{}]".format(backend)
+        )
     return _loggers.setdefault(name, logger_class(name))
 
+
 _root_logger = getLogger()
 
+
 def debug(msg, **kwargs):
     _root_logger.debug(msg, **kwargs)
 
+
 def info(msg, **kwargs):
     _root_logger.info(msg, **kwargs)
 
+
 def warning(msg, **kwargs):
     _root_logger.warning(msg, **kwargs)
 
+
 def error(msg, **kwargs):
     _root_logger.error(msg, **kwargs)
 
+
 def critical(msg, **kwargs):
     _root_logger.critical(msg, **kwargs)
--- a/libervia/backend/core/log_config.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/core/log_config.py	Wed Jun 19 18:44:57 2024 +0200
@@ -31,6 +31,7 @@
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         from twisted.logger import Logger
+
         self.twisted_log = Logger()
 
     def out(self, message, level=None, **kwargs):
@@ -59,7 +60,7 @@
             except AttributeError:
                 isatty = False
             # FIXME: isatty should be tested on each handler, not globaly
-            if (force_colors or isatty):
+            if force_colors or isatty:
                 # we need colors
                 log.Logger.post_treat = lambda logger, level, message: self.ansi_colors(
                     level, message
@@ -109,6 +110,7 @@
 
     def pre_treatment(self):
         from twisted import logger
+
         global logger
         self.level_map = {
             C.LOG_LVL_DEBUG: logger.LogLevel.debug,
@@ -125,6 +127,7 @@
     def configure_output(self, output):
         import sys
         from twisted.python import logfile
+
         self.log_publisher = logger.LogPublisher()
 
         if output is None:
@@ -137,13 +140,15 @@
                     "You must pass options as backend_data with Twisted backend"
                 )
             options = self.backend_data
-            log_file = logfile.LogFile.fromFullPath(options['logfile'])
+            log_file = logfile.LogFile.fromFullPath(options["logfile"])
             self.log_publisher.addObserver(
-                logger.FileLogObserver(log_file, self.text_formatter))
+                logger.FileLogObserver(log_file, self.text_formatter)
+            )
             # we also want output to stdout if we are in debug or nodaemon mode
             if options.get("nodaemon", False) or options.get("debug", False):
                 self.log_publisher.addObserver(
-                    logger.FileLogObserver(sys.stdout, self.text_formatter))
+                    logger.FileLogObserver(sys.stdout, self.text_formatter)
+                )
 
         if C.LOG_OPT_OUTPUT_FILE in log.handlers:
 
@@ -152,7 +157,8 @@
                     sys.stdout if path == "-" else logfile.LogFile.fromFullPath(path)
                 )
                 self.log_publisher.addObserver(
-                    logger.FileLogObserver(log_file, self.text_formatter))
+                    logger.FileLogObserver(log_file, self.text_formatter)
+                )
 
         if C.LOG_OPT_OUTPUT_MEMORY in log.handlers:
             raise NotImplementedError(
@@ -172,21 +178,21 @@
         """Install twistedObserver which manage non SàT logs"""
         # from twisted import logger
         import sys
+
         filtering_obs = logger.FilteringLogObserver(
             observer=self.log_publisher,
             predicates=[
                 logger.LogLevelFilterPredicate(self.level),
-                ]
+            ],
         )
         logger.globalLogBeginner.beginLoggingTo([filtering_obs])
 
     def text_formatter(self, event):
-        if event.get('sat_logged', False):
-            timestamp = ''.join([logger.formatTime(event.get("log_time", None)), " "])
+        if event.get("sat_logged", False):
+            timestamp = "".join([logger.formatTime(event.get("log_time", None)), " "])
             return f"{timestamp}{event.get('log_format', '')}\n"
         else:
-            eventText = logger.eventAsText(
-                event, includeSystem=True)
+            eventText = logger.eventAsText(event, includeSystem=True)
             if not eventText:
                 return None
             return eventText.replace("\n", "\n\t") + "\n"
@@ -241,6 +247,7 @@
 
         class SatFormatter(logging.Formatter):
             """Formatter which manage SàT specificities"""
+
             _format = fmt
             _with_profile = "%(profile)s" in fmt
 
@@ -310,11 +317,9 @@
                             super(SatMemoryHandler, self).emit(self.format(record))
 
                     hdlr = SatMemoryHandler(options)
-                    log.handlers[
-                        handler
-                    ] = (
-                        hdlr
-                    )  # we keep a reference to the handler to read the buffer later
+                    log.handlers[handler] = (
+                        hdlr  # we keep a reference to the handler to read the buffer later
+                    )
                     self._add_handler(root_logger, hdlr, can_colors=False)
                 elif handler == C.LOG_OPT_OUTPUT_FILE:
                     import os.path
--- a/libervia/backend/core/main.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/core/main.py	Wed Jun 19 18:44:57 2024 +0200
@@ -29,6 +29,7 @@
 from libervia import backend
 from libervia.backend.core.i18n import _, D_, language_switch
 from libervia.backend.core import patches
+
 patches.apply()
 from twisted.application import service
 from twisted.internet import defer
@@ -57,6 +58,7 @@
 
 log = getLogger(__name__)
 
+
 class LiberviaBackend(service.Service):
 
     def _init(self):
@@ -67,7 +69,7 @@
         # dynamic menus. key: callback_id, value: menu data (dictionnary)
         self._menus = {}
         self._menus_paths = {}  # path to id. key: (menu_type, lower case tuple of path),
-                                # value: menu id
+        # value: menu id
 
         # like initialised, but launched before init script is done, mainly useful for CLI
         # frontend, so it can be used in init script, while other frontends are waiting.
@@ -75,7 +77,7 @@
         self.initialised = defer.Deferred()
         self.profiles = {}
         self.plugins = {}
-        # map for short name to whole namespace,
+        # map for short name to whole namespace,
         # extended by plugins with register_namespace
         self.ns_map = {
             "x-data": xmpp.NS_X_DATA,
@@ -85,13 +87,10 @@
         self.memory = memory.Memory(self)
 
         # trigger are used to change Libervia behaviour
-        self.trigger = (
-            trigger.TriggerManager()
-        )
+        self.trigger = trigger.TriggerManager()
 
-        bridge_name = (
-            os.getenv("LIBERVIA_BRIDGE_NAME")
-            or self.memory.config_get("", "bridge", "dbus")
+        bridge_name = os.getenv("LIBERVIA_BRIDGE_NAME") or self.memory.config_get(
+            "", "bridge", "dbus"
         )
 
         bridge_module = dynamic_import.bridge(bridge_name)
@@ -120,7 +119,6 @@
         else:
             self.local_shared_path = None
 
-
         defer.ensureDeferred(self._post_init())
 
     @property
@@ -174,40 +172,58 @@
         self.bridge.register_method("entity_data_get", self.memory._get_entity_data)
         self.bridge.register_method("entities_data_get", self.memory._get_entities_data)
         self.bridge.register_method("profile_create", self.memory.create_profile)
-        self.bridge.register_method("profile_delete_async", self.memory.profile_delete_async)
+        self.bridge.register_method(
+            "profile_delete_async", self.memory.profile_delete_async
+        )
         self.bridge.register_method("profile_start_session", self.memory.start_session)
         self.bridge.register_method(
             "profile_is_session_started", self.memory._is_session_started
         )
-        self.bridge.register_method("profile_set_default", self.memory.profile_set_default)
+        self.bridge.register_method(
+            "profile_set_default", self.memory.profile_set_default
+        )
         self.bridge.register_method("connect", self._connect)
         self.bridge.register_method("disconnect", self.disconnect)
         self.bridge.register_method("contact_get", self._contact_get)
         self.bridge.register_method("contacts_get", self.contacts_get)
-        self.bridge.register_method("contacts_get_from_group", self.contacts_get_from_group)
+        self.bridge.register_method(
+            "contacts_get_from_group", self.contacts_get_from_group
+        )
         self.bridge.register_method("main_resource_get", self.memory._get_main_resource)
         self.bridge.register_method(
             "presence_statuses_get", self.memory._get_presence_statuses
         )
         self.bridge.register_method("sub_waiting_get", self.memory.sub_waiting_get)
         self.bridge.register_method("message_send", self._message_send)
-        self.bridge.register_method("message_encryption_start",
-                                    self._message_encryption_start)
-        self.bridge.register_method("message_encryption_stop",
-                                    self._message_encryption_stop)
-        self.bridge.register_method("message_encryption_get",
-                                    self._message_encryption_get)
-        self.bridge.register_method("encryption_namespace_get",
-                                    self._encryption_namespace_get)
-        self.bridge.register_method("encryption_plugins_get", self._encryption_plugins_get)
-        self.bridge.register_method("encryption_trust_ui_get", self._encryption_trust_ui_get)
+        self.bridge.register_method(
+            "message_encryption_start", self._message_encryption_start
+        )
+        self.bridge.register_method(
+            "message_encryption_stop", self._message_encryption_stop
+        )
+        self.bridge.register_method(
+            "message_encryption_get", self._message_encryption_get
+        )
+        self.bridge.register_method(
+            "encryption_namespace_get", self._encryption_namespace_get
+        )
+        self.bridge.register_method(
+            "encryption_plugins_get", self._encryption_plugins_get
+        )
+        self.bridge.register_method(
+            "encryption_trust_ui_get", self._encryption_trust_ui_get
+        )
         self.bridge.register_method("config_get", self._get_config)
         self.bridge.register_method("param_set", self.param_set)
         self.bridge.register_method("param_get_a", self.memory.get_string_param_a)
         self.bridge.register_method("private_data_get", self.memory._private_data_get)
         self.bridge.register_method("private_data_set", self.memory._private_data_set)
-        self.bridge.register_method("private_data_delete", self.memory._private_data_delete)
-        self.bridge.register_method("param_get_a_async", self.memory.async_get_string_param_a)
+        self.bridge.register_method(
+            "private_data_delete", self.memory._private_data_delete
+        )
+        self.bridge.register_method(
+            "param_get_a_async", self.memory.async_get_string_param_a
+        )
         self.bridge.register_method(
             "params_values_from_category_get_async",
             self.memory._get_params_values_from_category,
@@ -216,7 +232,9 @@
         self.bridge.register_method(
             "params_categories_get", self.memory.params_categories_get
         )
-        self.bridge.register_method("params_register_app", self.memory.params_register_app)
+        self.bridge.register_method(
+            "params_register_app", self.memory.params_register_app
+        )
         self.bridge.register_method("history_get", self.memory._history_get)
         self.bridge.register_method("presence_set", self._set_presence)
         self.bridge.register_method("subscription", self.subscription)
@@ -242,13 +260,18 @@
         self.bridge.register_method("namespaces_get", self.get_namespaces)
         self.bridge.register_method("image_check", self._image_check)
         self.bridge.register_method("image_resize", self._image_resize)
-        self.bridge.register_method("image_generate_preview", self._image_generate_preview)
+        self.bridge.register_method(
+            "image_generate_preview", self._image_generate_preview
+        )
         self.bridge.register_method("image_convert", self._image_convert)
         self.bridge.register_method("notification_add", self.memory._add_notification)
         self.bridge.register_method("notifications_get", self.memory._get_notifications)
-        self.bridge.register_method("notification_delete", self.memory._delete_notification)
-        self.bridge.register_method("notifications_expired_clean", self.memory._notifications_expired_clean)
-
+        self.bridge.register_method(
+            "notification_delete", self.memory._delete_notification
+        )
+        self.bridge.register_method(
+            "notifications_expired_clean", self.memory._notifications_expired_clean
+        )
 
         await self.memory.initialise()
         self.common_cache = cache.Cache(self, None)
@@ -277,10 +300,7 @@
             else:
                 log.info(f"Running init script {init_script!r}.")
                 try:
-                    await async_process.run(
-                        str(init_script),
-                        verbose=True
-                    )
+                    await async_process.run(str(init_script), verbose=True)
                 except RuntimeError as e:
                     log.error(f"Init script failed: {e}")
                     self.stopService()
@@ -292,15 +312,18 @@
         # profile autoconnection must be done after self.initialised is called because
         # start_session waits for it.
         autoconnect_dict = await self.memory.storage.get_ind_param_values(
-            category='Connection', name='autoconnect_backend',
+            category="Connection",
+            name="autoconnect_backend",
         )
         profiles_autoconnect = [p for p, v in autoconnect_dict.items() if C.bool(v)]
         if not self.trigger.point("profilesAutoconnect", profiles_autoconnect):
             return
         if profiles_autoconnect:
-            log.info(D_(
-                "Following profiles will be connected automatically: {profiles}"
-                ).format(profiles= ', '.join(profiles_autoconnect)))
+            log.info(
+                D_(
+                    "Following profiles will be connected automatically: {profiles}"
+                ).format(profiles=", ".join(profiles_autoconnect))
+            )
         connect_d_list = []
         for profile in profiles_autoconnect:
             connect_d_list.append(defer.ensureDeferred(self.connect(profile)))
@@ -312,8 +335,8 @@
                     profile = profiles_autoconnect[0]
                     log.warning(
                         _("Can't autoconnect profile {profile}: {reason}").format(
-                            profile = profile,
-                            reason = result)
+                            profile=profile, reason=result
+                        )
                     )
 
     def _add_base_menus(self):
@@ -342,7 +365,8 @@
                 init_path = plug_path / f"__init__.{C.PLUGIN_EXT}"
                 if not init_path.exists():
                     log.warning(
-                        f"{plug_path} doesn't appear to be a package, can't load it")
+                        f"{plug_path} doesn't appear to be a package, can't load it"
+                    )
                     continue
                 plug_name = plug_path.name
             elif plug_path.is_file():
@@ -350,12 +374,12 @@
                     continue
                 plug_name = plug_path.stem
             else:
-                log.warning(
-                    f"{plug_path} is not a file or a dir, ignoring it")
+                log.warning(f"{plug_path} is not a file or a dir, ignoring it")
                 continue
             if not plug_name.isidentifier():
                 log.warning(
-                    f"{plug_name!r} is not a valid name for a plugin, ignoring it")
+                    f"{plug_name!r} is not a valid name for a plugin, ignoring it"
+                )
                 continue
             plugin_path = f"libervia.backend.plugins.{plug_name}"
             try:
@@ -364,9 +388,7 @@
                 self._unimport_plugin(plugin_path)
                 log.warning(
                     "Can't import plugin [{path}] because of an unavailale third party "
-                    "module:\n{msg}".format(
-                        path=plugin_path, msg=e
-                    )
+                    "module:\n{msg}".format(path=plugin_path, msg=e)
                 )
                 continue
             except exceptions.CancelError as e:
@@ -450,9 +472,7 @@
         else:
             if not import_name in plugins_to_import:
                 if optional:
-                    log.warning(
-                        _("Recommended plugin not found: {}").format(import_name)
-                    )
+                    log.warning(_("Recommended plugin not found: {}").format(import_name))
                     return
                 msg = "Dependency not found: {}".format(import_name)
                 log.error(msg)
@@ -521,7 +541,8 @@
         return defer.ensureDeferred(self.connect(profile, password, options))
 
     async def connect(
-        self, profile, password="", options=None, max_retries=C.XMPP_MAX_RETRIES):
+        self, profile, password="", options=None, max_retries=C.XMPP_MAX_RETRIES
+    ):
         """Connect a profile (i.e. connect client.component to XMPP server)
 
         Retrieve the individual parameters, authenticate the profile
@@ -761,16 +782,14 @@
         try:
             return self.ns_map[short_name]
         except KeyError:
-            raise exceptions.NotFound("namespace {short_name} is not registered"
-                                      .format(short_name=short_name))
+            raise exceptions.NotFound(
+                "namespace {short_name} is not registered".format(short_name=short_name)
+            )
 
     def get_session_infos(self, profile_key):
         """compile interesting data on current profile session"""
         client = self.get_client(profile_key)
-        data = {
-            "jid": client.jid.full(),
-            "started": str(int(client.started))
-            }
+        data = {"jid": client.jid.full(), "started": str(int(client.started))}
         return defer.succeed(data)
 
     def _get_devices_infos(self, bare_jid, profile_key):
@@ -808,11 +827,11 @@
                 "resource": resource,
             }
             try:
-                presence = cache_data['presence']
+                presence = cache_data["presence"]
             except KeyError:
                 pass
             else:
-                res_data['presence'] = {
+                res_data["presence"] = {
                     "show": presence.show,
                     "priority": presence.priority,
                     "statuses": presence.statuses,
@@ -821,12 +840,14 @@
             disco = await self.get_disco_infos(client, res_jid)
 
             for (category, type_), name in disco.identities.items():
-                identities = res_data.setdefault('identities', [])
-                identities.append({
-                    "name": name,
-                    "category": category,
-                    "type": type_,
-                })
+                identities = res_data.setdefault("identities", [])
+                identities.append(
+                    {
+                        "name": name,
+                        "category": category,
+                        "type": type_,
+                    }
+                )
 
             ret_data.append(res_data)
 
@@ -857,7 +878,7 @@
         """
         report = image.check(self, path, max_size=(300, 300))
 
-        if not report['too_large']:
+        if not report["too_large"]:
             # in the unlikely case that image is already smaller than a preview
             preview_path = path
         else:
@@ -867,17 +888,14 @@
             filename = f"{uid}{path.suffix.lower()}"
             metadata = client.cache.get_metadata(uid=uid)
             if metadata is not None:
-                preview_path = metadata['path']
+                preview_path = metadata["path"]
             else:
                 with client.cache.cache_data(
-                    source='HOST_PREVIEW',
-                    uid=uid,
-                    filename=filename) as cache_f:
+                    source="HOST_PREVIEW", uid=uid, filename=filename
+                ) as cache_f:
 
                     preview_path = await image.resize(
-                        path,
-                        new_size=report['recommended_size'],
-                        dest=cache_f
+                        path, new_size=report["recommended_size"], dest=cache_f
                     )
 
         return preview_path
@@ -922,23 +940,19 @@
             metadata = cache.get_metadata(uid=uid)
             if metadata is not None:
                 # there is already a conversion for this image in cache
-                return metadata['path']
+                return metadata["path"]
             else:
                 with cache.cache_data(
-                    source='HOST_IMAGE_CONVERT',
-                    uid=uid,
-                    filename=filename) as cache_f:
+                    source="HOST_IMAGE_CONVERT", uid=uid, filename=filename
+                ) as cache_f:
 
                     converted_path = await image.convert(
-                        source,
-                        dest=cache_f,
-                        extra=extra
+                        source, dest=cache_f, extra=extra
                     )
                 return converted_path
         else:
             return await image.convert(source, dest, extra)
 
-
     # local dirs
 
     def get_local_path(
@@ -998,18 +1012,19 @@
     def register_encryption_plugin(self, *args, **kwargs):
         return encryption.EncryptionHandler.register_plugin(*args, **kwargs)
 
-    def _message_encryption_start(self, to_jid_s, namespace, replace=False,
-                                profile_key=C.PROF_KEY_NONE):
+    def _message_encryption_start(
+        self, to_jid_s, namespace, replace=False, profile_key=C.PROF_KEY_NONE
+    ):
         client = self.get_client(profile_key)
         to_jid = jid.JID(to_jid_s)
         return defer.ensureDeferred(
-            client.encryption.start(to_jid, namespace or None, replace))
+            client.encryption.start(to_jid, namespace or None, replace)
+        )
 
     def _message_encryption_stop(self, to_jid_s, profile_key=C.PROF_KEY_NONE):
         client = self.get_client(profile_key)
         to_jid = jid.JID(to_jid_s)
-        return defer.ensureDeferred(
-            client.encryption.stop(to_jid))
+        return defer.ensureDeferred(client.encryption.stop(to_jid))
 
     def _message_encryption_get(self, to_jid_s, profile_key=C.PROF_KEY_NONE):
         client = self.get_client(profile_key)
@@ -1024,42 +1039,48 @@
         plugins = encryption.EncryptionHandler.getPlugins()
         ret = []
         for p in plugins:
-            ret.append({
-                "name": p.name,
-                "namespace": p.namespace,
-                "priority": p.priority,
-                "directed": p.directed,
-                })
+            ret.append(
+                {
+                    "name": p.name,
+                    "namespace": p.namespace,
+                    "priority": p.priority,
+                    "directed": p.directed,
+                }
+            )
         return data_format.serialise(ret)
 
     def _encryption_trust_ui_get(self, to_jid_s, namespace, profile_key):
         client = self.get_client(profile_key)
         to_jid = jid.JID(to_jid_s)
         d = defer.ensureDeferred(
-            client.encryption.get_trust_ui(to_jid, namespace=namespace or None))
+            client.encryption.get_trust_ui(to_jid, namespace=namespace or None)
+        )
         d.addCallback(lambda xmlui: xmlui.toXml())
         return d
 
     ## XMPP methods ##
 
     def _message_send(
-            self, to_jid_s, message, subject=None, mess_type="auto", extra_s="",
-            profile_key=C.PROF_KEY_NONE):
+        self,
+        to_jid_s,
+        message,
+        subject=None,
+        mess_type="auto",
+        extra_s="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.get_client(profile_key)
         to_jid = jid.JID(to_jid_s)
         return client.sendMessage(
-            to_jid,
-            message,
-            subject,
-            mess_type,
-            data_format.deserialise(extra_s)
+            to_jid, message, subject, mess_type, data_format.deserialise(extra_s)
         )
 
     def _set_presence(self, to="", show="", statuses=None, profile_key=C.PROF_KEY_NONE):
         return self.presence_set(jid.JID(to) if to else None, show, statuses, profile_key)
 
-    def presence_set(self, to_jid=None, show="", statuses=None,
-                    profile_key=C.PROF_KEY_NONE):
+    def presence_set(
+        self, to_jid=None, show="", statuses=None, profile_key=C.PROF_KEY_NONE
+    ):
         """Send our presence information"""
         if statuses is None:
             statuses = {}
@@ -1116,7 +1137,7 @@
     def contact_update(self, client, to_jid, name, groups):
         """update a contact in roster list"""
         roster_item = RosterItem(to_jid)
-        roster_item.name = name or u''
+        roster_item.name = name or ""
         roster_item.groups = set(groups)
         if not self.trigger.point("roster_update", client, roster_item):
             return
@@ -1167,28 +1188,46 @@
     def find_features_set(self, *args, **kwargs):
         return self.memory.disco.find_features_set(*args, **kwargs)
 
-    def _find_by_features(self, namespaces, identities, bare_jids, service, roster, own_jid,
-                        local_device, profile_key):
+    def _find_by_features(
+        self,
+        namespaces,
+        identities,
+        bare_jids,
+        service,
+        roster,
+        own_jid,
+        local_device,
+        profile_key,
+    ):
         client = self.get_client(profile_key)
         identities = [tuple(i) for i in identities] if identities else None
-        return defer.ensureDeferred(self.find_by_features(
-            client, namespaces, identities, bare_jids, service, roster, own_jid,
-            local_device))
+        return defer.ensureDeferred(
+            self.find_by_features(
+                client,
+                namespaces,
+                identities,
+                bare_jids,
+                service,
+                roster,
+                own_jid,
+                local_device,
+            )
+        )
 
     async def find_by_features(
         self,
         client: SatXMPPEntity,
         namespaces: List[str],
-        identities: Optional[List[Tuple[str, str]]]=None,
-        bare_jids: bool=False,
-        service: bool=True,
-        roster: bool=True,
-        own_jid: bool=True,
-        local_device: bool=False
+        identities: Optional[List[Tuple[str, str]]] = None,
+        bare_jids: bool = False,
+        service: bool = True,
+        roster: bool = True,
+        own_jid: bool = True,
+        local_device: bool = False,
     ) -> Tuple[
         Dict[jid.JID, Tuple[str, str, str]],
         Dict[jid.JID, Tuple[str, str, str]],
-        Dict[jid.JID, Tuple[str, str, str]]
+        Dict[jid.JID, Tuple[str, str, str]],
     ]:
         """Retrieve all services or contacts managing a set a features
 
@@ -1224,19 +1263,25 @@
         if service:
             services_jids = await self.find_features_set(client, namespaces)
             services_jids = list(services_jids)  # we need a list to map results below
-            services_infos  = await defer.DeferredList(
-                [self.get_disco_infos(client, service_jid) for service_jid in services_jids]
+            services_infos = await defer.DeferredList(
+                [
+                    self.get_disco_infos(client, service_jid)
+                    for service_jid in services_jids
+                ]
             )
 
             for idx, (success, infos) in enumerate(services_infos):
                 service_jid = services_jids[idx]
                 if not success:
                     log.warning(
-                        _("Can't find features for service {service_jid}, ignoring")
-                        .format(service_jid=service_jid.full()))
+                        _(
+                            "Can't find features for service {service_jid}, ignoring"
+                        ).format(service_jid=service_jid.full())
+                    )
                     continue
-                if (identities is not None
-                    and not set(infos.identities.keys()).issuperset(identities)):
+                if identities is not None and not set(infos.identities.keys()).issuperset(
+                    identities
+                ):
                     continue
                 found_identities = [
                     (cat, type_, name or "")
@@ -1291,8 +1336,10 @@
                 full_jid = full_jids[idx]
                 if not success:
                     log.warning(
-                        _("Can't retrieve {full_jid} infos, ignoring")
-                        .format(full_jid=full_jid.full()))
+                        _("Can't retrieve {full_jid} infos, ignoring").format(
+                            full_jid=full_jid.full()
+                        )
+                    )
                     continue
                 if infos.features.issuperset(namespaces):
                     if identities is not None and not set(
@@ -1476,21 +1523,16 @@
         return callback_id
 
     def remove_callback(self, callback_id):
-        """ Remove a previously registered callback
-        @param callback_id: id returned by [register_callback] """
+        """Remove a previously registered callback
+        @param callback_id: id returned by [register_callback]"""
         log.debug("Removing callback [%s]" % callback_id)
         del self._cb_map[callback_id]
 
     def _action_launch(
-        self,
-        callback_id: str,
-        data_s: str,
-        profile_key: str
+        self, callback_id: str, data_s: str, profile_key: str
     ) -> defer.Deferred:
         d = self.launch_callback(
-            callback_id,
-            data_format.deserialise(data_s),
-            profile_key
+            callback_id, data_format.deserialise(data_s), profile_key
         )
         d.addCallback(data_format.serialise)
         return d
@@ -1499,7 +1541,7 @@
         self,
         callback_id: str,
         data: Optional[dict] = None,
-        profile_key: str = C.PROF_KEY_NONE
+        profile_key: str = C.PROF_KEY_NONE,
     ) -> defer.Deferred:
         """Launch a specific callback
 
@@ -1568,8 +1610,14 @@
         """
         return tuple((p.lower().strip() for p in path))
 
-    def import_menu(self, path, callback, security_limit=C.NO_SECURITY_LIMIT,
-                   help_string="", type_=C.MENU_GLOBAL):
+    def import_menu(
+        self,
+        path,
+        callback,
+        security_limit=C.NO_SECURITY_LIMIT,
+        help_string="",
+        type_=C.MENU_GLOBAL,
+    ):
         r"""register a new menu for frontends
 
         @param path(iterable[unicode]): path to go to the menu
@@ -1677,13 +1725,20 @@
 
         return ret
 
-    def _launch_menu(self, menu_type, path, data=None, security_limit=C.NO_SECURITY_LIMIT,
-                    profile_key=C.PROF_KEY_NONE):
+    def _launch_menu(
+        self,
+        menu_type,
+        path,
+        data=None,
+        security_limit=C.NO_SECURITY_LIMIT,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.get_client(profile_key)
         return self.launch_menu(client, menu_type, path, data, security_limit)
 
-    def launch_menu(self, client, menu_type, path, data=None,
-        security_limit=C.NO_SECURITY_LIMIT):
+    def launch_menu(
+        self, client, menu_type, path, data=None, security_limit=C.NO_SECURITY_LIMIT
+    ):
         """launch action a menu action
 
         @param menu_type(unicode): type of menu to launch
--- a/libervia/backend/core/xmpp.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/core/xmpp.py	Wed Jun 19 18:44:57 2024 +0200
@@ -91,6 +91,7 @@
 
 class SatXMPPEntity(core_types.SatXMPPEntity):
     """Common code for Client and Component"""
+
     # profile is added there when start_connection begins and removed when it is finished
     profiles_connecting = set()
 
@@ -102,9 +103,11 @@
         clientConnectionFailed_ori = factory.clientConnectionFailed
         clientConnectionLost_ori = factory.clientConnectionLost
         factory.clientConnectionFailed = partial(
-            self.connection_terminated, term_type="failed", cb=clientConnectionFailed_ori)
+            self.connection_terminated, term_type="failed", cb=clientConnectionFailed_ori
+        )
         factory.clientConnectionLost = partial(
-            self.connection_terminated, term_type="lost", cb=clientConnectionLost_ori)
+            self.connection_terminated, term_type="lost", cb=clientConnectionLost_ori
+        )
 
         factory.maxRetries = max_retries
         factory.maxDelay = 30
@@ -115,11 +118,11 @@
         self.host_app = host_app
         self.cache = cache.Cache(host_app, profile)
         self.mess_id2uid = {}  # map from message id to uid used in history.
-                               # Key: (full_jid, message_id) Value: uid
+        # Key: (full_jid, message_id) Value: uid
         # this Deferred fire when entity is connected
         self.conn_deferred = defer.Deferred()
         self._progress_cb = {}  # callback called when a progress is requested
-                                # (key = progress id)
+        # (key = progress id)
         self.actions = {}  # used to keep track of actions for retrieval (key = action_id)
         self.encryption = encryption.EncryptionHandler(self)
 
@@ -148,9 +151,7 @@
 
             # profile_connecting/profile_connected methods handling
 
-            timer = connection_timer[plugin] = {
-                "total": 0
-            }
+            timer = connection_timer[plugin] = {"total": 0}
             # profile connecting is called right now (before actually starting client)
             connecting_cb = getattr(plugin, "profile_connecting", None)
             if connecting_cb is not None:
@@ -187,9 +188,7 @@
 
     @staticmethod
     async def _run_profile_connected(
-        callback: Callable,
-        entity: "SatXMPPEntity",
-        timer: Dict[str, float]
+        callback: Callable, entity: "SatXMPPEntity", timer: Dict[str, float]
     ) -> None:
         connected_start = time.time()
         await utils.as_deferred(callback, entity)
@@ -217,16 +216,15 @@
                 )
             except ValueError:
                 log.debug(_("Can't parse port value, using default value"))
-                port = (
-                    None
-                )  # will use default value 5222 or be retrieved from a DNS SRV record
+                port = None  # will use default value 5222 or be retrieved from a DNS SRV record
 
             password = await host.memory.param_get_a_async(
                 "Password", "Connection", profile_key=profile
             )
 
             entity_jid_s = await host.memory.param_get_a_async(
-                "JabberID", "Connection", profile_key=profile)
+                "JabberID", "Connection", profile_key=profile
+            )
             entity_jid = jid.JID(entity_jid_s)
 
             if not entity_jid.resource and not cls.is_component and entity_jid.user:
@@ -235,32 +233,43 @@
                 # reconnection. we only do that for client and if there is a user part, to
                 # let server decide for anonymous login
                 resource_dict = await host.memory.storage.get_privates(
-                    "core:xmpp", ["resource"] , profile=profile)
+                    "core:xmpp", ["resource"], profile=profile
+                )
                 try:
                     resource = resource_dict["resource"]
                 except KeyError:
                     resource = f"{C.APP_NAME_FILE}.{shortuuid.uuid()}"
                     await host.memory.storage.set_private_value(
-                        "core:xmpp", "resource", resource, profile=profile)
+                        "core:xmpp", "resource", resource, profile=profile
+                    )
 
-                log.info(_("We'll use the stable resource {resource}").format(
-                    resource=resource))
+                log.info(
+                    _("We'll use the stable resource {resource}").format(
+                        resource=resource
+                    )
+                )
                 entity_jid.resource = resource
 
             if profile in host.profiles:
                 if host.profiles[profile].is_connected():
                     raise exceptions.InternalError(
                         f"There is already a connected profile of name {profile!r} in "
-                        f"host")
-                log.debug(
-                    "removing unconnected profile {profile!r}")
+                        f"host"
+                    )
+                log.debug("removing unconnected profile {profile!r}")
                 del host.profiles[profile]
             entity = host.profiles[profile] = cls(
-                host, profile, entity_jid, password,
-                host.memory.param_get_a(C.FORCE_SERVER_PARAM, "Connection",
-                                      profile_key=profile) or None,
-                port, max_retries,
+                host,
+                profile,
+                entity_jid,
+                password,
+                host.memory.param_get_a(
+                    C.FORCE_SERVER_PARAM, "Connection", profile_key=profile
                 )
+                or None,
+                port,
+                max_retries,
+            )
 
             await entity.encryption.load_sessions()
 
@@ -315,7 +324,7 @@
                 plugins_by_timer = sorted(
                     connection_timer,
                     key=lambda p: connection_timer[p]["total"],
-                    reverse=True
+                    reverse=True,
                 )
                 # total is the addition of all connecting and connected, doesn't really
                 # reflect the real loading time as connected are launched in a
@@ -441,8 +450,9 @@
         # we save connector because it may be deleted when connection will be dropped
         # if reconnection is disabled
         self._saved_connector = connector
-        if reason is not None and not isinstance(reason.value,
-                                                 internet_error.ConnectionDone):
+        if reason is not None and not isinstance(
+            reason.value, internet_error.ConnectionDone
+        ):
             try:
                 reason_str = str(reason.value)
             except Exception:
@@ -496,8 +506,7 @@
     def _connected(self, xs):
         send_hooks = []
         receive_hooks = []
-        self.host_app.trigger.point(
-            "stream_hooks", self, receive_hooks, send_hooks)
+        self.host_app.trigger.point("stream_hooks", self, receive_hooks, send_hooks)
         for hook in receive_hooks:
             xs.add_hook(C.STREAM_HOOK_RECEIVE, hook)
         for hook in send_hooks:
@@ -529,11 +538,14 @@
                 try:
                     if err.value.args[0][0][2] == "certificate verify failed":
                         err = exceptions.InvalidCertificate(
-                            _("Your server certificate is not valid "
-                              "(its identity can't be checked).\n\n"
-                              "This should never happen and may indicate that "
-                              "somebody is trying to spy on you.\n"
-                              "Please contact your server administrator."))
+                            _(
+                                "Your server certificate is not valid "
+                                "(its identity can't be checked).\n\n"
+                                "This should never happen and may indicate that "
+                                "somebody is trying to spy on you.\n"
+                                "Please contact your server administrator."
+                            )
+                        )
                         self.factory.stopTrying()
                         try:
                             # with invalid certificate, we should not retry to connect
@@ -615,7 +627,7 @@
     def generate_message_xml(
         self,
         data: core_types.MessageData,
-        post_xml_treatments: Optional[defer.Deferred] = None
+        post_xml_treatments: Optional[defer.Deferred] = None,
     ) -> core_types.MessageData:
         """Generate <message/> stanza from message data
 
@@ -710,14 +722,22 @@
         #      This is intented for e2e encryption which doesn't do full stanza
         #      encryption (e.g. OTR)
         #      This trigger point can't cancel the method
-        await self.host_app.trigger.async_point("send_message_data", self, mess_data,
-            triggers_no_cancel=True)
+        await self.host_app.trigger.async_point(
+            "send_message_data", self, mess_data, triggers_no_cancel=True
+        )
         await self.a_send(mess_data["xml"])
         return mess_data
 
     def sendMessage(
-            self, to_jid, message, subject=None, mess_type="auto", extra=None, uid=None,
-            no_trigger=False):
+        self,
+        to_jid,
+        message,
+        subject=None,
+        mess_type="auto",
+        extra=None,
+        uid=None,
+        no_trigger=False,
+    ):
         r"""Send a message to an entity
 
         @param to_jid(jid.JID): destinee of the message
@@ -797,18 +817,27 @@
             ):
                 return defer.succeed(None)
 
-        log.debug(_("Sending message (type {type}, to {to})")
-                    .format(type=data["type"], to=to_jid.full()))
+        log.debug(
+            _("Sending message (type {type}, to {to})").format(
+                type=data["type"], to=to_jid.full()
+            )
+        )
 
-        pre_xml_treatments.addCallback(lambda __: self.generate_message_xml(data, post_xml_treatments))
+        pre_xml_treatments.addCallback(
+            lambda __: self.generate_message_xml(data, post_xml_treatments)
+        )
         pre_xml_treatments.addCallback(lambda __: post_xml_treatments)
         pre_xml_treatments.addErrback(self._cancel_error_trap)
         post_xml_treatments.addCallback(
             lambda __: defer.ensureDeferred(self.send_message_data(data))
         )
         if send_only:
-            log.debug(_("Triggers, storage and echo have been inhibited by the "
-                        "'send_only' parameter"))
+            log.debug(
+                _(
+                    "Triggers, storage and echo have been inhibited by the "
+                    "'send_only' parameter"
+                )
+            )
         else:
             self.add_post_xml_callbacks(post_xml_treatments)
             post_xml_treatments.addErrback(self._cancel_error_trap)
@@ -823,7 +852,8 @@
     def is_message_printable(self, mess_data):
         """Return True if a message contain payload to show in frontends"""
         return (
-            mess_data["message"] or mess_data["subject"]
+            mess_data["message"]
+            or mess_data["subject"]
             or mess_data["extra"].get(C.KEY_ATTACHMENTS)
             or mess_data["type"] == C.MESS_TYPE_INFO
         )
@@ -849,10 +879,16 @@
 
     def message_get_bridge_args(self, data):
         """Generate args to use with bridge from data dict"""
-        return (data["uid"], data["timestamp"], data["from"].full(),
-                data["to"].full(), data["message"], data["subject"],
-                data["type"], data_format.serialise(data["extra"]))
-
+        return (
+            data["uid"],
+            data["timestamp"],
+            data["from"].full(),
+            data["to"].full(),
+            data["message"],
+            data["subject"],
+            data["type"],
+            data_format.serialise(data["extra"]),
+        )
 
     def message_send_to_bridge(self, data):
         """Send message to bridge, so frontends can display it
@@ -869,8 +905,7 @@
 
                 # We send back the message, so all frontends are aware of it
                 self.host_app.bridge.message_new(
-                    *self.message_get_bridge_args(data),
-                    profile=self.profile
+                    *self.message_get_bridge_args(data), profile=self.profile
                 )
             else:
                 log.warning(_("No message found"))
@@ -900,8 +935,16 @@
     trigger_suffix = ""
     is_component = False
 
-    def __init__(self, host_app, profile, user_jid, password, host=None,
-                 port=C.XMPP_C2S_PORT, max_retries=C.XMPP_MAX_RETRIES):
+    def __init__(
+        self,
+        host_app,
+        profile,
+        user_jid,
+        password,
+        host=None,
+        port=C.XMPP_C2S_PORT,
+        max_retries=C.XMPP_MAX_RETRIES,
+    ):
         # XXX: DNS SRV records are checked when the host is not specified.
         # If no SRV record is found, the host is directly extracted from the JID.
         self.started = time.time()
@@ -933,12 +976,14 @@
                 host_data = None
             if host_data is not None:
                 log.info(
-                    "using {host}:{port} for host {host_ori} as requested in config"
-                    .format(host_ori=user_jid.host, host=host, port=port)
+                    "using {host}:{port} for host {host_ori} as requested in config".format(
+                        host_ori=user_jid.host, host=host, port=port
+                    )
                 )
 
         self.check_certificate = host_app.memory.param_get_a(
-            "check_certificate", "Connection", profile_key=profile)
+            "check_certificate", "Connection", profile_key=profile
+        )
 
         if self.check_certificate:
             tls_required, configurationForTLS = True, None
@@ -947,18 +992,26 @@
             configurationForTLS = ssl.CertificateOptions(trustRoot=None)
 
         wokkel_client.XMPPClient.__init__(
-            self, user_jid, password, host or None, port or C.XMPP_C2S_PORT,
-            tls_required=tls_required, configurationForTLS=configurationForTLS
+            self,
+            user_jid,
+            password,
+            host or None,
+            port or C.XMPP_C2S_PORT,
+            tls_required=tls_required,
+            configurationForTLS=configurationForTLS,
         )
         SatXMPPEntity.__init__(self, host_app, profile, max_retries)
 
         if not self.check_certificate:
-            msg = (_("Certificate validation is deactivated, this is unsecure and "
+            msg = _(
+                "Certificate validation is deactivated, this is unsecure and "
                 "somebody may be spying on you. If you have no good reason to disable "
-                "certificate validation, please activate \"Check certificate\" in your "
-                "settings in \"Connection\" tab."))
-            xml_tools.quick_note(host_app, self, msg, _("Security notice"),
-                level = C.XMLUI_DATA_LVL_WARNING)
+                'certificate validation, please activate "Check certificate" in your '
+                'settings in "Connection" tab.'
+            )
+            xml_tools.quick_note(
+                host_app, self, msg, _("Security notice"), level=C.XMLUI_DATA_LVL_WARNING
+            )
 
     @property
     def server_jid(self):
@@ -1002,10 +1055,7 @@
         post_xml_treatments.addCallback(self.message_send_to_bridge)
 
     def feedback(
-        self,
-        to_jid: jid.JID,
-        message: str,
-        extra: Optional[ExtraDict] = None
+        self, to_jid: jid.JID, message: str, extra: Optional[ExtraDict] = None
     ) -> None:
         """Send message to frontends
 
@@ -1045,16 +1095,24 @@
     """
 
     trigger_suffix = (
-        "Component"
-    )  # used for to distinguish some trigger points set in SatXMPPEntity
+        "Component"  # used for to distinguish some trigger points set in SatXMPPEntity
+    )
     is_component = True
     # XXX: set to True from entry plugin to keep messages in history for sent messages
     sendHistory = False
     # XXX: same as sendHistory but for received messaged
     receiveHistory = False
 
-    def __init__(self, host_app, profile, component_jid, password, host=None, port=None,
-                 max_retries=C.XMPP_MAX_RETRIES):
+    def __init__(
+        self,
+        host_app,
+        profile,
+        component_jid,
+        password,
+        host=None,
+        port=None,
+        max_retries=C.XMPP_MAX_RETRIES,
+    ):
         self.started = time.time()
         if port is None:
             port = C.XMPP_COMPONENT_PORT
@@ -1178,12 +1236,12 @@
         @param to_jid: destination JID of the request
         """
         try:
-            unescape = self.host_app.plugins['XEP-0106'].unescape
+            unescape = self.host_app.plugins["XEP-0106"].unescape
         except KeyError:
             raise exceptions.MissingPlugin("Plugin XEP-0106 is needed to retrieve owner")
         else:
             user = unescape(to_jid.user)
-        if '@' in user:
+        if "@" in user:
             # a full jid is specified
             return jid.JID(user)
         else:
@@ -1199,7 +1257,7 @@
         @param iq_elt: IQ stanza sent from the requested
         @return: owner and peer JIDs
         """
-        to_jid = jid.JID(iq_elt['to'])
+        to_jid = jid.JID(iq_elt["to"])
         if to_jid.user:
             owner = self.get_owner_from_jid(to_jid)
         else:
@@ -1227,7 +1285,7 @@
     def __init__(self, host):
         xmppim.MessageProtocol.__init__(self)
         self.host = host
-        self.messages_queue  = defer.DeferredQueue()
+        self.messages_queue = defer.DeferredQueue()
 
     def setHandlerParent(self, parent):
         super().setHandlerParent(parent)
@@ -1252,23 +1310,31 @@
         @return(dict): message data
         """
         if message_elt.name != "message":
-            log.warning(_(
-                "parse_message used with a non <message/> stanza, ignoring: {xml}"
-                .format(xml=message_elt.toXml())))
+            log.warning(
+                _(
+                    "parse_message used with a non <message/> stanza, ignoring: {xml}".format(
+                        xml=message_elt.toXml()
+                    )
+                )
+            )
             return {}
 
         if message_elt.uri == None:
             # xmlns may be None when wokkel element parsing strip out root namespace
             self.normalize_ns(message_elt, None)
         elif message_elt.uri != C.NS_CLIENT:
-            log.warning(_(
-                "received <message> with a wrong namespace: {xml}"
-                .format(xml=message_elt.toXml())))
+            log.warning(
+                _(
+                    "received <message> with a wrong namespace: {xml}".format(
+                        xml=message_elt.toXml()
+                    )
+                )
+            )
 
         client = self.parent
 
-        if not message_elt.hasAttribute('to'):
-            message_elt['to'] = client.jid.full()
+        if not message_elt.hasAttribute("to"):
+            message_elt["to"] = client.jid.full()
 
         message = {}
         subject = {}
@@ -1306,8 +1372,11 @@
         except AttributeError:
             # message_elt._received_timestamp should have been set in onMessage
             # but if parse_message is called directly, it can be missing
-            log.debug("missing received timestamp for {message_elt}".format(
-                message_elt=message_elt))
+            log.debug(
+                "missing received timestamp for {message_elt}".format(
+                    message_elt=message_elt
+                )
+            )
             received_timestamp = time.time()
 
         try:
@@ -1321,10 +1390,9 @@
             if parsed_delay.sender:
                 data["delay_sender"] = parsed_delay.sender.full()
 
-        self.host.trigger.point("message_parse", client,  message_elt, data)
+        self.host.trigger.point("message_parse", client, message_elt, data)
         return data
 
-
     def onMessage(self, message_elt: domish.Element) -> None:
         message_elt._received_timestamp = time.time()
         self.messages_queue.put(message_elt)
@@ -1347,9 +1415,7 @@
                 log.exception(f"Can't process message {message_elt.toXml()}")
 
     def _on_processing_timeout(
-        self,
-        message_elt: domish.Element,
-        async_point_d: defer.Deferred
+        self, message_elt: domish.Element, async_point_d: defer.Deferred
     ) -> None:
         log.error(
             "Processing of following message took too long, cancelling:"
@@ -1358,9 +1424,7 @@
         async_point_d.cancel()
 
     async def process_message(
-        self,
-        client: SatXMPPEntity,
-        message_elt: domish.Element
+        self, client: SatXMPPEntity, message_elt: domish.Element
     ) -> None:
         # TODO: handle threads
         if not "from" in message_elt.attributes:
@@ -1372,16 +1436,15 @@
 
         # plugin can add their treatments to this deferred
         post_treat = defer.Deferred()
-        async_point_d = defer.ensureDeferred(self.host.trigger.async_point(
-            "message_received", client, message_elt, post_treat
-        ))
+        async_point_d = defer.ensureDeferred(
+            self.host.trigger.async_point(
+                "message_received", client, message_elt, post_treat
+            )
+        )
         # message_received triggers block the messages queue, so they must not take too
         # long to proceed.
         delayed_call = reactor.callLater(
-            10,
-            self._on_processing_timeout,
-            message_elt,
-            async_point_d
+            10, self._on_processing_timeout, message_elt, async_point_d
         )
         trigger_ret_continue = await async_point_d
 
@@ -1411,14 +1474,15 @@
 
     def complete_attachments(self, data: MessageData) -> MessageData:
         """Complete missing metadata of attachments"""
-        for attachment in data['extra'].get(C.KEY_ATTACHMENTS, []):
+        for attachment in data["extra"].get(C.KEY_ATTACHMENTS, []):
             if "name" not in attachment and "url" in attachment:
-                name = (Path(unquote(urlparse(attachment['url']).path)).name
-                        or C.FILE_DEFAULT_NAME)
+                name = (
+                    Path(unquote(urlparse(attachment["url"]).path)).name
+                    or C.FILE_DEFAULT_NAME
+                )
                 attachment["name"] = name
-            if ((C.KEY_ATTACHMENTS_MEDIA_TYPE not in attachment
-                 and "name" in attachment)):
-                media_type = mimetypes.guess_type(attachment['name'], strict=False)[0]
+            if C.KEY_ATTACHMENTS_MEDIA_TYPE not in attachment and "name" in attachment:
+                media_type = mimetypes.guess_type(attachment["name"], strict=False)[0]
                 if media_type:
                     attachment[C.KEY_ATTACHMENTS_MEDIA_TYPE] = media_type
         return data
@@ -1432,8 +1496,9 @@
             if self.parent.is_message_printable(data):
                 return await self.host.memory.add_to_history(self.parent, data)
             else:
-                log.debug("not storing empty message to history: {data}"
-                    .format(data=data))
+                log.debug(
+                    "not storing empty message to history: {data}".format(data=data)
+                )
         return data
 
     def bridge_signal(self, data: MessageData) -> MessageData:
@@ -1459,8 +1524,9 @@
                     profile=self.parent.profile,
                 )
             else:
-                log.debug("Discarding bridge signal for empty message: {data}".format(
-                    data=data))
+                log.debug(
+                    "Discarding bridge signal for empty message: {data}".format(data=data)
+                )
         return data
 
 
@@ -1480,7 +1546,7 @@
     @property
     def versioning(self):
         """True if server support roster versioning"""
-        return (NS_ROSTER_VER, 'ver') in self.parent.xmlstream.features
+        return (NS_ROSTER_VER, "ver") in self.parent.xmlstream.features
 
     @property
     def roster_cache(self):
@@ -1547,7 +1613,7 @@
 
     @defer.inlineCallbacks
     def request_roster(self):
-        """Ask the server for Roster list """
+        """Ask the server for Roster list"""
         if self.versioning:
             log.info(_("our server support roster versioning, we use it"))
             roster_cache = self.roster_cache
@@ -1565,7 +1631,7 @@
                     if roster_jid_s == ROSTER_VER_KEY:
                         continue
                     roster_jid = jid.JID(roster_jid_s)
-                    roster_item_elt = generic.parseXml(roster_item_elt_s.encode('utf-8'))
+                    roster_item_elt = generic.parseXml(roster_item_elt_s.encode("utf-8"))
                     roster_item = xmppim.RosterItem.fromElement(roster_item_elt)
                     self._jids[roster_jid] = roster_item
                     self._register_item(roster_item)
@@ -1576,8 +1642,10 @@
         log.debug("requesting roster")
         roster = yield self.getRoster(version=version)
         if roster is None:
-            log.debug("empty roster result received, we'll get roster item with roster "
-                      "pushes")
+            log.debug(
+                "empty roster result received, we'll get roster item with roster "
+                "pushes"
+            )
         else:
             # a full roster is received
             self._groups.clear()
@@ -1589,9 +1657,7 @@
                     # may change in the future
                     log.info(
                         "Removing contact {} from roster because there is no presence "
-                        "subscription".format(
-                            item.jid
-                        )
+                        "subscription".format(item.jid)
                     )
                     self.removeItem(item.entity)  # FIXME: to be checked
                 else:
@@ -1646,8 +1712,10 @@
         self._jids[entity] = item
         self._register_item(item)
         self.host.bridge.contact_new(
-            entity.full(), self.get_attributes(item), list(item.groups),
-            self.parent.profile
+            entity.full(),
+            self.get_attributes(item),
+            list(item.groups),
+            self.parent.profile,
         )
 
     def removeReceived(self, request):
@@ -1710,7 +1778,8 @@
         """Return True if jid is in roster"""
         if not isinstance(entity_jid, jid.JID):
             raise exceptions.InternalError(
-                f"a JID is expected, not {type(entity_jid)}: {entity_jid!r}")
+                f"a JID is expected, not {type(entity_jid)}: {entity_jid!r}"
+            )
         return entity_jid in self._jids
 
     def is_subscribed_from(self, entity_jid: jid.JID) -> bool:
@@ -1825,7 +1894,12 @@
             statuses[C.PRESENCE_STATUSES_DEFAULT] = statuses.pop(None)
 
         if not self.host.trigger.point(
-            "presence_received", self.parent, entity, C.PRESENCE_UNAVAILABLE, 0, statuses,
+            "presence_received",
+            self.parent,
+            entity,
+            C.PRESENCE_UNAVAILABLE,
+            0,
+            statuses,
         ):
             return
 
@@ -1833,9 +1907,7 @@
         # if the entity is not known yet in this session or is already unavailable,
         # there is no need to send an unavailable signal
         try:
-            presence = self.host.memory.get_entity_datum(
-                self.client, entity, "presence"
-            )
+            presence = self.host.memory.get_entity_datum(self.client, entity, "presence")
         except (KeyError, exceptions.UnknownEntityError):
             # the entity has not been seen yet in this session
             pass
@@ -1951,9 +2023,11 @@
     def getDiscoInfo(self, requestor, target, nodeIdentifier=""):
         # those features are implemented in Wokkel (or sat_tmp.wokkel)
         # and thus are always available
-        return [disco.DiscoFeature(NS_X_DATA),
-                disco.DiscoFeature(NS_XML_ELEMENT),
-                disco.DiscoFeature(NS_DISCO_INFO)]
+        return [
+            disco.DiscoFeature(NS_X_DATA),
+            disco.DiscoFeature(NS_XML_ELEMENT),
+            disco.DiscoFeature(NS_DISCO_INFO),
+        ]
 
     def getDiscoItems(self, requestor, target, nodeIdentifier=""):
         return []
@@ -1985,6 +2059,7 @@
 @implementer(iwokkel.IDisco)
 class SatIdentityHandler(XMPPHandler):
     """Manage disco Identity of SàT."""
+
     # TODO: dynamic identity update (see docstring). Note that a XMPP entity can have
     #       several identities
 
--- a/libervia/backend/memory/crypto.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/crypto.py	Wed Jun 19 18:44:57 2024 +0200
@@ -57,7 +57,9 @@
 
         cipher = Cipher(algorithms.AES(key), modes.CFB8(iv), backend=crypto_backend)
         encryptor = cipher.encryptor()
-        encrypted = encryptor.update(BlockCipher.pad(text.encode())) + encryptor.finalize()
+        encrypted = (
+            encryptor.update(BlockCipher.pad(text.encode())) + encryptor.finalize()
+        )
         return b64encode(iv + encrypted).decode()
 
     @staticmethod
@@ -149,7 +151,7 @@
             length=16,
             salt=salt,
             iterations=1000,
-            backend=crypto_backend
+            backend=crypto_backend,
         )
         key = kdf.derive(password.encode())
         return b64encode(salt + key).decode()
--- a/libervia/backend/memory/disco.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/disco.py	Wed Jun 19 18:44:57 2024 +0200
@@ -111,7 +111,7 @@
 
 
 class Discovery(object):
-    """ Manage capabilities of entities """
+    """Manage capabilities of entities"""
 
     def __init__(self, host):
         self.host = host
@@ -172,7 +172,7 @@
         category: str,
         type_: str,
         jid_: Optional[jid.JID] = None,
-        node: str = ""
+        node: str = "",
     ) -> bool:
         """Tell if an entity has the requested identity
 
@@ -264,9 +264,9 @@
         if jid_ == client.server_jid and not node:
             # we cache items only for our own server and if node is not set
             try:
-                items = self.host.memory.entity_data_get(
-                    client, jid_, ["DISCO_ITEMS"]
-                )["DISCO_ITEMS"]
+                items = self.host.memory.entity_data_get(client, jid_, ["DISCO_ITEMS"])[
+                    "DISCO_ITEMS"
+                ]
                 log.debug("[%s] disco items are in cache" % jid_.full())
                 if not use_cache:
                     # we ignore cache, so we pretend we haven't found it
@@ -274,9 +274,7 @@
             except (KeyError, exceptions.UnknownEntityError):
                 log.debug("Caching [%s] disco items" % jid_.full())
                 items = yield client.disco.requestItems(jid_, nodeIdentifier=node)
-                self.host.memory.update_entity_data(
-                    client, jid_, "DISCO_ITEMS", items
-                )
+                self.host.memory.update_entity_data(client, jid_, "DISCO_ITEMS", items)
         else:
             try:
                 items = yield client.disco.requestItems(jid_, nodeIdentifier=node)
@@ -381,7 +379,7 @@
         return d
 
     def generate_hash(self, services):
-        """ Generate a unique hash for given service
+        """Generate a unique hash for given service
 
         hash algorithm is the one described in XEP-0115
         @param services: iterable of disco.DiscoIdentity/disco.DiscoFeature, as returned by discoHandler.info
@@ -413,22 +411,22 @@
 
         # extensions
         ext = list(services.extensions.values())
-        ext.sort(key=lambda f: f.formNamespace.encode('utf-8'))
+        ext.sort(key=lambda f: f.formNamespace.encode("utf-8"))
         for extension in ext:
-            s.append(extension.formNamespace.encode('utf-8'))
+            s.append(extension.formNamespace.encode("utf-8"))
             s.append(b"<")
             fields = extension.fieldList
-            fields.sort(key=lambda f: f.var.encode('utf-8'))
+            fields.sort(key=lambda f: f.var.encode("utf-8"))
             for field in fields:
-                s.append(field.var.encode('utf-8'))
+                s.append(field.var.encode("utf-8"))
                 s.append(b"<")
-                values = [v.encode('utf-8') for v in field.values]
+                values = [v.encode("utf-8") for v in field.values]
                 values.sort()
                 for value in values:
                     s.append(value)
                     s.append(b"<")
 
-        cap_hash = b64encode(sha1(b"".join(s)).digest()).decode('utf-8')
+        cap_hash = b64encode(sha1(b"".join(s)).digest()).decode("utf-8")
         log.debug(_("Capability hash generated: [{cap_hash}]").format(cap_hash=cap_hash))
         return cap_hash
 
@@ -464,11 +462,16 @@
 
             extensions[form_type or ""] = fields
 
-        defer.returnValue((
-            [str(f) for f in disco_infos.features],
-            [(cat, type_, name or "")
-             for (cat, type_), name in list(disco_infos.identities.items())],
-            extensions))
+        defer.returnValue(
+            (
+                [str(f) for f in disco_infos.features],
+                [
+                    (cat, type_, name or "")
+                    for (cat, type_), name in list(disco_infos.identities.items())
+                ],
+                extensions,
+            )
+        )
 
     def items2tuples(self, disco_items):
         """convert disco items to tuple of strings
@@ -486,7 +489,7 @@
     def _disco_items(
         self, entity_jid_s, node="", use_cache=True, profile_key=C.PROF_KEY_NONE
     ):
-        """ Discovery method for the bridge
+        """Discovery method for the bridge
 
         @param entity_jid_s: entity we want to discover
         @param node(unicode): optional node to use
--- a/libervia/backend/memory/encryption.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/encryption.py	Wed Jun 19 18:44:57 2024 +0200
@@ -23,7 +23,11 @@
 from twisted.words.protocols.jabber import jid
 from twisted.internet import defer
 from twisted.python import failure
-from libervia.backend.core.core_types import EncryptionPlugin, EncryptionSession, MessageData
+from libervia.backend.core.core_types import (
+    EncryptionPlugin,
+    EncryptionSession,
+    MessageData,
+)
 from libervia.backend.core.i18n import D_, _
 from libervia.backend.core.constants import Const as C
 from libervia.backend.core import exceptions
@@ -38,13 +42,15 @@
 
 class EncryptionHandler:
     """Class to handle encryption sessions for a client"""
+
     plugins = []  # plugin able to encrypt messages
 
     def __init__(self, client):
         self.client = client
         self._sessions = {}  # bare_jid ==> encryption_data
         self._stored_session = persistent.PersistentDict(
-            "core:encryption", profile=client.profile)
+            "core:encryption", profile=client.profile
+        )
 
     @property
     def host(self):
@@ -63,9 +69,11 @@
             for idx, (success, err) in enumerate(result):
                 if not success:
                     entity_jid_s, namespace = list(self._stored_session.items())[idx]
-                    log.warning(_(
-                        "Could not restart {namespace!r} encryption with {entity}: {err}"
-                        ).format(namespace=namespace, entity=entity_jid_s, err=err))
+                    log.warning(
+                        _(
+                            "Could not restart {namespace!r} encryption with {entity}: {err}"
+                        ).format(namespace=namespace, entity=entity_jid_s, err=err)
+                    )
             log.info(_("encryption sessions restored"))
 
     @classmethod
@@ -105,7 +113,8 @@
             name=name,
             namespace=namespace,
             priority=priority,
-            directed=directed)
+            directed=directed,
+        )
         cls.plugins.append(plugin)
         cls.plugins.sort(key=lambda p: p.priority)
         log.info(_("Encryption plugin registered: {name}").format(name=name))
@@ -119,9 +128,11 @@
         try:
             return next(p for p in cls.plugins if p.namespace == namespace)
         except StopIteration:
-            raise exceptions.NotFound(_(
-                "Can't find requested encryption plugin: {namespace}").format(
-                    namespace=namespace))
+            raise exceptions.NotFound(
+                _("Can't find requested encryption plugin: {namespace}").format(
+                    namespace=namespace
+                )
+            )
 
     @classmethod
     def get_namespaces(cls):
@@ -139,9 +150,9 @@
         for p in cls.plugins:
             if p.name.lower() == name.lower():
                 return p.namespace
-        raise exceptions.NotFound(_(
-            "Can't find a plugin with the name \"{name}\".".format(
-                name=name)))
+        raise exceptions.NotFound(
+            _('Can\'t find a plugin with the name "{name}".'.format(name=name))
+        )
 
     def get_bridge_data(self, session):
         """Retrieve session data serialized for bridge.
@@ -150,12 +161,11 @@
         @return (unicode): serialized data for bridge
         """
         if session is None:
-            return ''
-        plugin = session['plugin']
-        bridge_data = {'name': plugin.name,
-                       'namespace': plugin.namespace}
-        if 'directed_devices' in session:
-            bridge_data['directed_devices'] = session['directed_devices']
+            return ""
+        plugin = session["plugin"]
+        bridge_data = {"name": plugin.name, "namespace": plugin.namespace}
+        if "directed_devices" in session:
+            bridge_data["directed_devices"] = session["directed_devices"]
 
         return data_format.serialise(bridge_data)
 
@@ -205,8 +215,12 @@
             it will be replaced by the new one
         """
         if not self.plugins:
-            raise exceptions.NotFound(_("No encryption plugin is registered, "
-                                        "an encryption session can't be started"))
+            raise exceptions.NotFound(
+                _(
+                    "No encryption plugin is registered, "
+                    "an encryption session can't be started"
+                )
+            )
 
         if namespace is None:
             plugin = self.plugins[0]
@@ -218,9 +232,12 @@
             # we have already an encryption session with this contact
             former_plugin = self._sessions[bare_jid]["plugin"]
             if former_plugin.namespace == namespace:
-                log.info(_("Session with {bare_jid} is already encrypted with {name}. "
-                           "Nothing to do.").format(
-                               bare_jid=bare_jid, name=former_plugin.name))
+                log.info(
+                    _(
+                        "Session with {bare_jid} is already encrypted with {name}. "
+                        "Nothing to do."
+                    ).format(bare_jid=bare_jid, name=former_plugin.name)
+                )
                 return
 
             if replace:
@@ -229,9 +246,10 @@
                 del self._sessions[bare_jid]
                 await self._stop_encryption(former_plugin, entity)
             else:
-                msg = (_("Session with {bare_jid} is already encrypted with {name}. "
-                         "Please stop encryption session before changing algorithm.")
-                       .format(bare_jid=bare_jid, name=plugin.name))
+                msg = _(
+                    "Session with {bare_jid} is already encrypted with {name}. "
+                    "Please stop encryption session before changing algorithm."
+                ).format(bare_jid=bare_jid, name=plugin.name)
                 log.warning(msg)
                 raise exceptions.ConflictError(msg)
 
@@ -241,34 +259,44 @@
                 entity.resource = self.host.memory.main_resource_get(self.client, entity)
                 if not entity.resource:
                     raise exceptions.NotFound(
-                        _("No resource found for {destinee}, can't encrypt with {name}")
-                        .format(destinee=entity.full(), name=plugin.name))
-                log.info(_("No resource specified to encrypt with {name}, using "
-                           "{destinee}.").format(destinee=entity.full(),
-                                                  name=plugin.name))
+                        _(
+                            "No resource found for {destinee}, can't encrypt with {name}"
+                        ).format(destinee=entity.full(), name=plugin.name)
+                    )
+                log.info(
+                    _(
+                        "No resource specified to encrypt with {name}, using "
+                        "{destinee}."
+                    ).format(destinee=entity.full(), name=plugin.name)
+                )
             # indicate that we encrypt only for some devices
-            directed_devices = data['directed_devices'] = [entity.resource]
+            directed_devices = data["directed_devices"] = [entity.resource]
         elif entity.resource:
             raise ValueError(_("{name} encryption must be used with bare jids."))
 
         await self._start_encryption(plugin, entity)
         self._sessions[entity.userhostJID()] = data
-        log.info(_("Encryption session has been set for {entity_jid} with "
-                   "{encryption_name}").format(
-                   entity_jid=entity.full(), encryption_name=plugin.name))
+        log.info(
+            _(
+                "Encryption session has been set for {entity_jid} with "
+                "{encryption_name}"
+            ).format(entity_jid=entity.full(), encryption_name=plugin.name)
+        )
         self.host.bridge.message_encryption_started(
-            entity.full(),
-            self.get_bridge_data(data),
-            self.client.profile)
-        msg = D_("Encryption session started: your messages with {destinee} are "
-                 "now end to end encrypted using {name} algorithm.").format(
-                 destinee=entity.full(), name=plugin.name)
-        directed_devices = data.get('directed_devices')
+            entity.full(), self.get_bridge_data(data), self.client.profile
+        )
+        msg = D_(
+            "Encryption session started: your messages with {destinee} are "
+            "now end to end encrypted using {name} algorithm."
+        ).format(destinee=entity.full(), name=plugin.name)
+        directed_devices = data.get("directed_devices")
         if directed_devices:
-            msg += "\n" + D_("Message are encrypted only for {nb_devices} device(s): "
-                              "{devices_list}.").format(
-                              nb_devices=len(directed_devices),
-                              devices_list = ', '.join(directed_devices))
+            msg += "\n" + D_(
+                "Message are encrypted only for {nb_devices} device(s): "
+                "{devices_list}."
+            ).format(
+                nb_devices=len(directed_devices), devices_list=", ".join(directed_devices)
+            )
 
         self.client.feedback(bare_jid, msg)
 
@@ -283,29 +311,38 @@
         session = self.getSession(entity.userhostJID())
         if not session:
             raise failure.Failure(
-                exceptions.NotFound(_("There is no encryption session with this "
-                                      "entity.")))
-        plugin = session['plugin']
+                exceptions.NotFound(
+                    _("There is no encryption session with this " "entity.")
+                )
+            )
+        plugin = session["plugin"]
         if namespace is not None and plugin.namespace != namespace:
-            raise exceptions.InternalError(_(
-                "The encryption session is not run with the expected plugin: encrypted "
-                "with {current_name} and was expecting {expected_name}").format(
-                current_name=session['plugin'].namespace,
-                expected_name=namespace))
+            raise exceptions.InternalError(
+                _(
+                    "The encryption session is not run with the expected plugin: encrypted "
+                    "with {current_name} and was expecting {expected_name}"
+                ).format(
+                    current_name=session["plugin"].namespace, expected_name=namespace
+                )
+            )
         if entity.resource:
             try:
-                directed_devices = session['directed_devices']
+                directed_devices = session["directed_devices"]
             except KeyError:
-                raise exceptions.NotFound(_(
-                    "There is a session for the whole entity (i.e. all devices of the "
-                    "entity), not a directed one. Please use bare jid if you want to "
-                    "stop the whole encryption with this entity."))
+                raise exceptions.NotFound(
+                    _(
+                        "There is a session for the whole entity (i.e. all devices of the "
+                        "entity), not a directed one. Please use bare jid if you want to "
+                        "stop the whole encryption with this entity."
+                    )
+                )
 
             try:
                 directed_devices.remove(entity.resource)
             except ValueError:
-                raise exceptions.NotFound(_("There is no directed session with this "
-                                            "entity."))
+                raise exceptions.NotFound(
+                    _("There is no directed session with this " "entity.")
+                )
             else:
                 if not directed_devices:
                     # if we have no more directed device sessions,
@@ -319,18 +356,24 @@
             del self._sessions[entity.userhostJID()]
             await self._stop_encryption(plugin, entity)
 
-        log.info(_("encryption session stopped with entity {entity}").format(
-            entity=entity.full()))
+        log.info(
+            _("encryption session stopped with entity {entity}").format(
+                entity=entity.full()
+            )
+        )
         self.host.bridge.message_encryption_stopped(
             entity.full(),
-            {'name': plugin.name,
-             'namespace': plugin.namespace,
+            {
+                "name": plugin.name,
+                "namespace": plugin.namespace,
             },
-            self.client.profile)
-        msg = D_("Encryption session finished: your messages with {destinee} are "
-                 "NOT end to end encrypted anymore.\nYour server administrators or "
-                 "{destinee} server administrators will be able to read them.").format(
-                 destinee=entity.full())
+            self.client.profile,
+        )
+        msg = D_(
+            "Encryption session finished: your messages with {destinee} are "
+            "NOT end to end encrypted anymore.\nYour server administrators or "
+            "{destinee} server administrators will be able to read them."
+        ).format(destinee=entity.full())
 
         self.client.feedback(entity, msg)
 
@@ -375,16 +418,19 @@
             session = self.getSession(entity_jid)
             if not session:
                 raise exceptions.NotFound(
-                    "No encryption session currently active for {entity_jid}"
-                    .format(entity_jid=entity_jid.full()))
-            plugin = session['plugin']
+                    "No encryption session currently active for {entity_jid}".format(
+                        entity_jid=entity_jid.full()
+                    )
+                )
+            plugin = session["plugin"]
         else:
             plugin = self.get_plugin(namespace)
         try:
             get_trust_ui = plugin.instance.get_trust_ui
         except AttributeError:
             raise NotImplementedError(
-                "Encryption plugin doesn't handle trust management UI")
+                "Encryption plugin doesn't handle trust management UI"
+            )
         else:
             return utils.as_deferred(get_trust_ui, self.client, entity_jid)
 
@@ -393,32 +439,32 @@
     @classmethod
     def _import_menus(cls, host):
         host.import_menu(
-             (D_("Encryption"), D_("unencrypted (plain text)")),
-             partial(cls._on_menu_unencrypted, host=host),
-             security_limit=0,
-             help_string=D_("End encrypted session"),
-             type_=C.MENU_SINGLE,
+            (D_("Encryption"), D_("unencrypted (plain text)")),
+            partial(cls._on_menu_unencrypted, host=host),
+            security_limit=0,
+            help_string=D_("End encrypted session"),
+            type_=C.MENU_SINGLE,
         )
         for plg in cls.getPlugins():
             host.import_menu(
-                 (D_("Encryption"), plg.name),
-                 partial(cls._on_menu_name, host=host, plg=plg),
-                 security_limit=0,
-                 help_string=D_("Start {name} session").format(name=plg.name),
-                 type_=C.MENU_SINGLE,
+                (D_("Encryption"), plg.name),
+                partial(cls._on_menu_name, host=host, plg=plg),
+                security_limit=0,
+                help_string=D_("Start {name} session").format(name=plg.name),
+                type_=C.MENU_SINGLE,
             )
             host.import_menu(
-                 (D_("Encryption"), D_("⛨ {name} trust").format(name=plg.name)),
-                 partial(cls._on_menu_trust, host=host, plg=plg),
-                 security_limit=0,
-                 help_string=D_("Manage {name} trust").format(name=plg.name),
-                 type_=C.MENU_SINGLE,
+                (D_("Encryption"), D_("⛨ {name} trust").format(name=plg.name)),
+                partial(cls._on_menu_trust, host=host, plg=plg),
+                security_limit=0,
+                help_string=D_("Manage {name} trust").format(name=plg.name),
+                type_=C.MENU_SINGLE,
             )
 
     @classmethod
     def _on_menu_unencrypted(cls, data, host, profile):
         client = host.get_client(profile)
-        peer_jid = jid.JID(data['jid']).userhostJID()
+        peer_jid = jid.JID(data["jid"]).userhostJID()
         d = defer.ensureDeferred(client.encryption.stop(peer_jid))
         d.addCallback(lambda __: {})
         return d
@@ -426,11 +472,12 @@
     @classmethod
     def _on_menu_name(cls, data, host, plg, profile):
         client = host.get_client(profile)
-        peer_jid = jid.JID(data['jid'])
+        peer_jid = jid.JID(data["jid"])
         if not plg.directed:
             peer_jid = peer_jid.userhostJID()
         d = defer.ensureDeferred(
-            client.encryption.start(peer_jid, plg.namespace, replace=True))
+            client.encryption.start(peer_jid, plg.namespace, replace=True)
+        )
         d.addCallback(lambda __: {})
         return d
 
@@ -438,22 +485,23 @@
     @defer.inlineCallbacks
     def _on_menu_trust(cls, data, host, plg, profile):
         client = host.get_client(profile)
-        peer_jid = jid.JID(data['jid']).userhostJID()
+        peer_jid = jid.JID(data["jid"]).userhostJID()
         ui = yield client.encryption.get_trust_ui(peer_jid, plg.namespace)
-        defer.returnValue({'xmlui': ui.toXml()})
+        defer.returnValue({"xmlui": ui.toXml()})
 
     ## Triggers ##
 
     def set_encryption_flag(self, mess_data):
         """Set "encryption" key in mess_data if session with destinee is encrypted"""
-        to_jid = mess_data['to']
+        to_jid = mess_data["to"]
         encryption = self._sessions.get(to_jid.userhostJID())
         if encryption is not None:
-            plugin = encryption['plugin']
+            plugin = encryption["plugin"]
             if mess_data["type"] == "groupchat" and plugin.directed:
                 raise exceptions.InternalError(
-                f"encryption flag must not be set for groupchat if encryption algorithm "
-                f"({encryption['plugin'].name}) is directed!")
+                    f"encryption flag must not be set for groupchat if encryption algorithm "
+                    f"({encryption['plugin'].name}) is directed!"
+                )
             mess_data[C.MESS_KEY_ENCRYPTION] = encryption
             self.mark_as_encrypted(mess_data, plugin.namespace)
 
@@ -467,26 +515,25 @@
         @param mess_data(dict): message data as used in post treat workflow
         @param namespace(str): namespace of the algorithm used for encrypting the message
         """
-        mess_data['extra'][C.MESS_KEY_ENCRYPTED] = True
-        from_bare_jid = mess_data['from'].userhostJID()
+        mess_data["extra"][C.MESS_KEY_ENCRYPTED] = True
+        from_bare_jid = mess_data["from"].userhostJID()
         if from_bare_jid != self.client.jid.userhostJID():
             session = self.getSession(from_bare_jid)
             if session is None:
                 # if we are currently unencrypted, we start a session automatically
                 # to avoid sending unencrypted messages in an encrypted context
-                log.info(_(
-                    "Starting e2e session with {peer_jid} as we receive encrypted "
-                    "messages")
-                    .format(peer_jid=from_bare_jid)
+                log.info(
+                    _(
+                        "Starting e2e session with {peer_jid} as we receive encrypted "
+                        "messages"
+                    ).format(peer_jid=from_bare_jid)
                 )
                 defer.ensureDeferred(self.start(from_bare_jid, namespace))
 
         return mess_data
 
     def is_encryption_requested(
-        self,
-        mess_data: MessageData,
-        namespace: Optional[str] = None
+        self, mess_data: MessageData, namespace: Optional[str] = None
     ) -> bool:
         """Helper method to check if encryption is requested in an outgoind message
 
@@ -499,7 +546,7 @@
         if encryption is None:
             return False
         # we get plugin even if namespace is None to be sure that the key exists
-        plugin = encryption['plugin']
+        plugin = encryption["plugin"]
         if namespace is None:
             return True
         return plugin.namespace == namespace
@@ -510,8 +557,7 @@
         @param mess_data(dict): message data
         @return (bool): True if the encrypted flag is present
         """
-        return mess_data['extra'].get(C.MESS_KEY_ENCRYPTED, False)
-
+        return mess_data["extra"].get(C.MESS_KEY_ENCRYPTED, False)
 
     def mark_as_trusted(self, mess_data):
         """Helper methor to mark a message as sent from a trusted entity.
@@ -530,5 +576,5 @@
         the plugin
         @param mess_data(dict): message data as used in post treat workflow
         """
-        mess_data['trusted'] = False
+        mess_data["trusted"] = False
         return mess_data
--- a/libervia/backend/memory/memory.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/memory.py	Wed Jun 19 18:44:57 2024 +0200
@@ -155,7 +155,7 @@
         raise NotImplementedError("You need do use new_session to create a session")
 
     def __delitem__(self, session_id):
-        """ delete the session data """
+        """delete the session data"""
         self._purge_session(session_id)
 
     def keys(self):
@@ -253,7 +253,6 @@
         self.admin_jids = set()
         self._file_path_lock = defer.DeferredLock()
 
-
     async def initialise(self):
         self.storage = Storage()
         await self.storage.initialise()
@@ -281,15 +280,11 @@
                     else:
                         self.admin_jids.add(admin_jid)
 
-
     ## Configuration ##
 
     def config_get(
-        self,
-        section: str|None,
-        name: str,
-        default: Any = None
-    ) -> str|list|dict:
+        self, section: str | None, name: str, default: Any = None
+    ) -> str | list | dict:
         """Get the main configuration option
 
         @param section: section of the config file (None or '' for DEFAULT)
@@ -348,7 +343,7 @@
     ## Profiles/Sessions management ##
 
     def start_session(self, password, profile):
-        """"Iniatialise session for a profile
+        """ "Iniatialise session for a profile
 
         @param password(unicode): profile session password
             or empty string is no password is set
@@ -375,8 +370,8 @@
                     session_d = self._entities_cache[profile]
                 except KeyError:
                     # else we do request the params
-                    session_d = self._entities_cache[profile] = self.load_individual_params(
-                        profile
+                    session_d = self._entities_cache[profile] = (
+                        self.load_individual_params(profile)
                     )
                     session_d.addCallback(create_session)
                 finally:
@@ -434,8 +429,9 @@
         )
         valid = PasswordHasher.verify(password, sat_cipher)
         if not valid:
-            log.warning(_("Authentication failure of profile {profile}").format(
-                profile=profile))
+            log.warning(
+                _("Authentication failure of profile {profile}").format(profile=profile)
+            )
             raise exceptions.PasswordError("The provided profile password doesn't match.")
         return await self.new_auth_session(password, profile)
 
@@ -467,9 +463,7 @@
             del self._entities_cache[profile]
         except KeyError:
             log.error(
-                _(
-                    "Trying to purge roster status cache for a profile not in memory: [%s]"
-                )
+                _("Trying to purge roster status cache for a profile not in memory: [%s]")
                 % profile
             )
 
@@ -554,9 +548,7 @@
             # be sure to call this after checking that the profile doesn't exist yet
 
             # generated once for all and saved in a PersistentDict
-            personal_key = BlockCipher.get_random_key(
-                base64=True
-            ).decode('utf-8')
+            personal_key = BlockCipher.get_random_key(base64=True).decode("utf-8")
             self.auth_sessions.new_session(
                 {C.MEMORY_CRYPTO_KEY: personal_key}, profile=name
             )  # will be encrypted by param_set
@@ -626,29 +618,42 @@
 
     def _history_get_serialise(self, history_data):
         return [
-            (uid, timestamp, from_jid, to_jid, message, subject, mess_type,
-             data_format.serialise(extra)) for uid, timestamp, from_jid, to_jid, message,
-            subject, mess_type, extra in history_data
+            (
+                uid,
+                timestamp,
+                from_jid,
+                to_jid,
+                message,
+                subject,
+                mess_type,
+                data_format.serialise(extra),
+            )
+            for uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra in history_data
         ]
 
-    def _history_get(self, from_jid_s, to_jid_s, limit=C.HISTORY_LIMIT_NONE, between=True,
-                    filters=None, profile=C.PROF_KEY_NONE):
+    def _history_get(
+        self,
+        from_jid_s,
+        to_jid_s,
+        limit=C.HISTORY_LIMIT_NONE,
+        between=True,
+        filters=None,
+        profile=C.PROF_KEY_NONE,
+    ):
         from_jid = jid.JID(from_jid_s) if from_jid_s else None
         to_jid = jid.JID(to_jid_s) if to_jid_s else None
-        d = self.history_get(
-            from_jid, to_jid, limit, between, filters, profile
-        )
+        d = self.history_get(from_jid, to_jid, limit, between, filters, profile)
         d.addCallback(self._history_get_serialise)
         return d
 
     def history_get(
         self,
-        from_jid: jid.JID|None,
-        to_jid: jid.JID|None,
+        from_jid: jid.JID | None,
+        to_jid: jid.JID | None,
         limit: int = C.HISTORY_LIMIT_NONE,
         between: bool = True,
-        filters: dict[str, str]|None = None,
-        profile: str = C.PROF_KEY_NONE
+        filters: dict[str, str] | None = None,
+        profile: str = C.PROF_KEY_NONE,
     ) -> defer.Deferred[list]:
         """Retrieve messages in history
 
@@ -671,7 +676,9 @@
             limit = None
         if limit == 0:
             return defer.succeed([])
-        return self.storage.history_get(from_jid, to_jid, limit, between, filters, profile)
+        return self.storage.history_get(
+            from_jid, to_jid, limit, between, filters, profile
+        )
 
     ## Statuses ##
 
@@ -714,9 +721,7 @@
         """
         client = self.host.get_client(profile_key)
         presence_data = PresenceTuple(show, priority, statuses)
-        self.update_entity_data(
-            client, entity_jid, "presence", presence_data
-        )
+        self.update_entity_data(client, entity_jid, "presence", presence_data)
         if entity_jid.resource and show != C.PRESENCE_UNAVAILABLE:
             # If a resource is available, bare jid should not have presence information
             try:
@@ -743,7 +748,9 @@
         # FIXME: is there a need to keep cache data for resources which are not connected anymore?
         if entity_jid.resource:
             raise ValueError(
-                "get_all_resources must be used with a bare jid (got {})".format(entity_jid)
+                "get_all_resources must be used with a bare jid (got {})".format(
+                    entity_jid
+                )
             )
         profile_cache = self._get_profile_cache(client)
         try:
@@ -791,7 +798,9 @@
         """
         if entity_jid.resource:
             raise ValueError(
-                "main_resource_get must be used with a bare jid (got {})".format(entity_jid)
+                "main_resource_get must be used with a bare jid (got {})".format(
+                    entity_jid
+                )
             )
         try:
             if self.host.plugins["XEP-0045"].is_joined_room(client, entity_jid):
@@ -857,9 +866,7 @@
                 full_jid.resource = resource
                 yield full_jid
 
-    def update_entity_data(
-        self, client, entity_jid, key, value, silent=False
-    ):
+    def update_entity_data(self, client, entity_jid, key, value, silent=False):
         """Set a misc data for an entity
 
         If key was registered with set_signal_on_update, a signal will be sent to frontends
@@ -884,10 +891,7 @@
             entity_data[key] = value
             if key in self._key_signals and not silent:
                 self.host.bridge.entity_data_updated(
-                    jid_.full(),
-                    key,
-                    data_format.serialise(value),
-                    client.profile
+                    jid_.full(), key, data_format.serialise(value), client.profile
                 )
 
     def del_entity_datum(self, client, entity_jid, key):
@@ -910,9 +914,7 @@
             try:
                 entity_data = profile_cache[jid_.userhostJID()][jid_.resource]
             except KeyError:
-                raise exceptions.UnknownEntityError(
-                    "Entity {} not in cache".format(jid_)
-                )
+                raise exceptions.UnknownEntityError("Entity {} not in cache".format(jid_))
             try:
                 del entity_data[key]
             except KeyError as e:
@@ -927,7 +929,7 @@
             client, [jid.JID(jid_) for jid_ in entities_jids], keys_list
         )
         return {
-            jid_.full(): {k: data_format.serialise(v) for k,v in data.items()}
+            jid_.full(): {k: data_format.serialise(v) for k, v in data.items()}
             for jid_, data in ret.items()
         }
 
@@ -980,7 +982,8 @@
 
     def _get_entity_data(self, entity_jid_s, keys_list=None, profile=C.PROF_KEY_NONE):
         return self.entity_data_get(
-            self.host.get_client(profile), jid.JID(entity_jid_s), keys_list)
+            self.host.get_client(profile), jid.JID(entity_jid_s), keys_list
+        )
 
     def entity_data_get(self, client, entity_jid, keys_list=None):
         """Get a list of cached values for entity
@@ -999,9 +1002,7 @@
             entity_data = profile_cache[entity_jid.userhostJID()][entity_jid.resource]
         except KeyError:
             raise exceptions.UnknownEntityError(
-                "Entity {} not in cache (was requesting {})".format(
-                    entity_jid, keys_list
-                )
+                "Entity {} not in cache (was requesting {})".format(entity_jid, keys_list)
             )
         if keys_list is None:
             return entity_data
@@ -1146,7 +1147,9 @@
 
     ## Parameters ##
 
-    def get_string_param_a(self, name, category, attr="value", profile_key=C.PROF_KEY_NONE):
+    def get_string_param_a(
+        self, name, category, attr="value", profile_key=C.PROF_KEY_NONE
+    ):
         return self.params.get_string_param_a(name, category, attr, profile_key)
 
     def param_get_a(self, name, category, attr="value", profile_key=C.PROF_KEY_NONE):
@@ -1172,13 +1175,20 @@
         )
 
     def async_get_string_param_a(
-        self, name, category, attribute="value", security_limit=C.NO_SECURITY_LIMIT,
-        profile_key=C.PROF_KEY_NONE):
+        self,
+        name,
+        category,
+        attribute="value",
+        security_limit=C.NO_SECURITY_LIMIT,
+        profile_key=C.PROF_KEY_NONE,
+    ):
 
         profile = self.get_profile_name(profile_key)
-        return defer.ensureDeferred(self.params.async_get_string_param_a(
-            name, category, attribute, security_limit, profile
-        ))
+        return defer.ensureDeferred(
+            self.params.async_get_string_param_a(
+                name, category, attribute, security_limit, profile
+            )
+        )
 
     def _get_params_ui(self, security_limit, app, extra_s, profile_key):
         return self.params._get_params_ui(security_limit, app, extra_s, profile_key)
@@ -1211,31 +1221,38 @@
         client = self.host.get_client(profile_key)
         # we accept any type
         data = data_format.deserialise(data_s, type_check=None)
-        return defer.ensureDeferred(self.storage.set_private_value(
-            namespace, key, data, binary=True, profile=client.profile))
+        return defer.ensureDeferred(
+            self.storage.set_private_value(
+                namespace, key, data, binary=True, profile=client.profile
+            )
+        )
 
     def _private_data_get(self, namespace, key, profile_key):
         client = self.host.get_client(profile_key)
         d = defer.ensureDeferred(
             self.storage.get_privates(
-                namespace, [key], binary=True, profile=client.profile)
+                namespace, [key], binary=True, profile=client.profile
+            )
         )
         d.addCallback(lambda data_dict: data_format.serialise(data_dict.get(key)))
         return d
 
     def _private_data_delete(self, namespace, key, profile_key):
         client = self.host.get_client(profile_key)
-        return defer.ensureDeferred(self.storage.del_private_value(
-            namespace, key, binary=True, profile=client.profile))
+        return defer.ensureDeferred(
+            self.storage.del_private_value(
+                namespace, key, binary=True, profile=client.profile
+            )
+        )
 
     ## Files ##
 
     def check_file_permission(
-            self,
-            file_data: dict,
-            peer_jid: Optional[jid.JID],
-            perms_to_check: Optional[Tuple[str]],
-            set_affiliation: bool = False
+        self,
+        file_data: dict,
+        peer_jid: Optional[jid.JID],
+        perms_to_check: Optional[Tuple[str]],
+        set_affiliation: bool = False,
     ) -> None:
         """Check that an entity has the right permission on a file
 
@@ -1256,7 +1273,7 @@
         peer_jid = peer_jid.userhostJID()
         if peer_jid == file_data["owner"]:
             if set_affiliation:
-                file_data['affiliation'] = 'owner'
+                file_data["affiliation"] = "owner"
             # the owner has all rights, nothing to check
             return
         if not C.ACCESS_PERMS.issuperset(perms_to_check):
@@ -1274,7 +1291,7 @@
                 # otherwise, we use public permission, as the parent directory will
                 # block anyway, this avoid to have to recursively change permissions for
                 # all sub directories/files when modifying a permission
-                if not file_data.get('parent'):
+                if not file_data.get("parent"):
                     raise exceptions.PermissionError()
                 else:
                     perm_type = C.ACCESS_TYPE_PUBLIC
@@ -1358,44 +1375,41 @@
     def get_file_affiliations(self, file_data: dict) -> Dict[jid.JID, str]:
         """Convert file access to pubsub like affiliations"""
         affiliations = {}
-        access_data = file_data['access']
+        access_data = file_data["access"]
 
         read_data = access_data.get(C.ACCESS_PERM_READ, {})
-        if read_data.get('type') == C.ACCESS_TYPE_WHITELIST:
-            for entity_jid_s in read_data['jids']:
+        if read_data.get("type") == C.ACCESS_TYPE_WHITELIST:
+            for entity_jid_s in read_data["jids"]:
                 entity_jid = jid.JID(entity_jid_s)
-                affiliations[entity_jid] = 'member'
+                affiliations[entity_jid] = "member"
 
         write_data = access_data.get(C.ACCESS_PERM_WRITE, {})
-        if write_data.get('type') == C.ACCESS_TYPE_WHITELIST:
-            for entity_jid_s in write_data['jids']:
+        if write_data.get("type") == C.ACCESS_TYPE_WHITELIST:
+            for entity_jid_s in write_data["jids"]:
                 entity_jid = jid.JID(entity_jid_s)
-                affiliations[entity_jid] = 'publisher'
+                affiliations[entity_jid] = "publisher"
 
-        owner = file_data.get('owner')
+        owner = file_data.get("owner")
         if owner:
-            affiliations[owner] = 'owner'
+            affiliations[owner] = "owner"
 
         return affiliations
 
     def _set_file_affiliations_update(
-        self,
-        access: dict,
-        file_data: dict,
-        affiliations: Dict[jid.JID, str]
+        self, access: dict, file_data: dict, affiliations: Dict[jid.JID, str]
     ) -> None:
         read_data = access.setdefault(C.ACCESS_PERM_READ, {})
-        if read_data.get('type') != C.ACCESS_TYPE_WHITELIST:
-            read_data['type'] = C.ACCESS_TYPE_WHITELIST
-            if 'jids' not in read_data:
-                read_data['jids'] = []
-        read_whitelist = read_data['jids']
+        if read_data.get("type") != C.ACCESS_TYPE_WHITELIST:
+            read_data["type"] = C.ACCESS_TYPE_WHITELIST
+            if "jids" not in read_data:
+                read_data["jids"] = []
+        read_whitelist = read_data["jids"]
         write_data = access.setdefault(C.ACCESS_PERM_WRITE, {})
-        if write_data.get('type') != C.ACCESS_TYPE_WHITELIST:
-            write_data['type'] = C.ACCESS_TYPE_WHITELIST
-            if 'jids' not in write_data:
-                write_data['jids'] = []
-        write_whitelist = write_data['jids']
+        if write_data.get("type") != C.ACCESS_TYPE_WHITELIST:
+            write_data["type"] = C.ACCESS_TYPE_WHITELIST
+            if "jids" not in write_data:
+                write_data["jids"] = []
+        write_whitelist = write_data["jids"]
         for entity_jid, affiliation in affiliations.items():
             entity_jid_s = entity_jid.full()
             if affiliation == "none":
@@ -1428,10 +1442,7 @@
                 raise ValueError(f"unknown affiliation: {affiliation!r}")
 
     async def set_file_affiliations(
-        self,
-        client,
-        file_data: dict,
-        affiliations: Dict[jid.JID, str]
+        self, client, file_data: dict, affiliations: Dict[jid.JID, str]
     ) -> None:
         """Apply pubsub like affiliation to file_data
 
@@ -1442,22 +1453,19 @@
             - "member" gives read permission only
             - "none" removes both read and write permissions
         """
-        file_id = file_data['id']
+        file_id = file_data["id"]
         await self.file_update(
             file_id,
-            'access',
+            "access",
             update_cb=partial(
                 self._set_file_affiliations_update,
                 file_data=file_data,
-                affiliations=affiliations
+                affiliations=affiliations,
             ),
         )
 
     def _set_file_access_model_update(
-        self,
-        access: dict,
-        file_data: dict,
-        access_model: str
+        self, access: dict, file_data: dict, access_model: str
     ) -> None:
         read_data = access.setdefault(C.ACCESS_PERM_READ, {})
         if access_model == "open":
@@ -1467,9 +1475,9 @@
         else:
             raise ValueError(f"unknown access model: {access_model}")
 
-        read_data['type'] = requested_type
-        if requested_type == C.ACCESS_TYPE_WHITELIST and 'jids' not in read_data:
-            read_data['jids'] = []
+        read_data["type"] = requested_type
+        if requested_type == C.ACCESS_TYPE_WHITELIST and "jids" not in read_data:
+            read_data["jids"] = []
 
     async def set_file_access_model(
         self,
@@ -1483,24 +1491,24 @@
             - "open": set public access to file/dir
             - "whitelist": set whitelist to file/dir
         """
-        file_id = file_data['id']
+        file_id = file_data["id"]
         await self.file_update(
             file_id,
-            'access',
+            "access",
             update_cb=partial(
                 self._set_file_access_model_update,
                 file_data=file_data,
-                access_model=access_model
+                access_model=access_model,
             ),
         )
 
     def get_files_owner(
-            self,
-            client,
-            owner: Optional[jid.JID],
-            peer_jid: Optional[jid.JID],
-            file_id: Optional[str] = None,
-            parent: Optional[str] = None
+        self,
+        client,
+        owner: Optional[jid.JID],
+        peer_jid: Optional[jid.JID],
+        file_id: Optional[str] = None,
+        parent: Optional[str] = None,
     ) -> jid.JID:
         """Get owner to use for a file operation
 
@@ -1526,10 +1534,26 @@
         return peer_jid.userhostJID()
 
     async def get_files(
-        self, client, peer_jid, file_id=None, version=None, parent=None, path=None,
-        type_=None, file_hash=None, hash_algo=None, name=None, namespace=None,
-        mime_type=None, public_id=None, owner=None, access=None, projection=None,
-        unique=False, perms_to_check=(C.ACCESS_PERM_READ,)):
+        self,
+        client,
+        peer_jid,
+        file_id=None,
+        version=None,
+        parent=None,
+        path=None,
+        type_=None,
+        file_hash=None,
+        hash_algo=None,
+        name=None,
+        namespace=None,
+        mime_type=None,
+        public_id=None,
+        owner=None,
+        access=None,
+        projection=None,
+        unique=False,
+        perms_to_check=(C.ACCESS_PERM_READ,),
+    ):
         """Retrieve files with with given filters
 
         @param peer_jid(jid.JID, None): jid trying to access the file
@@ -1628,11 +1652,27 @@
         return files
 
     async def set_file(
-        self, client, name, file_id=None, version="", parent=None, path=None,
-        type_=C.FILE_TYPE_FILE, file_hash=None, hash_algo=None, size=None,
-        namespace=None, mime_type=None, public_id=None, created=None, modified=None,
-        owner=None, access=None, extra=None, peer_jid=None,
-        perms_to_check=(C.ACCESS_PERM_WRITE,)
+        self,
+        client,
+        name,
+        file_id=None,
+        version="",
+        parent=None,
+        path=None,
+        type_=C.FILE_TYPE_FILE,
+        file_hash=None,
+        hash_algo=None,
+        size=None,
+        namespace=None,
+        mime_type=None,
+        public_id=None,
+        created=None,
+        modified=None,
+        owner=None,
+        access=None,
+        extra=None,
+        peer_jid=None,
+        perms_to_check=(C.ACCESS_PERM_WRITE,),
     ):
         """Set a file metadata
 
@@ -1694,7 +1734,7 @@
         else:
             mime_type = mime_type.lower()
         if public_id is not None:
-            assert len(public_id)>0
+            assert len(public_id) > 0
         if created is None:
             created = time.time()
         if namespace is not None:
@@ -1761,10 +1801,7 @@
         )
 
     async def file_get_used_space(
-        self,
-        client,
-        peer_jid: jid.JID,
-        owner: Optional[jid.JID] = None
+        self, client, peer_jid: jid.JID, owner: Optional[jid.JID] = None
     ) -> int:
         """Get space taken by all files owned by an entity
 
@@ -1797,7 +1834,7 @@
         peer_jid: jid.JID,
         recursive: bool,
         files_path: Path,
-        file_data: dict
+        file_data: dict,
     ):
         """Internal method to delete files/directories recursively
 
@@ -1807,43 +1844,56 @@
         @param files_path(unicode): path of the directory containing the actual files
         @param file_data(dict): data of the file to delete
         """
-        if file_data['owner'] != peer_jid:
+        if file_data["owner"] != peer_jid:
             raise exceptions.PermissionError(
-                "file {file_name} can't be deleted, {peer_jid} is not the owner"
-                .format(file_name=file_data['name'], peer_jid=peer_jid.full()))
-        if file_data['type'] == C.FILE_TYPE_DIRECTORY:
-            sub_files = yield self.get_files(client, peer_jid, parent=file_data['id'])
+                "file {file_name} can't be deleted, {peer_jid} is not the owner".format(
+                    file_name=file_data["name"], peer_jid=peer_jid.full()
+                )
+            )
+        if file_data["type"] == C.FILE_TYPE_DIRECTORY:
+            sub_files = yield self.get_files(client, peer_jid, parent=file_data["id"])
             if sub_files and not recursive:
                 raise exceptions.DataError(_("Can't delete directory, it is not empty"))
             # we first delete the sub-files
             for sub_file_data in sub_files:
-                if sub_file_data['type'] == C.FILE_TYPE_DIRECTORY:
-                    sub_file_path = files_path / sub_file_data['name']
+                if sub_file_data["type"] == C.FILE_TYPE_DIRECTORY:
+                    sub_file_path = files_path / sub_file_data["name"]
                 else:
                     sub_file_path = files_path
                 yield self._delete_file(
-                    client, peer_jid, recursive, sub_file_path, sub_file_data)
+                    client, peer_jid, recursive, sub_file_path, sub_file_data
+                )
             # then the directory itself
-            yield self.storage.file_delete(file_data['id'])
-        elif file_data['type'] == C.FILE_TYPE_FILE:
-            log.info(_("deleting file {name} with hash {file_hash}").format(
-                name=file_data['name'], file_hash=file_data['file_hash']))
-            yield self.storage.file_delete(file_data['id'])
+            yield self.storage.file_delete(file_data["id"])
+        elif file_data["type"] == C.FILE_TYPE_FILE:
+            log.info(
+                _("deleting file {name} with hash {file_hash}").format(
+                    name=file_data["name"], file_hash=file_data["file_hash"]
+                )
+            )
+            yield self.storage.file_delete(file_data["id"])
             references = yield self.get_files(
-                client, peer_jid, file_hash=file_data['file_hash'])
+                client, peer_jid, file_hash=file_data["file_hash"]
+            )
             if references:
                 log.debug("there are still references to the file, we keep it")
             else:
-                file_path = os.path.join(files_path, file_data['file_hash'])
-                log.info(_("no reference left to {file_path}, deleting").format(
-                    file_path=file_path))
+                file_path = os.path.join(files_path, file_data["file_hash"])
+                log.info(
+                    _("no reference left to {file_path}, deleting").format(
+                        file_path=file_path
+                    )
+                )
                 try:
                     os.unlink(file_path)
                 except FileNotFoundError:
-                    log.error(f"file at {file_path!r} doesn't exist but it was referenced in files database")
+                    log.error(
+                        f"file at {file_path!r} doesn't exist but it was referenced in files database"
+                    )
         else:
-            raise exceptions.InternalError('Unexpected file type: {file_type}'
-                .format(file_type=file_data['type']))
+            raise exceptions.InternalError(
+                "Unexpected file type: {file_type}".format(file_type=file_data["type"])
+            )
 
     async def file_delete(self, client, peer_jid, file_id, recursive=False):
         """Delete a single file or a directory and all its sub-files
@@ -1857,8 +1907,9 @@
         #        should be checked too
         files_data = await self.get_files(client, peer_jid, file_id)
         if not files_data:
-            raise exceptions.NotFound("Can't find the file with id {file_id}".format(
-                file_id=file_id))
+            raise exceptions.NotFound(
+                "Can't find the file with id {file_id}".format(file_id=file_id)
+            )
         file_data = files_data[0]
         if file_data["type"] != C.FILE_TYPE_DIRECTORY and recursive:
             raise ValueError("recursive can only be set for directories")
@@ -1879,12 +1930,11 @@
         return Path(
             self._cache_path,
             regex.path_escape(namespace),
-            *(regex.path_escape(a) for a in args)
+            *(regex.path_escape(a) for a in args),
         )
 
     ## Notifications ##
 
-
     def _add_notification(
         self,
         type_: str,
@@ -1896,7 +1946,7 @@
         priority: str,
         expire_at: float,
         extra_s: str,
-        profile_key: str
+        profile_key: str,
     ) -> defer.Deferred:
         client = self.host.get_client(profile_key)
 
@@ -1907,9 +1957,7 @@
             notification_type = NotificationType[type_]
             notification_priority = NotificationPriority[priority]
         except KeyError as e:
-            raise exceptions.DataError(
-                f"invalid notification type or priority data: {e}"
-            )
+            raise exceptions.DataError(f"invalid notification type or priority data: {e}")
 
         return defer.ensureDeferred(
             self.add_notification(
@@ -1922,7 +1970,7 @@
                 requires_action,
                 notification_priority,
                 expire_at or None,
-                data_format.deserialise(extra_s)
+                data_format.deserialise(extra_s),
             )
         )
 
@@ -1955,21 +2003,28 @@
         @param extra: additional data.
         """
         notification = await self.storage.add_notification(
-            None if is_global else client, type_, body_plain, body_rich, title,
-            requires_action, priority, expire_at, extra
+            None if is_global else client,
+            type_,
+            body_plain,
+            body_rich,
+            title,
+            requires_action,
+            priority,
+            expire_at,
+            extra,
         )
         self.host.bridge.notification_new(
             str(notification.id),
             notification.timestamp,
             type_.value,
             body_plain,
-            body_rich or '',
-            title or '',
+            body_rich or "",
+            title or "",
             requires_action,
             priority.value,
             expire_at or 0,
-            data_format.serialise(extra) if extra else '',
-            C.PROF_KEY_ALL if is_global else client.profile
+            data_format.serialise(extra) if extra else "",
+            C.PROF_KEY_ALL if is_global else client.profile,
         )
 
     def _get_notifications(self, filters_s: str, profile_key: str) -> defer.Deferred:
@@ -1992,12 +2047,14 @@
         filters = data_format.deserialise(filters_s)
 
         try:
-            if 'type' in filters:
-                filters['type_'] = NotificationType[filters.pop('type')]
-            if 'status' in filters:
-                filters['status'] = NotificationStatus[filters['status']]
-            if 'min_priority' in filters:
-                filters['min_priority'] = NotificationPriority[filters['min_priority']].value
+            if "type" in filters:
+                filters["type_"] = NotificationType[filters.pop("type")]
+            if "status" in filters:
+                filters["status"] = NotificationStatus[filters["status"]]
+            if "min_priority" in filters:
+                filters["min_priority"] = NotificationPriority[
+                    filters["min_priority"]
+                ].value
         except KeyError as e:
             raise exceptions.DataError(f"invalid filter data: {e}")
 
@@ -2010,10 +2067,7 @@
         return d
 
     def _delete_notification(
-        self,
-        id_: str,
-        is_global: bool,
-        profile_key: str
+        self, id_: str, is_global: bool, profile_key: str
     ) -> defer.Deferred:
         client = self.host.get_client(profile_key)
         if is_global and not client.is_admin:
@@ -2023,10 +2077,7 @@
         return defer.ensureDeferred(self.delete_notification(client, id_, is_global))
 
     async def delete_notification(
-        self,
-        client: SatXMPPEntity,
-        id_: str,
-        is_global: bool=False
+        self, client: SatXMPPEntity, id_: str, is_global: bool = False
     ) -> None:
         """Delete a notification
 
@@ -2036,8 +2087,7 @@
         """
         await self.storage.delete_notification(None if is_global else client, id_)
         self.host.bridge.notification_deleted(
-            id_,
-            C.PROF_KEY_ALL if is_global else client.profile
+            id_, C.PROF_KEY_ALL if is_global else client.profile
         )
 
     def _notifications_expired_clean(
@@ -2050,12 +2100,10 @@
 
         return defer.ensureDeferred(
             self.storage.clean_expired_notifications(
-                client,
-                None if limit_timestamp == -1.0 else limit_timestamp
+                client, None if limit_timestamp == -1.0 else limit_timestamp
             )
         )
 
-
     ## Misc ##
 
     def is_entity_available(self, client, entity_jid):
--- a/libervia/backend/memory/migration/env.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/migration/env.py	Wed Jun 19 18:44:57 2024 +0200
@@ -62,7 +62,7 @@
         connection=connection,
         target_metadata=target_metadata,
         render_as_batch=True,
-        include_name=include_name
+        include_name=include_name,
     )
 
     with context.begin_transaction():
--- a/libervia/backend/memory/migration/versions/129ac51807e4_create_virtual_table_for_full_text_.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/migration/versions/129ac51807e4_create_virtual_table_for_full_text_.py	Wed Jun 19 18:44:57 2024 +0200
@@ -5,13 +5,14 @@
 Create Date: 2021-08-13 19:13:54.112538
 
 """
+
 from alembic import op
 import sqlalchemy as sa
 
 
 # revision identifiers, used by Alembic.
-revision = '129ac51807e4'
-down_revision = '8974efc51d22'
+revision = "129ac51807e4"
+down_revision = "8974efc51d22"
 branch_labels = None
 depends_on = None
 
@@ -32,7 +33,7 @@
         "('delete', old.id, old.data);"
         "  INSERT INTO pubsub_items_fts(rowid, data) VALUES(new.id, new.data);"
         "END",
-        "INSERT INTO pubsub_items_fts(rowid, data) SELECT id, data from pubsub_items"
+        "INSERT INTO pubsub_items_fts(rowid, data) SELECT id, data from pubsub_items",
     ]
     for q in queries:
         op.execute(sa.DDL(q))
--- a/libervia/backend/memory/migration/versions/2ab01aa1f686_create_table_for_notifications.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/migration/versions/2ab01aa1f686_create_table_for_notifications.py	Wed Jun 19 18:44:57 2024 +0200
@@ -5,42 +5,68 @@
 Create Date: 2023-10-16 12:11:43.507295
 
 """
+
 from alembic import op
 import sqlalchemy as sa
 
 
 # revision identifiers, used by Alembic.
-revision = '2ab01aa1f686'
-down_revision = '4b002773cf92'
+revision = "2ab01aa1f686"
+down_revision = "4b002773cf92"
 branch_labels = None
 depends_on = None
 
 
 def upgrade():
-    op.create_table('notifications',
-    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-    sa.Column('timestamp', sa.Float(), nullable=False),
-    sa.Column('expire_at', sa.Float(), nullable=True),
-    sa.Column('profile_id', sa.Integer(), nullable=True),
-    sa.Column('type', sa.Enum('chat', 'blog', 'calendar', 'file', 'call', 'service', 'other', name='notificationtype'), nullable=False),
-    sa.Column('title', sa.Text(), nullable=True),
-    sa.Column('body_plain', sa.Text(), nullable=False),
-    sa.Column('body_rich', sa.Text(), nullable=True),
-    sa.Column('requires_action', sa.Boolean(), nullable=True),
-    sa.Column('priority', sa.Integer(), nullable=True),
-    sa.Column('extra_data', sa.JSON(), nullable=True),
-    sa.Column('status', sa.Enum('new', 'read', name='notificationstatus'), nullable=True),
-    sa.ForeignKeyConstraint(['profile_id'], ['profiles.id'], name=op.f('fk_notifications_profile_id_profiles'), ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('id', name=op.f('pk_notifications'))
+    op.create_table(
+        "notifications",
+        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column("timestamp", sa.Float(), nullable=False),
+        sa.Column("expire_at", sa.Float(), nullable=True),
+        sa.Column("profile_id", sa.Integer(), nullable=True),
+        sa.Column(
+            "type",
+            sa.Enum(
+                "chat",
+                "blog",
+                "calendar",
+                "file",
+                "call",
+                "service",
+                "other",
+                name="notificationtype",
+            ),
+            nullable=False,
+        ),
+        sa.Column("title", sa.Text(), nullable=True),
+        sa.Column("body_plain", sa.Text(), nullable=False),
+        sa.Column("body_rich", sa.Text(), nullable=True),
+        sa.Column("requires_action", sa.Boolean(), nullable=True),
+        sa.Column("priority", sa.Integer(), nullable=True),
+        sa.Column("extra_data", sa.JSON(), nullable=True),
+        sa.Column(
+            "status", sa.Enum("new", "read", name="notificationstatus"), nullable=True
+        ),
+        sa.ForeignKeyConstraint(
+            ["profile_id"],
+            ["profiles.id"],
+            name=op.f("fk_notifications_profile_id_profiles"),
+            ondelete="CASCADE",
+        ),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_notifications")),
     )
-    with op.batch_alter_table('notifications', schema=None) as batch_op:
-        batch_op.create_index(batch_op.f('ix_notifications_profile_id'), ['profile_id'], unique=False)
-        batch_op.create_index('notifications_profile_id_status', ['profile_id', 'status'], unique=False)
+    with op.batch_alter_table("notifications", schema=None) as batch_op:
+        batch_op.create_index(
+            batch_op.f("ix_notifications_profile_id"), ["profile_id"], unique=False
+        )
+        batch_op.create_index(
+            "notifications_profile_id_status", ["profile_id", "status"], unique=False
+        )
 
 
 def downgrade():
-    with op.batch_alter_table('notifications', schema=None) as batch_op:
-        batch_op.drop_index('notifications_profile_id_status')
-        batch_op.drop_index(batch_op.f('ix_notifications_profile_id'))
+    with op.batch_alter_table("notifications", schema=None) as batch_op:
+        batch_op.drop_index("notifications_profile_id_status")
+        batch_op.drop_index(batch_op.f("ix_notifications_profile_id"))
 
-    op.drop_table('notifications')
+    op.drop_table("notifications")
--- a/libervia/backend/memory/migration/versions/4b002773cf92_add_origin_id_column_to_history_and_.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/migration/versions/4b002773cf92_add_origin_id_column_to_history_and_.py	Wed Jun 19 18:44:57 2024 +0200
@@ -5,56 +5,43 @@
 Create Date: 2022-06-13 16:10:39.711634
 
 """
+
 from alembic import op
 import sqlalchemy as sa
 
 
 # revision identifiers, used by Alembic.
-revision = '4b002773cf92'
-down_revision = '79e5f3313fa4'
+revision = "4b002773cf92"
+down_revision = "79e5f3313fa4"
 branch_labels = None
 depends_on = None
 
 
 def upgrade():
-    with op.batch_alter_table('history', schema=None) as batch_op:
-        batch_op.add_column(sa.Column('origin_id', sa.Text(), nullable=True))
-        batch_op.create_unique_constraint('uq_origin_id', ['profile_id', 'origin_id', 'source'])
+    with op.batch_alter_table("history", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("origin_id", sa.Text(), nullable=True))
+        batch_op.create_unique_constraint(
+            "uq_origin_id", ["profile_id", "origin_id", "source"]
+        )
 
-    with op.batch_alter_table('message', schema=None) as batch_op:
-        batch_op.alter_column('history_uid',
-               existing_type=sa.TEXT(),
-               nullable=False)
-        batch_op.alter_column('message',
-               existing_type=sa.TEXT(),
-               nullable=False)
+    with op.batch_alter_table("message", schema=None) as batch_op:
+        batch_op.alter_column("history_uid", existing_type=sa.TEXT(), nullable=False)
+        batch_op.alter_column("message", existing_type=sa.TEXT(), nullable=False)
 
-    with op.batch_alter_table('subject', schema=None) as batch_op:
-        batch_op.alter_column('history_uid',
-               existing_type=sa.TEXT(),
-               nullable=False)
-        batch_op.alter_column('subject',
-               existing_type=sa.TEXT(),
-               nullable=False)
+    with op.batch_alter_table("subject", schema=None) as batch_op:
+        batch_op.alter_column("history_uid", existing_type=sa.TEXT(), nullable=False)
+        batch_op.alter_column("subject", existing_type=sa.TEXT(), nullable=False)
 
 
 def downgrade():
-    with op.batch_alter_table('subject', schema=None) as batch_op:
-        batch_op.alter_column('subject',
-               existing_type=sa.TEXT(),
-               nullable=True)
-        batch_op.alter_column('history_uid',
-               existing_type=sa.TEXT(),
-               nullable=True)
+    with op.batch_alter_table("subject", schema=None) as batch_op:
+        batch_op.alter_column("subject", existing_type=sa.TEXT(), nullable=True)
+        batch_op.alter_column("history_uid", existing_type=sa.TEXT(), nullable=True)
 
-    with op.batch_alter_table('message', schema=None) as batch_op:
-        batch_op.alter_column('message',
-               existing_type=sa.TEXT(),
-               nullable=True)
-        batch_op.alter_column('history_uid',
-               existing_type=sa.TEXT(),
-               nullable=True)
+    with op.batch_alter_table("message", schema=None) as batch_op:
+        batch_op.alter_column("message", existing_type=sa.TEXT(), nullable=True)
+        batch_op.alter_column("history_uid", existing_type=sa.TEXT(), nullable=True)
 
-    with op.batch_alter_table('history', schema=None) as batch_op:
-        batch_op.drop_constraint('uq_origin_id', type_='unique')
-        batch_op.drop_column('origin_id')
+    with op.batch_alter_table("history", schema=None) as batch_op:
+        batch_op.drop_constraint("uq_origin_id", type_="unique")
+        batch_op.drop_column("origin_id")
--- a/libervia/backend/memory/migration/versions/602caf848068_drop_message_types_table_fix_nullable.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/migration/versions/602caf848068_drop_message_types_table_fix_nullable.py	Wed Jun 19 18:44:57 2024 +0200
@@ -5,6 +5,7 @@
 Create Date: 2021-06-26 12:42:54.148313
 
 """
+
 from alembic import op
 from sqlalchemy import (
     Table,
--- a/libervia/backend/memory/migration/versions/610345f77e75_add_version_id_to_history.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/migration/versions/610345f77e75_add_version_id_to_history.py	Wed Jun 19 18:44:57 2024 +0200
@@ -5,22 +5,27 @@
 Create Date: 2023-11-20 17:33:53.544032
 
 """
+
 from alembic import op
 import sqlalchemy as sa
 
 
 # revision identifiers, used by Alembic.
-revision = '610345f77e75'
-down_revision = '2ab01aa1f686'
+revision = "610345f77e75"
+down_revision = "2ab01aa1f686"
 branch_labels = None
 depends_on = None
 
 
 def upgrade():
-    with op.batch_alter_table('history', schema=None) as batch_op:
-        batch_op.add_column(sa.Column('version_id', sa.Integer(), server_default=sa.text('1'), nullable=False))
+    with op.batch_alter_table("history", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "version_id", sa.Integer(), server_default=sa.text("1"), nullable=False
+            )
+        )
 
 
 def downgrade():
-    with op.batch_alter_table('history', schema=None) as batch_op:
-        batch_op.drop_column('version_id')
+    with op.batch_alter_table("history", schema=None) as batch_op:
+        batch_op.drop_column("version_id")
--- a/libervia/backend/memory/migration/versions/79e5f3313fa4_create_table_for_pubsub_subscriptions.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/migration/versions/79e5f3313fa4_create_table_for_pubsub_subscriptions.py	Wed Jun 19 18:44:57 2024 +0200
@@ -5,29 +5,36 @@
 Create Date: 2022-03-14 17:15:00.689871
 
 """
+
 from alembic import op
 import sqlalchemy as sa
 from libervia.backend.memory.sqla_mapping import JID
 
 
 # revision identifiers, used by Alembic.
-revision = '79e5f3313fa4'
-down_revision = '129ac51807e4'
+revision = "79e5f3313fa4"
+down_revision = "129ac51807e4"
 branch_labels = None
 depends_on = None
 
 
 def upgrade():
-    op.create_table('pubsub_subs',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('node_id', sa.Integer(), nullable=False),
-    sa.Column('subscriber', JID(), nullable=True),
-    sa.Column('state', sa.Enum('SUBSCRIBED', 'PENDING', name='state'), nullable=True),
-    sa.ForeignKeyConstraint(['node_id'], ['pubsub_nodes.id'], name=op.f('fk_pubsub_subs_node_id_pubsub_nodes'), ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('id', name=op.f('pk_pubsub_subs')),
-    sa.UniqueConstraint('node_id', 'subscriber', name=op.f('uq_pubsub_subs_node_id'))
+    op.create_table(
+        "pubsub_subs",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("node_id", sa.Integer(), nullable=False),
+        sa.Column("subscriber", JID(), nullable=True),
+        sa.Column("state", sa.Enum("SUBSCRIBED", "PENDING", name="state"), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["node_id"],
+            ["pubsub_nodes.id"],
+            name=op.f("fk_pubsub_subs_node_id_pubsub_nodes"),
+            ondelete="CASCADE",
+        ),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_pubsub_subs")),
+        sa.UniqueConstraint("node_id", "subscriber", name=op.f("uq_pubsub_subs_node_id")),
     )
 
 
 def downgrade():
-    op.drop_table('pubsub_subs')
+    op.drop_table("pubsub_subs")
--- a/libervia/backend/memory/migration/versions/8974efc51d22_create_tables_for_pubsub_caching.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/migration/versions/8974efc51d22_create_tables_for_pubsub_caching.py	Wed Jun 19 18:44:57 2024 +0200
@@ -5,53 +5,93 @@
 Create Date: 2021-07-27 16:38:54.658212
 
 """
+
 from alembic import op
 import sqlalchemy as sa
 from libervia.backend.memory.sqla_mapping import JID, Xml
 
 
 # revision identifiers, used by Alembic.
-revision = '8974efc51d22'
-down_revision = '602caf848068'
+revision = "8974efc51d22"
+down_revision = "602caf848068"
 branch_labels = None
 depends_on = None
 
 
 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('pubsub_nodes',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('profile_id', sa.Integer(), nullable=True),
-    sa.Column('service', JID(), nullable=True),
-    sa.Column('name', sa.Text(), nullable=False),
-    sa.Column('subscribed', sa.Boolean(create_constraint=True, name='subscribed_bool'), nullable=False),
-    sa.Column('analyser', sa.Text(), nullable=True),
-    sa.Column('sync_state', sa.Enum('IN_PROGRESS', 'COMPLETED', 'ERROR', 'NO_SYNC', name='sync_state', create_constraint=True), nullable=True),
-    sa.Column('sync_state_updated', sa.Float(), nullable=False),
-    sa.Column('type', sa.Text(), nullable=True),
-    sa.Column('subtype', sa.Text(), nullable=True),
-    sa.Column('extra', sa.JSON(), nullable=True),
-    sa.ForeignKeyConstraint(['profile_id'], ['profiles.id'], name=op.f('fk_pubsub_nodes_profile_id_profiles'), ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('id', name=op.f('pk_pubsub_nodes')),
-    sa.UniqueConstraint('profile_id', 'service', 'name', name=op.f('uq_pubsub_nodes_profile_id'))
+    op.create_table(
+        "pubsub_nodes",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("profile_id", sa.Integer(), nullable=True),
+        sa.Column("service", JID(), nullable=True),
+        sa.Column("name", sa.Text(), nullable=False),
+        sa.Column(
+            "subscribed",
+            sa.Boolean(create_constraint=True, name="subscribed_bool"),
+            nullable=False,
+        ),
+        sa.Column("analyser", sa.Text(), nullable=True),
+        sa.Column(
+            "sync_state",
+            sa.Enum(
+                "IN_PROGRESS",
+                "COMPLETED",
+                "ERROR",
+                "NO_SYNC",
+                name="sync_state",
+                create_constraint=True,
+            ),
+            nullable=True,
+        ),
+        sa.Column("sync_state_updated", sa.Float(), nullable=False),
+        sa.Column("type", sa.Text(), nullable=True),
+        sa.Column("subtype", sa.Text(), nullable=True),
+        sa.Column("extra", sa.JSON(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["profile_id"],
+            ["profiles.id"],
+            name=op.f("fk_pubsub_nodes_profile_id_profiles"),
+            ondelete="CASCADE",
+        ),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_pubsub_nodes")),
+        sa.UniqueConstraint(
+            "profile_id", "service", "name", name=op.f("uq_pubsub_nodes_profile_id")
+        ),
     )
-    op.create_table('pubsub_items',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('node_id', sa.Integer(), nullable=False),
-    sa.Column('name', sa.Text(), nullable=False),
-    sa.Column('data', Xml(), nullable=False),
-    sa.Column('created', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
-    sa.Column('updated', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
-    sa.Column('parsed', sa.JSON(), nullable=True),
-    sa.ForeignKeyConstraint(['node_id'], ['pubsub_nodes.id'], name=op.f('fk_pubsub_items_node_id_pubsub_nodes'), ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('id', name=op.f('pk_pubsub_items')),
-    sa.UniqueConstraint('node_id', 'name', name=op.f('uq_pubsub_items_node_id'))
+    op.create_table(
+        "pubsub_items",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("node_id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.Text(), nullable=False),
+        sa.Column("data", Xml(), nullable=False),
+        sa.Column(
+            "created",
+            sa.DateTime(),
+            server_default=sa.text("(CURRENT_TIMESTAMP)"),
+            nullable=False,
+        ),
+        sa.Column(
+            "updated",
+            sa.DateTime(),
+            server_default=sa.text("(CURRENT_TIMESTAMP)"),
+            nullable=False,
+        ),
+        sa.Column("parsed", sa.JSON(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["node_id"],
+            ["pubsub_nodes.id"],
+            name=op.f("fk_pubsub_items_node_id_pubsub_nodes"),
+            ondelete="CASCADE",
+        ),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_pubsub_items")),
+        sa.UniqueConstraint("node_id", "name", name=op.f("uq_pubsub_items_node_id")),
     )
     # ### end Alembic commands ###
 
 
 def downgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('pubsub_items')
-    op.drop_table('pubsub_nodes')
+    op.drop_table("pubsub_items")
+    op.drop_table("pubsub_nodes")
     # ### end Alembic commands ###
--- a/libervia/backend/memory/migration/versions/fe3a02cb4bec_convert_legacypickle_columns_to_json.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/migration/versions/fe3a02cb4bec_convert_legacypickle_columns_to_json.py	Wed Jun 19 18:44:57 2024 +0200
@@ -5,10 +5,12 @@
 Create Date: 2024-02-22 14:55:59.993983
 
 """
+
 from alembic import op
 import sqlalchemy as sa
 import pickle
 import json
+
 try:
     from libervia.backend.plugins.plugin_xep_0373 import PublicKeyMetadata
 except Exception:
@@ -76,10 +78,10 @@
                     "Warning: Failed to convert Trust Management cache with value "
                     f" {deserialized!r}, using empty array instead: {e}"
                 )
-                deserialized=[]
+                deserialized = []
 
         ret = json.dumps(deserialized, ensure_ascii=False, default=str)
-        if table == 'history' and ret == "{}":
+        if table == "history" and ret == "{}":
             # For history, we can remove empty data, but for other tables it may be
             # significant.
             ret = None
--- a/libervia/backend/memory/params.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/params.py	Wed Jun 19 18:44:57 2024 +0200
@@ -227,15 +227,13 @@
             if not default:
                 log.info(_("No default profile, returning first one"))
                 try:
-                    default = self.host.memory.memory_data[
-                        "Profile_default"
-                    ] = self.storage.get_profiles_list()[0]
+                    default = self.host.memory.memory_data["Profile_default"] = (
+                        self.storage.get_profiles_list()[0]
+                    )
                 except IndexError:
                     log.info(_("No profile exist yet"))
                     raise exceptions.ProfileUnknownError(profile_key)
-            return (
-                default
-            )  # FIXME: temporary, must use real default value, and fallback to first one if it doesn't exists
+            return default  # FIXME: temporary, must use real default value, and fall back to first one if it doesn't exist
         elif profile_key == C.PROF_KEY_NONE:
             raise exceptions.ProfileNotSetError
         elif return_profile_keys and profile_key in [C.PROF_KEY_ALL]:
@@ -284,8 +282,8 @@
                         to_remove.append(cat_node)
                         continue
                     to_remove_count = (
-                        0
-                    )  # count the params to be removed from current category
+                        0  # count the params to be removed from current category
+                    )
                     for node in cat_node.childNodes:
                         if node.nodeName != "param" or not self.check_security_limit(
                             node, security_limit
@@ -382,9 +380,7 @@
         node = self._get_param_node(name, category, "@ALL@")
         if not node:
             log.error(
-                _(
-                    "Requested param [%(name)s] in category [%(category)s] doesn't exist !"
-                )
+                _("Requested param [%(name)s] in category [%(category)s] doesn't exist !")
                 % {"name": name, "category": category}
             )
             return
@@ -526,16 +522,18 @@
         return defer.succeed(password)
 
     def _type_to_str(self, result):
-        """Convert result to string, according to its type """
+        """Convert result to string, according to its type"""
         if isinstance(result, bool):
             return C.bool_const(result)
         elif isinstance(result, (list, set, tuple)):
-            return ', '.join(self._type_to_str(r) for r in result)
+            return ", ".join(self._type_to_str(r) for r in result)
         else:
             return str(result)
 
-    def get_string_param_a(self, name, category, attr="value", profile_key=C.PROF_KEY_NONE):
-        """ Same as param_get_a but for bridge: convert non string value to string """
+    def get_string_param_a(
+        self, name, category, attr="value", profile_key=C.PROF_KEY_NONE
+    ):
+        """Same as param_get_a but for bridge: convert non string value to string"""
         return self._type_to_str(
             self.param_get_a(name, category, attr, profile_key=profile_key)
         )
@@ -560,9 +558,7 @@
         node = self._get_param_node(name, category)
         if not node:
             log.error(
-                _(
-                    "Requested param [%(name)s] in category [%(category)s] doesn't exist !"
-                )
+                _("Requested param [%(name)s] in category [%(category)s] doesn't exist !")
                 % {"name": name, "category": category}
             )
             raise exceptions.NotFound
@@ -596,10 +592,16 @@
             return self._get_attr(node[1], attr, value)
 
     async def async_get_string_param_a(
-        self, name, category, attr="value", security_limit=C.NO_SECURITY_LIMIT,
-        profile=C.PROF_KEY_NONE):
+        self,
+        name,
+        category,
+        attr="value",
+        security_limit=C.NO_SECURITY_LIMIT,
+        profile=C.PROF_KEY_NONE,
+    ):
         value = await self.param_get_a_async(
-            name, category, attr, security_limit, profile_key=profile)
+            name, category, attr, security_limit, profile_key=profile
+        )
         return self._type_to_str(value)
 
     def param_get_a_async(
@@ -621,9 +623,7 @@
         node = self._get_param_node(name, category)
         if not node:
             log.error(
-                _(
-                    "Requested param [%(name)s] in category [%(category)s] doesn't exist !"
-                )
+                _("Requested param [%(name)s] in category [%(category)s] doesn't exist !")
                 % {"name": name, "category": category}
             )
             raise ValueError("Requested param doesn't exist")
@@ -662,14 +662,19 @@
             )
 
     def _get_params_values_from_category(
-        self, category, security_limit, app, extra_s, profile_key):
+        self, category, security_limit, app, extra_s, profile_key
+    ):
         client = self.host.get_client(profile_key)
         extra = data_format.deserialise(extra_s)
-        return defer.ensureDeferred(self.get_params_values_from_category(
-            client, category, security_limit, app, extra))
+        return defer.ensureDeferred(
+            self.get_params_values_from_category(
+                client, category, security_limit, app, extra
+            )
+        )
 
     async def get_params_values_from_category(
-        self, client, category, security_limit, app='', extra=None):
+        self, client, category, security_limit, app="", extra=None
+    ):
         """Get all parameters "attribute" for a category
 
         @param category(unicode): the desired category
@@ -697,8 +702,11 @@
                         )
                         continue
                     value = await self.async_get_string_param_a(
-                        name, category, security_limit=security_limit,
-                        profile=client.profile)
+                        name,
+                        category,
+                        security_limit=security_limit,
+                        profile=client.profile,
+                    )
 
                     ret[name] = value
                 break
@@ -751,9 +759,11 @@
 
         def check_node(node):
             """Check the node against security_limit, app and extra"""
-            return (self.check_security_limit(node, security_limit)
-                    and self.check_app(node, app)
-                    and self.check_extra(node, extra))
+            return (
+                self.check_security_limit(node, security_limit)
+                and self.check_app(node, app)
+                and self.check_extra(node, extra)
+            )
 
         if profile in self.params:
             profile_cache = self.params[profile]
@@ -828,9 +838,7 @@
                                         pass
                         elif dest_params[name].getAttribute("type") == "jids_list":
                             jids = profile_value.split("\t")
-                            for jid_elt in dest_params[name].getElementsByTagName(
-                                "jid"
-                            ):
+                            for jid_elt in dest_params[name].getElementsByTagName("jid"):
                                 dest_params[name].removeChild(
                                     jid_elt
                                 )  # remove all default
@@ -866,7 +874,6 @@
 
         return prof_xml
 
-
     def _get_params_ui(self, security_limit, app, extra_s, profile_key):
         client = self.host.get_client(profile_key)
         extra = data_format.deserialise(extra_s)
@@ -940,8 +947,14 @@
                 categories.append(cat.getAttribute("name"))
         return categories
 
-    def param_set(self, name, value, category, security_limit=C.NO_SECURITY_LIMIT,
-                 profile_key=C.PROF_KEY_NONE):
+    def param_set(
+        self,
+        name,
+        value,
+        category,
+        security_limit=C.NO_SECURITY_LIMIT,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """Set a parameter, return None if the parameter is not in param xml.
 
         Parameter of type 'password' that are not the SàT profile password are
@@ -973,11 +986,8 @@
         if not self.check_security_limit(node[1], security_limit):
             msg = _(
                 "{profile!r} is trying to set parameter {name!r} in category "
-                "{category!r} without authorization!!!").format(
-                    profile=repr(profile),
-                    name=repr(name),
-                    category=repr(category)
-                )
+                "{category!r} without authorization!!!"
+            ).format(profile=repr(profile), name=repr(name), category=repr(category))
             log.warning(msg)
             raise exceptions.PermissionError(msg)
 
@@ -989,13 +999,12 @@
                 try:
                     int(value)
                 except ValueError:
-                    log.warning(_(
-                        "Trying to set parameter {name} in category {category} with"
-                        "an non-integer value"
-                    ).format(
-                        name=repr(name),
-                        category=repr(category)
-                    ))
+                    log.warning(
+                        _(
+                            "Trying to set parameter {name} in category {category} with"
+                            " a non-integer value"
+                        ).format(name=repr(name), category=repr(category))
+                    )
                     return defer.succeed(None)
                 if node[1].hasAttribute("constraint"):
                     constraint = node[1].getAttribute("constraint")
@@ -1135,11 +1144,11 @@
         @param app: name of the frontend requesting the parameters, or '' to get all parameters
         @return: True if node doesn't match category/name of extra['ignore'] list
         """
-        ignore_list = extra.get('ignore')
+        ignore_list = extra.get("ignore")
         if not ignore_list:
             return True
-        category = node.parentNode.getAttribute('name')
-        name = node.getAttribute('name')
+        category = node.parentNode.getAttribute("name")
+        name = node.getAttribute("name")
         ignore = [category, name] in ignore_list
         if ignore:
             log.debug(f"Ignoring parameter {category}/{name} as requested")
@@ -1164,10 +1173,10 @@
             selected = 'selected="true"'
             selected_found = True
         else:
-            selected = ''
+            selected = ""
         str_list.append(
-            f'<option value={quoteattr(value)} label={quoteattr(label)} {selected}/>'
+            f"<option value={quoteattr(value)} label={quoteattr(label)} {selected}/>"
         )
     if not selected_found:
         raise ValueError(f"selected value ({selected}) not found in options")
-    return '\n'.join(str_list)
+    return "\n".join(str_list)
--- a/libervia/backend/memory/persistent.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/persistent.py	Wed Jun 19 18:44:57 2024 +0200
@@ -34,7 +34,9 @@
     r"""A dictionary which save persistently each value assigned
 
     /!\ be careful, each assignment means a database write
-    /!\ Memory must be initialised before loading/setting value with instances of this class"""
+    /!\ Memory must be initialised before loading/setting value with instances of this class
+    """
+
     storage = None
     binary = False
 
@@ -60,9 +62,11 @@
         need to be called before any other operation
         @return: defers the PersistentDict instance itself
         """
-        d = defer.ensureDeferred(self.storage.get_privates(
-            self.namespace, binary=self.binary, profile=self.profile
-        ))
+        d = defer.ensureDeferred(
+            self.storage.get_privates(
+                self.namespace, binary=self.binary, profile=self.profile
+            )
+        )
         d.addCallback(self._set_cache)
         d.addCallback(lambda __: self)
         return d
@@ -130,7 +134,9 @@
     def clear(self):
         """Delete all values from this namespace"""
         self._cache.clear()
-        return self.storage.del_private_namespace(self.namespace, self.binary, self.profile)
+        return self.storage.del_private_namespace(
+            self.namespace, self.binary, self.profile
+        )
 
     def get(self, key, default=None):
         return self._cache.get(key, default)
@@ -148,7 +154,8 @@
         """Async del, return a Deferred fired when value is actually deleted"""
         self._cache.__delitem__(key)
         return self.storage.del_private_value(
-            self.namespace, key, self.binary, self.profile)
+            self.namespace, key, self.binary, self.profile
+        )
 
     def setdefault(self, key, default):
         try:
@@ -171,6 +178,7 @@
 
 class PersistentBinaryDict(PersistentDict):
     """Persistent dict where value can be any python data (instead of string only)"""
+
     binary = True
 
 
@@ -181,6 +189,7 @@
     to save memory.
     /!\ most of methods return a Deferred
     """
+
     # TODO: missing methods should be implemented using database access
     # TODO: a cache would be useful (which is deleted after a timeout)
 
@@ -192,16 +201,20 @@
         raise NotImplementedError
 
     def items(self):
-        d = defer.ensureDeferred(self.storage.get_privates(
-            self.namespace, binary=self.binary, profile=self.profile
-        ))
+        d = defer.ensureDeferred(
+            self.storage.get_privates(
+                self.namespace, binary=self.binary, profile=self.profile
+            )
+        )
         d.addCallback(lambda data_dict: data_dict.items())
         return d
 
     def all(self):
-        return defer.ensureDeferred(self.storage.get_privates(
-            self.namespace, binary=self.binary, profile=self.profile
-        ))
+        return defer.ensureDeferred(
+            self.storage.get_privates(
+                self.namespace, binary=self.binary, profile=self.profile
+            )
+        )
 
     def __repr__(self):
         return self.__str__()
@@ -231,7 +244,7 @@
         raise NotImplementedError
 
     def __hash__(self):
-        return hash(str(self.__class__) + self.namespace + (self.profile or ''))
+        return hash(str(self.__class__) + self.namespace + (self.profile or ""))
 
     def __bool__(self):
         raise NotImplementedError
@@ -252,9 +265,11 @@
 
     def __getitem__(self, key):
         """get the value as a Deferred"""
-        d = defer.ensureDeferred(self.storage.get_privates(
-            self.namespace, keys=[key], binary=self.binary, profile=self.profile
-        ))
+        d = defer.ensureDeferred(
+            self.storage.get_privates(
+                self.namespace, keys=[key], binary=self.binary, profile=self.profile
+            )
+        )
         d.addCallback(self._data2value, key)
         return d
 
@@ -291,7 +306,8 @@
         """Async del, return a Deferred fired when value is actually deleted"""
         # XXX: similar as PersistentDict.adel, but doesn't use cache
         return self.storage.del_private_value(
-            self.namespace, key, self.binary, self.profile)
+            self.namespace, key, self.binary, self.profile
+        )
 
     def setdefault(self, key, default):
         raise NotImplementedError
@@ -314,4 +330,6 @@
         @param key(unicode): key to delete
         @return (D): A deferred fired when delete is done
         """
-        return self.storage.del_private_value(self.namespace, key, self.binary, self.profile)
+        return self.storage.del_private_value(
+            self.namespace, key, self.binary, self.profile
+        )
--- a/libervia/backend/memory/sqla.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/sqla.py	Wed Jun 19 18:44:57 2024 +0200
@@ -215,7 +215,7 @@
         engine = create_async_engine(
             db_config["url"],
             future=True,
-            json_serializer=lambda obj: json.dumps(obj, ensure_ascii=False)
+            json_serializer=lambda obj: json.dumps(obj, ensure_ascii=False),
         )
 
         new_base = not db_config["path"].exists()
@@ -1778,7 +1778,7 @@
         type_: Optional[NotificationType] = None,
         status: Optional[NotificationStatus] = None,
         requires_action: Optional[bool] = None,
-        min_priority: Optional[int] = None
+        min_priority: Optional[int] = None,
     ) -> List[Notification]:
         """Retrieve all notifications for a given profile with optional filters.
 
@@ -1790,7 +1790,9 @@
         @return: list of matching Notification instances.
         """
         profile_id = self.profiles[client.profile]
-        filters = [or_(Notification.profile_id == profile_id, Notification.profile_id.is_(None))]
+        filters = [
+            or_(Notification.profile_id == profile_id, Notification.profile_id.is_(None))
+        ]
 
         if type_:
             filters.append(Notification.type == type_)
@@ -1803,9 +1805,7 @@
 
         async with self.session() as session:
             result = await session.execute(
-                select(Notification)
-                .where(and_(*filters))
-                .order_by(Notification.id)
+                select(Notification).where(and_(*filters)).order_by(Notification.id)
             )
             return result.scalars().all()
 
--- a/libervia/backend/memory/sqla_mapping.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/memory/sqla_mapping.py	Wed Jun 19 18:44:57 2024 +0200
@@ -89,7 +89,7 @@
 profiles = Profiles()
 
 
-def get_profile_by_id( profile_id):
+def get_profile_by_id(profile_id):
     return profiles.id_to_profile.get(profile_id)
 
 
@@ -263,23 +263,17 @@
 
     profile = relationship("Profile")
     messages = relationship(
-        "Message",
-        backref="history",
-        cascade="all, delete-orphan",
-        passive_deletes=True
+        "Message", backref="history", cascade="all, delete-orphan", passive_deletes=True
     )
     subjects = relationship(
-        "Subject",
-        backref="history",
-        cascade="all, delete-orphan",
-        passive_deletes=True
+        "Subject", backref="history", cascade="all, delete-orphan", passive_deletes=True
     )
     thread = relationship(
         "Thread",
         uselist=False,
         back_populates="history",
         cascade="all, delete-orphan",
-        passive_deletes=True
+        passive_deletes=True,
     )
     __mapper_args__ = {"version_id_col": version_id}
 
@@ -332,9 +326,9 @@
                 extra["thread_parent"] = self.thread.parent_id
 
         return {
-            "from": f"{self.source}/{self.source_res}"
-            if self.source_res
-            else self.source,
+            "from": (
+                f"{self.source}/{self.source_res}" if self.source_res else self.source
+            ),
             "to": f"{self.dest}/{self.dest_res}" if self.dest_res else self.dest,
             "uid": self.uid,
             "message": {m.language or "": m.message for m in self.messages},
@@ -456,7 +450,9 @@
     timestamp = Column(Float, nullable=False, default=time.time)
     expire_at = Column(Float, nullable=True)
 
-    profile_id = Column(ForeignKey("profiles.id", ondelete="CASCADE"), index=True, nullable=True)
+    profile_id = Column(
+        ForeignKey("profiles.id", ondelete="CASCADE"), index=True, nullable=True
+    )
     profile = relationship("Profile")
 
     type = Column(Enum(NotificationType), nullable=False)
--- a/libervia/backend/models/core.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/models/core.py	Wed Jun 19 18:44:57 2024 +0200
@@ -40,6 +40,7 @@
 
 class MessageEdition(BaseModel):
     """Data used to send a message edition"""
+
     message: dict[str, str]
     subject: dict[str, str] = Field(default_factory=dict)
     extra: dict[str, str] = Field(default_factory=dict)
--- a/libervia/backend/plugins/plugin_adhoc_dbus.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_adhoc_dbus.py	Wed Jun 19 18:44:57 2024 +0200
@@ -37,8 +37,10 @@
     from lxml import etree
 except ImportError:
     etree = None
-    log.warning("Missing module lxml, please download/install it from http://lxml.de/ ."
-                "Auto D-Bus discovery will be disabled")
+    log.warning(
+        "Missing module lxml, please download/install it from http://lxml.de/ ."
+        "Auto D-Bus discovery will be disabled"
+    )
 
 try:
     import txdbus
@@ -59,23 +61,26 @@
 CMD_GO_BACK = "GoBack"
 CMD_GO_FWD = "GoFW"
 SEEK_OFFSET = 5 * 1000 * 1000
-MPRIS_COMMANDS = ["org.mpris.MediaPlayer2.Player." + cmd for cmd in (
-    "Previous", CMD_GO_BACK, "PlayPause", CMD_GO_FWD, "Next")]
+MPRIS_COMMANDS = [
+    "org.mpris.MediaPlayer2.Player." + cmd
+    for cmd in ("Previous", CMD_GO_BACK, "PlayPause", CMD_GO_FWD, "Next")
+]
 MPRIS_PATH = "/org/mpris/MediaPlayer2"
-MPRIS_PROPERTIES = OrderedDict((
-    ("org.mpris.MediaPlayer2", (
-        "Identity",
-        )),
-    ("org.mpris.MediaPlayer2.Player", (
-        "Metadata",
-        "PlaybackStatus",
-        "Volume",
-        )),
-    ))
+MPRIS_PROPERTIES = OrderedDict(
+    (
+        ("org.mpris.MediaPlayer2", ("Identity",)),
+        (
+            "org.mpris.MediaPlayer2.Player",
+            (
+                "Metadata",
+                "PlaybackStatus",
+                "Volume",
+            ),
+        ),
+    )
+)
 MPRIS_METADATA_KEY = "Metadata"
-MPRIS_METADATA_MAP = OrderedDict((
-    ("xesam:title", "Title"),
-    ))
+MPRIS_METADATA_MAP = OrderedDict((("xesam:title", "Title"),))
 
 INTROSPECT_METHOD = "Introspect"
 IGNORED_IFACES_START = (
@@ -126,18 +131,20 @@
     async def profile_connected(self, client):
         if txdbus is not None:
             if self.session_con is None:
-                self.session_con = await dbus_client.connect(reactor, 'session')
+                self.session_con = await dbus_client.connect(reactor, "session")
                 self.fd_object = await self.session_con.getRemoteObject(FD_NAME, FD_PATH)
 
             self._c.add_ad_hoc_command(
-                client, self.local_media_cb, D_("Media Players"),
+                client,
+                self.local_media_cb,
+                D_("Media Players"),
                 node=NS_MEDIA_PLAYER,
-                timeout=60*60*6  # 6 hours timeout, to avoid breaking remote
-                                 # in the middle of a movie
+                timeout=60 * 60 * 6,  # 6 hours timeout, to avoid breaking remote
+                # in the middle of a movie
             )
 
     async def _dbus_async_call(self, proxy, method, *args, **kwargs):
-        """ Call a DBus method asynchronously and return a deferred
+        """Call a DBus method asynchronously and return a deferred
 
         @param proxy: DBus object proxy, as returner by get_object
         @param method: name of the method to call
@@ -152,17 +159,19 @@
 
     async def _dbus_get_property(self, proxy, interface, name):
         return await self._dbus_async_call(
-            proxy, "Get", interface, name, interface="org.freedesktop.DBus.Properties")
-
+            proxy, "Get", interface, name, interface="org.freedesktop.DBus.Properties"
+        )
 
     async def _dbus_list_names(self):
         return await self.fd_object.callRemote("ListNames")
 
     async def _dbus_introspect(self, proxy):
-        return await self._dbus_async_call(proxy, INTROSPECT_METHOD, interface=INTROSPECT_IFACE)
+        return await self._dbus_async_call(
+            proxy, INTROSPECT_METHOD, interface=INTROSPECT_IFACE
+        )
 
     def _accept_method(self, method):
-        """ Return True if we accept the method for a command
+        """Return True if we accept the method for a command
         @param method: etree.Element
         @return: True if the method is acceptable
 
@@ -181,9 +190,7 @@
         for node in el.iterchildren("node", "interface"):
             if node.tag == "node":
                 new_path = os.path.join(proxy.object_path, node.get("name"))
-                new_proxy = await self.session_con.getRemoteObject(
-                    bus_name, new_path
-                )
+                new_proxy = await self.session_con.getRemoteObject(bus_name, new_path)
                 await self._introspect(methods, bus_name, new_proxy)
             elif node.tag == "interface":
                 name = node.get("name")
@@ -197,16 +204,40 @@
                         log.debug("method accepted: [%s]" % method_name)
                         methods.add((proxy.object_path, name, method_name))
 
-    def _ad_hoc_dbus_add_auto(self, prog_name, allowed_jids, allowed_groups, allowed_magics,
-                          forbidden_jids, forbidden_groups, flags, profile_key):
+    def _ad_hoc_dbus_add_auto(
+        self,
+        prog_name,
+        allowed_jids,
+        allowed_groups,
+        allowed_magics,
+        forbidden_jids,
+        forbidden_groups,
+        flags,
+        profile_key,
+    ):
         client = self.host.get_client(profile_key)
         return self.ad_hoc_dbus_add_auto(
-            client, prog_name, allowed_jids, allowed_groups, allowed_magics,
-            forbidden_jids, forbidden_groups, flags)
+            client,
+            prog_name,
+            allowed_jids,
+            allowed_groups,
+            allowed_magics,
+            forbidden_jids,
+            forbidden_groups,
+            flags,
+        )
 
-    async def ad_hoc_dbus_add_auto(self, client, prog_name, allowed_jids=None, allowed_groups=None,
-                         allowed_magics=None, forbidden_jids=None, forbidden_groups=None,
-                         flags=None):
+    async def ad_hoc_dbus_add_auto(
+        self,
+        client,
+        prog_name,
+        allowed_jids=None,
+        allowed_groups=None,
+        allowed_magics=None,
+        forbidden_jids=None,
+        forbidden_groups=None,
+        flags=None,
+    ):
         bus_names = await self._dbus_list_names()
         bus_names = [bus_name for bus_name in bus_names if "." + prog_name in bus_name]
         if not bus_names:
@@ -241,9 +272,19 @@
 
         return (str(bus_name), methods)
 
-    def _add_command(self, client, adhoc_name, bus_name, methods, allowed_jids=None,
-                    allowed_groups=None, allowed_magics=None, forbidden_jids=None,
-                    forbidden_groups=None, flags=None):
+    def _add_command(
+        self,
+        client,
+        adhoc_name,
+        bus_name,
+        methods,
+        allowed_jids=None,
+        allowed_groups=None,
+        allowed_magics=None,
+        forbidden_jids=None,
+        forbidden_groups=None,
+        flags=None,
+    ):
         if flags is None:
             flags = set()
 
@@ -331,9 +372,16 @@
             - device name
             - device label
         """
-        found_data = await defer.ensureDeferred(self.host.find_by_features(
-            client, [self.host.ns_map['commands']], service=False, roster=False,
-            own_jid=True, local_device=True))
+        found_data = await defer.ensureDeferred(
+            self.host.find_by_features(
+                client,
+                [self.host.ns_map["commands"]],
+                service=False,
+                roster=False,
+                own_jid=True,
+                local_device=True,
+            )
+        )
 
         remotes = []
 
@@ -344,39 +392,47 @@
                 for cmd in cmd_list:
                     if cmd.nodeIdentifier == NS_MEDIA_PLAYER:
                         try:
-                            result_elt = await self._c.do(client, device_jid,
-                                                          NS_MEDIA_PLAYER, timeout=5)
+                            result_elt = await self._c.do(
+                                client, device_jid, NS_MEDIA_PLAYER, timeout=5
+                            )
                             command_elt = self._c.get_command_elt(result_elt)
                             form = data_form.findForm(command_elt, NS_MEDIA_PLAYER)
                             if form is None:
                                 continue
-                            mp_options = form.fields['media_player'].options
-                            session_id = command_elt.getAttribute('sessionid')
+                            mp_options = form.fields["media_player"].options
+                            session_id = command_elt.getAttribute("sessionid")
                             if mp_options and session_id:
                                 # we just want to discover player, so we cancel the
                                 # session
-                                self._c.do(client, device_jid, NS_MEDIA_PLAYER,
-                                           action=self._c.ACTION.CANCEL,
-                                           session_id=session_id)
+                                self._c.do(
+                                    client,
+                                    device_jid,
+                                    NS_MEDIA_PLAYER,
+                                    action=self._c.ACTION.CANCEL,
+                                    session_id=session_id,
+                                )
 
                             for opt in mp_options:
-                                remotes.append((device_jid_s,
-                                                opt.value,
-                                                opt.label or opt.value))
+                                remotes.append(
+                                    (device_jid_s, opt.value, opt.label or opt.value)
+                                )
                         except Exception as e:
-                            log.warning(_(
-                                "Can't retrieve remote controllers on {device_jid}: "
-                                "{reason}".format(device_jid=device_jid, reason=e)))
+                            log.warning(
+                                _(
+                                    "Can't retrieve remote controllers on {device_jid}: "
+                                    "{reason}".format(device_jid=device_jid, reason=e)
+                                )
+                            )
                         break
         return remotes
 
     async def do_mpris_command(self, proxy, command):
         iface, command = command.rsplit(".", 1)
         if command == CMD_GO_BACK:
-            command = 'Seek'
+            command = "Seek"
             args = [-SEEK_OFFSET]
         elif command == CMD_GO_FWD:
-            command = 'Seek'
+            command = "Seek"
             args = [SEEK_OFFSET]
         else:
             args = []
@@ -387,9 +443,7 @@
         for mpris_key, name in MPRIS_METADATA_MAP.items():
             if mpris_key in metadata:
                 value = str(metadata[mpris_key])
-                form.addField(data_form.Field(fieldType="fixed",
-                                              var=name,
-                                              value=value))
+                form.addField(data_form.Field(fieldType="fixed", var=name, value=value))
 
     async def local_media_cb(self, client, command_elt, session_data, action, node):
         assert txdbus is not None
@@ -408,10 +462,11 @@
                 return (None, self._c.STATUS.COMPLETED, None, note)
             options = []
             status = self._c.STATUS.EXECUTING
-            form = data_form.Form("form", title=D_("Media Player Selection"),
-                                  formNamespace=NS_MEDIA_PLAYER)
+            form = data_form.Form(
+                "form", title=D_("Media Player Selection"), formNamespace=NS_MEDIA_PLAYER
+            )
             for bus in bus_names:
-                player_name = bus[len(MPRIS_PREFIX)+1:]
+                player_name = bus[len(MPRIS_PREFIX) + 1 :]
                 if not player_name:
                     log.warning(_("Ignoring MPRIS bus without suffix"))
                     continue
@@ -430,9 +485,12 @@
                 raise ValueError(_("missing media_player value"))
 
             if not bus_name.startswith(MPRIS_PREFIX):
-                log.warning(_("Media player ad-hoc command trying to use non MPRIS bus. "
-                              "Hack attempt? Refused bus: {bus_name}").format(
-                              bus_name=bus_name))
+                log.warning(
+                    _(
+                        "Media player ad-hoc command trying to use non MPRIS bus. "
+                        "Hack attempt? Refused bus: {bus_name}"
+                    ).format(bus_name=bus_name)
+                )
                 note = (self._c.NOTE.ERROR, D_("Invalid player name."))
                 return (None, self._c.STATUS.COMPLETED, None, note)
 
@@ -453,29 +511,36 @@
 
             # we construct the remote control form
             form = data_form.Form("form", title=D_("Media Player Selection"))
-            form.addField(data_form.Field(fieldType="hidden",
-                                          var="media_player",
-                                          value=bus_name))
+            form.addField(
+                data_form.Field(fieldType="hidden", var="media_player", value=bus_name)
+            )
             for iface, properties_names in MPRIS_PROPERTIES.items():
                 for name in properties_names:
                     try:
                         value = await self._dbus_get_property(proxy, iface, name)
                     except Exception as e:
-                        log.warning(_("Can't retrieve attribute {name}: {reason}")
-                                    .format(name=name, reason=e))
+                        log.warning(
+                            _("Can't retrieve attribute {name}: {reason}").format(
+                                name=name, reason=e
+                            )
+                        )
                         continue
                     if name == MPRIS_METADATA_KEY:
                         self.add_mpris_metadata(form, value)
                     else:
-                        form.addField(data_form.Field(fieldType="fixed",
-                                                      var=name,
-                                                      value=str(value)))
+                        form.addField(
+                            data_form.Field(fieldType="fixed", var=name, value=str(value))
+                        )
 
             commands = [data_form.Option(c, c.rsplit(".", 1)[1]) for c in MPRIS_COMMANDS]
-            form.addField(data_form.Field(fieldType="list-single",
-                                          var="command",
-                                          options=commands,
-                                          required=True))
+            form.addField(
+                data_form.Field(
+                    fieldType="list-single",
+                    var="command",
+                    options=commands,
+                    required=True,
+                )
+            )
 
             payload = form.toElement()
             status = self._c.STATUS.EXECUTING
--- a/libervia/backend/plugins/plugin_app_manager_docker/__init__.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_app_manager_docker/__init__.py	Wed Jun 19 18:44:57 2024 +0200
@@ -36,8 +36,7 @@
     C.PI_DEPENDENCIES: ["APP_MANAGER"],
     C.PI_MAIN: "AppManagerDocker",
     C.PI_HANDLER: "no",
-    C.PI_DESCRIPTION: _(
-        """Applications Manager for Docker"""),
+    C.PI_DESCRIPTION: _("""Applications Manager for Docker"""),
 }
 
 
@@ -48,18 +47,19 @@
     def __init__(self, host):
         log.info(_("Docker App Manager initialization"))
         try:
-            self.docker_compose_path = which('docker-compose')[0]
+            self.docker_compose_path = which("docker-compose")[0]
         except IndexError:
             raise exceptions.NotFound(
                 '"docker-compose" executable not found, Docker can\'t be used with '
-                'application manager')
+                "application manager"
+            )
         super().__init__(host)
 
     async def start(self, app_data: dict) -> None:
         await self._am.start_common(app_data)
-        working_dir = app_data['_instance_dir_path']
+        working_dir = app_data["_instance_dir_path"]
         try:
-            override = app_data['override']
+            override = app_data["override"]
         except KeyError:
             pass
         else:
@@ -75,7 +75,7 @@
         )
 
     async def stop(self, app_data: dict) -> None:
-        working_dir = app_data['_instance_dir_path']
+        working_dir = app_data["_instance_dir_path"]
         await async_process.run(
             self.docker_compose_path,
             "down",
@@ -83,17 +83,18 @@
         )
 
     async def compute_expose(self, app_data: dict) -> None:
-        working_dir = app_data['_instance_dir_path']
-        expose = app_data['expose']
-        ports = expose.get('ports', {})
+        working_dir = app_data["_instance_dir_path"]
+        expose = app_data["expose"]
+        ports = expose.get("ports", {})
         for name, port_data in list(ports.items()):
             try:
-                service = port_data['service']
-                private = port_data['private']
+                service = port_data["service"]
+                private = port_data["private"]
                 int(private)
             except (KeyError, ValueError):
                 log.warning(
-                    f"invalid value found for {name!r} port in {app_data['_file_path']}")
+                    f"invalid value found for {name!r} port in {app_data['_file_path']}"
+                )
                 continue
             exposed_port = await async_process.run(
                 self.docker_compose_path,
@@ -104,11 +105,12 @@
             )
             exposed_port = exposed_port.decode().strip()
             try:
-                addr, port = exposed_port.split(':')
+                addr, port = exposed_port.split(":")
                 int(port)
             except ValueError:
                 log.warning(
-                    f"invalid exposed port for {name}, ignoring: {exposed_port!r}")
+                    f"invalid exposed port for {name}, ignoring: {exposed_port!r}"
+                )
                 del ports[name]
             else:
                 ports[name] = exposed_port
--- a/libervia/backend/plugins/plugin_blog_import_dokuwiki.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_blog_import_dokuwiki.py	Wed Jun 19 18:44:57 2024 +0200
@@ -377,9 +377,9 @@
         try:
             media_repo = options["media_repo"]
             if opt_upload_images:
-                options[
-                    self._blog_import.OPT_UPLOAD_IMAGES
-                ] = False  # force using --no-images-upload
+                options[self._blog_import.OPT_UPLOAD_IMAGES] = (
+                    False  # force using --no-images-upload
+                )
             info_msg = _(
                 "DokuWiki media files will be *downloaded* to {temp_dir} - to finish the import you have to upload them *manually* to {media_repo}"
             )
--- a/libervia/backend/plugins/plugin_comp_ap_gateway/__init__.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_comp_ap_gateway/__init__.py	Wed Jun 19 18:44:57 2024 +0200
@@ -87,7 +87,7 @@
     TYPE_REACTION,
     TYPE_TOMBSTONE,
     TYPE_JOIN,
-    TYPE_LEAVE
+    TYPE_LEAVE,
 )
 from .http_server import HTTPServer
 from .pubsub_service import APPubsubService
@@ -105,9 +105,23 @@
     C.PI_TYPE: C.PLUG_TYPE_ENTRY_POINT,
     C.PI_PROTOCOLS: [],
     C.PI_DEPENDENCIES: [
-        "XEP-0050", "XEP-0054", "XEP-0060", "XEP-0084", "XEP-0106", "XEP-0277",
-        "XEP-0292", "XEP-0329", "XEP-0372", "XEP-0424", "XEP-0465", "XEP-0470",
-        "XEP-0447", "XEP-0471", "PUBSUB_CACHE", "TEXT_SYNTAXES", "IDENTITY"
+        "XEP-0050",
+        "XEP-0054",
+        "XEP-0060",
+        "XEP-0084",
+        "XEP-0106",
+        "XEP-0277",
+        "XEP-0292",
+        "XEP-0329",
+        "XEP-0372",
+        "XEP-0424",
+        "XEP-0465",
+        "XEP-0470",
+        "XEP-0447",
+        "XEP-0471",
+        "PUBSUB_CACHE",
+        "TEXT_SYNTAXES",
+        "IDENTITY",
     ],
     C.PI_RECOMMENDATIONS: [],
     C.PI_MAIN: "APGateway",
@@ -160,26 +174,19 @@
             # we want to be sure that the callbacks are launched before pubsub cache's
             # one, as we need to inspect items before they are actually removed from cache
             # or updated
-            priority=1000
+            priority=1000,
         )
         self.pubsub_service = APPubsubService(self)
         self.ad_hoc = APAdHocService(self)
         self.ap_events = APEvents(self)
         host.trigger.add_with_check(
-            "message_received",
-            self,
-            self._message_received_trigger,
-            priority=-1000
+            "message_received", self, self._message_received_trigger, priority=-1000
         )
         host.trigger.add_with_check(
-            "XEP-0424_retract_received",
-            self,
-            self._on_message_retract
+            "XEP-0424_retract_received", self, self._on_message_retract
         )
         host.trigger.add_with_check(
-            "XEP-0372_ref_received",
-            self,
-            self._on_reference_received
+            "XEP-0372_ref_received", self, self._on_reference_received
         )
 
         host.bridge.add_method(
@@ -215,7 +222,7 @@
             private_key_pem = self.private_key.private_bytes(
                 encoding=serialization.Encoding.PEM,
                 format=serialization.PrivateFormat.PKCS8,
-                encryption_algorithm=serialization.NoEncryption()
+                encryption_algorithm=serialization.NoEncryption(),
             ).decode()
             await self.host.memory.storage.set_private_value(
                 IMPORT_NAME, "rsa_key", private_key_pem, profile=client.profile
@@ -228,16 +235,14 @@
         self.public_key = self.private_key.public_key()
         self.public_key_pem = self.public_key.public_bytes(
             encoding=serialization.Encoding.PEM,
-            format=serialization.PublicFormat.SubjectPublicKeyInfo
+            format=serialization.PublicFormat.SubjectPublicKeyInfo,
         ).decode()
 
         # params
         # URL and port
         self.public_url = self.host.memory.config_get(
             CONF_SECTION, "public_url"
-        ) or self.host.memory.config_get(
-            CONF_SECTION, "xmpp_domain"
-        )
+        ) or self.host.memory.config_get(CONF_SECTION, "xmpp_domain")
         if self.public_url is None:
             log.error(
                 '"public_url" not set in configuration, this is mandatory to have'
@@ -247,15 +252,15 @@
             return
         if parse.urlparse(self.public_url).scheme:
             log.error(
-                "Scheme must not be specified in \"public_url\", please remove it from "
-                "\"public_url\" configuration option. ActivityPub Gateway won't be run."
+                'Scheme must not be specified in "public_url", please remove it from '
+                '"public_url" configuration option. ActivityPub Gateway won\'t be run.'
             )
             return
-        self.http_port = int(self.host.memory.config_get(
-            CONF_SECTION, 'http_port', 8123))
+        self.http_port = int(self.host.memory.config_get(CONF_SECTION, "http_port", 8123))
         connection_type = self.host.memory.config_get(
-            CONF_SECTION, 'http_connection_type', 'https')
-        if connection_type not in ('http', 'https'):
+            CONF_SECTION, "http_connection_type", "https"
+        )
+        if connection_type not in ("http", "https"):
             raise exceptions.ConfigError(
                 'bad ap-gateay http_connection_type, you must use one of "http" or '
                 '"https"'
@@ -263,18 +268,17 @@
         self.http_sign_get = C.bool(
             self.host.memory.config_get(CONF_SECTION, "http_sign_get", C.BOOL_TRUE)
         )
-        self.max_items = int(self.host.memory.config_get(
-            CONF_SECTION, 'new_node_max_items', 50
-
-        ))
-        self.comments_max_depth = int(self.host.memory.config_get(
-            CONF_SECTION, 'comments_max_depth', 0
-        ))
-        self.ap_path = self.host.memory.config_get(CONF_SECTION, 'ap_path', '_ap')
+        self.max_items = int(
+            self.host.memory.config_get(CONF_SECTION, "new_node_max_items", 50)
+        )
+        self.comments_max_depth = int(
+            self.host.memory.config_get(CONF_SECTION, "comments_max_depth", 0)
+        )
+        self.ap_path = self.host.memory.config_get(CONF_SECTION, "ap_path", "_ap")
         self.base_ap_url = parse.urljoin(f"https://{self.public_url}", f"{self.ap_path}/")
         # True (default) if we provide gateway only to entities/services from our server
         self.local_only = C.bool(
-            self.host.memory.config_get(CONF_SECTION, 'local_only', C.BOOL_TRUE)
+            self.host.memory.config_get(CONF_SECTION, "local_only", C.BOOL_TRUE)
         )
         # if True (default), mention will be parsed in non-private content coming from
         # XMPP. This is necessary as XEP-0372 are coming separately from item where the
@@ -285,7 +289,7 @@
         )
 
         html_redirect: Dict[str, Union[str, dict]] = self.host.memory.config_get(
-            CONF_SECTION, 'html_redirect_dict', {}
+            CONF_SECTION, "html_redirect_dict", {}
         )
         self.html_redirect: Dict[str, List[dict]] = {}
         for url_type, target in html_redirect.items():
@@ -307,11 +311,10 @@
 
         # HTTP server launch
         self.server = HTTPServer(self)
-        if connection_type == 'http':
+        if connection_type == "http":
             reactor.listenTCP(self.http_port, self.server)
         else:
-            options = tls.get_options_from_config(
-                self.host.memory.config, CONF_SECTION)
+            options = tls.get_options_from_config(self.host.memory.config, CONF_SECTION)
             tls.tls_options_check(options)
             context_factory = tls.get_tls_context_factory(options)
             reactor.listenSSL(self.http_port, self.server, context_factory)
@@ -320,8 +323,7 @@
         self.client = client
         client.sendHistory = True
         client._ap_storage = persistent.LazyPersistentBinaryDict(
-            IMPORT_NAME,
-            client.profile
+            IMPORT_NAME, client.profile
         )
         await self.init(client)
 
@@ -329,9 +331,7 @@
         self.ad_hoc.init(client)
 
     async def _items_received(
-        self,
-        client: SatXMPPEntity,
-        itemsEvent: pubsub.ItemsEvent
+        self, client: SatXMPPEntity, itemsEvent: pubsub.ItemsEvent
     ) -> None:
         """Callback called when pubsub items are received
 
@@ -355,19 +355,23 @@
 
         if self._pa.is_attachment_node(itemsEvent.nodeIdentifier):
             await self.convert_and_post_attachments(
-                client, ap_account, itemsEvent.sender, itemsEvent.nodeIdentifier,
-                itemsEvent.items
+                client,
+                ap_account,
+                itemsEvent.sender,
+                itemsEvent.nodeIdentifier,
+                itemsEvent.items,
             )
         else:
             await self.convert_and_post_items(
-                client, ap_account, itemsEvent.sender, itemsEvent.nodeIdentifier,
-                itemsEvent.items
+                client,
+                ap_account,
+                itemsEvent.sender,
+                itemsEvent.nodeIdentifier,
+                itemsEvent.items,
             )
 
     async def get_virtual_client(
-        self,
-        requestor_actor_id: str,
-        actor_id: str
+        self, requestor_actor_id: str, actor_id: str
     ) -> SatXMPPEntity:
         """Get client for this component with a specified jid
 
@@ -413,8 +417,7 @@
             return await treq.json_content(resp)
         except Exception as e:
             raise error.StanzaError(
-                "service-unavailable",
-                text=f"Can't get AP data at {url}: {e}"
+                "service-unavailable", text=f"Can't get AP data at {url}: {e}"
             )
 
     async def ap_post(self, url: str, requestor_actor_id: str, doc: dict) -> TReqResponse:
@@ -429,11 +432,13 @@
             actor_account = actor_args[0]
             to_log = [
                 "",
-                f">>> {actor_account} is signing and posting to {url}:\n{pformat(doc)}"
+                f">>> {actor_account} is signing and posting to {url}:\n{pformat(doc)}",
             ]
 
         body = json.dumps(doc).encode()
-        headers = self._generate_signed_headers(url, requestor_actor_id, method="post", body=body)
+        headers = self._generate_signed_headers(
+            url, requestor_actor_id, method="post", body=body
+        )
         headers["Content-Type"] = MEDIA_TYPE_AP
 
         if self.verbose:
@@ -443,11 +448,7 @@
             to_log.append("---")
             log.info("\n".join(to_log))
 
-        resp = await treq.post(
-            url,
-            body,
-            headers=headers
-        )
+        resp = await treq.post(url, body, headers=headers)
         if resp.code >= 300:
             text = await resp.text()
             log.warning(f"POST request to {url} failed [{resp.code}]: {text}")
@@ -456,11 +457,7 @@
         return resp
 
     def _generate_signed_headers(
-        self,
-        url: str,
-        actor_id: str,
-        method: str,
-        body: bytes|None = None
+        self, url: str, actor_id: str, method: str, body: bytes | None = None
     ) -> dict[str, str]:
         """Generate HTTP headers with signature for a given request
 
@@ -474,7 +471,7 @@
         headers = {
             "(request-target)": f"{method} {p_url.path}",
             "Host": p_url.hostname,
-            "Date": http.datetimeToString().decode()
+            "Date": http.datetimeToString().decode(),
         }
 
         if body:
@@ -487,16 +484,14 @@
     @overload
     async def ap_get_object(
         self, requestor_actor_id: str, data: dict, key: str
-    ) -> dict|None:
-        ...
+    ) -> dict | None: ...
 
     @overload
     async def ap_get_object(
         self, requestor_actor_id: str, data: Union[str, dict], key: None = None
-    ) -> dict:
-        ...
+    ) -> dict: ...
 
-    async def ap_get_object(self, requestor_actor_id: str, data, key = None) -> dict|None:
+    async def ap_get_object(self, requestor_actor_id: str, data, key=None) -> dict | None:
         """Retrieve an AP object, dereferencing when necessary
 
         This method is to be used with attributes marked as "Functional" in
@@ -526,10 +521,7 @@
                 "was expecting a string or a dict, got {type(value)}: {value!r}}"
             )
 
-    async def ap_get_local_object(
-        self,
-        url: str
-    ) -> dict:
+    async def ap_get_local_object(self, url: str) -> dict:
         """Retrieve or generate local object
 
         for now, only handle XMPP items to convert to AP
@@ -596,11 +588,7 @@
             )
 
     async def ap_get_list(
-        self,
-        requestor_actor_id: str,
-        data: dict,
-        key: str,
-        only_ids: bool = False
+        self, requestor_actor_id: str, data: dict, key: str, only_ids: bool = False
     ) -> Optional[List[Dict[str, Any]]]:
         """Retrieve a list of objects from AP data, dereferencing when necessary
 
@@ -626,19 +614,12 @@
         if not isinstance(value, list):
             raise ValueError(f"A list was expected, got {type(value)}: {value!r}")
         if only_ids:
-            return [
-                {"id": v["id"]} if isinstance(v, dict) else {"id": v}
-                for v in value
-            ]
+            return [{"id": v["id"]} if isinstance(v, dict) else {"id": v} for v in value]
         else:
             return [await self.ap_get_object(requestor_actor_id, i) for i in value]
 
     async def ap_get_actors(
-        self,
-        requestor_actor_id: str,
-        data: dict,
-        key: str,
-        as_account: bool = True
+        self, requestor_actor_id: str, data: dict, key: str, as_account: bool = True
     ) -> List[str]:
         """Retrieve AP actors from data
 
@@ -671,9 +652,7 @@
                     f"invalid actors list to object {data.get('id')!r}: {value!r}"
                 )
         if not value:
-            raise exceptions.DataError(
-                f"list of actors is empty"
-            )
+            raise exceptions.DataError(f"list of actors is empty")
         if as_account:
             return [
                 await self.get_ap_account_from_id(requestor_actor_id, actor_id)
@@ -697,12 +676,16 @@
         @raise exceptions.NotFound: no actor has been found in data
         """
         try:
-            actors = await self.ap_get_actors(requestor_actor_id, data, "actor", as_account=False)
+            actors = await self.ap_get_actors(
+                requestor_actor_id, data, "actor", as_account=False
+            )
         except exceptions.DataError:
             actors = None
         if not actors:
             try:
-                actors = await self.ap_get_actors(requestor_actor_id, data, "attributedTo", as_account=False)
+                actors = await self.ap_get_actors(
+                    requestor_actor_id, data, "attributedTo", as_account=False
+                )
             except exceptions.DataError:
                 raise exceptions.NotFound(
                     'actor not specified in "actor" or "attributedTo"'
@@ -715,9 +698,7 @@
     def must_encode(self, text: str) -> bool:
         """Indicate if a text must be period encoded"""
         return (
-            not RE_ALLOWED_UNQUOTED.match(text)
-            or text.startswith("___")
-            or "---" in text
+            not RE_ALLOWED_UNQUOTED.match(text) or text.startswith("___") or "---" in text
         )
 
     def period_encode(self, text: str) -> str:
@@ -735,9 +716,7 @@
         )
 
     async def get_ap_account_from_jid_and_node(
-        self,
-        jid_: jid.JID,
-        node: Optional[str]
+        self, jid_: jid.JID, node: Optional[str]
     ) -> str:
         """Construct AP account from JID and node
 
@@ -778,26 +757,26 @@
         if node:
             account_elts.extend((node, "---"))
 
-        account_elts.extend((
-            user, "@", jid_.host if is_local else self.client.jid.userhost()
-        ))
+        account_elts.extend(
+            (user, "@", jid_.host if is_local else self.client.jid.userhost())
+        )
         return "".join(account_elts)
 
     def is_local(self, jid_: jid.JID) -> bool:
         """Returns True if jid_ use a domain or subdomain of gateway's host"""
         local_host = self.client.host.split(".")
         assert local_host
-        return jid_.host.split(".")[-len(local_host):] == local_host
+        return jid_.host.split(".")[-len(local_host) :] == local_host
 
     async def is_pubsub(self, jid_: jid.JID) -> bool:
         """Indicate if a JID is a Pubsub service"""
         host_disco = await self.host.get_disco_infos(self.client, jid_)
-        return (
-            ("pubsub", "service") in host_disco.identities
-            and not ("pubsub", "pep") in host_disco.identities
-        )
+        return ("pubsub", "service") in host_disco.identities and not (
+            "pubsub",
+            "pep",
+        ) in host_disco.identities
 
-    async def get_jid_and_node(self, ap_account: str) -> tuple[jid.JID, str|None]:
+    async def get_jid_and_node(self, ap_account: str) -> tuple[jid.JID, str | None]:
         """Decode raw AP account handle to get XMPP JID and Pubsub Node
 
         Username are case insensitive.
@@ -856,13 +835,11 @@
 
         if encoded:
             username = parse.unquote(
-                RE_PERIOD_ENC.sub(r"%\g<hex>", username),
-                errors="strict"
+                RE_PERIOD_ENC.sub(r"%\g<hex>", username), errors="strict"
             )
             if node:
                 node = parse.unquote(
-                    RE_PERIOD_ENC.sub(r"%\g<hex>", node),
-                    errors="strict"
+                    RE_PERIOD_ENC.sub(r"%\g<hex>", node), errors="strict"
                 )
 
         if "@" in username:
@@ -899,14 +876,7 @@
         The local jid is computer by escaping AP actor handle and using it as local part
         of JID, where domain part is this gateway own JID
         """
-        return jid.JID(
-            None,
-            (
-                self._e.escape(account),
-                self.client.jid.host,
-                None
-            )
-        )
+        return jid.JID(None, (self._e.escape(account), self.client.jid.host, None))
 
     async def get_jid_from_id(self, requestor_actor_id: str, actor_id: str) -> jid.JID:
         """Compute JID linking to an AP Actor ID
@@ -937,10 +907,10 @@
         @return: endpoint type and extra arguments
         """
         path = parse.urlparse(url).path.lstrip("/")
-        type_, *extra_args = path[len(self.ap_path):].lstrip("/").split("/")
+        type_, *extra_args = path[len(self.ap_path) :].lstrip("/").split("/")
         return type_, [parse.unquote(a) for a in extra_args]
 
-    def build_apurl(self, type_:str , *args: str) -> str:
+    def build_apurl(self, type_: str, *args: str) -> str:
         """Build an AP endpoint URL
 
         @param type_: type of AP endpoing
@@ -948,7 +918,7 @@
         """
         return parse.urljoin(
             self.base_ap_url,
-            str(Path(type_).joinpath(*(parse.quote_plus(a, safe="@") for a in args)))
+            str(Path(type_).joinpath(*(parse.quote_plus(a, safe="@") for a in args))),
         )
 
     def is_local_url(self, url: str) -> bool:
@@ -994,9 +964,7 @@
 
     @async_lru(maxsize=LRU_MAX_SIZE)
     async def get_actor_pub_key_data(
-        self,
-        requestor_actor_id: str,
-        actor_id: str
+        self, requestor_actor_id: str, actor_id: str
     ) -> Tuple[str, str, rsa.RSAPublicKey]:
         """Retrieve Public Key data from actor ID
 
@@ -1067,7 +1035,7 @@
         requestor_actor_id: str,
         signature: str,
         key_id: str,
-        headers: Dict[str, str]
+        headers: Dict[str, str],
     ) -> str:
         """Verify that signature matches given headers
 
@@ -1081,7 +1049,7 @@
 
         @raise InvalidSignature: signature doesn't match headers
         """
-        to_sign = "\n".join(f"{k.lower()}: {v}" for k,v in headers.items())
+        to_sign = "\n".join(f"{k.lower()}: {v}" for k, v in headers.items())
         if key_id.startswith("acct:"):
             actor = key_id[5:]
             actor_id = await self.get_ap_actor_id_from_account(actor)
@@ -1089,8 +1057,7 @@
             actor_id = key_id.split("#", 1)[0]
 
         pub_key_id, pub_key_owner, pub_key = await self.get_actor_pub_key_data(
-            requestor_actor_id,
-            actor_id
+            requestor_actor_id, actor_id
         )
         if pub_key_id != key_id or pub_key_owner != actor_id:
             raise exceptions.EncryptionError("Public Key mismatch")
@@ -1101,7 +1068,7 @@
                 to_sign.encode(),
                 # we have to use PKCS1v15 padding to be compatible with Mastodon
                 padding.PKCS1v15(),  # type: ignore
-                hashes.SHA256()  # type: ignore
+                hashes.SHA256(),  # type: ignore
             )
         except InvalidSignature:
             raise exceptions.EncryptionError(
@@ -1111,9 +1078,7 @@
         return actor_id
 
     def get_signature_data(
-            self,
-            key_id: str,
-            headers: Dict[str, str]
+        self, key_id: str, headers: Dict[str, str]
     ) -> Tuple[Dict[str, str], Dict[str, str]]:
         """Generate and return signature and corresponding headers
 
@@ -1130,20 +1095,22 @@
         """
         # headers must be lower case
         l_headers: Dict[str, str] = {k.lower(): v for k, v in headers.items()}
-        to_sign = "\n".join(f"{k}: {v}" for k,v in l_headers.items())
-        signature = base64.b64encode(self.private_key.sign(
-            to_sign.encode(),
-            # we have to use PKCS1v15 padding to be compatible with Mastodon
-            padding.PKCS1v15(),  # type: ignore
-            hashes.SHA256()  # type: ignore
-        )).decode()
+        to_sign = "\n".join(f"{k}: {v}" for k, v in l_headers.items())
+        signature = base64.b64encode(
+            self.private_key.sign(
+                to_sign.encode(),
+                # we have to use PKCS1v15 padding to be compatible with Mastodon
+                padding.PKCS1v15(),  # type: ignore
+                hashes.SHA256(),  # type: ignore
+            )
+        ).decode()
         sign_data = {
             "keyId": key_id,
             "Algorithm": "rsa-sha256",
             "headers": " ".join(l_headers.keys()),
-            "signature": signature
+            "signature": signature,
         }
-        new_headers = {k: v for k,v in headers.items() if not k.startswith("(")}
+        new_headers = {k: v for k, v in headers.items() if not k.startswith("(")}
         new_headers["Signature"] = self.build_signature_header(sign_data)
         return new_headers, sign_data
 
@@ -1167,18 +1134,19 @@
         """
         actor_id = await self.get_ap_actor_id_from_account(ap_account)
         requestor_actor_id = self.build_apurl(
-            TYPE_ACTOR,
-            await self.get_ap_account_from_jid_and_node(service, node)
+            TYPE_ACTOR, await self.get_ap_account_from_jid_and_node(service, node)
         )
         inbox = await self.get_ap_inbox_from_id(requestor_actor_id, actor_id)
         for item in items:
             if item.name == "item":
-                cached_item = await self.host.memory.storage.search_pubsub_items({
-                    "profiles": [self.client.profile],
-                    "services": [service],
-                    "nodes": [node],
-                    "names": [item["id"]]
-                })
+                cached_item = await self.host.memory.storage.search_pubsub_items(
+                    {
+                        "profiles": [self.client.profile],
+                        "services": [service],
+                        "nodes": [node],
+                        "names": [item["id"]],
+                    }
+                )
                 is_new = not bool(cached_item)
                 if node.startswith(self._events.namespace):
                     # event item
@@ -1259,7 +1227,7 @@
         service: jid.JID,
         node: str,
         items: List[domish.Element],
-        publisher: Optional[jid.JID] = None
+        publisher: Optional[jid.JID] = None,
     ) -> None:
         """Convert XMPP item attachments to AP activities and post them to actor inbox
 
@@ -1284,8 +1252,7 @@
 
         actor_id = await self.get_ap_actor_id_from_account(ap_account)
         requestor_actor_id = self.build_apurl(
-            TYPE_ACTOR,
-            await self.get_ap_account_from_jid_and_node(service, node)
+            TYPE_ACTOR, await self.get_ap_account_from_jid_and_node(service, node)
         )
         inbox = await self.get_ap_inbox_from_id(requestor_actor_id, actor_id)
 
@@ -1327,12 +1294,14 @@
         else:
             item_url = self.build_apurl(TYPE_ITEM, item_account, item_id)
 
-        old_attachment_pubsub_items = await self.host.memory.storage.search_pubsub_items({
-            "profiles": [self.client.profile],
-            "services": [service],
-            "nodes": [node],
-            "names": [item_elt["id"]]
-        })
+        old_attachment_pubsub_items = await self.host.memory.storage.search_pubsub_items(
+            {
+                "profiles": [self.client.profile],
+                "services": [service],
+                "nodes": [node],
+                "names": [item_elt["id"]],
+            }
+        )
         if not old_attachment_pubsub_items:
             old_attachment = {}
         else:
@@ -1345,10 +1314,7 @@
             except IndexError:
                 # no known element was present in attachments
                 old_attachment = {}
-        publisher_account = await self.get_ap_account_from_jid_and_node(
-            publisher,
-            None
-        )
+        publisher_account = await self.get_ap_account_from_jid_and_node(publisher, None)
         publisher_actor_id = self.build_apurl(TYPE_ACTOR, publisher_account)
         try:
             attachments = self._pa.items_2_attachment_data(client, [item_elt])[0]
@@ -1390,8 +1356,7 @@
                     "reaction", item_account, item_id, reaction.encode().hex()
                 )
                 reaction_activity = self.create_activity(
-                    TYPE_REACTION, publisher_actor_id, item_url,
-                    activity_id=activity_id
+                    TYPE_REACTION, publisher_actor_id, item_url, activity_id=activity_id
                 )
                 reaction_activity["content"] = reaction
                 reaction_activity["to"] = [ap_account]
@@ -1410,7 +1375,9 @@
             old_attending = old_attachment.get("rsvp", {}).get("attending", "no")
             if attending != old_attending:
                 activity_type = TYPE_JOIN if attending == "yes" else TYPE_LEAVE
-                activity_id = self.build_apurl(activity_type.lower(), item_account, item_id)
+                activity_id = self.build_apurl(
+                    activity_type.lower(), item_account, item_id
+                )
                 activity = self.create_activity(
                     activity_type, publisher_actor_id, item_url, activity_id=activity_id
                 )
@@ -1421,7 +1388,9 @@
             if "rsvp" in old_attachment:
                 old_attending = old_attachment.get("rsvp", {}).get("attending", "no")
                 if old_attending == "yes":
-                    activity_id = self.build_apurl(TYPE_LEAVE.lower(), item_account, item_id)
+                    activity_id = self.build_apurl(
+                        TYPE_LEAVE.lower(), item_account, item_id
+                    )
                     activity = self.create_activity(
                         TYPE_LEAVE, publisher_actor_id, item_url, activity_id=activity_id
                     )
@@ -1436,14 +1405,11 @@
                 client, service, node, with_subscriptions=True, create=True
             )
             await self.host.memory.storage.cache_pubsub_items(
-                self.client,
-                cached_node,
-                [item_elt],
-                [attachments]
+                self.client, cached_node, [item_elt], [attachments]
             )
 
     def _publish_message(self, mess_data_s: str, service_s: str, profile: str):
-        mess_data: dict = data_format.deserialise(mess_data_s) # type: ignore
+        mess_data: dict = data_format.deserialise(mess_data_s)  # type: ignore
         service = jid.JID(service_s)
         client = self.host.get_client(profile)
         return defer.ensureDeferred(self.publish_message(client, mess_data, service))
@@ -1463,10 +1429,12 @@
             raise ValueError(f"Invalid account: {account!r}")
         host = account.split("@")[1]
         try:
-            finger_data = await treq.json_content(await treq.get(
-                f"https://{host}/.well-known/webfinger?"
-                f"resource=acct:{parse.quote_plus(account)}",
-            ))
+            finger_data = await treq.json_content(
+                await treq.get(
+                    f"https://{host}/.well-known/webfinger?"
+                    f"resource=acct:{parse.quote_plus(account)}",
+                )
+            )
         except Exception as e:
             raise exceptions.DataError(f"Can't get webfinger data for {account!r}: {e}")
         for link in finger_data.get("links", []):
@@ -1481,15 +1449,11 @@
                     )
                 break
         else:
-            raise ValueError(
-                f"No ActivityPub link found for {account!r}"
-            )
+            raise ValueError(f"No ActivityPub link found for {account!r}")
         return href
 
     async def get_ap_actor_data_from_account(
-        self,
-        requestor_actor_id: str,
-        account: str
+        self, requestor_actor_id: str, account: str
     ) -> dict:
         """Retrieve ActivityPub Actor data
 
@@ -1499,10 +1463,7 @@
         return await self.ap_get(href, requestor_actor_id)
 
     async def get_ap_inbox_from_id(
-        self,
-        requestor_actor_id: str,
-        actor_id: str,
-        use_shared: bool = True
+        self, requestor_actor_id: str, actor_id: str, use_shared: bool = True
     ) -> str:
         """Retrieve inbox of an actor_id
 
@@ -1531,16 +1492,12 @@
         if self.is_local_url(actor_id):
             url_type, url_args = self.parse_apurl(actor_id)
             if url_type != "actor" or not url_args:
-                raise exceptions.DataError(
-                    f"invalid local actor ID: {actor_id}"
-                )
+                raise exceptions.DataError(f"invalid local actor ID: {actor_id}")
             account = url_args[0]
             try:
-                account_user, account_host = account.split('@')
+                account_user, account_host = account.split("@")
             except ValueError:
-                raise exceptions.DataError(
-                    f"invalid account from url: {actor_id}"
-                )
+                raise exceptions.DataError(f"invalid account from url: {actor_id}")
             if not account_user or account_host != self.public_url:
                 raise exceptions.DataError(
                     f"{account!r} is not a valid local account (from {actor_id})"
@@ -1636,7 +1593,7 @@
                 raise error.StanzaError(
                     "feature-not-implemented",
                     text="Maximum limit for previous_index has been reached, this limit"
-                    "is set to avoid DoS"
+                    "is set to avoid DoS",
                 )
             else:
                 # we'll convert "start_index" to "after_id", thus we need the item just
@@ -1665,7 +1622,7 @@
                         raise error.StanzaError(
                             "service-unavailable",
                             "Error while retrieving previous page from AP service at "
-                            f"{current_page}"
+                            f"{current_page}",
                         )
 
         init_page = "last" if chronological_pagination else "first"
@@ -1698,10 +1655,11 @@
                     found_after_id = True
                     if chronological_pagination:
                         start_index = retrieved_items - len(page_items) + limit_idx + 1
-                        page_items = page_items[limit_idx+1:]
+                        page_items = page_items[limit_idx + 1 :]
                     else:
-                        start_index = count - (retrieved_items - len(page_items) +
-                                               limit_idx + 1)
+                        start_index = count - (
+                            retrieved_items - len(page_items) + limit_idx + 1
+                        )
                         page_items = page_items[:limit_idx]
                     items.extend(page_items)
             else:
@@ -1730,14 +1688,13 @@
                 rsm_resp["index"] = 0
             else:
                 rsm_resp["index"] = count - len(items)
-            rsm_resp.update({
-                "first": items[0]["id"],
-                "last": items[-1]["id"]
-            })
+            rsm_resp.update({"first": items[0]["id"], "last": items[-1]["id"]})
 
         return items, rsm.RSMResponse(**rsm_resp)
 
-    async def ap_item_2_mb_data_and_elt(self, requestor_actor_id: str, ap_item: dict) -> tuple[dict, domish.Element]:
+    async def ap_item_2_mb_data_and_elt(
+        self, requestor_actor_id: str, ap_item: dict
+    ) -> tuple[dict, domish.Element]:
         """Convert AP item to parsed microblog data and corresponding item element
 
         @param requestor_actor_id: ID of the actor requesting the conversion.
@@ -1754,7 +1711,9 @@
             item_elt["publisher"] = mb_data["author_jid"]
         return mb_data, item_elt
 
-    async def ap_item_2_mb_elt(self, requestor_actor_id: str, ap_item: dict) -> domish.Element:
+    async def ap_item_2_mb_elt(
+        self, requestor_actor_id: str, ap_item: dict
+    ) -> domish.Element:
         """Convert AP item to XMPP item element
 
         @param requestor_actor_id: ID of the actor requesting the conversion.
@@ -1769,7 +1728,7 @@
         requestor_actor_id: str,
         page: Union[str, dict],
         parser: Callable[[str, dict], Awaitable[domish.Element]],
-        only_ids: bool = False
+        only_ids: bool = False,
     ) -> Tuple[dict, List[domish.Element]]:
         """Convert AP objects from an AP page to XMPP items
 
@@ -1781,18 +1740,20 @@
         """
         page_data = await self.ap_get_object(requestor_actor_id, page)
         if page_data is None:
-            log.warning('No data found in collection')
+            log.warning("No data found in collection")
             return {}, []
-        ap_items = await self.ap_get_list(requestor_actor_id, page_data, "orderedItems", only_ids=only_ids)
+        ap_items = await self.ap_get_list(
+            requestor_actor_id, page_data, "orderedItems", only_ids=only_ids
+        )
         if ap_items is None:
-            ap_items = await self.ap_get_list(requestor_actor_id, page_data, "items", only_ids=only_ids)
+            ap_items = await self.ap_get_list(
+                requestor_actor_id, page_data, "items", only_ids=only_ids
+            )
             if not ap_items:
-                log.warning(f'No item field found in collection: {page_data!r}')
+                log.warning(f"No item field found in collection: {page_data!r}")
                 return page_data, []
             else:
-                log.warning(
-                    "Items are not ordered, this is not spec compliant"
-                )
+                log.warning("Items are not ordered, this is not spec compliant")
         items = []
         # AP Collections are in antichronological order, but we expect chronological in
         # Pubsub, thus we reverse it
@@ -1805,10 +1766,7 @@
         return page_data, items
 
     async def get_comments_nodes(
-        self,
-        requestor_actor_id: str,
-        item_id: str,
-        parent_id: Optional[str]
+        self, requestor_actor_id: str, item_id: str, parent_id: Optional[str]
     ) -> Tuple[Optional[str], Optional[str]]:
         """Get node where this item is and node to use for comments
 
@@ -1827,7 +1785,7 @@
         if parent_id is None or not self.comments_max_depth:
             return (
                 self._m.get_comments_node(parent_id) if parent_id is not None else None,
-                self._m.get_comments_node(item_id)
+                self._m.get_comments_node(item_id),
             )
         parent_url = parent_id
         parents = []
@@ -1837,18 +1795,15 @@
             parent_url = parent_item.get("inReplyTo")
             if parent_url is None:
                 break
-        parent_limit = self.comments_max_depth-1
+        parent_limit = self.comments_max_depth - 1
         if len(parents) <= parent_limit:
             return (
                 self._m.get_comments_node(parents[-1]["id"]),
-                self._m.get_comments_node(item_id)
+                self._m.get_comments_node(item_id),
             )
         else:
             last_level_item = parents[parent_limit]
-            return (
-                self._m.get_comments_node(last_level_item["id"]),
-                None
-            )
+            return (self._m.get_comments_node(last_level_item["id"]), None)
 
     async def ap_item_2_mb_data(self, requestor_actor_id: str, ap_item: dict) -> dict:
         """Convert AP activity or object to microblog data
@@ -1925,7 +1880,9 @@
         if is_activity:
             authors = await self.ap_get_actors(requestor_actor_id, ap_item, "actor")
         else:
-            authors = await self.ap_get_actors(requestor_actor_id, ap_object, "attributedTo")
+            authors = await self.ap_get_actors(
+                requestor_actor_id, ap_object, "attributedTo"
+            )
         if len(authors) > 1:
             # we only keep first item as author
             # TODO: handle multiple actors
@@ -1963,22 +1920,14 @@
             comments_data = {
                 "service": author_jid,
                 "node": comments_node,
-                "uri": uri.build_xmpp_uri(
-                    "pubsub",
-                    path=author_jid,
-                    node=comments_node
-                )
+                "uri": uri.build_xmpp_uri("pubsub", path=author_jid, node=comments_node),
             }
             mb_data["comments"] = [comments_data]
 
         return mb_data
 
     async def get_reply_to_id_from_xmpp_node(
-        self,
-        client: SatXMPPEntity,
-        ap_account: str,
-        parent_item: str,
-        mb_data: dict
+        self, client: SatXMPPEntity, ap_account: str, parent_item: str, mb_data: dict
     ) -> str:
         """Get URL to use for ``inReplyTo`` field in AP item.
 
@@ -1995,18 +1944,16 @@
         """
         # FIXME: propose a protoXEP to properly get parent item, node and service
 
-        found_items = await self.host.memory.storage.search_pubsub_items({
-            "profiles": [client.profile],
-            "names": [parent_item]
-        })
+        found_items = await self.host.memory.storage.search_pubsub_items(
+            {"profiles": [client.profile], "names": [parent_item]}
+        )
         if not found_items:
             log.warning(f"parent item {parent_item!r} not found in cache")
             parent_ap_account = ap_account
         elif len(found_items) == 1:
             cached_node = found_items[0].node
             parent_ap_account = await self.get_ap_account_from_jid_and_node(
-                cached_node.service,
-                cached_node.name
+                cached_node.service, cached_node.name
             )
         else:
             # we found several cached item with given ID, we check if there is one
@@ -2014,32 +1961,25 @@
             try:
                 author = jid.JID(mb_data["author_jid"]).userhostJID()
                 cached_item = next(
-                    i for i in found_items
-                    if jid.JID(i.data["publisher"]).userhostJID()
-                    == author
+                    i
+                    for i in found_items
+                    if jid.JID(i.data["publisher"]).userhostJID() == author
                 )
             except StopIteration:
                 # no item corresponding to this author, we use ap_account
                 log.warning(
-                    "Can't find a single cached item for parent item "
-                    f"{parent_item!r}"
+                    "Can't find a single cached item for parent item " f"{parent_item!r}"
                 )
                 parent_ap_account = ap_account
             else:
                 cached_node = cached_item.node
                 parent_ap_account = await self.get_ap_account_from_jid_and_node(
-                    cached_node.service,
-                    cached_node.name
+                    cached_node.service, cached_node.name
                 )
 
-        return self.build_apurl(
-            TYPE_ITEM, parent_ap_account, parent_item
-        )
+        return self.build_apurl(TYPE_ITEM, parent_ap_account, parent_item)
 
-    async def repeated_mb_2_ap_item(
-        self,
-        mb_data: dict
-    ) -> dict:
+    async def repeated_mb_2_ap_item(self, mb_data: dict) -> dict:
         """Convert repeated blog item to suitable AP Announce activity
 
         @param mb_data: microblog metadata of an item repeating an other blog post
@@ -2047,10 +1987,7 @@
         """
         repeated = mb_data["extra"]["repeated"]
         repeater = jid.JID(repeated["by"])
-        repeater_account = await self.get_ap_account_from_jid_and_node(
-            repeater,
-            None
-        )
+        repeater_account = await self.get_ap_account_from_jid_and_node(repeater, None)
         repeater_id = self.build_apurl(TYPE_ACTOR, repeater_account)
         repeated_uri = repeated["uri"]
 
@@ -2095,7 +2032,7 @@
         announce["to"] = [NS_AP_PUBLIC]
         announce["cc"] = [
             self.build_apurl(TYPE_FOLLOWERS, repeater_account),
-            await self.get_ap_actor_id_from_account(repeated_account)
+            await self.get_ap_actor_id_from_account(repeated_account),
         ]
         return announce
 
@@ -2103,7 +2040,7 @@
         self,
         client: SatXMPPEntity,
         mb_data: dict,
-        public: bool =True,
+        public: bool = True,
         is_new: bool = True,
     ) -> dict:
         """Convert Libervia Microblog Data to ActivityPub item
@@ -2131,8 +2068,7 @@
         if not mb_data.get("author_jid"):
             mb_data["author_jid"] = client.jid.userhost()
         ap_account = await self.get_ap_account_from_jid_and_node(
-            jid.JID(mb_data["author_jid"]),
-            None
+            jid.JID(mb_data["author_jid"]), None
         )
         url_actor = self.build_apurl(TYPE_ACTOR, ap_account)
         url_item = self.build_apurl(TYPE_ITEM, ap_account, mb_data["id"])
@@ -2153,17 +2089,11 @@
             ap_attachments = ap_object["attachment"] = []
             for attachment in attachments:
                 try:
-                    url = next(
-                        s['url'] for s in attachment["sources"] if 'url' in s
-                    )
+                    url = next(s["url"] for s in attachment["sources"] if "url" in s)
                 except (StopIteration, KeyError):
-                    log.warning(
-                        f"Ignoring attachment without URL: {attachment}"
-                    )
+                    log.warning(f"Ignoring attachment without URL: {attachment}")
                     continue
-                ap_attachment = {
-                    "url": url
-                }
+                ap_attachment = {"url": url}
                 for key, ap_key in (
                     ("media_type", "mediaType"),
                     # XXX: yes "name", cf. [ap_item_2_mb_data]
@@ -2191,19 +2121,19 @@
                         log.warning(f"Can't add mention to {mentioned!r}: {e}")
                     else:
                         ap_object["to"].append(mentioned_id)
-                        ap_object.setdefault("tag", []).append({
-                            "type": TYPE_MENTION,
-                            "href": mentioned_id,
-                            "name": mention,
-                        })
+                        ap_object.setdefault("tag", []).append(
+                            {
+                                "type": TYPE_MENTION,
+                                "href": mentioned_id,
+                                "name": mention,
+                            }
+                        )
             try:
                 node = mb_data["node"]
                 service = jid.JID(mb_data["service"])
             except KeyError:
                 # node and service must always be specified when this method is used
-                raise exceptions.InternalError(
-                    "node or service is missing in mb_data"
-                )
+                raise exceptions.InternalError("node or service is missing in mb_data")
             try:
                 target_ap_account = await self.get_ap_account_from_jid_and_node(
                     service, node
@@ -2226,8 +2156,7 @@
             if self.is_virtual_jid(service):
                 # service is a proxy JID for AP account
                 actor_data = await self.get_ap_actor_data_from_account(
-                    url_actor,
-                    target_ap_account
+                    url_actor, target_ap_account
                 )
                 followers = actor_data.get("followers")
             else:
@@ -2244,10 +2173,7 @@
                 else:
                     # the publication is from a followed real XMPP node
                     ap_object["inReplyTo"] = await self.get_reply_to_id_from_xmpp_node(
-                        client,
-                        ap_account,
-                        parent_item,
-                        mb_data
+                        client, ap_account, parent_item, mb_data
                     )
 
         return self.create_activity(
@@ -2255,10 +2181,7 @@
         )
 
     async def publish_message(
-        self,
-        client: SatXMPPEntity,
-        mess_data: dict,
-        service: jid.JID
+        self, client: SatXMPPEntity, mess_data: dict, service: jid.JID
     ) -> None:
         """Send an AP message
 
@@ -2292,11 +2215,7 @@
         await self.ap_post(inbox_url, url_actor, item_data)
 
     async def ap_delete_item(
-        self,
-        jid_: jid.JID,
-        node: Optional[str],
-        item_id: str,
-        public: bool = True
+        self, jid_: jid.JID, node: Optional[str], item_id: str, public: bool = True
     ) -> Tuple[str, Dict[str, Any]]:
         """Build activity to delete an AP item
 
@@ -2314,11 +2233,9 @@
         author_account = await self.get_ap_account_from_jid_and_node(jid_, node)
         author_actor_id = self.build_apurl(TYPE_ACTOR, author_account)
 
-        items = await self.host.memory.storage.search_pubsub_items({
-            "profiles": [self.client.profile],
-            "services": [jid_],
-            "names": [item_id]
-        })
+        items = await self.host.memory.storage.search_pubsub_items(
+            {"profiles": [self.client.profile], "services": [jid_], "names": [item_id]}
+        )
         if not items:
             log.warning(
                 f"Deleting an unknown item at service {jid_}, node {node} and id "
@@ -2326,7 +2243,9 @@
             )
         else:
             try:
-                mb_data = await self._m.item_2_mb_data(self.client, items[0].data, jid_, node)
+                mb_data = await self._m.item_2_mb_data(
+                    self.client, items[0].data, jid_, node
+                )
                 if "repeated" in mb_data["extra"]:
                     # we are deleting a repeated item, we must translate this to an
                     # "Undo" of the "Announce" activity instead of a "Delete" one
@@ -2341,12 +2260,7 @@
 
         url_item = self.build_apurl(TYPE_ITEM, author_account, item_id)
         ap_item = self.create_activity(
-            "Delete",
-            author_actor_id,
-            {
-                "id": url_item,
-                "type": TYPE_TOMBSTONE
-            }
+            "Delete", author_actor_id, {"id": url_item, "type": TYPE_TOMBSTONE}
         )
         if public:
             ap_item["to"] = [NS_AP_PUBLIC]
@@ -2356,7 +2270,7 @@
         self,
         client: SatXMPPEntity,
         message_elt: domish.Element,
-        post_treat: defer.Deferred
+        post_treat: defer.Deferred,
     ) -> bool:
         """add the gateway workflow on post treatment"""
         if self.client is None:
@@ -2381,9 +2295,7 @@
             log.warning(f"ignoring non local message: {mess_data}")
             return mess_data
         if not mess_data["to"].user:
-            log.warning(
-                f"ignoring message addressed to gateway itself: {mess_data}"
-            )
+            log.warning(f"ignoring message addressed to gateway itself: {mess_data}")
             return mess_data
         requestor_actor_id = self.build_apurl(TYPE_ACTOR, mess_data["from"].userhost())
 
@@ -2391,9 +2303,7 @@
         try:
             actor_id = await self.get_ap_actor_id_from_account(actor_account)
         except Exception as e:
-            log.warning(
-                f"Can't retrieve data on actor {actor_account}: {e}"
-            )
+            log.warning(f"Can't retrieve data on actor {actor_account}: {e}")
             # TODO: send an error <message>
             return mess_data
         inbox = await self.get_ap_inbox_from_id(
@@ -2417,9 +2327,7 @@
             mb_data["id"] = origin_id
         attachments = mess_data["extra"].get(C.KEY_ATTACHMENTS)
         if attachments:
-            mb_data["extra"] = {
-                C.KEY_ATTACHMENTS: attachments
-            }
+            mb_data["extra"] = {C.KEY_ATTACHMENTS: attachments}
 
         client = self.client.get_virtual_client(mess_data["from"])
         ap_item = await self.mb_data_2_ap_item(client, mb_data, public=False)
@@ -2427,19 +2335,19 @@
         ap_object["to"] = ap_item["to"] = [actor_id]
         # we add a mention to direct message, otherwise peer is not notified in some AP
         # implementations (notably Mastodon), and the message may be missed easily.
-        ap_object.setdefault("tag", []).append({
-            "type": TYPE_MENTION,
-            "href": actor_id,
-            "name": f"@{actor_account}",
-        })
+        ap_object.setdefault("tag", []).append(
+            {
+                "type": TYPE_MENTION,
+                "href": actor_id,
+                "name": f"@{actor_account}",
+            }
+        )
 
         try:
             await self.ap_post(inbox, ap_item["actor"], ap_item)
         except Exception as e:
             # TODO: send an error <message>
-            log.warning(
-                f"Can't send message to {inbox}: {e}"
-            )
+            log.warning(f"Can't send message to {inbox}: {e}")
         return mess_data
 
     async def _on_message_retract(
@@ -2447,29 +2355,24 @@
         client: SatXMPPEntity,
         message_elt: domish.Element,
         retract_elt: domish.Element,
-        history: History
+        history: History,
     ) -> bool:
         if client != self.client:
             return True
         from_jid = jid.JID(message_elt["from"])
         if not self.is_local(from_jid):
-            log.debug(
-                f"ignoring retract request from non local jid {from_jid}"
-            )
+            log.debug(f"ignoring retract request from non local jid {from_jid}")
             return False
-        requestor_actor_id = self.build_apurl(
-            TYPE_ACTOR,
-            from_jid.userhost()
-        )
+        requestor_actor_id = self.build_apurl(TYPE_ACTOR, from_jid.userhost())
         to_jid = jid.JID(message_elt["to"])
-        if (to_jid.host != self.client.jid.full() or not to_jid.user):
+        if to_jid.host != self.client.jid.full() or not to_jid.user:
             # to_jid should be a virtual JID from this gateway
-            raise exceptions.InternalError(
-                f"Invalid destinee's JID: {to_jid.full()}"
-            )
+            raise exceptions.InternalError(f"Invalid destinee's JID: {to_jid.full()}")
         ap_account = self._e.unescape(to_jid.user)
         actor_id = await self.get_ap_actor_id_from_account(ap_account)
-        inbox = await self.get_ap_inbox_from_id(requestor_actor_id, actor_id, use_shared=False)
+        inbox = await self.get_ap_inbox_from_id(
+            requestor_actor_id, actor_id, use_shared=False
+        )
         url_actor, ap_item = await self.ap_delete_item(
             from_jid.userhostJID(), None, retract_elt["id"], public=False
         )
@@ -2480,7 +2383,7 @@
         self,
         client: SatXMPPEntity,
         message_elt: domish.Element,
-        reference_data: Dict[str, Union[str, int]]
+        reference_data: Dict[str, Union[str, int]],
     ) -> bool:
         parsed_uri: dict = reference_data.get("parsed_uri")
         if not parsed_uri:
@@ -2553,14 +2456,18 @@
         ap_item = await self.mb_data_2_ap_item(client, mb_data)
         ap_object = ap_item["object"]
         ap_object["to"] = [actor_id]
-        ap_object.setdefault("tag", []).append({
-            "type": TYPE_MENTION,
-            "href": actor_id,
-            "name": ap_account,
-        })
+        ap_object.setdefault("tag", []).append(
+            {
+                "type": TYPE_MENTION,
+                "href": actor_id,
+                "name": ap_account,
+            }
+        )
 
         requestor_actor_id = ap_item["actor"]
-        inbox = await self.get_ap_inbox_from_id(requestor_actor_id, actor_id, use_shared=False)
+        inbox = await self.get_ap_inbox_from_id(
+            requestor_actor_id, actor_id, use_shared=False
+        )
 
         await self.ap_post(inbox, requestor_actor_id, ap_item)
 
@@ -2583,7 +2490,7 @@
             )
             return
         try:
-            parent_item_account, parent_item_id = url_args[0], '/'.join(url_args[1:])
+            parent_item_account, parent_item_id = url_args[0], "/".join(url_args[1:])
         except (IndexError, ValueError):
             log.warning(
                 "Ignoring AP item replying to an XMPP item with invalid inReplyTo URL "
@@ -2603,7 +2510,8 @@
         except IndexError:
             log.warning(
                 f"Can't find parent item at {parent_item_service} (node "
-                f"{parent_item_node!r})\n{pformat(ap_item)}")
+                f"{parent_item_node!r})\n{pformat(ap_item)}"
+            )
             return
         parent_item_parsed = await self._m.item_2_mb_data(
             client, parent_item_elt, parent_item_service, parent_item_node
@@ -2614,16 +2522,18 @@
         except (KeyError, IndexError):
             # we don't have a comment node set for this item
             from libervia.backend.tools.xml_tools import pp_elt
+
             log.info(f"{pp_elt(parent_item_elt.toXml())}")
             raise NotImplementedError()
         else:
             requestor_actor_id = self.build_apurl(
                 TYPE_ACTOR,
-                await self.get_ap_account_from_jid_and_node(comment_service, comment_node)
+                await self.get_ap_account_from_jid_and_node(
+                    comment_service, comment_node
+                ),
             )
             __, item_elt = await self.ap_item_2_mb_data_and_elt(
-                requestor_actor_id,
-                ap_item
+                requestor_actor_id, ap_item
             )
             await self._p.publish(client, comment_service, comment_node, [item_elt])
             await self.notify_mentions(
@@ -2631,8 +2541,7 @@
             )
 
     def get_ap_item_targets(
-        self,
-        item: Dict[str, Any]
+        self, item: Dict[str, Any]
     ) -> Tuple[bool, Dict[str, Set[str]], List[Dict[str, str]]]:
         """Retrieve targets of an AP item, and indicate if it's a public one
 
@@ -2704,18 +2613,13 @@
         is_public, targets, mentions = self.get_ap_item_targets(item)
         if not is_public and targets.keys() == {TYPE_ACTOR}:
             # this is a direct message
-            await self.handle_message_ap_item(
-                client, targets, mentions, destinee, item
-            )
+            await self.handle_message_ap_item(client, targets, mentions, destinee, item)
         else:
             await self.handle_pubsub_ap_item(
                 client, targets, mentions, destinee, node, item, is_public
             )
 
-    def get_requestor_actor_id_from_targets(
-        self,
-        targets: set[str]
-    ) -> str:
+    def get_requestor_actor_id_from_targets(self, targets: set[str]) -> str:
         """Find local actor to use as requestor_actor_id from request targets.
 
         A local actor must be used to sign HTTP request, notably HTTP GET request for AP
@@ -2731,19 +2635,15 @@
         try:
             return next(t for t in targets if self.is_local_url(t))
         except StopIteration:
-            log.warning(
-                f"Can't find local target to use as requestor ID: {targets!r}"
-            )
-            return self.build_apurl(
-                TYPE_ACTOR, f"libervia@{self.public_url}"
-            )
+            log.warning(f"Can't find local target to use as requestor ID: {targets!r}")
+            return self.build_apurl(TYPE_ACTOR, f"libervia@{self.public_url}")
 
     async def handle_message_ap_item(
         self,
         client: SatXMPPEntity,
         targets: dict[str, Set[str]],
         mentions: list[Dict[str, str]],
-        destinee: jid.JID|None,
+        destinee: jid.JID | None,
         item: dict,
     ) -> None:
         """Parse and deliver direct AP items translating to XMPP messages
@@ -2755,15 +2655,12 @@
         targets_urls = {t for t_set in targets.values() for t in t_set}
         requestor_actor_id = self.get_requestor_actor_id_from_targets(targets_urls)
         targets_jids = {
-            await self.get_jid_from_id(requestor_actor_id, url)
-            for url in targets_urls
+            await self.get_jid_from_id(requestor_actor_id, url) for url in targets_urls
         }
         if destinee is not None:
             targets_jids.add(destinee)
         mb_data = await self.ap_item_2_mb_data(requestor_actor_id, item)
-        extra = {
-            "origin_id": mb_data["id"]
-        }
+        extra = {"origin_id": mb_data["id"]}
         attachments = mb_data["extra"].get(C.KEY_ATTACHMENTS)
         if attachments:
             extra[C.KEY_ATTACHMENTS] = attachments
@@ -2773,9 +2670,9 @@
             defer_l.append(
                 client.sendMessage(
                     target_jid,
-                    {'': mb_data.get("content", "")},
+                    {"": mb_data.get("content", "")},
                     mb_data.get("title"),
-                    extra=extra
+                    extra=extra,
                 )
             )
         await defer.DeferredList(defer_l)
@@ -2798,18 +2695,16 @@
         """
         targets_urls = {t for t_set in targets.values() for t in t_set}
         requestor_actor_id = self.get_requestor_actor_id_from_targets(targets_urls)
-        anchor = uri.build_xmpp_uri("pubsub", path=service.full(), node=node, item=item_id)
+        anchor = uri.build_xmpp_uri(
+            "pubsub", path=service.full(), node=node, item=item_id
+        )
         seen = set()
         # we start with explicit mentions because mentions' content will be used in the
         # future to fill "begin" and "end" reference attributes (we can't do it at the
         # moment as there is no way to specify the XML element to use in the blog item).
         for mention in mentions:
             mentioned_jid = await self.get_jid_from_id(requestor_actor_id, mention["uri"])
-            self._refs.send_reference(
-                self.client,
-                to_jid=mentioned_jid,
-                anchor=anchor
-            )
+            self._refs.send_reference(self.client, to_jid=mentioned_jid, anchor=anchor)
             seen.add(mentioned_jid)
 
         remaining = {
@@ -2818,21 +2713,17 @@
             for t in t_set
         } - seen
         for target in remaining:
-            self._refs.send_reference(
-                self.client,
-                to_jid=target,
-                anchor=anchor
-            )
+            self._refs.send_reference(self.client, to_jid=target, anchor=anchor)
 
     async def handle_pubsub_ap_item(
         self,
         client: SatXMPPEntity,
         targets: dict[str, set[str]],
         mentions: list[dict[str, str]],
-        destinee: jid.JID|None,
+        destinee: jid.JID | None,
         node: str,
         item: dict,
-        public: bool
+        public: bool,
     ) -> None:
         """Analyse, cache and deliver AP items translating to Pubsub
 
@@ -2859,14 +2750,16 @@
             # this item is a reply to an AP item, we use or create a corresponding node
             # for comments
             parent_node, __ = await self.get_comments_nodes(
-                requestor_actor_id,
-                item["id"],
-                in_reply_to
+                requestor_actor_id, item["id"], in_reply_to
             )
             node = parent_node or node
             cached_node = await self.host.memory.storage.get_pubsub_node(
-                client, service, node, with_subscriptions=True, create=True,
-                create_kwargs={"subscribed": True}
+                client,
+                service,
+                node,
+                with_subscriptions=True,
+                create=True,
+                create_kwargs={"subscribed": True},
             )
         else:
             # it is a root item (i.e. not a reply to an other item)
@@ -2878,33 +2771,25 @@
                 log.warning(
                     f"Received item in unknown node {node!r} at {service}. This may be "
                     f"due to a cache purge. We synchronise the node\n{item}"
-
                 )
                 return
         if item.get("type") == TYPE_EVENT:
             data, item_elt = await self.ap_events.ap_item_2_event_data_and_elt(
-                requestor_actor_id,
-                item
+                requestor_actor_id, item
             )
         else:
             data, item_elt = await self.ap_item_2_mb_data_and_elt(
-                requestor_actor_id,
-                item
+                requestor_actor_id, item
             )
         await self.host.memory.storage.cache_pubsub_items(
-            client,
-            cached_node,
-            [item_elt],
-            [data]
+            client, cached_node, [item_elt], [data]
         )
 
         for subscription in cached_node.subscriptions:
             if subscription.state != SubscriptionState.SUBSCRIBED:
                 continue
             self.pubsub_service.notifyPublish(
-                service,
-                node,
-                [(subscription.subscriber, None, [item_elt])]
+                service, node, [(subscription.subscriber, None, [item_elt])]
             )
 
         await self.notify_mentions(targets, mentions, service, node, item_elt["id"])
@@ -2940,7 +2825,7 @@
             History,
             History.origin_id,
             item_id,
-            (History.messages, History.subjects)
+            (History.messages, History.subjects),
         )
 
         if history is not None:
@@ -2973,7 +2858,5 @@
                 if subscription.state != SubscriptionState.SUBSCRIBED:
                     continue
                 self.pubsub_service.notifyRetract(
-                    client.jid,
-                    node,
-                    [(subscription.subscriber, None, [item_elt])]
+                    client.jid, node, [(subscription.subscriber, None, [item_elt])]
                 )
--- a/libervia/backend/plugins/plugin_comp_ap_gateway/ad_hoc.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_comp_ap_gateway/ad_hoc.py	Wed Jun 19 18:44:57 2024 +0200
@@ -29,6 +29,7 @@
 log = getLogger(__name__)
 NS_XMPP_JID_NODE_2_AP = "https://libervia.org/ap_gateway/xmpp_jid_node_2_ap_actor"
 
+
 class APAdHocService:
     """Ad-Hoc commands for AP Gateway"""
 
@@ -52,7 +53,7 @@
         command_elt: domish.Element,
         session_data: dict,
         action: str,
-        node: str
+        node: str,
     ):
         try:
             x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x"))
@@ -63,18 +64,15 @@
             # root request
             status = self._c.STATUS.EXECUTING
             form = data_form.Form(
-                "form", title="XMPP JID/node to AP actor conversion",
-                formNamespace=NS_XMPP_JID_NODE_2_AP
+                "form",
+                title="XMPP JID/node to AP actor conversion",
+                formNamespace=NS_XMPP_JID_NODE_2_AP,
             )
 
-            field = data_form.Field(
-                "text-single", "jid", required=True
-            )
+            field = data_form.Field("text-single", "jid", required=True)
             form.addField(field)
 
-            field = data_form.Field(
-                "text-single", "node", required=False
-            )
+            field = data_form.Field("text-single", "node", required=False)
             form.addField(field)
 
             payload = form.toElement()
--- a/libervia/backend/plugins/plugin_comp_ap_gateway/constants.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_comp_ap_gateway/constants.py	Wed Jun 19 18:44:57 2024 +0200
@@ -55,32 +55,71 @@
         ("digest", "(request-target)"),
     ],
     b"GET": ["host"],
-    b"POST": ["digest"]
+    b"POST": ["digest"],
 }
 PAGE_SIZE = 10
 HS2019 = "hs2019"
 # delay after which a signed request is not accepted anymore
-SIGN_EXP = 12*60*60  # 12 hours (same value as for Mastodon)
+SIGN_EXP = 12 * 60 * 60  # 12 hours (same value as for Mastodon)
 
 LRU_MAX_SIZE = 200
 ACTIVITY_TYPES = (
-    "Accept", "Add", "Announce", "Arrive", "Block", "Create", "Delete", "Dislike", "Flag",
-    "Follow", "Ignore", "Invite", "Join", "Leave", "Like", "Listen", "Move", "Offer",
-    "Question", "Reject", "Read", "Remove", "TentativeReject", "TentativeAccept",
-    "Travel", "Undo", "Update", "View",
+    "Accept",
+    "Add",
+    "Announce",
+    "Arrive",
+    "Block",
+    "Create",
+    "Delete",
+    "Dislike",
+    "Flag",
+    "Follow",
+    "Ignore",
+    "Invite",
+    "Join",
+    "Leave",
+    "Like",
+    "Listen",
+    "Move",
+    "Offer",
+    "Question",
+    "Reject",
+    "Read",
+    "Remove",
+    "TentativeReject",
+    "TentativeAccept",
+    "Travel",
+    "Undo",
+    "Update",
+    "View",
     # non-standard activities
-    "EmojiReact"
+    "EmojiReact",
 )
 ACTIVITY_TYPES_LOWER = [a.lower() for a in ACTIVITY_TYPES]
 ACTIVITY_OBJECT_MANDATORY = (
-    "Create", "Update", "Delete", "Follow", "Add", "Remove", "Like", "Block", "Undo"
+    "Create",
+    "Update",
+    "Delete",
+    "Follow",
+    "Add",
+    "Remove",
+    "Like",
+    "Block",
+    "Undo",
 )
 ACTIVITY_TARGET_MANDATORY = ("Add", "Remove")
 # activities which can be used with Shared Inbox (i.e. with no account specified)
 # must be lowercase
 ACTIVIY_NO_ACCOUNT_ALLOWED = (
-    "create", "update", "delete", "announce", "undo", "like", "emojireact", "join",
-    "leave"
+    "create",
+    "update",
+    "delete",
+    "announce",
+    "undo",
+    "like",
+    "emojireact",
+    "join",
+    "leave",
 )
 # maximum number of parents to retrieve when comments_max_depth option is set
 COMMENTS_MAX_PARENTS = 100
--- a/libervia/backend/plugins/plugin_comp_ap_gateway/events.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_comp_ap_gateway/events.py	Wed Jun 19 18:44:57 2024 +0200
@@ -45,11 +45,12 @@
     "addressCountry": "sc:addressCountry",
     "addressLocality": "sc:addressLocality",
     "addressRegion": "sc:addressRegion",
-    "anonymousParticipationEnabled": {"@id": "mz:anonymousParticipationEnabled",
-                                      "@type": "sc:Boolean"},
+    "anonymousParticipationEnabled": {
+        "@id": "mz:anonymousParticipationEnabled",
+        "@type": "sc:Boolean",
+    },
     "category": "sc:category",
-    "commentsEnabled": {"@id": "pt:commentsEnabled",
-                        "@type": "sc:Boolean"},
+    "commentsEnabled": {"@id": "pt:commentsEnabled", "@type": "sc:Boolean"},
     "discoverable": "toot:discoverable",
     "discussions": {"@id": "mz:discussions", "@type": "@id"},
     "events": {"@id": "mz:events", "@type": "@id"},
@@ -57,8 +58,7 @@
     "inLanguage": "sc:inLanguage",
     "isOnline": {"@id": "mz:isOnline", "@type": "sc:Boolean"},
     "joinMode": {"@id": "mz:joinMode", "@type": "mz:joinModeType"},
-    "joinModeType": {"@id": "mz:joinModeType",
-                     "@type": "rdfs:Class"},
+    "joinModeType": {"@id": "mz:joinModeType", "@type": "rdfs:Class"},
     "location": {"@id": "sc:location", "@type": "sc:Place"},
     "manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
     "maximumAttendeeCapacity": "sc:maximumAttendeeCapacity",
@@ -66,19 +66,21 @@
     "members": {"@id": "mz:members", "@type": "@id"},
     "mz": "https://joinmobilizon.org/ns#",
     "openness": {"@id": "mz:openness", "@type": "@id"},
-    "participantCount": {"@id": "mz:participantCount",
-                         "@type": "sc:Integer"},
-    "participationMessage": {"@id": "mz:participationMessage",
-                             "@type": "sc:Text"},
+    "participantCount": {"@id": "mz:participantCount", "@type": "sc:Integer"},
+    "participationMessage": {"@id": "mz:participationMessage", "@type": "sc:Text"},
     "postalCode": "sc:postalCode",
     "posts": {"@id": "mz:posts", "@type": "@id"},
     "propertyID": "sc:propertyID",
     "pt": "https://joinpeertube.org/ns#",
     "remainingAttendeeCapacity": "sc:remainingAttendeeCapacity",
-    "repliesModerationOption": {"@id": "mz:repliesModerationOption",
-                                "@type": "mz:repliesModerationOptionType"},
-    "repliesModerationOptionType": {"@id": "mz:repliesModerationOptionType",
-                                    "@type": "rdfs:Class"},
+    "repliesModerationOption": {
+        "@id": "mz:repliesModerationOption",
+        "@type": "mz:repliesModerationOptionType",
+    },
+    "repliesModerationOptionType": {
+        "@id": "mz:repliesModerationOptionType",
+        "@type": "rdfs:Class",
+    },
     "resources": {"@id": "mz:resources", "@type": "@id"},
     "sc": "http://schema.org#",
     "streetAddress": "sc:streetAddress",
@@ -86,7 +88,7 @@
     "todos": {"@id": "mz:todos", "@type": "@id"},
     "toot": "http://joinmastodon.org/ns#",
     "uuid": "sc:identifier",
-    "value": "sc:value"
+    "value": "sc:value",
 }
 
 
@@ -99,7 +101,7 @@
         self._events = self.host.plugins["XEP-0471"]
 
     async def event_data_2_ap_item(
-        self, event_data: dict, author_jid: jid.JID, is_new: bool=True
+        self, event_data: dict, author_jid: jid.JID, is_new: bool = True
     ) -> dict:
         """Convert event data to AP activity
 
@@ -114,8 +116,7 @@
         if not event_data.get("id"):
             event_data["id"] = shortuuid.uuid()
         ap_account = await self.apg.get_ap_account_from_jid_and_node(
-            author_jid,
-            self._events.namespace
+            author_jid, self._events.namespace
         )
         url_actor = self.apg.build_apurl(TYPE_ACTOR, ap_account)
         url_item = self.apg.build_apurl(TYPE_ITEM, ap_account, event_data["id"])
@@ -141,12 +142,14 @@
             pass
         else:
             media_type = mimetypes.guess_type(head_picture_url, False)[0] or "image/jpeg"
-            attachment.append({
-                "name": "Banner",
-                "type": "Document",
-                "mediaType": media_type,
-                "url": head_picture_url,
-            })
+            attachment.append(
+                {
+                    "name": "Banner",
+                    "type": "Document",
+                    "mediaType": media_type,
+                    "url": head_picture_url,
+                }
+            )
 
         descriptions = event_data.get("descriptions")
         if descriptions:
@@ -162,10 +165,12 @@
         if categories:
             tag = ap_object["tag"] = []
             for category in categories:
-                tag.append({
-                    "name": f"#{category['term']}",
-                    "type": "Hashtag",
-                })
+                tag.append(
+                    {
+                        "name": f"#{category['term']}",
+                        "type": "Hashtag",
+                    }
+                )
 
         locations = event_data.get("locations")
         if locations:
@@ -203,12 +208,14 @@
 
             website = extra.get("website")
             if website:
-                attachment.append({
-                    "href": website,
-                    "mediaType": "text/html",
-                    "name": "Website",
-                    "type": "Link"
-                })
+                attachment.append(
+                    {
+                        "href": website,
+                        "mediaType": "text/html",
+                        "name": "Website",
+                        "type": "Link",
+                    }
+                )
 
             accessibility = extra.get("accessibility")
             if accessibility:
@@ -224,11 +231,13 @@
                         log.error(f"unexpected wheelchair value: {wheelchair}")
                         ap_wc_value = None
                     if ap_wc_value is not None:
-                        attachment.append({
-                            "propertyID": "mz:accessibility:wheelchairAccessible",
-                            "type": "PropertyValue",
-                            "value": ap_wc_value
-                        })
+                        attachment.append(
+                            {
+                                "propertyID": "mz:accessibility:wheelchairAccessible",
+                                "type": "PropertyValue",
+                                "value": ap_wc_value,
+                            }
+                        )
 
         activity = self.apg.create_activity(
             "Create" if is_new else "Update", url_actor, ap_object, activity_id=url_item
@@ -236,11 +245,7 @@
         activity["@context"].append(AP_EVENTS_CONTEXT)
         return activity
 
-    async def ap_item_2_event_data(
-        self,
-        requestor_actor_id: str,
-        ap_item: dict
-    ) -> dict:
+    async def ap_item_2_event_data(self, requestor_actor_id: str, ap_item: dict) -> dict:
         """Convert AP activity or object to event data
 
         @param requestor_actor_id: ID of the actor doing the request.
@@ -251,7 +256,9 @@
         """
         is_activity = self.apg.is_activity(ap_item)
         if is_activity:
-            ap_object = await self.apg.ap_get_object(requestor_actor_id, ap_item, "object")
+            ap_object = await self.apg.ap_get_object(
+                requestor_actor_id, ap_item, "object"
+            )
             if not ap_object:
                 log.warning(f'No "object" found in AP item {ap_item!r}')
                 raise exceptions.DataError
@@ -311,10 +318,7 @@
         # description
         content = ap_object.get("content")
         if content:
-            event_data["descriptions"] = [{
-                "type": "xhtml",
-                "description": content
-            }]
+            event_data["descriptions"] = [{"type": "xhtml", "description": content}]
 
         # categories
         tags = ap_object.get("tag")
@@ -329,7 +333,7 @@
                         continue
                     categories.append({"term": term})
 
-        #location
+        # location
         ap_location = ap_object.get("location")
         if ap_location:
             location = {}
@@ -361,24 +365,24 @@
         # a simple "yes"/"no" form.
         rsvp_data = {"fields": []}
         event_data["rsvp"] = [rsvp_data]
-        rsvp_data["fields"].append({
-            "type": "list-single",
-            "name": "attending",
-            "label": "Attending",
-            "options": [
-                {"label": "yes", "value": "yes"},
-                {"label": "no", "value": "no"}
-            ],
-            "required": True
-        })
+        rsvp_data["fields"].append(
+            {
+                "type": "list-single",
+                "name": "attending",
+                "label": "Attending",
+                "options": [
+                    {"label": "yes", "value": "yes"},
+                    {"label": "no", "value": "no"},
+                ],
+                "required": True,
+            }
+        )
 
         # comments
 
         if ap_object.get("commentsEnabled"):
             __, comments_node = await self.apg.get_comments_nodes(
-                requestor_actor_id,
-                object_id,
-                None
+                requestor_actor_id, object_id, None
             )
             event_data["comments"] = {
                 "service": author_jid,
@@ -399,15 +403,10 @@
         return event_data
 
     async def ap_item_2_event_data_and_elt(
-        self,
-        requestor_actor_id: str,
-        ap_item: dict
+        self, requestor_actor_id: str, ap_item: dict
     ) -> tuple[dict, domish.Element]:
         """Convert AP item to parsed event data and corresponding item element"""
-        event_data = await self.ap_item_2_event_data(
-            requestor_actor_id,
-            ap_item
-        )
+        event_data = await self.ap_item_2_event_data(requestor_actor_id, ap_item)
         event_elt = self._events.event_data_2_event_elt(event_data)
         item_elt = domish.Element((None, "item"))
         item_elt["id"] = event_data["id"]
@@ -415,13 +414,10 @@
         return event_data, item_elt
 
     async def ap_item_2_event_elt(
-        self,
-        requestor_actor_id: str,
-        ap_item: dict
+        self, requestor_actor_id: str, ap_item: dict
     ) -> domish.Element:
         """Convert AP item to XMPP item element"""
         __, item_elt = await self.ap_item_2_event_data_and_elt(
-            requestor_actor_id,
-            ap_item
+            requestor_actor_id, ap_item
         )
         return item_elt
--- a/libervia/backend/plugins/plugin_comp_ap_gateway/http_server.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_comp_ap_gateway/http_server.py	Wed Jun 19 18:44:57 2024 +0200
@@ -41,11 +41,29 @@
 from libervia.backend.memory.sqla_mapping import SubscriptionState
 
 from .constants import (
-    NS_AP, MEDIA_TYPE_AP, MEDIA_TYPE_AP_ALT, CONTENT_TYPE_WEBFINGER, CONTENT_TYPE_AP,
-    TYPE_ACTOR, TYPE_INBOX, TYPE_SHARED_INBOX, TYPE_OUTBOX, TYPE_EVENT, AP_REQUEST_TYPES,
-    PAGE_SIZE, ACTIVITY_TYPES_LOWER, ACTIVIY_NO_ACCOUNT_ALLOWED, SIGN_HEADERS, HS2019,
-    SIGN_EXP, TYPE_FOLLOWERS, TYPE_FOLLOWING, TYPE_ITEM, TYPE_LIKE, TYPE_REACTION,
-    ST_AP_CACHE
+    NS_AP,
+    MEDIA_TYPE_AP,
+    MEDIA_TYPE_AP_ALT,
+    CONTENT_TYPE_WEBFINGER,
+    CONTENT_TYPE_AP,
+    TYPE_ACTOR,
+    TYPE_INBOX,
+    TYPE_SHARED_INBOX,
+    TYPE_OUTBOX,
+    TYPE_EVENT,
+    AP_REQUEST_TYPES,
+    PAGE_SIZE,
+    ACTIVITY_TYPES_LOWER,
+    ACTIVIY_NO_ACCOUNT_ALLOWED,
+    SIGN_HEADERS,
+    HS2019,
+    SIGN_EXP,
+    TYPE_FOLLOWERS,
+    TYPE_FOLLOWING,
+    TYPE_ITEM,
+    TYPE_LIKE,
+    TYPE_REACTION,
+    ST_AP_CACHE,
 )
 from .regex import RE_SIG_PARAM
 
@@ -53,13 +71,13 @@
 log = getLogger(__name__)
 
 VERSION = unicodedata.normalize(
-    'NFKD',
-    f"{C.APP_NAME} ActivityPub Gateway {C.APP_VERSION}"
+    "NFKD", f"{C.APP_NAME} ActivityPub Gateway {C.APP_VERSION}"
 )
 
 
 class HTTPAPGServer(web_resource.Resource):
     """HTTP Server handling ActivityPub S2S protocol"""
+
     isLeaf = True
 
     def __init__(self, ap_gateway):
@@ -68,30 +86,23 @@
         super().__init__()
 
     def response_code(
-        self,
-        request: "HTTPRequest",
-        http_code: int,
-        msg: Optional[str] = None
+        self, request: "HTTPRequest", http_code: int, msg: Optional[str] = None
     ) -> None:
         """Log and set HTTP return code and associated message"""
         if msg is not None:
             log.warning(msg)
         request.setResponseCode(http_code, None if msg is None else msg.encode())
 
-    def _on_request_error(self, failure_: failure.Failure, request: "HTTPRequest") -> None:
+    def _on_request_error(
+        self, failure_: failure.Failure, request: "HTTPRequest"
+    ) -> None:
         exc = failure_.value
         if isinstance(exc, exceptions.NotFound):
-            self.response_code(
-                request,
-                http.NOT_FOUND,
-                str(exc)
-            )
+            self.response_code(request, http.NOT_FOUND, str(exc))
         else:
             log.exception(f"Internal error: {failure_.value}")
             self.response_code(
-                request,
-                http.INTERNAL_SERVER_ERROR,
-                f"internal error: {failure_.value}"
+                request, http.INTERNAL_SERVER_ERROR, f"internal error: {failure_.value}"
             )
             request.finish()
             raise failure_
@@ -105,7 +116,7 @@
         account = resource[5:].strip()
         if not resource.startswith("acct:") or not account:
             return web_resource.ErrorPage(
-                http.BAD_REQUEST, "Bad Request" , "Invalid webfinger resource"
+                http.BAD_REQUEST, "Bad Request", "Invalid webfinger resource"
             ).render(request)
 
         actor_url = self.apg.build_apurl(TYPE_ACTOR, account)
@@ -114,12 +125,8 @@
             "aliases": [actor_url],
             "subject": resource,
             "links": [
-                {
-                    "rel": "self",
-                    "type": "application/activity+json",
-                    "href": actor_url
-                }
-            ]
+                {"rel": "self", "type": "application/activity+json", "href": actor_url}
+            ],
         }
         request.setHeader("content-type", CONTENT_TYPE_WEBFINGER)
         request.write(json.dumps(resp).encode())
@@ -134,7 +141,7 @@
         node: Optional[str],
         ap_account: str,
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ) -> None:
         if node is None:
             node = self.apg._m.namespace
@@ -181,9 +188,11 @@
             elif type_ == TYPE_LIKE:
                 await self.handle_attachment_item(client, obj, {"noticed": False})
             elif type_ == TYPE_REACTION:
-                await self.handle_attachment_item(client, obj, {
-                    "reactions": {"operation": "update", "remove": [obj["content"]]}
-                })
+                await self.handle_attachment_item(
+                    client,
+                    obj,
+                    {"reactions": {"operation": "update", "remove": [obj["content"]]}},
+                )
             else:
                 log.warning(f"Unmanaged undo type: {type_!r}")
 
@@ -196,7 +205,7 @@
         node: Optional[str],
         ap_account: str,
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ) -> None:
         if node is None:
             node = self.apg._m.namespace
@@ -207,7 +216,7 @@
                 account_jid,
                 node,
                 # subscriptions from AP are always public
-                options=self.apg._pps.set_public_opt()
+                options=self.apg._pps.set_public_opt(),
             )
         except pubsub.SubscriptionPending:
             log.info(f"subscription to node {node!r} of {account_jid} is pending")
@@ -218,9 +227,7 @@
                 raise exceptions.InternalError('"subscribed" state was expected')
             inbox = await self.apg.get_ap_inbox_from_id(signing_actor, use_shared=False)
             actor_id = self.apg.build_apurl(TYPE_ACTOR, ap_account)
-            accept_data = self.apg.create_activity(
-                "Accept", actor_id, object_=data
-            )
+            accept_data = self.apg.create_activity("Accept", actor_id, object_=data)
             await self.apg.sign_and_post(inbox, actor_id, accept_data)
         await self.apg._c.synchronise(client, account_jid, node, resync=False)
 
@@ -233,7 +240,7 @@
         node: Optional[str],
         ap_account: str,
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ) -> None:
         if node is None:
             node = self.apg._m.namespace
@@ -253,7 +260,9 @@
                     continue
                 try:
                     sub = next(
-                        s for s in follow_node.subscriptions if s.subscriber==account_jid
+                        s
+                        for s in follow_node.subscriptions
+                        if s.subscriber == account_jid
                     )
                 except StopIteration:
                     log.warning(
@@ -283,7 +292,7 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ):
         if node is None:
             node = self.apg._m.namespace
@@ -325,13 +334,9 @@
             if repeated:
                 # we don't check sender when item is repeated, as it should be different
                 # from post author in this case
-                sender_jid = await self.apg.get_jid_from_id(
-                    requestor_actor_id,
-                    sender
-                )
+                sender_jid = await self.apg.get_jid_from_id(requestor_actor_id, sender)
                 repeater_jid = await self.apg.get_jid_from_id(
-                    requestor_actor_id,
-                    signing_actor
+                    requestor_actor_id, signing_actor
                 )
                 repeated_item_id = obj["id"]
                 if self.apg.is_local_url(repeated_item_id):
@@ -347,13 +352,14 @@
                         if not url_account or not url_item_id:
                             raise ValueError
                     except (RuntimeError, ValueError):
-                        raise exceptions.DataError(
-                            "local URI is invalid: {repeated_id}"
-                        )
+                        raise exceptions.DataError("local URI is invalid: {repeated_id}")
                     else:
                         url_jid, url_node = await self.apg.get_jid_and_node(url_account)
-                        if ((url_jid != sender_jid
-                             or url_node and url_node != self.apg._m.namespace)):
+                        if (
+                            url_jid != sender_jid
+                            or url_node
+                            and url_node != self.apg._m.namespace
+                        ):
                             raise exceptions.DataError(
                                 "announced ID doesn't match sender ({sender}): "
                                 f"[repeated_item_id]"
@@ -368,17 +374,15 @@
                         "pubsub",
                         path=sender_jid.full(),
                         node=self.apg._m.namespace,
-                        item=repeated_item_id
-                    )
+                        item=repeated_item_id,
+                    ),
                 }
                 # we must use activity's id and targets, not the original item ones
                 for field in ("id", "to", "bto", "cc", "bcc"):
                     obj[field] = data.get(field)
             else:
                 if sender != signing_actor:
-                    log.warning(
-                        "Ignoring object not attributed to signing actor: {obj}"
-                    )
+                    log.warning("Ignoring object not attributed to signing actor: {obj}")
                     continue
 
             await self.apg.new_ap_item(client, account_jid, node, obj)
@@ -392,7 +396,7 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ):
         await self.handle_new_ap_items(
             requestor_actor_id, request, data, account_jid, node, signing_actor
@@ -407,7 +411,7 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ):
         # Update is the same as create: the item ID stays the same, thus the item will be
         # overwritten
@@ -424,7 +428,7 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ):
         # we create a new item
         await self.handle_new_ap_items(
@@ -434,14 +438,11 @@
             account_jid,
             node,
             signing_actor,
-            repeated=True
+            repeated=True,
         )
 
     async def handle_attachment_item(
-        self,
-        client: SatXMPPEntity,
-        data: dict,
-        attachment_data: dict
+        self, client: SatXMPPEntity, data: dict, attachment_data: dict
     ) -> None:
         target_ids = data.get("object")
         if not target_ids:
@@ -485,11 +486,7 @@
                 author_jid, item_node, item_id
             )
             cached_node = await self.apg.host.memory.storage.get_pubsub_node(
-                client,
-                author_jid,
-                attachment_node,
-                with_subscriptions=True,
-                create=True
+                client, author_jid, attachment_node, with_subscriptions=True, create=True
             )
             found_items, __ = await self.apg.host.memory.storage.get_items(
                 cached_node, item_ids=[client.jid.userhost()]
@@ -501,19 +498,13 @@
                 old_item_elt = found_item.data
 
             item_elt = await self.apg._pa.apply_set_handler(
-                client,
-                {"extra": attachment_data},
-                old_item_elt,
-                None
+                client, {"extra": attachment_data}, old_item_elt, None
             )
             # we reparse the element, as there can be other attachments
             attachments_data = self.apg._pa.items_2_attachment_data(client, [item_elt])
             # and we update the cache
             await self.apg.host.memory.storage.cache_pubsub_items(
-                client,
-                cached_node,
-                [item_elt],
-                attachments_data or [{}]
+                client, cached_node, [item_elt], attachments_data or [{}]
             )
 
             if self.apg.is_virtual_jid(author_jid):
@@ -525,7 +516,7 @@
                     self.apg.pubsub_service.notifyPublish(
                         author_jid,
                         attachment_node,
-                        [(subscription.subscriber, None, [item_elt])]
+                        [(subscription.subscriber, None, [item_elt])],
                     )
             else:
                 # the attachment is on an XMPP item, we publish it to the attachment node
@@ -542,7 +533,7 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ) -> None:
         client = await self.apg.get_virtual_client(requestor_actor_id, signing_actor)
         await self.handle_attachment_item(client, data, {"noticed": True})
@@ -556,12 +547,12 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ) -> None:
         client = await self.apg.get_virtual_client(requestor_actor_id, signing_actor)
-        await self.handle_attachment_item(client, data, {
-            "reactions": {"operation": "update", "add": [data["content"]]}
-        })
+        await self.handle_attachment_item(
+            client, data, {"reactions": {"operation": "update", "add": [data["content"]]}}
+        )
 
     async def handle_join_activity(
         self,
@@ -572,7 +563,7 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ) -> None:
         client = await self.apg.get_virtual_client(requestor_actor_id, signing_actor)
         await self.handle_attachment_item(client, data, {"rsvp": {"attending": "yes"}})
@@ -586,7 +577,7 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: str
+        signing_actor: str,
     ) -> None:
         client = await self.apg.get_virtual_client(requestor_actor_id, signing_actor)
         await self.handle_attachment_item(client, data, {"rsvp": {"attending": "no"}})
@@ -599,7 +590,7 @@
         node: Optional[str],
         ap_account: str,
         ap_url: str,
-        signing_actor: Optional[str]
+        signing_actor: Optional[str],
     ) -> dict:
         inbox = self.apg.build_apurl(TYPE_INBOX, ap_account)
         shared_inbox = self.apg.build_apurl(TYPE_SHARED_INBOX)
@@ -623,9 +614,8 @@
         actor_data = {
             "@context": [
                 "https://www.w3.org/ns/activitystreams",
-                "https://w3id.org/security/v1"
+                "https://w3id.org/security/v1",
             ],
-
             # XXX: Mastodon doesn't like percent-encode arobas, so we have to unescape it
             #   if it is escaped
             "id": ap_url.replace("%40", "@"),
@@ -639,7 +629,7 @@
             "publicKey": {
                 "id": f"{ap_url}#main-key",
                 "owner": ap_url,
-                "publicKeyPem": self.apg.public_key_pem
+                "publicKeyPem": self.apg.public_key_pem,
             },
             "endpoints": {
                 "sharedInbox": shared_inbox,
@@ -664,7 +654,7 @@
                 actor_data["icon"] = {
                     "type": "Image",
                     "url": avatar_url,
-                    "mediaType": media_type
+                    "mediaType": media_type,
                 }
 
         return actor_data
@@ -672,13 +662,12 @@
     def get_canonical_url(self, request: "HTTPRequest") -> str:
         return parse.urljoin(
             f"https://{self.apg.public_url}",
-            request.path.decode().rstrip("/")
-        # we unescape "@" for the same reason as in [ap_actor_request]
+            request.path.decode().rstrip("/"),
+            # we unescape "@" for the same reason as in [ap_actor_request]
         ).replace("%40", "@")
 
     def query_data_2_rsm_request(
-        self,
-        query_data: Dict[str, List[str]]
+        self, query_data: Dict[str, List[str]]
     ) -> rsm.RSMRequest:
         """Get RSM kwargs to use with RSMRequest from query data"""
         page = query_data.get("page")
@@ -690,7 +679,7 @@
         else:
             for query_key in ("index", "before", "after"):
                 try:
-                    kwargs={query_key: query_data[query_key][0], "max_": PAGE_SIZE}
+                    kwargs = {query_key: query_data[query_key][0], "max_": PAGE_SIZE}
                 except (KeyError, IndexError, ValueError):
                     pass
                 else:
@@ -705,7 +694,7 @@
         node: Optional[str],
         ap_account: str,
         ap_url: str,
-        query_data: Dict[str, List[str]]
+        query_data: Dict[str, List[str]],
     ) -> dict:
         if node is None:
             node = self.apg._m.namespace
@@ -719,7 +708,7 @@
                 service=account_jid,
                 node=node,
                 rsm_request=self.query_data_2_rsm_request(query_data),
-                extra = {C.KEY_USE_CACHE: False}
+                extra={C.KEY_USE_CACHE: False},
             )
         except error.StanzaError as e:
             log.warning(f"Can't get data from pubsub node {node} at {account_jid}: {e}")
@@ -730,8 +719,7 @@
         if node and node.startswith(self.apg._events.namespace):
             ordered_items = [
                 await self.apg.ap_events.event_data_2_ap_item(
-                    self.apg._events.event_elt_2_event_data(item),
-                    account_jid
+                    self.apg._events.event_elt_2_event_data(item), account_jid
                 )
                 for item in reversed(items)
             ]
@@ -740,11 +728,8 @@
                 await self.apg.mb_data_2_ap_item(
                     self.apg.client,
                     await self.apg._m.item_2_mb_data(
-                        self.apg.client,
-                        item,
-                        account_jid,
-                        node
-                    )
+                        self.apg.client, item, account_jid, node
+                    ),
                 )
                 for item in reversed(items)
             ]
@@ -753,7 +738,7 @@
             "id": url,
             "type": "OrderedCollectionPage",
             "partOf": base_url,
-            "orderedItems": ordered_items
+            "orderedItems": ordered_items,
         }
 
         if "rsm" not in metadata:
@@ -764,13 +749,13 @@
         # of what we get with RSM (at least with Libervia Pubsub)
         if not metadata["complete"]:
             try:
-                last= metadata["rsm"]["last"]
+                last = metadata["rsm"]["last"]
             except KeyError:
                 last = None
             ret_data["prev"] = f"{base_url}?{parse.urlencode({'after': last})}"
         if metadata["rsm"]["index"] != 0:
             try:
-                first= metadata["rsm"]["first"]
+                first = metadata["rsm"]["first"]
             except KeyError:
                 first = None
             ret_data["next"] = f"{base_url}?{parse.urlencode({'before': first})}"
@@ -785,7 +770,7 @@
         node: Optional[str],
         ap_account: str,
         ap_url: str,
-        signing_actor: Optional[str]
+        signing_actor: Optional[str],
     ) -> dict:
         if node is None:
             node = self.apg._m.namespace
@@ -809,7 +794,7 @@
                 node=node,
                 max_items=0,
                 rsm_request=rsm.RSMRequest(max_=0),
-                extra = {C.KEY_USE_CACHE: False}
+                extra={C.KEY_USE_CACHE: False},
             )
         except error.StanzaError as e:
             log.warning(f"Can't get data from pubsub node {node} at {account_jid}: {e}")
@@ -844,7 +829,7 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: Optional[str]
+        signing_actor: Optional[str],
     ) -> None:
         assert data is not None
         if signing_actor is None:
@@ -855,14 +840,14 @@
             return self.response_code(
                 request,
                 http.UNSUPPORTED_MEDIA_TYPE,
-                f"request is not an activity, ignoring"
+                f"request is not an activity, ignoring",
             )
 
         if account_jid is None and activity_type not in ACTIVIY_NO_ACCOUNT_ALLOWED:
             return self.response_code(
                 request,
                 http.UNSUPPORTED_MEDIA_TYPE,
-                f"{activity_type.title()!r} activity must target an account"
+                f"{activity_type.title()!r} activity must target an account",
             )
 
         try:
@@ -871,12 +856,18 @@
             return self.response_code(
                 request,
                 http.UNSUPPORTED_MEDIA_TYPE,
-                f"{activity_type.title()} activity is not yet supported"
+                f"{activity_type.title()} activity is not yet supported",
             )
         else:
             await method(
-                requestor_actor_id, request, data, account_jid, node, ap_account, ap_url,
-                signing_actor
+                requestor_actor_id,
+                request,
+                data,
+                account_jid,
+                node,
+                ap_account,
+                ap_url,
+                signing_actor,
             )
 
     async def ap_followers_request(
@@ -887,7 +878,7 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: Optional[str]
+        signing_actor: Optional[str],
     ) -> dict:
         if node is None:
             node = self.apg._m.namespace
@@ -902,20 +893,22 @@
                 ap_account = self.apg._e.unescape(subscriber.user)
             else:
                 # regular XMPP user
-                ap_account = await self.apg.get_ap_account_from_jid_and_node(subscriber, node)
+                ap_account = await self.apg.get_ap_account_from_jid_and_node(
+                    subscriber, node
+                )
             followers.append(ap_account)
 
         url = self.get_canonical_url(request)
         return {
-          "@context": ["https://www.w3.org/ns/activitystreams"],
-          "type": "OrderedCollection",
-          "id": url,
-          "totalItems": len(subscribers),
-          "first": {
-            "type": "OrderedCollectionPage",
+            "@context": ["https://www.w3.org/ns/activitystreams"],
+            "type": "OrderedCollection",
             "id": url,
-            "orderedItems": followers
-          }
+            "totalItems": len(subscribers),
+            "first": {
+                "type": "OrderedCollectionPage",
+                "id": url,
+                "orderedItems": followers,
+            },
         }
 
     async def ap_following_request(
@@ -926,12 +919,10 @@
         node: Optional[str],
         ap_account: Optional[str],
         ap_url: str,
-        signing_actor: Optional[str]
+        signing_actor: Optional[str],
     ) -> dict[str, Any]:
         client = self.apg.client
-        subscriptions = await self.apg._pps.subscriptions(
-            client, account_jid, node
-        )
+        subscriptions = await self.apg._pps.subscriptions(client, account_jid, node)
         following = []
         for sub_dict in subscriptions:
             service = jid.JID(sub_dict["service"])
@@ -947,15 +938,15 @@
 
         url = self.get_canonical_url(request)
         return {
-          "@context": ["https://www.w3.org/ns/activitystreams"],
-          "type": "OrderedCollection",
-          "id": url,
-          "totalItems": len(subscriptions),
-          "first": {
-            "type": "OrderedCollectionPage",
+            "@context": ["https://www.w3.org/ns/activitystreams"],
+            "type": "OrderedCollection",
             "id": url,
-            "orderedItems": following
-          }
+            "totalItems": len(subscriptions),
+            "first": {
+                "type": "OrderedCollectionPage",
+                "id": url,
+                "orderedItems": following,
+            },
         }
 
     def _get_to_log(
@@ -965,24 +956,23 @@
     ) -> List[str]:
         """Get base data to logs in verbose mode"""
         from pprint import pformat
+
         to_log = [
             "",
-            f"<<< got {request.method.decode()} request - {request.uri.decode()}"
+            f"<<< got {request.method.decode()} request - {request.uri.decode()}",
         ]
         if data is not None:
             to_log.append(pformat(data))
-        if self.apg.verbose>=3:
+        if self.apg.verbose >= 3:
             headers = "\n".join(
                 f"    {k.decode()}: {v.decode()}"
-                for k,v in request.getAllHeaders().items()
+                for k, v in request.getAllHeaders().items()
             )
             to_log.append(f"  headers:\n{headers}")
         return to_log
 
     def get_requestor_actor_id(
-        self,
-        data: dict|None = None,
-        uri_extra_args: list[str]|None = None
+        self, data: dict | None = None, uri_extra_args: list[str] | None = None
     ) -> str:
         """Find the actor ID of the requestor.
 
@@ -1034,33 +1024,30 @@
 
         # Still nothing, we'll have to use a generic actor.
         log.warning(
-            "Can't find destinee in \"to\" field, using generic requestor for signature."
+            'Can\'t find destinee in "to" field, using generic requestor for signature.'
         )
-        return self.apg.build_apurl(
-            TYPE_ACTOR, f"libervia@{self.apg.public_url}"
-        )
+        return self.apg.build_apurl(TYPE_ACTOR, f"libervia@{self.apg.public_url}")
 
     async def ap_request(
         self,
         request: "HTTPRequest",
-        data: dict|None = None,
-        signing_actor: str|None = None,
-        requestor_actor_id: str|None = None,
+        data: dict | None = None,
+        signing_actor: str | None = None,
+        requestor_actor_id: str | None = None,
     ) -> None:
         if self.apg.verbose:
             to_log = self._get_to_log(request, data)
 
         path = request.path.decode()
-        ap_url = parse.urljoin(
-            f"https://{self.apg.public_url}",
-            path
-        )
+        ap_url = parse.urljoin(f"https://{self.apg.public_url}", path)
         request_type, extra_args = self.apg.parse_apurl(ap_url)
 
         header_accept = request.getHeader("accept") or ""
-        if ((MEDIA_TYPE_AP not in header_accept
-             and MEDIA_TYPE_AP_ALT not in header_accept
-             and request_type in self.apg.html_redirect)):
+        if (
+            MEDIA_TYPE_AP not in header_accept
+            and MEDIA_TYPE_AP_ALT not in header_accept
+            and request_type in self.apg.html_redirect
+        ):
             # this is not a AP request, and we have a redirections for it
             kw = {}
             if extra_args:
@@ -1081,14 +1068,14 @@
                 if not filters:
                     break
                 # if we have filter, they must all match
-                elif all(v in kw[k] for k,v in filters.items()):
+                elif all(v in kw[k] for k, v in filters.items()):
                     break
             else:
                 # no redirection is matching
                 redirection = None
 
             if redirection is not None:
-                kw = {k: parse.quote(str(v), safe="") for k,v in kw.items()}
+                kw = {k: parse.quote(str(v), safe="") for k, v in kw.items()}
                 target_url = redirection["url"].format(**kw)
                 content = web_util.redirectTo(target_url.encode(), request)
                 request.write(content)
@@ -1096,9 +1083,7 @@
                 return
 
         if requestor_actor_id is None:
-            requestor_actor_id = self.get_requestor_actor_id(
-                data, extra_args
-            )
+            requestor_actor_id = self.get_requestor_actor_id(data, extra_args)
         if len(extra_args) == 0:
             if request_type != "shared_inbox":
                 raise exceptions.DataError(f"Invalid request type: {request_type!r}")
@@ -1121,21 +1106,29 @@
             ap_account = extra_args[0]
             account_jid, node = await self.apg.get_jid_and_node(ap_account)
             if request_type not in AP_REQUEST_TYPES.get(
-                    request.method.decode().upper(), []
+                request.method.decode().upper(), []
             ):
                 raise exceptions.DataError(f"Invalid request type: {request_type!r}")
             method = getattr(self, f"ap_{request_type}_request")
             ret_data = await method(
-                requestor_actor_id, request, data, account_jid, node, ap_account, ap_url, signing_actor
+                requestor_actor_id,
+                request,
+                data,
+                account_jid,
+                node,
+                ap_account,
+                ap_url,
+                signing_actor,
             )
         if ret_data is not None:
             request.setHeader("content-type", CONTENT_TYPE_AP)
             request.write(json.dumps(ret_data).encode())
         if self.apg.verbose:
             to_log.append(f"--- RET (code: {request.code})---")
-            if self.apg.verbose>=2:
+            if self.apg.verbose >= 2:
                 if ret_data is not None:
                     from pprint import pformat
+
                     to_log.append(f"{pformat(ret_data)}")
                     to_log.append("---")
             log.info("\n".join(to_log))
@@ -1149,15 +1142,13 @@
                 self.response_code(
                     request,
                     http.BAD_REQUEST,
-                    f"invalid body, was expecting a JSON object"
+                    f"invalid body, was expecting a JSON object",
                 )
                 request.finish()
                 return
         except (json.JSONDecodeError, ValueError) as e:
             self.response_code(
-                request,
-                http.BAD_REQUEST,
-                f"invalid json in inbox request: {e}"
+                request, http.BAD_REQUEST, f"invalid json in inbox request: {e}"
             )
             request.finish()
             return
@@ -1185,18 +1176,12 @@
                 to_log.append(f"  body: {request.content.read()!r}")
                 request.content.seek(0)
                 log.info("\n".join(to_log))
-            self.response_code(
-                request,
-                http.FORBIDDEN,
-                f"invalid signature: {e}"
-            )
+            self.response_code(request, http.FORBIDDEN, f"invalid signature: {e}")
             request.finish()
             return
         except Exception as e:
             self.response_code(
-                request,
-                http.INTERNAL_SERVER_ERROR,
-                f"Can't check signature: {e}"
+                request, http.INTERNAL_SERVER_ERROR, f"Can't check signature: {e}"
             )
             request.finish()
             return
@@ -1219,10 +1204,7 @@
             self._on_request_error(failure.Failure(e), request)
 
     async def check_signing_actor(
-        self,
-        requestor_actor_id: str,
-        data: dict,
-        signing_actor: str
+        self, requestor_actor_id: str, data: dict, signing_actor: str
     ) -> None:
         """That that signing actor correspond to actor declared in data
 
@@ -1241,9 +1223,7 @@
             )
 
     async def check_signature(
-        self,
-        requestor_actor_id: str,
-        request: "HTTPRequest"
+        self, requestor_actor_id: str, request: "HTTPRequest"
     ) -> str:
         """Check and validate HTTP signature
 
@@ -1264,10 +1244,11 @@
         except KeyError:
             raise exceptions.EncryptionError('"keyId" is missing from signature')
         algorithm = sign_data.get("algorithm", HS2019)
-        signed_headers = sign_data.get(
-            "headers",
-            "(created)" if algorithm==HS2019 else "date"
-        ).lower().split()
+        signed_headers = (
+            sign_data.get("headers", "(created)" if algorithm == HS2019 else "date")
+            .lower()
+            .split()
+        )
         try:
             headers_to_check = SIGN_HEADERS[None] + SIGN_HEADERS[request.method]
         except KeyError:
@@ -1284,9 +1265,7 @@
                         f"at least one of following header must be signed: {header}"
                     )
             elif header not in signed_headers:
-                raise exceptions.EncryptionError(
-                    f"the {header!r} header must be signed"
-                )
+                raise exceptions.EncryptionError(f"the {header!r} header must be signed")
 
         body = request.content.read()
         request.content.seek(0)
@@ -1329,7 +1308,8 @@
                     if forwarded is not None:
                         try:
                             host = [
-                                f[5:] for f in forwarded.split(";")
+                                f[5:]
+                                for f in forwarded.split(";")
                                 if f.startswith("host=")
                             ][0] or None
                         except IndexError:
@@ -1342,7 +1322,8 @@
                         value = host
                 elif to_sign == "digest":
                     hashes = {
-                        algo.lower(): hash_ for algo, hash_ in (
+                        algo.lower(): hash_
+                        for algo, hash_ in (
                             digest.split("=", 1) for digest in value.split(",")
                         )
                     }
@@ -1367,7 +1348,6 @@
         else:
             created = date_utils.date_parse(headers["date"])
 
-
         try:
             expires = float(headers["expires"])
         except KeyError:
@@ -1386,23 +1366,17 @@
 
         try:
             return await self.apg.check_signature(
-                requestor_actor_id,
-                sign_data["signature"],
-                key_id,
-                headers
+                requestor_actor_id, sign_data["signature"], key_id, headers
             )
         except exceptions.EncryptionError:
-            method, url = headers["(request-target)"].rsplit(' ', 1)
+            method, url = headers["(request-target)"].rsplit(" ", 1)
             headers["(request-target)"] = f"{method} {parse.unquote(url)}"
             log.debug(
                 "Using workaround for (request-target) encoding bug in signature, "
                 "see https://github.com/mastodon/mastodon/issues/18871"
             )
             return await self.apg.check_signature(
-                requestor_actor_id,
-                sign_data["signature"],
-                key_id,
-                headers
+                requestor_actor_id, sign_data["signature"], key_id, headers
             )
 
     def render(self, request):
--- a/libervia/backend/plugins/plugin_comp_ap_gateway/pubsub_service.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_comp_ap_gateway/pubsub_service.py	Wed Jun 19 18:44:57 2024 +0200
@@ -37,11 +37,7 @@
 from libervia.backend.tools.web import download_file
 from libervia.backend.memory.sqla_mapping import PubsubSub, SubscriptionState
 
-from .constants import (
-    TYPE_ACTOR,
-    ST_AVATAR,
-    MAX_AVATAR_SIZE
-)
+from .constants import TYPE_ACTOR, ST_AVATAR, MAX_AVATAR_SIZE
 
 
 log = getLogger(__name__)
@@ -52,13 +48,14 @@
     {"var": "pubsub#max_items", "value": "max"},
     {"var": "pubsub#access_model", "type": "list-single", "value": "open"},
     {"var": "pubsub#publish_model", "type": "list-single", "value": "open"},
-
 ]
 
 NODE_CONFIG_VALUES = {c["var"]: c["value"] for c in NODE_CONFIG}
 NODE_OPTIONS = {c["var"]: {} for c in NODE_CONFIG}
 for c in NODE_CONFIG:
-    NODE_OPTIONS[c["var"]].update({k:v for k,v in c.items() if k not in ("var", "value")})
+    NODE_OPTIONS[c["var"]].update(
+        {k: v for k, v in c.items() if k not in ("var", "value")}
+    )
 
 
 class APPubsubService(rsm.PubSubService):
@@ -88,34 +85,30 @@
             in requestor
         """
         if not recipient.user:
-            raise error.StanzaError(
-                "item-not-found",
-                text="No user part specified"
-            )
+            raise error.StanzaError("item-not-found", text="No user part specified")
         requestor_actor_id = self.apg.build_apurl(TYPE_ACTOR, requestor.userhost())
         recipient_account = self.apg._e.unescape(recipient.user)
-        recipient_actor_id = await self.apg.get_ap_actor_id_from_account(recipient_account)
+        recipient_actor_id = await self.apg.get_ap_actor_id_from_account(
+            recipient_account
+        )
         inbox = await self.apg.get_ap_inbox_from_id(recipient_actor_id, use_shared=False)
         return requestor_actor_id, recipient_actor_id, inbox
 
-
     @ensure_deferred
     async def publish(self, requestor, service, nodeIdentifier, items):
         if self.apg.local_only and not self.apg.is_local(requestor):
             raise error.StanzaError(
-                "forbidden",
-                "Only local users can publish on this gateway."
+                "forbidden", "Only local users can publish on this gateway."
             )
         if not service.user:
             raise error.StanzaError(
                 "bad-request",
-                "You must specify an ActivityPub actor account in JID user part."
+                "You must specify an ActivityPub actor account in JID user part.",
             )
         ap_account = self.apg._e.unescape(service.user)
         if ap_account.count("@") != 1:
             raise error.StanzaError(
-                "bad-request",
-                f"{ap_account!r} is not a valid ActivityPub actor account."
+                "bad-request", f"{ap_account!r} is not a valid ActivityPub actor account."
             )
 
         client = self.apg.client.get_virtual_client(requestor)
@@ -130,21 +123,17 @@
             cached_node = await self.host.memory.storage.get_pubsub_node(
                 client, service, nodeIdentifier, with_subscriptions=True, create=True
             )
-            await self.host.memory.storage.cache_pubsub_items(
-                client,
-                cached_node,
-                items
-            )
+            await self.host.memory.storage.cache_pubsub_items(client, cached_node, items)
             for subscription in cached_node.subscriptions:
                 if subscription.state != SubscriptionState.SUBSCRIBED:
                     continue
                 self.notifyPublish(
-                    service,
-                    nodeIdentifier,
-                    [(subscription.subscriber, None, items)]
+                    service, nodeIdentifier, [(subscription.subscriber, None, items)]
                 )
 
-    async def ap_following_2_elt(self, requestor_actor_id: str, ap_item: dict) -> domish.Element:
+    async def ap_following_2_elt(
+        self, requestor_actor_id: str, ap_item: dict
+    ) -> domish.Element:
         """Convert actor ID from following collection to XMPP item
 
         @param requestor_actor_id: ID of the actor doing the request.
@@ -159,9 +148,7 @@
         return item_elt
 
     async def ap_follower_2_elt(
-        self,
-        requestor_actor_id: str,
-        ap_item: dict
+        self, requestor_actor_id: str, ap_item: dict
     ) -> domish.Element:
         """Convert actor ID from followers collection to XMPP item
 
@@ -175,9 +162,7 @@
         return item_elt
 
     async def generate_v_card(
-        self,
-        requestor_actor_id: str,
-        ap_account: str
+        self, requestor_actor_id: str, ap_account: str
     ) -> domish.Element:
         """Generate vCard4 (XEP-0292) item element from ap_account's metadata
 
@@ -186,8 +171,7 @@
         @return: <item> with the <vcard> element
         """
         actor_data = await self.apg.get_ap_actor_data_from_account(
-            requestor_actor_id,
-            ap_account
+            requestor_actor_id, ap_account
         )
         identity_data = {}
 
@@ -212,10 +196,7 @@
         return item_elt
 
     async def get_avatar_data(
-        self,
-        client: SatXMPPEntity,
-        requestor_actor_id: str,
-        ap_account: str
+        self, client: SatXMPPEntity, requestor_actor_id: str, ap_account: str
     ) -> dict[str, Any]:
         """Retrieve actor's avatar if any, cache it and file actor_data
 
@@ -259,27 +240,21 @@
                 avatar_data = {
                     "path": dest_path,
                     "filename": filename,
-                    'media_type': image.guess_type(dest_path),
+                    "media_type": image.guess_type(dest_path),
                 }
 
-                await self.apg._i.cache_avatar(
-                    self.apg.IMPORT_NAME,
-                    avatar_data
-                )
+                await self.apg._i.cache_avatar(self.apg.IMPORT_NAME, avatar_data)
         else:
             avatar_data = {
-            "cache_uid": cache["uid"],
-            "path": cache["path"],
-            "media_type": cache["mime_type"]
-        }
+                "cache_uid": cache["uid"],
+                "path": cache["path"],
+                "media_type": cache["mime_type"],
+            }
 
         return avatar_data
 
     async def generate_avatar_metadata(
-        self,
-        client: SatXMPPEntity,
-        requestor_actor_id: str,
-        ap_account: str
+        self, client: SatXMPPEntity, requestor_actor_id: str, ap_account: str
     ) -> domish.Element:
         """Generate the metadata element for user avatar
 
@@ -308,14 +283,11 @@
         """
         if not itemIdentifiers:
             avatar_data = await self.get_avatar_data(
-                client,
-                requestor_actor_id,
-                ap_account
+                client, requestor_actor_id, ap_account
             )
             if "base64" not in avatar_data:
                 await threads.deferToThread(
-                    self._blocking_b_6_4_encode_avatar,
-                    avatar_data
+                    self._blocking_b_6_4_encode_avatar, avatar_data
                 )
         else:
             if len(itemIdentifiers) > 1:
@@ -327,10 +299,7 @@
             cache_data = self.apg.host.common_cache.get_metadata(item_id)
             if cache_data is None:
                 raise error.StanzaError("item-not-found")
-            avatar_data = {
-                "cache_uid": item_id,
-                "path": cache_data["path"]
-            }
+            avatar_data = {"cache_uid": item_id, "path": cache_data["path"]}
             await threads.deferToThread(self._blocking_b_6_4_encode_avatar, avatar_data)
 
         return self.apg._a.build_item_data_elt(avatar_data)
@@ -343,7 +312,7 @@
         node: str,
         maxItems: Optional[int],
         itemIdentifiers: Optional[List[str]],
-        rsm_req: Optional[rsm.RSMRequest]
+        rsm_req: Optional[rsm.RSMRequest],
     ) -> Tuple[List[domish.Element], Optional[rsm.RSMResponse]]:
         if not service.user:
             return [], None
@@ -353,8 +322,7 @@
             return [], None
 
         requestor_actor_id = self.apg.build_apurl(
-            TYPE_ACTOR,
-            await self.apg.get_ap_account_from_jid_and_node(service, node)
+            TYPE_ACTOR, await self.apg.get_ap_account_from_jid_and_node(service, node)
         )
 
         # cached_node may be pre-filled with some nodes (e.g. attachments nodes),
@@ -379,17 +347,12 @@
             return [item_elt], None
         elif node == self.apg._a.namespace_metadata:
             item_elt = await self.generate_avatar_metadata(
-                self.apg.client,
-                requestor_actor_id,
-                ap_account
+                self.apg.client, requestor_actor_id, ap_account
             )
             return [item_elt], None
         elif node == self.apg._a.namespace_data:
             item_elt = await self.generate_avatar_data(
-                self.apg.client,
-                requestor_actor_id,
-                ap_account,
-                itemIdentifiers
+                self.apg.client, requestor_actor_id, ap_account, itemIdentifiers
             )
             return [item_elt], None
         elif self.apg._pa.is_attachment_node(node):
@@ -410,7 +373,7 @@
                 raise error.StanzaError(
                     "feature-not-implemented",
                     text=f"AP Gateway {C.APP_VERSION} only supports "
-                    f"{self.apg._m.namespace} node for now"
+                    f"{self.apg._m.namespace} node for now",
                 )
             collection_name = "outbox"
             use_cache = True
@@ -444,18 +407,26 @@
             if rsm_req is None:
                 if maxItems is None:
                     maxItems = 20
-                kwargs.update({
-                    "max_items": maxItems,
-                    "chronological_pagination": False,
-                })
+                kwargs.update(
+                    {
+                        "max_items": maxItems,
+                        "chronological_pagination": False,
+                    }
+                )
             else:
-                if len(
-                    [v for v in (rsm_req.after, rsm_req.before, rsm_req.index)
-                     if v is not None]
-                ) > 1:
+                if (
+                    len(
+                        [
+                            v
+                            for v in (rsm_req.after, rsm_req.before, rsm_req.index)
+                            if v is not None
+                        ]
+                    )
+                    > 1
+                ):
                     raise error.StanzaError(
                         "bad-request",
-                        text="You can't use after, before and index at the same time"
+                        text="You can't use after, before and index at the same time",
                     )
                 kwargs.update({"max_items": rsm_req.max})
                 if rsm_req.after is not None:
@@ -476,25 +447,21 @@
                 try:
                     parent_data = await self.apg.ap_get(parent_item, requestor_actor_id)
                     collection = await self.apg.ap_get_object(
-                        requestor_actor_id,
-                        parent_data.get("object", {}),
-                        "replies"
+                        requestor_actor_id, parent_data.get("object", {}), "replies"
                     )
                 except Exception as e:
-                    raise error.StanzaError(
-                        "item-not-found",
-                        text=str(e)
-                    )
+                    raise error.StanzaError("item-not-found", text=str(e))
             else:
                 actor_data = await self.apg.get_ap_actor_data_from_account(
-                    requestor_actor_id,
-                    ap_account
+                    requestor_actor_id, ap_account
                 )
-                collection = await self.apg.ap_get_object(requestor_actor_id, actor_data, collection_name)
+                collection = await self.apg.ap_get_object(
+                    requestor_actor_id, actor_data, collection_name
+                )
             if not collection:
                 raise error.StanzaError(
                     "item-not-found",
-                    text=f"No collection found for node {node!r} (account: {ap_account})"
+                    text=f"No collection found for node {node!r} (account: {ap_account})",
                 )
 
             kwargs["parser"] = parser
@@ -528,17 +495,15 @@
         else:
             try:
                 subscription = next(
-                    s for s in node.subscriptions
+                    s
+                    for s in node.subscriptions
                     if s.subscriber == requestor.userhostJID()
                 )
             except StopIteration:
                 subscription = None
 
         if subscription is None:
-            subscription = PubsubSub(
-                subscriber=requestor.userhostJID(),
-                state=sub_state
-            )
+            subscription = PubsubSub(subscriber=requestor.userhostJID(), state=sub_state)
             node.subscriptions.append(subscription)
             await self.host.memory.storage.add(node)
         else:
@@ -586,11 +551,7 @@
         data = self.apg.create_activity(
             "Undo",
             req_actor_id,
-            self.apg.create_activity(
-                "Follow",
-                req_actor_id,
-                recip_actor_id
-            )
+            self.apg.create_activity("Follow", req_actor_id, recip_actor_id),
         )
 
         resp = await self.apg.sign_and_post(inbox, req_actor_id, data)
@@ -602,10 +563,7 @@
         return NODE_OPTIONS
 
     def getConfiguration(
-        self,
-        requestor: jid.JID,
-        service: jid.JID,
-        nodeIdentifier: str
+        self, requestor: jid.JID, service: jid.JID, nodeIdentifier: str
     ) -> defer.Deferred:
         return defer.succeed(NODE_CONFIG_VALUES)
 
@@ -615,12 +573,9 @@
         service: jid.JID,
         nodeIdentifier: str,
         pep: bool = False,
-        recipient: Optional[jid.JID] = None
+        recipient: Optional[jid.JID] = None,
     ) -> Optional[dict]:
         if not nodeIdentifier:
             return None
-        info = {
-            "type": "leaf",
-            "meta-data": NODE_CONFIG
-        }
+        info = {"type": "leaf", "meta-data": NODE_CONFIG}
         return info
--- a/libervia/backend/plugins/plugin_comp_ap_gateway/regex.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_comp_ap_gateway/regex.py	Wed Jun 19 18:44:57 2024 +0200
@@ -42,17 +42,17 @@
 # obs-text = %x80-FF
 # ---
 
-ows = '[ \t]*'
-bws = f'{ows}'
-obs_text = '[\\x80-\\xff]'
-qdtext = f'([\t !#-\\[\\]-~]|{obs_text})'
-quoted_pair = f'\\\\([\t !-~]|{obs_text})'
+ows = "[ \t]*"
+bws = f"{ows}"
+obs_text = "[\\x80-\\xff]"
+qdtext = f"([\t !#-\\[\\]-~]|{obs_text})"
+quoted_pair = f"\\\\([\t !-~]|{obs_text})"
 quoted_string = f'"({qdtext}|{quoted_pair})*"'
 tchar = "([!#$%&`*+\\-.^_]|\\\\'|[|~0-9a-zA-Z])"
-token = f'({tchar})+'
+token = f"({tchar})+"
 RE_SIG_PARAM = re.compile(
-    f'(?P<key>{token}{bws})={bws}'
-    f'((?P<uq_value>{token})|(?P<quoted_value>{quoted_string}))'
+    f"(?P<key>{token}{bws})={bws}"
+    f"((?P<uq_value>{token})|(?P<quoted_value>{quoted_string}))"
 )
 
 
--- a/libervia/backend/plugins/plugin_comp_file_sharing.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_comp_file_sharing.py	Wed Jun 19 18:44:57 2024 +0200
@@ -82,10 +82,7 @@
     "{used_space}, you can't upload {file_size} more."
 )
 
-HTTP_VERSION = unicodedata.normalize(
-    'NFKD',
-    f"{C.APP_NAME} file sharing {C.APP_VERSION}"
-)
+HTTP_VERSION = unicodedata.normalize("NFKD", f"{C.APP_NAME} file sharing {C.APP_VERSION}")
 
 
 class HTTPFileServer(resource.Resource):
@@ -94,32 +91,32 @@
     def errorPage(self, request, code):
         request.setResponseCode(code)
         if code == http.BAD_REQUEST:
-            brief = 'Bad Request'
+            brief = "Bad Request"
             details = "Your request is invalid"
         elif code == http.FORBIDDEN:
-            brief = 'Forbidden'
+            brief = "Forbidden"
             details = "You're not allowed to use this resource"
         elif code == http.NOT_FOUND:
-            brief = 'Not Found'
+            brief = "Not Found"
             details = "No resource found at this URL"
         else:
-            brief = 'Error'
+            brief = "Error"
             details = "This resource can't be used"
             log.error(f"Unexpected return code used: {code}")
         log.warning(
-            f'Error returned while trying to access url {request.uri.decode()}: '
+            f"Error returned while trying to access url {request.uri.decode()}: "
             f'"{brief}" ({code}): {details}'
         )
 
         return resource.ErrorPage(code, brief, details).render(request)
 
     def get_disposition_type(self, media_type, media_subtype):
-        if media_type in ('image', 'video'):
-            return 'inline'
-        elif media_type == 'application' and media_subtype == 'pdf':
-            return 'inline'
+        if media_type in ("image", "video"):
+            return "inline"
+        elif media_type == "application" and media_subtype == "pdf":
+            return "inline"
         else:
-            return 'attachment'
+            return "attachment"
 
     def render(self, request):
         request.setHeader("server", HTTP_VERSION)
@@ -127,13 +124,14 @@
         request.setHeader("Access-Control-Allow-Methods", "OPTIONS, HEAD, GET, PUT")
         request.setHeader(
             "Access-Control-Allow-Headers",
-            "Content-Type, Range, Xmpp-File-Path, Xmpp-File-No-Http")
+            "Content-Type, Range, Xmpp-File-Path, Xmpp-File-No-Http",
+        )
         request.setHeader("Access-Control-Allow-Credentials", "true")
         request.setHeader("Accept-Ranges", "bytes")
 
         request.setHeader(
-            "Access-Control-Expose-Headers",
-            "Date, Content-Length, Content-Range")
+            "Access-Control-Expose-Headers", "Date, Content-Length, Content-Range"
+        )
         return super().render(request)
 
     def render_OPTIONS(self, request):
@@ -157,7 +155,8 @@
             request.finish()
             return
         found_files = await request.file_sharing.host.memory.get_files(
-            client=None, peer_jid=None, perms_to_check=None, public_id=upload_id)
+            client=None, peer_jid=None, perms_to_check=None, public_id=upload_id
+        )
         if not found_files:
             request.write(self.errorPage(request, http.NOT_FOUND))
             request.finish()
@@ -166,22 +165,22 @@
             log.error(f"more that one files found for public id {upload_id!r}")
 
         found_file = found_files[0]
-        file_path = request.file_sharing.files_path/found_file['file_hash']
+        file_path = request.file_sharing.files_path / found_file["file_hash"]
         file_res = static.File(file_path)
         file_res.type = f'{found_file["media_type"]}/{found_file["media_subtype"]}'
-        file_res.encoding = file_res.contentEncodings.get(Path(found_file['name']).suffix)
+        file_res.encoding = file_res.contentEncodings.get(Path(found_file["name"]).suffix)
         disp_type = self.get_disposition_type(
-            found_file['media_type'], found_file['media_subtype'])
+            found_file["media_type"], found_file["media_subtype"]
+        )
         # the URL is percent encoded, and not all browsers/tools unquote the file name,
         # thus we add a content disposition header
         request.setHeader(
-            'Content-Disposition',
-            f"{disp_type}; filename*=UTF-8''{quote(found_file['name'])}"
+            "Content-Disposition",
+            f"{disp_type}; filename*=UTF-8''{quote(found_file['name'])}",
         )
         # cf. https://xmpp.org/extensions/xep-0363.html#server
         request.setHeader(
-            'Content-Security-Policy',
-            "default-src 'none'; frame-ancestors 'none';"
+            "Content-Security-Policy", "default-src 'none'; frame-ancestors 'none';"
         )
         ret = file_res.render(request)
         if ret != server.NOT_DONE_YET:
@@ -215,7 +214,7 @@
         if path:
             path = unquote(path)
         else:
-            path =  "/uploads"
+            path = "/uploads"
         if request.getHeader("Xmpp-File-No-Http") is not None:
             public_id = None
         else:
@@ -225,11 +224,14 @@
             "name": unquote(upload_request.filename),
             "mime_type": upload_request.content_type,
             "size": upload_request.size,
-            "path": path
+            "path": path,
         }
 
         await request.file_sharing.register_received_file(
-            client, upload_request.from_, file_data, tmp_file_path,
+            client,
+            upload_request.from_,
+            file_data,
+            tmp_file_path,
             public_id=public_id,
         )
 
@@ -256,11 +258,13 @@
         # we normalise the path
         path = urlparse(path.decode()).path
         try:
-            __, upload_id, filename = path.split('/')
+            __, upload_id, filename = path.split("/")
         except ValueError:
             raise exceptions.DataError("no enought path elements")
         if len(upload_id) < 10:
-            raise exceptions.DataError(f"invalid upload ID received for a PUT: {upload_id!r}")
+            raise exceptions.DataError(
+                f"invalid upload ID received for a PUT: {upload_id!r}"
+            )
 
         self._upload_data = (upload_id, filename)
         return self._upload_data
@@ -276,11 +280,11 @@
     def refuse_request(self):
         if self.content is not None:
             self.content.close()
-        self.content = open(os.devnull, 'w+b')
+        self.content = open(os.devnull, "w+b")
         self.channel._respondToBadRequestAndDisconnect()
 
     def gotLength(self, length):
-        if self.channel._command.decode().upper() == 'PUT':
+        if self.channel._command.decode().upper() == "PUT":
             # for PUT we check early if upload_id is fine, to avoid buffering a file we'll refuse
             # we buffer the file in component's TMP_BUFFER_DIR, so we just have to rename it at the end
             try:
@@ -289,9 +293,13 @@
                 log.warning(f"Invalid PUT request, we stop here: {e}")
                 return self.refuse_request()
             try:
-                client, upload_request, timer = self.file_sharing.expected_uploads.pop(upload_id)
+                client, upload_request, timer = self.file_sharing.expected_uploads.pop(
+                    upload_id
+                )
             except KeyError:
-                log.warning(f"unknown (expired?) upload ID received for a PUT: {upload_id!r}")
+                log.warning(
+                    f"unknown (expired?) upload ID received for a PUT: {upload_id!r}"
+                )
                 return self.refuse_request()
 
             if not timer.active:
@@ -309,10 +317,9 @@
 
             self.upload_request_data = (client, upload_request)
 
-            file_tmp_path = files_utils.get_unique_name(
-                self.file_tmp_dir/upload_id)
+            file_tmp_path = files_utils.get_unique_name(self.file_tmp_dir / upload_id)
 
-            self.content = open(file_tmp_path, 'w+b')
+            self.content = open(file_tmp_path, "w+b")
         else:
             return super().gotLength(length)
 
@@ -331,8 +338,8 @@
         super().__init__(HTTPFileServer())
 
     def getContentFile(self, length):
-        file_tmp_path = self.file_tmp_dir/shortuuid.uuid()
-        return open(file_tmp_path, 'w+b')
+        file_tmp_path = self.file_tmp_dir / shortuuid.uuid()
+        return open(file_tmp_path, "w+b")
 
 
 class FileSharing:
@@ -356,37 +363,53 @@
         self._t = self.host.plugins["XEP-0264"]
         self._hu = self.host.plugins["XEP-0363"]
         self._hu.register_handler(self._on_http_upload)
-        self.host.trigger.add_with_check("FILE_getDestDir", self, self._get_dest_dir_trigger)
+        self.host.trigger.add_with_check(
+            "FILE_getDestDir", self, self._get_dest_dir_trigger
+        )
         self.host.trigger.add_with_check(
-            "XEP-0234_fileSendingRequest", self, self._file_sending_request_trigger, priority=1000
+            "XEP-0234_fileSendingRequest",
+            self,
+            self._file_sending_request_trigger,
+            priority=1000,
         )
-        self.host.trigger.add_with_check("XEP-0234_buildFileElement", self, self._add_file_metadata_elts)
-        self.host.trigger.add_with_check("XEP-0234_parseFileElement", self, self._get_file_metadata_elts)
-        self.host.trigger.add_with_check("XEP-0329_compGetFilesFromNode", self, self._add_file_metadata)
+        self.host.trigger.add_with_check(
+            "XEP-0234_buildFileElement", self, self._add_file_metadata_elts
+        )
+        self.host.trigger.add_with_check(
+            "XEP-0234_parseFileElement", self, self._get_file_metadata_elts
+        )
+        self.host.trigger.add_with_check(
+            "XEP-0329_compGetFilesFromNode", self, self._add_file_metadata
+        )
         self.host.trigger.add_with_check(
             "XEP-0329_compGetFilesFromNode_build_directory",
             self,
-            self._add_directory_metadata_elts)
+            self._add_directory_metadata_elts,
+        )
         self.host.trigger.add_with_check(
-            "XEP-0329_parseResult_directory",
-            self,
-            self._get_directory_metadata_elts)
+            "XEP-0329_parseResult_directory", self, self._get_directory_metadata_elts
+        )
         self.files_path = self.host.get_local_path(None, C.FILES_DIR)
-        self.http_port = int(self.host.memory.config_get(
-            'component file-sharing', 'http_upload_port', 8888))
+        self.http_port = int(
+            self.host.memory.config_get(
+                "component file-sharing", "http_upload_port", 8888
+            )
+        )
         connection_type = self.host.memory.config_get(
-            'component file-sharing', 'http_upload_connection_type', 'https')
-        if connection_type not in ('http', 'https'):
+            "component file-sharing", "http_upload_connection_type", "https"
+        )
+        if connection_type not in ("http", "https"):
             raise exceptions.ConfigError(
                 'bad http_upload_connection_type, you must use one of "http" or "https"'
             )
         self.server = FileSharingSite(self)
         self.expected_uploads = {}
-        if connection_type == 'http':
+        if connection_type == "http":
             reactor.listenTCP(self.http_port, self.server)
         else:
             options = tls.get_options_from_config(
-                self.host.memory.config, "component file-sharing")
+                self.host.memory.config, "component file-sharing"
+            )
             tls.tls_options_check(options)
             context_factory = tls.get_tls_context_factory(options)
             reactor.listenSSL(self.http_port, self.server, context_factory)
@@ -400,7 +423,8 @@
 
         self.init()
         public_base_url = self.host.memory.config_get(
-            'component file-sharing', 'http_upload_public_facing_url')
+            "component file-sharing", "http_upload_public_facing_url"
+        )
         if public_base_url is None:
             client._file_sharing_base_url = f"https://{client.host}:{self.http_port}"
         else:
@@ -441,7 +465,8 @@
             thumbnails.append({"id": thumb_id, "size": thumb_size})
 
     async def register_received_file(
-            self, client, peer_jid, file_data, file_path, public_id=None, extra=None):
+        self, client, peer_jid, file_data, file_path, public_id=None, extra=None
+    ):
         """Post file reception tasks
 
         once file is received, this method create hash/thumbnails if necessary
@@ -463,9 +488,9 @@
             file_hash = file_data["hash_hasher"].hexdigest()
         else:
             hasher = self._h.get_hasher(HASH_ALGO)
-            with file_path.open('rb') as f:
+            with file_path.open("rb") as f:
                 file_hash = await self._h.calculate_hash(f, hasher)
-        final_path = self.files_path/file_hash
+        final_path = self.files_path / file_hash
 
         if final_path.is_file():
             log.debug(
@@ -490,8 +515,11 @@
                 try:
                     await video.get_thumbnail(final_path, thumb_path)
                 except Exception as e:
-                    log.warning(_("Can't get thumbnail for {final_path}: {e}").format(
-                        final_path=final_path, e=e))
+                    log.warning(
+                        _("Can't get thumbnail for {final_path}: {e}").format(
+                            final_path=final_path, e=e
+                        )
+                    )
                 else:
                     await self.generate_thumbnails(extra, thumb_path)
 
@@ -534,14 +562,15 @@
                     text=OVER_QUOTA_TXT.format(
                         quota=utils.get_human_size(quota),
                         used_space=utils.get_human_size(used_space),
-                        file_size=utils.get_human_size(file_data['size'])
-                    )
+                        file_size=utils.get_human_size(file_data["size"]),
+                    ),
                 )
         file_tmp_dir = self.host.get_local_path(
             None, C.FILES_TMP_DIR, peer_jid.userhost(), component=True
         )
-        file_tmp_path = file_data['file_path'] = files_utils.get_unique_name(
-            file_tmp_dir/filename)
+        file_tmp_path = file_data["file_path"] = files_utils.get_unique_name(
+            file_tmp_dir / filename
+        )
 
         transfer_data["finished_d"].addCallback(
             lambda __: defer.ensureDeferred(
@@ -559,8 +588,8 @@
     ):
         """This method retrieve a file on request, and send if after checking permissions"""
         peer_jid = session["peer_jid"]
-        if session['local_jid'].user:
-            owner = client.get_owner_from_jid(session['local_jid'])
+        if session["local_jid"].user:
+            owner = client.get_owner_from_jid(session["local_jid"])
         else:
             owner = peer_jid
         try:
@@ -592,9 +621,10 @@
 
         # we only use the first found file
         found_file = found_files[0]
-        if found_file['type'] != C.FILE_TYPE_FILE:
-            raise TypeError("a file was expected, type is {type_}".format(
-                type_=found_file['type']))
+        if found_file["type"] != C.FILE_TYPE_FILE:
+            raise TypeError(
+                "a file was expected, type is {type_}".format(type_=found_file["type"])
+            )
         file_hash = found_file["file_hash"]
         file_path = self.files_path / file_hash
         file_data["hash_hasher"] = hasher = self._h.get_hasher(found_file["hash_algo"])
@@ -624,9 +654,11 @@
         else:
             return (
                 False,
-                defer.ensureDeferred(self._retrieve_files(
-                    client, session, content_data, content_name, file_data, file_elt
-                )),
+                defer.ensureDeferred(
+                    self._retrieve_files(
+                        client, session, content_data, content_name, file_data, file_elt
+                    )
+                ),
             )
 
     ## HTTP Upload ##
@@ -639,7 +671,7 @@
 
     async def _on_http_upload(self, client, request):
         # filename should be already cleaned, but it's better to double check
-        assert '/' not in request.filename
+        assert "/" not in request.filename
         # client._file_sharing_allowed_hosts is set in plugin XEP-0329
         if request.from_.host not in client._file_sharing_allowed_hosts:
             raise error.StanzaError("forbidden")
@@ -654,13 +686,15 @@
                     text=OVER_QUOTA_TXT.format(
                         quota=utils.get_human_size(quota),
                         used_space=utils.get_human_size(used_space),
-                        file_size=utils.get_human_size(request.size)
+                        file_size=utils.get_human_size(request.size),
                     ),
-                    appCondition = self._hu.get_file_too_large_elt(max(quota - used_space, 0))
+                    appCondition=self._hu.get_file_too_large_elt(
+                        max(quota - used_space, 0)
+                    ),
                 )
 
         upload_id = shortuuid.ShortUUID().random(length=30)
-        assert '/' not in upload_id
+        assert "/" not in upload_id
         timer = reactor.callLater(30, self._purge_slot, upload_id)
         self.expected_uploads[upload_id] = (client, request, timer)
         url = urljoin(client._file_sharing_base_url, f"{upload_id}/{request.filename}")
@@ -675,7 +709,7 @@
 
     def _add_file_metadata_elts(self, client, file_elt, extra_args):
         # affiliation
-        affiliation = extra_args.get('affiliation')
+        affiliation = extra_args.get("affiliation")
         if affiliation is not None:
             file_elt.addElement((NS_FS_AFFILIATION, "affiliation"), content=affiliation)
 
@@ -715,7 +749,8 @@
         return True
 
     def _add_file_metadata(
-            self, client, iq_elt, iq_result_elt, owner, node_path, files_data):
+        self, client, iq_elt, iq_result_elt, owner, node_path, files_data
+    ):
         for file_data in files_data:
             file_data["comments_url"] = uri.build_xmpp_uri(
                 "pubsub",
@@ -725,22 +760,21 @@
         return True
 
     def _add_directory_metadata_elts(
-            self, client, file_data, directory_elt, owner, node_path):
-        affiliation = file_data.get('affiliation')
+        self, client, file_data, directory_elt, owner, node_path
+    ):
+        affiliation = file_data.get("affiliation")
         if affiliation is not None:
             directory_elt.addElement(
-                (NS_FS_AFFILIATION, "affiliation"),
-                content=affiliation
+                (NS_FS_AFFILIATION, "affiliation"), content=affiliation
             )
 
-    def _get_directory_metadata_elts(
-            self, client, elt, file_data):
+    def _get_directory_metadata_elts(self, client, elt, file_data):
         try:
             affiliation_elt = next(elt.elements(NS_FS_AFFILIATION, "affiliation"))
         except StopIteration:
             pass
         else:
-            file_data['affiliation'] = str(affiliation_elt)
+            file_data["affiliation"] = str(affiliation_elt)
 
 
 class Comments_handler(pubsub.PubSubService):
@@ -841,7 +875,9 @@
             peer_jid = None
         else:
             peer_jid = requestor.userhost()
-        update_cb = partial(self.comments_update, new_comments=comments, peer_jid=peer_jid)
+        update_cb = partial(
+            self.comments_update, new_comments=comments, peer_jid=peer_jid
+        )
         try:
             await self.host.memory.file_update(file_id, "extra", update_cb)
         except exceptions.PermissionError:
--- a/libervia/backend/plugins/plugin_comp_file_sharing_management.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_comp_file_sharing_management.py	Wed Jun 19 18:44:57 2024 +0200
@@ -85,22 +85,30 @@
 
     def profile_connected(self, client):
         self._c.add_ad_hoc_command(
-            client, self._on_change_file, "Change Permissions of File(s)",
+            client,
+            self._on_change_file,
+            "Change Permissions of File(s)",
             node=NS_FILE_MANAGEMENT_PERM,
             allowed_magics=C.ENTITY_ALL,
         )
         self._c.add_ad_hoc_command(
-            client, self._on_delete_file, "Delete File(s)",
+            client,
+            self._on_delete_file,
+            "Delete File(s)",
             node=NS_FILE_MANAGEMENT_DELETE,
             allowed_magics=C.ENTITY_ALL,
         )
         self._c.add_ad_hoc_command(
-            client, self._on_gen_thumbnails, "Generate Thumbnails",
+            client,
+            self._on_gen_thumbnails,
+            "Generate Thumbnails",
             node=NS_FILE_MANAGEMENT_THUMB,
             allowed_magics=C.ENTITY_ALL,
         )
         self._c.add_ad_hoc_command(
-            client, self._on_quota, "Get Quota",
+            client,
+            self._on_quota,
+            "Get Quota",
             node=NS_FILE_MANAGEMENT_QUOTA,
             allowed_magics=C.ENTITY_ALL,
         )
@@ -108,12 +116,14 @@
     def _delete(self, service_jid_s, path, namespace, profile):
         client = self.host.get_client(profile)
         service_jid = jid.JID(service_jid_s) if service_jid_s else None
-        return defer.ensureDeferred(self._c.sequence(
-            client,
-            [{"path": path, "namespace": namespace}, {"confirm": True}],
-            NS_FILE_MANAGEMENT_DELETE,
-            service_jid,
-        ))
+        return defer.ensureDeferred(
+            self._c.sequence(
+                client,
+                [{"path": path, "namespace": namespace}, {"confirm": True}],
+                NS_FILE_MANAGEMENT_DELETE,
+                service_jid,
+            )
+        )
 
     def _err(self, reason):
         """Helper method to get argument to return for error
@@ -133,17 +143,14 @@
         @return (tuple): arguments to use in defer.returnValue
         """
         status = self._c.STATUS.EXECUTING
-        form = data_form.Form("form", title="File Management",
-                              formNamespace=NS_FILE_MANAGEMENT)
+        form = data_form.Form(
+            "form", title="File Management", formNamespace=NS_FILE_MANAGEMENT
+        )
 
-        field = data_form.Field(
-            "text-single", "path", required=True
-        )
+        field = data_form.Field("text-single", "path", required=True)
         form.addField(field)
 
-        field = data_form.Field(
-            "text-single", "namespace", required=False
-        )
+        field = data_form.Field("text-single", "namespace", required=False)
         form.addField(field)
 
         payload = form.toElement()
@@ -159,17 +166,17 @@
         """
         fields = command_form.fields
         try:
-            path = fields['path'].value.strip()
-            namespace = fields['namespace'].value or None
+            path = fields["path"].value.strip()
+            namespace = fields["namespace"].value or None
         except KeyError:
             self._c.ad_hoc_error(self._c.ERROR.BAD_PAYLOAD)
 
         if not path:
             self._c.ad_hoc_error(self._c.ERROR.BAD_PAYLOAD)
 
-        requestor = session_data['requestor']
+        requestor = session_data["requestor"]
         requestor_bare = requestor.userhostJID()
-        path = path.rstrip('/')
+        path = path.rstrip("/")
         parent_path, basename = os.path.split(path)
 
         # TODO: if parent_path and basename are empty, we ask for root directory
@@ -177,35 +184,37 @@
 
         try:
             found_files = await self.host.memory.get_files(
-                client, requestor_bare, path=parent_path, name=basename,
-                namespace=namespace)
+                client,
+                requestor_bare,
+                path=parent_path,
+                name=basename,
+                namespace=namespace,
+            )
             found_file = found_files[0]
         except (exceptions.NotFound, IndexError):
             raise WorkflowError(self._err(_("file not found")))
         except exceptions.PermissionError:
             raise WorkflowError(self._err(_("forbidden")))
 
-        if found_file['owner'] != requestor_bare:
+        if found_file["owner"] != requestor_bare:
             # only owner can manage files
             log.warning(_("Only owner can manage files"))
             raise WorkflowError(self._err(_("forbidden")))
 
-        session_data['found_file'] = found_file
-        session_data['namespace'] = namespace
+        session_data["found_file"] = found_file
+        session_data["namespace"] = namespace
         return found_file
 
     def _update_read_permission(self, access, allowed_jids):
         if not allowed_jids:
             if C.ACCESS_PERM_READ in access:
                 del access[C.ACCESS_PERM_READ]
-        elif allowed_jids == 'PUBLIC':
-            access[C.ACCESS_PERM_READ] = {
-                "type": C.ACCESS_TYPE_PUBLIC
-            }
+        elif allowed_jids == "PUBLIC":
+            access[C.ACCESS_PERM_READ] = {"type": C.ACCESS_TYPE_PUBLIC}
         else:
             access[C.ACCESS_PERM_READ] = {
                 "type": C.ACCESS_TYPE_WHITELIST,
-                "jids": [j.full() for j in allowed_jids]
+                "jids": [j.full() for j in allowed_jids],
             }
 
     async def _update_dir(self, client, requestor, namespace, file_data, allowed_jids):
@@ -214,19 +223,25 @@
         @param file_data(dict): metadata of the file
         @param allowed_jids(list[jid.JID]): list of entities allowed to read the file
         """
-        assert file_data['type'] == C.FILE_TYPE_DIRECTORY
+        assert file_data["type"] == C.FILE_TYPE_DIRECTORY
         files_data = await self.host.memory.get_files(
-            client, requestor, parent=file_data['id'], namespace=namespace)
+            client, requestor, parent=file_data["id"], namespace=namespace
+        )
 
         for file_data in files_data:
-            if not file_data['access'].get(C.ACCESS_PERM_READ, {}):
-                log.debug("setting {perm} read permission for {name}".format(
-                    perm=allowed_jids, name=file_data['name']))
+            if not file_data["access"].get(C.ACCESS_PERM_READ, {}):
+                log.debug(
+                    "setting {perm} read permission for {name}".format(
+                        perm=allowed_jids, name=file_data["name"]
+                    )
+                )
                 await self.host.memory.file_update(
-                    file_data['id'], 'access',
-                    partial(self._update_read_permission, allowed_jids=allowed_jids))
-            if file_data['type'] == C.FILE_TYPE_DIRECTORY:
-                await self._update_dir(client, requestor, namespace, file_data, 'PUBLIC')
+                    file_data["id"],
+                    "access",
+                    partial(self._update_read_permission, allowed_jids=allowed_jids),
+                )
+            if file_data["type"] == C.FILE_TYPE_DIRECTORY:
+                await self._update_dir(client, requestor, namespace, file_data, "PUBLIC")
 
     async def _on_change_file(self, client, command_elt, session_data, action, node):
         try:
@@ -235,8 +250,8 @@
         except StopIteration:
             command_form = None
 
-        found_file = session_data.get('found_file')
-        requestor = session_data['requestor']
+        found_file = session_data.get("found_file")
+        requestor = session_data["requestor"]
         requestor_bare = requestor.userhostJID()
 
         if command_form is None or len(command_form.fields) == 0:
@@ -251,31 +266,39 @@
                 return e.err_args
 
             # management request
-            if found_file['type'] == C.FILE_TYPE_DIRECTORY:
+            if found_file["type"] == C.FILE_TYPE_DIRECTORY:
                 instructions = D_("Please select permissions for this directory")
             else:
                 instructions = D_("Please select permissions for this file")
 
-            form = data_form.Form("form", title="File Management",
-                                  instructions=[instructions],
-                                  formNamespace=NS_FILE_MANAGEMENT)
+            form = data_form.Form(
+                "form",
+                title="File Management",
+                instructions=[instructions],
+                formNamespace=NS_FILE_MANAGEMENT,
+            )
             field = data_form.Field(
-                "text-multi", "read_allowed", required=False,
-                desc='list of jids allowed to read this file (beside yourself), or '
-                     '"PUBLIC" to let a public access'
+                "text-multi",
+                "read_allowed",
+                required=False,
+                desc="list of jids allowed to read this file (beside yourself), or "
+                '"PUBLIC" to let a public access',
             )
             read_access = found_file["access"].get(C.ACCESS_PERM_READ, {})
-            access_type = read_access.get('type', C.ACCESS_TYPE_WHITELIST)
+            access_type = read_access.get("type", C.ACCESS_TYPE_WHITELIST)
             if access_type == C.ACCESS_TYPE_PUBLIC:
-                field.values = ['PUBLIC']
+                field.values = ["PUBLIC"]
             else:
-                field.values = read_access.get('jids', [])
+                field.values = read_access.get("jids", [])
             form.addField(field)
-            if found_file['type'] == C.FILE_TYPE_DIRECTORY:
+            if found_file["type"] == C.FILE_TYPE_DIRECTORY:
                 field = data_form.Field(
-                    "boolean", "recursive", value=False, required=False,
+                    "boolean",
+                    "recursive",
+                    value=False,
+                    required=False,
                     desc="Files under it will be made public to follow this dir "
-                         "permission (only if they don't have already a permission set)."
+                    "permission (only if they don't have already a permission set).",
                 )
                 form.addField(field)
 
@@ -286,42 +309,49 @@
         else:
             # final phase, we'll do permission change here
             try:
-                read_allowed = command_form.fields['read_allowed']
+                read_allowed = command_form.fields["read_allowed"]
             except KeyError:
                 self._c.ad_hoc_error(self._c.ERROR.BAD_PAYLOAD)
 
-            if read_allowed.value == 'PUBLIC':
-                allowed_jids = 'PUBLIC'
-            elif read_allowed.value.strip() == '':
+            if read_allowed.value == "PUBLIC":
+                allowed_jids = "PUBLIC"
+            elif read_allowed.value.strip() == "":
                 allowed_jids = None
             else:
                 try:
-                    allowed_jids = [jid.JID(v.strip()) for v in read_allowed.values
-                                    if v.strip()]
+                    allowed_jids = [
+                        jid.JID(v.strip()) for v in read_allowed.values if v.strip()
+                    ]
                 except RuntimeError as e:
-                    log.warning(_("Can't use read_allowed values: {reason}").format(
-                        reason=e))
+                    log.warning(
+                        _("Can't use read_allowed values: {reason}").format(reason=e)
+                    )
                     self._c.ad_hoc_error(self._c.ERROR.BAD_PAYLOAD)
 
-            if found_file['type'] == C.FILE_TYPE_FILE:
+            if found_file["type"] == C.FILE_TYPE_FILE:
                 await self.host.memory.file_update(
-                    found_file['id'], 'access',
-                    partial(self._update_read_permission, allowed_jids=allowed_jids))
+                    found_file["id"],
+                    "access",
+                    partial(self._update_read_permission, allowed_jids=allowed_jids),
+                )
             else:
                 try:
-                    recursive = command_form.fields['recursive']
+                    recursive = command_form.fields["recursive"]
                 except KeyError:
                     self._c.ad_hoc_error(self._c.ERROR.BAD_PAYLOAD)
                 await self.host.memory.file_update(
-                    found_file['id'], 'access',
-                    partial(self._update_read_permission, allowed_jids=allowed_jids))
+                    found_file["id"],
+                    "access",
+                    partial(self._update_read_permission, allowed_jids=allowed_jids),
+                )
                 if recursive:
                     # we set all file under the directory as public (if they haven't
                     # already a permission set), so allowed entities of root directory
                     # can read them.
-                    namespace = session_data['namespace']
+                    namespace = session_data["namespace"]
                     await self._update_dir(
-                        client, requestor_bare, namespace, found_file, 'PUBLIC')
+                        client, requestor_bare, namespace, found_file, "PUBLIC"
+                    )
 
             # job done, we can end the session
             status = self._c.STATUS.COMPLETED
@@ -336,8 +366,8 @@
         except StopIteration:
             command_form = None
 
-        found_file = session_data.get('found_file')
-        requestor = session_data['requestor']
+        found_file = session_data.get("found_file")
+        requestor = session_data["requestor"]
         requestor_bare = requestor.userhostJID()
 
         if command_form is None or len(command_form.fields) == 0:
@@ -350,18 +380,27 @@
                 found_file = await self._get_file_data(client, session_data, command_form)
             except WorkflowError as e:
                 return e.err_args
-            if found_file['type'] == C.FILE_TYPE_DIRECTORY:
-                msg = D_("Are you sure to delete directory {name} and all files and "
-                         "directories under it?").format(name=found_file['name'])
+            if found_file["type"] == C.FILE_TYPE_DIRECTORY:
+                msg = D_(
+                    "Are you sure to delete directory {name} and all files and "
+                    "directories under it?"
+                ).format(name=found_file["name"])
             else:
-                msg = D_("Are you sure to delete file {name}?"
-                    .format(name=found_file['name']))
-            form = data_form.Form("form", title="File Management",
-                                  instructions = [msg],
-                                  formNamespace=NS_FILE_MANAGEMENT)
+                msg = D_(
+                    "Are you sure to delete file {name}?".format(name=found_file["name"])
+                )
+            form = data_form.Form(
+                "form",
+                title="File Management",
+                instructions=[msg],
+                formNamespace=NS_FILE_MANAGEMENT,
+            )
             field = data_form.Field(
-                "boolean", "confirm", value=False, required=True,
-                desc="check this box to confirm"
+                "boolean",
+                "confirm",
+                value=False,
+                required=True,
+                desc="check this box to confirm",
             )
             form.addField(field)
             status = self._c.STATUS.EXECUTING
@@ -371,15 +410,16 @@
         else:
             # final phase, we'll do deletion here
             try:
-                confirmed = C.bool(command_form.fields['confirm'].value)
+                confirmed = C.bool(command_form.fields["confirm"].value)
             except KeyError:
                 self._c.ad_hoc_error(self._c.ERROR.BAD_PAYLOAD)
             if not confirmed:
                 note = None
             else:
-                recursive = found_file['type'] == C.FILE_TYPE_DIRECTORY
+                recursive = found_file["type"] == C.FILE_TYPE_DIRECTORY
                 await self.host.memory.file_delete(
-                    client, requestor_bare, found_file['id'], recursive)
+                    client, requestor_bare, found_file["id"], recursive
+                )
                 note = (self._c.NOTE.INFO, _("file deleted"))
             status = self._c.STATUS.COMPLETED
             payload = None
@@ -393,16 +433,17 @@
 
         @param file_data(dict): metadata of the file
         """
-        if file_data['type'] == C.FILE_TYPE_DIRECTORY:
+        if file_data["type"] == C.FILE_TYPE_DIRECTORY:
             sub_files_data = await self.host.memory.get_files(
-                client, requestor, parent=file_data['id'], namespace=namespace)
+                client, requestor, parent=file_data["id"], namespace=namespace
+            )
             for sub_file_data in sub_files_data:
                 await self._gen_thumbs(client, requestor, namespace, sub_file_data)
 
-        elif file_data['type'] == C.FILE_TYPE_FILE:
-            media_type = file_data['media_type']
-            file_path = os.path.join(self.files_path, file_data['file_hash'])
-            if media_type == 'image':
+        elif file_data["type"] == C.FILE_TYPE_FILE:
+            media_type = file_data["media_type"]
+            file_path = os.path.join(self.files_path, file_data["file_hash"])
+            if media_type == "image":
                 thumbnails = []
 
                 for max_thumb_size in self._t.SIZES:
@@ -414,20 +455,26 @@
                             60 * 60 * 24 * 31 * 6,
                         )
                     except Exception as e:
-                        log.warning(_("Can't create thumbnail: {reason}")
-                            .format(reason=e))
+                        log.warning(
+                            _("Can't create thumbnail: {reason}").format(reason=e)
+                        )
                         break
                     thumbnails.append({"id": thumb_id, "size": thumb_size})
 
                 await self.host.memory.file_update(
-                    file_data['id'], 'extra',
-                    partial(self._update_thumbs, thumbnails=thumbnails))
+                    file_data["id"],
+                    "extra",
+                    partial(self._update_thumbs, thumbnails=thumbnails),
+                )
 
-                log.info("thumbnails for [{file_name}] generated"
-                    .format(file_name=file_data['name']))
+                log.info(
+                    "thumbnails for [{file_name}] generated".format(
+                        file_name=file_data["name"]
+                    )
+                )
 
         else:
-            log.warning("unmanaged file type: {type_}".format(type_=file_data['type']))
+            log.warning("unmanaged file type: {type_}".format(type_=file_data["type"]))
 
     async def _on_gen_thumbnails(self, client, command_elt, session_data, action, node):
         try:
@@ -436,8 +483,8 @@
         except StopIteration:
             command_form = None
 
-        found_file = session_data.get('found_file')
-        requestor = session_data['requestor']
+        found_file = session_data.get("found_file")
+        requestor = session_data["requestor"]
 
         if command_form is None or len(command_form.fields) == 0:
             # root request
@@ -451,7 +498,7 @@
                 return e.err_args
 
             log.info("Generating thumbnails as requested")
-            await self._gen_thumbs(client, requestor, found_file['namespace'], found_file)
+            await self._gen_thumbs(client, requestor, found_file["namespace"], found_file)
 
             # job done, we can end the session
             status = self._c.STATUS.COMPLETED
@@ -460,7 +507,7 @@
             return (payload, status, None, note)
 
     async def _on_quota(self, client, command_elt, session_data, action, node):
-        requestor = session_data['requestor']
+        requestor = session_data["requestor"]
         quota = self.host.plugins["file_sharing"].get_quota(client, requestor)
         try:
             size_used = await self.host.memory.file_get_used_space(client, requestor)
@@ -473,11 +520,10 @@
         note = (
             self._c.NOTE.INFO,
             _("You are currently using {size_used} on {size_quota}").format(
-                size_used = utils.get_human_size(size_used),
-                size_quota = (
-                    _("unlimited quota") if quota is None
-                    else utils.get_human_size(quota)
-                )
-            )
+                size_used=utils.get_human_size(size_used),
+                size_quota=(
+                    _("unlimited quota") if quota is None else utils.get_human_size(quota)
+                ),
+            ),
         )
         return (payload, status, None, note)
--- a/libervia/backend/plugins/plugin_dbg_manhole.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_dbg_manhole.py	Wed Jun 19 18:44:57 2024 +0200
@@ -40,7 +40,6 @@
 }
 
 
-
 class Manhole(object):
 
     def __init__(self, host):
@@ -50,10 +49,17 @@
             self.start_manhole(port)
 
     def start_manhole(self, port):
-        log.warning(_("/!\\ Manhole debug server activated, be sure to not use it in "
-                      "production, this is dangerous /!\\"))
-        log.info(_("You can connect to manhole server using telnet on port {port}")
-            .format(port=port))
+        log.warning(
+            _(
+                "/!\\ Manhole debug server activated, be sure to not use it in "
+                "production, this is dangerous /!\\"
+            )
+        )
+        log.info(
+            _("You can connect to manhole server using telnet on port {port}").format(
+                port=port
+            )
+        )
         f = protocol.ServerFactory()
         namespace = {
             "host": self.host,
@@ -61,9 +67,10 @@
             "jid": jid,
             "d": defer.ensureDeferred,
         }
-        f.protocol = lambda: TelnetTransport(TelnetBootstrapProtocol,
-                                             insults.ServerProtocol,
-                                             ColoredManhole,
-                                             namespace=namespace,
-                                             )
+        f.protocol = lambda: TelnetTransport(
+            TelnetBootstrapProtocol,
+            insults.ServerProtocol,
+            ColoredManhole,
+            namespace=namespace,
+        )
         reactor.listenTCP(port, f)
--- a/libervia/backend/plugins/plugin_exp_command_export.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_exp_command_export.py	Wed Jun 19 18:44:57 2024 +0200
@@ -41,7 +41,7 @@
 
 
 class ExportCommandProtocol(protocol.ProcessProtocol):
-    """ Try to register an account with prosody """
+    """Try to register an account with prosody"""
 
     def __init__(self, parent, client, target, options):
         self.parent = parent
@@ -73,10 +73,10 @@
         self.transport.write(message.encode("utf-8"))
 
     def bool_option(self, key):
-        """ Get boolean value from options
+        """Get boolean value from options
         @param key: name of the option
         @return: True if key exists and set to "true" (case insensitive),
-                 False in all other cases """
+                 False in all other cases"""
         value = self.options.get(key, "")
         return value.lower() == "true"
 
@@ -92,7 +92,9 @@
         log.info(_("Plugin command export initialization"))
         self.host = host
         self.spawned = {}  # key = entity
-        host.trigger.add("message_received", self.message_received_trigger, priority=10000)
+        host.trigger.add(
+            "message_received", self.message_received_trigger, priority=10000
+        )
         host.bridge.add_method(
             "command_export",
             ".plugin",
@@ -102,19 +104,19 @@
         )
 
     def removeProcess(self, entity, process):
-        """ Called when the process is finished
+        """Called when the process is finished
         @param entity: jid.JID attached to the process
         @param process: process to remove"""
         try:
             processes_set = self.spawned[(entity, process.client.profile)]
             processes_set.discard(process)
             if not processes_set:
-                del (self.spawned[(entity, process.client.profile)])
+                del self.spawned[(entity, process.client.profile)]
         except ValueError:
             pass
 
     def message_received_trigger(self, client, message_elt, post_treat):
-        """ Check if source is linked and repeat message, else do nothing  """
+        """Check if source is linked and repeat message, else do nothing"""
         from_jid = jid.JID(message_elt["from"])
         spawned_key = (from_jid.userhostJID(), client.profile)
 
@@ -140,7 +142,7 @@
         return True
 
     def _export_command(self, command, args, targets, options, profile_key):
-        """ Export a commands to authorised targets
+        """Export a commands to authorised targets
         @param command: full path of the command to execute
         @param args: list of arguments, with command name as first one
         @param targets: list of allowed entities
--- a/libervia/backend/plugins/plugin_exp_invitation.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_exp_invitation.py	Wed Jun 19 18:44:57 2024 +0200
@@ -45,9 +45,7 @@
 }
 
 NS_INVITATION = "https://salut-a-toi/protocol/invitation:0"
-INVITATION = '/message/invitation[@xmlns="{ns_invit}"]'.format(
-    ns_invit=NS_INVITATION
-)
+INVITATION = '/message/invitation[@xmlns="{ns_invit}"]'.format(ns_invit=NS_INVITATION)
 NS_INVITATION_LIST = NS_INVITATION + "#list"
 
 
@@ -91,8 +89,10 @@
         """
         if namespace in self._ns_cb:
             raise exceptions.ConflictError(
-                "invitation namespace {namespace} is already register with {callback}"
-                .format(namespace=namespace, callback=self._ns_cb[namespace]))
+                "invitation namespace {namespace} is already register with {callback}".format(
+                    namespace=namespace, callback=self._ns_cb[namespace]
+                )
+            )
         self._ns_cb[namespace] = callback
 
     def _generate_base_invitation(self, client, invitee_jid, name, extra):
@@ -118,14 +118,16 @@
         invitation_elt = mess_data["xml"].addElement("invitation", NS_INVITATION)
         if name is not None:
             invitation_elt["name"] = name
-        thumb_url = extra.get('thumb_url')
+        thumb_url = extra.get("thumb_url")
         if thumb_url:
-            if not thumb_url.startswith('http'):
+            if not thumb_url.startswith("http"):
                 log.warning(
-                    "only http URLs are allowed for thumbnails, got {url}, ignoring"
-                    .format(url=thumb_url))
+                    "only http URLs are allowed for thumbnails, got {url}, ignoring".format(
+                        url=thumb_url
+                    )
+                )
             else:
-                invitation_elt['thumb_url'] = thumb_url
+                invitation_elt["thumb_url"] = thumb_url
         return mess_data, invitation_elt
 
     def send_pubsub_invitation(
@@ -136,7 +138,7 @@
         node: str,
         item_id: Optional[str],
         name: Optional[str],
-        extra: Optional[dict]
+        extra: Optional[dict],
     ) -> None:
         """Send an pubsub invitation in a <message> stanza
 
@@ -151,7 +153,8 @@
         if extra is None:
             extra = {}
         mess_data, invitation_elt = self._generate_base_invitation(
-            client, invitee_jid, name, extra)
+            client, invitee_jid, name, extra
+        )
         pubsub_elt = invitation_elt.addElement("pubsub")
         pubsub_elt["service"] = service.full()
         pubsub_elt["node"] = node
@@ -173,8 +176,15 @@
         client.send(mess_data["xml"])
 
     async def send_file_sharing_invitation(
-        self, client, invitee_jid, service, repos_type=None, namespace=None, path=None,
-        name=None, extra=None
+        self,
+        client,
+        invitee_jid,
+        service,
+        repos_type=None,
+        namespace=None,
+        path=None,
+        name=None,
+        extra=None,
     ):
         """Send a file sharing invitation in a <message> stanza
 
@@ -196,7 +206,7 @@
         # FIXME: not the best place to adapt permission, but it's necessary to check them
         #   for UX
         try:
-            await self.host.plugins['XEP-0329'].affiliationsSet(
+            await self.host.plugins["XEP-0329"].affiliationsSet(
                 client, service, namespace, path, {invitee_jid: "member"}
             )
         except Exception as e:
@@ -214,18 +224,20 @@
                 )
             else:
                 try:
-                    extra['thumb_url'] = own_interest['thumb_url']
+                    extra["thumb_url"] = own_interest["thumb_url"]
                 except KeyError:
                     pass
 
         mess_data, invitation_elt = self._generate_base_invitation(
-            client, invitee_jid, name, extra)
+            client, invitee_jid, name, extra
+        )
         file_sharing_elt = invitation_elt.addElement("file_sharing")
         file_sharing_elt["service"] = service.full()
         if repos_type is not None:
             if repos_type not in ("files", "photos"):
                 msg = "unknown repository type: {repos_type}".format(
-                    repos_type=repos_type)
+                    repos_type=repos_type
+                )
                 log.warning(msg)
                 raise exceptions.DateError(msg)
             file_sharing_elt["type"] = repos_type
@@ -250,8 +262,9 @@
                     client, service, node, item_ids=[item_id]
                 )
             except Exception as e:
-                log.warning(_("Can't get item linked with invitation: {reason}").format(
-                            reason=e))
+                log.warning(
+                    _("Can't get item linked with invitation: {reason}").format(reason=e)
+                )
             try:
                 item_elt = items[0]
             except IndexError:
@@ -261,8 +274,9 @@
             try:
                 namespace = item_elt.firstChildElement().uri
             except Exception as e:
-                log.warning(_("Can't retrieve namespace of invitation: {reason}").format(
-                    reason = e))
+                log.warning(
+                    _("Can't retrieve namespace of invitation: {reason}").format(reason=e)
+                )
                 raise exceptions.DataError
 
             args = [service, node, item_id, item_elt]
@@ -271,7 +285,7 @@
                 node_data_elt = next(pubsub_elt.elements(NS_INVITATION, "node_data"))
             except StopIteration:
                 raise exceptions.DataError("Bad invitation, ignoring")
-            namespace = node_data_elt['namespace']
+            namespace = node_data_elt["namespace"]
             args = [service, node, None, node_data_elt]
 
         return namespace, args
@@ -296,7 +310,7 @@
         name = invitation_elt.getAttribute("name")
         extra = {}
         if invitation_elt.hasAttribute("thumb_url"):
-            extra['thumb_url'] = invitation_elt['thumb_url']
+            extra["thumb_url"] = invitation_elt["thumb_url"]
 
         for elt in invitation_elt.elements():
             if elt.uri != NS_INVITATION:
@@ -307,22 +321,26 @@
             elif elt.name == "file_sharing":
                 method = self._parse_file_sharing_elt
             else:
-                log.warning("not implemented invitation element: {xml}".format(
-                    xml = elt.toXml()))
+                log.warning(
+                    "not implemented invitation element: {xml}".format(xml=elt.toXml())
+                )
                 continue
             try:
                 namespace, args = await method(client, elt)
             except exceptions.DataError:
-                log.warning("Can't parse invitation element: {xml}".format(
-                            xml = elt.toXml()))
+                log.warning(
+                    "Can't parse invitation element: {xml}".format(xml=elt.toXml())
+                )
                 continue
 
             try:
                 cb = self._ns_cb[namespace]
             except KeyError:
-                log.warning(_(
-                    'No handler for namespace "{namespace}", invitation ignored')
-                    .format(namespace=namespace))
+                log.warning(
+                    _(
+                        'No handler for namespace "{namespace}", invitation ignored'
+                    ).format(namespace=namespace)
+                )
             else:
                 await utils.as_deferred(cb, client, namespace, name, extra, *args)
 
--- a/libervia/backend/plugins/plugin_exp_invitation_file.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_exp_invitation_file.py	Wed Jun 19 18:44:57 2024 +0200
@@ -53,21 +53,35 @@
             in_sign="ssssssss",
             out_sign="",
             method=self._send_file_sharing_invitation,
-            async_=True
+            async_=True,
         )
 
     def _send_file_sharing_invitation(
-            self, invitee_jid_s, service_s, repos_type=None, namespace=None, path=None,
-            name=None, extra_s='', profile_key=C.PROF_KEY_NONE):
+        self,
+        invitee_jid_s,
+        service_s,
+        repos_type=None,
+        namespace=None,
+        path=None,
+        name=None,
+        extra_s="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile_key)
         invitee_jid = jid.JID(invitee_jid_s)
         service = jid.JID(service_s)
         extra = data_format.deserialise(extra_s)
         return defer.ensureDeferred(
             self.host.plugins["INVITATION"].send_file_sharing_invitation(
-                client, invitee_jid, service, repos_type=repos_type or None,
-                namespace=namespace or None, path=path or None, name=name or None,
-                extra=extra)
+                client,
+                invitee_jid,
+                service,
+                repos_type=repos_type or None,
+                namespace=namespace or None,
+                path=path or None,
+                name=name or None,
+                extra=extra,
+            )
         )
 
     def on_invitation(
@@ -79,25 +93,31 @@
         service: jid.JID,
         repos_type: str,
         sharing_ns: str,
-        path: str
+        path: str,
     ):
         if repos_type == "files":
             type_human = _("file sharing")
         elif repos_type == "photos":
             type_human = _("photo album")
         else:
-            log.warning("Unknown repository type: {repos_type}".format(
-                repos_type=repos_type))
+            log.warning(
+                "Unknown repository type: {repos_type}".format(repos_type=repos_type)
+            )
             repos_type = "file"
             type_human = _("file sharing")
-        log.info(_(
-            '{profile} has received an invitation for a files repository ({type_human}) '
-            'with namespace {sharing_ns!r} at path [{path}]').format(
-            profile=client.profile, type_human=type_human, sharing_ns=sharing_ns,
-                path=path)
+        log.info(
+            _(
+                "{profile} has received an invitation for a files repository ({type_human}) "
+                "with namespace {sharing_ns!r} at path [{path}]"
+            ).format(
+                profile=client.profile,
+                type_human=type_human,
+                sharing_ns=sharing_ns,
+                path=path,
             )
+        )
         return defer.ensureDeferred(
-            self.host.plugins['LIST_INTEREST'].register_file_sharing(
+            self.host.plugins["LIST_INTEREST"].register_file_sharing(
                 client, service, repos_type, sharing_ns, path, name, extra
             )
         )
--- a/libervia/backend/plugins/plugin_exp_invitation_pubsub.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_exp_invitation_pubsub.py	Wed Jun 19 18:44:57 2024 +0200
@@ -57,20 +57,23 @@
             in_sign="sssssss",
             out_sign="",
             method=self._send_pubsub_invitation,
-            async_=True
+            async_=True,
         )
 
-    def register(
-        self,
-        namespace: str,
-        handler
-    ) -> None:
+    def register(self, namespace: str, handler) -> None:
         self._ns_handler[namespace] = handler
         self.host.plugins["INVITATION"].register_namespace(namespace, self.on_invitation)
 
     def _send_pubsub_invitation(
-            self, invitee_jid_s, service_s, node, item_id=None,
-            name=None, extra_s='', profile_key=C.PROF_KEY_NONE):
+        self,
+        invitee_jid_s,
+        service_s,
+        node,
+        item_id=None,
+        name=None,
+        extra_s="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile_key)
         invitee_jid = jid.JID(invitee_jid_s)
         service = jid.JID(service_s)
@@ -83,7 +86,7 @@
                 node,
                 item_id or None,
                 name=name or None,
-                extra=extra
+                extra=extra,
             )
         )
 
@@ -94,7 +97,7 @@
         service: jid.JID,
         node: str,
         item_id: Optional[str] = None,
-        name: str = '',
+        name: str = "",
         extra: Optional[dict] = None,
     ) -> None:
         if extra is None:
@@ -112,24 +115,26 @@
                 else:
                     await utils.as_deferred(
                         preflight,
-                        client, invitee_jid, service, node, item_id, name, extra
+                        client,
+                        invitee_jid,
+                        service,
+                        node,
+                        item_id,
+                        name,
+                        extra,
                     )
             if item_id is None:
                 item_id = extra.pop("default_item_id", None)
 
         # we authorize our invitee to see the nodes of interest
-        await self._p.set_node_affiliations(client, service, node, {invitee_jid: "member"})
+        await self._p.set_node_affiliations(
+            client, service, node, {invitee_jid: "member"}
+        )
         log.debug(f"affiliation set on {service}'s {node!r} node")
 
         # now we send the invitation
         self.host.plugins["INVITATION"].send_pubsub_invitation(
-            client,
-            invitee_jid,
-            service,
-            node,
-            item_id,
-            name=name or None,
-            extra=extra
+            client, invitee_jid, service, node, item_id, name=name or None, extra=extra
         )
 
     async def on_invitation(
@@ -141,7 +146,7 @@
         service: jid.JID,
         node: str,
         item_id: Optional[str],
-        item_elt: domish.Element
+        item_elt: domish.Element,
     ) -> None:
         if extra is None:
             extra = {}
@@ -155,7 +160,14 @@
         else:
             await utils.as_deferred(
                 preflight,
-                client, namespace, name, extra, service, node, item_id, item_elt
+                client,
+                namespace,
+                name,
+                extra,
+                service,
+                node,
+                item_id,
+                item_elt,
             )
             if item_id is None:
                 item_id = extra.pop("default_item_id", None)
@@ -164,6 +176,6 @@
         if not name:
             name = extra.pop("name", "")
 
-        return await self.host.plugins['LIST_INTEREST'].register_pubsub(
-            client, namespace, service, node, item_id, creator,
-            name, element, extra)
+        return await self.host.plugins["LIST_INTEREST"].register_pubsub(
+            client, namespace, service, node, item_id, creator, name, element, extra
+        )
--- a/libervia/backend/plugins/plugin_exp_jingle_stream.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_exp_jingle_stream.py	Wed Jun 19 18:44:57 2024 +0200
@@ -217,17 +217,19 @@
                 factory.port_listening = port_listening
                 break
         # we don't want to wait for IQ result of initiate
-        defer.ensureDeferred(self._j.initiate(
-            client,
-            to_jid,
-            [
-                {
-                    "app_ns": NS_STREAM,
-                    "senders": self._j.ROLE_INITIATOR,
-                    "app_kwargs": {"stream_object": factory},
-                }
-            ],
-        ))
+        defer.ensureDeferred(
+            self._j.initiate(
+                client,
+                to_jid,
+                [
+                    {
+                        "app_ns": NS_STREAM,
+                        "senders": self._j.ROLE_INITIATOR,
+                        "app_kwargs": {"stream_object": factory},
+                    }
+                ],
+            )
+        )
         return str(port)
 
     def jingle_session_init(self, client, session, content_name, stream_object):
@@ -239,7 +241,9 @@
         return desc_elt
 
     @defer.inlineCallbacks
-    def jingle_request_confirmation(self, client, action, session, content_name, desc_elt):
+    def jingle_request_confirmation(
+        self, client, action, session, content_name, desc_elt
+    ):
         """This method request confirmation for a jingle session"""
         content_data = session["contents"][content_name]
         if content_data["senders"] not in (
@@ -287,7 +291,9 @@
             content_data["stream_object"] = application_data["stream_object"]
             finished_d = content_data["finished_d"] = defer.Deferred()
             args = [client, session, content_name, content_data]
-            finished_d.addCallbacks(self._finished_cb, self._finished_eb, args, None, args)
+            finished_d.addCallbacks(
+                self._finished_cb, self._finished_eb, args, None, args
+            )
         else:
             log.warning("FIXME: unmanaged action {}".format(action))
         return desc_elt
--- a/libervia/backend/plugins/plugin_exp_lang_detect.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_exp_lang_detect.py	Wed Jun 19 18:44:57 2024 +0200
@@ -79,7 +79,7 @@
         return mess_data
 
     def message_received_trigger(self, client, message_elt, post_treat):
-        """ Check if source is linked and repeat message, else do nothing  """
+        """Check if source is linked and repeat message, else do nothing"""
 
         lang_detect = self.host.memory.param_get_a(
             NAME, CATEGORY, profile_key=client.profile
--- a/libervia/backend/plugins/plugin_exp_list_of_interest.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_exp_list_of_interest.py	Wed Jun 19 18:44:57 2024 +0200
@@ -99,8 +99,18 @@
             if e.condition == "conflict":
                 log.debug(_("requested node already exists"))
 
-    async def register_pubsub(self, client, namespace, service, node, item_id=None,
-                       creator=False, name=None, element=None, extra=None):
+    async def register_pubsub(
+        self,
+        client,
+        namespace,
+        service,
+        node,
+        item_id=None,
+        creator=False,
+        name=None,
+        element=None,
+        extra=None,
+    ):
         """Register an interesting element in personal list
 
         @param namespace(unicode): namespace of the interest
@@ -123,10 +133,10 @@
         interest_elt = domish.Element((NS_LIST_INTEREST, "interest"))
         interest_elt["namespace"] = namespace
         if name is not None:
-            interest_elt['name'] = name
-        thumb_url = extra.get('thumb_url')
+            interest_elt["name"] = name
+        thumb_url = extra.get("thumb_url")
         if thumb_url:
-            interest_elt['thumb_url'] = thumb_url
+            interest_elt["thumb_url"] = thumb_url
         pubsub_elt = interest_elt.addElement("pubsub")
         pubsub_elt["service"] = service.full()
         pubsub_elt["node"] = node
@@ -136,12 +146,9 @@
             pubsub_elt["creator"] = C.BOOL_TRUE
         if element is not None:
             pubsub_elt.addChild(element)
-        uri_kwargs = {
-            "path": service.full(),
-            "node": node
-        }
+        uri_kwargs = {"path": service.full(), "node": node}
         if item_id:
-            uri_kwargs['id'] = item_id
+            uri_kwargs["id"] = item_id
         interest_uri = uri.build_xmpp_uri("pubsub", **uri_kwargs)
         # we use URI of the interest as item id to avoid duplicates
         item_elt = pubsub.Item(interest_uri, payload=interest_elt)
@@ -150,16 +157,22 @@
         )
 
     def _register_file_sharing(
-        self, service, repos_type, namespace, path, name, extra_raw,
-        profile
+        self, service, repos_type, namespace, path, name, extra_raw, profile
     ):
         client = self.host.get_client(profile)
         extra = data_format.deserialise(extra_raw)
 
-        return defer.ensureDeferred(self.register_file_sharing(
-            client, jid.JID(service), repos_type or None, namespace or None, path or None,
-            name or None, extra
-        ))
+        return defer.ensureDeferred(
+            self.register_file_sharing(
+                client,
+                jid.JID(service),
+                repos_type or None,
+                namespace or None,
+                path or None,
+                name or None,
+                extra,
+            )
+        )
 
     def normalise_file_sharing_service(self, client, service):
         # FIXME: Q&D fix as the bare file sharing service JID will lead to user own
@@ -167,14 +180,21 @@
         #   By specifying the user part, we for the use of the host repository.
         #   A cleaner way should be implemented
         if service.user is None:
-            service.user = self.host.plugins['XEP-0106'].escape(client.jid.user)
+            service.user = self.host.plugins["XEP-0106"].escape(client.jid.user)
 
     def get_file_sharing_id(self, service, namespace, path):
         return f"{service}_{namespace or ''}_{path or ''}"
 
     async def register_file_sharing(
-            self, client, service, repos_type=None, namespace=None, path=None, name=None,
-            extra=None):
+        self,
+        client,
+        service,
+        repos_type=None,
+        namespace=None,
+        path=None,
+        name=None,
+        extra=None,
+    ):
         """Register an interesting file repository in personal list
 
         @param service(jid.JID): service of the file repository
@@ -192,10 +212,10 @@
         interest_elt = domish.Element((NS_LIST_INTEREST, "interest"))
         interest_elt["namespace"] = self.host.get_namespace("fis")
         if name is not None:
-            interest_elt['name'] = name
-        thumb_url = extra.get('thumb_url')
+            interest_elt["name"] = name
+        thumb_url = extra.get("thumb_url")
         if thumb_url:
-            interest_elt['thumb_url'] = thumb_url
+            interest_elt["thumb_url"] = thumb_url
 
         file_sharing_elt = interest_elt.addElement("file_sharing")
         file_sharing_elt["service"] = service.full()
@@ -213,36 +233,41 @@
     def _list_interests_serialise(self, interests_data):
         interests = []
         for item_elt in interests_data[0]:
-            interest_data = {"id": item_elt['id']}
+            interest_data = {"id": item_elt["id"]}
             interest_elt = item_elt.interest
-            if interest_elt.hasAttribute('namespace'):
-                interest_data['namespace'] = interest_elt.getAttribute('namespace')
-            if interest_elt.hasAttribute('name'):
-                interest_data['name'] = interest_elt.getAttribute('name')
-            if interest_elt.hasAttribute('thumb_url'):
-                interest_data['thumb_url'] = interest_elt.getAttribute('thumb_url')
+            if interest_elt.hasAttribute("namespace"):
+                interest_data["namespace"] = interest_elt.getAttribute("namespace")
+            if interest_elt.hasAttribute("name"):
+                interest_data["name"] = interest_elt.getAttribute("name")
+            if interest_elt.hasAttribute("thumb_url"):
+                interest_data["thumb_url"] = interest_elt.getAttribute("thumb_url")
             elt = interest_elt.firstChildElement()
             if elt.uri != NS_LIST_INTEREST:
-                log.warning("unexpected child element, ignoring: {xml}".format(
-                    xml = elt.toXml()))
+                log.warning(
+                    "unexpected child element, ignoring: {xml}".format(xml=elt.toXml())
+                )
                 continue
-            if elt.name == 'pubsub':
-                interest_data.update({
-                    "type": "pubsub",
-                    "service": elt['service'],
-                    "node": elt['node'],
-                })
-                for attr in ('item', 'creator'):
+            if elt.name == "pubsub":
+                interest_data.update(
+                    {
+                        "type": "pubsub",
+                        "service": elt["service"],
+                        "node": elt["node"],
+                    }
+                )
+                for attr in ("item", "creator"):
                     if elt.hasAttribute(attr):
                         interest_data[attr] = elt[attr]
-            elif elt.name == 'file_sharing':
-                interest_data.update({
-                    "type": "file_sharing",
-                    "service": elt['service'],
-                })
-                if elt.hasAttribute('type'):
-                    interest_data['subtype'] = elt['type']
-                for attr in ('files_namespace', 'path'):
+            elif elt.name == "file_sharing":
+                interest_data.update(
+                    {
+                        "type": "file_sharing",
+                        "service": elt["service"],
+                    }
+                )
+                if elt.hasAttribute("type"):
+                    interest_data["subtype"] = elt["type"]
+                for attr in ("files_namespace", "path"):
                     if elt.hasAttribute(attr):
                         interest_data[attr] = elt[attr]
             else:
@@ -282,8 +307,9 @@
                 try:
                     interest_elt = next(item.elements(NS_LIST_INTEREST, "interest"))
                 except StopIteration:
-                    log.warning(_("Missing interest element: {xml}").format(
-                        xml=item.toXml()))
+                    log.warning(
+                        _("Missing interest element: {xml}").format(xml=item.toXml())
+                    )
                     continue
                 if interest_elt.getAttribute("namespace") == namespace:
                     filtered_items.append(item)
@@ -292,14 +318,15 @@
         return (items, metadata)
 
     def _interest_retract(self, service_s, item_id, profile_key):
-        d = self._p._retract_item(
-            service_s, NS_LIST_INTEREST, item_id, True, profile_key)
+        d = self._p._retract_item(service_s, NS_LIST_INTEREST, item_id, True, profile_key)
         d.addCallback(lambda __: None)
         return d
 
     async def get(self, client: SatXMPPEntity, item_id: str) -> dict:
         """Retrieve a specific interest in profile's list"""
-        items_data = await self._p.get_items(client, None, NS_LIST_INTEREST, item_ids=[item_id])
+        items_data = await self._p.get_items(
+            client, None, NS_LIST_INTEREST, item_ids=[item_id]
+        )
         try:
             return self._list_interests_serialise(items_data)[0]
         except IndexError:
--- a/libervia/backend/plugins/plugin_exp_parrot.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_exp_parrot.py	Wed Jun 19 18:44:57 2024 +0200
@@ -74,7 +74,7 @@
     #        raise trigger.SkipOtherTriggers
 
     def message_received_trigger(self, client, message_elt, post_treat):
-        """ Check if source is linked and repeat message, else do nothing  """
+        """Check if source is linked and repeat message, else do nothing"""
         # TODO: many things are not repeated (subject, thread, etc)
         from_jid = message_elt["from"]
 
@@ -93,7 +93,8 @@
 
             try:
                 entity_type = self.host.memory.entity_data_get(
-                    client, from_jid, [C.ENTITY_TYPE])[C.ENTITY_TYPE]
+                    client, from_jid, [C.ENTITY_TYPE]
+                )[C.ENTITY_TYPE]
             except (UnknownEntityError, KeyError):
                 entity_type = "contact"
             if entity_type == C.ENTITY_TYPE_MUC:
--- a/libervia/backend/plugins/plugin_exp_pubsub_admin.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_exp_pubsub_admin.py	Wed Jun 19 18:44:57 2024 +0200
@@ -37,9 +37,11 @@
     C.PI_RECOMMENDATIONS: [],
     C.PI_MAIN: "PubsubAdmin",
     C.PI_HANDLER: "no",
-    C.PI_DESCRIPTION: _("""\Implementation of Pubsub Administrator
+    C.PI_DESCRIPTION: _(
+        """\Implementation of Pubsub Administrator
 This allows a pubsub administrator to overwrite completly items, including publisher.
-Specially useful when importing a node."""),
+Specially useful when importing a node."""
+    ),
 }
 
 NS_PUBSUB_ADMIN = "https://salut-a-toi.org/spec/pubsub_admin:0"
@@ -58,35 +60,36 @@
             async_=True,
         )
 
-    def _publish(self, service, nodeIdentifier, items, extra=None,
-                 profile_key=C.PROF_KEY_NONE):
+    def _publish(
+        self, service, nodeIdentifier, items, extra=None, profile_key=C.PROF_KEY_NONE
+    ):
         client = self.host.get_client(profile_key)
         service = None if not service else jid.JID(service)
         extra = data_format.deserialise(extra)
-        items = [generic.parseXml(i.encode('utf-8')) for i in items]
-        return self.publish(
-            client, service, nodeIdentifier, items, extra
-        )
+        items = [generic.parseXml(i.encode("utf-8")) for i in items]
+        return self.publish(client, service, nodeIdentifier, items, extra)
 
     def _send_cb(self, iq_result):
         publish_elt = iq_result.admin.pubsub.publish
         ids = []
-        for item_elt in publish_elt.elements(pubsub.NS_PUBSUB, 'item'):
-            ids.append(item_elt['id'])
+        for item_elt in publish_elt.elements(pubsub.NS_PUBSUB, "item"):
+            ids.append(item_elt["id"])
         return ids
 
     def publish(self, client, service, nodeIdentifier, items, extra=None):
         for item in items:
-            if item.name != 'item' or item.uri != pubsub.NS_PUBSUB:
+            if item.name != "item" or item.uri != pubsub.NS_PUBSUB:
                 raise exceptions.DataError(
-                    'Invalid element, a pubsub item is expected: {xml}'.format(
-                    xml=item.toXml()))
+                    "Invalid element, a pubsub item is expected: {xml}".format(
+                        xml=item.toXml()
+                    )
+                )
         iq_elt = client.IQ()
-        iq_elt['to'] = service.full() if service else client.jid.userhost()
-        admin_elt = iq_elt.addElement((NS_PUBSUB_ADMIN, 'admin'))
-        pubsub_elt = admin_elt.addElement((pubsub.NS_PUBSUB, 'pubsub'))
-        publish_elt = pubsub_elt.addElement('publish')
-        publish_elt['node'] = nodeIdentifier
+        iq_elt["to"] = service.full() if service else client.jid.userhost()
+        admin_elt = iq_elt.addElement((NS_PUBSUB_ADMIN, "admin"))
+        pubsub_elt = admin_elt.addElement((pubsub.NS_PUBSUB, "pubsub"))
+        publish_elt = pubsub_elt.addElement("publish")
+        publish_elt["node"] = nodeIdentifier
         for item in items:
             publish_elt.addChild(item)
         d = iq_elt.send()
--- a/libervia/backend/plugins/plugin_exp_pubsub_hook.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_exp_pubsub_hook.py	Wed Jun 19 18:44:57 2024 +0200
@@ -55,7 +55,9 @@
     def __init__(self, host):
         log.info(_("PubSub Hook initialization"))
         self.host = host
-        self.node_hooks = {}  # keep track of the number of hooks per node (for all profiles)
+        self.node_hooks = (
+            {}
+        )  # keep track of the number of hooks per node (for all profiles)
         host.bridge.add_method(
             "ps_hook_add", ".plugin", in_sign="ssssbs", out_sign="", method=self._addHook
         )
@@ -108,7 +110,9 @@
         else:
             if self.node_hooks[node] == 0:
                 del self.node_hooks[node]
-                self.host.plugins["XEP-0060"].remove_managed_node(node, self._items_received)
+                self.host.plugins["XEP-0060"].remove_managed_node(
+                    node, self._items_received
+                )
                 log.debug(_("hook removed"))
             else:
                 log.debug(_("node still needed for an other hook"))
@@ -120,9 +124,7 @@
             )
         if hook_type != HOOK_TYPE_PYTHON_FILE:
             raise NotImplementedError(
-                _("{hook_type} hook type not implemented yet").format(
-                    hook_type=hook_type
-                )
+                _("{hook_type} hook type not implemented yet").format(hook_type=hook_type)
             )
         self._install_node_manager(client, node)
         hook_data = {"service": service, "type": hook_type, "arg": hook_arg}
@@ -160,7 +162,14 @@
                     try:
                         if hook_type == HOOK_TYPE_PYTHON_FILE:
                             hook_globals = {}
-                            exec(compile(open(hook_data["arg"], "rb").read(), hook_data["arg"], 'exec'), hook_globals)
+                            exec(
+                                compile(
+                                    open(hook_data["arg"], "rb").read(),
+                                    hook_data["arg"],
+                                    "exec",
+                                ),
+                                hook_globals,
+                            )
                             callback = hook_globals["hook"]
                         else:
                             raise NotImplementedError(
@@ -224,7 +233,9 @@
     def _removeHook(self, service, node, hook_type, hook_arg, profile):
         client = self.host.get_client(profile)
         service = jid.JID(service) if service else client.jid.userhostJID()
-        return self.remove_hook(client, service, node, hook_type or None, hook_arg or None)
+        return self.remove_hook(
+            client, service, node, hook_type or None, hook_arg or None
+        )
 
     def remove_hook(self, client, service, node, hook_type=None, hook_arg=None):
         """Remove a persistent or temporaty root
--- a/libervia/backend/plugins/plugin_import.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_import.py	Wed Jun 19 18:44:57 2024 +0200
@@ -139,8 +139,16 @@
         else:
             return importer.short_desc, importer.long_desc
 
-    def _do_import(self, import_handler, name, location, options, pubsub_service="",
-                  pubsub_node="", profile=C.PROF_KEY_NONE):
+    def _do_import(
+        self,
+        import_handler,
+        name,
+        location,
+        options,
+        pubsub_service="",
+        pubsub_node="",
+        profile=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile)
         options = {key: str(value) for key, value in options.items()}
         for option in import_handler.BOOL_OPTIONS:
@@ -169,8 +177,16 @@
         )
 
     @defer.inlineCallbacks
-    def do_import(self, client, import_handler, name, location, options=None,
-                 pubsub_service=None, pubsub_node=None,):
+    def do_import(
+        self,
+        client,
+        import_handler,
+        name,
+        location,
+        options=None,
+        pubsub_service=None,
+        pubsub_node=None,
+    ):
         """import data
 
         @param import_handler(object): instance of the import handler
@@ -324,9 +340,9 @@
         name = name.lower()
         if name in import_handler.importers:
             raise exceptions.ConflictError(
-                _(
-                    "An {handler_name} importer with the name {name} already exist"
-                ).format(handler_name=import_handler.name, name=name)
+                _("An {handler_name} importer with the name {name} already exist").format(
+                    handler_name=import_handler.name, name=name
+                )
             )
         import_handler.importers[name] = Importer(callback, short_desc, long_desc)
 
--- a/libervia/backend/plugins/plugin_merge_req_mercurial.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_merge_req_mercurial.py	Wed Jun 19 18:44:57 2024 +0200
@@ -24,6 +24,7 @@
 from libervia.backend.core.constants import Const as C
 from libervia.backend.core import exceptions
 from libervia.backend.core.log import getLogger
+
 log = getLogger(__name__)
 
 
@@ -34,15 +35,16 @@
     C.PI_DEPENDENCIES: ["MERGE_REQUESTS"],
     C.PI_MAIN: "MercurialHandler",
     C.PI_HANDLER: "no",
-    C.PI_DESCRIPTION: _("""Merge request handler for Mercurial""")
+    C.PI_DESCRIPTION: _("""Merge request handler for Mercurial"""),
 }
 
 SHORT_DESC = D_("handle Mercurial repository")
-CLEAN_RE = re.compile(r'[^\w -._]', flags=re.UNICODE)
+CLEAN_RE = re.compile(r"[^\w \-._]", flags=re.UNICODE)
 
 
 class MercurialProtocol(async_process.CommandProtocol):
     """handle hg commands"""
+
     name = "Mercurial"
     command = None
 
@@ -65,45 +67,58 @@
 
 
 class MercurialHandler(object):
-    data_types = ('mercurial_changeset',)
+    data_types = ("mercurial_changeset",)
 
     def __init__(self, host):
         log.info(_("Mercurial merge request handler initialization"))
         try:
-            MercurialProtocol.command = which('hg')[0]
+            MercurialProtocol.command = which("hg")[0]
         except IndexError:
-            raise exceptions.NotFound(_("Mercurial executable (hg) not found, "
-                                        "can't use Mercurial handler"))
+            raise exceptions.NotFound(
+                _("Mercurial executable (hg) not found, " "can't use Mercurial handler")
+            )
         self.host = host
-        self._m = host.plugins['MERGE_REQUESTS']
-        self._m.register('mercurial', self, self.data_types, SHORT_DESC)
-
+        self._m = host.plugins["MERGE_REQUESTS"]
+        self._m.register("mercurial", self, self.data_types, SHORT_DESC)
 
     def check(self, repository):
-        d = MercurialProtocol.run(repository, 'identify')
+        d = MercurialProtocol.run(repository, "identify")
         d.addCallback(lambda __: True)
         d.addErrback(lambda __: False)
         return d
 
     def export(self, repository):
         d = MercurialProtocol.run(
-            repository, 'export', '-g', '-r', 'outgoing() and ancestors(.)',
-            '--encoding=utf-8'
+            repository,
+            "export",
+            "-g",
+            "-r",
+            "outgoing() and ancestors(.)",
+            "--encoding=utf-8",
         )
-        d.addCallback(lambda data: data.decode('utf-8'))
+        d.addCallback(lambda data: data.decode("utf-8"))
         return d
 
     def import_(self, repository, data, data_type, item_id, service, node, extra):
         parsed_data = self.parse(data)
         try:
-            parsed_name = parsed_data[0]['commit_msg'].split('\n')[0]
-            parsed_name = CLEAN_RE.sub('', parsed_name)[:40]
+            parsed_name = parsed_data[0]["commit_msg"].split("\n")[0]
+            parsed_name = CLEAN_RE.sub("", parsed_name)[:40]
         except Exception:
-            parsed_name = ''
-        name = 'mr_{item_id}_{parsed_name}'.format(item_id=CLEAN_RE.sub('', item_id),
-                                                   parsed_name=parsed_name)
-        return MercurialProtocol.run(repository, 'qimport', '-g', '--name', name,
-                                     '--encoding=utf-8', '-', stdin=data)
+            parsed_name = ""
+        name = "mr_{item_id}_{parsed_name}".format(
+            item_id=CLEAN_RE.sub("", item_id), parsed_name=parsed_name
+        )
+        return MercurialProtocol.run(
+            repository,
+            "qimport",
+            "-g",
+            "--name",
+            name,
+            "--encoding=utf-8",
+            "-",
+            stdin=data,
+        )
 
     def parse(self, data, data_type=None):
         lines = data.splitlines()
@@ -113,55 +128,62 @@
             patch = {}
             commit_msg = []
             diff = []
-            state = 'init'
-            if lines[0] != '# HG changeset patch':
-                raise exceptions.DataError(_('invalid changeset signature'))
+            state = "init"
+            if lines[0] != "# HG changeset patch":
+                raise exceptions.DataError(_("invalid changeset signature"))
             # line index of this patch in the whole data
             patch_idx = total_lines - len(lines)
             del lines[0]
 
             for idx, line in enumerate(lines):
-                if state == 'init':
-                    if line.startswith('# '):
-                        if line.startswith('# User '):
+                if state == "init":
+                    if line.startswith("# "):
+                        if line.startswith("# User "):
                             elems = line[7:].split()
                             if not elems:
                                 continue
                             last = elems[-1]
-                            if (last.startswith('<') and last.endswith('>')
-                                and '@' in last):
+                            if (
+                                last.startswith("<")
+                                and last.endswith(">")
+                                and "@" in last
+                            ):
                                 patch[self._m.META_EMAIL] = elems.pop()[1:-1]
-                            patch[self._m.META_AUTHOR] = ' '.join(elems)
-                        elif line.startswith('# Date '):
+                            patch[self._m.META_AUTHOR] = " ".join(elems)
+                        elif line.startswith("# Date "):
                             time_data = line[7:].split()
                             if len(time_data) != 2:
-                                log.warning(_('unexpected time data: {data}')
-                                            .format(data=line[7:]))
+                                log.warning(
+                                    _("unexpected time data: {data}").format(
+                                        data=line[7:]
+                                    )
+                                )
                                 continue
-                            patch[self._m.META_TIMESTAMP] = (int(time_data[0])
-                                                             + int(time_data[1]))
-                        elif line.startswith('# Node ID '):
+                            patch[self._m.META_TIMESTAMP] = int(time_data[0]) + int(
+                                time_data[1]
+                            )
+                        elif line.startswith("# Node ID "):
                             patch[self._m.META_HASH] = line[10:]
-                        elif line.startswith('# Parent  '):
+                        elif line.startswith("# Parent  "):
                             patch[self._m.META_PARENT_HASH] = line[10:]
                     else:
-                        state = 'commit_msg'
-                if state == 'commit_msg':
-                    if line.startswith('diff --git a/'):
-                        state = 'diff'
+                        state = "commit_msg"
+                if state == "commit_msg":
+                    if line.startswith("diff --git a/"):
+                        state = "diff"
                         patch[self._m.META_DIFF_IDX] = patch_idx + idx + 1
                     else:
                         commit_msg.append(line)
-                if state == 'diff':
-                    if line.startswith('# ') or idx == len(lines)-1:
+                if state == "diff":
+                    if line.startswith("# ") or idx == len(lines) - 1:
                         # a new patch is starting or we have reached end of patches
-                        if idx == len(lines)-1:
+                        if idx == len(lines) - 1:
                             # end of patches, we need to keep the line
                             diff.append(line)
-                        patch[self._m.META_COMMIT_MSG] = '\n'.join(commit_msg)
-                        patch[self._m.META_DIFF] = '\n'.join(diff)
+                        patch[self._m.META_COMMIT_MSG] = "\n".join(commit_msg)
+                        patch[self._m.META_DIFF] = "\n".join(diff)
                         patches.append(patch)
-                        if idx == len(lines)-1:
+                        if idx == len(lines) - 1:
                             del lines[:]
                         else:
                             del lines[:idx]
--- a/libervia/backend/plugins/plugin_misc_account.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_account.py	Wed Jun 19 18:44:57 2024 +0200
@@ -314,7 +314,8 @@
             # there is no email when an existing XMPP account is used
             body = f"New account created on {domain}: {profile} [{email or '<no email>'}]"
             d_admin = sat_email.send_email(
-                self.host.memory.config, admins_emails, subject, body)
+                self.host.memory.config, admins_emails, subject, body
+            )
 
         admins_emails_txt = ", ".join(["<" + addr + ">" for addr in admins_emails])
         d_admin.addCallbacks(
@@ -348,7 +349,7 @@
                 "Account creation confirmation sent to <{}>".format(email)
             ),
             self._send_email_eb,
-            errbackArgs=[email]
+            errbackArgs=[email],
         )
         return defer.DeferredList([d_user, d_admin])
 
@@ -743,15 +744,18 @@
         d.addCallback(
             lambda __: self.host.memory.get_profile_name(jid_s)
         )  # checks if the profile has been successfuly created
-        d.addCallback(lambda profile: defer.ensureDeferred(
-            self.host.connect(profile, password, {}, 0)))
+        d.addCallback(
+            lambda profile: defer.ensureDeferred(
+                self.host.connect(profile, password, {}, 0)
+            )
+        )
 
         def connected(result):
             self.send_emails(None, profile=jid_s)
             return result
 
         def remove_profile(
-            failure
+            failure,
         ):  # profile has been successfully created but the XMPP credentials are wrong!
             log.debug(
                 "Removing previously auto-created profile: %s" % failure.getErrorMessage()
--- a/libervia/backend/plugins/plugin_misc_android.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_android.py	Wed Jun 19 18:44:57 2024 +0200
@@ -67,20 +67,20 @@
 from android import python_act
 
 
-Context = autoclass('android.content.Context')
-ConnectivityManager = autoclass('android.net.ConnectivityManager')
-MediaPlayer = autoclass('android.media.MediaPlayer')
-AudioManager = autoclass('android.media.AudioManager')
+Context = autoclass("android.content.Context")
+ConnectivityManager = autoclass("android.net.ConnectivityManager")
+MediaPlayer = autoclass("android.media.MediaPlayer")
+AudioManager = autoclass("android.media.AudioManager")
 
 # notifications
-AndroidString = autoclass('java.lang.String')
-PendingIntent = autoclass('android.app.PendingIntent')
-Intent = autoclass('android.content.Intent')
+AndroidString = autoclass("java.lang.String")
+PendingIntent = autoclass("android.app.PendingIntent")
+Intent = autoclass("android.content.Intent")
 
 # DNS
 # regex to find dns server prop with "getprop"
 RE_DNS = re.compile(r"^\[net\.[a-z0-9]+\.dns[0-4]\]: \[(.*)\]$", re.MULTILINE)
-SystemProperties = autoclass('android.os.SystemProperties')
+SystemProperties = autoclass("android.os.SystemProperties")
 
 #: delay between a pause event and sending the inactive indication to server, in seconds
 #: we don't send the indication immediately because user can be just checking something
@@ -148,7 +148,8 @@
             action_data = AndroidString(json.dumps(sat_action).encode())
             log.debug(f"adding extra {INTENT_EXTRA_ACTION} ==> {action_data}")
             notification_intent = notification_intent.putExtra(
-                INTENT_EXTRA_ACTION, action_data)
+                INTENT_EXTRA_ACTION, action_data
+            )
 
         # we use PendingIntent.FLAG_UPDATE_CURRENT here, otherwise extra won't be set
         # in the new intent (the old ACTION_MAIN intent will be reused). This differs
@@ -168,14 +169,12 @@
         # in the generic "notify" method).
         sat_action = kwargs.pop("sat_action", None)
         noti = None
-        message = kwargs.get('message', '').encode('utf-8')
-        ticker = kwargs.get('ticker', '').encode('utf-8')
-        title = AndroidString(
-            kwargs.get('title', '').encode('utf-8')
-        )
-        icon = kwargs.get('app_icon', '')
+        message = kwargs.get("message", "").encode("utf-8")
+        ticker = kwargs.get("ticker", "").encode("utf-8")
+        title = AndroidString(kwargs.get("title", "").encode("utf-8"))
+        icon = kwargs.get("app_icon", "")
 
-        if kwargs.get('toast', False):
+        if kwargs.get("toast", False):
             self._toast(message)
             return
         else:
@@ -212,7 +211,6 @@
         return FrontendStateProtocol(self.android_plugin)
 
 
-
 class AndroidPlugin(object):
 
     params = """
@@ -243,7 +241,7 @@
         log.info(_("plugin Android initialization"))
         log.info(f"using Android API {api_version}")
         self.host = host
-        self._csi = host.plugins.get('XEP-0352')
+        self._csi = host.plugins.get("XEP-0352")
         self._csi_timer = None
         host.memory.update_params(self.params)
         try:
@@ -268,7 +266,9 @@
                 raise e
         # we set a low priority because we want the notification to be sent after all
         # plugins have done their job
-        host.trigger.add("message_received", self.message_received_trigger, priority=-1000)
+        host.trigger.add(
+            "message_received", self.message_received_trigger, priority=-1000
+        )
 
         # profiles autoconnection
         host.bridge.add_method(
@@ -312,7 +312,7 @@
             callback=lambda *args, **kwargs: reactor.callFromThread(
                 self.on_connectivity_change
             ),
-            actions=["android.net.conn.CONNECTIVITY_CHANGE"]
+            actions=["android.net.conn.CONNECTIVITY_CHANGE"],
         )
         self.br.start()
 
@@ -355,14 +355,18 @@
             - there is a message and it is not a groupchat
             - message is not coming from ourself
         """
-        if (mess_data["message"] and mess_data["type"] != C.MESS_TYPE_GROUPCHAT
-            and not mess_data["from"].userhostJID() == client.jid.userhostJID()):
+        if (
+            mess_data["message"]
+            and mess_data["type"] != C.MESS_TYPE_GROUPCHAT
+            and not mess_data["from"].userhostJID() == client.jid.userhostJID()
+        ):
             message = next(iter(mess_data["message"].values()))
             try:
                 subject = next(iter(mess_data["subject"].values()))
             except StopIteration:
                 subject = D_("new message from {contact}").format(
-                    contact = mess_data['from'])
+                    contact=mess_data["from"]
+                )
 
             notification = Notification()
             notification._notify(
@@ -379,25 +383,24 @@
             vibrate_mode = ringer_mode == AudioManager.RINGER_MODE_VIBRATE
 
             ring_setting = self.host.memory.param_get_a(
-                PARAM_RING_NAME,
-                PARAM_RING_CATEGORY,
-                profile_key=client.profile
+                PARAM_RING_NAME, PARAM_RING_CATEGORY, profile_key=client.profile
             )
 
-            if ring_setting != 'never' and ringer_mode == AudioManager.RINGER_MODE_NORMAL:
+            if ring_setting != "never" and ringer_mode == AudioManager.RINGER_MODE_NORMAL:
                 self.notif_player.start()
 
             vibration_setting = self.host.memory.param_get_a(
-                PARAM_VIBRATE_NAME,
-                PARAM_VIBRATE_CATEGORY,
-                profile_key=client.profile
+                PARAM_VIBRATE_NAME, PARAM_VIBRATE_CATEGORY, profile_key=client.profile
             )
-            if (vibration_setting == 'always'
-                or vibration_setting == 'vibrate' and vibrate_mode):
-                    try:
-                        vibrator.vibrate()
-                    except Exception as e:
-                        log.warning("Can't use vibrator: {e}".format(e=e))
+            if (
+                vibration_setting == "always"
+                or vibration_setting == "vibrate"
+                and vibrate_mode
+            ):
+                try:
+                    vibrator.vibrate()
+                except Exception as e:
+                    log.warning("Can't use vibrator: {e}".format(e=e))
         return mess_data
 
     def message_received_trigger(self, client, message_elt, post_treat):
@@ -414,7 +417,8 @@
 
     async def _get_profiles_autoconnect(self):
         autoconnect_dict = await self.host.memory.storage.get_ind_param_values(
-            category='Connection', name='autoconnect_backend',
+            category="Connection",
+            name="autoconnect_backend",
         )
         return [p for p, v in autoconnect_dict.items() if C.bool(v)]
 
@@ -426,7 +430,8 @@
         if len(profiles_autoconnect) > 1:
             log.warning(
                 f"More that one profiles with backend autoconnection set found, picking "
-                f"up first one (full list: {profiles_autoconnect!r})")
+                f"up first one (full list: {profiles_autoconnect!r})"
+            )
         return profiles_autoconnect[0]
 
     # CSI
@@ -503,17 +508,22 @@
             elif net_type == NET_TYPE_MOBILE:
                 log.info("mobile data activated")
             else:
-                log.info("network activated (type={net_type_android})"
-                    .format(net_type_android=net_type_android))
+                log.info(
+                    "network activated (type={net_type_android})".format(
+                        net_type_android=net_type_android
+                    )
+                )
         else:
-            log.debug("_check_connectivity called without network change ({net_type})"
-                .format(net_type = net_type))
+            log.debug(
+                "_check_connectivity called without network change ({net_type})".format(
+                    net_type=net_type
+                )
+            )
 
         # we always call _handle_network_change even if there is not connectivity change
         # to be sure to reconnect when necessary
         await self._handle_network_change(net_type)
 
-
     def on_connectivity_change(self):
         log.debug("on_connectivity_change called")
         d = defer.ensureDeferred(self._check_connectivity())
@@ -527,7 +537,8 @@
 
         log.info(
             "Patching Twisted to use Android DNS resolver ({dns_servers})".format(
-            dns_servers=', '.join([s[0] for s in dns_servers]))
+                dns_servers=", ".join([s[0] for s in dns_servers])
+            )
         )
         dns_client.theResolver = dns_client.createResolver(servers=dns_servers)
 
@@ -538,7 +549,7 @@
             # thanks to A-IV at https://stackoverflow.com/a/11362271 for the way to go
             log.debug("Old API, using SystemProperties to find DNS")
             for idx in range(1, 5):
-                addr = SystemProperties.get(f'net.dns{idx}')
+                addr = SystemProperties.get(f"net.dns{idx}")
                 if abstract.isIPAddress(addr):
                     servers.append((addr, 53))
         else:
@@ -546,15 +557,18 @@
             # use of getprop inspired by various solutions at
             # https://stackoverflow.com/q/3070144
             # it's the most simple option, and it fit wells with async_process
-            getprop_paths = which('getprop')
+            getprop_paths = which("getprop")
             if getprop_paths:
                 try:
                     getprop_path = getprop_paths[0]
                     props = await async_process.run(getprop_path)
-                    servers = [(ip, 53) for ip in RE_DNS.findall(props.decode())
-                               if abstract.isIPAddress(ip)]
+                    servers = [
+                        (ip, 53)
+                        for ip in RE_DNS.findall(props.decode())
+                        if abstract.isIPAddress(ip)
+                    ]
                 except Exception as e:
-                    log.warning(f"Can't use \"getprop\" to find DNS server: {e}")
+                    log.warning(f'Can\'t use "getprop" to find DNS server: {e}')
         if not servers:
             # FIXME: Cloudflare's 1.1.1.1 seems to have a better privacy policy, to be
             #   checked.
@@ -562,5 +576,5 @@
                 "no server found, we have to use factory Google DNS, this is not ideal "
                 "for privacy"
             )
-            servers.append(('8.8.8.8', 53), ('8.8.4.4', 53))
+            servers.extend([("8.8.8.8", 53), ("8.8.4.4", 53)])
         return servers
--- a/libervia/backend/plugins/plugin_misc_app_manager/__init__.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_app_manager/__init__.py	Wed Jun 19 18:44:57 2024 +0200
@@ -45,7 +45,7 @@
     import yaml
 except ImportError:
     raise exceptions.MissingModule(
-        'Missing module PyYAML, please download/install it. You can use '
+        "Missing module PyYAML, please download/install it. You can use "
         '"pip install pyyaml"'
     )
 
@@ -59,8 +59,6 @@
     from yaml import Loader, Dumper
 
 
-
-
 PLUGIN_INFO = {
     C.PI_NAME: "Applications Manager",
     C.PI_IMPORT_NAME: "APP_MANAGER",
@@ -73,7 +71,8 @@
 
 Manage external applications using packagers, OS virtualization/containers or other
 software management tools.
-"""),
+"""
+    ),
 }
 
 APP_FILE_PREFIX = "libervia_app_"
@@ -92,7 +91,6 @@
         # instance id to app data map
         self._instances = {}
 
-
         self.persistent_data = persistent.LazyPersistentBinaryDict("app_manager")
 
         host.bridge.add_method(
@@ -128,30 +126,28 @@
         )
         # application has been started succeesfully,
         # args: name, instance_id, extra
-        host.bridge.add_signal(
-            "application_started", ".plugin", signature="sss"
-        )
+        host.bridge.add_signal("application_started", ".plugin", signature="sss")
         # application went wrong with the application
         # args: name, instance_id, extra
-        host.bridge.add_signal(
-            "application_error", ".plugin", signature="sss"
+        host.bridge.add_signal("application_error", ".plugin", signature="sss")
+        yaml.add_constructor("!libervia_conf", self._libervia_conf_constr, Loader=Loader)
+        yaml.add_constructor(
+            "!libervia_generate_pwd", self._libervia_generate_pwd_constr, Loader=Loader
         )
         yaml.add_constructor(
-            "!libervia_conf", self._libervia_conf_constr, Loader=Loader)
-        yaml.add_constructor(
-            "!libervia_generate_pwd", self._libervia_generate_pwd_constr, Loader=Loader)
-        yaml.add_constructor(
-            "!libervia_param", self._libervia_param_constr, Loader=Loader)
+            "!libervia_param", self._libervia_param_constr, Loader=Loader
+        )
 
     def unload(self):
         log.debug("unloading applications manager")
         for instances in self._started.values():
             for instance in instances:
-                data = instance['data']
-                if not data['single_instance']:
+                data = instance["data"]
+                if not data["single_instance"]:
                     log.debug(
-                        f"cleaning temporary directory at {data['_instance_dir_path']}")
-                    data['_instance_dir_obj'].cleanup()
+                        f"cleaning temporary directory at {data['_instance_dir_path']}"
+                    )
+                    data["_instance_dir_obj"].cleanup()
 
     def _libervia_conf_constr(self, loader, node) -> str:
         """Get a value from Libervia configuration
@@ -183,22 +179,27 @@
 
         value = self.host.memory.config_get(section, name, default)
         # FIXME: "public_url" is used only here and doesn't take multi-sites into account
-        if name == "public_url" and (not value or value.startswith('http')):
+        if name == "public_url" and (not value or value.startswith("http")):
             if not value:
-                log.warning(_(
-                    'No value found for "public_url", using "example.org" for '
-                    'now, please set the proper value in libervia.conf'))
+                log.warning(
+                    _(
+                        'No value found for "public_url", using "example.org" for '
+                        "now, please set the proper value in libervia.conf"
+                    )
+                )
             else:
-                log.warning(_(
-                    'invalid value for "public_url" ({value}), it musts not start with '
-                    'schema ("http"), ignoring it and using "example.org" '
-                    'instead')
-                        .format(value=value))
+                log.warning(
+                    _(
+                        'invalid value for "public_url" ({value}), it musts not start with '
+                        'schema ("http"), ignoring it and using "example.org" '
+                        "instead"
+                    ).format(value=value)
+                )
             value = "example.org"
 
         if filter_ is None:
             pass
-        elif filter_ == 'first':
+        elif filter_ == "first":
             value = value[0]
         elif filter_ == "not":
             value = C.bool(value)
@@ -232,7 +233,7 @@
         except KeyError:
             alphabet = string.ascii_letters + string.digits
             key_size = int(kwargs.get("size", 30))
-            key = ''.join(secrets.choice(alphabet) for __ in range(key_size))
+            key = "".join(secrets.choice(alphabet) for __ in range(key_size))
             self._app_persistent_data[pwd_data_key] = key
         else:
             log.debug(f"Re-using existing key for {name!r} password.")
@@ -255,7 +256,8 @@
         name = manager.name
         if name in self._managers:
             raise exceptions.ConflictError(
-                f"There is already a manager with the name {name}")
+                f"There is already a manager with the name {name}"
+            )
         self._managers[manager.name] = manager
         if manager.discover_path is not None:
             self.discover(manager.discover_path, manager)
@@ -269,25 +271,18 @@
         try:
             app_type = app_data["type"]
         except KeyError:
-            raise exceptions.DataError(
-                "app file doesn't have the mandatory \"type\" key"
-            )
+            raise exceptions.DataError('app file doesn\'t have the mandatory "type" key')
         if not isinstance(app_type, str):
-            raise exceptions.DataError(
-                f"invalid app data type: {app_type!r}"
-            )
+            raise exceptions.DataError(f"invalid app data type: {app_type!r}")
         app_type = app_type.strip()
         try:
             return self._managers[app_type]
         except KeyError:
             raise exceptions.NotFound(
-                f"No manager found to manage app of type {app_type!r}")
+                f"No manager found to manage app of type {app_type!r}"
+            )
 
-    def get_app_data(
-        self,
-        id_type: Optional[str],
-        identifier: str
-    ) -> dict:
+    def get_app_data(self, id_type: Optional[str], identifier: str) -> dict:
         """Retrieve instance's app_data from identifier
 
         @param id_type: type of the identifier, can be:
@@ -300,8 +295,8 @@
         @raise ValueError: id_type is invalid
         """
         if not id_type:
-            id_type = 'name'
-        if id_type == 'name':
+            id_type = "name"
+        if id_type == "name":
             identifier = identifier.lower().strip()
             try:
                 return next(iter(self._started[identifier]))
@@ -309,7 +304,7 @@
                 raise exceptions.NotFound(
                     f"No instance of {identifier!r} is currently running"
                 )
-        elif id_type == 'instance':
+        elif id_type == "instance":
             instance_id = identifier
             try:
                 return self._instances[instance_id]
@@ -320,11 +315,7 @@
         else:
             raise ValueError(f"invalid id_type: {id_type!r}")
 
-    def discover(
-        self,
-        dir_path: Path,
-        manager: AppManagerBackend|None = None
-    ) -> None:
+    def discover(self, dir_path: Path, manager: AppManagerBackend | None = None) -> None:
         """Search for app configuration file.
 
         App configuration files must start with [APP_FILE_PREFIX] and have a ``.yaml``
@@ -336,19 +327,16 @@
                     app_data = self.parse(file_path)
                     manager = self.get_manager(app_data)
                 except (exceptions.DataError, exceptions.NotFound) as e:
-                    log.warning(
-                        f"Can't parse {file_path}, skipping: {e}")
+                    log.warning(f"Can't parse {file_path}, skipping: {e}")
                     continue
-            app_name = file_path.stem[len(APP_FILE_PREFIX):].strip().lower()
+            app_name = file_path.stem[len(APP_FILE_PREFIX) :].strip().lower()
             if not app_name:
                 log.warning(f"invalid app file name at {file_path}")
                 continue
             app_dict = self._apps.setdefault(app_name, {})
             manager_set = app_dict.setdefault(manager, set())
             manager_set.add(file_path)
-            log.debug(
-                f"{app_name!r} {manager.name} application found"
-            )
+            log.debug(f"{app_name!r} {manager.name} application found")
 
     def parse(self, file_path: Path, params: Optional[dict] = None) -> dict:
         """Parse Libervia application file
@@ -367,7 +355,7 @@
         if "name" not in app_data:
             # note that we don't use lower() here as we want human readable name and
             # uppercase may be set on purpose
-            app_data['name'] = file_path.stem[len(APP_FILE_PREFIX):].strip()
+            app_data["name"] = file_path.stem[len(APP_FILE_PREFIX) :].strip()
         single_instance = app_data.setdefault("single_instance", True)
         if not isinstance(single_instance, bool):
             raise ValueError(
@@ -429,9 +417,7 @@
         try:
             app_file_path = next(iter(next(iter(self._apps[app_name].values()))))
         except KeyError:
-            raise exceptions.NotFound(
-                f"No application found with the name {app_name!r}"
-            )
+            raise exceptions.NotFound(f"No application found with the name {app_name!r}")
         log.info(f"starting {app_name!r}")
         self._app_persistent_data = await self.persistent_data.get(app_name) or {}
         self._app_persistent_data["last_started"] = time.time()
@@ -439,13 +425,10 @@
         app_data = self.parse(app_file_path, extra)
         await self.persistent_data.aset(app_name, self._app_persistent_data)
         app_data["_started"] = False
-        app_data['_file_path'] = app_file_path
-        app_data['_name_canonical'] = app_name
-        single_instance = app_data['single_instance']
-        ret_data = {
-            "name": app_name,
-            "started": False
-        }
+        app_data["_file_path"] = app_file_path
+        app_data["_name_canonical"] = app_name
+        single_instance = app_data["single_instance"]
+        ret_data = {"name": app_name, "started": False}
         if single_instance:
             if started_data:
                 instance_data = started_data[0]
@@ -462,14 +445,14 @@
                     PLUGIN_INFO[C.PI_IMPORT_NAME], app_name
                 )
                 cache_path.mkdir(0o700, parents=True, exist_ok=True)
-                app_data['_instance_dir_path'] = cache_path
+                app_data["_instance_dir_path"] = cache_path
         else:
             dest_dir_obj = tempfile.TemporaryDirectory(prefix="libervia_app_")
-            app_data['_instance_dir_obj'] = dest_dir_obj
-            app_data['_instance_dir_path'] = Path(dest_dir_obj.name)
-        instance_id = ret_data["instance"] = app_data['_instance_id'] = shortuuid.uuid()
+            app_data["_instance_dir_obj"] = dest_dir_obj
+            app_data["_instance_dir_path"] = Path(dest_dir_obj.name)
+        instance_id = ret_data["instance"] = app_data["_instance_id"] = shortuuid.uuid()
         manager = self.get_manager(app_data)
-        app_data['_manager'] = manager
+        app_data["_manager"] = manager
         started_data.append(app_data)
         self._instances[instance_id] = app_data
         # we retrieve exposed data such as url_prefix which can be useful computed exposed
@@ -482,7 +465,7 @@
             start = manager.start
         except AttributeError:
             raise exceptions.InternalError(
-                f"{manager.name} doesn't have the mandatory \"start\" method"
+                f'{manager.name} doesn\'t have the mandatory "start" method'
             )
         else:
             defer.ensureDeferred(self.start_app(start, app_data))
@@ -498,12 +481,8 @@
             self.host.bridge.application_error(
                 app_name,
                 instance_id,
-                data_format.serialise(
-                    {
-                        "class": str(type(e)),
-                        "msg": str(e)
-                    }
-                ))
+                data_format.serialise({"class": str(type(e)), "msg": str(e)}),
+            )
         else:
             app_data["_started"] = True
             self.host.bridge.application_started(app_name, instance_id, "")
@@ -512,7 +491,8 @@
     def _stop(self, identifier, id_type, extra):
         extra = data_format.deserialise(extra)
         return defer.ensureDeferred(
-            self.stop(str(identifier), str(id_type) or None, extra))
+            self.stop(str(identifier), str(id_type) or None, extra)
+        )
 
     async def stop(
         self,
@@ -527,15 +507,15 @@
 
         log.info(f"stopping {app_data['name']!r}")
 
-        app_name = app_data['_name_canonical']
-        instance_id = app_data['_instance_id']
-        manager = app_data['_manager']
+        app_name = app_data["_name_canonical"]
+        instance_id = app_data["_instance_id"]
+        manager = app_data["_manager"]
 
         try:
             stop = manager.stop
         except AttributeError:
             raise exceptions.InternalError(
-                f"{manager.name} doesn't have the mandatory \"stop\" method"
+                f'{manager.name} doesn\'t have the mandatory "stop" method'
             )
         else:
             try:
@@ -551,7 +531,8 @@
             del self._instances[instance_id]
         except KeyError:
             log.error(
-                f"INTERNAL ERROR: {instance_id!r} is not present in self._instances")
+                f"INTERNAL ERROR: {instance_id!r} is not present in self._instances"
+            )
 
         try:
             self._started[app_name].remove(app_data)
@@ -590,13 +571,13 @@
         to handle manager specific conventions.
         """
         app_data = self.get_app_data(id_type, identifier)
-        if app_data.get('_exposed_computed', False):
-            return app_data['expose']
+        if app_data.get("_exposed_computed", False):
+            return app_data["expose"]
         if extra is None:
             extra = {}
         expose = app_data.setdefault("expose", {})
         if "passwords" in expose:
-            passwords = expose['passwords']
+            passwords = expose["passwords"]
             for name, value in list(passwords.items()):
                 if isinstance(value, list):
                     # if we have a list, is the sequence of keys leading to the value
@@ -605,7 +586,8 @@
                         passwords[name] = self.get_app_data_value(value, app_data)
                     except KeyError:
                         log.warning(
-                            f"Can't retrieve exposed value for password {name!r}: {e}")
+                            f"Can't retrieve exposed value for password {name!r}: {e}"
+                        )
                         del passwords[name]
 
         for key in ("url_prefix", "front_url"):
@@ -614,9 +596,7 @@
                 try:
                     expose[key] = self.get_app_data_value(value, app_data)
                 except KeyError:
-                    log.warning(
-                        f"Can't retrieve exposed value for {key!r} at {value}"
-                    )
+                    log.warning(f"Can't retrieve exposed value for {key!r} at {value}")
                     del expose[key]
 
         front_url = expose.get("front_url")
@@ -628,35 +608,35 @@
                     path_elt = parsed_url.path.split("/", 1)
                     parsed_url = parsed_url._replace(
                         netloc=path_elt[0],
-                        path=f"/{path_elt[1]}" if len(path_elt) > 1 else ""
+                        path=f"/{path_elt[1]}" if len(path_elt) > 1 else "",
                     )
-                parsed_url = parsed_url._replace(scheme='https')
+                parsed_url = parsed_url._replace(scheme="https")
                 expose["front_url"] = urlunparse(parsed_url)
 
         if extra.get("skip_compute", False):
             return expose
 
         try:
-            compute_expose = app_data['_manager'].compute_expose
+            compute_expose = app_data["_manager"].compute_expose
         except AttributeError:
             pass
         else:
             await compute_expose(app_data)
 
-        app_data['_exposed_computed'] = True
+        app_data["_exposed_computed"] = True
         return expose
 
     async def _do_prepare(
         self,
         app_data: dict,
     ) -> None:
-        name = app_data['name']
-        dest_path = app_data['_instance_dir_path']
+        name = app_data["name"]
+        dest_path = app_data["_instance_dir_path"]
         if next(dest_path.iterdir(), None) != None:
             log.debug(f"There is already a prepared dir at {dest_path}, nothing to do")
             return
         try:
-            prepare = app_data['prepare'].copy()
+            prepare = app_data["prepare"].copy()
         except KeyError:
             prepare = {}
 
@@ -668,7 +648,7 @@
             log.debug(f"[{name}] [prepare] running {action!r} action")
             if action == "git":
                 try:
-                    git_path = which('git')[0]
+                    git_path = which("git")[0]
                 except IndexError:
                     raise exceptions.NotFound(
                         "Can't find \"git\" executable, {name} can't be started without it"
@@ -688,8 +668,8 @@
         self,
         app_data: dict,
     ) -> None:
-        dest_path = app_data['_instance_dir_path']
-        files = app_data.get('files')
+        dest_path = app_data["_instance_dir_path"]
+        files = app_data.get("files")
         if not files:
             return
         if not isinstance(files, dict):
--- a/libervia/backend/plugins/plugin_misc_app_manager/models.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_app_manager/models.py	Wed Jun 19 18:44:57 2024 +0200
@@ -27,7 +27,7 @@
     """Abstract class for App Manager."""
 
     name: str
-    discover_path: Path|None = None
+    discover_path: Path | None = None
 
     def __init__(self, host) -> None:
         """Initialize the App Manager.
--- a/libervia/backend/plugins/plugin_misc_attach.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_attach.py	Wed Jun 19 18:44:57 2024 +0200
@@ -49,7 +49,7 @@
 }
 
 
-AttachmentHandler = namedtuple('AttachmentHandler', ['can_handle', 'attach', 'priority'])
+AttachmentHandler = namedtuple("AttachmentHandler", ["can_handle", "attach", "priority"])
 
 
 class AttachPlugin:
@@ -60,7 +60,7 @@
         self._u = host.plugins["UPLOAD"]
         host.trigger.add("sendMessage", self._send_message_trigger)
         host.trigger.add("sendMessageComponent", self._send_message_trigger)
-        self._attachments_handlers = {'clear': [], 'encrypted': []}
+        self._attachments_handlers = {"clear": [], "encrypted": []}
         self.register(self.default_can_handle, self.default_attach, False, -1000)
 
     def register(self, can_handle, attach, encrypted=False, priority=0):
@@ -82,12 +82,13 @@
         """
         handler = AttachmentHandler(can_handle, attach, priority)
         handlers = (
-            self._attachments_handlers['encrypted']
-            if encrypted else self._attachments_handlers['clear']
+            self._attachments_handlers["encrypted"]
+            if encrypted
+            else self._attachments_handlers["clear"]
         )
         if handler in handlers:
             raise exceptions.InternalError(
-                'Attachment handler has been registered twice, this should never happen'
+                "Attachment handler has been registered twice, this should never happen"
             )
 
         handlers.append(handler)
@@ -112,33 +113,40 @@
                     media_type = mimetypes.guess_type(path, strict=False)[0]
                     if media_type is None:
                         log.warning(
-                            _("Can't resize attachment of unknown type: {attachment}")
-                            .format(attachment=attachment))
+                            _(
+                                "Can't resize attachment of unknown type: {attachment}"
+                            ).format(attachment=attachment)
+                        )
                         continue
                     attachment[C.KEY_ATTACHMENTS_MEDIA_TYPE] = media_type
 
-                main_type = media_type.split('/')[0]
+                main_type = media_type.split("/")[0]
                 if main_type == "image":
                     report = image.check(self.host, path)
-                    if report['too_large']:
+                    if report["too_large"]:
                         tmp_dir = Path(tempfile.mkdtemp())
                         tmp_dirs_to_clean.append(tmp_dir)
                         new_path = tmp_dir / path.name
                         await image.resize(
-                            path, report["recommended_size"], dest=new_path)
+                            path, report["recommended_size"], dest=new_path
+                        )
                         attachment["path"] = new_path
                         log.info(
-                            _("Attachment {path!r} has been resized at {new_path!r}")
-                            .format(path=str(path), new_path=str(new_path)))
+                            _(
+                                "Attachment {path!r} has been resized at {new_path!r}"
+                            ).format(path=str(path), new_path=str(new_path))
+                        )
                 else:
                     log.warning(
-                        _("Can't resize attachment of type {main_type!r}: {attachment}")
-                        .format(main_type=main_type, attachment=attachment))
+                        _(
+                            "Can't resize attachment of type {main_type!r}: {attachment}"
+                        ).format(main_type=main_type, attachment=attachment)
+                    )
 
         if client.encryption.is_encryption_requested(data):
-            handlers = self._attachments_handlers['encrypted']
+            handlers = self._attachments_handlers["encrypted"]
         else:
-            handlers = self._attachments_handlers['clear']
+            handlers = self._attachments_handlers["clear"]
 
         for handler in handlers:
             can_handle = await utils.as_deferred(handler.can_handle, client, data)
@@ -147,8 +155,9 @@
         else:
             raise exceptions.NotFound(
                 _("No plugin can handle attachment with {destinee}").format(
-                destinee = data['to']
-            ))
+                    destinee=data["to"]
+                )
+            )
 
         await utils.as_deferred(handler.attach, client, data)
 
@@ -159,10 +168,7 @@
         return data
 
     async def upload_files(
-        self,
-        client: SatXMPPEntity,
-        data: dict,
-        upload_cb: Optional[Callable] = None
+        self, client: SatXMPPEntity, data: dict, upload_cb: Optional[Callable] = None
     ):
         """Upload file, and update attachments
 
@@ -202,7 +208,7 @@
                 continue
 
             if "url" in attachment:
-                url = attachment.pop('url')
+                url = attachment.pop("url")
                 log.warning(
                     f"unexpected URL in attachment: {url!r}\nattachment: {attachment}"
                 )
@@ -214,18 +220,18 @@
 
             attachment["size"] = path.stat().st_size
 
-            extra = {
-                "attachment": attachment
-            }
+            extra = {"attachment": attachment}
             progress_id = attachment.pop("progress_id", None)
             if progress_id:
                 extra["progress_id"] = progress_id
             check_certificate = self.host.memory.param_get_a(
-                "check_certificate", "Connection", profile_key=client.profile)
+                "check_certificate", "Connection", profile_key=client.profile
+            )
             if not check_certificate:
-                extra['ignore_tls_errors'] = True
+                extra["ignore_tls_errors"] = True
                 log.warning(
-                    _("certificate check disabled for upload, this is dangerous!"))
+                    _("certificate check disabled for upload, this is dangerous!")
+                )
 
             __, upload_d = await upload_cb(
                 client=client,
@@ -255,8 +261,9 @@
         return defer.ensureDeferred(self.attach_files(client, data))
 
     def _send_message_trigger(
-        self, client, mess_data, pre_xml_treatments, post_xml_treatments):
-        if mess_data['extra'].get(C.KEY_ATTACHMENTS):
+        self, client, mess_data, pre_xml_treatments, post_xml_treatments
+    ):
+        if mess_data["extra"].get(C.KEY_ATTACHMENTS):
             post_xml_treatments.addCallback(self._attach_files, client=client)
         return True
 
@@ -271,8 +278,8 @@
             body_elt = data["xml"].addElement("body")
         attachments = data["extra"][C.KEY_ATTACHMENTS]
         if attachments:
-            body_links = '\n'.join(a['url'] for a in attachments)
+            body_links = "\n".join(a["url"] for a in attachments)
             if str(body_elt).strip():
                 # if there is already a body, we add a line feed before the first link
-                body_elt.addContent('\n')
+                body_elt.addContent("\n")
             body_elt.addContent(body_links)
--- a/libervia/backend/plugins/plugin_misc_download.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_download.py	Wed Jun 19 18:44:57 2024 +0200
@@ -72,18 +72,20 @@
         )
         self._download_callbacks = {}
         self._scheme_callbacks = {}
-        self.register_scheme('http', self.download_http)
-        self.register_scheme('https', self.download_http)
+        self.register_scheme("http", self.download_http)
+        self.register_scheme("https", self.download_http)
 
     def _file_download(
-            self, attachment_s: str, dest_path: str, extra_s: str, profile: str
+        self, attachment_s: str, dest_path: str, extra_s: str, profile: str
     ) -> defer.Deferred:
-        d = defer.ensureDeferred(self.file_download(
-            self.host.get_client(profile),
-            data_format.deserialise(attachment_s),
-            Path(dest_path),
-            data_format.deserialise(extra_s)
-        ))
+        d = defer.ensureDeferred(
+            self.file_download(
+                self.host.get_client(profile),
+                data_format.deserialise(attachment_s),
+                Path(dest_path),
+                data_format.deserialise(extra_s),
+            )
+        )
         d.addCallback(lambda ret: data_format.serialise(ret))
         return d
 
@@ -92,7 +94,7 @@
         client: SatXMPPEntity,
         attachment: Dict[str, Any],
         dest_path: Path,
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> Dict[str, Any]:
         """Download a file using best available method
 
@@ -103,8 +105,10 @@
         try:
             progress_id, __ = await self.download(client, attachment, dest_path, extra)
         except Exception as e:
-            if (isinstance(e, jabber_error.StanzaError)
-                and e.condition == 'not-acceptable'):
+            if (
+                isinstance(e, jabber_error.StanzaError)
+                and e.condition == "not-acceptable"
+            ):
                 reason = e.text
             else:
                 reason = str(e)
@@ -119,14 +123,16 @@
             return {"progress": progress_id}
 
     def _file_download_complete(
-            self, attachment_s: str, dest_path: str, extra_s: str, profile: str
+        self, attachment_s: str, dest_path: str, extra_s: str, profile: str
     ) -> defer.Deferred:
-        d = defer.ensureDeferred(self.file_download_complete(
-            self.host.get_client(profile),
-            data_format.deserialise(attachment_s),
-            Path(dest_path),
-            data_format.deserialise(extra_s)
-        ))
+        d = defer.ensureDeferred(
+            self.file_download_complete(
+                self.host.get_client(profile),
+                data_format.deserialise(attachment_s),
+                Path(dest_path),
+                data_format.deserialise(extra_s),
+            )
+        )
         d.addCallback(lambda path: str(path))
         return d
 
@@ -135,7 +141,7 @@
         client: SatXMPPEntity,
         attachment: Dict[str, Any],
         dest_path: Path,
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> str:
         """Helper method to fully download a file and return its path
 
@@ -152,11 +158,11 @@
         client: SatXMPPEntity,
         uri: str,
         dest_path: Union[Path, str],
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> Tuple[str, defer.Deferred]:
         if extra is None:
             extra = {}
-        uri_parsed = urlparse(uri, 'http')
+        uri_parsed = urlparse(uri, "http")
         if dest_path:
             dest_path = Path(dest_path)
             cache_uid = None
@@ -164,31 +170,32 @@
             filename = Path(unquote(uri_parsed.path)).name.strip() or C.FILE_DEFAULT_NAME
             # we don't use Path.suffixes because we don't want to have more than 2
             # suffixes, but we still want to handle suffixes like "tar.gz".
-            stem, *suffixes = filename.rsplit('.', 2)
+            stem, *suffixes = filename.rsplit(".", 2)
             # we hash the URL to have an unique identifier, and avoid double download
             url_hash = hashlib.sha256(uri_parsed.geturl().encode()).hexdigest()
             cache_uid = f"{stem}_{url_hash}"
             cache_data = client.cache.get_metadata(cache_uid)
             if cache_data is not None:
                 # file is already in cache, we return it
-                download_d = defer.succeed(cache_data['path'])
-                return '', download_d
+                download_d = defer.succeed(cache_data["path"])
+                return "", download_d
             else:
                 # the file is not in cache
-                unique_name = '.'.join([cache_uid] + suffixes)
+                unique_name = ".".join([cache_uid] + suffixes)
                 with client.cache.cache_data(
-                    "DOWNLOAD", cache_uid, filename=unique_name) as f:
+                    "DOWNLOAD", cache_uid, filename=unique_name
+                ) as f:
                     # we close the file and only use its name, the file will be opened
                     # by the registered callback
                     dest_path = Path(f.name)
 
         # should we check certificates?
         check_certificate = self.host.memory.param_get_a(
-            "check_certificate", "Connection", profile_key=client.profile)
+            "check_certificate", "Connection", profile_key=client.profile
+        )
         if not check_certificate:
-            extra['ignore_tls_errors'] = True
-            log.warning(
-                _("certificate check disabled for download, this is dangerous!"))
+            extra["ignore_tls_errors"] = True
+            log.warning(_("certificate check disabled for download, this is dangerous!"))
 
         try:
             callback = self._scheme_callbacks[uri_parsed.scheme]
@@ -197,11 +204,12 @@
         else:
             try:
                 progress_id, download_d = await callback(
-                    client, uri_parsed, dest_path, extra)
+                    client, uri_parsed, dest_path, extra
+                )
             except Exception as e:
-                log.warning(_(
-                    "Can't download URI {uri}: {reason}").format(
-                    uri=uri, reason=e))
+                log.warning(
+                    _("Can't download URI {uri}: {reason}").format(uri=uri, reason=e)
+                )
                 if cache_uid is not None:
                     client.cache.remove_from_cache(cache_uid)
                 elif dest_path.exists():
@@ -210,13 +218,12 @@
             download_d.addCallback(lambda __: dest_path)
             return progress_id, download_d
 
-
     async def download(
         self,
         client: SatXMPPEntity,
         attachment: Dict[str, Any],
         dest_path: Union[Path, str],
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> Tuple[str, defer.Deferred]:
         """Download a file from URI using suitable method
 
@@ -247,9 +254,7 @@
                 try:
                     cb = self._download_callbacks[source_type]
                 except KeyError:
-                    log.warning(
-                        f"no source handler registered for {source_type!r}"
-                    )
+                    log.warning(f"no source handler registered for {source_type!r}")
                 else:
                     try:
                         return await cb(client, attachment, source, dest_path, extra)
@@ -271,11 +276,14 @@
         source_type: str,
         callback: Callable[
             [
-                SatXMPPEntity, Dict[str, Any], Dict[str, Any], Union[str, Path],
-                Dict[str, Any]
+                SatXMPPEntity,
+                Dict[str, Any],
+                Dict[str, Any],
+                Union[str, Path],
+                Dict[str, Any],
             ],
-            Tuple[str, defer.Deferred]
-        ]
+            Tuple[str, defer.Deferred],
+        ],
     ) -> None:
         """Register a handler to manage a type of attachment source
 
@@ -334,17 +342,15 @@
     async def download_http(self, client, uri_parsed, dest_path, options):
         url = uri_parsed.geturl()
 
-        if options.get('ignore_tls_errors', False):
-            log.warning(
-                "TLS certificate check disabled, this is highly insecure"
-            )
+        if options.get("ignore_tls_errors", False):
+            log.warning("TLS certificate check disabled, this is highly insecure")
             treq_client = treq_client_no_ssl
         else:
             treq_client = treq
 
         head_data = await treq_client.head(url)
         try:
-            content_length = int(head_data.headers.getRawHeaders('content-length')[0])
+            content_length = int(head_data.headers.getRawHeaders("content-length")[0])
         except (KeyError, TypeError, IndexError):
             content_length = None
             log.debug(f"No content lenght found at {url}")
@@ -353,7 +359,7 @@
             client,
             dest_path,
             mode="wb",
-            size = content_length,
+            size=content_length,
         )
 
         progress_id = file_obj.uid
--- a/libervia/backend/plugins/plugin_misc_email_invitation.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_email_invitation.py	Wed Jun 19 18:44:57 2024 +0200
@@ -38,27 +38,37 @@
     C.PI_NAME: "Email Invitations",
     C.PI_IMPORT_NAME: "EMAIL_INVITATION",
     C.PI_TYPE: C.PLUG_TYPE_MISC,
-    C.PI_DEPENDENCIES: ['XEP-0077'],
+    C.PI_DEPENDENCIES: ["XEP-0077"],
     C.PI_RECOMMENDATIONS: ["IDENTITY"],
     C.PI_MAIN: "InvitationsPlugin",
     C.PI_HANDLER: "no",
-    C.PI_DESCRIPTION: _("""invitation of people without XMPP account""")
+    C.PI_DESCRIPTION: _("""invitation of people without XMPP account"""),
 }
 
 
 SUFFIX_MAX = 5
 INVITEE_PROFILE_TPL = "guest@@{uuid}"
-KEY_ID = 'id'
-KEY_JID = 'jid'
-KEY_CREATED = 'created'
-KEY_LAST_CONNECTION = 'last_connection'
-KEY_GUEST_PROFILE = 'guest_profile'
-KEY_PASSWORD = 'password'
-KEY_EMAILS_EXTRA = 'emails_extra'
-EXTRA_RESERVED = {KEY_ID, KEY_JID, KEY_CREATED, 'jid_', 'jid', KEY_LAST_CONNECTION,
-                  KEY_GUEST_PROFILE, KEY_PASSWORD, KEY_EMAILS_EXTRA}
+KEY_ID = "id"
+KEY_JID = "jid"
+KEY_CREATED = "created"
+KEY_LAST_CONNECTION = "last_connection"
+KEY_GUEST_PROFILE = "guest_profile"
+KEY_PASSWORD = "password"
+KEY_EMAILS_EXTRA = "emails_extra"
+EXTRA_RESERVED = {
+    KEY_ID,
+    KEY_JID,
+    KEY_CREATED,
+    "jid_",
+    "jid",
+    KEY_LAST_CONNECTION,
+    KEY_GUEST_PROFILE,
+    KEY_PASSWORD,
+    KEY_EMAILS_EXTRA,
+}
 DEFAULT_SUBJECT = D_("You have been invited by {host_name} to {app_name}")
-DEFAULT_BODY = D_("""Hello {name}!
+DEFAULT_BODY = D_(
+    """Hello {name}!
 
 You have received an invitation from {host_name} to participate to "{app_name}".
 To join, you just have to click on the following URL:
@@ -68,7 +78,8 @@
 If you want more details on {app_name}, you can check {app_url}.
 
 Welcome!
-""")
+"""
+)
 
 
 class InvitationsPlugin(object):
@@ -76,39 +87,79 @@
     def __init__(self, host):
         log.info(_("plugin Invitations initialization"))
         self.host = host
-        self.invitations = persistent.LazyPersistentBinaryDict('invitations')
-        host.bridge.add_method("invitation_create", ".plugin", in_sign='sasssssssssa{ss}s',
-                              out_sign='a{ss}',
-                              method=self._create,
-                              async_=True)
-        host.bridge.add_method("invitation_get", ".plugin", in_sign='s', out_sign='a{ss}',
-                              method=self.get,
-                              async_=True)
-        host.bridge.add_method("invitation_delete", ".plugin", in_sign='s', out_sign='',
-                              method=self._delete,
-                              async_=True)
-        host.bridge.add_method("invitation_modify", ".plugin", in_sign='sa{ss}b',
-                              out_sign='',
-                              method=self._modify,
-                              async_=True)
-        host.bridge.add_method("invitation_list", ".plugin", in_sign='s',
-                              out_sign='a{sa{ss}}',
-                              method=self._list,
-                              async_=True)
-        host.bridge.add_method("invitation_simple_create", ".plugin", in_sign='sssss',
-                              out_sign='a{ss}',
-                              method=self._simple_create,
-                              async_=True)
+        self.invitations = persistent.LazyPersistentBinaryDict("invitations")
+        host.bridge.add_method(
+            "invitation_create",
+            ".plugin",
+            in_sign="sasssssssssa{ss}s",
+            out_sign="a{ss}",
+            method=self._create,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "invitation_get",
+            ".plugin",
+            in_sign="s",
+            out_sign="a{ss}",
+            method=self.get,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "invitation_delete",
+            ".plugin",
+            in_sign="s",
+            out_sign="",
+            method=self._delete,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "invitation_modify",
+            ".plugin",
+            in_sign="sa{ss}b",
+            out_sign="",
+            method=self._modify,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "invitation_list",
+            ".plugin",
+            in_sign="s",
+            out_sign="a{sa{ss}}",
+            method=self._list,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "invitation_simple_create",
+            ".plugin",
+            in_sign="sssss",
+            out_sign="a{ss}",
+            method=self._simple_create,
+            async_=True,
+        )
 
     def check_extra(self, extra):
         if EXTRA_RESERVED.intersection(extra):
             raise ValueError(
-                _("You can't use following key(s) in extra, they are reserved: {}")
-                .format(', '.join(EXTRA_RESERVED.intersection(extra))))
+                _(
+                    "You can't use following key(s) in extra, they are reserved: {}"
+                ).format(", ".join(EXTRA_RESERVED.intersection(extra)))
+            )
 
-    def _create(self, email='', emails_extra=None, jid_='', password='', name='',
-                host_name='', language='', url_template='', message_subject='',
-                message_body='', extra=None, profile=''):
+    def _create(
+        self,
+        email="",
+        emails_extra=None,
+        jid_="",
+        password="",
+        name="",
+        host_name="",
+        language="",
+        url_template="",
+        message_subject="",
+        message_body="",
+        extra=None,
+        profile="",
+    ):
         # XXX: we don't use **kwargs here to keep arguments name for introspection with
         #      D-Bus bridge
         if emails_extra is None:
@@ -117,16 +168,24 @@
         if extra is None:
             extra = {}
         else:
-            extra = {str(k): str(v) for k,v in extra.items()}
+            extra = {str(k): str(v) for k, v in extra.items()}
 
-        kwargs = {"extra": extra,
-                  KEY_EMAILS_EXTRA: [str(e) for e in emails_extra]
-                  }
+        kwargs = {"extra": extra, KEY_EMAILS_EXTRA: [str(e) for e in emails_extra]}
 
         # we need to be sure that values are unicode, else they won't be pickled correctly
         # with D-Bus
-        for key in ("jid_", "password", "name", "host_name", "email", "language",
-                    "url_template", "message_subject", "message_body", "profile"):
+        for key in (
+            "jid_",
+            "password",
+            "name",
+            "host_name",
+            "email",
+            "language",
+            "url_template",
+            "message_subject",
+            "message_body",
+            "profile",
+        ):
             value = locals()[key]
             if value:
                 kwargs[key] = str(value)
@@ -152,16 +211,13 @@
                 return invitation
 
     async def _create_account_and_profile(
-        self,
-        id_: str,
-        kwargs: dict,
-        extra: dict
+        self, id_: str, kwargs: dict, extra: dict
     ) -> None:
         """Create XMPP account and Libervia profile for guest"""
         ## XMPP account creation
-        password = kwargs.pop('password', None)
+        password = kwargs.pop("password", None)
         if password is None:
-           password = utils.generate_password()
+            password = utils.generate_password()
         assert password
         # XXX: password is here saved in clear in database
         #      it is needed for invitation as the same password is used for profile
@@ -171,14 +227,15 @@
         #        not be saved and could be used to encrypt profile password.
         extra[KEY_PASSWORD] = password
 
-        jid_ = kwargs.pop('jid_', None)
+        jid_ = kwargs.pop("jid_", None)
         if not jid_:
-            domain = self.host.memory.config_get(None, 'xmpp_domain')
+            domain = self.host.memory.config_get(None, "xmpp_domain")
             if not domain:
                 # TODO: fallback to profile's domain
                 raise ValueError(_("You need to specify xmpp_domain in sat.conf"))
-            jid_ = "invitation-{uuid}@{domain}".format(uuid=shortuuid.uuid(),
-                                                        domain=domain)
+            jid_ = "invitation-{uuid}@{domain}".format(
+                uuid=shortuuid.uuid(), domain=domain
+            )
         jid_ = jid.JID(jid_)
         extra[KEY_JID] = jid_.full()
 
@@ -186,42 +243,46 @@
             # we don't register account if there is no user as anonymous login is then
             # used
             try:
-                await self.host.plugins['XEP-0077'].register_new_account(jid_, password)
+                await self.host.plugins["XEP-0077"].register_new_account(jid_, password)
             except error.StanzaError as e:
                 prefix = jid_.user
                 idx = 0
-                while e.condition == 'conflict':
+                while e.condition == "conflict":
                     if idx >= SUFFIX_MAX:
                         raise exceptions.ConflictError(_("Can't create XMPP account"))
-                    jid_.user = prefix + '_' + str(idx)
-                    log.info(_("requested jid already exists, trying with {}".format(
-                        jid_.full())))
+                    jid_.user = prefix + "_" + str(idx)
+                    log.info(
+                        _(
+                            "requested jid already exists, trying with {}".format(
+                                jid_.full()
+                            )
+                        )
+                    )
                     try:
-                        await self.host.plugins['XEP-0077'].register_new_account(
-                            jid_,
-                            password
+                        await self.host.plugins["XEP-0077"].register_new_account(
+                            jid_, password
                         )
                     except error.StanzaError:
                         idx += 1
                     else:
                         break
-                if e.condition != 'conflict':
+                if e.condition != "conflict":
                     raise e
 
             log.info(_("account {jid_} created").format(jid_=jid_.full()))
 
         ## profile creation
 
-        extra[KEY_GUEST_PROFILE] = guest_profile = INVITEE_PROFILE_TPL.format(
-            uuid=id_
-        )
+        extra[KEY_GUEST_PROFILE] = guest_profile = INVITEE_PROFILE_TPL.format(uuid=id_)
         # profile creation should not fail as we generate unique name ourselves
         await self.host.memory.create_profile(guest_profile, password)
         await self.host.memory.start_session(password, guest_profile)
-        await self.host.memory.param_set("JabberID", jid_.full(), "Connection",
-                                        profile_key=guest_profile)
-        await self.host.memory.param_set("Password", password, "Connection",
-                                        profile_key=guest_profile)
+        await self.host.memory.param_set(
+            "JabberID", jid_.full(), "Connection", profile_key=guest_profile
+        )
+        await self.host.memory.param_set(
+            "Password", password, "Connection", profile_key=guest_profile
+        )
 
     async def create(self, **kwargs):
         r"""Create an invitation
@@ -284,15 +345,17 @@
             - filled extra dictionary, as saved in the databae
         """
         ## initial checks
-        extra = kwargs.pop('extra', {})
+        extra = kwargs.pop("extra", {})
         if set(kwargs).intersection(extra):
             raise ValueError(
                 _("You can't use following key(s) in both args and extra: {}").format(
-                ', '.join(set(kwargs).intersection(extra))))
+                    ", ".join(set(kwargs).intersection(extra))
+                )
+            )
 
         self.check_extra(extra)
 
-        email = kwargs.pop('email', None)
+        email = kwargs.pop("email", None)
 
         existing = await self.get_existing_invitation(email)
         if existing is not None:
@@ -300,16 +363,20 @@
             extra.update(existing)
             del extra[KEY_ID]
 
-        emails_extra = kwargs.pop('emails_extra', [])
+        emails_extra = kwargs.pop("emails_extra", [])
         if not email and emails_extra:
             raise ValueError(
-                _('You need to provide a main email address before using emails_extra'))
+                _("You need to provide a main email address before using emails_extra")
+            )
 
-        if (email is not None
-            and not 'url_template' in kwargs
-            and not 'message_body' in kwargs):
+        if (
+            email is not None
+            and not "url_template" in kwargs
+            and not "message_body" in kwargs
+        ):
             raise ValueError(
-                _("You need to provide url_template if you use default message body"))
+                _("You need to provide url_template if you use default message body")
+            )
 
         ## uuid
         log.info(_("creating an invitation"))
@@ -318,64 +385,62 @@
         if existing is None:
             await self._create_account_and_profile(id_, kwargs, extra)
 
-        profile = kwargs.pop('profile', None)
+        profile = kwargs.pop("profile", None)
         guest_profile = extra[KEY_GUEST_PROFILE]
         jid_ = jid.JID(extra[KEY_JID])
 
         ## identity
-        name = kwargs.pop('name', None)
+        name = kwargs.pop("name", None)
         password = extra[KEY_PASSWORD]
         if name is not None:
-            extra['name'] = name
+            extra["name"] = name
             try:
-                id_plugin = self.host.plugins['IDENTITY']
+                id_plugin = self.host.plugins["IDENTITY"]
             except KeyError:
                 pass
             else:
                 await self.host.connect(guest_profile, password)
                 guest_client = self.host.get_client(guest_profile)
-                await id_plugin.set_identity(guest_client, {'nicknames': [name]})
+                await id_plugin.set_identity(guest_client, {"nicknames": [name]})
                 await self.host.disconnect(guest_profile)
 
         ## email
-        language = kwargs.pop('language', None)
+        language = kwargs.pop("language", None)
         if language is not None:
-            extra['language'] = language.strip()
+            extra["language"] = language.strip()
 
         if email is not None:
-            extra['email'] = email
+            extra["email"] = email
             data_format.iter2dict(KEY_EMAILS_EXTRA, extra)
-            url_template = kwargs.pop('url_template', '')
-            format_args = {
-                'uuid': id_,
-                'app_name': C.APP_NAME,
-                'app_url': C.APP_URL}
+            url_template = kwargs.pop("url_template", "")
+            format_args = {"uuid": id_, "app_name": C.APP_NAME, "app_url": C.APP_URL}
 
             if name is None:
-                format_args['name'] = email
+                format_args["name"] = email
             else:
-                format_args['name'] = name
+                format_args["name"] = name
 
             if profile is None:
-                format_args['profile'] = ''
+                format_args["profile"] = ""
             else:
-                format_args['profile'] = extra['profile'] = profile
+                format_args["profile"] = extra["profile"] = profile
 
-            host_name = kwargs.pop('host_name', None)
+            host_name = kwargs.pop("host_name", None)
             if host_name is None:
-                format_args['host_name'] = profile or _("somebody")
+                format_args["host_name"] = profile or _("somebody")
             else:
-                format_args['host_name'] = extra['host_name'] = host_name
+                format_args["host_name"] = extra["host_name"] = host_name
 
             invite_url = url_template.format(**format_args)
-            format_args['url'] = invite_url
+            format_args["url"] = invite_url
 
             await sat_email.send_email(
                 self.host.memory.config,
                 [email] + emails_extra,
-                (kwargs.pop('message_subject', None) or DEFAULT_SUBJECT).format(
-                    **format_args),
-                (kwargs.pop('message_body', None) or DEFAULT_BODY).format(**format_args),
+                (kwargs.pop("message_subject", None) or DEFAULT_SUBJECT).format(
+                    **format_args
+                ),
+                (kwargs.pop("message_body", None) or DEFAULT_BODY).format(**format_args),
             )
 
         ## roster
@@ -388,7 +453,7 @@
             except Exception as e:
                 log.error(f"Can't get host profile: {profile}: {e}")
             else:
-                await self.host.contact_update(client, jid_, name, ['guests'])
+                await self.host.contact_update(client, jid_, name, ["guests"])
 
         if kwargs:
             log.warning(_("Not all arguments have been consumed: {}").format(kwargs))
@@ -410,11 +475,12 @@
         d = defer.ensureDeferred(
             self.simple_create(client, invitee_email, invitee_name, url_template, extra)
         )
-        d.addCallback(lambda data: {k: str(v) for k,v in data.items()})
+        d.addCallback(lambda data: {k: str(v) for k, v in data.items()})
         return d
 
     async def simple_create(
-        self, client, invitee_email, invitee_name, url_template, extra):
+        self, client, invitee_email, invitee_name, url_template, extra
+    ):
         """Simplified method to invite somebody by email"""
         return await self.create(
             name=invitee_name,
@@ -439,19 +505,20 @@
         """Delete an invitation data and associated XMPP account"""
         log.info(f"deleting invitation {id_}")
         data = await self.get(id_)
-        guest_profile = data['guest_profile']
-        password = data['password']
+        guest_profile = data["guest_profile"]
+        password = data["password"]
         try:
             await self.host.connect(guest_profile, password)
             guest_client = self.host.get_client(guest_profile)
             # XXX: be extra careful to use guest_client and not client below, as this will
             #   delete the associated XMPP account
             log.debug("deleting XMPP account")
-            await self.host.plugins['XEP-0077'].unregister(guest_client, None)
+            await self.host.plugins["XEP-0077"].unregister(guest_client, None)
         except (error.StanzaError, sasl.SASLAuthError) as e:
             log.warning(
                 f"Can't delete {guest_profile}'s XMPP account, maybe it as already been "
-                f"deleted: {e}")
+                f"deleted: {e}"
+            )
         try:
             await self.host.memory.profile_delete_async(guest_profile, True)
         except Exception as e:
@@ -461,8 +528,7 @@
         log.info(f"{id_} invitation has been deleted")
 
     def _modify(self, id_, new_extra, replace):
-        return self.modify(id_, {str(k): str(v) for k,v in new_extra.items()},
-                           replace)
+        return self.modify(id_, {str(k): str(v) for k, v in new_extra.items()}, replace)
 
     def modify(self, id_, new_extra, replace=False):
         """Modify invitation data
@@ -475,6 +541,7 @@
         @raise KeyError: there is not invitation with this id_
         """
         self.check_extra(new_extra)
+
         def got_current_data(current_data):
             if replace:
                 new_data = new_extra
@@ -485,7 +552,7 @@
                         continue
             else:
                 new_data = current_data
-                for k,v in new_extra.items():
+                for k, v in new_extra.items():
                     if k in EXTRA_RESERVED:
                         log.warning(_("Skipping reserved key {key}").format(key=k))
                         continue
@@ -515,7 +582,10 @@
         """
         invitations = await self.invitations.all()
         if profile != C.PROF_KEY_NONE:
-            invitations = {id_:data for id_, data in invitations.items()
-                           if data.get('profile') == profile}
+            invitations = {
+                id_: data
+                for id_, data in invitations.items()
+                if data.get("profile") == profile
+            }
 
         return invitations
--- a/libervia/backend/plugins/plugin_misc_extra_pep.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_extra_pep.py	Wed Jun 19 18:44:57 2024 +0200
@@ -71,4 +71,6 @@
         host.memory.update_params(self.params)
 
     def get_followed_entities(self, profile_key):
-        return self.host.memory.param_get_a(PARAM_NAME, PARAM_KEY, profile_key=profile_key)
+        return self.host.memory.param_get_a(
+            PARAM_NAME, PARAM_KEY, profile_key=profile_key
+        )
--- a/libervia/backend/plugins/plugin_misc_file.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_file.py	Wed Jun 19 18:44:57 2024 +0200
@@ -57,7 +57,7 @@
 SENDING_TITLE = D_("File sending")
 CONFIRM = D_(
     '{peer} wants to send the file "{name}" to you:\n{desc}\n\nThe file has a size of '
-    '{size_human}\n\nDo you accept ?'
+    "{size_human}\n\nDo you accept ?"
 )
 CONFIRM_TITLE = D_("Confirm file transfer")
 CONFIRM_OVERWRITE = D_("File {} already exists, are you sure you want to overwrite ?")
@@ -97,13 +97,19 @@
         name: str,
         file_desc: str,
         extra_s: str,
-        profile: str = C.PROF_KEY_NONE
+        profile: str = C.PROF_KEY_NONE,
     ) -> defer.Deferred:
         client = self.host.get_client(profile)
-        d = defer.ensureDeferred(self.file_send(
-            client, jid.JID(peer_jid_s), filepath, name or None, file_desc or None,
-            data_format.deserialise(extra_s)
-        ))
+        d = defer.ensureDeferred(
+            self.file_send(
+                client,
+                jid.JID(peer_jid_s),
+                filepath,
+                name or None,
+                file_desc or None,
+                data_format.deserialise(extra_s),
+            )
+        )
         d.addCallback(data_format.serialise)
         return d
 
@@ -111,10 +117,10 @@
         self,
         client: SatXMPPEntity,
         peer_jid: jid.JID,
-        filepath: str|Path,
-        filename: str|None=None,
-        file_desc: str|None=None,
-        extra: dict|None=None
+        filepath: str | Path,
+        filename: str | None = None,
+        file_desc: str | None = None,
+        extra: dict | None = None,
     ) -> dict:
         """Send a file using best available method
 
@@ -132,8 +138,9 @@
         if not filename:
             filename = filepath.name
         for manager, __ in self._file_managers:
-            if await utils.as_deferred(manager.can_handle_file_send,
-                                      client, peer_jid, str(filepath)):
+            if await utils.as_deferred(
+                manager.can_handle_file_send, client, peer_jid, str(filepath)
+            ):
                 try:
                     method_name = manager.name
                 except AttributeError:
@@ -144,19 +151,26 @@
                     )
                 )
                 try:
-                    file_data= await utils.as_deferred(
-                        manager.file_send, client, peer_jid, str(filepath), filename, file_desc,
-                        extra
+                    file_data = await utils.as_deferred(
+                        manager.file_send,
+                        client,
+                        peer_jid,
+                        str(filepath),
+                        filename,
+                        file_desc,
+                        extra,
                     )
                 except Exception as e:
                     log.warning(
-                        _("Can't send {filepath} to {peer_jid} with {method_name}: "
-                          "{reason}").format(
-                              filepath=filepath,
-                              peer_jid=peer_jid,
-                              method_name=method_name,
-                              reason=e
-                          )
+                        _(
+                            "Can't send {filepath} to {peer_jid} with {method_name}: "
+                            "{reason}"
+                        ).format(
+                            filepath=filepath,
+                            peer_jid=peer_jid,
+                            method_name=method_name,
+                            reason=e,
+                        )
                     )
                     continue
                 if "progress" not in file_data:
@@ -181,7 +195,7 @@
         return self.file_send(client, peer_jid, path)
 
     def _file_send_menu(self, data, profile):
-        """ XMLUI activated by menu: return file sending UI
+        """XMLUI activated by menu: return file sending UI
 
         @param profile: %(doc_profile)s
         """
@@ -215,13 +229,14 @@
         """
         m_data = (manager, priority)
         if m_data in self._file_managers:
-            raise exceptions.ConflictError(
-                f"Manager {manager} is already registered"
-            )
-        if not hasattr(manager, "can_handle_file_send") or not hasattr(manager, "file_send"):
+            raise exceptions.ConflictError(f"Manager {manager} is already registered")
+        if not hasattr(manager, "can_handle_file_send") or not hasattr(
+            manager, "file_send"
+        ):
             raise ValueError(
                 f'{manager} must have both "can_handle_file_send" and "file_send" methods to '
-                'be registered')
+                "be registered"
+            )
         self._file_managers.append(m_data)
         self._file_managers.sort(key=lambda m: m[1], reverse=True)
 
@@ -237,8 +252,7 @@
     # the overwrite check is done here
 
     def open_file_write(self, client, file_path, transfer_data, file_data, stream_object):
-        """create SatFile or FileStremaObject for the requested file and fill suitable data
-        """
+        """create SatFile or FileStreamObject for the requested file and fill suitable data"""
         if stream_object:
             assert "stream_object" not in transfer_data
             transfer_data["stream_object"] = stream.FileStreamObject(
--- a/libervia/backend/plugins/plugin_misc_groupblog.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_groupblog.py	Wed Jun 19 18:44:57 2024 +0200
@@ -35,9 +35,7 @@
 NS_PUBSUB = "http://jabber.org/protocol/pubsub"
 NS_GROUPBLOG = "http://salut-a-toi.org/protocol/groupblog"
 # NS_PUBSUB_EXP = 'http://goffi.org/protocol/pubsub' #for non official features
-NS_PUBSUB_EXP = (
-    NS_PUBSUB
-)  # XXX: we can't use custom namespace as Wokkel's PubSubService use official NS
+NS_PUBSUB_EXP = NS_PUBSUB  # XXX: we can't use custom namespace as Wokkel's PubSubService use official NS
 NS_PUBSUB_GROUPBLOG = NS_PUBSUB_EXP + "#groupblog"
 NS_PUBSUB_ITEM_CONFIG = NS_PUBSUB_EXP + "#item-config"
 
@@ -107,7 +105,7 @@
         access_model = config_form.get(self._p.OPT_ACCESS_MODEL, self._p.ACCESS_OPEN)
         if access_model == self._p.ACCESS_PUBLISHER_ROSTER:
             opt = self._p.OPT_ROSTER_GROUPS_ALLOWED
-            microblog_data['groups'] = config_form.fields[opt].values
+            microblog_data["groups"] = config_form.fields[opt].values
 
     def _data_2_entry_trigger(self, client, mb_data, entry_elt, item_elt):
         """Build fine access permission if needed
@@ -115,7 +113,7 @@
         This trigger check if "group*" key are present,
         and create a fine item config to restrict view to these groups
         """
-        groups = mb_data.get('groups', [])
+        groups = mb_data.get("groups", [])
         if not groups:
             return
         if not client.server_groupblog_available:
@@ -137,7 +135,8 @@
         """
         if "group" in mb_data:
             options[self._p.OPT_ACCESS_MODEL] = self._p.ACCESS_PUBLISHER_ROSTER
-            options[self._p.OPT_ROSTER_GROUPS_ALLOWED] = mb_data['groups']
+            options[self._p.OPT_ROSTER_GROUPS_ALLOWED] = mb_data["groups"]
+
 
 @implementer(iwokkel.IDisco)
 class GroupBlog_handler(XMPPHandler):
--- a/libervia/backend/plugins/plugin_misc_identity.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_identity.py	Wed Jun 19 18:44:57 2024 +0200
@@ -43,7 +43,6 @@
     )
 
 
-
 log = getLogger(__name__)
 
 
@@ -86,7 +85,7 @@
                 # (it is stored only for roster entities)
                 "store": True,
                 "store_serialisation": self._avatar_ser,
-                "store_deserialisation": self._avatar_deser
+                "store_deserialisation": self._avatar_deser,
             },
             "nicknames": {
                 "type": list,
@@ -103,7 +102,7 @@
                 "get_all": True,
                 "get_post_treatment": self.description_get_post_treatment,
                 "store": True,
-            }
+            },
         }
         host.trigger.add("roster_update", self._roster_update_trigger)
         host.memory.set_signal_on_update("avatar")
@@ -161,14 +160,15 @@
         client._identity_update_lock = []
         # we restore known identities from database
         client._identity_storage = persistent.LazyPersistentBinaryDict(
-            "identity", client.profile)
+            "identity", client.profile
+        )
 
         stored_data = await client._identity_storage.all()
 
         to_delete = []
 
         for key, value in stored_data.items():
-            entity_s, name = key.split('\n')
+            entity_s, name = key.split("\n")
             try:
                 metadata = self.metadata[name]
             except KeyError:
@@ -181,31 +181,31 @@
                     value = deser_method(value)
             entity = jid.JID(entity_s)
 
-            if name == 'avatar':
+            if name == "avatar":
                 if value is not None:
                     try:
-                        cache_uid = value['cache_uid']
+                        cache_uid = value["cache_uid"]
                         if not cache_uid:
                             raise ValueError
-                        filename = value['filename']
+                        filename = value["filename"]
                         if not filename:
                             raise ValueError
                     except (ValueError, KeyError):
                         log.warning(
                             f"invalid data for {entity} avatar, it will be deleted: "
-                            f"{value}")
+                            f"{value}"
+                        )
                         to_delete.append(key)
                         continue
                     cache = self.host.common_cache.get_metadata(cache_uid)
                     if cache is None:
                         log.debug(
-                            f"purging avatar for {entity}: it is not in cache anymore")
+                            f"purging avatar for {entity}: it is not in cache anymore"
+                        )
                         to_delete.append(key)
                         continue
 
-            self.host.memory.update_entity_data(
-                client, entity, name, value, silent=True
-            )
+            self.host.memory.update_entity_data(client, entity, name, value, silent=True)
 
         for key in to_delete:
             await client._identity_storage.adel(key)
@@ -219,22 +219,19 @@
             )
             defer.ensureDeferred(
                 self.update(
-                    client,
-                    IMPORT_NAME,
-                    "nicknames",
-                    [roster_item.name],
-                    roster_item.jid
+                    client, IMPORT_NAME, "nicknames", [roster_item.name], roster_item.jid
                 )
             )
         return True
 
     def register(
-            self,
-            origin: str,
-            metadata_name: str,
-            cb_get: Union[Coroutine, defer.Deferred],
-            cb_set: Union[Coroutine, defer.Deferred],
-            priority: int=0):
+        self,
+        origin: str,
+        metadata_name: str,
+        cb_get: Union[Coroutine, defer.Deferred],
+        cb_set: Union[Coroutine, defer.Deferred],
+        priority: int = 0,
+    ):
         """Register callbacks to handle identity metadata
 
         @param origin: namespace of the plugin managing this metadata
@@ -251,7 +248,7 @@
         if not metadata_name in self.metadata.keys():
             raise ValueError(f"Invalid metadata_name: {metadata_name!r}")
         callback = Callback(origin=origin, get=cb_get, set=cb_set, priority=priority)
-        cb_list = self.metadata[metadata_name].setdefault('callbacks', [])
+        cb_list = self.metadata[metadata_name].setdefault("callbacks", [])
         cb_list.append(callback)
         cb_list.sort(key=lambda c: c.priority, reverse=True)
 
@@ -276,7 +273,8 @@
         if not isinstance(value, value_type):
             raise ValueError(
                 f"{value} has wrong type: it is {type(value)} while {value_type} was "
-                f"expected")
+                f"expected"
+            )
 
     def get_field_type(self, metadata_name: str) -> str:
         """Return the type the requested field
@@ -287,13 +285,13 @@
         return self.metadata[metadata_name]["type"]
 
     async def get(
-            self,
-            client: SatXMPPEntity,
-            metadata_name: str,
-            entity: Optional[jid.JID],
-            use_cache: bool=True,
-            prefilled_values: Optional[Dict[str, Any]]=None
-        ):
+        self,
+        client: SatXMPPEntity,
+        metadata_name: str,
+        entity: Optional[jid.JID],
+        use_cache: bool = True,
+        prefilled_values: Optional[Dict[str, Any]] = None,
+    ):
         """Retrieve identity metadata of an entity
 
         if metadata is already in cache, it is returned. Otherwise, registered callbacks
@@ -311,28 +309,31 @@
             metadata = self.metadata[metadata_name]
         except KeyError:
             raise ValueError(f"Invalid metadata name: {metadata_name!r}")
-        get_all = metadata.get('get_all', False)
+        get_all = metadata.get("get_all", False)
         if use_cache:
             try:
-                data = self.host.memory.get_entity_datum(
-                    client, entity, metadata_name)
+                data = self.host.memory.get_entity_datum(client, entity, metadata_name)
             except (KeyError, exceptions.UnknownEntityError):
                 pass
             else:
                 return data
 
         try:
-            callbacks = metadata['callbacks']
+            callbacks = metadata["callbacks"]
         except KeyError:
-            log.warning(_("No callback registered for {metadata_name}")
-                        .format(metadata_name=metadata_name))
+            log.warning(
+                _("No callback registered for {metadata_name}").format(
+                    metadata_name=metadata_name
+                )
+            )
             return [] if get_all else None
 
         if get_all:
             all_data = []
         elif prefilled_values is not None:
             raise exceptions.InternalError(
-                "prefilled_values can only be used when `get_all` is set")
+                "prefilled_values can only be used when `get_all` is set"
+            )
 
         for callback in callbacks:
             try:
@@ -340,15 +341,18 @@
                     data = prefilled_values[callback.origin]
                     log.debug(
                         f"using prefilled values {data!r} for {metadata_name} with "
-                        f"{callback.origin}")
+                        f"{callback.origin}"
+                    )
                 else:
                     data = await defer.ensureDeferred(callback.get(client, entity))
             except exceptions.CancelError:
                 continue
             except Exception as e:
                 log.warning(
-                    _("Error while trying to get {metadata_name} with {callback}: {e}")
-                    .format(callback=callback.get, metadata_name=metadata_name, e=e))
+                    _(
+                        "Error while trying to get {metadata_name} with {callback}: {e}"
+                    ).format(callback=callback.get, metadata_name=metadata_name, e=e)
+                )
             else:
                 if data:
                     self.check_type(metadata_name, data)
@@ -369,10 +373,9 @@
         if post_treatment is not None:
             data = await utils.as_deferred(post_treatment, client, entity, data)
 
-        self.host.memory.update_entity_data(
-            client, entity, metadata_name, data)
+        self.host.memory.update_entity_data(client, entity, metadata_name, data)
 
-        if metadata.get('store', False):
+        if metadata.get("store", False):
             if data is not None:
                 ser_method = metadata.get("store_serialisation")
                 if ser_method is not None:
@@ -401,10 +404,13 @@
         self.check_type(metadata_name, data)
 
         try:
-            callbacks = metadata['callbacks']
+            callbacks = metadata["callbacks"]
         except KeyError:
-            log.warning(_("No callback registered for {metadata_name}")
-                        .format(metadata_name=metadata_name))
+            log.warning(
+                _("No callback registered for {metadata_name}").format(
+                    metadata_name=metadata_name
+                )
+            )
             return exceptions.FeatureNotFound(f"Can't set {metadata_name} for {entity}")
 
         for callback in callbacks:
@@ -414,8 +420,10 @@
                 continue
             except Exception as e:
                 log.warning(
-                    _("Error while trying to set {metadata_name} with {callback}: {e}")
-                    .format(callback=callback.set, metadata_name=metadata_name, e=e))
+                    _(
+                        "Error while trying to set {metadata_name} with {callback}: {e}"
+                    ).format(callback=callback.set, metadata_name=metadata_name, e=e)
+                )
             else:
                 break
         else:
@@ -431,7 +439,7 @@
         origin: str,
         metadata_name: str,
         data: Any,
-        entity: Optional[jid.JID]
+        entity: Optional[jid.JID],
     ):
         """Update a metadata in cache
 
@@ -445,8 +453,7 @@
         metadata = self.metadata[metadata_name]
 
         try:
-            cached_data = self.host.memory.get_entity_datum(
-                client, entity, metadata_name)
+            cached_data = self.host.memory.get_entity_datum(client, entity, metadata_name)
         except (KeyError, exceptions.UnknownEntityError):
             # metadata is not cached, we do the update
             pass
@@ -461,21 +468,22 @@
                 if cached_data is None:
                     log.debug(
                         f"{metadata_name} for {entity} is already disabled, nothing to "
-                        f"do")
+                        f"do"
+                    )
                     return
             elif cached_data is None:
                 pass
             elif not update_is_new_data(client, entity, cached_data, data):
                 log.debug(
-                    f"{metadata_name} for {entity} is already in cache, nothing to "
-                    f"do")
+                    f"{metadata_name} for {entity} is already in cache, nothing to " f"do"
+                )
                 return
 
         # we can't use the cache, so we do the update
 
         log.debug(f"updating {metadata_name} for {entity}")
 
-        if metadata.get('get_all', False):
+        if metadata.get("get_all", False):
             # get_all is set, meaning that we have to check all plugins
             # so we first delete current cache
             try:
@@ -491,14 +499,14 @@
             return
 
         if data is not None:
-            data_filter = metadata['update_data_filter']
+            data_filter = metadata["update_data_filter"]
             if data_filter is not None:
                 data = await utils.as_deferred(data_filter, client, entity, data)
             self.check_type(metadata_name, data)
 
         self.host.memory.update_entity_data(client, entity, metadata_name, data)
 
-        if metadata.get('store', False):
+        if metadata.get("store", False):
             key = f"{entity}\n{metadata_name}"
             if data is not None:
                 ser_method = metadata.get("store_serialisation")
@@ -519,14 +527,9 @@
     def _set_avatar(self, file_path, entity, profile_key=C.PROF_KEY_NONE):
         client = self.host.get_client(profile_key)
         entity = jid.JID(entity) if entity else None
-        return defer.ensureDeferred(
-            self.set(client, "avatar", file_path, entity))
+        return defer.ensureDeferred(self.set(client, "avatar", file_path, entity))
 
-    def _blocking_cache_avatar(
-        self,
-        source: str,
-        avatar_data: dict[str, Any]
-    ):
+    def _blocking_cache_avatar(self, source: str, avatar_data: dict[str, Any]):
         """This method is executed in a separated thread"""
         if avatar_data["media_type"] == "image/svg+xml":
             # for vector image, we save directly
@@ -562,13 +565,11 @@
         img_buf.seek(0)
         image_hash = hashlib.sha1(img_buf.read()).hexdigest()
         img_buf.seek(0)
-        with self.host.common_cache.cache_data(
-            source, image_hash, media_type
-        ) as f:
+        with self.host.common_cache.cache_data(source, image_hash, media_type) as f:
             f.write(img_buf.read())
-            avatar_data['path'] = Path(f.name)
-            avatar_data['filename'] = avatar_data['path'].name
-        avatar_data['cache_uid'] = image_hash
+            avatar_data["path"] = Path(f.name)
+            avatar_data["filename"] = avatar_data["path"].name
+        avatar_data["cache_uid"] = image_hash
 
     async def cache_avatar(self, source: str, avatar_data: Dict[str, Any]) -> None:
         """Resize if necessary and cache avatar
@@ -590,14 +591,14 @@
         if not file_path.is_file():
             raise ValueError(f"There is no file at {file_path} to use as avatar")
         avatar_data = {
-            'path': file_path,
-            'filename': file_path.name,
-            'media_type': image.guess_type(file_path),
+            "path": file_path,
+            "filename": file_path.name,
+            "media_type": image.guess_type(file_path),
         }
-        media_type = avatar_data['media_type']
+        media_type = avatar_data["media_type"]
         if media_type is None:
             raise ValueError(f"Can't identify type of image at {file_path}")
-        if not media_type.startswith('image/'):
+        if not media_type.startswith("image/"):
             raise ValueError(f"File at {file_path} doesn't appear to be an image")
         await self.cache_avatar(IMPORT_NAME, avatar_data)
         return avatar_data
@@ -607,10 +608,10 @@
         await self.update(client, IMPORT_NAME, "avatar", avatar_data, entity)
 
     def avatar_build_metadata(
-            self,
-            path: Path,
-            media_type: Optional[str] = None,
-            cache_uid: Optional[str] = None
+        self,
+        path: Path,
+        media_type: Optional[str] = None,
+        cache_uid: Optional[str] = None,
     ) -> Optional[Dict[str, Union[str, Path, None]]]:
         """Helper method to generate avatar metadata
 
@@ -639,12 +640,12 @@
             }
 
     def avatar_update_is_new_data(self, client, entity, cached_data, new_data):
-        return new_data['path'] != cached_data['path']
+        return new_data["path"] != cached_data["path"]
 
     async def avatar_update_data_filter(self, client, entity, data):
         if not isinstance(data, dict):
             raise ValueError(f"Invalid data type ({type(data)}), a dict is expected")
-        mandatory_keys = {'path', 'filename', 'cache_uid'}
+        mandatory_keys = {"path", "filename", "cache_uid"}
         if not data.keys() >= mandatory_keys:
             raise ValueError(f"missing avatar data keys: {mandatory_keys - data.keys()}")
         return data
@@ -702,19 +703,17 @@
         return not set(new_nicknames).issubset(cached_data)
 
     async def description_get_post_treatment(
-        self,
-        client: SatXMPPEntity,
-        entity: jid.JID,
-        plugin_description: List[str]
+        self, client: SatXMPPEntity, entity: jid.JID, plugin_description: List[str]
     ) -> str:
         """Join all descriptions in a unique string"""
-        return '\n'.join(plugin_description)
+        return "\n".join(plugin_description)
 
     def _get_identity(self, entity_s, metadata_filter, use_cache, profile):
         entity = jid.JID(entity_s)
         client = self.host.get_client(profile)
         d = defer.ensureDeferred(
-            self.get_identity(client, entity, metadata_filter, use_cache))
+            self.get_identity(client, entity, metadata_filter, use_cache)
+        )
         d.addCallback(data_format.serialise)
         return d
 
@@ -723,7 +722,7 @@
         client: SatXMPPEntity,
         entity: Optional[jid.JID] = None,
         metadata_filter: Optional[List[str]] = None,
-        use_cache: bool = True
+        use_cache: bool = True,
     ) -> Dict[str, Any]:
         """Retrieve identity of an entity
 
@@ -743,7 +742,8 @@
 
         for metadata_name in metadata_names:
             id_data[metadata_name] = await self.get(
-                client, metadata_name, entity, use_cache)
+                client, metadata_name, entity, use_cache
+            )
 
         return id_data
 
@@ -751,7 +751,7 @@
         entities = [jid.JID(e) for e in entities_s]
         client = self.host.get_client(profile)
         d = defer.ensureDeferred(self.get_identities(client, entities, metadata_filter))
-        d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()}))
+        d.addCallback(lambda d: data_format.serialise({str(j): i for j, i in d.items()}))
         return d
 
     async def get_identities(
@@ -792,7 +792,7 @@
     def _get_base_identities(self, profile_key):
         client = self.host.get_client(profile_key)
         d = defer.ensureDeferred(self.get_base_identities(client))
-        d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()}))
+        d.addCallback(lambda d: data_format.serialise({str(j): i for j, i in d.items()}))
         return d
 
     async def get_base_identities(
@@ -809,11 +809,7 @@
         else:
             entities = client.roster.get_jids() + [client.jid.userhostJID()]
 
-        return await self.get_identities(
-            client,
-            entities,
-            ['avatar', 'nicknames']
-        )
+        return await self.get_identities(client, entities, ["avatar", "nicknames"])
 
     def _set_identity(self, id_data_s, profile):
         client = self.host.get_client(profile)
@@ -827,11 +823,14 @@
         """
         if not id_data.keys() <= self.metadata.keys():
             raise ValueError(
-                f"Invalid metadata names: {id_data.keys() - self.metadata.keys()}")
+                f"Invalid metadata names: {id_data.keys() - self.metadata.keys()}"
+            )
         for metadata_name, data in id_data.items():
             try:
                 await self.set(client, metadata_name, data)
             except Exception as e:
                 log.warning(
-                    _("Can't set metadata {metadata_name!r}: {reason}")
-                    .format(metadata_name=metadata_name, reason=e))
+                    _("Can't set metadata {metadata_name!r}: {reason}").format(
+                        metadata_name=metadata_name, reason=e
+                    )
+                )
--- a/libervia/backend/plugins/plugin_misc_ip.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_ip.py	Wed Jun 19 18:44:57 2024 +0200
@@ -58,9 +58,7 @@
 }
 
 # TODO: GET_IP_PAGE should be configurable in sat.conf
-GET_IP_PAGE = (
-    "http://salut-a-toi.org/whereami/"
-)  # This page must only return external IP of the requester
+GET_IP_PAGE = "http://salut-a-toi.org/whereami/"  # This page must only return external IP of the requester
 GET_IP_LABEL = D_("Allow external get IP")
 GET_IP_CATEGORY = "General"
 GET_IP_NAME = "allow_get_ip"
@@ -75,7 +73,9 @@
 Do you agree to do this request ?
 """
 ).format(
-    page=GET_IP_PAGE, domain=urllib.parse.urlparse(GET_IP_PAGE).netloc, app_name=C.APP_NAME
+    page=GET_IP_PAGE,
+    domain=urllib.parse.urlparse(GET_IP_PAGE).netloc,
+    app_name=C.APP_NAME,
 )
 NS_IP_CHECK = "urn:xmpp:sic:1"
 
--- a/libervia/backend/plugins/plugin_misc_jid_search.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_jid_search.py	Wed Jun 19 18:44:57 2024 +0200
@@ -152,7 +152,6 @@
                     )
                 )
 
-
         matches.sort(
             key=lambda item: (item.exact_match, item.relevance or 0, item.in_roster),
             reverse=True,
--- a/libervia/backend/plugins/plugin_misc_lists.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_lists.py	Wed Jun 19 18:44:57 2024 +0200
@@ -40,8 +40,13 @@
     C.PI_IMPORT_NAME: "LISTS",
     C.PI_TYPE: "EXP",
     C.PI_PROTOCOLS: [],
-    C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0346", "XEP-0277", "IDENTITY",
-                        "PUBSUB_INVITATION"],
+    C.PI_DEPENDENCIES: [
+        "XEP-0060",
+        "XEP-0346",
+        "XEP-0277",
+        "IDENTITY",
+        "PUBSUB_INVITATION",
+    ],
     C.PI_MAIN: "PubsubLists",
     C.PI_HANDLER: "no",
     C.PI_DESCRIPTION: _("""Pubsub lists management plugin"""),
@@ -63,44 +68,26 @@
                 "label": D_("status"),
                 "type": "list-single",
                 "options": [
-                    {
-                        "label": D_("to do"),
-                        "value": "todo"
-                    },
-                    {
-                        "label": D_("in progress"),
-                        "value": "in_progress"
-                    },
-                    {
-                        "label": D_("done"),
-                        "value": "done"
-                    },
+                    {"label": D_("to do"), "value": "todo"},
+                    {"label": D_("in progress"), "value": "in_progress"},
+                    {"label": D_("done"), "value": "done"},
                 ],
-                "value": "todo"
+                "value": "todo",
             },
             {
                 "name": "priority",
                 "label": D_("priority"),
                 "type": "list-single",
                 "options": [
-                    {
-                        "label": D_("major"),
-                        "value": "major"
-                    },
-                    {
-                        "label": D_("normal"),
-                        "value": "normal"
-                    },
-                    {
-                        "label": D_("minor"),
-                        "value": "minor"
-                    },
+                    {"label": D_("major"), "value": "major"},
+                    {"label": D_("normal"), "value": "normal"},
+                    {"label": D_("minor"), "value": "minor"},
                 ],
-                "value": "normal"
+                "value": "normal",
             },
             {"name": "body", "type": "xhtml"},
             {"name": "comments_uri"},
-        ]
+        ],
     },
     "grocery": {
         "name": D_("Grocery List"),
@@ -113,18 +100,12 @@
                 "label": D_("status"),
                 "type": "list-single",
                 "options": [
-                    {
-                        "label": D_("to buy"),
-                        "value": "to_buy"
-                    },
-                    {
-                        "label": D_("bought"),
-                        "value": "bought"
-                    },
+                    {"label": D_("to buy"), "value": "to_buy"},
+                    {"label": D_("bought"), "value": "bought"},
                 ],
-                "value": "to_buy"
+                "value": "to_buy",
             },
-        ]
+        ],
     },
     "tickets": {
         "name": D_("Tickets"),
@@ -140,65 +121,38 @@
                 "label": D_("type"),
                 "type": "list-single",
                 "options": [
-                    {
-                        "label": D_("bug"),
-                        "value": "bug"
-                    },
-                    {
-                        "label": D_("feature request"),
-                        "value": "feature"
-                    },
+                    {"label": D_("bug"), "value": "bug"},
+                    {"label": D_("feature request"), "value": "feature"},
                 ],
-                "value": "bug"
+                "value": "bug",
             },
             {
                 "name": "status",
                 "label": D_("status"),
                 "type": "list-single",
                 "options": [
-                    {
-                        "label": D_("queued"),
-                        "value": "queued"
-                    },
-                    {
-                        "label": D_("started"),
-                        "value": "started"
-                    },
-                    {
-                        "label": D_("review"),
-                        "value": "review"
-                    },
-                    {
-                        "label": D_("closed"),
-                        "value": "closed"
-                    },
+                    {"label": D_("queued"), "value": "queued"},
+                    {"label": D_("started"), "value": "started"},
+                    {"label": D_("review"), "value": "review"},
+                    {"label": D_("closed"), "value": "closed"},
                 ],
-                "value": "queued"
+                "value": "queued",
             },
             {
                 "name": "priority",
                 "label": D_("priority"),
                 "type": "list-single",
                 "options": [
-                    {
-                        "label": D_("major"),
-                        "value": "major"
-                    },
-                    {
-                        "label": D_("normal"),
-                        "value": "normal"
-                    },
-                    {
-                        "label": D_("minor"),
-                        "value": "minor"
-                    },
+                    {"label": D_("major"), "value": "major"},
+                    {"label": D_("normal"), "value": "normal"},
+                    {"label": D_("minor"), "value": "minor"},
                 ],
-                "value": "normal"
+                "value": "normal",
             },
             {"name": "body", "type": "xhtml"},
             {"name": "comments_uri"},
-        ]
-    }
+        ],
+    },
 }
 
 
@@ -211,9 +165,7 @@
         self.namespace = self._s.get_submitted_ns(APP_NS_TICKETS)
         host.register_namespace("tickets", APP_NS_TICKETS)
         host.register_namespace("tickets_type", NS_TICKETS_TYPE)
-        self.host.plugins["PUBSUB_INVITATION"].register(
-            APP_NS_TICKETS, self
-        )
+        self.host.plugins["PUBSUB_INVITATION"].register(APP_NS_TICKETS, self)
         self._p = self.host.plugins["XEP-0060"]
         self._m = self.host.plugins["XEP-0277"]
         host.bridge.add_method(
@@ -221,8 +173,7 @@
             ".plugin",
             in_sign="ssiassss",
             out_sign="s",
-            method=lambda service, node, max_items, items_ids, sub_id, extra, profile_key:
-                self._s._get(
+            method=lambda service, node, max_items, items_ids, sub_id, extra, profile_key: self._s._get(
                 service,
                 node,
                 max_items,
@@ -237,7 +188,8 @@
                     "updated": self._s.date_filter,
                     "time_limit": self._s.date_filter,
                 },
-                profile_key=profile_key),
+                profile_key=profile_key,
+            ),
             async_=True,
         )
         host.bridge.add_method(
@@ -262,8 +214,11 @@
             in_sign="sss",
             out_sign="s",
             method=lambda service, nodeIdentifier, profile_key: self._s._get_ui_schema(
-                service, nodeIdentifier, default_node=self.namespace,
-                profile_key=profile_key),
+                service,
+                nodeIdentifier,
+                default_node=self.namespace,
+                profile_key=profile_key,
+            ),
             async_=True,
         )
         host.bridge.add_method(
@@ -306,7 +261,7 @@
         service: jid.JID,
         node: str,
         item_id: Optional[str],
-        item_elt: domish.Element
+        item_elt: domish.Element,
     ) -> None:
         try:
             schema = await self._s.get_schema_form(client, service, node)
@@ -321,20 +276,38 @@
                 list_elt = extra["element"] = domish.Element((APP_NS_TICKETS, "list"))
                 list_elt["type"] = field_type
 
-    def _set(self, service, node, values, schema=None, item_id=None, extra_s='',
-             profile_key=C.PROF_KEY_NONE):
+    def _set(
+        self,
+        service,
+        node,
+        values,
+        schema=None,
+        item_id=None,
+        extra_s="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client, service, node, schema, item_id, extra = self._s.prepare_bridge_set(
             service, node, schema, item_id, extra_s, profile_key
         )
-        d = defer.ensureDeferred(self.set(
-            client, service, node, values, schema, item_id, extra, deserialise=True
-        ))
+        d = defer.ensureDeferred(
+            self.set(
+                client, service, node, values, schema, item_id, extra, deserialise=True
+            )
+        )
         d.addCallback(lambda ret: ret or "")
         return d
 
     async def set(
-        self, client, service, node, values, schema=None, item_id=None, extra=None,
-        deserialise=False, form_ns=APP_NS_TICKETS
+        self,
+        client,
+        service,
+        node,
+        values,
+        schema=None,
+        item_id=None,
+        extra=None,
+        deserialise=False,
+        form_ns=APP_NS_TICKETS,
     ):
         """Publish a tickets
 
@@ -361,9 +334,7 @@
             # we need to use uuid for comments node, because we don't know item id in
             # advance (we don't want to set it ourselves to let the server choose, so we
             # can have a nicer id if serial ids is activated)
-            comments_node = self._m.get_comments_node(
-                node + "_" + str(shortuuid.uuid())
-            )
+            comments_node = self._m.get_comments_node(node + "_" + str(shortuuid.uuid()))
             options = {
                 self._p.OPT_ACCESS_MODEL: self._p.ACCESS_OPEN,
                 self._p.OPT_PERSIST_ITEMS: 1,
@@ -383,17 +354,17 @@
             client, service, node, values, schema, item_id, extra, deserialise, form_ns
         )
 
-    def _delete(
-        self, service_s, nodeIdentifier, itemIdentifier, notify, profile_key
-    ):
+    def _delete(self, service_s, nodeIdentifier, itemIdentifier, notify, profile_key):
         client = self.host.get_client(profile_key)
-        return defer.ensureDeferred(self.delete(
-            client,
-            jid.JID(service_s) if service_s else None,
-            nodeIdentifier,
-            itemIdentifier,
-            notify
-        ))
+        return defer.ensureDeferred(
+            self.delete(
+                client,
+                jid.JID(service_s) if service_s else None,
+                nodeIdentifier,
+                itemIdentifier,
+                notify,
+            )
+        )
 
     async def delete(
         self,
@@ -401,7 +372,7 @@
         service: Optional[jid.JID],
         node: Optional[str],
         itemIdentifier: str,
-        notify: Optional[bool] = None
+        notify: Optional[bool] = None,
     ) -> None:
         if not node:
             node = self.namespace
@@ -418,14 +389,15 @@
         return d
 
     async def lists_list(
-        self, client, service: Optional[jid.JID], node: Optional[str]=None
+        self, client, service: Optional[jid.JID], node: Optional[str] = None
     ) -> List[dict]:
         """Retrieve list of pubsub lists registered in personal interests
 
         @return list: list of lists metadata
         """
-        items, metadata = await self.host.plugins['LIST_INTEREST'].list_interests(
-            client, service, node, namespace=APP_NS_TICKETS)
+        items, metadata = await self.host.plugins["LIST_INTEREST"].list_interests(
+            client, service, node, namespace=APP_NS_TICKETS
+        )
         lists = []
         for item in items:
             interest_elt = item.interest
@@ -461,8 +433,10 @@
     def get_templates_names(self, client, language: str) -> list:
         """Retrieve well known list templates"""
 
-        templates = [{"id": tpl_id, "name": d["name"], "icon": d["icon"]}
-                     for tpl_id, d in TEMPLATES.items()]
+        templates = [
+            {"id": tpl_id, "name": d["name"], "icon": d["icon"]}
+            for tpl_id, d in TEMPLATES.items()
+        ]
         return templates
 
     def _get_template(self, name, language, profile):
@@ -475,9 +449,9 @@
 
     def _create_template(self, template_id, name, access_model, profile):
         client = self.host.get_client(profile)
-        d = defer.ensureDeferred(self.create_template(
-            client, template_id, name, access_model
-        ))
+        d = defer.ensureDeferred(
+            self.create_template(client, template_id, name, access_model)
+        )
         d.addCallback(lambda node_data: (node_data[0].full(), node_data[1]))
         return d
 
@@ -490,8 +464,7 @@
             name = shortuuid.uuid()
         fields = TEMPLATES[template_id]["fields"].copy()
         fields.insert(
-            0,
-            {"type": "hidden", "name": NS_TICKETS_TYPE, "value": template_id}
+            0, {"type": "hidden", "name": NS_TICKETS_TYPE, "value": template_id}
         )
         schema = xml_tools.data_dict_2_data_form(
             {"namespace": APP_NS_TICKETS, "fields": fields}
@@ -511,9 +484,15 @@
         list_elt = domish.Element((APP_NS_TICKETS, "list"))
         list_elt["type"] = template_id
         try:
-            await self.host.plugins['LIST_INTEREST'].register_pubsub(
-                client, APP_NS_TICKETS, service, node, creator=True,
-                name=name, element=list_elt)
+            await self.host.plugins["LIST_INTEREST"].register_pubsub(
+                client,
+                APP_NS_TICKETS,
+                service,
+                node,
+                creator=True,
+                name=name,
+                element=list_elt,
+            )
         except Exception as e:
             log.warning(f"Can't add list to interests: {e}")
         return service, node
--- a/libervia/backend/plugins/plugin_misc_merge_requests.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_merge_requests.py	Wed Jun 19 18:44:57 2024 +0200
@@ -29,7 +29,7 @@
 
 log = getLogger(__name__)
 
-APP_NS_MERGE_REQUESTS = 'org.salut-a-toi.merge_requests:0'
+APP_NS_MERGE_REQUESTS = "org.salut-a-toi.merge_requests:0"
 
 PLUGIN_INFO = {
     C.PI_NAME: _("Merge requests management"),
@@ -39,69 +39,86 @@
     C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0346", "LISTS", "TEXT_SYNTAXES"],
     C.PI_MAIN: "MergeRequests",
     C.PI_HANDLER: "no",
-    C.PI_DESCRIPTION: _("""Merge requests management plugin""")
+    C.PI_DESCRIPTION: _("""Merge requests management plugin"""),
 }
 
-FIELD_DATA_TYPE = 'type'
-FIELD_DATA = 'request_data'
+FIELD_DATA_TYPE = "type"
+FIELD_DATA = "request_data"
 
 
-MergeRequestHandler = namedtuple("MergeRequestHandler", ['name',
-                                                         'handler',
-                                                         'data_types',
-                                                         'short_desc',
-                                                         'priority'])
+MergeRequestHandler = namedtuple(
+    "MergeRequestHandler", ["name", "handler", "data_types", "short_desc", "priority"]
+)
 
 
 class MergeRequests(object):
-    META_AUTHOR = 'author'
-    META_EMAIL = 'email'
-    META_TIMESTAMP = 'timestamp'
-    META_HASH = 'hash'
-    META_PARENT_HASH = 'parent_hash'
-    META_COMMIT_MSG = 'commit_msg'
-    META_DIFF = 'diff'
+    META_AUTHOR = "author"
+    META_EMAIL = "email"
+    META_TIMESTAMP = "timestamp"
+    META_HASH = "hash"
+    META_PARENT_HASH = "parent_hash"
+    META_COMMIT_MSG = "commit_msg"
+    META_DIFF = "diff"
     # index of the diff in the whole data
     # needed to retrieve comments location
-    META_DIFF_IDX = 'diff_idx'
+    META_DIFF_IDX = "diff_idx"
 
     def __init__(self, host):
         log.info(_("Merge requests plugin initialization"))
         self.host = host
         self._s = self.host.plugins["XEP-0346"]
         self.namespace = self._s.get_submitted_ns(APP_NS_MERGE_REQUESTS)
-        host.register_namespace('merge_requests', self.namespace)
+        host.register_namespace("merge_requests", self.namespace)
         self._p = self.host.plugins["XEP-0060"]
         self._t = self.host.plugins["LISTS"]
         self._handlers = {}
-        self._handlers_list = []  # handlers sorted by priority
+        self._handlers_list = []  # handlers sorted by priority
         self._type_handlers = {}  # data type => handler map
-        host.bridge.add_method("merge_requests_get", ".plugin",
-                              in_sign='ssiassss', out_sign='s',
-                              method=self._get,
-                              async_=True
-                              )
-        host.bridge.add_method("merge_request_set", ".plugin",
-                              in_sign='ssssa{sas}ssss', out_sign='s',
-                              method=self._set,
-                              async_=True)
-        host.bridge.add_method("merge_requests_schema_get", ".plugin",
-                              in_sign='sss', out_sign='s',
-                              method=lambda service, nodeIdentifier, profile_key:
-                                self._s._get_ui_schema(service,
-                                                     nodeIdentifier,
-                                                     default_node=self.namespace,
-                                                     profile_key=profile_key),
-                              async_=True)
-        host.bridge.add_method("merge_request_parse_data", ".plugin",
-                              in_sign='ss', out_sign='aa{ss}',
-                              method=self._parse_data,
-                              async_=True)
-        host.bridge.add_method("merge_requests_import", ".plugin",
-                              in_sign='ssssa{ss}s', out_sign='',
-                              method=self._import,
-                              async_=True
-                              )
+        host.bridge.add_method(
+            "merge_requests_get",
+            ".plugin",
+            in_sign="ssiassss",
+            out_sign="s",
+            method=self._get,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "merge_request_set",
+            ".plugin",
+            in_sign="ssssa{sas}ssss",
+            out_sign="s",
+            method=self._set,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "merge_requests_schema_get",
+            ".plugin",
+            in_sign="sss",
+            out_sign="s",
+            method=lambda service, nodeIdentifier, profile_key: self._s._get_ui_schema(
+                service,
+                nodeIdentifier,
+                default_node=self.namespace,
+                profile_key=profile_key,
+            ),
+            async_=True,
+        )
+        host.bridge.add_method(
+            "merge_request_parse_data",
+            ".plugin",
+            in_sign="ss",
+            out_sign="aa{ss}",
+            method=self._parse_data,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "merge_requests_import",
+            ".plugin",
+            in_sign="ssssa{ss}s",
+            out_sign="",
+            method=self._import,
+            async_=True,
+        )
 
     def register(self, name, handler, data_types, short_desc, priority=0):
         """register an merge request handler
@@ -118,49 +135,81 @@
         @aram data_types(list[unicode]): data types that his handler can generate or parse
         """
         if name in self._handlers:
-            raise exceptions.ConflictError(_("a handler with name {name} already "
-                                             "exists!").format(name = name))
-        self._handlers[name] = MergeRequestHandler(name,
-                                                   handler,
-                                                   data_types,
-                                                   short_desc,
-                                                   priority)
+            raise exceptions.ConflictError(
+                _("a handler with name {name} already " "exists!").format(name=name)
+            )
+        self._handlers[name] = MergeRequestHandler(
+            name, handler, data_types, short_desc, priority
+        )
         self._handlers_list.append(name)
         self._handlers_list.sort(key=lambda name: self._handlers[name].priority)
         if isinstance(data_types, str):
             data_types = [data_types]
         for data_type in data_types:
             if data_type in self._type_handlers:
-                log.warning(_('merge requests of type {type} are already handled by '
-                              '{old_handler}, ignoring {new_handler}').format(
-                                type = data_type,
-                old_handler = self._type_handlers[data_type].name,
-                new_handler = name))
+                log.warning(
+                    _(
+                        "merge requests of type {type} are already handled by "
+                        "{old_handler}, ignoring {new_handler}"
+                    ).format(
+                        type=data_type,
+                        old_handler=self._type_handlers[data_type].name,
+                        new_handler=name,
+                    )
+                )
                 continue
             self._type_handlers[data_type] = self._handlers[name]
 
     def serialise(self, get_data):
         tickets_xmlui, metadata, items_patches = get_data
         tickets_xmlui_s, metadata = self._p.trans_items_data((tickets_xmlui, metadata))
-        return data_format.serialise({
-            "items": tickets_xmlui_s,
-            "metadata": metadata,
-            "items_patches": items_patches,
-        })
+        return data_format.serialise(
+            {
+                "items": tickets_xmlui_s,
+                "metadata": metadata,
+                "items_patches": items_patches,
+            }
+        )
 
-    def _get(self, service='', node='', max_items=10, item_ids=None, sub_id=None,
-             extra="", profile_key=C.PROF_KEY_NONE):
+    def _get(
+        self,
+        service="",
+        node="",
+        max_items=10,
+        item_ids=None,
+        sub_id=None,
+        extra="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         extra = data_format.deserialise(extra)
         client, service, node, max_items, extra, sub_id = self._s.prepare_bridge_get(
-            service, node, max_items, sub_id, extra, profile_key)
-        d = self.get(client, service, node or None, max_items, item_ids, sub_id or None,
-                     extra.rsm_request, extra.extra)
+            service, node, max_items, sub_id, extra, profile_key
+        )
+        d = self.get(
+            client,
+            service,
+            node or None,
+            max_items,
+            item_ids,
+            sub_id or None,
+            extra.rsm_request,
+            extra.extra,
+        )
         d.addCallback(self.serialise)
         return d
 
     @defer.inlineCallbacks
-    def get(self, client, service=None, node=None, max_items=None, item_ids=None,
-            sub_id=None, rsm_request=None, extra=None):
+    def get(
+        self,
+        client,
+        service=None,
+        node=None,
+        max_items=None,
+        item_ids=None,
+        sub_id=None,
+        rsm_request=None,
+        extra=None,
+    ):
         """Retrieve merge requests and convert them to XMLUI
 
         @param extra(XEP-0060.parse, None): can have following keys:
@@ -177,8 +226,8 @@
             extra = {}
         # XXX: Q&D way to get list for labels when displaying them, but text when we
         #      have to modify them
-        if C.bool(extra.get('labels_as_list', C.BOOL_FALSE)):
-            filters = {'labels': self._s.textbox_2_list_filter}
+        if C.bool(extra.get("labels_as_list", C.BOOL_FALSE)):
+            filters = {"labels": self._s.textbox_2_list_filter}
         else:
             filters = {}
         tickets_xmlui, metadata = yield defer.ensureDeferred(
@@ -192,10 +241,11 @@
                 rsm_request=rsm_request,
                 extra=extra,
                 form_ns=APP_NS_MERGE_REQUESTS,
-                filters = filters)
+                filters=filters,
+            )
         )
         parsed_patches = []
-        if extra.get('parse', False):
+        if extra.get("parse", False):
             for ticket in tickets_xmlui:
                 request_type = ticket.named_widgets[FIELD_DATA_TYPE].value
                 request_data = ticket.named_widgets[FIELD_DATA].value
@@ -203,21 +253,51 @@
                 parsed_patches.append(parsed_data)
         defer.returnValue((tickets_xmlui, metadata, parsed_patches))
 
-    def _set(self, service, node, repository, method, values, schema=None, item_id=None,
-             extra="", profile_key=C.PROF_KEY_NONE):
+    def _set(
+        self,
+        service,
+        node,
+        repository,
+        method,
+        values,
+        schema=None,
+        item_id=None,
+        extra="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client, service, node, schema, item_id, extra = self._s.prepare_bridge_set(
-            service, node, schema, item_id, extra, profile_key)
+            service, node, schema, item_id, extra, profile_key
+        )
         d = defer.ensureDeferred(
             self.set(
-                client, service, node, repository, method, values, schema,
-                item_id or None, extra, deserialise=True
+                client,
+                service,
+                node,
+                repository,
+                method,
+                values,
+                schema,
+                item_id or None,
+                extra,
+                deserialise=True,
             )
         )
-        d.addCallback(lambda ret: ret or '')
+        d.addCallback(lambda ret: ret or "")
         return d
 
-    async def set(self, client, service, node, repository, method='auto', values=None,
-            schema=None, item_id=None, extra=None, deserialise=False):
+    async def set(
+        self,
+        client,
+        service,
+        node,
+        repository,
+        method="auto",
+        values=None,
+        schema=None,
+        item_id=None,
+        extra=None,
+        deserialise=False,
+    ):
         """Publish a tickets
 
         @param service(None, jid.JID): Pubsub service to use
@@ -233,18 +313,21 @@
             node = self.namespace
         if values is None:
             values = {}
-        update = extra.get('update', False)
+        update = extra.get("update", False)
         if not repository and not update:
             # in case of update, we may re-user former patches data
             # so repository is not mandatory
             raise exceptions.DataError(_("repository must be specified"))
 
         if FIELD_DATA in values:
-            raise exceptions.DataError(_("{field} is set by backend, you must not set "
-                                         "it in frontend").format(field = FIELD_DATA))
+            raise exceptions.DataError(
+                _("{field} is set by backend, you must not set " "it in frontend").format(
+                    field=FIELD_DATA
+                )
+            )
 
         if repository:
-            if method == 'auto':
+            if method == "auto":
                 for name in self._handlers_list:
                     handler = self._handlers[name].handler
                     can_handle = await handler.check(repository)
@@ -252,11 +335,15 @@
                         log.info(_("{name} handler will be used").format(name=name))
                         break
                 else:
-                    log.warning(_("repository {path} can't be handled by any installed "
-                                  "handler").format(
-                        path = repository))
-                    raise exceptions.NotFound(_("no handler for this repository has "
-                                                "been found"))
+                    log.warning(
+                        _(
+                            "repository {path} can't be handled by any installed "
+                            "handler"
+                        ).format(path=repository)
+                    )
+                    raise exceptions.NotFound(
+                        _("no handler for this repository has " "been found")
+                    )
             else:
                 try:
                     handler = self._handlers[name].handler
@@ -265,35 +352,50 @@
 
             data = await handler.export(repository)
             if not data.strip():
-                raise exceptions.DataError(_('export data is empty, do you have any '
-                                             'change to send?'))
+                raise exceptions.DataError(
+                    _("export data is empty, do you have any " "change to send?")
+                )
 
-            if not values.get('title') or not values.get('body'):
+            if not values.get("title") or not values.get("body"):
                 patches = handler.parse(data, values.get(FIELD_DATA_TYPE))
                 commits_msg = patches[-1][self.META_COMMIT_MSG]
                 msg_lines = commits_msg.splitlines()
-                if not values.get('title'):
-                    values['title'] = msg_lines[0]
-                if not values.get('body'):
-                    ts = self.host.plugins['TEXT_SYNTAXES']
+                if not values.get("title"):
+                    values["title"] = msg_lines[0]
+                if not values.get("body"):
+                    ts = self.host.plugins["TEXT_SYNTAXES"]
                     xhtml = await ts.convert(
-                        '\n'.join(msg_lines[1:]),
-                        syntax_from = ts.SYNTAX_TEXT,
-                        syntax_to = ts.SYNTAX_XHTML,
-                        profile = client.profile)
-                    values['body'] = '<div xmlns="{ns}">{xhtml}</div>'.format(
-                        ns=C.NS_XHTML, xhtml=xhtml)
+                        "\n".join(msg_lines[1:]),
+                        syntax_from=ts.SYNTAX_TEXT,
+                        syntax_to=ts.SYNTAX_XHTML,
+                        profile=client.profile,
+                    )
+                    values["body"] = '<div xmlns="{ns}">{xhtml}</div>'.format(
+                        ns=C.NS_XHTML, xhtml=xhtml
+                    )
 
             values[FIELD_DATA] = data
 
-        item_id = await self._t.set(client, service, node, values, schema, item_id, extra,
-                                    deserialise, form_ns=APP_NS_MERGE_REQUESTS)
+        item_id = await self._t.set(
+            client,
+            service,
+            node,
+            values,
+            schema,
+            item_id,
+            extra,
+            deserialise,
+            form_ns=APP_NS_MERGE_REQUESTS,
+        )
         return item_id
 
     def _parse_data(self, data_type, data):
         d = self.parse_data(data_type, data)
-        d.addCallback(lambda parsed_patches:
-            {key: str(value) for key, value in parsed_patches.items()})
+        d.addCallback(
+            lambda parsed_patches: {
+                key: str(value) for key, value in parsed_patches.items()
+            }
+        )
         return d
 
     def parse_data(self, data_type, data):
@@ -308,21 +410,31 @@
         try:
             handler = self._type_handlers[data_type]
         except KeyError:
-            raise exceptions.NotFound(_('No handler can handle data type "{type}"')
-                                      .format(type=data_type))
+            raise exceptions.NotFound(
+                _('No handler can handle data type "{type}"').format(type=data_type)
+            )
         return defer.maybeDeferred(handler.handler.parse, data, data_type)
 
-    def _import(self, repository, item_id, service=None, node=None, extra=None,
-                profile_key=C.PROF_KEY_NONE):
+    def _import(
+        self,
+        repository,
+        item_id,
+        service=None,
+        node=None,
+        extra=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile_key)
         service = jid.JID(service) if service else None
-        d = self.import_request(client, repository, item_id, service, node or None,
-                                extra=extra or None)
+        d = self.import_request(
+            client, repository, item_id, service, node or None, extra=extra or None
+        )
         return d
 
     @defer.inlineCallbacks
-    def import_request(self, client, repository, item, service=None, node=None,
-                       extra=None):
+    def import_request(
+        self, client, repository, item, service=None, node=None, extra=None
+    ):
         """import a merge request in specified directory
 
         @param repository(unicode): path to the repository where the code stands
@@ -336,7 +448,8 @@
                 node,
                 max_items=1,
                 item_ids=[item],
-                form_ns=APP_NS_MERGE_REQUESTS)
+                form_ns=APP_NS_MERGE_REQUESTS,
+            )
         )
         ticket_xmlui = tickets_xmlui[0]
         data = ticket_xmlui.named_widgets[FIELD_DATA].value
@@ -344,10 +457,14 @@
         try:
             handler = self._type_handlers[data_type]
         except KeyError:
-            raise exceptions.NotFound(_('No handler found to import {data_type}')
-                                      .format(data_type=data_type))
-        log.info(_("Importing patch [{item_id}] using {name} handler").format(
-            item_id = item,
-            name = handler.name))
-        yield handler.handler.import_(repository, data, data_type, item, service, node,
-                                      extra)
+            raise exceptions.NotFound(
+                _("No handler found to import {data_type}").format(data_type=data_type)
+            )
+        log.info(
+            _("Importing patch [{item_id}] using {name} handler").format(
+                item_id=item, name=handler.name
+            )
+        )
+        yield handler.handler.import_(
+            repository, data, data_type, item, service, node, extra
+        )
--- a/libervia/backend/plugins/plugin_misc_nat_port.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_nat_port.py	Wed Jun 19 18:44:57 2024 +0200
@@ -46,9 +46,7 @@
 }
 
 STARTING_PORT = 6000  # starting point to automatically find a port
-DEFAULT_DESC = (
-    "SaT port mapping"
-)  # we don't use "à" here as some bugged NAT don't manage charset correctly
+DEFAULT_DESC = "SaT port mapping"  # we don't use "à" here as some bugged NAT don't manage charset correctly
 
 
 class MappingError(Exception):
@@ -67,7 +65,9 @@
         self._upnp.discoverdelay = 200
         self._mutex = threading.Lock()  # used to protect access to self._upnp
         self._starting_port_cache = None  # used to cache the first available port
-        self._to_unmap = []  # list of tuples (ext_port, protocol) of ports to unmap on unload
+        self._to_unmap = (
+            []
+        )  # list of tuples (ext_port, protocol) of ports to unmap on unload
         discover_d = threads.deferToThread(self._discover)
         discover_d.chainDeferred(self._initialised)
         self._initialised.addErrback(self._init_failed)
--- a/libervia/backend/plugins/plugin_misc_radiocol.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_radiocol.py	Wed Jun 19 18:44:57 2024 +0200
@@ -195,13 +195,11 @@
         ]  # FIXME: referee comes from Libervia's client side, it's unsecure
         radio_data["to_delete"][
             attrs["filename"]
-        ] = (
-            song_path
-        )  # FIXME: works only because of the same host trick, see the note under the docstring
+        ] = song_path  # FIXME: works only because of the same host trick, see the note under the docstring
         return self.send(referee, ("", "song_added"), attrs, profile=profile)
 
     def play_next(self, room_jid, profile):
-        """"Play next song in queue if exists, and put a timer
+        """ "Play next song in queue if exists, and put a timer
         which trigger after the song has been played to play next one"""
         # TODO: songs need to be erased once played or found invalids
         #      ==> unlink done the Q&D way with the same host trick (see above)
@@ -245,9 +243,7 @@
             try:
                 file_to_delete = radio_data["to_delete"][filename]
             except KeyError:
-                log.error(
-                    _("INTERNAL ERROR: can't find full path of the song to delete")
-                )
+                log.error(_("INTERNAL ERROR: can't find full path of the song to delete"))
                 return False
         else:
             file_to_delete = filename
--- a/libervia/backend/plugins/plugin_misc_remote_control.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_remote_control.py	Wed Jun 19 18:44:57 2024 +0200
@@ -49,7 +49,6 @@
 }
 
 
-
 class RemoteControl(BaseApplicationHandler):
 
     def __init__(self, host):
@@ -84,19 +83,18 @@
     ) -> defer.Deferred[str]:
         client = self.host.get_client(profile)
         extra = data_format.deserialise(extra_s)
-        d = defer.ensureDeferred(self.remote_control_start(
-            client,
-            jid.JID(peer_jid_s),
-            extra,
-        ))
+        d = defer.ensureDeferred(
+            self.remote_control_start(
+                client,
+                jid.JID(peer_jid_s),
+                extra,
+            )
+        )
         d.addCallback(data_format.serialise)
         return d
 
     async def remote_control_start(
-        self,
-        client: SatXMPPEntity,
-        peer_jid: jid.JID,
-        extra: dict
+        self, client: SatXMPPEntity, peer_jid: jid.JID, extra: dict
     ) -> dict:
         """Start a remote control session.
 
@@ -104,9 +102,7 @@
         @return: progress id
         """
         if not extra:
-            raise exceptions.DataError(
-                '"extra" must be set.'
-            )
+            raise exceptions.DataError('"extra" must be set.')
         # webrtc is always used for remote control
         extra["webrtc"] = True
         content = {
@@ -127,9 +123,7 @@
         try:
             application_data = call_data["application"]
         except KeyError:
-            raise exceptions.DataError(
-                '"call_data" must have an application media.'
-            )
+            raise exceptions.DataError('"call_data" must have an application media.')
         try:
             content["transport_data"] = {
                 "sctp-port": metadata["sctp-port"],
@@ -139,7 +133,7 @@
                     "pwd": metadata["ice-pwd"],
                     "candidates": application_data.pop("ice-candidates"),
                     "fingerprint": application_data.pop("fingerprint", {}),
-                }
+                },
             }
             name = application_data.get("id")
             if name:
@@ -208,9 +202,7 @@
         session_id = session["id"]
         peer_jid = session["peer_jid"]
 
-        is_in_roster, confirm_msg, confirm_title = self._get_confirm_msg(
-            client, peer_jid
-        )
+        is_in_roster, confirm_msg, confirm_title = self._get_confirm_msg(client, peer_jid)
         if is_in_roster:
             action_type = C.META_TYPE_CONFIRM
         else:
@@ -227,7 +219,7 @@
             confirm_msg,
             confirm_title,
             profile=client.profile,
-            action_extra=action_extra
+            action_extra=action_extra,
         )
         if accepted:
             session["pre_accepted"] = True
@@ -248,11 +240,7 @@
         """The remote control has been rejected"""
 
     def jingle_session_init(
-        self,
-        client: SatXMPPEntity,
-        session: dict,
-        content_name: str,
-        extra: dict
+        self, client: SatXMPPEntity, session: dict, content_name: str, extra: dict
     ) -> domish.Element:
         """Initializes a jingle session.
 
@@ -305,9 +293,7 @@
                 client, session, content_data, content_name
             )
         else:
-            raise exceptions.InternalError(
-                f"Invalid role {role!r}"
-            )
+            raise exceptions.InternalError(f"Invalid role {role!r}")
 
     async def _remote_control_request_conf(
         self,
@@ -340,7 +326,7 @@
             confirm_msg,
             confirm_title,
             profile=client.profile,
-            action_extra=action_extra
+            action_extra=action_extra,
         )
 
     async def jingle_handler(self, client, action, session, content_name, desc_elt):
--- a/libervia/backend/plugins/plugin_misc_room_game.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_room_game.py	Wed Jun 19 18:44:57 2024 +0200
@@ -112,7 +112,9 @@
         self.game_init = game_init
         self.player_init = player_init
         self.games = {}
-        self.invitations = {}  # values are a couple (x, y) with x the time and y a list of users
+        self.invitations = (
+            {}
+        )  # values are a couple (x, y) with x the time and y a list of users
 
         # These are the default settings, which can be overwritten by child class after initialization
         self.invite_mode = self.FROM_PLAYERS if self.player_init == {} else self.FROM_NONE
@@ -460,7 +462,9 @@
         other_players = [jid.JID(player).userhostJID() for player in other_players]
         return self.prepare_room(other_players, room_jid, profile_key)
 
-    def prepare_room(self, other_players=None, room_jid=None, profile_key=C.PROF_KEY_NONE):
+    def prepare_room(
+        self, other_players=None, room_jid=None, profile_key=C.PROF_KEY_NONE
+    ):
         """Prepare the room for a game: create it if it doesn't exist and invite players.
 
         @param other_players (list[JID]): list of other players JID (bare)
@@ -716,7 +720,7 @@
         return started_elt
 
     def _send_elements(self, to_jid, data, profile=None):
-        """ TODO
+        """TODO
 
         @param to_jid: recipient JID
         @param data: list of (elem, attr, content) with:
@@ -745,7 +749,7 @@
         return defer.succeed(None)
 
     def send(self, to_jid, elem=None, attrs=None, content=None, profile=None):
-        """ TODO
+        """TODO
 
         @param to_jid: recipient JID
         @param elem: domish.Element, unicode or a couple:
--- a/libervia/backend/plugins/plugin_misc_tarot.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_tarot.py	Wed Jun 19 18:44:57 2024 +0200
@@ -141,7 +141,12 @@
         for value in ["excuse"] + list(map(str, list(range(1, 22)))):
             self.deck_ordered.append(TarotCard(("atout", value)))
         for suit in ["pique", "coeur", "carreau", "trefle"]:
-            for value in list(map(str, list(range(1, 11)))) + ["valet", "cavalier", "dame", "roi"]:
+            for value in list(map(str, list(range(1, 11)))) + [
+                "valet",
+                "cavalier",
+                "dame",
+                "roi",
+            ]:
                 self.deck_ordered.append(TarotCard((suit, value)))
         self.__choose_contrat_id = host.register_callback(
             self._contrat_choosed, with_data=True
@@ -474,9 +479,7 @@
                 if played_card.suit == "atout" and played_card > biggest_atout:
                     biggest_atout = played_card
                 idx = (idx + 1) % len(players)
-            has_suit = (
-                False
-            )  # True if there is one card of the asked suit in the hand of the player
+            has_suit = False  # True if there is one card of the asked suit in the hand of the player
             has_atout = False
             biggest_hand_atout = None
 
@@ -608,7 +611,7 @@
             hand[players[i]] = deck[0:hand_size]
             del deck[0:hand_size]
         chien.extend(deck)
-        del (deck[:])
+        del deck[:]
         msg_elts = {}
         for player in players:
             msg_elts[player] = self.__card_list_to_xml(hand[player], "hand")
--- a/libervia/backend/plugins/plugin_misc_text_commands.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_text_commands.py	Wed Jun 19 18:44:57 2024 +0200
@@ -151,7 +151,7 @@
         return data
 
     def register_text_commands(self, instance):
-        """ Add a text command
+        """Add a text command
 
         @param instance: instance of a class containing text commands
         """
@@ -196,12 +196,12 @@
     def send_message_trigger(
         self, client, mess_data, pre_xml_treatments, post_xml_treatments
     ):
-        """Install SendMessage command hook """
+        """Install SendMessage command hook"""
         pre_xml_treatments.addCallback(self._send_message_cmd_hook, client)
         return True
 
     def _send_message_cmd_hook(self, mess_data, client):
-        """ Check text commands in message, and react consequently
+        """Check text commands in message, and react consequently
 
         msg starting with / are potential command. If a command is found, it is executed,
         else an help message is sent.
@@ -248,7 +248,7 @@
 
         # looks like an actual command, we try to call the corresponding method
         def ret_handling(ret):
-            """ Handle command return value:
+            """Handle command return value:
             if ret is True, normally send message (possibly modified by command)
             else, abord message sending
             """
@@ -381,9 +381,7 @@
 
         d = defer.succeed(None)
         for __, callback in self._whois:
-            d.addCallback(
-                lambda __: callback(client, whois_msg, mess_data, target_jid)
-            )
+            d.addCallback(lambda __: callback(client, whois_msg, mess_data, target_jid))
 
         def feed_back(__):
             self.feed_back(client, "\n".join(whois_msg), mess_data)
@@ -464,7 +462,10 @@
                 short_help=cmd_data["doc_short_help"],
                 syntax=_(" " * 4 + "syntax: {}\n").format(syntax) if syntax else "",
                 args_help="\n".join(
-                    [" " * 8 + "{}".format(line) for line in self._get_args_help(cmd_data)]
+                    [
+                        " " * 8 + "{}".format(line)
+                        for line in self._get_args_help(cmd_data)
+                    ]
                 ),
             )
 
--- a/libervia/backend/plugins/plugin_misc_text_syntaxes.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_text_syntaxes.py	Wed Jun 19 18:44:57 2024 +0200
@@ -115,16 +115,80 @@
     "param",
     "source",
     "track",
-    "wbr")
+    "wbr",
+)
 
 SAFE_ATTRS = html.defs.safe_attrs.union({"style", "poster", "controls"}) - {"id"}
 SAFE_CLASSES = {
     # those classes are used for code highlighting
-    "bp", "c", "ch", "cm", "cp", "cpf", "cs", "dl", "err", "fm", "gd", "ge", "get", "gh",
-    "gi", "go", "gp", "gr", "gs", "gt", "gu", "highlight", "hll", "il", "k", "kc", "kd",
-    "kn", "kp", "kr", "kt", "m", "mb", "mf", "mh", "mi", "mo", "na", "nb", "nc", "nd",
-    "ne", "nf", "ni", "nl", "nn", "no", "nt", "nv", "o", "ow", "s", "sa", "sb", "sc",
-    "sd", "se", "sh", "si", "sr", "ss", "sx", "vc", "vg", "vi", "vm", "w", "write",
+    "bp",
+    "c",
+    "ch",
+    "cm",
+    "cp",
+    "cpf",
+    "cs",
+    "dl",
+    "err",
+    "fm",
+    "gd",
+    "ge",
+    "get",
+    "gh",
+    "gi",
+    "go",
+    "gp",
+    "gr",
+    "gs",
+    "gt",
+    "gu",
+    "highlight",
+    "hll",
+    "il",
+    "k",
+    "kc",
+    "kd",
+    "kn",
+    "kp",
+    "kr",
+    "kt",
+    "m",
+    "mb",
+    "mf",
+    "mh",
+    "mi",
+    "mo",
+    "na",
+    "nb",
+    "nc",
+    "nd",
+    "ne",
+    "nf",
+    "ni",
+    "nl",
+    "nn",
+    "no",
+    "nt",
+    "nv",
+    "o",
+    "ow",
+    "s",
+    "sa",
+    "sb",
+    "sc",
+    "sd",
+    "se",
+    "sh",
+    "si",
+    "sr",
+    "ss",
+    "sx",
+    "vc",
+    "vg",
+    "vi",
+    "vm",
+    "w",
+    "write",
 }
 STYLES_VALUES_REGEX = (
     r"^("
@@ -157,7 +221,7 @@
 
 
 class TextSyntaxes(object):
-    """ Text conversion class
+    """Text conversion class
     XHTML utf-8 is used as intermediate language for conversions
     """
 
@@ -170,7 +234,6 @@
     # default_syntax must be lower case
     default_syntax = SYNTAX_XHTML
 
-
     def __init__(self, host):
         log.info(_("Text syntaxes plugin initialization"))
         self.host = host
@@ -218,8 +281,8 @@
             #      when the user is not aware of markdown and HTML
             class EscapeHTML(Extension):
                 def extendMarkdown(self, md):
-                    md.preprocessors.deregister('html_block')
-                    md.inlinePatterns.deregister('html')
+                    md.preprocessors.deregister("html_block")
+                    md.inlinePatterns.deregister("html")
 
             def _html2text(html, baseurl=""):
                 h = html2text.HTML2Text(baseurl=baseurl)
@@ -228,20 +291,22 @@
 
             self.add_syntax(
                 self.SYNTAX_MARKDOWN,
-                partial(markdown.markdown,
-                        extensions=[
-                            EscapeHTML(),
-                            'nl2br',
-                            'codehilite',
-                            'fenced_code',
-                            'sane_lists',
-                            'tables',
-                            ],
-                        extension_configs = {
-                            "codehilite": {
-                                "css_class": "highlight",
-                            }
-                        }),
+                partial(
+                    markdown.markdown,
+                    extensions=[
+                        EscapeHTML(),
+                        "nl2br",
+                        "codehilite",
+                        "fenced_code",
+                        "sane_lists",
+                        "tables",
+                    ],
+                    extension_configs={
+                        "codehilite": {
+                            "css_class": "highlight",
+                        }
+                    },
+                ),
                 _html2text,
                 [TextSyntaxes.OPT_DEFAULT],
             )
@@ -287,7 +352,7 @@
         self.host.memory.update_params(self.params % self.params_data)
 
     def get_current_syntax(self, profile):
-        """ Return the selected syntax for the given profile
+        """Return the selected syntax for the given profile
 
         @param profile: %(doc_profile)s
         @return: profile selected syntax
@@ -301,7 +366,7 @@
         return failure
 
     def clean_style(self, styles_raw: str) -> str:
-        """"Clean unsafe CSS styles
+        """ "Clean unsafe CSS styles
 
         Remove styles not in the whitelist, or where the value doesn't match the regex
         @param styles_raw: CSS styles
@@ -323,9 +388,7 @@
             if value == "none":
                 continue
             cleaned_styles.append((key, value))
-        return "; ".join(
-            ["%s: %s" % (key_, value_) for key_, value_ in cleaned_styles]
-        )
+        return "; ".join(["%s: %s" % (key_, value_) for key_, value_ in cleaned_styles])
 
     def clean_classes(self, classes_raw: str) -> str:
         """Remove any non whitelisted class
@@ -355,9 +418,7 @@
         else:
             log.error("Only strings and HtmlElements can be cleaned")
             raise exceptions.DataError
-        cleaner = clean.Cleaner(
-            style=False, add_nofollow=False, safe_attrs=SAFE_ATTRS
-        )
+        cleaner = clean.Cleaner(style=False, add_nofollow=False, safe_attrs=SAFE_ATTRS)
         xhtml_elt = cleaner.clean_html(xhtml_elt)
         for elt in xhtml_elt.xpath("//*[@style]"):
             elt.set("style", self.clean_style(elt.get("style")))
@@ -369,11 +430,12 @@
                 if element.tag in VOID_ELEMENTS:
                     element.text = None
                 else:
-                    element.text = ''
+                    element.text = ""
         return html.tostring(xhtml_elt, encoding=str, method="xml")
 
-    def convert(self, text, syntax_from, syntax_to=_SYNTAX_XHTML, safe=True,
-                profile=None):
+    def convert(
+        self, text, syntax_from, syntax_to=_SYNTAX_XHTML, safe=True, profile=None
+    ):
         """Convert a text between two syntaxes
 
         @param text: text to convert
--- a/libervia/backend/plugins/plugin_misc_upload.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_upload.py	Wed Jun 19 18:44:57 2024 +0200
@@ -68,15 +68,15 @@
         self._upload_callbacks = []
 
     def _file_upload(
-        self, filepath, filename, upload_jid_s="", options='', profile=C.PROF_KEY_NONE
+        self, filepath, filename, upload_jid_s="", options="", profile=C.PROF_KEY_NONE
     ):
         client = self.host.get_client(profile)
         upload_jid = jid.JID(upload_jid_s) if upload_jid_s else None
         options = data_format.deserialise(options)
 
-        return defer.ensureDeferred(self.file_upload(
-            client, filepath, filename or None, upload_jid, options
-        ))
+        return defer.ensureDeferred(
+            self.file_upload(client, filepath, filename or None, upload_jid, options)
+        )
 
     async def file_upload(self, client, filepath, filename, upload_jid, options):
         """Send a file using best available method
@@ -87,10 +87,13 @@
         """
         try:
             progress_id, __ = await self.upload(
-                client, filepath, filename, upload_jid, options)
+                client, filepath, filename, upload_jid, options
+            )
         except Exception as e:
-            if (isinstance(e, jabber_error.StanzaError)
-                and e.condition == 'not-acceptable'):
+            if (
+                isinstance(e, jabber_error.StanzaError)
+                and e.condition == "not-acceptable"
+            ):
                 reason = e.text
             else:
                 reason = str(e)
@@ -110,7 +113,7 @@
         filepath: Union[Path, str],
         filename: Optional[str] = None,
         upload_jid: Optional[jid.JID] = None,
-        extra: Optional[dict]=None
+        extra: Optional[dict] = None,
     ) -> Tuple[str, defer.Deferred]:
         """Send a file using best available method
 
--- a/libervia/backend/plugins/plugin_misc_uri_finder.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_uri_finder.py	Wed Jun 19 18:44:57 2024 +0200
@@ -22,6 +22,7 @@
 from libervia.backend.core.log import getLogger
 from twisted.internet import defer
 import textwrap
+
 log = getLogger(__name__)
 import json
 import os.path
@@ -36,14 +37,18 @@
     C.PI_DEPENDENCIES: [],
     C.PI_MAIN: "URIFinder",
     C.PI_HANDLER: "no",
-    C.PI_DESCRIPTION: textwrap.dedent(_("""\
+    C.PI_DESCRIPTION: textwrap.dedent(
+        _(
+            """\
     Plugin to find URIs in well know location.
     This allows to retrieve settings to work with a project (e.g. pubsub node used for merge-requests).
-    """))
+    """
+        )
+    ),
 }
 
 
-SEARCH_FILES = ('readme', 'contributing')
+SEARCH_FILES = ("readme", "contributing")
 
 
 class URIFinder(object):
@@ -51,10 +56,14 @@
     def __init__(self, host):
         log.info(_("URI finder plugin initialization"))
         self.host = host
-        host.bridge.add_method("uri_find", ".plugin",
-                              in_sign='sas', out_sign='a{sa{ss}}',
-                              method=self.find,
-                              async_=True)
+        host.bridge.add_method(
+            "uri_find",
+            ".plugin",
+            in_sign="sas",
+            out_sign="a{sa{ss}}",
+            method=self.find,
+            async_=True,
+        )
 
     def find(self, path, keys):
         """Look for URI in well known locations
@@ -64,31 +73,38 @@
             e.g.: "tickets", "merge-requests"
         @return (dict[unicode, unicode]): map from key to found uri
         """
-        keys_re = '|'.join(keys)
+        keys_re = "|".join(keys)
         label_re = r'"(?P<label>[^"]+)"'
-        uri_re = re.compile(r'(?P<key>{keys_re})[ :]? +(?P<uri>xmpp:\S+)(?:.*use {label_re} label)?'.format(
-            keys_re=keys_re, label_re = label_re))
+        uri_re = re.compile(
+            r"(?P<key>{keys_re})[ :]? +(?P<uri>xmpp:\S+)(?:.*use {label_re} label)?".format(
+                keys_re=keys_re, label_re=label_re
+            )
+        )
         path = os.path.normpath(path)
         if not os.path.isdir(path) or not os.path.isabs(path):
-            raise ValueError('path must be an absolute path to a directory')
+            raise ValueError("path must be an absolute path to a directory")
 
         found_uris = {}
-        while path != '/':
+        while path != "/":
             for filename in os.listdir(path):
                 name, __ = os.path.splitext(filename)
                 if name.lower() in SEARCH_FILES:
                     file_path = os.path.join(path, filename)
                     with open(file_path) as f:
                         for m in uri_re.finditer(f.read()):
-                            key = m.group('key')
-                            uri = m.group('uri')
-                            label = m.group('label')
+                            key = m.group("key")
+                            uri = m.group("uri")
+                            label = m.group("label")
                             if key in found_uris:
-                                log.warning(_("Ignoring already found uri for key \"{key}\"").format(key=key))
+                                log.warning(
+                                    _(
+                                        'Ignoring already found uri for key "{key}"'
+                                    ).format(key=key)
+                                )
                             else:
-                                uri_data = found_uris[key] = {'uri': uri}
+                                uri_data = found_uris[key] = {"uri": uri}
                                 if label is not None:
-                                    uri_data['labels'] = json.dumps([label])
+                                    uri_data["labels"] = json.dumps([label])
             if found_uris:
                 break
             path = os.path.dirname(path)
--- a/libervia/backend/plugins/plugin_misc_url_preview.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_url_preview.py	Wed Jun 19 18:44:57 2024 +0200
@@ -109,8 +109,7 @@
 
         self.register("invidious", self.fetch_invidious_data, priority=-90)
         self.register_domain_protocol(
-            ["yewtu.be", "www.yewtu.be", "invidious.fdn.fr"],
-            "invidious"
+            ["yewtu.be", "www.yewtu.be", "invidious.fdn.fr"], "invidious"
         )
 
         # bridge methods
@@ -126,7 +125,9 @@
 
     # API
 
-    def _url_preview_get(self, url: str, options: str, profile_key: str) -> defer.Deferred:
+    def _url_preview_get(
+        self, url: str, options: str, profile_key: str
+    ) -> defer.Deferred:
         client = self.host.get_client(profile_key)
         d = defer.ensureDeferred(
             self.get_preview_data(client, url, data_format.deserialise(options))
@@ -190,7 +191,6 @@
                         log.warning(f"Can't clean html data: {e}\n{preview_data}")
                         del preview_data["html"]
 
-
         return preview_data
 
     @classmethod
@@ -305,15 +305,15 @@
         """
         oembed_url = f"https://www.youtube.com/oembed?url={parse.quote(url)}&format=json"
         data = await self._fetch_oembed_data(oembed_url)
-        if data is not None and 'html' in data:
-            html = data['html']
+        if data is not None and "html" in data:
+            html = data["html"]
             root = etree.HTML(html)
-            iframe_elt = root.xpath('//iframe')
+            iframe_elt = root.xpath("//iframe")
             if iframe_elt:
-                iframe_elt[0].attrib['style'] = (
-                    'position: absolute; top: 0; left: 0; width: 100%; height: 100%;'
-                )
-                data['html'] = etree.tostring(root, method='html', encoding='unicode')
+                iframe_elt[0].attrib[
+                    "style"
+                ] = "position: absolute; top: 0; left: 0; width: 100%; height: 100%;"
+                data["html"] = etree.tostring(root, method="html", encoding="unicode")
             else:
                 log.warning("No <iframe> found in the YouTube oEmbed response")
 
@@ -347,7 +347,6 @@
                 f"Failed to fetch preview for {url}, status code: {resp.code}"
             )
 
-
     async def fetch_generic_data(
         self, client: SatXMPPEntity, url: str, options: dict
     ) -> Optional[dict]:
@@ -430,9 +429,11 @@
                 "title": page.get("title"),
                 "description": page.get("extract"),
                 "url": url,
-                "image": page.get("thumbnail", {}).get("source")
-                if "thumbnail" in page
-                else None,
+                "image": (
+                    page.get("thumbnail", {}).get("source")
+                    if "thumbnail" in page
+                    else None
+                ),
             }
         else:
             raise PreviewFetchError(
@@ -441,7 +442,9 @@
 
     # Invidious
 
-    async def fetch_invidious_data(self, client: SatXMPPEntity, url: str, options: dict) -> Optional[dict]:
+    async def fetch_invidious_data(
+        self, client: SatXMPPEntity, url: str, options: dict
+    ) -> Optional[dict]:
         """
         Fetch Invidious data from a url and generate HTML iframe.
 
@@ -450,10 +453,10 @@
         @return: A dictionary containing the Invidious data or None if no data could be fetched.
         """
         parsed_url = parse.urlparse(url)
-        if 'watch' in parsed_url.path:
-            video_id = parse.parse_qs(parsed_url.query).get('v', [None])[0]
+        if "watch" in parsed_url.path:
+            video_id = parse.parse_qs(parsed_url.query).get("v", [None])[0]
         else:
-            video_id = parsed_url.path.strip('/')
+            video_id = parsed_url.path.strip("/")
         if not video_id:
             log.warning(f"Can't extract video ID from {url}")
             return None
@@ -465,25 +468,25 @@
             video_data = await resp.json()
             # construct the iframe html code
             html = (
-                f'<iframe'
+                f"<iframe"
                 f'    width="100%"'
                 f'    height="auto"'
                 f'    src="https://{parsed_url.netloc}/embed/{video_id}"'
                 f'    frameborder="0" '
                 f'    allow="'
-                f'        accelerometer;'
-                f'        autoplay;'
-                f'        clipboard-write;'
-                f'        encrypted-media;'
-                f'        gyroscope;'
+                f"        accelerometer;"
+                f"        autoplay;"
+                f"        clipboard-write;"
+                f"        encrypted-media;"
+                f"        gyroscope;"
                 f'        picture-in-picture"'
                 f'    style="'
-                f'        position: absolute;'
-                f'        top: 0;'
-                f'        left: 0;'
-                f'        width: 100%;'
+                f"        position: absolute;"
+                f"        top: 0;"
+                f"        left: 0;"
+                f"        width: 100%;"
                 f'        height: 100%;"'
-                f'    allowfullscreen></iframe>'
+                f"    allowfullscreen></iframe>"
             )
             # structure the data to be returned
             data = {
--- a/libervia/backend/plugins/plugin_misc_watched.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_misc_watched.py	Wed Jun 19 18:44:57 2024 +0200
@@ -70,14 +70,14 @@
 
         # we check that the previous presence was unavailable (no notification else)
         try:
-            old_show = self.host.memory.get_entity_datum(
-                client, entity, "presence").show
+            old_show = self.host.memory.get_entity_datum(client, entity, "presence").show
         except (KeyError, exceptions.UnknownEntityError):
             old_show = C.PRESENCE_UNAVAILABLE
 
         if old_show == C.PRESENCE_UNAVAILABLE:
             watched = self.host.memory.param_get_a(
-                NAME, CATEGORY, profile_key=client.profile)
+                NAME, CATEGORY, profile_key=client.profile
+            )
             if entity in watched or entity.userhostJID() in watched:
                 self.host.action_new(
                     {
--- a/libervia/backend/plugins/plugin_pubsub_cache.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_pubsub_cache.py	Wed Jun 19 18:44:57 2024 +0200
@@ -55,7 +55,6 @@
 PROGRESS_DEADLINE = 60 * 60 * 6
 
 
-
 class PubsubCache:
     # TODO: there is currently no notification for (un)subscribe events with XEP-0060,
     #   but it would be necessary to have this data if some devices unsubscribe a cached
@@ -196,7 +195,7 @@
             raise ValueError('"type" is mandatory in analyser')
         type_test_keys = {"node", "namespace"}
         if not type_test_keys.intersection(analyser):
-            raise ValueError(f'at least one of {type_test_keys} must be used')
+            raise ValueError(f"at least one of {type_test_keys} must be used")
         if name in self.analysers:
             raise exceptions.Conflict(
                 f"An analyser with the name {name!r} is already registered"
@@ -204,10 +203,7 @@
         self.analysers[name] = analyser
 
     async def cache_items(
-        self,
-        client: SatXMPPEntity,
-        pubsub_node: PubsubNode,
-        items: List[domish.Element]
+        self, client: SatXMPPEntity, pubsub_node: PubsubNode, items: List[domish.Element]
     ) -> None:
         try:
             parser = self.analysers[pubsub_node.analyser].get("parser")
@@ -217,11 +213,7 @@
         if parser is not None:
             parsed_items = [
                 await utils.as_deferred(
-                    parser,
-                    client,
-                    item,
-                    pubsub_node.service,
-                    pubsub_node.name
+                    parser, client, item, pubsub_node.service, pubsub_node.name
                 )
                 for item in items
             ]
@@ -232,19 +224,13 @@
             client, pubsub_node, items, parsed_items
         )
 
-    async def _cache_node(
-        self,
-        client: SatXMPPEntity,
-        pubsub_node: PubsubNode
-    ) -> None:
+    async def _cache_node(self, client: SatXMPPEntity, pubsub_node: PubsubNode) -> None:
         await self.host.memory.storage.update_pubsub_node_sync_state(
             pubsub_node, SyncState.IN_PROGRESS
         )
         service, node = pubsub_node.service, pubsub_node.name
         try:
-            log.debug(
-                f"Caching node {node!r} at {service} for {client.profile}"
-            )
+            log.debug(f"Caching node {node!r} at {service} for {client.profile}")
             if not pubsub_node.subscribed:
                 try:
                     sub = await self._p.subscribe(client, service, node)
@@ -286,9 +272,7 @@
                     items, __ = await client.pubsub_client.items(
                         pubsub_node.service, pubsub_node.name, maxItems=20
                     )
-                    await self.cache_items(
-                        client, pubsub_node, items
-                    )
+                    await self.cache_items(client, pubsub_node, items)
                 else:
                     raise e
             except exceptions.FeatureNotFound:
@@ -299,9 +283,7 @@
                 items, __ = await client.pubsub_client.items(
                     pubsub_node.service, pubsub_node.name, maxItems=20
                 )
-                await self.cache_items(
-                    client, pubsub_node, items
-                )
+                await self.cache_items(client, pubsub_node, items)
             else:
                 rsm_p = self.host.plugins["XEP-0059"]
                 rsm_request = rsm.RSMRequest()
@@ -310,9 +292,7 @@
                     items, rsm_response = await client.pubsub_client.items(
                         service, node, rsm_request=rsm_request
                     )
-                    await self.cache_items(
-                        client, pubsub_node, items
-                    )
+                    await self.cache_items(client, pubsub_node, items)
                     for item in items:
                         item_id = item["id"]
                         if item_id in cached_ids:
@@ -343,6 +323,7 @@
             )
         except Exception as e:
             import traceback
+
             tb = traceback.format_tb(e.__traceback__)
             log.error(
                 f"Can't cache node {node!r} at {service} for {client.profile}: {e}\n{tb}"
@@ -356,11 +337,7 @@
     def _cache_node_clean(self, __, pubsub_node):
         del self.in_progress[(pubsub_node.service, pubsub_node.name)]
 
-    def cache_node(
-        self,
-        client: SatXMPPEntity,
-        pubsub_node: PubsubNode
-    ) -> None:
+    def cache_node(self, client: SatXMPPEntity, pubsub_node: PubsubNode) -> None:
         """Launch node caching as a background task"""
         d = defer.ensureDeferred(self._cache_node(client, pubsub_node))
         d.addBoth(self._cache_node_clean, pubsub_node=pubsub_node)
@@ -372,15 +349,13 @@
         client: SatXMPPEntity,
         service: jid.JID,
         node: str,
-        pubsub_node : PubsubNode = None,
+        pubsub_node: PubsubNode = None,
     ) -> dict:
         """Use registered analysers on a node to determine what it is used for"""
         analyse = {"service": service, "node": node}
         if pubsub_node is None:
             try:
-                first_item = (await client.pubsub_client.items(
-                    service, node, 1
-                ))[0][0]
+                first_item = (await client.pubsub_client.items(service, node, 1))[0][0]
             except IndexError:
                 pass
             except error.StanzaError as e:
@@ -442,9 +417,7 @@
 
         else:
             found = False
-            log.debug(
-                f"node {node!r} at service {service} doesn't match any known type"
-            )
+            log.debug(f"node {node!r} at service {service} doesn't match any known type")
         if found:
             try:
                 match_cb = analyser["match_cb"]
@@ -455,12 +428,20 @@
         return analyse
 
     def _get_items_from_cache(
-        self, service="", node="", max_items=10, item_ids=None, sub_id=None,
-        extra="", profile_key=C.PROF_KEY_NONE
+        self,
+        service="",
+        node="",
+        max_items=10,
+        item_ids=None,
+        sub_id=None,
+        extra="",
+        profile_key=C.PROF_KEY_NONE,
     ):
-        d = defer.ensureDeferred(self._a_get_items_from_cache(
-            service, node, max_items, item_ids, sub_id, extra, profile_key
-        ))
+        d = defer.ensureDeferred(
+            self._a_get_items_from_cache(
+                service, node, max_items, item_ids, sub_id, extra, profile_key
+            )
+        )
         d.addCallback(self._p.trans_items_data)
         d.addCallback(self._p.serialise_items)
         return d
@@ -498,7 +479,7 @@
         item_ids: Optional[List[str]] = None,
         sub_id: Optional[str] = None,
         rsm_request: Optional[rsm.RSMRequest] = None,
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> Tuple[List[PubsubItem], dict]:
         """Get items from cache, using same arguments as for external Pubsub request"""
         if extra is None:
@@ -508,8 +489,10 @@
         if max_items is None and rsm_request is None:
             max_items = 20
             pubsub_items, metadata = await self.host.memory.storage.get_items(
-                node, max_items=max_items, item_ids=item_ids or None,
-                order_by=extra.get(C.KEY_ORDER_BY)
+                node,
+                max_items=max_items,
+                item_ids=item_ids or None,
+                order_by=extra.get(C.KEY_ORDER_BY),
             )
         elif max_items is not None:
             if rsm_request is not None:
@@ -531,9 +514,14 @@
             else:
                 before = rsm_request.before
             pubsub_items, metadata = await self.host.memory.storage.get_items(
-                node, max_items=rsm_request.max, before=before, after=rsm_request.after,
-                from_index=rsm_request.index, order_by=extra.get(C.KEY_ORDER_BY),
-                desc=desc, force_rsm=True,
+                node,
+                max_items=rsm_request.max,
+                before=before,
+                after=rsm_request.after,
+                from_index=rsm_request.index,
+                order_by=extra.get(C.KEY_ORDER_BY),
+                desc=desc,
+                force_rsm=True,
             )
 
         return pubsub_items, metadata
@@ -566,9 +554,7 @@
                     )
             if items:
                 log.debug(f"[{client.profile}] caching new items received from {node}")
-                await self.cache_items(
-                    client, node, items
-                )
+                await self.cache_items(client, node, items)
             if retract_ids:
                 log.debug(f"deleting retracted items from {node}")
                 await self.host.memory.storage.delete_pubsub_items(
@@ -602,7 +588,7 @@
         item_ids: Optional[List[str]],
         sub_id: Optional[str],
         rsm_request: Optional[rsm.RSMRequest],
-        extra: dict
+        extra: dict,
     ) -> Tuple[bool, Optional[Tuple[List[dict], dict]]]:
         if not self.use_cache:
             log.debug("cache disabled in settings")
@@ -699,7 +685,7 @@
         nodeIdentifier: str,
         sub_jid: Optional[jid.JID],
         options: Optional[dict],
-        subscription: pubsub.Subscription
+        subscription: pubsub.Subscription,
     ) -> None:
         pass
 
@@ -720,11 +706,7 @@
         return defer.ensureDeferred(self.synchronise(client, service, node))
 
     async def synchronise(
-        self,
-        client: SatXMPPEntity,
-        service: jid.JID,
-        node: str,
-        resync: bool = True
+        self, client: SatXMPPEntity, service: jid.JID, node: str, resync: bool = True
     ) -> None:
         """Synchronise a node with a pubsub service
 
@@ -740,9 +722,9 @@
         )
         if pubsub_node is None:
             log.info(
-                _(
-                    "Synchronising the new node {node} at {service}"
-                ).format(node=node, service=service.full)
+                _("Synchronising the new node {node} at {service}").format(
+                    node=node, service=service.full
+                )
             )
             analyse = await self.analyse_node(client, service, node)
             pubsub_node = await self.host.memory.storage.set_pubsub_node(
@@ -753,11 +735,13 @@
                 type_=analyse.get("type"),
             )
         elif not resync and pubsub_node.sync_state is not None:
-                # the node exists, nothing to do
-                return
+            # the node exists, nothing to do
+            return
 
-        if ((pubsub_node.sync_state == SyncState.IN_PROGRESS
-             or (service, node) in self.in_progress)):
+        if (
+            pubsub_node.sync_state == SyncState.IN_PROGRESS
+            or (service, node) in self.in_progress
+        ):
             log.warning(
                 _(
                     "{node} at {service} is already being synchronised, can't do a new "
--- a/libervia/backend/plugins/plugin_sec_aesgcm.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_sec_aesgcm.py	Wed Jun 19 18:44:57 2024 +0200
@@ -45,15 +45,20 @@
     C.PI_DEPENDENCIES: ["XEP-0363", "XEP-0384", "DOWNLOAD", "ATTACH"],
     C.PI_MAIN: "AESGCM",
     C.PI_HANDLER: "no",
-    C.PI_DESCRIPTION: dedent(_("""\
+    C.PI_DESCRIPTION: dedent(
+        _(
+            """\
     Implementation of AES-GCM scheme, a way to encrypt files (not official XMPP standard).
     See https://xmpp.org/extensions/inbox/omemo-media-sharing.html for details
-    """)),
+    """
+        )
+    ),
 }
 
 AESGCM_RE = re.compile(
-    r'aesgcm:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9'
-    r'()@:%_\+.~#?&\/\/=]*)')
+    r"aesgcm:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9"
+    r"()@:%_\+.~#?&\/\/=]*)"
+)
 
 
 class AESGCM(object):
@@ -61,13 +66,10 @@
     def __init__(self, host):
         self.host = host
         log.info(_("AESGCM plugin initialization"))
-        self._http_upload = host.plugins['XEP-0363']
+        self._http_upload = host.plugins["XEP-0363"]
         self._attach = host.plugins["ATTACH"]
-        host.plugins["DOWNLOAD"].register_scheme(
-            "aesgcm", self.download
-        )
-        self._attach.register(
-            self.can_handle_attachment, self.attach, encrypted=True)
+        host.plugins["DOWNLOAD"].register_scheme("aesgcm", self.download)
+        self._attach.register(self.can_handle_attachment, self.attach, encrypted=True)
         host.trigger.add("XEP-0363_upload_pre_slot", self._upload_pre_slot)
         host.trigger.add("XEP-0363_upload", self._upload_trigger)
         host.trigger.add("message_received", self._message_received_trigger)
@@ -84,7 +86,8 @@
             iv_size = 12
         else:
             raise ValueError(
-                f"Invalid URL fragment, can't decrypt file at {uri_parsed.get_url()}")
+                f"Invalid URL fragment, can't decrypt file at {uri_parsed.get_url()}"
+            )
 
         iv, key = fragment[:iv_size], fragment[iv_size:]
 
@@ -95,18 +98,17 @@
         ).decryptor()
 
         download_url = parse.urlunparse(
-            ('https', uri_parsed.netloc, uri_parsed.path, '', '', ''))
+            ("https", uri_parsed.netloc, uri_parsed.path, "", "", "")
+        )
 
-        if options.get('ignore_tls_errors', False):
-            log.warning(
-                "TLS certificate check disabled, this is highly insecure"
-            )
+        if options.get("ignore_tls_errors", False):
+            log.warning("TLS certificate check disabled, this is highly insecure")
             treq_client = treq_client_no_ssl
         else:
             treq_client = treq
 
         head_data = await treq_client.head(download_url)
-        content_length = int(head_data.headers.getRawHeaders('content-length')[0])
+        content_length = int(head_data.headers.getRawHeaders("content-length")[0])
         # the 128 bits tag is put at the end
         file_size = content_length - 16
 
@@ -115,18 +117,22 @@
             client,
             dest_path,
             mode="wb",
-            size = file_size,
+            size=file_size,
         )
 
         progress_id = file_obj.uid
 
         resp = await treq_client.get(download_url, unbuffered=True)
         if resp.code == 200:
-            d = treq.collect(resp, partial(
-                self.on_data_download,
-                client=client,
-                file_obj=file_obj,
-                decryptor=decryptor))
+            d = treq.collect(
+                resp,
+                partial(
+                    self.on_data_download,
+                    client=client,
+                    file_obj=file_obj,
+                    decryptor=decryptor,
+                ),
+            )
         else:
             d = defer.Deferred()
             self.host.plugins["DOWNLOAD"].errback_download(file_obj, d, resp)
@@ -141,12 +147,9 @@
             return True
 
     async def _upload_cb(self, client, filepath, filename, extra):
-        extra['encryption'] = C.ENC_AES_GCM
+        extra["encryption"] = C.ENC_AES_GCM
         return await self._http_upload.file_http_upload(
-            client=client,
-            filepath=filepath,
-            filename=filename,
-            extra=extra
+            client=client, filepath=filepath, filename=filename, extra=extra
         )
 
     async def attach(self, client, data):
@@ -157,7 +160,7 @@
         #   possible with the 0.4 version of OMEMO, it's possible to encrypt other stanza
         #   elements than body).
         attachments = data["extra"][C.KEY_ATTACHMENTS]
-        if not data['message'] or data['message'] == {'': ''}:
+        if not data["message"] or data["message"] == {"": ""}:
             extra_attachments = attachments[1:]
             del attachments[1:]
             await self._attach.upload_files(client, data, upload_cb=self._upload_cb)
@@ -177,16 +180,18 @@
         for attachment in extra_attachments:
             # we send all remaining attachment in a separate message
             await client.sendMessage(
-                to_jid=data['to'],
-                message={'': ''},
-                subject=data['subject'],
-                mess_type=data['type'],
+                to_jid=data["to"],
+                message={"": ""},
+                subject=data["subject"],
+                mess_type=data["type"],
                 extra={C.KEY_ATTACHMENTS: [attachment]},
             )
 
-        if ((not data['extra']
-             and (not data['message'] or data['message'] == {'': ''})
-             and not data['subject'])):
+        if (
+            not data["extra"]
+            and (not data["message"] or data["message"] == {"": ""})
+            and not data["subject"]
+        ):
             # nothing left to send, we can cancel the message
             raise exceptions.CancelError("Cancelled by AESGCM attachment handling")
 
@@ -222,7 +227,7 @@
             file_obj.write(decrypted)
 
     def _upload_pre_slot(self, client, extra, file_metadata):
-        if extra.get('encryption') != C.ENC_AES_GCM:
+        if extra.get("encryption") != C.ENC_AES_GCM:
             return True
         # the tag is appended to the file
         file_metadata["size"] += 16
@@ -239,19 +244,19 @@
                 return ret + tag
             except AlreadyFinalized:
                 # as we have already finalized, we can now send EOF
-                return b''
+                return b""
 
     def _upload_trigger(self, client, extra, sat_file, file_producer, slot):
-        if extra.get('encryption') != C.ENC_AES_GCM:
+        if extra.get("encryption") != C.ENC_AES_GCM:
             return True
         log.debug("encrypting file with AES-GCM")
         iv = secrets.token_bytes(12)
         key = secrets.token_bytes(32)
-        fragment = f'{iv.hex()}{key.hex()}'
+        fragment = f"{iv.hex()}{key.hex()}"
         ori_url = parse.urlparse(slot.get)
         # we change the get URL with the one with aesgcm scheme and containing the
         # encoded key + iv
-        slot.get = parse.urlunparse(['aesgcm', *ori_url[1:5], fragment])
+        slot.get = parse.urlunparse(["aesgcm", *ori_url[1:5], fragment])
 
         # encrypted data size will be bigger than original file size
         # so we need to check with final data length to avoid a warning on close()
@@ -270,13 +275,13 @@
 
         if sat_file.data_cb is not None:
             raise exceptions.InternalError(
-                f"data_cb was expected to be None, it is set to {sat_file.data_cb}")
+                f"data_cb was expected to be None, it is set to {sat_file.data_cb}"
+            )
 
         # with data_cb we encrypt the file on the fly
         sat_file.data_cb = partial(self._encrypt, encryptor=encryptor)
         return True
 
-
     def _pop_aesgcm_links(self, match, links):
         link = match.group()
         if link not in links:
@@ -284,22 +289,20 @@
         return ""
 
     def _check_aesgcm_attachments(self, client, data):
-        if not data.get('message'):
+        if not data.get("message"):
             return data
         links = []
 
-        for lang, message in list(data['message'].items()):
-            message = AESGCM_RE.sub(
-                partial(self._pop_aesgcm_links, links=links),
-                message)
+        for lang, message in list(data["message"].items()):
+            message = AESGCM_RE.sub(partial(self._pop_aesgcm_links, links=links), message)
             if links:
                 message = message.strip()
                 if not message:
-                    del data['message'][lang]
+                    del data["message"][lang]
                 else:
-                    data['message'][lang] = message
+                    data["message"][lang] = message
                 mess_encrypted = client.encryption.isEncrypted(data)
-                attachments = data['extra'].setdefault(C.KEY_ATTACHMENTS, [])
+                attachments = data["extra"].setdefault(C.KEY_ATTACHMENTS, [])
                 for link in links:
                     path = parse.urlparse(link).path
                     attachment = {
@@ -314,7 +317,7 @@
                         # encrypted, because the decryption key is part of the link,
                         # so sending it over unencrypted channel is like having no
                         # encryption at all.
-                        attachment['encrypted'] = True
+                        attachment["encrypted"] = True
                     attachments.append(attachment)
 
         return data
--- a/libervia/backend/plugins/plugin_sec_otr.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_sec_otr.py	Wed Jun 19 18:44:57 2024 +0200
@@ -109,7 +109,7 @@
             message data when an encrypted message is going to be sent
         """
         assert isinstance(self.peer, jid.JID)
-        msg = msg_str.decode('utf-8')
+        msg = msg_str.decode("utf-8")
         client = self.user.client
         log.debug("injecting encrypted message to {to}".format(to=self.peer))
         if appdata is None:
@@ -124,11 +124,11 @@
                 "timestamp": time.time(),
             }
             client.generate_message_xml(mess_data)
-            xml = mess_data['xml']
+            xml = mess_data["xml"]
             self._p_carbons.set_private(xml)
-            self._p_hints.add_hint_elements(xml, [
-                self._p_hints.HINT_NO_COPY,
-                self._p_hints.HINT_NO_PERMANENT_STORE])
+            self._p_hints.add_hint_elements(
+                xml, [self._p_hints.HINT_NO_COPY, self._p_hints.HINT_NO_PERMANENT_STORE]
+            )
             client.send(mess_data["xml"])
         else:
             message_elt = appdata["xml"]
@@ -153,11 +153,10 @@
         trusted = self.getCurrentTrust()
         if trusted is None:
             return False
-        elif trusted == 'trusted':
+        elif trusted == "trusted":
             return True
         else:
-            log.error("Unexpected getCurrentTrust() value: {value}".format(
-                value=trusted))
+            log.error("Unexpected getCurrentTrust() value: {value}".format(value=trusted))
             return False
 
     def set_state(self, state):
@@ -291,9 +290,7 @@
 
     def start_context(self, other_jid):
         assert isinstance(other_jid, jid.JID)
-        context = self.contexts.setdefault(
-            other_jid, Context(self, other_jid)
-        )
+        context = self.contexts.setdefault(other_jid, Context(self, other_jid))
         return context
 
     def get_context_for_user(self, other):
@@ -314,7 +311,9 @@
         )  #  FIXME: OTR should not be skipped per profile, this need to be refactored
         self._p_hints = host.plugins["XEP-0334"]
         self._p_carbons = host.plugins["XEP-0280"]
-        host.trigger.add("message_received", self.message_received_trigger, priority=100000)
+        host.trigger.add(
+            "message_received", self.message_received_trigger, priority=100000
+        )
         host.trigger.add("sendMessage", self.send_message_trigger, priority=100000)
         host.trigger.add("send_message_data", self._send_message_data_trigger)
         host.bridge.add_method(
@@ -374,11 +373,9 @@
         yield client._otr_data.load()
         encrypted_priv_key = client._otr_data.get(PRIVATE_KEY, None)
         if encrypted_priv_key is not None:
-            priv_key = self.host.memory.decrypt_value(
-                encrypted_priv_key, client.profile
-            )
+            priv_key = self.host.memory.decrypt_value(encrypted_priv_key, client.profile)
             ctxMng.account.privkey = potr.crypt.PK.parsePrivateKey(
-                unhexlify(priv_key.encode('utf-8'))
+                unhexlify(priv_key.encode("utf-8"))
             )[0]
         else:
             ctxMng.account.privkey = None
@@ -405,7 +402,7 @@
             entity_jid.resource = self.host.memory.main_resource_get(
                 client, entity_jid
             )  # FIXME: temporary and unsecure, must be changed when frontends
-               #        are refactored
+            #        are refactored
         ctxMng = client._otr_context_manager
         otrctx = ctxMng.get_context_for_user(entity_jid)
         priv_key = ctxMng.account.privkey
@@ -517,15 +514,18 @@
         @param to_jid(jid.JID): jid to start encrypted session with
         """
         encrypted_session = client.encryption.getSession(to_jid.userhostJID())
-        if encrypted_session and encrypted_session['plugin'].namespace != NS_OTR:
-            raise exceptions.ConflictError(_(
-                "Can't start an OTR session, there is already an encrypted session "
-                "with {name}").format(name=encrypted_session['plugin'].name))
+        if encrypted_session and encrypted_session["plugin"].namespace != NS_OTR:
+            raise exceptions.ConflictError(
+                _(
+                    "Can't start an OTR session, there is already an encrypted session "
+                    "with {name}"
+                ).format(name=encrypted_session["plugin"].name)
+            )
         if not to_jid.resource:
             to_jid.resource = self.host.memory.main_resource_get(
                 client, to_jid
             )  # FIXME: temporary and unsecure, must be changed when frontends
-               #        are refactored
+            #        are refactored
         otrctx = client._otr_context_manager.get_context_for_user(to_jid)
         query = otrctx.sendMessage(0, b"?OTRv?")
         otrctx.inject(query)
@@ -551,7 +551,7 @@
             to_jid.resource = self.host.memory.main_resource_get(
                 client, to_jid
             )  # FIXME: temporary and unsecure, must be changed when frontends
-               #        are refactored
+            #        are refactored
         otrctx = client._otr_context_manager.get_context_for_user(to_jid)
         otrctx.disconnect()
         return {}
@@ -588,7 +588,7 @@
                 to_jid.resource = self.host.memory.main_resource_get(
                     client, to_jid
                 )  # FIXME: temporary and unsecure, must be changed when frontends
-                   #        are refactored
+                #        are refactored
         except KeyError:
             log.error(_("jid key is not present !"))
             return defer.fail(exceptions.DataError)
@@ -606,7 +606,7 @@
                     context.disconnect()
                 ctxMng.account.privkey = None
                 ctxMng.account.getPrivkey()  # as account.privkey is None, getPrivkey
-                                             # will generate a new key, and save it
+                # will generate a new key, and save it
                 return {
                     "xmlui": xml_tools.note(
                         D_("Your private key has been dropped")
@@ -630,8 +630,8 @@
         otrctx = client._otr_context_manager.get_context_for_user(from_jid)
 
         try:
-            message = (
-                next(iter(data["message"].values()))
+            message = next(
+                iter(data["message"].values())
             )  # FIXME: Q&D fix for message refactoring, message is now a dict
             res = otrctx.receiveMessage(message.encode("utf-8"))
         except (potr.context.UnencryptedMessage, potr.context.NotOTRMessage):
@@ -642,8 +642,9 @@
             encrypted = False
             if otrctx.state == potr.context.STATE_ENCRYPTED:
                 log.warning(
-                    "Received unencrypted message in an encrypted context (from {jid})"
-                    .format(jid=from_jid.full())
+                    "Received unencrypted message in an encrypted context (from {jid})".format(
+                        jid=from_jid.full()
+                    )
                 )
 
                 feedback = (
@@ -692,8 +693,8 @@
                     pass
                 # TODO: add skip history as an option, but by default we don't skip it
                 # data[u'history'] = C.HISTORY_SKIP # we send the decrypted message to
-                                                    # frontends, but we don't want it in
-                                                    # history
+                # frontends, but we don't want it in
+                # history
             else:
                 raise failure.Failure(
                     exceptions.CancelError("Cancelled by OTR")
@@ -737,7 +738,7 @@
         if message_elt.getAttribute("type") == C.MESS_TYPE_GROUPCHAT:
             # OTR is not possible in group chats
             return True
-        from_jid = jid.JID(message_elt['from'])
+        from_jid = jid.JID(message_elt["from"])
         if not from_jid.resource or from_jid.userhostJID() == client.jid.userhostJID():
             # OTR is only usable when resources are present
             return True
@@ -751,9 +752,9 @@
         if client.is_component:
             return True
         encryption = mess_data.get(C.MESS_KEY_ENCRYPTION)
-        if encryption is None or encryption['plugin'].namespace != NS_OTR:
+        if encryption is None or encryption["plugin"].namespace != NS_OTR:
             return
-        to_jid = mess_data['to']
+        to_jid = mess_data["to"]
         if not to_jid.resource:
             to_jid.resource = self.host.memory.main_resource_get(
                 client, to_jid
@@ -777,9 +778,10 @@
                 log.warning("No message found")
             else:
                 self._p_carbons.set_private(message_elt)
-                self._p_hints.add_hint_elements(message_elt, [
-                    self._p_hints.HINT_NO_COPY,
-                    self._p_hints.HINT_NO_PERMANENT_STORE])
+                self._p_hints.add_hint_elements(
+                    message_elt,
+                    [self._p_hints.HINT_NO_COPY, self._p_hints.HINT_NO_PERMANENT_STORE],
+                )
                 otrctx.sendMessage(0, str(body).encode("utf-8"), appdata=mess_data)
         else:
             feedback = D_(
@@ -791,8 +793,9 @@
             client.feedback(to_jid, feedback)
             raise failure.Failure(exceptions.CancelError("Cancelled by OTR plugin"))
 
-    def send_message_trigger(self, client, mess_data, pre_xml_treatments,
-                           post_xml_treatments):
+    def send_message_trigger(
+        self, client, mess_data, pre_xml_treatments, post_xml_treatments
+    ):
         if client.is_component:
             return True
         if mess_data["type"] == "groupchat":
@@ -830,7 +833,7 @@
                 entity.resource = self.host.memory.main_resource_get(
                     client, entity
                 )  # FIXME: temporary and unsecure, must be changed when frontends
-                   #        are refactored
+                #        are refactored
             except exceptions.UnknownEntityError:
                 return True  #  entity was not connected
         if entity in client._otr_context_manager.contexts:
--- a/libervia/backend/plugins/plugin_sec_oxps.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_sec_oxps.py	Wed Jun 19 18:44:57 2024 +0200
@@ -140,9 +140,7 @@
         self.gpg_provider = get_gpg_provider(self.host, client)
 
     async def load_secrets(
-        self,
-        client: SatXMPPEntity,
-        node_uri: str
+        self, client: SatXMPPEntity, node_uri: str
     ) -> Optional[Dict[str, SharedSecret]]:
         """Load shared secret from databse or cache
 
@@ -174,8 +172,9 @@
                     timestamp=s["timestamp"],
                     origin=jid.JID(s["origin"]),
                     revoked=s["revoked"],
-                    shared_with={jid.JID(w) for w in s["shared_with"]}
-                ) for s in secrets_as_dict
+                    shared_with={jid.JID(w) for w in s["shared_with"]},
+                )
+                for s in secrets_as_dict
             }
             client.__cache[node_uri] = shared_secrets
             while len(client.__cache) > CACHE_MAX:
@@ -196,7 +195,7 @@
         self,
         client: SatXMPPEntity,
         node_uri: str,
-        shared_secrets: Dict[str, SharedSecret]
+        shared_secrets: Dict[str, SharedSecret],
     ) -> None:
         """Store shared secrets to database
 
@@ -223,10 +222,10 @@
         secret_key = secrets.token_urlsafe(64)
         secret_id = shortuuid.uuid()
         return SharedSecret(
-            id = secret_id,
-            key = secret_key,
-            timestamp = time.time(),
-            origin = client.jid.userhostJID()
+            id=secret_id,
+            key=secret_key,
+            timestamp=time.time(),
+            origin=client.jid.userhostJID(),
         )
 
     def _ps_secret_revoke(
@@ -235,7 +234,7 @@
         node: str,
         secret_id: str,
         recipients: List[str],
-        profile_key: str
+        profile_key: str,
     ) -> defer.Deferred:
         return defer.ensureDeferred(
             self.revoke(
@@ -253,7 +252,7 @@
         service: Optional[jid.JID],
         node: str,
         secret_id: str,
-        recipients: Optional[Iterable[jid.JID]] = None
+        recipients: Optional[Iterable[jid.JID]] = None,
     ) -> None:
         """Revoke a secret and notify entities
 
@@ -286,9 +285,7 @@
                 )
             shared_secret.revoked = True
         await self.store_secrets(client, node_uri, shared_secrets)
-        log.info(
-            f"shared secret {secret_id!r} for {node_uri} has been revoked."
-        )
+        log.info(f"shared secret {secret_id!r} for {node_uri} has been revoked.")
         if recipients is None:
             recipients = shared_secret.shared_with
         if recipients:
@@ -312,7 +309,7 @@
         service: jid.JID,
         node: str,
         secret_id: str,
-        recipient: jid.JID
+        recipient: jid.JID,
     ) -> None:
         revoke_elt = domish.Element((NS_OXPS, "revoke"))
         revoke_elt["jid"] = service.full()
@@ -336,7 +333,7 @@
         service: str,
         node: str,
         secret_ids: List[str],
-        profile_key: str
+        profile_key: str,
     ) -> defer.Deferred:
         return defer.ensureDeferred(
             self.share_secrets(
@@ -354,7 +351,7 @@
         service: Optional[jid.JID],
         node: str,
         shared_secret: SharedSecret,
-        recipient: jid.JID
+        recipient: jid.JID,
     ) -> None:
         """Create and send <shared-secret> element"""
         if service is None:
@@ -413,9 +410,7 @@
             try:
                 to_share = [shared_secrets[s_id] for s_id in secret_ids]
             except KeyError as e:
-                raise exceptions.NotFound(
-                    f"no shared secret found with given ID: {e}"
-                )
+                raise exceptions.NotFound(f"no shared secret found with given ID: {e}")
         for shared_secret in to_share:
             await self.share_secret(client, service, node, shared_secret, recipient)
         await self.store_secrets(client, node_uri, shared_secrets)
@@ -432,7 +427,7 @@
                 self.host.get_client(profile_key),
                 jid.JID(service) if service else None,
                 node,
-                [jid.JID(r) for r in recipients] or None
+                [jid.JID(r) for r in recipients] or None,
             )
         )
 
@@ -441,7 +436,7 @@
         client: SatXMPPEntity,
         service: Optional[jid.JID],
         node: str,
-        recipients: Optional[List[jid.JID]] = None
+        recipients: Optional[List[jid.JID]] = None,
     ) -> None:
         """Revoke all current known secrets, create and share a new one
 
@@ -466,11 +461,11 @@
             if shared_secrets:
                 # we get recipients from latests shared secret's shared_with list,
                 # regarless of deprecation (cause all keys may be deprecated)
-                recipients = list(sorted(
-                    shared_secrets.values(),
-                    key=lambda s: s.timestamp,
-                    reverse=True
-                )[0].shared_with)
+                recipients = list(
+                    sorted(
+                        shared_secrets.values(), key=lambda s: s.timestamp, reverse=True
+                    )[0].shared_with
+                )
             else:
                 recipients = []
 
@@ -482,10 +477,7 @@
         await self.store_secrets(client, node_uri, shared_secrets)
 
     def _ps_secrets_list(
-        self,
-        service: str,
-        node: str,
-        profile_key: str
+        self, service: str, node: str, profile_key: str
     ) -> defer.Deferred:
         d = defer.ensureDeferred(
             self.list_shared_secrets(
@@ -522,10 +514,7 @@
         ]
 
     async def handle_revoke_elt(
-        self,
-        client: SatXMPPEntity,
-        sender: jid.JID,
-        revoke_elt: domish.Element
+        self, client: SatXMPPEntity, sender: jid.JID, revoke_elt: domish.Element
     ) -> None:
         """Parse a <revoke> element and update local secrets
 
@@ -537,9 +526,7 @@
             node = revoke_elt["node"]
             secret_id = revoke_elt["id"]
         except (KeyError, RuntimeError) as e:
-            log.warning(
-                f"ignoring invalid <revoke> element: {e}\n{revoke_elt.toXml()}"
-            )
+            log.warning(f"ignoring invalid <revoke> element: {e}\n{revoke_elt.toXml()}")
             return
         node_uri = uri.build_xmpp_uri("pubsub", path=service.full(), node=node)
         shared_secrets = await self.load_secrets(client, node_uri)
@@ -571,10 +558,7 @@
         log.info(f"Shared secret {secret_id} has been revoked for {node_uri}")
 
     async def handle_shared_secret_elt(
-        self,
-        client: SatXMPPEntity,
-        sender: jid.JID,
-        shared_secret_elt: domish.Element
+        self, client: SatXMPPEntity, sender: jid.JID, shared_secret_elt: domish.Element
     ) -> None:
         """Parse a <shared-secret> element and update local secrets
 
@@ -632,7 +616,7 @@
         items: Optional[List[domish.Element]],
         options: Optional[dict],
         sender: jid.JID,
-        extra: Dict[str, Any]
+        extra: Dict[str, Any],
     ) -> bool:
         if not items or not extra.get("encrypted"):
             return True
@@ -702,9 +686,11 @@
         shared_secrets = None
         for item in items:
             payload = item.firstChildElement()
-            if (payload is not None
+            if (
+                payload is not None
                 and payload.name == "encrypted"
-                and payload.uri == NS_OXPS):
+                and payload.uri == NS_OXPS
+            ):
                 encrypted_elt = payload
                 secret_id = encrypted_elt.getAttribute("key")
                 if not secret_id:
@@ -713,7 +699,9 @@
                     )
                     continue
                 if shared_secrets is None:
-                    node_uri = uri.build_xmpp_uri("pubsub", path=service.full(), node=node)
+                    node_uri = uri.build_xmpp_uri(
+                        "pubsub", path=service.full(), node=node
+                    )
                     shared_secrets = await self.load_secrets(client, node_uri)
                     if shared_secrets is None:
                         log.warning(
@@ -730,8 +718,7 @@
                     continue
                 log.debug(f"decrypting item {item.getAttribute('id', '')}")
                 decrypted = self.gpg_provider.decrypt_symmetrically(
-                    base64.b64decode(str(encrypted_elt)),
-                    shared_secret.key
+                    base64.b64decode(str(encrypted_elt)), shared_secret.key
                 )
                 decrypted_elt = xml_tools.parse(decrypted)
                 item.children.clear()
@@ -743,7 +730,7 @@
         self,
         client: SatXMPPEntity,
         message_elt: domish.Element,
-        post_treat: defer.Deferred
+        post_treat: defer.Deferred,
     ) -> bool:
         sender = jid.JID(message_elt["from"]).userhostJID()
         # there may be an openpgp element if OXIM is not activate, in this case we have to
@@ -752,10 +739,7 @@
         if openpgp_elt is not None:
             try:
                 payload_elt, __ = await self._ox.unpack_openpgp_element(
-                    client,
-                    openpgp_elt,
-                    "signcrypt",
-                    sender
+                    client, openpgp_elt, "signcrypt", sender
                 )
             except Exception as e:
                 log.warning(f"Can't decrypt element: {e}\n{message_elt.toXml()}")
--- a/libervia/backend/plugins/plugin_sec_pte.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_sec_pte.py	Wed Jun 19 18:44:57 2024 +0200
@@ -72,7 +72,7 @@
         items: Optional[List[domish.Element]],
         options: Optional[dict],
         sender: jid.JID,
-        extra: Dict[str, Any]
+        extra: Dict[str, Any],
     ) -> bool:
         if not items or extra.get("encrypted_for") is None:
             return True
@@ -95,7 +95,7 @@
                 item,
                 targets,
                 is_muc_message=False,
-                stanza_id=None
+                stanza_id=None,
             )
             item_elts = list(item.elements())
             if len(item_elts) != 1:
@@ -126,9 +126,11 @@
             service = client.jid.userhostJID()
         for item in items:
             payload = item.firstChildElement()
-            if (payload is not None
+            if (
+                payload is not None
                 and payload.name == "encrypted"
-                and payload.uri == NS_PTE):
+                and payload.uri == NS_PTE
+            ):
                 encrypted_elt = payload
                 item.children.clear()
                 try:
@@ -138,25 +140,26 @@
                     raise exceptions.DataError(
                         f"invalid <encrypted> element: {encrypted_elt.toXml()}"
                     )
-                if encryption_type!= self._o.NS_TWOMEMO:
+                if encryption_type != self._o.NS_TWOMEMO:
                     raise NotImplementedError("only TWOMEMO is supported for now")
                 log.debug(f"decrypting item {item.getAttribute('id', '')}")
 
                 # FIXME: we do use _message_received_trigger now to decrypt the stanza, a
                 #   cleaner separated decrypt method should be used
                 encrypted_elt["from"] = encrypted_by.full()
-                if not await self._o._message_received_trigger(
-                    client,
-                    encrypted_elt,
-                    defer.Deferred()
-                ) or not encrypted_elt.children:
+                if (
+                    not await self._o._message_received_trigger(
+                        client, encrypted_elt, defer.Deferred()
+                    )
+                    or not encrypted_elt.children
+                ):
                     raise exceptions.EncryptionError("can't decrypt the message")
 
                 item.addChild(encrypted_elt.firstChildElement())
 
                 extra.setdefault("encrypted", {})[item["id"]] = {
                     "type": NS_PTE,
-                    "algorithm": encryption_type
+                    "algorithm": encryption_type,
                 }
         return True
 
--- a/libervia/backend/plugins/plugin_sec_pubsub_signing.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_sec_pubsub_signing.py	Wed Jun 19 18:44:57 2024 +0200
@@ -115,10 +115,7 @@
         # et_sign_data_elt = xml_tools.domish_elt_2_et_elt(sign_data_elt, True)
         et_sign_data_elt = etree.fromstring(sign_data_elt.toXml())
         to_sign = etree.tostring(
-            et_sign_data_elt,
-            method="c14n2",
-            with_comments=False,
-            strip_text=True
+            et_sign_data_elt, method="c14n2", with_comments=False, strip_text=True
         )
         # the data to sign is serialised, we cna restore original values
         item_elt["id"] = item_id
@@ -141,7 +138,7 @@
                 jid.JID(service),
                 node,
                 item_id,
-                data_format.deserialise(signature_data_s)
+                data_format.deserialise(signature_data_s),
             )
         )
         d.addCallback(data_format.serialise)
@@ -155,9 +152,7 @@
         item_id: str,
         signature_data: Dict[str, Any],
     ) -> Dict[str, Any]:
-        items, __ = await self._p.get_items(
-            client, service, node, item_ids=[item_id]
-        )
+        items, __ = await self._p.get_items(client, service, node, item_ids=[item_id])
         if not items != 1:
             raise exceptions.NotFound(
                 f"target item not found for {item_id!r} at {node!r} for {service}"
@@ -172,7 +167,8 @@
         signer = jid.JID(signers[0])
         signature = base64.b64decode(signature_data["signature"])
         verification_keys = {
-            k for k in await self._ox.import_all_public_keys(client, signer)
+            k
+            for k in await self._ox.import_all_public_keys(client, signer)
             if client.gpg_provider.can_sign(k)
         }
         signed_data = self.get_data_to_sign(item_elt, service, timestamp, signer.full())
@@ -200,9 +196,7 @@
         data: Dict[str, Any],
     ) -> None:
         try:
-            signature_elt = next(
-                attachments_elt.elements(NS_PUBSUB_SIGNING, "signature")
-            )
+            signature_elt = next(attachments_elt.elements(NS_PUBSUB_SIGNING, "signature"))
         except StopIteration:
             pass
         else:
@@ -220,13 +214,12 @@
                 "timestamp": timestamp,
                 "signers": [
                     str(s) for s in signature_elt.elements(NS_PUBSUB_SIGNING, "signer")
-                ]
+                ],
             }
             # FIXME: only OpenPGP signature is available for now, to be updated if and
             #   when more algorithms are available.
             sign_elt = next(
-                signature_elt.elements(NS_PUBSUB_SIGNING_OPENPGP, "sign"),
-                None
+                signature_elt.elements(NS_PUBSUB_SIGNING_OPENPGP, "sign"), None
             )
             if sign_elt is None:
                 log.warning(
@@ -243,7 +236,7 @@
         self,
         client: SatXMPPEntity,
         attachments_data: Dict[str, Any],
-        former_elt: Optional[domish.Element]
+        former_elt: Optional[domish.Element],
     ) -> Optional[domish.Element]:
         signature_data = attachments_data["extra"].get("signature")
         if signature_data is None:
@@ -277,7 +270,8 @@
 
             sign_elt = signature_elt.addElement((NS_PUBSUB_SIGNING_OPENPGP, "sign"))
             signing_keys = {
-                k for k in self._ox.list_secret_keys(client)
+                k
+                for k in self._ox.list_secret_keys(client)
                 if client.gpg_provider.can_sign(k.public_key)
             }
             # the base64 encoded signature itself
@@ -298,7 +292,7 @@
         items: Optional[List[domish.Element]],
         options: Optional[dict],
         sender: jid.JID,
-        extra: Dict[str, Any]
+        extra: Dict[str, Any],
     ) -> bool:
         if not items or not extra.get("signed"):
             return True
@@ -318,8 +312,8 @@
                             "item_elt": item_elt,
                             "signer": sender.userhost(),
                         }
-                    }
-                }
+                    },
+                },
             )
 
         return True
--- a/libervia/backend/plugins/plugin_syntax_wiki_dotclear.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_syntax_wiki_dotclear.py	Wed Jun 19 18:44:57 2024 +0200
@@ -45,8 +45,8 @@
 NOTE_A_TPL = "note_{}"
 ESCAPE_CHARS_BASE = r"(?P<escape_char>[][{}%|\\/*#@{{}}~$-])"
 ESCAPE_CHARS_EXTRA = (
-    r"!?_+'()"
-)  # These chars are not escaped in XHTML => dc_wiki conversion,
+    r"!?_+'()"  # These chars are not escaped in XHTML => dc_wiki conversion,
+)
 # but are used in the other direction
 ESCAPE_CHARS = ESCAPE_CHARS_BASE.format("")
 FLAG_UL = "ul"  # must be the name of the element
@@ -227,9 +227,9 @@
                 elif data in ("r", "d"):
                     img_elt["style"] = "display:block; float:right; margin:0 0 1em 1em"
                 elif data == "c":
-                    img_elt[
-                        "style"
-                    ] = "display:block; margin-left:auto; margin-right:auto"
+                    img_elt["style"] = (
+                        "display:block; margin-left:auto; margin-right:auto"
+                    )
                 else:
                     log.warning("bad position argument for image, ignoring it")
 
--- a/libervia/backend/plugins/plugin_xep_0020.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0020.py	Wed Jun 19 18:44:57 2024 +0200
@@ -101,9 +101,9 @@
             result[field] = values[0] if values else None
             if len(values) > 1:
                 log.warning(
-                    _(
-                        "More than one value choosed for {}, keeping the first one"
-                    ).format(field)
+                    _("More than one value choosed for {}, keeping the first one").format(
+                        field
+                    )
                 )
         return result
 
--- a/libervia/backend/plugins/plugin_xep_0033.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0033.py	Wed Jun 19 18:44:57 2024 +0200
@@ -96,9 +96,7 @@
                     log.warning(
                         _("XEP-0033 is being used but the server doesn't support it!")
                     )
-                    raise failure.Failure(
-                        exceptions.CancelError("Cancelled by XEP-0033")
-                    )
+                    raise failure.Failure(exceptions.CancelError("Cancelled by XEP-0033"))
                 if mess_data["to"] not in entities:
                     expected = _(" or ").join([entity.userhost() for entity in entities])
                     log.warning(
--- a/libervia/backend/plugins/plugin_xep_0045.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0045.py	Wed Jun 19 18:44:57 2024 +0200
@@ -53,14 +53,14 @@
     C.PI_RECOMMENDATIONS: [C.TEXT_CMDS, "XEP-0313"],
     C.PI_MAIN: "XEP_0045",
     C.PI_HANDLER: "yes",
-    C.PI_DESCRIPTION: _("""Implementation of Multi-User Chat""")
+    C.PI_DESCRIPTION: _("""Implementation of Multi-User Chat"""),
 }
 
-NS_MUC = 'http://jabber.org/protocol/muc'
-AFFILIATIONS = ('owner', 'admin', 'member', 'none', 'outcast')
-ROOM_USER_JOINED = 'ROOM_USER_JOINED'
-ROOM_USER_LEFT = 'ROOM_USER_LEFT'
-OCCUPANT_KEYS = ('nick', 'entity', 'affiliation', 'role')
+NS_MUC = "http://jabber.org/protocol/muc"
+AFFILIATIONS = ("owner", "admin", "member", "none", "outcast")
+ROOM_USER_JOINED = "ROOM_USER_JOINED"
+ROOM_USER_LEFT = "ROOM_USER_LEFT"
+OCCUPANT_KEYS = ("nick", "entity", "affiliation", "role")
 ROOM_STATE_OCCUPANTS = "occupants"
 ROOM_STATE_SELF_PRESENCE = "self-presence"
 ROOM_STATE_LIVE = "live"
@@ -69,9 +69,9 @@
 HISTORY_MAM = "mam"
 
 
-CONFIG_SECTION = 'plugin muc'
+CONFIG_SECTION = "plugin muc"
 
-default_conf = {"default_muc": 'sat@chat.jabberfr.org'}
+default_conf = {"default_muc": "sat@chat.jabberfr.org"}
 
 
 class AlreadyJoined(exceptions.ConflictError):
@@ -92,58 +92,101 @@
         # return same arguments as muc_room_joined + a boolean set to True is the room was
         # already joined (first argument)
         host.bridge.add_method(
-            "muc_join", ".plugin", in_sign='ssa{ss}s', out_sign='(bsa{sa{ss}}ssass)',
-            method=self._join, async_=True)
+            "muc_join",
+            ".plugin",
+            in_sign="ssa{ss}s",
+            out_sign="(bsa{sa{ss}}ssass)",
+            method=self._join,
+            async_=True,
+        )
         host.bridge.add_method(
-            "muc_nick", ".plugin", in_sign='sss', out_sign='', method=self._nick)
+            "muc_nick", ".plugin", in_sign="sss", out_sign="", method=self._nick
+        )
         host.bridge.add_method(
-            "muc_nick_get", ".plugin", in_sign='ss', out_sign='s', method=self._get_room_nick)
+            "muc_nick_get",
+            ".plugin",
+            in_sign="ss",
+            out_sign="s",
+            method=self._get_room_nick,
+        )
         host.bridge.add_method(
-            "muc_leave", ".plugin", in_sign='ss', out_sign='', method=self._leave,
-            async_=True)
+            "muc_leave",
+            ".plugin",
+            in_sign="ss",
+            out_sign="",
+            method=self._leave,
+            async_=True,
+        )
         host.bridge.add_method(
-            "muc_occupants_get", ".plugin", in_sign='ss', out_sign='a{sa{ss}}',
-            method=self._get_room_occupants)
+            "muc_occupants_get",
+            ".plugin",
+            in_sign="ss",
+            out_sign="a{sa{ss}}",
+            method=self._get_room_occupants,
+        )
         host.bridge.add_method(
-            "muc_subject", ".plugin", in_sign='sss', out_sign='', method=self._subject)
+            "muc_subject", ".plugin", in_sign="sss", out_sign="", method=self._subject
+        )
         host.bridge.add_method(
-            "muc_get_rooms_joined", ".plugin", in_sign='s', out_sign='a(sa{sa{ss}}ssas)',
-            method=self._get_rooms_joined)
+            "muc_get_rooms_joined",
+            ".plugin",
+            in_sign="s",
+            out_sign="a(sa{sa{ss}}ssas)",
+            method=self._get_rooms_joined,
+        )
         host.bridge.add_method(
-            "muc_get_unique_room_name", ".plugin", in_sign='ss', out_sign='s',
-            method=self._get_unique_name)
+            "muc_get_unique_room_name",
+            ".plugin",
+            in_sign="ss",
+            out_sign="s",
+            method=self._get_unique_name,
+        )
         host.bridge.add_method(
-            "muc_configure_room", ".plugin", in_sign='ss', out_sign='s',
-            method=self._configure_room, async_=True)
+            "muc_configure_room",
+            ".plugin",
+            in_sign="ss",
+            out_sign="s",
+            method=self._configure_room,
+            async_=True,
+        )
         host.bridge.add_method(
-            "muc_get_default_service", ".plugin", in_sign='', out_sign='s',
-            method=self.get_default_muc)
+            "muc_get_default_service",
+            ".plugin",
+            in_sign="",
+            out_sign="s",
+            method=self.get_default_muc,
+        )
         host.bridge.add_method(
-            "muc_get_service", ".plugin", in_sign='ss', out_sign='s',
-            method=self._get_muc_service, async_=True)
+            "muc_get_service",
+            ".plugin",
+            in_sign="ss",
+            out_sign="s",
+            method=self._get_muc_service,
+            async_=True,
+        )
         # called when a room will be joined but must be locked until join is received
         # (room is prepared, history is getting retrieved)
         # args: room_jid, profile
-        host.bridge.add_signal(
-            "muc_room_prepare_join", ".plugin", signature='ss')
+        host.bridge.add_signal("muc_room_prepare_join", ".plugin", signature="ss")
         # args: room_jid, occupants, user_nick, subject, profile
-        host.bridge.add_signal(
-            "muc_room_joined", ".plugin", signature='sa{sa{ss}}ssass')
+        host.bridge.add_signal("muc_room_joined", ".plugin", signature="sa{sa{ss}}ssass")
         # args: room_jid, profile
-        host.bridge.add_signal(
-            "muc_room_left", ".plugin", signature='ss')
+        host.bridge.add_signal("muc_room_left", ".plugin", signature="ss")
         # args: room_jid, old_nick, new_nick, profile
-        host.bridge.add_signal(
-            "muc_room_user_changed_nick", ".plugin", signature='ssss')
+        host.bridge.add_signal("muc_room_user_changed_nick", ".plugin", signature="ssss")
         # args: room_jid, subject, profile
-        host.bridge.add_signal(
-            "muc_room_new_subject", ".plugin", signature='sss')
+        host.bridge.add_signal("muc_room_new_subject", ".plugin", signature="sss")
         self.__submit_conf_id = host.register_callback(
-            self._submit_configuration, with_data=True)
+            self._submit_configuration, with_data=True
+        )
         self._room_join_id = host.register_callback(self._ui_room_join_cb, with_data=True)
         host.import_menu(
-            (D_("MUC"), D_("configure")), self._configure_room_menu, security_limit=0,
-            help_string=D_("Configure Multi-User Chat room"), type_=C.MENU_ROOM)
+            (D_("MUC"), D_("configure")),
+            self._configure_room_menu,
+            security_limit=0,
+            help_string=D_("Configure Multi-User Chat room"),
+            type_=C.MENU_ROOM,
+        )
         try:
             self.text_cmds = self.host.plugins[C.TEXT_CMDS]
         except KeyError:
@@ -157,7 +200,9 @@
 
         host.trigger.add("presence_available", self.presence_trigger)
         host.trigger.add("presence_received", self.presence_received_trigger)
-        host.trigger.add("message_received", self.message_received_trigger, priority=1000000)
+        host.trigger.add(
+            "message_received", self.message_received_trigger, priority=1000000
+        )
         host.trigger.add("message_parse", self._message_parse_trigger)
 
     async def profile_connected(self, client):
@@ -179,7 +224,7 @@
         if message_elt.getAttribute("type") == C.MESS_TYPE_GROUPCHAT:
             if message_elt.subject:
                 return False
-            from_jid = jid.JID(message_elt['from'])
+            from_jid = jid.JID(message_elt["from"])
             room_jid = from_jid.userhostJID()
             if room_jid in client._muc_client.joined_rooms:
                 room = client._muc_client.joined_rooms[room_jid]
@@ -189,16 +234,19 @@
                         # messages before history is complete, so this is not a warning
                         # but an expected case.
                         # On the other hand, with legacy history, it's not normal.
-                        log.warning(_(
-                            "Received non delayed message in a room before its "
-                            "initialisation: state={state}, msg={msg}").format(
-                        state=room.state,
-                        msg=message_elt.toXml()))
+                        log.warning(
+                            _(
+                                "Received non delayed message in a room before its "
+                                "initialisation: state={state}, msg={msg}"
+                            ).format(state=room.state, msg=message_elt.toXml())
+                        )
                     room._cache.append(message_elt)
                     return False
             else:
-                log.warning("Received groupchat message for a room which has not been "
-                            "joined, ignoring it: {}".format(message_elt.toXml()))
+                log.warning(
+                    "Received groupchat message for a room which has not been "
+                    "joined, ignoring it: {}".format(message_elt.toXml())
+                )
                 return False
         return True
 
@@ -284,34 +332,44 @@
             #    plugin should be refactored.
             getattr(room, "subject", ""),
             [s.name for s in room.statuses],
-            profile
-            ]
+            profile,
+        ]
 
     def _ui_room_join_cb(self, data, profile):
-        room_jid = jid.JID(data['index'])
+        room_jid = jid.JID(data["index"])
         client = self.host.get_client(profile)
-        defer.ensureDeferred(
-            self.join(client, room_jid)
-        )
+        defer.ensureDeferred(self.join(client, room_jid))
         return {}
 
     def _password_ui_cb(self, data, client, room_jid, nick):
         """Called when the user has given room password (or cancelled)"""
         if C.bool(data.get(C.XMLUI_DATA_CANCELLED, "false")):
             log.info("room join for {} is cancelled".format(room_jid.userhost()))
-            raise failure.Failure(exceptions.CancelError(D_("Room joining cancelled by user")))
-        password = data[xml_tools.form_escape('password')]
-        return client._muc_client.join(room_jid, nick, password).addCallbacks(self._join_cb, self._join_eb, (client, room_jid, nick), errbackArgs=(client, room_jid, nick, password))
+            raise failure.Failure(
+                exceptions.CancelError(D_("Room joining cancelled by user"))
+            )
+        password = data[xml_tools.form_escape("password")]
+        return client._muc_client.join(room_jid, nick, password).addCallbacks(
+            self._join_cb,
+            self._join_eb,
+            (client, room_jid, nick),
+            errbackArgs=(client, room_jid, nick, password),
+        )
 
     def _show_list_ui(self, items, client, service):
-        xmlui = xml_tools.XMLUI(title=D_('Rooms in {}'.format(service.full())))
-        adv_list = xmlui.change_container('advanced_list', columns=1, selectable='single', callback_id=self._room_join_id)
+        xmlui = xml_tools.XMLUI(title=D_("Rooms in {}".format(service.full())))
+        adv_list = xmlui.change_container(
+            "advanced_list",
+            columns=1,
+            selectable="single",
+            callback_id=self._room_join_id,
+        )
         items = sorted(items, key=lambda i: i.name.lower())
         for item in items:
             adv_list.set_row_index(item.entity.full())
             xmlui.addText(item.name)
         adv_list.end()
-        self.host.action_new({'xmlui': xmlui.toXml()}, profile=client.profile)
+        self.host.action_new({"xmlui": xmlui.toXml()}, profile=client.profile)
 
     def _join_cb(self, room, client, room_jid, nick):
         """Called when the user is in the requested room"""
@@ -321,8 +379,10 @@
             # a proper configuration management should be done
             log.debug(_("room locked !"))
             d = client._muc_client.configure(room.roomJID, {})
-            d.addErrback(self.host.log_errback,
-                         msg=_('Error while configuring the room: {failure_}'))
+            d.addErrback(
+                self.host.log_errback,
+                msg=_("Error while configuring the room: {failure_}"),
+            )
         return room.fully_joined
 
     def _join_eb(self, failure_, client, room_jid, nick, password):
@@ -330,33 +390,50 @@
         try:
             condition = failure_.value.condition
         except AttributeError:
-            msg_suffix = f': {failure_}'
+            msg_suffix = f": {failure_}"
         else:
-            if condition == 'conflict':
+            if condition == "conflict":
                 # we have a nickname conflict, we try again with "_" suffixed to current nickname
-                nick += '_'
-                return client._muc_client.join(room_jid, nick, password).addCallbacks(self._join_cb, self._join_eb, (client, room_jid, nick), errbackArgs=(client, room_jid, nick, password))
-            elif condition == 'not-allowed':
+                nick += "_"
+                return client._muc_client.join(room_jid, nick, password).addCallbacks(
+                    self._join_cb,
+                    self._join_eb,
+                    (client, room_jid, nick),
+                    errbackArgs=(client, room_jid, nick, password),
+                )
+            elif condition == "not-allowed":
                 # room is restricted, we need a password
-                password_ui = xml_tools.XMLUI("form", title=D_('Room {} is restricted').format(room_jid.userhost()), submit_id='')
-                password_ui.addText(D_("This room is restricted, please enter the password"))
-                password_ui.addPassword('password')
+                password_ui = xml_tools.XMLUI(
+                    "form",
+                    title=D_("Room {} is restricted").format(room_jid.userhost()),
+                    submit_id="",
+                )
+                password_ui.addText(
+                    D_("This room is restricted, please enter the password")
+                )
+                password_ui.addPassword("password")
                 d = xml_tools.defer_xmlui(self.host, password_ui, profile=client.profile)
                 d.addCallback(self._password_ui_cb, client, room_jid, nick)
                 return d
 
             msg_suffix = ' with condition "{}"'.format(failure_.value.condition)
 
-        mess = D_("Error while joining the room {room}{suffix}".format(
-            room = room_jid.userhost(), suffix = msg_suffix))
+        mess = D_(
+            "Error while joining the room {room}{suffix}".format(
+                room=room_jid.userhost(), suffix=msg_suffix
+            )
+        )
         log.warning(mess)
         xmlui = xml_tools.note(mess, D_("Group chat error"), level=C.XMLUI_DATA_LVL_ERROR)
-        self.host.action_new({'xmlui': xmlui.toXml()}, profile=client.profile)
+        self.host.action_new({"xmlui": xmlui.toXml()}, profile=client.profile)
 
     @staticmethod
     def _get_occupants(room):
         """Get occupants of a room in a form suitable for bridge"""
-        return {u.nick: {k:str(getattr(u,k) or '') for k in OCCUPANT_KEYS} for u in list(room.roster.values())}
+        return {
+            u.nick: {k: str(getattr(u, k) or "") for k in OCCUPANT_KEYS}
+            for u in list(room.roster.values())
+        }
 
     def _get_room_occupants(self, room_jid_s, profile_key):
         client = self.host.get_client(profile_key)
@@ -377,11 +454,12 @@
         for room in list(client._muc_client.joined_rooms.values()):
             if room.state == ROOM_STATE_LIVE:
                 result.append(
-                    (room.roomJID.userhost(),
-                     self._get_occupants(room),
-                     room.nick,
-                     room.subject,
-                     [s.name for s in room.statuses],
+                    (
+                        room.roomJID.userhost(),
+                        self._get_occupants(room),
+                        room.nick,
+                        room.subject,
+                        [s.name for s in room.statuses],
                     )
                 )
         return result
@@ -415,7 +493,7 @@
         """
         client = self.host.get_client(profile)
         try:
-            room_jid = jid.JID(menu_data['room_jid'])
+            room_jid = jid.JID(menu_data["room_jid"])
         except KeyError:
             log.error(_("room_jid key is not present !"))
             return defer.fail(exceptions.DataError)
@@ -425,6 +503,7 @@
                 msg = D_("No configuration available for this room")
                 return {"xmlui": xml_tools.note(msg).toXml()}
             return {"xmlui": xmlui.toXml()}
+
         return self.configure_room(client, room_jid).addCallback(xmlui_received)
 
     def configure_room(self, client, room_jid):
@@ -457,15 +536,15 @@
             session_data = self._sessions.profile_get(raw_data["session_id"], profile)
         except KeyError:
             log.warning(D_("Session ID doesn't exist, session has probably expired."))
-            _dialog = xml_tools.XMLUI('popup', title=D_('Room configuration failed'))
+            _dialog = xml_tools.XMLUI("popup", title=D_("Room configuration failed"))
             _dialog.addText(D_("Session ID doesn't exist, session has probably expired."))
-            return defer.succeed({'xmlui': _dialog.toXml()})
+            return defer.succeed({"xmlui": _dialog.toXml()})
 
         data = xml_tools.xmlui_result_2_data_form_result(raw_data)
-        d = client._muc_client.configure(session_data['room_jid'], data)
-        _dialog = xml_tools.XMLUI('popup', title=D_('Room configuration succeed'))
+        d = client._muc_client.configure(session_data["room_jid"], data)
+        _dialog = xml_tools.XMLUI("popup", title=D_("Room configuration succeed"))
         _dialog.addText(D_("The new settings have been saved."))
-        d.addCallback(lambda ignore: {'xmlui': _dialog.toXml()})
+        d.addCallback(lambda ignore: {"xmlui": _dialog.toXml()})
         del self._sessions[raw_data["session_id"]]
         return d
 
@@ -477,13 +556,14 @@
     def _get_muc_service(self, jid_=None, profile=C.PROF_KEY_NONE):
         client = self.host.get_client(profile)
         d = defer.ensureDeferred(self.get_muc_service(client, jid_ or None))
-        d.addCallback(lambda service_jid: service_jid.full() if service_jid is not None else '')
+        d.addCallback(
+            lambda service_jid: service_jid.full() if service_jid is not None else ""
+        )
         return d
 
     async def get_muc_service(
-        self,
-        client: SatXMPPEntity,
-        jid_: Optional[jid.JID] = None) -> Optional[jid.JID]:
+        self, client: SatXMPPEntity, jid_: Optional[jid.JID] = None
+    ) -> Optional[jid.JID]:
         """Return first found MUC service of an entity
 
         @param jid_: entity which may have a MUC service, or None for our own server
@@ -497,7 +577,9 @@
             else:
                 # we have a cached value, we return it
                 return muc_service
-        services = await self.host.find_service_entities(client, "conference", "text", jid_)
+        services = await self.host.find_service_entities(
+            client, "conference", "text", jid_
+        )
         for service in services:
             if ".irc." not in service.userhost():
                 # FIXME:
@@ -514,10 +596,7 @@
         return self.get_unique_name(client, muc_service or None).full()
 
     def get_unique_name(
-        self,
-        client: SatXMPPEntity,
-        muc_service: jid.JID|None = None,
-        prefix: str = ""
+        self, client: SatXMPPEntity, muc_service: jid.JID | None = None, prefix: str = ""
     ) -> jid.JID:
         """Return unique name for a room, avoiding collision
 
@@ -546,7 +625,9 @@
 
         @return: unicode
         """
-        return self.host.memory.config_get(CONFIG_SECTION, 'default_muc', default_conf['default_muc'])
+        return self.host.memory.config_get(
+            CONFIG_SECTION, "default_muc", default_conf["default_muc"]
+        )
 
     def _bridge_join_eb(self, failure_, client):
         failure_.trap(AlreadyJoined)
@@ -564,19 +645,23 @@
             try:
                 room_jid = jid.JID(room_jid_s)
             except (RuntimeError, jid.InvalidFormat, AttributeError):
-                return defer.fail(jid.InvalidFormat(_("Invalid room identifier: {room_id}'. Please give a room short or full identifier like 'room' or 'room@{muc_service}'.").format(
-                    room_id=room_jid_s,
-                    muc_service=str(muc_service))))
+                return defer.fail(
+                    jid.InvalidFormat(
+                        _(
+                            "Invalid room identifier: {room_id}'. Please give a room short or full identifier like 'room' or 'room@{muc_service}'."
+                        ).format(room_id=room_jid_s, muc_service=str(muc_service))
+                    )
+                )
             if not room_jid.user:
                 room_jid.user, room_jid.host = room_jid.host, muc_service
         else:
             room_jid = self.get_unique_name(client)
         # TODO: error management + signal in bridge
-        d = defer.ensureDeferred(
-            self.join(client, room_jid, nick, options or None)
+        d = defer.ensureDeferred(self.join(client, room_jid, nick, options or None))
+
+        d.addCallback(
+            lambda room: [False] + self._get_room_joined_args(room, client.profile)
         )
-
-        d.addCallback(lambda room: [False] + self._get_room_joined_args(room, client.profile))
         d.addErrback(self._bridge_join_eb, client)
         return d
 
@@ -585,7 +670,7 @@
         client: SatXMPPEntity,
         room_jid: jid.JID,
         nick: Optional[str] = None,
-        options: Optional[dict] = None
+        options: Optional[dict] = None,
     ) -> Optional[muc.Room]:
         if not nick:
             nick = client.jid.user
@@ -593,11 +678,17 @@
             options = {}
         if room_jid in client._muc_client.joined_rooms:
             room = client._muc_client.joined_rooms[room_jid]
-            log.info(_('{profile} is already in room {room_jid}').format(
-                profile=client.profile, room_jid = room_jid.userhost()))
+            log.info(
+                _("{profile} is already in room {room_jid}").format(
+                    profile=client.profile, room_jid=room_jid.userhost()
+                )
+            )
             raise AlreadyJoined(room)
-        log.info(_("[{profile}] is joining room {room} with nick {nick}").format(
-            profile=client.profile, room=room_jid.userhost(), nick=nick))
+        log.info(
+            _("[{profile}] is joining room {room} with nick {nick}").format(
+                profile=client.profile, room=room_jid.userhost(), nick=nick
+            )
+        )
         self.host.bridge.muc_room_prepare_join(room_jid.userhost(), client.profile)
 
         password = options.get("password")
@@ -611,9 +702,7 @@
         else:
             room.on_joined_callbacks = []
             room.on_left_callbacks = []
-            await defer.ensureDeferred(
-                self._join_cb(room, client, room_jid, nick)
-            )
+            await defer.ensureDeferred(self._join_cb(room, client, room_jid, nick))
         return room
 
     def pop_rooms(self, client):
@@ -669,7 +758,7 @@
         if options is None:
             options = {}
         self.check_room_joined(client, room_jid)
-        return client._muc_client.kick(room_jid, nick, reason=options.get('reason', None))
+        return client._muc_client.kick(room_jid, nick, reason=options.get("reason", None))
 
     def ban(self, client, entity_jid, room_jid, options=None):
         """Ban an entity from the room
@@ -683,7 +772,9 @@
             options = {}
         assert not entity_jid.resource
         assert not room_jid.resource
-        return client._muc_client.ban(room_jid, entity_jid, reason=options.get('reason', None))
+        return client._muc_client.ban(
+            room_jid, entity_jid, reason=options.get("reason", None)
+        )
 
     def affiliate(self, client, entity_jid, room_jid, options):
         """Change the affiliation of an entity
@@ -695,9 +786,11 @@
         self.check_room_joined(client, room_jid)
         assert not entity_jid.resource
         assert not room_jid.resource
-        assert 'affiliation' in options
+        assert "affiliation" in options
         # TODO: handles reason and nick
-        return client._muc_client.modifyAffiliationList(room_jid, [entity_jid], options['affiliation'])
+        return client._muc_client.modifyAffiliationList(
+            room_jid, [entity_jid], options["affiliation"]
+        )
 
     # Text commands #
 
@@ -733,9 +826,7 @@
                 muc_service = client.muc_service or ""
                 nick = client.jid.user
             room_jid = self.text_cmds.get_room_jid(room_raw, muc_service)
-            defer.ensureDeferred(
-                self.join(client, room_jid, nick, {})
-            )
+            defer.ensureDeferred(self.join(client, room_jid, nick, {}))
 
         return False
 
@@ -778,18 +869,19 @@
             self.text_cmds.feed_back(client, feedback, mess_data)
             return False
 
-        reason = ' '.join(options[1:]) if len(options) > 1 else None
+        reason = " ".join(options[1:]) if len(options) > 1 else None
 
         d = self.kick(client, nick, mess_data["to"], {"reason": reason})
 
         def cb(__):
-            feedback_msg = _('You have kicked {}').format(nick)
+            feedback_msg = _("You have kicked {}").format(nick)
             if reason is not None:
-                feedback_msg += _(' for the following reason: {reason}').format(
+                feedback_msg += _(" for the following reason: {reason}").format(
                     reason=reason
                 )
             self.text_cmds.feed_back(client, feedback_msg, mess_data)
             return True
+
         d.addCallback(cb)
         return d
 
@@ -804,28 +896,34 @@
         try:
             jid_s = options[0]
             entity_jid = jid.JID(jid_s).userhostJID()
-            assert(entity_jid.user)
-            assert(entity_jid.host)
-        except (RuntimeError, jid.InvalidFormat, AttributeError, IndexError,
-                AssertionError):
+            assert entity_jid.user
+            assert entity_jid.host
+        except (
+            RuntimeError,
+            jid.InvalidFormat,
+            AttributeError,
+            IndexError,
+            AssertionError,
+        ):
             feedback = _(
                 "You must provide a valid JID to ban, like in '/ban contact@example.net'"
             )
             self.text_cmds.feed_back(client, feedback, mess_data)
             return False
 
-        reason = ' '.join(options[1:]) if len(options) > 1 else None
+        reason = " ".join(options[1:]) if len(options) > 1 else None
 
         d = self.ban(client, entity_jid, mess_data["to"], {"reason": reason})
 
         def cb(__):
-            feedback_msg = _('You have banned {}').format(entity_jid)
+            feedback_msg = _("You have banned {}").format(entity_jid)
             if reason is not None:
-                feedback_msg += _(' for the following reason: {reason}').format(
+                feedback_msg += _(" for the following reason: {reason}").format(
                     reason=reason
                 )
             self.text_cmds.feed_back(client, feedback_msg, mess_data)
             return True
+
         d.addCallback(cb)
         return d
 
@@ -844,26 +942,40 @@
         try:
             jid_s = options[0]
             entity_jid = jid.JID(jid_s).userhostJID()
-            assert(entity_jid.user)
-            assert(entity_jid.host)
-        except (RuntimeError, jid.InvalidFormat, AttributeError, IndexError, AssertionError):
-            feedback = _("You must provide a valid JID to affiliate, like in '/affiliate contact@example.net member'")
+            assert entity_jid.user
+            assert entity_jid.host
+        except (
+            RuntimeError,
+            jid.InvalidFormat,
+            AttributeError,
+            IndexError,
+            AssertionError,
+        ):
+            feedback = _(
+                "You must provide a valid JID to affiliate, like in '/affiliate contact@example.net member'"
+            )
             self.text_cmds.feed_back(client, feedback, mess_data)
             return False
 
-        affiliation = options[1] if len(options) > 1 else 'none'
+        affiliation = options[1] if len(options) > 1 else "none"
         if affiliation not in AFFILIATIONS:
-            feedback = _("You must provide a valid affiliation: %s") % ' '.join(AFFILIATIONS)
+            feedback = _("You must provide a valid affiliation: %s") % " ".join(
+                AFFILIATIONS
+            )
             self.text_cmds.feed_back(client, feedback, mess_data)
             return False
 
-        d = self.affiliate(client, entity_jid, mess_data["to"], {'affiliation': affiliation})
+        d = self.affiliate(
+            client, entity_jid, mess_data["to"], {"affiliation": affiliation}
+        )
 
         def cb(__):
-            feedback_msg = _('New affiliation for {entity}: {affiliation}').format(
-                entity=entity_jid, affiliation=affiliation)
+            feedback_msg = _("New affiliation for {entity}: {affiliation}").format(
+                entity=entity_jid, affiliation=affiliation
+            )
             self.text_cmds.feed_back(client, feedback_msg, mess_data)
             return True
+
         d.addCallback(cb)
         return d
 
@@ -901,14 +1013,15 @@
         try:
             service = jid.JID(unparsed)
         except RuntimeError:
-            if mess_data['type'] == C.MESS_TYPE_GROUPCHAT:
+            if mess_data["type"] == C.MESS_TYPE_GROUPCHAT:
                 room_jid = mess_data["to"]
                 service = jid.JID(room_jid.host)
             elif client.muc_service is not None:
                 service = client.muc_service
             else:
                 msg = D_("No known default MUC service {unparsed}").format(
-                    unparsed=unparsed)
+                    unparsed=unparsed
+                )
                 self.text_cmds.feed_back(client, msg, mess_data)
                 return False
         except jid.InvalidFormat:
@@ -921,21 +1034,23 @@
         return False
 
     def _whois(self, client, whois_msg, mess_data, target_jid):
-        """ Add MUC user information to whois """
-        if mess_data['type'] != "groupchat":
+        """Add MUC user information to whois"""
+        if mess_data["type"] != "groupchat":
             return
         if target_jid.userhostJID() not in client._muc_client.joined_rooms:
             log.warning(_("This room has not been joined"))
             return
         if not target_jid.resource:
             return
-        user = client._muc_client.joined_rooms[target_jid.userhostJID()].getUser(target_jid.resource)
+        user = client._muc_client.joined_rooms[target_jid.userhostJID()].getUser(
+            target_jid.resource
+        )
         whois_msg.append(_("Nickname: %s") % user.nick)
         if user.entity:
             whois_msg.append(_("Entity: %s") % user.entity)
-        if user.affiliation != 'none':
+        if user.affiliation != "none":
             whois_msg.append(_("Affiliation: %s") % user.affiliation)
-        if user.role != 'none':
+        if user.role != "none":
             whois_msg.append(_("Role: %s") % user.role)
         if user.status:
             whois_msg.append(_("Status: %s") % user.status)
@@ -948,7 +1063,7 @@
         muc_client = client._muc_client
         for room_jid, room in muc_client.joined_rooms.items():
             elt = xml_tools.element_copy(presence_elt)
-            elt['to'] = room_jid.userhost() + '/' + room.nick
+            elt["to"] = room_jid.userhost() + "/" + room.nick
             client.presence.send(elt)
         return True
 
@@ -968,7 +1083,7 @@
         self.plugin_parent = plugin_parent
         muc.MUCClient.__init__(self)
         self._changing_nicks = set()  # used to keep trace of who is changing nick,
-                                      # and to discard userJoinedRoom signal in this case
+        # and to discard userJoinedRoom signal in this case
         print("init SatMUCClient OK")
 
     @property
@@ -1000,17 +1115,23 @@
         if new_state_idx == -1:
             raise exceptions.InternalError("unknown room state")
         if new_state_idx < 1:
-            raise exceptions.InternalError("unexpected new room state ({room}): {state}".format(
-                room=room.userhost(),
-                state=new_state))
-        expected_state = ROOM_STATES[new_state_idx-1]
+            raise exceptions.InternalError(
+                "unexpected new room state ({room}): {state}".format(
+                    room=room.userhost(), state=new_state
+                )
+            )
+        expected_state = ROOM_STATES[new_state_idx - 1]
         if room.state != expected_state:
-            log.error(_(
-                "room {room} is not in expected state: room is in state {current_state} "
-                "while we were expecting {expected_state}").format(
-                room=room.roomJID.userhost(),
-                current_state=room.state,
-                expected_state=expected_state))
+            log.error(
+                _(
+                    "room {room} is not in expected state: room is in state {current_state} "
+                    "while we were expecting {expected_state}"
+                ).format(
+                    room=room.roomJID.userhost(),
+                    current_state=room.state,
+                    expected_state=expected_state,
+                )
+            )
         room.state = new_state
 
     def _addRoom(self, room):
@@ -1026,11 +1147,7 @@
         room._cache_presence = {}
 
     async def _join_legacy(
-        self,
-        client: SatXMPPEntity,
-        room_jid: jid.JID,
-        nick: str,
-        password: Optional[str]
+        self, client: SatXMPPEntity, room_jid: jid.JID, nick: str, password: Optional[str]
     ) -> muc.Room:
         """Join room an retrieve history with legacy method"""
         mess_data_list = await self.host.memory.history_get(
@@ -1038,7 +1155,7 @@
             client.jid.userhostJID(),
             limit=1,
             between=True,
-            profile=client.profile
+            profile=client.profile,
         )
         if mess_data_list:
             timestamp = mess_data_list[0][1]
@@ -1049,7 +1166,8 @@
             seconds = None
 
         room = await super(LiberviaMUCClient, self).join(
-            room_jid, nick, muc.HistoryOptions(seconds=seconds), password)
+            room_jid, nick, muc.HistoryOptions(seconds=seconds), password
+        )
         # used to send bridge signal once backlog are written in history
         room._history_type = HISTORY_LEGACY
         room._history_d = defer.Deferred()
@@ -1057,10 +1175,7 @@
         return room
 
     async def _get_mam_history(
-        self,
-        client: SatXMPPEntity,
-        room: muc.Room,
-        room_jid: jid.JID
+        self, client: SatXMPPEntity, room: muc.Room, room_jid: jid.JID
     ) -> None:
         """Retrieve history for rooms handling MAM"""
         history_d = room._history_d = defer.Deferred()
@@ -1073,39 +1188,39 @@
             None,
             limit=1,
             between=False,
-            filters={
-                'types': C.MESS_TYPE_GROUPCHAT,
-                'last_stanza_id': True},
-            profile=client.profile)
+            filters={"types": C.MESS_TYPE_GROUPCHAT, "last_stanza_id": True},
+            profile=client.profile,
+        )
         if last_mess:
-            stanza_id = last_mess[0][-1]['stanza_id']
+            stanza_id = last_mess[0][-1]["stanza_id"]
             rsm_req = rsm.RSMRequest(max_=20, after=stanza_id)
-            no_loop=False
+            no_loop = False
         else:
-            log.info("We have no MAM archive for room {room_jid}.".format(
-                room_jid=room_jid))
+            log.info(
+                "We have no MAM archive for room {room_jid}.".format(room_jid=room_jid)
+            )
             # we don't want the whole archive if we have no archive yet
             # as it can be huge
-            rsm_req = rsm.RSMRequest(max_=50, before='')
-            no_loop=True
+            rsm_req = rsm.RSMRequest(max_=50, before="")
+            no_loop = True
 
         mam_req = mam.MAMRequest(rsm_=rsm_req)
         complete = False
         count = 0
         while not complete:
             try:
-                mam_data = await self._mam.get_archives(client, mam_req,
-                                                       service=room_jid)
+                mam_data = await self._mam.get_archives(client, mam_req, service=room_jid)
             except xmpp_error.StanzaError as e:
-                if last_mess and e.condition == 'item-not-found':
+                if last_mess and e.condition == "item-not-found":
                     log.warning(
                         f"requested item (with id {stanza_id!r}) can't be found in "
                         f"history of {room_jid}, history has probably been purged on "
-                        f"server.")
+                        f"server."
+                    )
                     # we get last items like for a new room
-                    rsm_req = rsm.RSMRequest(max_=50, before='')
+                    rsm_req = rsm.RSMRequest(max_=50, before="")
                     mam_req = mam.MAMRequest(rsm_=rsm_req)
-                    no_loop=True
+                    no_loop = True
                     continue
                 else:
                     raise e
@@ -1122,47 +1237,56 @@
                 for mess_elt in elt_list:
                     try:
                         fwd_message_elt = self._mam.get_message_from_result(
-                            client, mess_elt, mam_req, service=room_jid)
+                            client, mess_elt, mam_req, service=room_jid
+                        )
                     except exceptions.DataError:
                         continue
                     if fwd_message_elt.getAttribute("to"):
                         log.warning(
                             'Forwarded message element has a "to" attribute while it is '
-                            'forbidden by specifications')
+                            "forbidden by specifications"
+                        )
                     fwd_message_elt["to"] = client.jid.full()
                     client.messageProt.onMessage(fwd_message_elt)
                     client._muc_client._onGroupChat(fwd_message_elt)
 
         if not count:
-            log.info(_("No message received while offline in {room_jid}".format(
-                room_jid=room_jid)))
+            log.info(
+                _(
+                    "No message received while offline in {room_jid}".format(
+                        room_jid=room_jid
+                    )
+                )
+            )
         else:
             log.info(
-                _("We have received {num_mess} message(s) in {room_jid} while "
-                  "offline.")
-                .format(num_mess=count, room_jid=room_jid))
+                _(
+                    "We have received {num_mess} message(s) in {room_jid} while "
+                    "offline."
+                ).format(num_mess=count, room_jid=room_jid)
+            )
 
         # for legacy history, the following steps are done in receivedSubject but for MAM
         # the order is different (we have to join then get MAM archive, so subject
         # is received before archive), so we change state and add the callbacks here.
         self.change_room_state(room, ROOM_STATE_LIVE)
-        history_d.addCallbacks(self._history_cb, self._history_eb, [room],
-                                     errbackArgs=[room])
+        history_d.addCallbacks(
+            self._history_cb, self._history_eb, [room], errbackArgs=[room]
+        )
 
         # we wait for all callbacks to be processed
         await history_d
 
     async def _join_mam(
-        self,
-        client: SatXMPPEntity,
-        room_jid: jid.JID,
-        nick: str,
-        password: Optional[str]
+        self, client: SatXMPPEntity, room_jid: jid.JID, nick: str, password: Optional[str]
     ) -> muc.Room:
         """Join room and retrieve history using MAM"""
         room = await super(LiberviaMUCClient, self).join(
             # we don't want any history from room as we'll get it with MAM
-            room_jid, nick, muc.HistoryOptions(maxStanzas=0), password=password
+            room_jid,
+            nick,
+            muc.HistoryOptions(maxStanzas=0),
+            password=password,
         )
         room._history_type = HISTORY_MAM
         # MAM history retrieval can be very long, and doesn't need to be sync, so we don't
@@ -1197,8 +1321,11 @@
         if user is None:
             nick = presence.sender.resource
             if not nick:
-                log.warning(_("missing nick in presence: {xml}").format(
-                    xml = presence.toElement().toXml()))
+                log.warning(
+                    _("missing nick in presence: {xml}").format(
+                        xml=presence.toElement().toXml()
+                    )
+                )
                 return
             user = muc.User(nick, presence.entity)
 
@@ -1207,8 +1334,10 @@
         #      like 110 (REALJID_PUBLIC) after first <presence/> received
         #      so we keep only the initial <presence> (with SELF_PRESENCE),
         #      thus we check if attribute already exists
-        if (not hasattr(room, 'statuses')
-            and muc.STATUS_CODE.SELF_PRESENCE in presence.mucStatuses):
+        if (
+            not hasattr(room, "statuses")
+            and muc.STATUS_CODE.SELF_PRESENCE in presence.mucStatuses
+        ):
             room.statuses = presence.mucStatuses
 
         # Update user data
@@ -1250,8 +1379,11 @@
             # we have received our own nick,
             # this mean that the full room roster was received
             self.change_room_state(room, ROOM_STATE_SELF_PRESENCE)
-            log.debug("room {room} joined with nick {nick}".format(
-                room=room.occupantJID.userhost(), nick=user.nick))
+            log.debug(
+                "room {room} joined with nick {nick}".format(
+                    room=room.occupantJID.userhost(), nick=user.nick
+                )
+            )
             # we set type so we don't have to use a deferred
             # with disco to check entity type
             self.host.memory.update_entity_data(
@@ -1262,9 +1394,9 @@
                 "Received user presence data in a room before its initialisation "
                 "(current state: {state}),"
                 "this is not standard! Ignoring it: {room} ({nick})".format(
-                    state=room.state,
-                    room=room.roomJID.userhost(),
-                    nick=user.nick))
+                    state=room.state, room=room.roomJID.userhost(), nick=user.nick
+                )
+            )
             return
         else:
             if not room.fully_joined.called:
@@ -1275,24 +1407,29 @@
                 self._changing_nicks.remove(user.nick)
             except KeyError:
                 # this is a new user
-                log.debug(_("user {nick} has joined room {room_id}").format(
-                    nick=user.nick, room_id=room.occupantJID.userhost()))
+                log.debug(
+                    _("user {nick} has joined room {room_id}").format(
+                        nick=user.nick, room_id=room.occupantJID.userhost()
+                    )
+                )
                 if not self.host.trigger.point(
-                        "MUC user joined", room, user, self.client.profile):
+                    "MUC user joined", room, user, self.client.profile
+                ):
                     return
 
-                extra = {'info_type': ROOM_USER_JOINED,
-                         'user_affiliation': user.affiliation,
-                         'user_role': user.role,
-                         'user_nick': user.nick
-                         }
+                extra = {
+                    "info_type": ROOM_USER_JOINED,
+                    "user_affiliation": user.affiliation,
+                    "user_role": user.role,
+                    "user_nick": user.nick,
+                }
                 if user.entity is not None:
-                    extra['user_entity'] = user.entity.full()
+                    extra["user_entity"] = user.entity.full()
                 mess_data = {  # dict is similar to the one used in client.onMessage
                     "from": room.roomJID,
                     "to": self.client.jid,
                     "uid": str(uuid.uuid4()),
-                    "message": {'': D_("=> {} has joined the room").format(user.nick)},
+                    "message": {"": D_("=> {} has joined the room").format(user.nick)},
                     "subject": {},
                     "type": C.MESS_TYPE_INFO,
                     "extra": extra,
@@ -1304,42 +1441,52 @@
                 # self.client.message_add_to_history(mess_data)
                 self.client.message_send_to_bridge(mess_data)
 
-
     def userLeftRoom(self, room, user):
         if not self.host.trigger.point("MUC user left", room, user, self.client.profile):
             return
         if user.nick == room.nick:
             # we left the room
             room_jid_s = room.roomJID.userhost()
-            log.info(_("Room ({room}) left ({profile})").format(
-                room = room_jid_s, profile = self.client.profile))
-            self.host.memory.del_entity_cache(room.roomJID, profile_key=self.client.profile)
+            log.info(
+                _("Room ({room}) left ({profile})").format(
+                    room=room_jid_s, profile=self.client.profile
+                )
+            )
+            self.host.memory.del_entity_cache(
+                room.roomJID, profile_key=self.client.profile
+            )
             self.host.bridge.muc_room_left(room.roomJID.userhost(), self.client.profile)
         elif room.state != ROOM_STATE_LIVE:
-            log.warning("Received user presence data in a room before its initialisation (current state: {state}),"
+            log.warning(
+                "Received user presence data in a room before its initialisation (current state: {state}),"
                 "this is not standard! Ignoring it: {room} ({nick})".format(
-                state=room.state,
-                room=room.roomJID.userhost(),
-                nick=user.nick))
+                    state=room.state, room=room.roomJID.userhost(), nick=user.nick
+                )
+            )
             return
         else:
             if not room.fully_joined.called:
                 return
-            log.debug(_("user {nick} left room {room_id}").format(nick=user.nick, room_id=room.occupantJID.userhost()))
+            log.debug(
+                _("user {nick} left room {room_id}").format(
+                    nick=user.nick, room_id=room.occupantJID.userhost()
+                )
+            )
             for cb in room.on_left_callbacks:
                 defer.ensureDeferred(cb(room, user))
-            extra = {'info_type': ROOM_USER_LEFT,
-                     'user_affiliation': user.affiliation,
-                     'user_role': user.role,
-                     'user_nick': user.nick
-                     }
+            extra = {
+                "info_type": ROOM_USER_LEFT,
+                "user_affiliation": user.affiliation,
+                "user_role": user.role,
+                "user_nick": user.nick,
+            }
             if user.entity is not None:
-                extra['user_entity'] = user.entity.full()
+                extra["user_entity"] = user.entity.full()
             mess_data = {  # dict is similar to the one used in client.onMessage
                 "from": room.roomJID,
                 "to": self.client.jid,
                 "uid": str(uuid.uuid4()),
-                "message": {'': D_("<= {} has left the room").format(user.nick)},
+                "message": {"": D_("<= {} has left the room").format(user.nick)},
                 "subject": {},
                 "type": C.MESS_TYPE_INFO,
                 "extra": extra,
@@ -1350,7 +1497,9 @@
             self.client.message_send_to_bridge(mess_data)
 
     def user_changed_nick(self, room, user, new_nick):
-        self.host.bridge.muc_room_user_changed_nick(room.roomJID.userhost(), user.nick, new_nick, self.client.profile)
+        self.host.bridge.muc_room_user_changed_nick(
+            room.roomJID.userhost(), user.nick, new_nick, self.client.profile
+        )
 
     def userUpdatedStatus(self, room, user, show, status):
         entity = jid.JID(tuple=(room.roomJID.user, room.roomJID.host, user.nick))
@@ -1365,16 +1514,20 @@
                 "user": user,
                 "show": show,
                 "status": status,
-                }
+            }
             return
-        statuses = {C.PRESENCE_STATUSES_DEFAULT: status or ''}
+        statuses = {C.PRESENCE_STATUSES_DEFAULT: status or ""}
         self.host.bridge.presence_update(
-            entity.full(), show or '', 0, statuses, self.client.profile)
+            entity.full(), show or "", 0, statuses, self.client.profile
+        )
 
     ## messages ##
 
     def receivedGroupChat(self, room, user, body):
-        log.debug('receivedGroupChat: room=%s user=%s body=%s' % (room.roomJID.full(), user, body))
+        log.debug(
+            "receivedGroupChat: room=%s user=%s body=%s"
+            % (room.roomJID.full(), user, body)
+        )
 
     ## subject ##
 
@@ -1390,8 +1543,11 @@
         room, user = self._getRoomUser(message)
 
         if room is None:
-            log.warning("No room found for message: {message}"
-                        .format(message=message.toElement().toXml()))
+            log.warning(
+                "No room found for message: {message}".format(
+                    message=message.toElement().toXml()
+                )
+            )
             return
 
         if message.subject is not None:
@@ -1424,7 +1580,7 @@
         for elem in cache:
             self.client.xmlstream.dispatch(elem)
         for presence_data in cache_presence.values():
-            if not presence_data['show'] and not presence_data['status']:
+            if not presence_data["show"] and not presence_data["status"]:
                 # occupants are already sent in muc_room_joined, so if we don't have
                 # extra information like show or statuses, we can discard the signal
                 continue
@@ -1442,18 +1598,26 @@
         if room.state != ROOM_STATE_LIVE:
             if room._history_type == HISTORY_LEGACY:
                 self.change_room_state(room, ROOM_STATE_LIVE)
-                room._history_d.addCallbacks(self._history_cb, self._history_eb, [room], errbackArgs=[room])
+                room._history_d.addCallbacks(
+                    self._history_cb, self._history_eb, [room], errbackArgs=[room]
+                )
         else:
             # the subject has been changed
-            log.debug(_("New subject for room ({room_id}): {subject}").format(room_id = room.roomJID.full(), subject = subject))
-            self.host.bridge.muc_room_new_subject(room.roomJID.userhost(), subject, self.client.profile)
+            log.debug(
+                _("New subject for room ({room_id}): {subject}").format(
+                    room_id=room.roomJID.full(), subject=subject
+                )
+            )
+            self.host.bridge.muc_room_new_subject(
+                room.roomJID.userhost(), subject, self.client.profile
+            )
 
     ## disco ##
 
-    def getDiscoInfo(self, requestor, target, nodeIdentifier=''):
+    def getDiscoInfo(self, requestor, target, nodeIdentifier=""):
         return [disco.DiscoFeature(NS_MUC)]
 
-    def getDiscoItems(self, requestor, target, nodeIdentifier=''):
+    def getDiscoItems(self, requestor, target, nodeIdentifier=""):
         # TODO: manage room queries ? Bad for privacy, must be disabled by default
         #       see XEP-0045 § 6.7
         return []
--- a/libervia/backend/plugins/plugin_xep_0047.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0047.py	Wed Jun 19 18:44:57 2024 +0200
@@ -155,7 +155,7 @@
         return session_data
 
     def _on_ibb_open(self, iq_elt, client):
-        """"Called when an IBB <open> element is received
+        """ "Called when an IBB <open> element is received
 
         @param iq_elt(domish.Element): the whole <iq> stanza
         """
@@ -192,13 +192,15 @@
         # FIXME: if we never get the events, the observers stay.
         #        would be better to have generic observer and check id once triggered
         client.xmlstream.addObserver(event_data, observer_cb, client=client)
-        client.xmlstream.addOnetimeObserver(event_close, self._on_ibb_close, client=client)
+        client.xmlstream.addOnetimeObserver(
+            event_close, self._on_ibb_close, client=client
+        )
         # finally, we send the accept stanza
         iq_result_elt = xmlstream.toResponse(iq_elt, "result")
         client.send(iq_result_elt)
 
     def _on_ibb_close(self, iq_elt, client):
-        """"Called when an IBB <close> element is received
+        """ "Called when an IBB <close> element is received
 
         @param iq_elt(domish.Element): the whole <iq> stanza
         """
@@ -287,7 +289,9 @@
             self._kill_session(sid, client, error_condition)
         client.send(iq_elt)
 
-    def start_stream(self, client, stream_object, local_jid, to_jid, sid, block_size=None):
+    def start_stream(
+        self, client, stream_object, local_jid, to_jid, sid, block_size=None
+    ):
         """Launch the stream workflow
 
         @param stream_object(ifaces.IStreamProducer): stream object to send
@@ -340,7 +344,9 @@
             data_elt.addContent(base64.b64encode(buffer_).decode())
             args = [session_data, client]
             d = next_iq_elt.send()
-            d.addCallbacks(self._iq_data_stream_cb, self._iq_data_stream_eb, args, None, args)
+            d.addCallbacks(
+                self._iq_data_stream_cb, self._iq_data_stream_eb, args, None, args
+            )
         else:
             self.terminate_stream(session_data, client)
 
--- a/libervia/backend/plugins/plugin_xep_0048.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0048.py	Wed Jun 19 18:44:57 2024 +0200
@@ -59,7 +59,9 @@
         log.info(_("Bookmarks plugin initialization"))
         self.host = host
         # self.__menu_id = host.register_callback(self._bookmarks_menu, with_data=True)
-        self.__bm_save_id = host.register_callback(self._bookmarks_save_cb, with_data=True)
+        self.__bm_save_id = host.register_callback(
+            self._bookmarks_save_cb, with_data=True
+        )
         host.import_menu(
             (D_("Groups"), D_("Bookmarks")),
             self._bookmarks_menu,
@@ -107,18 +109,15 @@
             NS_BOOKMARKS, client.profile
         )
         await local.load()
-        local = cast(dict[str, dict|None]|None, local)
+        local = cast(dict[str, dict | None] | None, local)
         if not local:
-            local = {
-                XEP_0048.MUC_TYPE: {},
-                XEP_0048.URL_TYPE: {}
-            }
+            local = {XEP_0048.MUC_TYPE: {}, XEP_0048.URL_TYPE: {}}
         private = await self._get_server_bookmarks("private", client.profile)
         pubsub = client.bookmarks_pubsub = None
 
         for bookmarks in (local, private, pubsub):
             if bookmarks is not None:
-                for (room_jid, data) in list(bookmarks[XEP_0048.MUC_TYPE].items()):
+                for room_jid, data in list(bookmarks[XEP_0048.MUC_TYPE].items()):
                     if data.get("autojoin", "false") == "true":
                         nick = data.get("nick", client.jid.user)
                         defer.ensureDeferred(
@@ -201,9 +200,7 @@
                 if conference_elt.hasAttribute(attr):
                     data[attr] = conference_elt[attr]
             try:
-                data["nick"] = str(
-                    next(conference_elt.elements(NS_BOOKMARKS, "nick"))
-                )
+                data["nick"] = str(next(conference_elt.elements(NS_BOOKMARKS, "nick")))
             except StopIteration:
                 pass
             # TODO: manage password (need to be secured, see XEP-0049 §4)
@@ -277,7 +274,7 @@
         return d
 
     def _bookmarks_menu(self, data, profile):
-        """ XMLUI activated by menu: return Gateways UI
+        """XMLUI activated by menu: return Gateways UI
         @param profile: %(doc_profile)s
 
         """
@@ -296,7 +293,7 @@
         ):
             if bookmarks is None:
                 continue
-            for (room_jid, data) in sorted(
+            for room_jid, data in sorted(
                 list(bookmarks[XEP_0048.MUC_TYPE].items()),
                 key=lambda item: item[1].get("name", item[0].user),
             ):
@@ -359,7 +356,9 @@
         @param profile_key: %(doc_profile_key)s
         """
         assert storage_type in ("auto", "pubsub", "private", "local")
-        if type_ == XEP_0048.URL_TYPE and {"autojoin", "nick"}.intersection(list(data.keys())):
+        if type_ == XEP_0048.URL_TYPE and {"autojoin", "nick"}.intersection(
+            list(data.keys())
+        ):
             raise ValueError("autojoin or nick can't be used with URLs")
         client = self.host.get_client(profile_key)
         if storage_type == "auto":
@@ -446,9 +445,7 @@
             try:
                 data = bookmarks_ori[type_]
             except KeyError:
-                log.warning(
-                    f"{type_!r} missing in {storage_location} storage."
-                )
+                log.warning(f"{type_!r} missing in {storage_location} storage.")
                 data = bookmarks_ori[type_] = {}
             for bookmark in data:
                 if type_ == XEP_0048.MUC_TYPE:
--- a/libervia/backend/plugins/plugin_xep_0054.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0054.py	Wed Jun 19 18:44:57 2024 +0200
@@ -78,9 +78,9 @@
     def __init__(self, host):
         log.info(_("Plugin XEP_0054 initialization"))
         self.host = host
-        self._i = host.plugins['IDENTITY']
-        self._i.register(IMPORT_NAME, 'avatar', self.get_avatar, self.set_avatar)
-        self._i.register(IMPORT_NAME, 'nicknames', self.get_nicknames, self.set_nicknames)
+        self._i = host.plugins["IDENTITY"]
+        self._i.register(IMPORT_NAME, "avatar", self.get_avatar, self.set_avatar)
+        self._i.register(IMPORT_NAME, "nicknames", self.get_nicknames, self.set_nicknames)
         host.trigger.add("presence_available", self.presence_available_trigger)
 
     def get_handler(self, client):
@@ -90,9 +90,7 @@
         try:
             avatar_hash = client._xep_0054_avatar_hashes[client.jid.userhost()]
         except KeyError:
-            log.info(
-                _("No avatar in cache for {profile}")
-                .format(profile=client.profile))
+            log.info(_("No avatar in cache for {profile}").format(profile=client.profile))
             return True
         x_elt = domish.Element((NS_VCARD_UPDATE, "x"))
         x_elt.addElement("photo", content=avatar_hash)
@@ -101,7 +99,8 @@
 
     async def profile_connecting(self, client):
         client._xep_0054_avatar_hashes = persistent.PersistentDict(
-            NS_VCARD, client.profile)
+            NS_VCARD, client.profile
+        )
         await client._xep_0054_avatar_hashes.load()
 
     def save_photo(self, client, photo_elt, entity):
@@ -136,7 +135,8 @@
         if mime_type is None:
             log.debug(
                 f"no media type found specified for {entity}'s avatar, trying to "
-                f"guess")
+                f"guess"
+            )
 
             try:
                 mime_type = image.guess_type(io.BytesIO(decoded))
@@ -168,11 +168,7 @@
             elif elem.name == "NICKNAME":
                 nickname = vcard_dict["nickname"] = str(elem)
                 await self._i.update(
-                    client,
-                    IMPORT_NAME,
-                    "nicknames",
-                    [nickname],
-                    entity_jid
+                    client, IMPORT_NAME, "nicknames", [nickname], entity_jid
                 )
             elif elem.name == "URL":
                 vcard_dict["website"] = str(elem)
@@ -196,7 +192,8 @@
                     vcard_dict["avatar"] = avatar_hash
                 if avatar_hash is not None:
                     await client._xep_0054_avatar_hashes.aset(
-                        entity_jid.full(), avatar_hash)
+                        entity_jid.full(), avatar_hash
+                    )
 
                     if avatar_hash:
                         avatar_cache = self.host.common_cache.get_metadata(avatar_hash)
@@ -205,18 +202,21 @@
                             IMPORT_NAME,
                             "avatar",
                             {
-                                'path': avatar_cache['path'],
-                                'filename': avatar_cache['filename'],
-                                'media_type': avatar_cache['mime_type'],
-                                'cache_uid': avatar_hash
+                                "path": avatar_cache["path"],
+                                "filename": avatar_cache["filename"],
+                                "media_type": avatar_cache["mime_type"],
+                                "cache_uid": avatar_hash,
                             },
-                            entity_jid
+                            entity_jid,
                         )
                     else:
                         await self._i.update(
-                            client, IMPORT_NAME, "avatar", None, entity_jid)
+                            client, IMPORT_NAME, "avatar", None, entity_jid
+                        )
             else:
-                log.debug("FIXME: [{}] VCard_elt tag is not managed yet".format(elem.name))
+                log.debug(
+                    "FIXME: [{}] VCard_elt tag is not managed yet".format(elem.name)
+                )
 
         return vcard_dict
 
@@ -234,9 +234,11 @@
         try:
             return next(iq_ret_elt.elements(NS_VCARD, "vCard"))
         except StopIteration:
-            log.warning(_(
-                "vCard element not found for {entity_jid}: {xml}"
-                ).format(entity_jid=entity_jid, xml=iq_ret_elt.toXml()))
+            log.warning(
+                _("vCard element not found for {entity_jid}: {xml}").format(
+                    entity_jid=entity_jid, xml=iq_ret_elt.toXml()
+                )
+            )
             raise exceptions.DataError(f"no vCard element found for {entity_jid}")
 
     async def update_vcard_elt(self, client, entity_jid, to_replace):
@@ -279,18 +281,18 @@
         except exceptions.DataError:
             self._i.update(client, IMPORT_NAME, "avatar", None, entity_jid)
         except Exception as e:
-            log.warning(_(
-                "Can't get vCard for {entity_jid}: {e}"
-                ).format(entity_jid=entity_jid, e=e))
+            log.warning(
+                _("Can't get vCard for {entity_jid}: {e}").format(
+                    entity_jid=entity_jid, e=e
+                )
+            )
         else:
             log.debug(_("VCard found"))
             return await self.v_card_2_dict(client, vcard_elt, entity_jid)
 
     async def get_avatar(
-            self,
-            client: SatXMPPEntity,
-            entity_jid: jid.JID
-        ) -> Optional[dict]:
+        self, client: SatXMPPEntity, entity_jid: jid.JID
+    ) -> Optional[dict]:
         """Get avatar data
 
         @param entity: entity to get avatar from
@@ -304,9 +306,10 @@
         try:
             avatar_hash = hashes_cache[entity_jid.full()]
         except KeyError:
-            if 'avatar' in vcard:
+            if "avatar" in vcard:
                 raise exceptions.InternalError(
-                    "No avatar hash while avatar is found in vcard")
+                    "No avatar hash while avatar is found in vcard"
+                )
             return None
 
         if not avatar_hash:
@@ -314,7 +317,8 @@
 
         avatar_cache = self.host.common_cache.get_metadata(avatar_hash)
         return self._i.avatar_build_metadata(
-                avatar_cache['path'], avatar_cache['mime_type'], avatar_hash)
+            avatar_cache["path"], avatar_cache["mime_type"], avatar_hash
+        )
 
     async def set_avatar(self, client, avatar_data, entity):
         """Set avatar of the profile
@@ -323,7 +327,7 @@
             IDENTITY plugin.
         @param entity(jid.JID): entity whose avatar must be changed
         """
-        vcard_elt = await self.update_vcard_elt(client, entity, ['PHOTO'])
+        vcard_elt = await self.update_vcard_elt(client, entity, ["PHOTO"])
 
         iq_elt = client.IQ()
         iq_elt.addChild(vcard_elt)
@@ -345,7 +349,7 @@
         """
         vcard_data = await self.get_card(client, entity)
         try:
-            return [vcard_data['nickname']]
+            return [vcard_data["nickname"]]
         except (KeyError, TypeError):
             return []
 
@@ -357,7 +361,7 @@
         """
         nick = nicknames[0].strip()
 
-        vcard_elt = await self.update_vcard_elt(client, entity, ['NICKNAME'])
+        vcard_elt = await self.update_vcard_elt(client, entity, ["NICKNAME"])
 
         if nick:
             vcard_elt.addElement((NS_VCARD, "NICKNAME"), content=nick)
@@ -390,7 +394,8 @@
         """
         client = self.parent
         entity_jid = self.plugin_parent._i.get_identity_jid(
-            client, jid.JID(presence["from"]))
+            client, jid.JID(presence["from"])
+        )
 
         try:
             x_elt = next(presence.elements(NS_VCARD_UPDATE, "x"))
@@ -434,7 +439,8 @@
 
         if not given_hash:
             await self.plugin_parent._i.update(
-                client, IMPORT_NAME, "avatar", None, entity_jid)
+                client, IMPORT_NAME, "avatar", None, entity_jid
+            )
             # the avatar has been removed, no need to go further
             return
 
@@ -445,19 +451,18 @@
             )
             await self.plugin_parent._i.update(
                 client,
-                IMPORT_NAME, "avatar",
+                IMPORT_NAME,
+                "avatar",
                 {
-                    'path': avatar_cache['path'],
-                    'filename': avatar_cache['filename'],
-                    'media_type': avatar_cache['mime_type'],
-                    'cache_uid': given_hash,
+                    "path": avatar_cache["path"],
+                    "filename": avatar_cache["filename"],
+                    "media_type": avatar_cache["mime_type"],
+                    "cache_uid": given_hash,
                 },
-                entity_jid
+                entity_jid,
             )
         else:
-            log.debug(
-                "New avatar found for [{entity_jid}], requesting vcard"
-            )
+            log.debug("New avatar found for [{entity_jid}], requesting vcard")
             vcard = await self.plugin_parent.get_card(client, entity_jid)
             if vcard is None:
                 log.warning(f"Unexpected empty vCard for {entity_jid}")
--- a/libervia/backend/plugins/plugin_xep_0055.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0055.py	Wed Jun 19 18:44:57 2024 +0200
@@ -61,8 +61,8 @@
 
 FIELD_SINGLE = "field_single"  # single text field for the simple search
 FIELD_CURRENT_SERVICE = (
-    "current_service_jid"
-)  # read-only text field for the advanced search
+    "current_service_jid"  # read-only text field for the advanced search
+)
 
 
 class XEP_0055(object):
@@ -124,7 +124,9 @@
         """
         # check if the user's server offers some search services
         d = self._get_host_services(profile)
-        return d.addCallback(lambda services: self.get_main_ui(services, raw_data, profile))
+        return d.addCallback(
+            lambda services: self.get_main_ui(services, raw_data, profile)
+        )
 
     def get_main_ui(self, services, raw_data, profile):
         """Get the XMLUI for selecting a service and searching the directory.
@@ -198,7 +200,9 @@
         if simple_data:
             log.debug("Simple search with %s on %s" % (simple_data, service_jid))
             sub_cont.parent.set_selected(True)
-            main_ui.change_container(sub_cont.append(xml_tools.VerticalContainer(main_ui)))
+            main_ui.change_container(
+                sub_cont.append(xml_tools.VerticalContainer(main_ui))
+            )
             main_ui.addDivider("dash")
             d = self.search_request(service_jid, simple_data, profile)
             d.addCallbacks(
@@ -310,7 +314,9 @@
         if adv_data:  # display the search results
             log.debug("Advanced search with %s on %s" % (adv_data, service_jid))
             sub_cont.parent.set_selected(True)
-            main_ui.change_container(sub_cont.append(xml_tools.VerticalContainer(main_ui)))
+            main_ui.change_container(
+                sub_cont.append(xml_tools.VerticalContainer(main_ui))
+            )
             main_ui.addDivider("dash")
             d = self.search_request(service_jid, adv_data, profile)
             d.addCallbacks(
--- a/libervia/backend/plugins/plugin_xep_0059.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0059.py	Wed Jun 19 18:44:57 2024 +0200
@@ -61,7 +61,7 @@
         @return (rsm.RSMRequest, None): request with parsed arguments
             or None if no RSM arguments have been found
         """
-        if int(extra.get(RSM_PREFIX + 'max', 0)) < 0:
+        if int(extra.get(RSM_PREFIX + "max", 0)) < 0:
             raise ValueError(_("rsm_max can't be negative"))
 
         rsm_args = {}
@@ -122,10 +122,7 @@
         if rsm_response is None:
             # may happen if result set it empty, or we are at the end
             return None
-        if (
-            rsm_response.count is not None
-            and rsm_response.index is not None
-        ):
+        if rsm_response.count is not None and rsm_response.index is not None:
             next_index = rsm_response.index + rsm_request.max
             if next_index >= rsm_response.count:
                 # we have reached the last page
@@ -140,13 +137,10 @@
 
         if rsm_response.last is None:
             if rsm_response.count:
-                log.warning("Can't do pagination, no \"last\" received")
+                log.warning('Can\'t do pagination, no "last" received')
             return None
 
-        return rsm.RSMRequest(
-            max_=rsm_request.max,
-            after=rsm_response.last
-        )
+        return rsm.RSMRequest(max_=rsm_request.max, after=rsm_response.last)
 
 
 @implementer(iwokkel.IDisco)
--- a/libervia/backend/plugins/plugin_xep_0060.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0060.py	Wed Jun 19 18:44:57 2024 +0200
@@ -70,11 +70,12 @@
 TIMEOUT = 30
 # minimum features that a pubsub service must have to be selectable as default
 DEFAULT_PUBSUB_MIN_FEAT = {
- 'http://jabber.org/protocol/pubsub#persistent-items',
- 'http://jabber.org/protocol/pubsub#publish',
- 'http://jabber.org/protocol/pubsub#retract-items',
+    "http://jabber.org/protocol/pubsub#persistent-items",
+    "http://jabber.org/protocol/pubsub#publish",
+    "http://jabber.org/protocol/pubsub#retract-items",
 }
 
+
 class XEP_0060(object):
     OPT_ACCESS_MODEL = "pubsub#access_model"
     OPT_PERSIST_ITEMS = "pubsub#persist_items"
@@ -109,7 +110,9 @@
         self.host = host
         self._rsm = host.plugins.get("XEP-0059")
         self._mam = host.plugins.get("XEP-0313")
-        self._node_cb = {}  # dictionnary of callbacks for node (key: node, value: list of callbacks)
+        self._node_cb = (
+            {}
+        )  # dictionnary of callbacks for node (key: node, value: list of callbacks)
         self.rt_sessions = sat_defer.RTDeferredSessions()
         host.bridge.add_method(
             "ps_node_create",
@@ -376,9 +379,11 @@
             return {}
         try:
             return {
-                "service": client.pubsub_service.full()
-                if client.pubsub_service is not None
-                else ""
+                "service": (
+                    client.pubsub_service.full()
+                    if client.pubsub_service is not None
+                    else ""
+                )
             }
         except AttributeError:
             if self.host.is_connected(profile):
@@ -423,12 +428,7 @@
 
         return Extra(rsm_request, extra)
 
-    def add_managed_node(
-        self,
-        node: str,
-        priority: int = 0,
-        **kwargs: Callable
-    ):
+    def add_managed_node(self, node: str, priority: int = 0, **kwargs: Callable):
         """Add a handler for a node
 
         @param node: node to monitor
@@ -514,31 +514,40 @@
     #     d.addCallback(lambda subs: [sub.getAttribute('node') for sub in subs if sub.getAttribute('subscription') == filter_])
     #     return d
 
-    def _send_item(self, service, nodeIdentifier, payload, item_id=None, extra_ser="",
-                  profile_key=C.PROF_KEY_NONE):
+    def _send_item(
+        self,
+        service,
+        nodeIdentifier,
+        payload,
+        item_id=None,
+        extra_ser="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile_key)
         service = None if not service else jid.JID(service)
         payload = xml_tools.parse(payload)
         extra = data_format.deserialise(extra_ser)
-        d = defer.ensureDeferred(self.send_item(
-            client, service, nodeIdentifier, payload, item_id or None, extra
-        ))
+        d = defer.ensureDeferred(
+            self.send_item(
+                client, service, nodeIdentifier, payload, item_id or None, extra
+            )
+        )
         d.addCallback(lambda ret: ret or "")
         return d
 
-    def _send_items(self, service, nodeIdentifier, items, extra_ser=None,
-                  profile_key=C.PROF_KEY_NONE):
+    def _send_items(
+        self, service, nodeIdentifier, items, extra_ser=None, profile_key=C.PROF_KEY_NONE
+    ):
         client = self.host.get_client(profile_key)
         service = None if not service else jid.JID(service)
         try:
             items = [xml_tools.parse(item) for item in items]
         except Exception as e:
-            raise exceptions.DataError(_("Can't parse items: {msg}").format(
-                msg=e))
+            raise exceptions.DataError(_("Can't parse items: {msg}").format(msg=e))
         extra = data_format.deserialise(extra_ser)
-        return defer.ensureDeferred(self.send_items(
-            client, service, nodeIdentifier, items, extra=extra
-        ))
+        return defer.ensureDeferred(
+            self.send_items(client, service, nodeIdentifier, items, extra=extra)
+        )
 
     async def send_item(
         self,
@@ -547,7 +556,7 @@
         nodeIdentifier: str,
         payload: domish.Element,
         item_id: Optional[str] = None,
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> Optional[str]:
         """High level method to send one item
 
@@ -559,16 +568,12 @@
         @return: id of the created item
         """
         assert isinstance(payload, domish.Element)
-        item_elt = domish.Element((pubsub.NS_PUBSUB, 'item'))
+        item_elt = domish.Element((pubsub.NS_PUBSUB, "item"))
         if item_id is not None:
-            item_elt['id'] = item_id
+            item_elt["id"] = item_id
         item_elt.addChild(payload)
         published_ids = await self.send_items(
-            client,
-            service,
-            nodeIdentifier,
-            [item_elt],
-            extra=extra
+            client, service, nodeIdentifier, [item_elt], extra=extra
         )
         try:
             return published_ids[0]
@@ -582,7 +587,7 @@
         nodeIdentifier: str,
         items: List[domish.Element],
         sender: Optional[jid.JID] = None,
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> List[str]:
         """High level method to send several items at once
 
@@ -608,36 +613,47 @@
             service = client.jid.userhostJID()
         parsed_items = []
         for item in items:
-            if item.name != 'item':
+            if item.name != "item":
                 raise exceptions.DataError(_("Invalid item: {xml}").format(item.toXml()))
             item_id = item.getAttribute("id")
             parsed_items.append(pubsub.Item(id=item_id, payload=item.firstChildElement()))
         publish_options = extra.get(self.EXTRA_PUBLISH_OPTIONS)
         try:
             iq_result = await self.publish(
-                client, service, nodeIdentifier, parsed_items, options=publish_options,
-                sender=sender
+                client,
+                service,
+                nodeIdentifier,
+                parsed_items,
+                options=publish_options,
+                sender=sender,
             )
         except error.StanzaError as e:
-            if ((e.condition == 'conflict' and e.appCondition
-                 and e.appCondition.name == 'precondition-not-met'
-                 and publish_options is not None)):
+            if (
+                e.condition == "conflict"
+                and e.appCondition
+                and e.appCondition.name == "precondition-not-met"
+                and publish_options is not None
+            ):
                 # this usually happens when publish-options can't be set
-                policy = extra.get(self.EXTRA_ON_PRECOND_NOT_MET, 'raise')
-                if policy == 'raise':
+                policy = extra.get(self.EXTRA_ON_PRECOND_NOT_MET, "raise")
+                if policy == "raise":
                     raise e
-                elif policy == 'publish_without_options':
-                    log.warning(_(
-                        "Can't use publish-options ({options}) on node {node}, "
-                        "re-publishing without them: {reason}").format(
-                            options=', '.join(f'{k} = {v}'
-                                    for k,v in publish_options.items()),
+                elif policy == "publish_without_options":
+                    log.warning(
+                        _(
+                            "Can't use publish-options ({options}) on node {node}, "
+                            "re-publishing without them: {reason}"
+                        ).format(
+                            options=", ".join(
+                                f"{k} = {v}" for k, v in publish_options.items()
+                            ),
                             node=nodeIdentifier,
                             reason=e,
                         )
                     )
                     iq_result = await self.publish(
-                        client, service, nodeIdentifier, parsed_items)
+                        client, service, nodeIdentifier, parsed_items
+                    )
                 else:
                     raise exceptions.InternalError(
                         f"Invalid policy in extra's {self.EXTRA_ON_PRECOND_NOT_MET!r}: "
@@ -647,8 +663,8 @@
                 raise e
         try:
             return [
-                item['id']
-                for item in iq_result.pubsub.publish.elements(pubsub.NS_PUBSUB, 'item')
+                item["id"]
+                for item in iq_result.pubsub.publish.elements(pubsub.NS_PUBSUB, "item")
             ]
         except AttributeError:
             return []
@@ -661,7 +677,7 @@
         items: Optional[List[domish.Element]] = None,
         options: Optional[dict] = None,
         sender: Optional[jid.JID] = None,
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> domish.Element:
         """Publish pubsub items
 
@@ -679,13 +695,18 @@
         if extra is None:
             extra = {}
         if not await self.host.trigger.async_point(
-            "XEP-0060_publish", client, service, nodeIdentifier, items, options, sender,
-            extra
+            "XEP-0060_publish",
+            client,
+            service,
+            nodeIdentifier,
+            items,
+            options,
+            sender,
+            extra,
         ):
             return extra["iq_result_elt"]
         iq_result_elt = await client.pubsub_client.publish(
-            service, nodeIdentifier, items, sender,
-            options=options
+            service, nodeIdentifier, items, sender, options=options
         )
         return iq_result_elt
 
@@ -693,25 +714,35 @@
         try:
             item_elt = reduce(
                 lambda elt, ns_name: next(elt.elements(*ns_name)),
-                (message_elt,
-                 (mam.NS_MAM, "result"),
-                 (C.NS_FORWARD, "forwarded"),
-                 (C.NS_CLIENT, "message"),
-                 ("http://jabber.org/protocol/pubsub#event", "event"),
-                 ("http://jabber.org/protocol/pubsub#event", "items"),
-                 ("http://jabber.org/protocol/pubsub#event", "item"),
-                ))
+                (
+                    message_elt,
+                    (mam.NS_MAM, "result"),
+                    (C.NS_FORWARD, "forwarded"),
+                    (C.NS_CLIENT, "message"),
+                    ("http://jabber.org/protocol/pubsub#event", "event"),
+                    ("http://jabber.org/protocol/pubsub#event", "items"),
+                    ("http://jabber.org/protocol/pubsub#event", "item"),
+                ),
+            )
         except StopIteration:
             raise exceptions.DataError("Can't find Item in MAM message element")
         return item_elt
 
     def serialise_items(self, items_data):
         items, metadata = items_data
-        metadata['items'] = items
+        metadata["items"] = items
         return data_format.serialise(metadata)
 
-    def _get_items(self, service="", node="", max_items=10, item_ids=None, sub_id=None,
-                  extra="", profile_key=C.PROF_KEY_NONE):
+    def _get_items(
+        self,
+        service="",
+        node="",
+        max_items=10,
+        item_ids=None,
+        sub_id=None,
+        extra="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """Get items from pubsub node
 
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
@@ -720,16 +751,18 @@
         service = jid.JID(service) if service else None
         max_items = None if max_items == C.NO_LIMIT else max_items
         extra = self.parse_extra(data_format.deserialise(extra))
-        d = defer.ensureDeferred(self.get_items(
-            client,
-            service,
-            node,
-            max_items,
-            item_ids,
-            sub_id or None,
-            extra.rsm_request,
-            extra.extra,
-        ))
+        d = defer.ensureDeferred(
+            self.get_items(
+                client,
+                service,
+                node,
+                max_items,
+                item_ids,
+                sub_id or None,
+                extra.rsm_request,
+                extra.extra,
+            )
+        )
         d.addCallback(self.trans_items_data)
         d.addCallback(self.serialise_items)
         return d
@@ -743,7 +776,7 @@
         item_ids: Optional[List[str]] = None,
         sub_id: Optional[str] = None,
         rsm_request: Optional[rsm.RSMRequest] = None,
-        extra: Optional[dict] = None
+        extra: Optional[dict] = None,
     ) -> Tuple[List[domish.Element], dict]:
         """Retrieve pubsub items from a node.
 
@@ -770,25 +803,34 @@
         if extra is None:
             extra = {}
         cont, ret = await self.host.trigger.async_return_point(
-            "XEP-0060_getItems", client, service, node, max_items, item_ids, sub_id,
-            rsm_request, extra
+            "XEP-0060_getItems",
+            client,
+            service,
+            node,
+            max_items,
+            item_ids,
+            sub_id,
+            rsm_request,
+            extra,
         )
         if not cont:
             return ret
         try:
             mam_query = extra["mam"]
         except KeyError:
-            d = defer.ensureDeferred(client.pubsub_client.items(
-                service = service,
-                nodeIdentifier = node,
-                maxItems = max_items,
-                subscriptionIdentifier = sub_id,
-                sender = None,
-                itemIdentifiers = item_ids,
-                orderBy = extra.get(C.KEY_ORDER_BY),
-                rsm_request = rsm_request,
-                extra = extra
-            ))
+            d = defer.ensureDeferred(
+                client.pubsub_client.items(
+                    service=service,
+                    nodeIdentifier=node,
+                    maxItems=max_items,
+                    subscriptionIdentifier=sub_id,
+                    sender=None,
+                    itemIdentifiers=item_ids,
+                    orderBy=extra.get(C.KEY_ORDER_BY),
+                    rsm_request=rsm_request,
+                    extra=extra,
+                )
+            )
             # we have no MAM data here, so we add None
             d.addErrback(sat_defer.stanza_2_not_found)
             d.addTimeout(TIMEOUT, reactor)
@@ -844,7 +886,7 @@
             # we can put them directly in metadata
             metadata.update(mam_response)
         if rsm_request is not None and rsm_response is not None:
-            metadata['rsm'] = rsm_response.toDict()
+            metadata["rsm"] = rsm_response.toDict()
             if mam_response is None:
                 index = rsm_response.index
                 count = rsm_response.count
@@ -887,15 +929,28 @@
     #         d_dict[publisher] = self.get_items(service, node, max_items, None, sub_id, rsm, client.profile)
     #     defer.returnValue(d_dict)
 
-    def getOptions(self, service, nodeIdentifier, subscriber, subscriptionIdentifier=None,
-                   profile_key=C.PROF_KEY_NONE):
+    def getOptions(
+        self,
+        service,
+        nodeIdentifier,
+        subscriber,
+        subscriptionIdentifier=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile_key)
         return client.pubsub_client.getOptions(
             service, nodeIdentifier, subscriber, subscriptionIdentifier
         )
 
-    def setOptions(self, service, nodeIdentifier, subscriber, options,
-                   subscriptionIdentifier=None, profile_key=C.PROF_KEY_NONE):
+    def setOptions(
+        self,
+        service,
+        nodeIdentifier,
+        subscriber,
+        options,
+        subscriptionIdentifier=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile_key)
         return client.pubsub_client.setOptions(
             service, nodeIdentifier, subscriber, options, subscriptionIdentifier
@@ -912,7 +967,7 @@
         client: SatXMPPClient,
         service: jid.JID,
         nodeIdentifier: Optional[str] = None,
-        options: Optional[Dict[str, str]] = None
+        options: Optional[Dict[str, str]] = None,
     ) -> str:
         """Create a new node
 
@@ -1038,9 +1093,7 @@
         d = self.get_node_affiliations(
             client, jid.JID(service_s) if service_s else None, nodeIdentifier
         )
-        d.addCallback(
-            lambda affiliations: {j.full(): a for j, a in affiliations.items()}
-        )
+        d.addCallback(lambda affiliations: {j.full(): a for j, a in affiliations.items()})
         return d
 
     def get_node_affiliations(self, client, service, nodeIdentifier):
@@ -1122,10 +1175,7 @@
         )
 
     def deleteNode(
-        self,
-        client: SatXMPPClient,
-        service: jid.JID,
-        nodeIdentifier: str
+        self, client: SatXMPPClient, service: jid.JID, nodeIdentifier: str
     ) -> defer.Deferred:
         return client.pubsub_client.deleteNode(service, nodeIdentifier)
 
@@ -1188,9 +1238,9 @@
     ):
         client = self.host.get_client(profile_key)
         service = jid.JID(service) if service else None
-        return defer.ensureDeferred(self.rename_item(
-            client, service, node, item_id, new_id
-        ))
+        return defer.ensureDeferred(
+            self.rename_item(client, service, node, item_id, new_id)
+        )
 
     async def rename_item(
         self,
@@ -1198,7 +1248,7 @@
         service: Optional[jid.JID],
         node: str,
         item_id: str,
-        new_id: str
+        new_id: str,
     ) -> None:
         """Rename an item by recreating it then deleting it
 
@@ -1218,10 +1268,7 @@
         service = None if not service else jid.JID(service)
         d = defer.ensureDeferred(
             self.subscribe(
-                client,
-                service,
-                nodeIdentifier,
-                options=data_format.deserialise(options)
+                client, service, nodeIdentifier, options=data_format.deserialise(options)
             )
         )
         d.addCallback(lambda subscription: subscription.subscriptionIdentifier or "")
@@ -1233,23 +1280,31 @@
         service: Optional[jid.JID],
         nodeIdentifier: str,
         sub_jid: Optional[jid.JID] = None,
-        options: Optional[dict] = None
+        options: Optional[dict] = None,
     ) -> pubsub.Subscription:
         # TODO: reimplement a subscribtion cache, checking that we have not subscription before trying to subscribe
         if service is None:
             service = client.jid.userhostJID()
         cont, trigger_sub = await self.host.trigger.async_return_point(
-            "XEP-0060_subscribe", client, service, nodeIdentifier, sub_jid, options,
+            "XEP-0060_subscribe",
+            client,
+            service,
+            nodeIdentifier,
+            sub_jid,
+            options,
         )
         if not cont:
             return trigger_sub
         try:
             subscription = await client.pubsub_client.subscribe(
-                service, nodeIdentifier, sub_jid or client.jid.userhostJID(),
-                options=options, sender=client.jid.userhostJID()
+                service,
+                nodeIdentifier,
+                sub_jid or client.jid.userhostJID(),
+                options=options,
+                sender=client.jid.userhostJID(),
             )
         except error.StanzaError as e:
-            if e.condition == 'item-not-found':
+            if e.condition == "item-not-found":
                 raise exceptions.NotFound(e.text or e.condition)
             else:
                 raise e
@@ -1270,18 +1325,23 @@
         sender: Optional[jid.JID] = None,
     ) -> None:
         if not await self.host.trigger.async_point(
-            "XEP-0060_unsubscribe", client, service, nodeIdentifier, sub_jid,
-            subscriptionIdentifier, sender
+            "XEP-0060_unsubscribe",
+            client,
+            service,
+            nodeIdentifier,
+            sub_jid,
+            subscriptionIdentifier,
+            sender,
         ):
             return
         try:
             await client.pubsub_client.unsubscribe(
-            service,
-            nodeIdentifier,
-            sub_jid or client.jid.userhostJID(),
-            subscriptionIdentifier,
-            sender,
-        )
+                service,
+                nodeIdentifier,
+                sub_jid or client.jid.userhostJID(),
+                subscriptionIdentifier,
+                sender,
+            )
         except error.StanzaError as e:
             try:
                 next(e.getElement().elements(pubsub.NS_PUBSUB_ERRORS, "not-subscribed"))
@@ -1295,10 +1355,7 @@
 
     @utils.ensure_deferred
     async def _subscriptions(
-        self,
-        service="",
-        nodeIdentifier="",
-        profile_key=C.PROF_KEY_NONE
+        self, service="", nodeIdentifier="", profile_key=C.PROF_KEY_NONE
     ) -> str:
         client = self.host.get_client(profile_key)
         service = None if not service else jid.JID(service)
@@ -1309,7 +1366,7 @@
         self,
         client: SatXMPPEntity,
         service: Optional[jid.JID] = None,
-        node: Optional[str] = None
+        node: Optional[str] = None,
     ) -> List[Dict[str, Union[str, bool]]]:
         """Retrieve subscriptions from a service
 
@@ -1399,10 +1456,9 @@
             log.warning(f"Error while parsing item: {failure_.value}")
 
         d = defer.gatherResults([item_cb(item).addErrback(eb) for item in items])
-        d.addCallback(lambda parsed_items: (
-            [i for i in parsed_items if i is not None],
-            metadata
-        ))
+        d.addCallback(
+            lambda parsed_items: ([i for i in parsed_items if i is not None], metadata)
+        )
         return d
 
     def ser_d_list(self, results, failure_result=None):
@@ -1418,9 +1474,11 @@
         if failure_result is None:
             failure_result = ()
         return [
-            ("", result)
-            if success
-            else (str(result.result) or UNSPECIFIED, failure_result)
+            (
+                ("", result)
+                if success
+                else (str(result.result) or UNSPECIFIED, failure_result)
+            )
             for success, result in results
         ]
 
@@ -1428,10 +1486,7 @@
 
     @utils.ensure_deferred
     async def _get_node_subscriptions(
-        self,
-        service: str,
-        node: str,
-        profile_key: str
+        self, service: str, node: str, profile_key: str
     ) -> Dict[str, str]:
         client = self.host.get_client(profile_key)
         subs = await self.get_node_subscriptions(
@@ -1440,10 +1495,7 @@
         return {j.full(): a for j, a in subs.items()}
 
     async def get_node_subscriptions(
-        self,
-        client: SatXMPPEntity,
-        service: Optional[jid.JID],
-        nodeIdentifier: str
+        self, client: SatXMPPEntity, service: Optional[jid.JID], nodeIdentifier: str
     ) -> Dict[jid.JID, str]:
         """Retrieve subscriptions to a node
 
@@ -1471,15 +1523,11 @@
         try:
             return {
                 jid.JID(s["jid"]): s["subscription"]
-                for s in subscriptions_elt.elements(
-                    (pubsub.NS_PUBSUB, "subscription")
-                )
+                for s in subscriptions_elt.elements((pubsub.NS_PUBSUB, "subscription"))
             }
         except KeyError:
             raise ValueError(
-                _("Invalid result: bad <subscription> element: {}").format(
-                    iq_elt.toXml
-                )
+                _("Invalid result: bad <subscription> element: {}").format(iq_elt.toXml)
             )
 
     def _set_node_subscriptions(
@@ -1487,8 +1535,7 @@
     ):
         client = self.host.get_client(profile_key)
         subscriptions = {
-            jid.JID(jid_): subscription
-            for jid_, subscription in subscriptions.items()
+            jid.JID(jid_): subscription for jid_, subscription in subscriptions.items()
         }
         d = self.set_node_subscriptions(
             client,
@@ -1572,9 +1619,7 @@
         deferreds = {}
         for service, node in node_data:
             deferreds[(service, node)] = defer.ensureDeferred(
-                client.pubsub_client.subscribe(
-                    service, node, subscriber, options=options
-                )
+                client.pubsub_client.subscribe(service, node, subscriber, options=options)
             )
         return self.rt_sessions.new_session(deferreds, client.profile)
         # found_nodes = yield self.listNodes(service, profile=client.profile)
@@ -1639,8 +1684,14 @@
             profile_key,
         )
 
-    def get_from_many(self, node_data, max_item=None, rsm_request=None, extra=None,
-                    profile_key=C.PROF_KEY_NONE):
+    def get_from_many(
+        self,
+        node_data,
+        max_item=None,
+        rsm_request=None,
+        extra=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """Get items from many nodes at once
 
         @param node_data (iterable[tuple]): iterable of tuple (service, node) where:
@@ -1654,9 +1705,11 @@
         client = self.host.get_client(profile_key)
         deferreds = {}
         for service, node in node_data:
-            deferreds[(service, node)] = defer.ensureDeferred(self.get_items(
-                client, service, node, max_item, rsm_request=rsm_request, extra=extra
-            ))
+            deferreds[(service, node)] = defer.ensureDeferred(
+                self.get_items(
+                    client, service, node, max_item, rsm_request=rsm_request, extra=extra
+                )
+            )
         return self.rt_sessions.new_session(deferreds, client.profile)
 
 
@@ -1686,14 +1739,25 @@
         if extra is None:
             extra = {}
         items, rsm_response = await super().items(
-            service, nodeIdentifier, maxItems, subscriptionIdentifier, sender,
-            itemIdentifiers, orderBy, rsm_request
+            service,
+            nodeIdentifier,
+            maxItems,
+            subscriptionIdentifier,
+            sender,
+            itemIdentifiers,
+            orderBy,
+            rsm_request,
         )
         # items must be returned, thus this async point can't stop the workflow (but it
         # can modify returned items)
         await self.host.trigger.async_point(
-            "XEP-0060_items", self.parent, service, nodeIdentifier, items, rsm_response,
-            extra
+            "XEP-0060_items",
+            self.parent,
+            service,
+            nodeIdentifier,
+            items,
+            rsm_response,
+            extra,
         )
         return items, rsm_response
 
@@ -1725,9 +1789,7 @@
             try:
                 await utils.as_deferred(callback, client, event)
             except Exception as e:
-                log.error(
-                    f"Error while running items event callback {callback}: {e}"
-                )
+                log.error(f"Error while running items event callback {callback}: {e}")
 
     def itemsReceived(self, event):
         log.debug("Pubsub items received")
@@ -1747,9 +1809,11 @@
         log.debug(("Publish node deleted"))
         for callback in self._get_node_callbacks(event.nodeIdentifier, C.PS_DELETE):
             d = utils.as_deferred(callback, self.parent, event)
-            d.addErrback(lambda f: log.error(
-                f"Error while running delete event callback {callback}: {f}"
-            ))
+            d.addErrback(
+                lambda f: log.error(
+                    f"Error while running delete event callback {callback}: {f}"
+                )
+            )
         client = self.parent
         if (event.sender, event.nodeIdentifier) in client.pubsub_watching:
             self.host.bridge.ps_event_raw(
@@ -1760,9 +1824,11 @@
         log.debug(("Publish node purged"))
         for callback in self._get_node_callbacks(event.nodeIdentifier, C.PS_PURGE):
             d = utils.as_deferred(callback, self.parent, event)
-            d.addErrback(lambda f: log.error(
-                f"Error while running purge event callback {callback}: {f}"
-            ))
+            d.addErrback(
+                lambda f: log.error(
+                    f"Error while running purge event callback {callback}: {f}"
+                )
+            )
         client = self.parent
         if (event.sender, event.nodeIdentifier) in client.pubsub_watching:
             self.host.bridge.ps_event_raw(
@@ -1808,7 +1874,7 @@
         @param NodeIdentifier(unicode): PubSub node to use
         """
         # TODO: propose this upstream and remove it once merged
-        request = pubsub.PubSubRequest('purge')
+        request = pubsub.PubSubRequest("purge")
         request.recipient = service
         request.nodeIdentifier = nodeIdentifier
         return request.send(self.xmlstream)
--- a/libervia/backend/plugins/plugin_xep_0065.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0065.py	Wed Jun 19 18:44:57 2024 +0200
@@ -96,8 +96,8 @@
 TIMER_KEY = "timer"
 DEFER_KEY = "finished"  # key of the deferred used to track session end
 SERVER_STARTING_PORT = (
-    0
-)  # starting number for server port search (0 to ask automatic attribution)
+    0  # starting number for server port search (0 to ask automatic attribution)
+)
 
 # priorities are candidates local priorities, must be a int between 0 and 65535
 PRIORITY_BEST_DIRECT = 10000
@@ -169,8 +169,17 @@
 
 
 class Candidate(object):
-    def __init__(self, host, port, type_, priority, jid_, id_=None, priority_local=False,
-                 factory=None,):
+    def __init__(
+        self,
+        host,
+        port,
+        type_,
+        priority,
+        jid_,
+        id_=None,
+        priority_local=False,
+        factory=None,
+    ):
         """
         @param host(unicode): host IP or domain
         @param port(int): port
@@ -247,7 +256,7 @@
             multiplier = 10
         else:
             raise exceptions.InternalError("Unknown {} type !".format(self.type))
-        return 2 ** 16 * multiplier + self._local_priority
+        return 2**16 * multiplier + self._local_priority
 
     def activate(self, client, sid, peer_jid, local_jid):
         """Activate the proxy candidate
@@ -289,7 +298,7 @@
 
 
 class SOCKSv5(protocol.Protocol):
-    CHUNK_SIZE = 2 ** 16
+    CHUNK_SIZE = 2**16
 
     def __init__(self, session_hash=None):
         """
@@ -377,9 +386,11 @@
         try:
             # Parse out data
             ver, ulen = struct.unpack("BB", self.buf[:2])
-            uname, = struct.unpack("%ds" % ulen, self.buf[2 : ulen + 2])
-            plen, = struct.unpack("B", self.buf[ulen + 2])
-            password, = struct.unpack("%ds" % plen, self.buf[ulen + 3 : ulen + 3 + plen])
+            (uname,) = struct.unpack("%ds" % ulen, self.buf[2 : ulen + 2])
+            (plen,) = struct.unpack("B", self.buf[ulen + 2])
+            (password,) = struct.unpack(
+                "%ds" % plen, self.buf[ulen + 3 : ulen + 3 + plen]
+            )
             # Trim off fron of the buffer
             self.buf = self.buf[3 + ulen + plen :]
             # Fire event to authenticate user
@@ -443,7 +454,7 @@
             return None
 
     def _make_request(self):
-        hash_ = self._session_hash.encode('utf-8')
+        hash_ = self._session_hash.encode("utf-8")
         request = struct.pack(
             "!5B%dsH" % len(hash_),
             SOCKS5_VER,
@@ -501,7 +512,7 @@
 
     def connect_requested(self, addr, port):
         # Check that this session is expected
-        if not self.factory.add_to_session(addr.decode('utf-8'), self):
+        if not self.factory.add_to_session(addr.decode("utf-8"), self):
             log.warning(
                 "Unexpected connection request received from {host}".format(
                     host=self.transport.getPeer().host
@@ -509,7 +520,7 @@
             )
             self.send_error_reply(REPLY_CONN_REFUSED)
             return
-        self._session_hash = addr.decode('utf-8')
+        self._session_hash = addr.decode("utf-8")
         self.connect_completed(addr, 0)
 
     def start_transfer(self, chunk_size):
@@ -724,7 +735,9 @@
         self.host = host
 
         # session data
-        self.hash_clients_map = {}  # key: hash of the transfer session, value: session data
+        self.hash_clients_map = (
+            {}
+        )  # key: hash of the transfer session, value: session data
         self._cache_proxies = {}  # key: server jid, value: proxy data
 
         # misc data
@@ -879,7 +892,9 @@
         @return (D(list[Candidate])): list of candidates, ordered by priority
         """
         server_factory = self.get_socks_5_server_factory()
-        local_port, ext_port, local_ips, external_ip = await self._get_network_data(client)
+        local_port, ext_port, local_ips, external_ip = await self._get_network_data(
+            client
+        )
         try:
             proxy = await self.get_proxy(client, local_jid)
         except exceptions.NotFound:
@@ -1020,7 +1035,9 @@
 
         return defers_list
 
-    def get_best_candidate(self, client, candidates, session_hash, peer_session_hash=None):
+    def get_best_candidate(
+        self, client, candidates, session_hash, peer_session_hash=None
+    ):
         """Get best candidate (according to priority) which can connect
 
         @param candidates(iterable[Candidate]): candidates to test
@@ -1137,7 +1154,8 @@
         @return (D): Deferred fired when session is finished
         """
         session_data = self._create_session(
-            client, stream_object, local_jid, to_jid, sid, True)
+            client, stream_object, local_jid, to_jid, sid, True
+        )
 
         session_data[client] = client
 
@@ -1159,9 +1177,13 @@
 
             d = iq_elt.send()
             args = [client, session_data, local_jid]
-            d.addCallbacks(self._iq_negotiation_cb, self._iq_negotiation_eb, args, None, args)
+            d.addCallbacks(
+                self._iq_negotiation_cb, self._iq_negotiation_eb, args, None, args
+            )
 
-        defer.ensureDeferred(self.get_candidates(client, local_jid)).addCallback(got_candidates)
+        defer.ensureDeferred(self.get_candidates(client, local_jid)).addCallback(
+            got_candidates
+        )
         return session_data[DEFER_KEY]
 
     def _iq_negotiation_cb(self, iq_elt, client, session_data, local_jid):
@@ -1181,9 +1203,9 @@
 
         streamhost_jid = jid.JID(streamhost_used_elt["jid"])
         try:
-            candidate = next((
-                c for c in session_data["candidates"] if c.jid == streamhost_jid
-            ))
+            candidate = next(
+                (c for c in session_data["candidates"] if c.jid == streamhost_jid)
+            )
         except StopIteration:
             log.warning(
                 "Candidate [{jid}] is unknown !".format(jid=streamhost_jid.full())
@@ -1220,8 +1242,9 @@
         """
         return self._create_session(*args, **kwargs)[DEFER_KEY]
 
-    def _create_session(self, client, stream_object, local_jid, to_jid, sid,
-                       requester=False):
+    def _create_session(
+        self, client, stream_object, local_jid, to_jid, sid, requester=False
+    ):
         """Called when a bytestream is imminent
 
         @param stream_object(iface.IStreamProducer): File object where data will be
--- a/libervia/backend/plugins/plugin_xep_0070.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0070.py	Wed Jun 19 18:44:57 2024 +0200
@@ -96,18 +96,21 @@
         auth_url = auth_elt["url"]
         self._dictRequest[client] = (auth_id, auth_method, auth_url, stanzaType, elt)
         title = D_("Auth confirmation")
-        message = D_("{auth_url} needs to validate your identity, do you agree?\n"
-                     "Validation code : {auth_id}\n\n"
-                     "Please check that this code is the same as on {auth_url}"
-                    ).format(auth_url=auth_url, auth_id=auth_id)
-        d = xml_tools.defer_confirm(self.host, message=message, title=title,
-            profile=client.profile)
+        message = D_(
+            "{auth_url} needs to validate your identity, do you agree?\n"
+            "Validation code : {auth_id}\n\n"
+            "Please check that this code is the same as on {auth_url}"
+        ).format(auth_url=auth_url, auth_id=auth_id)
+        d = xml_tools.defer_confirm(
+            self.host, message=message, title=title, profile=client.profile
+        )
         d.addCallback(self._auth_request_callback, client)
 
     def _auth_request_callback(self, authorized, client):
         try:
             auth_id, auth_method, auth_url, stanzaType, elt = self._dictRequest.pop(
-                client)
+                client
+            )
         except KeyError:
             authorized = False
 
--- a/libervia/backend/plugins/plugin_xep_0071.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0071.py	Wed Jun 19 18:44:57 2024 +0200
@@ -114,6 +114,7 @@
         @param body_elts: XHTML-IM body elements found
         @return: the data with the extra parameter updated
         """
+
         # TODO: check if text only body is empty, then try to convert XHTML-IM to pure text and show a warning message
         def converted(xhtml, lang):
             if lang:
@@ -156,7 +157,7 @@
             defers.append(d)
 
     def _send_message_add_rich(self, data, client):
-        """ Construct XHTML-IM node and add it XML element
+        """Construct XHTML-IM node and add it XML element
 
         @param data: message data as sended by sendMessage callback
         """
@@ -197,8 +198,7 @@
         return d_list
 
     def message_received_trigger(self, client, message, post_treat):
-        """ Check presence of XHTML-IM in message
-        """
+        """Check presence of XHTML-IM in message"""
         try:
             html_elt = next(message.elements(NS_XHTML_IM, "html"))
         except StopIteration:
@@ -210,7 +210,7 @@
         return True
 
     def send_message_trigger(self, client, data, pre_xml_treatments, post_xml_treatments):
-        """ Check presence of rich text in extra """
+        """Check presence of rich text in extra"""
         rich = {}
         xhtml = {}
         for key, value in data["extra"].items():
@@ -231,7 +231,7 @@
         return True
 
     def _purge_style(self, styles_raw):
-        """ Remove unauthorised styles according to the XEP-0071
+        """Remove unauthorised styles according to the XEP-0071
         @param styles_raw: raw styles (value of the style attribute)
         """
         purged = []
@@ -250,7 +250,7 @@
         return "; ".join(["%s: %s" % data for data in purged])
 
     def XHTML2XHTML_IM(self, xhtml):
-        """ Convert XHTML document to XHTML_IM subset
+        """Convert XHTML document to XHTML_IM subset
         @param xhtml: raw xhtml to convert
         """
         # TODO: more clever tag replacement (replace forbidden tags with equivalents when possible)
@@ -275,7 +275,7 @@
                 attrib = elem.attrib
                 att_to_remove = set(attrib).difference(allowed[elem.tag])
                 for att in att_to_remove:
-                    del (attrib[att])
+                    del attrib[att]
                 if "style" in attrib:
                     attrib["style"] = self._purge_style(attrib["style"])
 
--- a/libervia/backend/plugins/plugin_xep_0077.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0077.py	Wed Jun 19 18:44:57 2024 +0200
@@ -48,7 +48,7 @@
     # FIXME: request IQ is not send to check available fields,
     #        while XEP recommand to use it
     # FIXME: doesn't handle data form or oob
-    namespace = 'jabber:client'
+    namespace = "jabber:client"
 
     def __init__(self, jid_, password, email=None, check_certificate=True):
         log.debug(_("Registration asked for {jid}").format(jid=jid_))
@@ -70,15 +70,25 @@
             tls_required = False
             configurationForTLS = ssl.CertificateOptions(trustRoot=None)
         tls_init = xmlstream.TLSInitiatingInitializer(
-            xs, required=tls_required, configurationForTLS=configurationForTLS)
+            xs, required=tls_required, configurationForTLS=configurationForTLS
+        )
 
         xs.initializers.append(tls_init)
 
     def register(self, xmlstream):
-        log.debug(_("Stream started with {server}, now registering"
-                    .format(server=self.jid.host)))
-        iq = XEP_0077.build_register_iq(self.xmlstream, self.jid, self.password, self.email)
-        d = iq.send(self.jid.host).addCallbacks(self.registration_cb, self.registration_eb)
+        log.debug(
+            _(
+                "Stream started with {server}, now registering".format(
+                    server=self.jid.host
+                )
+            )
+        )
+        iq = XEP_0077.build_register_iq(
+            self.xmlstream, self.jid, self.password, self.email
+        )
+        d = iq.send(self.jid.host).addCallbacks(
+            self.registration_cb, self.registration_eb
+        )
         d.chainDeferred(self.registered)
 
     def registration_cb(self, answer):
@@ -261,10 +271,11 @@
         @param port(int): port of the server to register to
         """
         if host is None:
-           host = self.host.memory.config_get("", "xmpp_domain", "127.0.0.1")
+            host = self.host.memory.config_get("", "xmpp_domain", "127.0.0.1")
         check_certificate = host != "127.0.0.1"
         authenticator = RegisteringAuthenticator(
-            jid_, password, email, check_certificate=check_certificate)
+            jid_, password, email, check_certificate=check_certificate
+        )
         registered_d = authenticator.registered
         server_register = ServerRegister(authenticator)
         reactor.connectTCP(host, port, server_register)
@@ -288,11 +299,7 @@
         client = self.host.get_client(profile_key)
         return self.unregister(client, jid.JID(to_jid_s))
 
-    def unregister(
-            self,
-            client: SatXMPPEntity,
-            to_jid: jid.JID
-    ) -> defer.Deferred:
+    def unregister(self, client: SatXMPPEntity, to_jid: jid.JID) -> defer.Deferred:
         """remove registration from a server/service
 
         BEWARE! if you remove registration from profile own server, this will
@@ -309,4 +316,3 @@
         if not to_jid or to_jid == jid.JID(client.jid.host):
             d.addCallback(lambda __: client.entity_disconnect())
         return d
-
--- a/libervia/backend/plugins/plugin_xep_0080.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0080.py	Wed Jun 19 18:44:57 2024 +0200
@@ -64,7 +64,7 @@
     "text": str,
     "timestamp": "datetime",
     "tzo": str,
-    "uri": str
+    "uri": str,
 }
 
 
@@ -111,10 +111,7 @@
 
         return geoloc_elt
 
-    def parse_geoloc_elt(
-        self,
-        geoloc_elt: domish.Element
-    ) -> Dict[str, Any]:
+    def parse_geoloc_elt(self, geoloc_elt: domish.Element) -> Dict[str, Any]:
         """Parse <geoloc/> element
 
         @param geoloc_elt: <geoloc/> element
--- a/libervia/backend/plugins/plugin_xep_0082.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0082.py	Wed Jun 19 18:44:57 2024 +0200
@@ -23,17 +23,14 @@
 from libervia.backend.tools import xmpp_datetime
 
 
-__all__ = [  # pylint: disable=unused-variable
-    "PLUGIN_INFO",
-    "XEP_0082"
-]
+__all__ = ["PLUGIN_INFO", "XEP_0082"]  # pylint: disable=unused-variable
 
 
 PLUGIN_INFO = {
     C.PI_NAME: "XMPP Date and Time Profiles",
     C.PI_IMPORT_NAME: "XEP-0082",
     C.PI_TYPE: C.PLUG_TYPE_MISC,
-    C.PI_PROTOCOLS: [ "XEP-0082" ],
+    C.PI_PROTOCOLS: ["XEP-0082"],
     C.PI_DEPENDENCIES: [],
     C.PI_RECOMMENDATIONS: [],
     C.PI_MAIN: "XEP_0082",
--- a/libervia/backend/plugins/plugin_xep_0084.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0084.py	Wed Jun 19 18:44:57 2024 +0200
@@ -65,13 +65,9 @@
         host.register_namespace("avatar_data", NS_AVATAR_DATA)
         self.host = host
         self._p = host.plugins["XEP-0060"]
-        self._i = host.plugins['IDENTITY']
+        self._i = host.plugins["IDENTITY"]
         self._i.register(
-            IMPORT_NAME,
-            "avatar",
-            self.get_avatar,
-            self.set_avatar,
-            priority=2000
+            IMPORT_NAME, "avatar", self.get_avatar, self.set_avatar, priority=2000
         )
         host.plugins["XEP-0163"].add_pep_event(
             None, NS_AVATAR_METADATA, self._on_metadata_update
@@ -85,19 +81,15 @@
         defer.ensureDeferred(self.on_metadata_update(client, itemsEvent))
 
     async def on_metadata_update(
-        self,
-        client: SatXMPPEntity,
-        itemsEvent: pubsub.ItemsEvent
+        self, client: SatXMPPEntity, itemsEvent: pubsub.ItemsEvent
     ) -> None:
         entity = client.jid.userhostJID()
         avatar_metadata = await self.get_avatar(client, entity)
         await self._i.update(client, IMPORT_NAME, "avatar", avatar_metadata, entity)
 
     async def get_avatar(
-            self,
-            client: SatXMPPEntity,
-            entity_jid: jid.JID
-        ) -> Optional[dict]:
+        self, client: SatXMPPEntity, entity_jid: jid.JID
+    ) -> Optional[dict]:
         """Get avatar data
 
         @param entity: entity to get avatar from
@@ -107,10 +99,7 @@
         # metadata
         try:
             items, __ = await self._p.get_items(
-                client,
-                service,
-                NS_AVATAR_METADATA,
-                max_items=1
+                client, service, NS_AVATAR_METADATA, max_items=1
             )
         except exceptions.NotFound:
             return None
@@ -130,7 +119,7 @@
                 metadata = {
                     "id": str(info_elt["id"]),
                     "size": int(info_elt["bytes"]),
-                    "media_type": str(info_elt["type"])
+                    "media_type": str(info_elt["type"]),
                 }
                 avatar_id = metadata["id"]
                 if not avatar_id:
@@ -150,10 +139,7 @@
         if not cache_data:
             try:
                 data_items, __ = await self._p.get_items(
-                    client,
-                    service,
-                    NS_AVATAR_DATA,
-                    item_ids=[avatar_id]
+                    client, service, NS_AVATAR_DATA, item_ids=[avatar_id]
                 )
                 data_item_elt = data_items[0]
             except (error.StanzaError, IndexError) as e:
@@ -173,18 +159,13 @@
                 )
                 return None
             with self.host.common_cache.cache_data(
-                IMPORT_NAME,
-                avatar_id,
-                metadata["media_type"]
+                IMPORT_NAME, avatar_id, metadata["media_type"]
             ) as f:
                 f.write(avatar_buf)
-                cache_data = {
-                    "path": Path(f.name),
-                    "mime_type": metadata["media_type"]
-                }
+                cache_data = {"path": Path(f.name), "mime_type": metadata["media_type"]}
 
         return self._i.avatar_build_metadata(
-                cache_data['path'], cache_data['mime_type'], avatar_id
+            cache_data["path"], cache_data["mime_type"], avatar_id
         )
 
     def build_item_data_elt(self, avatar_data: Dict[str, Any]) -> domish.Element:
@@ -213,10 +194,7 @@
         return pubsub.Item(id=self._p.ID_SINGLETON, payload=metadata_elt)
 
     async def set_avatar(
-        self,
-        client: SatXMPPEntity,
-        avatar_data: Dict[str, Any],
-        entity: jid.JID
+        self, client: SatXMPPEntity, avatar_data: Dict[str, Any], entity: jid.JID
     ) -> None:
         """Set avatar of the profile
 
@@ -235,7 +213,7 @@
                 self._p.OPT_ACCESS_MODEL: self._p.ACCESS_OPEN,
                 self._p.OPT_PERSIST_ITEMS: 1,
                 self._p.OPT_MAX_ITEMS: 1,
-            }
+            },
         )
         item_data_elt = self.build_item_data_elt(avatar_data)
         await self._p.send_items(client, service, NS_AVATAR_DATA, [item_data_elt])
@@ -249,7 +227,7 @@
                 self._p.OPT_ACCESS_MODEL: self._p.ACCESS_OPEN,
                 self._p.OPT_PERSIST_ITEMS: 1,
                 self._p.OPT_MAX_ITEMS: 1,
-            }
+            },
         )
         item_metadata_elt = self.build_item_metadata_elt(avatar_data)
         await self._p.send_items(client, service, NS_AVATAR_METADATA, [item_metadata_elt])
@@ -261,7 +239,7 @@
     def getDiscoInfo(self, requestor, service, nodeIdentifier=""):
         return [
             disco.DiscoFeature(NS_AVATAR_METADATA),
-            disco.DiscoFeature(NS_AVATAR_DATA)
+            disco.DiscoFeature(NS_AVATAR_DATA),
         ]
 
     def getDiscoItems(self, requestor, service, nodeIdentifier=""):
--- a/libervia/backend/plugins/plugin_xep_0085.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0085.py	Wed Jun 19 18:44:57 2024 +0200
@@ -100,7 +100,9 @@
     def __init__(self, host):
         log.info(_("Chat State Notifications plugin initialization"))
         self.host = host
-        self.map = {}  # FIXME: would be better to use client instead of mapping profile to data
+        self.map = (
+            {}
+        )  # FIXME: would be better to use client instead of mapping profile to data
 
         # parameter value is retrieved before each use
         host.memory.update_params(self.params)
@@ -149,9 +151,7 @@
         if value == DELETE_VALUE:
             self.host.memory.del_entity_datum(client, entity_jid, ENTITY_KEY)
         else:
-            self.host.memory.update_entity_data(
-                client, entity_jid, ENTITY_KEY, value
-            )
+            self.host.memory.update_entity_data(client, entity_jid, ENTITY_KEY, value)
         if not value or value == DELETE_VALUE:
             # reinit chat state UI for this or these contact(s)
             self.host.bridge.chat_state_received(entity_jid.full(), "", profile)
@@ -260,7 +260,8 @@
         client = self.host.get_client(profile)
         try:
             type_ = self.host.memory.get_entity_datum(
-                client, to_jid.userhostJID(), C.ENTITY_TYPE)
+                client, to_jid.userhostJID(), C.ENTITY_TYPE
+            )
             if type_ == C.ENTITY_TYPE_MUC:
                 return True
         except (exceptions.UnknownEntityError, KeyError):
--- a/libervia/backend/plugins/plugin_xep_0096.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0096.py	Wed Jun 19 18:44:57 2024 +0200
@@ -67,7 +67,11 @@
         self._si = self.host.plugins["XEP-0095"]
         self._si.register_si_profile(SI_PROFILE_NAME, self._transfer_request)
         host.bridge.add_method(
-            "si_file_send", ".plugin", in_sign="sssss", out_sign="s", method=self._file_send
+            "si_file_send",
+            ".plugin",
+            in_sign="sssss",
+            out_sign="s",
+            method=self._file_send,
         )
 
     async def can_handle_file_send(self, client, peer_jid, filepath):
@@ -280,9 +284,7 @@
         @param data: session data
         """
         log.warning(
-            "Transfer {si_id} failed: {reason}".format(
-                reason=str(failure.value), **data
-            )
+            "Transfer {si_id} failed: {reason}".format(reason=str(failure.value), **data)
         )
         data["stream_object"].close()
 
@@ -297,9 +299,9 @@
         client: SatXMPPEntity,
         peer_jid: jid.JID,
         filepath: str,
-        name: str|None = None,
-        desc: str|None = None,
-        extra: dict|None = None
+        name: str | None = None,
+        desc: str | None = None,
+        extra: dict | None = None,
     ) -> dict:
         """Send a file using XEP-0096.
 
@@ -373,8 +375,9 @@
         stream_object = stream.FileStreamObject(
             self.host, client, filepath, uid=sid, size=size
         )
-        d = plugin.start_stream(client, stream_object, client.jid,
-                               jid.JID(iq_elt["from"]), sid)
+        d = plugin.start_stream(
+            client, stream_object, client.jid, jid.JID(iq_elt["from"]), sid
+        )
         d.addCallback(self._send_cb, client, sid, stream_object)
         d.addErrback(self._send_eb, client, sid, stream_object)
 
@@ -393,7 +396,9 @@
                     "Something went wrong during the file transfer session initialisation: {reason}"
                 ).format(reason=str(stanza_err))
                 title = D_("File transfer error")
-                xml_tools.quick_note(self.host, client, msg, title, C.XMLUI_DATA_LVL_ERROR)
+                xml_tools.quick_note(
+                    self.host, client, msg, title, C.XMLUI_DATA_LVL_ERROR
+                )
         elif failure.check(exceptions.DataError):
             log.warning("Invalid stanza received")
         else:
--- a/libervia/backend/plugins/plugin_xep_0100.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0100.py	Wed Jun 19 18:44:57 2024 +0200
@@ -61,7 +61,9 @@
     def __init__(self, host):
         log.info(_("Gateways plugin initialization"))
         self.host = host
-        self.__gateways = {}  # dict used to construct the answer to gateways_find. Key = target jid
+        self.__gateways = (
+            {}
+        )  # dict used to construct the answer to gateways_find. Key = target jid
         host.bridge.add_method(
             "gateways_find",
             ".plugin",
@@ -88,7 +90,7 @@
         )
 
     def _gateways_menu(self, data, profile):
-        """ XMLUI activated by menu: return Gateways UI
+        """XMLUI activated by menu: return Gateways UI
 
         @param profile: %(doc_profile)s
         """
@@ -148,7 +150,7 @@
         return d
 
     def _get_identity_desc(self, identity):
-        """ Return a human readable description of identity
+        """Return a human readable description of identity
         @param identity: tuple as returned by Disco identities (category, type)
 
         """
@@ -253,8 +255,7 @@
         return d
 
     def gateways_find(self, target, profile):
-        """Find gateways in the target JID, using discovery protocol
-        """
+        """Find gateways in the target JID, using discovery protocol"""
         client = self.host.get_client(profile)
         log.debug(
             _("find gateways (target = %(target)s, profile = %(profile)s)")
--- a/libervia/backend/plugins/plugin_xep_0103.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0103.py	Wed Jun 19 18:44:57 2024 +0200
@@ -48,11 +48,7 @@
         log.info(_("XEP-0103 (URL Address Information) plugin initialization"))
         host.register_namespace("url-data", NS_URL_DATA)
 
-    def get_url_data_elt(
-        self,
-        url: str,
-        **kwargs
-    ) -> domish.Element:
+    def get_url_data_elt(self, url: str, **kwargs) -> domish.Element:
         """Generate the element describing the URL
 
         @param url: URL to use
@@ -63,10 +59,7 @@
         url_data_elt["target"] = url
         return url_data_elt
 
-    def parse_url_data_elt(
-        self,
-        url_data_elt: domish.Element
-    ) -> Dict[str, Any]:
+    def parse_url_data_elt(self, url_data_elt: domish.Element) -> Dict[str, Any]:
         """Parse <url-data/> element
 
         @param url_data_elt: <url-data/> element
@@ -77,9 +70,7 @@
         """
         if url_data_elt.name != "url-data":
             try:
-                url_data_elt = next(
-                    url_data_elt.elements(NS_URL_DATA, "url-data")
-                )
+                url_data_elt = next(url_data_elt.elements(NS_URL_DATA, "url-data"))
             except StopIteration:
                 raise exceptions.NotFound
         try:
--- a/libervia/backend/plugins/plugin_xep_0106.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0106.py	Wed Jun 19 18:44:57 2024 +0200
@@ -40,23 +40,23 @@
 
 NS_JID_ESCAPING = r"jid\20escaping"
 ESCAPE_MAP = {
-    ' ': r'\20',
-    '"': r'\22',
-    '&': r'\26',
-    "'": r'\27',
-    '/': r'\2f',
-    ':': r'\3a',
-    '<': r'\3c',
-    '>': r'\3e',
-    '@': r'\40',
-    '\\': r'\5c',
+    " ": r"\20",
+    '"': r"\22",
+    "&": r"\26",
+    "'": r"\27",
+    "/": r"\2f",
+    ":": r"\3a",
+    "<": r"\3c",
+    ">": r"\3e",
+    "@": r"\40",
+    "\\": r"\5c",
 }
 
 
 class XEP_0106(object):
 
     def __init__(self, host):
-        self.reverse_map = {v:k for k,v in ESCAPE_MAP.items()}
+        self.reverse_map = {v: k for k, v in ESCAPE_MAP.items()}
 
     def get_handler(self, client):
         return XEP_0106_handler()
@@ -68,7 +68,7 @@
         @return (unicode): escaped text
         @raise ValueError: text can't be escaped
         """
-        if not text or text[0] == ' ' or text[-1] == ' ':
+        if not text or text[0] == " " or text[-1] == " ":
             raise ValueError("text must not be empty, or start or end with a whitespace")
         escaped = []
         for c in text:
@@ -76,7 +76,7 @@
                 escaped.append(ESCAPE_MAP[c])
             else:
                 escaped.append(c)
-        return ''.join(escaped)
+        return "".join(escaped)
 
     def unescape(self, escaped):
         """Unescape text
@@ -85,20 +85,22 @@
         @return (unicode): unescaped text
         @raise ValueError: text can't be unescaped
         """
-        if not escaped or escaped.startswith(r'\27') or escaped.endswith(r'\27'):
-            raise ValueError("escaped value must not be empty, or start or end with a "
-                             f"whitespace: rejected value is {escaped!r}")
+        if not escaped or escaped.startswith(r"\27") or escaped.endswith(r"\27"):
+            raise ValueError(
+                "escaped value must not be empty, or start or end with a "
+                f"whitespace: rejected value is {escaped!r}"
+            )
         unescaped = []
         idx = 0
         while idx < len(escaped):
-            char_seq = escaped[idx:idx+3]
+            char_seq = escaped[idx : idx + 3]
             if char_seq in self.reverse_map:
                 unescaped.append(self.reverse_map[char_seq])
                 idx += 3
             else:
                 unescaped.append(escaped[idx])
                 idx += 1
-        return ''.join(unescaped)
+        return "".join(unescaped)
 
 
 @implementer(disco.IDisco)
--- a/libervia/backend/plugins/plugin_xep_0115.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0115.py	Wed Jun 19 18:44:57 2024 +0200
@@ -170,8 +170,7 @@
                 _(
                     "Unknown hash method for entity capabilities: [{hash_method}] "
                     "(entity: {entity_jid}, node: {node})"
-                )
-                .format(hash_method = c_hash, entity_jid = from_jid, node = c_node)
+                ).format(hash_method=c_hash, entity_jid=from_jid, node=c_node)
             )
 
         def cb(__):
@@ -186,10 +185,10 @@
                         "computed: [{computed}]\n"
                         "(entity: {entity_jid}, node: {node})"
                     ).format(
-                        given = c_ver,
-                        computed = computed_hash,
-                        entity_jid = from_jid,
-                        node = c_node,
+                        given=c_ver,
+                        computed=computed_hash,
+                        entity_jid=from_jid,
+                        node=c_node,
                     )
                 )
 
--- a/libervia/backend/plugins/plugin_xep_0163.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0163.py	Wed Jun 19 18:44:57 2024 +0200
@@ -80,7 +80,7 @@
         node: str,
         in_callback: Callable,
         out_callback: Optional[Callable] = None,
-        notify: bool = True
+        notify: bool = True,
     ) -> None:
         """Add a Personal Eventing Protocol event manager
 
--- a/libervia/backend/plugins/plugin_xep_0166/__init__.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0166/__init__.py	Wed Jun 19 18:44:57 2024 +0200
@@ -51,15 +51,15 @@
 log = getLogger(__name__)
 
 
-IQ_SET : Final = '/iq[@type="set"]'
-NS_JINGLE : Final = "urn:xmpp:jingle:1"
-NS_JINGLE_ERROR : Final = "urn:xmpp:jingle:errors:1"
-JINGLE_REQUEST : Final = f'{IQ_SET}/jingle[@xmlns="{NS_JINGLE}"]'
-CONFIRM_TXT : Final = D_(
+IQ_SET: Final = '/iq[@type="set"]'
+NS_JINGLE: Final = "urn:xmpp:jingle:1"
+NS_JINGLE_ERROR: Final = "urn:xmpp:jingle:errors:1"
+JINGLE_REQUEST: Final = f'{IQ_SET}/jingle[@xmlns="{NS_JINGLE}"]'
+CONFIRM_TXT: Final = D_(
     "{entity} want to start a jingle session with you, do you accept ?"
 )
 
-PLUGIN_INFO : Final = {
+PLUGIN_INFO: Final = {
     C.PI_NAME: "Jingle",
     C.PI_IMPORT_NAME: "XEP-0166",
     C.PI_TYPE: "XEP",
@@ -72,54 +72,50 @@
 
 
 class XEP_0166:
-    namespace : Final = NS_JINGLE
+    namespace: Final = NS_JINGLE
 
-    ROLE_INITIATOR : Final = "initiator"
-    ROLE_RESPONDER : Final = "responder"
+    ROLE_INITIATOR: Final = "initiator"
+    ROLE_RESPONDER: Final = "responder"
 
-    TRANSPORT_DATAGRAM : Final = "UDP"
-    TRANSPORT_STREAMING : Final = "TCP"
+    TRANSPORT_DATAGRAM: Final = "UDP"
+    TRANSPORT_STREAMING: Final = "TCP"
 
-    REASON_SUCCESS : Final = "success"
-    REASON_DECLINE : Final = "decline"
-    REASON_FAILED_APPLICATION : Final = "failed-application"
-    REASON_FAILED_TRANSPORT : Final = "failed-transport"
-    REASON_CONNECTIVITY_ERROR : Final = "connectivity-error"
+    REASON_SUCCESS: Final = "success"
+    REASON_DECLINE: Final = "decline"
+    REASON_FAILED_APPLICATION: Final = "failed-application"
+    REASON_FAILED_TRANSPORT: Final = "failed-transport"
+    REASON_CONNECTIVITY_ERROR: Final = "connectivity-error"
 
-    STATE_PENDING : Final = "PENDING"
-    STATE_ACTIVE : Final = "ACTIVE"
-    STATE_ENDED : Final = "ENDED"
+    STATE_PENDING: Final = "PENDING"
+    STATE_ACTIVE: Final = "ACTIVE"
+    STATE_ENDED: Final = "ENDED"
 
     # standard actions
 
-    A_SESSION_INITIATE : Final = "session-initiate"
-    A_SESSION_ACCEPT : Final = "session-accept"
-    A_SESSION_TERMINATE : Final = "session-terminate"
-    A_SESSION_INFO : Final = "session-info"
-    A_TRANSPORT_REPLACE : Final = "transport-replace"
-    A_TRANSPORT_ACCEPT : Final = "transport-accept"
-    A_TRANSPORT_REJECT : Final = "transport-reject"
-    A_TRANSPORT_INFO : Final = "transport-info"
+    A_SESSION_INITIATE: Final = "session-initiate"
+    A_SESSION_ACCEPT: Final = "session-accept"
+    A_SESSION_TERMINATE: Final = "session-terminate"
+    A_SESSION_INFO: Final = "session-info"
+    A_TRANSPORT_REPLACE: Final = "transport-replace"
+    A_TRANSPORT_ACCEPT: Final = "transport-accept"
+    A_TRANSPORT_REJECT: Final = "transport-reject"
+    A_TRANSPORT_INFO: Final = "transport-info"
 
     # non standard actions
 
     #: called before the confirmation request, first event for responder, useful for
     #: parsing
-    A_PREPARE_CONFIRMATION : Final = "prepare-confirmation"
+    A_PREPARE_CONFIRMATION: Final = "prepare-confirmation"
     #: initiator must prepare tranfer
-    A_PREPARE_INITIATOR : Final = "prepare-initiator"
+    A_PREPARE_INITIATOR: Final = "prepare-initiator"
     #: responder must prepare tranfer
-    A_PREPARE_RESPONDER : Final = "prepare-responder"
-    #; session accepted ack has been received from initiator
-    A_ACCEPTED_ACK : Final = (
-        "accepted-ack"
-    )
-    A_START : Final = "start"  # application can start
+    A_PREPARE_RESPONDER: Final = "prepare-responder"
+    # ; session accepted ack has been received from initiator
+    A_ACCEPTED_ACK: Final = "accepted-ack"
+    A_START: Final = "start"  # application can start
     #: called when a transport is destroyed (e.g. because it is remplaced). Used to do
     #: cleaning operations
-    A_DESTROY : Final = (
-        "destroy"
-    )
+    A_DESTROY: Final = "destroy"
 
     def __init__(self, host):
         log.info(_("plugin Jingle initialization"))
@@ -157,9 +153,7 @@
         try:
             return client.jingle_sessions[session_id]
         except KeyError:
-            raise exceptions.NotFound(
-                f"No session with SID {session_id} found"
-            )
+            raise exceptions.NotFound(f"No session with SID {session_id} found")
 
     def create_session(
         self,
@@ -167,8 +161,8 @@
         sid: str,
         role: str,
         peer_jid: jid.JID,
-        local_jid: jid.JID|None = None,
-        **kwargs
+        local_jid: jid.JID | None = None,
+        **kwargs,
     ) -> dict:
         """Create a new jingle session.
 
@@ -192,7 +186,6 @@
         if role not in [XEP_0166.ROLE_INITIATOR, XEP_0166.ROLE_RESPONDER]:
             raise ValueError(f"Invalid role {role}. Expected initiator or responder.")
 
-
         session_data = {
             "id": sid,
             "state": XEP_0166.STATE_PENDING,
@@ -201,7 +194,7 @@
             "local_jid": local_jid or client.jid,
             "peer_jid": peer_jid,
             "started": time.time(),
-            "contents": {}
+            "contents": {},
         }
 
         # If extra kw args are provided, merge them into the session_data
@@ -213,27 +206,24 @@
 
         return session_data
 
-
     def delete_session(self, client, sid):
         try:
             del client.jingle_sessions[sid]
         except KeyError:
             log.debug(
                 f"Jingle session id {sid!r} is unknown, nothing to delete "
-                f"[{client.profile}]")
+                f"[{client.profile}]"
+            )
         else:
             log.debug(f"Jingle session id {sid!r} deleted [{client.profile}]")
 
     ## helpers methods to build stanzas ##
 
     def _build_jingle_elt(
-        self,
-        client: SatXMPPEntity,
-        session: dict,
-        action: str
+        self, client: SatXMPPEntity, session: dict, action: str
     ) -> Tuple[xmlstream.IQ, domish.Element]:
         iq_elt = client.IQ("set")
-        iq_elt["from"] = session['local_jid'].full()
+        iq_elt["from"] = session["local_jid"].full()
         iq_elt["to"] = session["peer_jid"].full()
         jingle_elt = iq_elt.addElement("jingle", NS_JINGLE)
         jingle_elt["sid"] = session["id"]
@@ -264,11 +254,7 @@
         log.warning(_("Error while terminating session: {msg}").format(msg=failure_))
 
     def _terminate(
-        self,
-        session_id: str,
-        reason: str,
-        reason_txt: str,
-        profile: str
+        self, session_id: str, reason: str, reason_txt: str, profile: str
     ) -> defer.Deferred:
         client = self.host.get_client(profile)
         session = self.get_session(client, session_id)
@@ -276,19 +262,14 @@
             raise ValueError(
                 'only "cancel", "decline" and "busy" and empty value are allowed'
             )
-        return self.terminate(
-            client,
-            reason or None,
-            session,
-            text=reason_txt or None
-        )
+        return self.terminate(client, reason or None, session, text=reason_txt or None)
 
     def terminate(
-            self,
-            client: SatXMPPEntity,
-            reason: str|list[domish.Element]|None,
-            session: dict,
-            text: str|None = None
+        self,
+        client: SatXMPPEntity,
+        reason: str | list[domish.Element] | None,
+        session: dict,
+        text: str | None = None,
     ) -> defer.Deferred:
         """Terminate the session
 
@@ -311,14 +292,9 @@
             reason_elt = None
         if text is not None:
             if reason_elt is None:
-                raise ValueError(
-                    "You have to specify a reason if text is specified"
-                )
+                raise ValueError("You have to specify a reason if text is specified")
             reason_elt.addElement("text", content=text)
-        if not self.host.trigger.point(
-            "XEP-0166_terminate",
-            client, session, reason_elt
-        ):
+        if not self.host.trigger.point("XEP-0166_terminate", client, session, reason_elt):
             return defer.succeed(None)
         self.delete_session(client, session["id"])
         d = iq_elt.send()
@@ -328,11 +304,11 @@
     ## errors which doesn't imply a stanza sending ##
 
     def _jingle_error_cb(
-            self,
-            failure_: failure.Failure|BaseException,
-            session: dict,
-            request: domish.Element,
-            client: SatXMPPEntity
+        self,
+        failure_: failure.Failure | BaseException,
+        session: dict,
+        request: domish.Element,
+        client: SatXMPPEntity,
     ) -> defer.Deferred:
         """Called when something is going wrong while parsing jingle request
 
@@ -352,20 +328,19 @@
         if isinstance(failure_, exceptions.DataError):
             return self.sendError(client, "bad-request", session["id"], request)
         elif isinstance(failure_, error.StanzaError):
-            return self.terminate(client, self.REASON_FAILED_APPLICATION, session,
-                                  text=str(failure_))
+            return self.terminate(
+                client, self.REASON_FAILED_APPLICATION, session, text=str(failure_)
+            )
         else:
             log.error(f"Unmanaged jingle exception: {failure_}")
-            return self.terminate(client, self.REASON_FAILED_APPLICATION, session,
-                                  text=str(failure_))
+            return self.terminate(
+                client, self.REASON_FAILED_APPLICATION, session, text=str(failure_)
+            )
 
     ## methods used by other plugins ##
 
     def register_application(
-        self,
-        namespace: str,
-        handler: BaseApplicationHandler,
-        priority: int = 0
+        self, namespace: str, handler: BaseApplicationHandler, priority: int = 0
     ) -> None:
         """Register an application plugin
 
@@ -405,11 +380,11 @@
         log.debug("new jingle application registered")
 
     def register_transport(
-            self,
-            namespace: str,
-            transport_type: str,
-            handler: BaseTransportHandler,
-            priority: int = 0
+        self,
+        namespace: str,
+        transport_type: str,
+        handler: BaseTransportHandler,
+        priority: int = 0,
     ) -> None:
         """Register a transport plugin
 
@@ -466,7 +441,9 @@
         content_elt["name"] = content_name
         content_elt["creator"] = content_data["creator"]
 
-        transport_elt = transport.handler.jingle_session_init(client, session, content_name)
+        transport_elt = transport.handler.jingle_session_init(
+            client, session, content_name
+        )
         content_elt.addChild(transport_elt)
         iq_elt.send()
 
@@ -477,7 +454,7 @@
         session: dict,
         content_name: str,
         iq_elt: Optional[xmlstream.IQ] = None,
-        context_elt: Optional[domish.Element] = None
+        context_elt: Optional[domish.Element] = None,
     ) -> Tuple[xmlstream.IQ, domish.Element]:
         """Build an element according to requested action
 
@@ -534,12 +511,10 @@
         try:
             return self._applications[namespace]
         except KeyError:
-            raise exceptions.NotFound(
-                f"No application registered for {namespace}"
-            )
+            raise exceptions.NotFound(f"No application registered for {namespace}")
 
     def get_content_data(self, content: dict, content_idx: int) -> ContentData:
-        """"Retrieve application and its argument from content"""
+        """ "Retrieve application and its argument from content"""
         app_ns = content["app_ns"]
         try:
             application = self.get_application(app_ns)
@@ -553,11 +528,7 @@
         except KeyError:
             content_name = content["name"] = str(content_idx)
         return ContentData(
-            application,
-            app_args,
-            app_kwargs,
-            transport_data,
-            content_name
+            application, app_args, app_kwargs, transport_data, content_name
         )
 
     async def initiate(
@@ -566,8 +537,8 @@
         peer_jid: jid.JID,
         contents: List[dict],
         encrypted: bool = False,
-        sid: str|None = None,
-        **extra_data: Any
+        sid: str | None = None,
+        **extra_data: Any,
     ) -> str:
         """Send a session initiation request
 
@@ -591,8 +562,11 @@
         @return: Sesson ID
         """
         assert contents  # there must be at least one content
-        if (peer_jid == client.jid
-            or client.is_component and peer_jid.host == client.jid.host):
+        if (
+            peer_jid == client.jid
+            or client.is_component
+            and peer_jid.host == client.jid.host
+        ):
             raise ValueError(_("You can't do a jingle session with yourself"))
         if sid is None:
             sid = str(uuid.uuid4())
@@ -602,8 +576,7 @@
         initiator = session["initiator"]
 
         if not await self.host.trigger.async_point(
-            "XEP-0166_initiate",
-            client, session, contents
+            "XEP-0166_initiate", client, session, contents
         ):
             return sid
 
@@ -658,21 +631,25 @@
             # then the description element
             application_data["desc_elt"] = desc_elt = await utils.as_deferred(
                 content_data.application.handler.jingle_session_init,
-                client, session, content_data.content_name,
-                *content_data.app_args, **content_data.app_kwargs
+                client,
+                session,
+                content_data.content_name,
+                *content_data.app_args,
+                **content_data.app_kwargs,
             )
             content_elt.addChild(desc_elt)
 
             # and the transport one
             transport_data["transport_elt"] = transport_elt = await utils.as_deferred(
                 transport.handler.jingle_session_init,
-                client, session, content_data.content_name,
+                client,
+                session,
+                content_data.content_name,
             )
             content_elt.addChild(transport_elt)
 
         if not await self.host.trigger.async_point(
-            "XEP-0166_initiate_elt_built",
-            client, session, iq_elt, jingle_elt
+            "XEP-0166_initiate_elt_built", client, session, iq_elt, jingle_elt
         ):
             return sid
 
@@ -737,9 +714,7 @@
         defer.ensureDeferred(self.on_jingle_request(client, request))
 
     async def on_jingle_request(
-        self,
-        client: SatXMPPEntity,
-        request: domish.Element
+        self, client: SatXMPPEntity, request: domish.Element
     ) -> None:
         """Called when any jingle request is received
 
@@ -803,15 +778,13 @@
                 # XXX: we store local_jid using request['to'] because for a component the
                 # jid used may not be client.jid (if a local part is used).
                 session = self.create_session(
-                    client, sid, XEP_0166.ROLE_RESPONDER, peer_jid, jid.JID(request['to'])
+                    client, sid, XEP_0166.ROLE_RESPONDER, peer_jid, jid.JID(request["to"])
                 )
         else:
             if session["peer_jid"] != peer_jid:
                 log.warning(
                     "sid conflict ({}), the jid doesn't match. Can be a collision, a "
-                    "hack attempt, or a bad sid generation".format(
-                        sid
-                    )
+                    "hack attempt, or a bad sid generation".format(sid)
                 )
                 self.sendError(client, "service-unavailable", sid, request)
                 return
@@ -849,7 +822,7 @@
         client: SatXMPPEntity,
         new: bool = False,
         creator: str = ROLE_INITIATOR,
-        with_application: bool =True,
+        with_application: bool = True,
         with_transport: bool = True,
         store_in_session: bool = True,
     ) -> Dict[str, dict]:
@@ -926,9 +899,7 @@
                     try:
                         application = self._applications[app_ns]
                     except KeyError:
-                        log.warning(
-                            "Unmanaged application namespace [{}]".format(app_ns)
-                        )
+                        log.warning("Unmanaged application namespace [{}]".format(app_ns))
                         self.sendError(
                             client, "service-unavailable", session["id"], request
                         )
@@ -998,7 +969,7 @@
         transp_default_cb: Optional[Callable] = None,
         delete: bool = True,
         elements: bool = True,
-        force_element: Optional[domish.Element] = None
+        force_element: Optional[domish.Element] = None,
     ) -> list[Any]:
         """Call application and transport plugin methods for all contents
 
@@ -1059,7 +1030,7 @@
         client: SatXMPPEntity,
         request: domish.Element,
         jingle_elt: domish.Element,
-        session: Dict[str, Any]
+        session: Dict[str, Any],
     ) -> None:
         """Called on session-initiate action
 
@@ -1093,20 +1064,15 @@
         # at this point we can send the <iq/> result to confirm reception of the request
         client.send(xmlstream.toResponse(request, "result"))
 
-
         assert "jingle_elt" not in session
         session["jingle_elt"] = jingle_elt
         if not await self.host.trigger.async_point(
-            "XEP-0166_on_session_initiate",
-            client, session, request, jingle_elt
+            "XEP-0166_on_session_initiate", client, session, request, jingle_elt
         ):
             return
 
         await self._call_plugins(
-            client,
-            XEP_0166.A_PREPARE_CONFIRMATION,
-            session,
-            delete=False
+            client, XEP_0166.A_PREPARE_CONFIRMATION, session, delete=False
         )
 
         # we now request each application plugin confirmation
@@ -1131,7 +1097,7 @@
         confirmations: list[bool],
         session: dict,
         jingle_elt: domish.Element,
-        client: SatXMPPEntity
+        client: SatXMPPEntity,
     ) -> None:
         """Method called when confirmation from user has been received
 
@@ -1150,7 +1116,7 @@
         iq_elt, jingle_elt = self._build_jingle_elt(
             client, session, XEP_0166.A_SESSION_ACCEPT
         )
-        jingle_elt["responder"] = session['local_jid'].full()
+        jingle_elt["responder"] = session["local_jid"].full()
         session["jingle_elt"] = jingle_elt
 
         # contents
@@ -1219,7 +1185,9 @@
             reason_elt = parent_elt.addElement("reason")
             return reason_elt
 
-    def parse_reason_elt(self, reason_elt: domish.Element) -> tuple[str|None, str|None]:
+    def parse_reason_elt(
+        self, reason_elt: domish.Element
+    ) -> tuple[str | None, str | None]:
         """Parse a <reason> element
 
         @return: reason found, and text if any
@@ -1242,7 +1210,7 @@
         client: SatXMPPEntity,
         request: domish.Element,
         jingle_elt: domish.Element,
-        session: dict
+        session: dict,
     ) -> None:
         # TODO: check reason, display a message to user if needed
         log.debug(f"Jingle Session {session['id']} terminated")
@@ -1286,10 +1254,7 @@
         session["jingle_elt"] = jingle_elt
 
         await self._call_plugins(
-            client,
-            XEP_0166.A_PREPARE_INITIATOR,
-            session,
-            delete=False
+            client, XEP_0166.A_PREPARE_INITIATOR, session, delete=False
         )
 
         await self._call_plugins(client, XEP_0166.A_SESSION_ACCEPT, session)
@@ -1398,7 +1363,11 @@
             # we can now actually replace the transport
             await utils.as_deferred(
                 content_data["transport"].handler.jingle_handler,
-                client, XEP_0166.A_DESTROY, session, content_name, None
+                client,
+                XEP_0166.A_DESTROY,
+                session,
+                content_name,
+                None,
             )
             content_data["transport"] = transport
             content_data["transport_data"].clear()
@@ -1409,13 +1378,21 @@
             # we notify the transport and insert its <transport/> in the answer
             accept_transport_elt = await utils.as_deferred(
                 transport.handler.jingle_handler,
-                client, XEP_0166.A_TRANSPORT_REPLACE, session, content_name, transport_elt
+                client,
+                XEP_0166.A_TRANSPORT_REPLACE,
+                session,
+                content_name,
+                transport_elt,
             )
             content_elt.addChild(accept_transport_elt)
             # there is no confirmation needed here, so we can directly prepare it
             await utils.as_deferred(
                 transport.handler.jingle_handler,
-                client, XEP_0166.A_PREPARE_RESPONDER, session, content_name, None
+                client,
+                XEP_0166.A_PREPARE_RESPONDER,
+                session,
+                content_name,
+                None,
             )
 
         iq_elt.send()
@@ -1425,7 +1402,7 @@
         client: SatXMPPEntity,
         request: domish.Element,
         jingle_elt: domish.Element,
-        session: dict
+        session: dict,
     ) -> None:
         """Method called once transport replacement is accepted
 
@@ -1472,7 +1449,7 @@
         client: SatXMPPEntity,
         request: domish.Element,
         jingle_elt: domish.Element,
-        session: dict
+        session: dict,
     ) -> None:
         """Method called when a transport-info action is received from other peer
 
@@ -1487,8 +1464,12 @@
 
         try:
             parsed_contents = self._parse_elements(
-                jingle_elt, session, request, client, with_application=False,
-                store_in_session=False
+                jingle_elt,
+                session,
+                request,
+                client,
+                with_application=False,
+                store_in_session=False,
             )
         except exceptions.CancelError:
             return
--- a/libervia/backend/plugins/plugin_xep_0166/models.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0166/models.py	Wed Jun 19 18:44:57 2024 +0200
@@ -28,14 +28,12 @@
 from libervia.backend.core.core_types import SatXMPPEntity
 from libervia.backend.core.i18n import _
 
+
 class BaseApplicationHandler(abc.ABC):
 
     @abc.abstractmethod
     async def jingle_preflight(
-        self,
-        client: SatXMPPEntity,
-        session: dict,
-        description_elt: domish.Element
+        self, client: SatXMPPEntity, session: dict, description_elt: domish.Element
     ) -> None:
         """Called when preparation steps are needed by a plugin
 
@@ -49,7 +47,7 @@
         client: SatXMPPEntity,
         session: dict,
         info_type: str,
-        info_data: dict|None = None
+        info_data: dict | None = None,
     ) -> None:
         """Called when we have new information during preflight
 
@@ -59,10 +57,7 @@
 
     @abc.abstractmethod
     async def jingle_preflight_cancel(
-        self,
-        client: SatXMPPEntity,
-        session: dict,
-        cancel_error: exceptions.CancelError
+        self, client: SatXMPPEntity, session: dict, cancel_error: exceptions.CancelError
     ) -> None:
         """Called when preflight initiation is cancelled
 
@@ -79,8 +74,7 @@
         content_name: str,
         desc_elt: domish.Element,
     ) -> Union[
-        Callable[..., Union[bool, defer.Deferred]],
-        Callable[..., Awaitable[bool]]
+        Callable[..., Union[bool, defer.Deferred]], Callable[..., Awaitable[bool]]
     ]:
         """If present, it is called on when session must be accepted.
         If not present, a generic accept dialog will be used.
@@ -94,15 +88,8 @@
 
     @abc.abstractmethod
     def jingle_session_init(
-        self,
-        client: SatXMPPEntity,
-        session: dict,
-        content_name: str,
-        *args, **kwargs
-    ) -> Union[
-        Callable[..., domish.Element],
-        Callable[..., Awaitable[domish.Element]]
-    ]:
+        self, client: SatXMPPEntity, session: dict, content_name: str, *args, **kwargs
+    ) -> Union[Callable[..., domish.Element], Callable[..., Awaitable[domish.Element]]]:
         """Must return the domish.Element used for initial content.
 
         @param client: SatXMPPEntity instance
@@ -119,11 +106,8 @@
         action: str,
         session: dict,
         content_name: str,
-        transport_elt: domish.Element
-    ) -> Union[
-        Callable[..., None],
-        Callable[..., Awaitable[None]]
-    ]:
+        transport_elt: domish.Element,
+    ) -> Union[Callable[..., None], Callable[..., Awaitable[None]]]:
         """Called on several actions to negotiate the application or transport.
 
         @param client: SatXMPPEntity instance
@@ -141,11 +125,8 @@
         action: str,
         session: dict,
         content_name: str,
-        reason_elt: domish.Element
-    ) -> Union[
-        Callable[..., None],
-        Callable[..., Awaitable[None]]
-    ]:
+        reason_elt: domish.Element,
+    ) -> Union[Callable[..., None], Callable[..., Awaitable[None]]]:
         """Called on session terminate, with reason_elt.
         May be used to clean session.
 
@@ -170,15 +151,8 @@
 
     @abc.abstractmethod
     def jingle_session_init(
-        self,
-        client: SatXMPPEntity,
-        session: dict,
-        content_name: str,
-        *args, **kwargs
-    ) -> Union[
-        Callable[..., domish.Element],
-        Callable[..., Awaitable[domish.Element]]
-    ]:
+        self, client: SatXMPPEntity, session: dict, content_name: str, *args, **kwargs
+    ) -> Union[Callable[..., domish.Element], Callable[..., Awaitable[domish.Element]]]:
         """Must return the domish.Element used for initial content.
 
         @param client: SatXMPPEntity instance
@@ -195,11 +169,8 @@
         action: str,
         session: dict,
         content_name: str,
-        reason_elt: domish.Element
-    ) -> Union[
-        Callable[..., None],
-        Callable[..., Awaitable[None]]
-    ]:
+        reason_elt: domish.Element,
+    ) -> Union[Callable[..., None], Callable[..., Awaitable[None]]]:
         """Called on several actions to negotiate the application or transport.
 
         @param client: SatXMPPEntity instance
--- a/libervia/backend/plugins/plugin_xep_0167/__init__.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0167/__init__.py	Wed Jun 19 18:44:57 2024 +0200
@@ -169,7 +169,7 @@
                     log.warning(f"no media ID found for {media_type}: {media_data}")
                 # FIXME: the 2 values below are linked to XEP-0343, they should be added
                 #   there instead, maybe with some new trigger?
-                for key in ("sctp-port","max-message-size"):
+                for key in ("sctp-port", "max-message-size"):
                     value = transport_data.get(key)
                     if value is not None:
                         metadata[key] = value
@@ -257,18 +257,19 @@
             raise exceptions.DataError("no valid media data found: {call_data}")
 
         call_type = (
-            C.META_SUBTYPE_CALL_VIDEO if "video" in call_data
+            C.META_SUBTYPE_CALL_VIDEO
+            if "video" in call_data
             else C.META_SUBTYPE_CALL_AUDIO
         )
 
         sid = await self._j.initiate(
-                client,
-                peer_jid,
-                contents,
-                call_type=call_type,
-                metadata=metadata,
-                peer_metadata={},
-            )
+            client,
+            peer_jid,
+            contents,
+            call_type=call_type,
+            metadata=metadata,
+            peer_metadata={},
+        )
         return sid
 
     def _call_answer_sdp(self, session_id: str, answer_sdp: str, profile: str) -> None:
--- a/libervia/backend/plugins/plugin_xep_0167/mapping.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0167/mapping.py	Wed Jun 19 18:44:57 2024 +0200
@@ -43,9 +43,7 @@
         return "a=recvonly"
 
 
-def generate_sdp_from_session(
-    session: dict, local: bool = False, port: int = 9
-) -> str:
+def generate_sdp_from_session(session: dict, local: bool = False, port: int = 9) -> str:
     """Generate an SDP string from session data.
 
     @param session: A dictionary containing the session data. It should have the
@@ -111,11 +109,13 @@
         session,
         local,
         sdp_lines,
-        triggers_no_cancel=True
+        triggers_no_cancel=True,
     )
     content_names = sorted(contents)
 
-    for content_name, content_data in [(n, contents[n]) for n in content_names]: # contents.items():
+    for content_name, content_data in [
+        (n, contents[n]) for n in content_names
+    ]:  # contents.items():
         app_data_key = "local_data" if local else "peer_data"
         application_data = content_data["application_data"]
         media_data = application_data[app_data_key]
@@ -142,7 +142,6 @@
         if senders is None:
             sdp_lines.append(senders_to_sdp(content_data["senders"], session))
 
-
         # Generate a= lines for rtpmap and fmtp
         for pt_id, pt in payload_types.items():
             name = pt["name"]
@@ -223,7 +222,6 @@
                     crypto_line += f" {session_params}"
                 sdp_lines.append(crypto_line)
 
-
         host.trigger.point(
             "XEP-0167_generate_sdp_content",
             session,
@@ -235,7 +233,7 @@
             app_data_key,
             media_data,
             media,
-            triggers_no_cancel=True
+            triggers_no_cancel=True,
         )
 
     # Combine SDP lines and return the result
@@ -298,7 +296,7 @@
                 media_data = call_data[media_type] = {
                     "application_data": application_data,
                     "transport_data": transport_data,
-                    "senders": senders
+                    "senders": senders,
                 }
 
             elif prefix == "a=":
@@ -457,7 +455,7 @@
                     media_type,
                     application_data,
                     transport_data,
-                    triggers_no_cancel=True
+                    triggers_no_cancel=True,
                 )
 
         except ValueError as e:
@@ -474,7 +472,7 @@
     # FIXME: is this really useful?
     # ICE candidates may only be specified for the first media, this
     # duplicate the candidate for the other in this case
-    all_media = {k:v for k,v in call_data.items() if k in ("audio", "video")}
+    all_media = {k: v for k, v in call_data.items() if k in ("audio", "video")}
     if len(all_media) > 1 and not all(
         "candidates" in c["transport_data"] for c in all_media.values()
     ):
@@ -548,7 +546,7 @@
             media_data,
             pt_data,
             payload_type_elt,
-            triggers_no_cancel=True
+            triggers_no_cancel=True,
         )
 
     if "bandwidth" in media_data:
@@ -574,7 +572,7 @@
         desc_elt,
         media_data,
         session,
-        triggers_no_cancel=True
+        triggers_no_cancel=True,
     )
 
     return desc_elt
@@ -632,7 +630,7 @@
             media_data,
             payload_type_elt,
             payload_type_data,
-            triggers_no_cancel=True
+            triggers_no_cancel=True,
         )
         payload_types[pt_id] = payload_type_data
 
@@ -678,10 +676,7 @@
         media_data["encryption"] = encryption_data
 
     host.trigger.point(
-        "XEP-0167_parse_description",
-        desc_elt,
-        media_data,
-        triggers_no_cancel=True
+        "XEP-0167_parse_description", desc_elt, media_data, triggers_no_cancel=True
     )
 
     return media_data
--- a/libervia/backend/plugins/plugin_xep_0176.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0176.py	Wed Jun 19 18:44:57 2024 +0200
@@ -291,7 +291,7 @@
                     content_name,
                     content_data,
                     transport_elt,
-                    iq_elt
+                    iq_elt,
                 )
                 await iq_elt.send()
 
@@ -381,8 +381,8 @@
         """
         session = self._j.get_session(client, session_id)
         iq_elt: Optional[domish.Element] = None
-        content_name: str|None = None
-        content_data: dict|None = None
+        content_name: str | None = None
+        content_data: dict | None = None
 
         for media_type, new_ice_data in media_ice_data.items():
             if session["state"] == self._j.STATE_PENDING:
@@ -436,8 +436,13 @@
             assert content_name is not None and content_data is not None
             try:
                 self.host.trigger.point(
-                    "XEP-0176_ice_candidate_send", client, session, media_ice_data,
-                    content_name, content_data, iq_elt
+                    "XEP-0176_ice_candidate_send",
+                    client,
+                    session,
+                    media_ice_data,
+                    content_name,
+                    content_data,
+                    iq_elt,
                 )
                 await iq_elt.send()
             except Exception as e:
--- a/libervia/backend/plugins/plugin_xep_0184.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0184.py	Wed Jun 19 18:44:57 2024 +0200
@@ -115,7 +115,7 @@
     def send_message_trigger(
         self, client, mess_data, pre_xml_treatments, post_xml_treatments
     ):
-        """Install SendMessage command hook """
+        """Install SendMessage command hook"""
 
         def treatment(mess_data):
             message = mess_data["xml"]
--- a/libervia/backend/plugins/plugin_xep_0191.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0191.py	Wed Jun 19 18:44:57 2024 +0200
@@ -84,10 +84,7 @@
         return XEP_0191_Handler(self)
 
     @ensure_deferred
-    async def _block_list(
-        self,
-        profile_key=C.PROF_KEY_NONE
-    ) -> List[str]:
+    async def _block_list(self, profile_key=C.PROF_KEY_NONE) -> List[str]:
         client = self.host.get_client(profile_key)
         blocked_jids = await self.block_list(client)
         return [j.full() for j in blocked_jids]
@@ -113,11 +110,7 @@
 
         return blocked_jids
 
-    def _block(
-        self,
-        entities: List[str],
-        profile_key: str = C.PROF_KEY_NONE
-    ) -> str:
+    def _block(self, entities: List[str], profile_key: str = C.PROF_KEY_NONE) -> str:
         client = self.host.get_client(profile_key)
         return defer.ensureDeferred(
             self.block(client, [jid.JID(entity) for entity in entities])
@@ -132,15 +125,9 @@
             item_elt["jid"] = entity.full()
         await iq_elt.send()
 
-    def _unblock(
-        self,
-        entities: List[str],
-        profile_key: str = C.PROF_KEY_NONE
-    ) -> None:
+    def _unblock(self, entities: List[str], profile_key: str = C.PROF_KEY_NONE) -> None:
         client = self.host.get_client(profile_key)
-        return defer.ensureDeferred(
-            self.unblock(client, [jid.JID(e) for e in entities])
-        )
+        return defer.ensureDeferred(self.unblock(client, [jid.JID(e) for e in entities]))
 
     async def unblock(self, client: SatXMPPEntity, entities: List[jid.JID]) -> None:
         await self.host.check_feature(client, NS_BLOCKING)
@@ -192,15 +179,10 @@
 
     def connectionInitialized(self):
         self.xmlstream.addObserver(
-            IQ_BLOCK_PUSH,
-            self.plugin_parent.on_block_push,
-            client=self.parent
-
+            IQ_BLOCK_PUSH, self.plugin_parent.on_block_push, client=self.parent
         )
         self.xmlstream.addObserver(
-            IQ_UNBLOCK_PUSH,
-            self.plugin_parent.on_unblock_push,
-            client=self.parent
+            IQ_UNBLOCK_PUSH, self.plugin_parent.on_unblock_push, client=self.parent
         )
 
     def getDiscoInfo(self, requestor, service, nodeIdentifier=""):
--- a/libervia/backend/plugins/plugin_xep_0198.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0198.py	Wed Jun 19 18:44:57 2024 +0200
@@ -58,7 +58,7 @@
 # Max number of seconds before requesting ack
 MAX_DELAY_ACK_R = 30
 MAX_COUNTER = 2**32
-RESUME_MAX = 5*60
+RESUME_MAX = 5 * 60
 # if we don't have an answer to ACK REQUEST after this delay, connection is aborted
 ACK_TIMEOUT = 35
 
@@ -92,8 +92,7 @@
     def enabled(self, enabled):
         if enabled:
             if self._enabled:
-                raise exceptions.InternalError(
-                    "Stream Management can't be enabled twice")
+                raise exceptions.InternalError("Stream Management can't be enabled twice")
             self._enabled = True
             callback, kw = self.callback_data
             self.timer = task.LoopingCall(callback, **kw)
@@ -133,25 +132,27 @@
     def __init__(self, host):
         log.info(_("Plugin Stream Management initialization"))
         self.host = host
-        host.register_namespace('sm', NS_SM)
+        host.register_namespace("sm", NS_SM)
         host.trigger.add("stream_hooks", self.add_hooks)
         host.trigger.add("xml_init", self._xml_init_trigger)
         host.trigger.add("disconnecting", self._disconnecting_trigger)
         host.trigger.add("disconnected", self._disconnected_trigger)
         try:
-            self._ack_timeout = int(host.memory.config_get("", "ack_timeout", ACK_TIMEOUT))
+            self._ack_timeout = int(
+                host.memory.config_get("", "ack_timeout", ACK_TIMEOUT)
+            )
         except ValueError:
             log.error(_("Invalid ack_timeout value, please check your configuration"))
             self._ack_timeout = ACK_TIMEOUT
         if not self._ack_timeout:
             log.info(_("Ack timeout disabled"))
         else:
-            log.info(_("Ack timeout set to {timeout}s").format(
-                timeout=self._ack_timeout))
+            log.info(_("Ack timeout set to {timeout}s").format(timeout=self._ack_timeout))
 
     def profile_connecting(self, client):
-        client._xep_0198_session = ProfileSessionData(callback=self.check_acks,
-                                                      client=client)
+        client._xep_0198_session = ProfileSessionData(
+            callback=self.check_acks, client=client
+        )
 
     def get_handler(self, client):
         return XEP_0198_handler(self)
@@ -164,12 +165,16 @@
 
     def _xml_init_trigger(self, client):
         """Enable or resume a stream mangement"""
-        if not (NS_SM, 'sm') in client.xmlstream.features:
-            log.warning(_(
-                "Your server doesn't support stream management ({namespace}), this is "
-                "used to improve connection problems detection (like network outages). "
-                "Please ask your server administrator to enable this feature.".format(
-                namespace=NS_SM)))
+        if not (NS_SM, "sm") in client.xmlstream.features:
+            log.warning(
+                _(
+                    "Your server doesn't support stream management ({namespace}), this is "
+                    "used to improve connection problems detection (like network outages). "
+                    "Please ask your server administrator to enable this feature.".format(
+                        namespace=NS_SM
+                    )
+                )
+            )
             return True
         session = client._xep_0198_session
 
@@ -185,9 +190,9 @@
 
         if session.resume_enabled:
             # we are resuming a session
-            resume_elt = domish.Element((NS_SM, 'resume'))
-            resume_elt['h'] = str(session.in_counter)
-            resume_elt['previd'] = session.session_id
+            resume_elt = domish.Element((NS_SM, "resume"))
+            resume_elt["h"] = str(session.in_counter)
+            resume_elt["previd"] = session.session_id
             client.send(resume_elt)
             session.resuming = True
             # session.enabled will be set on <resumed/> reception
@@ -195,8 +200,8 @@
         else:
             # we start a new session
             assert session.out_counter == 0
-            enable_elt = domish.Element((NS_SM, 'enable'))
-            enable_elt['resume'] = 'true'
+            enable_elt = domish.Element((NS_SM, "enable"))
+            enable_elt["resume"] = "true"
             client.send(enable_elt)
             session.enabled = True
             return True
@@ -217,9 +222,9 @@
         session.enabled = False
         if session.resume_enabled:
             session.disconnected_time = time.time()
-            session.disconnect_timer = reactor.callLater(session.session_max,
-                                                         client.disconnect_profile,
-                                                         reason)
+            session.disconnect_timer = reactor.callLater(
+                session.session_max, client.disconnect_profile, reason
+            )
             # disconnect_profile must not be called at this point
             # because session can be resumed
             return False
@@ -234,8 +239,10 @@
         #             session.buffer_idx))
         if session.ack_requested or not session.buffer:
             return
-        if (session.out_counter - session.buffer_idx >= MAX_STANZA_ACK_R
-            or time.time() - session.last_ack_r >= MAX_DELAY_ACK_R):
+        if (
+            session.out_counter - session.buffer_idx >= MAX_STANZA_ACK_R
+            or time.time() - session.last_ack_r >= MAX_DELAY_ACK_R
+        ):
             self.request_ack(client)
             session.ack_requested = True
             session.last_ack_r = time.time()
@@ -253,8 +260,9 @@
                     "diff = {diff}\n"
                     "server_acked = {server_acked}\n"
                     "buffer_idx = {buffer_id}".format(
-                        diff=diff, server_acked=server_acked,
-                        buffer_id=session.buffer_idx))
+                        diff=diff, server_acked=server_acked, buffer_id=session.buffer_idx
+                    )
+                )
             session.buffer_idx += diff
 
     def replay_buffer(self, client, buffer_, discard_results=False):
@@ -270,37 +278,47 @@
             except IndexError:
                 break
             else:
-                if ((discard_results
-                     and stanza.name == 'iq'
-                     and stanza.getAttribute('type') == 'result')):
+                if (
+                    discard_results
+                    and stanza.name == "iq"
+                    and stanza.getAttribute("type") == "result"
+                ):
                     continue
                 client.send(stanza)
 
     def send_ack(self, client):
         """Send an answer element with current IN counter"""
-        a_elt = domish.Element((NS_SM, 'a'))
-        a_elt['h'] = str(client._xep_0198_session.in_counter)
+        a_elt = domish.Element((NS_SM, "a"))
+        a_elt["h"] = str(client._xep_0198_session.in_counter)
         client.send(a_elt)
 
     def request_ack(self, client):
         """Send a request element"""
         session = client._xep_0198_session
-        r_elt = domish.Element((NS_SM, 'r'))
+        r_elt = domish.Element((NS_SM, "r"))
         client.send(r_elt)
         if session.req_timer is not None:
             raise exceptions.InternalError("req_timer should not be set")
         if self._ack_timeout:
-            session.req_timer = reactor.callLater(self._ack_timeout, self.on_ack_time_out,
-                                                  client)
+            session.req_timer = reactor.callLater(
+                self._ack_timeout, self.on_ack_time_out, client
+            )
 
     def _connectionFailed(self, failure_, connector):
         normal_host, normal_port = connector.normal_location
         del connector.normal_location
-        log.warning(_(
-            "Connection failed using location given by server (host: {host}, port: "
-            "{port}), switching to normal host and port (host: {normal_host}, port: "
-            "{normal_port})".format(host=connector.host, port=connector.port,
-                                     normal_host=normal_host, normal_port=normal_port)))
+        log.warning(
+            _(
+                "Connection failed using location given by server (host: {host}, port: "
+                "{port}), switching to normal host and port (host: {normal_host}, port: "
+                "{normal_port})".format(
+                    host=connector.host,
+                    port=connector.port,
+                    normal_host=normal_host,
+                    normal_port=normal_port,
+                )
+            )
+        )
         connector.host, connector.port = normal_host, normal_port
         connector.connectionFailed = connector.connectionFailed_ori
         del connector.connectionFailed_ori
@@ -311,14 +329,17 @@
         session.in_counter = 0
 
         # we check that resuming is possible and that we have a session id
-        resume = C.bool(enabled_elt.getAttribute('resume'))
-        session_id = enabled_elt.getAttribute('id')
+        resume = C.bool(enabled_elt.getAttribute("resume"))
+        session_id = enabled_elt.getAttribute("id")
         if not session_id:
             log.warning(_('Incorrect <enabled/> element received, no "id" attribute'))
         if not resume or not session_id:
-            log.warning(_(
-                "You're server doesn't support session resuming with stream management, "
-                "please contact your server administrator to enable it"))
+            log.warning(
+                _(
+                    "You're server doesn't support session resuming with stream management, "
+                    "please contact your server administrator to enable it"
+                )
+            )
             return
 
         session.session_id = session_id
@@ -329,17 +350,18 @@
 
         # location, in case server want resuming session to be elsewhere
         try:
-            location = enabled_elt['location']
+            location = enabled_elt["location"]
         except KeyError:
             pass
         else:
             # TODO: handle IPv6 here (in brackets, cf. XEP)
             try:
-                domain, port = location.split(':', 1)
+                domain, port = location.split(":", 1)
                 port = int(port)
             except ValueError:
-                log.warning(_("Invalid location received: {location}")
-                    .format(location=location))
+                log.warning(
+                    _("Invalid location received: {location}").format(location=location)
+                )
             else:
                 session.location = (domain, port)
                 # we monkey patch connector to use the new location
@@ -348,30 +370,36 @@
                 connector.host = domain
                 connector.port = port
                 connector.connectionFailed_ori = connector.connectionFailed
-                connector.connectionFailed = partial(self._connectionFailed,
-                                                     connector=connector)
+                connector.connectionFailed = partial(
+                    self._connectionFailed, connector=connector
+                )
 
         # resuming time
         try:
-            max_s = int(enabled_elt['max'])
+            max_s = int(enabled_elt["max"])
         except (ValueError, KeyError) as e:
             if isinstance(e, ValueError):
                 log.warning(_('Invalid "max" attribute'))
             max_s = RESUME_MAX
-            log.info(_("Using default session max value ({max_s} s).".format(
-                max_s=max_s)))
+            log.info(
+                _("Using default session max value ({max_s} s).".format(max_s=max_s))
+            )
             log.info(_("Stream Management enabled"))
         else:
-            log.info(_(
-                "Stream Management enabled, with a resumption time of {res_m:.2f} min"
-                .format(res_m = max_s/60)))
+            log.info(
+                _(
+                    "Stream Management enabled, with a resumption time of {res_m:.2f} min".format(
+                        res_m=max_s / 60
+                    )
+                )
+            )
         session.session_max = max_s
 
     def on_resumed(self, enabled_elt, client):
         session = client._xep_0198_session
         assert not session.enabled
         del session.resuming
-        server_acked = int(enabled_elt['h'])
+        server_acked = int(enabled_elt["h"])
         self.update_buffer(session, server_acked)
         resend_count = len(session.buffer)
         # we resend all stanza which have not been received properly
@@ -379,8 +407,12 @@
         # now we can continue the session
         session.enabled = True
         d_time = time.time() - session.disconnected_time
-        log.info(_("Stream session resumed (disconnected for {d_time} s, {count} "
-                   "stanza(s) resent)").format(d_time=int(d_time), count=resend_count))
+        log.info(
+            _(
+                "Stream session resumed (disconnected for {d_time} s, {count} "
+                "stanza(s) resent)"
+            ).format(d_time=int(d_time), count=resend_count)
+        )
 
     def on_failed(self, failed_elt, client):
         session = client._xep_0198_session
@@ -394,10 +426,9 @@
             # stream management can't be started at all
             msg = _("Can't use stream management")
             if condition_elt is None:
-                log.error(msg + '.')
+                log.error(msg + ".")
             else:
-                log.error(_("{msg}: {reason}").format(
-                msg=msg, reason=condition_elt.name))
+                log.error(_("{msg}: {reason}").format(msg=msg, reason=condition_elt.name))
         else:
             # only stream resumption failed, we can try full session init
             # XXX: we try to start full session init from this point, with many
@@ -408,10 +439,9 @@
             msg = _("stream resumption not possible, restarting full session")
 
             if condition_elt is None:
-                log.warning('{msg}.'.format(msg=msg))
+                log.warning("{msg}.".format(msg=msg))
             else:
-                log.warning("{msg}: {reason}".format(
-                    msg=msg, reason=condition_elt.name))
+                log.warning("{msg}: {reason}".format(msg=msg, reason=condition_elt.name))
             # stream resumption failed, but we still can do normal stream management
             # we restore attributes as if the session was new, and init stream
             # we keep everything initialized, and only do binding, roster request
@@ -420,15 +450,15 @@
                 client.conn_deferred = defer.Deferred()
             else:
                 log.error("conn_deferred should be called at this point")
-            plg_0045 = self.host.plugins.get('XEP-0045')
-            plg_0313 = self.host.plugins.get('XEP-0313')
+            plg_0045 = self.host.plugins.get("XEP-0045")
+            plg_0313 = self.host.plugins.get("XEP-0313")
 
             # FIXME: we should call all loaded plugins with generic callbacks
             #        (e.g. prepareResume and resume), so a hot resuming can be done
             #        properly for all plugins.
 
             if plg_0045 is not None:
-                # we have to remove joined rooms
+                # we have to remove joined rooms
                 muc_join_args = plg_0045.pop_rooms(client)
             # we need to recreate roster
             client.handlers.remove(client.roster)
@@ -439,7 +469,9 @@
             bind_init.required = True
             d = bind_init.start()
             # we set the jid, which may have changed
-            d.addCallback(lambda __: setattr(client.factory.authenticator, "jid", client.jid))
+            d.addCallback(
+                lambda __: setattr(client.factory.authenticator, "jid", client.jid)
+            )
             # we call the trigger who will send the <enable/> element
             d.addCallback(lambda __: self._xml_init_trigger(client))
             # then we have to re-request the roster, as changes may have occured
@@ -454,14 +486,14 @@
             if plg_0045 is not None:
                 # we re-join MUC rooms
                 muc_d_list = defer.DeferredList(
-                    [defer.ensureDeferred(plg_0045.join(*args))
-                     for args in muc_join_args]
+                    [defer.ensureDeferred(plg_0045.join(*args)) for args in muc_join_args]
                 )
                 d.addCallback(lambda __: muc_d_list)
             # at the end we replay the buffer, as those stanzas have probably not
             # been received
-            d.addCallback(lambda __: self.replay_buffer(client, buffer_,
-                                                       discard_results=True))
+            d.addCallback(
+                lambda __: self.replay_buffer(client, buffer_, discard_results=True)
+            )
 
     def on_receive(self, element, client):
         if not client.is_component:
@@ -472,9 +504,11 @@
     def on_send(self, obj, client):
         if not client.is_component:
             session = client._xep_0198_session
-            if (session.enabled
+            if (
+                session.enabled
                 and domish.IElement.providedBy(obj)
-                and obj.name.lower() in C.STANZA_NAMES):
+                and obj.name.lower() in C.STANZA_NAMES
+            ):
                 session.out_counter += 1 % MAX_COUNTER
                 session.buffer.appendleft(obj)
                 self.check_acks(client)
@@ -492,16 +526,24 @@
                 session.req_timer.cancel()
                 session.req_timer = None
         try:
-            server_acked = int(a_elt['h'])
+            server_acked = int(a_elt["h"])
         except ValueError:
-            log.warning(_("Server returned invalid ack element, disabling stream "
-                          "management: {xml}").format(xml=a_elt))
+            log.warning(
+                _(
+                    "Server returned invalid ack element, disabling stream "
+                    "management: {xml}"
+                ).format(xml=a_elt)
+            )
             session.enabled = False
             return
 
         if server_acked > session.out_counter:
-            log.error(_("Server acked more stanzas than we have sent, disabling stream "
-                        "management."))
+            log.error(
+                _(
+                    "Server acked more stanzas than we have sent, disabling stream "
+                    "management."
+                )
+            )
             session.reset()
             return
 
--- a/libervia/backend/plugins/plugin_xep_0199.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0199.py	Wed Jun 19 18:44:57 2024 +0200
@@ -49,7 +49,8 @@
         log.info(_("XMPP Ping plugin initialization"))
         self.host = host
         host.bridge.add_method(
-            "ping", ".plugin", in_sign='ss', out_sign='d', method=self._ping, async_=True)
+            "ping", ".plugin", in_sign="ss", out_sign="d", method=self._ping, async_=True
+        )
         try:
             self.text_cmds = self.host.plugins[C.TEXT_CMDS]
         except KeyError:
@@ -106,8 +107,12 @@
             txt_cmd.feed_back(client, "PONG ({time} s)".format(time=pong[1]), mess_data)
         else:
             txt_cmd.feed_back(
-                client, _("ping error ({err_msg}). Response time: {time} s")
-                .format(err_msg=pong[0], time=pong[1]), mess_data)
+                client,
+                _("ping error ({err_msg}). Response time: {time} s").format(
+                    err_msg=pong[0], time=pong[1]
+                ),
+                mess_data,
+            )
 
     def cmd_ping(self, client, mess_data):
         """ping an entity
@@ -120,8 +125,13 @@
                 entity_jid = jid.JID(mess_data["unparsed"].strip())
             except RuntimeError:
                 txt_cmd = self.host.plugins[C.TEXT_CMDS]
-                txt_cmd.feed_back(client, _('Invalid jid: "{entity_jid}"').format(
-                    entity_jid=mess_data["unparsed"].strip()), mess_data)
+                txt_cmd.feed_back(
+                    client,
+                    _('Invalid jid: "{entity_jid}"').format(
+                        entity_jid=mess_data["unparsed"].strip()
+                    ),
+                    mess_data,
+                )
                 return False
         else:
             entity_jid = mess_data["to"]
@@ -131,8 +141,11 @@
         return False
 
     def on_ping_request(self, iq_elt, client):
-        log.info(_("XMPP PING received from {from_jid} [{profile}]").format(
-            from_jid=iq_elt["from"], profile=client.profile))
+        log.info(
+            _("XMPP PING received from {from_jid} [{profile}]").format(
+                from_jid=iq_elt["from"], profile=client.profile
+            )
+        )
         iq_elt.handled = True
         iq_result_elt = xmlstream.toResponse(iq_elt, "result")
         client.send(iq_result_elt)
--- a/libervia/backend/plugins/plugin_xep_0231.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0231.py	Wed Jun 19 18:44:57 2024 +0200
@@ -185,12 +185,13 @@
             client.send(error_elt)
             return
 
-        with open(metadata["path"], 'rb') as f:
+        with open(metadata["path"], "rb") as f:
             data = f.read()
 
         result_elt = xmlstream.toResponse(iq_elt, "result")
         data_elt = result_elt.addElement(
-            (NS_BOB, "data"), content=base64.b64encode(data).decode())
+            (NS_BOB, "data"), content=base64.b64encode(data).decode()
+        )
         data_elt["cid"] = cid
         data_elt["type"] = metadata["mime_type"]
         data_elt["max-age"] = str(int(max(0, metadata["eol"] - time.time())))
@@ -240,7 +241,9 @@
     def connectionInitialized(self):
         if self.parent.is_component:
             self.xmlstream.addObserver(
-                IQ_BOB_REQUEST, self.plugin_parent.on_component_request, client=self.parent
+                IQ_BOB_REQUEST,
+                self.plugin_parent.on_component_request,
+                client=self.parent,
             )
 
     def getDiscoInfo(self, requestor, target, nodeIdentifier=""):
--- a/libervia/backend/plugins/plugin_xep_0234.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0234.py	Wed Jun 19 18:44:57 2024 +0200
@@ -63,8 +63,15 @@
 
 # TODO: use a Pydantic model for extra
 EXTRA_ALLOWED = {
-    "path", "namespace", "file_desc", "file_hash", "hash_algo", "webrtc", "call_data",
-    "size", "media_type"
+    "path",
+    "namespace",
+    "file_desc",
+    "file_hash",
+    "hash_algo",
+    "webrtc",
+    "call_data",
+    "size",
+    "media_type",
 }
 Range = namedtuple("Range", ("offset", "length"))
 
@@ -125,9 +132,21 @@
     # generic methods
 
     def build_file_element(
-        self, client, name=None, file_hash=None, hash_algo=None, size=None,
-        mime_type=None, desc=None, modified=None, transfer_range=None, path=None,
-        namespace=None, file_elt=None, **kwargs):
+        self,
+        client,
+        name=None,
+        file_hash=None,
+        hash_algo=None,
+        size=None,
+        mime_type=None,
+        desc=None,
+        modified=None,
+        transfer_range=None,
+        path=None,
+        namespace=None,
+        file_elt=None,
+        **kwargs,
+    ):
         """Generate a <file> element with available metadata
 
         @param file_hash(unicode, None): hash of the file
@@ -183,7 +202,8 @@
         elif hash_algo is not None:
             file_elt.addChild(self._hash.build_hash_used_elt(hash_algo))
         self.host.trigger.point(
-            "XEP-0234_buildFileElement", client, file_elt, extra_args=kwargs)
+            "XEP-0234_buildFileElement", client, file_elt, extra_args=kwargs
+        )
         if kwargs:
             for kw in kwargs:
                 log.debug("ignored keyword: {}".format(kw))
@@ -209,11 +229,11 @@
     async def parse_file_element(
         self,
         client: SatXMPPEntity,
-        file_elt: domish.Element|None,
+        file_elt: domish.Element | None,
         file_data: dict | None = None,
         given: bool = False,
         parent_elt: domish.Element | None = None,
-        keep_empty_range: bool = False
+        keep_empty_range: bool = False,
     ) -> dict:
         """Parse a <file> element and updates file dictionary accordingly.
 
@@ -299,7 +319,7 @@
         elif name == "..":
             # we don't want to go to parent dir when joining to a path
             file_data["name"] = "--"
-        elif  "/" in name or "\\" in name:
+        elif "/" in name or "\\" in name:
             file_data["name"] = regex.path_escape(name)
 
         try:
@@ -310,9 +330,7 @@
             pass
 
         try:
-            file_data["size"] = int(
-                str(next(file_elt.elements(NS_JINGLE_FT, "size")))
-            )
+            file_data["size"] = int(str(next(file_elt.elements(NS_JINGLE_FT, "size"))))
         except StopIteration:
             pass
 
@@ -359,14 +377,16 @@
     ) -> defer.Deferred[str]:
         client = self.host.get_client(profile)
         extra = data_format.deserialise(extra_s)
-        d = defer.ensureDeferred(self.file_send(
-            client,
-            jid.JID(peer_jid_s),
-            filepath,
-            name or None,
-            file_desc or None,
-            extra,
-        ))
+        d = defer.ensureDeferred(
+            self.file_send(
+                client,
+                jid.JID(peer_jid_s),
+                filepath,
+                name or None,
+                file_desc or None,
+                extra,
+            )
+        )
         d.addCallback(data_format.serialise)
         return d
 
@@ -375,9 +395,9 @@
         client: SatXMPPEntity,
         peer_jid: jid.JID,
         filepath: str,
-        name: str|None,
-        file_desc: str|None = None,
-        extra: dict|None = None
+        name: str | None,
+        file_desc: str | None = None,
+        extra: dict | None = None,
     ) -> dict:
         """Send a file using jingle file transfer
 
@@ -406,39 +426,48 @@
         }
 
         await self.host.trigger.async_point(
-            "XEP-0234_file_jingle_send",
-            client, peer_jid, content
+            "XEP-0234_file_jingle_send", client, peer_jid, content
         )
 
         session_id = await self._j.initiate(
-            client,
-            peer_jid,
-            [content],
-            encrypted = encrypted
+            client, peer_jid, [content], encrypted=encrypted
         )
         progress_id = await progress_id_d
-        return {
-            "progress": progress_id,
-            "session_id": session_id
-        }
+        return {"progress": progress_id, "session_id": session_id}
 
     def _file_jingle_request(
-            self, peer_jid, filepath, name="", file_hash="", hash_algo="", extra=None,
-            profile=C.PROF_KEY_NONE):
+        self,
+        peer_jid,
+        filepath,
+        name="",
+        file_hash="",
+        hash_algo="",
+        extra=None,
+        profile=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile)
-        return defer.ensureDeferred(self.file_jingle_request(
-            client,
-            jid.JID(peer_jid),
-            filepath,
-            name or None,
-            file_hash or None,
-            hash_algo or None,
-            extra or None,
-        ))
+        return defer.ensureDeferred(
+            self.file_jingle_request(
+                client,
+                jid.JID(peer_jid),
+                filepath,
+                name or None,
+                file_hash or None,
+                hash_algo or None,
+                extra or None,
+            )
+        )
 
     async def file_jingle_request(
-            self, client, peer_jid, filepath, name=None, file_hash=None, hash_algo=None,
-            extra=None):
+        self,
+        client,
+        peer_jid,
+        filepath,
+        name=None,
+        file_hash=None,
+        hash_algo=None,
+        extra=None,
+    ):
         """Request a file using jingle file transfer
 
         @param peer_jid(jid.JID): destinee jid
@@ -480,23 +509,20 @@
     # jingle callbacks
 
     def _get_confirm_msg(
-        self,
-        client: SatXMPPEntity,
-        peer_jid: jid.JID,
-        file_data: dict
+        self, client: SatXMPPEntity, peer_jid: jid.JID, file_data: dict
     ) -> tuple[bool, str, str]:
         """Get confirmation message to display to user.
 
         This is the message to show when a file sending request is received."""
-        file_name = file_data.get('name')
-        file_size = file_data.get('size')
+        file_name = file_data.get("name")
+        file_size = file_data.get("size")
 
         if file_name:
             file_name_msg = D_('wants to send you the file "{file_name}"').format(
                 file_name=file_name
             )
         else:
-            file_name_msg = D_('wants to send you an unnamed file')
+            file_name_msg = D_("wants to send you an unnamed file")
 
         if file_size is not None:
             file_size_msg = D_("which has a size of {file_size_human}").format(
@@ -505,7 +531,7 @@
         else:
             file_size_msg = D_("which has an unknown size")
 
-        file_description = file_data.get('desc')
+        file_description = file_data.get("desc")
         if file_description:
             description_msg = " Description: {}.".format(file_description)
         else:
@@ -516,23 +542,30 @@
             confirm_msg = D_(
                 "Somebody not in your contact list ({peer_jid}) {file_name_msg} {file_size_msg}.{description_msg} "
                 "Accepting this could leak your presence and possibly your IP address. Do you accept?"
-            ).format(peer_jid=peer_jid, file_name_msg=file_name_msg, file_size_msg=file_size_msg, description_msg=description_msg)
+            ).format(
+                peer_jid=peer_jid,
+                file_name_msg=file_name_msg,
+                file_size_msg=file_size_msg,
+                description_msg=description_msg,
+            )
             confirm_title = D_("File sent from an unknown contact")
         else:
             is_in_roster = True
             confirm_msg = D_(
                 "{peer_jid} {file_name_msg} {file_size_msg}.{description_msg} Do you "
                 "accept?"
-            ).format(peer_jid=peer_jid, file_name_msg=file_name_msg, file_size_msg=file_size_msg, description_msg=description_msg)
+            ).format(
+                peer_jid=peer_jid,
+                file_name_msg=file_name_msg,
+                file_size_msg=file_size_msg,
+                description_msg=description_msg,
+            )
             confirm_title = D_("File Proposed")
 
         return (is_in_roster, confirm_msg, confirm_title)
 
     async def jingle_preflight(
-        self,
-        client: SatXMPPEntity,
-        session: dict,
-        description_elt: domish.Element
+        self, client: SatXMPPEntity, session: dict, description_elt: domish.Element
     ) -> None:
         """Perform preflight checks for an incoming call session.
 
@@ -568,7 +601,7 @@
             "type": action_type,
             "session_id": session_id,
             "from_jid": peer_jid.full(),
-            "file_data": file_data
+            "file_data": file_data,
         }
         action_extra["subtype"] = C.META_TYPE_FILE
         accepted = await xml_tools.defer_confirm(
@@ -576,7 +609,7 @@
             confirm_msg,
             confirm_title,
             profile=client.profile,
-            action_extra=action_extra
+            action_extra=action_extra,
         )
         if accepted:
             session["pre_accepted"] = True
@@ -587,15 +620,12 @@
         client: SatXMPPEntity,
         session: dict,
         info_type: str,
-        info_data: dict|None = None
+        info_data: dict | None = None,
     ) -> None:
         pass
 
     async def jingle_preflight_cancel(
-        self,
-        client: SatXMPPEntity,
-        session: dict,
-        cancel_error: exceptions.CancelError
+        self, client: SatXMPPEntity, session: dict, cancel_error: exceptions.CancelError
     ) -> None:
         pass
 
@@ -626,7 +656,8 @@
         application_data["file_path"] = filepath
         file_data = application_data["file_data"] = {}
         desc_elt = self.jingle_description_elt(
-            client, session, content_name, filepath, name, extra, progress_id_d)
+            client, session, content_name, filepath, name, extra, progress_id_d
+        )
         file_elt = desc_elt.addElement("file")
 
         if content_data["senders"] == self._j.ROLE_INITIATOR:
@@ -650,7 +681,8 @@
             if "path" in extra:
                 file_data["path"] = extra["path"]
             self.build_file_element_from_dict(
-                client, file_data, file_elt=file_elt, file_hash="")
+                client, file_data, file_elt=file_elt, file_hash=""
+            )
         else:
             # we request a file
             file_hash = extra.pop("file_hash", "")
@@ -740,7 +772,7 @@
         content_data: dict,
         content_name: str,
         file_data: dict,
-        file_elt: domish.Element
+        file_elt: domish.Element,
     ) -> bool:
         """parse file_elt, and handle user permission/file opening"""
         transport_data = content_data["transport_data"]
@@ -781,7 +813,7 @@
                 confirm_msg,
                 confirm_title,
                 profile=client.profile,
-                action_extra=action_extra
+                action_extra=action_extra,
             )
         else:
 
@@ -806,7 +838,10 @@
         if confirmed:
             await self.host.trigger.async_point(
                 "XEP-0234_file_receiving_request_conf",
-                client, session, content_data, file_elt
+                client,
+                session,
+                content_data,
+                file_elt,
             )
             args = [client, session, content_name, content_data]
             finished_d.addCallbacks(
@@ -870,7 +905,8 @@
                             progress_id, C.PROGRESS_ERROR_FAILED, client.profile
                         )
                         await self._j.terminate(
-                            client, self._j.REASON_FAILED_APPLICATION, session)
+                            client, self._j.REASON_FAILED_APPLICATION, session
+                        )
                         raise e
                 else:
                     # we are sending the file
@@ -889,10 +925,11 @@
 
             finished_d = content_data["finished_d"] = defer.Deferred()
             args = [client, session, content_name, content_data]
-            finished_d.addCallbacks(self._finished_cb, self._finished_eb, args, None, args)
+            finished_d.addCallbacks(
+                self._finished_cb, self._finished_eb, args, None, args
+            )
             await self.host.trigger.async_point(
-                "XEP-0234_jingle_handler",
-                client, session, content_data, desc_elt
+                "XEP-0234_jingle_handler", client, session, content_data, desc_elt
             )
         else:
             log.warning("FIXME: unmanaged action {}".format(action))
@@ -959,9 +996,7 @@
                     reason = f"{reason} - {reason_elt.text}"
             else:
                 reason = C.PROGRESS_ERROR_FAILED
-            self.host.bridge.progress_error(
-                progress_id, reason, client.profile
-            )
+            self.host.bridge.progress_error(progress_id, reason, client.profile)
 
     def _send_check_sum(self, client, session, content_name, content_data):
         """Send the session-info with the hash checksum"""
--- a/libervia/backend/plugins/plugin_xep_0249.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0249.py	Wed Jun 19 18:44:57 2024 +0200
@@ -33,7 +33,6 @@
 log = getLogger(__name__)
 
 
-
 try:
     from twisted.words.protocols.xmlstream import XMPPHandler
 except ImportError:
@@ -95,7 +94,7 @@
             self.host.plugins[C.TEXT_CMDS].register_text_commands(self)
         except KeyError:
             log.info(_("Text commands not available"))
-        host.register_namespace('x-conference', NS_X_CONFERENCE)
+        host.register_namespace("x-conference", NS_X_CONFERENCE)
         host.trigger.add("message_received", self._message_received_trigger)
 
     def get_handler(self, client):
@@ -120,7 +119,7 @@
         room: jid.JID,
         # the dict is only used internally, so we can safely use a default dict instead of
         # None here.
-        **options
+        **options,
     ) -> None:
         """Invite a user to a room
 
@@ -156,15 +155,16 @@
 
     def _message_received_trigger(self, client, message_elt, post_treat):
         """Check if a direct invitation is in the message, and handle it"""
-        x_elt = next(message_elt.elements(NS_X_CONFERENCE, 'x'), None)
+        x_elt = next(message_elt.elements(NS_X_CONFERENCE, "x"), None)
         if x_elt is None:
             return True
 
         try:
             room_jid_s = x_elt["jid"]
         except KeyError:
-            log.warning(_("invalid invitation received: {xml}").format(
-                xml=message_elt.toXml()))
+            log.warning(
+                _("invalid invitation received: {xml}").format(xml=message_elt.toXml())
+            )
             return False
         log.info(
             _("Invitation received for room %(room)s [%(profile)s]")
@@ -200,7 +200,7 @@
                 "type": C.META_TYPE_CONFIRM,
                 "subtype": C.META_TYPE_MUC_INVIRATION,
                 "from_jid": from_jid_s,
-                "room_jid": room_jid_s
+                "room_jid": room_jid_s,
             }
             confirm_msg = D_(
                 "You have been invited by %(user)s to join the room %(room)s. "
@@ -208,8 +208,11 @@
             ) % {"user": from_jid_s, "room": room_jid_s}
             confirm_title = D_("MUC invitation")
             d = xml_tools.defer_confirm(
-                self.host, confirm_msg, confirm_title, profile=client.profile,
-                action_extra=action_extra
+                self.host,
+                confirm_msg,
+                confirm_title,
+                profile=client.profile,
+                action_extra=action_extra,
             )
 
             def accept_cb(accepted):
--- a/libervia/backend/plugins/plugin_xep_0260.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0260.py	Wed Jun 19 18:44:57 2024 +0200
@@ -97,7 +97,9 @@
             # self._s5b.registerCandidate(candidate)
         return candidates
 
-    def _build_candidates(self, session, candidates, sid, session_hash, client, mode=None):
+    def _build_candidates(
+        self, session, candidates, sid, session_hash, client, mode=None
+    ):
         """Build <transport> element with candidates
 
         @param session(dict): jingle session data
@@ -147,7 +149,8 @@
         )  # requester and target are inversed for peer candidates
         transport_data["stream_d"] = self._s5b.register_hash(client, session_hash, None)
         candidates = transport_data["candidates"] = await self._s5b.get_candidates(
-            client, session["local_jid"])
+            client, session["local_jid"]
+        )
         mode = "tcp"  # XXX: we only manage tcp for now
         transport_elt = self._build_candidates(
             session, candidates, sid, session_hash, client, mode
@@ -155,7 +158,9 @@
 
         return transport_elt
 
-    def _proxy_activated_cb(self, iq_result_elt, client, candidate, session, content_name):
+    def _proxy_activated_cb(
+        self, iq_result_elt, client, candidate, session, content_name
+    ):
         """Called when activation confirmation has been received from proxy
 
         cf XEP-0260 § 2.4
@@ -304,7 +309,11 @@
                     )
                     args = [client, choosed_candidate, session, content_name]
                     d.addCallbacks(
-                        self._proxy_activated_cb, self._proxy_activated_eb, args, None, args
+                        self._proxy_activated_cb,
+                        self._proxy_activated_eb,
+                        args,
+                        None,
+                        args,
                     )
                 else:
                     # this Deferred will be called when we'll receive activation confirmation from other peer
@@ -356,9 +365,7 @@
                 log.warning("No cid found in <candidate-used>")
                 raise exceptions.DataError
             try:
-                candidate = next((
-                    c for c in transport_data["candidates"] if c.id == cid
-                ))
+                candidate = next((c for c in transport_data["candidates"] if c.id == cid))
             except StopIteration:
                 log.warning("Given cid doesn't correspond to any known candidate !")
                 raise exceptions.DataError  # TODO: send an error to other peer, and use better exception
@@ -433,10 +440,8 @@
             session_hash = transport_data["session_hash"] = self._s5b.get_session_hash(
                 session["local_jid"], session["peer_jid"], sid
             )
-            peer_session_hash = transport_data[
-                "peer_session_hash"
-            ] = self._s5b.get_session_hash(
-                session["peer_jid"], session["local_jid"], sid
+            peer_session_hash = transport_data["peer_session_hash"] = (
+                self._s5b.get_session_hash(session["peer_jid"], session["local_jid"], sid)
             )  # requester and target are inversed for peer candidates
             peer_candidates = transport_data["peer_candidates"] = self._parse_candidates(
                 transport_elt
@@ -505,8 +510,8 @@
     def _do_fallback(self, feature_checked, session, content_name, client):
         """Do the fallback, method called once feature is checked
 
-         @param feature_checked(bool): True if other peer can do IBB
-         """
+        @param feature_checked(bool): True if other peer can do IBB
+        """
         if not feature_checked:
             log.warning(
                 "Other peer can't manage jingle IBB, be have to terminate the session"
--- a/libervia/backend/plugins/plugin_xep_0261.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0261.py	Wed Jun 19 18:44:57 2024 +0200
@@ -97,7 +97,8 @@
                 d.chainDeferred(content_data["finished_d"])
             else:
                 d = self._ibb.create_session(
-                    client, stream_object, local_jid, peer_jid, sid)
+                    client, stream_object, local_jid, peer_jid, sid
+                )
                 d.chainDeferred(content_data["finished_d"])
         else:
             log.warning("FIXME: unmanaged action {}".format(action))
--- a/libervia/backend/plugins/plugin_xep_0264.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0264.py	Wed Jun 19 18:44:57 2024 +0200
@@ -124,7 +124,7 @@
 
         if thumbnails:
             # we want thumbnails ordered from smallest to biggest
-            thumbnails.sort(key=lambda t: t.get('size', (0, 0)))
+            thumbnails.sort(key=lambda t: t.get("size", (0, 0)))
             file_data.setdefault("extra", {})[C.KEY_THUMBNAILS] = thumbnails
         return True
 
@@ -141,8 +141,8 @@
         return hashlib.sha256(repr((image_uid, size)).encode()).hexdigest()
 
     def _blocking_gen_thumb(
-            self, source_path, size=None, max_age=None, image_uid=None,
-            fix_orientation=True):
+        self, source_path, size=None, max_age=None, image_uid=None, fix_orientation=True
+    ):
         """Generate a thumbnail for image
 
         This is a blocking method and must be executed in a thread
@@ -171,7 +171,8 @@
         return img.size, uid
 
     def generate_thumbnail(
-        self, source_path, size=None, max_age=None, image_uid=None, fix_orientation=True):
+        self, source_path, size=None, max_age=None, image_uid=None, fix_orientation=True
+    ):
         """Generate a thumbnail of image
 
         @param source_path(unicode): absolute path to source image
@@ -188,8 +189,12 @@
             - unique Id of the thumbnail
         """
         d = threads.deferToThread(
-            self._blocking_gen_thumb, source_path, size, max_age, image_uid=image_uid,
-            fix_orientation=fix_orientation
+            self._blocking_gen_thumb,
+            source_path,
+            size,
+            max_age,
+            image_uid=image_uid,
+            fix_orientation=fix_orientation,
         )
         d.addErrback(
             lambda failure_: log.error("thumbnail generation error: {}".format(failure_))
--- a/libervia/backend/plugins/plugin_xep_0272.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0272.py	Wed Jun 19 18:44:57 2024 +0200
@@ -191,9 +191,9 @@
             # We ask frontend to initiate the session, so we know supported codecs.
             self.host.bridge.call_group_setup(
                 room.roomJID.full(),
-                data_format.serialise({
-                    "to_call": [entity.full() for entity in muji_data["to_call"]]
-                }),
+                data_format.serialise(
+                    {"to_call": [entity.full() for entity in muji_data["to_call"]]}
+                ),
                 client.profile,
             )
 
@@ -215,9 +215,7 @@
         try:
             room = self._muc.get_room(client, room_jid)
         except exceptions.NotFound:
-            log.warning(
-                f"Ignoring MUJI element from an unknown room: {room_jid}"
-            )
+            log.warning(f"Ignoring MUJI element from an unknown room: {room_jid}")
             return
         sdp_data = mapping.parse_sdp(call_data["sdp"], self._j.ROLE_INITIATOR)
         presence_elt, muji_elt = self.generate_presence_and_muji(client, room)
@@ -253,10 +251,7 @@
         try:
             return room._xep_0272_data
         except AttributeError:
-            data = room._xep_0272_data = {
-                "preparing_jids": set(),
-                "to_call": set()
-            }
+            data = room._xep_0272_data = {"preparing_jids": set(), "to_call": set()}
             return data
 
     def generate_presence_and_muji(
--- a/libervia/backend/plugins/plugin_xep_0277.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0277.py	Wed Jun 19 18:44:57 2024 +0200
@@ -268,7 +268,7 @@
         service: Optional[jid.JID],
         # FIXME: node is Optional until all calls to item_2_mb_data set properly service
         #   and node. Once done, the Optional must be removed here
-        node: Optional[str]
+        node: Optional[str],
     ) -> dict:
         """Convert an XML Item to microblog data
 
@@ -283,10 +283,7 @@
             service = client.jid.userhostJID()
 
         extra: Dict[str, Any] = {}
-        mb_data: Dict[str, Any] = {
-            "service": service.full(),
-            "extra": extra
-        }
+        mb_data: Dict[str, Any] = {"service": service.full(), "extra": extra}
 
         def check_conflict(key, increment=False):
             """Check if key is already in microblog data
@@ -335,9 +332,7 @@
                     )
                 key = check_conflict("{}_xhtml".format(elem.name))
                 data = data_elt.toXml()
-                mb_data[key] = yield self.host.plugins["TEXT_SYNTAXES"].clean_xhtml(
-                    data
-                )
+                mb_data[key] = yield self.host.plugins["TEXT_SYNTAXES"].clean_xhtml(data)
             else:
                 key = check_conflict(elem.name)
                 mb_data[key] = str(elem)
@@ -361,7 +356,7 @@
         # FIXME: node should alway be set in the future, check FIXME in method signature
         if node is not None:
             mb_data["node"] = node
-            mb_data['uri'] = xmpp_uri.build_xmpp_uri(
+            mb_data["uri"] = xmpp_uri.build_xmpp_uri(
                 "pubsub",
                 path=service.full(),
                 node=node,
@@ -378,8 +373,9 @@
         try:
             id_elt = next(entry_elt.elements(NS_ATOM, "id"))
         except StopIteration:
-            msg = ("No atom id found in the pubsub item {}, this is not standard !"
-                   .format(id_))
+            msg = "No atom id found in the pubsub item {}, this is not standard !".format(
+                id_
+            )
             log.warning(msg)
             mb_data["atom_id"] = ""
         else:
@@ -452,16 +448,14 @@
             )
 
         # links
-        comments = mb_data['comments'] = []
+        comments = mb_data["comments"] = []
         for link_elt in entry_elt.elements(NS_ATOM, "link"):
             href = link_elt.getAttribute("href")
             if not href:
-                log.warning(
-                    f'missing href in <link> element: {link_elt.toXml()}'
-                )
+                log.warning(f"missing href in <link> element: {link_elt.toXml()}")
                 continue
             rel = link_elt.getAttribute("rel")
-            if (rel == "replies" and link_elt.getAttribute("title") == "comments"):
+            if rel == "replies" and link_elt.getAttribute("title") == "comments":
                 uri = href
                 comments_data = {
                     "uri": uri,
@@ -489,19 +483,12 @@
                     except (AttributeError, RuntimeError):
                         # we should always have either the "publisher" attribute or the
                         # stanza available
-                        log.error(
-                            f"Can't find repeater of the post: {item_elt.toXml()}"
-                        )
+                        log.error(f"Can't find repeater of the post: {item_elt.toXml()}")
                         continue
 
-                extra["repeated"] = {
-                    "by": repeater_jid.full(),
-                    "uri": href
-                }
+                extra["repeated"] = {"by": repeater_jid.full(), "uri": href}
             elif rel in ("related", "enclosure"):
-                attachment: Dict[str, Any] = {
-                    "sources": [{"url": href}]
-                }
+                attachment: Dict[str, Any] = {"sources": [{"url": href}]}
                 if rel == "related":
                     attachment["external"] = True
                 for attr, key in (
@@ -533,9 +520,7 @@
                     )
                 extra.setdefault("alt_links", []).append(link_data)
             else:
-                log.warning(
-                    f"Unmanaged link element: {link_elt.toXml()}"
-                )
+                log.warning(f"Unmanaged link element: {link_elt.toXml()}")
 
         # author
         publisher = item_elt.getAttribute("publisher")
@@ -558,9 +543,7 @@
             try:
                 uri_elt = next(author_elt.elements(NS_ATOM, "uri"))
             except StopIteration:
-                log.debug(
-                    "No uri element found in author element of item {}".format(id_)
-                )
+                log.debug("No uri element found in author element of item {}".format(id_))
                 if publisher:
                     mb_data["author_jid"] = publisher
             else:
@@ -569,16 +552,16 @@
                     uri = uri[5:]
                     mb_data["author_jid"] = uri
                 else:
-                    mb_data["author_jid"] = (
-                        item_elt.getAttribute("publisher") or ""
-                    )
+                    mb_data["author_jid"] = item_elt.getAttribute("publisher") or ""
                 if not author and mb_data["author_jid"]:
                     # FIXME: temporary workaround for missing author name, would be
                     #   better to use directly JID's identity (to be done from frontends?)
                     try:
                         mb_data["author"] = jid.JID(mb_data["author_jid"]).user
                     except Exception as e:
-                        log.warning(f"No author name found, and can't parse author jid: {e}")
+                        log.warning(
+                            f"No author name found, and can't parse author jid: {e}"
+                        )
 
                 if not publisher:
                     log.debug("No publisher attribute, we can't verify author jid")
@@ -620,9 +603,7 @@
 
         ## the trigger ##
         # if other plugins have things to add or change
-        yield self.host.trigger.point(
-            "XEP-0277_item2data", item_elt, entry_elt, mb_data
-        )
+        yield self.host.trigger.point("XEP-0277_item2data", item_elt, entry_elt, mb_data)
 
         defer.returnValue(mb_data)
 
@@ -655,7 +636,9 @@
                     if type_:
                         if type_ == "_rich":  # convert input from current syntax to XHTML
                             xml_content = await synt.convert(
-                                mb_data[attr], synt.get_current_syntax(client.profile), "XHTML"
+                                mb_data[attr],
+                                synt.get_current_syntax(client.profile),
+                                "XHTML",
                             )
                             if f"{elem_name}_xhtml" in mb_data:
                                 raise failure.Failure(
@@ -722,9 +705,7 @@
                     url = attachment["url"]
                 except KeyError:
                     try:
-                        url = next(
-                            s['url'] for s in attachment["sources"] if 'url' in s
-                        )
+                        url = next(s["url"] for s in attachment["sources"] if "url" in s)
                     except (StopIteration, KeyError):
                         log.warning(
                             f'"url" missing in attachment, ignoring: {attachment}'
@@ -746,11 +727,11 @@
                 for key, attr in (
                     ("media_type", "type"),
                     ("desc", "title"),
-                    ("size", "lenght")
+                    ("size", "lenght"),
                 ):
                     value = attachment.get(key)
                     if value:
-                        link_elt[attr]  = str(value)
+                        link_elt[attr] = str(value)
 
         ## alternate links ##
         alt_links = extra.get("alt_links")
@@ -760,7 +741,7 @@
                 url = url_template.format(
                     service=quote(service.full(), safe=""),
                     node=quote(node, safe=""),
-                    item=quote(item_id, safe="")
+                    item=quote(item_id, safe=""),
                 )
 
                 link_elt = entry_elt.addElement("link")
@@ -800,7 +781,8 @@
         ## published/updated time ##
         current_time = time.time()
         entry_elt.addElement(
-            "updated", content=utils.xmpp_date(float(mb_data.get("updated", current_time)))
+            "updated",
+            content=utils.xmpp_date(float(mb_data.get("updated", current_time))),
         )
         entry_elt.addElement(
             "published",
@@ -808,7 +790,7 @@
         )
 
         ## categories ##
-        for tag in mb_data.get('tags', []):
+        for tag in mb_data.get("tags", []):
             category_elt = entry_elt.addElement("category")
             category_elt["term"] = tag
 
@@ -825,7 +807,7 @@
         entry_elt.addElement("id", content=entry_id)  #
 
         ## comments ##
-        for comments_data in mb_data.get('comments', []):
+        for comments_data in mb_data.get("comments", []):
             link_elt = entry_elt.addElement("link")
             # XXX: "uri" is set in self._manage_comments if not already existing
             try:
@@ -844,9 +826,7 @@
                 link_elt["rel"] = "via"
                 link_elt["href"] = repeated["uri"]
             except KeyError as e:
-                log.warning(
-                    f"invalid repeated element({e}): {extra['repeated']}"
-                )
+                log.warning(f"invalid repeated element({e}): {extra['repeated']}")
 
         ## final item building ##
         item_elt = pubsub.Item(id=item_id, payload=entry_elt)
@@ -872,7 +852,7 @@
         """
         if not self.is_comment_node(item_id):
             raise ValueError("This node is not a comment node")
-        return item_id[len(NS_COMMENT_PREFIX):]
+        return item_id[len(NS_COMMENT_PREFIX) :]
 
     def get_comments_node(self, item_id):
         """Generate comment node
@@ -908,7 +888,9 @@
             client.pubsub_service if client.pubsub_service is not None else parent_service
         )
 
-    async def _manage_comments(self, client, mb_data, service, node, item_id, access=None):
+    async def _manage_comments(
+        self, client, mb_data, service, node, item_id, access=None
+    ):
         """Check comments keys in mb_data and create comments node if necessary
 
         if a comments node metadata is set in the mb_data['comments'] list, it is used
@@ -931,18 +913,16 @@
             if "comments" in mb_data:
                 log.warning(
                     "comments are not allowed but there is already a comments node, "
-                    "it may be lost: {uri}".format(
-                        uri=mb_data["comments"]
-                    )
+                    "it may be lost: {uri}".format(uri=mb_data["comments"])
                 )
                 del mb_data["comments"]
             return
 
         # we have usually a single comment node, but the spec allow several, so we need to
         # handle this in a list
-        if len(mb_data.setdefault('comments', [])) == 0:
+        if len(mb_data.setdefault("comments", [])) == 0:
             # we need at least one comment node
-            mb_data['comments'].append({})
+            mb_data["comments"].append({})
 
         if access is None:
             # TODO: cache access models per service/node
@@ -952,7 +932,9 @@
                 log.debug(f"Can't get parent node configuration: {e}")
                 access = self._p.ACCESS_OPEN
             else:
-                access = parent_node_config.get(self._p.OPT_ACCESS_MODEL, self._p.ACCESS_OPEN)
+                access = parent_node_config.get(
+                    self._p.OPT_ACCESS_MODEL, self._p.ACCESS_OPEN
+                )
 
         options = {
             self._p.OPT_ACCESS_MODEL: access,
@@ -967,9 +949,9 @@
         # if other plugins need to change the options
         self.host.trigger.point("XEP-0277_comments", client, mb_data, options)
 
-        for comments_data in mb_data['comments']:
-            uri = comments_data.get('uri')
-            comments_node = comments_data.get('node')
+        for comments_data in mb_data["comments"]:
+            uri = comments_data.get("uri")
+            comments_node = comments_data.get("node")
             try:
                 comments_service = jid.JID(comments_data["service"])
             except KeyError:
@@ -977,24 +959,26 @@
 
             if uri:
                 uri_service, uri_node = self.parse_comment_url(uri)
-                if ((comments_node is not None and comments_node!=uri_node)
-                     or (comments_service is not None and comments_service!=uri_service)):
+                if (comments_node is not None and comments_node != uri_node) or (
+                    comments_service is not None and comments_service != uri_service
+                ):
                     raise ValueError(
                         f"Incoherence between comments URI ({uri}) and comments_service "
-                        f"({comments_service}) or comments_node ({comments_node})")
-                comments_data['service'] = comments_service = uri_service
-                comments_data['node'] = comments_node = uri_node
+                        f"({comments_service}) or comments_node ({comments_node})"
+                    )
+                comments_data["service"] = comments_service = uri_service
+                comments_data["node"] = comments_node = uri_node
             else:
                 if not comments_node:
                     comments_node = self.get_comments_node(item_id)
-                comments_data['node'] = comments_node
+                comments_data["node"] = comments_node
                 if comments_service is None:
                     comments_service = await self.get_comments_service(client, service)
                     if comments_service is None:
                         comments_service = client.jid.userhostJID()
-                comments_data['service'] = comments_service
+                comments_data["service"] = comments_service
 
-                comments_data['uri'] = xmpp_uri.build_xmpp_uri(
+                comments_data["uri"] = xmpp_uri.build_xmpp_uri(
                     "pubsub",
                     path=comments_service.full(),
                     node=comments_node,
@@ -1031,11 +1015,11 @@
         """Generate a user friendly id from title or content"""
         # TODO: rich content should be converted to plain text
         id_base = regex.url_friendly_text(
-            data.get('title')
-            or data.get('title_rich')
-            or data.get('content')
-            or data.get('content_rich')
-            or ''
+            data.get("title")
+            or data.get("title_rich")
+            or data.get("content")
+            or data.get("content_rich")
+            or ""
         )
         if not data.get("user_friendly_id_suffix", True):
             return id_base
@@ -1054,7 +1038,7 @@
         client: SatXMPPEntity,
         data: dict,
         service: Optional[jid.JID] = None,
-        node: Optional[str] = NS_MICROBLOG
+        node: Optional[str] = NS_MICROBLOG,
     ) -> Optional[str]:
         """Send XEP-0277's microblog data
 
@@ -1082,10 +1066,7 @@
                     # the item doesn't already exist, and change ID if it's the case.
                     try:
                         items, __ = await self._p.get_items(
-                            client,
-                            service,
-                            node,
-                            item_ids = [item_id]
+                            client, service, node, item_ids=[item_id]
                         )
                     except exceptions.NotFound:
                         pass
@@ -1119,20 +1100,13 @@
         return item_id
 
     def _mb_repeat(
-            self,
-            service_s: str,
-            node: str,
-            item: str,
-            extra_s: str,
-            profile_key: str
+        self, service_s: str, node: str, item: str, extra_s: str, profile_key: str
     ) -> defer.Deferred:
         service = jid.JID(service_s) if service_s else None
         node = node if node else NS_MICROBLOG
         client = self.host.get_client(profile_key)
         extra = data_format.deserialise(extra_s)
-        d = defer.ensureDeferred(
-            self.repeat(client, item, service, node, extra)
-        )
+        d = defer.ensureDeferred(self.repeat(client, item, service, node, extra))
         # [repeat] can return None, and we always need a str
         d.addCallback(lambda ret: ret or "")
         return d
@@ -1154,12 +1128,7 @@
             service = client.jid.userhostJID()
 
         # we first get the post to repeat
-        items, __ = await self._p.get_items(
-            client,
-            service,
-            node,
-            item_ids = [item]
-        )
+        items, __ = await self._p.get_items(client, service, node, item_ids=[item])
         if not items:
             raise exceptions.NotFound(
                 f"no item found at node {node!r} on {service} with ID {item!r}"
@@ -1168,9 +1137,7 @@
         try:
             entry_elt = next(item_elt.elements(NS_ATOM, "entry"))
         except StopIteration:
-            raise exceptions.DataError(
-                "post to repeat is not a XEP-0277 blog item"
-            )
+            raise exceptions.DataError("post to repeat is not a XEP-0277 blog item")
 
         # we want to be sure that we have an author element
         try:
@@ -1198,10 +1165,7 @@
         )
 
         return await self._p.send_item(
-            client,
-            client.jid.userhostJID(),
-            NS_MICROBLOG,
-            entry_elt
+            client, client.jid.userhostJID(), NS_MICROBLOG, entry_elt
         )
 
     def _mb_preview(self, service, node, data, profile_key):
@@ -1218,7 +1182,7 @@
         client: SatXMPPEntity,
         data: dict,
         service: Optional[jid.JID] = None,
-        node: Optional[str] = NS_MICROBLOG
+        node: Optional[str] = NS_MICROBLOG,
     ) -> dict:
         """Preview microblog data without publishing them
 
@@ -1235,7 +1199,6 @@
         item_elt.uri = pubsub.NS_PUBSUB
         return await self.item_2_mb_data(client, item_elt, service, node)
 
-
     ## retract ##
 
     def _mb_retract(self, service_jid_s, nodeIdentifier, itemIdentifier, profile_key):
@@ -1252,11 +1215,18 @@
 
     def _mb_get_serialise(self, data):
         items, metadata = data
-        metadata['items'] = items
+        metadata["items"] = items
         return data_format.serialise(metadata)
 
-    def _mb_get(self, service="", node="", max_items=10, item_ids=None, extra="",
-               profile_key=C.PROF_KEY_NONE):
+    def _mb_get(
+        self,
+        service="",
+        node="",
+        max_items=10,
+        item_ids=None,
+        extra="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
         @param item_ids (list[unicode]): list of item IDs
@@ -1266,8 +1236,15 @@
         max_items = None if max_items == C.NO_LIMIT else max_items
         extra = self._p.parse_extra(data_format.deserialise(extra))
         d = defer.ensureDeferred(
-            self.mb_get(client, service, node or None, max_items, item_ids,
-                       extra.rsm_request, extra.extra)
+            self.mb_get(
+                client,
+                service,
+                node or None,
+                max_items,
+                item_ids,
+                extra.rsm_request,
+                extra.extra,
+            )
         )
         d.addCallback(self._mb_get_serialise)
         return d
@@ -1280,7 +1257,7 @@
         max_items: Optional[int] = 10,
         item_ids: Optional[List[str]] = None,
         rsm_request: Optional[rsm.RSMRequest] = None,
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]:
         """Get some microblogs
 
@@ -1309,7 +1286,8 @@
             extra=extra,
         )
         mb_data_list, metadata = await self._p.trans_items_data_d(
-            items_data, partial(self.item_2_mb_data, client, service=service, node=node))
+            items_data, partial(self.item_2_mb_data, client, service=service, node=node)
+        )
         encrypted = metadata.pop("encrypted", None)
         if encrypted is not None:
             for mb_data in mb_data_list:
@@ -1320,13 +1298,15 @@
         return (mb_data_list, metadata)
 
     def _mb_rename(self, service, node, item_id, new_id, profile_key):
-        return defer.ensureDeferred(self.mb_rename(
-            self.host.get_client(profile_key),
-            jid.JID(service) if service else None,
-            node or None,
-            item_id,
-            new_id
-        ))
+        return defer.ensureDeferred(
+            self.mb_rename(
+                self.host.get_client(profile_key),
+                jid.JID(service) if service else None,
+                node or None,
+                item_id,
+                new_id,
+            )
+        )
 
     async def mb_rename(
         self,
@@ -1334,7 +1314,7 @@
         service: Optional[jid.JID],
         node: Optional[str],
         item_id: str,
-        new_id: str
+        new_id: str,
     ) -> None:
         if not node:
             node = NS_MICROBLOG
@@ -1528,7 +1508,7 @@
                 items_data,
                 # FIXME: service and node should be used here
                 partial(self.item_2_mb_data, client),
-                serialise=True
+                serialise=True,
             )
             d.addCallback(lambda serialised: ("", serialised))
             return d
@@ -1552,8 +1532,14 @@
         )
         return d
 
-    def _mb_get_from_many(self, publishers_type, publishers, max_items=10, extra_dict=None,
-                       profile_key=C.PROF_KEY_NONE):
+    def _mb_get_from_many(
+        self,
+        publishers_type,
+        publishers,
+        max_items=10,
+        extra_dict=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
         """
@@ -1569,8 +1555,15 @@
             profile_key,
         )
 
-    def mb_get_from_many(self, publishers_type, publishers, max_items=None, rsm_request=None,
-                      extra=None, profile_key=C.PROF_KEY_NONE):
+    def mb_get_from_many(
+        self,
+        publishers_type,
+        publishers,
+        max_items=None,
+        rsm_request=None,
+        extra=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """Get the published microblogs for a list of groups or jids
 
         @param publishers_type (str): type of the list of publishers (one of "GROUP" or
@@ -1607,17 +1600,13 @@
             for item, item_metadata in items_data:
                 item = data_format.serialise(item)
                 items.append((item, item_metadata))
-            ret.append((
-                service.full(),
-                node,
-                failure_,
-                items,
-                metadata))
+            ret.append((service.full(), node, failure_, items, metadata))
 
         return data[0], ret
 
-    def _mb_get_from_many_with_comments_rt_result(self, session_id,
-                                           profile_key=C.PROF_KEY_DEFAULT):
+    def _mb_get_from_many_with_comments_rt_result(
+        self, session_id, profile_key=C.PROF_KEY_DEFAULT
+    ):
         """Get real-time results for [mb_get_from_many_with_comments] session
 
         @param session_id: id of the real-time deferred session
@@ -1643,9 +1632,16 @@
         d.addCallback(self._mb_get_from_many_with_comments_rt_result_serialise)
         return d
 
-    def _mb_get_from_many_with_comments(self, publishers_type, publishers, max_items=10,
-                                   max_comments=C.NO_LIMIT, extra_dict=None,
-                                   extra_comments_dict=None, profile_key=C.PROF_KEY_NONE):
+    def _mb_get_from_many_with_comments(
+        self,
+        publishers_type,
+        publishers,
+        max_items=10,
+        max_comments=C.NO_LIMIT,
+        extra_dict=None,
+        extra_comments_dict=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
         @param max_comments(int): maximum number of comments to get, C.NO_LIMIT for no
@@ -1668,10 +1664,18 @@
             profile_key,
         )
 
-    def mb_get_from_many_with_comments(self, publishers_type, publishers, max_items=None,
-                                  max_comments=None, rsm_request=None, extra=None,
-                                  rsm_comments=None, extra_comments=None,
-                                  profile_key=C.PROF_KEY_NONE):
+    def mb_get_from_many_with_comments(
+        self,
+        publishers_type,
+        publishers,
+        max_items=None,
+        max_comments=None,
+        rsm_request=None,
+        extra=None,
+        rsm_comments=None,
+        extra_comments=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """Helper method to get the microblogs and their comments in one shot
 
         @param publishers_type (str): type of the list of publishers (one of "GROUP" or
@@ -1729,9 +1733,12 @@
                             lambda items_data: self._p.trans_items_data_d(
                                 items_data,
                                 partial(
-                                    self.item_2_mb_data, client, service=service, node=node
+                                    self.item_2_mb_data,
+                                    client,
+                                    service=service,
+                                    node=node,
                                 ),
-                                serialise=True
+                                serialise=True,
                             )
                         )
                         # with failure handling
@@ -1764,9 +1771,11 @@
 
         deferreds = {}
         for service, node in node_data:
-            d = deferreds[(service, node)] = defer.ensureDeferred(self._p.get_items(
-                client, service, node, max_items, rsm_request=rsm_request, extra=extra
-            ))
+            d = deferreds[(service, node)] = defer.ensureDeferred(
+                self._p.get_items(
+                    client, service, node, max_items, rsm_request=rsm_request, extra=extra
+                )
+            )
             d.addCallback(
                 lambda items_data: self._p.trans_items_data_d(
                     items_data,
--- a/libervia/backend/plugins/plugin_xep_0280.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0280.py	Wed Jun 19 18:44:57 2024 +0200
@@ -75,7 +75,9 @@
         log.info(_("Plugin XEP_0280 initialization"))
         self.host = host
         host.memory.update_params(self.params)
-        host.trigger.add("message_received", self.message_received_trigger, priority=200000)
+        host.trigger.add(
+            "message_received", self.message_received_trigger, priority=200000
+        )
 
     def get_handler(self, client):
         return XEP_0280_handler()
@@ -159,9 +161,7 @@
                 pass
         else:
             log.warning(
-                "invalid message carbons received:\n{xml}".format(
-                    xml=message_elt.toXml()
-                )
+                "invalid message carbons received:\n{xml}".format(xml=message_elt.toXml())
             )
             return False
 
@@ -171,6 +171,7 @@
 
         return True
 
+
 @implementer(iwokkel.IDisco)
 class XEP_0280_handler(XMPPHandler):
 
--- a/libervia/backend/plugins/plugin_xep_0292.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0292.py	Wed Jun 19 18:44:57 2024 +0200
@@ -51,12 +51,8 @@
 
 NS_VCARD4 = "urn:ietf:params:xml:ns:vcard-4.0"
 VCARD4_NODE = "urn:xmpp:vcard4"
-text_fields = {
-    "fn": "name",
-    "nickname": "nicknames",
-    "note": "description"
-}
-text_fields_inv = {v: k for k,v in text_fields.items()}
+text_fields = {"fn": "name", "nickname": "nicknames", "note": "description"}
+text_fields_inv = {v: k for k, v in text_fields.items()}
 
 
 class XEP_0292:
@@ -74,20 +70,20 @@
         host.register_namespace("vcard4", NS_VCARD4)
         self.host = host
         self._p = host.plugins["XEP-0060"]
-        self._i = host.plugins['IDENTITY']
+        self._i = host.plugins["IDENTITY"]
         self._i.register(
             IMPORT_NAME,
-            'nicknames',
+            "nicknames",
             partial(self.getValue, field="nicknames"),
             partial(self.set_value, field="nicknames"),
-            priority=1000
+            priority=1000,
         )
         self._i.register(
             IMPORT_NAME,
-            'description',
+            "description",
             partial(self.getValue, field="description"),
             partial(self.set_value, field="description"),
-            priority=1000
+            priority=1000,
         )
 
     def get_handler(self, client):
@@ -106,7 +102,7 @@
                         value = str(metadata_elt.text)
                         if dest_type is str:
                             if dest_field in vcard:
-                                vcard[dest_field] +=  value
+                                vcard[dest_field] += value
                             else:
                                 vcard[dest_field] = value
                         elif dest_type is list:
@@ -135,9 +131,7 @@
                         field_elt = vcard_elt.addElement(elt_name)
                         field_elt.addElement("text", content=v)
                 else:
-                    log.warning(
-                        f"ignoring unexpected value: {value!r}"
-                    )
+                    log.warning(f"ignoring unexpected value: {value!r}")
 
         return vcard_elt
 
@@ -163,7 +157,7 @@
         self,
         client: SatXMPPEntity,
         vcard_elt: domish.Element,
-        entity: Optional[jid.JID] = None
+        entity: Optional[jid.JID] = None,
     ) -> None:
         """Update VCard 4 of given entity, create node if doesn't already exist
 
@@ -174,7 +168,7 @@
         service = entity or client.jid.userhostJID()
         node_options = {
             self._p.OPT_ACCESS_MODEL: self._p.ACCESS_OPEN,
-            self._p.OPT_PUBLISH_MODEL: self._p.PUBLISH_MODEL_PUBLISHERS
+            self._p.OPT_PUBLISH_MODEL: self._p.PUBLISH_MODEL_PUBLISHERS,
         }
         await self._p.create_if_new_node(client, service, VCARD4_NODE, node_options)
         await self._p.send_item(
@@ -225,7 +219,7 @@
         client: SatXMPPEntity,
         value: Union[str, List[str]],
         entity: jid.JID,
-        field: str
+        field: str,
     ) -> None:
         """Set generic value
 
--- a/libervia/backend/plugins/plugin_xep_0300.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0300.py	Wed Jun 19 18:44:57 2024 +0200
@@ -50,7 +50,7 @@
 
 NS_HASHES = "urn:xmpp:hashes:2"
 NS_HASHES_FUNCTIONS = "urn:xmpp:hash-function-text-names:{}"
-BUFFER_SIZE = 2 ** 12
+BUFFER_SIZE = 2**12
 ALGO_DEFAULT = "sha-256"
 
 
@@ -90,10 +90,10 @@
     def get_best_peer_algo(self, to_jid, profile):
         """Return the best available hashing algorith of other peer
 
-         @param to_jid(jid.JID): peer jid
-         @parm profile: %(doc_profile)s
-         @return (D(unicode, None)): best available algorithm,
-            or None if hashing is not possible
+        @param to_jid(jid.JID): peer jid
+        @parm profile: %(doc_profile)s
+        @return (D(unicode, None)): best available algorithm,
+           or None if hashing is not possible
         """
         client = self.host.get_client(profile)
         for algo in reversed(XEP_0300.ALGOS):
@@ -175,7 +175,7 @@
         assert algo
         hash_elt = domish.Element((NS_HASHES, "hash"))
         if hash_ is not None:
-            b64_hash = base64.b64encode(hash_.encode('utf-8')).decode('utf-8')
+            b64_hash = base64.b64encode(hash_.encode("utf-8")).decode("utf-8")
             hash_elt.addContent(b64_hash)
         hash_elt["algo"] = algo
         return hash_elt
@@ -205,7 +205,7 @@
 
             if best_algo is None or algos.index(best_algo) < idx:
                 best_algo = algo
-                best_value = base64.b64decode(str(hash_elt)).decode('utf-8')
+                best_value = base64.b64decode(str(hash_elt)).decode("utf-8")
 
         if not hash_elt:
             raise exceptions.NotFound
--- a/libervia/backend/plugins/plugin_xep_0308.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0308.py	Wed Jun 19 18:44:57 2024 +0200
@@ -35,6 +35,7 @@
 from libervia.backend.models.core import MessageData, MessageEdition
 from libervia.backend.tools.common import data_format
 from libervia.backend.tools.utils import aio
+
 log = getLogger(__name__)
 
 
@@ -139,7 +140,9 @@
 
         if store:
             flag_modified(edited_history, "extra")
-            edited_history.extra.setdefault(C.MESS_EXTRA_EDITIONS, []).append(previous_version)
+            edited_history.extra.setdefault(C.MESS_EXTRA_EDITIONS, []).append(
+                previous_version
+            )
             await self.host.memory.storage.add(edited_history)
 
             edit_data = MessageData(edited_history.serialise())
@@ -271,7 +274,7 @@
             last_mess[5],
             last_mess[-1],
             # message will be updated and signal sent on reception in group chat
-            store = not is_group_chat
+            store=not is_group_chat,
         )
 
         serialised = edited_history.serialise()
--- a/libervia/backend/plugins/plugin_xep_0313.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0313.py	Wed Jun 19 18:44:57 2024 +0200
@@ -73,28 +73,36 @@
         self._last_stanza_id_d = defer.Deferred()
         self._last_stanza_id_d.callback(None)
         host.bridge.add_method(
-            "mam_get", ".plugin", in_sign='sss',
-            out_sign='(a(sdssa{ss}a{ss}ss)ss)', method=self._get_archives,
-            async_=True)
+            "mam_get",
+            ".plugin",
+            in_sign="sss",
+            out_sign="(a(sdssa{ss}a{ss}ss)ss)",
+            method=self._get_archives,
+            async_=True,
+        )
 
     async def resume(self, client):
         """Retrieve one2one messages received since the last we have in local storage"""
         stanza_id_data = await self.host.memory.storage.get_privates(
-            mam.NS_MAM, [KEY_LAST_STANZA_ID], profile=client.profile)
+            mam.NS_MAM, [KEY_LAST_STANZA_ID], profile=client.profile
+        )
         stanza_id = stanza_id_data.get(KEY_LAST_STANZA_ID)
         rsm_req = None
         if stanza_id is None:
             log.info("can't retrieve last stanza ID, checking history")
             last_mess = await self.host.memory.history_get(
-                None, None, limit=1, filters={'not_types': C.MESS_TYPE_GROUPCHAT,
-                                              'last_stanza_id': True},
-                profile=client.profile)
+                None,
+                None,
+                limit=1,
+                filters={"not_types": C.MESS_TYPE_GROUPCHAT, "last_stanza_id": True},
+                profile=client.profile,
+            )
             if not last_mess:
                 log.info(_("It seems that we have no MAM history yet"))
                 stanza_id = None
                 rsm_req = rsm.RSMRequest(max_=50, before="")
             else:
-                stanza_id = last_mess[0][-1]['stanza_id']
+                stanza_id = last_mess[0][-1]["stanza_id"]
         if rsm_req is None:
             rsm_req = rsm.RSMRequest(max_=100, after=stanza_id)
         mam_req = mam.MAMRequest(rsm_=rsm_req)
@@ -102,8 +110,9 @@
         count = 0
         while not complete:
             try:
-                mam_data = await self.get_archives(client, mam_req,
-                                                  service=client.jid.userhostJID())
+                mam_data = await self.get_archives(
+                    client, mam_req, service=client.jid.userhostJID()
+                )
             except StanzaError as e:
                 log.warning(
                     f"Can't retrieve MAM archives: {e}\n"
@@ -112,9 +121,7 @@
                 )
                 return
             except Exception as e:
-                log.exception(
-                    f"Can't retrieve retrieve MAM archive"
-                )
+                log.exception(f"Can't retrieve retrieve MAM archive")
                 return
             elt_list, rsm_response, mam_response = mam_data
             complete = mam_response["complete"]
@@ -130,12 +137,13 @@
             for idx, mess_elt in enumerate(elt_list):
                 try:
                     fwd_message_elt = self.get_message_from_result(
-                        client, mess_elt, mam_req)
+                        client, mess_elt, mam_req
+                    )
                 except exceptions.DataError:
                     continue
 
                 try:
-                    destinee = jid.JID(fwd_message_elt['to'])
+                    destinee = jid.JID(fwd_message_elt["to"])
                 except KeyError:
                     log.warning(_('missing "to" attribute in forwarded message'))
                     destinee = client.jid
@@ -146,15 +154,17 @@
                 else:
                     # this message should be from us, we just add it to history
                     try:
-                        from_jid = jid.JID(fwd_message_elt['from'])
+                        from_jid = jid.JID(fwd_message_elt["from"])
                     except KeyError:
                         log.warning(_('missing "from" attribute in forwarded message'))
                         from_jid = client.jid
                     if from_jid.userhostJID() != client.jid.userhostJID():
-                        log.warning(_(
-                            'was expecting a message sent by our jid, but this one if '
-                            'from {from_jid}, ignoring\n{xml}').format(
-                                from_jid=from_jid.full(), xml=mess_elt.toXml()))
+                        log.warning(
+                            _(
+                                "was expecting a message sent by our jid, but this one if "
+                                "from {from_jid}, ignoring\n{xml}"
+                            ).format(from_jid=from_jid.full(), xml=mess_elt.toXml())
+                        )
                         continue
                     # adding message to history
                     mess_data = client.messageProt.parse_message(fwd_message_elt)
@@ -162,11 +172,14 @@
                         await client.messageProt.add_to_history(mess_data)
                     except exceptions.CancelError as e:
                         log.warning(
-                            "message has not been added to history: {e}".format(e=e))
+                            "message has not been added to history: {e}".format(e=e)
+                        )
                     except Exception as e:
                         log.error(
-                            "can't add message to history: {e}\n{xml}"
-                            .format(e=e, xml=mess_elt.toXml()))
+                            "can't add message to history: {e}\n{xml}".format(
+                                e=e, xml=mess_elt.toXml()
+                            )
+                        )
                     if complete and idx == len(elt_list) - 1:
                         # We are at the last message from archive, we store the ID to not
                         # ask again the same messages next time.
@@ -176,7 +189,7 @@
                                 namespace=mam.NS_MAM,
                                 key=KEY_LAST_STANZA_ID,
                                 value=stanza_id,
-                                profile=client.profile
+                                profile=client.profile,
                             )
                         except Exception:
                             log.exception("Can't store last stanza ID")
@@ -184,8 +197,11 @@
         if not count:
             log.info(_("We have received no message while offline"))
         else:
-            log.info(_("We have received {num_mess} message(s) while offline.")
-                .format(num_mess=count))
+            log.info(
+                _("We have received {num_mess} message(s) while offline.").format(
+                    num_mess=count
+                )
+            )
 
     def profile_connected(self, client):
         defer.ensureDeferred(self.resume(client))
@@ -209,22 +225,24 @@
                 value = extra.pop(MAM_PREFIX + arg)
                 form_args[arg] = datetime.fromtimestamp(float(value), tz.tzutc())
             except (TypeError, ValueError):
-                log.warning("Bad value for {arg} filter ({value}), ignoring".format(
-                    arg=arg, value=value))
+                log.warning(
+                    "Bad value for {arg} filter ({value}), ignoring".format(
+                        arg=arg, value=value
+                    )
+                )
             except KeyError:
                 continue
 
         try:
-            form_args["with_jid"] = jid.JID(extra.pop(
-                MAM_PREFIX + "with"))
-        except (jid.InvalidFormat):
+            form_args["with_jid"] = jid.JID(extra.pop(MAM_PREFIX + "with"))
+        except jid.InvalidFormat:
             log.warning("Bad value for jid filter")
         except KeyError:
             pass
 
         for name, value in extra.items():
             if name.startswith(FILTER_PREFIX):
-                var = name[len(FILTER_PREFIX):]
+                var = name[len(FILTER_PREFIX) :]
                 extra_fields = form_args.setdefault("extra_fields", [])
                 extra_fields.append(data_form.Field(var=var, value=value))
 
@@ -264,18 +282,22 @@
         @return domish.Element): <message/> that can be used directly with onMessage
         """
         if mess_elt.name != "message":
-            log.warning("unexpected stanza in archive: {xml}".format(
-                xml=mess_elt.toXml()))
+            log.warning(
+                "unexpected stanza in archive: {xml}".format(xml=mess_elt.toXml())
+            )
             raise exceptions.DataError("Invalid element")
         service_jid = client.jid.userhostJID() if service is None else service
         mess_from = mess_elt.getAttribute("from") or client.jid.userhost()
         # we check that the message has been sent by the right service
         # if service is None (i.e. message expected from our own server)
         # from can be server jid or user's bare jid
-        if (mess_from != service_jid.full()
-            and not (service is None and mess_from == client.jid.host)):
-            log.error("Message is not from our server, something went wrong: "
-                      "{xml}".format(xml=mess_elt.toXml()))
+        if mess_from != service_jid.full() and not (
+            service is None and mess_from == client.jid.host
+        ):
+            log.error(
+                "Message is not from our server, something went wrong: "
+                "{xml}".format(xml=mess_elt.toXml())
+            )
             raise exceptions.DataError("Invalid element")
         try:
             result_elt = next(mess_elt.elements(mam.NS_MAM, "result"))
@@ -287,16 +309,19 @@
                 delay_elt = None
             fwd_message_elt = next(forwarded_elt.elements(C.NS_CLIENT, "message"))
         except StopIteration:
-            log.warning("Invalid message received from MAM: {xml}".format(
-                xml=mess_elt.toXml()))
+            log.warning(
+                "Invalid message received from MAM: {xml}".format(xml=mess_elt.toXml())
+            )
             raise exceptions.DataError("Invalid element")
         else:
             if not result_elt["queryid"] == mam_req.query_id:
-                log.error("Unexpected query id (was expecting {query_id}): {xml}"
-                    .format(query_id=mam.query_id, xml=mess_elt.toXml()))
+                log.error(
+                    "Unexpected query id (was expecting {query_id}): {xml}".format(
+                        query_id=mam.query_id, xml=mess_elt.toXml()
+                    )
+                )
                 raise exceptions.DataError("Invalid element")
-            stanza_id = self._sid.get_stanza_id(fwd_message_elt,
-                                              service_jid)
+            stanza_id = self._sid.get_stanza_id(fwd_message_elt, service_jid)
             if stanza_id is None:
                 # not stanza-id element is present, we add one so message
                 # will be archived with it, and we won't request several times
@@ -304,10 +329,15 @@
                 try:
                     stanza_id = result_elt["id"]
                 except AttributeError:
-                    log.warning('Invalid MAM result: missing "id" attribute: {xml}'
-                                .format(xml=result_elt.toXml()))
+                    log.warning(
+                        'Invalid MAM result: missing "id" attribute: {xml}'.format(
+                            xml=result_elt.toXml()
+                        )
+                    )
                     raise exceptions.DataError("Invalid element")
-                self._sid.add_stanza_id(client, fwd_message_elt, stanza_id, by=service_jid)
+                self._sid.add_stanza_id(
+                    client, fwd_message_elt, stanza_id, by=service_jid
+                )
 
             if delay_elt is not None:
                 fwd_message_elt.addChild(delay_elt)
@@ -345,8 +375,10 @@
         except StopIteration:
             raise exceptions.DataError("Invalid MAM result")
 
-        mam_response = {"complete": C.bool(fin_elt.getAttribute("complete", C.BOOL_FALSE)),
-                        "stable": C.bool(fin_elt.getAttribute("stable", C.BOOL_TRUE))}
+        mam_response = {
+            "complete": C.bool(fin_elt.getAttribute("complete", C.BOOL_FALSE)),
+            "stable": C.bool(fin_elt.getAttribute("stable", C.BOOL_TRUE)),
+        }
 
         try:
             rsm_response = rsm.RSMResponse.fromElement(fin_elt)
@@ -359,14 +391,12 @@
         elt_list, rsm_response, mam_response = data
         mess_list = []
         for elt in elt_list:
-            fwd_message_elt = self.get_message_from_result(client, elt, mam_req,
-                                                        service=service)
+            fwd_message_elt = self.get_message_from_result(
+                client, elt, mam_req, service=service
+            )
             mess_data = client.messageProt.parse_message(fwd_message_elt)
             mess_list.append(client.message_get_bridge_args(mess_data))
-        metadata = {
-            'rsm': self._rsm.response2dict(rsm_response),
-            'mam': mam_response
-        }
+        metadata = {"rsm": self._rsm.response2dict(rsm_response), "mam": mam_response}
         return mess_list, data_format.serialise(metadata), client.profile
 
     def _get_archives(self, service, extra_ser, profile_key):
@@ -420,8 +450,14 @@
         # http://xmpp.org/extensions/xep-0313.html#prefs
         return client._mam.queryPrefs(service)
 
-    def _set_prefs(self, service_s=None, default="roster", always=None, never=None,
-                  profile_key=C.PROF_KEY_NONE):
+    def _set_prefs(
+        self,
+        service_s=None,
+        default="roster",
+        always=None,
+        never=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         service = jid.JID(service_s) if service_s else None
         always_jid = [jid.JID(entity) for entity in always]
         never_jid = [jid.JID(entity) for entity in never]
@@ -463,7 +499,9 @@
                     namespace=mam.NS_MAM,
                     key=KEY_LAST_STANZA_ID,
                     value=stanza_id,
-                    profile=client.profile))
+                    profile=client.profile,
+                )
+            )
 
 
 @implementer(disco.IDisco)
@@ -478,7 +516,8 @@
 
     def connectionInitialized(self):
         observer_xpath = MESSAGE_STANZA_ID.format(
-            ns_stanza_id=self.host.ns_map['stanza_id'])
+            ns_stanza_id=self.host.ns_map["stanza_id"]
+        )
         self.xmlstream.addObserver(
             observer_xpath, self.plugin_parent.on_message_stanza_id, client=self.parent
         )
--- a/libervia/backend/plugins/plugin_xep_0329.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0329.py	Wed Jun 19 18:44:57 2024 +0200
@@ -209,9 +209,12 @@
         path_elts = [_f for _f in path.split("/") if _f]
 
         if ".." in path_elts:
-            log.warning(_(
-                'parent dir ("..") found in path, hack attempt? path is {path} '
-                '[{profile}]').format(path=path, profile=client.profile))
+            log.warning(
+                _(
+                    'parent dir ("..") found in path, hack attempt? path is {path} '
+                    "[{profile}]"
+                ).format(path=path, profile=client.profile)
+            )
             raise exceptions.PermissionError("illegal path elements")
 
         node = client._XEP_0329_root_node
@@ -347,7 +350,9 @@
         )
         host.bridge.add_signal("fis_shared_path_new", ".plugin", signature="sss")
         host.bridge.add_signal("fis_shared_path_removed", ".plugin", signature="ss")
-        host.trigger.add("XEP-0234_fileSendingRequest", self._file_sending_request_trigger)
+        host.trigger.add(
+            "XEP-0234_fileSendingRequest", self._file_sending_request_trigger
+        )
         host.register_namespace("fis", NS_FIS)
 
     def get_handler(self, client):
@@ -356,8 +361,8 @@
     def profile_connected(self, client):
         if client.is_component:
             client._file_sharing_allowed_hosts = self.host.memory.config_get(
-                'component file_sharing',
-                'http_upload_allowed_hosts_list') or [client.host]
+                "component file_sharing", "http_upload_allowed_hosts_list"
+            ) or [client.host]
         else:
             client._XEP_0329_root_node = ShareNode(
                 None,
@@ -477,8 +482,11 @@
             size = os.path.getsize(path)
             mime_type = mimetypes.guess_type(path, strict=False)[0]
             file_elt = self._jf.build_file_element(
-                client=client, name=name, size=size, mime_type=mime_type,
-                modified=os.path.getmtime(path)
+                client=client,
+                name=name,
+                size=size,
+                mime_type=mime_type,
+                modified=os.path.getmtime(path),
             )
 
             query_elt.addChild(file_elt)
@@ -625,12 +633,12 @@
             self._iq_error(client, iq_elt)
             return
         except exceptions.PermissionError:
-            self._iq_error(client, iq_elt, condition='not-allowed')
+            self._iq_error(client, iq_elt, condition="not-allowed")
             return
         except Exception as e:
             tb = traceback.format_tb(e.__traceback__)
             log.error(f"internal server error: {e}\n{''.join(tb)}")
-            self._iq_error(client, iq_elt, condition='internal-server-error')
+            self._iq_error(client, iq_elt, condition="internal-server-error")
             return
         iq_result_elt = xmlstream.toResponse(iq_elt, "result")
         query_elt = iq_result_elt.addElement((NS_FIS, "query"))
@@ -642,13 +650,13 @@
             iq_result_elt,
             owner,
             node_path,
-            files_data
+            files_data,
         ):
             return
         for file_data in files_data:
-            if file_data['type'] == C.FILE_TYPE_DIRECTORY:
+            if file_data["type"] == C.FILE_TYPE_DIRECTORY:
                 directory_elt = query_elt.addElement("directory")
-                directory_elt['name'] = file_data['name']
+                directory_elt["name"] = file_data["name"]
                 self.host.trigger.point(
                     "XEP-0329_compGetFilesFromNode_build_directory",
                     client,
@@ -661,14 +669,17 @@
                 file_elt = self._jf.build_file_element_from_dict(
                     client,
                     file_data,
-                    modified=file_data.get("modified", file_data["created"])
+                    modified=file_data.get("modified", file_data["created"]),
                 )
                 query_elt.addChild(file_elt)
         client.send(iq_result_elt)
 
     def on_component_request(self, iq_elt, client):
         return self._request_handler(
-            client, iq_elt, self._comp_get_root_nodes_cb, self._comp_get_files_from_node_cb
+            client,
+            iq_elt,
+            self._comp_get_root_nodes_cb,
+            self._comp_get_files_from_node_cb,
         )
 
     async def _parse_result(self, client, peer_jid, iq_elt):
@@ -684,18 +695,22 @@
                     continue
                 file_data["type"] = C.FILE_TYPE_FILE
                 try:
-                    thumbs = file_data['extra'][C.KEY_THUMBNAILS]
+                    thumbs = file_data["extra"][C.KEY_THUMBNAILS]
                 except KeyError:
                     log.debug(f"No thumbnail found for {file_data}")
                 else:
                     for thumb in thumbs:
-                        if 'url' not in thumb and "id" in thumb:
+                        if "url" not in thumb and "id" in thumb:
                             try:
-                                file_path = await self._b.get_file(client, peer_jid, thumb['id'])
+                                file_path = await self._b.get_file(
+                                    client, peer_jid, thumb["id"]
+                                )
                             except Exception as e:
-                                log.warning(f"Can't get thumbnail {thumb['id']!r} for {file_data}: {e}")
+                                log.warning(
+                                    f"Can't get thumbnail {thumb['id']!r} for {file_data}: {e}"
+                                )
                             else:
-                                thumb['filename'] = file_path.name
+                                thumb["filename"] = file_path.name
 
             elif elt.name == "directory" and elt.uri == NS_FIS:
                 # we have a directory
@@ -709,8 +724,7 @@
                 )
             else:
                 log.warning(
-                    _("unexpected element, ignoring: {elt}")
-                    .format(elt=elt.toXml())
+                    _("unexpected element, ignoring: {elt}").format(elt=elt.toXml())
                 )
                 continue
             files.append(file_data)
@@ -721,10 +735,10 @@
     async def _parse_element(self, client, iq_elt, element, namespace):
         peer_jid, owner = client.get_owner_and_peer(iq_elt)
         elt = next(iq_elt.elements(namespace, element))
-        path = Path("/", elt['path'])
+        path = Path("/", elt["path"])
         if len(path.parts) < 2:
             raise RootPathException
-        namespace = elt.getAttribute('namespace')
+        namespace = elt.getAttribute("namespace")
         files_data = await self.host.memory.get_files(
             client,
             peer_jid=peer_jid,
@@ -734,7 +748,7 @@
             owner=owner,
         )
         if len(files_data) != 1:
-            client.sendError(iq_elt, 'item-not-found')
+            client.sendError(iq_elt, "item-not-found")
             raise exceptions.CancelError
         file_data = files_data[0]
         return peer_jid, elt, path, namespace, file_data
@@ -742,8 +756,9 @@
     def _affiliations_get(self, service_jid_s, namespace, path, profile):
         client = self.host.get_client(profile)
         service = jid.JID(service_jid_s)
-        d = defer.ensureDeferred(self.affiliationsGet(
-            client, service, namespace or None, path))
+        d = defer.ensureDeferred(
+            self.affiliationsGet(client, service, namespace or None, path)
+        )
         d.addCallback(
             lambda affiliations: {
                 str(entity): affiliation for entity, affiliation in affiliations.items()
@@ -752,33 +767,38 @@
         return d
 
     async def affiliationsGet(
-        self,
-        client: SatXMPPEntity,
-        service: jid.JID,
-        namespace: Optional[str],
-        path: str
+        self, client: SatXMPPEntity, service: jid.JID, namespace: Optional[str], path: str
     ) -> Dict[jid.JID, str]:
         if not path:
             raise ValueError(f"invalid path: {path!r}")
         iq_elt = client.IQ("get")
-        iq_elt['to'] = service.full()
+        iq_elt["to"] = service.full()
         affiliations_elt = iq_elt.addElement((NS_FIS_AFFILIATION, "affiliations"))
         if namespace:
             affiliations_elt["namespace"] = namespace
         affiliations_elt["path"] = path
         iq_result_elt = await iq_elt.send()
         try:
-            affiliations_elt = next(iq_result_elt.elements(NS_FIS_AFFILIATION, "affiliations"))
+            affiliations_elt = next(
+                iq_result_elt.elements(NS_FIS_AFFILIATION, "affiliations")
+            )
         except StopIteration:
-            raise exceptions.DataError(f"Invalid result to affiliations request: {iq_result_elt.toXml()}")
+            raise exceptions.DataError(
+                f"Invalid result to affiliations request: {iq_result_elt.toXml()}"
+            )
 
         affiliations = {}
-        for affiliation_elt in affiliations_elt.elements(NS_FIS_AFFILIATION, 'affiliation'):
+        for affiliation_elt in affiliations_elt.elements(
+            NS_FIS_AFFILIATION, "affiliation"
+        ):
             try:
-                affiliations[jid.JID(affiliation_elt['jid'])] = affiliation_elt['affiliation']
+                affiliations[jid.JID(affiliation_elt["jid"])] = affiliation_elt[
+                    "affiliation"
+                ]
             except (KeyError, RuntimeError):
                 raise exceptions.DataError(
-                    f"invalid affiliation element: {affiliation_elt.toXml()}")
+                    f"invalid affiliation element: {affiliation_elt.toXml()}"
+                )
 
         return affiliations
 
@@ -786,8 +806,9 @@
         client = self.host.get_client(profile)
         service = jid.JID(service_jid_s)
         affiliations = {jid.JID(e): a for e, a in affiliations.items()}
-        return defer.ensureDeferred(self.affiliationsSet(
-            client, service, namespace or None, path, affiliations))
+        return defer.ensureDeferred(
+            self.affiliationsSet(client, service, namespace or None, path, affiliations)
+        )
 
     async def affiliationsSet(
         self,
@@ -800,15 +821,15 @@
         if not path:
             raise ValueError(f"invalid path: {path!r}")
         iq_elt = client.IQ("set")
-        iq_elt['to'] = service.full()
+        iq_elt["to"] = service.full()
         affiliations_elt = iq_elt.addElement((NS_FIS_AFFILIATION, "affiliations"))
         if namespace:
             affiliations_elt["namespace"] = namespace
         affiliations_elt["path"] = path
         for entity_jid, affiliation in affiliations.items():
-            affiliation_elt = affiliations_elt.addElement('affiliation')
-            affiliation_elt['jid'] = entity_jid.full()
-            affiliation_elt['affiliation'] = affiliation
+            affiliation_elt = affiliations_elt.addElement("affiliation")
+            affiliation_elt["jid"] = entity_jid.full()
+            affiliation_elt["affiliation"] = affiliation
         await iq_elt.send()
 
     def _on_component_affiliations_get(self, iq_elt, client):
@@ -817,16 +838,18 @@
 
     async def on_component_affiliations_get(self, client, iq_elt):
         try:
-            (
-                from_jid, affiliations_elt, path, namespace, file_data
-            ) = await self._parse_element(client, iq_elt, "affiliations", NS_FIS_AFFILIATION)
+            (from_jid, affiliations_elt, path, namespace, file_data) = (
+                await self._parse_element(
+                    client, iq_elt, "affiliations", NS_FIS_AFFILIATION
+                )
+            )
         except exceptions.CancelError:
             return
         except RootPathException:
             # if root path is requested, we only get owner affiliation
             peer_jid, owner = client.get_owner_and_peer(iq_elt)
             is_owner = peer_jid.userhostJID() == owner
-            affiliations = {owner: 'owner'}
+            affiliations = {owner: "owner"}
         except exceptions.NotFound:
             client.sendError(iq_elt, "item-not-found")
             return
@@ -835,17 +858,17 @@
             return
         else:
             from_jid_bare = from_jid.userhostJID()
-            is_owner = from_jid_bare == file_data.get('owner')
+            is_owner = from_jid_bare == file_data.get("owner")
             affiliations = self.host.memory.get_file_affiliations(file_data)
         iq_result_elt = xmlstream.toResponse(iq_elt, "result")
-        affiliations_elt = iq_result_elt.addElement((NS_FIS_AFFILIATION, 'affiliations'))
+        affiliations_elt = iq_result_elt.addElement((NS_FIS_AFFILIATION, "affiliations"))
         for entity_jid, affiliation in affiliations.items():
             if not is_owner and entity_jid.userhostJID() != from_jid_bare:
                 # only onwer can get all affiliations
                 continue
-            affiliation_elt = affiliations_elt.addElement('affiliation')
-            affiliation_elt['jid'] = entity_jid.userhost()
-            affiliation_elt['affiliation'] = affiliation
+            affiliation_elt = affiliations_elt.addElement("affiliation")
+            affiliation_elt["jid"] = entity_jid.userhost()
+            affiliation_elt["affiliation"] = affiliation
         client.send(iq_result_elt)
 
     def _on_component_affiliations_set(self, iq_elt, client):
@@ -854,41 +877,41 @@
 
     async def on_component_affiliations_set(self, client, iq_elt):
         try:
-            (
-                from_jid, affiliations_elt, path, namespace, file_data
-            ) = await self._parse_element(client, iq_elt, "affiliations", NS_FIS_AFFILIATION)
+            (from_jid, affiliations_elt, path, namespace, file_data) = (
+                await self._parse_element(
+                    client, iq_elt, "affiliations", NS_FIS_AFFILIATION
+                )
+            )
         except exceptions.CancelError:
             return
         except RootPathException:
-            client.sendError(iq_elt, 'bad-request', "Root path can't be used")
+            client.sendError(iq_elt, "bad-request", "Root path can't be used")
             return
 
-        if from_jid.userhostJID() != file_data['owner']:
+        if from_jid.userhostJID() != file_data["owner"]:
             log.warning(
                 f"{from_jid} tried to modify {path} affiliations while the owner is "
                 f"{file_data['owner']}"
             )
-            client.sendError(iq_elt, 'forbidden')
+            client.sendError(iq_elt, "forbidden")
             return
 
         try:
             affiliations = {
-                jid.JID(e['jid']): e['affiliation']
-                for e in affiliations_elt.elements(NS_FIS_AFFILIATION, 'affiliation')
+                jid.JID(e["jid"]): e["affiliation"]
+                for e in affiliations_elt.elements(NS_FIS_AFFILIATION, "affiliation")
             }
         except (KeyError, RuntimeError):
-                log.warning(
-                    f"invalid affiliation element: {affiliations_elt.toXml()}"
-                )
-                client.sendError(iq_elt, 'bad-request', "invalid affiliation element")
-                return
+            log.warning(f"invalid affiliation element: {affiliations_elt.toXml()}")
+            client.sendError(iq_elt, "bad-request", "invalid affiliation element")
+            return
         except Exception as e:
-                log.error(
-                    f"unexepected exception while setting affiliation element: {e}\n"
-                    f"{affiliations_elt.toXml()}"
-                )
-                client.sendError(iq_elt, 'internal-server-error', f"{e}")
-                return
+            log.error(
+                f"unexepected exception while setting affiliation element: {e}\n"
+                f"{affiliations_elt.toXml()}"
+            )
+            client.sendError(iq_elt, "internal-server-error", f"{e}")
+            return
 
         await self.host.memory.set_file_affiliations(client, file_data, affiliations)
 
@@ -900,8 +923,9 @@
     def _configuration_get(self, service_jid_s, namespace, path, profile):
         client = self.host.get_client(profile)
         service = jid.JID(service_jid_s)
-        d = defer.ensureDeferred(self.configuration_get(
-            client, service, namespace or None, path))
+        d = defer.ensureDeferred(
+            self.configuration_get(client, service, namespace or None, path)
+        )
         d.addCallback(
             lambda configuration: {
                 str(entity): affiliation for entity, affiliation in configuration.items()
@@ -910,25 +934,25 @@
         return d
 
     async def configuration_get(
-        self,
-        client: SatXMPPEntity,
-        service: jid.JID,
-        namespace: Optional[str],
-        path: str
+        self, client: SatXMPPEntity, service: jid.JID, namespace: Optional[str], path: str
     ) -> Dict[str, str]:
         if not path:
             raise ValueError(f"invalid path: {path!r}")
         iq_elt = client.IQ("get")
-        iq_elt['to'] = service.full()
+        iq_elt["to"] = service.full()
         configuration_elt = iq_elt.addElement((NS_FIS_CONFIGURATION, "configuration"))
         if namespace:
             configuration_elt["namespace"] = namespace
         configuration_elt["path"] = path
         iq_result_elt = await iq_elt.send()
         try:
-            configuration_elt = next(iq_result_elt.elements(NS_FIS_CONFIGURATION, "configuration"))
+            configuration_elt = next(
+                iq_result_elt.elements(NS_FIS_CONFIGURATION, "configuration")
+            )
         except StopIteration:
-            raise exceptions.DataError(f"Invalid result to configuration request: {iq_result_elt.toXml()}")
+            raise exceptions.DataError(
+                f"Invalid result to configuration request: {iq_result_elt.toXml()}"
+            )
 
         form = data_form.findForm(configuration_elt, NS_FIS_CONFIGURATION)
         configuration = {f.var: f.value for f in form.fields.values()}
@@ -938,8 +962,11 @@
     def _configuration_set(self, service_jid_s, namespace, path, configuration, profile):
         client = self.host.get_client(profile)
         service = jid.JID(service_jid_s)
-        return defer.ensureDeferred(self.configuration_set(
-            client, service, namespace or None, path, configuration))
+        return defer.ensureDeferred(
+            self.configuration_set(
+                client, service, namespace or None, path, configuration
+            )
+        )
 
     async def configuration_set(
         self,
@@ -952,7 +979,7 @@
         if not path:
             raise ValueError(f"invalid path: {path!r}")
         iq_elt = client.IQ("set")
-        iq_elt['to'] = service.full()
+        iq_elt["to"] = service.full()
         configuration_elt = iq_elt.addElement((NS_FIS_CONFIGURATION, "configuration"))
         if namespace:
             configuration_elt["namespace"] = namespace
@@ -968,37 +995,42 @@
 
     async def on_component_configuration_get(self, client, iq_elt):
         try:
-            (
-                from_jid, configuration_elt, path, namespace, file_data
-            ) = await self._parse_element(client, iq_elt, "configuration", NS_FIS_CONFIGURATION)
+            (from_jid, configuration_elt, path, namespace, file_data) = (
+                await self._parse_element(
+                    client, iq_elt, "configuration", NS_FIS_CONFIGURATION
+                )
+            )
         except exceptions.CancelError:
             return
         except RootPathException:
-            client.sendError(iq_elt, 'bad-request', "Root path can't be used")
+            client.sendError(iq_elt, "bad-request", "Root path can't be used")
             return
         try:
-            access_type = file_data['access'][C.ACCESS_PERM_READ]['type']
+            access_type = file_data["access"][C.ACCESS_PERM_READ]["type"]
         except KeyError:
-            access_model = 'whitelist'
+            access_model = "whitelist"
         else:
-            access_model = 'open' if access_type == C.ACCESS_TYPE_PUBLIC else 'whitelist'
+            access_model = "open" if access_type == C.ACCESS_TYPE_PUBLIC else "whitelist"
 
         iq_result_elt = xmlstream.toResponse(iq_elt, "result")
-        configuration_elt = iq_result_elt.addElement((NS_FIS_CONFIGURATION, 'configuration'))
+        configuration_elt = iq_result_elt.addElement(
+            (NS_FIS_CONFIGURATION, "configuration")
+        )
         form = data_form.Form(formType="form", formNamespace=NS_FIS_CONFIGURATION)
-        form.makeFields({'access_model': access_model})
+        form.makeFields({"access_model": access_model})
         configuration_elt.addChild(form.toElement())
         client.send(iq_result_elt)
 
     async def _set_configuration(self, client, configuration_elt, file_data):
         form = data_form.findForm(configuration_elt, NS_FIS_CONFIGURATION)
         for name, value in form.items():
-            if name == 'access_model':
+            if name == "access_model":
                 await self.host.memory.set_file_access_model(client, file_data, value)
             else:
                 # TODO: send a IQ error?
                 log.warning(
-                    f"Trying to set a not implemented configuration option: {name}")
+                    f"Trying to set a not implemented configuration option: {name}"
+                )
 
     def _on_component_configuration_set(self, iq_elt, client):
         iq_elt.handled = True
@@ -1006,23 +1038,25 @@
 
     async def on_component_configuration_set(self, client, iq_elt):
         try:
-            (
-                from_jid, configuration_elt, path, namespace, file_data
-            ) = await self._parse_element(client, iq_elt, "configuration", NS_FIS_CONFIGURATION)
+            (from_jid, configuration_elt, path, namespace, file_data) = (
+                await self._parse_element(
+                    client, iq_elt, "configuration", NS_FIS_CONFIGURATION
+                )
+            )
         except exceptions.CancelError:
             return
         except RootPathException:
-            client.sendError(iq_elt, 'bad-request', "Root path can't be used")
+            client.sendError(iq_elt, "bad-request", "Root path can't be used")
             return
 
         from_jid_bare = from_jid.userhostJID()
-        is_owner = from_jid_bare == file_data.get('owner')
+        is_owner = from_jid_bare == file_data.get("owner")
         if not is_owner:
             log.warning(
                 f"{from_jid} tried to modify {path} configuration while the owner is "
                 f"{file_data['owner']}"
             )
-            client.sendError(iq_elt, 'forbidden')
+            client.sendError(iq_elt, "forbidden")
             return
 
         await self._set_configuration(client, configuration_elt, file_data)
@@ -1035,8 +1069,11 @@
     def _create_dir(self, service_jid_s, namespace, path, configuration, profile):
         client = self.host.get_client(profile)
         service = jid.JID(service_jid_s)
-        return defer.ensureDeferred(self.create_dir(
-            client, service, namespace or None, path, configuration or None))
+        return defer.ensureDeferred(
+            self.create_dir(
+                client, service, namespace or None, path, configuration or None
+            )
+        )
 
     async def create_dir(
         self,
@@ -1049,13 +1086,15 @@
         if not path:
             raise ValueError(f"invalid path: {path!r}")
         iq_elt = client.IQ("set")
-        iq_elt['to'] = service.full()
+        iq_elt["to"] = service.full()
         create_dir_elt = iq_elt.addElement((NS_FIS_CREATE, "dir"))
         if namespace:
             create_dir_elt["namespace"] = namespace
         create_dir_elt["path"] = path
         if configuration:
-            configuration_elt = create_dir_elt.addElement((NS_FIS_CONFIGURATION, "configuration"))
+            configuration_elt = create_dir_elt.addElement(
+                (NS_FIS_CONFIGURATION, "configuration")
+            )
             form = data_form.Form(formType="submit", formNamespace=NS_FIS_CONFIGURATION)
             form.makeFields(configuration)
             configuration_elt.addChild(form.toElement())
@@ -1068,13 +1107,13 @@
     async def on_component_create_dir(self, client, iq_elt):
         peer_jid, owner = client.get_owner_and_peer(iq_elt)
         if peer_jid.host not in client._file_sharing_allowed_hosts:
-            client.sendError(iq_elt, 'forbidden')
+            client.sendError(iq_elt, "forbidden")
             return
         create_dir_elt = next(iq_elt.elements(NS_FIS_CREATE, "dir"))
-        namespace = create_dir_elt.getAttribute('namespace')
-        path = Path("/", create_dir_elt['path'])
+        namespace = create_dir_elt.getAttribute("namespace")
+        path = Path("/", create_dir_elt["path"])
         if len(path.parts) < 2:
-            client.sendError(iq_elt, 'bad-request', "Root path can't be used")
+            client.sendError(iq_elt, "bad-request", "Root path can't be used")
             return
         # for root directories, we check permission here
         if len(path.parts) == 2 and owner != peer_jid.userhostJID():
@@ -1082,7 +1121,7 @@
                 f"{peer_jid} is trying to create a dir at {owner}'s repository:\n"
                 f"path: {path}\nnamespace: {namespace!r}"
             )
-            client.sendError(iq_elt, 'forbidden', "You can't create a directory there")
+            client.sendError(iq_elt, "forbidden", "You can't create a directory there")
             return
         # when going further into the path, the permissions will be checked by get_files
         files_data = await self.host.memory.get_files(
@@ -1092,18 +1131,20 @@
             namespace=namespace,
             owner=owner,
         )
-        if path.name in [d['name'] for d in files_data]:
+        if path.name in [d["name"] for d in files_data]:
             log.warning(
                 f"Conflict when trying to create a directory (from: {peer_jid} "
                 f"namespace: {namespace!r} path: {path!r})"
             )
             client.sendError(
-                iq_elt, 'conflict', "there is already a file or dir at this path")
+                iq_elt, "conflict", "there is already a file or dir at this path"
+            )
             return
 
         try:
             configuration_elt = next(
-                create_dir_elt.elements(NS_FIS_CONFIGURATION, 'configuration'))
+                create_dir_elt.elements(NS_FIS_CONFIGURATION, "configuration")
+            )
         except StopIteration:
             configuration_elt = None
 
@@ -1114,18 +1155,20 @@
             type_=C.FILE_TYPE_DIRECTORY,
             namespace=namespace,
             owner=owner,
-            peer_jid=peer_jid
+            peer_jid=peer_jid,
         )
 
         if configuration_elt is not None:
-            file_data = (await self.host.memory.get_files(
-                client,
-                peer_jid=peer_jid,
-                path=path.parent,
-                name=path.name,
-                namespace=namespace,
-                owner=owner,
-            ))[0]
+            file_data = (
+                await self.host.memory.get_files(
+                    client,
+                    peer_jid=peer_jid,
+                    path=path.parent,
+                    name=path.name,
+                    namespace=namespace,
+                    owner=owner,
+                )
+            )[0]
 
             await self._set_configuration(client, configuration_elt, file_data)
 
@@ -1137,9 +1180,7 @@
     def _serialize_data(self, files_data):
         for file_data in files_data:
             for key, value in file_data.items():
-                file_data[key] = (
-                    json.dumps(value) if key in ("extra",) else str(value)
-                )
+                file_data[key] = json.dumps(value) if key in ("extra",) else str(value)
         return files_data
 
     def _list_files(self, target_jid, path, extra, profile):
@@ -1207,9 +1248,12 @@
                 idx += 1
                 new_name = name + "_" + str(idx)
             name = new_name
-            log.info(_(
-                "A directory with this name is already shared, renamed to {new_name} "
-                "[{profile}]".format( new_name=new_name, profile=client.profile)))
+            log.info(
+                _(
+                    "A directory with this name is already shared, renamed to {new_name} "
+                    "[{profile}]".format(new_name=new_name, profile=client.profile)
+                )
+            )
 
         ShareNode(name=name, parent=node, type_=node_type, access=access, path=path)
         self.host.bridge.fis_shared_path_new(path, name, client.profile)
@@ -1236,32 +1280,34 @@
     def connectionInitialized(self):
         if self.parent.is_component:
             self.xmlstream.addObserver(
-                IQ_FIS_REQUEST, self.plugin_parent.on_component_request, client=self.parent
+                IQ_FIS_REQUEST,
+                self.plugin_parent.on_component_request,
+                client=self.parent,
             )
             self.xmlstream.addObserver(
                 IQ_FIS_AFFILIATION_GET,
                 self.plugin_parent._on_component_affiliations_get,
-                client=self.parent
+                client=self.parent,
             )
             self.xmlstream.addObserver(
                 IQ_FIS_AFFILIATION_SET,
                 self.plugin_parent._on_component_affiliations_set,
-                client=self.parent
+                client=self.parent,
             )
             self.xmlstream.addObserver(
                 IQ_FIS_CONFIGURATION_GET,
                 self.plugin_parent._on_component_configuration_get,
-                client=self.parent
+                client=self.parent,
             )
             self.xmlstream.addObserver(
                 IQ_FIS_CONFIGURATION_SET,
                 self.plugin_parent._on_component_configuration_set,
-                client=self.parent
+                client=self.parent,
             )
             self.xmlstream.addObserver(
                 IQ_FIS_CREATE_DIR,
                 self.plugin_parent._on_component_create_dir,
-                client=self.parent
+                client=self.parent,
             )
         else:
             self.xmlstream.addObserver(
--- a/libervia/backend/plugins/plugin_xep_0334.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0334.py	Wed Jun 19 18:44:57 2024 +0200
@@ -68,7 +68,9 @@
         log.info(_("Message Processing Hints plugin initialization"))
         self.host = host
         host.trigger.add("sendMessage", self.send_message_trigger)
-        host.trigger.add("message_received", self.message_received_trigger, priority=-1000)
+        host.trigger.add(
+            "message_received", self.message_received_trigger, priority=-1000
+        )
 
     def get_handler(self, client):
         return XEP_0334_handler()
@@ -85,7 +87,9 @@
         else:
             log.error("Unknown hint: {}".format(hint))
 
-    def add_hint_elements(self, message_elt: domish.Element, hints: Iterable[str]) -> None:
+    def add_hint_elements(
+        self, message_elt: domish.Element, hints: Iterable[str]
+    ) -> None:
         """Add hints elements to message stanza
 
         @param message_elt: stanza where hints must be added
@@ -95,7 +99,7 @@
             if not list(message_elt.elements(NS_HINTS, hint)):
                 message_elt.addElement((NS_HINTS, hint))
             else:
-                log.debug(f'Not adding {hint!r} hint: it is already present in <message>')
+                log.debug(f"Not adding {hint!r} hint: it is already present in <message>")
 
     def _send_post_xml_treatment(self, mess_data):
         if "hints" in mess_data:
--- a/libervia/backend/plugins/plugin_xep_0338.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0338.py	Wed Jun 19 18:44:57 2024 +0200
@@ -55,7 +55,9 @@
         host.trigger.add(
             "XEP-0167_generate_sdp_session", self._generate_sdp_session_trigger
         )
-        host.trigger.add("XEP-0167_jingle_session_init", self._jingle_session_init_trigger)
+        host.trigger.add(
+            "XEP-0167_jingle_session_init", self._jingle_session_init_trigger
+        )
         host.trigger.add("XEP-0167_jingle_handler", self._jingle_handler_trigger)
 
     def get_handler(self, client):
@@ -90,9 +92,7 @@
         for semantics, content_names in group_data.items():
             sdp_lines.append(f"a=group:{semantics} {' '.join(content_names)}")
 
-    def parse_group_element(
-        self, jingle_elt: domish.Element, session: dict
-    ) -> None:
+    def parse_group_element(self, jingle_elt: domish.Element, session: dict) -> None:
         """Parse the <group> and <content> elements"""
         for group_elt in jingle_elt.elements(NS_JINGLE_GROUPING, "group"):
             try:
@@ -104,9 +104,7 @@
             except KeyError as e:
                 log.warning(f"Error while parsing <group>: {e}\n{group_elt.toXml()}")
 
-    def add_group_element(
-        self, jingle_elt: domish.Element, session: dict
-    ) -> None:
+    def add_group_element(self, jingle_elt: domish.Element, session: dict) -> None:
         """Build the <group> and <content> elements if possible"""
         for semantics, content_names in session["metadata"].get("group", {}).items():
             # always add the <group> element as first child of <jingle> element to work
--- a/libervia/backend/plugins/plugin_xep_0339.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0339.py	Wed Jun 19 18:44:57 2024 +0200
@@ -98,7 +98,6 @@
             assert application_data is not None
             application_data["msid"] = " ".join(parts)
 
-
     def _generate_sdp_content_trigger(
         self,
         session: dict,
@@ -109,7 +108,7 @@
         application_data: dict,
         app_data_key: str,
         media_data: dict,
-        media: str
+        media: str,
     ) -> None:
         """Generate "msid" and "ssrc" attributes"""
         if "msid" in media_data:
@@ -155,9 +154,7 @@
                 semantics = ssrc_group_elt["semantics"]
                 semantic_ids = media_data.setdefault("ssrc-group", {})[semantics] = []
                 for source_elt in ssrc_group_elt.elements(NS_JINGLE_RTP_SSMA, "source"):
-                    semantic_ids.append(
-                        int(source_elt["ssrc"])
-                    )
+                    semantic_ids.append(int(source_elt["ssrc"]))
             except (KeyError, ValueError) as e:
                 log.warning(
                     f"Error while parsing <ssrc-group>: {e}\n{ssrc_group_elt.toXml()}"
--- a/libervia/backend/plugins/plugin_xep_0343.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0343.py	Wed Jun 19 18:44:57 2024 +0200
@@ -30,7 +30,10 @@
 from libervia.backend.core.i18n import _
 from libervia.backend.core.log import getLogger
 from libervia.backend.core.xmpp import SatXMPPEntity
-from libervia.backend.plugins.plugin_xep_0166.models import BaseTransportHandler, ContentData
+from libervia.backend.plugins.plugin_xep_0166.models import (
+    BaseTransportHandler,
+    ContentData,
+)
 from libervia.backend.tools.common import data_format
 
 from .plugin_xep_0167 import mapping
@@ -50,9 +53,9 @@
     C.PI_HANDLER: "yes",
     C.PI_DESCRIPTION: _("""Use WebRTC to create a generic data transport."""),
 }
-NS_JINGLE_WEBRTC_DATACHANNELS: Final[
-    str
-] = "urn:xmpp:jingle:transports:webrtc-datachannel:1"
+NS_JINGLE_WEBRTC_DATACHANNELS: Final[str] = (
+    "urn:xmpp:jingle:transports:webrtc-datachannel:1"
+)
 
 
 class XEP_0343(BaseTransportHandler):
@@ -73,9 +76,7 @@
         )
         host.trigger.add("XEP-0176_jingle_handler_send_buffer", self._on_send_ice_buffer)
         host.trigger.add("XEP-0176_ice_candidate_send", self._on_ice_candidate_send)
-        host.trigger.add(
-            "XEP-0234_file_jingle_send", self._file_jingle_send
-        )
+        host.trigger.add("XEP-0234_file_jingle_send", self._file_jingle_send)
 
     def get_handler(self, client: SatXMPPEntity):
         return XEP0343Handler()
@@ -115,7 +116,7 @@
         application_data: dict,
         app_data_key: str,
         media_data: dict,
-        media: str
+        media: str,
     ) -> None:
         """Generate "sctp-port" and "max-message-size" attributes"""
         transport_data = content_data["transport_data"]
@@ -127,10 +128,7 @@
         if max_message_size is not None:
             sdp_lines.append(f"a=max-message-size:{max_message_size}")
 
-    def _wrap_transport_element(
-        self,
-        transport_elt: domish.Element
-    ) -> None:
+    def _wrap_transport_element(self, transport_elt: domish.Element) -> None:
         """Wrap the XEP-0176 transport in a transport with this XEP namespace
 
         @param transport_elt: ICE UDP <transport>. Must be already a child of a <content>
@@ -152,7 +150,7 @@
         content_name: str,
         content_data: dict,
         transport_elt: domish.Element,
-        iq_elt: domish.Element
+        iq_elt: domish.Element,
     ) -> bool:
         if content_data["transport"].handler == self:
             self._wrap_transport_element(transport_elt)
@@ -165,7 +163,7 @@
         media_ice_data: dict[str, dict],
         content_name: str,
         content_data: dict,
-        iq_elt: domish.Element
+        iq_elt: domish.Element,
     ) -> bool:
         if content_data["transport"].handler == self:
             transport_elt = iq_elt.jingle.content.transport
@@ -175,10 +173,7 @@
         return True
 
     async def _file_jingle_send(
-        self,
-        client: SatXMPPEntity,
-        peer_jid: jid.JID,
-        content: dict
+        self, client: SatXMPPEntity, peer_jid: jid.JID, content: dict
     ) -> None:
         call_data = content["app_kwargs"]["extra"].pop("call_data", None)
         if call_data:
@@ -186,9 +181,7 @@
             try:
                 application_data = call_data["application"]
             except KeyError:
-                raise exceptions.DataError(
-                    '"call_data" must have an application media.'
-                )
+                raise exceptions.DataError('"call_data" must have an application media.')
             try:
                 content["transport_data"] = {
                     "sctp-port": metadata["sctp-port"],
@@ -198,7 +191,7 @@
                         "pwd": metadata["ice-pwd"],
                         "candidates": application_data.pop("ice-candidates"),
                         "fingerprint": application_data.pop("fingerprint", {}),
-                    }
+                    },
                 }
             except KeyError as e:
                 raise exceptions.DataError(f"Mandatory key is missing: {e}")
@@ -212,16 +205,14 @@
         content_data = session["contents"][content_name]
         transport_data = content_data["transport_data"]
         ice_transport_elt = await self._ice_udp.jingle_session_init(
-            client,
-            session,
-            content_name
+            client, session, content_name
         )
         transport_elt = domish.Element(
             (NS_JINGLE_WEBRTC_DATACHANNELS, "transport"),
             attribs={
                 "sctp-port": str(transport_data["sctp-port"]),
-                "max-message-size": str(transport_data["max-message-size"])
-            }
+                "max-message-size": str(transport_data["max-message-size"]),
+            },
         )
         transport_elt.addChild(ice_transport_elt)
         return transport_elt
--- a/libervia/backend/plugins/plugin_xep_0346.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0346.py	Wed Jun 19 18:44:57 2024 +0200
@@ -83,7 +83,8 @@
             in_sign="sss",
             out_sign="s",
             method=lambda service, nodeIdentifier, profile_key: self._get_ui_schema(
-                service, nodeIdentifier, default_node=None, profile_key=profile_key),
+                service, nodeIdentifier, default_node=None, profile_key=profile_key
+            ),
             async_=True,
         )
         host.bridge.add_method(
@@ -138,9 +139,9 @@
     def get_application_ns(self, namespace):
         """Retrieve application namespace, i.e. namespace without FDP prefix"""
         if namespace.startswith(SUBMITTED_PREFIX):
-            namespace = namespace[len(SUBMITTED_PREFIX):]
+            namespace = namespace[len(SUBMITTED_PREFIX) :]
         elif namespace.startswith(TEMPLATE_PREFIX):
-            namespace = namespace[len(TEMPLATE_PREFIX):]
+            namespace = namespace[len(TEMPLATE_PREFIX) :]
         return namespace
 
     def get_template_ns(self, namespace: str) -> str:
@@ -177,7 +178,7 @@
         node_id = f"{TEMPLATE_PREFIX}{app_ns}"
         items_data = await self._p.get_items(client, service, node_id, max_items=1)
         try:
-            schema = next(items_data[0][0].elements(data_form.NS_X_DATA, 'x'))
+            schema = next(items_data[0][0].elements(data_form.NS_X_DATA, "x"))
         except IndexError:
             schema = None
         except StopIteration:
@@ -188,8 +189,15 @@
             schema = None
         return schema
 
-    async def get_schema_form(self, client, service, nodeIdentifier, schema=None,
-                      form_type="form", copy_form=True):
+    async def get_schema_form(
+        self,
+        client,
+        service,
+        nodeIdentifier,
+        schema=None,
+        form_type="form",
+        copy_form=True,
+    ):
         """Get data form from node's schema
 
         @param service(None, jid.JID): PubSub service
@@ -219,11 +227,11 @@
                 #      domish.Element is present in the form fields (happens for
                 #      XEP-0315 data forms XML Element)
                 schema = data_form.Form(
-                    formType = schema.formType,
-                    title = schema.title,
-                    instructions = schema.instructions[:],
-                    formNamespace = schema.formNamespace,
-                    fields = schema.fieldList,
+                    formType=schema.formType,
+                    title=schema.title,
+                    instructions=schema.instructions[:],
+                    formNamespace=schema.formNamespace,
+                    fields=schema.fieldList,
                 )
             return schema
 
@@ -239,8 +247,9 @@
         xmlui = xml_tools.data_form_2_xmlui(form, "")
         return xmlui
 
-    def _get_ui_schema(self, service, nodeIdentifier, default_node=None,
-                     profile_key=C.PROF_KEY_NONE):
+    def _get_ui_schema(
+        self, service, nodeIdentifier, default_node=None, profile_key=C.PROF_KEY_NONE
+    ):
         if not nodeIdentifier:
             if not default_node:
                 raise ValueError(_("nodeIndentifier needs to be set"))
@@ -290,10 +299,8 @@
         return d
 
     async def get_schema_dict(
-        self,
-        client: SatXMPPEntity,
-        service: Optional[jid.JID],
-        nodeIdentifier: str) -> dict:
+        self, client: SatXMPPEntity, service: Optional[jid.JID], nodeIdentifier: str
+    ) -> dict:
         """Retrieve a node schema and format it a simple dictionary
 
         The dictionary is made so it can be easily serialisable
@@ -301,9 +308,18 @@
         schema_form = await self.get_schema_form(client, service, nodeIdentifier)
         return xml_tools.data_form_2_data_dict(schema_form)
 
-    def _get_data_form_items(self, form_ns="", service="", node="", schema="", max_items=10,
-                          item_ids=None, sub_id=None, extra="",
-                          profile_key=C.PROF_KEY_NONE):
+    def _get_data_form_items(
+        self,
+        form_ns="",
+        service="",
+        node="",
+        schema="",
+        max_items=10,
+        item_ids=None,
+        sub_id=None,
+        extra="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile_key)
         service = jid.JID(service) if service else None
         if not node:
@@ -331,9 +347,21 @@
         d.addCallback(self._p.trans_items_data)
         return d
 
-    async def get_data_form_items(self, client, service, nodeIdentifier, schema=None,
-                         max_items=None, item_ids=None, sub_id=None, rsm_request=None,
-                         extra=None, default_node=None, form_ns=None, filters=None):
+    async def get_data_form_items(
+        self,
+        client,
+        service,
+        nodeIdentifier,
+        schema=None,
+        max_items=None,
+        item_ids=None,
+        sub_id=None,
+        rsm_request=None,
+        extra=None,
+        default_node=None,
+        form_ns=None,
+        filters=None,
+    ):
         """Get items known as being data forms, and convert them to XMLUI
 
         @param schema(domish.Element, data_form.Form, None): schema of the node if known
@@ -386,7 +414,7 @@
                     ("label", "publisher"),
                 ]
                 try:
-                    publisher = jid.JID(item_elt['publisher'])
+                    publisher = jid.JID(item_elt["publisher"])
                 except (KeyError, jid.InvalidFormat):
                     pass
                 else:
@@ -404,8 +432,16 @@
                 break
         return (items_xmlui, metadata)
 
-    def _send_data_form_item(self, service, nodeIdentifier, values, schema=None,
-                          item_id=None, extra=None, profile_key=C.PROF_KEY_NONE):
+    def _send_data_form_item(
+        self,
+        service,
+        nodeIdentifier,
+        values,
+        schema=None,
+        item_id=None,
+        extra=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile_key)
         service = None if not service else jid.JID(service)
         if schema:
@@ -428,8 +464,16 @@
         return d
 
     async def send_data_form_item(
-        self, client, service, nodeIdentifier, values, schema=None, item_id=None,
-        extra=None, deserialise=False):
+        self,
+        client,
+        service,
+        nodeIdentifier,
+        values,
+        schema=None,
+        item_id=None,
+        extra=None,
+        deserialise=False,
+    ):
         """Publish an item as a dataform when we know that there is a schema
 
         @param values(dict[key(unicode), [iterable[object], object]]): values set for the
@@ -457,9 +501,7 @@
                     _("field {name} doesn't exist, ignoring it").format(name=name)
                 )
                 continue
-            if isinstance(values_list, str) or not isinstance(
-                values_list, Iterable
-            ):
+            if isinstance(values_list, str) or not isinstance(values_list, Iterable):
                 values_list = [values_list]
             if deserialise:
                 if field.fieldType == "boolean":
@@ -470,8 +512,9 @@
                         itertools.chain(*[v.splitlines() for v in values_list])
                     )
                 elif xml_tools.is_xhtml_field(field):
-                   values_list = [generic.parseXml(v.encode("utf-8"))
-                                  for v in values_list]
+                    values_list = [
+                        generic.parseXml(v.encode("utf-8")) for v in values_list
+                    ]
                 elif "jid" in (field.fieldType or ""):
                     values_list = [jid.JID(v) for v in values_list]
             if "list" in (field.fieldType or ""):
@@ -481,22 +524,25 @@
                 if not values_list:
                     # if values don't map to allowed values, we use default ones
                     values_list = field.values
-            elif field.ext_type == 'xml':
+            elif field.ext_type == "xml":
                 # FIXME: XML elements are not handled correctly, we need to know if we
                 #        have actual XML/XHTML, or text to escape
                 for idx, value in enumerate(values_list[:]):
                     if isinstance(value, domish.Element):
-                        if (field.value and (value.name != field.value.name
-                                             or value.uri != field.value.uri)):
+                        if field.value and (
+                            value.name != field.value.name or value.uri != field.value.uri
+                        ):
                             # the element is not the one expected in form, so we create the right element
                             # to wrap the current value
-                            wrapper_elt = domish.Element((field.value.uri, field.value.name))
+                            wrapper_elt = domish.Element(
+                                (field.value.uri, field.value.name)
+                            )
                             wrapper_elt.addChild(value)
                             values_list[idx] = wrapper_elt
                     else:
                         # we have to convert the value to a domish.Element
                         if field.value and field.value.uri == C.NS_XHTML:
-                            div_elt = domish.Element((C.NS_XHTML, 'div'))
+                            div_elt = domish.Element((C.NS_XHTML, "div"))
                             div_elt.addContent(str(value))
                             values_list[idx] = div_elt
                         else:
@@ -570,9 +616,19 @@
 
         return client, service, node, max_items, extra, sub_id
 
-    def _get(self, service="", node="", max_items=10, item_ids=None, sub_id=None,
-             extra="", default_node=None, form_ns=None, filters=None,
-             profile_key=C.PROF_KEY_NONE):
+    def _get(
+        self,
+        service="",
+        node="",
+        max_items=10,
+        item_ids=None,
+        sub_id=None,
+        extra="",
+        default_node=None,
+        form_ns=None,
+        filters=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """bridge method to retrieve data from node with schema
 
         this method is a helper so dependant plugins can use it directly
@@ -647,9 +703,7 @@
             item_elt = items_data[0][0]
         except Exception as e:
             log.warning(
-                _("Can't get previous item, update ignored: {reason}").format(
-                    reason=e
-                )
+                _("Can't get previous item, update ignored: {reason}").format(reason=e)
             )
         else:
             # and parse it
@@ -663,9 +717,19 @@
                     if name not in values:
                         values[name] = "\n".join(str(v) for v in field.values)
 
-    def _set(self, service, node, values, schema=None, item_id=None, extra=None,
-             default_node=None, form_ns=None, fill_author=True,
-             profile_key=C.PROF_KEY_NONE):
+    def _set(
+        self,
+        service,
+        node,
+        values,
+        schema=None,
+        item_id=None,
+        extra=None,
+        default_node=None,
+        form_ns=None,
+        fill_author=True,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """bridge method to set item in node with schema
 
         this method is a helper so dependant plugins can use it directly
@@ -674,25 +738,38 @@
         client, service, node, schema, item_id, extra = self.prepare_bridge_set(
             service, node, schema, item_id, extra
         )
-        d = defer.ensureDeferred(self.set(
-            client,
-            service,
-            node,
-            values,
-            schema,
-            item_id,
-            extra,
-            deserialise=True,
-            form_ns=form_ns,
-            default_node=default_node,
-            fill_author=fill_author,
-        ))
+        d = defer.ensureDeferred(
+            self.set(
+                client,
+                service,
+                node,
+                values,
+                schema,
+                item_id,
+                extra,
+                deserialise=True,
+                form_ns=form_ns,
+                default_node=default_node,
+                fill_author=fill_author,
+            )
+        )
         d.addCallback(lambda ret: ret or "")
         return d
 
     async def set(
-            self, client, service, node, values, schema, item_id, extra, deserialise,
-            form_ns, default_node=None, fill_author=True):
+        self,
+        client,
+        service,
+        node,
+        values,
+        schema,
+        item_id,
+        extra,
+        deserialise,
+        form_ns,
+        default_node=None,
+        fill_author=True,
+    ):
         """Set an item in a node with a schema
 
         This method can be used directly by *Set methods added by dependant plugin
@@ -725,13 +802,15 @@
                 raise exceptions.DataError(
                     _('if extra["update"] is set, item_id must be set too')
                 )
-            await self.copy_missing_values(client, service, node, item_id, form_ns, values)
+            await self.copy_missing_values(
+                client, service, node, item_id, form_ns, values
+            )
 
         values["updated"] = now
         if fill_author:
             if not values.get("author"):
                 id_data = await self._i.get_identity(client, None, ["nicknames"])
-                values["author"] = id_data['nicknames'][0]
+                values["author"] = id_data["nicknames"][0]
             if not values.get("author_jid"):
                 values["author_jid"] = client.jid.full()
         item_id = await self.send_data_form_item(
--- a/libervia/backend/plugins/plugin_xep_0352.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0352.py	Wed Jun 19 18:44:57 2024 +0200
@@ -32,8 +32,10 @@
     C.PI_DEPENDENCIES: [],
     C.PI_MAIN: "XEP_0352",
     C.PI_HANDLER: "no",
-    C.PI_DESCRIPTION: D_("Notify server when frontend is not actively used, to limit "
-                         "traffic and save bandwidth and battery life"),
+    C.PI_DESCRIPTION: D_(
+        "Notify server when frontend is not actively used, to limit "
+        "traffic and save bandwidth and battery life"
+    ),
 }
 
 NS_CSI = "urn:xmpp:csi:0"
@@ -54,30 +56,35 @@
         except AttributeError:
             # _xep_0352_active can not be set if is_active is called before
             # profile_connected has been called
-            log.debug("is_active called when XEP-0352 plugin has not yet set the "
-                      "attributes")
+            log.debug(
+                "is_active called when XEP-0352 plugin has not yet set the " "attributes"
+            )
             return True
 
     def profile_connected(self, client):
-        if (NS_CSI, 'csi') in client.xmlstream.features:
+        if (NS_CSI, "csi") in client.xmlstream.features:
             log.info(_("Client State Indication is available on this server"))
             client._xep_0352_enabled = True
             client._xep_0352_active = True
         else:
-            log.warning(_("Client State Indication is not available on this server, some"
-                          " bandwidth optimisations can't be used."))
+            log.warning(
+                _(
+                    "Client State Indication is not available on this server, some"
+                    " bandwidth optimisations can't be used."
+                )
+            )
             client._xep_0352_enabled = False
 
     def set_inactive(self, client):
         if self.is_active(client):
-            inactive_elt = domish.Element((NS_CSI, 'inactive'))
+            inactive_elt = domish.Element((NS_CSI, "inactive"))
             client.send(inactive_elt)
             client._xep_0352_active = False
             log.info("inactive state set")
 
     def set_active(self, client):
         if not self.is_active(client):
-            active_elt = domish.Element((NS_CSI, 'active'))
+            active_elt = domish.Element((NS_CSI, "active"))
             client.send(active_elt)
             client._xep_0352_active = True
             log.info("active state set")
--- a/libervia/backend/plugins/plugin_xep_0353.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0353.py	Wed Jun 19 18:44:57 2024 +0200
@@ -58,7 +58,7 @@
 
 class RejectException(exceptions.CancelError):
 
-    def __init__(self, reason: str, text: str|None = None):
+    def __init__(self, reason: str, text: str | None = None):
         super().__init__(text)
         self.reason = reason
 
@@ -207,10 +207,7 @@
         return True
 
     def _terminate_trigger(
-        self,
-        client: SatXMPPEntity,
-        session: dict,
-        reason_elt: domish.Element
+        self, client: SatXMPPEntity, session: dict, reason_elt: domish.Element
     ) -> bool:
         session_id = session["id"]
         try:
@@ -239,7 +236,7 @@
         self,
         client: SatXMPPEntity,
         message_elt: domish.Element,
-        mess_init_elt: domish.Element
+        mess_init_elt: domish.Element,
     ) -> None:
         if mess_init_elt.name == "propose":
             await self._handle_propose(client, message_elt, mess_init_elt)
@@ -257,10 +254,8 @@
             log.warning(f"invalid element: {mess_init_elt.toXml}")
 
     def _get_sid_and_session_d(
-        self,
-        client: SatXMPPEntity,
-        elt: domish.Element
-    ) -> tuple[str, defer.Deferred|list[defer.Deferred]]:
+        self, client: SatXMPPEntity, elt: domish.Element
+    ) -> tuple[str, defer.Deferred | list[defer.Deferred]]:
         """Retrieve session ID and deferred or list of deferred from response element"""
         try:
             session_id = elt["id"]
@@ -280,9 +275,7 @@
         return session_id, session_d
 
     def _get_sid_and_response_d(
-        self,
-        client: SatXMPPEntity,
-        elt: domish.Element
+        self, client: SatXMPPEntity, elt: domish.Element
     ) -> tuple[str, defer.Deferred]:
         """Retrieve session ID and response_d from response element"""
         session_id, response_d = self._get_sid_and_session_d(client, elt)
@@ -290,9 +283,7 @@
         return session_id, response_d
 
     def _get_sid_and_preflight_d_list(
-        self,
-        client: SatXMPPEntity,
-        elt: domish.Element
+        self, client: SatXMPPEntity, elt: domish.Element
     ) -> tuple[str, list[defer.Deferred]]:
         """Retrieve session ID and list of preflight_d from response element"""
         session_id, preflight_d_list = self._get_sid_and_session_d(client, elt)
@@ -300,10 +291,7 @@
         return session_id, preflight_d_list
 
     async def _handle_propose(
-        self,
-        client: SatXMPPEntity,
-        message_elt: domish.Element,
-        elt: domish.Element
+        self, client: SatXMPPEntity, message_elt: domish.Element, elt: domish.Element
     ) -> None:
         peer_jid = jid.JID(message_elt["from"])
         local_jid = jid.JID(message_elt["to"])
@@ -332,8 +320,7 @@
 
         cast(list[tuple[domish.Element, ApplicationData]], desc_and_apps)
         desc_and_apps.sort(
-            key=lambda desc_and_app: desc_and_app[1].priority,
-            reverse=True
+            key=lambda desc_and_app: desc_and_app[1].priority, reverse=True
         )
 
         session = self._j.create_session(
@@ -427,7 +414,7 @@
         self,
         client: SatXMPPEntity,
         message_elt: domish.Element,
-        proceed_elt: domish.Element
+        proceed_elt: domish.Element,
     ) -> None:
         from_jid = jid.JID(message_elt["from"])
         # session_d is the deferred of the session, it can be preflight_d or response_d
--- a/libervia/backend/plugins/plugin_xep_0359.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0359.py	Wed Jun 19 18:44:57 2024 +0200
@@ -59,21 +59,18 @@
         host.trigger.add("send_message_data", self._send_message_data_trigger)
 
     def _message_parse_trigger(
-        self,
-        client: SatXMPPEntity,
-        message_elt: domish.Element,
-        mess_data: MessageData
+        self, client: SatXMPPEntity, message_elt: domish.Element, mess_data: MessageData
     ) -> bool:
         """Check if message has a stanza-id"""
         stanza_id = self.get_stanza_id(message_elt, client.jid.userhostJID())
         if stanza_id is not None:
-            mess_data['extra']['stanza_id'] = stanza_id
+            mess_data["extra"]["stanza_id"] = stanza_id
         try:
             origin_id = self.get_origin_id(message_elt) or message_elt["id"]
         except KeyError:
             pass
         else:
-            mess_data['extra']['origin_id'] = origin_id
+            mess_data["extra"]["origin_id"] = origin_id
         return True
 
     def _send_message_data_trigger(self, client, mess_data):
@@ -97,7 +94,8 @@
                 if stanza_id is not None:
                     # we must not have more than one element (§3 #4)
                     raise exceptions.DataError(
-                        "More than one corresponding stanza-id found!")
+                        "More than one corresponding stanza-id found!"
+                    )
                 stanza_id = stanza_elt.getAttribute("id")
                 # we don't break to be sure that there is no more than one element
                 # with this "by" attribute
--- a/libervia/backend/plugins/plugin_xep_0363.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0363.py	Wed Jun 19 18:44:57 2024 +0200
@@ -57,12 +57,13 @@
 
 NS_HTTP_UPLOAD = "urn:xmpp:http:upload:0"
 IQ_HTTP_UPLOAD_REQUEST = C.IQ_GET + '/request[@xmlns="' + NS_HTTP_UPLOAD + '"]'
-ALLOWED_HEADERS = ('authorization', 'cookie', 'expires')
+ALLOWED_HEADERS = ("authorization", "cookie", "expires")
 
 
 @dataclass
 class Slot:
     """Upload slot"""
+
     put: str
     get: str
     headers: list
@@ -81,7 +82,7 @@
 
 
 class XEP_0363:
-    Slot=Slot
+    Slot = Slot
 
     def __init__(self, host):
         log.info(_("plugin HTTP File Upload initialization"))
@@ -136,11 +137,11 @@
     async def get_http_upload_entity(self, client, upload_jid=None):
         """Get HTTP upload capable entity
 
-         upload_jid is checked, then its components
-         @param upload_jid(None, jid.JID): entity to check
-         @return(D(jid.JID)): first HTTP upload capable entity
-         @raise exceptions.NotFound: no entity found
-         """
+        upload_jid is checked, then its components
+        @param upload_jid(None, jid.JID): entity to check
+        @return(D(jid.JID)): first HTTP upload capable entity
+        @raise exceptions.NotFound: no entity found
+        """
         try:
             entity = client.http_upload_service
         except AttributeError:
@@ -155,17 +156,25 @@
 
         return entity
 
-    def _file_http_upload(self, filepath, filename="", upload_jid="",
-                        ignore_tls_errors=False, profile=C.PROF_KEY_NONE):
+    def _file_http_upload(
+        self,
+        filepath,
+        filename="",
+        upload_jid="",
+        ignore_tls_errors=False,
+        profile=C.PROF_KEY_NONE,
+    ):
         assert os.path.isabs(filepath) and os.path.isfile(filepath)
         client = self.host.get_client(profile)
-        return defer.ensureDeferred(self.file_http_upload(
-            client,
-            filepath,
-            filename or None,
-            jid.JID(upload_jid) if upload_jid else None,
-            {"ignore_tls_errors": ignore_tls_errors},
-        ))
+        return defer.ensureDeferred(
+            self.file_http_upload(
+                client,
+                filepath,
+                filename or None,
+                jid.JID(upload_jid) if upload_jid else None,
+                {"ignore_tls_errors": ignore_tls_errors},
+            )
+        )
 
     async def file_http_upload(
         self,
@@ -173,7 +182,7 @@
         filepath: Path,
         filename: Optional[str] = None,
         upload_jid: Optional[jid.JID] = None,
-        extra: Optional[dict] = None
+        extra: Optional[dict] = None,
     ) -> Tuple[str, defer.Deferred]:
         """Upload a file through HTTP
 
@@ -200,13 +209,18 @@
         #: this trigger can be used to modify the filename or size requested when geting
         #: the slot, it is notably useful with encryption.
         self.host.trigger.point(
-            "XEP-0363_upload_pre_slot", client, extra, file_metadata,
-            triggers_no_cancel=True
+            "XEP-0363_upload_pre_slot",
+            client,
+            extra,
+            file_metadata,
+            triggers_no_cancel=True,
         )
         try:
             slot = await self.get_slot(
-                client, file_metadata["filename"], file_metadata["size"],
-                upload_jid=upload_jid
+                client,
+                file_metadata["filename"],
+                file_metadata["size"],
+                upload_jid=upload_jid,
             )
         except Exception as e:
             log.warning(_("Can't get upload slot: {reason}").format(reason=e))
@@ -214,9 +228,12 @@
         else:
             log.debug(f"Got upload slot: {slot}")
             sat_file = self.host.plugins["FILE"].File(
-                self.host, client, filepath, uid=extra.get("progress_id"),
+                self.host,
+                client,
+                filepath,
+                uid=extra.get("progress_id"),
                 size=file_metadata["size"],
-                auto_end_signals=False
+                auto_end_signals=False,
             )
             progress_id = sat_file.uid
 
@@ -230,14 +247,19 @@
             headers = {"User-Agent": [C.APP_NAME.encode("utf-8")]}
 
             for name, value in slot.headers:
-                name = name.encode('utf-8')
-                value = value.encode('utf-8')
+                name = name.encode("utf-8")
+                value = value.encode("utf-8")
                 headers[name] = [value]
 
-
             await self.host.trigger.async_point(
-                "XEP-0363_upload", client, extra, sat_file, file_producer, slot,
-                triggers_no_cancel=True)
+                "XEP-0363_upload",
+                client,
+                extra,
+                sat_file,
+                file_producer,
+                slot,
+                triggers_no_cancel=True,
+            )
 
             download_d = agent.request(
                 b"PUT",
@@ -286,8 +308,9 @@
             sat_file.progress_error(msg)
         raise failure_
 
-    def _get_slot(self, filename, size, content_type, upload_jid,
-                 profile_key=C.PROF_KEY_NONE):
+    def _get_slot(
+        self, filename, size, content_type, upload_jid, profile_key=C.PROF_KEY_NONE
+    ):
         """Get an upload slot
 
         This method can be used when uploading is done by the frontend
@@ -299,10 +322,15 @@
         """
         client = self.host.get_client(profile_key)
         filename = filename.replace("/", "_")
-        d = defer.ensureDeferred(self.get_slot(
-            client, filename, size, content_type or None,
-            jid.JID(upload_jid) if upload_jid else None
-        ))
+        d = defer.ensureDeferred(
+            self.get_slot(
+                client,
+                filename,
+                size,
+                content_type or None,
+                jid.JID(upload_jid) if upload_jid else None,
+            )
+        )
         d.addCallback(lambda slot: (slot.get, slot.put, slot.headers))
         return d
 
@@ -330,7 +358,8 @@
             except AttributeError:
                 found_entity = await self.get_http_upload_entity(client)
                 return await self.get_slot(
-                    client, filename, size, content_type, found_entity)
+                    client, filename, size, content_type, found_entity
+                )
             else:
                 if upload_jid is None:
                     raise exceptions.NotFound("No HTTP upload entity found")
@@ -348,9 +377,9 @@
         try:
             slot_elt = next(iq_result_elt.elements(NS_HTTP_UPLOAD, "slot"))
             put_elt = next(slot_elt.elements(NS_HTTP_UPLOAD, "put"))
-            put_url = put_elt['url']
+            put_url = put_elt["url"]
             get_elt = next(slot_elt.elements(NS_HTTP_UPLOAD, "get"))
-            get_url = get_elt['url']
+            get_url = get_elt["url"]
         except (StopIteration, KeyError):
             raise exceptions.DataError("Incorrect stanza received from server")
 
@@ -360,14 +389,18 @@
                 name = header_elt["name"]
                 value = str(header_elt)
             except KeyError:
-                log.warning(_("Invalid header element: {xml}").format(
-                    iq_result_elt.toXml()))
+                log.warning(
+                    _("Invalid header element: {xml}").format(iq_result_elt.toXml())
+                )
                 continue
-            name = name.replace('\n', '')
-            value = value.replace('\n', '')
+            name = name.replace("\n", "")
+            value = value.replace("\n", "")
             if name.lower() not in ALLOWED_HEADERS:
-                log.warning(_('Ignoring unauthorised header "{name}": {xml}')
-                    .format(name=name, xml = iq_result_elt.toXml()))
+                log.warning(
+                    _('Ignoring unauthorised header "{name}": {xml}').format(
+                        name=name, xml=iq_result_elt.toXml()
+                    )
+                )
                 continue
             headers.append((name, value))
 
@@ -376,17 +409,17 @@
     # component
 
     def on_component_request(self, iq_elt, client):
-        iq_elt.handled=True
+        iq_elt.handled = True
         defer.ensureDeferred(self.handle_component_request(client, iq_elt))
 
     async def handle_component_request(self, client, iq_elt):
         try:
             request_elt = next(iq_elt.elements(NS_HTTP_UPLOAD, "request"))
             request = UploadRequest(
-                from_=jid.JID(iq_elt['from']),
-                filename=parse.quote(request_elt['filename'].replace('/', '_'), safe=''),
-                size=int(request_elt['size']),
-                content_type=request_elt.getAttribute('content-type')
+                from_=jid.JID(iq_elt["from"]),
+                filename=parse.quote(request_elt["filename"].replace("/", "_"), safe=""),
+                size=int(request_elt["size"]),
+                content_type=request_elt.getAttribute("content-type"),
             )
         except (StopIteration, KeyError, ValueError):
             client.sendError(iq_elt, "bad-request")
@@ -411,19 +444,21 @@
                     break
         else:
             log.warning(
-                _("no service can handle HTTP Upload request: {elt}")
-                .format(elt=iq_elt.toXml()))
+                _("no service can handle HTTP Upload request: {elt}").format(
+                    elt=iq_elt.toXml()
+                )
+            )
             if err is None:
                 err = error.StanzaError("feature-not-implemented")
             client.send(err.toResponse(iq_elt))
             return
 
         iq_result_elt = xmlstream.toResponse(iq_elt, "result")
-        slot_elt = iq_result_elt.addElement((NS_HTTP_UPLOAD, 'slot'))
-        put_elt = slot_elt.addElement('put')
-        put_elt['url'] = slot.put
-        get_elt = slot_elt.addElement('get')
-        get_elt['url'] = slot.get
+        slot_elt = iq_result_elt.addElement((NS_HTTP_UPLOAD, "slot"))
+        put_elt = slot_elt.addElement("put")
+        put_elt["url"] = slot.put
+        get_elt = slot_elt.addElement("get")
+        get_elt["url"] = slot.get
         client.send(iq_result_elt)
 
 
@@ -434,16 +469,21 @@
         self.plugin_parent = plugin_parent
 
     def connectionInitialized(self):
-        if ((self.parent.is_component
-             and PLUGIN_INFO[C.PI_IMPORT_NAME] in self.parent.enabled_features)):
+        if (
+            self.parent.is_component
+            and PLUGIN_INFO[C.PI_IMPORT_NAME] in self.parent.enabled_features
+        ):
             self.xmlstream.addObserver(
-                IQ_HTTP_UPLOAD_REQUEST, self.plugin_parent.on_component_request,
-                client=self.parent
+                IQ_HTTP_UPLOAD_REQUEST,
+                self.plugin_parent.on_component_request,
+                client=self.parent,
             )
 
     def getDiscoInfo(self, requestor, target, nodeIdentifier=""):
-        if ((self.parent.is_component
-             and not PLUGIN_INFO[C.PI_IMPORT_NAME] in self.parent.enabled_features)):
+        if (
+            self.parent.is_component
+            and not PLUGIN_INFO[C.PI_IMPORT_NAME] in self.parent.enabled_features
+        ):
             return []
         else:
             return [disco.DiscoFeature(NS_HTTP_UPLOAD)]
--- a/libervia/backend/plugins/plugin_xep_0372.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0372.py	Wed Jun 19 18:44:57 2024 +0200
@@ -46,11 +46,15 @@
     C.PI_DEPENDENCIES: ["XEP-0334"],
     C.PI_MAIN: "XEP_0372",
     C.PI_HANDLER: "yes",
-    C.PI_DESCRIPTION: _(dedent("""\
+    C.PI_DESCRIPTION: _(
+        dedent(
+            """\
         XEP-0372 (References) implementation
 
         This plugin implement generic references and mentions.
-    """)),
+    """
+        )
+    ),
 }
 
 NS_REFS = "urn:xmpp:reference:0"
@@ -79,12 +83,11 @@
         return XEP_0372_Handler()
 
     def ref_element_to_ref_data(
-        self,
-        reference_elt: domish.Element
+        self, reference_elt: domish.Element
     ) -> Dict[str, Union[str, int, dict]]:
         ref_data: Dict[str, Union[str, int, dict]] = {
             "uri": reference_elt["uri"],
-            "type": reference_elt["type"]
+            "type": reference_elt["type"],
         }
 
         if ref_data["uri"].startswith("xmpp:"):
@@ -107,7 +110,7 @@
         self,
         client: SatXMPPEntity,
         message_elt: domish.Element,
-        post_treat: defer.Deferred
+        post_treat: defer.Deferred,
     ) -> bool:
         """Check if a direct invitation is in the message, and handle it"""
         reference_elt = next(message_elt.elements(NS_REFS, "reference"), None)
@@ -137,8 +140,7 @@
         if type_ not in ALLOWED_TYPES:
             raise ValueError(f"Unknown type: {type_!r}")
         reference_elt = domish.Element(
-            (NS_REFS, "reference"),
-            attribs={"uri": uri, "type": type_}
+            (NS_REFS, "reference"), attribs={"uri": uri, "type": type_}
         )
         if begin is not None:
             reference_elt["begin"] = str(begin)
@@ -149,22 +151,13 @@
         return reference_elt
 
     def _send_reference(
-        self,
-        recipient: str,
-        anchor: str,
-        type_: str,
-        extra_s: str,
-        profile_key: str
+        self, recipient: str, anchor: str, type_: str, extra_s: str, profile_key: str
     ) -> defer.Deferred:
         recipient_jid = jid.JID(recipient)
         client = self.host.get_client(profile_key)
         extra: dict = data_format.deserialise(extra_s, default={})
         self.send_reference(
-            client,
-            uri=extra.get("uri"),
-            type_=type_,
-            anchor=anchor,
-            to_jid=recipient_jid
+            client, uri=extra.get("uri"), type_=type_, anchor=anchor, to_jid=recipient_jid
         )
 
     def send_reference(
@@ -176,7 +169,7 @@
         end: Optional[int] = None,
         anchor: Optional[str] = None,
         message_elt: Optional[domish.Element] = None,
-        to_jid: Optional[jid.JID] = None
+        to_jid: Optional[jid.JID] = None,
     ) -> None:
         """Build and send a reference_elt
 
@@ -197,9 +190,7 @@
         """
         if uri is None:
             if to_jid is None:
-                raise exceptions.InternalError(
-                    '"to_jid" must be set if "uri is None"'
-                )
+                raise exceptions.InternalError('"to_jid" must be set if "uri is None"')
             uri = xmpp_uri.build_xmpp_uri(path=to_jid.full())
         if message_elt is None:
             message_elt = domish.Element((None, "message"))
@@ -212,7 +203,7 @@
             except (KeyError, RuntimeError):
                 raise exceptions.InternalError(
                     'invalid "to" attribute in given message element: '
-                    '{message_elt.toXml()}'
+                    "{message_elt.toXml()}"
                 )
 
         message_elt.addChild(self.build_ref_element(uri, type_, begin, end, anchor))
--- a/libervia/backend/plugins/plugin_xep_0373.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0373.py	Wed Jun 19 18:44:57 2024 +0200
@@ -75,7 +75,7 @@
     "GPGProvider",
     "PublicKeyMetadata",
     "gpg_provider",
-    "TrustLevel"
+    "TrustLevel",
 ]
 
 
@@ -86,8 +86,8 @@
     C.PI_NAME: "XEP-0373",
     C.PI_IMPORT_NAME: "XEP-0373",
     C.PI_TYPE: "SEC",
-    C.PI_PROTOCOLS: [ "XEP-0373" ],
-    C.PI_DEPENDENCIES: [ "XEP-0060", "XEP-0163" ],
+    C.PI_PROTOCOLS: ["XEP-0373"],
+    C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0163"],
     C.PI_RECOMMENDATIONS: [],
     C.PI_MAIN: "XEP_0373",
     C.PI_HANDLER: "no",
@@ -306,10 +306,7 @@
 
     @abstractmethod
     def verify_detached(
-        self,
-        data: bytes,
-        signature: bytes,
-        public_keys: Set[GPGPublicKey]
+        self, data: bytes, signature: bytes, public_keys: Set[GPGPublicKey]
     ) -> None:
         """Verify signed data, where the signature was created detached from the data.
 
@@ -329,7 +326,7 @@
         self,
         plaintext: bytes,
         public_keys: Set[GPGPublicKey],
-        signing_keys: Optional[Set[GPGSecretKey]] = None
+        signing_keys: Optional[Set[GPGSecretKey]] = None,
     ) -> bytes:
         """Encrypt and optionally sign some data.
 
@@ -346,7 +343,7 @@
         self,
         ciphertext: bytes,
         secret_keys: Set[GPGSecretKey],
-        public_keys: Optional[Set[GPGPublicKey]] = None
+        public_keys: Optional[Set[GPGPublicKey]] = None,
     ) -> bytes:
         """Decrypt and optionally verify some data.
 
@@ -569,9 +566,7 @@
         with gpg.Context(home_dir=self.__home_dir) as c:
             try:
                 plaintext, __, __ = c.decrypt(
-                    ciphertext,
-                    passphrase=password,
-                    verify=False
+                    ciphertext, passphrase=password, verify=False
                 )
             except gpg.errors.GPGMEError as e:
                 # TODO: Find out what kind of error is raised if the password is wrong and
@@ -646,10 +641,7 @@
             return data
 
     def verify_detached(
-        self,
-        data: bytes,
-        signature: bytes,
-        public_keys: Set[GPGPublicKey]
+        self, data: bytes, signature: bytes, public_keys: Set[GPGPublicKey]
     ) -> None:
         with gpg.Context(home_dir=self.__home_dir) as c:
             try:
@@ -677,7 +669,7 @@
         self,
         plaintext: bytes,
         public_keys: Set[GPGPublicKey],
-        signing_keys: Optional[Set[GPGSecretKey]] = None
+        signing_keys: Optional[Set[GPGSecretKey]] = None,
     ) -> bytes:
         recipients = []
         for public_key in public_keys:
@@ -701,7 +693,7 @@
                     recipients=recipients,
                     sign=sign,
                     always_trust=True,
-                    add_encrypt_to=True
+                    add_encrypt_to=True,
                 )
             except gpg.errors.GPGMEError as e:
                 raise GPGProviderError("Internal GPGME error") from e
@@ -720,16 +712,13 @@
         self,
         ciphertext: bytes,
         secret_keys: Set[GPGSecretKey],
-        public_keys: Optional[Set[GPGPublicKey]] = None
+        public_keys: Optional[Set[GPGPublicKey]] = None,
     ) -> bytes:
         verify = public_keys is not None
 
         with gpg.Context(home_dir=self.__home_dir) as c:
             try:
-                plaintext, result, verify_result = c.decrypt(
-                    ciphertext,
-                    verify=verify
-                )
+                plaintext, result, verify_result = c.decrypt(ciphertext, verify=verify)
             except gpg.errors.GPGMEError as e:
                 raise GPGProviderError("Internal GPGME error") from e
             except gpg.UnsupportedAlgorithm as e:
@@ -760,8 +749,7 @@
             try:
                 return {
                     GPGME_GPGPublicKey(key)
-                    for key
-                    in c.keylist(pattern=user_id, secret=False)
+                    for key in c.keylist(pattern=user_id, secret=False)
                 }
             except gpg.errors.GPGMEError as e:
                 raise GPGProviderError("Internal GPGME error") from e
@@ -771,8 +759,7 @@
             try:
                 return {
                     GPGME_GPGSecretKey(GPGME_GPGPublicKey(key))
-                    for key
-                    in c.keylist(pattern=user_id, secret=True)
+                    for key in c.keylist(pattern=user_id, secret=True)
                 }
             except gpg.errors.GPGMEError as e:
                 raise GPGProviderError("Internal GPGME error") from e
@@ -797,7 +784,7 @@
                     encrypt=True,
                     certify=False,
                     authenticate=False,
-                    force=True
+                    force=True,
                 )
 
                 key_obj = c.get_key(result.fpr, secret=True)
@@ -813,19 +800,20 @@
     """
     Metadata about a published public key.
     """
+
     fingerprint: str
     timestamp: datetime
 
     def to_dict(self) -> dict:
         # Convert the instance to a dictionary and handle datetime serialization
         data = self._asdict()
-        data['timestamp'] = self.timestamp.isoformat()
+        data["timestamp"] = self.timestamp.isoformat()
         return data
 
     @staticmethod
-    def from_dict(data: dict) -> 'PublicKeyMetadata':
+    def from_dict(data: dict) -> "PublicKeyMetadata":
         # Load a serialised dictionary
-        data['timestamp'] = datetime.fromisoformat(data['timestamp'])
+        data["timestamp"] = datetime.fromisoformat(data["timestamp"])
         return PublicKeyMetadata(**data)
 
 
@@ -841,20 +829,23 @@
     DISTRUSTED: str = "DISTRUSTED"
 
 
-OPENPGP_SCHEMA = xmlschema.XMLSchema("""<?xml version="1.0" encoding="utf8"?>
+OPENPGP_SCHEMA = xmlschema.XMLSchema(
+    """<?xml version="1.0" encoding="utf8"?>
 <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
     targetNamespace="urn:xmpp:openpgp:0"
     xmlns="urn:xmpp:openpgp:0">
 
     <xs:element name="openpgp" type="xs:base64Binary"/>
 </xs:schema>
-""")
+"""
+)
 
 
 # The following schema needs verion 1.1 of XML Schema, which is not supported by lxml.
 # Luckily, xmlschema exists, which is a clean, well maintained, cross-platform
 # implementation of XML Schema, including version 1.1.
-CONTENT_SCHEMA = xmlschema.XMLSchema11("""<?xml version="1.1" encoding="utf8"?>
+CONTENT_SCHEMA = xmlschema.XMLSchema11(
+    """<?xml version="1.1" encoding="utf8"?>
 <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
     targetNamespace="urn:xmpp:openpgp:0"
     xmlns="urn:xmpp:openpgp:0">
@@ -914,11 +905,13 @@
         </xs:complexType>
     </xs:element>
 </xs:schema>
-""")
+"""
+)
 
 
 PUBLIC_KEYS_LIST_NODE = "urn:xmpp:openpgp:0:public-keys"
-PUBLIC_KEYS_LIST_SCHEMA = xmlschema.XMLSchema("""<?xml version="1.0" encoding="utf8"?>
+PUBLIC_KEYS_LIST_SCHEMA = xmlschema.XMLSchema(
+    """<?xml version="1.0" encoding="utf8"?>
 <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
     targetNamespace="urn:xmpp:openpgp:0"
     xmlns="urn:xmpp:openpgp:0">
@@ -938,10 +931,12 @@
         </xs:complexType>
     </xs:element>
 </xs:schema>
-""")
+"""
+)
 
 
-PUBKEY_SCHEMA = xmlschema.XMLSchema("""<?xml version="1.0" encoding="utf8"?>
+PUBKEY_SCHEMA = xmlschema.XMLSchema(
+    """<?xml version="1.0" encoding="utf8"?>
 <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
     targetNamespace="urn:xmpp:openpgp:0"
     xmlns="urn:xmpp:openpgp:0">
@@ -957,17 +952,20 @@
 
     <xs:element name="data" type="xs:base64Binary"/>
 </xs:schema>
-""")
+"""
+)
 
 
-SECRETKEY_SCHEMA = xmlschema.XMLSchema("""<?xml version="1.0" encoding="utf8"?>
+SECRETKEY_SCHEMA = xmlschema.XMLSchema(
+    """<?xml version="1.0" encoding="utf8"?>
 <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
     targetNamespace="urn:xmpp:openpgp:0"
     xmlns="urn:xmpp:openpgp:0">
 
     <xs:element name="secretkey" type="xs:base64Binary"/>
 </xs:schema>
-""")
+"""
+)
 
 
 DEFAULT_TRUST_MODEL_PARAM = f"""
@@ -1005,9 +1003,10 @@
     @return: The passphrase.
     """
 
-    return "-".join("".join(
-        secrets.choice("123456789ABCDEFGHIJKLMNPQRSTUVWXYZ") for __ in range(4)
-    ) for __ in range(6))
+    return "-".join(
+        "".join(secrets.choice("123456789ABCDEFGHIJKLMNPQRSTUVWXYZ") for __ in range(4))
+        for __ in range(6)
+    )
 
 
 # TODO: Handle the user id mess
@@ -1038,15 +1037,14 @@
             PUBLIC_KEYS_LIST_NODE,
             lambda items_event, profile: defer.ensureDeferred(
                 self.__on_public_keys_list_update(items_event, profile)
-            )
+            ),
         )
 
     async def profile_connecting(self, client):
         client.gpg_provider = get_gpg_provider(self.host, client)
 
     async def profile_connected(  # pylint: disable=invalid-name
-        self,
-        client: SatXMPPClient
+        self, client: SatXMPPClient
     ) -> None:
         """
         @param client: The client.
@@ -1055,17 +1053,16 @@
         profile = cast(str, client.profile)
 
         if not profile in self.__storage:
-            self.__storage[profile] = \
-                persistent.LazyPersistentBinaryDict("XEP-0373", client.profile)
+            self.__storage[profile] = persistent.LazyPersistentBinaryDict(
+                "XEP-0373", client.profile
+            )
 
         if len(self.list_secret_keys(client)) == 0:
             log.debug(f"Generating first GPG key for {client.jid.userhost()}.")
             await self.create_key(client)
 
     async def __on_public_keys_list_update(
-        self,
-        items_event: pubsub.ItemsEvent,
-        profile: str
+        self, items_event: pubsub.ItemsEvent, profile: str
     ) -> None:
         """Handle public keys list updates fired by PEP.
 
@@ -1089,7 +1086,7 @@
 
         public_keys_list_elt = cast(
             Optional[domish.Element],
-            next(item_elt.elements(NS_OX, "public-keys-list"), None)
+            next(item_elt.elements(NS_OX, "public-keys-list"), None),
         )
 
         pubkey_metadata_elts: Optional[List[domish.Element]] = None
@@ -1100,17 +1097,21 @@
             except xmlschema.XMLSchemaValidationError:
                 pass
             else:
-                pubkey_metadata_elts = \
-                    list(public_keys_list_elt.elements(NS_OX, "pubkey-metadata"))
+                pubkey_metadata_elts = list(
+                    public_keys_list_elt.elements(NS_OX, "pubkey-metadata")
+                )
 
         if pubkey_metadata_elts is None:
             log.warning(f"Malformed public keys list update item: {item_elt.toXml()}")
             return
 
-        new_public_keys_metadata = { PublicKeyMetadata(
-            fingerprint=cast(str, pubkey_metadata_elt["v4-fingerprint"]),
-            timestamp=parse_datetime(cast(str, pubkey_metadata_elt["date"]))
-        ) for pubkey_metadata_elt in pubkey_metadata_elts }
+        new_public_keys_metadata = {
+            PublicKeyMetadata(
+                fingerprint=cast(str, pubkey_metadata_elt["v4-fingerprint"]),
+                timestamp=parse_datetime(cast(str, pubkey_metadata_elt["date"])),
+            )
+            for pubkey_metadata_elt in pubkey_metadata_elts
+        }
 
         storage_key = STR_KEY_PUBLIC_KEYS_METADATA.format(sender.userhost())
 
@@ -1140,11 +1141,15 @@
         # included in the update
         if sender.userhost() == client.jid.userhost():
             secret_keys = self.list_secret_keys(client)
-            missing_keys = set(filter(lambda secret_key: all(
-                key_metadata.fingerprint != secret_key.public_key.fingerprint
-                for key_metadata
-                in new_public_keys_metadata
-            ), secret_keys))
+            missing_keys = set(
+                filter(
+                    lambda secret_key: all(
+                        key_metadata.fingerprint != secret_key.public_key.fingerprint
+                        for key_metadata in new_public_keys_metadata
+                    ),
+                    secret_keys,
+                )
+            )
 
             if len(missing_keys) > 0:
                 log.warning(
@@ -1154,16 +1159,17 @@
 
                 for missing_key in missing_keys:
                     log.warning(missing_key.public_key.fingerprint)
-                    new_public_keys_metadata.add(PublicKeyMetadata(
-                        fingerprint=missing_key.public_key.fingerprint,
-                        timestamp=datetime.now(timezone.utc)
-                    ))
+                    new_public_keys_metadata.add(
+                        PublicKeyMetadata(
+                            fingerprint=missing_key.public_key.fingerprint,
+                            timestamp=datetime.now(timezone.utc),
+                        )
+                    )
 
                 await self.publish_public_keys_list(client, new_public_keys_metadata)
 
         await self.__storage[profile].force(
-            storage_key,
-            [pkm.to_dict() for pkm in new_public_keys_metadata]
+            storage_key, [pkm.to_dict() for pkm in new_public_keys_metadata]
         )
 
     def list_public_keys(self, client: SatXMPPClient, jid: jid.JID) -> Set[GPGPublicKey]:
@@ -1211,16 +1217,17 @@
             for pkm in await self.__storage[client.profile].get(storage_key, [])
         }
 
-        public_keys_list.add(PublicKeyMetadata(
-            fingerprint=secret_key.public_key.fingerprint,
-            timestamp=datetime.now(timezone.utc)
-        ))
+        public_keys_list.add(
+            PublicKeyMetadata(
+                fingerprint=secret_key.public_key.fingerprint,
+                timestamp=datetime.now(timezone.utc),
+            )
+        )
 
         await self.publish_public_keys_list(client, public_keys_list)
 
         await self.__storage[client.profile].force(
-            storage_key,
-            [pkm.to_dict() for pkm in public_keys_list]
+            storage_key, [pkm.to_dict() for pkm in public_keys_list]
         )
 
         return secret_key
@@ -1229,7 +1236,7 @@
     def __build_content_element(
         element_name: Literal["signcrypt", "sign", "crypt"],
         recipient_jids: Iterable[jid.JID],
-        include_rpad: bool
+        include_rpad: bool,
     ) -> Tuple[domish.Element, domish.Element]:
         """Build a content element.
 
@@ -1254,8 +1261,7 @@
             rpad_length = secrets.randbelow(201)
             rpad_content = "".join(
                 secrets.choice(string.digits + string.ascii_letters + string.punctuation)
-                for __
-                in range(rpad_length)
+                for __ in range(rpad_length)
             )
             content_elt.addElement("rpad", content=rpad_content)
 
@@ -1265,7 +1271,7 @@
 
     @staticmethod
     def build_signcrypt_element(
-        recipient_jids: Iterable[jid.JID]
+        recipient_jids: Iterable[jid.JID],
     ) -> Tuple[domish.Element, domish.Element]:
         """Build a ``<signcrypt/>`` content element.
 
@@ -1282,8 +1288,7 @@
 
     @staticmethod
     def build_sign_element(
-        recipient_jids: Iterable[jid.JID],
-        include_rpad: bool
+        recipient_jids: Iterable[jid.JID], include_rpad: bool
     ) -> Tuple[domish.Element, domish.Element]:
         """Build a ``<sign/>`` content element.
 
@@ -1302,7 +1307,7 @@
 
     @staticmethod
     def build_crypt_element(
-        recipient_jids: Iterable[jid.JID]
+        recipient_jids: Iterable[jid.JID],
     ) -> Tuple[domish.Element, domish.Element]:
         """Build a ``<crypt/>`` content element.
 
@@ -1319,7 +1324,7 @@
         self,
         client: SatXMPPClient,
         content_elt: domish.Element,
-        recipient_jids: Set[jid.JID]
+        recipient_jids: Set[jid.JID],
     ) -> domish.Element:
         """Build an ``<openpgp/>`` element.
 
@@ -1333,10 +1338,12 @@
 
         # TODO: I'm not sure whether we want to sign with all keys by default or choose
         # just one key/a subset of keys to sign with.
-        signing_keys = set(filter(
-            lambda secret_key: gpg_provider.can_sign(secret_key.public_key),
-            self.list_secret_keys(client)
-        ))
+        signing_keys = set(
+            filter(
+                lambda secret_key: gpg_provider.can_sign(secret_key.public_key),
+                self.list_secret_keys(client),
+            )
+        )
 
         encryption_keys: Set[GPGPublicKey] = set()
 
@@ -1370,7 +1377,7 @@
         client: SatXMPPClient,
         openpgp_elt: domish.Element,
         element_name: Literal["signcrypt", "sign", "crypt"],
-        sender_jid: jid.JID
+        sender_jid: jid.JID,
     ) -> Tuple[domish.Element, datetime]:
         """Verify, decrypt and unpack an ``<openpgp/>`` element.
 
@@ -1392,10 +1399,12 @@
 
         gpg_provider = get_gpg_provider(self.host, client)
 
-        decryption_keys = set(filter(
-            lambda secret_key: gpg_provider.can_encrypt(secret_key.public_key),
-            self.list_secret_keys(client)
-        ))
+        decryption_keys = set(
+            filter(
+                lambda secret_key: gpg_provider.can_encrypt(secret_key.public_key),
+                self.list_secret_keys(client),
+            )
+        )
 
         # import all keys of the sender
         all_public_keys = await self.import_all_public_keys(client, sender_jid)
@@ -1417,9 +1426,7 @@
 
         if element_name == "signcrypt":
             content = gpg_provider.decrypt(
-                openpgp_message,
-                decryption_keys,
-                public_keys=verification_keys
+                openpgp_message, decryption_keys, public_keys=verification_keys
             )
         elif element_name == "sign":
             content = gpg_provider.verify(openpgp_message, verification_keys)
@@ -1430,8 +1437,7 @@
 
         try:
             content_elt = cast(
-                domish.Element,
-                xml_tools.ElementParser()(content.decode("utf-8"))
+                domish.Element, xml_tools.ElementParser()(content.decode("utf-8"))
             )
         except UnicodeDecodeError as e:
             raise exceptions.ParsingError("UTF-8 decoding error") from e
@@ -1446,11 +1452,12 @@
         if content_elt.name != element_name:
             raise exceptions.ParsingError(f"Not a <{element_name}/> element.")
 
-        recipient_jids = \
-            { jid.JID(to_elt["jid"]) for to_elt in content_elt.elements(NS_OX, "to") }
+        recipient_jids = {
+            jid.JID(to_elt["jid"]) for to_elt in content_elt.elements(NS_OX, "to")
+        }
 
         if (
-            client.jid.userhostJID() not in { jid.userhostJID() for jid in recipient_jids }
+            client.jid.userhostJID() not in {jid.userhostJID() for jid in recipient_jids}
             and element_name != "crypt"
         ):
             raise VerificationError(
@@ -1467,9 +1474,7 @@
         return payload_elt, timestamp
 
     async def publish_public_key(
-        self,
-        client: SatXMPPClient,
-        public_key: GPGPublicKey
+        self, client: SatXMPPClient, public_key: GPGPublicKey
     ) -> None:
         """Publish a public key.
 
@@ -1499,19 +1504,17 @@
                     XEP_0060.EXTRA_PUBLISH_OPTIONS: {
                         XEP_0060.OPT_PERSIST_ITEMS: "true",
                         XEP_0060.OPT_ACCESS_MODEL: "open",
-                        XEP_0060.OPT_MAX_ITEMS: 1
+                        XEP_0060.OPT_MAX_ITEMS: 1,
                     },
                     # TODO: Do we really want publish_without_options here?
-                    XEP_0060.EXTRA_ON_PRECOND_NOT_MET: "publish_without_options"
-                }
+                    XEP_0060.EXTRA_ON_PRECOND_NOT_MET: "publish_without_options",
+                },
             )
         except Exception as e:
             raise XMPPInteractionFailed("Publishing the public key failed.") from e
 
     async def import_all_public_keys(
-        self,
-        client: SatXMPPClient,
-        entity_jid: jid.JID
+        self, client: SatXMPPClient, entity_jid: jid.JID
     ) -> Set[GPGPublicKey]:
         """import all public keys of a JID that have not been imported before.
 
@@ -1535,26 +1538,28 @@
                 client, entity_jid
             )
             if not public_keys_metadata:
-                raise exceptions.NotFound(
-                    f"Can't find public keys for {entity_jid}"
-                )
+                raise exceptions.NotFound(f"Can't find public keys for {entity_jid}")
             else:
                 await self.__storage[client.profile].aset(
-                    storage_key,
-                    [pkm.to_dict() for pkm in public_keys_metadata]
+                    storage_key, [pkm.to_dict() for pkm in public_keys_metadata]
                 )
 
-
-        missing_keys = set(filter(lambda public_key_metadata: all(
-            public_key_metadata.fingerprint != public_key.fingerprint
-            for public_key
-            in available_public_keys
-        ), public_keys_metadata))
+        missing_keys = set(
+            filter(
+                lambda public_key_metadata: all(
+                    public_key_metadata.fingerprint != public_key.fingerprint
+                    for public_key in available_public_keys
+                ),
+                public_keys_metadata,
+            )
+        )
 
         for missing_key in missing_keys:
             try:
                 available_public_keys.add(
-                    await self.import_public_key(client, entity_jid, missing_key.fingerprint)
+                    await self.import_public_key(
+                        client, entity_jid, missing_key.fingerprint
+                    )
                 )
             except Exception as e:
                 log.warning(
@@ -1565,10 +1570,7 @@
         return available_public_keys
 
     async def import_public_key(
-        self,
-        client: SatXMPPClient,
-        jid: jid.JID,
-        fingerprint: str
+        self, client: SatXMPPClient, jid: jid.JID, fingerprint: str
     ) -> GPGPublicKey:
         """import a public key.
 
@@ -1589,10 +1591,7 @@
 
         try:
             items, __ = await self.__xep_0060.get_items(
-                client,
-                jid.userhostJID(),
-                node,
-                max_items=1
+                client, jid.userhostJID(), node, max_items=1
             )
         except exceptions.NotFound as e:
             raise exceptions.NotFound(
@@ -1611,8 +1610,7 @@
             ) from e
 
         pubkey_elt = cast(
-            Optional[domish.Element],
-            next(item_elt.elements(NS_OX, "pubkey"), None)
+            Optional[domish.Element], next(item_elt.elements(NS_OX, "pubkey"), None)
         )
 
         if pubkey_elt is None:
@@ -1629,16 +1627,14 @@
                 f" schema validation."
             ) from e
 
-        public_key = gpg_provider.import_public_key(base64.b64decode(str(
-            next(pubkey_elt.elements(NS_OX, "data"))
-        )))
+        public_key = gpg_provider.import_public_key(
+            base64.b64decode(str(next(pubkey_elt.elements(NS_OX, "data"))))
+        )
 
         return public_key
 
     async def publish_public_keys_list(
-        self,
-        client: SatXMPPClient,
-        public_keys_list: Iterable[PublicKeyMetadata]
+        self, client: SatXMPPClient, public_keys_list: Iterable[PublicKeyMetadata]
     ) -> None:
         """Publish/update the own public keys list.
 
@@ -1650,7 +1646,7 @@
             beforehand.
         """
 
-        if len({ pkm.fingerprint for pkm in public_keys_list }) != len(public_keys_list):
+        if len({pkm.fingerprint for pkm in public_keys_list}) != len(public_keys_list):
             raise ValueError("Public keys list contains duplicate fingerprints.")
 
         node = "urn:xmpp:openpgp:0:public-keys"
@@ -1673,19 +1669,17 @@
                     XEP_0060.EXTRA_PUBLISH_OPTIONS: {
                         XEP_0060.OPT_PERSIST_ITEMS: "true",
                         XEP_0060.OPT_ACCESS_MODEL: "open",
-                        XEP_0060.OPT_MAX_ITEMS: 1
+                        XEP_0060.OPT_MAX_ITEMS: 1,
                     },
                     # TODO: Do we really want publish_without_options here?
-                    XEP_0060.EXTRA_ON_PRECOND_NOT_MET: "publish_without_options"
-                }
+                    XEP_0060.EXTRA_ON_PRECOND_NOT_MET: "publish_without_options",
+                },
             )
         except Exception as e:
             raise XMPPInteractionFailed("Publishing the public keys list failed.") from e
 
     async def download_public_keys_list(
-        self,
-        client: SatXMPPClient,
-        jid: jid.JID
+        self, client: SatXMPPClient, jid: jid.JID
     ) -> Optional[Set[PublicKeyMetadata]]:
         """Download the public keys list of a JID.
 
@@ -1701,10 +1695,7 @@
 
         try:
             items, __ = await self.__xep_0060.get_items(
-                client,
-                jid.userhostJID(),
-                node,
-                max_items=1
+                client, jid.userhostJID(), node, max_items=1
             )
         except exceptions.NotFound:
             return None
@@ -1718,7 +1709,7 @@
 
         public_keys_list_elt = cast(
             Optional[domish.Element],
-            next(item_elt.elements(NS_OX, "public-keys-list"), None)
+            next(item_elt.elements(NS_OX, "public-keys-list"), None),
         )
 
         if public_keys_list_elt is None:
@@ -1735,15 +1726,15 @@
         return {
             PublicKeyMetadata(
                 fingerprint=pubkey_metadata_elt["v4-fingerprint"],
-                timestamp=parse_datetime(pubkey_metadata_elt["date"])
+                timestamp=parse_datetime(pubkey_metadata_elt["date"]),
             )
-            for pubkey_metadata_elt
-            in public_keys_list_elt.elements(NS_OX, "pubkey-metadata")
+            for pubkey_metadata_elt in public_keys_list_elt.elements(
+                NS_OX, "pubkey-metadata"
+            )
         }
 
     async def __prepare_secret_key_synchronization(
-        self,
-        client: SatXMPPClient
+        self, client: SatXMPPClient
     ) -> Optional[domish.Element]:
         """Prepare for secret key synchronization.
 
@@ -1760,10 +1751,10 @@
         """
 
         try:
-            infos = cast(DiscoInfo, await self.host.memory.disco.get_infos(
-                client,
-                client.jid.userhostJID()
-            ))
+            infos = cast(
+                DiscoInfo,
+                await self.host.memory.disco.get_infos(client, client.jid.userhostJID()),
+            )
         except Exception as e:
             raise XMPPInteractionFailed(
                 "Error performing service discovery on the own bare JID."
@@ -1780,8 +1771,9 @@
                 "Server doesn't support the whitelist access model."
             )
 
-        persistent_items_supported = \
+        persistent_items_supported = (
             "http://jabber.org/protocol/pubsub#persistent-items" in features
+        )
 
         # TODO: persistent-items is a SHOULD, how do we handle the feature missing?
 
@@ -1789,10 +1781,7 @@
 
         try:
             items, __ = await self.__xep_0060.get_items(
-                client,
-                client.jid.userhostJID(),
-                node,
-                max_items=1
+                client, client.jid.userhostJID(), node, max_items=1
             )
         except exceptions.NotFound:
             try:
@@ -1803,8 +1792,8 @@
                     {
                         XEP_0060.OPT_PERSIST_ITEMS: "true",
                         XEP_0060.OPT_ACCESS_MODEL: "whitelist",
-                        XEP_0060.OPT_MAX_ITEMS: "1"
-                    }
+                        XEP_0060.OPT_MAX_ITEMS: "1",
+                    },
                 )
             except Exception as e:
                 raise XMPPInteractionFailed(
@@ -1821,9 +1810,7 @@
             return None
 
     async def export_secret_keys(
-        self,
-        client: SatXMPPClient,
-        secret_keys: Iterable[GPGSecretKey]
+        self, client: SatXMPPClient, secret_keys: Iterable[GPGSecretKey]
     ) -> str:
         """Export secret keys to synchronize them with other devices.
 
@@ -1854,10 +1841,7 @@
 
         try:
             await self.__xep_0060.send_item(
-                client,
-                client.jid.userhostJID(),
-                node,
-                secretkey_elt
+                client, client.jid.userhostJID(), node, secretkey_elt
             )
         except Exception as e:
             raise XMPPInteractionFailed("Publishing the secret keys failed.") from e
@@ -1885,8 +1869,7 @@
             return None
 
         secretkey_elt = cast(
-            Optional[domish.Element],
-            next(item_elt.elements(NS_OX, "secretkey"), None)
+            Optional[domish.Element], next(item_elt.elements(NS_OX, "secretkey"), None)
         )
 
         if secretkey_elt is None:
@@ -1902,10 +1885,7 @@
         return base64.b64decode(str(secretkey_elt))
 
     def import_secret_keys(
-        self,
-        client: SatXMPPClient,
-        ciphertext: bytes,
-        backup_code: str
+        self, client: SatXMPPClient, ciphertext: bytes, backup_code: str
     ) -> Set[GPGSecretKey]:
         """import previously downloaded secret keys.
 
@@ -1925,16 +1905,13 @@
 
         gpg_provider = get_gpg_provider(self.host, client)
 
-        return gpg_provider.restore_secret_keys(gpg_provider.decrypt_symmetrically(
-            ciphertext,
-            backup_code
-        ))
+        return gpg_provider.restore_secret_keys(
+            gpg_provider.decrypt_symmetrically(ciphertext, backup_code)
+        )
 
     @staticmethod
     def __get_joined_muc_users(
-        client: SatXMPPClient,
-        xep_0045: XEP_0045,
-        room_jid: jid.JID
+        client: SatXMPPClient, xep_0045: XEP_0045, room_jid: jid.JID
     ) -> Set[jid.JID]:
         """
         @param client: The client.
@@ -1968,10 +1945,7 @@
         return bare_jids
 
     async def get_trust(
-        self,
-        client: SatXMPPClient,
-        public_key: GPGPublicKey,
-        owner: jid.JID
+        self, client: SatXMPPClient, public_key: GPGPublicKey, owner: jid.JID
     ) -> TrustLevel:
         """Query the trust level of a public key.
 
@@ -1993,7 +1967,7 @@
         client: SatXMPPClient,
         public_key: GPGPublicKey,
         owner: jid.JID,
-        trust_level: TrustLevel
+        trust_level: TrustLevel,
     ) -> None:
         """Set the trust level of a public key.
 
@@ -2008,9 +1982,7 @@
         await self.__storage[client.profile].force(key, trust_level.name)
 
     async def get_trust_ui(  # pylint: disable=invalid-name
-        self,
-        client: SatXMPPClient,
-        entity: jid.JID
+        self, client: SatXMPPClient, entity: jid.JID
     ) -> xml_tools.XMLUI:
         """
         @param client: The client.
@@ -2026,17 +1998,17 @@
         if self.__xep_0045 is not None and self.__xep_0045.is_joined_room(client, entity):
             bare_jids = self.__get_joined_muc_users(client, self.__xep_0045, entity)
         else:
-            bare_jids = { entity.userhostJID() }
+            bare_jids = {entity.userhostJID()}
 
-        all_public_keys = list({
-            bare_jid: list(self.list_public_keys(client, bare_jid))
-            for bare_jid
-            in bare_jids
-        }.items())
+        all_public_keys = list(
+            {
+                bare_jid: list(self.list_public_keys(client, bare_jid))
+                for bare_jid in bare_jids
+            }.items()
+        )
 
         async def callback(
-            data: Any,
-            profile: str  # pylint: disable=unused-argument
+            data: Any, profile: str  # pylint: disable=unused-argument
         ) -> Dict[Never, Never]:
             """
             @param data: The XMLUI result produces by the trust UI form.
@@ -2049,8 +2021,7 @@
                 return {}
 
             data_form_result = cast(
-                Dict[str, str],
-                xml_tools.xmlui_result_2_data_form_result(data)
+                Dict[str, str], xml_tools.xmlui_result_2_data_form_result(data)
             )
             for key, value in data_form_result.items():
                 if not key.startswith("trust_"):
@@ -2070,22 +2041,22 @@
         submit_id = self.host.register_callback(callback, with_data=True, one_shot=True)
 
         result = xml_tools.XMLUI(
-            panel_type=C.XMLUI_FORM,
-            title=D_("OX trust management"),
-            submit_id=submit_id
+            panel_type=C.XMLUI_FORM, title=D_("OX trust management"), submit_id=submit_id
         )
         # Casting this to Any, otherwise all calls on the variable cause type errors
         # pylint: disable=no-member
         trust_ui = cast(Any, result)
-        trust_ui.addText(D_(
-            "This is OX trusting system. You'll see below the GPG keys of your "
-            "contacts, and a list selection to trust them or not. A trusted key "
-            "can read your messages in plain text, so be sure to only validate "
-            "keys that you are sure are belonging to your contact. It's better "
-            "to do this when you are next to your contact, so "
-            "you can check the \"fingerprint\" of the key "
-            "yourself. Do *not* validate a key if the fingerprint is wrong!"
-        ))
+        trust_ui.addText(
+            D_(
+                "This is OX trusting system. You'll see below the GPG keys of your "
+                "contacts, and a list selection to trust them or not. A trusted key "
+                "can read your messages in plain text, so be sure to only validate "
+                "keys that you are sure are belonging to your contact. It's better "
+                "to do this when you are next to your contact, so "
+                'you can check the "fingerprint" of the key '
+                "yourself. Do *not* validate a key if the fingerprint is wrong!"
+            )
+        )
 
         own_secret_keys = self.list_secret_keys(client)
 
@@ -2096,7 +2067,7 @@
             trust_ui.addEmpty()
             trust_ui.addEmpty()
 
-        for outer_index, [ owner, public_keys ] in enumerate(all_public_keys):
+        for outer_index, [owner, public_keys] in enumerate(all_public_keys):
             for inner_index, public_key in enumerate(public_keys):
                 trust_ui.addLabel(D_("Contact"))
                 trust_ui.addJid(jid.JID(owner))
@@ -2105,14 +2076,17 @@
                 trust_ui.addLabel(D_("Trust this device?"))
 
                 current_trust_level = await self.get_trust(client, public_key, owner)
-                avaiable_trust_levels = \
-                    { TrustLevel.DISTRUSTED, TrustLevel.TRUSTED, current_trust_level }
+                avaiable_trust_levels = {
+                    TrustLevel.DISTRUSTED,
+                    TrustLevel.TRUSTED,
+                    current_trust_level,
+                }
 
                 trust_ui.addList(
                     f"trust_{outer_index}_{inner_index}",
-                    options=[ trust_level.name for trust_level in avaiable_trust_levels ],
+                    options=[trust_level.name for trust_level in avaiable_trust_levels],
                     selected=current_trust_level.name,
-                    styles=[ "inline" ]
+                    styles=["inline"],
                 )
 
                 trust_ui.addEmpty()
--- a/libervia/backend/plugins/plugin_xep_0374.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0374.py	Wed Jun 19 18:44:57 2024 +0200
@@ -37,11 +37,7 @@
 from twisted.words.xish import domish
 
 
-__all__ = [  # pylint: disable=unused-variable
-    "PLUGIN_INFO",
-    "XEP_0374",
-    "NS_OXIM"
-]
+__all__ = ["PLUGIN_INFO", "XEP_0374", "NS_OXIM"]  # pylint: disable=unused-variable
 
 
 log = cast(Logger, getLogger(__name__))  # type: ignore[no-untyped-call]
@@ -51,9 +47,9 @@
     C.PI_NAME: "OXIM",
     C.PI_IMPORT_NAME: "XEP-0374",
     C.PI_TYPE: "SEC",
-    C.PI_PROTOCOLS: [ "XEP-0374" ],
-    C.PI_DEPENDENCIES: [ "XEP-0334", "XEP-0373" ],
-    C.PI_RECOMMENDATIONS: [ "XEP-0045" ],
+    C.PI_PROTOCOLS: ["XEP-0374"],
+    C.PI_DEPENDENCIES: ["XEP-0334", "XEP-0373"],
+    C.PI_RECOMMENDATIONS: ["XEP-0045"],
     C.PI_MAIN: "XEP_0374",
     C.PI_HANDLER: "no",
     C.PI_DESCRIPTION: _("""Implementation of OXIM"""),
@@ -85,9 +81,7 @@
 
         # Triggers
         sat.trigger.add(
-            "message_received",
-            self.__message_received_trigger,
-            priority=100050
+            "message_received", self.__message_received_trigger, priority=100050
         )
         sat.trigger.add("send", self.__send_trigger, priority=0)
 
@@ -95,9 +89,7 @@
         sat.register_encryption_plugin(self, "OXIM", NS_OX, 102)
 
     async def get_trust_ui(  # pylint: disable=invalid-name
-        self,
-        client: SatXMPPClient,
-        entity: jid.JID
+        self, client: SatXMPPClient, entity: jid.JID
     ) -> xml_tools.XMLUI:
         """
         @param client: The client.
@@ -110,9 +102,7 @@
 
     @staticmethod
     def __get_joined_muc_users(
-        client: SatXMPPClient,
-        xep_0045: XEP_0045,
-        room_jid: jid.JID
+        client: SatXMPPClient, xep_0045: XEP_0045, room_jid: jid.JID
     ) -> Set[jid.JID]:
         """
         @param client: The client.
@@ -148,7 +138,7 @@
         self,
         client: SatXMPPClient,
         message_elt: domish.Element,
-        post_treat: defer.Deferred
+        post_treat: defer.Deferred,
     ) -> bool:
         """
         @param client: The client which received the message.
@@ -216,10 +206,9 @@
 
         sender_bare_jid = sender_jid.userhost()
 
-        openpgp_elt = cast(Optional[domish.Element], next(
-            message_elt.elements(NS_OX, "openpgp"),
-            None
-        ))
+        openpgp_elt = cast(
+            Optional[domish.Element], next(message_elt.elements(NS_OX, "openpgp"), None)
+        )
 
         if openpgp_elt is None:
             # None of our business, let the flow continue
@@ -227,24 +216,22 @@
 
         try:
             payload_elt, timestamp = await self.__xep_0373.unpack_openpgp_element(
-                client,
-                openpgp_elt,
-                "signcrypt",
-                jid.JID(sender_bare_jid)
+                client, openpgp_elt, "signcrypt", jid.JID(sender_bare_jid)
             )
         except Exception as e:
             # TODO: More specific exception handling
-            log.warning(_("Can't decrypt message: {reason}\n{xml}").format(
-                reason=e,
-                xml=message_elt.toXml()
-            ))
+            log.warning(
+                _("Can't decrypt message: {reason}\n{xml}").format(
+                    reason=e, xml=message_elt.toXml()
+                )
+            )
             client.feedback(
                 feedback_jid,
                 D_(
                     f"An OXIM message from {sender_jid.full()} can't be decrypted:"
                     f" {e}"
                 ),
-                { C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR }
+                {C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR},
             )
             # No point in further processing this message
             return False
@@ -277,10 +264,7 @@
             post_treat.addCallback(client.encryption.mark_as_untrusted)
 
         # Mark the message as originally encrypted
-        post_treat.addCallback(
-            client.encryption.mark_as_encrypted,
-            namespace=NS_OX
-        )
+        post_treat.addCallback(client.encryption.mark_as_encrypted, namespace=NS_OX)
 
         # Message processed successfully, continue with the flow
         return True
@@ -322,11 +306,11 @@
             client,
             stanza,
             recipient_bare_jid,
-            stanza.getAttribute("type", "unkown") == C.MESS_TYPE_GROUPCHAT
+            stanza.getAttribute("type", "unkown") == C.MESS_TYPE_GROUPCHAT,
         )
 
         # Add a store hint if this is a message stanza
-        self.__xep_0334.add_hint_elements(stanza, [ "store" ])
+        self.__xep_0334.add_hint_elements(stanza, ["store"])
 
         # Let the flow continue.
         return True
@@ -336,7 +320,7 @@
         client: SatXMPPClient,
         stanza: domish.Element,
         recipient_jid: jid.JID,
-        is_muc_message: bool
+        is_muc_message: bool,
     ) -> None:
         """
         @param client: The client.
@@ -363,12 +347,10 @@
             room_jid = feedback_jid = recipient_jid.userhostJID()
 
             recipient_bare_jids = self.__get_joined_muc_users(
-                client,
-                self.__xep_0045,
-                room_jid
+                client, self.__xep_0045, room_jid
             )
         else:
-            recipient_bare_jids = { recipient_jid.userhostJID() }
+            recipient_bare_jids = {recipient_jid.userhostJID()}
             feedback_jid = recipient_jid.userhostJID()
 
         log.debug(
@@ -376,8 +358,9 @@
             f" {recipient_bare_jids}"
         )
 
-        signcrypt_elt, payload_elt = \
-            self.__xep_0373.build_signcrypt_element(recipient_bare_jids)
+        signcrypt_elt, payload_elt = self.__xep_0373.build_signcrypt_element(
+            recipient_bare_jids
+        )
 
         # Move elements from the stanza to the content element.
         # TODO: There should probably be explicitly forbidden elements here too, just as
@@ -404,22 +387,18 @@
 
         try:
             openpgp_elt = await self.__xep_0373.build_openpgp_element(
-                client,
-                signcrypt_elt,
-                recipient_bare_jids
+                client, signcrypt_elt, recipient_bare_jids
             )
         except Exception as e:
             msg = _(
                 # pylint: disable=consider-using-f-string
                 "Can't encrypt message for {entities}: {reason}".format(
-                    entities=', '.join(jid.userhost() for jid in recipient_bare_jids),
-                    reason=e
+                    entities=", ".join(jid.userhost() for jid in recipient_bare_jids),
+                    reason=e,
                 )
             )
             log.warning(msg)
-            client.feedback(feedback_jid, msg, {
-                C.MESS_EXTRA_INFO: C.EXTRA_INFO_ENCR_ERR
-            })
+            client.feedback(feedback_jid, msg, {C.MESS_EXTRA_INFO: C.EXTRA_INFO_ENCR_ERR})
             raise e
 
         stanza.addChild(openpgp_elt)
--- a/libervia/backend/plugins/plugin_xep_0376.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0376.py	Wed Jun 19 18:44:57 2024 +0200
@@ -73,7 +73,7 @@
         nodeIdentifier: str,
         sub_jid: Optional[jid.JID],
         options: Optional[dict],
-        subscribe: bool
+        subscribe: bool,
     ) -> None:
         if sub_jid is None:
             sub_jid = client.jid.userhostJID()
@@ -90,8 +90,7 @@
             options_elt["node"] = nodeIdentifier
             options_elt["jid"] = sub_jid.full()
             form = data_form.Form(
-                formType='submit',
-                formNamespace=pubsub.NS_PUBSUB_SUBSCRIBE_OPTIONS
+                formType="submit", formNamespace=pubsub.NS_PUBSUB_SUBSCRIBE_OPTIONS
             )
             form.makeFields(options)
             options_elt.addChild(form.toElement())
@@ -104,7 +103,7 @@
         service: jid.JID,
         nodeIdentifier: str,
         sub_jid: Optional[jid.JID] = None,
-        options: Optional[dict] = None
+        options: Optional[dict] = None,
     ) -> Tuple[bool, Optional[pubsub.Subscription]]:
         if not self.host.hasFeature(client, NS_PAM) or client.is_component:
             return True, None
@@ -161,7 +160,7 @@
                     ("service", "service"),
                     ("node", "node"),
                     ("jid", "subscriber"),
-                    ("subscription", "state")
+                    ("subscription", "state"),
                 ):
                     sub[key] = subscription_elt[attr]
             except KeyError as e:
--- a/libervia/backend/plugins/plugin_xep_0380.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0380.py	Wed Jun 19 18:44:57 2024 +0200
@@ -52,32 +52,33 @@
         host.register_namespace("eme", NS_EME)
 
     def _add_eme_element(self, mess_data, namespace, name):
-        message_elt = mess_data['xml']
-        encryption_elt = message_elt.addElement((NS_EME, 'encryption'))
-        encryption_elt['namespace'] = namespace
+        message_elt = mess_data["xml"]
+        encryption_elt = message_elt.addElement((NS_EME, "encryption"))
+        encryption_elt["namespace"] = namespace
         if name is not None:
-            encryption_elt['name'] = name
+            encryption_elt["name"] = name
         return mess_data
 
     def _send_message_trigger(self, client, mess_data, __, post_xml_treatments):
         encryption = mess_data.get(C.MESS_KEY_ENCRYPTION)
         if encryption is not None:
-            namespace = encryption['plugin'].namespace
+            namespace = encryption["plugin"].namespace
             if namespace not in KNOWN_NAMESPACES:
-                name = encryption['plugin'].name
+                name = encryption["plugin"].name
             else:
                 name = None
             post_xml_treatments.addCallback(
-                self._add_eme_element, namespace=namespace, name=name)
+                self._add_eme_element, namespace=namespace, name=name
+            )
         return True
 
     def _message_received_trigger(self, client, message_elt, post_treat):
         try:
-            encryption_elt = next(message_elt.elements(NS_EME, 'encryption'))
+            encryption_elt = next(message_elt.elements(NS_EME, "encryption"))
         except StopIteration:
             return True
 
-        namespace = encryption_elt['namespace']
+        namespace = encryption_elt["namespace"]
         if namespace in client.encryption.get_namespaces():
             # message is encrypted and we can decrypt it
             return True
@@ -86,16 +87,20 @@
 
         # at this point, message is encrypted but we know that we can't decrypt it,
         # we need to notify the user
-        sender_s = message_elt['from']
-        to_jid = jid.JID(message_elt['from'])
+        sender_s = message_elt["from"]
+        to_jid = jid.JID(message_elt["from"])
         algorithm = "{} [{}]".format(name, namespace) if name else namespace
         log.warning(
-            _("Message from {sender} is encrypted with {algorithm} and we can't "
-              "decrypt it.".format(sender=message_elt['from'], algorithm=algorithm)))
+            _(
+                "Message from {sender} is encrypted with {algorithm} and we can't "
+                "decrypt it.".format(sender=message_elt["from"], algorithm=algorithm)
+            )
+        )
 
         user_msg = D_(
             "User {sender} sent you an encrypted message (encrypted with {algorithm}), "
-            "and we can't decrypt it.").format(sender=sender_s, algorithm=algorithm)
+            "and we can't decrypt it."
+        ).format(sender=sender_s, algorithm=algorithm)
 
         extra = {C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR}
         client.feedback(to_jid, user_msg, extra)
--- a/libervia/backend/plugins/plugin_xep_0384.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0384.py	Wed Jun 19 18:44:57 2024 +0200
@@ -21,8 +21,20 @@
 import enum
 import logging
 import time
-from typing import \
-    Any, Dict, FrozenSet, Iterable, List, Literal, NamedTuple, Optional, Set, Type, Union, cast
+from typing import (
+    Any,
+    Dict,
+    FrozenSet,
+    Iterable,
+    List,
+    Literal,
+    NamedTuple,
+    Optional,
+    Set,
+    Type,
+    Union,
+    cast,
+)
 import uuid
 import xml.etree.ElementTree as ET
 from xml.sax.saxutils import quoteattr
@@ -45,8 +57,12 @@
 from libervia.backend.plugins.plugin_xep_0163 import XEP_0163
 from libervia.backend.plugins.plugin_xep_0334 import XEP_0334
 from libervia.backend.plugins.plugin_xep_0359 import XEP_0359
-from libervia.backend.plugins.plugin_xep_0420 import \
-    XEP_0420, SCEAffixPolicy, SCEAffixValues, SCEProfile
+from libervia.backend.plugins.plugin_xep_0420 import (
+    XEP_0420,
+    SCEAffixPolicy,
+    SCEAffixValues,
+    SCEProfile,
+)
 from libervia.backend.tools import xml_tools
 from twisted.internet import defer
 from twisted.words.protocols.jabber import error, jid
@@ -73,10 +89,7 @@
     ) from import_error
 
 
-__all__ = [  # pylint: disable=unused-variable
-    "PLUGIN_INFO",
-    "OMEMO"
-]
+__all__ = ["PLUGIN_INFO", "OMEMO"]  # pylint: disable=unused-variable
 
 log = cast(Logger, getLogger(__name__))  # type: ignore[no-untyped-call]
 
@@ -85,9 +98,9 @@
     C.PI_NAME: "OMEMO",
     C.PI_IMPORT_NAME: "XEP-0384",
     C.PI_TYPE: "SEC",
-    C.PI_PROTOCOLS: [ "XEP-0384" ],
-    C.PI_DEPENDENCIES: [ "XEP-0163", "XEP-0280", "XEP-0334", "XEP-0060", "XEP-0420" ],
-    C.PI_RECOMMENDATIONS: [ "XEP-0045", "XEP-0359", C.TEXT_CMDS ],
+    C.PI_PROTOCOLS: ["XEP-0384"],
+    C.PI_DEPENDENCIES: ["XEP-0163", "XEP-0280", "XEP-0334", "XEP-0060", "XEP-0420"],
+    C.PI_RECOMMENDATIONS: ["XEP-0045", "XEP-0359", C.TEXT_CMDS],
     C.PI_MAIN: "OMEMO",
     C.PI_HANDLER: "no",
     C.PI_DESCRIPTION: _("""Implementation of OMEMO"""),
@@ -219,8 +232,7 @@
             return None
 
         return oldmemo.migrations.OwnData(
-            own_bare_jid=self.__own_bare_jid,
-            own_device_id=own_device_id
+            own_bare_jid=self.__own_bare_jid, own_device_id=own_device_id
         )
 
     async def deleteOwnData(self) -> None:
@@ -232,7 +244,7 @@
     async def loadState(self) -> Optional[oldmemo.migrations.State]:
         return cast(
             Optional[oldmemo.migrations.State],
-            await self.__storage.get(LegacyStorageImpl.KEY_STATE, None)
+            await self.__storage.get(LegacyStorageImpl.KEY_STATE, None),
         )
 
     async def deleteState(self) -> None:
@@ -242,19 +254,16 @@
             pass
 
     async def loadSession(
-        self,
-        bare_jid: str,
-        device_id: int
+        self, bare_jid: str, device_id: int
     ) -> Optional[oldmemo.migrations.Session]:
-        key = "\n".join([ LegacyStorageImpl.KEY_SESSION, bare_jid, str(device_id) ])
+        key = "\n".join([LegacyStorageImpl.KEY_SESSION, bare_jid, str(device_id)])
 
         return cast(
-            Optional[oldmemo.migrations.Session],
-            await self.__storage.get(key, None)
+            Optional[oldmemo.migrations.Session], await self.__storage.get(key, None)
         )
 
     async def deleteSession(self, bare_jid: str, device_id: int) -> None:
-        key = "\n".join([ LegacyStorageImpl.KEY_SESSION, bare_jid, str(device_id) ])
+        key = "\n".join([LegacyStorageImpl.KEY_SESSION, bare_jid, str(device_id)])
 
         try:
             await self.__storage.remove(key)
@@ -262,23 +271,17 @@
             pass
 
     async def loadActiveDevices(self, bare_jid: str) -> Optional[List[int]]:
-        key = "\n".join([ LegacyStorageImpl.KEY_ACTIVE_DEVICES, bare_jid ])
-
-        return cast(
-            Optional[List[int]],
-            await self.__storage.get(key, None)
-        )
+        key = "\n".join([LegacyStorageImpl.KEY_ACTIVE_DEVICES, bare_jid])
+
+        return cast(Optional[List[int]], await self.__storage.get(key, None))
 
     async def loadInactiveDevices(self, bare_jid: str) -> Optional[Dict[int, int]]:
-        key = "\n".join([ LegacyStorageImpl.KEY_INACTIVE_DEVICES, bare_jid ])
-
-        return cast(
-            Optional[Dict[int, int]],
-            await self.__storage.get(key, None)
-        )
+        key = "\n".join([LegacyStorageImpl.KEY_INACTIVE_DEVICES, bare_jid])
+
+        return cast(Optional[Dict[int, int]], await self.__storage.get(key, None))
 
     async def deleteActiveDevices(self, bare_jid: str) -> None:
-        key = "\n".join([ LegacyStorageImpl.KEY_ACTIVE_DEVICES, bare_jid ])
+        key = "\n".join([LegacyStorageImpl.KEY_ACTIVE_DEVICES, bare_jid])
 
         try:
             await self.__storage.remove(key)
@@ -286,7 +289,7 @@
             pass
 
     async def deleteInactiveDevices(self, bare_jid: str) -> None:
-        key = "\n".join([ LegacyStorageImpl.KEY_INACTIVE_DEVICES, bare_jid ])
+        key = "\n".join([LegacyStorageImpl.KEY_INACTIVE_DEVICES, bare_jid])
 
         try:
             await self.__storage.remove(key)
@@ -294,19 +297,16 @@
             pass
 
     async def loadTrust(
-        self,
-        bare_jid: str,
-        device_id: int
+        self, bare_jid: str, device_id: int
     ) -> Optional[oldmemo.migrations.Trust]:
-        key = "\n".join([ LegacyStorageImpl.KEY_TRUST, bare_jid, str(device_id) ])
+        key = "\n".join([LegacyStorageImpl.KEY_TRUST, bare_jid, str(device_id)])
 
         return cast(
-            Optional[oldmemo.migrations.Trust],
-            await self.__storage.get(key, None)
+            Optional[oldmemo.migrations.Trust], await self.__storage.get(key, None)
         )
 
     async def deleteTrust(self, bare_jid: str, device_id: int) -> None:
-        key = "\n".join([ LegacyStorageImpl.KEY_TRUST, bare_jid, str(device_id) ])
+        key = "\n".join([LegacyStorageImpl.KEY_TRUST, bare_jid, str(device_id)])
 
         try:
             await self.__storage.remove(key)
@@ -326,10 +326,7 @@
 
 
 async def download_oldmemo_bundle(
-    client: SatXMPPClient,
-    xep_0060: XEP_0060,
-    bare_jid: str,
-    device_id: int
+    client: SatXMPPClient, xep_0060: XEP_0060, bare_jid: str, device_id: int
 ) -> oldmemo.oldmemo.BundleImpl:
     """Download the oldmemo bundle corresponding to a specific device.
 
@@ -361,8 +358,9 @@
             f" {namespace}: Unexpected number of items retrieved: {len(items)}."
         )
 
-    element = \
-        next(iter(xml_tools.domish_elt_2_et_elt(cast(domish.Element, items[0]))), None)
+    element = next(
+        iter(xml_tools.domish_elt_2_et_elt(cast(domish.Element, items[0]))), None
+    )
     if element is None:
         raise omemo.BundleDownloadFailed(
             f"Bundle download failed for {bare_jid}: {device_id} under namespace"
@@ -385,7 +383,8 @@
 NS_ATM: Final = "urn:xmpp:atm:1"
 
 
-TRUST_MESSAGE_SCHEMA = xmlschema.XMLSchema("""<?xml version='1.0' encoding='UTF-8'?>
+TRUST_MESSAGE_SCHEMA = xmlschema.XMLSchema(
+    """<?xml version='1.0' encoding='UTF-8'?>
 <xs:schema xmlns:xs='http://www.w3.org/2001/XMLSchema'
            targetNamespace='urn:xmpp:tm:1'
            xmlns='urn:xmpp:tm:1'
@@ -413,7 +412,8 @@
     </xs:complexType>
   </xs:element>
 </xs:schema>
-""")
+"""
+)
 
 
 # This is compatible with omemo:2's SCE profile
@@ -422,7 +422,7 @@
     time_policy=SCEAffixPolicy.REQUIRED,
     to_policy=SCEAffixPolicy.OPTIONAL,
     from_policy=SCEAffixPolicy.OPTIONAL,
-    custom_policies={}
+    custom_policies={},
 )
 
 
@@ -470,7 +470,7 @@
             "sender_jid": self.sender_jid.full(),
             "sender_key": self.sender_key.hex(),
             "timestamp": self.timestamp.isoformat(),
-            "trust_update": self.trust_update.to_dict()
+            "trust_update": self.trust_update.to_dict(),
         }
         return data
 
@@ -499,7 +499,7 @@
 async def manage_trust_message_cache(
     client: SatXMPPClient,
     session_manager: omemo.SessionManager,
-    applied_trust_updates: FrozenSet[TrustUpdate]
+    applied_trust_updates: FrozenSet[TrustUpdate],
 ) -> None:
     """Manage the ATM trust message cache after trust updates have been applied.
 
@@ -510,8 +510,7 @@
     """
 
     trust_message_cache = persistent.LazyPersistentBinaryDict(
-        "XEP-0384/TM",
-        client.profile
+        "XEP-0384/TM", client.profile
     )
 
     # Load cache entries
@@ -524,16 +523,14 @@
     cache_entries_by_target = {
         (
             cache_entry.trust_update.target_jid.userhostJID(),
-            cache_entry.trust_update.target_key
+            cache_entry.trust_update.target_key,
         ): cache_entry
-        for cache_entry
-        in cache_entries
+        for cache_entry in cache_entries
     }
 
     for trust_update in applied_trust_updates:
         cache_entry = cache_entries_by_target.get(
-            (trust_update.target_jid.userhostJID(), trust_update.target_key),
-            None
+            (trust_update.target_jid.userhostJID(), trust_update.target_key), None
         )
 
         if cache_entry is not None:
@@ -561,7 +558,7 @@
                     await session_manager.set_trust(
                         cache_entry.trust_update.target_jid.userhost(),
                         cache_entry.trust_update.target_key,
-                        trust_level.name
+                        trust_level.name,
                     )
 
                     # Track the fact that this trust update has been applied
@@ -571,10 +568,7 @@
                     cache_entries.remove(cache_entry)
 
     # Store the updated cache entries
-    await trust_message_cache.force(
-        "cache",
-        [tm.to_dict() for tm in cache_entries]
-    )
+    await trust_message_cache.force("cache", [tm.to_dict() for tm in cache_entries])
 
     # TODO: Notify the user ("feedback") about automatically updated trust?
 
@@ -582,15 +576,12 @@
         # If any trust has been updated, recursively perform another run of cache
         # management
         await manage_trust_message_cache(
-            client,
-            session_manager,
-            frozenset(new_trust_updates)
+            client, session_manager, frozenset(new_trust_updates)
         )
 
 
 async def get_trust_as_trust_updates(
-    session_manager: omemo.SessionManager,
-    target_jid: jid.JID
+    session_manager: omemo.SessionManager, target_jid: jid.JID
 ) -> FrozenSet[TrustUpdate]:
     """Get the trust status of all known keys of a JID as trust updates for use with ATM.
 
@@ -617,11 +608,13 @@
             # Skip devices that are not explicitly trusted or distrusted
             continue
 
-        trust_updates.add(TrustUpdate(
-            target_jid=target_jid.userhostJID(),
-            target_key=device.identity_key,
-            target_trust=target_trust
-        ))
+        trust_updates.add(
+            TrustUpdate(
+                target_jid=target_jid.userhostJID(),
+                target_key=device.identity_key,
+                target_trust=target_trust,
+            )
+        )
 
     return frozenset(trust_updates)
 
@@ -629,7 +622,7 @@
 async def send_trust_messages(
     client: SatXMPPClient,
     session_manager: omemo.SessionManager,
-    applied_trust_updates: FrozenSet[TrustUpdate]
+    applied_trust_updates: FrozenSet[TrustUpdate],
 ) -> None:
     """Send information about updated trust to peers via ATM (XEP-0450).
 
@@ -647,20 +640,21 @@
     own_trust_updates = await get_trust_as_trust_updates(session_manager, own_jid)
 
     # JIDs of which at least one device's trust has been updated
-    updated_jids = frozenset({
-        trust_update.target_jid.userhostJID()
-        for trust_update
-        in applied_trust_updates
-    })
+    updated_jids = frozenset(
+        {trust_update.target_jid.userhostJID() for trust_update in applied_trust_updates}
+    )
 
     trust_messages: Set[PartialTrustMessage] = set()
 
     for updated_jid in updated_jids:
         # Get the trust updates for that JID
-        trust_updates = frozenset({
-            trust_update for trust_update in applied_trust_updates
-            if trust_update.target_jid.userhostJID() == updated_jid
-        })
+        trust_updates = frozenset(
+            {
+                trust_update
+                for trust_update in applied_trust_updates
+                if trust_update.target_jid.userhostJID() == updated_jid
+            }
+        )
 
         if updated_jid == own_jid:
             # If the own JID is updated, _all_ peers have to be notified
@@ -668,66 +662,81 @@
             # and storage keys until I've added public API to get a list of peers to
             # python-omemo.
             storage: omemo.Storage = getattr(session_manager, "_SessionManager__storage")
-            peer_jids = frozenset({
-                jid.JID(bare_jid).userhostJID() for bare_jid in (await storage.load_list(
-                    f"/{OMEMO.NS_TWOMEMO}/bare_jids",
-                    str
-                )).maybe([])
-            })
+            peer_jids = frozenset(
+                {
+                    jid.JID(bare_jid).userhostJID()
+                    for bare_jid in (
+                        await storage.load_list(f"/{OMEMO.NS_TWOMEMO}/bare_jids", str)
+                    ).maybe([])
+                }
+            )
 
             if len(peer_jids) == 0:
                 # If there are no peers to notify, notify our other devices about the
                 # changes directly
-                trust_messages.add(PartialTrustMessage(
-                    recipient_jid=own_jid,
-                    updated_jid=own_jid,
-                    trust_updates=trust_updates
-                ))
+                trust_messages.add(
+                    PartialTrustMessage(
+                        recipient_jid=own_jid,
+                        updated_jid=own_jid,
+                        trust_updates=trust_updates,
+                    )
+                )
             else:
                 # Otherwise, notify all peers about the changes in trust and let carbons
                 # handle the copy to our own JID
                 for peer_jid in peer_jids:
-                    trust_messages.add(PartialTrustMessage(
-                        recipient_jid=peer_jid,
-                        updated_jid=own_jid,
-                        trust_updates=trust_updates
-                    ))
+                    trust_messages.add(
+                        PartialTrustMessage(
+                            recipient_jid=peer_jid,
+                            updated_jid=own_jid,
+                            trust_updates=trust_updates,
+                        )
+                    )
 
                     # Also send full trust information about _every_ peer to our newly
                     # trusted devices
-                    peer_trust_updates = \
-                        await get_trust_as_trust_updates(session_manager, peer_jid)
-
-                    trust_messages.add(PartialTrustMessage(
-                        recipient_jid=own_jid,
-                        updated_jid=peer_jid,
-                        trust_updates=peer_trust_updates
-                    ))
+                    peer_trust_updates = await get_trust_as_trust_updates(
+                        session_manager, peer_jid
+                    )
+
+                    trust_messages.add(
+                        PartialTrustMessage(
+                            recipient_jid=own_jid,
+                            updated_jid=peer_jid,
+                            trust_updates=peer_trust_updates,
+                        )
+                    )
 
             # Send information about our own devices to our newly trusted devices
-            trust_messages.add(PartialTrustMessage(
-                recipient_jid=own_jid,
-                updated_jid=own_jid,
-                trust_updates=own_trust_updates
-            ))
+            trust_messages.add(
+                PartialTrustMessage(
+                    recipient_jid=own_jid,
+                    updated_jid=own_jid,
+                    trust_updates=own_trust_updates,
+                )
+            )
         else:
             # Notify our other devices about the changes in trust
-            trust_messages.add(PartialTrustMessage(
-                recipient_jid=own_jid,
-                updated_jid=updated_jid,
-                trust_updates=trust_updates
-            ))
+            trust_messages.add(
+                PartialTrustMessage(
+                    recipient_jid=own_jid,
+                    updated_jid=updated_jid,
+                    trust_updates=trust_updates,
+                )
+            )
 
             # Send a summary of our own trust to newly trusted devices
-            trust_messages.add(PartialTrustMessage(
-                recipient_jid=updated_jid,
-                updated_jid=own_jid,
-                trust_updates=own_trust_updates
-            ))
+            trust_messages.add(
+                PartialTrustMessage(
+                    recipient_jid=updated_jid,
+                    updated_jid=own_jid,
+                    trust_updates=own_trust_updates,
+                )
+            )
 
     # All trust messages prepared. Merge all trust messages directed at the same
     # recipient.
-    recipient_jids = { trust_message.recipient_jid for trust_message in trust_messages }
+    recipient_jids = {trust_message.recipient_jid for trust_message in trust_messages}
 
     for recipient_jid in recipient_jids:
         updated: Dict[jid.JID, Set[TrustUpdate]] = {}
@@ -736,8 +745,9 @@
             # Merge trust messages directed at that recipient
             if trust_message.recipient_jid == recipient_jid:
                 # Merge the trust updates
-                updated[trust_message.updated_jid] = \
-                    updated.get(trust_message.updated_jid, set())
+                updated[trust_message.updated_jid] = updated.get(
+                    trust_message.updated_jid, set()
+                )
 
                 updated[trust_message.updated_jid] |= trust_message.trust_updates
 
@@ -751,31 +761,34 @@
             key_owner_elt["jid"] = updated_jid.userhost()
 
             for trust_update in trust_updates:
-                serialized_identity_key = \
-                    base64.b64encode(trust_update.target_key).decode("ASCII")
+                serialized_identity_key = base64.b64encode(
+                    trust_update.target_key
+                ).decode("ASCII")
 
                 if trust_update.target_trust:
                     key_owner_elt.addElement(
-                        (NS_TM, "trust"),
-                        content=serialized_identity_key
+                        (NS_TM, "trust"), content=serialized_identity_key
                     )
                 else:
                     key_owner_elt.addElement(
-                        (NS_TM, "distrust"),
-                        content=serialized_identity_key
+                        (NS_TM, "distrust"), content=serialized_identity_key
                     )
 
         # Finally, encrypt and send the trust message!
-        message_data = client.generate_message_xml(MessageData({
-            "from": own_jid,
-            "to": recipient_jid,
-            "uid": str(uuid.uuid4()),
-            "message": {},
-            "subject": {},
-            "type": C.MESS_TYPE_CHAT,
-            "extra": {},
-            "timestamp": time.time()
-        }))
+        message_data = client.generate_message_xml(
+            MessageData(
+                {
+                    "from": own_jid,
+                    "to": recipient_jid,
+                    "uid": str(uuid.uuid4()),
+                    "message": {},
+                    "subject": {},
+                    "type": C.MESS_TYPE_CHAT,
+                    "extra": {},
+                    "timestamp": time.time(),
+                }
+            )
+        )
 
         message_data["xml"].addChild(trust_message_elt)
 
@@ -786,23 +799,21 @@
         # TODO: The following is mostly duplicate code
         try:
             messages, encryption_errors = await session_manager.encrypt(
-                frozenset({ own_jid.userhost(), recipient_jid.userhost() }),
-                { OMEMO.NS_TWOMEMO: plaintext },
-                backend_priority_order=[ OMEMO.NS_TWOMEMO ],
-                identifier=feedback_jid.userhost()
+                frozenset({own_jid.userhost(), recipient_jid.userhost()}),
+                {OMEMO.NS_TWOMEMO: plaintext},
+                backend_priority_order=[OMEMO.NS_TWOMEMO],
+                identifier=feedback_jid.userhost(),
             )
         except Exception as e:
             msg = _(
                 # pylint: disable=consider-using-f-string
                 "Can't encrypt message for {entities}: {reason}".format(
-                    entities=', '.join({ own_jid.userhost(), recipient_jid.userhost() }),
-                    reason=e
+                    entities=", ".join({own_jid.userhost(), recipient_jid.userhost()}),
+                    reason=e,
                 )
             )
             log.warning(msg)
-            client.feedback(feedback_jid, msg, {
-                C.MESS_EXTRA_INFO: C.EXTRA_INFO_ENCR_ERR
-            })
+            client.feedback(feedback_jid, msg, {C.MESS_EXTRA_INFO: C.EXTRA_INFO_ENCR_ERR})
             raise e
 
         if len(encryption_errors) > 0:
@@ -811,12 +822,13 @@
                 f" {encryption_errors}"
             )
 
-            encrypted_errors_stringified = ", ".join([
-                f"device {err.device_id} of {err.bare_jid} under namespace"
-                f" {err.namespace}"
-                for err
-                in encryption_errors
-            ])
+            encrypted_errors_stringified = ", ".join(
+                [
+                    f"device {err.device_id} of {err.bare_jid} under namespace"
+                    f" {err.namespace}"
+                    for err in encryption_errors
+                ]
+            )
 
             client.feedback(
                 feedback_jid,
@@ -827,23 +839,24 @@
                     " incomplete or broken, which shouldn't happen for actively used"
                     " devices, and can usually be ignored. The following devices are"
                     f" affected: {encrypted_errors_stringified}."
-                )
+                ),
             )
 
         message = next(
-            message for message in messages
-            if message.namespace == OMEMO.NS_TWOMEMO
+            message for message in messages if message.namespace == OMEMO.NS_TWOMEMO
         )
 
         # Add the encrypted element
-        message_data["xml"].addChild(xml_tools.et_elt_2_domish_elt(
-            twomemo.etree.serialize_message(message)
-        ))
+        message_data["xml"].addChild(
+            xml_tools.et_elt_2_domish_elt(twomemo.etree.serialize_message(message))
+        )
 
         await client.a_send(message_data["xml"])
 
 
-def make_session_manager(sat: LiberviaBackend, profile: str) -> Type[omemo.SessionManager]:
+def make_session_manager(
+    sat: LiberviaBackend, profile: str
+) -> Type[omemo.SessionManager]:
     """
     @param sat: The SAT instance.
     @param profile: The profile.
@@ -876,10 +889,10 @@
                         extra={
                             XEP_0060.EXTRA_PUBLISH_OPTIONS: {
                                 XEP_0060.OPT_ACCESS_MODEL: "open",
-                                XEP_0060.OPT_MAX_ITEMS: "max"
+                                XEP_0060.OPT_MAX_ITEMS: "max",
                             },
-                            XEP_0060.EXTRA_ON_PRECOND_NOT_MET: "raise"
-                        }
+                            XEP_0060.EXTRA_ON_PRECOND_NOT_MET: "raise",
+                        },
                     )
                 except (error.StanzaError, Exception) as e:
                     if (
@@ -917,10 +930,10 @@
                         extra={
                             XEP_0060.EXTRA_PUBLISH_OPTIONS: {
                                 XEP_0060.OPT_ACCESS_MODEL: "open",
-                                XEP_0060.OPT_MAX_ITEMS: 1
+                                XEP_0060.OPT_MAX_ITEMS: 1,
                             },
-                            XEP_0060.EXTRA_ON_PRECOND_NOT_MET: "publish_without_options"
-                        }
+                            XEP_0060.EXTRA_ON_PRECOND_NOT_MET: "publish_without_options",
+                        },
                     )
                 except Exception as e:
                     raise omemo.BundleUploadFailed(
@@ -933,19 +946,14 @@
 
         @staticmethod
         async def _download_bundle(
-            namespace: str,
-            bare_jid: str,
-            device_id: int
+            namespace: str, bare_jid: str, device_id: int
         ) -> omemo.Bundle:
             if namespace == twomemo.twomemo.NAMESPACE:
                 node = "urn:xmpp:omemo:2:bundles"
 
                 try:
                     items, __ = await xep_0060.get_items(
-                        client,
-                        jid.JID(bare_jid),
-                        node,
-                        item_ids=[ str(device_id) ]
+                        client, jid.JID(bare_jid), node, item_ids=[str(device_id)]
                     )
                 except Exception as e:
                     raise omemo.BundleDownloadFailed(
@@ -962,7 +970,7 @@
 
                 element = next(
                     iter(xml_tools.domish_elt_2_et_elt(cast(domish.Element, items[0]))),
-                    None
+                    None,
                 )
                 if element is None:
                     raise omemo.BundleDownloadFailed(
@@ -981,10 +989,7 @@
 
             if namespace == oldmemo.oldmemo.NAMESPACE:
                 return await download_oldmemo_bundle(
-                    client,
-                    xep_0060,
-                    bare_jid,
-                    device_id
+                    client, xep_0060, bare_jid, device_id
                 )
 
             raise omemo.UnknownNamespace(f"Unknown namespace: {namespace}")
@@ -999,8 +1004,8 @@
                         client,
                         client.jid.userhostJID(),
                         node,
-                        [ str(device_id) ],
-                        notify=False
+                        [str(device_id)],
+                        notify=False,
                     )
                 except Exception as e:
                     raise omemo.BundleDeletionFailed(
@@ -1027,8 +1032,7 @@
 
         @staticmethod
         async def _upload_device_list(
-            namespace: str,
-            device_list: Dict[int, Optional[str]]
+            namespace: str, device_list: Dict[int, Optional[str]]
         ) -> None:
             element: Optional[ET.Element] = None
             node: Optional[str] = None
@@ -1053,10 +1057,10 @@
                     extra={
                         XEP_0060.EXTRA_PUBLISH_OPTIONS: {
                             XEP_0060.OPT_MAX_ITEMS: 1,
-                            XEP_0060.OPT_ACCESS_MODEL: "open"
+                            XEP_0060.OPT_ACCESS_MODEL: "open",
                         },
-                        XEP_0060.EXTRA_ON_PRECOND_NOT_MET: "raise"
-                    }
+                        XEP_0060.EXTRA_ON_PRECOND_NOT_MET: "raise",
+                    },
                 )
             except (error.StanzaError, Exception) as e:
                 if (
@@ -1080,8 +1084,7 @@
 
         @staticmethod
         async def _download_device_list(
-            namespace: str,
-            bare_jid: str
+            namespace: str, bare_jid: str
         ) -> Dict[int, Optional[str]]:
             node: Optional[str] = None
 
@@ -1113,8 +1116,7 @@
                 )
 
             element = next(
-                iter(xml_tools.domish_elt_2_et_elt(cast(domish.Element, items[0]))),
-                None
+                iter(xml_tools.domish_elt_2_et_elt(cast(domish.Element, items[0]))), None
             )
 
             if element is None:
@@ -1138,8 +1140,7 @@
             raise omemo.UnknownNamespace(f"Unknown namespace: {namespace}")
 
         async def _evaluate_custom_trust_level(
-            self,
-            device: omemo.DeviceInformation
+            self, device: omemo.DeviceInformation
         ) -> omemo.TrustLevel:
             # Get the custom trust level
             try:
@@ -1161,11 +1162,12 @@
             # on the trust system and phase
             if trust_level is TrustLevel.BLINDLY_TRUSTED:
                 # Get the name of the active trust system
-                trust_system = cast(str, sat.memory.param_get_a(
-                    PARAM_NAME,
-                    PARAM_CATEGORY,
-                    profile_key=profile
-                ))
+                trust_system = cast(
+                    str,
+                    sat.memory.param_get_a(
+                        PARAM_NAME, PARAM_CATEGORY, profile_key=profile
+                    ),
+                )
 
                 # If the trust model is BTBV, blind trust is always enabled
                 if trust_system == "btbv":
@@ -1177,10 +1179,11 @@
                     # Find out whether we are in phase one or two
                     devices = await self.get_device_information(device.bare_jid)
 
-                    phase_one = all(TrustLevel(device.trust_level_name) in {
-                        TrustLevel.UNDECIDED,
-                        TrustLevel.BLINDLY_TRUSTED
-                    } for device in devices)
+                    phase_one = all(
+                        TrustLevel(device.trust_level_name)
+                        in {TrustLevel.UNDECIDED, TrustLevel.BLINDLY_TRUSTED}
+                        for device in devices
+                    )
 
                     if phase_one:
                         return omemo.TrustLevel.TRUSTED
@@ -1194,9 +1197,7 @@
             assert_never(trust_level)
 
         async def _make_trust_decision(
-            self,
-            undecided: FrozenSet[omemo.DeviceInformation],
-            identifier: Optional[str]
+            self, undecided: FrozenSet[omemo.DeviceInformation], identifier: Optional[str]
         ) -> None:
             if identifier is None:
                 raise omemo.TrustDecisionFailed(
@@ -1210,7 +1211,7 @@
             # first manual verification is performed. Thus, we can separate bare JIDs into
             # two pools here, one pool of bare JIDs for which blind trust is active, and
             # one pool of bare JIDs for which manual trust is used instead.
-            bare_jids = { device.bare_jid for device in undecided }
+            bare_jids = {device.bare_jid for device in undecided}
 
             blind_trust_bare_jids: Set[str] = set()
             manual_trust_bare_jids: Set[str] = set()
@@ -1222,20 +1223,23 @@
 
                 # If the trust levels of all devices correspond to those used by blind
                 # trust, blind trust applies. Otherwise, fall back to manual trust.
-                if all(TrustLevel(device.trust_level_name) in {
-                    TrustLevel.UNDECIDED,
-                    TrustLevel.BLINDLY_TRUSTED
-                } for device in devices):
+                if all(
+                    TrustLevel(device.trust_level_name)
+                    in {TrustLevel.UNDECIDED, TrustLevel.BLINDLY_TRUSTED}
+                    for device in devices
+                ):
                     blind_trust_bare_jids.add(bare_jid)
                 else:
                     manual_trust_bare_jids.add(bare_jid)
 
             # With the JIDs sorted into their respective pools, the undecided devices can
             # be categorized too
-            blindly_trusted_devices = \
-                { dev for dev in undecided if dev.bare_jid in blind_trust_bare_jids }
-            manually_trusted_devices = \
-                { dev for dev in undecided if dev.bare_jid in manual_trust_bare_jids }
+            blindly_trusted_devices = {
+                dev for dev in undecided if dev.bare_jid in blind_trust_bare_jids
+            }
+            manually_trusted_devices = {
+                dev for dev in undecided if dev.bare_jid in manual_trust_bare_jids
+            }
 
             # Blindly trust devices handled by blind trust
             if len(blindly_trusted_devices) > 0:
@@ -1243,15 +1247,16 @@
                     await self.set_trust(
                         device.bare_jid,
                         device.identity_key,
-                        TrustLevel.BLINDLY_TRUSTED.name
+                        TrustLevel.BLINDLY_TRUSTED.name,
                     )
 
-                blindly_trusted_devices_stringified = ", ".join([
-                    f"device {device.device_id} of {device.bare_jid} under namespace"
-                    f" {device.namespaces}"
-                    for device
-                    in blindly_trusted_devices
-                ])
+                blindly_trusted_devices_stringified = ", ".join(
+                    [
+                        f"device {device.device_id} of {device.bare_jid} under namespace"
+                        f" {device.namespaces}"
+                        for device in blindly_trusted_devices
+                    ]
+                )
 
                 client.feedback(
                     feedback_jid,
@@ -1259,7 +1264,7 @@
                         "Not all destination devices are trusted, unknown devices will be"
                         " blindly trusted.\nFollowing devices have been automatically"
                         f" trusted: {blindly_trusted_devices_stringified}."
-                    )
+                    ),
                 )
 
             # Prompt the user for manual trust decisions on the devices handled by manual
@@ -1272,11 +1277,10 @@
                         " message in such a situation. Please indicate if you trust"
                         " those devices or not in the trust manager before we can"
                         " send this message."
-                    )
+                    ),
                 )
                 await self.__prompt_manual_trust(
-                    frozenset(manually_trusted_devices),
-                    feedback_jid
+                    frozenset(manually_trusted_devices), feedback_jid
                 )
 
         @staticmethod
@@ -1291,16 +1295,20 @@
             if element is None:
                 raise omemo.UnknownNamespace(f"Unknown namespace: {message.namespace}")
 
-            message_data = client.generate_message_xml(MessageData({
-                "from": client.jid,
-                "to": jid.JID(bare_jid),
-                "uid": str(uuid.uuid4()),
-                "message": {},
-                "subject": {},
-                "type": C.MESS_TYPE_CHAT,
-                "extra": {},
-                "timestamp": time.time()
-            }))
+            message_data = client.generate_message_xml(
+                MessageData(
+                    {
+                        "from": client.jid,
+                        "to": jid.JID(bare_jid),
+                        "uid": str(uuid.uuid4()),
+                        "message": {},
+                        "subject": {},
+                        "type": C.MESS_TYPE_CHAT,
+                        "extra": {},
+                        "timestamp": time.time(),
+                    }
+                )
+            )
 
             message_data["xml"].addChild(xml_tools.et_elt_2_domish_elt(element))
 
@@ -1310,9 +1318,7 @@
                 raise omemo.MessageSendingFailed() from e
 
         async def __prompt_manual_trust(
-            self,
-            undecided: FrozenSet[omemo.DeviceInformation],
-            feedback_jid: jid.JID
+            self, undecided: FrozenSet[omemo.DeviceInformation], feedback_jid: jid.JID
         ) -> None:
             """Asks the user to decide on the manual trust level of a set of devices.
 
@@ -1340,20 +1346,25 @@
 
             # Casting this to Any, otherwise all calls on the variable cause type errors
             # pylint: disable=no-member
-            trust_ui = cast(Any, xml_tools.XMLUI(
-                panel_type=C.XMLUI_FORM,
-                title=D_("OMEMO trust management"),
-                submit_id=""
-            ))
-            trust_ui.addText(D_(
-                "This is OMEMO trusting system. You'll see below the devices of your "
-                "contacts, and a checkbox to trust them or not. A trusted device "
-                "can read your messages in plain text, so be sure to only validate "
-                "devices that you are sure are belonging to your contact. It's better "
-                "to do this when you are next to your contact and their device, so "
-                "you can check the \"fingerprint\" (the number next to the device) "
-                "yourself. Do *not* validate a device if the fingerprint is wrong!"
-            ))
+            trust_ui = cast(
+                Any,
+                xml_tools.XMLUI(
+                    panel_type=C.XMLUI_FORM,
+                    title=D_("OMEMO trust management"),
+                    submit_id="",
+                ),
+            )
+            trust_ui.addText(
+                D_(
+                    "This is OMEMO trusting system. You'll see below the devices of your "
+                    "contacts, and a checkbox to trust them or not. A trusted device "
+                    "can read your messages in plain text, so be sure to only validate "
+                    "devices that you are sure are belonging to your contact. It's better "
+                    "to do this when you are next to your contact and their device, so "
+                    'you can check the "fingerprint" (the number next to the device) '
+                    "yourself. Do *not* validate a device if the fingerprint is wrong!"
+                )
+            )
 
             own_device, __ = await self.get_own_device_information()
 
@@ -1384,16 +1395,16 @@
             trust_ui_result = await xml_tools.defer_xmlui(
                 sat,
                 trust_ui,
-                action_extra={ "meta_encryption_trust": namespace },
-                profile=profile
+                action_extra={"meta_encryption_trust": namespace},
+                profile=profile,
             )
 
             if C.bool(trust_ui_result.get("cancelled", "false")):
                 raise omemo.TrustDecisionFailed("Trust UI cancelled.")
 
-            data_form_result = cast(Dict[str, str], xml_tools.xmlui_result_2_data_form_result(
-                trust_ui_result
-            ))
+            data_form_result = cast(
+                Dict[str, str], xml_tools.xmlui_result_2_data_form_result(trust_ui_result)
+            )
 
             trust_updates: Set[TrustUpdate] = set()
 
@@ -1401,29 +1412,29 @@
                 if not key.startswith("trust_"):
                     continue
 
-                device = undecided_ordered[int(key[len("trust_"):])]
+                device = undecided_ordered[int(key[len("trust_") :])]
                 target_trust = C.bool(value)
-                trust_level = \
+                trust_level = (
                     TrustLevel.TRUSTED if target_trust else TrustLevel.DISTRUSTED
+                )
 
                 await self.set_trust(
-                    device.bare_jid,
-                    device.identity_key,
-                    trust_level.name
+                    device.bare_jid, device.identity_key, trust_level.name
                 )
 
-                trust_updates.add(TrustUpdate(
-                    target_jid=jid.JID(device.bare_jid).userhostJID(),
-                    target_key=device.identity_key,
-                    target_trust=target_trust
-                ))
+                trust_updates.add(
+                    TrustUpdate(
+                        target_jid=jid.JID(device.bare_jid).userhostJID(),
+                        target_key=device.identity_key,
+                        target_trust=target_trust,
+                    )
+                )
 
             # Check whether ATM is enabled and handle everything in case it is
-            trust_system = cast(str, sat.memory.param_get_a(
-                PARAM_NAME,
-                PARAM_CATEGORY,
-                profile_key=profile
-            ))
+            trust_system = cast(
+                str,
+                sat.memory.param_get_a(PARAM_NAME, PARAM_CATEGORY, profile_key=profile),
+            )
 
             if trust_system == "atm":
                 await manage_trust_message_cache(client, self, frozenset(trust_updates))
@@ -1439,7 +1450,7 @@
     signed_pre_key_rotation_period: int = 7 * 24 * 60 * 60,
     pre_key_refill_threshold: int = 99,
     max_num_per_session_skipped_keys: int = 1000,
-    max_num_per_message_skipped_keys: Optional[int] = None
+    max_num_per_message_skipped_keys: Optional[int] = None,
 ) -> omemo.SessionManager:
     """Prepare the OMEMO library (storage, backends, core) for a specific profile.
 
@@ -1492,11 +1503,8 @@
         TrustLevel.UNDECIDED.name,
         TrustLevel.DISTRUSTED.name,
         lambda bare_jid, device_id: download_oldmemo_bundle(
-            client,
-            xep_0060,
-            bare_jid,
-            device_id
-        )
+            client, xep_0060, bare_jid, device_id
+        ),
     )
 
     session_manager = await make_session_manager(sat, profile).create(
@@ -1504,13 +1512,13 @@
             twomemo.Twomemo(
                 storage,
                 max_num_per_session_skipped_keys,
-                max_num_per_message_skipped_keys
+                max_num_per_message_skipped_keys,
             ),
             oldmemo.Oldmemo(
                 storage,
                 max_num_per_session_skipped_keys,
-                max_num_per_message_skipped_keys
-            )
+                max_num_per_message_skipped_keys,
+            ),
         ],
         storage,
         client.jid.userhost(),
@@ -1518,7 +1526,7 @@
         TrustLevel.UNDECIDED.value,
         signed_pre_key_rotation_period,
         pre_key_refill_threshold,
-        omemo.AsyncFramework.TWISTED
+        omemo.AsyncFramework.TWISTED,
     )
 
     # This shouldn't hurt here since we're not running on overly constrainted devices.
@@ -1563,6 +1571,7 @@
     between the two is maintained. MUC messages are supported next to one to one messages.
     For trust management, the two trust models "ATM" and "BTBV" are supported.
     """
+
     NS_TWOMEMO = twomemo.twomemo.NAMESPACE
     NS_OLDMEMO = oldmemo.oldmemo.NAMESPACE
 
@@ -1572,7 +1581,7 @@
         time_policy=SCEAffixPolicy.OPTIONAL,
         to_policy=SCEAffixPolicy.REQUIRED,
         from_policy=SCEAffixPolicy.OPTIONAL,
-        custom_policies={}
+        custom_policies={},
     )
 
     # For everything but MUC/MIX message stanzas, the <to/> affix is a MAY
@@ -1581,7 +1590,7 @@
         time_policy=SCEAffixPolicy.OPTIONAL,
         to_policy=SCEAffixPolicy.OPTIONAL,
         from_policy=SCEAffixPolicy.OPTIONAL,
-        custom_policies={}
+        custom_policies={},
     )
 
     def __init__(self, host: LiberviaBackend) -> None:
@@ -1625,9 +1634,7 @@
         # messages. Temporarily, until a more fitting trigger for SCE-based encryption is
         # added, the message_received trigger is also used for twomemo.
         host.trigger.add(
-            "message_received",
-            self._message_received_trigger,
-            priority=100050
+            "message_received", self._message_received_trigger, priority=100050
         )
 
         host.trigger.add("send", self.__send_trigger, priority=0)
@@ -1646,14 +1653,14 @@
             TWOMEMO_DEVICE_LIST_NODE,
             lambda items_event, profile: defer.ensureDeferred(
                 self.__on_device_list_update(items_event, profile)
-            )
+            ),
         )
         xep_0163.add_pep_event(
             "OLDMEMO_DEVICES",
             OLDMEMO_DEVICE_LIST_NODE,
             lambda items_event, profile: defer.ensureDeferred(
                 self.__on_device_list_update(items_event, profile)
-            )
+            ),
         )
 
         try:
@@ -1664,21 +1671,16 @@
             self.__text_commands.register_text_commands(self)
 
     def profile_connected(  # pylint: disable=invalid-name
-        self,
-        client: SatXMPPClient
+        self, client: SatXMPPClient
     ) -> None:
         """
         @param client: The client.
         """
 
-        defer.ensureDeferred(self.get_session_manager(
-            cast(str, client.profile)
-        ))
+        defer.ensureDeferred(self.get_session_manager(cast(str, client.profile)))
 
     async def cmd_omemo_reset(
-        self,
-        client: SatXMPPClient,
-        mess_data: MessageData
+        self, client: SatXMPPClient, mess_data: MessageData
     ) -> Literal[False]:
         """Reset all sessions of devices that belong to the recipient of ``mess_data``.
 
@@ -1693,16 +1695,18 @@
             the message is not supposed to be sent.
         """
 
-        twomemo_requested = \
-            client.encryption.is_encryption_requested(mess_data, twomemo.twomemo.NAMESPACE)
-        oldmemo_requested = \
-            client.encryption.is_encryption_requested(mess_data, oldmemo.oldmemo.NAMESPACE)
+        twomemo_requested = client.encryption.is_encryption_requested(
+            mess_data, twomemo.twomemo.NAMESPACE
+        )
+        oldmemo_requested = client.encryption.is_encryption_requested(
+            mess_data, oldmemo.oldmemo.NAMESPACE
+        )
 
         if not (twomemo_requested or oldmemo_requested):
             self.__text_commands.feed_back(
                 client,
                 _("You need to have OMEMO encryption activated to reset the session"),
-                mess_data
+                mess_data,
             )
             return False
 
@@ -1716,17 +1720,13 @@
             await session_manager.replace_sessions(device)
 
         self.__text_commands.feed_back(
-            client,
-            _("OMEMO session has been reset"),
-            mess_data
+            client, _("OMEMO session has been reset"), mess_data
         )
 
         return False
 
     async def get_trust_ui(  # pylint: disable=invalid-name
-        self,
-        client: SatXMPPClient,
-        entity: jid.JID
+        self, client: SatXMPPClient, entity: jid.JID
     ) -> xml_tools.XMLUI:
         """
         @param client: The client.
@@ -1742,22 +1742,23 @@
         if self.__xep_0045 is not None and self.__xep_0045.is_joined_room(client, entity):
             bare_jids = self.__get_joined_muc_users(client, self.__xep_0045, entity)
         else:
-            bare_jids = { entity.userhost() }
+            bare_jids = {entity.userhost()}
 
         session_manager = await self.get_session_manager(client.profile)
 
         # At least sort the devices by bare JID such that they aren't listed completely
         # random
-        devices = sorted(cast(Set[omemo.DeviceInformation], set()).union(*[
-            await session_manager.get_device_information(bare_jid)
-            for bare_jid
-            in bare_jids
-        ]), key=lambda device: device.bare_jid)
-
-        async def callback(
-            data: Any,
-            profile: str
-        ) -> Dict[Never, Never]:
+        devices = sorted(
+            cast(Set[omemo.DeviceInformation], set()).union(
+                *[
+                    await session_manager.get_device_information(bare_jid)
+                    for bare_jid in bare_jids
+                ]
+            ),
+            key=lambda device: device.bare_jid,
+        )
+
+        async def callback(data: Any, profile: str) -> Dict[Never, Never]:
             """
             @param data: The XMLUI result produces by the trust UI form.
             @param profile: The profile.
@@ -1769,8 +1770,7 @@
                 return {}
 
             data_form_result = cast(
-                Dict[str, str],
-                xml_tools.xmlui_result_2_data_form_result(data)
+                Dict[str, str], xml_tools.xmlui_result_2_data_form_result(data)
             )
 
             trust_updates: Set[TrustUpdate] = set()
@@ -1779,14 +1779,12 @@
                 if not key.startswith("trust_"):
                     continue
 
-                device = devices[int(key[len("trust_"):])]
+                device = devices[int(key[len("trust_") :])]
                 trust_level_name = value
 
                 if device.trust_level_name != trust_level_name:
                     await session_manager.set_trust(
-                        device.bare_jid,
-                        device.identity_key,
-                        trust_level_name
+                        device.bare_jid, device.identity_key, trust_level_name
                     )
 
                     target_trust: Optional[bool] = None
@@ -1797,31 +1795,30 @@
                         target_trust = False
 
                     if target_trust is not None:
-                        trust_updates.add(TrustUpdate(
-                            target_jid=jid.JID(device.bare_jid).userhostJID(),
-                            target_key=device.identity_key,
-                            target_trust=target_trust
-                        ))
+                        trust_updates.add(
+                            TrustUpdate(
+                                target_jid=jid.JID(device.bare_jid).userhostJID(),
+                                target_key=device.identity_key,
+                                target_trust=target_trust,
+                            )
+                        )
 
             # Check whether ATM is enabled and handle everything in case it is
-            trust_system = cast(str, self.host.memory.param_get_a(
-                PARAM_NAME,
-                PARAM_CATEGORY,
-                profile_key=profile
-            ))
+            trust_system = cast(
+                str,
+                self.host.memory.param_get_a(
+                    PARAM_NAME, PARAM_CATEGORY, profile_key=profile
+                ),
+            )
 
             if trust_system == "atm":
                 if len(trust_updates) > 0:
                     await manage_trust_message_cache(
-                        client,
-                        session_manager,
-                        frozenset(trust_updates)
+                        client, session_manager, frozenset(trust_updates)
                     )
 
                     await send_trust_messages(
-                        client,
-                        session_manager,
-                        frozenset(trust_updates)
+                        client, session_manager, frozenset(trust_updates)
                     )
 
             return {}
@@ -1831,22 +1828,24 @@
         result = xml_tools.XMLUI(
             panel_type=C.XMLUI_FORM,
             title=D_("OMEMO trust management"),
-            submit_id=submit_id
+            submit_id=submit_id,
         )
         # Casting this to Any, otherwise all calls on the variable cause type errors
         # pylint: disable=no-member
         trust_ui = cast(Any, result)
-        trust_ui.addText(D_(
-            "This is OMEMO trusting system. You'll see below the devices of your"
-            " contacts, and a list selection to trust them or not. A trusted device"
-            " can read your messages in plain text, so be sure to only validate"
-            " devices that you are sure are belonging to your contact. It's better"
-            " to do this when you are next to your contact and their device, so"
-            " you can check the \"fingerprint\" (the number next to the device)"
-            " yourself. Do *not* validate a device if the fingerprint is wrong!"
-            " Note that manually validating a fingerprint disables any form of automatic"
-            " trust."
-        ))
+        trust_ui.addText(
+            D_(
+                "This is OMEMO trusting system. You'll see below the devices of your"
+                " contacts, and a list selection to trust them or not. A trusted device"
+                " can read your messages in plain text, so be sure to only validate"
+                " devices that you are sure are belonging to your contact. It's better"
+                " to do this when you are next to your contact and their device, so"
+                ' you can check the "fingerprint" (the number next to the device)'
+                " yourself. Do *not* validate a device if the fingerprint is wrong!"
+                " Note that manually validating a fingerprint disables any form of automatic"
+                " trust."
+            )
+        )
 
         own_device, __ = await session_manager.get_own_device_information()
 
@@ -1854,9 +1853,9 @@
         trust_ui.addLabel(D_("This device ID"))
         trust_ui.addText(str(own_device.device_id))
         trust_ui.addLabel(D_("This device's fingerprint"))
-        trust_ui.addText(" ".join(session_manager.format_identity_key(
-            own_device.identity_key
-        )))
+        trust_ui.addText(
+            " ".join(session_manager.format_identity_key(own_device.identity_key))
+        )
         trust_ui.addEmpty()
         trust_ui.addEmpty()
 
@@ -1866,20 +1865,23 @@
             trust_ui.addLabel(D_("Device ID"))
             trust_ui.addText(str(device.device_id))
             trust_ui.addLabel(D_("Fingerprint"))
-            trust_ui.addText(" ".join(session_manager.format_identity_key(
-                device.identity_key
-            )))
+            trust_ui.addText(
+                " ".join(session_manager.format_identity_key(device.identity_key))
+            )
             trust_ui.addLabel(D_("Trust this device?"))
 
             current_trust_level = TrustLevel(device.trust_level_name)
-            avaiable_trust_levels = \
-                { TrustLevel.DISTRUSTED, TrustLevel.TRUSTED, current_trust_level }
+            avaiable_trust_levels = {
+                TrustLevel.DISTRUSTED,
+                TrustLevel.TRUSTED,
+                current_trust_level,
+            }
 
             trust_ui.addList(
                 f"trust_{index}",
-                options=[ trust_level.name for trust_level in avaiable_trust_levels ],
+                options=[trust_level.name for trust_level in avaiable_trust_levels],
                 selected=current_trust_level.name,
-                styles=[ "inline" ]
+                styles=["inline"],
             )
 
             twomemo_active = dict(device.active).get(twomemo.twomemo.NAMESPACE)
@@ -1905,9 +1907,7 @@
 
     @staticmethod
     def __get_joined_muc_users(
-        client: SatXMPPClient,
-        xep_0045: XEP_0045,
-        room_jid: jid.JID
+        client: SatXMPPClient, xep_0045: XEP_0045, room_jid: jid.JID
     ) -> Set[str]:
         """
         @param client: The client.
@@ -1966,9 +1966,7 @@
             # Build and store the session manager
             try:
                 session_manager = await prepare_for_profile(
-                    self.host,
-                    profile,
-                    initial_own_label="Libervia"
+                    self.host, profile, initial_own_label="Libervia"
                 )
             except Exception as e:
                 # In case of an error during initalization, notify the waiters accordingly
@@ -1995,7 +1993,7 @@
         message_elt: domish.Element,
         session_manager: omemo.SessionManager,
         sender_device_information: omemo.DeviceInformation,
-        timestamp: datetime
+        timestamp: datetime,
     ) -> None:
         """Check a newly decrypted message stanza for ATM content and perform ATM in case.
 
@@ -2008,8 +2006,7 @@
         """
 
         trust_message_cache = persistent.LazyPersistentBinaryDict(
-            "XEP-0384/TM",
-            client.profile
+            "XEP-0384/TM", client.profile
         )
 
         new_cache_entries: Set[TrustMessageCacheEntry] = set()
@@ -2040,30 +2037,34 @@
                 for trust_elt in key_owner_elt.elements(NS_TM, "trust"):
                     assert isinstance(trust_elt, domish.Element)
 
-                    new_cache_entries.add(TrustMessageCacheEntry(
-                        sender_jid=jid.JID(sender_device_information.bare_jid),
-                        sender_key=sender_device_information.identity_key,
-                        timestamp=timestamp,
-                        trust_update=TrustUpdate(
-                            target_jid=key_owner_jid,
-                            target_key=base64.b64decode(str(trust_elt)),
-                            target_trust=True
+                    new_cache_entries.add(
+                        TrustMessageCacheEntry(
+                            sender_jid=jid.JID(sender_device_information.bare_jid),
+                            sender_key=sender_device_information.identity_key,
+                            timestamp=timestamp,
+                            trust_update=TrustUpdate(
+                                target_jid=key_owner_jid,
+                                target_key=base64.b64decode(str(trust_elt)),
+                                target_trust=True,
+                            ),
                         )
-                    ))
+                    )
 
                 for distrust_elt in key_owner_elt.elements(NS_TM, "distrust"):
                     assert isinstance(distrust_elt, domish.Element)
 
-                    new_cache_entries.add(TrustMessageCacheEntry(
-                        sender_jid=jid.JID(sender_device_information.bare_jid),
-                        sender_key=sender_device_information.identity_key,
-                        timestamp=timestamp,
-                        trust_update=TrustUpdate(
-                            target_jid=key_owner_jid,
-                            target_key=base64.b64decode(str(distrust_elt)),
-                            target_trust=False
+                    new_cache_entries.add(
+                        TrustMessageCacheEntry(
+                            sender_jid=jid.JID(sender_device_information.bare_jid),
+                            sender_key=sender_device_information.identity_key,
+                            timestamp=timestamp,
+                            trust_update=TrustUpdate(
+                                target_jid=key_owner_jid,
+                                target_key=base64.b64decode(str(distrust_elt)),
+                                target_trust=False,
+                            ),
                         )
-                    ))
+                    )
 
         # Load existing cache entries
         existing_cache_entries = {
@@ -2075,10 +2076,9 @@
         existing_by_target = {
             (
                 cache_entry.trust_update.target_jid.userhostJID(),
-                cache_entry.trust_update.target_key
+                cache_entry.trust_update.target_key,
             ): cache_entry
-            for cache_entry
-            in existing_cache_entries
+            for cache_entry in existing_cache_entries
         }
 
         # Iterate over a copy here, such that new_cache_entries can be modified
@@ -2086,9 +2086,9 @@
             existing_cache_entry = existing_by_target.get(
                 (
                     new_cache_entry.trust_update.target_jid.userhostJID(),
-                    new_cache_entry.trust_update.target_key
+                    new_cache_entry.trust_update.target_key,
                 ),
-                None
+                None,
             )
 
             if existing_cache_entry is not None:
@@ -2118,7 +2118,7 @@
                 await session_manager.set_trust(
                     trust_update.target_jid.userhost(),
                     trust_update.target_key,
-                    trust_level.name
+                    trust_level.name,
                 )
 
                 applied_trust_updates.add(trust_update)
@@ -2127,23 +2127,20 @@
 
         # Store the remaining existing and new cache entries
         await trust_message_cache.force(
-            "cache",
-            [tm.to_dict() for tm in existing_cache_entries | new_cache_entries]
+            "cache", [tm.to_dict() for tm in existing_cache_entries | new_cache_entries]
         )
 
         # If the trust of at least one device was modified, run the ATM cache update logic
         if len(applied_trust_updates) > 0:
             await manage_trust_message_cache(
-                client,
-                session_manager,
-                frozenset(applied_trust_updates)
+                client, session_manager, frozenset(applied_trust_updates)
             )
 
     async def _message_received_trigger(
         self,
         client: SatXMPPClient,
         message_elt: domish.Element,
-        post_treat: defer.Deferred
+        post_treat: defer.Deferred,
     ) -> bool:
         """
         @param client: The client which received the message.
@@ -2211,9 +2208,7 @@
                 message_uid = message_elt.getAttribute("id")
             if message_uid is not None:
                 muc_plaintext_cache_key = MUCPlaintextCacheKey(
-                    client,
-                    room_jid,
-                    message_uid
+                    client, room_jid, message_uid
                 )
         else:
             # I'm not sure why this check is required, this code is copied from the old
@@ -2231,15 +2226,15 @@
         message: Optional[omemo.Message] = None
         encrypted_elt: Optional[domish.Element] = None
 
-        twomemo_encrypted_elt = cast(Optional[domish.Element], next(
-            message_elt.elements(twomemo.twomemo.NAMESPACE, "encrypted"),
-            None
-        ))
-
-        oldmemo_encrypted_elt = cast(Optional[domish.Element], next(
-            message_elt.elements(oldmemo.oldmemo.NAMESPACE, "encrypted"),
-            None
-        ))
+        twomemo_encrypted_elt = cast(
+            Optional[domish.Element],
+            next(message_elt.elements(twomemo.twomemo.NAMESPACE, "encrypted"), None),
+        )
+
+        oldmemo_encrypted_elt = cast(
+            Optional[domish.Element],
+            next(message_elt.elements(oldmemo.oldmemo.NAMESPACE, "encrypted"), None),
+        )
 
         try:
             session_manager = await self.get_session_manager(cast(str, client.profile))
@@ -2251,8 +2246,7 @@
         if twomemo_encrypted_elt is not None:
             try:
                 message = twomemo.etree.parse_message(
-                    xml_tools.domish_elt_2_et_elt(twomemo_encrypted_elt),
-                    sender_bare_jid
+                    xml_tools.domish_elt_2_et_elt(twomemo_encrypted_elt), sender_bare_jid
                 )
             except (ValueError, XMLSchemaValidationError):
                 log.warning(
@@ -2268,7 +2262,7 @@
                     xml_tools.domish_elt_2_et_elt(oldmemo_encrypted_elt),
                     sender_bare_jid,
                     client.jid.userhost(),
-                    session_manager
+                    session_manager,
                 )
             except (ValueError, XMLSchemaValidationError):
                 log.warning(
@@ -2324,7 +2318,7 @@
                             f"An OMEMO message from {sender_jid.full()} has not been"
                             f" encrypted for our device, we can't decrypt it."
                         ),
-                        { C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR }
+                        {C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR},
                     )
                     log.warning("Message not encrypted for us.")
                 else:
@@ -2333,17 +2327,18 @@
                 # No point in further processing this message.
                 return False
             except Exception as e:
-                log.warning(_("Can't decrypt message: {reason}\n{xml}").format(
-                    reason=e,
-                    xml=message_elt.toXml()
-                ))
+                log.warning(
+                    _("Can't decrypt message: {reason}\n{xml}").format(
+                        reason=e, xml=message_elt.toXml()
+                    )
+                )
                 client.feedback(
                     feedback_jid,
                     D_(
                         f"An OMEMO message from {sender_jid.full()} can't be decrypted:"
                         f" {e}"
                     ),
-                    { C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR }
+                    {C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR},
                 )
                 # No point in further processing this message
                 return False
@@ -2354,25 +2349,24 @@
             if plaintext is not None:
                 # XEP_0420.unpack_stanza handles the whole unpacking, including the
                 # relevant modifications to the element
-                sce_profile = \
+                sce_profile = (
                     OMEMO.SCE_PROFILE_GROUPCHAT if is_muc_message else OMEMO.SCE_PROFILE
+                )
                 try:
                     affix_values = self.__xep_0420.unpack_stanza(
-                        sce_profile,
-                        message_elt,
-                        plaintext
+                        sce_profile, message_elt, plaintext
                     )
                 except Exception as e:
-                    log.warning(D_(
-                        f"Error unpacking SCE-encrypted message: {e}\n{plaintext}"
-                    ))
+                    log.warning(
+                        D_(f"Error unpacking SCE-encrypted message: {e}\n{plaintext}")
+                    )
                     client.feedback(
                         feedback_jid,
                         D_(
                             f"An OMEMO message from {sender_jid.full()} was rejected:"
                             f" {e}"
                         ),
-                        { C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR }
+                        {C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR},
                     )
                     # No point in further processing this message
                     return False
@@ -2395,8 +2389,9 @@
                 message_elt.addElement("body", content=plaintext.decode("utf-8"))
 
         # Mark the message as trusted or untrusted. Undecided counts as untrusted here.
-        trust_level = \
-            await session_manager._evaluate_custom_trust_level(device_information)
+        trust_level = await session_manager._evaluate_custom_trust_level(
+            device_information
+        )
 
         if trust_level is omemo.TrustLevel.TRUSTED:
             post_treat.addCallback(client.encryption.mark_as_trusted)
@@ -2405,8 +2400,7 @@
 
         # Mark the message as originally encrypted
         post_treat.addCallback(
-            client.encryption.mark_as_encrypted,
-            namespace=message.namespace
+            client.encryption.mark_as_encrypted, namespace=message.namespace
         )
 
         # Handle potential ATM trust updates
@@ -2416,7 +2410,7 @@
                 message_elt,
                 session_manager,
                 device_information,
-                affix_values.timestamp
+                affix_values.timestamp,
             )
 
         # Message processed successfully, continue with the flow
@@ -2430,7 +2424,7 @@
         """
         # SCE is only applicable to message and IQ stanzas
         # FIXME: temporary disabling IQ stanza encryption
-        if stanza.name not in { "message" }:  # , "iq" }:
+        if stanza.name not in {"message"}:  # , "iq" }:
             return True
 
         # Get the intended recipient
@@ -2466,17 +2460,15 @@
                 stanza,
                 recipient_bare_jid,
                 stanza.getAttribute("type", C.MESS_TYPE_NORMAL) == C.MESS_TYPE_GROUPCHAT,
-                stanza.getAttribute("id", None)
+                stanza.getAttribute("id", None),
             )
         else:
             # Encryption is requested for this recipient, but not with twomemo
             return True
 
-
-
         # Add a store hint if this is a message stanza
         if stanza.name == "message":
-            self.__xep_0334.add_hint_elements(stanza, [ "store" ])
+            self.__xep_0334.add_hint_elements(stanza, ["store"])
 
         # Let the flow continue.
         return True
@@ -2501,9 +2493,8 @@
             device_information = await session_manager.get_device_information(
                 bare_jid.userhost()
             )
-            if (
-                not device_information
-                or not all(namespace in di.namespaces for di in device_information)
+            if not device_information or not all(
+                namespace in di.namespaces for di in device_information
             ):
                 if namespace == self.NS_TWOMEMO:
                     algo, node = "OMEMO", TWOMEMO_DEVICE_LIST_NODE
@@ -2528,7 +2519,7 @@
         stanza: domish.Element,
         recipient_jids: Union[jid.JID, Set[jid.JID]],
         is_muc_message: bool,
-        stanza_id: Optional[str]
+        stanza_id: Optional[str],
     ) -> None:
         """
         @param client: The client.
@@ -2577,15 +2568,11 @@
             room_jid = feedback_jid = recipient_jid.userhostJID()
 
             recipient_bare_jids = self.__get_joined_muc_users(
-                client,
-                self.__xep_0045,
-                room_jid
+                client, self.__xep_0045, room_jid
             )
 
             muc_plaintext_cache_key = MUCPlaintextCacheKey(
-                client=client,
-                room_jid=room_jid,
-                message_uid=stanza_id
+                client=client, room_jid=room_jid, message_uid=stanza_id
             )
         else:
             recipient_bare_jids = {r.userhost() for r in recipient_jids}
@@ -2611,7 +2598,7 @@
             if namespace == twomemo.twomemo.NAMESPACE:
                 return self.__xep_0420.pack_stanza(
                     OMEMO.SCE_PROFILE_GROUPCHAT if is_muc_message else OMEMO.SCE_PROFILE,
-                    stanza
+                    stanza,
                 )
 
             if namespace == oldmemo.oldmemo.NAMESPACE:
@@ -2622,7 +2609,7 @@
                         plaintext = str(child).encode("utf-8")
 
                     # Any other sensitive elements to remove here?
-                    if child.name in { "body", "html" }:
+                    if child.name in {"body", "html"}:
                         stanza.children.remove(child)
 
                 if plaintext is None:
@@ -2644,27 +2631,26 @@
         log.debug(f"Plaintext to encrypt: {plaintext}")
 
         session_manager = await self.get_session_manager(client.profile)
-        await self.download_missing_device_lists(client, namespace, recipient_jids, session_manager)
+        await self.download_missing_device_lists(
+            client, namespace, recipient_jids, session_manager
+        )
 
         try:
             messages, encryption_errors = await session_manager.encrypt(
                 frozenset(recipient_bare_jids),
-                { namespace: plaintext },
-                backend_priority_order=[ namespace ],
-                identifier=feedback_jid.userhost()
+                {namespace: plaintext},
+                backend_priority_order=[namespace],
+                identifier=feedback_jid.userhost(),
             )
         except Exception as e:
             msg = _(
                 # pylint: disable=consider-using-f-string
                 "Can't encrypt message for {entities}: {reason}".format(
-                    entities=', '.join(recipient_bare_jids),
-                    reason=e
+                    entities=", ".join(recipient_bare_jids), reason=e
                 )
             )
             log.warning(msg)
-            client.feedback(feedback_jid, msg, {
-                C.MESS_EXTRA_INFO: C.EXTRA_INFO_ENCR_ERR
-            })
+            client.feedback(feedback_jid, msg, {C.MESS_EXTRA_INFO: C.EXTRA_INFO_ENCR_ERR})
             raise e
 
         if len(encryption_errors) > 0:
@@ -2673,12 +2659,13 @@
                 f" {encryption_errors}"
             )
 
-            encrypted_errors_stringified = ", ".join([
-                f"device {err.device_id} of {err.bare_jid} under namespace"
-                f" {err.namespace}"
-                for err
-                in encryption_errors
-            ])
+            encrypted_errors_stringified = ", ".join(
+                [
+                    f"device {err.device_id} of {err.bare_jid} under namespace"
+                    f" {err.namespace}"
+                    for err in encryption_errors
+                ]
+            )
 
             client.feedback(
                 feedback_jid,
@@ -2689,30 +2676,28 @@
                     " incomplete or broken, which shouldn't happen for actively used"
                     " devices, and can usually be ignored. The following devices are"
                     f" affected: {encrypted_errors_stringified}."
-                )
+                ),
             )
 
         message = next(message for message in messages if message.namespace == namespace)
 
         if namespace == twomemo.twomemo.NAMESPACE:
             # Add the encrypted element
-            stanza.addChild(xml_tools.et_elt_2_domish_elt(
-                twomemo.etree.serialize_message(message)
-            ))
+            stanza.addChild(
+                xml_tools.et_elt_2_domish_elt(twomemo.etree.serialize_message(message))
+            )
 
         if namespace == oldmemo.oldmemo.NAMESPACE:
             # Add the encrypted element
-            stanza.addChild(xml_tools.et_elt_2_domish_elt(
-                oldmemo.etree.serialize_message(message)
-            ))
+            stanza.addChild(
+                xml_tools.et_elt_2_domish_elt(oldmemo.etree.serialize_message(message))
+            )
 
         if muc_plaintext_cache_key is not None:
             self.__muc_plaintext_cache[muc_plaintext_cache_key] = plaintext
 
     async def __on_device_list_update(
-        self,
-        items_event: pubsub.ItemsEvent,
-        profile: str
+        self, items_event: pubsub.ItemsEvent, profile: str
     ) -> None:
         """Handle device list updates fired by PEP.
 
@@ -2726,10 +2711,7 @@
         await self._update_device_list(client, sender, items)
 
     async def _update_device_list(
-        self,
-        client: SatXMPPEntity,
-        sender: jid.JID,
-        items: list[domish.Element]
+        self, client: SatXMPPEntity, sender: jid.JID, items: list[domish.Element]
     ) -> None:
 
         if len(items) > 1:
@@ -2774,7 +2756,5 @@
         session_manager = await self.get_session_manager(client.profile)
 
         await session_manager.update_device_list(
-            namespace,
-            sender.userhost(),
-            device_list
+            namespace, sender.userhost(), device_list
         )
--- a/libervia/backend/plugins/plugin_xep_0391.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0391.py	Wed Jun 19 18:44:57 2024 +0200
@@ -77,21 +77,11 @@
         self.host = host
         self._o = host.plugins["XEP-0384"]
         self._j = host.plugins["XEP-0166"]
-        host.trigger.add(
-            "XEP-0166_initiate_elt_built",
-            self._on_initiate_elt_build
-        )
+        host.trigger.add("XEP-0166_initiate_elt_built", self._on_initiate_elt_build)
+        host.trigger.add("XEP-0166_on_session_initiate", self._on_session_initiate)
+        host.trigger.add("XEP-0234_jingle_handler", self._add_encryption_filter)
         host.trigger.add(
-            "XEP-0166_on_session_initiate",
-            self._on_session_initiate
-        )
-        host.trigger.add(
-            "XEP-0234_jingle_handler",
-            self._add_encryption_filter
-        )
-        host.trigger.add(
-            "XEP-0234_file_receiving_request_conf",
-            self._add_encryption_filter
+            "XEP-0234_file_receiving_request_conf", self._add_encryption_filter
         )
 
     def get_handler(self, client):
@@ -102,11 +92,12 @@
         client: SatXMPPEntity,
         session: Dict[str, Any],
         iq_elt: domish.Element,
-        jingle_elt: domish.Element
+        jingle_elt: domish.Element,
     ) -> bool:
-        if client.encryption.get_namespace(
-               session["peer_jid"].userhostJID()
-           ) != self._o.NS_OLDMEMO:
+        if (
+            client.encryption.get_namespace(session["peer_jid"].userhostJID())
+            != self._o.NS_OLDMEMO
+        ):
             return True
         for content_elt in jingle_elt.elements(self._j.namespace, "content"):
             content_data = session["contents"][content_elt["name"]]
@@ -126,7 +117,7 @@
             security_elt["type"] = enc_type
             encryption_data = content_data["encryption"] = {
                 "cipher": cipher,
-                "type": enc_type
+                "type": enc_type,
             }
             session_manager = await self._o.get_session_manager(client.profile)
             await self._o.download_missing_device_lists(
@@ -136,19 +127,16 @@
                 messages, encryption_errors = await session_manager.encrypt(
                     frozenset({session["peer_jid"].userhost()}),
                     # the value seems to be the commonly used value
-                    { self._o.NS_OLDMEMO: b" " },
-                    backend_priority_order=[ self._o.NS_OLDMEMO ],
-                    identifier = client.jid.userhost()
+                    {self._o.NS_OLDMEMO: b" "},
+                    backend_priority_order=[self._o.NS_OLDMEMO],
+                    identifier=client.jid.userhost(),
                 )
             except Exception as e:
                 log.exception("Can't generate IV and keys")
                 raise e
             message, plain_key_material = next(iter(messages.items()))
             iv, key = message.content.initialization_vector, plain_key_material.key
-            content_data["encryption"].update({
-                "iv": iv,
-                "key": key
-            })
+            content_data["encryption"].update({"iv": iv, "key": key})
             encrypted_elt = xml_tools.et_elt_2_domish_elt(
                 oldmemo.etree.serialize_message(message)
             )
@@ -160,7 +148,7 @@
         client: SatXMPPEntity,
         session: Dict[str, Any],
         iq_elt: domish.Element,
-        jingle_elt: domish.Element
+        jingle_elt: domish.Element,
     ) -> bool:
         for content_elt in jingle_elt.elements(self._j.namespace, "content"):
             content_data = session["contents"][content_elt["name"]]
@@ -181,7 +169,7 @@
                     xml_tools.domish_elt_2_et_elt(encrypted_elt, False),
                     session["peer_jid"].userhost(),
                     client.jid.userhost(),
-                    session_manager
+                    session_manager,
                 )
                 __, __, plain_key_material = await session_manager.decrypt(message)
             except Exception as e:
@@ -192,7 +180,7 @@
                     "cipher": security_elt["cipher"],
                     "type": security_elt["type"],
                     "iv": message.content.initialization_vector,
-                    "key": plain_key_material.key
+                    "key": plain_key_material.key,
                 }
             except KeyError as e:
                 log.warning(f"missing data, can't decrypt: {e}")
@@ -201,10 +189,7 @@
         return True
 
     def __encrypt(
-        self,
-        data: bytes,
-        encryptor: CipherContext,
-        data_cb: Callable
+        self, data: bytes, encryptor: CipherContext, data_cb: Callable
     ) -> bytes:
         data_cb(data)
         if data:
@@ -213,23 +198,23 @@
             try:
                 return encryptor.finalize() + encryptor.tag
             except AlreadyFinalized:
-                return b''
+                return b""
 
     def __decrypt(
         self,
         data: bytes,
         buffer: list[bytes],
         decryptor: CipherContext,
-        data_cb: Callable
+        data_cb: Callable,
     ) -> bytes:
         buffer.append(data)
-        data = b''.join(buffer)
+        data = b"".join(buffer)
         buffer.clear()
         if len(data) > 16:
             decrypted = decryptor.update(data[:-16])
             data_cb(decrypted)
         else:
-            decrypted = b''
+            decrypted = b""
         buffer.append(data[-16:])
         return decrypted
 
@@ -239,7 +224,7 @@
         buffer: list[bytes],
         decryptor: CipherContext,
     ) -> None:
-        tag = b''.join(buffer)
+        tag = b"".join(buffer)
         file_obj.write(decryptor.finalize_with_tag(tag))
 
     async def _add_encryption_filter(
@@ -247,7 +232,7 @@
         client: SatXMPPEntity,
         session: Dict[str, Any],
         content_data: Dict[str, Any],
-        elt: domish.Element
+        elt: domish.Element,
     ) -> bool:
         try:
             file_obj = content_data["stream_object"].file_obj
@@ -258,7 +243,7 @@
                 log.debug("JET skipped due to webrtc transport.")
                 return True
         try:
-            encryption_data=content_data["encryption"]
+            encryption_data = content_data["encryption"]
         except KeyError:
             return True
         cipher = ciphers.Cipher(
@@ -274,20 +259,18 @@
                 self.__decrypt_finalize,
                 file_obj=file_obj,
                 buffer=buffer,
-                decryptor=decryptor
+                decryptor=decryptor,
             )
             file_obj.data_cb = partial(
                 self.__decrypt,
                 buffer=buffer,
                 decryptor=decryptor,
-                data_cb=file_obj.data_cb
+                data_cb=file_obj.data_cb,
             )
         else:
             # we are sending a file
             file_obj.data_cb = partial(
-                self.__encrypt,
-                encryptor=cipher.encryptor(),
-                data_cb=file_obj.data_cb
+                self.__encrypt, encryptor=cipher.encryptor(), data_cb=file_obj.data_cb
             )
 
         return True
--- a/libervia/backend/plugins/plugin_xep_0420.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0420.py	Wed Jun 19 18:44:57 2024 +0200
@@ -50,7 +50,7 @@
     "SCECustomAffix",
     "SCEAffixPolicy",
     "SCEProfile",
-    "SCEAffixValues"
+    "SCEAffixValues",
 ]
 
 
@@ -61,9 +61,9 @@
     C.PI_NAME: "SCE",
     C.PI_IMPORT_NAME: "XEP-0420",
     C.PI_TYPE: "SEC",
-    C.PI_PROTOCOLS: [ "XEP-0420" ],
-    C.PI_DEPENDENCIES: [ "XEP-0334", "XEP-0082" ],
-    C.PI_RECOMMENDATIONS: [ "XEP-0045", "XEP-0033", "XEP-0359" ],
+    C.PI_PROTOCOLS: ["XEP-0420"],
+    C.PI_DEPENDENCIES: ["XEP-0334", "XEP-0082"],
+    C.PI_RECOMMENDATIONS: ["XEP-0045", "XEP-0033", "XEP-0359"],
     C.PI_MAIN: "XEP_0420",
     C.PI_HANDLER: "no",
     C.PI_DESCRIPTION: D_("Implementation of Stanza Content Encryption"),
@@ -240,7 +240,7 @@
         NS_ADDRESS,
         # Not part of the specification (yet), but just doesn't make sense in an encrypted
         # envelope:
-        NS_EME
+        NS_EME,
     }
 
     # Set of (namespace, element name) tuples that define elements which are never allowed
@@ -331,8 +331,7 @@
             rpad_length = max(0, 53 - content_byte_size_diff) + secrets.randbelow(201)
             rpad_content = "".join(
                 secrets.choice(string.digits + string.ascii_letters + string.punctuation)
-                for __
-                in range(rpad_length)
+                for __ in range(rpad_length)
             )
             envelope.addElement((NS_SCE, "rpad"), content=rpad_content)
 
@@ -370,9 +369,7 @@
 
     @staticmethod
     def unpack_stanza(
-        profile: SCEProfile,
-        stanza: domish.Element,
-        envelope_serialized: bytes
+        profile: SCEProfile, stanza: domish.Element, envelope_serialized: bytes
     ) -> SCEAffixValues:
         """Unpack a stanza packed according to Stanza Content Encryption.
 
@@ -409,18 +406,21 @@
         custom_affixes = set(profile.custom_policies.keys())
 
         # Make sure the envelope adheres to the schema
-        parser = etree.XMLParser(schema=etree.XMLSchema(etree.XML(ENVELOPE_SCHEMA.format(
-            custom_affix_references="".join(
-                f'<xs:element ref="{custom_affix.element_name}" minOccurs="0"/>'
-                for custom_affix
-                in custom_affixes
-            ),
-            custom_affix_definitions="".join(
-                custom_affix.element_schema
-                for custom_affix
-                in custom_affixes
+        parser = etree.XMLParser(
+            schema=etree.XMLSchema(
+                etree.XML(
+                    ENVELOPE_SCHEMA.format(
+                        custom_affix_references="".join(
+                            f'<xs:element ref="{custom_affix.element_name}" minOccurs="0"/>'
+                            for custom_affix in custom_affixes
+                        ),
+                        custom_affix_definitions="".join(
+                            custom_affix.element_schema for custom_affix in custom_affixes
+                        ),
+                    ).encode("utf-8")
+                )
             )
-        ).encode("utf-8"))))
+        )
 
         try:
             etree.fromstring(envelope_serialized_string, parser)
@@ -435,20 +435,16 @@
 
         # Verify the affixes
         rpad_element = cast(
-            Optional[domish.Element],
-            next(envelope.elements(NS_SCE, "rpad"), None)
+            Optional[domish.Element], next(envelope.elements(NS_SCE, "rpad"), None)
         )
         time_element = cast(
-            Optional[domish.Element],
-            next(envelope.elements(NS_SCE, "time"), None)
+            Optional[domish.Element], next(envelope.elements(NS_SCE, "time"), None)
         )
         to_element = cast(
-            Optional[domish.Element],
-            next(envelope.elements(NS_SCE, "to"), None)
+            Optional[domish.Element], next(envelope.elements(NS_SCE, "to"), None)
         )
         from_element = cast(
-            Optional[domish.Element],
-            next(envelope.elements(NS_SCE, "from"), None)
+            Optional[domish.Element], next(envelope.elements(NS_SCE, "from"), None)
         )
 
         # The rpad doesn't need verification.
@@ -456,8 +452,11 @@
 
         # The time affix isn't verified other than that the timestamp is parseable.
         try:
-            timestamp_value = None if time_element is None else \
-                XEP_0082.parse_datetime(time_element["stamp"])
+            timestamp_value = (
+                None
+                if time_element is None
+                else XEP_0082.parse_datetime(time_element["stamp"])
+            )
         except ValueError as e:
             raise AffixVerificationFailed("Malformed time affix.") from e
 
@@ -513,25 +512,26 @@
             element_name = affix.element_name
             element = cast(
                 Optional[domish.Element],
-                next(envelope.elements(NS_SCE, element_name), None)
+                next(envelope.elements(NS_SCE, element_name), None),
             )
             if element is not None:
                 affix.verify(stanza, element)
                 custom_values[affix] = element
 
         # Check whether all affixes required by the profile are present
-        rpad_missing = \
+        rpad_missing = (
             profile.rpad_policy is SCEAffixPolicy.REQUIRED and rpad_element is None
-        time_missing = \
+        )
+        time_missing = (
             profile.time_policy is SCEAffixPolicy.REQUIRED and time_element is None
-        to_missing = \
-            profile.to_policy is SCEAffixPolicy.REQUIRED and to_element is None
-        from_missing = \
+        )
+        to_missing = profile.to_policy is SCEAffixPolicy.REQUIRED and to_element is None
+        from_missing = (
             profile.from_policy is SCEAffixPolicy.REQUIRED and from_element is None
+        )
         custom_missing = any(
             affix not in custom_values
-            for affix, policy
-            in profile.custom_policies.items()
+            for affix, policy in profile.custom_policies.items()
             if policy is SCEAffixPolicy.REQUIRED
         )
 
@@ -570,9 +570,5 @@
                 stanza.addChild(child)
 
         return SCEAffixValues(
-            rpad_value,
-            timestamp_value,
-            recipient_value,
-            sender_value,
-            custom_values
+            rpad_value, timestamp_value, recipient_value, sender_value, custom_values
         )
--- a/libervia/backend/plugins/plugin_xep_0422.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0422.py	Wed Jun 19 18:44:57 2024 +0200
@@ -72,7 +72,7 @@
         clear: Optional[bool] = None,
         shell: Optional[bool] = None,
         children: Optional[List[domish.Element]] = None,
-        external: Optional[List[Union[str, Tuple[str, str]]]] = None
+        external: Optional[List[Union[str, Tuple[str, str]]]] = None,
     ) -> domish.Element:
         """Generate, add and return <apply-to> element
 
@@ -112,9 +112,7 @@
 
     @async_lru(maxsize=5)
     async def get_fastened_elts(
-        self,
-        client: SatXMPPEntity,
-        message_elt: domish.Element
+        self, client: SatXMPPEntity, message_elt: domish.Element
     ) -> Optional[FastenMetadata]:
         """Get fastened elements
 
@@ -127,9 +125,7 @@
         else:
             origin_id = apply_to_elt.getAttribute("id")
             if not origin_id:
-                log.warning(
-                    f"Received invalid fastening message: {message_elt.toXml()}"
-                )
+                log.warning(f"Received invalid fastening message: {message_elt.toXml()}")
                 return None
             elements = apply_to_elt.children
             if not elements:
@@ -140,7 +136,7 @@
                 History,
                 History.origin_id,
                 origin_id,
-                (History.messages, History.subjects, History.thread)
+                (History.messages, History.subjects, History.thread),
             )
             return FastenMetadata(
                 elements,
--- a/libervia/backend/plugins/plugin_xep_0428.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0428.py	Wed Jun 19 18:44:57 2024 +0200
@@ -50,9 +50,7 @@
         host.register_namespace("fallback", NS_FALLBACK)
 
     def add_fallback_elt(
-        self,
-        message_elt: domish.Element,
-        msg: Optional[str] = None
+        self, message_elt: domish.Element, msg: Optional[str] = None
     ) -> None:
         """Add the fallback indication element
 
--- a/libervia/backend/plugins/plugin_xep_0446.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0446.py	Wed Jun 19 18:44:57 2024 +0200
@@ -104,8 +104,7 @@
         return file_elt
 
     def parse_file_metadata_elt(
-        self,
-        file_metadata_elt: domish.Element
+        self, file_metadata_elt: domish.Element
     ) -> Dict[str, Any]:
         """Parse <file/> element
 
@@ -156,6 +155,7 @@
             pass
         except exceptions.DataError:
             from libervia.backend.tools.xml_tools import p_fmt_elt
+
             log.warning("invalid <hash/> element:\n{p_fmt_elt(file_metadata_elt)}")
         else:
             data["file_hash"] = (algo, hash_)
--- a/libervia/backend/plugins/plugin_xep_0447.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0447.py	Wed Jun 19 18:44:57 2024 +0200
@@ -66,19 +66,21 @@
         self._m = host.plugins["XEP-0446"]
         self._http_upload = host.plugins.get("XEP-0363")
         self._attach = host.plugins["ATTACH"]
-        self._attach.register(
-            self.can_handle_attachment, self.attach, priority=1000
-        )
+        self._attach.register(self.can_handle_attachment, self.attach, priority=1000)
         self.register_source_handler(
             self._u.namespace, "url-data", self._u.parse_url_data_elt
         )
-        host.plugins["DOWNLOAD"].register_download_handler(self._u.namespace, self.download)
+        host.plugins["DOWNLOAD"].register_download_handler(
+            self._u.namespace, self.download
+        )
         host.trigger.add("message_received", self._message_received_trigger)
 
     def register_source_handler(
-        self, namespace: str, element_name: str,
+        self,
+        namespace: str,
+        element_name: str,
         callback: Callable[[domish.Element], Dict[str, Any]],
-        encrypted: bool = False
+        encrypted: bool = False,
     ) -> None:
         """Register a handler for file source
 
@@ -103,7 +105,7 @@
         attachment: Dict[str, Any],
         source: Dict[str, Any],
         dest_path: Union[Path, str],
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> Tuple[str, defer.Deferred]:
         # TODO: handle url-data headers
         if extra is None:
@@ -113,10 +115,8 @@
         except KeyError:
             raise ValueError(f"{source} has missing URL")
 
-        if extra.get('ignore_tls_errors', False):
-            log.warning(
-                "TLS certificate check disabled, this is highly insecure"
-            )
+        if extra.get("ignore_tls_errors", False):
+            log.warning("TLS certificate check disabled, this is highly insecure")
             treq_client = treq_client_no_ssl
         else:
             treq_client = treq
@@ -125,14 +125,14 @@
             file_size = int(attachment["size"])
         except (KeyError, ValueError):
             head_data = await treq_client.head(download_url)
-            file_size = int(head_data.headers.getRawHeaders('content-length')[0])
+            file_size = int(head_data.headers.getRawHeaders("content-length")[0])
 
         file_obj = stream.SatFile(
             self.host,
             client,
             dest_path,
             mode="wb",
-            size = file_size,
+            size=file_size,
         )
 
         progress_id = file_obj.uid
@@ -157,8 +157,7 @@
             return True
 
     def get_sources_elt(
-        self,
-        children: Optional[List[domish.Element]] = None
+        self, children: Optional[List[domish.Element]] = None
     ) -> domish.Element:
         """Generate <sources> element"""
         sources_elt = domish.Element((NS_SFS, "sources"))
@@ -211,20 +210,13 @@
         file_sharing_elt.addChild(sources_elt)
         for source_data in sources:
             if "url" in source_data:
-                sources_elt.addChild(
-                    self._u.get_url_data_elt(**source_data)
-                )
+                sources_elt.addChild(self._u.get_url_data_elt(**source_data))
             else:
-                raise NotImplementedError(
-                    f"source data not implemented: {source_data}"
-                )
+                raise NotImplementedError(f"source data not implemented: {source_data}")
 
         return file_sharing_elt
 
-    def parse_sources_elt(
-        self,
-        sources_elt: domish.Element
-    ) -> List[Dict[str, Any]]:
+    def parse_sources_elt(self, sources_elt: domish.Element) -> List[Dict[str, Any]]:
         """Parse <sources/> element
 
         @param sources_elt: <sources/> element, or a direct parent element
@@ -236,7 +228,8 @@
                 sources_elt = next(sources_elt.elements(NS_SFS, "sources"))
             except StopIteration:
                 raise exceptions.NotFound(
-                    f"<sources/> element is missing: {sources_elt.toXml()}")
+                    f"<sources/> element is missing: {sources_elt.toXml()}"
+                )
         sources = []
         for elt in sources_elt.elements():
             if not elt.uri:
@@ -257,10 +250,7 @@
                 sources.append(source_data)
         return sources
 
-    def parse_file_sharing_elt(
-        self,
-        file_sharing_elt: domish.Element
-    ) -> Dict[str, Any]:
+    def parse_file_sharing_elt(self, file_sharing_elt: domish.Element) -> Dict[str, Any]:
         """Parse <file-sharing/> element and return file-sharing data
 
         @param file_sharing_elt: <file-sharing/> element
@@ -269,9 +259,7 @@
         """
         if file_sharing_elt.name != "file-sharing" or file_sharing_elt.uri != NS_SFS:
             try:
-                file_sharing_elt = next(
-                    file_sharing_elt.elements(NS_SFS, "file-sharing")
-                )
+                file_sharing_elt = next(file_sharing_elt.elements(NS_SFS, "file-sharing"))
             except StopIteration:
                 raise exceptions.NotFound
         try:
@@ -289,10 +277,7 @@
         return data
 
     def _add_file_sharing_attachments(
-            self,
-            client: SatXMPPEntity,
-            message_elt: domish.Element,
-            data: Dict[str, Any]
+        self, client: SatXMPPEntity, message_elt: domish.Element, data: Dict[str, Any]
     ) -> Dict[str, Any]:
         """Check <message> for a shared file, and add it as an attachment"""
         # XXX: XEP-0447 doesn't support several attachments in a single message, for now
@@ -302,15 +287,14 @@
             attachment = self.parse_file_sharing_elt(message_elt)
 
             if any(
-                    s.get(C.MESS_KEY_ENCRYPTED, False)
-                    for s in attachment["sources"]
+                s.get(C.MESS_KEY_ENCRYPTED, False) for s in attachment["sources"]
             ) and client.encryption.isEncrypted(data):
                 # we don't add the encrypted flag if the message itself is not encrypted,
                 # because the decryption key is part of the link, so sending it over
                 # unencrypted channel is like having no encryption at all.
                 attachment[C.MESS_KEY_ENCRYPTED] = True
 
-            attachments = data['extra'].setdefault(C.KEY_ATTACHMENTS, [])
+            attachments = data["extra"].setdefault(C.KEY_ATTACHMENTS, [])
             attachments.append(attachment)
 
         return data
@@ -319,7 +303,7 @@
         # XXX: for now, XEP-0447 only allow to send one file per <message/>, thus we need
         #   to send each file in a separate message
         attachments = data["extra"][C.KEY_ATTACHMENTS]
-        if not data['message'] or data['message'] == {'': ''}:
+        if not data["message"] or data["message"] == {"": ""}:
             extra_attachments = attachments[1:]
             del attachments[1:]
         else:
@@ -346,23 +330,25 @@
                     size=attachment.get("size"),
                     desc=attachment.get("desc"),
                     media_type=attachment.get("media_type"),
-                    file_hash=file_hash
+                    file_hash=file_hash,
                 )
                 data["xml"].addChild(file_sharing_elt)
 
         for attachment in extra_attachments:
             # we send all remaining attachment in a separate message
             await client.sendMessage(
-                to_jid=data['to'],
-                message={'': ''},
-                subject=data['subject'],
-                mess_type=data['type'],
+                to_jid=data["to"],
+                message={"": ""},
+                subject=data["subject"],
+                mess_type=data["type"],
                 extra={C.KEY_ATTACHMENTS: [attachment]},
             )
 
-        if ((not data['extra']
-             and (not data['message'] or data['message'] == {'': ''})
-             and not data['subject'])):
+        if (
+            not data["extra"]
+            and (not data["message"] or data["message"] == {"": ""})
+            and not data["subject"]
+        ):
             # nothing left to send, we can cancel the message
             raise exceptions.CancelError("Cancelled by XEP_0447 attachment handling")
 
--- a/libervia/backend/plugins/plugin_xep_0448.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0448.py	Wed Jun 19 18:44:57 2024 +0200
@@ -53,14 +53,24 @@
     C.PI_TYPE: C.PLUG_TYPE_EXP,
     C.PI_PROTOCOLS: ["XEP-0448"],
     C.PI_DEPENDENCIES: [
-        "XEP-0103", "XEP-0300", "XEP-0334", "XEP-0363", "XEP-0384", "XEP-0447",
-        "DOWNLOAD", "ATTACH"
+        "XEP-0103",
+        "XEP-0300",
+        "XEP-0334",
+        "XEP-0363",
+        "XEP-0384",
+        "XEP-0447",
+        "DOWNLOAD",
+        "ATTACH",
     ],
     C.PI_MAIN: "XEP_0448",
     C.PI_HANDLER: "yes",
-    C.PI_DESCRIPTION: dedent(_("""\
+    C.PI_DESCRIPTION: dedent(
+        _(
+            """\
     Implementation of e2e encryption for media sharing
-    """)),
+    """
+        )
+    ),
 }
 
 NS_ESFS = "urn:xmpp:esfs:0"
@@ -137,7 +147,7 @@
         attachment: Dict[str, Any],
         source: Dict[str, Any],
         dest_path: Union[Path, str],
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> Tuple[str, defer.Deferred]:
         # TODO: check hash
         if extra is None:
@@ -154,10 +164,8 @@
         except KeyError:
             raise ValueError(f"{source} has missing URL")
 
-        if extra.get('ignore_tls_errors', False):
-            log.warning(
-                "TLS certificate check disabled, this is highly insecure"
-            )
+        if extra.get("ignore_tls_errors", False):
+            log.warning("TLS certificate check disabled, this is highly insecure")
             treq_client = treq_client_no_ssl
         else:
             treq_client = treq
@@ -166,7 +174,7 @@
             file_size = int(attachment["size"])
         except (KeyError, ValueError):
             head_data = await treq_client.head(download_url)
-            content_length = int(head_data.headers.getRawHeaders('content-length')[0])
+            content_length = int(head_data.headers.getRawHeaders("content-length")[0])
             # the 128 bits tag is put at the end
             file_size = content_length - 16
 
@@ -175,7 +183,7 @@
             client,
             dest_path,
             mode="wb",
-            size = file_size,
+            size=file_size,
         )
 
         if cipher in (NS_AES_128_GCM, NS_AES_256_GCM):
@@ -204,13 +212,13 @@
                 client=client,
                 file_obj=file_obj,
                 decryptor=decryptor,
-                unpadder=unpadder
+                unpadder=unpadder,
             )
             finalize_cb = partial(
                 self.cbc_decrypt_finalize,
                 file_obj=file_obj,
                 decryptor=decryptor,
-                unpadder=unpadder
+                unpadder=unpadder,
             )
         else:
             msg = f"cipher {cipher!r} is not supported"
@@ -253,10 +261,7 @@
         }
         attachment["filename"] = filename
         return await self._http_upload.file_http_upload(
-            client=client,
-            filepath=filepath,
-            filename="encrypted",
-            extra=extra
+            client=client, filepath=filepath, filename="encrypted", extra=extra
         )
 
     async def attach(self, client, data):
@@ -264,7 +269,7 @@
         #   we need to send each file in a separate message, in the same way as for
         #   plugin_sec_aesgcm.
         attachments = data["extra"][C.KEY_ATTACHMENTS]
-        if not data['message'] or data['message'] == {'': ''}:
+        if not data["message"] or data["message"] == {"": ""}:
             extra_attachments = attachments[1:]
             del attachments[1:]
         else:
@@ -287,24 +292,23 @@
                     [],
                     name=attachment["filename"],
                     size=attachment["size"],
-                    file_hash=file_hash
+                    file_hash=file_hash,
                 )
                 encrypted_elt = file_sharing_elt.sources.addElement(
                     (NS_ESFS, "encrypted")
                 )
                 encrypted_elt["cipher"] = NS_AES_256_GCM
                 encrypted_elt.addElement(
-                    "key",
-                    content=base64.b64encode(encryption_data["key"]).decode()
+                    "key", content=base64.b64encode(encryption_data["key"]).decode()
                 )
                 encrypted_elt.addElement(
-                    "iv",
-                    content=base64.b64encode(encryption_data["iv"]).decode()
+                    "iv", content=base64.b64encode(encryption_data["iv"]).decode()
                 )
-                encrypted_elt.addChild(self._h.build_hash_elt(
-                    attachment["encrypted_hash"],
-                    attachment["encrypted_hash_algo"]
-                ))
+                encrypted_elt.addChild(
+                    self._h.build_hash_elt(
+                        attachment["encrypted_hash"], attachment["encrypted_hash_algo"]
+                    )
+                )
                 encrypted_elt.addChild(
                     self._sfs.get_sources_elt(
                         [self._u.get_url_data_elt(attachment["url"])]
@@ -315,16 +319,18 @@
         for attachment in extra_attachments:
             # we send all remaining attachment in a separate message
             await client.sendMessage(
-                to_jid=data['to'],
-                message={'': ''},
-                subject=data['subject'],
-                mess_type=data['type'],
+                to_jid=data["to"],
+                message={"": ""},
+                subject=data["subject"],
+                mess_type=data["type"],
                 extra={C.KEY_ATTACHMENTS: [attachment]},
             )
 
-        if ((not data['extra']
-             and (not data['message'] or data['message'] == {'': ''})
-             and not data['subject'])):
+        if (
+            not data["extra"]
+            and (not data["message"] or data["message"] == {"": ""})
+            and not data["subject"]
+        ):
             # nothing left to send, we can cancel the message
             raise exceptions.CancelError("Cancelled by XEP_0448 attachment handling")
 
@@ -333,7 +339,7 @@
         data: bytes,
         client: SatXMPPEntity,
         file_obj: stream.SatFile,
-        decryptor: CipherContext
+        decryptor: CipherContext,
     ) -> None:
         if file_obj.tell() + len(data) > file_obj.size:  # type: ignore
             # we're reaching end of file with this bunch of data
@@ -371,16 +377,13 @@
         client: SatXMPPEntity,
         file_obj: stream.SatFile,
         decryptor: CipherContext,
-        unpadder: PaddingContext
+        unpadder: PaddingContext,
     ) -> None:
         decrypted = decryptor.update(data)
         file_obj.write(unpadder.update(decrypted))
 
     def cbc_decrypt_finalize(
-        self,
-        file_obj: stream.SatFile,
-        decryptor: CipherContext,
-        unpadder: PaddingContext
+        self, file_obj: stream.SatFile, decryptor: CipherContext, unpadder: PaddingContext
     ) -> None:
         decrypted = decryptor.finalize()
         file_obj.write(unpadder.update(decrypted))
@@ -388,7 +391,7 @@
         file_obj.close()
 
     def _upload_pre_slot(self, client, extra, file_metadata):
-        if extra.get('encryption') != IMPORT_NAME:
+        if extra.get("encryption") != IMPORT_NAME:
             return True
         # the tag is appended to the file
         file_metadata["size"] += 16
@@ -414,10 +417,10 @@
                 return ret
             except AlreadyFinalized:
                 # as we have already finalized, we can now send EOF
-                return b''
+                return b""
 
     def _upload_trigger(self, client, extra, sat_file, file_producer, slot):
-        if extra.get('encryption') != IMPORT_NAME:
+        if extra.get("encryption") != IMPORT_NAME:
             return True
         attachment = extra["attachment"]
         encryption_data = extra["encryption_data"]
@@ -442,14 +445,17 @@
 
         if sat_file.data_cb is not None:
             raise exceptions.InternalError(
-                f"data_cb was expected to be None, it is set to {sat_file.data_cb}")
+                f"data_cb was expected to be None, it is set to {sat_file.data_cb}"
+            )
 
-        attachment.update({
-            "hash_algo": self._h.ALGO_DEFAULT,
-            "hasher": self._h.get_hasher(),
-            "encrypted_hash_algo": self._h.ALGO_DEFAULT,
-            "encrypted_hasher": self._h.get_hasher(),
-        })
+        attachment.update(
+            {
+                "hash_algo": self._h.ALGO_DEFAULT,
+                "hasher": self._h.get_hasher(),
+                "encrypted_hash_algo": self._h.ALGO_DEFAULT,
+                "encrypted_hasher": self._h.get_hasher(),
+            }
+        )
 
         # with data_cb we encrypt the file on the fly
         sat_file.data_cb = partial(
--- a/libervia/backend/plugins/plugin_xep_0465.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0465.py	Wed Jun 19 18:44:57 2024 +0200
@@ -120,10 +120,7 @@
 
     @utils.ensure_deferred
     async def _subscriptions(
-        self,
-        service="",
-        nodeIdentifier="",
-        profile_key=C.PROF_KEY_NONE
+        self, service="", nodeIdentifier="", profile_key=C.PROF_KEY_NONE
     ) -> str:
         client = self.host.get_client(profile_key)
         service = None if not service else jid.JID(service)
@@ -134,7 +131,7 @@
         self,
         client: SatXMPPEntity,
         service: Optional[jid.JID] = None,
-        node: Optional[str] = None
+        node: Optional[str] = None,
     ) -> List[Dict[str, Union[str, bool]]]:
         """Retrieve public subscriptions from a service
 
@@ -170,9 +167,7 @@
                     "state": subscription_elt.getAttribute("subscription", "subscribed"),
                 }
             except KeyError:
-                log.warning(
-                    f"invalid <subscription> element: {subscription_elt.toXml()}"
-                )
+                log.warning(f"invalid <subscription> element: {subscription_elt.toXml()}")
                 continue
             if node is not None and sub_dict["node"] != node:
                 # if not is specified, we filter out any other node
@@ -183,10 +178,7 @@
 
     @utils.ensure_deferred
     async def _get_public_node_subscriptions(
-        self,
-        service: str,
-        node: str,
-        profile_key: str
+        self, service: str, node: str, profile_key: str
     ) -> Dict[str, str]:
         client = self.host.get_client(profile_key)
         subs = await self.get_public_node_subscriptions(
@@ -199,10 +191,7 @@
         return f"{NS_PPS_SUBSCRIBERS}/{node}"
 
     async def get_public_node_subscriptions(
-        self,
-        client: SatXMPPEntity,
-        service: Optional[jid.JID],
-        nodeIdentifier: str
+        self, client: SatXMPPEntity, service: Optional[jid.JID], nodeIdentifier: str
     ) -> Dict[jid.JID, str]:
         """Retrieve public subscriptions to a node
 
@@ -237,9 +226,7 @@
             try:
                 ret[jid.JID(subscriber_elt["jid"])] = "subscribed"
             except (KeyError, RuntimeError):
-                log.warning(
-                    f"invalid <subscriber> element: {subscriber_elt.toXml()}"
-                )
+                log.warning(f"invalid <subscriber> element: {subscriber_elt.toXml()}")
                 continue
         return ret
 
@@ -253,7 +240,7 @@
         """
         if options is None:
             options = {}
-        options[f'{{{NS_PPS}}}public'] = True
+        options[f"{{{NS_PPS}}}public"] = True
         return options
 
 
--- a/libervia/backend/plugins/plugin_xep_0470.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0470.py	Wed Jun 19 18:44:57 2024 +0200
@@ -93,12 +93,11 @@
         self,
         name: str,
         namespace: str,
-        get_cb: Callable[
-            [SatXMPPEntity, domish.Element, Dict[str, Any]],
-            None],
+        get_cb: Callable[[SatXMPPEntity, domish.Element, Dict[str, Any]], None],
         set_cb: Callable[
             [SatXMPPEntity, Dict[str, Any], Optional[domish.Element]],
-            Optional[domish.Element]],
+            Optional[domish.Element],
+        ],
     ) -> None:
         """Register callbacks to handle an attachment
 
@@ -120,18 +119,12 @@
             raise exceptions.ConflictError(
                 f"({name}, {namespace}) attachment handlers are already registered"
             )
-        self.handlers[(name, namespace)] = {
-            "get": get_cb,
-            "set": set_cb
-        }
+        self.handlers[(name, namespace)] = {"get": get_cb, "set": set_cb}
 
     def get_attachment_node_name(self, service: jid.JID, node: str, item: str) -> str:
         """Generate name to use for attachment node"""
         target_item_uri = uri.build_xmpp_uri(
-            "pubsub",
-            path=service.userhost(),
-            node=node,
-            item=item
+            "pubsub", path=service.userhost(), node=node, item=item
         )
         return f"{NS_PUBSUB_ATTACHMENTS}/{target_item_uri}"
 
@@ -162,7 +155,7 @@
         service: jid.JID,
         node: str,
         item: domish.Element,
-        data: dict
+        data: dict,
     ) -> bool:
         """trigger to create attachment node on each publication"""
         await self.create_attachments_node(
@@ -176,7 +169,7 @@
         service: jid.JID,
         node: str,
         item_id: str,
-        autocreate: bool = False
+        autocreate: bool = False,
     ):
         """Create node for attachements if necessary
 
@@ -190,9 +183,7 @@
         except error.StanzaError as e:
             if e.condition == "item-not-found" and autocreate:
                 # we auto-create the missing node
-                await self._p.createNode(
-                    client, service, node
-                )
+                await self._p.createNode(client, service, node)
                 node_config = await self._p.getConfiguration(client, service, node)
             elif e.condition in ("forbidden", "feature-not-implemented"):
                 node_config = self._p.make_configuration_form({})
@@ -213,9 +204,7 @@
             log.warning(f"Can't create attachment node {attachment_node}: {e}")
 
     def items_2_attachment_data(
-        self,
-        client: SatXMPPEntity,
-        items: List[domish.Element]
+        self, client: SatXMPPEntity, items: List[domish.Element]
     ) -> List[Dict[str, Any]]:
         """Convert items from attachment node to attachment data"""
         list_data = []
@@ -249,9 +238,7 @@
                     f"{item.toXml}"
                 )
                 continue
-            data = {
-                "from": item_id
-            }
+            data = {"from": item_id}
             for handler in self.handlers.values():
                 handler["get"](client, attachments_elt, data)
             if len(data) > 1:
@@ -265,7 +252,7 @@
         item: str,
         senders_s: List[str],
         extra_s: str,
-        profile_key: str
+        profile_key: str,
     ) -> defer.Deferred:
         client = self.host.get_client(profile_key)
         extra = data_format.deserialise(extra_s)
@@ -274,9 +261,7 @@
             self.get_attachments(client, jid.JID(service_s), node, item, senders)
         )
         d.addCallback(
-            lambda ret:
-            (data_format.serialise(ret[0]),
-             data_format.serialise(ret[1]))
+            lambda ret: (data_format.serialise(ret[0]), data_format.serialise(ret[1]))
         )
         return d
 
@@ -287,7 +272,7 @@
         node: str,
         item: str,
         senders: Optional[List[jid.JID]],
-        extra: Optional[dict] = None
+        extra: Optional[dict] = None,
     ) -> Tuple[List[Dict[str, Any]], dict]:
         """Retrieve data attached to a pubsub item
 
@@ -316,13 +301,9 @@
 
         return list_data, metadata
 
-    def _set(
-        self,
-        attachments_s: str,
-        profile_key: str
-    ) -> None:
+    def _set(self, attachments_s: str, profile_key: str) -> None:
         client = self.host.get_client(profile_key)
-        attachments = data_format.deserialise(attachments_s)  or {}
+        attachments = data_format.deserialise(attachments_s) or {}
         return defer.ensureDeferred(self.set_attachements(client, attachments))
 
     async def apply_set_handler(
@@ -389,9 +370,7 @@
         return item_elt
 
     async def set_attachements(
-        self,
-        client: SatXMPPEntity,
-        attachments_data: Dict[str, Any]
+        self, client: SatXMPPEntity, attachments_data: Dict[str, Any]
     ) -> None:
         """Set or update attachments
 
@@ -414,9 +393,7 @@
             node = attachments_data["node"]
             item = attachments_data["id"]
         except (KeyError, RuntimeError):
-            raise ValueError(
-                'data must have "service", "node" and "id" set'
-            )
+            raise ValueError('data must have "service", "node" and "id" set')
         attachment_node = self.get_attachment_node_name(service, node, item)
         try:
             items, __ = await self._p.get_items(
@@ -465,7 +442,6 @@
         attachment_node = self.get_attachment_node_name(service, node, item)
         await self._p.subscribe(client, service, attachment_node)
 
-
     def set_timestamp(self, attachment_elt: domish.Element, data: dict) -> None:
         """Check if a ``timestamp`` attribute is set, parse it, and fill data
 
@@ -489,15 +465,11 @@
         data: Dict[str, Any],
     ) -> None:
         try:
-            noticed_elt = next(
-                attachments_elt.elements(NS_PUBSUB_ATTACHMENTS, "noticed")
-            )
+            noticed_elt = next(attachments_elt.elements(NS_PUBSUB_ATTACHMENTS, "noticed"))
         except StopIteration:
             pass
         else:
-            noticed_data = {
-                "noticed": True
-            }
+            noticed_data = {"noticed": True}
             self.set_timestamp(noticed_elt, noticed_data)
             data["noticed"] = noticed_data
 
@@ -505,7 +477,7 @@
         self,
         client: SatXMPPEntity,
         data: Dict[str, Any],
-        former_elt: Optional[domish.Element]
+        former_elt: Optional[domish.Element],
     ) -> Optional[domish.Element]:
         """add or remove a <noticed> attachment
 
@@ -517,10 +489,7 @@
             return former_elt
         elif noticed:
             return domish.Element(
-                (NS_PUBSUB_ATTACHMENTS, "noticed"),
-                attribs = {
-                    "timestamp": xmpp_date()
-                }
+                (NS_PUBSUB_ATTACHMENTS, "noticed"), attribs={"timestamp": xmpp_date()}
             )
         else:
             return None
@@ -549,7 +518,7 @@
         self,
         client: SatXMPPEntity,
         data: Dict[str, Any],
-        former_elt: Optional[domish.Element]
+        former_elt: Optional[domish.Element],
     ) -> Optional[domish.Element]:
         """update the <reaction> attachment"""
         reactions_data = data["extra"].get("reactions")
@@ -557,9 +526,11 @@
             return former_elt
         operation_type = reactions_data.get("operation", "update")
         if operation_type == "update":
-            former_reactions = {
-                str(r) for r in former_elt.elements(NS_PUBSUB_ATTACHMENTS, "reaction")
-            } if former_elt is not None else set()
+            former_reactions = (
+                {str(r) for r in former_elt.elements(NS_PUBSUB_ATTACHMENTS, "reaction")}
+                if former_elt is not None
+                else set()
+            )
             added_reactions = set(reactions_data.get("add") or [])
             removed_reactions = set(reactions_data.get("remove") or [])
             reactions = list((former_reactions | added_reactions) - removed_reactions)
@@ -569,10 +540,7 @@
             raise exceptions.DataError(f"invalid reaction operation: {operation_type!r}")
         if reactions:
             reactions_elt = domish.Element(
-                (NS_PUBSUB_ATTACHMENTS, "reactions"),
-                attribs = {
-                    "timestamp": xmpp_date()
-                }
+                (NS_PUBSUB_ATTACHMENTS, "reactions"), attribs={"timestamp": xmpp_date()}
             )
             for reactions_data in reactions:
                 reactions_elt.addElement("reaction", content=reactions_data)
--- a/libervia/backend/plugins/plugin_xep_0471.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0471.py	Wed Jun 19 18:44:57 2024 +0200
@@ -53,8 +53,11 @@
     C.PI_MODES: C.PLUG_MODE_BOTH,
     C.PI_PROTOCOLS: [],
     C.PI_DEPENDENCIES: [
-        "XEP-0060", "XEP-0080", "XEP-0447", "XEP-0470", # "INVITATION", "PUBSUB_INVITATION",
-        # "LIST_INTEREST"
+        "XEP-0060",
+        "XEP-0080",
+        "XEP-0447",
+        "XEP-0470",  # "INVITATION", "PUBSUB_INVITATION",
+        # "LIST_INTEREST"
     ],
     C.PI_RECOMMENDATIONS: ["XEP-0277", "EMAIL_INVITATION"],
     C.PI_MAIN: "XEP_0471",
@@ -81,7 +84,9 @@
         self._a = host.plugins["XEP-0470"]
         # self._i = host.plugins.get("EMAIL_INVITATION")
         host.register_namespace("events", NS_EVENTS)
-        self._a.register_attachment_handler("rsvp", NS_EVENTS, self.rsvp_get, self.rsvp_set)
+        self._a.register_attachment_handler(
+            "rsvp", NS_EVENTS, self.rsvp_get, self.rsvp_set
+        )
         # host.plugins["PUBSUB_INVITATION"].register(NS_EVENTS, self)
         host.bridge.add_method(
             "events_get",
@@ -304,9 +309,7 @@
 
         for category_elt in event_elt.elements(NS_EVENTS, "category"):
             try:
-                category_data = {
-                    "term": category_elt["term"]
-                }
+                category_data = {"term": category_elt["term"]}
             except KeyError:
                 log.warning(
                     "<category/> element is missing mandatory term: "
@@ -360,10 +363,7 @@
             elt = next(event_elt.elements(NS_EVENTS, name), None)
             if elt is not None:
                 try:
-                    event_data[name] = {
-                        "service": elt["jid"],
-                        "node": elt["node"]
-                    }
+                    event_data[name] = {"service": elt["jid"], "node": elt["node"]}
                 except KeyError:
                     log.warning(f"invalid {name} element: {elt.toXml()}")
 
@@ -373,7 +373,8 @@
         if attachments_elt:
             attachments = event_data["attachments"] = []
             for file_sharing_elt in attachments_elt.elements(
-                    self._sfs.namespace, "file-sharing"):
+                self._sfs.namespace, "file-sharing"
+            ):
                 try:
                     file_sharing_data = self._sfs.parse_file_sharing_elt(file_sharing_elt)
                 except Exception as e:
@@ -409,7 +410,7 @@
                 event_data["external"] = {
                     "jid": external_elt["jid"],
                     "node": external_elt["node"],
-                    "item": external_elt["item"]
+                    "item": external_elt["item"],
                 }
             except KeyError:
                 log.warning(f"invalid <external/> element: {external_elt.toXml()}")
@@ -417,7 +418,7 @@
         return event_data
 
     def _events_get(
-            self, service: str, node: str, event_ids: List[str], extra: str, profile_key: str
+        self, service: str, node: str, event_ids: List[str], extra: str, profile_key: str
     ):
         client = self.host.get_client(profile_key)
         d = defer.ensureDeferred(
@@ -426,7 +427,7 @@
                 jid.JID(service) if service else None,
                 node if node else NS_EVENTS,
                 event_ids,
-                data_format.deserialise(extra)
+                data_format.deserialise(extra),
             )
         )
         d.addCallback(data_format.serialise)
@@ -457,9 +458,7 @@
             try:
                 events.append(self.event_elt_2_event_data((item)))
             except (ValueError, exceptions.NotFound):
-                log.warning(
-                    f"Can't parse event for item {item['id']}: {item.toXml()}"
-                )
+                log.warning(f"Can't parse event for item {item['id']}: {item.toXml()}")
 
         return events
 
@@ -469,7 +468,7 @@
         service: str,
         node: str,
         event_id: str = "",
-        profile_key: str = C.PROF_KEY_NONE
+        profile_key: str = C.PROF_KEY_NONE,
     ):
         client = self.host.get_client(profile_key)
         return defer.ensureDeferred(
@@ -478,7 +477,7 @@
                 data_format.deserialise(data_s),
                 jid.JID(service) if service else None,
                 node or None,
-                event_id or None
+                event_id or None,
             )
         )
 
@@ -690,17 +689,19 @@
                     try:
                         next(f for f in rsvp_data["fields"] if f["name"] == "attending")
                     except StopIteration:
-                        rsvp_data["fields"].append({
-                            "type": "list-single",
-                            "name": "attending",
-                            "label": "Attending",
-                            "options": [
-                                {"label": "maybe", "value": "maybe"},
-                                {"label": "yes", "value": "yes"},
-                                {"label": "no", "value": "no"}
-                            ],
-                            "required": True
-                        })
+                        rsvp_data["fields"].append(
+                            {
+                                "type": "list-single",
+                                "name": "attending",
+                                "label": "Attending",
+                                "options": [
+                                    {"label": "maybe", "value": "maybe"},
+                                    {"label": "yes", "value": "yes"},
+                                    {"label": "no", "value": "no"},
+                                ],
+                                "required": True,
+                            }
+                        )
                 rsvp_data["namespace"] = NS_RSVP
                 rsvp_form = xml_tools.data_dict_2_data_form(rsvp_data)
                 rsvp_elt.addChild(rsvp_form.toElement())
@@ -728,10 +729,7 @@
 
         extra = event_data.get("extra")
         if extra:
-            extra_form = data_form.Form(
-                "result",
-                formNamespace=NS_EXTRA
-            )
+            extra_form = data_form.Form("result", formNamespace=NS_EXTRA)
             for node_type in ("website", "status"):
                 if node_type in extra:
                     extra_form.addField(
@@ -803,7 +801,7 @@
         event_id: str,
         service: str,
         node: str,
-        profile_key: str = C.PROF_KEY_NONE
+        profile_key: str = C.PROF_KEY_NONE,
     ) -> None:
         client = self.host.get_client(profile_key)
         defer.ensureDeferred(
@@ -846,9 +844,7 @@
     ) -> None:
         """Get RSVP answers from attachments"""
         try:
-            rsvp_elt = next(
-                attachments_elt.elements(NS_EVENTS, "rsvp")
-            )
+            rsvp_elt = next(attachments_elt.elements(NS_EVENTS, "rsvp"))
         except StopIteration:
             pass
         else:
@@ -861,7 +857,7 @@
         self,
         client: SatXMPPEntity,
         data: Dict[str, Any],
-        former_elt: Optional[domish.Element]
+        former_elt: Optional[domish.Element],
     ) -> Optional[domish.Element]:
         """update the <reaction> attachment"""
         rsvp_data = data["extra"].get("rsvp")
@@ -869,10 +865,7 @@
             return former_elt
         elif rsvp_data:
             rsvp_elt = domish.Element(
-                (NS_EVENTS, "rsvp"),
-                attribs = {
-                    "timestamp": utils.xmpp_date()
-                }
+                (NS_EVENTS, "rsvp"), attribs={"timestamp": utils.xmpp_date()}
             )
             rsvp_form = data_form.Form("submit", formNamespace=NS_RSVP)
             rsvp_form.makeFields(rsvp_data)
@@ -888,7 +881,7 @@
         item: str,
         invitees: List[str],
         extra: str,
-        profile_key: str
+        profile_key: str,
     ) -> defer.Deferred:
         client = self.host.get_client(profile_key)
         if invitees:
@@ -902,7 +895,7 @@
                 node or None,
                 item,
                 invitees_jid,
-                data_format.deserialise(extra)
+                data_format.deserialise(extra),
             )
         )
         d.addCallback(lambda ret: data_format.serialise(ret))
@@ -945,12 +938,7 @@
         return ret
 
     def _event_invitee_set(
-        self,
-        service: str,
-        node: str,
-        item: str,
-        rsvp_s: str,
-        profile_key: str
+        self, service: str, node: str, item: str, rsvp_s: str, profile_key: str
     ):
         client = self.host.get_client(profile_key)
         return defer.ensureDeferred(
@@ -959,7 +947,7 @@
                 jid.JID(service) if service else None,
                 node or None,
                 item,
-                data_format.deserialise(rsvp_s)
+                data_format.deserialise(rsvp_s),
             )
         )
 
@@ -982,20 +970,21 @@
             service = client.jid.userhostJID()
         if node is None:
             node = NS_EVENTS
-        await self._a.set_attachements(client, {
-            "service": service.full(),
-            "node": node,
-            "id": item,
-            "extra": {"rsvp": rsvp}
-        })
+        await self._a.set_attachements(
+            client,
+            {
+                "service": service.full(),
+                "node": node,
+                "id": item,
+                "extra": {"rsvp": rsvp},
+            },
+        )
 
     def _event_invitees_list(self, service, node, profile_key):
         service = jid.JID(service) if service else None
         node = node if node else NS_EVENT
         client = self.host.get_client(profile_key)
-        return defer.ensureDeferred(
-            self.event_invitees_list(client, service, node)
-        )
+        return defer.ensureDeferred(self.event_invitees_list(client, service, node))
 
     async def event_invitees_list(self, client, service, node):
         """Retrieve attendance from event node
@@ -1012,9 +1001,13 @@
                 event_elt = next(item.elements(NS_EVENT, "invitee"))
             except StopIteration:
                 # no item found, event data are not set yet
-                log.warning(_(
-                    "no data found for {item_id} (service: {service}, node: {node})"
-                    .format(item_id=item["id"], service=service, node=node)))
+                log.warning(
+                    _(
+                        "no data found for {item_id} (service: {service}, node: {node})".format(
+                            item_id=item["id"], service=service, node=node
+                        )
+                    )
+                )
             else:
                 data = {}
                 for key in ("attend", "guests"):
@@ -1032,7 +1025,7 @@
         service: jid.JID,
         node: str,
         item_id: Optional[str] = None,
-        name: str = '',
+        name: str = "",
         extra: Optional[dict] = None,
     ) -> None:
         if self._b is None:
@@ -1066,9 +1059,7 @@
                 comments_node = item["comments_node"]
             except KeyError:
                 log.debug(
-                    "no comment service set for item {item_id}".format(
-                        item_id=item["id"]
-                    )
+                    "no comment service set for item {item_id}".format(item_id=item["id"])
                 )
             else:
                 await self._p.set_node_affiliations(
@@ -1081,10 +1072,21 @@
             invitee_jid, service, node, item_id or NS_EVENT, profile_key=profile
         )
 
-    def _invite_by_email(self, service, node, id_=NS_EVENT, email="", emails_extra=None,
-                       name="", host_name="", language="", url_template="",
-                       message_subject="", message_body="",
-                       profile_key=C.PROF_KEY_NONE):
+    def _invite_by_email(
+        self,
+        service,
+        node,
+        id_=NS_EVENT,
+        email="",
+        emails_extra=None,
+        name="",
+        host_name="",
+        language="",
+        url_template="",
+        message_subject="",
+        message_body="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile_key)
         kwargs = {
             "profile": client.profile,
@@ -1101,9 +1103,15 @@
         ):
             value = locals()[key]
             kwargs[key] = str(value)
-        return defer.ensureDeferred(self.invite_by_email(
-            client, jid.JID(service) if service else None, node, id_ or NS_EVENT, **kwargs
-        ))
+        return defer.ensureDeferred(
+            self.invite_by_email(
+                client,
+                jid.JID(service) if service else None,
+                node,
+                id_ or NS_EVENT,
+                **kwargs,
+            )
+        )
 
     async def invite_by_email(self, client, service, node, id_=NS_EVENT, **kwargs):
         """High level method to create an email invitation to an event
@@ -1139,7 +1147,7 @@
         service: jid.JID,
         node: str,
         item_id: Optional[str],
-        item_elt: domish.Element
+        item_elt: domish.Element,
     ) -> None:
         event_elt = item_elt.event
         link_elt = event_elt.addElement("link")
@@ -1153,8 +1161,8 @@
             pass
         else:
             extra["name"] = name
-        if 'image' in event_data:
-            extra["thumb_url"] = event_data['image']
+        if "image" in event_data:
+            extra["thumb_url"] = event_data["image"]
         extra["element"] = event_elt
 
 
--- a/libervia/backend/test/helpers.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/helpers.py	Wed Jun 19 18:44:57 2024 +0200
@@ -20,10 +20,12 @@
 
 ## logging configuration for tests ##
 from libervia.backend.core import log_config
+
 log_config.libervia_configure()
 
 import logging
 from libervia.backend.core.log import getLogger
+
 getLogger().setLevel(logging.WARNING)  # put this to DEBUG when needed
 
 from libervia.backend.core import exceptions
@@ -49,7 +51,7 @@
     @return: unicode conversion, according to bridge convention
 
     """
-    return  "True" if value else "False"
+    return "True" if value else "False"
 
 
 def mute_logging():
@@ -98,13 +100,15 @@
         self.profiles = {}
 
     def contact_del(self, to, profile_key):
-        #TODO
+        # TODO
         pass
 
     def register_callback(self, callback, *args, **kwargs):
         pass
 
-    def message_send(self, to_s, msg, subject=None, mess_type='auto', extra={}, profile_key='@NONE@'):
+    def message_send(
+        self, to_s, msg, subject=None, mess_type="auto", extra={}, profile_key="@NONE@"
+    ):
         self.send_and_store_message({"to": JID(to_s)})
 
     def _send_message_to_stream(self, mess_data, client):
@@ -113,7 +117,7 @@
         @param mess_data: message data dictionnary
         @param client: profile's client
         """
-        client.xmlstream.send(mess_data['xml'])
+        client.xmlstream.send(mess_data["xml"])
         return mess_data
 
     def _store_message(self, mess_data, client):
@@ -190,7 +194,9 @@
         entry = self.get_sent_message(profile_index)
         return entry.toXml() if entry else None
 
-    def find_features_set(self, features, identity=None, jid_=None, profile=C.PROF_KEY_NONE):
+    def find_features_set(
+        self, features, identity=None, jid_=None, profile=C.PROF_KEY_NONE
+    ):
         """Call self.add_feature from your tests to change the return value.
 
         @return: a set of entities
@@ -211,7 +217,7 @@
         To be called from your tests when needed.
         """
         client = self.get_client(profile_key)
-        if not hasattr(client, 'features'):
+        if not hasattr(client, "features"):
             client.features = {}
         if jid_ not in client.features:
             client.features[jid_] = set()
@@ -248,7 +254,9 @@
 
         setattr(self, name, check_call)
 
-    def add_method(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc=None):
+    def add_method(
+        self, name, int_suffix, in_sign, out_sign, method, async_=False, doc=None
+    ):
         pass
 
     def add_signal(self, name, int_suffix, signature):
@@ -271,19 +279,19 @@
         Params.__init__(self, host, storage)
         self.params = {}  # naive simulation of values storage
 
-    def param_set(self, name, value, category, security_limit=-1, profile_key='@NONE@'):
+    def param_set(self, name, value, category, security_limit=-1, profile_key="@NONE@"):
         profile = self.get_profile_name(profile_key)
         self.params.setdefault(profile, {})
         self.params[profile_key][(category, name)] = value
 
-    def param_get_a(self, name, category, attr="value", profile_key='@NONE@'):
+    def param_get_a(self, name, category, attr="value", profile_key="@NONE@"):
         profile = self.get_profile_name(profile_key)
         return self.params[profile][(category, name)]
 
     def get_profile_name(self, profile_key, return_profile_keys=False):
-        if profile_key == '@DEFAULT@':
+        if profile_key == "@DEFAULT@":
             return C.PROFILE[0]
-        elif profile_key == '@NONE@':
+        elif profile_key == "@NONE@":
             raise exceptions.ProfileNotSetError
         else:
             return profile_key
@@ -315,13 +323,24 @@
     def get_profile_name(self, profile_key, return_profile_keys=False):
         return self.params.get_profile_name(profile_key, return_profile_keys)
 
-    def add_to_history(self, from_jid, to_jid, message, _type='chat', extra=None, timestamp=None, profile="@NONE@"):
+    def add_to_history(
+        self,
+        from_jid,
+        to_jid,
+        message,
+        _type="chat",
+        extra=None,
+        timestamp=None,
+        profile="@NONE@",
+    ):
         pass
 
-    def contact_add(self, contact_jid, attributes, groups, profile_key='@DEFAULT@'):
+    def contact_add(self, contact_jid, attributes, groups, profile_key="@DEFAULT@"):
         pass
 
-    def set_presence_status(self, contact_jid, show, priority, statuses, profile_key='@DEFAULT@'):
+    def set_presence_status(
+        self, contact_jid, show, priority, statuses, profile_key="@DEFAULT@"
+    ):
         pass
 
     def add_waiting_sub(self, type_, contact_jid, profile_key):
@@ -330,7 +349,9 @@
     def del_waiting_sub(self, contact_jid, profile_key):
         pass
 
-    def update_entity_data(self, entity_jid, key, value, silent=False, profile_key="@NONE@"):
+    def update_entity_data(
+        self, entity_jid, key, value, silent=False, profile_key="@NONE@"
+    ):
         self.entities_data.setdefault(entity_jid, {})
         self.entities_data[entity_jid][key] = value
 
@@ -366,10 +387,16 @@
             kwargs["subscriptionTo"] = True
             kwargs["subscriptionFrom"] = True
         roster_item = RosterItem(jid, *args, **kwargs)
-        attrs = {'to': b2s(roster_item.subscriptionTo), 'from': b2s(roster_item.subscriptionFrom), 'ask': b2s(roster_item.pendingOut)}
+        attrs = {
+            "to": b2s(roster_item.subscriptionTo),
+            "from": b2s(roster_item.subscriptionFrom),
+            "ask": b2s(roster_item.pendingOut),
+        }
         if roster_item.name:
-            attrs['name'] = roster_item.name
-        self.host.bridge.expect_call("contact_new", jid.full(), attrs, roster_item.groups, self.parent.profile)
+            attrs["name"] = roster_item.name
+        self.host.bridge.expect_call(
+            "contact_new", jid.full(), attrs, roster_item.groups, self.parent.profile
+        )
         self._jids[jid] = roster_item
         self._register_item(roster_item)
 
@@ -386,13 +413,13 @@
         @param obj (domish.Element, str or unicode): message to send
         """
         if not isinstance(obj, domish.Element):
-            assert(isinstance(obj, str) or isinstance(obj, str))
+            assert isinstance(obj, str) or isinstance(obj, str)
             obj = parseXml(obj)
 
-        if obj.name == 'iq':
+        if obj.name == "iq":
             # IQ request expects an answer, return the request itself so
             # you can check if it has been well built by your plugin.
-            self.iqDeferreds[obj['id']].callback(obj)
+            self.iqDeferreds[obj["id"]].callback(obj)
 
         self.sent.append(obj)
         return defer.succeed(None)
@@ -424,27 +451,27 @@
         def equal_elt(got_elt, exp_elt):
             if ignore_blank:
                 for elt in got_elt, exp_elt:
-                    for attr in ('text', 'tail'):
+                    for attr in ("text", "tail"):
                         value = getattr(elt, attr)
                         try:
                             value = value.strip() or None
                         except AttributeError:
                             value = None
                         setattr(elt, attr, value)
-            if (got_elt.tag != exp_elt.tag):
+            if got_elt.tag != exp_elt.tag:
                 print("XML are not equals (elt %s/%s):" % (got_elt, exp_elt))
                 print("tag: got [%s] expected: [%s]" % (got_elt.tag, exp_elt.tag))
                 return False
-            if (got_elt.attrib != exp_elt.attrib):
+            if got_elt.attrib != exp_elt.attrib:
                 print("XML are not equals (elt %s/%s):" % (got_elt, exp_elt))
                 print("attribs: got %s expected %s" % (got_elt.attrib, exp_elt.attrib))
                 return False
-            if (got_elt.tail != exp_elt.tail or got_elt.text != exp_elt.text):
+            if got_elt.tail != exp_elt.tail or got_elt.text != exp_elt.text:
                 print("XML are not equals (elt %s/%s):" % (got_elt, exp_elt))
                 print("text: got [%s] expected: [%s]" % (got_elt.text, exp_elt.text))
                 print("tail: got [%s] expected: [%s]" % (got_elt.tail, exp_elt.tail))
                 return False
-            if (len(got_elt) != len(exp_elt)):
+            if len(got_elt) != len(exp_elt):
                 print("XML are not equals (elt %s/%s):" % (got_elt, exp_elt))
                 print("children len: got %d expected: %d" % (len(got_elt), len(exp_elt)))
                 return False
@@ -454,17 +481,24 @@
             return True
 
         def remove_blank(xml):
-            lines = [line.strip() for line in re.sub(r'[ \t\r\f\v]+', ' ', xml).split('\n')]
-            return '\n'.join([line for line in lines if line])
+            lines = [
+                line.strip() for line in re.sub(r"[ \t\r\f\v]+", " ", xml).split("\n")
+            ]
+            return "\n".join([line for line in lines if line])
 
         xml_elt = etree.fromstring(remove_blank(xml) if ignore_blank else xml)
-        expected_elt = etree.fromstring(remove_blank(expected) if ignore_blank else expected)
+        expected_elt = etree.fromstring(
+            remove_blank(expected) if ignore_blank else expected
+        )
 
         if not equal_elt(xml_elt, expected_elt):
             print("---")
             print("XML are not equals:")
-            print("got:\n-\n%s\n-\n\n" % etree.tostring(xml_elt, encoding='utf-8'))
-            print("was expecting:\n-\n%s\n-\n\n" % etree.tostring(expected_elt, encoding='utf-8'))
+            print("got:\n-\n%s\n-\n\n" % etree.tostring(xml_elt, encoding="utf-8"))
+            print(
+                "was expecting:\n-\n%s\n-\n\n"
+                % etree.tostring(expected_elt, encoding="utf-8")
+            )
             print("---")
             raise DifferentXMLException
 
--- a/libervia/backend/test/helpers_plugins.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/helpers_plugins.py	Wed Jun 19 18:44:57 2024 +0200
@@ -294,7 +294,9 @@
     def subscriptions(self, service, nodeIdentifier, sender=None):
         return defer.succeed([])
 
-    def service_get_disco_items(self, service, nodeIdentifier, profile_key=C.PROF_KEY_NONE):
+    def service_get_disco_items(
+        self, service, nodeIdentifier, profile_key=C.PROF_KEY_NONE
+    ):
         items = DiscoItems()
         for item in list(self.__items.keys()):
             items.append(DiscoItem(service, item))
--- a/libervia/backend/test/test_core_xmpp.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/test_core_xmpp.py	Wed Jun 19 18:44:57 2024 +0200
@@ -30,7 +30,9 @@
 
     def setUp(self):
         self.host = helpers.FakeSAT()
-        self.client = xmpp.SatXMPPClient(self.host, Const.PROFILE[0], JID("test@example.org"), "test")
+        self.client = xmpp.SatXMPPClient(
+            self.host, Const.PROFILE[0], JID("test@example.org"), "test"
+        )
 
     def test_init(self):
         """Check that init values are correctly initialised"""
@@ -53,7 +55,15 @@
         </message>
         """
         stanza = parseXml(xml)
-        self.host.bridge.expect_call("message_new", "sender@example.net/house", "test", "chat", "test@example.org/SàT", {}, profile=Const.PROFILE[0])
+        self.host.bridge.expect_call(
+            "message_new",
+            "sender@example.net/house",
+            "test",
+            "chat",
+            "test@example.org/SàT",
+            {},
+            profile=Const.PROFILE[0],
+        )
         self.message.onMessage(stanza)
 
 
@@ -71,7 +81,13 @@
         roster_item.subscriptionFrom = True
         roster_item.ask = False
         roster_item.groups = set(["Test Group 1", "Test Group 2", "Test Group 3"])
-        self.host.bridge.expect_call("contact_new", Const.JID_STR[0], {'to': 'True', 'from': 'True', 'ask': 'False', 'name': 'Test Man'}, set(["Test Group 1", "Test Group 2", "Test Group 3"]), Const.PROFILE[0])
+        self.host.bridge.expect_call(
+            "contact_new",
+            Const.JID_STR[0],
+            {"to": "True", "from": "True", "ask": "False", "name": "Test Man"},
+            set(["Test Group 1", "Test Group 2", "Test Group 3"]),
+            Const.PROFILE[0],
+        )
         self.roster._register_item(roster_item)
 
 
@@ -83,29 +99,50 @@
         self.presence.parent = helpers.FakeClient(self.host)
 
     def test_availableReceived(self):
-        self.host.bridge.expect_call("presence_update", Const.JID_STR[0], "xa", 15, {'default': "test status", 'fr': 'statut de test'}, Const.PROFILE[0])
-        self.presence.availableReceived(Const.JID[0], 'xa', {None: "test status", 'fr': 'statut de test'}, 15)
+        self.host.bridge.expect_call(
+            "presence_update",
+            Const.JID_STR[0],
+            "xa",
+            15,
+            {"default": "test status", "fr": "statut de test"},
+            Const.PROFILE[0],
+        )
+        self.presence.availableReceived(
+            Const.JID[0], "xa", {None: "test status", "fr": "statut de test"}, 15
+        )
 
     def test_available_received_empty_statuses(self):
-        self.host.bridge.expect_call("presence_update", Const.JID_STR[0], "xa", 15, {}, Const.PROFILE[0])
-        self.presence.availableReceived(Const.JID[0], 'xa', None, 15)
+        self.host.bridge.expect_call(
+            "presence_update", Const.JID_STR[0], "xa", 15, {}, Const.PROFILE[0]
+        )
+        self.presence.availableReceived(Const.JID[0], "xa", None, 15)
 
     def test_unavailableReceived(self):
-        self.host.bridge.expect_call("presence_update", Const.JID_STR[0], "unavailable", 0, {}, Const.PROFILE[0])
+        self.host.bridge.expect_call(
+            "presence_update", Const.JID_STR[0], "unavailable", 0, {}, Const.PROFILE[0]
+        )
         self.presence.unavailableReceived(Const.JID[0], None)
 
     def test_subscribedReceived(self):
-        self.host.bridge.expect_call("subscribe", "subscribed", Const.JID[0].userhost(), Const.PROFILE[0])
+        self.host.bridge.expect_call(
+            "subscribe", "subscribed", Const.JID[0].userhost(), Const.PROFILE[0]
+        )
         self.presence.subscribedReceived(Const.JID[0])
 
     def test_unsubscribedReceived(self):
-        self.host.bridge.expect_call("subscribe", "unsubscribed", Const.JID[0].userhost(), Const.PROFILE[0])
+        self.host.bridge.expect_call(
+            "subscribe", "unsubscribed", Const.JID[0].userhost(), Const.PROFILE[0]
+        )
         self.presence.unsubscribedReceived(Const.JID[0])
 
     def test_subscribeReceived(self):
-        self.host.bridge.expect_call("subscribe", "subscribe", Const.JID[0].userhost(), Const.PROFILE[0])
+        self.host.bridge.expect_call(
+            "subscribe", "subscribe", Const.JID[0].userhost(), Const.PROFILE[0]
+        )
         self.presence.subscribeReceived(Const.JID[0])
 
     def test_unsubscribeReceived(self):
-        self.host.bridge.expect_call("subscribe", "unsubscribe", Const.JID[0].userhost(), Const.PROFILE[0])
+        self.host.bridge.expect_call(
+            "subscribe", "unsubscribe", Const.JID[0].userhost(), Const.PROFILE[0]
+        )
         self.presence.unsubscribeReceived(Const.JID[0])
--- a/libervia/backend/test/test_memory.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/test_memory.py	Wed Jun 19 18:44:57 2024 +0200
@@ -43,9 +43,9 @@
             """ % {
                 "param_name": name,
                 "param_label": _(name),
-                "security": ""
-                if security_level is None
-                else ('security="%d"' % security_level),
+                "security": (
+                    "" if security_level is None else ('security="%d"' % security_level)
+                ),
             }
 
         params = ""
@@ -198,21 +198,27 @@
         params = self._get_param_xml()
         self.host.memory.reinit()
         self.host.memory.update_params(params)
-        self._get_params(Const.NO_SECURITY_LIMIT).addCallback(self.assert_param_exists_async)
+        self._get_params(Const.NO_SECURITY_LIMIT).addCallback(
+            self.assert_param_exists_async
+        )
         self._get_params(0).addCallback(self.assert_param_not_exists_async)
         self._get_params(1).addCallback(self.assert_param_not_exists_async)
         # tests with security level 0 on the parameter (not secure)
         params = self._get_param_xml(security_level=0)
         self.host.memory.reinit()
         self.host.memory.update_params(params)
-        self._get_params(Const.NO_SECURITY_LIMIT).addCallback(self.assert_param_exists_async)
+        self._get_params(Const.NO_SECURITY_LIMIT).addCallback(
+            self.assert_param_exists_async
+        )
         self._get_params(0).addCallback(self.assert_param_exists_async)
         self._get_params(1).addCallback(self.assert_param_exists_async)
         # tests with security level 1 on the parameter (more secure)
         params = self._get_param_xml(security_level=1)
         self.host.memory.reinit()
         self.host.memory.update_params(params)
-        self._get_params(Const.NO_SECURITY_LIMIT).addCallback(self.assert_param_exists_async)
+        self._get_params(Const.NO_SECURITY_LIMIT).addCallback(
+            self.assert_param_exists_async
+        )
         self._get_params(0).addCallback(self.assert_param_not_exists_async)
         return self._get_params(1).addCallback(self.assert_param_exists_async)
 
@@ -304,7 +310,9 @@
         self._get_params(5, "").addCallback(self.assert_param_exists_async)
         self._get_params(5, "").addCallback(self.assert_param_exists_async, "2")
         self._get_params(5, Const.APP_NAME).addCallback(self.assert_param_exists_async)
-        self._get_params(5, Const.APP_NAME).addCallback(self.assert_param_exists_async, "2")
+        self._get_params(5, Const.APP_NAME).addCallback(
+            self.assert_param_exists_async, "2"
+        )
         self._get_params(5, "another_dummy_frontend").addCallback(
             self.assert_param_not_exists_async
         )
--- a/libervia/backend/test/test_plugin_misc_groupblog.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/test_plugin_misc_groupblog.py	Wed Jun 19 18:44:57 2024 +0200
@@ -155,7 +155,9 @@
         self.host = helpers.FakeSAT()
         self.host.plugins["XEP-0060"] = plugin_xep_0060.XEP_0060(self.host)
         self.host.plugins["XEP-0163"] = plugin_xep_0163.XEP_0163(self.host)
-        importlib.reload(plugin_misc_text_syntaxes)  # reload the plugin to avoid conflict error
+        importlib.reload(
+            plugin_misc_text_syntaxes
+        )  # reload the plugin to avoid conflict error
         self.host.plugins["TEXT_SYNTAXES"] = plugin_misc_text_syntaxes.TextSyntaxes(
             self.host
         )
@@ -189,9 +191,7 @@
         d = self.psclient.items(SERVICE, NODE_ID)
         d.addCallback(lambda items: self.assertEqual(len(items), 0))
         d.addCallback(
-            lambda __: self.plugin.sendGroupBlog(
-                "PUBLIC", [], "test", {}, C.PROFILE[0]
-            )
+            lambda __: self.plugin.sendGroupBlog("PUBLIC", [], "test", {}, C.PROFILE[0])
         )
         d.addCallback(lambda __: self.psclient.items(SERVICE, NODE_ID))
         return d.addCallback(lambda items: self.assertEqual(len(items), 1))
@@ -484,9 +484,7 @@
         d.addCallback(lambda __: self.psclient.items(SERVICE, COMMENTS_NODE_ID_1))
         d.addCallback(lambda items: self.assertEqual(len(items), 2))
 
-        d.addCallback(
-            lambda __: self.psclient.publish(SERVICE, OTHER_NODE_ID, [ITEM_2])
-        )
+        d.addCallback(lambda __: self.psclient.publish(SERVICE, OTHER_NODE_ID, [ITEM_2]))
         d.addCallback(
             lambda __: self.psclient.publish(
                 SERVICE, COMMENTS_NODE_ID_2, [COMMENT_1, COMMENT_2]
@@ -532,9 +530,7 @@
         d.addCallback(lambda __: self.psclient.items(SERVICE, COMMENTS_NODE_ID_1))
         d.addCallback(lambda items: self.assertEqual(len(items), 2))
 
-        d.addCallback(
-            lambda __: self.psclient.publish(SERVICE, OTHER_NODE_ID, [ITEM_2])
-        )
+        d.addCallback(lambda __: self.psclient.publish(SERVICE, OTHER_NODE_ID, [ITEM_2]))
         d.addCallback(
             lambda __: self.psclient.publish(
                 SERVICE, COMMENTS_NODE_ID_2, [COMMENT_1, COMMENT_2]
@@ -579,9 +575,7 @@
         d.addCallback(lambda __: self.psclient.items(SERVICE, COMMENTS_NODE_ID_1))
         d.addCallback(lambda items: self.assertEqual(len(items), 2))
 
-        d.addCallback(
-            lambda __: self.psclient.publish(SERVICE, OTHER_NODE_ID, [ITEM_2])
-        )
+        d.addCallback(lambda __: self.psclient.publish(SERVICE, OTHER_NODE_ID, [ITEM_2]))
         d.addCallback(
             lambda __: self.psclient.publish(
                 SERVICE, COMMENTS_NODE_ID_2, [COMMENT_1, COMMENT_2]
@@ -598,9 +592,7 @@
             ]
             return res
 
-        d.addCallback(
-            lambda __: self.plugin.deleteAllGroupBlogsAndComments(C.PROFILE[0])
-        )
+        d.addCallback(lambda __: self.plugin.deleteAllGroupBlogsAndComments(C.PROFILE[0]))
         d.addCallback(clean)
 
         d.addCallback(lambda __: self.psclient.items(SERVICE, NODE_ID))
--- a/libervia/backend/test/test_plugin_misc_radiocol.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/test_plugin_misc_radiocol.py	Wed Jun 19 18:44:57 2024 +0200
@@ -169,9 +169,9 @@
         sent = self.host.get_sent_message(0)
         attrs["sender"] = self.plugin_0045.get_nick(0, profile_index)
         radiocol_elt = next(domish.generateElementsNamed(sent.elements(), "radiocol"))
-        preload_elt = next(domish.generateElementsNamed(
-            radiocol_elt.elements(), "preload"
-        ))
+        preload_elt = next(
+            domish.generateElementsNamed(radiocol_elt.elements(), "preload")
+        )
         attrs["timestamp"] = preload_elt["timestamp"]  # we could not guess it...
         content = "<preload xmlns='' %s/>" % " ".join(
             ["%s='%s'" % (attr, attrs[attr]) for attr in attrs]
@@ -347,7 +347,9 @@
                     )
                 elif elt.name == "no_upload":
                     self.host.bridge.expect_call(
-                        "radiocol_no_upload", ROOM_JID.full(), Const.PROFILE[profile_index]
+                        "radiocol_no_upload",
+                        ROOM_JID.full(),
+                        Const.PROFILE[profile_index],
                     )
             sync_data[nick]
             self._room_game_cmd(sent, [])
--- a/libervia/backend/test/test_plugin_misc_room_game.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/test_plugin_misc_room_game.py	Wed Jun 19 18:44:57 2024 +0200
@@ -91,7 +91,9 @@
     def test_create_or_invite_solo(self):
         self.reinit()
         self.plugin_0045.join_room(0, 0)
-        self.plugin._create_or_invite(self.plugin_0045.get_room(0, 0), [], Const.PROFILE[0])
+        self.plugin._create_or_invite(
+            self.plugin_0045.get_room(0, 0), [], Const.PROFILE[0]
+        )
         self.assertTrue(self.plugin._game_exists(ROOM_JID, True))
 
     def test_create_or_invite_multi_not_waiting(self):
@@ -261,7 +263,9 @@
         self.reinit()
         self.init_game(0, 0)
         self.assertTrue(self.plugin.is_referee(ROOM_JID, self.plugin_0045.get_nick(0, 0)))
-        self.assertFalse(self.plugin.is_referee(ROOM_JID, self.plugin_0045.get_nick(0, 1)))
+        self.assertFalse(
+            self.plugin.is_referee(ROOM_JID, self.plugin_0045.get_nick(0, 1))
+        )
 
     def test_is_player(self):
         self.reinit()
@@ -276,7 +280,8 @@
         def check(value, other_players, confirmed, rest):
             room = self.plugin_0045.get_room(0, 0)
             self.assertEqual(
-                (value, confirmed, rest), self.plugin._check_wait_auth(room, other_players)
+                (value, confirmed, rest),
+                self.plugin._check_wait_auth(room, other_players),
             )
 
         self.reinit()
@@ -407,7 +412,8 @@
         # but Const.JID[3] is actually not in the room
         self.assertEqual(self.plugin_0045.get_nick_of_user(0, 3, 0), None)
         self.assertEqual(
-            (True, False), self.plugin._check_create_game_and_init(ROOM_JID, Const.PROFILE[0])
+            (True, False),
+            self.plugin._check_create_game_and_init(ROOM_JID, Const.PROFILE[0]),
         )
 
     def test_prepare_room_score_2(self):
@@ -442,7 +448,9 @@
         # wrong profile
         user_nick = self.plugin_0045.join_room(0, 1)
         room = self.plugin_0045.get_room(0, 1)
-        self.plugin.user_joined_trigger(room, User(user_nick, Const.JID[1]), OTHER_PROFILE)
+        self.plugin.user_joined_trigger(
+            room, User(user_nick, Const.JID[1]), OTHER_PROFILE
+        )
         self.assertEqual(
             self.host.get_sent_message(0), None
         )  # no new message has been sent
@@ -589,13 +597,15 @@
 
         helpers.mute_logging()
         self.assertEqual(
-            (False, False), self.plugin._check_create_game_and_init(ROOM_JID, OTHER_PROFILE)
+            (False, False),
+            self.plugin._check_create_game_and_init(ROOM_JID, OTHER_PROFILE),
         )
         helpers.unmute_logging()
 
         self.plugin_0045.join_room(0, 1)
         self.assertEqual(
-            (False, False), self.plugin._check_create_game_and_init(ROOM_JID, OTHER_PROFILE)
+            (False, False),
+            self.plugin._check_create_game_and_init(ROOM_JID, OTHER_PROFILE),
         )
 
         self.plugin.create_game(ROOM_JID, [Const.JID[1]], PROFILE)
@@ -603,7 +613,8 @@
             (False, True), self.plugin._check_create_game_and_init(ROOM_JID, PROFILE)
         )
         self.assertEqual(
-            (False, False), self.plugin._check_create_game_and_init(ROOM_JID, OTHER_PROFILE)
+            (False, False),
+            self.plugin._check_create_game_and_init(ROOM_JID, OTHER_PROFILE),
         )
 
     def test_create_game(self):
--- a/libervia/backend/test/test_plugin_misc_text_syntaxes.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/test_plugin_misc_text_syntaxes.py	Wed Jun 19 18:44:57 2024 +0200
@@ -59,7 +59,9 @@
 
     def setUp(self):
         self.host = helpers.FakeSAT()
-        importlib.reload(plugin_misc_text_syntaxes)  # reload the plugin to avoid conflict error
+        importlib.reload(
+            plugin_misc_text_syntaxes
+        )  # reload the plugin to avoid conflict error
         self.text_syntaxes = plugin_misc_text_syntaxes.TextSyntaxes(self.host)
 
     def test_xhtml_sanitise(self):
--- a/libervia/backend/test/test_plugin_xep_0033.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/test_plugin_xep_0033.py	Wed Jun 19 18:44:57 2024 +0200
@@ -141,7 +141,9 @@
             to_jid = JID(to_s)
             host = JID(to_jid.host)
             helpers.mute_logging()
-            d = self.host.find_features_set([plugin.NS_ADDRESS], jid_=host, profile=PROFILE)
+            d = self.host.find_features_set(
+                [plugin.NS_ADDRESS], jid_=host, profile=PROFILE
+            )
             d.addCallback(cb, to_jid)
             d_list.append(d)
 
--- a/libervia/backend/test/test_plugin_xep_0277.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/test_plugin_xep_0277.py	Wed Jun 19 18:44:57 2024 +0200
@@ -82,7 +82,9 @@
 
         self.host.plugins["XEP-0060"] = plugin_xep_0060.XEP_0060(self.host)
         self.host.plugins["XEP-0163"] = XEP_0163(self.host)
-        importlib.reload(plugin_misc_text_syntaxes)  # reload the plugin to avoid conflict error
+        importlib.reload(
+            plugin_misc_text_syntaxes
+        )  # reload the plugin to avoid conflict error
         self.host.plugins["TEXT_SYNTAXES"] = plugin_misc_text_syntaxes.TextSyntaxes(
             self.host
         )
@@ -99,8 +101,8 @@
             "content_xhtml": "<div><p>contenu</p>texte sans balise<p>autre contenu</p></div>",
             "author": "test1@souliane.org",
         }
-        item_elt = (
-            next(ElementParser()(self.PUBSUB_ENTRY_1, namespace=NS_PUBSUB).elements())
+        item_elt = next(
+            ElementParser()(self.PUBSUB_ENTRY_1, namespace=NS_PUBSUB).elements()
         )
         d = self.plugin.item2mbdata(item_elt)
         d.addCallback(self.assertEqual, expected)
@@ -118,8 +120,8 @@
             "content_xhtml": "<div><p>contenu</p>texte dans balise<p>autre contenu</p></div>",
             "author": "test1@souliane.org",
         }
-        item_elt = (
-            next(ElementParser()(self.PUBSUB_ENTRY_2, namespace=NS_PUBSUB).elements())
+        item_elt = next(
+            ElementParser()(self.PUBSUB_ENTRY_2, namespace=NS_PUBSUB).elements()
         )
         d = self.plugin.item2mbdata(item_elt)
         d.addCallback(self.assertEqual, expected)
--- a/libervia/backend/test/test_plugin_xep_0334.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/test/test_plugin_xep_0334.py	Wed Jun 19 18:44:57 2024 +0200
@@ -60,7 +60,10 @@
             }
             treatments = defer.Deferred()
             self.plugin.messageSendTrigger(
-                self.host.get_client(C.PROFILE[0]), mess_data, defer.Deferred(), treatments
+                self.host.get_client(C.PROFILE[0]),
+                mess_data,
+                defer.Deferred(),
+                treatments,
             )
             if treatments.callbacks:  # the trigger added a callback
                 expected_xml = template_xml % ('<%s xmlns="urn:xmpp:hints"/>' % key)
--- a/libervia/backend/tools/async_trigger.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/async_trigger.py	Wed Jun 19 18:44:57 2024 +0200
@@ -26,14 +26,11 @@
 from . import utils
 from twisted.internet import defer
 
+
 class TriggerManager(sync_trigger.TriggerManager):
     """This is a TriggerManager with an new async_point method"""
 
-    async def async_point(
-        self,
-        point_name: str,
-        *args, **kwargs
-    ) -> bool:
+    async def async_point(self, point_name: str, *args, **kwargs) -> bool:
         """This put a trigger point with potentially async Deferred
 
         All the triggers for that point will be run
@@ -48,7 +45,7 @@
         if point_name not in self.__triggers:
             return True
 
-        can_cancel = not kwargs.pop('triggers_no_cancel', False)
+        can_cancel = not kwargs.pop("triggers_no_cancel", False)
 
         for __, trigger in self.__triggers[point_name]:
             try:
@@ -60,9 +57,7 @@
         return True
 
     async def async_return_point(
-        self,
-        point_name: str,
-        *args, **kwargs
+        self, point_name: str, *args, **kwargs
     ) -> Tuple[bool, Any]:
         """Async version of return_point"""
         if point_name not in self.__triggers:
@@ -76,4 +71,3 @@
             except sync_trigger.SkipOtherTriggers:
                 break
         return True, None
-
--- a/libervia/backend/tools/common/async_process.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/async_process.py	Wed Jun 19 18:44:57 2024 +0200
@@ -26,11 +26,13 @@
 from libervia.backend.core.i18n import _
 from libervia.backend.core import exceptions
 from libervia.backend.core.log import getLogger
+
 log = getLogger(__name__)
 
 
 class CommandProtocol(protocol.ProcessProtocol):
     """handle an external command"""
+
     # name of the command (unicode)
     name = None
     # full path to the command (bytes)
@@ -47,8 +49,8 @@
         self._deferred = deferred
         self.data = []
         self.err_data = []
-        self.cmd_args: list[str]|None = None
-        self.cmd_kwargs: dict[str, Any]|None = None
+        self.cmd_args: list[str] | None = None
+        self.cmd_kwargs: dict[str, Any] | None = None
 
     @property
     def command_name(self):
@@ -56,10 +58,11 @@
         if self.name is not None:
             return self.name
         elif self.command is not None:
-            return os.path.splitext(os.path.basename(self.command))[0].decode('utf-8',
-                                                                              'ignore')
+            return os.path.splitext(os.path.basename(self.command))[0].decode(
+                "utf-8", "ignore"
+            )
         else:
-            return ''
+            return ""
 
     def connectionMade(self):
         if self._stdin is not None:
@@ -68,48 +71,45 @@
 
     def outReceived(self, data):
         if self.log:
-            log.info(data.decode('utf-8', 'replace'))
+            log.info(data.decode("utf-8", "replace"))
         self.data.append(data)
 
     def errReceived(self, data):
         if self.log:
-            log.warning(data.decode('utf-8', 'replace'))
+            log.warning(data.decode("utf-8", "replace"))
         self.err_data.append(data)
 
     def processEnded(self, reason):
-        data = b''.join(self.data)
-        if (reason.value.exitCode == 0):
-            log.debug(f'{self.command_name!r} command succeed')
+        data = b"".join(self.data)
+        if reason.value.exitCode == 0:
+            log.debug(f"{self.command_name!r} command succeeded")
             # we don't use "replace" on purpose, we want an exception if decoding
             # is not working properly
             self._deferred.callback(data)
         else:
-            err_data = b''.join(self.err_data)
+            err_data = b"".join(self.err_data)
 
             assert self.cmd_args is not None
             assert self.cmd_kwargs is not None
-            msg = (
-                _(
-                    "Can't complete {name} command (error code: {code}):\n"
-                    "Executed command: {command}\n"
-                    "Keyword arguments:\n"
-                    "{command_kw}\n\n"
-                    "stderr:\n{stderr}\n{stdout}\n"
-                )
-                .format(
-                    name = self.command_name,
-                    code = reason.value.exitCode,
-                    command = " ".join(self.cmd_args),
-                    command_kw = "\n".join(
-                        f"    - {k} = {v!r}" for k,v in self.cmd_kwargs.items()
-                    ),
-                    stderr= err_data.decode(errors='replace'),
-                    stdout = "stdout: " + data.decode(errors='replace')
-                    if data else '',
-                )
+            msg = _(
+                "Can't complete {name} command (error code: {code}):\n"
+                "Executed command: {command}\n"
+                "Keyword arguments:\n"
+                "{command_kw}\n\n"
+                "stderr:\n{stderr}\n{stdout}\n"
+            ).format(
+                name=self.command_name,
+                code=reason.value.exitCode,
+                command=" ".join(self.cmd_args),
+                command_kw="\n".join(
+                    f"    - {k} = {v!r}" for k, v in self.cmd_kwargs.items()
+                ),
+                stderr=err_data.decode(errors="replace"),
+                stdout="stdout: " + data.decode(errors="replace") if data else "",
             )
-            self._deferred.errback(Failure(exceptions.CommandException(
-                msg, data, err_data)))
+            self._deferred.errback(
+                Failure(exceptions.CommandException(msg, data, err_data))
+            )
 
     @classmethod
     def run(cls, *args, **kwargs):
@@ -127,10 +127,10 @@
             stdin and stdout will be given as arguments
 
         """
-        stdin = kwargs.pop('stdin', None)
+        stdin = kwargs.pop("stdin", None)
         if stdin is not None:
-            stdin = stdin.encode('utf-8')
-        verbose = kwargs.pop('verbose', False)
+            stdin = stdin.encode("utf-8")
+        verbose = kwargs.pop("verbose", False)
         args = list(args)
         d = defer.Deferred()
         prot = cls(d, stdin=stdin)
@@ -140,7 +140,8 @@
             if not args:
                 raise ValueError(
                     "You must either specify cls.command or use a full path to command "
-                    "to execute as first argument")
+                    "to execute as first argument"
+                )
             command = args.pop(0)
             if prot.name is None:
                 name = os.path.splitext(os.path.basename(command))[0]
@@ -155,10 +156,7 @@
             # FIXME: `None` doesn't seem to work, despite what documentation says, to be
             #    checked and reported upstream if confirmed.
             kwargs["env"] = os.environ
-        reactor.spawnProcess(prot,
-                             command,
-                             cmd_args,
-                             **kwargs)
+        reactor.spawnProcess(prot, command, cmd_args, **kwargs)
         return d
 
 
--- a/libervia/backend/tools/common/async_utils.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/async_utils.py	Wed Jun 19 18:44:57 2024 +0200
@@ -30,12 +30,14 @@
 def async_lru(maxsize: Optional[int] = 50) -> Callable:
     """Decorator to cache async function results using LRU algorithm
 
-        @param maxsize: maximum number of items to keep in cache.
-            None to have no limit
+    @param maxsize: maximum number of items to keep in cache.
+        None to have no limit
 
     """
+
     def decorator(func: Callable) -> Callable:
         cache = OrderedDict()
+
         async def wrapper(*args) -> Awaitable:
             if args in cache:
                 log.debug(f"using result in cache for {args}")
@@ -49,5 +51,7 @@
                 value = cache.popitem(False)
                 log.debug(f"Removing LRU value: {value}")
             return result
+
         return wrapper
+
     return decorator
--- a/libervia/backend/tools/common/data_format.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/data_format.py	Wed Jun 19 18:44:57 2024 +0200
@@ -138,9 +138,7 @@
 
 
 def deserialise(
-    serialised_data: str,
-    default: Any = None,
-    type_check: type = dict
+    serialised_data: str, default: Any = None, type_check: type = dict
 ) -> Any:
     """Deserialize data from bridge
 
@@ -154,6 +152,9 @@
         return default
     ret = json.loads(serialised_data)
     if type_check is not None and not isinstance(ret, type_check):
-        raise ValueError("Bad data type, was expecting {type_check}, got {real_type}"
-            .format(type_check=type_check, real_type=type(ret)))
+        raise ValueError(
+            "Bad data type, was expecting {type_check}, got {real_type}".format(
+                type_check=type_check, real_type=type(ret)
+            )
+        )
     return ret
--- a/libervia/backend/tools/common/data_objects.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/data_objects.py	Wed Jun 19 18:44:57 2024 +0200
@@ -55,12 +55,10 @@
                 if "url" not in attachment:
                     try:
                         attachment["url"] = next(
-                            s['url'] for s in attachment["sources"] if 'url' in s
+                            s["url"] for s in attachment["sources"] if "url" in s
                         )
                     except (StopIteration, KeyError):
-                        log.warning(
-                            f"An attachment has no URL: {attachment}"
-                        )
+                        log.warning(f"An attachment has no URL: {attachment}")
 
     @property
     def id(self):
@@ -126,7 +124,6 @@
         return self.extra.get("attachments", [])
 
 
-
 class Messages(object):
     def __init__(self, msgs_data):
         self.messages = [Message(m) for m in msgs_data]
@@ -166,7 +163,7 @@
     @property
     def avatar_basename(self):
         try:
-            return basename(self.data['avatar']['path'])
+            return basename(self.data["avatar"]["path"])
         except (TypeError, KeyError):
             return None
 
--- a/libervia/backend/tools/common/date_utils.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/date_utils.py	Wed Jun 19 18:44:57 2024 +0200
@@ -41,7 +41,7 @@
     r"\s*(?P<quantity>\d+)\s*"
     r"(?P<unit>(second|sec|s|minute|min|month|mo|m|hour|hr|h|day|d|week|w|year|yr|y))s?"
     r"(?P<ago>\s+ago)?\s*",
-    re.I
+    re.I,
 )
 TIME_SYMBOL_MAP = {
     "s": "second",
@@ -72,9 +72,7 @@
     dayfirst = False if YEAR_FIRST_RE.match(value) else True
 
     try:
-        dt = default_tzinfo(
-            parser.parse(value, dayfirst=dayfirst),
-            default_tz)
+        dt = default_tzinfo(parser.parse(value, dayfirst=dayfirst), default_tz)
     except ParserError as e:
         if value == "now":
             dt = datetime.datetime.now(tz.tzutc())
@@ -86,7 +84,8 @@
                 raise e
     return calendar.timegm(dt.utctimetuple())
 
-def date_parse_ext(value: str, default_tz: datetime.tzinfo=TZ_UTC) -> float:
+
+def date_parse_ext(value: str, default_tz: datetime.tzinfo = TZ_UTC) -> float:
     """Extended date parse which accept relative date
 
     @param value: date to parse, in any format supported by parser
@@ -103,10 +102,9 @@
         return date_parse(value, default_tz=default_tz)
 
     if sum(1 for g in ("direction", "in", "ago") if m.group(g)) > 1:
-        raise ValueError(
-            _('You can use only one of direction (+ or -), "in" and "ago"'))
+        raise ValueError(_('You can use only one of direction (+ or -), "in" and "ago"'))
 
-    if m.group("direction") == '-' or m.group("ago"):
+    if m.group("direction") == "-" or m.group("ago"):
         direction = -1
     else:
         direction = 1
@@ -146,7 +144,7 @@
     auto_old_fmt: str = "short",
     auto_new_fmt: str = "relative",
     locale_str: str = C.DEFAULT_LOCALE,
-    tz_info: Union[datetime.tzinfo, str] = TZ_UTC
+    tz_info: Union[datetime.tzinfo, str] = TZ_UTC,
 ) -> str:
     """Format date according to locale
 
@@ -186,8 +184,9 @@
         if auto_limit == 0:
             now = datetime.datetime.now(tz_info)
             # we want to use given tz_info, so we don't use date() or today()
-            today = datetime.datetime(year=now.year, month=now.month, day=now.day,
-                                      tzinfo=now.tzinfo)
+            today = datetime.datetime(
+                year=now.year, month=now.month, day=now.day, tzinfo=now.tzinfo
+            )
             today = calendar.timegm(today.utctimetuple())
             if timestamp < today:
                 fmt = auto_old_fmt
@@ -210,8 +209,9 @@
             dt = datetime.datetime.fromtimestamp(timestamp, tz_info)
             return dates.format_date(dt, format=fmt, locale=locale_str)
         else:
-            return dates.format_datetime(timestamp, format=fmt, locale=locale_str,
-                                        tzinfo=tz_info)
+            return dates.format_datetime(
+                timestamp, format=fmt, locale=locale_str, tzinfo=tz_info
+            )
     elif fmt == "iso":
         if date_only:
             fmt = "yyyy-MM-dd"
@@ -219,8 +219,9 @@
             fmt = "yyyy-MM-ddTHH:mm:ss'Z'"
         return dates.format_datetime(timestamp, format=fmt)
     else:
-        return dates.format_datetime(timestamp, format=fmt, locale=locale_str,
-                                     tzinfo=tz_info)
+        return dates.format_datetime(
+            timestamp, format=fmt, locale=locale_str, tzinfo=tz_info
+        )
 
 
 def delta2human(start_ts: Union[float, int], end_ts: Union[float, int]) -> str:
@@ -234,8 +235,7 @@
             "end timestamp must be bigger or equal to start timestamp !"
         )
     rd = relativedelta(
-        datetime.datetime.fromtimestamp(end_ts),
-        datetime.datetime.fromtimestamp(start_ts)
+        datetime.datetime.fromtimestamp(end_ts), datetime.datetime.fromtimestamp(start_ts)
     )
     text_elems = []
     for unit in ("years", "months", "days", "hours", "minutes"):
--- a/libervia/backend/tools/common/email.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/email.py	Wed Jun 19 18:44:57 2024 +0200
@@ -62,8 +62,9 @@
     email_username = tools_config.config_get(config, None, "email_username")
     email_password = tools_config.config_get(config, None, "email_password")
     email_auth = C.bool(tools_config.config_get(config, None, "email_auth", C.BOOL_FALSE))
-    email_starttls = C.bool(tools_config.config_get(config, None, "email_starttls",
-                            C.BOOL_FALSE))
+    email_starttls = C.bool(
+        tools_config.config_get(config, None, "email_starttls", C.BOOL_FALSE)
+    )
 
     msg = MIMEText(body, "plain", "UTF-8")
     msg["Subject"] = subject
--- a/libervia/backend/tools/common/regex.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/regex.py	Wed Jun 19 18:44:57 2024 +0200
@@ -26,8 +26,8 @@
 path_escape_rev = {re.escape(v): k for k, v in path_escape.items()}
 path_escape = {re.escape(k): v for k, v in path_escape.items()}
 #  thanks to Martijn Pieters (https://stackoverflow.com/a/14693789)
-RE_ANSI_REMOVE = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
-RE_TEXT_URL = re.compile(r'[^a-zA-Z0-9,_]+')
+RE_ANSI_REMOVE = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
+RE_TEXT_URL = re.compile(r"[^a-zA-Z0-9,_]+")
 TEXT_MAX_LEN = 60
 # min lenght is currently deactivated
 TEXT_WORD_MIN_LENGHT = 0
@@ -84,12 +84,12 @@
     """Convert text to url-friendly one"""
     # we change special chars to ascii one,
     # trick found at https://stackoverflow.com/a/3194567
-    text = unicodedata.normalize('NFD', text).encode('ascii', 'ignore').decode('utf-8')
-    text = RE_TEXT_URL.sub(' ', text).lower()
-    text = '-'.join([t for t in text.split() if t and len(t)>=TEXT_WORD_MIN_LENGHT])
+    text = unicodedata.normalize("NFD", text).encode("ascii", "ignore").decode("utf-8")
+    text = RE_TEXT_URL.sub(" ", text).lower()
+    text = "-".join([t for t in text.split() if t and len(t) >= TEXT_WORD_MIN_LENGHT])
     while len(text) > TEXT_MAX_LEN:
-        if '-' in text:
-            text = text.rsplit('-', 1)[0]
+        if "-" in text:
+            text = text.rsplit("-", 1)[0]
         else:
             text = text[:TEXT_MAX_LEN]
     return text
--- a/libervia/backend/tools/common/template.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/template.py	Wed Jun 19 18:44:57 2024 +0200
@@ -72,7 +72,6 @@
     )
 
 
-
 HTML_EXT = ("html", "xhtml")
 RE_ATTR_ESCAPE = re.compile(r"[^a-z_-]")
 SITE_RESERVED_NAMES = ("sat",)
@@ -81,11 +80,12 @@
 BROWSER_DIR = "_browser"
 BROWSER_META_FILE = "browser_meta.json"
 
-TemplateData = namedtuple("TemplateData", ['site', 'theme', 'path'])
+TemplateData = namedtuple("TemplateData", ["site", "theme", "path"])
 
 
 class TemplateLoader(jinja2.BaseLoader):
     """A template loader which handle site, theme and absolute paths"""
+
     # TODO: list_templates should be implemented
 
     def __init__(self, sites_paths, sites_themes, trusted=False):
@@ -126,14 +126,14 @@
             except IndexError:
                 raise ValueError("incorrect site/theme in template")
             theme_data = template[1:theme_end]
-            theme_splitted = theme_data.split('/')
+            theme_splitted = theme_data.split("/")
             if len(theme_splitted) == 1:
                 site, theme = "", theme_splitted[0]
             elif len(theme_splitted) == 2:
                 site, theme = theme_splitted
             else:
                 raise ValueError("incorrect site/theme in template")
-            template_path = template[theme_end+1:]
+            template_path = template[theme_end + 1 :]
             if not template_path or template_path.startswith("/"):
                 raise ValueError("incorrect template path")
         elif template.startswith("/"):
@@ -152,26 +152,32 @@
             if not site:
                 site = ""
             elif site in SITE_RESERVED_NAMES:
-                raise ValueError(_("{site} can't be used as site name, "
-                                   "it's reserved.").format(site=site))
+                raise ValueError(
+                    _("{site} can't be used as site name, " "it's reserved.").format(
+                        site=site
+                    )
+                )
 
         if theme is not None:
             theme = theme.strip()
             if not theme:
                 theme = C.TEMPLATE_THEME_DEFAULT
             if RE_TPL_RESERVED_CHARS.search(theme):
-                raise ValueError(_("{theme} contain forbidden char. Following chars "
-                                   "are forbidden: {reserved}").format(
-                                   theme=theme, reserved=TPL_RESERVED_CHARS))
+                raise ValueError(
+                    _(
+                        "{theme} contain forbidden char. Following chars "
+                        "are forbidden: {reserved}"
+                    ).format(theme=theme, reserved=TPL_RESERVED_CHARS)
+                )
 
         return TemplateData(site, theme, template_path)
 
     @staticmethod
     def get_sites_and_themes(
-            site: str,
-            theme: str,
-            settings: Optional[dict] = None,
-        ) -> List[Tuple[str, str]]:
+        site: str,
+        theme: str,
+        settings: Optional[dict] = None,
+    ) -> List[Tuple[str, str]]:
         """Get sites and themes to check for template/file
 
         Will add default theme and default site in search list when suitable. Settings'
@@ -208,22 +214,24 @@
         """
         if site is None:
             raise exceptions.InternalError(
-                "_get_template_f must not be used with absolute path")
-        settings = self.sites_themes[site][theme]['settings']
+                "_get_template_f must not be used with absolute path"
+            )
+        settings = self.sites_themes[site][theme]["settings"]
         for site_to_check, theme_to_check in self.get_sites_and_themes(
-                site, theme, settings):
+            site, theme, settings
+        ):
             try:
                 base_path = self.sites_paths[site_to_check]
             except KeyError:
-                log.warning(_("Unregistered site requested: {site_to_check}").format(
-                    site_to_check=site_to_check))
+                log.warning(
+                    _("Unregistered site requested: {site_to_check}").format(
+                        site_to_check=site_to_check
+                    )
+                )
             filepath = os.path.join(
-                base_path,
-                C.TEMPLATE_TPL_DIR,
-                theme_to_check,
-                *path_elts
+                base_path, C.TEMPLATE_TPL_DIR, theme_to_check, *path_elts
             )
-            f = utils.open_if_exists(filepath, 'r')
+            f = utils.open_if_exists(filepath, "r")
             if f is not None:
                 return f, filepath
         return None, None
@@ -241,14 +249,19 @@
         if site is None:
             # we have an abolute template
             if theme is not None:
-                raise exceptions.InternalError("We can't have a theme with absolute "
-                                               "template.")
+                raise exceptions.InternalError(
+                    "We can't have a theme with absolute " "template."
+                )
             if not self.trusted:
-                log.error(_("Absolute template used while unsecure is disabled, hack "
-                            "attempt? Template: {template}").format(template=template))
+                log.error(
+                    _(
+                        "Absolute template used while unsecure is disabled, hack "
+                        "attempt? Template: {template}"
+                    ).format(template=template)
+                )
                 raise exceptions.PermissionError("absolute template is not allowed")
             filepath = template_path
-            f = utils.open_if_exists(filepath, 'r')
+            f = utils.open_if_exists(filepath, "r")
         else:
             # relative path, we have to deal with site and theme
             assert theme and template_path
@@ -257,12 +270,14 @@
             f, filepath = self._get_template_f(site, theme, path_elts)
 
         if f is None:
-            if (site is not None and path_elts[0] == "error"
-                and os.path.splitext(template_path)[1][1:] in HTML_EXT):
+            if (
+                site is not None
+                and path_elts[0] == "error"
+                and os.path.splitext(template_path)[1][1:] in HTML_EXT
+            ):
                 # if an HTML error is requested but doesn't exist, we try again
                 # with base error.
-                f, filepath = self._get_template_f(
-                    site, theme, ("error", "base.html"))
+                f, filepath = self._get_template_f(site, theme, ("error", "base.html"))
                 if f is None:
                     raise exceptions.InternalError("error/base.html should exist")
             else:
@@ -335,8 +350,9 @@
         for library, attribute in self.scripts:
             library_path = self.renderer.get_static_path(self.template_data, library)
             if library_path is None:
-                log.warning(_("Can't find {libary} javascript library").format(
-                    library=library))
+                log.warning(
+                    _("Can't find {library} javascript library").format(library=library)
+                )
                 continue
             path = self.renderer.get_front_url(library_path)
             scripts.append(tpl.format(src=quoteattr(path), attribute=attribute))
@@ -346,7 +362,7 @@
 class Environment(jinja2.Environment):
 
     def get_template(self, name, parent=None, globals=None):
-        if name[0] not in ('/', '('):
+        if name[0] not in ("/", "("):
             # if name is not an absolute path or a full template name (this happen on
             # extend or import during rendering), we convert it to a full template name.
             # This is needed to handle cache correctly when a base template is overriden.
@@ -355,7 +371,8 @@
             name = "({site}/{theme}){template}".format(
                 site=self._template_data.site,
                 theme=self._template_data.theme,
-                template=name)
+                template=name,
+            )
 
         return super(Environment, self).get_template(name, parent, globals)
 
@@ -380,8 +397,7 @@
         self.sites_paths = {
             "": os.path.dirname(sat_templates.__file__),
         }
-        self.sites_themes = {
-        }
+        self.sites_themes = {}
         conf = config.parse_main_conf()
         public_sites = config.config_get(conf, None, "sites_path_public_dict", {})
         sites_data = [public_sites]
@@ -392,14 +408,21 @@
             normalised = {}
             for name, path in sites.items():
                 if RE_TPL_RESERVED_CHARS.search(name):
-                    log.warning(_("Can't add \"{name}\" site, it contains forbidden "
-                                  "characters. Forbidden characters are {forbidden}.")
-                                .format(name=name, forbidden=TPL_RESERVED_CHARS))
+                    log.warning(
+                        _(
+                            'Can\'t add "{name}" site, it contains forbidden '
+                            "characters. Forbidden characters are {forbidden}."
+                        ).format(name=name, forbidden=TPL_RESERVED_CHARS)
+                    )
                     continue
                 path = os.path.expanduser(os.path.normpath(path))
                 if not path or not path.startswith("/"):
-                    log.warning(_("Can't add \"{name}\" site, it should map to an "
-                                  "absolute path").format(name=name))
+                    log.warning(
+                        _(
+                            'Can\'t add "{name}" site, it should map to an '
+                            "absolute path"
+                        ).format(name=name)
+                    )
                     continue
                 normalised[name] = path
             self.sites_paths.update(normalised)
@@ -411,20 +434,24 @@
                     continue
                 log.debug(f"theme found for {site or 'default site'}: {p.name}")
                 theme_data = self.sites_themes.setdefault(site, {})[p.name] = {
-                    'path': p,
-                    'settings': {}}
+                    "path": p,
+                    "settings": {},
+                }
                 theme_settings = p / "settings.json"
                 if theme_settings.is_file:
                     try:
                         with theme_settings.open() as f:
                             settings = json.load(f)
                     except Exception as e:
-                        log.warning(_(
-                            "Can't load theme settings at {path}: {e}").format(
-                            path=theme_settings, e=e))
+                        log.warning(
+                            _("Can't load theme settings at {path}: {e}").format(
+                                path=theme_settings, e=e
+                            )
+                        )
                     else:
                         log.debug(
-                            f"found settings for theme {p.name!r} at {theme_settings}")
+                            f"found settings for theme {p.name!r} at {theme_settings}"
+                        )
                         fallback = settings.get("fallback")
                         if fallback is None:
                             settings["fallback"] = []
@@ -433,17 +460,17 @@
                         elif not isinstance(fallback, list):
                             raise ValueError(
                                 'incorrect type for "fallback" in settings '
-                                f'({type(fallback)}) at {theme_settings}: {fallback}'
+                                f"({type(fallback)}) at {theme_settings}: {fallback}"
                             )
-                        theme_data['settings'] = settings
+                        theme_data["settings"] = settings
                 browser_path = p / BROWSER_DIR
                 if browser_path.is_dir():
-                    theme_data['browser_path'] = browser_path
+                    theme_data["browser_path"] = browser_path
                 browser_meta_path = browser_path / BROWSER_META_FILE
                 if browser_meta_path.is_file():
                     try:
                         with browser_meta_path.open() as f:
-                            theme_data['browser_meta'] = json.load(f)
+                            theme_data["browser_meta"] = json.load(f)
                     except Exception as e:
                         log.error(
                             f"Can't parse browser metadata at {browser_meta_path}: {e}"
@@ -454,7 +481,7 @@
             loader=TemplateLoader(
                 sites_paths=self.sites_paths,
                 sites_themes=self.sites_themes,
-                trusted=trusted
+                trusted=trusted,
             ),
             autoescape=jinja2.select_autoescape(["html", "xhtml", "xml"]),
             trim_blocks=True,
@@ -482,8 +509,9 @@
         self.env.filters["adv_format"] = self._adv_format
         self.env.filters["dict_ext"] = self._dict_ext
         self.env.filters["highlight"] = self.highlight
-        self.env.filters["front_url"] = (self._front_url if front_url_filter is None
-                                         else front_url_filter)
+        self.env.filters["front_url"] = (
+            self._front_url if front_url_filter is None else front_url_filter
+        )
         self.env.filters["media_type_main"] = self.media_type_main
         self.env.filters["media_type_sub"] = self.media_type_sub
         # custom tests
@@ -495,7 +523,7 @@
         self.env.policies["json.dumps_kwargs"] = {
             "sort_keys": True,
             # if object can't be serialised, we use None
-            "default": lambda o: o.to_json() if hasattr(o, "to_json") else None
+            "default": lambda o: o.to_json() if hasattr(o, "to_json") else None,
         }
 
     def get_front_url(self, template_data, path=None):
@@ -505,15 +533,16 @@
         @param path(unicode, None): relative path of file to get,
             if set, will remplate template_data.path
         """
-        return self.env.filters["front_url"]({"template_data": template_data},
-                                path or template_data.path)
+        return self.env.filters["front_url"](
+            {"template_data": template_data}, path or template_data.path
+        )
 
     def install_translations(self):
         # TODO: support multi translation
         #       for now, only translations in sat_templates are handled
         self.translations = {}
         for site_key, site_path in self.sites_paths.items():
-            site_prefix = "[{}] ".format(site_key) if site_key else ''
+            site_prefix = "[{}] ".format(site_key) if site_key else ""
             i18n_dir = os.path.join(site_path, "i18n")
             for lang_dir in os.listdir(i18n_dir):
                 lang_path = os.path.join(i18n_dir, lang_dir)
@@ -532,14 +561,21 @@
                 except EnvironmentError:
                     log.error(
                         _("Can't find template translation at {path}").format(
-                            path=po_path))
+                            path=po_path
+                        )
+                    )
                 except UnknownLocaleError as e:
-                    log.error(_("{site}Invalid locale name: {msg}").format(
-                        site=site_prefix, msg=e))
+                    log.error(
+                        _("{site}Invalid locale name: {msg}").format(
+                            site=site_prefix, msg=e
+                        )
+                    )
                 else:
-                    log.info(_("{site}loaded {lang} templates translations").format(
-                        site = site_prefix,
-                        lang=lang_dir))
+                    log.info(
+                        _("{site}loaded {lang} templates translations").format(
+                            site=site_prefix, lang=lang_dir
+                        )
+                    )
 
         default_locale = Locale.parse(self._locale_str)
         if default_locale not in self.translations:
@@ -550,9 +586,9 @@
         self.env.install_null_translations(True)
         # we generate a tuple of locales ordered by display name that templates can access
         # through the "locales" variable
-        self.locales = tuple(sorted(list(self.translations.keys()),
-                                    key=lambda l: l.language_name.lower()))
-
+        self.locales = tuple(
+            sorted(list(self.translations.keys()), key=lambda l: l.language_name.lower())
+        )
 
     def set_locale(self, locale_str):
         """set current locale
@@ -601,7 +637,7 @@
         site, theme, __ = self.env.loader.parse_template(template)
         if site is None:
             # absolute template
-            return  "", os.path.dirname(template)
+            return "", os.path.dirname(template)
         try:
             site_root_dir = self.sites_paths[site]
         except KeyError:
@@ -615,11 +651,8 @@
             raise exceptions.NotFound(f"no theme found for {site_name}")
 
     def get_static_path(
-            self,
-            template_data: TemplateData,
-            filename: str,
-            settings: Optional[dict]=None
-        ) -> Optional[TemplateData]:
+        self, template_data: TemplateData, filename: str, settings: Optional[dict] = None
+    ) -> Optional[TemplateData]:
         """Retrieve path of a static file if it exists with current theme or default
 
         File will be looked at <site_root_dir>/<theme_dir>/<static_dir>/filename,
@@ -637,10 +670,10 @@
         """
         if template_data.site is None:
             # we have an absolue path
-            if (not template_data.theme is None
-                or not template_data.path.startswith('/')):
+            if not template_data.theme is None or not template_data.path.startswith("/"):
                 raise exceptions.InternalError(
-                    "invalid template data, was expecting absolute URL")
+                    "invalid template data, was expecting absolute URL"
+                )
             static_dir = os.path.dirname(template_data.path)
             file_path = os.path.join(static_dir, filename)
             if os.path.exists(file_path):
@@ -648,28 +681,28 @@
             else:
                 return None
 
-        sites_and_themes = TemplateLoader.get_sites_and_themes(template_data.site,
-                                                            template_data.theme,
-                                                            settings)
+        sites_and_themes = TemplateLoader.get_sites_and_themes(
+            template_data.site, template_data.theme, settings
+        )
         for site, theme in sites_and_themes:
             site_root_dir = self.sites_paths[site]
             relative_path = os.path.join(C.TEMPLATE_STATIC_DIR, filename)
-            absolute_path = os.path.join(site_root_dir, C.TEMPLATE_TPL_DIR,
-                                         theme, relative_path)
+            absolute_path = os.path.join(
+                site_root_dir, C.TEMPLATE_TPL_DIR, theme, relative_path
+            )
             if os.path.exists(absolute_path):
                 return TemplateData(site=site, theme=theme, path=relative_path)
 
         return None
 
     def _append_css_paths(
-            self,
-            template_data: TemplateData,
-            css_files: list,
-            css_files_noscript: list,
-            name_root: str,
-            settings: dict
-
-        ) -> None:
+        self,
+        template_data: TemplateData,
+        css_files: list,
+        css_files_noscript: list,
+        name_root: str,
+        settings: dict,
+    ) -> None:
         """Append found css to css_files and css_files_noscript
 
         @param css_files: list to fill of relative path to found css file
@@ -681,8 +714,7 @@
         if css_path is not None:
             css_files.append(self.get_front_url(css_path))
             noscript_name = name_root + "_noscript.css"
-            noscript_path = self.get_static_path(
-                template_data, noscript_name, settings)
+            noscript_path = self.get_static_path(template_data, noscript_name, settings)
             if noscript_path is not None:
                 css_files_noscript.append(self.get_front_url(noscript_path))
 
@@ -718,27 +750,29 @@
         # TODO: some caching would be nice
         css_files = []
         css_files_noscript = []
-        path_elems = template_data.path.split('/')
+        path_elems = template_data.path.split("/")
         path_elems[-1] = os.path.splitext(path_elems[-1])[0]
         site = template_data.site
         if site is None:
             # absolute path
             settings = {}
         else:
-            settings = self.sites_themes[site][template_data.theme]['settings']
+            settings = self.sites_themes[site][template_data.theme]["settings"]
 
-        css_path = self.get_static_path(template_data, 'fonts.css', settings)
+        css_path = self.get_static_path(template_data, "fonts.css", settings)
         if css_path is not None:
             css_files.append(self.get_front_url(css_path))
 
-        for name_root in ('styles', 'styles_extra', 'highlight'):
+        for name_root in ("styles", "styles_extra", "highlight"):
             self._append_css_paths(
-                template_data, css_files, css_files_noscript, name_root, settings)
+                template_data, css_files, css_files_noscript, name_root, settings
+            )
 
         for idx in range(len(path_elems)):
-            name_root = "_".join(path_elems[:idx+1])
+            name_root = "_".join(path_elems[: idx + 1])
             self._append_css_paths(
-                template_data, css_files, css_files_noscript, name_root, settings)
+                template_data, css_files, css_files_noscript, name_root, settings
+            )
 
         return css_files, css_files_noscript
 
@@ -750,17 +784,18 @@
 
         This default method return absolute full path
         """
-        template_data = ctx['template_data']
+        template_data = ctx["template_data"]
         if template_data.site is None:
             assert template_data.theme is None
             assert template_data.path.startswith("/")
             return os.path.join(os.path.dirname(template_data.path, relative_url))
 
         site_root_dir = self.sites_paths[template_data.site]
-        return os.path.join(site_root_dir, C.TEMPLATE_TPL_DIR, template_data.theme,
-                            relative_url)
+        return os.path.join(
+            site_root_dir, C.TEMPLATE_TPL_DIR, template_data.theme, relative_url
+        )
 
-    def _bare_jid(self, full_jid: str|jid.JID) -> str:
+    def _bare_jid(self, full_jid: str | jid.JID) -> str:
         """Return the bare JID"""
         return str(jid.JID(str(full_jid)).bare)
 
@@ -784,16 +819,21 @@
         auto_limit: int = 7,
         auto_old_fmt: str = "short",
         auto_new_fmt: str = "relative",
-        tz_name: Optional[str] = None
+        tz_name: Optional[str] = None,
     ) -> str:
         if is_undefined(fmt):
             fmt = "short"
 
         try:
             return date_utils.date_fmt(
-                timestamp, fmt, date_only, auto_limit, auto_old_fmt,
-                auto_new_fmt, locale_str = self._locale_str,
-                tz_info=tz_name or date_utils.TZ_UTC
+                timestamp,
+                fmt,
+                date_only,
+                auto_limit,
+                auto_old_fmt,
+                auto_new_fmt,
+                locale_str=self._locale_str,
+                tz_info=tz_name or date_utils.TZ_UTC,
             )
         except Exception as e:
             log.warning(_("Can't parse date: {msg}").format(msg=e))
@@ -986,30 +1026,39 @@
             '<svg class="svg-icon{cls}"{extra_attrs} xmlns="http://www.w3.org/2000/svg" '
             'viewBox="0 0 100 100">\n'
             '    <use href="#{name}"/>'
-            '</svg>\n'.format(
+            "</svg>\n".format(
                 name=name,
                 cls=(" " + cls) if cls else "",
-                extra_attrs=" " + extra_attrs if extra_attrs else ""
+                extra_attrs=" " + extra_attrs if extra_attrs else "",
             )
         )
 
     def _icon_from_client(self, client):
         """Get icon name to represent a disco client"""
         if client is None:
-            return 'desktop'
-        elif 'pc' in client:
-            return 'desktop'
-        elif 'phone' in client:
-            return 'mobile'
-        elif 'web' in client:
-            return 'globe'
-        elif 'console' in client:
-            return 'terminal'
+            return "desktop"
+        elif "pc" in client:
+            return "desktop"
+        elif "phone" in client:
+            return "mobile"
+        elif "web" in client:
+            return "globe"
+        elif "console" in client:
+            return "terminal"
         else:
-            return 'desktop'
+            return "desktop"
 
-    def render(self, template, site=None, theme=None, locale=C.DEFAULT_LOCALE,
-               media_path="", css_files=None, css_inline=False, **kwargs):
+    def render(
+        self,
+        template,
+        site=None,
+        theme=None,
+        locale=C.DEFAULT_LOCALE,
+        media_path="",
+        css_files=None,
+        css_inline=False,
+        **kwargs,
+    ):
         """Render a template
 
         @param template(unicode): template to render (e.g. blog/articles.html)
@@ -1031,7 +1080,7 @@
         if site is not None or theme is not None:
             # user wants to set site and/or theme, so we add it to the template path
             if site is None:
-                site = ''
+                site = ""
             if theme is None:
                 theme = C.TEMPLATE_THEME_DEFAULT
             if template[0] == "(":
@@ -1042,7 +1091,8 @@
 
             template_data = TemplateData(site, theme, template)
             template = "({site}/{theme}){template}".format(
-                site=site, theme=theme, template=template)
+                site=site, theme=theme, template=template
+            )
         else:
             template_data = self.env.loader.parse_template(template)
 
@@ -1066,8 +1116,7 @@
 
         if css_inline:
             css_contents = []
-            for files, suffix in ((css_files, ""),
-                                  (css_files_noscript, "_noscript")):
+            for files, suffix in ((css_files, ""), (css_files_noscript, "_noscript")):
                 site_root_dir = self.sites_paths[template_data.site]
                 for css_file in files:
                     css_file_path = os.path.join(site_root_dir, css_file)
@@ -1090,7 +1139,7 @@
             locales=self.locales,
             gidx=Indexer(),
             script=scripts_handler,
-            **kwargs
+            **kwargs,
         )
         self.env._template_data = None
         return rendered
--- a/libervia/backend/tools/common/template_xmlui.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/template_xmlui.py	Wed Jun 19 18:44:57 2024 +0200
@@ -26,6 +26,7 @@
 from libervia.backend.core.log import getLogger
 from libervia.frontends.tools import xmlui
 from libervia.frontends.tools import jid
+
 try:
     from jinja2 import Markup as safe
 except ImportError:
@@ -158,6 +159,7 @@
 class JidInputWidget(xmlui.JidInputWidget, StringWidget):
     type = "jid"
 
+
 class TextBoxWidget(xmlui.TextWidget, InputWidget):
     type = "textbox"
 
@@ -170,7 +172,8 @@
         #      This is normally done in the backend, the frontends should not need to
         #      worry about it.
         super(XHTMLBoxWidget, self).__init__(
-            xmlui_parent=xmlui_parent, value=safe(value), read_only=read_only)
+            xmlui_parent=xmlui_parent, value=safe(value), read_only=read_only
+        )
 
 
 class ListWidget(xmlui.ListWidget, OptionsWidget):
@@ -238,6 +241,7 @@
         raise NotImplementedError
 
 
-create = partial(xmlui.create, class_map={
-    xmlui.CLASS_PANEL: XMLUIPanel,
-    xmlui.CLASS_DIALOG: XMLUIDialog})
+create = partial(
+    xmlui.create,
+    class_map={xmlui.CLASS_PANEL: XMLUIPanel, xmlui.CLASS_DIALOG: XMLUIDialog},
+)
--- a/libervia/backend/tools/common/tls.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/tls.py	Wed Jun 19 18:44:57 2024 +0200
@@ -35,7 +35,7 @@
 
 def get_options_from_config(config, section=""):
     options = {}
-    for option in ('tls_certificate', 'tls_private_key', 'tls_chain'):
+    for option in ("tls_certificate", "tls_private_key", "tls_chain"):
         options[option] = tools_config.config_get(config, section, option)
     return options
 
@@ -47,7 +47,8 @@
     """
     if not options["tls_certificate"]:
         raise exceptions.ConfigError(
-            "a TLS certificate is needed to activate HTTPS connection")
+            "a TLS certificate is needed to activate HTTPS connection"
+        )
     if not options["tls_private_key"]:
         options["tls_private_key"] = options["tls_certificate"]
 
@@ -68,9 +69,7 @@
         buf.append(line)
         if "-----END CERTIFICATE-----" in line:
             certificates.append(
-                OpenSSL.crypto.load_certificate(
-                    OpenSSL.crypto.FILETYPE_PEM, "".join(buf)
-                )
+                OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, "".join(buf))
             )
             buf = []
         elif not line:
@@ -127,10 +126,7 @@
                 f"Error while parsing file {path} for option {option}, are you sure "
                 f"it is a valid .pem file?"
             )
-            if (
-                option == "tls_private_key"
-                and options["tls_certificate"] == path
-            ):
+            if option == "tls_private_key" and options["tls_certificate"] == path:
                 raise exceptions.ConfigError(
                     f"You are using the same file for private key and public "
                     f"certificate, make sure that both a in {path} or use "
--- a/libervia/backend/tools/common/utils.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/common/utils.py	Wed Jun 19 18:44:57 2024 +0200
@@ -19,6 +19,7 @@
 """Misc utils for both backend and frontends"""
 
 import collections.abc
+
 size_units = {
     "b": 1,
     "kb": 1000,
@@ -80,8 +81,10 @@
             ori[k] = v
     return ori
 
+
 class OrderedSet(collections.abc.MutableSet):
     """A mutable sequence which doesn't keep duplicates"""
+
     # TODO: complete missing set methods
 
     def __init__(self, values=None):
@@ -135,7 +138,8 @@
                     multiplier = size_units[symbol.lower()]
                 except KeyError:
                     raise ValueError(
-                        "unknown size multiplier symbole: {symbol!r} (size: {size!r})")
+                        f"unknown size multiplier symbol: {symbol!r} (size: {size!r})"
+                    )
                 else:
                     return number * multiplier
             return number
@@ -143,7 +147,7 @@
             raise ValueError(f"invalid size: {e}")
 
 
-def get_size_multiplier(size: int|str, suffix="o"):
+def get_size_multiplier(size: int | str, suffix="o"):
     """Get multiplier of a file size"""
     size = int(size)
     #  cf. https://stackoverflow.com/a/1094933 (thanks)
@@ -154,7 +158,7 @@
     return size, f"Yi{suffix}"
 
 
-def get_human_size(size: int|str, suffix: str="o", sep: str=" ") -> str:
+def get_human_size(size: int | str, suffix: str = "o", sep: str = " ") -> str:
     """Return data size in a human readable format."""
     size, symbol = get_size_multiplier(size, suffix)
     return f"{size:.2f}{sep}{symbol}"
--- a/libervia/backend/tools/config.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/config.py	Wed Jun 19 18:44:57 2024 +0200
@@ -70,9 +70,12 @@
     if not silent:
         if option in ("passphrase",):  # list here the options storing a password
             value = "******"
-        log.warning(_("Config auto-update: {option} set to {value} in the file "
-                      "{config_file}.").format(option=option, value=value,
-                                                config_file=target_file))
+        log.warning(
+            _(
+                "Config auto-update: {option} set to {value} in the file "
+                "{config_file}."
+            ).format(option=option, value=value, config_file=target_file)
+        )
 
 
 def parse_main_conf(log_filenames=False):
@@ -90,11 +93,11 @@
             if filenames:
                 log.info(
                     _("Configuration was read from: {filenames}").format(
-                        filenames=', '.join(filenames)))
+                        filenames=", ".join(filenames)
+                    )
+                )
             else:
-                log.warning(
-                    _("No configuration file found, using default settings")
-                )
+                log.warning(_("No configuration file found, using default settings"))
 
     return config
 
@@ -125,9 +128,9 @@
         value = os.path.expanduser(value)
     # thx to Brian (http://stackoverflow.com/questions/186857/splitting-a-semicolon-separated-string-to-a-dictionary-in-python/186873#186873)
     elif name.endswith("_list"):
-        value = next(csv.reader(
-            [value], delimiter=",", quotechar='"', skipinitialspace=True
-        ))
+        value = next(
+            csv.reader([value], delimiter=",", quotechar='"', skipinitialspace=True)
+        )
     elif name.endswith("_dict"):
         try:
             value = json.loads(value)
@@ -146,11 +149,7 @@
 
 
 def get_conf(
-    conf: ConfigParser,
-    prefix: str,
-    section: str,
-    name: str,
-    default: Any
+    conf: ConfigParser, prefix: str, section: str, name: str, default: Any
 ) -> Any:
     """Get configuration value from environment or config file
 
--- a/libervia/backend/tools/image.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/image.py	Wed Jun 19 18:44:57 2024 +0200
@@ -32,8 +32,7 @@
 try:
     import cairosvg
 except Exception as e:
-    log.warning(_("SVG support not available, please install cairosvg: {e}").format(
-        e=e))
+    log.warning(_("SVG support not available, please install cairosvg: {e}").format(e=e))
     cairosvg = None
 
 
@@ -55,16 +54,19 @@
     if max_size is None:
         max_size = tuple(host.memory.config_get(None, "image_max", (1200, 720)))
     if image.size > max_size:
-        report['too_large'] = True
+        report["too_large"] = True
         if image.size[0] > max_size[0]:
             factor = max_size[0] / image.size[0]
             if image.size[1] * factor > max_size[1]:
                 factor = max_size[1] / image.size[1]
         else:
             factor = max_size[1] / image.size[1]
-        report['recommended_size'] = [int(image.width*factor), int(image.height*factor)]
+        report["recommended_size"] = [
+            int(image.width * factor),
+            int(image.height * factor),
+        ]
     else:
-        report['too_large'] = False
+        report["too_large"] = False
 
     return report
 
@@ -79,7 +81,7 @@
     if dest is None:
         dest = tempfile.NamedTemporaryFile(suffix=im_path.suffix, delete=False)
     elif isinstance(dest, Path):
-        dest = dest.open('wb')
+        dest = dest.open("wb")
 
     with dest as f:
         resized.save(f, format=im.format)
@@ -102,7 +104,8 @@
         The image at this path should be deleted after use
     """
     return threads.deferToThread(
-        _resize_blocking, image_path, new_size, dest, fix_orientation)
+        _resize_blocking, image_path, new_size, dest, fix_orientation
+    )
 
 
 def _convert_blocking(image_path, dest, extra):
@@ -123,25 +126,26 @@
             try:
                 filepath = Path(name)
             except TypeError:
-                filepath = Path('noname.png')
+                filepath = Path("noname.png")
         else:
-            filepath = Path('noname.png')
+            filepath = Path("noname.png")
 
     if media_type == "image/svg+xml":
         if cairosvg is None:
             raise exceptions.MissingModule(
-                f"Can't convert SVG image at {image_path} due to missing CairoSVG module")
-        width, height = extra.get('width'), extra.get('height')
+                f"Can't convert SVG image at {image_path} due to missing CairoSVG module"
+            )
+        width, height = extra.get("width"), extra.get("height")
         cairosvg.svg2png(
-            url=str(image_path), write_to=dest,
-            output_width=width, output_height=height
+            url=str(image_path), write_to=dest, output_width=width, output_height=height
         )
     else:
         suffix = filepath.suffix
         if not suffix:
             raise ValueError(
                 "A suffix is missing for destination, it is needed to determine file "
-                "format")
+                "format"
+            )
         if not suffix in Image.EXTENSION:
             Image.init()
         try:
@@ -185,7 +189,7 @@
     im_format = im.format
     exif = im.getexif()
     orientation = exif.get(0x0112)
-    if orientation is None or orientation<2:
+    if orientation is None or orientation < 2:
         # nothing to do
         return False
     im = ImageOps.exif_transpose(im)
@@ -218,7 +222,7 @@
         if media_type is not None:
             return media_type
 
-    # file name is not enough, we try to open it
+    # file name is not enough, we try to open it
     img = Image.open(source)
     try:
         return Image.MIME[img.format]
--- a/libervia/backend/tools/sat_defer.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/sat_defer.py	Wed Jun 19 18:44:57 2024 +0200
@@ -38,7 +38,7 @@
 def stanza_2_not_found(failure_):
     """Convert item-not-found StanzaError to exceptions.NotFound"""
     failure_.trap(jabber_error.StanzaError)
-    if failure_.value.condition == 'item-not-found':
+    if failure_.value.condition == "item-not-found":
         raise exceptions.NotFound(failure_.value.text or failure_.value.condition)
     return failure_
 
--- a/libervia/backend/tools/stream.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/stream.py	Wed Jun 19 18:44:57 2024 +0200
@@ -61,7 +61,7 @@
         data_cb: Optional[Callable] = None,
         auto_end_signals: bool = True,
         check_size_with_read: bool = False,
-        pre_close_cb: Optional[Callable]=None
+        pre_close_cb: Optional[Callable] = None,
     ) -> None:
         """
         @param host: %(doc_host)s
--- a/libervia/backend/tools/trigger.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/trigger.py	Wed Jun 19 18:44:57 2024 +0200
@@ -33,7 +33,7 @@
 
 
 class SkipOtherTriggers(Exception):
-    """ Exception to raise if normal behaviour must be followed instead of following triggers list """
+    """Exception to raise if normal behaviour must be followed instead of following triggers list"""
 
     pass
 
@@ -62,7 +62,6 @@
             # plugins are always avaialble for normal clients
             return True
 
-
     def add(self, point_name, callback: Callable, priority=0):
         """Add a trigger to a point
 
@@ -89,11 +88,7 @@
         )
 
     def add_with_check(
-        self,
-        point_name: str,
-        plugin,
-        callback: Callable,
-        priority: int=0
+        self, point_name: str, plugin, callback: Callable, priority: int = 0
     ) -> None:
         """Like [Add], but check session before running the trigger
 
@@ -109,20 +104,24 @@
         @param priority: callback will be called in priority order, biggest first
         """
         if inspect.iscoroutinefunction(callback):
+
             async def async_wrapper(client: SatXMPPEntity, *args, **kwargs):
                 if client.is_component and plugin not in client.plugins:
                     log.debug(f"Ignoring {callback} as parent plugin is not available")
                     return True
                 else:
                     return await callback(client, *args, **kwargs)
+
             self.add(point_name, async_wrapper, priority)
         else:
+
             def sync_wrapper(client: SatXMPPEntity, *args, **kwargs):
                 if client.is_component and plugin not in client.plugins:
                     log.debug(f"Ignoring {callback} as parent plugin is not available")
                     return True
                 else:
                     return callback(client, *args, **kwargs)
+
             self.add(point_name, sync_wrapper, priority)
 
     def remove(self, point_name, callback):
@@ -152,7 +151,7 @@
         if point_name not in self.__triggers:
             return True
 
-        can_cancel = not kwargs.pop('triggers_no_cancel', False)
+        can_cancel = not kwargs.pop("triggers_no_cancel", False)
 
         for priority, trigger in self.__triggers[point_name]:
             try:
--- a/libervia/backend/tools/utils.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/utils.py	Wed Jun 19 18:44:57 2024 +0200
@@ -119,8 +119,10 @@
 
     Functions with this decorator are run in asyncio context
     """
+
     def wrapper(*args, **kwargs):
         return defer.Deferred.fromFuture(asyncio.ensure_future(func(*args, **kwargs)))
+
     return wrapper
 
 
@@ -134,14 +136,15 @@
     to be used when the function is called by third party library (e.g. wokkel)
     Otherwise, it's better to use ensureDeferred as early as possible.
     """
+
     def wrapper(*args, **kwargs):
         return defer.ensureDeferred(func(*args, **kwargs))
+
     return wrapper
 
 
 def xmpp_date(
-    timestamp: Optional[Union[float, int]] = None,
-    with_time: bool = True
+    timestamp: Optional[Union[float, int]] = None, with_time: bool = True
 ) -> str:
     """Return date according to XEP-0082 specification
 
@@ -153,20 +156,17 @@
     @return(unicode): XEP-0082 formatted date and time
     """
     dtime = datetime.datetime.fromtimestamp(
-        time.time() if timestamp is None else timestamp,
-        datetime.timezone.utc
+        time.time() if timestamp is None else timestamp, datetime.timezone.utc
     )
 
     return (
-        xmpp_datetime.format_datetime(dtime) if with_time
+        xmpp_datetime.format_datetime(dtime)
+        if with_time
         else xmpp_datetime.format_date(dtime.date())
     )
 
 
-def parse_xmpp_date(
-    xmpp_date_str: str,
-    with_time: bool = True
-) -> float:
+def parse_xmpp_date(xmpp_date_str: str, with_time: bool = True) -> float:
     """Get timestamp from XEP-0082 datetime
 
     @param xmpp_date_str: XEP-0082 formatted datetime or time
@@ -194,7 +194,10 @@
     random.seed()
     if vocabulary is None:
         vocabulary = [
-            chr(i) for i in list(range(0x30, 0x3A)) + list(range(0x41, 0x5B)) + list(range(0x61, 0x7B))
+            chr(i)
+            for i in list(range(0x30, 0x3A))
+            + list(range(0x41, 0x5B))
+            + list(range(0x61, 0x7B))
         ]
     return "".join([random.choice(vocabulary) for i in range(15)])
 
@@ -268,7 +271,7 @@
                     "{latesttag}\n"
                     "{latesttagdistance}",
                 ],
-                text=True
+                text=True,
             )
         except subprocess.CalledProcessError as e:
             log.error(f"Can't get repository data: {e}")
@@ -279,7 +282,9 @@
         else:
             hg_data = dict(list(zip(KEYS, hg_data_raw.split("\n"))))
             try:
-                hg_data["modified"] = "+" in subprocess.check_output(["python3", hg_path, "id", "-i"], text=True)
+                hg_data["modified"] = "+" in subprocess.check_output(
+                    ["python3", hg_path, "id", "-i"], text=True
+                )
             except subprocess.CalledProcessError:
                 pass
     else:
@@ -293,7 +298,7 @@
         else:
             os.chdir(os.path.abspath(os.path.dirname(repos_root)))
         try:
-            with open(".hg/dirstate", 'rb') as hg_dirstate:
+            with open(".hg/dirstate", "rb") as hg_dirstate:
                 hg_data["node"] = hg_dirstate.read(20).hex()
                 hg_data["node_short"] = hg_data["node"][:12]
         except IOError:
@@ -323,8 +328,7 @@
             else:
                 if version != C.APP_VERSION:
                     log.warning(
-                        "Incompatible version ({version}) and pkg_version ({pkg_version})"
-                        .format(
+                        "Incompatible version ({version}) and pkg_version ({pkg_version})".format(
                             version=C.APP_VERSION, pkg_version=pkg_version
                         )
                     )
--- a/libervia/backend/tools/video.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/video.py	Wed Jun 19 18:44:57 2024 +0200
@@ -29,14 +29,10 @@
 log = getLogger(__name__)
 
 
-
-
-
 try:
-    ffmpeg_path = which('ffmpeg')[0]
+    ffmpeg_path = which("ffmpeg")[0]
 except IndexError:
-    log.warning(_(
-        "ffmpeg executable not found, video thumbnails won't be available"))
+    log.warning(_("ffmpeg executable not found, video thumbnails won't be available"))
     ffmpeg_path = None
 
 
@@ -53,7 +49,8 @@
     """
     if ffmpeg_path is None:
         raise exceptions.NotFound(
-            _("ffmpeg executable is not available, can't generate video thumbnail"))
+            _("ffmpeg executable is not available, can't generate video thumbnail")
+        )
 
     await async_process.run(
         ffmpeg_path, "-i", str(video_path), "-ss", "10", "-frames:v", "1", str(dest_path)
--- a/libervia/backend/tools/web.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/web.py	Wed Jun 19 18:44:57 2024 +0200
@@ -75,9 +75,7 @@
 
 
 async def download_file(
-    url: str,
-    dest: Union[str, Path, BufferedIOBase],
-    max_size: Optional[int] = None
+    url: str, dest: Union[str, Path, BufferedIOBase], max_size: Optional[int] = None
 ) -> None:
     """Helper method to download a file
 
--- a/libervia/backend/tools/xml_tools.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/xml_tools.py	Wed Jun 19 18:44:57 2024 +0200
@@ -81,8 +81,9 @@
                 widget_kwargs["read_only"] = read_only
         else:
 
-            raise exceptions.DataError("unknown extended type {ext_type}".format(
-                ext_type = field.ext_type))
+            raise exceptions.DataError(
+                "unknown extended type {ext_type}".format(ext_type=field.ext_type)
+            )
 
     elif field.fieldType == "fixed" or field.fieldType is None:
         widget_type = "text"
@@ -130,7 +131,7 @@
             (option.value, option.label or option.value) for option in field.options
         ]
         widget_kwargs["selected"] = widget_args
-        widget_kwargs["styles"] =  ["multi"]
+        widget_kwargs["styles"] = ["multi"]
         widget_args = []
     else:
         log.error(
@@ -144,6 +145,7 @@
 
     return widget_type, widget_args, widget_kwargs
 
+
 def data_form_2_widgets(form_ui, form, read_only=False, prepend=None, filters=None):
     """Complete an existing XMLUI with widget converted from XEP-0004 data forms.
 
@@ -214,20 +216,18 @@
     see data_dict_2_data_form for a description of the format
     """
     fields = []
-    data_dict = {
-        "fields": fields
-    }
+    data_dict = {"fields": fields}
     if form.formNamespace:
         data_dict["namespace"] = form.formNamespace
     for form_field in form.fieldList:
         field = {"type": form_field.fieldType}
         fields.append(field)
         for src_name, dest_name in (
-            ('var', 'name'),
-            ('label', 'label'),
-            ('value', 'value'),
+            ("var", "name"),
+            ("label", "label"),
+            ("value", "value"),
             # FIXME: we probably should have only "values"
-            ('values', 'values')
+            ("values", "values"),
         ):
             value = getattr(form_field, src_name, None)
             if value:
@@ -242,8 +242,7 @@
 
         if form_field.fieldType is None and form_field.ext_type == "xml":
             if isinstance(form_field.value, domish.Element):
-                if ((form_field.value.uri == C.NS_XHTML
-                     and form_field.value.name == "div")):
+                if form_field.value.uri == C.NS_XHTML and form_field.value.name == "div":
                     field["type"] = "xhtml"
                     if form_field.value.children:
                         log.warning(
@@ -270,19 +269,21 @@
     # TODO: describe format
     fields = []
     for field_data in data_dict["fields"]:
-        field_type = field_data.get('type', 'text-single')
+        field_type = field_data.get("type", "text-single")
         kwargs = {
             "fieldType": field_type,
             "var": field_data["name"],
-            "label": field_data.get('label'),
+            "label": field_data.get("label"),
             "value": field_data.get("value"),
-            "required": field_data.get("required")
+            "required": field_data.get("required"),
         }
         if field_type == "xhtml":
-            kwargs.update({
-                "fieldType": None,
-                "ext_type": "xml",
-            })
+            kwargs.update(
+                {
+                    "fieldType": None,
+                    "ext_type": "xml",
+                }
+            )
             if kwargs["value"] is None:
                 kwargs["value"] = domish.Element((C.NS_XHTML, "div"))
         elif "options" in field_data:
@@ -292,11 +293,7 @@
             ]
         field = data_form.Field(**kwargs)
         fields.append(field)
-    return data_form.Form(
-        "form",
-        formNamespace=data_dict.get("namespace"),
-        fields=fields
-    )
+    return data_form.Form("form", formNamespace=data_dict.get("namespace"), fields=fields)
 
 
 def data_form_elt_result_2_xmlui_data(form_xml):
@@ -391,8 +388,9 @@
     return xml_ui
 
 
-def data_form_result_2_xmlui(result_form, base_form, session_id=None, prepend=None,
-                         filters=None, read_only=True):
+def data_form_result_2_xmlui(
+    result_form, base_form, session_id=None, prepend=None, filters=None, read_only=True
+):
     """Convert data form result to SàT XMLUI.
 
     @param result_form (data_form.Form): result form to convert
@@ -407,7 +405,7 @@
     # we deepcopy the form because _data_form_field_2_xmlui_data can modify the value
     # FIXME: check if it's really important, the only modified value seems to be
     #        the replacement of None by "" on fixed fields
-    # form = deepcopy(result_form)
+    # form = deepcopy(result_form)
     form = result_form
     for name, field in form.fields.items():
         try:
@@ -416,7 +414,9 @@
             continue
         field.options = base_field.options[:]
     xml_ui = XMLUI("window", "vertical", session_id=session_id)
-    data_form_2_widgets(xml_ui, form, read_only=read_only, prepend=prepend, filters=filters)
+    data_form_2_widgets(
+        xml_ui, form, read_only=read_only, prepend=prepend, filters=filters
+    )
     return xml_ui
 
 
@@ -434,7 +434,7 @@
 
 
 def xmlui_result_2_data_form_result(xmlui_data):
-    """ Extract form data from a XMLUI return.
+    """Extract form data from a XMLUI return.
 
     @param xmlui_data (dict): data returned by frontends for XMLUI form
     @return: dict of data usable by Wokkel's data form
@@ -446,7 +446,7 @@
         if isinstance(value, str):
             if "\n" in value:
                 # data form expects multi-lines text to be in separated values
-                value = value.split('\n')
+                value = value.split("\n")
             elif "\t" in value:
                 # FIXME: workaround to handle multiple values. Proper serialisation must
                 #   be done in XMLUI
@@ -466,7 +466,7 @@
 
 def is_xmlui_cancelled(raw_xmlui):
     """Tell if an XMLUI has been cancelled by checking raw XML"""
-    return C.bool(raw_xmlui.get('cancelled', C.BOOL_FALSE))
+    return C.bool(raw_xmlui.get("cancelled", C.BOOL_FALSE))
 
 
 def xmlui_result_to_elt(xmlui_data):
@@ -619,7 +619,7 @@
 
 
 class Element(object):
-    """ Base XMLUI element """
+    """Base XMLUI element"""
 
     type = None
 
@@ -651,7 +651,7 @@
 
 
 class TopElement(Element):
-    """ Main XML Element """
+    """Main XML Element"""
 
     type = "top"
 
@@ -661,7 +661,7 @@
 
 
 class TabElement(Element):
-    """ Used by TabsContainer to give name and label to tabs."""
+    """Used by TabsContainer to give name and label to tabs."""
 
     type = "tab"
 
@@ -690,7 +690,7 @@
 
 
 class FieldBackElement(Element):
-    """ Used by ButtonWidget to indicate which field have to be sent back """
+    """Used by ButtonWidget to indicate which field have to be sent back"""
 
     type = "field_back"
 
@@ -701,7 +701,7 @@
 
 
 class InternalFieldElement(Element):
-    """ Used by internal callbacks to indicate which fields are manipulated """
+    """Used by internal callbacks to indicate which fields are manipulated"""
 
     type = "internal_field"
 
@@ -711,7 +711,7 @@
 
 
 class InternalDataElement(Element):
-    """ Used by internal callbacks to retrieve extra data """
+    """Used by internal callbacks to retrieve extra data"""
 
     type = "internal_data"
 
@@ -723,7 +723,7 @@
 
 
 class OptionElement(Element):
-    """" Used by ListWidget to specify options """
+    """ " Used by ListWidget to specify options"""
 
     type = "option"
 
@@ -749,7 +749,7 @@
 
 
 class JidElement(Element):
-    """" Used by JidsListWidget to specify jids"""
+    """ " Used by JidsListWidget to specify jids"""
 
     type = "jid"
 
@@ -770,7 +770,7 @@
 
 
 class RowElement(Element):
-    """" Used by AdvancedListContainer """
+    """ " Used by AdvancedListContainer"""
 
     type = "row"
 
@@ -785,7 +785,7 @@
 
 
 class HeaderElement(Element):
-    """" Used by AdvancedListContainer """
+    """ " Used by AdvancedListContainer"""
 
     type = "header"
 
@@ -810,7 +810,7 @@
 
 
 class Container(Element):
-    """ And Element which contains other ones and has a layout """
+    """And Element which contains other ones and has a layout"""
 
     type = None
 
@@ -825,7 +825,7 @@
         self.elem.setAttribute("type", self.type)
 
     def get_parent_container(self):
-        """ Return first parent container
+        """Return first parent container
 
         @return: parent container or None
         """
@@ -845,11 +845,13 @@
 
 class PairsContainer(Container):
     """Container with series of 2 elements"""
+
     type = "pairs"
 
 
 class LabelContainer(Container):
     """Like PairsContainer, but first element can only be a label"""
+
     type = "label"
 
 
@@ -872,7 +874,7 @@
         return self.xmlui.change_container(new_container)
 
     def end(self):
-        """ Called when we have finished tabs
+        """Called when we have finished tabs
 
         change current container to first container parent
         """
@@ -952,7 +954,7 @@
             self.append(item)
 
     def set_row_index(self, idx):
-        """ Set index for next row
+        """Set index for next row
 
         index are returned when a row is selected, in data's "index" key
         @param idx: string index to associate to the next row
@@ -968,7 +970,7 @@
         self._item_idx += 1
 
     def end(self):
-        """ Called when we have finished list
+        """Called when we have finished list
 
         change current container to first container parent
         """
@@ -997,9 +999,7 @@
             self.elem.setAttribute("name", name)
             if name in xmlui.named_widgets:
                 raise exceptions.ConflictError(
-                    _('A widget with the name "{name}" already exists.').format(
-                        name=name
-                    )
+                    _('A widget with the name "{name}" already exists.').format(name=name)
                 )
             xmlui.named_widgets[name] = self
         self.elem.setAttribute("type", self.type)
@@ -1058,6 +1058,7 @@
 
     used most of time to display the desciption or name of the next widget
     """
+
     type = "label"
 
     def __init__(self, xmlui, label, name=None, parent=None):
@@ -1067,6 +1068,7 @@
 
 class HiddenWidget(Widget):
     """Not displayed widget, frontends will just copy the value(s)"""
+
     type = "hidden"
 
     def __init__(self, xmlui, value, name, parent=None):
@@ -1102,7 +1104,7 @@
     type = "divider"
 
     def __init__(self, xmlui, style="line", name=None, parent=None):
-        """ Create a divider
+        """Create a divider
 
         @param xmlui: XMLUI instance
         @param style: one of:
@@ -1160,6 +1162,7 @@
 
 class XHTMLBoxWidget(StringWidget):
     """Specialized textbox to manipulate XHTML"""
+
     type = "xhtmlbox"
 
     def __init__(self, xmlui, value, name=None, parent=None, read_only=False, clean=True):
@@ -1170,12 +1173,12 @@
         """
         if clean:
             if clean_xhtml is None:
-                raise exceptions.NotFound(
-                    "No cleaning method set, can't clean the XHTML")
+                raise exceptions.NotFound("No cleaning method set, can't clean the XHTML")
             value = clean_xhtml(value)
 
         super(XHTMLBoxWidget, self).__init__(
-            xmlui, value=value, name=name, parent=parent, read_only=read_only)
+            xmlui, value=value, name=name, parent=parent, read_only=read_only
+        )
 
 
 class JidInputWidget(StringWidget):
@@ -1283,7 +1286,7 @@
         self.set_styles(styles)
 
     def add_options(self, options, selected=None):
-        """Add options to a multi-values element (e.g. list) """
+        """Add options to a multi-values element (e.g. list)"""
         if selected:
             if isinstance(selected, str):
                 selected = [selected]
@@ -1347,7 +1350,7 @@
 
 
 class DialogElement(Element):
-    """Main dialog element """
+    """Main dialog element"""
 
     type = "dialog"
 
@@ -1410,8 +1413,15 @@
 class XMLUI(object):
     """This class is used to create a user interface (form/window/parameters/etc) using SàT XML"""
 
-    def __init__(self, panel_type="window", container="vertical", dialog_opt=None,
-        title=None, submit_id=None, session_id=None):
+    def __init__(
+        self,
+        panel_type="window",
+        container="vertical",
+        dialog_opt=None,
+        title=None,
+        submit_id=None,
+        session_id=None,
+    ):
         """Init SàT XML Panel
 
         @param panel_type: one of
@@ -1511,8 +1521,9 @@
         #       remove the creator_wrapper
         def create_widget(self, *args, **kwargs):
             if self.type == C.XMLUI_DIALOG:
-                raise exceptions.InternalError(_(
-                    "create_widget can't be used with dialogs"))
+                raise exceptions.InternalError(
+                    _("create_widget can't be used with dialogs")
+                )
             if "parent" not in kwargs:
                 kwargs["parent"] = self.current_container
             if "name" not in kwargs and is_input:
@@ -1521,11 +1532,12 @@
                 args = list(args)
                 kwargs["name"] = args.pop(0)
             return widget_cls(self, *args, **kwargs)
+
         return create_widget
 
     @classmethod
     def _introspect(cls):
-        """ Introspect module to find Widgets and Containers, and create addXXX methods"""
+        """Introspect module to find Widgets and Containers, and create addXXX methods"""
         # FIXME: we can't log anything because this file is used
         #        in bin/sat script then evaluated
         #        bin/sat should be refactored
@@ -1539,7 +1551,7 @@
                         continue
                     cls._widgets[obj.type] = obj
                     creator_name = "add" + obj.__name__
-                    if creator_name.endswith('Widget'):
+                    if creator_name.endswith("Widget"):
                         creator_name = creator_name[:-6]
                     is_input = issubclass(obj, InputWidget)
                     # FIXME: cf. above comment
@@ -1651,7 +1663,7 @@
             self.current_container = self._create_container(
                 container,
                 self.current_container.get_parent_container() or self.main_container,
-                **kwargs
+                **kwargs,
             )
         else:
             self.current_container = (
@@ -1728,8 +1740,14 @@
     return xmlui_d
 
 
-def defer_xmlui(host, xmlui, action_extra=None, security_limit=C.NO_SECURITY_LIMIT,
-    chained=False, profile=C.PROF_KEY_NONE):
+def defer_xmlui(
+    host,
+    xmlui,
+    action_extra=None,
+    security_limit=C.NO_SECURITY_LIMIT,
+    chained=False,
+    profile=C.PROF_KEY_NONE,
+):
     """Create a deferred linked to XMLUI
 
     @param xmlui(XMLUI): instance of the XMLUI
@@ -1764,7 +1782,7 @@
     action_extra: Optional[dict] = None,
     security_limit: int = C.NO_SECURITY_LIMIT,
     chained: bool = False,
-    profile: str = C.PROF_KEY_NONE
+    profile: str = C.PROF_KEY_NONE,
 ) -> defer.Deferred:
     """Create a submitable dialog and manage it with a deferred
 
@@ -1798,10 +1816,9 @@
 
 # Misc other funtions
 
+
 def element_copy(
-    element: domish.Element,
-    with_parent: bool = True,
-    with_children: bool = True
+    element: domish.Element, with_parent: bool = True, with_children: bool = True
 ) -> domish.Element:
     """Make a copy of a domish.Element
 
@@ -1813,9 +1830,10 @@
     """
     new_elt = domish.Element(
         (element.uri, element.name),
-        defaultUri = element.defaultUri,
-        attribs = element.attributes,
-        localPrefixes = element.localPrefixes)
+        defaultUri=element.defaultUri,
+        attribs=element.attributes,
+        localPrefixes=element.localPrefixes,
+    )
     if with_parent:
         new_elt.parent = element.parent
     if with_children:
@@ -1825,8 +1843,11 @@
 
 def is_xhtml_field(field):
     """Check if a data_form.Field is an XHTML one"""
-    return (field.fieldType is None and field.ext_type == "xml" and
-            field.value.uri == C.NS_XHTML)
+    return (
+        field.fieldType is None
+        and field.ext_type == "xml"
+        and field.value.uri == C.NS_XHTML
+    )
 
 
 class ElementParser:
@@ -1937,10 +1958,8 @@
 
 
 def find_ancestor(
-    elt,
-    name: str,
-    namespace: Optional[Union[str, Iterable[str]]] = None
-    ) -> domish.Element:
+    elt, name: str, namespace: Optional[Union[str, Iterable[str]]] = None
+) -> domish.Element:
     """Retrieve ancestor of an element
 
     @param elt: starting element, its parent will be checked recursively until the
@@ -1971,7 +1990,7 @@
     strings = []
     for child in elt.children:
         if domish.IElement.providedBy(child):
-            strings.append(p_fmt_elt(child, indent+2, defaultUri=elt.defaultUri))
+            strings.append(p_fmt_elt(child, indent + 2, defaultUri=elt.defaultUri))
         else:
             strings.append(f"{(indent+2)*' '}{child!s}")
     if elt.children:
@@ -1982,7 +2001,7 @@
         strings.append(f"{indent*' '}</{nochild_elt.name}>")
     else:
         strings.append(f"{indent*' '}{elt.toXml(defaultUri=defaultUri)}")
-    return '\n'.join(strings)
+    return "\n".join(strings)
 
 
 def pp_elt(elt):
@@ -1992,6 +2011,7 @@
 
 # ElementTree
 
+
 def et_get_namespace_and_name(et_elt: ET.Element) -> Tuple[Optional[str], str]:
     """Retrieve element namespace and name from ElementTree element
 
@@ -2007,7 +2027,7 @@
     end_idx = name.find("}")
     if end_idx == -1:
         raise ValueError("Invalid ET name")
-    return name[1:end_idx], name[end_idx+1:]
+    return name[1:end_idx], name[end_idx + 1 :]
 
 
 def et_elt_2_domish_elt(et_elt: Union[ET.Element, etree.Element]) -> domish.Element:
@@ -2026,20 +2046,20 @@
 
 
 @overload
-def domish_elt_2_et_elt(elt: domish.Element, lxml: Literal[False]) -> ET.Element:
-    ...
+def domish_elt_2_et_elt(elt: domish.Element, lxml: Literal[False]) -> ET.Element: ...
+
 
 @overload
-def domish_elt_2_et_elt(elt: domish.Element, lxml: Literal[True]) -> etree.Element:
-    ...
+def domish_elt_2_et_elt(elt: domish.Element, lxml: Literal[True]) -> etree.Element: ...
+
 
 @overload
 def domish_elt_2_et_elt(
     elt: domish.Element, lxml: bool
-) -> Union[ET.Element, etree.Element]:
-    ...
+) -> Union[ET.Element, etree.Element]: ...
 
-def domish_elt_2_et_elt(elt, lxml = False):
+
+def domish_elt_2_et_elt(elt, lxml=False):
     """Convert Twisted's domish.Element to ElementTree equivalent
 
     Note: this is a naive implementation, adapted to XMPP, and some text content may be
--- a/libervia/backend/tools/xmpp_datetime.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/tools/xmpp_datetime.py	Wed Jun 19 18:44:57 2024 +0200
@@ -29,7 +29,7 @@
     "format_datetime",
     "parse_datetime",
     "format_time",
-    "parse_time"
+    "parse_time",
 ]
 
 
@@ -51,11 +51,11 @@
     microsecond: Optional[int] = None
     if match is not None:
         # Remove the fraction of a second from the input string
-        value = value[:match.start()] + value[match.end():]
+        value = value[: match.start()] + value[match.end() :]
 
         # datetime supports microsecond resolution for the fraction of a second, thus
         # limit/pad the parsed fraction of a second to six digits
-        microsecond = int(match.group(1)[:6].ljust(6, '0'))
+        microsecond = int(match.group(1)[:6].ljust(6, "0"))
 
     return value, microsecond
 
@@ -92,8 +92,7 @@
 
 
 def format_datetime(
-    value: Optional[datetime] = None,
-    include_microsecond: bool = False
+    value: Optional[datetime] = None, include_microsecond: bool = False
 ) -> str:
     """
     @param value: The datetime to format. Defaults to the current datetime.
@@ -184,7 +183,7 @@
     # UTC. This can be fixed with a simple string replacement of 'Z' with "+00:00", which
     # is another way to represent UTC.
     try:
-        result = time.fromisoformat(value.replace('Z', "+00:00"))
+        result = time.fromisoformat(value.replace("Z", "+00:00"))
     except ValueError as e:
         raise exceptions.ParsingError() from e
 
--- a/libervia/cli/arg_tools.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/arg_tools.py	Wed Jun 19 18:44:57 2024 +0200
@@ -92,9 +92,7 @@
         else:
             if verbose:
                 host.disp(
-                    _("arg {name}={value} (in USE)").format(
-                        name=arg, value=escape(value)
-                    )
+                    _("arg {name}={value} (in USE)").format(name=arg, value=escape(value))
                 )
             if not action.option_strings:
                 pos_args.append(value)
--- a/libervia/cli/base.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/base.py	Wed Jun 19 18:44:57 2024 +0200
@@ -21,8 +21,8 @@
 
 ### logging ###
 import logging as log
-log.basicConfig(level=log.WARNING,
-                format='[%(name)s] %(message)s')
+
+log.basicConfig(level=log.WARNING, format="[%(name)s] %(message)s")
 ###
 
 import sys
@@ -55,31 +55,36 @@
 from rich import console
 
 ## bridge handling
-# we get bridge name from conf and initialise the right class accordingly
+# we get bridge name from conf and initialise the right class accordingly
 main_config = config.parse_main_conf()
-bridge_name = config.config_get(main_config, '', 'bridge', 'dbus')
+bridge_name = config.config_get(main_config, "", "bridge", "dbus")
 LiberviaCLILoop = get_libervia_cli_loop(bridge_name)
 
 
 try:
     import progressbar
 except ImportError:
-    msg = (_('ProgressBar not available, please download it at '
-             'http://pypi.python.org/pypi/progressbar\n'
-             'Progress bar deactivated\n--\n'))
+    msg = _(
+        "ProgressBar not available, please download it at "
+        "http://pypi.python.org/pypi/progressbar\n"
+        "Progress bar deactivated\n--\n"
+    )
     print(msg, file=sys.stderr)
-    progressbar=None
+    progressbar = None
 
-#consts
-DESCRIPTION = """This software is a command line tool for XMPP.
-Get the latest version at """ + C.APP_URL
+# consts
+DESCRIPTION = (
+    """This software is a command line tool for XMPP.
+Get the latest version at """
+    + C.APP_URL
+)
 
 COPYLEFT = """Copyright (C) 2009-2024 Jérôme Poisson, Adrien Cossa
 This program comes with ABSOLUTELY NO WARRANTY;
 This is free software, and you are welcome to redistribute it under certain conditions.
 """
 
-PROGRESS_DELAY = 0.1 # the progression will be checked every PROGRESS_DELAY s
+PROGRESS_DELAY = 0.1  # the progression will be checked every PROGRESS_DELAY s
 
 
 def date_decoder(arg):
@@ -95,6 +100,7 @@
     specify what kind of operation you want to perform.
 
     """
+
     def __init__(self):
         """
 
@@ -110,7 +116,7 @@
         self.console = console.Console(theme=C.THEME_DEFAULT)
         self.sat_conf = main_config
         self.set_color_theme()
-        bridge_module = dynamic_import.bridge(bridge_name, 'libervia.frontends.bridge')
+        bridge_module = dynamic_import.bridge(bridge_name, "libervia.frontends.bridge")
         if bridge_module is None:
             log.error("Can't import {} bridge".format(bridge_name))
             sys.exit(1)
@@ -132,15 +138,18 @@
 
         color_fg_bg = os.getenv("COLORFGBG")
 
-        if ((sys.stdin.isatty() and sys.stdout.isatty()
-             and (
-                 # XTerm
-                 os.getenv("XTERM_VERSION")
-                 # Konsole
-                 or os.getenv("KONSOLE_VERSION")
-                 # All VTE based terminals
-                 or vte_version >= 3502
-             ))):
+        if (
+            sys.stdin.isatty()
+            and sys.stdout.isatty()
+            and (
+                # XTerm
+                os.getenv("XTERM_VERSION")
+                # Konsole
+                or os.getenv("KONSOLE_VERSION")
+                # All VTE based terminals
+                or vte_version >= 3502
+            )
+        ):
             # ANSI escape sequence
             stdin_fd = sys.stdin.fileno()
             old_settings = termios.tcgetattr(stdin_fd)
@@ -155,9 +164,9 @@
                     if ch != c:
                         # background id is not supported, we default to "dark"
                         # TODO: log something?
-                        return 'dark'
+                        return "dark"
                 red, green, blue = [
-                    int(c, 16)/65535 for c in sys.stdin.read(14).split('/')
+                    int(c, 16) / 65535 for c in sys.stdin.read(14).split("/")
                 ]
                 # '\a' is the last character
                 sys.stdin.read(1)
@@ -166,9 +175,9 @@
 
             lum = utils.per_luminance(red, green, blue)
             if lum <= 0.5:
-                return 'dark'
+                return "dark"
             else:
-                return 'light'
+                return "light"
         elif color_fg_bg:
             # no luck with ANSI escape sequence, we try COLORFGBG environment variable
             try:
@@ -184,17 +193,18 @@
             return "dark"
 
     def set_color_theme(self):
-        background = self.get_config('background', default='auto')
-        if background == 'auto':
+        background = self.get_config("background", default="auto")
+        if background == "auto":
             background = self.guess_background()
-        if background not in ('dark', 'light'):
-            raise exceptions.ConfigError(_(
-                'Invalid value set for "background" ({background}), please check '
-                'your settings in libervia.conf').format(
-                    background=repr(background)
-                ))
+        if background not in ("dark", "light"):
+            raise exceptions.ConfigError(
+                _(
+                    'Invalid value set for "background" ({background}), please check '
+                    "your settings in libervia.conf"
+                ).format(background=repr(background))
+            )
         self.background = background
-        if background == 'light':
+        if background == "light":
             C.A_HEADER = A.FG_MAGENTA
             C.A_SUBHEADER = A.BOLD + A.FG_RED
             C.A_LEVEL_COLORS = (C.A_HEADER, A.BOLD + A.FG_BLUE, A.FG_MAGENTA, A.FG_CYAN)
@@ -208,14 +218,16 @@
 
     def _bridge_connected(self):
         self.parser = argparse.ArgumentParser(
-            formatter_class=argparse.RawDescriptionHelpFormatter, description=DESCRIPTION)
+            formatter_class=argparse.RawDescriptionHelpFormatter, description=DESCRIPTION
+        )
         self._make_parents()
         self.add_parser_options()
         self.subparsers = self.parser.add_subparsers(
-            title=_('Available commands'), dest='command', required=True)
+            title=_("Available commands"), dest="command", required=True
+        )
 
         # progress attributes
-        self._progress_id = None # TODO: manage several progress ids
+        self._progress_id = None  # TODO: manage several progress ids
         self.quit_on_progress_end = True
 
         # outputs
@@ -233,7 +245,7 @@
     async def set_progress_id(self, progress_id):
         # because we use async, we need an explicit setter
         self._progress_id = progress_id
-        await self.replay_cache('progress_ids_cache')
+        await self.replay_cache("progress_ids_cache")
 
     @property
     def watch_progress(self):
@@ -272,7 +284,7 @@
             for cache_data in cache:
                 await cache_data[0](*cache_data[1:])
 
-    def disp(self, msg, verbosity=0, error=False, end='\n'):
+    def disp(self, msg, verbosity=0, error=False, end="\n"):
         """Print a message to user
 
         @param msg(unicode): message to print
@@ -290,7 +302,7 @@
         if name in extra_outputs:
             method = extra_outputs[name]
         else:
-            method = self._outputs[type_][name]['callback']
+            method = self._outputs[type_][name]["callback"]
 
         ret = method(data)
         if inspect.isawaitable(ret):
@@ -317,55 +329,83 @@
         # we have a special case here as the start-session option is present only if
         # connection is not needed, so we create two similar parents, one with the
         # option, the other one without it
-        for parent_name in ('profile', 'profile_session'):
+        for parent_name in ("profile", "profile_session"):
             parent = self.parents[parent_name] = argparse.ArgumentParser(add_help=False)
             parent.add_argument(
-                "-p", "--profile", action="store", type=str, default='@DEFAULT@',
-                help=_("Use PROFILE profile key (default: %(default)s)"))
+                "-p",
+                "--profile",
+                action="store",
+                type=str,
+                default="@DEFAULT@",
+                help=_("Use PROFILE profile key (default: %(default)s)"),
+            )
             parent.add_argument(
-                "--pwd", action="store", metavar='PASSWORD',
-                help=_("Password used to connect profile, if necessary"))
+                "--pwd",
+                action="store",
+                metavar="PASSWORD",
+                help=_("Password used to connect profile, if necessary"),
+            )
 
-        profile_parent, profile_session_parent = (self.parents['profile'],
-                                                  self.parents['profile_session'])
+        profile_parent, profile_session_parent = (
+            self.parents["profile"],
+            self.parents["profile_session"],
+        )
 
         connect_short, connect_long, connect_action, connect_help = (
-            "-c", "--connect", "store_true",
-            _("Connect the profile before doing anything else")
+            "-c",
+            "--connect",
+            "store_true",
+            _("Connect the profile before doing anything else"),
         )
         profile_parent.add_argument(
-            connect_short, connect_long, action=connect_action, help=connect_help)
+            connect_short, connect_long, action=connect_action, help=connect_help
+        )
 
-        profile_session_connect_group = profile_session_parent.add_mutually_exclusive_group()
-        profile_session_connect_group.add_argument(
-            connect_short, connect_long, action=connect_action, help=connect_help)
+        profile_session_connect_group = (
+            profile_session_parent.add_mutually_exclusive_group()
+        )
         profile_session_connect_group.add_argument(
-            "--start-session", action="store_true",
-            help=_("Start a profile session without connecting"))
+            connect_short, connect_long, action=connect_action, help=connect_help
+        )
+        profile_session_connect_group.add_argument(
+            "--start-session",
+            action="store_true",
+            help=_("Start a profile session without connecting"),
+        )
 
-        progress_parent = self.parents['progress'] = argparse.ArgumentParser(
-            add_help=False)
+        progress_parent = self.parents["progress"] = argparse.ArgumentParser(
+            add_help=False
+        )
         if progressbar:
             progress_parent.add_argument(
-                "-P", "--progress", action="store_true", help=_("Show progress bar"))
+                "-P", "--progress", action="store_true", help=_("Show progress bar")
+            )
 
-        verbose_parent = self.parents['verbose'] = argparse.ArgumentParser(add_help=False)
+        verbose_parent = self.parents["verbose"] = argparse.ArgumentParser(add_help=False)
         verbose_parent.add_argument(
-            '--verbose', '-v', action='count', default=0,
-            help=_("Add a verbosity level (can be used multiple times)"))
+            "--verbose",
+            "-v",
+            action="count",
+            default=0,
+            help=_("Add a verbosity level (can be used multiple times)"),
+        )
 
-        quiet_parent = self.parents['quiet'] = argparse.ArgumentParser(add_help=False)
+        quiet_parent = self.parents["quiet"] = argparse.ArgumentParser(add_help=False)
         quiet_parent.add_argument(
-            '--quiet', '-q', action='store_true',
-            help=_("be quiet (only output machine readable data)"))
+            "--quiet",
+            "-q",
+            action="store_true",
+            help=_("be quiet (only output machine readable data)"),
+        )
 
-        draft_parent = self.parents['draft'] = argparse.ArgumentParser(add_help=False)
-        draft_group = draft_parent.add_argument_group(_('draft handling'))
+        draft_parent = self.parents["draft"] = argparse.ArgumentParser(add_help=False)
+        draft_group = draft_parent.add_argument_group(_("draft handling"))
         draft_group.add_argument(
-            "-D", "--current", action="store_true", help=_("load current draft"))
+            "-D", "--current", action="store_true", help=_("load current draft")
+        )
         draft_group.add_argument(
-            "-F", "--draft-path", type=Path, help=_("path to a draft file to retrieve"))
-
+            "-F", "--draft-path", type=Path, help=_("path to a draft file to retrieve")
+        )
 
     def make_pubsub_group(self, flags, defaults):
         """Generate pubsub options according to flags
@@ -378,75 +418,106 @@
         """
         flags = misc.FlagsHandler(flags)
         parent = argparse.ArgumentParser(add_help=False)
-        pubsub_group = parent.add_argument_group('pubsub')
-        pubsub_group.add_argument("-u", "--pubsub-url",
-                                  help=_("Pubsub URL (xmpp or http)"))
+        pubsub_group = parent.add_argument_group("pubsub")
+        pubsub_group.add_argument(
+            "-u", "--pubsub-url", help=_("Pubsub URL (xmpp or http)")
+        )
 
         service_help = _("JID of the PubSub service")
         if not flags.service:
-            default = defaults.pop('service', _('PEP service'))
+            default = defaults.pop("service", _("PEP service"))
             if default is not None:
                 service_help += _(" (DEFAULT: {default})".format(default=default))
-        pubsub_group.add_argument("-s", "--service", default='',
-                                  help=service_help)
+        pubsub_group.add_argument("-s", "--service", default="", help=service_help)
 
         node_help = _("node to request")
         if not flags.node:
-            default = defaults.pop('node', _('standard node'))
+            default = defaults.pop("node", _("standard node"))
             if default is not None:
                 node_help += _(" (DEFAULT: {default})".format(default=default))
-        pubsub_group.add_argument("-n", "--node", default='', help=node_help)
+        pubsub_group.add_argument("-n", "--node", default="", help=node_help)
 
         if flags.single_item:
-            item_help = ("item to retrieve")
+            item_help = "item to retrieve"
             if not flags.item:
-                default = defaults.pop('item', _('last item'))
+                default = defaults.pop("item", _("last item"))
                 if default is not None:
                     item_help += _(" (DEFAULT: {default})".format(default=default))
-            pubsub_group.add_argument("-i", "--item", default='',
-                                      help=item_help)
+            pubsub_group.add_argument("-i", "--item", default="", help=item_help)
             pubsub_group.add_argument(
-                "-L", "--last-item", action='store_true', help=_('retrieve last item'))
+                "-L", "--last-item", action="store_true", help=_("retrieve last item")
+            )
         elif flags.multi_items:
             # mutiple items, this activate several features: max-items, RSM, MAM
             # and Orbder-by
             pubsub_group.add_argument(
-                "-i", "--item", action='append', dest='items', default=[],
-                help=_("items to retrieve (DEFAULT: all)"))
+                "-i",
+                "--item",
+                action="append",
+                dest="items",
+                default=[],
+                help=_("items to retrieve (DEFAULT: all)"),
+            )
             if not flags.no_max:
                 max_group = pubsub_group.add_mutually_exclusive_group()
                 # XXX: defaut value for --max-items or --max is set in parse_pubsub_args
                 max_group.add_argument(
-                    "-M", "--max-items", dest="max", type=int,
-                    help=_("maximum number of items to get ({no_limit} to get all items)"
-                           .format(no_limit=C.NO_LIMIT)))
+                    "-M",
+                    "--max-items",
+                    dest="max",
+                    type=int,
+                    help=_(
+                        "maximum number of items to get ({no_limit} to get all items)".format(
+                            no_limit=C.NO_LIMIT
+                        )
+                    ),
+                )
                 # FIXME: it could be possible to no duplicate max (between pubsub
                 #        max-items and RSM max)should not be duplicated, RSM could be
                 #        used when available and pubsub max otherwise
                 max_group.add_argument(
-                    "-m", "--max", dest="rsm_max", type=int,
-                    help=_("maximum number of items to get per page (DEFAULT: 10)"))
+                    "-m",
+                    "--max",
+                    dest="rsm_max",
+                    type=int,
+                    help=_("maximum number of items to get per page (DEFAULT: 10)"),
+                )
 
             # RSM
 
             rsm_page_group = pubsub_group.add_mutually_exclusive_group()
             rsm_page_group.add_argument(
-                "-a", "--after", dest="rsm_after",
-                help=_("find page after this item"), metavar='ITEM_ID')
+                "-a",
+                "--after",
+                dest="rsm_after",
+                help=_("find page after this item"),
+                metavar="ITEM_ID",
+            )
             rsm_page_group.add_argument(
-                "-b", "--before", dest="rsm_before",
-                help=_("find page before this item"), metavar='ITEM_ID')
+                "-b",
+                "--before",
+                dest="rsm_before",
+                help=_("find page before this item"),
+                metavar="ITEM_ID",
+            )
             rsm_page_group.add_argument(
-                "--index", dest="rsm_index", type=int,
-                help=_("index of the first item to retrieve"))
-
+                "--index",
+                dest="rsm_index",
+                type=int,
+                help=_("index of the first item to retrieve"),
+            )
 
             # MAM
 
             pubsub_group.add_argument(
-                "-f", "--filter", dest='mam_filters', nargs=2,
-                action='append', default=[], help=_("MAM filters to use"),
-                metavar=("FILTER_NAME", "VALUE")
+                "-f",
+                "--filter",
+                dest="mam_filters",
+                nargs=2,
+                action="append",
+                default=[],
+                help=_("MAM filters to use"),
+                metavar=("FILTER_NAME", "VALUE"),
             )
 
             # Order-By
@@ -456,57 +527,62 @@
             #       current specifications, as only "creation" and "modification" are
             #       available)
             pubsub_group.add_argument(
-                "-o", "--order-by", choices=[C.ORDER_BY_CREATION,
-                                             C.ORDER_BY_MODIFICATION],
-                help=_("how items should be ordered"))
+                "-o",
+                "--order-by",
+                choices=[C.ORDER_BY_CREATION, C.ORDER_BY_MODIFICATION],
+                help=_("how items should be ordered"),
+            )
 
         if flags[C.CACHE]:
             pubsub_group.add_argument(
-                "-C", "--no-cache", dest="use_cache", action='store_false',
-                help=_("don't use Pubsub cache")
+                "-C",
+                "--no-cache",
+                dest="use_cache",
+                action="store_false",
+                help=_("don't use Pubsub cache"),
             )
 
         if not flags.all_used:
-            raise exceptions.InternalError('unknown flags: {flags}'.format(
-                flags=', '.join(flags.unused)))
+            raise exceptions.InternalError(
+                "unknown flags: {flags}".format(flags=", ".join(flags.unused))
+            )
         if defaults:
-            raise exceptions.InternalError(f'unused defaults: {defaults}')
+            raise exceptions.InternalError(f"unused defaults: {defaults}")
 
         return parent
 
     def add_parser_options(self):
         self.parser.add_argument(
-            '--version',
-            action='version',
-            version=("{name} {version} {copyleft}".format(
-                name = C.APP_NAME,
-                version = self.version,
-                copyleft = COPYLEFT))
+            "--version",
+            action="version",
+            version=(
+                "{name} {version} {copyleft}".format(
+                    name=C.APP_NAME, version=self.version, copyleft=COPYLEFT
+                )
+            ),
         )
 
     def register_output(self, type_, name, callback, description="", default=False):
         if type_ not in C.OUTPUT_TYPES:
             log.error("Invalid output type {}".format(type_))
             return
-        self._outputs[type_][name] = {'callback': callback,
-                                      'description': description
-                                     }
+        self._outputs[type_][name] = {"callback": callback, "description": description}
         if default:
             if type_ in self.default_output:
                 self.disp(
-                    _('there is already a default output for {type}, ignoring new one')
-                    .format(type=type_)
+                    _(
+                        "there is already a default output for {type}, ignoring new one"
+                    ).format(type=type_)
                 )
             else:
                 self.default_output[type_] = name
 
-
     def parse_output_options(self):
         options = self.command.args.output_opts
         options_dict = {}
         for option in options:
             try:
-                key, value = option.split('=', 1)
+                key, value = option.split("=", 1)
             except ValueError:
                 key, value = option, None
             options_dict[key.strip()] = value.strip() if value is not None else None
@@ -516,8 +592,10 @@
         if not accepted_set.issuperset(options):
             self.disp(
                 _("The following output options are invalid: {invalid_options}").format(
-                invalid_options = ', '.join(set(options).difference(accepted_set))),
-                error=True)
+                    invalid_options=", ".join(set(options).difference(accepted_set))
+                ),
+                error=True,
+            )
             self.quit(C.EXIT_BAD_ARG)
 
     def import_plugins(self):
@@ -527,11 +605,14 @@
         """
         path = os.path.dirname(libervia.cli.__file__)
         # XXX: outputs must be imported before commands as they are used for arguments
-        for type_, pattern in ((C.PLUGIN_OUTPUT, 'output_*.py'),
-                               (C.PLUGIN_CMD, 'cmd_*.py')):
+        for type_, pattern in (
+            (C.PLUGIN_OUTPUT, "output_*.py"),
+            (C.PLUGIN_CMD, "cmd_*.py"),
+        ):
             modules = (
                 os.path.splitext(module)[0]
-                for module in map(os.path.basename, iglob(os.path.join(path, pattern))))
+                for module in map(os.path.basename, iglob(os.path.join(path, pattern)))
+            )
             for module_name in modules:
                 module_path = "libervia.cli." + module_name
                 try:
@@ -540,15 +621,21 @@
                 except exceptions.CancelError:
                     continue
                 except exceptions.MissingModule as e:
-                    self.disp(_("Missing module for plugin {name}: {missing}".format(
-                        name = module_path,
-                        missing = e)), error=True)
+                    self.disp(
+                        _(
+                            "Missing module for plugin {name}: {missing}".format(
+                                name=module_path, missing=e
+                            )
+                        ),
+                        error=True,
+                    )
                 except Exception as e:
                     self.disp(
-                        _("Can't import {module_path} plugin, ignoring it: {e}")
-                        .format(module_path=module_path, e=e),
-                        error=True)
-
+                        _("Can't import {module_path} plugin, ignoring it: {e}").format(
+                            module_path=module_path, e=e
+                        ),
+                        error=True,
+                    )
 
     def import_plugin_module(self, module, type_):
         """add commands or outpus from a module to CLI frontend
@@ -557,12 +644,14 @@
         @param type_(str): one of C_PLUGIN_*
         """
         try:
-            class_names =  getattr(module, '__{}__'.format(type_))
+            class_names = getattr(module, "__{}__".format(type_))
         except AttributeError:
             log.disp(
-                _("Invalid plugin module [{type}] {module}")
-                .format(type=type_, module=module),
-                error=True)
+                _("Invalid plugin module [{type}] {module}").format(
+                    type=type_, module=module
+                ),
+                error=True,
+            )
             raise ImportError
         else:
             for class_name in class_names:
@@ -571,12 +660,12 @@
 
     def get_xmpp_uri_from_http(self, http_url):
         """parse HTML page at http(s) URL, and looks for xmpp: uri"""
-        if http_url.startswith('https'):
-            scheme = 'https'
-        elif http_url.startswith('http'):
-            scheme = 'http'
+        if http_url.startswith("https"):
+            scheme = "https"
+        elif http_url.startswith("http"):
+            scheme = "http"
         else:
-            raise exceptions.InternalError('An HTTP scheme is expected in this method')
+            raise exceptions.InternalError("An HTTP scheme is expected in this method")
         self.disp(f"{scheme.upper()} URL found, trying to find associated xmpp: URI", 1)
         # HTTP URL, we try to find xmpp: links
         try:
@@ -584,10 +673,12 @@
         except ImportError:
             self.disp(
                 "lxml module must be installed to use http(s) scheme, please install it "
-                "with \"pip install lxml\"",
-                error=True)
+                'with "pip install lxml"',
+                error=True,
+            )
             self.quit(1)
         import urllib.request, urllib.error, urllib.parse
+
         parser = etree.HTMLParser()
         try:
             root = etree.parse(urllib.request.urlopen(http_url), parser)
@@ -598,33 +689,36 @@
             links = root.xpath("//link[@rel='alternate' and starts-with(@href, 'xmpp:')]")
         if not links:
             self.disp(
-                _('Could not find alternate "xmpp:" URI, can\'t find associated XMPP '
-                  'PubSub node/item'),
-                error=True)
+                _(
+                    'Could not find alternate "xmpp:" URI, can\'t find associated XMPP '
+                    "PubSub node/item"
+                ),
+                error=True,
+            )
             self.quit(1)
-        xmpp_uri = links[0].get('href')
+        xmpp_uri = links[0].get("href")
         return xmpp_uri
 
     def parse_pubsub_args(self):
         if self.args.pubsub_url is not None:
             url = self.args.pubsub_url
 
-            if url.startswith('http'):
-                # http(s) URL, we try to retrieve xmpp one from there
+            if url.startswith("http"):
+                # http(s) URL, we try to retrieve xmpp one from there
                 url = self.get_xmpp_uri_from_http(url)
 
             try:
                 uri_data = uri.parse_xmpp_uri(url)
             except ValueError:
-                self.parser.error(_('invalid XMPP URL: {url}').format(url=url))
+                self.parser.error(_("invalid XMPP URL: {url}").format(url=url))
             else:
-                if uri_data['type'] == 'pubsub':
+                if uri_data["type"] == "pubsub":
                     # URL is alright, we only set data not already set by other options
                     if not self.args.service:
-                        self.args.service = uri_data['path']
+                        self.args.service = uri_data["path"]
                     if not self.args.node:
-                        self.args.node = uri_data['node']
-                    uri_item = uri_data.get('item')
+                        self.args.node = uri_data["node"]
+                    uri_item = uri_data.get("item")
                     if uri_item:
                         # there is an item in URI
                         # we use it only if item is not already set
@@ -636,9 +730,12 @@
                                 items = self.args.items
                             except AttributeError:
                                 self.disp(
-                                    _("item specified in URL but not needed in command, "
-                                      "ignoring it"),
-                                    error=True)
+                                    _(
+                                        "item specified in URL but not needed in command, "
+                                        "ignoring it"
+                                    ),
+                                    error=True,
+                                )
                             else:
                                 if not items:
                                     self.args.items = [uri_item]
@@ -652,7 +749,7 @@
                                     self.args.item = uri_item
                 else:
                     self.parser.error(
-                        _('XMPP URL is not a pubsub one: {url}').format(url=url)
+                        _("XMPP URL is not a pubsub one: {url}").format(url=url)
                     )
         flags = self.args._cmd._pubsub_flags
         # we check required arguments here instead of using add_arguments' required option
@@ -669,7 +766,8 @@
         try:
             if self.args.item and self.args.item_last:
                 self.parser.error(
-                    _("--item and --item-last can't be used at the same time"))
+                    _("--item and --item-last can't be used at the same time")
+                )
         except AttributeError:
             pass
 
@@ -683,8 +781,13 @@
             # to use pubsub's max or RSM's max. The later is used if any RSM or MAM
             # argument is set
             if max_items is None and rsm_max is None:
-                to_check = ('mam_filters', 'rsm_max', 'rsm_after', 'rsm_before',
-                            'rsm_index')
+                to_check = (
+                    "mam_filters",
+                    "rsm_max",
+                    "rsm_after",
+                    "rsm_before",
+                    "rsm_index",
+                )
                 if any((getattr(self.args, name) for name in to_check)):
                     # we use RSM
                     self.args.rsm_max = 10
@@ -700,17 +803,17 @@
         except Exception as e:
             if isinstance(e, exceptions.BridgeExceptionNoService):
                 print(
-                    _("Can't connect to Libervia backend, are you sure that it's "
-                      "launched ?")
+                    _(
+                        "Can't connect to Libervia backend, are you sure that it's "
+                        "launched ?"
+                    )
                 )
                 self.quit(C.EXIT_BACKEND_NOT_FOUND, raise_exc=False)
             elif isinstance(e, exceptions.BridgeInitError):
                 print(_("Can't init bridge"))
                 self.quit(C.EXIT_BRIDGE_ERROR, raise_exc=False)
             else:
-                print(
-                    _("Error while initialising bridge: {e}").format(e=e)
-                )
+                print(_("Error while initialising bridge: {e}").format(e=e))
                 self.quit(C.EXIT_BRIDGE_ERROR, raise_exc=False)
             return
         # we wait on init_pre_script instead of ready_get, so the CLI frontend can be used
@@ -743,7 +846,6 @@
     def run(cls):
         cls()._run()
 
-
     def _read_stdin(self, stdin_fut):
         """Callback called by ainput to read stdin"""
         line = sys.stdin.readline()
@@ -752,9 +854,9 @@
         else:
             stdin_fut.set_exception(EOFError())
 
-    async def ainput(self, msg=''):
+    async def ainput(self, msg=""):
         """Asynchronous version of buildin "input" function"""
-        self.disp(msg, end=' ')
+        self.disp(msg, end=" ")
         sys.stdout.flush()
         loop = asyncio.get_running_loop()
         stdin_fut = loop.create_future()
@@ -766,7 +868,9 @@
         res = await self.ainput(f"{message} (y/N)? ")
         return res in ("y", "Y")
 
-    async def confirm_or_quit(self, message, cancel_message=_("action cancelled by user")):
+    async def confirm_or_quit(
+        self, message, cancel_message=_("action cancelled by user")
+    ):
         """Request user to confirm action, and quit if he doesn't"""
         confirmed = await self.confirm(message)
         if not confirmed:
@@ -804,7 +908,7 @@
         if raise_exc:
             raise QuitException
 
-    async def a_quit(self, exit_code: int=0, raise_exc=True):
+    async def a_quit(self, exit_code: int = 0, raise_exc=True):
         """Execute async quit callback before actually quitting
 
         This method should be prefered to ``quit``, as it executes async quit callbacks
@@ -864,10 +968,10 @@
 
         def check(jid):
             if not jid.is_valid:
-                log.error (_("%s is not a valid JID !"), jid)
+                log.error(_("%s is not a valid JID !"), jid)
                 self.quit(1)
 
-        dest_jids=[]
+        dest_jids = []
         try:
             for i in range(len(jids)):
                 dest_jids.append(expand_jid(jids[i]))
@@ -877,7 +981,7 @@
 
         return dest_jids
 
-    async def a_pwd_input(self, msg=''):
+    async def a_pwd_input(self, msg=""):
         """Like ainput but with echo disabled (useful for passwords)"""
         # we disable echo, code adapted from getpass standard module which has been
         # written by Piers Lauder (original), Guido van Rossum (Windows support and
@@ -888,7 +992,7 @@
         new = old[:]
         new[3] &= ~termios.ECHO
         tcsetattr_flags = termios.TCSAFLUSH
-        if hasattr(termios, 'TCSASOFT'):
+        if hasattr(termios, "TCSASOFT"):
             tcsetattr_flags |= termios.TCSASOFT
         try:
             termios.tcsetattr(stdin_fd, tcsetattr_flags, new)
@@ -896,7 +1000,7 @@
         finally:
             termios.tcsetattr(stdin_fd, tcsetattr_flags, old)
             sys.stderr.flush()
-        self.disp('')
+        self.disp("")
         return pwd
 
     async def connect_or_prompt(self, method, err_msg=None):
@@ -910,15 +1014,16 @@
         password = self.args.pwd
         while True:
             try:
-                await method(password or '')
+                await method(password or "")
             except Exception as e:
-                if ((isinstance(e, BridgeException)
-                     and e.classname == 'PasswordError'
-                     and self.args.pwd is None)):
+                if (
+                    isinstance(e, BridgeException)
+                    and e.classname == "PasswordError"
+                    and self.args.pwd is None
+                ):
                     if password is not None:
                         self.disp(A.color(C.A_WARNING, _("invalid password")))
-                    password = await self.a_pwd_input(
-                        _("please enter profile password:"))
+                    password = await self.a_pwd_input(_("please enter profile password:"))
                 else:
                     self.disp(err_msg.format(profile=self.profile, e=e), error=True)
                     self.quit(C.EXIT_ERROR)
@@ -938,8 +1043,9 @@
 
         if not self.profile:
             log.error(
-                _("The profile [{profile}] doesn't exist")
-                .format(profile=self.args.profile)
+                _("The profile [{profile}] doesn't exist").format(
+                    profile=self.args.profile
+                )
             )
             self.quit(C.EXIT_ERROR)
 
@@ -951,38 +1057,40 @@
             if start_session:
                 await self.connect_or_prompt(
                     lambda pwd: self.bridge.profile_start_session(pwd, self.profile),
-                    err_msg="Can't start {profile}'s session: {e}"
+                    err_msg="Can't start {profile}'s session: {e}",
                 )
                 return
             elif not await self.bridge.profile_is_session_started(self.profile):
                 if not self.args.connect:
-                    self.disp(_(
-                        "Session for [{profile}] is not started, please start it "
-                        "before using libervia-cli, or use either --start-session or "
-                        "--connect option"
-                        .format(profile=self.profile)
-                    ), error=True)
+                    self.disp(
+                        _(
+                            "Session for [{profile}] is not started, please start it "
+                            "before using libervia-cli, or use either --start-session or "
+                            "--connect option".format(profile=self.profile)
+                        ),
+                        error=True,
+                    )
                     self.quit(1)
             elif not getattr(self.args, "connect", False):
                 return
 
-
-        if not hasattr(self.args, 'connect'):
+        if not hasattr(self.args, "connect"):
             # a profile can be present without connect option (e.g. on profile
             # creation/deletion)
             return
         elif self.args.connect is True:  # if connection is asked, we connect the profile
             await self.connect_or_prompt(
                 lambda pwd: self.bridge.connect(self.profile, pwd, {}),
-                err_msg = 'Can\'t connect profile "{profile!s}": {e}'
+                err_msg='Can\'t connect profile "{profile!s}": {e}',
             )
             return
         else:
             if not await self.bridge.is_connected(self.profile):
                 log.error(
-                    _("Profile [{profile}] is not connected, please connect it "
-                      "before using libervia-cli, or use --connect option")
-                    .format(profile=self.profile)
+                    _(
+                        "Profile [{profile}] is not connected, please connect it "
+                        "before using libervia-cli, or use --connect option"
+                    ).format(profile=self.profile)
                 )
                 self.quit(1)
 
@@ -992,7 +1100,7 @@
         #   as backend now handle jingles message initiation
         _jid = JID(param_jid)
         if not _jid.resource:
-            #if the resource is not given, we try to add the main resource
+            # if the resource is not given, we try to add the main resource
             main_resource = await self.bridge.main_resource_get(param_jid, self.profile)
             if main_resource:
                 return f"{_jid.bare}/{main_resource}"
@@ -1017,7 +1125,7 @@
         extra_outputs: Optional[dict] = None,
         need_connect: Optional[bool] = None,
         help: Optional[str] = None,
-        **kwargs
+        **kwargs,
     ):
         """Initialise CommandBase
 
@@ -1051,20 +1159,21 @@
                 C.ITEM: item is required
                 C.SINGLE_ITEM: only one item is allowed
         """
-        try: # If we have subcommands, host is a CommandBase and we need to use host.host
+        try:  # If we have subcommands, host is a CommandBase and we need to use host.host
             self.host = host.host
         except AttributeError:
             self.host = host
 
         # --profile option
-        parents = kwargs.setdefault('parents', set())
+        parents = kwargs.setdefault("parents", set())
         if use_profile:
             # self.host.parents['profile'] is an ArgumentParser with profile connection
             # arguments
             if need_connect is None:
                 need_connect = True
             parents.add(
-                self.host.parents['profile' if need_connect else 'profile_session'])
+                self.host.parents["profile" if need_connect else "profile_session"]
+            )
         else:
             assert need_connect is None
         self.need_connect = need_connect
@@ -1085,37 +1194,47 @@
             choices.update(extra_outputs)
             if not choices:
                 raise exceptions.InternalError(
-                    "No choice found for {} output type".format(use_output))
+                    "No choice found for {} output type".format(use_output)
+                )
             try:
                 default = self.host.default_output[use_output]
             except KeyError:
-                if 'default' in choices:
-                    default = 'default'
-                elif 'simple' in choices:
-                    default = 'simple'
+                if "default" in choices:
+                    default = "default"
+                elif "simple" in choices:
+                    default = "simple"
                 else:
                     default = list(choices)[0]
             output_parent.add_argument(
-                '--output', '-O', choices=sorted(choices), default=default,
-                help=_("select output format (default: {})".format(default)))
+                "--output",
+                "-O",
+                choices=sorted(choices),
+                default=default,
+                help=_("select output format (default: {})".format(default)),
+            )
             output_parent.add_argument(
-                '--output-option', '--oo', action="append", dest='output_opts',
-                default=[], help=_("output specific option"))
+                "--output-option",
+                "--oo",
+                action="append",
+                dest="output_opts",
+                default=[],
+                help=_("output specific option"),
+            )
             parents.add(output_parent)
         else:
             assert extra_outputs is None
 
-        self._use_pubsub = kwargs.pop('use_pubsub', False)
+        self._use_pubsub = kwargs.pop("use_pubsub", False)
         if self._use_pubsub:
-            flags = kwargs.pop('pubsub_flags', [])
-            defaults = kwargs.pop('pubsub_defaults', {})
+            flags = kwargs.pop("pubsub_flags", [])
+            defaults = kwargs.pop("pubsub_defaults", {})
             parents.add(self.host.make_pubsub_group(flags, defaults))
             self._pubsub_flags = flags
 
         # other common options
-        use_opts = {k:v for k,v in kwargs.items() if k.startswith('use_')}
+        use_opts = {k: v for k, v in kwargs.items() if k.startswith("use_")}
         for param, do_use in use_opts.items():
-            opt=param[4:] # if param is use_verbose, opt is verbose
+            opt = param[4:]  # if param is use_verbose, opt is verbose
             if opt not in self.host.parents:
                 raise exceptions.InternalError("Unknown parent option {}".format(opt))
             del kwargs[param]
@@ -1124,7 +1243,7 @@
 
         self.parser = host.subparsers.add_parser(name, help=help, **kwargs)
         if hasattr(self, "subcommands"):
-            self.subparsers = self.parser.add_subparsers(dest='subcommand', required=True)
+            self.subparsers = self.parser.add_subparsers(dest="subcommand", required=True)
         else:
             self.parser.set_defaults(_cmd=self)
         self.add_parser_options()
@@ -1198,7 +1317,7 @@
             return
         if uid == self.progress_id:
             if self.args.progress:
-                self.disp('') # progress is not finished, so we skip a line
+                self.disp("")  # progress is not finished, so we skip a line
             if self.host.quit_on_progress_end:
                 await self.on_progress_error(message)
                 self.host.quit_from_signal(C.EXIT_ERROR)
@@ -1211,13 +1330,16 @@
         data = await self.host.bridge.progress_get(self.progress_id, self.profile)
         if data:
             try:
-                size = data['size']
+                size = data["size"]
             except KeyError:
-                self.disp(_("file size is not known, we can't show a progress bar"), 1,
-                          error=True)
+                self.disp(
+                    _("file size is not known, we can't show a progress bar"),
+                    1,
+                    error=True,
+                )
                 return False
             if self.host.pbar is None:
-                #first answer, we must construct the bar
+                # first answer, we must construct the bar
 
                 # if the instance has a pbar_template attribute, it is used has model,
                 # else default one is used
@@ -1228,8 +1350,14 @@
                     template = self.pbar_template
                 except AttributeError:
                     template = [
-                        _("Progress: "), ["Percentage"], " ", ["Bar"], " ",
-                        ["FileTransferSpeed"], " ", ["ETA"]
+                        _("Progress: "),
+                        ["Percentage"],
+                        " ",
+                        ["Bar"],
+                        " ",
+                        ["FileTransferSpeed"],
+                        " ",
+                        ["ETA"],
                     ]
 
                 widgets = []
@@ -1240,10 +1368,12 @@
                         widget = getattr(progressbar, part.pop(0))
                         widgets.append(widget(*part))
 
-                self.host.pbar = progressbar.ProgressBar(max_value=int(size), widgets=widgets)
+                self.host.pbar = progressbar.ProgressBar(
+                    max_value=int(size), widgets=widgets
+                )
                 self.host.pbar.start()
 
-            self.host.pbar.update(int(data['position']))
+            self.host.pbar.update(int(data["position"]))
 
         elif self.host.pbar is not None:
             return False
@@ -1283,7 +1413,7 @@
         """
         self.disp(_("Error while doing operation: {e}").format(e=e), error=True)
 
-    def disp(self, msg, verbosity=0, error=False, end='\n'):
+    def disp(self, msg, verbosity=0, error=False, end="\n"):
         return self.host.disp(msg, verbosity, error, end)
 
     def output(self, data):
@@ -1291,7 +1421,8 @@
             output_type = self._output_type
         except AttributeError:
             raise exceptions.InternalError(
-                _('trying to use output when use_output has not been set'))
+                _("trying to use output when use_output has not been set")
+            )
         return self.host.output(output_type, self.args.output, self.extra_outputs, data)
 
     def get_pubsub_extra(self, extra: Optional[dict] = None) -> str:
@@ -1307,27 +1438,30 @@
             if intersection:
                 raise exceptions.ConflictError(
                     "given extra dict has conflicting keys with pubsub keys "
-                    "{intersection}".format(intersection=intersection))
+                    "{intersection}".format(intersection=intersection)
+                )
 
         # RSM
 
-        for attribute in ('max', 'after', 'before', 'index'):
-            key = 'rsm_' + attribute
+        for attribute in ("max", "after", "before", "index"):
+            key = "rsm_" + attribute
             if key in extra:
                 raise exceptions.ConflictError(
-                    "This key already exists in extra: u{key}".format(key=key))
+                    "This key already exists in extra: u{key}".format(key=key)
+                )
             value = getattr(self.args, key, None)
             if value is not None:
                 extra[key] = str(value)
 
         # MAM
 
-        if hasattr(self.args, 'mam_filters'):
+        if hasattr(self.args, "mam_filters"):
             for key, value in self.args.mam_filters:
-                key = 'filter_' + key
+                key = "filter_" + key
                 if key in extra:
                     raise exceptions.ConflictError(
-                        "This key already exists in extra: u{key}".format(key=key))
+                        "This key already exists in extra: u{key}".format(key=key)
+                    )
                 extra[key] = value
 
         # Order-By
@@ -1390,11 +1524,14 @@
             # we need to register the following signal even if we don't display the
             # progress bar
             self.host.bridge.register_signal(
-                "progress_started", self.progress_started_handler)
+                "progress_started", self.progress_started_handler
+            )
             self.host.bridge.register_signal(
-                "progress_finished", self.progress_finished_handler)
+                "progress_finished", self.progress_finished_handler
+            )
             self.host.bridge.register_signal(
-                "progress_error", self.progress_error_handler)
+                "progress_error", self.progress_error_handler
+            )
 
         if self.need_connect is not None:
             await self.host.connect_profile()
@@ -1413,26 +1550,23 @@
 
     to manage action_types answer,
     """
-    action_callbacks = {} # XXX: set managed action types in a dict here:
-                          # key is the action_type, value is the callable
-                          # which will manage the answer. profile filtering is
-                          # already managed when callback is called
+
+    action_callbacks = {}  # XXX: set managed action types in a dict here:
+    # key is the action_type, value is the callable
+    # which will manage the answer. profile filtering is
+    # already managed when callback is called
 
     def __init__(self, *args, **kwargs):
         super(CommandAnswering, self).__init__(*args, **kwargs)
 
     async def on_action_new(
-        self,
-        action_data_s: str,
-        action_id: str,
-        security_limit: int,
-        profile: str
+        self, action_data_s: str, action_id: str, security_limit: int, profile: str
     ) -> None:
         if profile != self.profile:
             return
         action_data = data_format.deserialise(action_data_s)
         try:
-            action_type = action_data['type']
+            action_type = action_data["type"]
         except KeyError:
             try:
                 xml_ui = action_data["xmlui"]
@@ -1456,7 +1590,7 @@
         # FIXME: we temporarily use ElementTree, but a real XMLUI managing module
         #        should be available in the future
         # TODO: XMLUI module
-        ui = ET.fromstring(xml_ui.encode('utf-8'))
+        ui = ET.fromstring(xml_ui.encode("utf-8"))
         dialog = ui.find("dialog")
         if dialog is not None:
             self.disp(dialog.findtext("message"), error=dialog.get("level") == "error")
@@ -1466,4 +1600,6 @@
         self.host.bridge.register_signal("action_new", self.on_action_new)
         actions = await self.host.bridge.actions_get(self.profile)
         for action_data_s, action_id, security_limit in actions:
-            await self.on_action_new(action_data_s, action_id, security_limit, self.profile)
+            await self.on_action_new(
+                action_data_s, action_id, security_limit, self.profile
+            )
--- a/libervia/cli/call_gui.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/call_gui.py	Wed Jun 19 18:44:57 2024 +0200
@@ -51,14 +51,11 @@
 
 from libervia.backend.core.i18n import _
 from libervia.frontends.tools import aio, display_servers, webrtc
-gi.require_versions({
-    "Gst": "1.0",
-    "GstWebRTC": "1.0"
-})
+
+gi.require_versions({"Gst": "1.0", "GstWebRTC": "1.0"})
 from gi.repository import Gst
 
 
-
 ICON_SIZE = QSize(45, 45)
 BUTTON_SIZE = QSize(50, 50)
 running = False
@@ -389,7 +386,7 @@
             self.fullscreen_btn.setIcon(self.fullscreen_icon_normal)
             self.showNormal()
 
-    def closeEvent(self, a0: QCloseEvent|None) -> None:
+    def closeEvent(self, a0: QCloseEvent | None) -> None:
         super().closeEvent(a0)
         global running
         running = False
--- a/libervia/cli/call_simple.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/call_simple.py	Wed Jun 19 18:44:57 2024 +0200
@@ -88,7 +88,9 @@
     def get_video_display(self):
         assert self.webrtc is not None
         if self.webrtc.video_muted:
-            return Panel(Text("❌ ") + self.styled_shortcut_key("Video Off"), expand=False)
+            return Panel(
+                Text("❌ ") + self.styled_shortcut_key("Video Off"), expand=False
+            )
         else:
             return Panel(Text("🎥 ") + self.styled_shortcut_key("Video On"), expand=False)
 
--- a/libervia/cli/call_tui.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/call_tui.py	Wed Jun 19 18:44:57 2024 +0200
@@ -114,7 +114,7 @@
         if self.render_class is None:
             self.render_class = t_image.auto_image_class()
 
-        loop_sleep = 1/self.fps
+        loop_sleep = 1 / self.fps
 
         with self.input.raw_mode():
             # for whatever reason, using self.input.attach is breaking KittyImage and uses
--- a/libervia/cli/cmd_account.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_account.py	Wed Jun 19 18:44:57 2024 +0200
@@ -43,12 +43,8 @@
         )
 
     def add_parser_options(self):
-        self.parser.add_argument(
-            "jid", help=_("jid to create")
-        )
-        self.parser.add_argument(
-            "password", help=_("password of the account")
-        )
+        self.parser.add_argument("jid", help=_("jid to create"))
+        self.parser.add_argument("password", help=_("password of the account"))
         self.parser.add_argument(
             "-p",
             "--profile",
@@ -73,9 +69,7 @@
             "--port",
             type=int,
             default=0,
-            help=_("server port (default: {port})").format(
-                port=C.XMPP_C2S_PORT
-            ),
+            help=_("server port (default: {port})").format(port=C.XMPP_C2S_PORT),
         )
 
     async def start(self):
@@ -89,16 +83,15 @@
             )
 
         except BridgeException as e:
-            if e.condition == 'conflict':
-                self.disp(
-                    f"The account {self.args.jid} already exists",
-                    error=True
-                )
+            if e.condition == "conflict":
+                self.disp(f"The account {self.args.jid} already exists", error=True)
                 self.host.quit(C.EXIT_CONFLICT)
             else:
                 self.disp(
                     f"can't create account on {self.args.host or 'localhost'!r} with jid "
-                    f"{self.args.jid!r} using In-Band Registration: {e}", error=True)
+                    f"{self.args.jid!r} using In-Band Registration: {e}",
+                    error=True,
+                )
                 self.host.quit(C.EXIT_BRIDGE_ERRBACK)
         except Exception as e:
             self.disp(f"Internal error: {e}", error=True)
@@ -109,7 +102,6 @@
         if self.args.profile is None:
             self.host.quit()
 
-
         self.disp(_("creating profile"), 2)
         try:
             await self.host.bridge.profile_create(
@@ -118,20 +110,15 @@
                 "",
             )
         except BridgeException as e:
-            if e.condition == 'conflict':
-                self.disp(
-                    f"The profile {self.args.profile} already exists",
-                    error=True
-                )
+            if e.condition == "conflict":
+                self.disp(f"The profile {self.args.profile} already exists", error=True)
                 self.host.quit(C.EXIT_CONFLICT)
             else:
                 self.disp(
-                    _("Can't create profile {profile} to associate with jid "
-                      "{jid}: {e}").format(
-                          profile=self.args.profile,
-                          jid=self.args.jid,
-                          e=e
-                      ),
+                    _(
+                        "Can't create profile {profile} to associate with jid "
+                        "{jid}: {e}"
+                    ).format(profile=self.args.profile, jid=self.args.jid, e=e),
                     error=True,
                 )
                 self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -173,7 +160,9 @@
 
         self.disp(
             f"profile {self.args.profile} successfully created and associated to the new "
-            f"account", 1)
+            f"account",
+            1,
+        )
         self.host.quit()
 
 
@@ -184,9 +173,7 @@
         )
 
     def add_parser_options(self):
-        self.parser.add_argument(
-            "password", help=_("new XMPP password")
-        )
+        self.parser.add_argument("password", help=_("new XMPP password"))
 
     async def start(self):
         try:
--- a/libervia/cli/cmd_adhoc.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_adhoc.py	Wed Jun 19 18:44:57 2024 +0200
@@ -97,8 +97,9 @@
                 for method in methods:
                     path, iface, command = method
                     self.disp(
-                        _("Command found: (path:{path}, iface: {iface}) [{command}]")
-                        .format(path=path, iface=iface, command=command),
+                        _(
+                            "Command found: (path:{path}, iface: {iface}) [{command}]"
+                        ).format(path=path, iface=iface, command=command),
                         1,
                     )
                 self.host.quit()
--- a/libervia/cli/cmd_application.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_application.py	Wed Jun 19 18:44:57 2024 +0200
@@ -29,8 +29,11 @@
 
     def __init__(self, host):
         super(List, self).__init__(
-            host, "list", use_profile=False, use_output=C.OUTPUT_LIST,
-            help=_("list available applications")
+            host,
+            "list",
+            use_profile=False,
+            use_output=C.OUTPUT_LIST,
+            help=_("list available applications"),
         )
 
     def add_parser_options(self):
@@ -52,7 +55,7 @@
         if self.args.filters:
             self.args.filters = list(set(self.args.filters))
         else:
-            self.args.filters = ['available']
+            self.args.filters = ["available"]
 
         try:
             found_apps = await self.host.bridge.applications_list(self.args.filters)
@@ -123,12 +126,12 @@
             )
         except Exception as e:
             if self.args.name is not None:
-                self.disp(
-                    f"can't stop application {self.args.name!r}: {e}", error=True)
+                self.disp(f"can't stop application {self.args.name!r}: {e}", error=True)
             else:
                 self.disp(
                     f"can't stop application instance with id {self.args.id!r}: {e}",
-                    error=True)
+                    error=True,
+                )
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
         else:
             self.host.quit()
@@ -138,8 +141,11 @@
 
     def __init__(self, host):
         super(Exposed, self).__init__(
-            host, "exposed", use_profile=False, use_output=C.OUTPUT_DICT,
-            help=_("show data exposed by a running application")
+            host,
+            "exposed",
+            use_profile=False,
+            use_output=C.OUTPUT_DICT,
+            help=_("show data exposed by a running application"),
         )
 
     def add_parser_options(self):
@@ -169,11 +175,13 @@
             if self.args.name is not None:
                 self.disp(
                     f"can't get values exposed from application {self.args.name!r}: {e}",
-                    error=True)
+                    error=True,
+                )
             else:
                 self.disp(
                     f"can't values exposed from  application instance with id {self.args.id!r}: {e}",
-                    error=True)
+                    error=True,
+                )
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
         else:
             exposed_data = data_format.deserialise(exposed_data_raw)
@@ -186,6 +194,9 @@
 
     def __init__(self, host):
         super(Application, self).__init__(
-            host, "application", use_profile=False, help=_("manage applications"),
-            aliases=['app'],
+            host,
+            "application",
+            use_profile=False,
+            help=_("manage applications"),
+            aliases=["app"],
         )
--- a/libervia/cli/cmd_avatar.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_avatar.py	Wed Jun 19 18:44:57 2024 +0200
@@ -45,7 +45,7 @@
         self.parser.add_argument(
             "-s", "--show", action="store_true", help=_("show avatar")
         )
-        self.parser.add_argument("jid", nargs='?', default='', help=_("entity"))
+        self.parser.add_argument("jid", nargs="?", default="", help=_("entity"))
 
     async def show_image(self, path):
         sat_conf = config.parse_main_conf()
@@ -87,7 +87,7 @@
             self.disp(_("No avatar found."), 1)
             self.host.quit(C.EXIT_NOT_FOUND)
 
-        avatar_path = avatar_data['path']
+        avatar_path = avatar_data["path"]
 
         self.disp(avatar_path)
         if self.args.show:
@@ -99,13 +99,16 @@
 class Set(base.CommandBase):
     def __init__(self, host):
         super(Set, self).__init__(
-            host, "set", use_verbose=True,
-            help=_("set avatar of the profile or an entity")
+            host,
+            "set",
+            use_verbose=True,
+            help=_("set avatar of the profile or an entity"),
         )
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "-j", "--jid", default='', help=_("entity whose avatar must be changed"))
+            "-j", "--jid", default="", help=_("entity whose avatar must be changed")
+        )
         self.parser.add_argument(
             "image_path", type=str, help=_("path to the image to upload")
         )
--- a/libervia/cli/cmd_blocking.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_blocking.py	Wed Jun 19 18:44:57 2024 +0200
@@ -72,10 +72,7 @@
 
     async def start(self):
         try:
-            await self.host.bridge.blocking_block(
-                self.args.entities,
-                self.profile
-            )
+            await self.host.bridge.blocking_block(self.args.entities, self.profile)
         except Exception as e:
             self.disp(f"can't block entities: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -110,17 +107,14 @@
             if not self.args.force:
                 await self.host.confirm_or_quit(
                     _("All entities will be unblocked, are you sure"),
-                    _("unblock cancelled")
+                    _("unblock cancelled"),
                 )
             self.args.entities.clear()
         elif self.args.force:
             self.parser.error(_('--force is only allowed when "all" is used as target'))
 
         try:
-            await self.host.bridge.blocking_unblock(
-                self.args.entities,
-                self.profile
-            )
+            await self.host.bridge.blocking_unblock(self.args.entities, self.profile)
         except Exception as e:
             self.disp(f"can't unblock entities: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -132,6 +126,4 @@
     subcommands = (List, Block, Unblock)
 
     def __init__(self, host):
-        super().__init__(
-            host, "blocking", use_profile=False, help=_("entities blocking")
-        )
+        super().__init__(host, "blocking", use_profile=False, help=_("entities blocking"))
--- a/libervia/cli/cmd_blog.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_blog.py	Wed Jun 19 18:44:57 2024 +0200
@@ -86,7 +86,7 @@
     "content_xhtml",
     "title",
     "title_xhtml",
-    "extra"
+    "extra",
 )
 OUTPUT_OPT_NO_HEADER = "no-header"
 RE_ATTACHMENT_METADATA = re.compile(r"^(?P<key>[a-z_]+)=(?P<value>.*)")
@@ -130,9 +130,9 @@
                 f"invalid number of argument for {', '.join(self.option_strings)}, it "
                 "must have at most 2 arguments."
             )
-        alt_link = {'url': url}
+        alt_link = {"url": url}
         if media_type is not None:
-            alt_link['media_type'] = media_type
+            alt_link["media_type"] = media_type
         alt_links = getattr(namespace, self.dest)
         if alt_links is None:
             alt_links = []
@@ -189,20 +189,18 @@
             "--attachment",
             dest="attachments",
             nargs="+",
-            help=_(
-                "attachment in the form URL [metadata_name=value]"
-            )
+            help=_("attachment in the form URL [metadata_name=value]"),
         )
 
         self.parser.add_argument(
-            '--alt-link',
+            "--alt-link",
             action=AltLinkAction,
             dest="alt_links",
-            metavar=('URL', 'MEDIA_TYPE'),
+            metavar=("URL", "MEDIA_TYPE"),
             help=(
                 "add an alternative link, you can use {service}, {node} and {item} "
                 "template values in URL"
-            )
+            ),
         )
 
         comments_group = self.parser.add_mutually_exclusive_group()
@@ -228,7 +226,7 @@
         self.parser.add_argument(
             "--no-id-suffix",
             action="store_true",
-            help=_("do no add random suffix to friendly ID")
+            help=_("do no add random suffix to friendly ID"),
         )
 
         self.parser.add_argument(
@@ -240,19 +238,19 @@
             "-e",
             "--encrypt",
             action="store_true",
-            help=_("end-to-end encrypt the blog post")
+            help=_("end-to-end encrypt the blog post"),
         )
         self.parser.add_argument(
             "--encrypt-for",
             metavar="JID",
             action="append",
-            help=_("encrypt a single item for")
+            help=_("encrypt a single item for"),
         )
         self.parser.add_argument(
             "-X",
             "--sign",
             action="store_true",
-            help=_("cryptographically sign the blog post")
+            help=_("cryptographically sign the blog post"),
         )
 
     async def set_mb_data_content(self, content, mb_data):
@@ -301,7 +299,7 @@
                     value = m.group("value").strip()
                     if key == "external":
                         if not value:
-                            value=True
+                            value = True
                         else:
                             value = C.bool(value)
                     attachment[key] = value
@@ -1217,7 +1215,6 @@
         super().__init__(host)
         self.override_pubsub_flags({C.SERVICE, C.SINGLE_ITEM})
 
-
     async def start(self):
         if not self.args.node:
             namespaces = await self.host.bridge.namespaces_get()
--- a/libervia/cli/cmd_call.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_call.py	Wed Jun 19 18:44:57 2024 +0200
@@ -37,7 +37,6 @@
 
 class Common(base.CommandBase):
 
-
     def __init__(self, *args, **kwargs):
         super().__init__(
             *args,
@@ -52,7 +51,7 @@
                 #: experimental TUI output
                 "tui": partial(self.use_output, "tui"),
             },
-            **kwargs
+            **kwargs,
         )
 
     def add_parser_options(self):
@@ -61,8 +60,11 @@
         )
         sources_group = self.parser.add_mutually_exclusive_group()
         sources_group.add_argument(
-            "-s", "--sources", choices=['auto', 'test'], default='auto',
-            help='Well-known sources to use (default: "auto").'
+            "-s",
+            "--sources",
+            choices=["auto", "test"],
+            default="auto",
+            help='Well-known sources to use (default: "auto").',
         )
 
     def get_call_data_kw(self) -> dict[str, Any]:
@@ -70,10 +72,10 @@
         kwargs: dict[str, Any] = {}
         if self.args.sources == "test":
             from libervia.frontends.tools.webrtc_models import SourcesTest
+
             kwargs["sources_data"] = SourcesTest()
         return kwargs
 
-
     async def start(self):
         root_logger = logging.getLogger()
         # we don't want any formatting for messages from webrtc
@@ -133,10 +135,9 @@
 
     async def start(self):
         await super().start()
-        await super().output(CallData(
-            callee=jid.JID(self.args.entity),
-            kwargs=self.get_call_data_kw()
-        ))
+        await super().output(
+            CallData(callee=jid.JID(self.args.entity), kwargs=self.get_call_data_kw())
+        )
 
 
 class Receive(Common):
@@ -157,12 +158,14 @@
             action="append",
             metavar="JID",
             default=[],
-            help=_("automatically accept call from this jid (can be used multiple times)")
+            help=_(
+                "automatically accept call from this jid (can be used multiple times)"
+            ),
         )
         auto_accept_group.add_argument(
             "--auto-accept-all",
             action="store_true",
-            help=_("automatically accept call from anybody")
+            help=_("automatically accept call from anybody"),
         )
 
     async def on_action_new(
@@ -179,9 +182,7 @@
             not self.args.auto_accept_all
             and caller not in self.args.auto_accept
             and not await self.host.confirm(
-                _("📞 Incoming call from {caller}, do you accept?").format(
-                    caller=caller
-                )
+                _("📞 Incoming call from {caller}, do you accept?").format(caller=caller)
             )
         ):
             await self.host.bridge.action_launch(
@@ -191,12 +192,14 @@
 
         self.disp(_("✅ Incoming call from {caller} accepted.").format(caller=caller))
 
-        await super().output(CallData(
-            callee=peer_jid,
-            sid=action_data["session_id"],
-            action_id=action_id,
-            kwargs=self.get_call_data_kw()
-        ))
+        await super().output(
+            CallData(
+                callee=peer_jid,
+                sid=action_data["session_id"],
+                action_id=action_id,
+                kwargs=self.get_call_data_kw(),
+            )
+        )
 
     async def start(self):
         await super().start()
--- a/libervia/cli/cmd_encryption.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_encryption.py	Wed Jun 19 18:44:57 2024 +0200
@@ -31,11 +31,13 @@
     def __init__(self, host):
         extra_outputs = {"default": self.default_output}
         super(EncryptionAlgorithms, self).__init__(
-            host, "algorithms",
+            host,
+            "algorithms",
             use_output=C.OUTPUT_LIST_DICT,
             extra_outputs=extra_outputs,
             use_profile=False,
-            help=_("show available encryption algorithms"))
+            help=_("show available encryption algorithms"),
+        )
 
     def add_parser_options(self):
         pass
@@ -44,8 +46,11 @@
         if not plugins:
             self.disp(_("No encryption plugin registered!"))
         else:
-            self.disp(_("Following encryption algorithms are available: {algos}").format(
-                algos=', '.join([p['name'] for p in plugins])))
+            self.disp(
+                _("Following encryption algorithms are available: {algos}").format(
+                    algos=", ".join([p["name"] for p in plugins])
+                )
+            )
 
     async def start(self):
         try:
@@ -63,15 +68,11 @@
 
     def __init__(self, host):
         super(EncryptionGet, self).__init__(
-            host, "get",
-            use_output=C.OUTPUT_DICT,
-            help=_("get encryption session data"))
+            host, "get", use_output=C.OUTPUT_DICT, help=_("get encryption session data")
+        )
 
     def add_parser_options(self):
-        self.parser.add_argument(
-            "jid",
-            help=_("jid of the entity to check")
-        )
+        self.parser.add_argument("jid", help=_("jid of the entity to check"))
 
     async def start(self):
         jids = await self.host.check_jids([self.args.jid])
@@ -84,8 +85,7 @@
 
         session_data = data_format.deserialise(serialised)
         if session_data is None:
-            self.disp(
-                "No encryption session found, the messages are sent in plain text.")
+            self.disp("No encryption session found, the messages are sent in plain text.")
             self.host.quit(C.EXIT_NOT_FOUND)
         await self.output(session_data)
         self.host.quit()
@@ -95,29 +95,34 @@
 
     def __init__(self, host):
         super(EncryptionStart, self).__init__(
-            host, "start",
-            help=_("start encrypted session with an entity"))
+            host, "start", help=_("start encrypted session with an entity")
+        )
 
     def add_parser_options(self):
         self.parser.add_argument(
             "--encrypt-noreplace",
             action="store_true",
-            help=_("don't replace encryption algorithm if an other one is already used"))
+            help=_("don't replace encryption algorithm if an other one is already used"),
+        )
         algorithm = self.parser.add_mutually_exclusive_group()
         algorithm.add_argument(
-            "-n", "--name", help=_("algorithm name (DEFAULT: choose automatically)"))
+            "-n", "--name", help=_("algorithm name (DEFAULT: choose automatically)")
+        )
         algorithm.add_argument(
-            "-N", "--namespace",
-            help=_("algorithm namespace (DEFAULT: choose automatically)"))
+            "-N",
+            "--namespace",
+            help=_("algorithm namespace (DEFAULT: choose automatically)"),
+        )
         self.parser.add_argument(
-            "jid",
-            help=_("jid of the entity to stop encrypted session with")
+            "jid", help=_("jid of the entity to stop encrypted session with")
         )
 
     async def start(self):
         if self.args.name is not None:
             try:
-                namespace = await self.host.bridge.encryption_namespace_get(self.args.name)
+                namespace = await self.host.bridge.encryption_namespace_get(
+                    self.args.name
+                )
             except Exception as e:
                 self.disp(f"can't get encryption namespace: {e}", error=True)
                 self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -131,8 +136,8 @@
 
         try:
             await self.host.bridge.message_encryption_start(
-                jid, namespace, not self.args.encrypt_noreplace,
-                self.profile)
+                jid, namespace, not self.args.encrypt_noreplace, self.profile
+            )
         except Exception as e:
             self.disp(f"can't get encryption namespace: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -144,13 +149,12 @@
 
     def __init__(self, host):
         super(EncryptionStop, self).__init__(
-            host, "stop",
-            help=_("stop encrypted session with an entity"))
+            host, "stop", help=_("stop encrypted session with an entity")
+        )
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "jid",
-            help=_("jid of the entity to stop encrypted session with")
+            "jid", help=_("jid of the entity to stop encrypted session with")
         )
 
     async def start(self):
@@ -168,26 +172,28 @@
 class TrustUI(base.CommandBase):
 
     def __init__(self, host):
-        super(TrustUI, self).__init__(
-            host, "ui",
-            help=_("get UI to manage trust"))
+        super(TrustUI, self).__init__(host, "ui", help=_("get UI to manage trust"))
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "jid",
-            help=_("jid of the entity to stop encrypted session with")
+            "jid", help=_("jid of the entity to stop encrypted session with")
         )
         algorithm = self.parser.add_mutually_exclusive_group()
         algorithm.add_argument(
-            "-n", "--name", help=_("algorithm name (DEFAULT: current algorithm)"))
+            "-n", "--name", help=_("algorithm name (DEFAULT: current algorithm)")
+        )
         algorithm.add_argument(
-            "-N", "--namespace",
-            help=_("algorithm namespace (DEFAULT: current algorithm)"))
+            "-N",
+            "--namespace",
+            help=_("algorithm namespace (DEFAULT: current algorithm)"),
+        )
 
     async def start(self):
         if self.args.name is not None:
             try:
-                namespace = await self.host.bridge.encryption_namespace_get(self.args.name)
+                namespace = await self.host.bridge.encryption_namespace_get(
+                    self.args.name
+                )
             except Exception as e:
                 self.disp(f"can't get encryption namespace: {e}", error=True)
                 self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -201,7 +207,8 @@
 
         try:
             xmlui_raw = await self.host.bridge.encryption_trust_ui_get(
-                jid, namespace, self.profile)
+                jid, namespace, self.profile
+            )
         except Exception as e:
             self.disp(f"can't get encryption session trust UI: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -212,6 +219,7 @@
             await xmlui.submit_form()
         self.host.quit()
 
+
 class EncryptionTrust(base.CommandBase):
     subcommands = (TrustUI,)
 
@@ -222,8 +230,13 @@
 
 
 class Encryption(base.CommandBase):
-    subcommands = (EncryptionAlgorithms, EncryptionGet, EncryptionStart, EncryptionStop,
-                   EncryptionTrust)
+    subcommands = (
+        EncryptionAlgorithms,
+        EncryptionGet,
+        EncryptionStart,
+        EncryptionStop,
+        EncryptionTrust,
+    )
 
     def __init__(self, host):
         super(Encryption, self).__init__(
--- a/libervia/cli/cmd_event.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_event.py	Wed Jun 19 18:44:57 2024 +0200
@@ -87,16 +87,22 @@
                 start, "medium", tz_info=date_utils.TZ_LOCAL
             )
             end = event["end"]
-            self.disp(A.color(
-                A.BOLD, start_human, A.RESET, " ",
-                f"({date_utils.delta2human(start, end)}) ",
-                C.A_HEADER, name
-            ))
+            self.disp(
+                A.color(
+                    A.BOLD,
+                    start_human,
+                    A.RESET,
+                    " ",
+                    f"({date_utils.delta2human(start, end)}) ",
+                    C.A_HEADER,
+                    name,
+                )
+            )
             if self.verbosity > 0:
                 descriptions = event.get("descriptions", [])
                 if descriptions:
                     self.disp(descriptions[0]["description"])
-            if idx < (nb_events-1):
+            if idx < (nb_events - 1):
                 self.disp("")
 
 
@@ -106,7 +112,7 @@
         if nargs is not None or metavar is not None:
             raise ValueError("nargs and metavar must not be used")
         if metavar is not None:
-            metavar="TERM WIKIDATA_ID LANG"
+            metavar = "TERM WIKIDATA_ID LANG"
         if "--help" in sys.argv:
             # FIXME: dirty workaround to have correct --help message
             #   argparse doesn't normally allow variable number of arguments beside "+"
@@ -128,9 +134,7 @@
         if not values:
             parser.error("category values must be set")
 
-        category = {
-            "term": values[0]
-        }
+        category = {"term": values[0]}
 
         if len(values) == 1:
             pass
@@ -153,14 +157,21 @@
 class EventBase:
     def add_parser_options(self):
         self.parser.add_argument(
-            "-S", "--start", type=base.date_decoder, metavar="TIME_PATTERN",
-            help=_("the start time of the event"))
+            "-S",
+            "--start",
+            type=base.date_decoder,
+            metavar="TIME_PATTERN",
+            help=_("the start time of the event"),
+        )
         end_group = self.parser.add_mutually_exclusive_group()
         end_group.add_argument(
-            "-E", "--end", type=base.date_decoder, metavar="TIME_PATTERN",
-            help=_("the time of the end of the event"))
-        end_group.add_argument(
-            "-D", "--duration", help=_("duration of the event"))
+            "-E",
+            "--end",
+            type=base.date_decoder,
+            metavar="TIME_PATTERN",
+            help=_("the time of the end of the event"),
+        )
+        end_group.add_argument("-D", "--duration", help=_("duration of the event"))
         self.parser.add_argument(
             "-H", "--head-picture", help="URL to a picture to use as head-picture"
         )
@@ -168,47 +179,65 @@
             "-d", "--description", help="plain text description the event"
         )
         self.parser.add_argument(
-            "-C", "--category", action=CategoryAction, dest="categories",
-            help="Category of the event"
+            "-C",
+            "--category",
+            action=CategoryAction,
+            dest="categories",
+            help="Category of the event",
         )
         self.parser.add_argument(
-            "-l", "--location", action="append", nargs="+", metavar="[KEY] VALUE",
-            help="Location metadata"
+            "-l",
+            "--location",
+            action="append",
+            nargs="+",
+            metavar="[KEY] VALUE",
+            help="Location metadata",
         )
         rsvp_group = self.parser.add_mutually_exclusive_group()
         rsvp_group.add_argument(
-            "--rsvp", action="store_true", help=_("RSVP is requested"))
+            "--rsvp", action="store_true", help=_("RSVP is requested")
+        )
         rsvp_group.add_argument(
-            "--rsvp_json", metavar="JSON", help=_("JSON description of RSVP form"))
+            "--rsvp_json", metavar="JSON", help=_("JSON description of RSVP form")
+        )
         for node_type in ("invitees", "comments", "blog", "schedule"):
             self.parser.add_argument(
                 f"--{node_type}",
                 nargs=2,
                 metavar=("JID", "NODE"),
-                help=_("link {node_type} pubsub node").format(node_type=node_type)
+                help=_("link {node_type} pubsub node").format(node_type=node_type),
             )
         self.parser.add_argument(
-            "-a", "--attachment", action="append", dest="attachments",
-            help=_("attach a file")
+            "-a",
+            "--attachment",
+            action="append",
+            dest="attachments",
+            help=_("attach a file"),
         )
         self.parser.add_argument("--website", help=_("website of the event"))
         self.parser.add_argument(
-            "--status", choices=["confirmed", "tentative", "cancelled"],
-            help=_("status of the event")
+            "--status",
+            choices=["confirmed", "tentative", "cancelled"],
+            help=_("status of the event"),
         )
         self.parser.add_argument(
-            "-T", "--language", metavar="LANG", action="append", dest="languages",
-            help=_("main languages spoken at the event")
+            "-T",
+            "--language",
+            metavar="LANG",
+            action="append",
+            dest="languages",
+            help=_("main languages spoken at the event"),
         )
         self.parser.add_argument(
-            "--wheelchair", choices=["full", "partial", "no"],
-            help=_("is the location accessible by wheelchair")
+            "--wheelchair",
+            choices=["full", "partial", "no"],
+            help=_("is the location accessible by wheelchair"),
         )
         self.parser.add_argument(
             "--external",
             nargs=3,
             metavar=("JID", "NODE", "ITEM"),
-            help=_("link to an external event")
+            help=_("link to an external event"),
         )
 
     def get_event_data(self):
@@ -229,17 +258,10 @@
             event["end"] = self.args.end
 
         if self.args.head_picture:
-            event["head-picture"] = {
-                "sources": [{
-                    "url": self.args.head_picture
-                }]
-            }
+            event["head-picture"] = {"sources": [{"url": self.args.head_picture}]}
         if self.args.description:
             event["descriptions"] = [
-                {
-                    "type": "text",
-                    "description": self.args.description
-                }
+                {"type": "text", "description": self.args.description}
             ]
         if self.args.categories:
             event["categories"] = self.args.categories
@@ -270,9 +292,7 @@
         if self.args.attachments:
             attachments = event["attachments"] = []
             for attachment in self.args.attachments:
-                attachments.append({
-                    "sources": [{"url": attachment}]
-                })
+                attachments.append({"sources": [{"url": attachment}]})
 
         extra = {}
 
@@ -288,11 +308,7 @@
 
         if self.args.external:
             ext_jid, ext_node, ext_item = self.args.external
-            event["external"] = {
-                "jid": ext_jid,
-                "node": ext_node,
-                "item": ext_item
-            }
+            event["external"] = {"jid": ext_jid, "node": ext_node, "item": ext_item}
         return event
 
 
@@ -388,8 +404,12 @@
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "-j", "--jid", action="append", dest="jids", default=[],
-            help=_("only retrieve RSVP from those JIDs")
+            "-j",
+            "--jid",
+            action="append",
+            dest="jids",
+            default=[],
+            help=_("only retrieve RSVP from those JIDs"),
         )
 
     async def start(self):
--- a/libervia/cli/cmd_file.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_file.py	Wed Jun 19 18:44:57 2024 +0200
@@ -80,12 +80,10 @@
             "-e",
             "--encrypt",
             action="store_true",
-            help=_("end-to-end encrypt the file transfer")
+            help=_("end-to-end encrypt the file transfer"),
         )
         self.parser.add_argument(
-            "--webrtc",
-            action="store_true",
-            help=_("Use WebRTC Data Channel transport.")
+            "--webrtc", action="store_true", help=_("Use WebRTC Data Channel transport.")
         )
 
     async def on_progress_started(self, metadata):
@@ -111,7 +109,6 @@
             self.disp(_("Can't send file to {jid}".format(jid=self.args.jid)), error=True)
             self.host.quit(2)
 
-
     async def start(self):
         file_ = None
         for file_ in self.args.files:
@@ -170,17 +167,16 @@
                 if self.verbosity >= 2:
                     root_logger.setLevel(logging.DEBUG)
                 from libervia.frontends.tools.webrtc_file import WebRTCFileSender
+
                 aio.install_glib_asyncio_iteration()
                 file_sender = WebRTCFileSender(
                     self.host.bridge,
                     self.profile,
                     on_call_start_cb=self.got_id,
-                    end_call_cb=self.host.a_quit
+                    end_call_cb=self.host.a_quit,
                 )
                 await file_sender.send_file_webrtc(
-                    file_path,
-                    self.args.jid,
-                    self.args.name
+                    file_path, self.args.jid, self.args.name
                 )
             else:
                 try:
@@ -319,7 +315,9 @@
             use_verbose=True,
             help=_("wait for a file to be sent by a contact"),
         )
-        self._overwrite_refused = False  # True when one overwrite has already been refused
+        self._overwrite_refused = (
+            False  # True when one overwrite has already been refused
+        )
         self.action_callbacks = {
             C.META_TYPE_CONFIRM: self.on_confirm_action,
             C.META_TYPE_FILE: self.on_file_action,
@@ -377,12 +375,10 @@
             await self.host.a_quit()
 
     async def on_webrtc_file(
-        self,
-        from_jid: jid.JID,
-        session_id: str,
-        file_data: dict
+        self, from_jid: jid.JID, session_id: str, file_data: dict
     ) -> None:
         from libervia.frontends.tools.webrtc_file import WebRTCFileReceiver
+
         aio.install_glib_asyncio_iteration()
         root_logger = logging.getLogger()
         # we don't want any formatting for messages from webrtc
@@ -401,29 +397,20 @@
             filename = file_data.get("name", "unammed_file")
             dest_path /= filename
             if dest_path.exists() and not self.args.force:
-                self.host.disp(
-                    "Destination file already exists",
-                    error=True
-                )
+                self.host.disp("Destination file already exists", error=True)
                 aio.run_from_thread(
                     self.host.a_quit, C.EXIT_ERROR, loop=self.host.loop.loop
                 )
                 return
 
         file_receiver = WebRTCFileReceiver(
-            self.host.bridge,
-            self.profile,
-            on_close_cb=self._on_webrtc_close
+            self.host.bridge, self.profile, on_close_cb=self._on_webrtc_close
         )
 
         await file_receiver.receive_file_webrtc(
-            from_jid,
-            session_id,
-            dest_path,
-            file_data
+            from_jid, session_id, dest_path, file_data
         )
 
-
     def get_xmlui_id(self, action_data):
         # FIXME: we temporarily use ElementTree, but a real XMLUI managing module
         #        should be available in the futur
@@ -482,8 +469,9 @@
             if self._overwrite_refused:
                 self.disp(_("File refused because overwrite is needed"), error=True)
                 await self.host.bridge.action_launch(
-                    xmlui_id, data_format.serialise({"cancelled": C.BOOL_TRUE}),
-                    profile_key=profile
+                    xmlui_id,
+                    data_format.serialise({"cancelled": C.BOOL_TRUE}),
+                    profile_key=profile,
                 )
                 return self.host.quit_from_signal(2)
             await self.set_progress_id(progress_id)
@@ -495,11 +483,7 @@
                 except KeyError:
                     self.disp(_("ignoring action without session id"), 1)
                     return
-                await self.on_webrtc_file(
-                    from_jid,
-                    session_id,
-                    file_data
-                )
+                await self.on_webrtc_file(from_jid, session_id, file_data)
 
             else:
                 xmlui_data = {"path": self.path}
@@ -604,8 +588,9 @@
             help=_("overwrite existing file without confirmation"),
         )
         self.parser.add_argument(
-            "attachment", type=str,
-            help=_("URI of the file to retrieve or JSON of the whole attachment")
+            "attachment",
+            type=str,
+            help=_("URI of the file to retrieve or JSON of the whole attachment"),
         )
 
     async def on_progress_started(self, metadata):
--- a/libervia/cli/cmd_identity.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_identity.py	Wed Jun 19 18:44:57 2024 +0200
@@ -41,18 +41,13 @@
         self.parser.add_argument(
             "--no-cache", action="store_true", help=_("do no use cached values")
         )
-        self.parser.add_argument(
-            "jid", help=_("entity to check")
-        )
+        self.parser.add_argument("jid", help=_("entity to check"))
 
     async def start(self):
         jid_ = (await self.host.check_jids([self.args.jid]))[0]
         try:
             data = await self.host.bridge.identity_get(
-                jid_,
-                [],
-                not self.args.no_cache,
-                self.profile
+                jid_, [], not self.args.no_cache, self.profile
             )
         except Exception as e:
             self.disp(f"can't get identity data: {e}", error=True)
--- a/libervia/cli/cmd_info.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_info.py	Wed Jun 19 18:44:57 2024 +0200
@@ -131,45 +131,53 @@
                 data = {k: e[k] for k in sorted(e)}
                 host = data.pop("host")
                 type_ = data.pop("type")
-                fmt_lines.append(A.color(
-                    "\t",
-                    C.A_SUBHEADER,
-                    host,
-                    " ",
-                    A.RESET,
-                    "[",
-                    C.A_LEVEL_COLORS[1],
-                    type_,
-                    A.RESET,
-                    "]",
-                ))
+                fmt_lines.append(
+                    A.color(
+                        "\t",
+                        C.A_SUBHEADER,
+                        host,
+                        " ",
+                        A.RESET,
+                        "[",
+                        C.A_LEVEL_COLORS[1],
+                        type_,
+                        A.RESET,
+                        "]",
+                    )
+                )
                 extended = data.pop("extended", None)
                 for key, value in data.items():
-                    fmt_lines.append(A.color(
-                        "\t\t",
-                        C.A_LEVEL_COLORS[2],
-                        f"{key}: ",
-                        C.A_LEVEL_COLORS[3],
-                        str(value)
-                    ))
+                    fmt_lines.append(
+                        A.color(
+                            "\t\t",
+                            C.A_LEVEL_COLORS[2],
+                            f"{key}: ",
+                            C.A_LEVEL_COLORS[3],
+                            str(value),
+                        )
+                    )
                 if extended:
-                    fmt_lines.append(A.color(
-                        "\t\t",
-                        C.A_HEADER,
-                        "extended",
-                    ))
+                    fmt_lines.append(
+                        A.color(
+                            "\t\t",
+                            C.A_HEADER,
+                            "extended",
+                        )
+                    )
                     nb_extended = len(extended)
                     for idx, form_data in enumerate(extended):
                         namespace = form_data.get("namespace")
                         if namespace:
-                            fmt_lines.append(A.color(
-                                "\t\t",
-                                C.A_LEVEL_COLORS[2],
-                                "namespace: ",
-                                C.A_LEVEL_COLORS[3],
-                                A.BOLD,
-                                namespace
-                            ))
+                            fmt_lines.append(
+                                A.color(
+                                    "\t\t",
+                                    C.A_LEVEL_COLORS[2],
+                                    "namespace: ",
+                                    C.A_LEVEL_COLORS[3],
+                                    A.BOLD,
+                                    namespace,
+                                )
+                            )
                         for field_data in form_data["fields"]:
                             name = field_data.get("name")
                             if not name:
@@ -183,15 +191,17 @@
                                     continue
                                 if field_type == "boolean":
                                     value = C.bool(value)
-                            fmt_lines.append(A.color(
-                                "\t\t",
-                                C.A_LEVEL_COLORS[2],
-                                f"{name}: ",
-                                C.A_LEVEL_COLORS[3],
-                                A.BOLD,
-                                str(value)
-                            ))
-                        if nb_extended>1 and idx < nb_extended-1:
+                            fmt_lines.append(
+                                A.color(
+                                    "\t\t",
+                                    C.A_LEVEL_COLORS[2],
+                                    f"{name}: ",
+                                    C.A_LEVEL_COLORS[3],
+                                    A.BOLD,
+                                    str(value),
+                                )
+                            )
+                        if nb_extended > 1 and idx < nb_extended - 1:
                             fmt_lines.append("\n")
 
                 fmt_lines.append("\n")
@@ -218,7 +228,7 @@
         if self.args.node:
             if self.args.type == "external":
                 self.parser.error(
-                    '--node can\'t be used with discovery of external services '
+                    "--node can't be used with discovery of external services "
                     '(--type="external")'
                 )
             else:
@@ -245,7 +255,11 @@
                 features.sort()
                 identities.sort(key=lambda identity: identity[2])
                 data.update(
-                    {"features": features, "identities": identities, "extensions": extensions}
+                    {
+                        "features": features,
+                        "identities": identities,
+                        "extensions": extensions,
+                    }
                 )
 
         # items
@@ -274,7 +288,7 @@
             except Exception as e:
                 self.disp(
                     _("error while doing external service discovery: {e}").format(e=e),
-                    error=True
+                    error=True,
                 )
                 self.host.quit(C.EXIT_BRIDGE_ERRBACK)
             else:
--- a/libervia/cli/cmd_input.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_input.py	Wed Jun 19 18:44:57 2024 +0200
@@ -194,10 +194,10 @@
                 error=True,
             )
             self.host.quit(C.EXIT_DATA_ERROR)
-        end = '\n' if self.args.debug else ' '
+        end = "\n" if self.args.debug else " "
         self.disp(
             A.color(C.A_HEADER, _("command {idx}").format(idx=self.idx)),
-            end = end,
+            end=end,
         )
         stdin = "".join(self._stdin)
         if self.args.debug:
@@ -206,7 +206,7 @@
                     C.A_SUBHEADER,
                     _("values: "),
                     A.RESET,
-                    ", ".join([shlex.quote(a) for a in self._values_ori])
+                    ", ".join([shlex.quote(a) for a in self._values_ori]),
                 ),
                 2,
             )
@@ -227,7 +227,7 @@
             )
             self.disp("\n")
         else:
-            self.disp(" (" + ", ".join(self._values_ori) + ")", 2, end=' ')
+            self.disp(" (" + ", ".join(self._values_ori) + ")", 2, end=" ")
             args = [sys.argv[0]] + self.args.command + self._opts + self._pos
             p = await asyncio.create_subprocess_exec(
                 *args,
@@ -235,18 +235,24 @@
                 stdout=subprocess.PIPE,
                 stderr=subprocess.PIPE,
             )
-            stdout, stderr = await p.communicate(stdin.encode('utf-8'))
+            stdout, stderr = await p.communicate(stdin.encode("utf-8"))
             log = self.args.log
             log_err = self.args.log_err
             log_tpl = "{command}\n{buff}\n\n"
             if log:
-                log.write(log_tpl.format(
-                    command=" ".join(shlex.quote(a) for a in args),
-                    buff=stdout.decode('utf-8', 'replace')))
+                log.write(
+                    log_tpl.format(
+                        command=" ".join(shlex.quote(a) for a in args),
+                        buff=stdout.decode("utf-8", "replace"),
+                    )
+                )
             if log_err:
-                log_err.write(log_tpl.format(
-                    command=" ".join(shlex.quote(a) for a in args),
-                    buff=stderr.decode('utf-8', 'replace')))
+                log_err.write(
+                    log_tpl.format(
+                        command=" ".join(shlex.quote(a) for a in args),
+                        buff=stderr.decode("utf-8", "replace"),
+                    )
+                )
             ret = p.returncode
             if ret == 0:
                 self.disp(A.color(C.A_SUCCESS, _("OK")))
--- a/libervia/cli/cmd_message.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_message.py	Wed Jun 19 18:44:57 2024 +0200
@@ -55,9 +55,7 @@
             "-n",
             "--new-line",
             action="store_true",
-            help=_(
-                "add a new line at the beginning of the input"
-            ),
+            help=_("add a new line at the beginning of the input"),
         )
         self.parser.add_argument(
             "-S",
@@ -74,22 +72,29 @@
             default=C.MESS_TYPE_AUTO,
             help=_("type of the message"),
         )
-        self.parser.add_argument("-e", "--encrypt", metavar="ALGORITHM",
-                                 help=_("encrypt message using given algorithm"))
+        self.parser.add_argument(
+            "-e",
+            "--encrypt",
+            metavar="ALGORITHM",
+            help=_("encrypt message using given algorithm"),
+        )
         self.parser.add_argument(
             "--encrypt-noreplace",
             action="store_true",
-            help=_("don't replace encryption algorithm if an other one is already used"))
+            help=_("don't replace encryption algorithm if an other one is already used"),
+        )
         self.parser.add_argument(
-            "-a", "--attach", dest="attachments", action="append", metavar="FILE_PATH",
-            help=_("add a file as an attachment")
+            "-a",
+            "--attach",
+            dest="attachments",
+            action="append",
+            metavar="FILE_PATH",
+            help=_("add a file as an attachment"),
         )
         syntax = self.parser.add_mutually_exclusive_group()
         syntax.add_argument("-x", "--xhtml", action="store_true", help=_("XHTML body"))
         syntax.add_argument("-r", "--rich", action="store_true", help=_("rich body"))
-        self.parser.add_argument(
-            "jid", help=_("the destination jid")
-        )
+        self.parser.add_argument("jid", help=_("the destination jid"))
 
     async def send_stdin(self, dest_jid):
         """Send incomming data on stdin to jabber contact
@@ -98,9 +103,7 @@
         """
         header = "\n" if self.args.new_line else ""
         # FIXME: stdin is not read asynchronously at the moment
-        stdin_lines = [
-            stream for stream in sys.stdin.readlines()
-        ]
+        stdin_lines = [stream for stream in sys.stdin.readlines()]
         extra = {}
         if self.args.subject is None:
             subject = {}
@@ -134,8 +137,9 @@
                     self.disp(f"can't send header: {e}", error=True)
                     error = True
 
-            to_send.extend({self.args.lang: clean_ustr(l.replace("\n", ""))}
-                           for l in stdin_lines)
+            to_send.extend(
+                {self.args.lang: clean_ustr(l.replace("\n", ""))} for l in stdin_lines
+            )
         else:
             # we sent all in a single message
             if not (self.args.xhtml or self.args.rich):
@@ -166,7 +170,8 @@
                     subject,
                     self.args.type,
                     data_format.serialise(extra),
-                    profile_key=self.host.profile)
+                    profile_key=self.host.profile,
+                )
             except Exception as e:
                 self.disp(f"can't send message {msg!r}: {e}", error=True)
                 error = True
@@ -194,7 +199,8 @@
         if self.args.encrypt is not None:
             try:
                 namespace = await self.host.bridge.encryption_namespace_get(
-                    self.args.encrypt)
+                    self.args.encrypt
+                )
             except Exception as e:
                 self.disp(f"can't get encryption namespace: {e}", error=True)
                 self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -216,24 +222,19 @@
         super().__init__(host, "retract", help=_("retract a message"))
 
     def add_parser_options(self):
-        self.parser.add_argument(
-            "message_id",
-            help=_("ID of the message (internal ID)")
-        )
+        self.parser.add_argument("message_id", help=_("ID of the message (internal ID)"))
 
     async def start(self):
         try:
-            await self.host.bridge.message_retract(
-                self.args.message_id,
-                self.profile
-            )
+            await self.host.bridge.message_retract(self.args.message_id, self.profile)
         except Exception as e:
             self.disp(f"can't retract message: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
         else:
             self.disp(
                 "message retraction has been requested, please note that this is a "
-                "request which can't be enforced (see documentation for details).")
+                "request which can't be enforced (see documentation for details)."
+            )
             self.host.quit(C.EXIT_OK)
 
 
@@ -241,36 +242,66 @@
 
     def __init__(self, host):
         super(MAM, self).__init__(
-            host, "mam", use_output=C.OUTPUT_MESS, use_verbose=True,
-            help=_("query archives using MAM"))
+            host,
+            "mam",
+            use_output=C.OUTPUT_MESS,
+            use_verbose=True,
+            help=_("query archives using MAM"),
+        )
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "-s", "--service", default="",
-            help=_("jid of the service (default: profile's server"))
+            "-s",
+            "--service",
+            default="",
+            help=_("jid of the service (default: profile's server"),
+        )
         self.parser.add_argument(
-            "-S", "--start", dest="mam_start", type=base.date_decoder,
-            help=_(
-                "start fetching archive from this date (default: from the beginning)"))
+            "-S",
+            "--start",
+            dest="mam_start",
+            type=base.date_decoder,
+            help=_("start fetching archive from this date (default: from the beginning)"),
+        )
         self.parser.add_argument(
-            "-E", "--end", dest="mam_end", type=base.date_decoder,
-            help=_("end fetching archive after this date (default: no limit)"))
+            "-E",
+            "--end",
+            dest="mam_end",
+            type=base.date_decoder,
+            help=_("end fetching archive after this date (default: no limit)"),
+        )
         self.parser.add_argument(
-            "-W", "--with", dest="mam_with",
-            help=_("retrieve only archives with this jid"))
+            "-W",
+            "--with",
+            dest="mam_with",
+            help=_("retrieve only archives with this jid"),
+        )
         self.parser.add_argument(
-            "-m", "--max", dest="rsm_max", type=int, default=20,
-            help=_("maximum number of items to retrieve, using RSM (default: 20))"))
+            "-m",
+            "--max",
+            dest="rsm_max",
+            type=int,
+            default=20,
+            help=_("maximum number of items to retrieve, using RSM (default: 20))"),
+        )
         rsm_page_group = self.parser.add_mutually_exclusive_group()
         rsm_page_group.add_argument(
-            "-a", "--after", dest="rsm_after",
-            help=_("find page after this item"), metavar='ITEM_ID')
+            "-a",
+            "--after",
+            dest="rsm_after",
+            help=_("find page after this item"),
+            metavar="ITEM_ID",
+        )
         rsm_page_group.add_argument(
-            "-b", "--before", dest="rsm_before",
-            help=_("find page before this item"), metavar='ITEM_ID')
+            "-b",
+            "--before",
+            dest="rsm_before",
+            help=_("find page before this item"),
+            metavar="ITEM_ID",
+        )
         rsm_page_group.add_argument(
-            "--index", dest="rsm_index", type=int,
-            help=_("index of the page to retrieve"))
+            "--index", dest="rsm_index", type=int, help=_("index of the page to retrieve")
+        )
 
     async def start(self):
         extra = {}
@@ -280,14 +311,15 @@
             extra["mam_end"] = float(self.args.mam_end)
         if self.args.mam_with is not None:
             extra["mam_with"] = self.args.mam_with
-        for suff in ('max', 'after', 'before', 'index'):
-            key = 'rsm_' + suff
-            value = getattr(self.args,key)
+        for suff in ("max", "after", "before", "index"):
+            key = "rsm_" + suff
+            value = getattr(self.args, key)
             if value is not None:
                 extra[key] = str(value)
         try:
             data, metadata_s, profile = await self.host.bridge.mam_get(
-                self.args.service, data_format.serialise(extra), self.profile)
+                self.args.service, data_format.serialise(extra), self.profile
+            )
         except Exception as e:
             self.disp(f"can't retrieve MAM archives: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -308,12 +340,17 @@
         # FIXME: metadata are not displayed correctly and don't play nice with output
         #        they should be added to output data somehow
         if self.verbosity:
-            for value in ("rsm_first", "rsm_last", "rsm_index", "rsm_count",
-                          "mam_complete", "mam_stable"):
+            for value in (
+                "rsm_first",
+                "rsm_last",
+                "rsm_index",
+                "rsm_count",
+                "mam_complete",
+                "mam_stable",
+            ):
                 if value in metadata:
                     label = value.split("_")[1]
-                    self.disp(A.color(
-                        C.A_HEADER, label, ': ' , A.RESET, metadata[value]))
+                    self.disp(A.color(C.A_HEADER, label, ": ", A.RESET, metadata[value]))
 
         self.host.quit()
 
--- a/libervia/cli/cmd_notifications.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_notifications.py	Wed Jun 19 18:44:57 2024 +0200
@@ -98,10 +98,10 @@
             await self.host.bridge.notification_add(
                 self.args.type,
                 self.args.body_plain,
-                "", # TODO: self.args.body_rich or "",
+                "",  # TODO: self.args.body_rich or "",
                 self.args.title or "",
                 self.args.is_global,
-                False, # TODO: self.args.requires_action,
+                False,  # TODO: self.args.requires_action,
                 self.args.priority,
                 self.args.expire_at,
                 "",
@@ -300,7 +300,8 @@
         )
 
         self.parser.add_argument(
-            "-g", "--is-global",
+            "-g",
+            "--is-global",
             action="store_true",
             help=_("true if the notification is a global one"),
         )
--- a/libervia/cli/cmd_param.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_param.py	Wed Jun 19 18:44:57 2024 +0200
@@ -54,8 +54,10 @@
             print("\n".join(categories))
         elif self.args.name is None:
             try:
-                values_dict = await self.host.bridge.params_values_from_category_get_async(
-                    self.args.category, self.args.security_limit, "", "", self.profile
+                values_dict = (
+                    await self.host.bridge.params_values_from_category_get_async(
+                        self.args.category, self.args.security_limit, "", "", self.profile
+                    )
                 )
             except Exception as e:
                 self.disp(
--- a/libervia/cli/cmd_pipe.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_pipe.py	Wed Jun 19 18:44:57 2024 +0200
@@ -40,12 +40,10 @@
         super(PipeOut, self).__init__(host, "out", help=_("send a pipe a stream"))
 
     def add_parser_options(self):
-        self.parser.add_argument(
-            "jid", help=_("the destination jid")
-        )
+        self.parser.add_argument("jid", help=_("the destination jid"))
 
     async def start(self):
-        """ Create named pipe, and send stdin to it """
+        """Create named pipe, and send stdin to it"""
         try:
             port = await self.host.bridge.stream_out(
                 await self.host.get_full_jid(self.args.jid),
@@ -133,7 +131,8 @@
             while True:
                 try:
                     server = await asyncio.start_server(
-                        partial(handle_stream_in, host=self.host), host, port)
+                        partial(handle_stream_in, host=self.host), host, port
+                    )
                 except socket.error as e:
                     if e.errno == errno.EADDRINUSE:
                         port += 1
--- a/libervia/cli/cmd_profile.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_profile.py	Wed Jun 19 18:44:57 2024 +0200
@@ -30,7 +30,7 @@
 
 __commands__ = ["Profile"]
 
-PROFILE_HELP = _('The name of the profile')
+PROFILE_HELP = _("The name of the profile")
 
 
 class ProfileConnect(base.CommandBase):
@@ -39,7 +39,9 @@
     def __init__(self, host):
         # it's weird to have a command named "connect" with need_connect=False, but it can be handy to be able
         # to launch just the session, so some paradoxes don't hurt
-        super(ProfileConnect, self).__init__(host, 'connect', need_connect=False, help=('connect a profile'))
+        super(ProfileConnect, self).__init__(
+            host, "connect", need_connect=False, help=("connect a profile")
+        )
 
     def add_parser_options(self):
         pass
@@ -51,10 +53,13 @@
             self.parser.error(_("You need to use either --connect or --start-session"))
         self.host.quit()
 
+
 class ProfileDisconnect(base.CommandBase):
 
     def __init__(self, host):
-        super(ProfileDisconnect, self).__init__(host, 'disconnect', need_connect=False, help=('disconnect a profile'))
+        super(ProfileDisconnect, self).__init__(
+            host, "disconnect", need_connect=False, help=("disconnect a profile")
+        )
 
     def add_parser_options(self):
         pass
@@ -72,29 +77,44 @@
 class ProfileCreate(base.CommandBase):
     def __init__(self, host):
         super(ProfileCreate, self).__init__(
-            host, 'create', use_profile=False, help=('create a new profile'))
+            host, "create", use_profile=False, help=("create a new profile")
+        )
 
     def add_parser_options(self):
-        self.parser.add_argument('profile', type=str, help=_('the name of the profile'))
-        self.parser.add_argument(
-            '-p', '--password', type=str, default='',
-            help=_('the password of the profile'))
-        self.parser.add_argument(
-            '-j', '--jid', type=str, help=_('the jid of the profile'))
+        self.parser.add_argument("profile", type=str, help=_("the name of the profile"))
         self.parser.add_argument(
-            '-x', '--xmpp-password', type=str,
-            help=_(
-                'the password of the XMPP account (use profile password if not specified)'
-            ),
-            metavar='PASSWORD')
+            "-p",
+            "--password",
+            type=str,
+            default="",
+            help=_("the password of the profile"),
+        )
         self.parser.add_argument(
-            '-A', '--autoconnect', choices=[C.BOOL_TRUE, C.BOOL_FALSE], nargs='?',
-            const=C.BOOL_TRUE,
-            help=_('connect this profile automatically when backend starts')
+            "-j", "--jid", type=str, help=_("the jid of the profile")
         )
         self.parser.add_argument(
-            '-C', '--component', default='',
-            help=_('set to component import name (entry point) if this is a component'))
+            "-x",
+            "--xmpp-password",
+            type=str,
+            help=_(
+                "the password of the XMPP account (use profile password if not specified)"
+            ),
+            metavar="PASSWORD",
+        )
+        self.parser.add_argument(
+            "-A",
+            "--autoconnect",
+            choices=[C.BOOL_TRUE, C.BOOL_FALSE],
+            nargs="?",
+            const=C.BOOL_TRUE,
+            help=_("connect this profile automatically when backend starts"),
+        )
+        self.parser.add_argument(
+            "-C",
+            "--component",
+            default="",
+            help=_("set to component import name (entry point) if this is a component"),
+        )
 
     async def start(self):
         """Create a new profile"""
@@ -103,55 +123,70 @@
             self.host.quit(C.EXIT_BRIDGE_ERROR)
         try:
             await self.host.bridge.profile_create(
-                self.args.profile, self.args.password, self.args.component)
+                self.args.profile, self.args.password, self.args.component
+            )
         except Exception as e:
             self.disp(f"can't create profile: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
 
         try:
             await self.host.bridge.profile_start_session(
-                self.args.password, self.args.profile)
+                self.args.password, self.args.profile
+            )
         except Exception as e:
             self.disp(f"can't start profile session: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
 
         if self.args.jid:
             await self.host.bridge.param_set(
-                "JabberID", self.args.jid, "Connection", profile_key=self.args.profile)
+                "JabberID", self.args.jid, "Connection", profile_key=self.args.profile
+            )
         xmpp_pwd = self.args.password or self.args.xmpp_password
         if xmpp_pwd:
             await self.host.bridge.param_set(
-                "Password", xmpp_pwd, "Connection", profile_key=self.args.profile)
+                "Password", xmpp_pwd, "Connection", profile_key=self.args.profile
+            )
 
         if self.args.autoconnect is not None:
             await self.host.bridge.param_set(
-                "autoconnect_backend", self.args.autoconnect, "Connection",
-                profile_key=self.args.profile)
+                "autoconnect_backend",
+                self.args.autoconnect,
+                "Connection",
+                profile_key=self.args.profile,
+            )
 
-        self.disp(f'profile {self.args.profile} created successfully', 1)
+        self.disp(f"profile {self.args.profile} created successfully", 1)
         self.host.quit()
 
 
 class ProfileDefault(base.CommandBase):
     def __init__(self, host):
         super(ProfileDefault, self).__init__(
-            host, 'default', use_profile=False, help=('print default profile'))
+            host, "default", use_profile=False, help=("print default profile")
+        )
 
     def add_parser_options(self):
         pass
 
     async def start(self):
-        print(await self.host.bridge.profile_name_get('@DEFAULT@'))
+        print(await self.host.bridge.profile_name_get("@DEFAULT@"))
         self.host.quit()
 
 
 class ProfileDelete(base.CommandBase):
     def __init__(self, host):
-        super(ProfileDelete, self).__init__(host, 'delete', use_profile=False, help=('delete a profile'))
+        super(ProfileDelete, self).__init__(
+            host, "delete", use_profile=False, help=("delete a profile")
+        )
 
     def add_parser_options(self):
-        self.parser.add_argument('profile', type=str, help=PROFILE_HELP)
-        self.parser.add_argument('-f', '--force', action='store_true', help=_('delete profile without confirmation'))
+        self.parser.add_argument("profile", type=str, help=PROFILE_HELP)
+        self.parser.add_argument(
+            "-f",
+            "--force",
+            action="store_true",
+            help=_("delete profile without confirmation"),
+        )
 
     async def start(self):
         if self.args.profile not in await self.host.bridge.profiles_list_get():
@@ -170,25 +205,35 @@
 
     def __init__(self, host):
         super(ProfileInfo, self).__init__(
-            host, 'info', need_connect=False, use_output=C.OUTPUT_DICT,
-            help=_('get information about a profile'))
-        self.to_show = [(_("jid"), "Connection", "JabberID"),]
+            host,
+            "info",
+            need_connect=False,
+            use_output=C.OUTPUT_DICT,
+            help=_("get information about a profile"),
+        )
+        self.to_show = [
+            (_("jid"), "Connection", "JabberID"),
+        ]
 
     def add_parser_options(self):
         self.parser.add_argument(
-            '--show-password', action='store_true',
-            help=_('show the XMPP password IN CLEAR TEXT'))
+            "--show-password",
+            action="store_true",
+            help=_("show the XMPP password IN CLEAR TEXT"),
+        )
 
     async def start(self):
         if self.args.show_password:
             self.to_show.append((_("XMPP password"), "Connection", "Password"))
-        self.to_show.append((_("autoconnect (backend)"), "Connection",
-                                "autoconnect_backend"))
+        self.to_show.append(
+            (_("autoconnect (backend)"), "Connection", "autoconnect_backend")
+        )
         data = {}
         for label, category, name in self.to_show:
             try:
                 value = await self.host.bridge.param_get_a_async(
-                    name, category, profile_key=self.host.profile)
+                    name, category, profile_key=self.host.profile
+                )
             except Exception as e:
                 self.disp(f"can't get {name}/{category} param: {e}", error=True)
             else:
@@ -201,15 +246,20 @@
 class ProfileList(base.CommandBase):
     def __init__(self, host):
         super(ProfileList, self).__init__(
-            host, 'list', use_profile=False, use_output='list', help=('list profiles'))
+            host, "list", use_profile=False, use_output="list", help=("list profiles")
+        )
 
     def add_parser_options(self):
         group = self.parser.add_mutually_exclusive_group()
         group.add_argument(
-            '-c', '--clients', action='store_true', help=_('get clients profiles only'))
+            "-c", "--clients", action="store_true", help=_("get clients profiles only")
+        )
         group.add_argument(
-            '-C', '--components', action='store_true',
-            help=('get components profiles only'))
+            "-C",
+            "--components",
+            action="store_true",
+            help=("get components profiles only"),
+        )
 
     async def start(self):
         if self.args.clients:
@@ -226,55 +276,82 @@
 
     def __init__(self, host):
         super(ProfileModify, self).__init__(
-            host, 'modify', need_connect=False, help=_('modify an existing profile'))
+            host, "modify", need_connect=False, help=_("modify an existing profile")
+        )
 
     def add_parser_options(self):
         profile_pwd_group = self.parser.add_mutually_exclusive_group()
         profile_pwd_group.add_argument(
-            '-w', '--password', help=_('change the password of the profile'))
+            "-w", "--password", help=_("change the password of the profile")
+        )
         profile_pwd_group.add_argument(
-            '--disable-password', action='store_true',
-            help=_('disable profile password (dangerous!)'))
-        self.parser.add_argument('-j', '--jid', help=_('the jid of the profile'))
+            "--disable-password",
+            action="store_true",
+            help=_("disable profile password (dangerous!)"),
+        )
+        self.parser.add_argument("-j", "--jid", help=_("the jid of the profile"))
         self.parser.add_argument(
-            '-x', '--xmpp-password', help=_('change the password of the XMPP account'),
-            metavar='PASSWORD')
+            "-x",
+            "--xmpp-password",
+            help=_("change the password of the XMPP account"),
+            metavar="PASSWORD",
+        )
         self.parser.add_argument(
-            '-D', '--default', action='store_true', help=_('set as default profile'))
+            "-D", "--default", action="store_true", help=_("set as default profile")
+        )
         self.parser.add_argument(
-            '-A', '--autoconnect', choices=[C.BOOL_TRUE, C.BOOL_FALSE], nargs='?',
+            "-A",
+            "--autoconnect",
+            choices=[C.BOOL_TRUE, C.BOOL_FALSE],
+            nargs="?",
             const=C.BOOL_TRUE,
-            help=_('connect this profile automatically when backend starts')
+            help=_("connect this profile automatically when backend starts"),
         )
 
     async def start(self):
         if self.args.disable_password:
-            self.args.password = ''
+            self.args.password = ""
         if self.args.password is not None:
             await self.host.bridge.param_set(
-                "Password", self.args.password, "General", profile_key=self.host.profile)
+                "Password", self.args.password, "General", profile_key=self.host.profile
+            )
         if self.args.jid is not None:
             await self.host.bridge.param_set(
-                "JabberID", self.args.jid, "Connection", profile_key=self.host.profile)
+                "JabberID", self.args.jid, "Connection", profile_key=self.host.profile
+            )
         if self.args.xmpp_password is not None:
             await self.host.bridge.param_set(
-                "Password", self.args.xmpp_password, "Connection",
-                profile_key=self.host.profile)
+                "Password",
+                self.args.xmpp_password,
+                "Connection",
+                profile_key=self.host.profile,
+            )
         if self.args.default:
             await self.host.bridge.profile_set_default(self.host.profile)
         if self.args.autoconnect is not None:
             await self.host.bridge.param_set(
-                "autoconnect_backend", self.args.autoconnect, "Connection",
-                profile_key=self.host.profile)
+                "autoconnect_backend",
+                self.args.autoconnect,
+                "Connection",
+                profile_key=self.host.profile,
+            )
 
         self.host.quit()
 
 
 class Profile(base.CommandBase):
     subcommands = (
-        ProfileConnect, ProfileDisconnect, ProfileCreate, ProfileDefault, ProfileDelete,
-        ProfileInfo, ProfileList, ProfileModify)
+        ProfileConnect,
+        ProfileDisconnect,
+        ProfileCreate,
+        ProfileDefault,
+        ProfileDelete,
+        ProfileInfo,
+        ProfileList,
+        ProfileModify,
+    )
 
     def __init__(self, host):
         super(Profile, self).__init__(
-            host, 'profile', use_profile=False, help=_('profile commands'))
+            host, "profile", use_profile=False, help=_("profile commands")
+        )
--- a/libervia/cli/cmd_pubsub.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_pubsub.py	Wed Jun 19 18:44:57 2024 +0200
@@ -874,39 +874,63 @@
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "-s", "--service", action="append", metavar="JID", dest="services",
+            "-s",
+            "--service",
+            action="append",
+            metavar="JID",
+            dest="services",
             help="purge items only for these services. If not specified, items from ALL "
-            "services will be purged. May be used several times."
+            "services will be purged. May be used several times.",
         )
         self.parser.add_argument(
-            "-n", "--node", action="append", dest="nodes",
+            "-n",
+            "--node",
+            action="append",
+            dest="nodes",
             help="purge items only for these nodes. If not specified, items from ALL "
-            "nodes will be purged. May be used several times."
+            "nodes will be purged. May be used several times.",
         )
         self.parser.add_argument(
-            "-p", "--profile", action="append", dest="profiles",
+            "-p",
+            "--profile",
+            action="append",
+            dest="profiles",
             help="purge items only for these profiles. If not specified, items from ALL "
-            "profiles will be purged. May be used several times."
+            "profiles will be purged. May be used several times.",
         )
         self.parser.add_argument(
-            "-b", "--updated-before", type=base.date_decoder, metavar="TIME_PATTERN",
-            help="purge items which have been last updated before given time."
+            "-b",
+            "--updated-before",
+            type=base.date_decoder,
+            metavar="TIME_PATTERN",
+            help="purge items which have been last updated before given time.",
         )
         self.parser.add_argument(
-            "-C", "--created-before", type=base.date_decoder, metavar="TIME_PATTERN",
-            help="purge items which have been last created before given time."
+            "-C",
+            "--created-before",
+            type=base.date_decoder,
+            metavar="TIME_PATTERN",
+            help="purge items which have been last created before given time.",
         )
         self.parser.add_argument(
-            "-t", "--type", action="append", dest="types",
-            help="purge items flagged with TYPE. May be used several times."
+            "-t",
+            "--type",
+            action="append",
+            dest="types",
+            help="purge items flagged with TYPE. May be used several times.",
         )
         self.parser.add_argument(
-            "-S", "--subtype", action="append", dest="subtypes",
-            help="purge items flagged with SUBTYPE. May be used several times."
+            "-S",
+            "--subtype",
+            action="append",
+            dest="subtypes",
+            help="purge items flagged with SUBTYPE. May be used several times.",
         )
         self.parser.add_argument(
-            "-f", "--force", action="store_true",
-            help=_("purge items without confirmation")
+            "-f",
+            "--force",
+            action="store_true",
+            help=_("purge items without confirmation"),
         )
 
     async def start(self):
@@ -916,22 +940,23 @@
                     "Are you sure to purge items from cache? You'll have to bypass cache "
                     "or resynchronise nodes to access deleted items again."
                 ),
-                _("Items purgins has been cancelled.")
+                _("Items purgins has been cancelled."),
             )
         purge_data = {}
         for key in (
-                "services", "nodes", "profiles", "updated_before", "created_before",
-                "types", "subtypes"
+            "services",
+            "nodes",
+            "profiles",
+            "updated_before",
+            "created_before",
+            "types",
+            "subtypes",
         ):
             value = getattr(self.args, key)
             if value is not None:
                 purge_data[key] = value
         try:
-            await self.host.bridge.ps_cache_purge(
-                data_format.serialise(
-                    purge_data
-                )
-            )
+            await self.host.bridge.ps_cache_purge(data_format.serialise(purge_data))
         except Exception as e:
             self.disp(f"Internal error: {e}", error=True)
             self.host.quit(C.EXIT_INTERNAL_ERROR)
@@ -951,8 +976,10 @@
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "-f", "--force", action="store_true",
-            help=_("reset cache without confirmation")
+            "-f",
+            "--force",
+            action="store_true",
+            help=_("reset cache without confirmation"),
         )
 
     async def start(self):
@@ -963,7 +990,7 @@
                     "from it, then it will be progressively refilled as if it were new. "
                     "This may be resources intensive."
                 ),
-                _("Pubsub cache reset has been cancelled.")
+                _("Pubsub cache reset has been cancelled."),
             )
         try:
             await self.host.bridge.ps_cache_reset()
@@ -995,32 +1022,55 @@
             "-f", "--fts", help=_("Full-Text Search query"), metavar="FTS_QUERY"
         )
         self.parser.add_argument(
-            "-p", "--profile", action="append", dest="profiles", metavar="PROFILE",
-            help="search items only from these profiles. May be used several times."
+            "-p",
+            "--profile",
+            action="append",
+            dest="profiles",
+            metavar="PROFILE",
+            help="search items only from these profiles. May be used several times.",
         )
         self.parser.add_argument(
-            "-s", "--service", action="append", dest="services", metavar="SERVICE",
-            help="items must be from specified service. May be used several times."
+            "-s",
+            "--service",
+            action="append",
+            dest="services",
+            metavar="SERVICE",
+            help="items must be from specified service. May be used several times.",
         )
         self.parser.add_argument(
-            "-n", "--node", action="append", dest="nodes", metavar="NODE",
-            help="items must be in the specified node. May be used several times."
+            "-n",
+            "--node",
+            action="append",
+            dest="nodes",
+            metavar="NODE",
+            help="items must be in the specified node. May be used several times.",
         )
         self.parser.add_argument(
-            "-t", "--type", action="append", dest="types", metavar="TYPE",
-            help="items must be of specified type. May be used several times."
+            "-t",
+            "--type",
+            action="append",
+            dest="types",
+            metavar="TYPE",
+            help="items must be of specified type. May be used several times.",
         )
         self.parser.add_argument(
-            "-S", "--subtype", action="append", dest="subtypes", metavar="SUBTYPE",
-            help="items must be of specified subtype. May be used several times."
+            "-S",
+            "--subtype",
+            action="append",
+            dest="subtypes",
+            metavar="SUBTYPE",
+            help="items must be of specified subtype. May be used several times.",
         )
         self.parser.add_argument(
             "-P", "--payload", action="store_true", help=_("include item XML payload")
         )
         self.parser.add_argument(
-            "-o", "--order-by", action="append", nargs="+",
+            "-o",
+            "--order-by",
+            action="append",
+            nargs="+",
             metavar=("ORDER", "[FIELD] [DIRECTION]"),
-            help=_("how items must be ordered. May be used several times.")
+            help=_("how items must be ordered. May be used several times."),
         )
         self.parser.add_argument(
             "-l", "--limit", type=int, help=_("maximum number of items to return")
@@ -1075,10 +1125,12 @@
                     elif len(args) == 2:
                         path, direction = args
                     else:
-                        self.parser.error(_(
-                            "You can't specify more that 2 arguments for a field in "
-                            "--order-by"
-                        ))
+                        self.parser.error(
+                            _(
+                                "You can't specify more that 2 arguments for a field in "
+                                "--order-by"
+                            )
+                        )
                     try:
                         path = json.loads(path)
                     except json.JSONDecodeError:
@@ -1087,17 +1139,15 @@
                         "path": path,
                     }
                 else:
-                    order_query = {
-                        "order": order
-                    }
+                    order_query = {"order": order}
                     if not args:
                         direction = "asc"
                     elif len(args) == 1:
                         direction = args[0]
                     else:
-                        self.parser.error(_(
-                            "there are too many arguments in --order-by option"
-                        ))
+                        self.parser.error(
+                            _("there are too many arguments in --order-by option")
+                        )
                 if direction.lower() not in ("asc", "desc"):
                     self.parser.error(_("invalid --order-by direction: {direction!r}"))
                 order_query["direction"] = direction
@@ -1118,10 +1168,14 @@
 
                 # handling of TP(<time pattern>)
                 if operator in (">", "gt", "<", "le", "between"):
+
                     def datetime_sub(match):
-                        return str(date_utils.date_parse_ext(
-                            match.group(1), default_tz=date_utils.TZ_LOCAL
-                        ))
+                        return str(
+                            date_utils.date_parse_ext(
+                                match.group(1), default_tz=date_utils.TZ_LOCAL
+                            )
+                        )
+
                     value = re.sub(r"\bTP\(([^)]+)\)", datetime_sub, value)
 
                 try:
@@ -1134,11 +1188,7 @@
                     if not isinstance(value, list):
                         value = [value]
 
-                parsed.append({
-                    "path": path,
-                    "op": operator,
-                    "value": value
-                })
+                parsed.append({"path": path, "op": operator, "value": value})
 
             query["parsed"] = parsed
 
@@ -1148,9 +1198,7 @@
                 self.args.keys.append("item_payload")
         try:
             found_items = data_format.deserialise(
-                await self.host.bridge.ps_cache_search(
-                    data_format.serialise(query)
-                ),
+                await self.host.bridge.ps_cache_search(data_format.serialise(query)),
                 type_check=list,
             )
         except BridgeException as e:
@@ -1162,7 +1210,7 @@
         else:
             if self.args.keys:
                 found_items = [
-                    {k: v for k,v in item.items() if k in self.args.keys}
+                    {k: v for k, v in item.items() if k in self.args.keys}
                     for item in found_items
                 ]
             await self.output(found_items)
@@ -1228,19 +1276,19 @@
             "-e",
             "--encrypt",
             action="store_true",
-            help=_("end-to-end encrypt the blog item")
+            help=_("end-to-end encrypt the blog item"),
         )
         self.parser.add_argument(
             "--encrypt-for",
             metavar="JID",
             action="append",
-            help=_("encrypt a single item for")
+            help=_("encrypt a single item for"),
         )
         self.parser.add_argument(
             "-X",
             "--sign",
             action="store_true",
-            help=_("cryptographically sign the blog post")
+            help=_("cryptographically sign the blog post"),
         )
         self.parser.add_argument(
             "item",
@@ -1363,8 +1411,10 @@
             "-f", "--force", action="store_true", help=_("delete without confirmation")
         )
         self.parser.add_argument(
-            "--no-notification", dest="notify", action="store_false",
-            help=_("do not send notification (not recommended)")
+            "--no-notification",
+            dest="notify",
+            action="store_false",
+            help=_("do not send notification (not recommended)"),
         )
 
     async def start(self):
@@ -1410,19 +1460,19 @@
             "-e",
             "--encrypt",
             action="store_true",
-            help=_("end-to-end encrypt the blog item")
+            help=_("end-to-end encrypt the blog item"),
         )
         self.parser.add_argument(
             "--encrypt-for",
             metavar="JID",
             action="append",
-            help=_("encrypt a single item for")
+            help=_("encrypt a single item for"),
         )
         self.parser.add_argument(
             "-X",
             "--sign",
             action="store_true",
-            help=_("cryptographically sign the blog post")
+            help=_("cryptographically sign the blog post"),
         )
 
     async def publish(self, content):
@@ -1544,7 +1594,8 @@
             except KeyError:
                 self.disp(
                     "Pubsub Public Subscription plugin is not loaded, can't use --public "
-                    "option, subscription stopped", error=True
+                    "option, subscription stopped",
+                    error=True,
                 )
                 self.host.quit(C.EXIT_MISSING_FEATURE)
             else:
@@ -1628,7 +1679,7 @@
                     self.args.node,
                     self.profile,
                 ),
-                type_check=list
+                type_check=list,
             )
         except Exception as e:
             self.disp(_("can't retrieve subscriptions: {e}").format(e=e), error=True)
@@ -1685,10 +1736,7 @@
             choices=("data", "mention"),
             help=_("type of reference to send (DEFAULT: mention)"),
         )
-        self.parser.add_argument(
-            "recipient",
-            help=_("recipient of the reference")
-        )
+        self.parser.add_argument("recipient", help=_("recipient of the reference"))
 
     async def start(self):
         service = self.args.service or await self.host.get_profile_jid()
@@ -2496,7 +2544,7 @@
             help=_(
                 "get attached data published only by those JIDs (DEFAULT: get all "
                 "attached data)"
-            )
+            ),
         )
 
     async def start(self):
@@ -2535,7 +2583,7 @@
             help=_(
                 "replace previous versions of attachments (DEFAULT: update previous "
                 "version)"
-            )
+            ),
         )
         self.parser.add_argument(
             "-N",
@@ -2543,21 +2591,21 @@
             metavar="BOOLEAN",
             nargs="?",
             default="keep",
-            help=_("mark item as (un)noticed (DEFAULT: keep current value))")
+            help=_("mark item as (un)noticed (DEFAULT: keep current value))"),
         )
         self.parser.add_argument(
             "-r",
             "--reactions",
             # FIXME: to be replaced by "extend" when we stop supporting python 3.7
             action="append",
-            help=_("emojis to add to react to an item")
+            help=_("emojis to add to react to an item"),
         )
         self.parser.add_argument(
             "-R",
             "--reactions-remove",
             # FIXME: to be replaced by "extend" when we stop supporting python 3.7
             action="append",
-            help=_("emojis to remove from reactions to an item")
+            help=_("emojis to remove from reactions to an item"),
         )
 
     async def start(self):
@@ -2565,7 +2613,7 @@
             "service": self.args.service,
             "node": self.args.node,
             "id": self.args.item,
-            "extra": {}
+            "extra": {},
         }
         operation = "replace" if self.args.replace else "update"
         if self.args.noticed != "keep":
@@ -2574,16 +2622,13 @@
             attachments_data["extra"]["noticed"] = C.bool(self.args.noticed)
 
         if self.args.reactions or self.args.reactions_remove:
-            reactions = attachments_data["extra"]["reactions"] = {
-                "operation": operation
-            }
+            reactions = attachments_data["extra"]["reactions"] = {"operation": operation}
             if self.args.replace:
                 reactions["reactions"] = self.args.reactions
             else:
                 reactions["add"] = self.args.reactions
                 reactions["remove"] = self.args.reactions_remove
 
-
         if not attachments_data["extra"]:
             self.parser.error(_("At leat one attachment must be specified."))
 
@@ -2633,7 +2678,7 @@
             "extra": {
                 # we set None to use profile's bare JID
                 "signature": {"signer": None}
-            }
+            },
         }
         try:
             await self.host.bridge.ps_attachments_set(
@@ -2660,11 +2705,7 @@
         )
 
     def add_parser_options(self):
-        self.parser.add_argument(
-            "signature",
-            metavar="JSON",
-            help=_("signature data")
-        )
+        self.parser.add_argument("signature", metavar="JSON", help=_("signature data"))
 
     async def start(self):
         try:
@@ -2690,9 +2731,7 @@
     )
 
     def __init__(self, host):
-        super().__init__(
-            host, "signature", use_profile=False, help=_("items signatures")
-        )
+        super().__init__(host, "signature", use_profile=False, help=_("items signatures"))
 
 
 class SecretShare(base.CommandBase):
@@ -2707,11 +2746,16 @@
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "-k", "--key", metavar="ID", dest="secret_ids", action="append", default=[],
+            "-k",
+            "--key",
+            metavar="ID",
+            dest="secret_ids",
+            action="append",
+            default=[],
             help=_(
                 "only share secrets with those IDs (default: share all secrets of the "
                 "node)"
-            )
+            ),
         )
         self.parser.add_argument(
             "recipient", metavar="JID", help=_("entity who must get the shared secret")
@@ -2745,15 +2789,18 @@
         )
 
     def add_parser_options(self):
+        self.parser.add_argument("secret_id", help=_("ID of the secrets to revoke"))
         self.parser.add_argument(
-            "secret_id", help=_("ID of the secrets to revoke")
-        )
-        self.parser.add_argument(
-            "-r", "--recipient", dest="recipients", metavar="JID", action="append",
-            default=[], help=_(
+            "-r",
+            "--recipient",
+            dest="recipients",
+            metavar="JID",
+            action="append",
+            default=[],
+            help=_(
                 "entity who must get the revocation notification (default: send to all "
                 "entities known to have the shared secret)"
-            )
+            ),
         )
 
     async def start(self):
@@ -2785,11 +2832,16 @@
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "-r", "--recipient", dest="recipients", metavar="JID", action="append",
-            default=[], help=_(
+            "-r",
+            "--recipient",
+            dest="recipients",
+            metavar="JID",
+            action="append",
+            default=[],
+            help=_(
                 "entity who must get the revocation and shared secret notifications "
                 "(default: send to all entities known to have the shared secret)"
-            )
+            ),
         )
 
     async def start(self):
@@ -2817,7 +2869,7 @@
             use_verbose=True,
             pubsub_flags={C.NODE},
             help=_("list known secrets for a pubsub node"),
-            use_output=C.OUTPUT_LIST_DICT
+            use_output=C.OUTPUT_LIST_DICT,
         )
 
     def add_parser_options(self):
@@ -2825,11 +2877,14 @@
 
     async def start(self):
         try:
-            secrets = data_format.deserialise(await self.host.bridge.ps_secrets_list(
-                self.args.service,
-                self.args.node,
-                self.profile,
-            ), type_check=list)
+            secrets = data_format.deserialise(
+                await self.host.bridge.ps_secrets_list(
+                    self.args.service,
+                    self.args.node,
+                    self.profile,
+                ),
+                type_check=list,
+            )
         except Exception as e:
             self.disp(f"can't list node secrets: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
--- a/libervia/cli/cmd_remote_control.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_remote_control.py	Wed Jun 19 18:44:57 2024 +0200
@@ -363,9 +363,7 @@
         # FIXME: for now AUTO always do the screen sharing, but is should be disabled and
         #   appropriate method should be used when no desktop environment is detected.
         with_screen_sharing = self.args.share_screen in ("yes", "auto")
-        await self.receiver.request_remote_desktop(
-            with_screen_sharing
-        )
+        await self.receiver.request_remote_desktop(with_screen_sharing)
 
         self.disp(_("Waiting for controlling device…"))
         await self.start_answering()
--- a/libervia/cli/cmd_roster.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_roster.py	Wed Jun 19 18:44:57 2024 +0200
@@ -31,9 +31,13 @@
 
     def __init__(self, host):
         super().__init__(
-            host, 'get', use_output=C.OUTPUT_DICT, use_verbose=True,
-            extra_outputs = {"default": self.default_output},
-            help=_('retrieve the roster entities'))
+            host,
+            "get",
+            use_output=C.OUTPUT_DICT,
+            use_verbose=True,
+            extra_outputs={"default": self.default_output},
+            help=_("retrieve the roster entities"),
+        )
 
     def add_parser_options(self):
         pass
@@ -42,11 +46,11 @@
         for contact_jid, contact_data in data.items():
             all_keys = list(contact_data.keys())
             keys_to_show = []
-            name = contact_data.get('name', contact_jid.node)
+            name = contact_data.get("name", contact_jid.node)
 
             if self.verbosity >= 1:
-                keys_to_show.append('groups')
-                all_keys.remove('groups')
+                keys_to_show.append("groups")
+                all_keys.remove("groups")
             if self.verbosity >= 2:
                 keys_to_show.extend(all_keys)
 
@@ -58,9 +62,10 @@
                 value = contact_data[k]
                 if value:
                     if isinstance(value, list):
-                        value = ', '.join(value)
-                    self.disp(A.color(
-                        "    ", C.A_SUBHEADER, f"{k}: ", A.RESET, str(value)))
+                        value = ", ".join(value)
+                    self.disp(
+                        A.color("    ", C.A_SUBHEADER, f"{k}: ", A.RESET, str(value))
+                    )
 
     async def start(self):
         try:
@@ -73,10 +78,10 @@
         for contact_jid_s, data, groups in contacts:
             # FIXME: we have to convert string to bool here for historical reason
             #        contacts_get format should be changed and serialised properly
-            for key in ('from', 'to', 'ask'):
+            for key in ("from", "to", "ask"):
                 if key in data:
                     data[key] = C.bool(data[key])
-            data['groups'] = list(groups)
+            data["groups"] = list(groups)
             contacts_dict[jid.JID(contact_jid_s)] = data
 
         await self.output(contacts_dict)
@@ -86,19 +91,28 @@
 class Set(base.CommandBase):
 
     def __init__(self, host):
-        super().__init__(host, 'set', help=_('set metadata for a roster entity'))
+        super().__init__(host, "set", help=_("set metadata for a roster entity"))
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "-n", "--name", default="", help=_('name to use for this entity'))
-        self.parser.add_argument(
-            "-g", "--group", dest='groups', action='append', metavar='GROUP', default=[],
-            help=_('groups for this entity'))
+            "-n", "--name", default="", help=_("name to use for this entity")
+        )
         self.parser.add_argument(
-            "-R", "--replace", action="store_true",
-            help=_("replace all metadata instead of adding them"))
+            "-g",
+            "--group",
+            dest="groups",
+            action="append",
+            metavar="GROUP",
+            default=[],
+            help=_("groups for this entity"),
+        )
         self.parser.add_argument(
-            "jid", help=_("jid of the roster entity"))
+            "-R",
+            "--replace",
+            action="store_true",
+            help=_("replace all metadata instead of adding them"),
+        )
+        self.parser.add_argument("jid", help=_("jid of the roster entity"))
 
     async def start(self):
 
@@ -108,17 +122,19 @@
         else:
             try:
                 entity_data = await self.host.bridge.contact_get(
-                    self.args.jid, self.host.profile)
+                    self.args.jid, self.host.profile
+                )
             except Exception as e:
                 self.disp(f"error while retrieving the contact: {e}", error=True)
                 self.host.quit(C.EXIT_BRIDGE_ERRBACK)
-            name = self.args.name or entity_data[0].get('name') or ''
+            name = self.args.name or entity_data[0].get("name") or ""
             groups = set(entity_data[1])
             groups = list(groups.union(self.args.groups))
 
         try:
             await self.host.bridge.contact_update(
-                self.args.jid, name, groups, self.host.profile)
+                self.args.jid, name, groups, self.host.profile
+            )
         except Exception as e:
             self.disp(f"error while updating the contact: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -128,14 +144,13 @@
 class Delete(base.CommandBase):
 
     def __init__(self, host):
-        super().__init__(host, 'delete', help=_('remove an entity from roster'))
+        super().__init__(host, "delete", help=_("remove an entity from roster"))
 
     def add_parser_options(self):
         self.parser.add_argument(
             "-f", "--force", action="store_true", help=_("delete without confirmation")
         )
-        self.parser.add_argument(
-            "jid", help=_("jid of the roster entity"))
+        self.parser.add_argument("jid", help=_("jid of the roster entity"))
 
     async def start(self):
         if not self.args.force:
@@ -144,8 +159,7 @@
             )
             await self.host.confirm_or_quit(message, _("entity deletion cancelled"))
         try:
-            await self.host.bridge.contact_del(
-                self.args.jid, self.host.profile)
+            await self.host.bridge.contact_del(self.args.jid, self.host.profile)
         except Exception as e:
             self.disp(f"error while deleting the entity: {e}", error=True)
             self.host.quit(C.EXIT_BRIDGE_ERRBACK)
@@ -155,7 +169,9 @@
 class Stats(base.CommandBase):
 
     def __init__(self, host):
-        super(Stats, self).__init__(host, 'stats', help=_('Show statistics about a roster'))
+        super(Stats, self).__init__(
+            host, "stats", help=_("Show statistics about a roster")
+        )
 
     def add_parser_options(self):
         pass
@@ -189,15 +205,18 @@
                 total_group_subscription += len(groups)
             if not groups:
                 no_group += 1
-        hosts = OrderedDict(sorted(list(hosts.items()), key=lambda item:-item[1]))
+        hosts = OrderedDict(sorted(list(hosts.items()), key=lambda item: -item[1]))
 
         print()
         print("Total number of contacts: %d" % len(contacts))
         print("Number of different hosts: %d" % len(hosts))
         print()
         for host, count in hosts.items():
-            print("Contacts on {host}: {count} ({rate:.1f}%)".format(
-                host=host, count=count, rate=100 * float(count) / len(contacts)))
+            print(
+                "Contacts on {host}: {count} ({rate:.1f}%)".format(
+                    host=host, count=count, rate=100 * float(count) / len(contacts)
+                )
+            )
         print()
         print("Contacts with no 'from' subscription: %d" % no_from)
         print("Contacts with no 'to' subscription: %d" % no_to)
@@ -222,16 +241,22 @@
 
     def __init__(self, host):
         super(Purge, self).__init__(
-            host, 'purge',
-            help=_('purge the roster from its contacts with no subscription'))
+            host,
+            "purge",
+            help=_("purge the roster from its contacts with no subscription"),
+        )
 
     def add_parser_options(self):
         self.parser.add_argument(
-            "--no-from", action="store_true",
-            help=_("also purge contacts with no 'from' subscription"))
+            "--no-from",
+            action="store_true",
+            help=_("also purge contacts with no 'from' subscription"),
+        )
         self.parser.add_argument(
-            "--no-to", action="store_true",
-            help=_("also purge contacts with no 'to' subscription"))
+            "--no-to",
+            action="store_true",
+            help=_("also purge contacts with no 'to' subscription"),
+        )
 
     async def start(self):
         try:
@@ -259,7 +284,8 @@
             for contact in no_sub + no_from + no_to:
                 try:
                     await self.host.bridge.contact_del(
-                        contact, profile_key=self.host.profile)
+                        contact, profile_key=self.host.profile
+                    )
                 except Exception as e:
                     self.disp(f"can't delete contact {contact!r}: {e}", error=True)
                 else:
@@ -278,24 +304,27 @@
         if no_sub:
             self.disp(
                 f"There's no subscription between profile {self.host.profile!r} and the "
-                f"following contacts:")
+                f"following contacts:"
+            )
             self.disp("    " + "\n    ".join(no_sub))
         if no_from:
             self.disp(
                 f"There's no 'from' subscription between profile {self.host.profile!r} "
-                f"and the following contacts:")
+                f"and the following contacts:"
+            )
             self.disp("    " + "\n    ".join(no_from))
         if no_to:
             self.disp(
                 f"There's no 'to' subscription between profile {self.host.profile!r} and "
-                f"the following contacts:")
+                f"the following contacts:"
+            )
             self.disp("    " + "\n    ".join(no_to))
         message = f"REMOVE them from profile {self.host.profile}'s roster"
         while True:
             res = await self.host.ainput(f"{message} (y/N)? ")
-            if not res or res.lower() == 'n':
+            if not res or res.lower() == "n":
                 return False
-            if res.lower() == 'y':
+            if res.lower() == "y":
                 return True
 
 
@@ -303,7 +332,8 @@
 
     def __init__(self, host):
         super(Resync, self).__init__(
-            host, 'resync', help=_('do a full resynchronisation of roster with server'))
+            host, "resync", help=_("do a full resynchronisation of roster with server")
+        )
 
     def add_parser_options(self):
         pass
@@ -324,4 +354,5 @@
 
     def __init__(self, host):
         super(Roster, self).__init__(
-            host, 'roster', use_profile=True, help=_("Manage an entity's roster"))
+            host, "roster", use_profile=True, help=_("Manage an entity's roster")
+        )
--- a/libervia/cli/cmd_shell.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_shell.py	Wed Jun 19 18:44:57 2024 +0200
@@ -44,8 +44,7 @@
 class Shell(base.CommandBase, cmd.Cmd):
     def __init__(self, host):
         base.CommandBase.__init__(
-            self, host, "shell",
-            help=_("launch libervia-cli in shell (REPL) mode")
+            self, host, "shell", help=_("launch libervia-cli in shell (REPL) mode")
         )
         cmd.Cmd.__init__(self)
 
@@ -146,7 +145,7 @@
     def do_help(self, args):
         """show help message"""
         if not args:
-            self.disp(A.color(C.A_HEADER, _("Shell commands:")), end=' ')
+            self.disp(A.color(C.A_HEADER, _("Shell commands:")), end=" ")
         super(Shell, self).do_help(args)
         if not args:
             self.disp(A.color(C.A_HEADER, _("Action commands:")))
@@ -191,7 +190,7 @@
 
     def do_version(self, args):
         """show current backend/CLI version"""
-        self.run_cmd(['--version'])
+        self.run_cmd(["--version"])
 
     def do_shell(self, args):
         """launch an external command (you can use ![command] too)"""
--- a/libervia/cli/cmd_uri.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/cmd_uri.py	Wed Jun 19 18:44:57 2024 +0200
@@ -38,9 +38,7 @@
         )
 
     def add_parser_options(self):
-        self.parser.add_argument(
-            "uri", help=_("XMPP URI to parse")
-        )
+        self.parser.add_argument("uri", help=_("XMPP URI to parse"))
 
     async def start(self):
         await self.output(uri.parse_xmpp_uri(self.args.uri))
--- a/libervia/cli/common.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/common.py	Wed Jun 19 18:44:57 2024 +0200
@@ -210,9 +210,9 @@
         content_file_obj.close()
 
         # we prepare arguments
-        editor = config.config_get(self.sat_conf, C.CONFIG_SECTION, "editor") or os.getenv(
-            "EDITOR", "vi"
-        )
+        editor = config.config_get(
+            self.sat_conf, C.CONFIG_SECTION, "editor"
+        ) or os.getenv("EDITOR", "vi")
         try:
             # is there custom arguments in sat.conf ?
             editor_args = config.config_get(
@@ -436,7 +436,9 @@
             self.disp("Editing requested published item", 2)
             try:
                 if self.use_metadata:
-                    content, metadata, item = await self.get_item_data(service, node, item)
+                    content, metadata, item = await self.get_item_data(
+                        service, node, item
+                    )
                 else:
                     content, item = await self.get_item_data(service, node, item)
             except Exception as e:
@@ -600,7 +602,10 @@
             filters = {}
         filters = [filters.get(k) for k in keys]
         return cls(
-            host, (cls.read_dict_values(d, keys, defaults) for d in data), headers, filters
+            host,
+            (cls.read_dict_values(d, keys, defaults) for d in data),
+            headers,
+            filters,
         )
 
     def _headers(self, head_sep, headers, sizes, alignment="left", style=None):
--- a/libervia/cli/constants.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/constants.py	Wed Jun 19 18:44:57 2024 +0200
@@ -90,11 +90,13 @@
     A_FILE = A.FG_WHITE
 
     # rich
-    THEME_DEFAULT = Theme({
-        "priority_low": "green",
-        "priority_medium": "yellow",
-        "priority_high": "bold magenta",
-        "priority_urgent": "bold red",
-        "notif_title": "bold",
-        "shortcut": "bold underline"
-    })
+    THEME_DEFAULT = Theme(
+        {
+            "priority_low": "green",
+            "priority_medium": "yellow",
+            "priority_high": "bold magenta",
+            "priority_urgent": "bold red",
+            "notif_title": "bold",
+            "shortcut": "bold underline",
+        }
+    )
--- a/libervia/cli/loops.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/loops.py	Wed Jun 19 18:44:57 2024 +0200
@@ -23,8 +23,7 @@
 from libervia.cli.constants import Const as C
 from libervia.frontends.tools import aio
 
-log.basicConfig(level=log.WARNING,
-                format='[%(name)s] %(message)s')
+log.basicConfig(level=log.WARNING, format="[%(name)s] %(message)s")
 
 USER_INTER_MSG = _("User interruption: good bye")
 
@@ -37,7 +36,7 @@
 
 
 def get_libervia_cli_loop(bridge_name):
-    if 'dbus' in bridge_name:
+    if "dbus" in bridge_name:
         import signal
 
         class LiberviaCLILoop:
@@ -76,9 +75,11 @@
                 """
                 print("\r" + USER_INTER_MSG)
                 self.quit(C.EXIT_USER_CANCELLED)
+
     else:
         import signal
         from twisted.internet import asyncioreactor
+
         asyncioreactor.install()
         from twisted.internet import reactor, defer
 
@@ -105,6 +106,7 @@
                     await defer.Deferred.fromFuture(fut)
                 except BaseException:
                     import traceback
+
                     traceback.print_exc()
                     libervia_cli.quit(1)
 
@@ -136,7 +138,6 @@
                 self._exit_code = C.EXIT_USER_CANCELLED
                 reactor.callFromThread(reactor.stop)
 
-
     if bridge_name == "embedded":
         raise NotImplementedError
         # from sat.core import sat_main
--- a/libervia/cli/output_std.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/output_std.py	Wed Jun 19 18:44:57 2024 +0200
@@ -79,10 +79,12 @@
     def messages(self, data):
         # TODO: handle lang, and non chat message (normal, headline)
         for mess_data in data:
-            (uid, timestamp, from_jid, to_jid, message, subject, mess_type,
-             extra) = mess_data
-            time_str = date_utils.date_fmt(timestamp, "auto_day",
-                                           tz_info=date_utils.TZ_LOCAL)
+            (uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra) = (
+                mess_data
+            )
+            time_str = date_utils.date_fmt(
+                timestamp, "auto_day", tz_info=date_utils.TZ_LOCAL
+            )
             from_jid = jid.JID(from_jid)
             if mess_type == C.MESS_TYPE_GROUPCHAT:
                 nick = from_jid.resource
@@ -95,10 +97,19 @@
                 nick_color = A.BOLD + A.FG_YELLOW
             message = list(message.values())[0] if message else ""
 
-            self.host.disp(A.color(
-                A.FG_CYAN, '['+time_str+'] ',
-                nick_color, nick, A.RESET, A.BOLD, '> ',
-                A.RESET, message))
+            self.host.disp(
+                A.color(
+                    A.FG_CYAN,
+                    "[" + time_str + "] ",
+                    nick_color,
+                    nick,
+                    A.RESET,
+                    A.BOLD,
+                    "> ",
+                    A.RESET,
+                    message,
+                )
+            )
 
 
 class Json(object):
--- a/libervia/cli/output_template.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/output_template.py	Wed Jun 19 18:44:57 2024 +0200
@@ -43,15 +43,19 @@
 
     def _front_url_tmp_dir(self, ctx, relative_url, tmp_dir):
         """Get front URL for temporary directory"""
-        template_data = ctx['template_data']
+        template_data = ctx["template_data"]
         return "file://" + os.path.join(tmp_dir, template_data.theme, relative_url)
 
     def _do_render(self, template_path, css_inline, **kwargs):
         try:
             return self.renderer.render(template_path, css_inline=css_inline, **kwargs)
         except template.TemplateNotFound:
-            self.host.disp(_("Can't find requested template: {template_path}")
-                .format(template_path=template_path), error=True)
+            self.host.disp(
+                _("Can't find requested template: {template_path}").format(
+                    template_path=template_path
+                ),
+                error=True,
+            )
             self.host.quit(C.EXIT_NOT_FOUND)
 
     def render(self, data):
@@ -71,9 +75,11 @@
             template_path = cmd.TEMPLATE
         except AttributeError:
             if not "template" in cmd.args.output_opts:
-                self.host.disp(_(
-                    "no default template set for this command, you need to specify a "
-                    "template using --oo template=[path/to/template.html]"),
+                self.host.disp(
+                    _(
+                        "no default template set for this command, you need to specify a "
+                        "template using --oo template=[path/to/template.html]"
+                    ),
                     error=True,
                 )
                 self.host.quit(C.EXIT_BAD_ARG)
@@ -86,8 +92,7 @@
             # template is not specified, we use default one
             pass
         if template_path is None:
-            self.host.disp(_("Can't parse template, please check its syntax"),
-                           error=True)
+            self.host.disp(_("Can't parse template, please check its syntax"), error=True)
             self.host.quit(C.EXIT_BAD_ARG)
 
         try:
@@ -104,12 +109,16 @@
             tmp_dir = tempfile.mkdtemp()
             front_url_filter = partial(self._front_url_tmp_dir, tmp_dir=tmp_dir)
             self.renderer = template.Renderer(
-                self.host, front_url_filter=front_url_filter, trusted=True)
+                self.host, front_url_filter=front_url_filter, trusted=True
+            )
             rendered = self._do_render(template_path, css_inline=css_inline, **kwargs)
-            self.host.disp(_(
-                "Browser opening requested.\n"
-                "Temporary files are put in the following directory, you'll have to "
-                "delete it yourself once finished viewing: {}").format(tmp_dir))
+            self.host.disp(
+                _(
+                    "Browser opening requested.\n"
+                    "Temporary files are put in the following directory, you'll have to "
+                    "delete it yourself once finished viewing: {}"
+                ).format(tmp_dir)
+            )
             tmp_file = os.path.join(tmp_dir, template_name)
             with open(tmp_file, "w") as f:
                 f.write(rendered.encode("utf-8"))
@@ -122,6 +131,7 @@
                 # we have to copy static files in a subdirectory, to avoid file download
                 # to be blocked by same origin policy
                 import shutil
+
                 shutil.copytree(
                     static_dir, os.path.join(tmp_dir, theme, C.TEMPLATE_STATIC_DIR)
                 )
--- a/libervia/cli/output_xmlui.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/output_xmlui.py	Wed Jun 19 18:44:57 2024 +0200
@@ -35,9 +35,7 @@
     def __init__(self, host):
         self.host = host
         host.register_output(C.OUTPUT_XMLUI, "simple", self.xmlui, default=True)
-        host.register_output(
-            C.OUTPUT_LIST_XMLUI, "simple", self.xmlui_list, default=True
-        )
+        host.register_output(C.OUTPUT_LIST_XMLUI, "simple", self.xmlui_list, default=True)
 
     async def xmlui(self, data):
         xmlui = xmlui_manager.create(self.host, data)
--- a/libervia/cli/xml_tools.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/cli/xml_tools.py	Wed Jun 19 18:44:57 2024 +0200
@@ -20,6 +20,7 @@
 from libervia.backend.core.i18n import _
 from libervia.cli.constants import Const as C
 
+
 def etree_parse(cmd, raw_xml, reraise=False):
     """import lxml and parse raw XML
 
@@ -46,11 +47,10 @@
     except Exception as e:
         if reraise:
             raise e
-        cmd.parser.error(
-            _("Can't parse the payload XML in input: {msg}").format(msg=e)
-        )
+        cmd.parser.error(_("Can't parse the payload XML in input: {msg}").format(msg=e))
     return element, etree
 
+
 def get_payload(cmd, element):
     """Retrieve payload element and exit with and error if not found
 
@@ -59,8 +59,9 @@
     """
     if element.tag in ("item", "{http://jabber.org/protocol/pubsub}item"):
         if len(element) > 1:
-            cmd.disp(_("<item> can only have one child element (the payload)"),
-                     error=True)
+            cmd.disp(
+                _("<item> can only have one child element (the payload)"), error=True
+            )
             cmd.host.quit(C.EXIT_DATA_ERROR)
         element = element[0]
     return element
--- a/libervia/frontends/bridge/dbus_bridge.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/bridge/dbus_bridge.py	Wed Jun 19 18:44:57 2024 +0200
@@ -33,12 +33,10 @@
 
 # Interface prefix
 const_INT_PREFIX = config.config_get(
-    config.parse_main_conf(),
-    "",
-    "bridge_dbus_int_prefix",
-    "org.libervia.Libervia")
+    config.parse_main_conf(), "", "bridge_dbus_int_prefix", "org.libervia.Libervia"
+)
 const_ERROR_PREFIX = const_INT_PREFIX + ".error"
-const_OBJ_PATH = '/org/libervia/Libervia/bridge'
+const_OBJ_PATH = "/org/libervia/Libervia/bridge"
 const_CORE_SUFFIX = ".core"
 const_PLUGIN_SUFFIX = ".plugin"
 const_TIMEOUT = 120
@@ -52,7 +50,7 @@
     """
     full_name = dbus_e.get_dbus_name()
     if full_name.startswith(const_ERROR_PREFIX):
-        name = dbus_e.get_dbus_name()[len(const_ERROR_PREFIX) + 1:]
+        name = dbus_e.get_dbus_name()[len(const_ERROR_PREFIX) + 1 :]
     else:
         name = full_name
     # XXX: dbus_e.args doesn't contain the original DBusException args, but we
@@ -62,7 +60,7 @@
     try:
         message, condition = ast.literal_eval(message)
     except (SyntaxError, ValueError, TypeError):
-        condition = ''
+        condition = ""
     return BridgeException(name, message, condition)
 
 
@@ -71,24 +69,33 @@
     def bridge_connect(self, callback, errback):
         try:
             self.sessions_bus = dbus.SessionBus()
-            self.db_object = self.sessions_bus.get_object(const_INT_PREFIX,
-                                                          const_OBJ_PATH)
-            self.db_core_iface = dbus.Interface(self.db_object,
-                                                dbus_interface=const_INT_PREFIX + const_CORE_SUFFIX)
-            self.db_plugin_iface = dbus.Interface(self.db_object,
-                                                  dbus_interface=const_INT_PREFIX + const_PLUGIN_SUFFIX)
+            self.db_object = self.sessions_bus.get_object(
+                const_INT_PREFIX, const_OBJ_PATH
+            )
+            self.db_core_iface = dbus.Interface(
+                self.db_object, dbus_interface=const_INT_PREFIX + const_CORE_SUFFIX
+            )
+            self.db_plugin_iface = dbus.Interface(
+                self.db_object, dbus_interface=const_INT_PREFIX + const_PLUGIN_SUFFIX
+            )
         except dbus.exceptions.DBusException as e:
-            if e._dbus_error_name in ('org.freedesktop.DBus.Error.ServiceUnknown',
-                                      'org.freedesktop.DBus.Error.Spawn.ExecFailed'):
+            if e._dbus_error_name in (
+                "org.freedesktop.DBus.Error.ServiceUnknown",
+                "org.freedesktop.DBus.Error.Spawn.ExecFailed",
+            ):
                 errback(BridgeExceptionNoService())
-            elif e._dbus_error_name == 'org.freedesktop.DBus.Error.NotSupported':
-                log.error(_("D-Bus is not launched, please see README to see instructions on how to launch it"))
+            elif e._dbus_error_name == "org.freedesktop.DBus.Error.NotSupported":
+                log.error(
+                    _(
+                        "D-Bus is not launched, please see README to see instructions on how to launch it"
+                    )
+                )
                 errback(BridgeInitError)
             else:
                 errback(e)
         else:
             callback()
-        #props = self.db_core_iface.getProperties()
+        # props = self.db_core_iface.getProperties()
 
     def register_signal(self, functionName, handler, iface="core"):
         if iface == "core":
@@ -96,10 +103,10 @@
         elif iface == "plugin":
             self.db_plugin_iface.connect_to_signal(functionName, handler)
         else:
-            log.error(_('Unknown interface'))
+            log.error(_("Unknown interface"))
 
     def __getattribute__(self, name):
-        """ usual __getattribute__ if the method exists, else try to find a plugin method """
+        """usual __getattribute__ if the method exists, else try to find a plugin method"""
         try:
             return object.__getattribute__(self, name)
         except AttributeError:
@@ -114,20 +121,26 @@
                 args = list(args)
 
                 if kwargs:
-                    if 'callback' in kwargs:
+                    if "callback" in kwargs:
                         async_ = True
-                        _callback = kwargs.pop('callback')
-                        _errback = kwargs.pop('errback', lambda failure: log.error(str(failure)))
+                        _callback = kwargs.pop("callback")
+                        _errback = kwargs.pop(
+                            "errback", lambda failure: log.error(str(failure))
+                        )
                     try:
-                        args.append(kwargs.pop('profile'))
+                        args.append(kwargs.pop("profile"))
                     except KeyError:
                         try:
-                            args.append(kwargs.pop('profile_key'))
+                            args.append(kwargs.pop("profile_key"))
                         except KeyError:
                             pass
                     # at this point, kwargs should be empty
                     if kwargs:
-                        log.warning("unexpected keyword arguments, they will be ignored: {}".format(kwargs))
+                        log.warning(
+                            "unexpected keyword arguments, they will be ignored: {}".format(
+                                kwargs
+                            )
+                        )
                 elif len(args) >= 2 and callable(args[-1]) and callable(args[-2]):
                     async_ = True
                     _errback = args.pop()
@@ -136,9 +149,11 @@
                 method = getattr(self.db_plugin_iface, name)
 
                 if async_:
-                    kwargs['timeout'] = const_TIMEOUT
-                    kwargs['reply_handler'] = _callback
-                    kwargs['error_handler'] = lambda err: _errback(dbus_to_bridge_exception(err))
+                    kwargs["timeout"] = const_TIMEOUT
+                    kwargs["reply_handler"] = _callback
+                    kwargs["error_handler"] = lambda err: _errback(
+                        dbus_to_bridge_exception(err)
+                    )
 
                 try:
                     return method(*args, **kwargs)
@@ -158,14 +173,25 @@
 
             return get_plugin_method
 
-    def action_launch(self, callback_id, data, profile_key="@DEFAULT@", callback=None, errback=None):
+    def action_launch(
+        self, callback_id, data, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return str(self.db_core_iface.action_launch(callback_id, data, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler))
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return str(
+            self.db_core_iface.action_launch(
+                callback_id,
+                data,
+                profile_key,
+                timeout=const_TIMEOUT,
+                reply_handler=callback,
+                error_handler=error_handler,
+            )
+        )
 
     def actions_get(self, profile_key="@DEFAULT@", callback=None, errback=None):
         if callback is None:
@@ -173,12 +199,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.actions_get(profile_key, **kwargs)
 
     def config_get(self, section, name, callback=None, errback=None):
@@ -187,45 +213,69 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return str(self.db_core_iface.config_get(section, name, **kwargs))
 
-    def connect(self, profile_key="@DEFAULT@", password='', options={}, callback=None, errback=None):
+    def connect(
+        self,
+        profile_key="@DEFAULT@",
+        password="",
+        options={},
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.connect(profile_key, password, options, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.connect(
+            profile_key,
+            password,
+            options,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def contact_add(self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None):
+    def contact_add(
+        self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.contact_add(entity_jid, profile_key, **kwargs)
 
-    def contact_del(self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None):
+    def contact_del(
+        self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.contact_del(entity_jid, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.contact_del(
+            entity_jid,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def contact_get(self, arg_0, profile_key="@DEFAULT@", callback=None, errback=None):
         if callback is None:
@@ -233,22 +283,38 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.contact_get(arg_0, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.contact_get(
+            arg_0,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def contact_update(self, entity_jid, name, groups, profile_key="@DEFAULT@", callback=None, errback=None):
+    def contact_update(
+        self,
+        entity_jid,
+        name,
+        groups,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
-        return self.db_core_iface.contact_update(entity_jid, name, groups, profile_key, **kwargs)
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
+        return self.db_core_iface.contact_update(
+            entity_jid, name, groups, profile_key, **kwargs
+        )
 
     def contacts_get(self, profile_key="@DEFAULT@", callback=None, errback=None):
         if callback is None:
@@ -256,21 +322,28 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.contacts_get(profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.contacts_get(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def contacts_get_from_group(self, group, profile_key="@DEFAULT@", callback=None, errback=None):
+    def contacts_get_from_group(
+        self, group, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.contacts_get_from_group(group, profile_key, **kwargs)
 
     def devices_infos_get(self, bare_jid, profile_key, callback=None, errback=None):
@@ -279,35 +352,99 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return str(self.db_core_iface.devices_infos_get(bare_jid, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler))
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return str(
+            self.db_core_iface.devices_infos_get(
+                bare_jid,
+                profile_key,
+                timeout=const_TIMEOUT,
+                reply_handler=callback,
+                error_handler=error_handler,
+            )
+        )
 
-    def disco_find_by_features(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key="@DEFAULT@", callback=None, errback=None):
+    def disco_find_by_features(
+        self,
+        namespaces,
+        identities,
+        bare_jid=False,
+        service=True,
+        roster=True,
+        own_jid=True,
+        local_device=False,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.disco_find_by_features(namespaces, identities, bare_jid, service, roster, own_jid, local_device, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.disco_find_by_features(
+            namespaces,
+            identities,
+            bare_jid,
+            service,
+            roster,
+            own_jid,
+            local_device,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def disco_infos(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None):
+    def disco_infos(
+        self,
+        entity_jid,
+        node="",
+        use_cache=True,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.disco_infos(entity_jid, node, use_cache, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.disco_infos(
+            entity_jid,
+            node,
+            use_cache,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def disco_items(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None):
+    def disco_items(
+        self,
+        entity_jid,
+        node="",
+        use_cache=True,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.disco_items(entity_jid, node, use_cache, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.disco_items(
+            entity_jid,
+            node,
+            use_cache,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def disconnect(self, profile_key="@DEFAULT@", callback=None, errback=None):
         if callback is None:
@@ -315,8 +452,13 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.disconnect(profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.disconnect(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def encryption_namespace_get(self, arg_0, callback=None, errback=None):
         if callback is None:
@@ -324,12 +466,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return str(self.db_core_iface.encryption_namespace_get(arg_0, **kwargs))
 
     def encryption_plugins_get(self, callback=None, errback=None):
@@ -338,22 +480,33 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return str(self.db_core_iface.encryption_plugins_get(**kwargs))
 
-    def encryption_trust_ui_get(self, to_jid, namespace, profile_key, callback=None, errback=None):
+    def encryption_trust_ui_get(
+        self, to_jid, namespace, profile_key, callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return str(self.db_core_iface.encryption_trust_ui_get(to_jid, namespace, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler))
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return str(
+            self.db_core_iface.encryption_trust_ui_get(
+                to_jid,
+                namespace,
+                profile_key,
+                timeout=const_TIMEOUT,
+                reply_handler=callback,
+                error_handler=error_handler,
+            )
+        )
 
     def entities_data_get(self, jids, keys, profile, callback=None, errback=None):
         if callback is None:
@@ -361,12 +514,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.entities_data_get(jids, keys, profile, **kwargs)
 
     def entity_data_get(self, jid, keys, profile, callback=None, errback=None):
@@ -375,12 +528,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.entity_data_get(jid, keys, profile, **kwargs)
 
     def features_get(self, profile_key, callback=None, errback=None):
@@ -389,17 +542,42 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.features_get(profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.features_get(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def history_get(self, from_jid, to_jid, limit, between=True, filters='', profile="@NONE@", callback=None, errback=None):
+    def history_get(
+        self,
+        from_jid,
+        to_jid,
+        limit,
+        between=True,
+        filters="",
+        profile="@NONE@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.history_get(from_jid, to_jid, limit, between, filters, profile, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.history_get(
+            from_jid,
+            to_jid,
+            limit,
+            between,
+            filters,
+            profile,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def image_check(self, arg_0, callback=None, errback=None):
         if callback is None:
@@ -407,12 +585,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return str(self.db_core_iface.image_check(arg_0, **kwargs))
 
     def image_convert(self, source, dest, arg_2, extra, callback=None, errback=None):
@@ -421,17 +599,37 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return str(self.db_core_iface.image_convert(source, dest, arg_2, extra, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler))
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return str(
+            self.db_core_iface.image_convert(
+                source,
+                dest,
+                arg_2,
+                extra,
+                timeout=const_TIMEOUT,
+                reply_handler=callback,
+                error_handler=error_handler,
+            )
+        )
 
-    def image_generate_preview(self, image_path, profile_key, callback=None, errback=None):
+    def image_generate_preview(
+        self, image_path, profile_key, callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return str(self.db_core_iface.image_generate_preview(image_path, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler))
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return str(
+            self.db_core_iface.image_generate_preview(
+                image_path,
+                profile_key,
+                timeout=const_TIMEOUT,
+                reply_handler=callback,
+                error_handler=error_handler,
+            )
+        )
 
     def image_resize(self, image_path, width, height, callback=None, errback=None):
         if callback is None:
@@ -439,8 +637,17 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return str(self.db_core_iface.image_resize(image_path, width, height, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler))
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return str(
+            self.db_core_iface.image_resize(
+                image_path,
+                width,
+                height,
+                timeout=const_TIMEOUT,
+                reply_handler=callback,
+                error_handler=error_handler,
+            )
+        )
 
     def init_pre_script(self, callback=None, errback=None):
         if callback is None:
@@ -448,8 +655,10 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.init_pre_script(timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.init_pre_script(
+            timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler
+        )
 
     def is_connected(self, profile_key="@DEFAULT@", callback=None, errback=None):
         if callback is None:
@@ -457,27 +666,31 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.is_connected(profile_key, **kwargs)
 
-    def main_resource_get(self, contact_jid, profile_key="@DEFAULT@", callback=None, errback=None):
+    def main_resource_get(
+        self, contact_jid, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
-        return str(self.db_core_iface.main_resource_get(contact_jid, profile_key, **kwargs))
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
+        return str(
+            self.db_core_iface.main_resource_get(contact_jid, profile_key, **kwargs)
+        )
 
     def menu_help_get(self, menu_id, language, callback=None, errback=None):
         if callback is None:
@@ -485,22 +698,40 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return str(self.db_core_iface.menu_help_get(menu_id, language, **kwargs))
 
-    def menu_launch(self, menu_type, path, data, security_limit, profile_key, callback=None, errback=None):
+    def menu_launch(
+        self,
+        menu_type,
+        path,
+        data,
+        security_limit,
+        profile_key,
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.menu_launch(menu_type, path, data, security_limit, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.menu_launch(
+            menu_type,
+            path,
+            data,
+            security_limit,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def menus_get(self, language, security_limit, callback=None, errback=None):
         if callback is None:
@@ -508,12 +739,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.menus_get(language, security_limit, **kwargs)
 
     def message_encryption_get(self, to_jid, profile_key, callback=None, errback=None):
@@ -522,22 +753,40 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
-        return str(self.db_core_iface.message_encryption_get(to_jid, profile_key, **kwargs))
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
+        return str(
+            self.db_core_iface.message_encryption_get(to_jid, profile_key, **kwargs)
+        )
 
-    def message_encryption_start(self, to_jid, namespace='', replace=False, profile_key="@NONE@", callback=None, errback=None):
+    def message_encryption_start(
+        self,
+        to_jid,
+        namespace="",
+        replace=False,
+        profile_key="@NONE@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.message_encryption_start(to_jid, namespace, replace, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.message_encryption_start(
+            to_jid,
+            namespace,
+            replace,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def message_encryption_stop(self, to_jid, profile_key, callback=None, errback=None):
         if callback is None:
@@ -545,17 +794,43 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.message_encryption_stop(to_jid, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.message_encryption_stop(
+            to_jid,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def message_send(self, to_jid, message, subject={}, mess_type="auto", extra={}, profile_key="@NONE@", callback=None, errback=None):
+    def message_send(
+        self,
+        to_jid,
+        message,
+        subject={},
+        mess_type="auto",
+        extra={},
+        profile_key="@NONE@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.message_send(to_jid, message, subject, mess_type, extra, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.message_send(
+            to_jid,
+            message,
+            subject,
+            mess_type,
+            extra,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def namespaces_get(self, callback=None, errback=None):
         if callback is None:
@@ -563,55 +838,89 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.namespaces_get(**kwargs)
 
-    def notification_add(self, type_, body_plain, body_rich, title, is_global, requires_action, arg_6, priority, expire_at, extra, callback=None, errback=None):
+    def notification_add(
+        self,
+        type_,
+        body_plain,
+        body_rich,
+        title,
+        is_global,
+        requires_action,
+        arg_6,
+        priority,
+        expire_at,
+        extra,
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
-        return self.db_core_iface.notification_add(type_, body_plain, body_rich, title, is_global, requires_action, arg_6, priority, expire_at, extra, **kwargs)
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
+        return self.db_core_iface.notification_add(
+            type_,
+            body_plain,
+            body_rich,
+            title,
+            is_global,
+            requires_action,
+            arg_6,
+            priority,
+            expire_at,
+            extra,
+            **kwargs,
+        )
 
-    def notification_delete(self, id_, is_global, profile_key, callback=None, errback=None):
+    def notification_delete(
+        self, id_, is_global, profile_key, callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
-        return self.db_core_iface.notification_delete(id_, is_global, profile_key, **kwargs)
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
+        return self.db_core_iface.notification_delete(
+            id_, is_global, profile_key, **kwargs
+        )
 
-    def notifications_expired_clean(self, limit_timestamp, profile_key, callback=None, errback=None):
+    def notifications_expired_clean(
+        self, limit_timestamp, profile_key, callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
-        return self.db_core_iface.notifications_expired_clean(limit_timestamp, profile_key, **kwargs)
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
+        return self.db_core_iface.notifications_expired_clean(
+            limit_timestamp, profile_key, **kwargs
+        )
 
     def notifications_get(self, filters, profile_key, callback=None, errback=None):
         if callback is None:
@@ -619,59 +928,120 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return str(self.db_core_iface.notifications_get(filters, profile_key, **kwargs))
 
-    def param_get_a(self, name, category, attribute="value", profile_key="@DEFAULT@", callback=None, errback=None):
+    def param_get_a(
+        self,
+        name,
+        category,
+        attribute="value",
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
+        if callback is None:
+            error_handler = None
+        else:
+            if errback is None:
+                errback = log.error
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
+        if callback is not None:
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
+        return str(
+            self.db_core_iface.param_get_a(
+                name, category, attribute, profile_key, **kwargs
+            )
+        )
+
+    def param_get_a_async(
+        self,
+        name,
+        category,
+        attribute="value",
+        security_limit=-1,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
-        if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
-        return str(self.db_core_iface.param_get_a(name, category, attribute, profile_key, **kwargs))
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return str(
+            self.db_core_iface.param_get_a_async(
+                name,
+                category,
+                attribute,
+                security_limit,
+                profile_key,
+                timeout=const_TIMEOUT,
+                reply_handler=callback,
+                error_handler=error_handler,
+            )
+        )
 
-    def param_get_a_async(self, name, category, attribute="value", security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None):
+    def param_set(
+        self,
+        name,
+        value,
+        category,
+        security_limit=-1,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return str(self.db_core_iface.param_get_a_async(name, category, attribute, security_limit, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler))
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
+        if callback is not None:
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
+        return self.db_core_iface.param_set(
+            name, value, category, security_limit, profile_key, **kwargs
+        )
 
-    def param_set(self, name, value, category, security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None):
+    def param_ui_get(
+        self,
+        security_limit=-1,
+        app="",
+        extra="",
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
-        if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
-        return self.db_core_iface.param_set(name, value, category, security_limit, profile_key, **kwargs)
-
-    def param_ui_get(self, security_limit=-1, app='', extra='', profile_key="@DEFAULT@", callback=None, errback=None):
-        if callback is None:
-            error_handler = None
-        else:
-            if errback is None:
-                errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return str(self.db_core_iface.param_ui_get(security_limit, app, extra, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler))
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return str(
+            self.db_core_iface.param_ui_get(
+                security_limit,
+                app,
+                extra,
+                profile_key,
+                timeout=const_TIMEOUT,
+                reply_handler=callback,
+                error_handler=error_handler,
+            )
+        )
 
     def params_categories_get(self, callback=None, errback=None):
         if callback is None:
@@ -679,26 +1049,28 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.params_categories_get(**kwargs)
 
-    def params_register_app(self, xml, security_limit=-1, app='', callback=None, errback=None):
+    def params_register_app(
+        self, xml, security_limit=-1, app="", callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.params_register_app(xml, security_limit, app, **kwargs)
 
     def params_template_load(self, filename, callback=None, errback=None):
@@ -707,12 +1079,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.params_template_load(filename, **kwargs)
 
     def params_template_save(self, filename, callback=None, errback=None):
@@ -721,36 +1093,64 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.params_template_save(filename, **kwargs)
 
-    def params_values_from_category_get_async(self, category, security_limit=-1, app="", extra="", profile_key="@DEFAULT@", callback=None, errback=None):
+    def params_values_from_category_get_async(
+        self,
+        category,
+        security_limit=-1,
+        app="",
+        extra="",
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.params_values_from_category_get_async(category, security_limit, app, extra, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.params_values_from_category_get_async(
+            category,
+            security_limit,
+            app,
+            extra,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def presence_set(self, to_jid='', show='', statuses={}, profile_key="@DEFAULT@", callback=None, errback=None):
+    def presence_set(
+        self,
+        to_jid="",
+        show="",
+        statuses={},
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
-        return self.db_core_iface.presence_set(to_jid, show, statuses, profile_key, **kwargs)
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
+        return self.db_core_iface.presence_set(
+            to_jid, show, statuses, profile_key, **kwargs
+        )
 
     def presence_statuses_get(self, profile_key="@DEFAULT@", callback=None, errback=None):
         if callback is None:
@@ -758,12 +1158,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.presence_statuses_get(profile_key, **kwargs)
 
     def private_data_delete(self, namespace, key, arg_2, callback=None, errback=None):
@@ -772,8 +1172,15 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.private_data_delete(namespace, key, arg_2, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.private_data_delete(
+            namespace,
+            key,
+            arg_2,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def private_data_get(self, namespace, key, profile_key, callback=None, errback=None):
         if callback is None:
@@ -781,26 +1188,54 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return str(self.db_core_iface.private_data_get(namespace, key, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler))
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return str(
+            self.db_core_iface.private_data_get(
+                namespace,
+                key,
+                profile_key,
+                timeout=const_TIMEOUT,
+                reply_handler=callback,
+                error_handler=error_handler,
+            )
+        )
 
-    def private_data_set(self, namespace, key, data, profile_key, callback=None, errback=None):
+    def private_data_set(
+        self, namespace, key, data, profile_key, callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.private_data_set(namespace, key, data, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.private_data_set(
+            namespace,
+            key,
+            data,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def profile_create(self, profile, password='', component='', callback=None, errback=None):
+    def profile_create(
+        self, profile, password="", component="", callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.profile_create(profile, password, component, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.profile_create(
+            profile,
+            password,
+            component,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def profile_delete_async(self, profile, callback=None, errback=None):
         if callback is None:
@@ -808,21 +1243,28 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.profile_delete_async(profile, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.profile_delete_async(
+            profile,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def profile_is_session_started(self, profile_key="@DEFAULT@", callback=None, errback=None):
+    def profile_is_session_started(
+        self, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.profile_is_session_started(profile_key, **kwargs)
 
     def profile_name_get(self, profile_key="@DEFAULT@", callback=None, errback=None):
@@ -831,12 +1273,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return str(self.db_core_iface.profile_name_get(profile_key, **kwargs))
 
     def profile_set_default(self, profile, callback=None, errback=None):
@@ -845,35 +1287,45 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.profile_set_default(profile, **kwargs)
 
-    def profile_start_session(self, password='', profile_key="@DEFAULT@", callback=None, errback=None):
+    def profile_start_session(
+        self, password="", profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.profile_start_session(password, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.profile_start_session(
+            password,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
-    def profiles_list_get(self, clients=True, components=False, callback=None, errback=None):
+    def profiles_list_get(
+        self, clients=True, components=False, callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.profiles_list_get(clients, components, **kwargs)
 
     def progress_get(self, id, profile, callback=None, errback=None):
@@ -882,12 +1334,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.progress_get(id, profile, **kwargs)
 
     def progress_get_all(self, profile, callback=None, errback=None):
@@ -896,12 +1348,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.progress_get_all(profile, **kwargs)
 
     def progress_get_all_metadata(self, profile, callback=None, errback=None):
@@ -910,12 +1362,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.progress_get_all_metadata(profile, **kwargs)
 
     def ready_get(self, callback=None, errback=None):
@@ -924,8 +1376,10 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.ready_get(timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.ready_get(
+            timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler
+        )
 
     def roster_resync(self, profile_key="@DEFAULT@", callback=None, errback=None):
         if callback is None:
@@ -933,8 +1387,13 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.roster_resync(profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.roster_resync(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def session_infos_get(self, profile_key, callback=None, errback=None):
         if callback is None:
@@ -942,8 +1401,13 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        return self.db_core_iface.session_infos_get(profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        return self.db_core_iface.session_infos_get(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=callback,
+            error_handler=error_handler,
+        )
 
     def sub_waiting_get(self, profile_key="@DEFAULT@", callback=None, errback=None):
         if callback is None:
@@ -951,26 +1415,28 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.sub_waiting_get(profile_key, **kwargs)
 
-    def subscription(self, sub_type, entity, profile_key="@DEFAULT@", callback=None, errback=None):
+    def subscription(
+        self, sub_type, entity, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         if callback is None:
             error_handler = None
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return self.db_core_iface.subscription(sub_type, entity, profile_key, **kwargs)
 
     def version_get(self, callback=None, errback=None):
@@ -979,12 +1445,12 @@
         else:
             if errback is None:
                 errback = log.error
-            error_handler = lambda err:errback(dbus_to_bridge_exception(err))
-        kwargs={}
+            error_handler = lambda err: errback(dbus_to_bridge_exception(err))
+        kwargs = {}
         if callback is not None:
-            kwargs['timeout'] = const_TIMEOUT
-            kwargs['reply_handler'] = callback
-            kwargs['error_handler'] = error_handler
+            kwargs["timeout"] = const_TIMEOUT
+            kwargs["reply_handler"] = callback
+            kwargs["error_handler"] = error_handler
         return str(self.db_core_iface.version_get(**kwargs))
 
 
@@ -992,11 +1458,13 @@
 
     def register_signal(self, functionName, handler, iface="core"):
         loop = asyncio.get_running_loop()
-        async_handler = lambda *args: asyncio.run_coroutine_threadsafe(handler(*args), loop)
+        async_handler = lambda *args: asyncio.run_coroutine_threadsafe(
+            handler(*args), loop
+        )
         return super().register_signal(functionName, async_handler, iface)
 
     def __getattribute__(self, name):
-        """ usual __getattribute__ if the method exists, else try to find a plugin method """
+        """usual __getattribute__ if the method exists, else try to find a plugin method"""
         try:
             return object.__getattribute__(self, name)
         except AttributeError:
@@ -1006,16 +1474,18 @@
                 fut = loop.create_future()
                 method = getattr(self.db_plugin_iface, name)
                 reply_handler = lambda ret=None: loop.call_soon_threadsafe(
-                    fut.set_result, ret)
+                    fut.set_result, ret
+                )
                 error_handler = lambda err: loop.call_soon_threadsafe(
-                    fut.set_exception, dbus_to_bridge_exception(err))
+                    fut.set_exception, dbus_to_bridge_exception(err)
+                )
                 try:
                     method(
                         *args,
                         **kwargs,
                         timeout=const_TIMEOUT,
                         reply_handler=reply_handler,
-                        error_handler=error_handler
+                        error_handler=error_handler,
                     )
                 except ValueError as e:
                     if e.args[0].startswith("Unable to guess signature"):
@@ -1030,7 +1500,7 @@
                             **kwargs,
                             timeout=const_TIMEOUT,
                             reply_handler=reply_handler,
-                            error_handler=error_handler
+                            error_handler=error_handler,
                         )
 
                     else:
@@ -1044,7 +1514,7 @@
         fut = loop.create_future()
         super().bridge_connect(
             callback=lambda: loop.call_soon_threadsafe(fut.set_result, None),
-            errback=lambda e: loop.call_soon_threadsafe(fut.set_exception, e)
+            errback=lambda e: loop.call_soon_threadsafe(fut.set_exception, e),
         )
         return fut
 
@@ -1052,566 +1522,1212 @@
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.action_launch(callback_id, data, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.action_launch(
+            callback_id,
+            data,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def actions_get(self, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.actions_get(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.actions_get(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def config_get(self, section, name):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.config_get(section, name, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.config_get(
+            section,
+            name,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def connect(self, profile_key="@DEFAULT@", password='', options={}):
+    def connect(self, profile_key="@DEFAULT@", password="", options={}):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.connect(profile_key, password, options, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.connect(
+            profile_key,
+            password,
+            options,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def contact_add(self, entity_jid, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.contact_add(entity_jid, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.contact_add(
+            entity_jid,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def contact_del(self, entity_jid, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.contact_del(entity_jid, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.contact_del(
+            entity_jid,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def contact_get(self, arg_0, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.contact_get(arg_0, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.contact_get(
+            arg_0,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def contact_update(self, entity_jid, name, groups, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.contact_update(entity_jid, name, groups, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.contact_update(
+            entity_jid,
+            name,
+            groups,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def contacts_get(self, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.contacts_get(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.contacts_get(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def contacts_get_from_group(self, group, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.contacts_get_from_group(group, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.contacts_get_from_group(
+            group,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def devices_infos_get(self, bare_jid, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.devices_infos_get(bare_jid, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.devices_infos_get(
+            bare_jid,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def disco_find_by_features(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key="@DEFAULT@"):
+    def disco_find_by_features(
+        self,
+        namespaces,
+        identities,
+        bare_jid=False,
+        service=True,
+        roster=True,
+        own_jid=True,
+        local_device=False,
+        profile_key="@DEFAULT@",
+    ):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.disco_find_by_features(namespaces, identities, bare_jid, service, roster, own_jid, local_device, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.disco_find_by_features(
+            namespaces,
+            identities,
+            bare_jid,
+            service,
+            roster,
+            own_jid,
+            local_device,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def disco_infos(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@"):
+    def disco_infos(self, entity_jid, node="", use_cache=True, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.disco_infos(entity_jid, node, use_cache, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.disco_infos(
+            entity_jid,
+            node,
+            use_cache,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def disco_items(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@"):
+    def disco_items(self, entity_jid, node="", use_cache=True, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.disco_items(entity_jid, node, use_cache, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.disco_items(
+            entity_jid,
+            node,
+            use_cache,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def disconnect(self, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.disconnect(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.disconnect(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def encryption_namespace_get(self, arg_0):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.encryption_namespace_get(arg_0, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.encryption_namespace_get(
+            arg_0,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def encryption_plugins_get(self):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.encryption_plugins_get(timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.encryption_plugins_get(
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def encryption_trust_ui_get(self, to_jid, namespace, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.encryption_trust_ui_get(to_jid, namespace, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.encryption_trust_ui_get(
+            to_jid,
+            namespace,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def entities_data_get(self, jids, keys, profile):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.entities_data_get(jids, keys, profile, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.entities_data_get(
+            jids,
+            keys,
+            profile,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def entity_data_get(self, jid, keys, profile):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.entity_data_get(jid, keys, profile, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.entity_data_get(
+            jid,
+            keys,
+            profile,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def features_get(self, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.features_get(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.features_get(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def history_get(self, from_jid, to_jid, limit, between=True, filters='', profile="@NONE@"):
+    def history_get(
+        self, from_jid, to_jid, limit, between=True, filters="", profile="@NONE@"
+    ):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.history_get(from_jid, to_jid, limit, between, filters, profile, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.history_get(
+            from_jid,
+            to_jid,
+            limit,
+            between,
+            filters,
+            profile,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def image_check(self, arg_0):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.image_check(arg_0, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.image_check(
+            arg_0,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def image_convert(self, source, dest, arg_2, extra):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.image_convert(source, dest, arg_2, extra, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.image_convert(
+            source,
+            dest,
+            arg_2,
+            extra,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def image_generate_preview(self, image_path, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.image_generate_preview(image_path, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.image_generate_preview(
+            image_path,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def image_resize(self, image_path, width, height):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.image_resize(image_path, width, height, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.image_resize(
+            image_path,
+            width,
+            height,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def init_pre_script(self):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.init_pre_script(timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.init_pre_script(
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def is_connected(self, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.is_connected(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.is_connected(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def main_resource_get(self, contact_jid, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.main_resource_get(contact_jid, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.main_resource_get(
+            contact_jid,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def menu_help_get(self, menu_id, language):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.menu_help_get(menu_id, language, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.menu_help_get(
+            menu_id,
+            language,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def menu_launch(self, menu_type, path, data, security_limit, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.menu_launch(menu_type, path, data, security_limit, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.menu_launch(
+            menu_type,
+            path,
+            data,
+            security_limit,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def menus_get(self, language, security_limit):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.menus_get(language, security_limit, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.menus_get(
+            language,
+            security_limit,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def message_encryption_get(self, to_jid, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.message_encryption_get(to_jid, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.message_encryption_get(
+            to_jid,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def message_encryption_start(self, to_jid, namespace='', replace=False, profile_key="@NONE@"):
+    def message_encryption_start(
+        self, to_jid, namespace="", replace=False, profile_key="@NONE@"
+    ):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.message_encryption_start(to_jid, namespace, replace, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.message_encryption_start(
+            to_jid,
+            namespace,
+            replace,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def message_encryption_stop(self, to_jid, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.message_encryption_stop(to_jid, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.message_encryption_stop(
+            to_jid,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def message_send(self, to_jid, message, subject={}, mess_type="auto", extra={}, profile_key="@NONE@"):
+    def message_send(
+        self,
+        to_jid,
+        message,
+        subject={},
+        mess_type="auto",
+        extra={},
+        profile_key="@NONE@",
+    ):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.message_send(to_jid, message, subject, mess_type, extra, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.message_send(
+            to_jid,
+            message,
+            subject,
+            mess_type,
+            extra,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def namespaces_get(self):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.namespaces_get(timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.namespaces_get(
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def notification_add(self, type_, body_plain, body_rich, title, is_global, requires_action, arg_6, priority, expire_at, extra):
+    def notification_add(
+        self,
+        type_,
+        body_plain,
+        body_rich,
+        title,
+        is_global,
+        requires_action,
+        arg_6,
+        priority,
+        expire_at,
+        extra,
+    ):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.notification_add(type_, body_plain, body_rich, title, is_global, requires_action, arg_6, priority, expire_at, extra, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.notification_add(
+            type_,
+            body_plain,
+            body_rich,
+            title,
+            is_global,
+            requires_action,
+            arg_6,
+            priority,
+            expire_at,
+            extra,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def notification_delete(self, id_, is_global, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.notification_delete(id_, is_global, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.notification_delete(
+            id_,
+            is_global,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def notifications_expired_clean(self, limit_timestamp, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.notifications_expired_clean(limit_timestamp, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.notifications_expired_clean(
+            limit_timestamp,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def notifications_get(self, filters, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.notifications_get(filters, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.notifications_get(
+            filters,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def param_get_a(self, name, category, attribute="value", profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.param_get_a(name, category, attribute, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.param_get_a(
+            name,
+            category,
+            attribute,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def param_get_a_async(self, name, category, attribute="value", security_limit=-1, profile_key="@DEFAULT@"):
+    def param_get_a_async(
+        self,
+        name,
+        category,
+        attribute="value",
+        security_limit=-1,
+        profile_key="@DEFAULT@",
+    ):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.param_get_a_async(name, category, attribute, security_limit, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.param_get_a_async(
+            name,
+            category,
+            attribute,
+            security_limit,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def param_set(self, name, value, category, security_limit=-1, profile_key="@DEFAULT@"):
+    def param_set(
+        self, name, value, category, security_limit=-1, profile_key="@DEFAULT@"
+    ):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.param_set(name, value, category, security_limit, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.param_set(
+            name,
+            value,
+            category,
+            security_limit,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def param_ui_get(self, security_limit=-1, app='', extra='', profile_key="@DEFAULT@"):
+    def param_ui_get(self, security_limit=-1, app="", extra="", profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.param_ui_get(security_limit, app, extra, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.param_ui_get(
+            security_limit,
+            app,
+            extra,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def params_categories_get(self):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.params_categories_get(timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.params_categories_get(
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def params_register_app(self, xml, security_limit=-1, app=''):
+    def params_register_app(self, xml, security_limit=-1, app=""):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.params_register_app(xml, security_limit, app, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.params_register_app(
+            xml,
+            security_limit,
+            app,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def params_template_load(self, filename):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.params_template_load(filename, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.params_template_load(
+            filename,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def params_template_save(self, filename):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.params_template_save(filename, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.params_template_save(
+            filename,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def params_values_from_category_get_async(self, category, security_limit=-1, app="", extra="", profile_key="@DEFAULT@"):
+    def params_values_from_category_get_async(
+        self, category, security_limit=-1, app="", extra="", profile_key="@DEFAULT@"
+    ):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.params_values_from_category_get_async(category, security_limit, app, extra, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.params_values_from_category_get_async(
+            category,
+            security_limit,
+            app,
+            extra,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def presence_set(self, to_jid='', show='', statuses={}, profile_key="@DEFAULT@"):
+    def presence_set(self, to_jid="", show="", statuses={}, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.presence_set(to_jid, show, statuses, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.presence_set(
+            to_jid,
+            show,
+            statuses,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def presence_statuses_get(self, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.presence_statuses_get(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.presence_statuses_get(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def private_data_delete(self, namespace, key, arg_2):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.private_data_delete(namespace, key, arg_2, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.private_data_delete(
+            namespace,
+            key,
+            arg_2,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def private_data_get(self, namespace, key, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.private_data_get(namespace, key, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.private_data_get(
+            namespace,
+            key,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def private_data_set(self, namespace, key, data, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.private_data_set(namespace, key, data, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.private_data_set(
+            namespace,
+            key,
+            data,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def profile_create(self, profile, password='', component=''):
+    def profile_create(self, profile, password="", component=""):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.profile_create(profile, password, component, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.profile_create(
+            profile,
+            password,
+            component,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def profile_delete_async(self, profile):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.profile_delete_async(profile, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.profile_delete_async(
+            profile,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def profile_is_session_started(self, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.profile_is_session_started(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.profile_is_session_started(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def profile_name_get(self, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.profile_name_get(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.profile_name_get(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def profile_set_default(self, profile):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.profile_set_default(profile, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.profile_set_default(
+            profile,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
-    def profile_start_session(self, password='', profile_key="@DEFAULT@"):
+    def profile_start_session(self, password="", profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.profile_start_session(password, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.profile_start_session(
+            password,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def profiles_list_get(self, clients=True, components=False):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.profiles_list_get(clients, components, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.profiles_list_get(
+            clients,
+            components,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def progress_get(self, id, profile):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.progress_get(id, profile, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.progress_get(
+            id,
+            profile,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def progress_get_all(self, profile):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.progress_get_all(profile, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.progress_get_all(
+            profile,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def progress_get_all_metadata(self, profile):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.progress_get_all_metadata(profile, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.progress_get_all_metadata(
+            profile,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def ready_get(self):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.ready_get(timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.ready_get(
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def roster_resync(self, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.roster_resync(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.roster_resync(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def session_infos_get(self, profile_key):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.session_infos_get(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.session_infos_get(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def sub_waiting_get(self, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.sub_waiting_get(profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.sub_waiting_get(
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def subscription(self, sub_type, entity, profile_key="@DEFAULT@"):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.subscription(sub_type, entity, profile_key, timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.subscription(
+            sub_type,
+            entity,
+            profile_key,
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
 
     def version_get(self):
         loop = asyncio.get_running_loop()
         fut = loop.create_future()
         reply_handler = lambda ret=None: loop.call_soon_threadsafe(fut.set_result, ret)
-        error_handler = lambda err: loop.call_soon_threadsafe(fut.set_exception, dbus_to_bridge_exception(err))
-        self.db_core_iface.version_get(timeout=const_TIMEOUT, reply_handler=reply_handler, error_handler=error_handler)
+        error_handler = lambda err: loop.call_soon_threadsafe(
+            fut.set_exception, dbus_to_bridge_exception(err)
+        )
+        self.db_core_iface.version_get(
+            timeout=const_TIMEOUT,
+            reply_handler=reply_handler,
+            error_handler=error_handler,
+        )
         return fut
--- a/libervia/frontends/bridge/pb.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/bridge/pb.py	Wed Jun 19 18:44:57 2024 +0200
@@ -48,9 +48,7 @@
             pass
         else:
             raise exceptions.InternalError(
-                "{name} signal handler has been registered twice".format(
-                    name=method_name
-                )
+                "{name} signal handler has been registered twice".format(name=method_name)
             )
         setattr(self, method_name, handler)
 
@@ -70,8 +68,7 @@
         """Convert Failure to BridgeException"""
         ori_errback(
             BridgeException(
-                name=failure_.type.decode('utf-8'),
-                message=str(failure_.value)
+                name=failure_.type.decode("utf-8"), message=str(failure_.value)
             )
         )
 
@@ -164,8 +161,9 @@
     def register_signal(self, functionName, handler, iface="core"):
         self.signals_handler.register_signal(functionName, handler, iface)
 
-
-    def action_launch(self, callback_id, data, profile_key="@DEFAULT@", callback=None, errback=None):
+    def action_launch(
+        self, callback_id, data, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         d = self.root.callRemote("action_launch", callback_id, data, profile_key)
         if callback is not None:
             d.addCallback(callback)
@@ -192,7 +190,14 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def connect(self, profile_key="@DEFAULT@", password='', options={}, callback=None, errback=None):
+    def connect(
+        self,
+        profile_key="@DEFAULT@",
+        password="",
+        options={},
+        callback=None,
+        errback=None,
+    ):
         d = self.root.callRemote("connect", profile_key, password, options)
         if callback is not None:
             d.addCallback(callback)
@@ -201,7 +206,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def contact_add(self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None):
+    def contact_add(
+        self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         d = self.root.callRemote("contact_add", entity_jid, profile_key)
         if callback is not None:
             d.addCallback(lambda __: callback())
@@ -210,7 +217,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def contact_del(self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None):
+    def contact_del(
+        self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         d = self.root.callRemote("contact_del", entity_jid, profile_key)
         if callback is not None:
             d.addCallback(lambda __: callback())
@@ -228,7 +237,15 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def contact_update(self, entity_jid, name, groups, profile_key="@DEFAULT@", callback=None, errback=None):
+    def contact_update(
+        self,
+        entity_jid,
+        name,
+        groups,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         d = self.root.callRemote("contact_update", entity_jid, name, groups, profile_key)
         if callback is not None:
             d.addCallback(lambda __: callback())
@@ -246,7 +263,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def contacts_get_from_group(self, group, profile_key="@DEFAULT@", callback=None, errback=None):
+    def contacts_get_from_group(
+        self, group, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         d = self.root.callRemote("contacts_get_from_group", group, profile_key)
         if callback is not None:
             d.addCallback(callback)
@@ -264,8 +283,30 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def disco_find_by_features(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key="@DEFAULT@", callback=None, errback=None):
-        d = self.root.callRemote("disco_find_by_features", namespaces, identities, bare_jid, service, roster, own_jid, local_device, profile_key)
+    def disco_find_by_features(
+        self,
+        namespaces,
+        identities,
+        bare_jid=False,
+        service=True,
+        roster=True,
+        own_jid=True,
+        local_device=False,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
+        d = self.root.callRemote(
+            "disco_find_by_features",
+            namespaces,
+            identities,
+            bare_jid,
+            service,
+            roster,
+            own_jid,
+            local_device,
+            profile_key,
+        )
         if callback is not None:
             d.addCallback(callback)
         if errback is None:
@@ -273,7 +314,15 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def disco_infos(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None):
+    def disco_infos(
+        self,
+        entity_jid,
+        node="",
+        use_cache=True,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         d = self.root.callRemote("disco_infos", entity_jid, node, use_cache, profile_key)
         if callback is not None:
             d.addCallback(callback)
@@ -282,7 +331,15 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def disco_items(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None):
+    def disco_items(
+        self,
+        entity_jid,
+        node="",
+        use_cache=True,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         d = self.root.callRemote("disco_items", entity_jid, node, use_cache, profile_key)
         if callback is not None:
             d.addCallback(callback)
@@ -318,8 +375,12 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def encryption_trust_ui_get(self, to_jid, namespace, profile_key, callback=None, errback=None):
-        d = self.root.callRemote("encryption_trust_ui_get", to_jid, namespace, profile_key)
+    def encryption_trust_ui_get(
+        self, to_jid, namespace, profile_key, callback=None, errback=None
+    ):
+        d = self.root.callRemote(
+            "encryption_trust_ui_get", to_jid, namespace, profile_key
+        )
         if callback is not None:
             d.addCallback(callback)
         if errback is None:
@@ -354,8 +415,20 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def history_get(self, from_jid, to_jid, limit, between=True, filters='', profile="@NONE@", callback=None, errback=None):
-        d = self.root.callRemote("history_get", from_jid, to_jid, limit, between, filters, profile)
+    def history_get(
+        self,
+        from_jid,
+        to_jid,
+        limit,
+        between=True,
+        filters="",
+        profile="@NONE@",
+        callback=None,
+        errback=None,
+    ):
+        d = self.root.callRemote(
+            "history_get", from_jid, to_jid, limit, between, filters, profile
+        )
         if callback is not None:
             d.addCallback(callback)
         if errback is None:
@@ -381,7 +454,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def image_generate_preview(self, image_path, profile_key, callback=None, errback=None):
+    def image_generate_preview(
+        self, image_path, profile_key, callback=None, errback=None
+    ):
         d = self.root.callRemote("image_generate_preview", image_path, profile_key)
         if callback is not None:
             d.addCallback(callback)
@@ -417,7 +492,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def main_resource_get(self, contact_jid, profile_key="@DEFAULT@", callback=None, errback=None):
+    def main_resource_get(
+        self, contact_jid, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         d = self.root.callRemote("main_resource_get", contact_jid, profile_key)
         if callback is not None:
             d.addCallback(callback)
@@ -435,8 +512,19 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def menu_launch(self, menu_type, path, data, security_limit, profile_key, callback=None, errback=None):
-        d = self.root.callRemote("menu_launch", menu_type, path, data, security_limit, profile_key)
+    def menu_launch(
+        self,
+        menu_type,
+        path,
+        data,
+        security_limit,
+        profile_key,
+        callback=None,
+        errback=None,
+    ):
+        d = self.root.callRemote(
+            "menu_launch", menu_type, path, data, security_limit, profile_key
+        )
         if callback is not None:
             d.addCallback(callback)
         if errback is None:
@@ -462,8 +550,18 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def message_encryption_start(self, to_jid, namespace='', replace=False, profile_key="@NONE@", callback=None, errback=None):
-        d = self.root.callRemote("message_encryption_start", to_jid, namespace, replace, profile_key)
+    def message_encryption_start(
+        self,
+        to_jid,
+        namespace="",
+        replace=False,
+        profile_key="@NONE@",
+        callback=None,
+        errback=None,
+    ):
+        d = self.root.callRemote(
+            "message_encryption_start", to_jid, namespace, replace, profile_key
+        )
         if callback is not None:
             d.addCallback(lambda __: callback())
         if errback is None:
@@ -480,8 +578,20 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def message_send(self, to_jid, message, subject={}, mess_type="auto", extra={}, profile_key="@NONE@", callback=None, errback=None):
-        d = self.root.callRemote("message_send", to_jid, message, subject, mess_type, extra, profile_key)
+    def message_send(
+        self,
+        to_jid,
+        message,
+        subject={},
+        mess_type="auto",
+        extra={},
+        profile_key="@NONE@",
+        callback=None,
+        errback=None,
+    ):
+        d = self.root.callRemote(
+            "message_send", to_jid, message, subject, mess_type, extra, profile_key
+        )
         if callback is not None:
             d.addCallback(lambda __: callback())
         if errback is None:
@@ -498,8 +608,34 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def notification_add(self, type_, body_plain, body_rich, title, is_global, requires_action, arg_6, priority, expire_at, extra, callback=None, errback=None):
-        d = self.root.callRemote("notification_add", type_, body_plain, body_rich, title, is_global, requires_action, arg_6, priority, expire_at, extra)
+    def notification_add(
+        self,
+        type_,
+        body_plain,
+        body_rich,
+        title,
+        is_global,
+        requires_action,
+        arg_6,
+        priority,
+        expire_at,
+        extra,
+        callback=None,
+        errback=None,
+    ):
+        d = self.root.callRemote(
+            "notification_add",
+            type_,
+            body_plain,
+            body_rich,
+            title,
+            is_global,
+            requires_action,
+            arg_6,
+            priority,
+            expire_at,
+            extra,
+        )
         if callback is not None:
             d.addCallback(lambda __: callback())
         if errback is None:
@@ -507,7 +643,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def notification_delete(self, id_, is_global, profile_key, callback=None, errback=None):
+    def notification_delete(
+        self, id_, is_global, profile_key, callback=None, errback=None
+    ):
         d = self.root.callRemote("notification_delete", id_, is_global, profile_key)
         if callback is not None:
             d.addCallback(lambda __: callback())
@@ -516,8 +654,12 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def notifications_expired_clean(self, limit_timestamp, profile_key, callback=None, errback=None):
-        d = self.root.callRemote("notifications_expired_clean", limit_timestamp, profile_key)
+    def notifications_expired_clean(
+        self, limit_timestamp, profile_key, callback=None, errback=None
+    ):
+        d = self.root.callRemote(
+            "notifications_expired_clean", limit_timestamp, profile_key
+        )
         if callback is not None:
             d.addCallback(lambda __: callback())
         if errback is None:
@@ -534,7 +676,15 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def param_get_a(self, name, category, attribute="value", profile_key="@DEFAULT@", callback=None, errback=None):
+    def param_get_a(
+        self,
+        name,
+        category,
+        attribute="value",
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         d = self.root.callRemote("param_get_a", name, category, attribute, profile_key)
         if callback is not None:
             d.addCallback(callback)
@@ -543,8 +693,19 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def param_get_a_async(self, name, category, attribute="value", security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None):
-        d = self.root.callRemote("param_get_a_async", name, category, attribute, security_limit, profile_key)
+    def param_get_a_async(
+        self,
+        name,
+        category,
+        attribute="value",
+        security_limit=-1,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
+        d = self.root.callRemote(
+            "param_get_a_async", name, category, attribute, security_limit, profile_key
+        )
         if callback is not None:
             d.addCallback(callback)
         if errback is None:
@@ -552,8 +713,19 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def param_set(self, name, value, category, security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None):
-        d = self.root.callRemote("param_set", name, value, category, security_limit, profile_key)
+    def param_set(
+        self,
+        name,
+        value,
+        category,
+        security_limit=-1,
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
+        d = self.root.callRemote(
+            "param_set", name, value, category, security_limit, profile_key
+        )
         if callback is not None:
             d.addCallback(lambda __: callback())
         if errback is None:
@@ -561,7 +733,15 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def param_ui_get(self, security_limit=-1, app='', extra='', profile_key="@DEFAULT@", callback=None, errback=None):
+    def param_ui_get(
+        self,
+        security_limit=-1,
+        app="",
+        extra="",
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         d = self.root.callRemote("param_ui_get", security_limit, app, extra, profile_key)
         if callback is not None:
             d.addCallback(callback)
@@ -579,7 +759,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def params_register_app(self, xml, security_limit=-1, app='', callback=None, errback=None):
+    def params_register_app(
+        self, xml, security_limit=-1, app="", callback=None, errback=None
+    ):
         d = self.root.callRemote("params_register_app", xml, security_limit, app)
         if callback is not None:
             d.addCallback(lambda __: callback())
@@ -606,8 +788,24 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def params_values_from_category_get_async(self, category, security_limit=-1, app="", extra="", profile_key="@DEFAULT@", callback=None, errback=None):
-        d = self.root.callRemote("params_values_from_category_get_async", category, security_limit, app, extra, profile_key)
+    def params_values_from_category_get_async(
+        self,
+        category,
+        security_limit=-1,
+        app="",
+        extra="",
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
+        d = self.root.callRemote(
+            "params_values_from_category_get_async",
+            category,
+            security_limit,
+            app,
+            extra,
+            profile_key,
+        )
         if callback is not None:
             d.addCallback(callback)
         if errback is None:
@@ -615,7 +813,15 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def presence_set(self, to_jid='', show='', statuses={}, profile_key="@DEFAULT@", callback=None, errback=None):
+    def presence_set(
+        self,
+        to_jid="",
+        show="",
+        statuses={},
+        profile_key="@DEFAULT@",
+        callback=None,
+        errback=None,
+    ):
         d = self.root.callRemote("presence_set", to_jid, show, statuses, profile_key)
         if callback is not None:
             d.addCallback(lambda __: callback())
@@ -651,7 +857,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def private_data_set(self, namespace, key, data, profile_key, callback=None, errback=None):
+    def private_data_set(
+        self, namespace, key, data, profile_key, callback=None, errback=None
+    ):
         d = self.root.callRemote("private_data_set", namespace, key, data, profile_key)
         if callback is not None:
             d.addCallback(lambda __: callback())
@@ -660,7 +868,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def profile_create(self, profile, password='', component='', callback=None, errback=None):
+    def profile_create(
+        self, profile, password="", component="", callback=None, errback=None
+    ):
         d = self.root.callRemote("profile_create", profile, password, component)
         if callback is not None:
             d.addCallback(lambda __: callback())
@@ -678,7 +888,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def profile_is_session_started(self, profile_key="@DEFAULT@", callback=None, errback=None):
+    def profile_is_session_started(
+        self, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         d = self.root.callRemote("profile_is_session_started", profile_key)
         if callback is not None:
             d.addCallback(callback)
@@ -705,7 +917,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def profile_start_session(self, password='', profile_key="@DEFAULT@", callback=None, errback=None):
+    def profile_start_session(
+        self, password="", profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         d = self.root.callRemote("profile_start_session", password, profile_key)
         if callback is not None:
             d.addCallback(callback)
@@ -714,7 +928,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def profiles_list_get(self, clients=True, components=False, callback=None, errback=None):
+    def profiles_list_get(
+        self, clients=True, components=False, callback=None, errback=None
+    ):
         d = self.root.callRemote("profiles_list_get", clients, components)
         if callback is not None:
             d.addCallback(callback)
@@ -786,7 +1002,9 @@
         else:
             d.addErrback(self._errback, ori_errback=errback)
 
-    def subscription(self, sub_type, entity, profile_key="@DEFAULT@", callback=None, errback=None):
+    def subscription(
+        self, sub_type, entity, profile_key="@DEFAULT@", callback=None, errback=None
+    ):
         d = self.root.callRemote("subscription", sub_type, entity, profile_key)
         if callback is not None:
             d.addCallback(lambda __: callback())
@@ -809,7 +1027,8 @@
 
     def register_signal(self, name, handler, iface="core"):
         async_handler = lambda *args, **kwargs: defer.Deferred.fromFuture(
-            asyncio.ensure_future(handler(*args, **kwargs)))
+            asyncio.ensure_future(handler(*args, **kwargs))
+        )
         return super().register_signal(name, async_handler, iface)
 
 
@@ -821,9 +1040,8 @@
     def _errback(self, failure_):
         """Convert Failure to BridgeException"""
         raise BridgeException(
-            name=failure_.type.decode('utf-8'),
-            message=str(failure_.value)
-            )
+            name=failure_.type.decode("utf-8"), message=str(failure_.value)
+        )
 
     def call(self, name, *args, **kwargs):
         d = self.root.callRemote(name, *args, *kwargs)
@@ -849,7 +1067,7 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def connect(self, profile_key="@DEFAULT@", password='', options={}):
+    def connect(self, profile_key="@DEFAULT@", password="", options={}):
         d = self.root.callRemote("connect", profile_key, password, options)
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
@@ -889,17 +1107,37 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def disco_find_by_features(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key="@DEFAULT@"):
-        d = self.root.callRemote("disco_find_by_features", namespaces, identities, bare_jid, service, roster, own_jid, local_device, profile_key)
+    def disco_find_by_features(
+        self,
+        namespaces,
+        identities,
+        bare_jid=False,
+        service=True,
+        roster=True,
+        own_jid=True,
+        local_device=False,
+        profile_key="@DEFAULT@",
+    ):
+        d = self.root.callRemote(
+            "disco_find_by_features",
+            namespaces,
+            identities,
+            bare_jid,
+            service,
+            roster,
+            own_jid,
+            local_device,
+            profile_key,
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def disco_infos(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@"):
+    def disco_infos(self, entity_jid, node="", use_cache=True, profile_key="@DEFAULT@"):
         d = self.root.callRemote("disco_infos", entity_jid, node, use_cache, profile_key)
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def disco_items(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@"):
+    def disco_items(self, entity_jid, node="", use_cache=True, profile_key="@DEFAULT@"):
         d = self.root.callRemote("disco_items", entity_jid, node, use_cache, profile_key)
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
@@ -920,7 +1158,9 @@
         return d.asFuture(asyncio.get_event_loop())
 
     def encryption_trust_ui_get(self, to_jid, namespace, profile_key):
-        d = self.root.callRemote("encryption_trust_ui_get", to_jid, namespace, profile_key)
+        d = self.root.callRemote(
+            "encryption_trust_ui_get", to_jid, namespace, profile_key
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
@@ -939,8 +1179,12 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def history_get(self, from_jid, to_jid, limit, between=True, filters='', profile="@NONE@"):
-        d = self.root.callRemote("history_get", from_jid, to_jid, limit, between, filters, profile)
+    def history_get(
+        self, from_jid, to_jid, limit, between=True, filters="", profile="@NONE@"
+    ):
+        d = self.root.callRemote(
+            "history_get", from_jid, to_jid, limit, between, filters, profile
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
@@ -985,7 +1229,9 @@
         return d.asFuture(asyncio.get_event_loop())
 
     def menu_launch(self, menu_type, path, data, security_limit, profile_key):
-        d = self.root.callRemote("menu_launch", menu_type, path, data, security_limit, profile_key)
+        d = self.root.callRemote(
+            "menu_launch", menu_type, path, data, security_limit, profile_key
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
@@ -999,8 +1245,12 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def message_encryption_start(self, to_jid, namespace='', replace=False, profile_key="@NONE@"):
-        d = self.root.callRemote("message_encryption_start", to_jid, namespace, replace, profile_key)
+    def message_encryption_start(
+        self, to_jid, namespace="", replace=False, profile_key="@NONE@"
+    ):
+        d = self.root.callRemote(
+            "message_encryption_start", to_jid, namespace, replace, profile_key
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
@@ -1009,8 +1259,18 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def message_send(self, to_jid, message, subject={}, mess_type="auto", extra={}, profile_key="@NONE@"):
-        d = self.root.callRemote("message_send", to_jid, message, subject, mess_type, extra, profile_key)
+    def message_send(
+        self,
+        to_jid,
+        message,
+        subject={},
+        mess_type="auto",
+        extra={},
+        profile_key="@NONE@",
+    ):
+        d = self.root.callRemote(
+            "message_send", to_jid, message, subject, mess_type, extra, profile_key
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
@@ -1019,8 +1279,32 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def notification_add(self, type_, body_plain, body_rich, title, is_global, requires_action, arg_6, priority, expire_at, extra):
-        d = self.root.callRemote("notification_add", type_, body_plain, body_rich, title, is_global, requires_action, arg_6, priority, expire_at, extra)
+    def notification_add(
+        self,
+        type_,
+        body_plain,
+        body_rich,
+        title,
+        is_global,
+        requires_action,
+        arg_6,
+        priority,
+        expire_at,
+        extra,
+    ):
+        d = self.root.callRemote(
+            "notification_add",
+            type_,
+            body_plain,
+            body_rich,
+            title,
+            is_global,
+            requires_action,
+            arg_6,
+            priority,
+            expire_at,
+            extra,
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
@@ -1030,7 +1314,9 @@
         return d.asFuture(asyncio.get_event_loop())
 
     def notifications_expired_clean(self, limit_timestamp, profile_key):
-        d = self.root.callRemote("notifications_expired_clean", limit_timestamp, profile_key)
+        d = self.root.callRemote(
+            "notifications_expired_clean", limit_timestamp, profile_key
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
@@ -1044,17 +1330,30 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def param_get_a_async(self, name, category, attribute="value", security_limit=-1, profile_key="@DEFAULT@"):
-        d = self.root.callRemote("param_get_a_async", name, category, attribute, security_limit, profile_key)
+    def param_get_a_async(
+        self,
+        name,
+        category,
+        attribute="value",
+        security_limit=-1,
+        profile_key="@DEFAULT@",
+    ):
+        d = self.root.callRemote(
+            "param_get_a_async", name, category, attribute, security_limit, profile_key
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def param_set(self, name, value, category, security_limit=-1, profile_key="@DEFAULT@"):
-        d = self.root.callRemote("param_set", name, value, category, security_limit, profile_key)
+    def param_set(
+        self, name, value, category, security_limit=-1, profile_key="@DEFAULT@"
+    ):
+        d = self.root.callRemote(
+            "param_set", name, value, category, security_limit, profile_key
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def param_ui_get(self, security_limit=-1, app='', extra='', profile_key="@DEFAULT@"):
+    def param_ui_get(self, security_limit=-1, app="", extra="", profile_key="@DEFAULT@"):
         d = self.root.callRemote("param_ui_get", security_limit, app, extra, profile_key)
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
@@ -1064,7 +1363,7 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def params_register_app(self, xml, security_limit=-1, app=''):
+    def params_register_app(self, xml, security_limit=-1, app=""):
         d = self.root.callRemote("params_register_app", xml, security_limit, app)
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
@@ -1079,12 +1378,21 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def params_values_from_category_get_async(self, category, security_limit=-1, app="", extra="", profile_key="@DEFAULT@"):
-        d = self.root.callRemote("params_values_from_category_get_async", category, security_limit, app, extra, profile_key)
+    def params_values_from_category_get_async(
+        self, category, security_limit=-1, app="", extra="", profile_key="@DEFAULT@"
+    ):
+        d = self.root.callRemote(
+            "params_values_from_category_get_async",
+            category,
+            security_limit,
+            app,
+            extra,
+            profile_key,
+        )
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def presence_set(self, to_jid='', show='', statuses={}, profile_key="@DEFAULT@"):
+    def presence_set(self, to_jid="", show="", statuses={}, profile_key="@DEFAULT@"):
         d = self.root.callRemote("presence_set", to_jid, show, statuses, profile_key)
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
@@ -1109,7 +1417,7 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def profile_create(self, profile, password='', component=''):
+    def profile_create(self, profile, password="", component=""):
         d = self.root.callRemote("profile_create", profile, password, component)
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
@@ -1134,7 +1442,7 @@
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
 
-    def profile_start_session(self, password='', profile_key="@DEFAULT@"):
+    def profile_start_session(self, password="", profile_key="@DEFAULT@"):
         d = self.root.callRemote("profile_start_session", password, profile_key)
         d.addErrback(self._errback)
         return d.asFuture(asyncio.get_event_loop())
--- a/libervia/frontends/quick_frontend/quick_app.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/quick_frontend/quick_app.py	Wed Jun 19 18:44:57 2024 +0200
@@ -44,7 +44,7 @@
     #       and a way to keep some XMLUI request between sessions is expected in backend
     host = None
     bridge = None
-    cache_keys_to_get = ['avatar', 'nicknames']
+    cache_keys_to_get = ["avatar", "nicknames"]
 
     def __init__(self, profile):
         self.profile = profile
@@ -136,10 +136,13 @@
 
     def _plug_profile_get_features_cb(self, features):
         self.host.features = features
-        self.host.bridge.entities_data_get([], ProfileManager.cache_keys_to_get,
-                                         profile=self.profile,
-                                         callback=self._plug_profile_got_cached_values,
-                                         errback=self._plug_profile_failed_cached_values)
+        self.host.bridge.entities_data_get(
+            [],
+            ProfileManager.cache_keys_to_get,
+            profile=self.profile,
+            callback=self._plug_profile_got_cached_values,
+            errback=self._plug_profile_failed_cached_values,
+        )
 
     def _plug_profile_failed_cached_values(self, failure):
         log.error("Couldn't get cached values: {}".format(failure))
@@ -153,7 +156,9 @@
                 self.host.entity_data_updated_handler(entity_s, key, value, self.profile)
 
         if not self.connected:
-            self.host.set_presence_status(C.PRESENCE_UNAVAILABLE, "", profile=self.profile)
+            self.host.set_presence_status(
+                C.PRESENCE_UNAVAILABLE, "", profile=self.profile
+            )
         else:
 
             contact_list.fill()
@@ -258,8 +263,12 @@
     AUTO_RESYNC = True
 
     def __init__(
-        self, bridge_factory, xmlui, check_options=None, connect_bridge=True,
-        async_bridge_factory=None
+        self,
+        bridge_factory,
+        xmlui,
+        check_options=None,
+        connect_bridge=True,
+        async_bridge_factory=None,
     ):
         """Create a frontend application
 
@@ -276,7 +285,7 @@
         self._plugs_in_progress = set()
         self.ready_profiles = set()  # profiles which are connected and ready
         self.signals_cache = {}  # used to keep signal received between start of
-                                 # plug_profile and when the profile is actualy ready
+        # plug_profile and when the profile is actualy ready
         self.contact_lists = quick_contact_list.QuickContactListHandler(self)
         self.widgets = quick_widgets.QuickWidgetsManager(self)
         if check_options is not None:
@@ -289,7 +298,7 @@
 
         # listeners are callable watching events
         self._listeners = {}  # key: listener type ("avatar", "selected", etc),
-                              # value: list of callbacks
+        # value: list of callbacks
 
         # cf. [register_action_handler]
         self._action_handlers: dict[str, Callable[[dict, str, int, str], None]] = {}
@@ -391,13 +400,14 @@
     def on_backend_ready(self):
         log.info("backend is ready")
         self.bridge.namespaces_get(
-            callback=self._namespaces_get_cb, errback=self._namespaces_get_eb)
+            callback=self._namespaces_get_cb, errback=self._namespaces_get_eb
+        )
         # we cache available encryption plugins, as we'll use them on each
         # new chat widget
         self.bridge.encryption_plugins_get(
             callback=self._encryption_plugins_get_cb,
-            errback=self._encryption_plugins_get_eb)
-
+            errback=self._encryption_plugins_get_eb,
+        )
 
     @property
     def current_profile(self):
@@ -596,7 +606,8 @@
             if not ignore_missing:
                 log.error(
                     f"Trying to remove an inexisting listener (type = {type_}): "
-                    f"{callback}")
+                    f"{callback}"
+                )
 
     def call_listeners(self, type_, *args, **kwargs):
         """Call the methods which listen type_ event. If a profiles filter has
@@ -769,17 +780,28 @@
     def contact_new_handler(self, jid_s, attributes, groups, profile):
         entity = jid.JID(jid_s)
         groups = list(groups)
-        self.contact_lists[profile].set_contact(entity, groups, attributes, in_roster=True)
+        self.contact_lists[profile].set_contact(
+            entity, groups, attributes, in_roster=True
+        )
 
     def message_new_handler(
-            self, uid, timestamp, from_jid_s, to_jid_s, msg, subject, type_, extra_s,
-            profile):
+        self, uid, timestamp, from_jid_s, to_jid_s, msg, subject, type_, extra_s, profile
+    ):
         from_jid = jid.JID(from_jid_s)
         to_jid = jid.JID(to_jid_s)
         extra = data_format.deserialise(extra_s)
         if not self.trigger.point(
-            "messageNewTrigger", uid, timestamp, from_jid, to_jid, msg, subject, type_,
-            extra, profile=profile,):
+            "messageNewTrigger",
+            uid,
+            timestamp,
+            from_jid,
+            to_jid,
+            msg,
+            subject,
+            type_,
+            extra,
+            profile=profile,
+        ):
             return
 
         from_me = from_jid.bare == self.profiles[profile].whoami.bare
@@ -800,9 +822,9 @@
         self.widgets.get_or_create_widget(
             quick_chat.QuickChat,
             target,
-            type_ = C.CHAT_GROUP if is_room else C.CHAT_ONE2ONE,
-            on_new_widget = None,
-            profile = profile,
+            type_=C.CHAT_GROUP if is_room else C.CHAT_ONE2ONE,
+            on_new_widget=None,
+            profile=profile,
         )
 
         if (
@@ -824,24 +846,34 @@
     def message_encryption_started_handler(self, destinee_jid_s, plugin_data, profile):
         destinee_jid = jid.JID(destinee_jid_s)
         plugin_data = data_format.deserialise(plugin_data)
-        for widget in self.widgets.get_widgets(quick_chat.QuickChat,
-                                              target=destinee_jid.bare,
-                                              profiles=(profile,)):
+        for widget in self.widgets.get_widgets(
+            quick_chat.QuickChat, target=destinee_jid.bare, profiles=(profile,)
+        ):
             widget.message_encryption_started(plugin_data)
 
     def message_encryption_stopped_handler(self, destinee_jid_s, plugin_data, profile):
         destinee_jid = jid.JID(destinee_jid_s)
-        for widget in self.widgets.get_widgets(quick_chat.QuickChat,
-                                              target=destinee_jid.bare,
-                                              profiles=(profile,)):
+        for widget in self.widgets.get_widgets(
+            quick_chat.QuickChat, target=destinee_jid.bare, profiles=(profile,)
+        ):
             widget.message_encryption_stopped(plugin_data)
 
     def message_state_handler(self, uid, status, profile):
         for widget in self.widgets.get_widgets(quick_chat.QuickChat, profiles=(profile,)):
             widget.on_message_state(uid, status, profile)
 
-    def message_send(self, to_jid, message, subject=None, mess_type="auto", extra=None, callback=None, errback=None, profile_key=C.PROF_KEY_NONE):
-        if not subject and not extra and (not message or message == {'': ''}):
+    def message_send(
+        self,
+        to_jid,
+        message,
+        subject=None,
+        mess_type="auto",
+        extra=None,
+        callback=None,
+        errback=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
+        if not subject and not extra and (not message or message == {"": ""}):
             log.debug("Not sending empty message")
             return
 
@@ -853,13 +885,23 @@
             callback = (
                 lambda __=None: None
             )  # FIXME: optional argument is here because pyjamas doesn't support callback
-               #        without arg with json proxy
+            #        without arg with json proxy
         if errback is None:
             errback = lambda failure: self.show_dialog(
                 message=failure.message, title=failure.fullname, type="error"
             )
 
-        if not self.trigger.point("messageSendTrigger", to_jid, message, subject, mess_type, extra, callback, errback, profile_key=profile_key):
+        if not self.trigger.point(
+            "messageSendTrigger",
+            to_jid,
+            message,
+            subject,
+            mess_type,
+            extra,
+            callback,
+            errback,
+            profile_key=profile_key,
+        ):
             return
 
         self.bridge.message_send(
@@ -906,7 +948,8 @@
         self.call_listeners("presence", entity, show, priority, statuses, profile=profile)
 
     def muc_room_joined_handler(
-            self, room_jid_s, occupants, user_nick, subject, statuses, profile):
+        self, room_jid_s, occupants, user_nick, subject, statuses, profile
+    ):
         """Called when a MUC room is joined"""
         log.debug(
             "Room [{room_jid}] joined by {profile}, users presents:{users}".format(
@@ -936,7 +979,8 @@
         chat_widget = self.widgets.get_widget(quick_chat.QuickChat, room_jid, profile)
         if chat_widget:
             self.widgets.delete_widget(
-                chat_widget, all_instances=True, explicit_close=True)
+                chat_widget, all_instances=True, explicit_close=True
+            )
         self.contact_lists[profile].remove_contact(room_jid)
 
     def muc_room_user_changed_nick_handler(self, room_jid_s, old_nick, new_nick, profile):
@@ -971,12 +1015,22 @@
         @param profile (unicode): current profile
         """
         from_jid = jid.JID(from_jid_s)
-        for widget in self.widgets.get_widgets(quick_chat.QuickChat, target=from_jid.bare,
-                                              profiles=(profile,)):
+        for widget in self.widgets.get_widgets(
+            quick_chat.QuickChat, target=from_jid.bare, profiles=(profile,)
+        ):
             widget.on_chat_state(from_jid, state, profile)
 
-    def notify(self, type_, entity=None, message=None, subject=None, callback=None,
-               cb_args=None, widget=None, profile=C.PROF_KEY_NONE):
+    def notify(
+        self,
+        type_,
+        entity=None,
+        message=None,
+        subject=None,
+        callback=None,
+        cb_args=None,
+        widget=None,
+        profile=C.PROF_KEY_NONE,
+    ):
         """Trigger an event notification
 
         @param type_(unicode): notifation kind,
@@ -1009,7 +1063,9 @@
         self._notif_id += 1
         self.call_listeners("notification", entity, notif_data, profile=profile)
 
-    def get_notifs(self, entity=None, type_=None, exact_jid=None, profile=C.PROF_KEY_NONE):
+    def get_notifs(
+        self, entity=None, type_=None, exact_jid=None, profile=C.PROF_KEY_NONE
+    ):
         """return notifications for given entity
 
         @param entity(jid.JID, None, C.ENTITY_ALL): jid of the entity to check
@@ -1203,11 +1259,11 @@
     def _debug_handler(self, action, parameters, profile):
         if action == "widgets_dump":
             from pprint import pformat
+
             log.info("Widgets dump:\n{data}".format(data=pformat(self.widgets._widgets)))
         else:
             log.warning("Unknown debug action: {action}".format(action=action))
 
-
     def show_dialog(self, message, title, type="info", answer_cb=None, answer_data=None):
         """Show a dialog to user
 
@@ -1276,9 +1332,7 @@
             self.call_listeners("avatar", entity, value, profile=profile)
 
     def register_action_handler(
-        self,
-        action_type: str,
-        handler: Callable[[dict, str, int, str], None]
+        self, action_type: str, handler: Callable[[dict, str, int, str], None]
     ) -> None:
         """Register a handler for action type.
 
@@ -1307,13 +1361,13 @@
     def action_manager(
         self,
         action_data: dict,
-        callback: Callable|None = None,
-        ui_show_cb: Callable|None = None,
+        callback: Callable | None = None,
+        ui_show_cb: Callable | None = None,
         user_action: bool = True,
-        action_id: str|None = None,
-        progress_cb: Callable|None = None,
-        progress_eb: Callable|None = None,
-        profile: str = C.PROF_KEY_NONE
+        action_id: str | None = None,
+        progress_cb: Callable | None = None,
+        progress_eb: Callable | None = None,
+        profile: str = C.PROF_KEY_NONE,
     ) -> None:
         """Handle backend action
 
@@ -1386,8 +1440,11 @@
             data_format.deserialise(data), callback, callback_id, profile
         )
         self.bridge.action_launch(
-            callback_id, data_format.serialise(data), profile, callback=action_cb,
-            errback=self.dialog_failure
+            callback_id,
+            data_format.serialise(data),
+            profile,
+            callback=action_cb,
+            errback=self.dialog_failure,
         )
 
     def launch_menu(
--- a/libervia/frontends/quick_frontend/quick_blog.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/quick_frontend/quick_blog.py	Wed Jun 19 18:44:57 2024 +0200
@@ -53,7 +53,7 @@
         self.title = data.get("title")
         self.title_rich = None
         self.title_xhtml = data.get("title_xhtml")
-        self.tags = data.get('tags', [])
+        self.tags = data.get("tags", [])
         self.content = data.get("content")
         self.content_rich = None
         self.content_xhtml = data.get("content_xhtml")
@@ -145,8 +145,16 @@
         for item, comments in items:
             self.add_entry(item, comments, service=service, node=node, with_update=False)
 
-    def add_entry(self, item=None, comments=None, service=None, node=None,
-                 with_update=True, editable=False, edit_entry=False):
+    def add_entry(
+        self,
+        item=None,
+        comments=None,
+        service=None,
+        node=None,
+        with_update=True,
+        editable=False,
+        edit_entry=False,
+    ):
         """Add a microblog entry
 
         @param editable (bool): True if the entry can be modified
@@ -275,7 +283,7 @@
                 if value is not None:
                     mb_data[name] = value
 
-        mb_data['tags'] = self.item.tags
+        mb_data["tags"] = self.item.tags
 
         if self.blog.new_message_target not in (C.PUBLIC, C.GROUP):
             raise NotImplementedError
@@ -284,7 +292,7 @@
             mb_data["allow_comments"] = True
 
         if self.blog.new_message_target == C.GROUP:
-            mb_data['groups'] = list(self.blog.targets)
+            mb_data["groups"] = list(self.blog.targets)
 
         self.blog.host.bridge.mb_send(
             str(self.service or ""),
@@ -391,10 +399,13 @@
             for item_data in items_data:
                 item_data[0] = data_format.deserialise(item_data[0])
                 for item_metadata in item_data[1]:
-                    item_metadata[3] = [data_format.deserialise(i) for i in item_metadata[3]]
+                    item_metadata[3] = [
+                        data_format.deserialise(i) for i in item_metadata[3]
+                    ]
             if not failure:
-                self._add_mb_items_with_comments((items_data, metadata),
-                                             service=jid.JID(service))
+                self._add_mb_items_with_comments(
+                    (items_data, metadata), service=jid.JID(service)
+                )
 
         self.update()
         if remaining:
--- a/libervia/frontends/quick_frontend/quick_chat.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/quick_frontend/quick_chat.py	Wed Jun 19 18:44:57 2024 +0200
@@ -43,8 +43,18 @@
     """Message metadata"""
 
     def __init__(
-            self, parent, uid, timestamp, from_jid, to_jid, msg, subject, type_, extra,
-            profile):
+        self,
+        parent,
+        uid,
+        timestamp,
+        from_jid,
+        to_jid,
+        msg,
+        subject,
+        type_,
+        extra,
+        profile,
+    ):
         self.parent = parent
         self.profile = profile
         self.uid = uid
@@ -77,7 +87,8 @@
             mess_type=self.type,
             time=self.time_text,
             nick=self.nick,
-            message=self.main_message)
+            message=self.main_message,
+        )
 
     def __contains__(self, item):
         return hasattr(self, item) or item in self.extra
@@ -175,7 +186,8 @@
             except (exceptions.NotFound, KeyError):
                 # we check result as listener will be called
                 self.host.bridge.identity_get(
-                    entity.bare, ["nicknames"], True, self.profile)
+                    entity.bare, ["nicknames"], True, self.profile
+                )
                 return entity.node or entity
 
             if nicknames:
@@ -228,6 +240,7 @@
 
 class MessageWidget:
     """Base classe for widgets"""
+
     # This class does nothing and is only used to have a common ancestor
 
     pass
@@ -290,8 +303,17 @@
 class QuickChat(quick_widgets.QuickWidget):
     visible_states = ["chat_state"]  # FIXME: to be removed, used only in quick_games
 
-    def __init__(self, host, target, type_=C.CHAT_ONE2ONE, nick=None, occupants=None,
-                 subject=None, statuses=None, profiles=None):
+    def __init__(
+        self,
+        host,
+        target,
+        type_=C.CHAT_ONE2ONE,
+        nick=None,
+        occupants=None,
+        subject=None,
+        statuses=None,
+        profiles=None,
+    ):
         """
         @param type_: can be C.CHAT_ONE2ONE for single conversation or C.CHAT_GROUP for
                       chat à la IRC
@@ -366,7 +388,9 @@
             if uid not in self.messages:
                 self.message_new(*data)
             else:
-                log.debug("discarding message already in history: {data}, ".format(data=data))
+                log.debug(
+                    "discarding message already in history: {data}, ".format(data=data)
+                )
         del self._cache
         log.debug("{wid} is now unlocked".format(wid=self))
 
@@ -433,13 +457,17 @@
         if self.type == C.CHAT_GROUP:
             self.occupants_clear()
             self.host.bridge.muc_occupants_get(
-                str(self.target), self.profile, callback=self.update_occupants,
-                errback=log.error)
+                str(self.target),
+                self.profile,
+                callback=self.update_occupants,
+                errback=log.error,
+            )
         self.history_print(
             size=C.HISTORY_LIMIT_NONE,
-            filters={'timestamp_start': last_message.timestamp},
+            filters={"timestamp_start": last_message.timestamp},
             callback=self._resync_complete,
-            profile=self.profile)
+            profile=self.profile,
+        )
 
     ## Widget management ##
 
@@ -464,9 +492,7 @@
     def add_target(self, target):
         super(QuickChat, self).add_target(target)
         if target.resource:
-            self.current_target = (
-                target
-            )  # FIXME: tmp, must use resource priority throught contactList instead
+            self.current_target = target  # FIXME: tmp, must use resource priority through contactList instead
 
     def recreate_args(self, args, kwargs):
         """copy important attribute for a new widget"""
@@ -526,12 +552,15 @@
         updated_occupants = set(occupants)
         left_occupants = local_occupants - updated_occupants
         joined_occupants = updated_occupants - local_occupants
-        log.debug("updating occupants for {room}:\n"
-                  "left: {left_occupants}\n"
-                  "joined: {joined_occupants}"
-                  .format(room=self.target,
-                          left_occupants=", ".join(left_occupants),
-                          joined_occupants=", ".join(joined_occupants)))
+        log.debug(
+            "updating occupants for {room}:\n"
+            "left: {left_occupants}\n"
+            "joined: {joined_occupants}".format(
+                room=self.target,
+                left_occupants=", ".join(left_occupants),
+                joined_occupants=", ".join(joined_occupants),
+            )
+        )
         for nick in left_occupants:
             self.removeUser(occupants[nick])
         for nick in joined_occupants:
@@ -559,10 +588,11 @@
 
     def change_user_nick(self, old_nick, new_nick):
         """Change nick of a user in group list"""
-        log.info("{old} is now known as {new} in room {room_jid}".format(
-            old = old_nick,
-            new = new_nick,
-            room_jid = self.target))
+        log.info(
+            "{old} is now known as {new} in room {room_jid}".format(
+                old=old_nick, new=new_nick, room_jid=self.target
+            )
+        )
 
     ## Messages ##
 
@@ -584,7 +614,9 @@
                 return True
         return False
 
-    def update_history(self, size=C.HISTORY_LIMIT_DEFAULT, filters=None, profile="@NONE@"):
+    def update_history(
+        self, size=C.HISTORY_LIMIT_DEFAULT, filters=None, profile="@NONE@"
+    ):
         """Called when history need to be recreated
 
         Remove all message from history then call history_print
@@ -605,8 +637,9 @@
         """
         self.set_unlocked()
 
-    def history_print(self, size=C.HISTORY_LIMIT_DEFAULT, filters=None, callback=None,
-                     profile="@NONE@"):
+    def history_print(
+        self, size=C.HISTORY_LIMIT_DEFAULT, filters=None, callback=None, profile="@NONE@"
+    ):
         """Print the current history
 
         Note: self.set_unlocked will be called once history is printed
@@ -690,7 +723,7 @@
             str(target),
             size,
             True,
-            {k: str(v) for k,v in filters.items()},
+            {k: str(v) for k, v in filters.items()},
             profile,
             callback=_history_get_cb,
             errback=_history_get_eb,
@@ -712,13 +745,16 @@
         """
         if self.type == C.CHAT_GROUP:
             return
-        self.host.bridge.message_encryption_get(str(self.target.bare), self.profile,
-                                              callback=self.message_encryption_get_cb,
-                                              errback=self.message_encryption_get_eb)
+        self.host.bridge.message_encryption_get(
+            str(self.target.bare),
+            self.profile,
+            callback=self.message_encryption_get_cb,
+            errback=self.message_encryption_get_eb,
+        )
 
-
-    def message_new(self, uid, timestamp, from_jid, to_jid, msg, subject, type_, extra,
-                   profile):
+    def message_new(
+        self, uid, timestamp, from_jid, to_jid, msg, subject, type_, extra, profile
+    ):
         if self._locked:
             self._cache[uid] = (
                 uid,
@@ -733,8 +769,12 @@
             )
             return
 
-        if ((not msg and not subject and not extra[C.KEY_ATTACHMENTS]
-             and type_ != C.MESS_TYPE_INFO)):
+        if (
+            not msg
+            and not subject
+            and not extra[C.KEY_ATTACHMENTS]
+            and type_ != C.MESS_TYPE_INFO
+        ):
             log.warning("Received an empty message for uid {}".format(uid))
             return
 
@@ -772,13 +812,19 @@
 
     def message_encryption_started(self, session_data):
         self.encrypted = True
-        log.debug(_("message encryption started with {target} using {encryption}").format(
-            target=self.target, encryption=session_data['name']))
+        log.debug(
+            _("message encryption started with {target} using {encryption}").format(
+                target=self.target, encryption=session_data["name"]
+            )
+        )
 
     def message_encryption_stopped(self, session_data):
         self.encrypted = False
-        log.debug(_("message encryption stopped with {target} (was using {encryption})")
-                 .format(target=self.target, encryption=session_data['name']))
+        log.debug(
+            _("message encryption stopped with {target} (was using {encryption})").format(
+                target=self.target, encryption=session_data["name"]
+            )
+        )
 
     def create_message(self, message, append=False):
         """Must be implemented by frontend to create and show a new message widget
--- a/libervia/frontends/quick_frontend/quick_contact_list.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/quick_frontend/quick_contact_list.py	Wed Jun 19 18:44:57 2024 +0200
@@ -232,8 +232,10 @@
                 # we use entity's bare jid to cache data, so a resource here
                 # will cause troubles
                 log.warning(
-                    "Roster entities with resources are not managed, ignoring {entity}"
-                    .format(entity=entity))
+                    "Roster entities with resources are not managed, ignoring {entity}".format(
+                        entity=entity
+                    )
+                )
                 continue
             self.host.contact_new_handler(*contact, profile=self.profile)
         handler._contacts_filled(self.profile)
@@ -250,8 +252,13 @@
         handler.fill(self.profile)
 
     def getCache(
-        self, entity, name=None, bare_default=True, create_if_not_found=False,
-        default=Exception):
+        self,
+        entity,
+        name=None,
+        bare_default=True,
+        create_if_not_found=False,
+        default=Exception,
+    ):
         """Return a cache value for a contact
 
         @param entity(jid.JID): entity of the contact from who we want data
@@ -515,8 +522,8 @@
             else:
                 self._specials.add(entity)
                 cache[C.CONTACT_MAIN_RESOURCE] = None
-                if 'nicknames' in cache:
-                    del cache['nicknames']
+                if "nicknames" in cache:
+                    del cache["nicknames"]
 
         # now the attributes we keep in cache
         # XXX: if entity is a full jid, we store the value for the resource only
@@ -573,7 +580,8 @@
                 self.show_entities_with_notifs
                 and next(self.host.get_notifs(entity.bare, profile=self.profile), None)
             )
-            or entity.resource is None and self.is_room(entity.bare)
+            or entity.resource is None
+            and self.is_room(entity.bare)
         )
 
     def any_entity_visible(self, entities, check_resources=False):
@@ -676,7 +684,7 @@
                 priority_resource = max(
                     resources_data,
                     key=lambda res: resources_data[res].get(
-                        C.PRESENCE_PRIORITY, -2 ** 32
+                        C.PRESENCE_PRIORITY, -(2**32)
                     ),
                 )
                 cache[C.CONTACT_MAIN_RESOURCE] = priority_resource
@@ -710,7 +718,7 @@
     def unselect(self, entity):
         """Unselect an entity
 
-         @param entity(jid.JID): entity to unselect
+        @param entity(jid.JID): entity to unselect
         """
         try:
             cache = self._cache[entity.bare]
@@ -912,9 +920,9 @@
     def items_sort(self, items):
         """sort items
 
-       @param items(dict): items to sort (will be emptied !)
-       @return (OrderedDict): sorted items
-       """
+        @param items(dict): items to sort (will be emptied !)
+        @return (OrderedDict): sorted items
+        """
         ordered_items = OrderedDict()
         bare_jids = sorted(items.keys())
         for jid_ in bare_jids:
@@ -1043,9 +1051,7 @@
             what youa re doing!
         """
         log.debug(
-            "Contact lists updates are now {}".format(
-                "LOCKED" if locked else "UNLOCKED"
-            )
+            "Contact lists updates are now {}".format("LOCKED" if locked else "UNLOCKED")
         )
         self._update_locked = locked
         if not locked and do_update:
@@ -1089,11 +1095,11 @@
         return handler.items
 
     @property
-    def show_resources(self) -> bool|None:
+    def show_resources(self) -> bool | None:
         return self._show_resources
 
     @show_resources.setter
-    def show_resources(self, show: bool|None) -> None:
+    def show_resources(self, show: bool | None) -> None:
         self._show_resources = show
 
     @property
--- a/libervia/frontends/quick_frontend/quick_game_tarot.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/quick_frontend/quick_game_tarot.py	Wed Jun 19 18:44:57 2024 +0200
@@ -69,9 +69,9 @@
         @param dir: directory where the PNG files are"""
         self.cards = {}
         self.deck = []
-        self.cards[
-            "atout"
-        ] = {}  # As Tarot is a french game, it's more handy & logical to keep french names
+        self.cards["atout"] = (
+            {}
+        )  # As Tarot is a french game, it's more handy & logical to keep french names
         self.cards["pique"] = {}  # spade
         self.cards["coeur"] = {}  # heart
         self.cards["carreau"] = {}  # diamond
--- a/libervia/frontends/quick_frontend/quick_list_manager.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/quick_frontend/quick_list_manager.py	Wed Jun 19 18:44:57 2024 +0200
@@ -38,7 +38,7 @@
     @property
     def items(self):
         """Return a sorted list of all items, tagged or untagged.
-        
+
         @return list
         """
         res = list(set(self.tagged).union(self.untagged))
@@ -60,7 +60,7 @@
 
     def untag(self, items):
         """Untag some items.
-  
+
         @param items (list): items to be untagged
         """
         for item in items:
--- a/libervia/frontends/quick_frontend/quick_profile_manager.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/quick_frontend/quick_profile_manager.py	Wed Jun 19 18:44:57 2024 +0200
@@ -221,9 +221,7 @@
                 self.host.bridge.param_set(
                     "Password", password, "Connection", profile_key=self.current.profile
                 )
-                log.info(
-                    "password updated for profile [{}]".format(self.current.profile)
-                )
+                log.info("password updated for profile [{}]".format(self.current.profile))
 
     ## graphic updates (should probably be overriden in frontends) ##
 
--- a/libervia/frontends/quick_frontend/quick_utils.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/quick_frontend/quick_utils.py	Wed Jun 19 18:44:57 2024 +0200
@@ -23,7 +23,7 @@
 
 
 def get_new_path(path):
-    """ Check if path exists, and find a non existant path if needed """
+    """Check if path exists, and find a nonexistent path if needed"""
     idx = 2
     if not exists(path):
         return path
--- a/libervia/frontends/quick_frontend/quick_widgets.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/quick_frontend/quick_widgets.py	Wed Jun 19 18:44:57 2024 +0200
@@ -55,7 +55,8 @@
 
 class QuickWidgetsManager(object):
     """This class is used to manage all the widgets of a frontend
-    A widget can be a window, a graphical thing, or someting else depending of the frontend"""
+    A widget can be a window, a graphical thing, or something else depending on the frontend
+    """
 
     def __init__(self, host):
         self.host = host
@@ -257,20 +258,26 @@
                     recreate_args(_args, _kwargs)
                 widget = cls(*_args, **_kwargs)
                 widgets_map[hash_].append(widget)
-                log.debug("widget <{wid}> already exists, a new one has been recreated"
-                    .format(wid=widget))
+                log.debug(
+                    "widget <{wid}> already exists, a new one has been recreated".format(
+                        wid=widget
+                    )
+                )
             elif callable(on_existing_widget):
                 widget = on_existing_widget(widget)
                 if widget is None:
                     raise exceptions.InternalError(
-                        "on_existing_widget method must return the widget to use")
+                        "on_existing_widget method must return the widget to use"
+                    )
                 if widget not in widgets_map[hash_]:
                     log.debug(
-                        "the widget returned by on_existing_widget is new, adding it")
+                        "the widget returned by on_existing_widget is new, adding it"
+                    )
                     widgets_map[hash_].append(widget)
             else:
                 raise exceptions.InternalError(
-                    "Unexpected on_existing_widget value ({})".format(on_existing_widget))
+                    "Unexpected on_existing_widget value ({})".format(on_existing_widget)
+                )
 
         return widget
 
@@ -296,13 +303,14 @@
         """
         # TODO: all_instances must be independante kwargs, this is not possible with Python 2
         #       but will be with Python 3
-        all_instances = kwargs.get('all_instances', False)
+        all_instances = kwargs.get("all_instances", False)
 
         if all_instances:
             for w in self.get_widget_instances(widget_to_delete):
                 if w.on_delete(**kwargs) == False:
                     log.debug(
-                        f"Deletion of {widget_to_delete} cancelled by widget itself")
+                        f"Deletion of {widget_to_delete} cancelled by widget itself"
+                    )
                     return
         else:
             if widget_to_delete.on_delete(**kwargs) == False:
@@ -318,8 +326,9 @@
         except KeyError:
             log.error("no widgets_map found for class {cls}".format(cls=class_))
             return
-        widget_hash = str(class_.get_widget_hash(widget_to_delete.target,
-                                                   widget_to_delete.profiles))
+        widget_hash = str(
+            class_.get_widget_hash(widget_to_delete.target, widget_to_delete.profiles)
+        )
         try:
             widget_instances = widgets_map[widget_hash]
         except KeyError:
@@ -340,13 +349,17 @@
             # all instances with this hash have been deleted
             # we remove the hash itself
             del widgets_map[widget_hash]
-            log.debug("All instances of {cls} with hash {widget_hash!r} have been deleted"
-                .format(cls=class_, widget_hash=widget_hash))
+            log.debug(
+                "All instances of {cls} with hash {widget_hash!r} have been deleted".format(
+                    cls=class_, widget_hash=widget_hash
+                )
+            )
             self.host.call_listeners("widgetDeleted", widget_to_delete)
 
 
 class QuickWidget(object):
     """generic widget base"""
+
     # FIXME: sometime a single target is used, sometimes several ones
     #        This should be sorted out in the same way as for profiles: a single
     #        target should be possible when appropriate attribute is set.
--- a/libervia/frontends/tools/portal_desktop.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/tools/portal_desktop.py	Wed Jun 19 18:44:57 2024 +0200
@@ -77,8 +77,7 @@
         method_name: str,
         response: Literal[False],
         **kwargs,
-    ) -> None:
-       ...
+    ) -> None: ...
 
     @overload
     async def dbus_call(
@@ -87,9 +86,7 @@
         method_name: str,
         response: Literal[True],
         **kwargs,
-    ) -> dict:
-       ...
-
+    ) -> dict: ...
 
     async def dbus_call(
         self,
@@ -97,7 +94,7 @@
         method_name: str,
         response: bool,
         **kwargs,
-    ) -> dict|None:
+    ) -> dict | None:
         """Call a portal method
 
         This method handle the signal response.
@@ -264,7 +261,7 @@
                     "types": self.sources_type,
                     # hidden cursor (should be the default, but cursor appears during
                     # tests))
-                    "cursor_mode": dbus.UInt32(1)
+                    "cursor_mode": dbus.UInt32(1),
                 },
             )
 
--- a/libervia/frontends/tools/strings.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/tools/strings.py	Wed Jun 19 18:44:57 2024 +0200
@@ -57,6 +57,7 @@
     @param string (unicode): text to process
     @param new_target (bool): if True, make the link open in a new window
     """
+
     # XXX: report any change to libervia.browser.strings.add_url_to_text
     def repl(match):
         url = match.group(0)
@@ -73,6 +74,7 @@
 
     @param string (unicode): text to process
     """
+
     # XXX: report any change to libervia.browser.strings.add_url_to_image
     def repl(match):
         url = match.group(1)
--- a/libervia/frontends/tools/webrtc.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/tools/webrtc.py	Wed Jun 19 18:44:57 2024 +0200
@@ -82,13 +82,13 @@
         self,
         bridge,
         profile: str,
-        sources_data: SourcesData|None = None,
-        sinks_data: SinksData|None = None,
+        sources_data: SourcesData | None = None,
+        sinks_data: SinksData | None = None,
         reset_cb: Callable | None = None,
         merge_pip: bool | None = None,
         target_size: tuple[int, int] | None = None,
         call_start_cb: Callable[[str, dict, str], Awaitable[str]] | None = None,
-        dc_data_list: list[SourcesDataChannel|SinksDataChannel]|None = None
+        dc_data_list: list[SourcesDataChannel | SinksDataChannel] | None = None,
     ) -> None:
         """Initializes a new WebRTC instance.
 
@@ -386,32 +386,32 @@
         @param sdp: The Session Description Protocol offer string.
         """
         lines = sdp.splitlines()
-        media = ''
+        media = ""
         mid_media_map = {}
         bundle_media = set()
-        bundle_ufrag = ''
-        bundle_pwd = ''
+        bundle_ufrag = ""
+        bundle_pwd = ""
         in_bundle = False
 
         for line in lines:
-            if line.startswith('m='):
-                media = line.split('=')[1].split()[0]
-            elif line.startswith('a=mid:'):
-                mid = line.split(':')[1].strip()
+            if line.startswith("m="):
+                media = line.split("=")[1].split()[0]
+            elif line.startswith("a=mid:"):
+                mid = line.split(":")[1].strip()
                 mid_media_map[mid] = media
-            elif line.startswith('a=group:BUNDLE'):
+            elif line.startswith("a=group:BUNDLE"):
                 in_bundle = True
-                bundle_media = set(line.split(':')[1].strip().split())
-            elif line.startswith('a=ice-ufrag:'):
+                bundle_media = set(line.split(":")[1].strip().split())
+            elif line.startswith("a=ice-ufrag:"):
                 if in_bundle:
-                    bundle_ufrag = line.split(':')[1].strip()
+                    bundle_ufrag = line.split(":")[1].strip()
                 else:
-                    self.ufrag[media] = line.split(':')[1].strip()
-            elif line.startswith('a=ice-pwd:'):
+                    self.ufrag[media] = line.split(":")[1].strip()
+            elif line.startswith("a=ice-pwd:"):
                 if in_bundle:
-                    bundle_pwd = line.split(':')[1].strip()
+                    bundle_pwd = line.split(":")[1].strip()
                 else:
-                    self.pwd[media] = line.split(':')[1].strip()
+                    self.pwd[media] = line.split(":")[1].strip()
             else:
                 in_bundle = False
 
@@ -487,11 +487,10 @@
         assert role in ("initiator", "responder")
         self.role = role
 
-
         if isinstance(self.sources_data, SourcesPipeline):
-            if self.sources_data.video_pipeline!= "" and video_pt is None:
+            if self.sources_data.video_pipeline != "" and video_pt is None:
                 raise NotImplementedError(NONE_NOT_IMPLEMENTED_MSG)
-            if self.sources_data.audio_pipeline!= "" and audio_pt is None:
+            if self.sources_data.audio_pipeline != "" and audio_pt is None:
                 raise NotImplementedError(NONE_NOT_IMPLEMENTED_MSG)
         elif isinstance(self.sources_data, SourcesNone):
             pass
@@ -559,7 +558,8 @@
         if video_source_elt:
             # Video source with an input-selector to switch between normal and video mute
             # (or desktop sharing).
-            gst_pipe_elements.append(f"""
+            gst_pipe_elements.append(
+                f"""
         input-selector name=video_selector
         ! videorate drop-only=1 max-rate=30
         ! video/x-raw,framerate=30/1
@@ -581,20 +581,24 @@
         ! rtpvp8pay picture-id-mode=15-bit
         ! application/x-rtp,media=video,encoding-name=VP8,payload={video_pt}
         ! sendrecv.
-        """)
+        """
+            )
 
         if local_video_sink_elt:
             # Local video feedback.
-            gst_pipe_elements.append(f"""
+            gst_pipe_elements.append(
+                f"""
         t.
         ! queue max-size-buffers=1 max-size-time=0 max-size-bytes=0 leaky=downstream
         ! videoconvert
         ! {local_video_sink_elt}
-        """)
+        """
+            )
 
         if audio_source_elt:
             # Audio with a valve for muting.
-            gst_pipe_elements.append(r"""
+            gst_pipe_elements.append(
+                r"""
         {audio_source_elt} name=audio_src
         ! valve
         ! queue max-size-buffers=10 max-size-time=0 max-size-bytes=0 leaky=downstream
@@ -604,7 +608,8 @@
         ! rtpopuspay
         ! application/x-rtp,media=audio,encoding-name=OPUS,payload={audio_pt}
         ! sendrecv.
-        """)
+        """
+            )
 
         self.gst_pipe_desc = "\n\n".join(gst_pipe_elements)
 
@@ -708,7 +713,7 @@
         for dc_data in self.dc_data_list:
             self.create_data_channel(dc_data)
 
-    def create_data_channel(self, dc_data: SourcesDataChannel|SinksDataChannel) -> None:
+    def create_data_channel(self, dc_data: SourcesDataChannel | SinksDataChannel) -> None:
         """Create a Data Channel and connect relevant callbacks."""
         assert self.pipeline is not None
         if isinstance(dc_data, SourcesDataChannel):
@@ -729,9 +734,7 @@
         elif isinstance(dc_data, SinksDataChannel):
             self.webrtcbin.connect("on-data-channel", dc_data.dc_on_data_channel)
         else:
-            raise ValueError(
-                "Only SourcesDataChannel or SinksDataChannel are allowed."
-            )
+            raise ValueError("Only SourcesDataChannel or SinksDataChannel are allowed.")
 
     def start_pipeline(self) -> None:
         """Starts the GStreamer pipeline."""
@@ -1100,7 +1103,7 @@
             ice_data = {
                 "ufrag": self.ufrag[media_type],
                 "pwd": self.pwd[media_type],
-                "candidates": [parsed_candidate]
+                "candidates": [parsed_candidate],
             }
             self._a_call(
                 self.bridge.ice_candidates_add,
--- a/libervia/frontends/tools/webrtc_file.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/tools/webrtc_file.py	Wed Jun 19 18:44:57 2024 +0200
@@ -130,7 +130,7 @@
 
     async def send_file_webrtc(
         self,
-        file_path: Path|str,
+        file_path: Path | str,
         callee: jid.JID,
         file_name: str | None = None,
     ) -> None:
@@ -178,9 +178,7 @@
 
     @staticmethod
     def format_confirm_msg(
-        action_data: dict,
-        peer_jid: jid.JID,
-        peer_name: str|None = None
+        action_data: dict, peer_jid: jid.JID, peer_name: str | None = None
     ) -> str:
         """Format a user-friendly confirmation message.
 
@@ -191,15 +189,15 @@
         """
         file_data = action_data.get("file_data", {})
 
-        file_name = file_data.get('name')
-        file_size = file_data.get('size')
+        file_name = file_data.get("name")
+        file_size = file_data.get("size")
 
         if file_name:
             file_name_msg = 'wants to send you the file "{file_name}"'.format(
                 file_name=file_name
             )
         else:
-            file_name_msg = 'wants to send you an unnamed file'
+            file_name_msg = "wants to send you an unnamed file"
 
         if file_size is not None:
             file_size_msg = "which has a size of {file_size_human}".format(
@@ -208,7 +206,7 @@
         else:
             file_size_msg = "which has an unknown size"
 
-        file_description = file_data.get('desc')
+        file_description = file_data.get("desc")
         if file_description:
             description_msg = " Description: {}.".format(file_description)
         else:
@@ -221,14 +219,14 @@
         else:
             peer_name = f"{peer_name} ({peer_jid})"
 
-        return (
-            _("{peer_name} {file_name_msg} {file_size_msg}.{description_msg} "
-            "Do you accept?").format(
-                peer_name=peer_name,
-                file_name_msg=file_name_msg,
-                file_size_msg=file_size_msg,
-                description_msg=description_msg
-            )
+        return _(
+            "{peer_name} {file_name_msg} {file_size_msg}.{description_msg} "
+            "Do you accept?"
+        ).format(
+            peer_name=peer_name,
+            file_name_msg=file_name_msg,
+            file_size_msg=file_size_msg,
+            description_msg=description_msg,
         )
 
     def _on_dc_message_data(self, fd, data_channel, glib_data) -> None:
--- a/libervia/frontends/tools/webrtc_models.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/tools/webrtc_models.py	Wed Jun 19 18:44:57 2024 +0200
@@ -62,6 +62,7 @@
     @param dc_open_cb: Called when Data Channel is open.
         This callback will be run in a GStreamer thread.
     """
+
     name: str = Field(default_factory=lambda: str(uuid.uuid4()))
     dc_open_cb: Callable[[GstWebRTC.WebRTCDataChannel], None]
 
@@ -79,8 +80,9 @@
     @param audio_properties: Elements properties to set.
 
     """
-    video_pipeline: str|None = None
-    audio_pipeline: str|None = None
+
+    video_pipeline: str | None = None
+    audio_pipeline: str | None = None
     video_properties: dict = Field(default_factory=lambda: {})
     audio_properties: dict = Field(default_factory=lambda: {})
 
--- a/libervia/frontends/tools/webrtc_remote_control.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/tools/webrtc_remote_control.py	Wed Jun 19 18:44:57 2024 +0200
@@ -37,7 +37,7 @@
 )
 
 gi.require_versions({"Gst": "1.0", "GstWebRTC": "1.0"})
-OnOpenCbType = Callable[["WebRTCRemoteController"], None|Awaitable[None]]
+OnOpenCbType = Callable[["WebRTCRemoteController"], None | Awaitable[None]]
 MOUSE_BTN_LEFT = 0x110
 MOUSE_BTN_RIGHT = 0x111
 MOUSE_BTN_MIDDLE = 0x112
@@ -71,7 +71,7 @@
         self.on_call_start_cb = on_call_start_cb
         self.end_call_cb = end_call_cb
         self.loop = asyncio.get_event_loop()
-        self.data_channel: GstWebRTC.WebRTCDataChannel|None = None
+        self.data_channel: GstWebRTC.WebRTCDataChannel | None = None
 
     def send_input(self, input_data: dict) -> None:
         """Send an input data to controlled device
@@ -95,9 +95,7 @@
         rc_data.update(options)
         remote_control_data_s = await self.bridge.remote_control_start(
             str(callee),
-            data_format.serialise(
-               rc_data
-            ),
+            data_format.serialise(rc_data),
             profile,
         )
         remote_control_data = data_format.deserialise(remote_control_data_s)
@@ -126,10 +124,7 @@
         await aio.maybe_async(on_open_cb(self))
 
     async def start(
-        self,
-        callee: jid.JID,
-        options: dict,
-        on_open_cb: OnOpenCbType
+        self, callee: jid.JID, options: dict, on_open_cb: OnOpenCbType
     ) -> None:
         """Start a remote control session with ``callee``
 
@@ -154,8 +149,11 @@
 class WebRTCRemoteControlReceiver:
 
     def __init__(
-        self, bridge, profile: str, on_close_cb: Callable[[], Any] | None = None,
-        verbose: bool = False
+        self,
+        bridge,
+        profile: str,
+        on_close_cb: Callable[[], Any] | None = None,
+        verbose: bool = False,
     ) -> None:
         """Initializes the File Receiver.
 
@@ -169,8 +167,8 @@
         self.on_close_cb = on_close_cb
         self.loop = asyncio.get_event_loop()
         self.desktop_portal = None
-        self.remote_desktop_data: dict|None = None
-        self.stream_node_id: int|None = None
+        self.remote_desktop_data: dict | None = None
+        self.stream_node_id: int | None = None
         self.verbose = verbose
 
     async def do_input(self, data: dict) -> None:
@@ -183,9 +181,7 @@
                         x, y = data["x"], data["y"]
                     except KeyError:
                         dx, dy = data["movementX"], data["movementY"]
-                        await self.desktop_portal.notify_pointer_motion(
-                            dx, dy
-                        )
+                        await self.desktop_portal.notify_pointer_motion(dx, dy)
                     else:
                         assert self.stream_node_id is not None
                         await self.desktop_portal.notify_pointer_motion_absolute(
@@ -200,28 +196,23 @@
                     # see https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/buttons#value
                     if buttons & 1:
                         await self.desktop_portal.notify_pointer_button(
-                            MOUSE_BTN_LEFT,
-                            state
+                            MOUSE_BTN_LEFT, state
                         )
                     if buttons & 2:
                         await self.desktop_portal.notify_pointer_button(
-                            MOUSE_BTN_RIGHT,
-                            state
+                            MOUSE_BTN_RIGHT, state
                         )
                     if buttons & 4:
                         await self.desktop_portal.notify_pointer_button(
-                            MOUSE_BTN_MIDDLE,
-                            state
+                            MOUSE_BTN_MIDDLE, state
                         )
                     if buttons & 8:
                         await self.desktop_portal.notify_pointer_button(
-                            MOUSE_BTN_BACK,
-                            state
+                            MOUSE_BTN_BACK, state
                         )
                     if buttons & 16:
                         await self.desktop_portal.notify_pointer_button(
-                            MOUSE_BTN_FORWARD,
-                            state
+                            MOUSE_BTN_FORWARD, state
                         )
             elif type_ == "wheel":
                 dx = data.get("deltaX", 0)
@@ -229,23 +220,14 @@
                 delta_mode = data["deltaMode"]
                 if delta_mode == 0:
                     # deltas are in pixels
-                    await self.desktop_portal.notify_pointer_axis(
-                        dx,
-                        dy
-                    )
+                    await self.desktop_portal.notify_pointer_axis(dx, dy)
                 else:
                     # deltas are in steps (see
                     # https://developer.mozilla.org/en-US/docs/Web/API/Element/wheel_event#event_properties)
                     if dx:
-                        await self.desktop_portal.notify_pointer_axis(
-                            1,
-                            dx
-                        )
+                        await self.desktop_portal.notify_pointer_axis(1, dx)
                     if dy:
-                        await self.desktop_portal.notify_pointer_axis(
-                            0,
-                            dy
-                        )
+                        await self.desktop_portal.notify_pointer_axis(0, dy)
             elif type_.startswith("key"):
                 # FIXME: this is a really naive implementation, it needs tot be improved.
                 key = data["key"]
@@ -258,18 +240,13 @@
         except Exception:
             log.exception(f"Can't handle input {data}")
 
-
     def _on_dc_message_data(self, data_channel, glib_data) -> None:
         """A data chunk of the file has been received."""
         raw = glib_data.get_data()
         data = cbor2.loads(raw)
         if self.verbose:
             print(data)
-        aio.run_from_thread(
-            self.do_input,
-            data,
-            loop=self.loop
-        )
+        aio.run_from_thread(self.do_input, data, loop=self.loop)
 
     def _on_dc_close(self, data_channel) -> None:
         """Data channel is closed
@@ -284,9 +261,7 @@
 
     def _on_data_channel(self, webrtcbin, data_channel) -> None:
         """The data channel has been opened."""
-        data_channel.connect(
-            "on-message-data", self._on_dc_message_data
-        )
+        data_channel.connect("on-message-data", self._on_dc_message_data)
         data_channel.connect("on-close", self._on_dc_close)
 
     async def request_remote_desktop(self, with_screen_sharing: bool) -> None:
@@ -295,6 +270,7 @@
         @param with_screen_sharing: True if screen must be shared.
         """
         from .portal_desktop import DesktopPortal
+
         self.desktop_portal = DesktopPortal()
         self.remote_desktop_data = await self.desktop_portal.request_remote_desktop(
             with_screen_sharing
@@ -302,10 +278,7 @@
         print(self.remote_desktop_data)
 
     async def start_receiving(
-        self,
-        from_jid: jid.JID,
-        session_id: str,
-        screenshare: dict
+        self, from_jid: jid.JID, session_id: str, screenshare: dict
     ) -> None:
         """Receives a file via WebRTC and saves it to the specified path.
 
@@ -328,7 +301,7 @@
                     video_properties={
                         "path": str(self.stream_node_id),
                         "do-timestamp": 1,
-                    }
+                    },
                 )
             except KeyError:
                 sources_data = SourcesNone()
@@ -339,9 +312,11 @@
             self.bridge,
             self.profile,
             call_data,
-            sources_data = sources_data,
+            sources_data=sources_data,
             sinks_data=webrtc.SinksNone(),
-            dc_data_list=[webrtc.SinksDataChannel(
-                dc_on_data_channel=self._on_data_channel,
-            )],
+            dc_data_list=[
+                webrtc.SinksDataChannel(
+                    dc_on_data_channel=self._on_data_channel,
+                )
+            ],
         )
--- a/libervia/frontends/tools/xmltools.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/tools/xmltools.py	Wed Jun 19 18:44:57 2024 +0200
@@ -24,7 +24,7 @@
 
 
 def inline_root(doc):
-    """ make the root attribute inline
+    """make the root attribute inline
     @param root_node: minidom's Document compatible class
     @return: plain XML
     """
--- a/libervia/frontends/tools/xmlui.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/frontends/tools/xmlui.py	Wed Jun 19 18:44:57 2024 +0200
@@ -272,8 +272,15 @@
     This class must not be instancied directly
     """
 
-    def __init__(self, host, parsed_dom, title=None, flags=None, callback=None,
-                 profile=C.PROF_KEY_NONE):
+    def __init__(
+        self,
+        host,
+        parsed_dom,
+        title=None,
+        flags=None,
+        callback=None,
+        profile=C.PROF_KEY_NONE,
+    ):
         """Initialise the XMLUI instance
 
         @param host: %(doc_host)s
@@ -364,6 +371,7 @@
 
 class ValueGetter(object):
     """dict like object which return values of widgets"""
+
     # FIXME: widget which can keep multiple values are not handled
 
     def __init__(self, widgets, attr="value"):
@@ -401,8 +409,17 @@
 
     widget_factory = None
 
-    def __init__(self, host, parsed_dom, title=None, flags=None, callback=None,
-                 ignore=None, whitelist=None, profile=C.PROF_KEY_NONE):
+    def __init__(
+        self,
+        host,
+        parsed_dom,
+        title=None,
+        flags=None,
+        callback=None,
+        ignore=None,
+        whitelist=None,
+        profile=C.PROF_KEY_NONE,
+    ):
         """
 
         @param title(unicode, None): title of the
@@ -449,7 +466,9 @@
             raise ValueError(_("XMLUI can have only one main container"))
         self._main_cont = value
 
-    def _parse_childs(self, _xmlui_parent, current_node, wanted=("container",), data=None):
+    def _parse_childs(
+        self, _xmlui_parent, current_node, wanted=("container",), data=None
+    ):
         """Recursively parse childNodes of an element
 
         @param _xmlui_parent: widget container with '_xmlui_append' method
@@ -487,7 +506,10 @@
                         #        used or even useful, it should probably be removed
                         #        and all "is not None" tests for it should be removed too
                         #        to be checked for 0.8
-                        cont, node, ("widget", "container"), {CURRENT_LABEL: None}
+                        cont,
+                        node,
+                        ("widget", "container"),
+                        {CURRENT_LABEL: None},
                     )
                 elif type_ == "advanced_list":
                     try:
@@ -536,9 +558,7 @@
                 if not name or not "tabs_cont" in data:
                     raise InvalidXMLUI
                 if self.type == "param":
-                    self._current_category = (
-                        name
-                    )  # XXX: awful hack because params need category and we don't keep parent
+                    self._current_category = name  # XXX: awful hack because params need category and we don't keep parent
                 tab_cont = data["tabs_cont"]
                 new_tab = tab_cont._xmlui_add_tab(label or name, selected)
                 self._parse_childs(new_tab, node, ("widget", "container"))
@@ -586,8 +606,10 @@
                     data[CURRENT_LABEL] = ctrl
                 elif type_ == "hidden":
                     if name in self.hidden:
-                        raise exceptions.ConflictError("Conflict on hidden value with "
-                                                       "name {name}".format(name=name))
+                        raise exceptions.ConflictError(
+                            "Conflict on hidden value with "
+                            "name {name}".format(name=name)
+                        )
                     self.hidden[name] = value
                     continue
                 elif type_ == "jid":
@@ -1090,27 +1112,40 @@
     # TODO: remove this method, as there are seme use cases where different XMLUI
     #       classes can be used in the same frontend, so a global value is not good
     assert type_ in (CLASS_PANEL, CLASS_DIALOG)
-    log.warning("register_class for XMLUI is deprecated, please use partial with "
-                "xmlui.create and class_map instead")
+    log.warning(
+        "register_class for XMLUI is deprecated, please use partial with "
+        "xmlui.create and class_map instead"
+    )
     if type_ in _class_map:
-        log.debug(_("XMLUI class already registered for {type_}, ignoring").format(
-            type_=type_))
+        log.debug(
+            _("XMLUI class already registered for {type_}, ignoring").format(type_=type_)
+        )
         return
 
     _class_map[type_] = class_
 
 
-def create(host, xml_data, title=None, flags=None, dom_parse=None, dom_free=None,
-           callback=None, ignore=None, whitelist=None, class_map=None,
-           profile=C.PROF_KEY_NONE):
+def create(
+    host,
+    xml_data,
+    title=None,
+    flags=None,
+    dom_parse=None,
+    dom_free=None,
+    callback=None,
+    ignore=None,
+    whitelist=None,
+    class_map=None,
+    profile=C.PROF_KEY_NONE,
+):
     """
-        @param dom_parse: methode equivalent to minidom.parseString (but which must manage unicode), or None to use default one
-        @param dom_free: method used to free the parsed DOM
-        @param ignore(list[unicode], None): name of widgets to ignore
-            widgets with name in this list and their label will be ignored
-        @param whitelist(list[unicode], None): name of widgets to keep
-            when not None, only widgets in this list and their label will be kept
-            mutually exclusive with ignore
+    @param dom_parse: methode equivalent to minidom.parseString (but which must manage unicode), or None to use default one
+    @param dom_free: method used to free the parsed DOM
+    @param ignore(list[unicode], None): name of widgets to ignore
+        widgets with name in this list and their label will be ignored
+    @param whitelist(list[unicode], None): name of widgets to keep
+        when not None, only widgets in this list and their label will be kept
+        mutually exclusive with ignore
     """
     if class_map is None:
         class_map = _class_map
--- a/libervia/tui/base.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/tui/base.py	Wed Jun 19 18:44:57 2024 +0200
@@ -20,8 +20,10 @@
 from libervia.backend.core.i18n import _, D_
 from libervia.tui.constants import Const as C
 from libervia.backend.core import log_config
+
 log_config.libervia_configure(C.LOG_BACKEND_STANDARD, C)
 from libervia.backend.core import log as logging
+
 log = logging.getLogger(__name__)
 from libervia.backend.tools import config as sat_config
 import urwid
@@ -43,12 +45,13 @@
 from libervia.frontends.tools import jid
 import signal
 import sys
+
 ## bridge handling
-# we get bridge name from conf and initialise the right class accordingly
+# we get bridge name from conf and initialise the right class accordingly
 main_config = sat_config.parse_main_conf()
-bridge_name = sat_config.config_get(main_config, '', 'bridge', 'dbus')
-if 'dbus' not in bridge_name:
-    print(u"only D-Bus bridge is currently supported")
+bridge_name = sat_config.config_get(main_config, "", "bridge", "dbus")
+if "dbus" not in bridge_name:
+    print("only D-Bus bridge is currently supported")
     sys.exit(3)
 
 
@@ -58,13 +61,15 @@
     """
 
     def __init__(self, host):
-        modes = {None: (C.MODE_NORMAL, u''),
-                 a_key['MODE_INSERTION']: (C.MODE_INSERTION, u'> '),
-                 a_key['MODE_COMMAND']: (C.MODE_COMMAND, u':')} #XXX: captions *MUST* be unicode
+        modes = {
+            None: (C.MODE_NORMAL, ""),
+            a_key["MODE_INSERTION"]: (C.MODE_INSERTION, "> "),
+            a_key["MODE_COMMAND"]: (C.MODE_COMMAND, ":"),
+        }  # XXX: captions *MUST* be unicode
         super(EditBar, self).__init__(modes)
         self.host = host
         self.set_completion_method(self._text_completion)
-        urwid.connect_signal(self, 'click', self.on_text_entered)
+        urwid.connect_signal(self, "click", self.on_text_entered)
 
     def _text_completion(self, text, completion_data, mode):
         if mode == C.MODE_INSERTION:
@@ -85,24 +90,31 @@
                 chat_widget = self.host.selected_widget
                 self.host.message_send(
                     chat_widget.target,
-                    {'': editBar.get_edit_text()}, # TODO: handle language
-                    mess_type = C.MESS_TYPE_GROUPCHAT if chat_widget.type == C.CHAT_GROUP else C.MESS_TYPE_CHAT, # TODO: put this in QuickChat
-                    errback=lambda failure: self.host.show_dialog(_("Error while sending message ({})").format(failure), type="error"),
-                    profile_key=chat_widget.profile
-                    )
-                editBar.set_edit_text('')
+                    {"": editBar.get_edit_text()},  # TODO: handle language
+                    mess_type=(
+                        C.MESS_TYPE_GROUPCHAT
+                        if chat_widget.type == C.CHAT_GROUP
+                        else C.MESS_TYPE_CHAT
+                    ),  # TODO: put this in QuickChat
+                    errback=lambda failure: self.host.show_dialog(
+                        _("Error while sending message ({})").format(failure),
+                        type="error",
+                    ),
+                    profile_key=chat_widget.profile,
+                )
+                editBar.set_edit_text("")
         elif self.mode == C.MODE_COMMAND:
             self.command_handler()
 
     def command_handler(self):
-        #TODO: separate class with auto documentation (with introspection)
+        # TODO: separate class with auto documentation (with introspection)
         #      and completion method
-        tokens = self.get_edit_text().split(' ')
+        tokens = self.get_edit_text().split(" ")
         command, args = tokens[0], tokens[1:]
-        if command == 'quit':
+        if command == "quit":
             self.host.on_exit()
             raise urwid.ExitMainLoop()
-        elif command == 'messages':
+        elif command == "messages":
             wid = sat_widgets.GenericList(logging.memory_get())
             self.host.select_widget(wid)
         # FIXME: reactivate the command
@@ -119,7 +131,7 @@
         #         self.host.status_bar.on_change(user_data=sat_widgets.AdvancedEdit(args[0]))
         #     else:
         #         self.host.status_bar.on_status_click()
-        elif command == 'history':
+        elif command == "history":
             widget = self.host.selected_widget
             if isinstance(widget, quick_chat.QuickChat):
                 try:
@@ -127,22 +139,28 @@
                 except (IndexError, ValueError):
                     limit = 50
                 widget.update_history(size=limit, profile=widget.profile)
-        elif command == 'search':
+        elif command == "search":
             widget = self.host.selected_widget
             if isinstance(widget, quick_chat.QuickChat):
                 pattern = " ".join(args)
                 if not pattern:
-                    self.host.notif_bar.add_message(D_("Please specify the globbing pattern to search for"))
+                    self.host.notif_bar.add_message(
+                        D_("Please specify the globbing pattern to search for")
+                    )
                 else:
-                    widget.update_history(size=C.HISTORY_LIMIT_NONE, filters={'search': pattern}, profile=widget.profile)
-        elif command == 'filter':
+                    widget.update_history(
+                        size=C.HISTORY_LIMIT_NONE,
+                        filters={"search": pattern},
+                        profile=widget.profile,
+                    )
+        elif command == "filter":
             # FIXME: filter is now only for current widget,
             #        need to be able to set it globally or per widget
             widget = self.host.selected_widget
             # FIXME: Q&D way, need to be more generic
             if isinstance(widget, quick_chat.QuickChat):
                 widget.set_filter(args)
-        elif command in ('topic', 'suject', 'title'):
+        elif command in ("topic", "suject", "title"):
             try:
                 new_title = args[0].strip()
             except IndexError:
@@ -152,7 +170,7 @@
                 widget.on_subject_dialog(new_title)
         else:
             return
-        self.set_edit_text('')
+        self.set_edit_text("")
 
     def _history_cb(self, text):
         self.set_edit_text(text)
@@ -161,52 +179,67 @@
     def keypress(self, size, key):
         """Callback when a key is pressed. Send "composing" states
         and move the index of the temporary history stack."""
-        if key == a_key['MODAL_ESCAPE']:
+        if key == a_key["MODAL_ESCAPE"]:
             # first save the text to the current mode, then change to NORMAL
             self.host._update_input_history(self.get_edit_text(), mode=self.mode)
             self.host._update_input_history(mode=C.MODE_NORMAL)
         if self._mode == C.MODE_NORMAL and key in self._modes:
             self.host._update_input_history(mode=self._modes[key][0])
-        if key == a_key['HISTORY_PREV']:
-            self.host._update_input_history(self.get_edit_text(), -1, self._history_cb, self.mode)
+        if key == a_key["HISTORY_PREV"]:
+            self.host._update_input_history(
+                self.get_edit_text(), -1, self._history_cb, self.mode
+            )
             return
-        elif key == a_key['HISTORY_NEXT']:
-            self.host._update_input_history(self.get_edit_text(), +1, self._history_cb, self.mode)
+        elif key == a_key["HISTORY_NEXT"]:
+            self.host._update_input_history(
+                self.get_edit_text(), +1, self._history_cb, self.mode
+            )
             return
-        elif key == a_key['EDIT_ENTER']:
+        elif key == a_key["EDIT_ENTER"]:
             self.host._update_input_history(self.get_edit_text(), mode=self.mode)
         else:
-            if (self._mode == C.MODE_INSERTION
+            if (
+                self._mode == C.MODE_INSERTION
                 and isinstance(self.host.selected_widget, quick_chat.QuickChat)
                 and key not in sat_widgets.FOCUS_KEYS
-                and key not in (a_key['HISTORY_PREV'], a_key['HISTORY_NEXT'])
-                and self.host.sync):
-                self.host.bridge.chat_state_composing(self.host.selected_widget.target, self.host.selected_widget.profile)
+                and key not in (a_key["HISTORY_PREV"], a_key["HISTORY_NEXT"])
+                and self.host.sync
+            ):
+                self.host.bridge.chat_state_composing(
+                    self.host.selected_widget.target, self.host.selected_widget.profile
+                )
 
         return super(EditBar, self).keypress(size, key)
 
 
 class LiberviaTUITopWidget(sat_widgets.FocusPile):
     """Top most widget used in LiberviaTUI"""
+
     _focus_inversed = True
-    positions = ('menu', 'body', 'notif_bar', 'edit_bar')
-    can_hide = ('menu', 'notif_bar')
+    positions = ("menu", "body", "notif_bar", "edit_bar")
+    can_hide = ("menu", "notif_bar")
 
     def __init__(self, body, menu, notif_bar, edit_bar):
         self._body = body
         self._menu = menu
         self._notif_bar = notif_bar
         self._edit_bar = edit_bar
-        self._hidden = {'notif_bar'}
+        self._hidden = {"notif_bar"}
         self._focus_extra = False
-        super(LiberviaTUITopWidget, self).__init__([('pack', self._menu), self._body, ('pack', self._edit_bar)])
+        super(LiberviaTUITopWidget, self).__init__(
+            [("pack", self._menu), self._body, ("pack", self._edit_bar)]
+        )
         for position in self.positions:
-            setattr(self,
-                    position,
-                    property(lambda: self, self.widget_get(position=position),
-                             lambda pos, new_wid: self.widget_set(new_wid, position=pos))
-                   )
-        self.focus_position = len(self.contents)-1
+            setattr(
+                self,
+                position,
+                property(
+                    lambda: self,
+                    self.widget_get(position=position),
+                    lambda pos, new_wid: self.widget_set(new_wid, position=pos),
+                ),
+            )
+        self.focus_position = len(self.contents) - 1
 
     def get_visible_positions(self, keep=None):
         """Return positions that are not hidden in the right order
@@ -215,26 +248,30 @@
                     (can be useful to find its index)
         @return (list): list of visible positions
         """
-        return [pos for pos in self.positions if (keep and pos == keep) or pos not in self._hidden]
+        return [
+            pos
+            for pos in self.positions
+            if (keep and pos == keep) or pos not in self._hidden
+        ]
 
     def keypress(self, size, key):
         """Manage FOCUS keys that focus directly a main part (one of self.positions)
 
         To avoid key conflicts, a combinaison must be made with FOCUS_EXTRA then an other key
         """
-        if key == a_key['FOCUS_EXTRA']:
+        if key == a_key["FOCUS_EXTRA"]:
             self._focus_extra = True
             return
         if self._focus_extra:
             self._focus_extra = False
-            if key in ('m', '1'):
-                focus = 'menu'
-            elif key in ('b', '2'):
-                focus = 'body'
-            elif key in ('n', '3'):
-                focus = 'notif_bar'
-            elif key in ('e', '4'):
-                focus = 'edit_bar'
+            if key in ("m", "1"):
+                focus = "menu"
+            elif key in ("b", "2"):
+                focus = "body"
+            elif key in ("n", "3"):
+                focus = "notif_bar"
+            elif key in ("e", "4"):
+                focus = "edit_bar"
             else:
                 return super(LiberviaTUITopWidget, self).keypress(size, key)
 
@@ -246,12 +283,12 @@
 
         return super(LiberviaTUITopWidget, self).keypress(size, key)
 
-    def widget_get(self,  position):
+    def widget_get(self, position):
         if not position in self.positions:
             raise ValueError("Unknown position {}".format(position))
         return getattr(self, "_{}".format(position))
 
-    def widget_set(self,  widget, position):
+    def widget_set(self, widget, position):
         if not position in self.positions:
             raise ValueError("Unknown position {}".format(position))
         return setattr(self, "_{}".format(position), widget)
@@ -266,7 +303,7 @@
             del self.contents[idx]
             self._hidden.add(position)
         else:
-            self.contents.insert(idx, (widget, ('pack', None)))
+            self.contents.insert(idx, (widget, ("pack", None)))
             self._hidden.remove(position)
 
     def show(self, position):
@@ -283,16 +320,30 @@
     AVATARS_HANDLER = False
 
     def __init__(self):
-        bridge_module = dynamic_import.bridge(bridge_name, 'libervia.frontends.bridge')
+        bridge_module = dynamic_import.bridge(bridge_name, "libervia.frontends.bridge")
         if bridge_module is None:
-            log.error(u"Can't import {} bridge".format(bridge_name))
+            log.error("Can't import {} bridge".format(bridge_name))
             sys.exit(3)
         else:
-            log.debug(u"Loading {} bridge".format(bridge_name))
-        QuickApp.__init__(self, bridge_factory=bridge_module.bridge, xmlui=xmlui, check_options=quick_utils.check_options, connect_bridge=False)
+            log.debug("Loading {} bridge".format(bridge_name))
+        QuickApp.__init__(
+            self,
+            bridge_factory=bridge_module.bridge,
+            xmlui=xmlui,
+            check_options=quick_utils.check_options,
+            connect_bridge=False,
+        )
         ## main loop setup ##
-        event_loop = urwid.GLibEventLoop if 'dbus' in bridge_name else urwid.TwistedEventLoop
-        self.loop = urwid.MainLoop(urwid.SolidFill(), C.PALETTE, event_loop=event_loop(), input_filter=self.input_filter, unhandled_input=self.key_handler)
+        event_loop = (
+            urwid.GLibEventLoop if "dbus" in bridge_name else urwid.TwistedEventLoop
+        )
+        self.loop = urwid.MainLoop(
+            urwid.SolidFill(),
+            C.PALETTE,
+            event_loop=event_loop(),
+            input_filter=self.input_filter,
+            unhandled_input=self.key_handler,
+        )
 
     @classmethod
     def run(cls):
@@ -303,10 +354,16 @@
         ##misc setup##
         self._visible_widgets = set()
         self.notif_bar = sat_widgets.NotificationBar()
-        urwid.connect_signal(self.notif_bar, 'change', self.on_notification)
+        urwid.connect_signal(self.notif_bar, "change", self.on_notification)
 
-        self.progress_wid = self.widgets.get_or_create_widget(Progress, None, on_new_widget=None)
-        urwid.connect_signal(self.notif_bar.progress, 'click', lambda x: self.select_widget(self.progress_wid))
+        self.progress_wid = self.widgets.get_or_create_widget(
+            Progress, None, on_new_widget=None
+        )
+        urwid.connect_signal(
+            self.notif_bar.progress,
+            "click",
+            lambda x: self.select_widget(self.progress_wid),
+        )
         self.__saved_overlay = None
 
         self.x_notify = Notify()
@@ -315,7 +372,7 @@
         signal.signal(signal.SIGINT, signal.SIG_IGN)
         sat_conf = sat_config.parse_main_conf()
         self._bracketed_paste = C.bool(
-            sat_config.config_get(sat_conf, C.CONFIG_SECTION, 'bracketed_paste', 'false')
+            sat_config.config_get(sat_conf, C.CONFIG_SECTION, "bracketed_paste", "false")
         )
         if self._bracketed_paste:
             log.debug("setting bracketed paste mode as requested")
@@ -344,18 +401,22 @@
 
     def debug(self):
         """convenient method to reset screen and launch (i)p(u)db"""
-        log.info('Entered debug mode')
+        log.info("Entered debug mode")
         try:
             import pudb
+
             pudb.set_trace()
         except ImportError:
             import os
-            os.system('reset')
+
+            os.system("reset")
             try:
                 import ipdb
+
                 ipdb.set_trace()
             except ImportError:
                 import pdb
+
                 pdb.set_trace()
 
     def redraw(self):
@@ -373,8 +434,13 @@
         try:
             config.apply_config(self)
         except Exception as e:
-            log.error(u"configuration error: {}".format(e))
-            popup = self.alert(_(u"Configuration Error"), _(u"Something went wrong while reading the configuration, please check :messages"))
+            log.error("configuration error: {}".format(e))
+            popup = self.alert(
+                _("Configuration Error"),
+                _(
+                    "Something went wrong while reading the configuration, please check :messages"
+                ),
+            )
             if self.options.profile:
                 self._early_popup = popup
             else:
@@ -384,31 +450,35 @@
     def keys_to_text(self, keys):
         """Generator return normal text from urwid keys"""
         for k in keys:
-            if k == 'tab':
-                yield u'\t'
-            elif k == 'enter':
-                yield u'\n'
-            elif is_wide_char(k,0) or (len(k)==1 and ord(k) >= 32):
+            if k == "tab":
+                yield "\t"
+            elif k == "enter":
+                yield "\n"
+            elif is_wide_char(k, 0) or (len(k) == 1 and ord(k) >= 32):
                 yield k
 
     def input_filter(self, input_, raw):
-        if self.__saved_overlay and input_ != a_key['OVERLAY_HIDE']:
+        if self.__saved_overlay and input_ != a_key["OVERLAY_HIDE"]:
             return
 
         ## paste detection/handling
-        if (len(input_) > 1 and                  # XXX: it may be needed to increase this value if buffer
-            not isinstance(input_[0], tuple) and #      or other things result in several chars at once
-            not 'window resize' in input_):      #      (e.g. using LiberviaTUI through ssh). Need some testing
-                                                 #      and experience to adjust value.
-            if input_[0] == 'begin paste' and not self._bracketed_paste:
-                log.info(u"Bracketed paste mode detected")
+        if (
+            len(input_) > 1  # XXX: it may be needed to increase this value if buffer
+            and not isinstance(
+                input_[0], tuple
+            )  #      or other things result in several chars at once
+            and not "window resize" in input_
+        ):  #      (e.g. using LiberviaTUI through ssh). Need some testing
+            #      and experience to adjust value.
+            if input_[0] == "begin paste" and not self._bracketed_paste:
+                log.info("Bracketed paste mode detected")
                 self._bracketed_paste = True
 
             if self._bracketed_paste:
                 # after this block, extra will contain non pasted keys
                 # and input_ will contain pasted keys
                 try:
-                    begin_idx = input_.index('begin paste')
+                    begin_idx = input_.index("begin paste")
                 except ValueError:
                     # this is not a paste, maybe we have something buffering
                     # or bracketed mode is set in conf but not enabled in term
@@ -416,22 +486,22 @@
                     input_ = []
                 else:
                     try:
-                        end_idx = input_.index('end paste')
+                        end_idx = input_.index("end paste")
                     except ValueError:
-                        log.warning(u"missing end paste sequence, discarding paste")
+                        log.warning("missing end paste sequence, discarding paste")
                         extra = input_[:begin_idx]
                         del input_[begin_idx:]
                     else:
-                        extra = input_[:begin_idx] + input_[end_idx+1:]
-                        input_ = input_[begin_idx+1:end_idx]
+                        extra = input_[:begin_idx] + input_[end_idx + 1 :]
+                        input_ = input_[begin_idx + 1 : end_idx]
             else:
                 extra = None
 
-            log.debug(u"Paste detected (len {})".format(len(input_)))
+            log.debug("Paste detected (len {})".format(len(input_)))
             try:
                 edit_bar = self.editBar
             except AttributeError:
-                log.warning(u"Paste treated as normal text: there is no edit bar yet")
+                log.warning("Paste treated as normal text: there is no edit bar yet")
                 if extra is None:
                     extra = []
                 extra.extend(input_)
@@ -439,10 +509,14 @@
                 if self.main_widget.focus == edit_bar:
                     # XXX: if a paste is detected, we append it directly to the edit bar text
                     #      so the user can check it and press [enter] if it's OK
-                    buf_paste = u''.join(self.keys_to_text(input_))
+                    buf_paste = "".join(self.keys_to_text(input_))
                     pos = edit_bar.edit_pos
-                    edit_bar.set_edit_text(u'{}{}{}'.format(edit_bar.edit_text[:pos], buf_paste, edit_bar.edit_text[pos:]))
-                    edit_bar.edit_pos+=len(buf_paste)
+                    edit_bar.set_edit_text(
+                        "{}{}{}".format(
+                            edit_bar.edit_text[:pos], buf_paste, edit_bar.edit_text[pos:]
+                        )
+                    )
+                    edit_bar.edit_pos += len(buf_paste)
                 else:
                     # we are not on the edit_bar,
                     # so we treat pasted text as normal text
@@ -455,27 +529,27 @@
         ## end of paste detection/handling
 
         for i in input_:
-            if isinstance(i,tuple):
-                if i[0] == 'mouse press':
-                    if i[1] == 4: #Mouse wheel up
-                        input_[input_.index(i)] = a_key['HISTORY_PREV']
-                    if i[1] == 5: #Mouse wheel down
-                        input_[input_.index(i)] = a_key['HISTORY_NEXT']
+            if isinstance(i, tuple):
+                if i[0] == "mouse press":
+                    if i[1] == 4:  # Mouse wheel up
+                        input_[input_.index(i)] = a_key["HISTORY_PREV"]
+                    if i[1] == 5:  # Mouse wheel down
+                        input_[input_.index(i)] = a_key["HISTORY_NEXT"]
         return input_
 
     def key_handler(self, input_):
-        if input_ == a_key['MENU_HIDE']:
+        if input_ == a_key["MENU_HIDE"]:
             """User want to (un)hide the menu roller"""
             try:
-                self.main_widget.hide_switch('menu')
+                self.main_widget.hide_switch("menu")
             except AttributeError:
                 pass
-        elif input_ == a_key['NOTIFICATION_NEXT']:
+        elif input_ == a_key["NOTIFICATION_NEXT"]:
             """User wants to see next notification"""
             self.notif_bar.show_next()
-        elif input_ == a_key['OVERLAY_HIDE']:
+        elif input_ == a_key["OVERLAY_HIDE"]:
             """User wants to (un)hide overlay window"""
-            if isinstance(self.loop.widget,urwid.Overlay):
+            if isinstance(self.loop.widget, urwid.Overlay):
                 self.__saved_overlay = self.loop.widget
                 self.loop.widget = self.main_widget
             else:
@@ -483,27 +557,33 @@
                     self.loop.widget = self.__saved_overlay
                     self.__saved_overlay = None
 
-        elif input_ == a_key['DEBUG'] and '.dev0' in self.bridge.version_get(): #Debug only for dev versions
+        elif (
+            input_ == a_key["DEBUG"] and ".dev0" in self.bridge.version_get()
+        ):  # Debug only for dev versions
             self.debug()
-        elif input_ == a_key['CONTACTS_HIDE']: #user wants to (un)hide the contact lists
+        elif input_ == a_key["CONTACTS_HIDE"]:  # user wants to (un)hide the contact lists
             try:
                 for wid, options in self.center_part.contents:
                     if self.contact_lists_pile is wid:
                         self.center_part.contents.remove((wid, options))
                         break
                 else:
-                    self.center_part.contents.insert(0, (self.contact_lists_pile, ('weight', 2, False)))
+                    self.center_part.contents.insert(
+                        0, (self.contact_lists_pile, ("weight", 2, False))
+                    )
             except AttributeError:
-                #The main widget is not built (probably in Profile Manager)
+                # The main widget is not built (probably in Profile Manager)
                 pass
-        elif input_ == 'window resize':
-            width,height = self.loop.screen_size
-            if height<=5 and width<=35:
-                if not 'save_main_widget' in dir(self):
+        elif input_ == "window resize":
+            width, height = self.loop.screen_size
+            if height <= 5 and width <= 35:
+                if not "save_main_widget" in dir(self):
                     self.save_main_widget = self.loop.widget
-                    self.loop.widget = urwid.Filler(urwid.Text(_("Pleeeeasse, I can't even breathe !")))
+                    self.loop.widget = urwid.Filler(
+                        urwid.Text(_("Pleeeeasse, I can't even breathe !"))
+                    )
             else:
-                if 'save_main_widget' in dir(self):
+                if "save_main_widget" in dir(self):
                     self.loop.widget = self.save_main_widget
                     del self.save_main_widget
         try:
@@ -518,15 +598,20 @@
         @param menu_data: data to send with these menus
 
         """
+
         def add_menu_cb(callback_id):
             self.action_launch(callback_id, menu_data, profile=self.current_profile)
-        for id_, type_, path, path_i18n, extra  in self.bridge.menus_get("", C.NO_SECURITY_LIMIT ): # TODO: manage extra
+
+        for id_, type_, path, path_i18n, extra in self.bridge.menus_get(
+            "", C.NO_SECURITY_LIMIT
+        ):  # TODO: manage extra
             if type_ != type_filter:
                 continue
             if len(path) != 2:
                 raise NotImplementedError("Menu with a path != 2 are not implemented yet")
-            menu.add_menu(path_i18n[0], path_i18n[1], lambda dummy,id_=id_: add_menu_cb(id_))
-
+            menu.add_menu(
+                path_i18n[0], path_i18n[1], lambda dummy, id_=id_: add_menu_cb(id_)
+            )
 
     def _build_menu_roller(self):
         menu = sat_widgets.Menu(self.loop)
@@ -535,26 +620,35 @@
         menu.add_menu(general, _("Disconnect"), self.on_disconnect_request)
         menu.add_menu(general, _("Parameters"), self.on_param)
         menu.add_menu(general, _("About"), self.on_about_request)
-        menu.add_menu(general, _("Exit"), self.on_exit_request, a_key['APP_QUIT'])
+        menu.add_menu(general, _("Exit"), self.on_exit_request, a_key["APP_QUIT"])
         menu.add_menu(_("Contacts"))  # add empty menu to save the place in the menu order
         groups = _("Groups")
         menu.add_menu(groups)
-        menu.add_menu(groups, _("Join room"), self.on_join_room_request, a_key['ROOM_JOIN'])
-        #additionals menus
-        #FIXME: do this in a more generic way (in quickapp)
+        menu.add_menu(
+            groups, _("Join room"), self.on_join_room_request, a_key["ROOM_JOIN"]
+        )
+        # additionals menus
+        # FIXME: do this in a more generic way (in quickapp)
         self.add_menus(menu, C.MENU_GLOBAL)
 
-        menu_roller = sat_widgets.MenuRoller([(_('Main menu'), menu, C.MENU_ID_MAIN)])
+        menu_roller = sat_widgets.MenuRoller([(_("Main menu"), menu, C.MENU_ID_MAIN)])
         return menu_roller
 
     def _build_main_widget(self):
         self.contact_lists_pile = urwid.Pile([])
-        #self.center_part = urwid.Columns([('weight',2,self.contact_lists[profile]),('weight',8,Chat('',self))])
-        self.center_part = urwid.Columns([('weight', 2, self.contact_lists_pile), ('weight', 8, urwid.Filler(urwid.Text('')))])
+        # self.center_part = urwid.Columns([('weight',2,self.contact_lists[profile]),('weight',8,Chat('',self))])
+        self.center_part = urwid.Columns(
+            [
+                ("weight", 2, self.contact_lists_pile),
+                ("weight", 8, urwid.Filler(urwid.Text(""))),
+            ]
+        )
 
         self.editBar = EditBar(self)
         self.menu_roller = self._build_menu_roller()
-        self.main_widget = LiberviaTUITopWidget(self.center_part, self.menu_roller, self.notif_bar, self.editBar)
+        self.main_widget = LiberviaTUITopWidget(
+            self.center_part, self.menu_roller, self.notif_bar, self.editBar
+        )
         return self.main_widget
 
     def plugging_profiles(self):
@@ -570,8 +664,15 @@
 
     def profile_plugged(self, profile):
         QuickApp.profile_plugged(self, profile)
-        contact_list = self.widgets.get_or_create_widget(ContactList, None, on_new_widget=None, on_click=self.contact_selected, on_change=lambda w: self.redraw(), profile=profile)
-        self.contact_lists_pile.contents.append((contact_list, ('weight', 1)))
+        contact_list = self.widgets.get_or_create_widget(
+            ContactList,
+            None,
+            on_new_widget=None,
+            on_click=self.contact_selected,
+            on_change=lambda w: self.redraw(),
+            profile=profile,
+        )
+        self.contact_lists_pile.contents.append((contact_list, ("weight", 1)))
         return contact_list
 
     def is_hidden(self):
@@ -590,7 +691,7 @@
         @return (urwid_satext.Alert): the created Alert instance
         """
         popup = sat_widgets.Alert(title, message)
-        popup.set_callback('ok', lambda dummy: self.remove_pop_up(popup))
+        popup.set_callback("ok", lambda dummy: self.remove_pop_up(popup))
         self.show_pop_up(popup, width=75, height=20)
         return popup
 
@@ -609,18 +710,21 @@
                     try:
                         self.notif_bar.remove_pop_up(widget)
                     except ValueError:
-                        log.warning(u"Trying to remove an unknown widget {}".format(widget))
+                        log.warning(
+                            "Trying to remove an unknown widget {}".format(widget)
+                        )
                     return
         self.loop.widget = self.main_widget
         next_popup = self.notif_bar.get_next_popup()
         if next_popup:
-            #we still have popup to show, we display it
+            # we still have popup to show, we display it
             self.show_pop_up(next_popup)
         else:
             self.redraw()
 
-    def show_pop_up(self, pop_up_widget, width=None, height=None, align='center',
-                  valign='middle'):
+    def show_pop_up(
+        self, pop_up_widget, width=None, height=None, align="center", valign="middle"
+    ):
         """Show a pop-up window if possible, else put it in queue
 
         @param pop_up_widget: pop up to show
@@ -636,31 +740,43 @@
             height = 20 if isinstance(pop_up_widget, xmlui.LiberviaTUINoteDialog) else 40
         if not isinstance(self.loop.widget, urwid.Overlay):
             display_widget = urwid.Overlay(
-                pop_up_widget, self.main_widget, align, width, valign, height)
+                pop_up_widget, self.main_widget, align, width, valign, height
+            )
             self.loop.widget = display_widget
             self.redraw()
         else:
             self.notif_bar.add_pop_up(pop_up_widget)
 
     def bar_notify(self, message):
-        """"Notify message to user via notification bar"""
+        """ "Notify message to user via notification bar"""
         self.notif_bar.add_message(message)
         self.redraw()
 
-    def notify(self, type_, entity=None, message=None, subject=None, callback=None, cb_args=None, widget=None, profile=C.PROF_KEY_NONE):
+    def notify(
+        self,
+        type_,
+        entity=None,
+        message=None,
+        subject=None,
+        callback=None,
+        cb_args=None,
+        widget=None,
+        profile=C.PROF_KEY_NONE,
+    ):
         if widget is None or widget is not None and widget != self.selected_widget:
             # we ignore notification if the widget is selected but we can
             # still do a desktop notification is the X window has not the focus
-            super(LiberviaTUIApp, self).notify(type_, entity, message, subject, callback, cb_args, widget, profile)
+            super(LiberviaTUIApp, self).notify(
+                type_, entity, message, subject, callback, cb_args, widget, profile
+            )
         # we don't want notifications without message on desktop
         if message is not None and not self.x_notify.has_focus():
             if message is None:
                 message = _("{app}: a new event has just happened{entity}").format(
-                    app=C.APP_NAME,
-                    entity=u' ({})'.format(entity) if entity else '')
+                    app=C.APP_NAME, entity=" ({})".format(entity) if entity else ""
+                )
             self.x_notify.send_notification(message)
 
-
     def new_widget(self, widget, user_action=False):
         """Method called when a new widget is created
 
@@ -680,8 +796,8 @@
         else add it in the notification bar queue
         @param widget: BoxWidget
         """
-        assert len(self.center_part.widget_list)<=2
-        wid_idx = len(self.center_part.widget_list)-1
+        assert len(self.center_part.widget_list) <= 2
+        wid_idx = len(self.center_part.widget_list) - 1
         self.center_part.widget_list[wid_idx] = widget
         try:
             self.menu_roller.remove_menu(C.MENU_ID_WIDGET)
@@ -694,10 +810,16 @@
             pass
         else:
             on_selected()
-        self._visible_widgets = set([widget]) # XXX: we can only have one widget visible at the time for now
+        self._visible_widgets = set(
+            [widget]
+        )  # XXX: we can only have one widget visible at the time for now
         self.contact_lists.select(None)
 
-        for wid in self.visible_widgets: # FIXME: check if widgets.get_widgets is not more appropriate
+        for (
+            wid
+        ) in (
+            self.visible_widgets
+        ):  # FIXME: check if widgets.get_widgets is not more appropriate
             if isinstance(wid, Chat):
                 contact_list = self.contact_lists[wid.profile]
                 contact_list.select(wid.target)
@@ -706,10 +828,10 @@
 
     def remove_window(self):
         """Remove window showed on the right column"""
-        #TODO: better Window management than this hack
+        # TODO: better Window management than this hack
         assert len(self.center_part.widget_list) <= 2
-        wid_idx = len(self.center_part.widget_list)-1
-        self.center_part.widget_list[wid_idx] = urwid.Filler(urwid.Text(''))
+        wid_idx = len(self.center_part.widget_list) - 1
+        self.center_part.widget_list[wid_idx] = urwid.Filler(urwid.Text(""))
         self.center_part.focus_position = 0
         self.redraw()
 
@@ -729,11 +851,21 @@
         self.clear_notifs(entity, profile=contact_list.profile)
         if entity.resource:
             # we have clicked on a private MUC conversation
-            chat_widget = self.widgets.get_or_create_widget(Chat, entity, on_new_widget=None, force_hash = Chat.get_private_hash(contact_list.profile, entity), profile=contact_list.profile)
+            chat_widget = self.widgets.get_or_create_widget(
+                Chat,
+                entity,
+                on_new_widget=None,
+                force_hash=Chat.get_private_hash(contact_list.profile, entity),
+                profile=contact_list.profile,
+            )
         else:
-            chat_widget = self.widgets.get_or_create_widget(Chat, entity, on_new_widget=None, profile=contact_list.profile)
+            chat_widget = self.widgets.get_or_create_widget(
+                Chat, entity, on_new_widget=None, profile=contact_list.profile
+            )
         self.select_widget(chat_widget)
-        self.menu_roller.add_menu(_('Chat menu'), chat_widget.get_menu(), C.MENU_ID_WIDGET)
+        self.menu_roller.add_menu(
+            _("Chat menu"), chat_widget.get_menu(), C.MENU_ID_WIDGET
+        )
 
     def _dialog_ok_cb(self, widget, data):
         popup, answer_cb, answer_data = data
@@ -747,24 +879,28 @@
         if answer_cb is not None:
             answer_cb(False, answer_data)
 
-    def show_dialog(self, message, title="", type="info", answer_cb = None, answer_data = None):
-        if type == 'info':
+    def show_dialog(
+        self, message, title="", type="info", answer_cb=None, answer_data=None
+    ):
+        if type == "info":
             popup = sat_widgets.Alert(title, message, ok_cb=answer_cb)
             if answer_cb is None:
-                popup.set_callback('ok', lambda dummy: self.remove_pop_up(popup))
-        elif type == 'error':
+                popup.set_callback("ok", lambda dummy: self.remove_pop_up(popup))
+        elif type == "error":
             popup = sat_widgets.Alert(title, message, ok_cb=answer_cb)
             if answer_cb is None:
-                popup.set_callback('ok', lambda dummy: self.remove_pop_up(popup))
-        elif type == 'yes/no':
+                popup.set_callback("ok", lambda dummy: self.remove_pop_up(popup))
+        elif type == "yes/no":
             popup = sat_widgets.ConfirmDialog(message)
-            popup.set_callback('yes', self._dialog_ok_cb, (popup, answer_cb, answer_data))
-            popup.set_callback('no', self._dialog_cancel_cb, (popup, answer_cb, answer_data))
+            popup.set_callback("yes", self._dialog_ok_cb, (popup, answer_cb, answer_data))
+            popup.set_callback(
+                "no", self._dialog_cancel_cb, (popup, answer_cb, answer_data)
+            )
         else:
             popup = sat_widgets.Alert(title, message, ok_cb=answer_cb)
             if answer_cb is None:
-                popup.set_callback('ok', lambda dummy: self.remove_pop_up(popup))
-            log.error(u'unmanaged dialog type: {}'.format(type))
+                popup.set_callback("ok", lambda dummy: self.remove_pop_up(popup))
+            log.error("unmanaged dialog type: {}".format(type))
         self.show_pop_up(popup)
 
     def dialog_failure(self, failure):
@@ -777,20 +913,22 @@
     def on_notification(self, notif_bar):
         """Called when a new notification has been received"""
         if not isinstance(self.main_widget, LiberviaTUITopWidget):
-            #if we are not in the main configuration, we ignore the notifications bar
+            # if we are not in the main configuration, we ignore the notifications bar
             return
         if self.notif_bar.can_hide():
-                #No notification left, we can hide the bar
-                self.main_widget.hide('notif_bar')
+            # No notification left, we can hide the bar
+            self.main_widget.hide("notif_bar")
         else:
-            self.main_widget.show('notif_bar')
-            self.redraw() # FIXME: invalidate cache in a more efficient way
+            self.main_widget.show("notif_bar")
+            self.redraw()  # FIXME: invalidate cache in a more efficient way
 
     def _action_manager_unknown_error(self):
-        self.alert(_("Error"), _(u"Unmanaged action"))
+        self.alert(_("Error"), _("Unmanaged action"))
 
     def room_joined_handler(self, room_jid_s, room_nicks, user_nick, subject, profile):
-        super(LiberviaTUIApp, self).room_joined_handler(room_jid_s, room_nicks, user_nick, subject, profile)
+        super(LiberviaTUIApp, self).room_joined_handler(
+            room_jid_s, room_nicks, user_nick, subject, profile
+        )
         # if self.selected_widget is None:
         #     for contact_list in self.widgets.get_widgets(ContactList):
         #         if profile in contact_list.profiles:
@@ -798,24 +936,30 @@
 
     def progress_started_handler(self, pid, metadata, profile):
         super(LiberviaTUIApp, self).progress_started_handler(pid, metadata, profile)
-        self.add_progress(pid, metadata.get('name', _(u'unkown')), profile)
+        self.add_progress(pid, metadata.get("name", _("unkown")), profile)
 
     def progress_finished_handler(self, pid, metadata, profile):
-        log.info(u"Progress {} finished".format(pid))
+        log.info("Progress {} finished".format(pid))
         super(LiberviaTUIApp, self).progress_finished_handler(pid, metadata, profile)
 
     def progress_error_handler(self, pid, err_msg, profile):
-        log.warning(u"Progress {pid} error: {err_msg}".format(pid=pid, err_msg=err_msg))
+        log.warning("Progress {pid} error: {err_msg}".format(pid=pid, err_msg=err_msg))
         super(LiberviaTUIApp, self).progress_error_handler(pid, err_msg, profile)
 
-
     ##DIALOGS CALLBACKS##
     def on_join_room(self, button, edit):
         self.remove_pop_up()
         room_jid = jid.JID(edit.get_edit_text())
-        self.bridge.muc_join(room_jid, self.profiles[self.current_profile].whoami.node, {}, self.current_profile, callback=lambda dummy: None, errback=self.dialog_failure)
+        self.bridge.muc_join(
+            room_jid,
+            self.profiles[self.current_profile].whoami.node,
+            {},
+            self.current_profile,
+            callback=lambda dummy: None,
+            errback=self.dialog_failure,
+        )
 
-    #MENU EVENTS#
+    # MENU EVENTS#
     def on_connect_request(self, menu):
         QuickApp.connect(self, self.current_profile)
 
@@ -829,12 +973,20 @@
 
         def failure(error):
             self.alert(_("Error"), _("Can't get parameters (%s)") % error)
-        self.bridge.param_ui_get(app=C.APP_NAME, profile_key=self.current_profile, callback=success, errback=failure)
+
+        self.bridge.param_ui_get(
+            app=C.APP_NAME,
+            profile_key=self.current_profile,
+            callback=success,
+            errback=failure,
+        )
 
     def on_exit_request(self, menu):
         QuickApp.on_exit(self)
         try:
-            if self._bracketed_mode_set: # we don't unset if bracketed paste mode was detected automatically (i.e. not in conf)
+            if (
+                self._bracketed_mode_set
+            ):  # we don't unset if bracketed paste mode was detected automatically (i.e. not in conf)
                 log.debug("unsetting bracketed paste mode")
                 sys.stdout.write("\033[?2004l")
         except AttributeError:
@@ -843,21 +995,29 @@
 
     def on_join_room_request(self, menu):
         """User wants to join a MUC room"""
-        pop_up_widget = sat_widgets.InputDialog(_("Entering a MUC room"), _("Please enter MUC's JID"), default_txt=self.bridge.muc_get_default_service(), ok_cb=self.on_join_room)
-        pop_up_widget.set_callback('cancel', lambda dummy: self.remove_pop_up(pop_up_widget))
+        pop_up_widget = sat_widgets.InputDialog(
+            _("Entering a MUC room"),
+            _("Please enter MUC's JID"),
+            default_txt=self.bridge.muc_get_default_service(),
+            ok_cb=self.on_join_room,
+        )
+        pop_up_widget.set_callback(
+            "cancel", lambda dummy: self.remove_pop_up(pop_up_widget)
+        )
         self.show_pop_up(pop_up_widget)
 
     def on_about_request(self, menu):
         self.alert(_("About"), C.APP_NAME + " v" + self.bridge.version_get())
 
-    #MISC CALLBACKS#
+    # MISC CALLBACKS#
 
-    def set_presence_status(self, show='', status=None, profile=C.PROF_KEY_NONE):
+    def set_presence_status(self, show="", status=None, profile=C.PROF_KEY_NONE):
         contact_list_wid = self.widgets.get_widget(ContactList, profiles=profile)
         if contact_list_wid is not None:
             contact_list_wid.status_bar.set_presence_status(show, status)
         else:
-            log.warning(u"No ContactList widget found for profile {}".format(profile))
+            log.warning("No ContactList widget found for profile {}".format(profile))
+
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     LiberviaTUIApp().start()
--- a/libervia/tui/chat.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/tui/chat.py	Wed Jun 19 18:44:57 2024 +0200
@@ -245,7 +245,9 @@
             urwid.ListBox(self.occupants_walker), footer=self.occupants_footer
         )
         super(OccupantsWidget, self).__init__(occupants_widget)
-        occupants_list = sorted(list(self.parent.occupants.keys()), key=lambda o: o.lower())
+        occupants_list = sorted(
+            list(self.parent.occupants.keys()), key=lambda o: o.lower()
+        )
         for occupant in occupants_list:
             occupant_data = self.parent.occupants[occupant]
             self.occupants_walker.append(OccupantWidget(occupant_data))
@@ -284,8 +286,17 @@
 
 
 class Chat(LiberviaTUIWidget, quick_chat.QuickChat):
-    def __init__(self, host, target, type_=C.CHAT_ONE2ONE, nick=None, occupants=None,
-                 subject=None, statuses=None, profiles=None):
+    def __init__(
+        self,
+        host,
+        target,
+        type_=C.CHAT_ONE2ONE,
+        nick=None,
+        occupants=None,
+        subject=None,
+        statuses=None,
+        profiles=None,
+    ):
         self.filters = []  # list of filter callbacks to apply
         self.mess_walker = urwid.SimpleListWalker([])
         self.mess_widgets = urwid.ListBox(self.mess_walker)
@@ -294,8 +305,15 @@
         self.pile = urwid.Pile([self.chat_colums])
         LiberviaTUIWidget.__init__(self, self.pile, target)
         quick_chat.QuickChat.__init__(
-            self, host, target, type_, nick, occupants, subject, statuses,
-            profiles=profiles
+            self,
+            host,
+            target,
+            type_,
+            nick,
+            occupants,
+            subject,
+            statuses,
+            profiles=profiles,
         )
 
         # we must adapt the behaviour with the type
@@ -334,7 +352,7 @@
         elif key == a_key["SHORT_NICKNAME"]:  # user wants to (not) use short nick
             self.show_short_nick = not self.show_short_nick
             self.redraw()
-        elif (key == a_key["SUBJECT_SWITCH"]):
+        elif key == a_key["SUBJECT_SWITCH"]:
             # user wants to (un)hide group's subject or change its apperance
             if self.subject:
                 self.show_title = (self.show_title + 1) % 3
@@ -448,11 +466,11 @@
             #   it should be done in a more user friendly way
             for lang, body in message.message.items():
                 for attachment in message.attachments:
-                    if 'url' in attachment:
-                        body+=f"\n{attachment['url']}"
-                    elif 'path' in attachment:
-                        path = Path(attachment['path'])
-                        body+=f"\n{path.as_uri()}"
+                    if "url" in attachment:
+                        body += f"\n{attachment['url']}"
+                    elif "path" in attachment:
+                        path = Path(attachment["path"])
+                        body += f"\n{path.as_uri()}"
                     else:
                         log.warning(f'No "url" nor "path" in attachment: {attachment}')
                     message.message[lang] = body
@@ -464,8 +482,9 @@
         if self.handle_user_moved(message):
             return
 
-        if ((self.host.selected_widget != self or not self.host.x_notify.has_focus())
-            and self.focus_marker_set is not None):
+        if (
+            self.host.selected_widget != self or not self.host.x_notify.has_focus()
+        ) and self.focus_marker_set is not None:
             if not self.focus_marker_set and not self._locked and self.mess_walker:
                 if self.focus_marker is not None:
                     try:
@@ -604,7 +623,9 @@
             except AttributeError:
                 pass
 
-    def update_history(self, size=C.HISTORY_LIMIT_DEFAULT, filters=None, profile="@NONE@"):
+    def update_history(
+        self, size=C.HISTORY_LIMIT_DEFAULT, filters=None, profile="@NONE@"
+    ):
         del self.mess_walker[:]
         if filters and "search" in filters:
             self.mess_walker.append(
--- a/libervia/tui/contact_list.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/tui/contact_list.py	Wed Jun 19 18:44:57 2024 +0200
@@ -225,10 +225,10 @@
             self.host.get_notifs(entity, exact_jid=special, profile=self.profile)
         )
         mentions = list(
-                self.host.get_notifs(entity.bare, C.NOTIFY_MENTION, profile=self.profile)
-            )
+            self.host.get_notifs(entity.bare, C.NOTIFY_MENTION, profile=self.profile)
+        )
         if notifs or mentions:
-            attr = 'cl_mention' if mentions else 'cl_notifs'
+            attr = "cl_mention" if mentions else "cl_notifs"
             header = [(attr, "({})".format(len(notifs) + len(mentions))), " "]
         else:
             header = ""
@@ -269,10 +269,12 @@
             if self.contact_list.show_resources:
                 for resource in self.contact_list.getCache(entity, C.CONTACT_RESOURCES):
                     resource_disp = (
-                        "resource_main"
-                        if resource
-                        == self.contact_list.getCache(entity, C.CONTACT_MAIN_RESOURCE)
-                        else "resource",
+                        (
+                            "resource_main"
+                            if resource
+                            == self.contact_list.getCache(entity, C.CONTACT_MAIN_RESOURCE)
+                            else "resource"
+                        ),
                         "\n  " + resource,
                     )
                     markup_extra.append(resource_disp)
@@ -314,7 +316,9 @@
             else:
                 # the special widgets
                 if entity.resource:
-                    widget = self._build_entity_widget(entity, ("resource",), special=True)
+                    widget = self._build_entity_widget(
+                        entity, ("resource",), special=True
+                    )
                 else:
                     widget = self._build_entity_widget(
                         entity,
@@ -333,7 +337,7 @@
             content.append(urwid.Divider("="))
 
         groups = list(self.contact_list._groups)
-        groups.sort(key=lambda x: x.lower() if x else '')
+        groups.sort(key=lambda x: x.lower() if x else "")
         for group in groups:
             data = self.contact_list.get_group_data(group)
             folded = data.get(C.GROUP_DATA_FOLDED, False)
--- a/libervia/tui/game_tarot.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/tui/game_tarot.py	Wed Jun 19 18:44:57 2024 +0200
@@ -202,7 +202,7 @@
 
     def display_widget(self, size, focus):
         cards = {}
-        max_col, = size
+        (max_col,) = size
         separator = " - "
         margin = max((max_col - Card.SIZE) / 2, 0) * " "
         margin_center = max((max_col - Card.SIZE * 2 - len(separator)) / 2, 0) * " "
@@ -261,7 +261,12 @@
             self.cards[card.suit, card.value] = card
             self.deck.append(card)
         for suit in ["pique", "coeur", "carreau", "trefle"]:
-            for value in list(map(str, list(range(1, 11)))) + ["valet", "cavalier", "dame", "roi"]:
+            for value in list(map(str, list(range(1, 11)))) + [
+                "valet",
+                "cavalier",
+                "dame",
+                "roi",
+            ]:
                 card = Card(suit, value)
                 self.cards[card.suit, card.value] = card
                 self.deck.append(card)
--- a/libervia/tui/progress.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/tui/progress.py	Wed Jun 19 18:44:57 2024 +0200
@@ -76,7 +76,7 @@
     def _remove_bar(self, progress_id, profile):
         wid = self.progress_dict[(progress_id, profile)]["full"]
         self.progress_list.remove(wid)
-        del (self.progress_dict[(progress_id, profile)])
+        del self.progress_dict[(progress_id, profile)]
 
     def _on_clear(self, button):
         to_remove = []
--- a/libervia/tui/status.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/tui/status.py	Wed Jun 19 18:44:57 2024 +0200
@@ -51,7 +51,9 @@
             return
         options = [commonConst.PRESENCE[presence] for presence in commonConst.PRESENCE]
         list_widget = sat_widgets.GenericList(
-            options=options, option_type=sat_widgets.ClickableText, on_click=self.on_change
+            options=options,
+            option_type=sat_widgets.ClickableText,
+            on_click=self.on_change,
         )
         decorated = sat_widgets.LabelLine(
             list_widget, sat_widgets.SurroundedText(_("Set your presence"))
--- a/libervia/tui/xmlui.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/tui/xmlui.py	Wed Jun 19 18:44:57 2024 +0200
@@ -32,14 +32,14 @@
 
 
 class LiberviaTUIEvents(object):
-    """ Used to manage change event of LiberviaTUI widgets """
+    """Used to manage change event of LiberviaTUI widgets"""
 
     def _event_callback(self, ctrl, *args, **kwargs):
-        """" Call xmlui callback and ignore any extra argument """
+        """ " Call xmlui callback and ignore any extra argument"""
         args[-1](ctrl)
 
     def _xmlui_on_change(self, callback):
-        """ Call callback with widget as only argument """
+        """Call callback with widget as only argument"""
         urwid.connect_signal(self, "change", self._event_callback, callback)
 
 
@@ -266,7 +266,7 @@
         return self.get_selected_index()
 
     def _xmlui_on_select(self, callback):
-        """ Call callback with widget as only argument """
+        """Call callback with widget as only argument"""
         urwid.connect_signal(self, "click", self._event_callback, callback)
 
 
@@ -432,8 +432,9 @@
         )
         LiberviaTUIWidget.__init__(self, self.main_cont, self.xmlui_title)
 
-
-    def _parse_childs(self, _xmlui_parent, current_node, wanted=("container",), data=None):
+    def _parse_childs(
+        self, _xmlui_parent, current_node, wanted=("container",), data=None
+    ):
         # Small hack to always have a VerticalContainer as main container in LiberviaTUI.
         # this used to be the default behaviour for all frontends, but now
         # TabsContainer can also be the main container.
@@ -442,9 +443,9 @@
             if node.nodeName == "container" and node.getAttribute("type") == "tabs":
                 _xmlui_parent = self.widget_factory.createVerticalContainer(self)
                 self.main_cont = _xmlui_parent
-        return super(XMLUIPanel, self)._parse_childs(_xmlui_parent, current_node, wanted,
-                                                    data)
-
+        return super(XMLUIPanel, self)._parse_childs(
+            _xmlui_parent, current_node, wanted, data
+        )
 
     def construct_ui(self, parsed_dom):
         def post_treat():