diff libervia/backend/plugins/plugin_xep_0277.py @ 4270:0d7bb4df2343

Reformatted code base using black.
author Goffi <goffi@goffi.org>
date Wed, 19 Jun 2024 18:44:57 +0200
parents c86a22009c1f
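Black's output is deterministic, so the hunks below follow from a handful of mechanical rules rather than case-by-case choices: string quotes are normalized to double quotes, code is fitted to an 88-character line limit, and a "magic trailing comma" after the last element of a collection or argument list forces the construct to stay exploded across lines. A minimal sketch of the three rules (the dict mirrors a hunk below; some_call and its arguments are placeholders, not code from this module):

    # 1. Quote normalization: single quotes become double quotes.
    mb_data['uri'] = uri      # before
    mb_data["uri"] = uri      # after

    # 2. Line fitting: without a trailing comma, a construct that fits
    #    within 88 columns is collapsed onto one line.
    mb_data = {"service": service.full(), "extra": extra}

    # 3. Magic trailing comma: a comma after the last argument keeps the
    #    call exploded, one argument per line, even when it would fit.
    some_call(
        first_placeholder,
        second_placeholder,
    )

The file was presumably regenerated with Black's standard command-line invocation, e.g. "black libervia/backend/plugins/plugin_xep_0277.py".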
--- a/libervia/backend/plugins/plugin_xep_0277.py	Tue Jun 18 12:06:45 2024 +0200
+++ b/libervia/backend/plugins/plugin_xep_0277.py	Wed Jun 19 18:44:57 2024 +0200
@@ -268,7 +268,7 @@
         service: Optional[jid.JID],
        # FIXME: node is Optional until all calls to item_2_mb_data properly set service
         #   and node. Once done, the Optional must be removed here
-        node: Optional[str]
+        node: Optional[str],
     ) -> dict:
         """Convert an XML Item to microblog data
 
@@ -283,10 +283,7 @@
             service = client.jid.userhostJID()
 
         extra: Dict[str, Any] = {}
-        mb_data: Dict[str, Any] = {
-            "service": service.full(),
-            "extra": extra
-        }
+        mb_data: Dict[str, Any] = {"service": service.full(), "extra": extra}
 
         def check_conflict(key, increment=False):
             """Check if key is already in microblog data
@@ -335,9 +332,7 @@
                     )
                 key = check_conflict("{}_xhtml".format(elem.name))
                 data = data_elt.toXml()
-                mb_data[key] = yield self.host.plugins["TEXT_SYNTAXES"].clean_xhtml(
-                    data
-                )
+                mb_data[key] = yield self.host.plugins["TEXT_SYNTAXES"].clean_xhtml(data)
             else:
                 key = check_conflict(elem.name)
                 mb_data[key] = str(elem)
@@ -361,7 +356,7 @@
        # FIXME: node should always be set in the future, check FIXME in method signature
         if node is not None:
             mb_data["node"] = node
-            mb_data['uri'] = xmpp_uri.build_xmpp_uri(
+            mb_data["uri"] = xmpp_uri.build_xmpp_uri(
                 "pubsub",
                 path=service.full(),
                 node=node,
@@ -378,8 +373,9 @@
         try:
             id_elt = next(entry_elt.elements(NS_ATOM, "id"))
         except StopIteration:
-            msg = ("No atom id found in the pubsub item {}, this is not standard !"
-                   .format(id_))
+            msg = "No atom id found in the pubsub item {}, this is not standard !".format(
+                id_
+            )
             log.warning(msg)
             mb_data["atom_id"] = ""
         else:
@@ -452,16 +448,14 @@
             )
 
         # links
-        comments = mb_data['comments'] = []
+        comments = mb_data["comments"] = []
         for link_elt in entry_elt.elements(NS_ATOM, "link"):
             href = link_elt.getAttribute("href")
             if not href:
-                log.warning(
-                    f'missing href in <link> element: {link_elt.toXml()}'
-                )
+                log.warning(f"missing href in <link> element: {link_elt.toXml()}")
                 continue
             rel = link_elt.getAttribute("rel")
-            if (rel == "replies" and link_elt.getAttribute("title") == "comments"):
+            if rel == "replies" and link_elt.getAttribute("title") == "comments":
                 uri = href
                 comments_data = {
                     "uri": uri,
@@ -489,19 +483,12 @@
                     except (AttributeError, RuntimeError):
                         # we should always have either the "publisher" attribute or the
                         # stanza available
-                        log.error(
-                            f"Can't find repeater of the post: {item_elt.toXml()}"
-                        )
+                        log.error(f"Can't find repeater of the post: {item_elt.toXml()}")
                         continue
 
-                extra["repeated"] = {
-                    "by": repeater_jid.full(),
-                    "uri": href
-                }
+                extra["repeated"] = {"by": repeater_jid.full(), "uri": href}
             elif rel in ("related", "enclosure"):
-                attachment: Dict[str, Any] = {
-                    "sources": [{"url": href}]
-                }
+                attachment: Dict[str, Any] = {"sources": [{"url": href}]}
                 if rel == "related":
                     attachment["external"] = True
                 for attr, key in (
@@ -533,9 +520,7 @@
                     )
                 extra.setdefault("alt_links", []).append(link_data)
             else:
-                log.warning(
-                    f"Unmanaged link element: {link_elt.toXml()}"
-                )
+                log.warning(f"Unmanaged link element: {link_elt.toXml()}")
 
         # author
         publisher = item_elt.getAttribute("publisher")
@@ -558,9 +543,7 @@
             try:
                 uri_elt = next(author_elt.elements(NS_ATOM, "uri"))
             except StopIteration:
-                log.debug(
-                    "No uri element found in author element of item {}".format(id_)
-                )
+                log.debug("No uri element found in author element of item {}".format(id_))
                 if publisher:
                     mb_data["author_jid"] = publisher
             else:
@@ -569,16 +552,16 @@
                     uri = uri[5:]
                     mb_data["author_jid"] = uri
                 else:
-                    mb_data["author_jid"] = (
-                        item_elt.getAttribute("publisher") or ""
-                    )
+                    mb_data["author_jid"] = item_elt.getAttribute("publisher") or ""
                 if not author and mb_data["author_jid"]:
                     # FIXME: temporary workaround for missing author name, would be
                    #   better to use the JID's identity directly (to be done from frontends?)
                     try:
                         mb_data["author"] = jid.JID(mb_data["author_jid"]).user
                     except Exception as e:
-                        log.warning(f"No author name found, and can't parse author jid: {e}")
+                        log.warning(
+                            f"No author name found, and can't parse author jid: {e}"
+                        )
 
                 if not publisher:
                     log.debug("No publisher attribute, we can't verify author jid")
@@ -620,9 +603,7 @@
 
         ## the trigger ##
         # if other plugins have things to add or change
-        yield self.host.trigger.point(
-            "XEP-0277_item2data", item_elt, entry_elt, mb_data
-        )
+        yield self.host.trigger.point("XEP-0277_item2data", item_elt, entry_elt, mb_data)
 
         defer.returnValue(mb_data)
 
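Two coroutine styles coexist in this file. item_2_mb_data above is an old-style Twisted generator (presumably decorated with defer.inlineCallbacks in the full source, hence the yield expressions and defer.returnValue), while most methods in the later hunks are native async/await coroutines bridged back to Deferreds with defer.ensureDeferred. A minimal sketch of the equivalence, with placeholder callables (fetch_deferred and fetch_coroutine are not from this module):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def legacy_style():
        data = yield fetch_deferred()  # suspends until the Deferred fires, like "await"
        defer.returnValue(data)        # equivalent to "return data" on Python 3

    async def modern_style():
        # native coroutine; callers wrap it with defer.ensureDeferred()
        # to get a Deferred, as the _mb_* frontend methods below do
        return await fetch_coroutine()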
@@ -655,7 +636,9 @@
                     if type_:
                         if type_ == "_rich":  # convert input from current syntax to XHTML
                             xml_content = await synt.convert(
-                                mb_data[attr], synt.get_current_syntax(client.profile), "XHTML"
+                                mb_data[attr],
+                                synt.get_current_syntax(client.profile),
+                                "XHTML",
                             )
                             if f"{elem_name}_xhtml" in mb_data:
                                 raise failure.Failure(
@@ -722,9 +705,7 @@
                     url = attachment["url"]
                 except KeyError:
                     try:
-                        url = next(
-                            s['url'] for s in attachment["sources"] if 'url' in s
-                        )
+                        url = next(s["url"] for s in attachment["sources"] if "url" in s)
                     except (StopIteration, KeyError):
                         log.warning(
                             f'"url" missing in attachment, ignoring: {attachment}'
@@ -746,11 +727,11 @@
                 for key, attr in (
                     ("media_type", "type"),
                     ("desc", "title"),
-                    ("size", "lenght")
+                    ("size", "lenght"),
                 ):
                     value = attachment.get(key)
                     if value:
-                        link_elt[attr]  = str(value)
+                        link_elt[attr] = str(value)
 
         ## alternate links ##
         alt_links = extra.get("alt_links")
@@ -760,7 +741,7 @@
                 url = url_template.format(
                     service=quote(service.full(), safe=""),
                     node=quote(node, safe=""),
-                    item=quote(item_id, safe="")
+                    item=quote(item_id, safe=""),
                 )
 
                 link_elt = entry_elt.addElement("link")
@@ -800,7 +781,8 @@
         ## published/updated time ##
         current_time = time.time()
         entry_elt.addElement(
-            "updated", content=utils.xmpp_date(float(mb_data.get("updated", current_time)))
+            "updated",
+            content=utils.xmpp_date(float(mb_data.get("updated", current_time))),
         )
         entry_elt.addElement(
             "published",
@@ -808,7 +790,7 @@
         )
 
         ## categories ##
-        for tag in mb_data.get('tags', []):
+        for tag in mb_data.get("tags", []):
             category_elt = entry_elt.addElement("category")
             category_elt["term"] = tag
 
@@ -825,7 +807,7 @@
         entry_elt.addElement("id", content=entry_id)  #
 
         ## comments ##
-        for comments_data in mb_data.get('comments', []):
+        for comments_data in mb_data.get("comments", []):
             link_elt = entry_elt.addElement("link")
             # XXX: "uri" is set in self._manage_comments if not already existing
             try:
@@ -844,9 +826,7 @@
                 link_elt["rel"] = "via"
                 link_elt["href"] = repeated["uri"]
             except KeyError as e:
-                log.warning(
-                    f"invalid repeated element({e}): {extra['repeated']}"
-                )
+                log.warning(f"invalid repeated element({e}): {extra['repeated']}")
 
         ## final item building ##
         item_elt = pubsub.Item(id=item_id, payload=entry_elt)
@@ -872,7 +852,7 @@
         """
         if not self.is_comment_node(item_id):
             raise ValueError("This node is not a comment node")
-        return item_id[len(NS_COMMENT_PREFIX):]
+        return item_id[len(NS_COMMENT_PREFIX) :]
 
     def get_comments_node(self, item_id):
         """Generate comment node
@@ -908,7 +888,9 @@
             client.pubsub_service if client.pubsub_service is not None else parent_service
         )
 
-    async def _manage_comments(self, client, mb_data, service, node, item_id, access=None):
+    async def _manage_comments(
+        self, client, mb_data, service, node, item_id, access=None
+    ):
         """Check comments keys in mb_data and create comments node if necessary
 
         if a comments node metadata is set in the mb_data['comments'] list, it is used
@@ -931,18 +913,16 @@
             if "comments" in mb_data:
                 log.warning(
                     "comments are not allowed but there is already a comments node, "
-                    "it may be lost: {uri}".format(
-                        uri=mb_data["comments"]
-                    )
+                    "it may be lost: {uri}".format(uri=mb_data["comments"])
                 )
                 del mb_data["comments"]
             return
 
        # we usually have a single comment node, but the spec allows several, so we need to
         # handle this in a list
-        if len(mb_data.setdefault('comments', [])) == 0:
+        if len(mb_data.setdefault("comments", [])) == 0:
             # we need at least one comment node
-            mb_data['comments'].append({})
+            mb_data["comments"].append({})
 
         if access is None:
             # TODO: cache access models per service/node
@@ -952,7 +932,9 @@
                 log.debug(f"Can't get parent node configuration: {e}")
                 access = self._p.ACCESS_OPEN
             else:
-                access = parent_node_config.get(self._p.OPT_ACCESS_MODEL, self._p.ACCESS_OPEN)
+                access = parent_node_config.get(
+                    self._p.OPT_ACCESS_MODEL, self._p.ACCESS_OPEN
+                )
 
         options = {
             self._p.OPT_ACCESS_MODEL: access,
@@ -967,9 +949,9 @@
         # if other plugins need to change the options
         self.host.trigger.point("XEP-0277_comments", client, mb_data, options)
 
-        for comments_data in mb_data['comments']:
-            uri = comments_data.get('uri')
-            comments_node = comments_data.get('node')
+        for comments_data in mb_data["comments"]:
+            uri = comments_data.get("uri")
+            comments_node = comments_data.get("node")
             try:
                 comments_service = jid.JID(comments_data["service"])
             except KeyError:
@@ -977,24 +959,26 @@
 
             if uri:
                 uri_service, uri_node = self.parse_comment_url(uri)
-                if ((comments_node is not None and comments_node!=uri_node)
-                     or (comments_service is not None and comments_service!=uri_service)):
+                if (comments_node is not None and comments_node != uri_node) or (
+                    comments_service is not None and comments_service != uri_service
+                ):
                     raise ValueError(
                         f"Incoherence between comments URI ({uri}) and comments_service "
-                        f"({comments_service}) or comments_node ({comments_node})")
-                comments_data['service'] = comments_service = uri_service
-                comments_data['node'] = comments_node = uri_node
+                        f"({comments_service}) or comments_node ({comments_node})"
+                    )
+                comments_data["service"] = comments_service = uri_service
+                comments_data["node"] = comments_node = uri_node
             else:
                 if not comments_node:
                     comments_node = self.get_comments_node(item_id)
-                comments_data['node'] = comments_node
+                comments_data["node"] = comments_node
                 if comments_service is None:
                     comments_service = await self.get_comments_service(client, service)
                     if comments_service is None:
                         comments_service = client.jid.userhostJID()
-                comments_data['service'] = comments_service
+                comments_data["service"] = comments_service
 
-                comments_data['uri'] = xmpp_uri.build_xmpp_uri(
+                comments_data["uri"] = xmpp_uri.build_xmpp_uri(
                     "pubsub",
                     path=comments_service.full(),
                     node=comments_node,
@@ -1031,11 +1015,11 @@
         """Generate a user friendly id from title or content"""
         # TODO: rich content should be converted to plain text
         id_base = regex.url_friendly_text(
-            data.get('title')
-            or data.get('title_rich')
-            or data.get('content')
-            or data.get('content_rich')
-            or ''
+            data.get("title")
+            or data.get("title_rich")
+            or data.get("content")
+            or data.get("content_rich")
+            or ""
         )
         if not data.get("user_friendly_id_suffix", True):
             return id_base
@@ -1054,7 +1038,7 @@
         client: SatXMPPEntity,
         data: dict,
         service: Optional[jid.JID] = None,
-        node: Optional[str] = NS_MICROBLOG
+        node: Optional[str] = NS_MICROBLOG,
     ) -> Optional[str]:
         """Send XEP-0277's microblog data
 
@@ -1082,10 +1066,7 @@
                    # the item doesn't already exist, and change the ID if it does.
                     try:
                         items, __ = await self._p.get_items(
-                            client,
-                            service,
-                            node,
-                            item_ids = [item_id]
+                            client, service, node, item_ids=[item_id]
                         )
                     except exceptions.NotFound:
                         pass
@@ -1119,20 +1100,13 @@
         return item_id
 
     def _mb_repeat(
-            self,
-            service_s: str,
-            node: str,
-            item: str,
-            extra_s: str,
-            profile_key: str
+        self, service_s: str, node: str, item: str, extra_s: str, profile_key: str
     ) -> defer.Deferred:
         service = jid.JID(service_s) if service_s else None
         node = node if node else NS_MICROBLOG
         client = self.host.get_client(profile_key)
         extra = data_format.deserialise(extra_s)
-        d = defer.ensureDeferred(
-            self.repeat(client, item, service, node, extra)
-        )
+        d = defer.ensureDeferred(self.repeat(client, item, service, node, extra))
         # [repeat] can return None, and we always need a str
         d.addCallback(lambda ret: ret or "")
         return d
@@ -1154,12 +1128,7 @@
             service = client.jid.userhostJID()
 
         # we first get the post to repeat
-        items, __ = await self._p.get_items(
-            client,
-            service,
-            node,
-            item_ids = [item]
-        )
+        items, __ = await self._p.get_items(client, service, node, item_ids=[item])
         if not items:
             raise exceptions.NotFound(
                 f"no item found at node {node!r} on {service} with ID {item!r}"
@@ -1168,9 +1137,7 @@
         try:
             entry_elt = next(item_elt.elements(NS_ATOM, "entry"))
         except StopIteration:
-            raise exceptions.DataError(
-                "post to repeat is not a XEP-0277 blog item"
-            )
+            raise exceptions.DataError("post to repeat is not a XEP-0277 blog item")
 
         # we want to be sure that we have an author element
         try:
@@ -1198,10 +1165,7 @@
         )
 
         return await self._p.send_item(
-            client,
-            client.jid.userhostJID(),
-            NS_MICROBLOG,
-            entry_elt
+            client, client.jid.userhostJID(), NS_MICROBLOG, entry_elt
         )
 
     def _mb_preview(self, service, node, data, profile_key):
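Most of the remaining hunks come down to the 88-column limit alone: a call whose arguments fit on one line is joined, and one that does not fit is exploded with one argument per line plus a trailing comma, so it stays exploded on later runs. Both cases, condensed from hunks in this diff (the self._p. receivers are shortened to bare names for readability):

    # Fits within 88 columns: Black joins it onto a single line.
    items, __ = await get_items(client, service, node, item_ids=[item])

    # Too long for 88 columns: Black explodes it and adds the trailing comma.
    mb_data_list, metadata = await trans_items_data_d(
        items_data,
        partial(item_2_mb_data, client, service=service, node=node),
        serialise=True,
    )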
@@ -1218,7 +1182,7 @@
         client: SatXMPPEntity,
         data: dict,
         service: Optional[jid.JID] = None,
-        node: Optional[str] = NS_MICROBLOG
+        node: Optional[str] = NS_MICROBLOG,
     ) -> dict:
         """Preview microblog data without publishing them
 
@@ -1235,7 +1199,6 @@
         item_elt.uri = pubsub.NS_PUBSUB
         return await self.item_2_mb_data(client, item_elt, service, node)
 
-
     ## retract ##
 
     def _mb_retract(self, service_jid_s, nodeIdentifier, itemIdentifier, profile_key):
@@ -1252,11 +1215,18 @@
 
     def _mb_get_serialise(self, data):
         items, metadata = data
-        metadata['items'] = items
+        metadata["items"] = items
         return data_format.serialise(metadata)
 
-    def _mb_get(self, service="", node="", max_items=10, item_ids=None, extra="",
-               profile_key=C.PROF_KEY_NONE):
+    def _mb_get(
+        self,
+        service="",
+        node="",
+        max_items=10,
+        item_ids=None,
+        extra="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """
        @param max_items(int): maximum number of items to get, C.NO_LIMIT for no limit
         @param item_ids (list[unicode]): list of item IDs
@@ -1266,8 +1236,15 @@
         max_items = None if max_items == C.NO_LIMIT else max_items
         extra = self._p.parse_extra(data_format.deserialise(extra))
         d = defer.ensureDeferred(
-            self.mb_get(client, service, node or None, max_items, item_ids,
-                       extra.rsm_request, extra.extra)
+            self.mb_get(
+                client,
+                service,
+                node or None,
+                max_items,
+                item_ids,
+                extra.rsm_request,
+                extra.extra,
+            )
         )
         d.addCallback(self._mb_get_serialise)
         return d
@@ -1280,7 +1257,7 @@
         max_items: Optional[int] = 10,
         item_ids: Optional[List[str]] = None,
         rsm_request: Optional[rsm.RSMRequest] = None,
-        extra: Optional[Dict[str, Any]] = None
+        extra: Optional[Dict[str, Any]] = None,
     ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]:
         """Get some microblogs
 
@@ -1309,7 +1286,8 @@
             extra=extra,
         )
         mb_data_list, metadata = await self._p.trans_items_data_d(
-            items_data, partial(self.item_2_mb_data, client, service=service, node=node))
+            items_data, partial(self.item_2_mb_data, client, service=service, node=node)
+        )
         encrypted = metadata.pop("encrypted", None)
         if encrypted is not None:
             for mb_data in mb_data_list:
@@ -1320,13 +1298,15 @@
         return (mb_data_list, metadata)
 
     def _mb_rename(self, service, node, item_id, new_id, profile_key):
-        return defer.ensureDeferred(self.mb_rename(
-            self.host.get_client(profile_key),
-            jid.JID(service) if service else None,
-            node or None,
-            item_id,
-            new_id
-        ))
+        return defer.ensureDeferred(
+            self.mb_rename(
+                self.host.get_client(profile_key),
+                jid.JID(service) if service else None,
+                node or None,
+                item_id,
+                new_id,
+            )
+        )
 
     async def mb_rename(
         self,
@@ -1334,7 +1314,7 @@
         service: Optional[jid.JID],
         node: Optional[str],
         item_id: str,
-        new_id: str
+        new_id: str,
     ) -> None:
         if not node:
             node = NS_MICROBLOG
@@ -1528,7 +1508,7 @@
                 items_data,
                 # FIXME: service and node should be used here
                 partial(self.item_2_mb_data, client),
-                serialise=True
+                serialise=True,
             )
             d.addCallback(lambda serialised: ("", serialised))
             return d
@@ -1552,8 +1532,14 @@
         )
         return d
 
-    def _mb_get_from_many(self, publishers_type, publishers, max_items=10, extra_dict=None,
-                       profile_key=C.PROF_KEY_NONE):
+    def _mb_get_from_many(
+        self,
+        publishers_type,
+        publishers,
+        max_items=10,
+        extra_dict=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """
        @param max_items(int): maximum number of items to get, C.NO_LIMIT for no limit
         """
@@ -1569,8 +1555,15 @@
             profile_key,
         )
 
-    def mb_get_from_many(self, publishers_type, publishers, max_items=None, rsm_request=None,
-                      extra=None, profile_key=C.PROF_KEY_NONE):
+    def mb_get_from_many(
+        self,
+        publishers_type,
+        publishers,
+        max_items=None,
+        rsm_request=None,
+        extra=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """Get the published microblogs for a list of groups or jids
 
         @param publishers_type (str): type of the list of publishers (one of "GROUP" or
@@ -1607,17 +1600,13 @@
             for item, item_metadata in items_data:
                 item = data_format.serialise(item)
                 items.append((item, item_metadata))
-            ret.append((
-                service.full(),
-                node,
-                failure_,
-                items,
-                metadata))
+            ret.append((service.full(), node, failure_, items, metadata))
 
         return data[0], ret
 
-    def _mb_get_from_many_with_comments_rt_result(self, session_id,
-                                           profile_key=C.PROF_KEY_DEFAULT):
+    def _mb_get_from_many_with_comments_rt_result(
+        self, session_id, profile_key=C.PROF_KEY_DEFAULT
+    ):
         """Get real-time results for [mb_get_from_many_with_comments] session
 
         @param session_id: id of the real-time deferred session
@@ -1643,9 +1632,16 @@
         d.addCallback(self._mb_get_from_many_with_comments_rt_result_serialise)
         return d
 
-    def _mb_get_from_many_with_comments(self, publishers_type, publishers, max_items=10,
-                                   max_comments=C.NO_LIMIT, extra_dict=None,
-                                   extra_comments_dict=None, profile_key=C.PROF_KEY_NONE):
+    def _mb_get_from_many_with_comments(
+        self,
+        publishers_type,
+        publishers,
+        max_items=10,
+        max_comments=C.NO_LIMIT,
+        extra_dict=None,
+        extra_comments_dict=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """
        @param max_items(int): maximum number of items to get, C.NO_LIMIT for no limit
         @param max_comments(int): maximum number of comments to get, C.NO_LIMIT for no
@@ -1668,10 +1664,18 @@
             profile_key,
         )
 
-    def mb_get_from_many_with_comments(self, publishers_type, publishers, max_items=None,
-                                  max_comments=None, rsm_request=None, extra=None,
-                                  rsm_comments=None, extra_comments=None,
-                                  profile_key=C.PROF_KEY_NONE):
+    def mb_get_from_many_with_comments(
+        self,
+        publishers_type,
+        publishers,
+        max_items=None,
+        max_comments=None,
+        rsm_request=None,
+        extra=None,
+        rsm_comments=None,
+        extra_comments=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """Helper method to get the microblogs and their comments in one shot
 
         @param publishers_type (str): type of the list of publishers (one of "GROUP" or
@@ -1729,9 +1733,12 @@
                             lambda items_data: self._p.trans_items_data_d(
                                 items_data,
                                 partial(
-                                    self.item_2_mb_data, client, service=service, node=node
+                                    self.item_2_mb_data,
+                                    client,
+                                    service=service,
+                                    node=node,
                                 ),
-                                serialise=True
+                                serialise=True,
                             )
                         )
                         # with failure handling
@@ -1764,9 +1771,11 @@
 
         deferreds = {}
         for service, node in node_data:
-            d = deferreds[(service, node)] = defer.ensureDeferred(self._p.get_items(
-                client, service, node, max_items, rsm_request=rsm_request, extra=extra
-            ))
+            d = deferreds[(service, node)] = defer.ensureDeferred(
+                self._p.get_items(
+                    client, service, node, max_items, rsm_request=rsm_request, extra=extra
+                )
+            )
             d.addCallback(
                 lambda items_data: self._p.trans_items_data_d(
                     items_data,