diff sat/plugins/plugin_xep_0277.py @ 2624:56f94936df1e

code style reformatting using black
author Goffi <goffi@goffi.org>
date Wed, 27 Jun 2018 20:14:46 +0200
parents 26edcf3a30eb
children 3480d4fdf83a
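
The hunks below apply black's mechanical rewrites: string literals are normalised to double quotes, any call or literal that overflows the configured line length is broken onto one element per line with a trailing comma, and blank lines around definitions are adjusted; no behaviour change is intended. A minimal, self-contained sketch of the pattern, using a toy stand-in rather than the real host.bridge.addMethod:

    # Toy stand-in for host.bridge.addMethod; illustration only, not SàT code.
    def add_method(name, namespace, in_sign="", out_sign="", method=None):
        return (name, namespace, in_sign, out_sign, method)

    # A hand-formatted call in the pre-black style:
    add_method('mbSend', '.plugin', in_sign='ssa{ss}s', out_sign='', method=None)

    # The same call as black rewrites it once it no longer fits on one line:
    # double quotes, one argument per line, trailing comma.
    add_method(
        "mbSend",
        ".plugin",
        in_sign="ssa{ss}s",
        out_sign="",
        method=None,
    )
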
--- a/sat/plugins/plugin_xep_0277.py	Wed Jun 27 07:51:29 2018 +0200
+++ b/sat/plugins/plugin_xep_0277.py	Wed Jun 27 20:14:46 2018 +0200
@@ -20,6 +20,7 @@
 from sat.core.i18n import _
 from sat.core.constants import Const as C
 from sat.core.log import getLogger
+
 log = getLogger(__name__)
 from twisted.words.protocols.jabber import jid, error
 from twisted.words.protocols.jabber.xmlstream import XMPPHandler
@@ -43,10 +44,10 @@
 import calendar
 import urlparse
 
-NS_MICROBLOG = 'urn:xmpp:microblog:0'
-NS_ATOM = 'http://www.w3.org/2005/Atom'
+NS_MICROBLOG = "urn:xmpp:microblog:0"
+NS_ATOM = "http://www.w3.org/2005/Atom"
 NS_PUBSUB_EVENT = "{}{}".format(pubsub.NS_PUBSUB, "#event")
-NS_COMMENT_PREFIX = '{}:comments/'.format(NS_MICROBLOG)
+NS_COMMENT_PREFIX = "{}:comments/".format(NS_MICROBLOG)
 
 
 PLUGIN_INFO = {
@@ -58,7 +59,7 @@
     C.PI_RECOMMENDATIONS: ["XEP-0059", "EXTRA-PEP"],
     C.PI_MAIN: "XEP_0277",
     C.PI_HANDLER: "yes",
-    C.PI_DESCRIPTION: _("""Implementation of microblogging Protocol""")
+    C.PI_DESCRIPTION: _("""Implementation of microblogging Protocol"""),
 }
 
 
@@ -72,49 +73,97 @@
     def __init__(self, host):
         log.info(_(u"Microblogging plugin initialization"))
         self.host = host
-        host.registerNamespace('microblog', NS_MICROBLOG)
-        self._p = self.host.plugins["XEP-0060"] # this facilitate the access to pubsub plugin
+        host.registerNamespace("microblog", NS_MICROBLOG)
+        self._p = self.host.plugins[
+            "XEP-0060"
+        ]  # this facilitate the access to pubsub plugin
         self.rt_sessions = sat_defer.RTDeferredSessions()
-        self.host.plugins["XEP-0060"].addManagedNode(NS_MICROBLOG, items_cb=self._itemsReceived)
+        self.host.plugins["XEP-0060"].addManagedNode(
+            NS_MICROBLOG, items_cb=self._itemsReceived
+        )
 
-        host.bridge.addMethod("mbSend", ".plugin",
-                              in_sign='ssa{ss}s', out_sign='',
-                              method=self._mbSend,
-                              async=True)
-        host.bridge.addMethod("mbRetract", ".plugin",
-                              in_sign='ssss', out_sign='',
-                              method=self._mbRetract,
-                              async=True)
-        host.bridge.addMethod("mbGet", ".plugin",
-                              in_sign='ssiasa{ss}s', out_sign='(aa{ss}a{ss})',
-                              method=self._mbGet,
-                              async=True)
-        host.bridge.addMethod("mbSetAccess", ".plugin", in_sign='ss', out_sign='',
-                              method=self.mbSetAccess,
-                              async=True)
-        host.bridge.addMethod("mbSubscribeToMany", ".plugin", in_sign='sass', out_sign='s',
-                              method=self._mbSubscribeToMany)
-        host.bridge.addMethod("mbGetFromManyRTResult", ".plugin", in_sign='ss', out_sign='(ua(sssaa{ss}a{ss}))',
-                              method=self._mbGetFromManyRTResult, async=True)
-        host.bridge.addMethod("mbGetFromMany", ".plugin", in_sign='sasia{ss}s', out_sign='s',
-                              method=self._mbGetFromMany)
-        host.bridge.addMethod("mbGetFromManyWithCommentsRTResult", ".plugin", in_sign='ss', out_sign='(ua(sssa(a{ss}a(sssaa{ss}a{ss}))a{ss}))',
-                              method=self._mbGetFromManyWithCommentsRTResult, async=True)
-        host.bridge.addMethod("mbGetFromManyWithComments", ".plugin", in_sign='sasiia{ss}a{ss}s', out_sign='s',
-                              method=self._mbGetFromManyWithComments)
+        host.bridge.addMethod(
+            "mbSend",
+            ".plugin",
+            in_sign="ssa{ss}s",
+            out_sign="",
+            method=self._mbSend,
+            async=True,
+        )
+        host.bridge.addMethod(
+            "mbRetract",
+            ".plugin",
+            in_sign="ssss",
+            out_sign="",
+            method=self._mbRetract,
+            async=True,
+        )
+        host.bridge.addMethod(
+            "mbGet",
+            ".plugin",
+            in_sign="ssiasa{ss}s",
+            out_sign="(aa{ss}a{ss})",
+            method=self._mbGet,
+            async=True,
+        )
+        host.bridge.addMethod(
+            "mbSetAccess",
+            ".plugin",
+            in_sign="ss",
+            out_sign="",
+            method=self.mbSetAccess,
+            async=True,
+        )
+        host.bridge.addMethod(
+            "mbSubscribeToMany",
+            ".plugin",
+            in_sign="sass",
+            out_sign="s",
+            method=self._mbSubscribeToMany,
+        )
+        host.bridge.addMethod(
+            "mbGetFromManyRTResult",
+            ".plugin",
+            in_sign="ss",
+            out_sign="(ua(sssaa{ss}a{ss}))",
+            method=self._mbGetFromManyRTResult,
+            async=True,
+        )
+        host.bridge.addMethod(
+            "mbGetFromMany",
+            ".plugin",
+            in_sign="sasia{ss}s",
+            out_sign="s",
+            method=self._mbGetFromMany,
+        )
+        host.bridge.addMethod(
+            "mbGetFromManyWithCommentsRTResult",
+            ".plugin",
+            in_sign="ss",
+            out_sign="(ua(sssa(a{ss}a(sssaa{ss}a{ss}))a{ss}))",
+            method=self._mbGetFromManyWithCommentsRTResult,
+            async=True,
+        )
+        host.bridge.addMethod(
+            "mbGetFromManyWithComments",
+            ".plugin",
+            in_sign="sasiia{ss}a{ss}s",
+            out_sign="s",
+            method=self._mbGetFromManyWithComments,
+        )
 
     def getHandler(self, client):
         return XEP_0277_handler()
 
     def _checkFeaturesCb(self, available):
-        return {'available': C.BOOL_TRUE}
+        return {"available": C.BOOL_TRUE}
 
     def _checkFeaturesEb(self, fail):
-        return {'available': C.BOOL_FALSE}
+        return {"available": C.BOOL_FALSE}
 
     def getFeatures(self, profile):
         client = self.host.getClient(profile)
-        d = self.host.checkFeatures(client, [], identity=('pubsub', 'pep'))
+        d = self.host.checkFeatures(client, [], identity=("pubsub", "pep"))
         d.addCallbacks(self._checkFeaturesCb, self._checkFeaturesEb)
         return d
 
@@ -122,18 +171,27 @@
 
     def _itemsReceived(self, client, itemsEvent):
         """Callback which manage items notifications (publish + retract)"""
+
         def manageItem(data, event):
-            self.host.bridge.psEvent(C.PS_MICROBLOG, itemsEvent.sender.full(), itemsEvent.nodeIdentifier, event, data, client.profile)
+            self.host.bridge.psEvent(
+                C.PS_MICROBLOG,
+                itemsEvent.sender.full(),
+                itemsEvent.nodeIdentifier,
+                event,
+                data,
+                client.profile,
+            )
 
         for item in itemsEvent.items:
             if item.name == C.PS_ITEM:
-                self.item2mbdata(item).addCallbacks(manageItem, lambda failure: None, (C.PS_PUBLISH,))
+                self.item2mbdata(item).addCallbacks(
+                    manageItem, lambda failure: None, (C.PS_PUBLISH,)
+                )
             elif item.name == C.PS_RETRACT:
-                manageItem({'id': item['id']}, C.PS_RETRACT)
+                manageItem({"id": item["id"]}, C.PS_RETRACT)
             else:
                 raise exceptions.InternalError("Invalid event value")
 
-
     ## data/item transformation ##
 
     @defer.inlineCallbacks
@@ -156,13 +214,17 @@
             """
             if key in microblog_data:
                 if not increment:
-                    raise failure.Failure(exceptions.DataError("key {} is already present for item {}").format(key, item_elt['id']))
+                    raise failure.Failure(
+                        exceptions.DataError(
+                            "key {} is already present for item {}"
+                        ).format(key, item_elt["id"])
+                    )
                 else:
-                    idx=1 # the idx 0 is the key without suffix
+                    idx = 1  # the idx 0 is the key without suffix
                     fmt = "{}#{}"
                     new_key = fmt.format(key, idx)
                     while new_key in microblog_data:
-                        idx+=1
+                        idx += 1
                         new_key = fmt.format(key, idx)
                     key = new_key
             return key
@@ -170,49 +232,62 @@
         @defer.inlineCallbacks
         def parseElement(elem):
             """Parse title/content elements and fill microblog_data accordingly"""
-            type_ = elem.getAttribute('type')
-            if type_ == 'xhtml':
+            type_ = elem.getAttribute("type")
+            if type_ == "xhtml":
                 data_elt = elem.firstChildElement()
                 if data_elt is None:
-                    raise failure.Failure(exceptions.DataError(u"XHML content not wrapped in a <div/> element, this is not standard !"))
+                    raise failure.Failure(
+                        exceptions.DataError(
+                            u"XHML content not wrapped in a <div/> element, this is not standard !"
+                        )
+                    )
                 if data_elt.uri != C.NS_XHTML:
-                    raise failure.Failure(exceptions.DataError(_('Content of type XHTML must declare its namespace!')))
-                key = check_conflict(u'{}_xhtml'.format(elem.name))
+                    raise failure.Failure(
+                        exceptions.DataError(
+                            _("Content of type XHTML must declare its namespace!")
+                        )
+                    )
+                key = check_conflict(u"{}_xhtml".format(elem.name))
                 data = data_elt.toXml()
-                microblog_data[key] = yield self.host.plugins["TEXT-SYNTAXES"].cleanXHTML(data)
+                microblog_data[key] = yield self.host.plugins["TEXT-SYNTAXES"].cleanXHTML(
+                    data
+                )
             else:
                 key = check_conflict(elem.name)
                 microblog_data[key] = unicode(elem)
 
-
-        id_ = item_elt.getAttribute('id', '') # there can be no id for transient nodes
-        microblog_data[u'id'] = id_
+        id_ = item_elt.getAttribute("id", "")  # there can be no id for transient nodes
+        microblog_data[u"id"] = id_
         if item_elt.uri not in (pubsub.NS_PUBSUB, NS_PUBSUB_EVENT):
-            msg = u"Unsupported namespace {ns} in pubsub item {id_}".format(ns=item_elt.uri, id_=id_)
+            msg = u"Unsupported namespace {ns} in pubsub item {id_}".format(
+                ns=item_elt.uri, id_=id_
+            )
             log.warning(msg)
             raise failure.Failure(exceptions.DataError(msg))
 
         try:
-            entry_elt = item_elt.elements(NS_ATOM, 'entry').next()
+            entry_elt = item_elt.elements(NS_ATOM, "entry").next()
         except StopIteration:
-            msg = u'No atom entry found in the pubsub item {}'.format(id_)
+            msg = u"No atom entry found in the pubsub item {}".format(id_)
             raise failure.Failure(exceptions.DataError(msg))
 
         # language
         try:
-            microblog_data[u'language'] = entry_elt[(C.NS_XML, u'lang')].strip()
+            microblog_data[u"language"] = entry_elt[(C.NS_XML, u"lang")].strip()
         except KeyError:
             pass
 
         # atom:id
         try:
-            id_elt = entry_elt.elements(NS_ATOM, 'id').next()
+            id_elt = entry_elt.elements(NS_ATOM, "id").next()
         except StopIteration:
-            msg = u'No atom id found in the pubsub item {}, this is not standard !'.format(id_)
+            msg = u"No atom id found in the pubsub item {}, this is not standard !".format(
+                id_
+            )
             log.warning(msg)
-            microblog_data[u'atom_id'] = ""
+            microblog_data[u"atom_id"] = ""
         else:
-            microblog_data[u'atom_id'] = unicode(id_elt)
+            microblog_data[u"atom_id"] = unicode(id_elt)
 
         # title/content(s)
 
@@ -225,9 +300,9 @@
         # except StopIteration:
         #     msg = u'No atom title found in the pubsub item {}'.format(id_)
         #     raise failure.Failure(exceptions.DataError(msg))
-        title_elts = list(entry_elt.elements(NS_ATOM, 'title'))
+        title_elts = list(entry_elt.elements(NS_ATOM, "title"))
         if not title_elts:
-            msg = u'No atom title found in the pubsub item {}'.format(id_)
+            msg = u"No atom title found in the pubsub item {}".format(id_)
             raise failure.Failure(exceptions.DataError(msg))
         for title_elt in title_elts:
             yield parseElement(title_elt)
@@ -235,113 +310,146 @@
         # FIXME: as for <title/>, Atom only authorise at most 1 content
         #        but XEP-0277 allows several ones. So for no we handle as
         #        if more than one can be present
-        for content_elt in entry_elt.elements(NS_ATOM, 'content'):
+        for content_elt in entry_elt.elements(NS_ATOM, "content"):
             yield parseElement(content_elt)
 
         # we check that text content is present
-        for key in ('title', 'content'):
-            if key not in microblog_data and ('{}_xhtml'.format(key)) in microblog_data:
-                log.warning(u"item {id_} provide a {key}_xhtml data but not a text one".format(id_=id_, key=key))
+        for key in ("title", "content"):
+            if key not in microblog_data and ("{}_xhtml".format(key)) in microblog_data:
+                log.warning(
+                    u"item {id_} provide a {key}_xhtml data but not a text one".format(
+                        id_=id_, key=key
+                    )
+                )
                 # ... and do the conversion if it's not
-                microblog_data[key] = yield self.host.plugins["TEXT-SYNTAXES"].\
-                                            convert(microblog_data[u'{}_xhtml'.format(key)],
-                                            self.host.plugins["TEXT-SYNTAXES"].SYNTAX_XHTML,
-                                            self.host.plugins["TEXT-SYNTAXES"].SYNTAX_TEXT,
-                                            False)
+                microblog_data[key] = yield self.host.plugins["TEXT-SYNTAXES"].convert(
+                    microblog_data[u"{}_xhtml".format(key)],
+                    self.host.plugins["TEXT-SYNTAXES"].SYNTAX_XHTML,
+                    self.host.plugins["TEXT-SYNTAXES"].SYNTAX_TEXT,
+                    False,
+                )
 
-        if 'content' not in microblog_data:
+        if "content" not in microblog_data:
             # use the atom title data as the microblog body content
-            microblog_data[u'content'] = microblog_data[u'title']
-            del microblog_data[u'title']
-            if 'title_xhtml' in microblog_data:
-                microblog_data[u'content_xhtml'] = microblog_data[u'title_xhtml']
-                del microblog_data[u'title_xhtml']
+            microblog_data[u"content"] = microblog_data[u"title"]
+            del microblog_data[u"title"]
+            if "title_xhtml" in microblog_data:
+                microblog_data[u"content_xhtml"] = microblog_data[u"title_xhtml"]
+                del microblog_data[u"title_xhtml"]
 
         # published/updated dates
         try:
-            updated_elt = entry_elt.elements(NS_ATOM, 'updated').next()
+            updated_elt = entry_elt.elements(NS_ATOM, "updated").next()
         except StopIteration:
-            msg = u'No atom updated element found in the pubsub item {}'.format(id_)
+            msg = u"No atom updated element found in the pubsub item {}".format(id_)
             raise failure.Failure(exceptions.DataError(msg))
-        microblog_data[u'updated'] = unicode(calendar.timegm(dateutil.parser.parse(unicode(updated_elt)).utctimetuple()))
+        microblog_data[u"updated"] = unicode(
+            calendar.timegm(dateutil.parser.parse(unicode(updated_elt)).utctimetuple())
+        )
         try:
-            published_elt = entry_elt.elements(NS_ATOM, 'published').next()
+            published_elt = entry_elt.elements(NS_ATOM, "published").next()
         except StopIteration:
-            microblog_data[u'published'] = microblog_data[u'updated']
+            microblog_data[u"published"] = microblog_data[u"updated"]
         else:
-            microblog_data[u'published'] = unicode(calendar.timegm(dateutil.parser.parse(unicode(published_elt)).utctimetuple()))
+            microblog_data[u"published"] = unicode(
+                calendar.timegm(
+                    dateutil.parser.parse(unicode(published_elt)).utctimetuple()
+                )
+            )
 
         # links
-        for link_elt in entry_elt.elements(NS_ATOM, 'link'):
-            if link_elt.getAttribute('rel') == 'replies' and link_elt.getAttribute('title') == 'comments':
-                key = check_conflict('comments', True)
-                microblog_data[key] = link_elt['href']
+        for link_elt in entry_elt.elements(NS_ATOM, "link"):
+            if (
+                link_elt.getAttribute("rel") == "replies"
+                and link_elt.getAttribute("title") == "comments"
+            ):
+                key = check_conflict("comments", True)
+                microblog_data[key] = link_elt["href"]
                 try:
                     service, node = self.parseCommentUrl(microblog_data[key])
                 except:
                     log.warning(u"Can't parse url {}".format(microblog_data[key]))
                     del microblog_data[key]
                 else:
-                    microblog_data[u'{}_service'.format(key)] = service.full()
-                    microblog_data[u'{}_node'.format(key)] = node
+                    microblog_data[u"{}_service".format(key)] = service.full()
+                    microblog_data[u"{}_node".format(key)] = node
             else:
-                rel = link_elt.getAttribute('rel','')
-                title = link_elt.getAttribute('title','')
-                href = link_elt.getAttribute('href','')
-                log.warning(u"Unmanaged link element: rel={rel} title={title} href={href}".format(rel=rel, title=title, href=href))
+                rel = link_elt.getAttribute("rel", "")
+                title = link_elt.getAttribute("title", "")
+                href = link_elt.getAttribute("href", "")
+                log.warning(
+                    u"Unmanaged link element: rel={rel} title={title} href={href}".format(
+                        rel=rel, title=title, href=href
+                    )
+                )
 
         # author
         try:
-            author_elt = entry_elt.elements(NS_ATOM, 'author').next()
+            author_elt = entry_elt.elements(NS_ATOM, "author").next()
         except StopIteration:
             log.debug(u"Can't find author element in item {}".format(id_))
         else:
             publisher = item_elt.getAttribute("publisher")
             # name
             try:
-                name_elt = author_elt.elements(NS_ATOM, 'name').next()
+                name_elt = author_elt.elements(NS_ATOM, "name").next()
             except StopIteration:
-                log.warning(u"No name element found in author element of item {}".format(id_))
+                log.warning(
+                    u"No name element found in author element of item {}".format(id_)
+                )
             else:
-                microblog_data[u'author'] = unicode(name_elt)
+                microblog_data[u"author"] = unicode(name_elt)
             # uri
             try:
-                uri_elt = author_elt.elements(NS_ATOM, 'uri').next()
+                uri_elt = author_elt.elements(NS_ATOM, "uri").next()
             except StopIteration:
-                log.debug(u"No uri element found in author element of item {}".format(id_))
+                log.debug(
+                    u"No uri element found in author element of item {}".format(id_)
+                )
                 if publisher:
-                    microblog_data[u'author_jid'] = publisher
+                    microblog_data[u"author_jid"] = publisher
             else:
                 uri = unicode(uri_elt)
                 if uri.startswith("xmpp:"):
                     uri = uri[5:]
-                    microblog_data[u'author_jid'] = uri
+                    microblog_data[u"author_jid"] = uri
                 else:
-                    microblog_data[u'author_jid'] = item_elt.getAttribute(u"publisher") or ""
+                    microblog_data[u"author_jid"] = (
+                        item_elt.getAttribute(u"publisher") or ""
+                    )
 
                 if not publisher:
                     log.debug(u"No publisher attribute, we can't verify author jid")
-                    microblog_data[u'author_jid_verified'] = C.BOOL_FALSE
+                    microblog_data[u"author_jid_verified"] = C.BOOL_FALSE
                 elif jid.JID(publisher).userhostJID() == jid.JID(uri).userhostJID():
-                    microblog_data[u'author_jid_verified'] = C.BOOL_TRUE
+                    microblog_data[u"author_jid_verified"] = C.BOOL_TRUE
                 else:
-                    log.warning(u"item atom:uri differ from publisher attribute, spoofing attempt ? atom:uri = {} publisher = {}".format(uri, item_elt.getAttribute("publisher")))
-                    microblog_data[u'author_jid_verified'] = C.BOOL_FALSE
+                    log.warning(
+                        u"item atom:uri differ from publisher attribute, spoofing attempt ? atom:uri = {} publisher = {}".format(
+                            uri, item_elt.getAttribute("publisher")
+                        )
+                    )
+                    microblog_data[u"author_jid_verified"] = C.BOOL_FALSE
             # email
             try:
-                email_elt = author_elt.elements(NS_ATOM, 'email').next()
+                email_elt = author_elt.elements(NS_ATOM, "email").next()
             except StopIteration:
                 pass
             else:
-                microblog_data[u'author_email'] = unicode(email_elt)
+                microblog_data[u"author_email"] = unicode(email_elt)
 
             # categories
-            categories = (category_elt.getAttribute('term','') for category_elt in entry_elt.elements(NS_ATOM, 'category'))
-            data_format.iter2dict('tag', categories, microblog_data)
+            categories = (
+                category_elt.getAttribute("term", "")
+                for category_elt in entry_elt.elements(NS_ATOM, "category")
+            )
+            data_format.iter2dict("tag", categories, microblog_data)
 
         ## the trigger ##
         # if other plugins have things to add or change
-        yield self.host.trigger.point("XEP-0277_item2data", item_elt, entry_elt, microblog_data)
+        yield self.host.trigger.point(
+            "XEP-0277_item2data", item_elt, entry_elt, microblog_data
+        )
 
         defer.returnValue(microblog_data)
 
@@ -357,115 +465,144 @@
             Needed to construct Atom id
         @return: deferred which fire domish.Element
         """
-        entry_elt = domish.Element((NS_ATOM, 'entry'))
+        entry_elt = domish.Element((NS_ATOM, "entry"))
 
         ## language ##
-        if u'language' in data:
-            entry_elt[(C.NS_XML, u'lang')] = data[u'language'].strip()
+        if u"language" in data:
+            entry_elt[(C.NS_XML, u"lang")] = data[u"language"].strip()
 
         ## content and title ##
         synt = self.host.plugins["TEXT-SYNTAXES"]
 
-        for elem_name in ('title', 'content'):
-            for type_ in ['', '_rich', '_xhtml']:
+        for elem_name in ("title", "content"):
+            for type_ in ["", "_rich", "_xhtml"]:
                 attr = "{}{}".format(elem_name, type_)
                 if attr in data:
                     elem = entry_elt.addElement(elem_name)
                     if type_:
-                        if type_ == '_rich':  # convert input from current syntax to XHTML
-                            xml_content = yield synt.convert(data[attr], synt.getCurrentSyntax(client.profile), "XHTML")
-                            if '{}_xhtml'.format(elem_name) in data:
-                                raise failure.Failure(exceptions.DataError(_("Can't have xhtml and rich content at the same time")))
+                        if type_ == "_rich":  # convert input from current syntax to XHTML
+                            xml_content = yield synt.convert(
+                                data[attr], synt.getCurrentSyntax(client.profile), "XHTML"
+                            )
+                            if "{}_xhtml".format(elem_name) in data:
+                                raise failure.Failure(
+                                    exceptions.DataError(
+                                        _(
+                                            "Can't have xhtml and rich content at the same time"
+                                        )
+                                    )
+                                )
                         else:
                             xml_content = data[attr]
 
-                        div_elt = xml_tools.ElementParser()(xml_content, namespace=C.NS_XHTML)
-                        if div_elt.name != 'div' or div_elt.uri != C.NS_XHTML or div_elt.attributes:
+                        div_elt = xml_tools.ElementParser()(
+                            xml_content, namespace=C.NS_XHTML
+                        )
+                        if (
+                            div_elt.name != "div"
+                            or div_elt.uri != C.NS_XHTML
+                            or div_elt.attributes
+                        ):
                             # we need a wrapping <div/> at the top with XHTML namespace
-                            wrap_div_elt = domish.Element((C.NS_XHTML, 'div'))
+                            wrap_div_elt = domish.Element((C.NS_XHTML, "div"))
                             wrap_div_elt.addChild(div_elt)
                             div_elt = wrap_div_elt
                         elem.addChild(div_elt)
-                        elem['type'] = 'xhtml'
+                        elem["type"] = "xhtml"
                         if elem_name not in data:
                             # there is raw text content, which is mandatory
                             # so we create one from xhtml content
                             elem_txt = entry_elt.addElement(elem_name)
-                            text_content = yield self.host.plugins["TEXT-SYNTAXES"].convert(xml_content,
+                            text_content = yield self.host.plugins[
+                                "TEXT-SYNTAXES"
+                            ].convert(
+                                xml_content,
                                 self.host.plugins["TEXT-SYNTAXES"].SYNTAX_XHTML,
                                 self.host.plugins["TEXT-SYNTAXES"].SYNTAX_TEXT,
-                                False)
+                                False,
+                            )
                             elem_txt.addContent(text_content)
-                            elem_txt['type'] = 'text'
+                            elem_txt["type"] = "text"
 
                     else:  # raw text only needs to be escaped to get HTML-safe sequence
                         elem.addContent(data[attr])
-                        elem['type'] = 'text'
+                        elem["type"] = "text"
 
         try:
-            entry_elt.elements(NS_ATOM, 'title').next()
+            entry_elt.elements(NS_ATOM, "title").next()
         except StopIteration:
             # we have no title element which is mandatory
             # so we transform content element to title
-            elems = list(entry_elt.elements(NS_ATOM, 'content'))
+            elems = list(entry_elt.elements(NS_ATOM, "content"))
             if not elems:
-                raise exceptions.DataError("There must be at least one content or title element")
+                raise exceptions.DataError(
+                    "There must be at least one content or title element"
+                )
             for elem in elems:
-                elem.name = 'title'
+                elem.name = "title"
 
         ## author ##
-        author_elt = entry_elt.addElement('author')
+        author_elt = entry_elt.addElement("author")
         try:
-            author_name = data['author']
+            author_name = data["author"]
         except KeyError:
             # FIXME: must use better name
             author_name = client.jid.user
-        author_elt.addElement('name', content=author_name)
+        author_elt.addElement("name", content=author_name)
 
         try:
-            author_jid_s = data['author_jid']
+            author_jid_s = data["author_jid"]
         except KeyError:
             author_jid_s = client.jid.userhost()
-        author_elt.addElement('uri', content="xmpp:{}".format(author_jid_s))
+        author_elt.addElement("uri", content="xmpp:{}".format(author_jid_s))
 
         try:
-            author_jid_s = data['author_email']
+            author_jid_s = data["author_email"]
         except KeyError:
             pass
 
         ## published/updated time ##
         current_time = time.time()
-        entry_elt.addElement('updated',
-            content = utils.xmpp_date(float(data.get('updated', current_time))))
-        entry_elt.addElement('published',
-            content = utils.xmpp_date(float(data.get('published', current_time))))
+        entry_elt.addElement(
+            "updated", content=utils.xmpp_date(float(data.get("updated", current_time)))
+        )
+        entry_elt.addElement(
+            "published",
+            content=utils.xmpp_date(float(data.get("published", current_time))),
+        )
 
         ## categories ##
         for tag in data_format.dict2iter("tag", data):
             category_elt = entry_elt.addElement("category")
-            category_elt['term'] = tag
+            category_elt["term"] = tag
 
         ## id ##
-        entry_id = data.get('id', xmpp_uri.buildXMPPUri(
-            u'pubsub',
-            path=service.full() if service is not None else client.jid.userhost(),
-            node=node,
-            item=item_id))
-        entry_elt.addElement('id', content=entry_id) #
+        entry_id = data.get(
+            "id",
+            xmpp_uri.buildXMPPUri(
+                u"pubsub",
+                path=service.full() if service is not None else client.jid.userhost(),
+                node=node,
+                item=item_id,
+            ),
+        )
+        entry_elt.addElement("id", content=entry_id)  #
 
         ## comments ##
-        if 'comments' in data:
-            link_elt = entry_elt.addElement('link')
-            link_elt['href'] = data['comments']
-            link_elt['rel'] = 'replies'
-            link_elt['title'] = 'comments'
+        if "comments" in data:
+            link_elt = entry_elt.addElement("link")
+            link_elt["href"] = data["comments"]
+            link_elt["rel"] = "replies"
+            link_elt["title"] = "comments"
 
         ## final item building ##
         item_elt = pubsub.Item(id=item_id, payload=entry_elt)
 
         ## the trigger ##
         # if other plugins have things to add or change
-        yield self.host.trigger.point("XEP-0277_data2entry", client, data, entry_elt, item_elt)
+        yield self.host.trigger.point(
+            "XEP-0277_data2entry", client, data, entry_elt, item_elt
+        )
 
         defer.returnValue(item_elt)
 
@@ -489,17 +626,21 @@
             if parent_service.user:
                 # we are on a PEP
                 if parent_service.host == client.jid.host:
-                    # it's our server, we use already found client.pubsub_service below
+                    #  it's our server, we use already found client.pubsub_service below
                     pass
                 else:
                     # other server, let's try to find a non PEP service there
-                    d = self.host.findServiceEntity(client, "pubsub", "service", parent_service)
+                    d = self.host.findServiceEntity(
+                        client, "pubsub", "service", parent_service
+                    )
                     d.addCallback(lambda entity: entity or parent_service)
             else:
                 # parent is already on a normal Pubsub service, we re-use it
                 return defer.succeed(parent_service)
 
-        return defer.succeed(client.pubsub_service if client.pubsub_service is not None else parent_service)
+        return defer.succeed(
+            client.pubsub_service if client.pubsub_service is not None else parent_service
+        )
 
     @defer.inlineCallbacks
     def _manageComments(self, client, mb_data, service, node, item_id, access=None):
@@ -517,9 +658,13 @@
         # FIXME: if 'comments' already exists in mb_data, it is not used to create the Node
         allow_comments = C.bool(mb_data.pop("allow_comments", "false"))
         if not allow_comments:
-            if 'comments' in mb_data:
-                log.warning(u"comments are not allowed but there is already a comments node, it may be lost: {uri}".format(uri=mb_data['comments']))
-                del mb_data['comments']
+            if "comments" in mb_data:
+                log.warning(
+                    u"comments are not allowed but there is already a comments node, it may be lost: {uri}".format(
+                        uri=mb_data["comments"]
+                    )
+                )
+                del mb_data["comments"]
             return
 
         if access is None:
@@ -527,60 +672,75 @@
             parent_node_config = yield self._p.getConfiguration(client, service, node)
             access = parent_node_config.get(self._p.OPT_ACCESS_MODEL, self._p.ACCESS_OPEN)
 
-        options = {self._p.OPT_ACCESS_MODEL: access,
-                   self._p.OPT_PERSIST_ITEMS: 1,
-                   self._p.OPT_MAX_ITEMS: -1,
-                   self._p.OPT_DELIVER_PAYLOADS: 1,
-                   self._p.OPT_SEND_ITEM_SUBSCRIBE: 1,
-                   # FIXME: would it make sense to restrict publish model to subscribers?
-                   self._p.OPT_PUBLISH_MODEL: self._p.ACCESS_OPEN,
-                   }
+        options = {
+            self._p.OPT_ACCESS_MODEL: access,
+            self._p.OPT_PERSIST_ITEMS: 1,
+            self._p.OPT_MAX_ITEMS: -1,
+            self._p.OPT_DELIVER_PAYLOADS: 1,
+            self._p.OPT_SEND_ITEM_SUBSCRIBE: 1,
+            # FIXME: would it make sense to restrict publish model to subscribers?
+            self._p.OPT_PUBLISH_MODEL: self._p.ACCESS_OPEN,
+        }
 
         # if other plugins need to change the options
         yield self.host.trigger.point("XEP-0277_comments", client, mb_data, options)
 
         try:
-            comments_node = mb_data['comments_node']
+            comments_node = mb_data["comments_node"]
         except KeyError:
             comments_node = self.getCommentsNode(item_id)
         else:
             if not comments_node:
-                raise exceptions.DataError(u"if comments_node is present, it must not be empty")
+                raise exceptions.DataError(
+                    u"if comments_node is present, it must not be empty"
+                )
 
         try:
-            comments_service = jid.JID(mb_data['comments_service'])
+            comments_service = jid.JID(mb_data["comments_service"])
         except KeyError:
             comments_service = yield self.getCommentsService(client, service)
 
         try:
             yield self._p.createNode(client, comments_service, comments_node, options)
         except error.StanzaError as e:
-            if e.condition == 'conflict':
-                log.info(u"node {} already exists on service {}".format(comments_node, comments_service))
+            if e.condition == "conflict":
+                log.info(
+                    u"node {} already exists on service {}".format(
+                        comments_node, comments_service
+                    )
+                )
             else:
                 raise e
         else:
             if access == self._p.ACCESS_WHITELIST:
                 # for whitelist access we need to copy affiliations from parent item
-                comments_affiliations = yield self._p.getNodeAffiliations(client, service, node)
+                comments_affiliations = yield self._p.getNodeAffiliations(
+                    client, service, node
+                )
                 # …except for "member", that we transform to publisher
                 # because we wants members to be able to write to comments
                 for jid_, affiliation in comments_affiliations.items():
-                    if affiliation == 'member':
-                        comments_affiliations[jid_] == 'publisher'
+                    if affiliation == "member":
+                        comments_affiliations[jid_] == "publisher"
 
-                yield self._p.setNodeAffiliations(client, comments_service, comments_node, comments_affiliations)
+                yield self._p.setNodeAffiliations(
+                    client, comments_service, comments_node, comments_affiliations
+                )
 
         if comments_service is None:
             comments_service = client.jid.userhostJID()
 
-        if 'comments' in mb_data:
-            if not mb_data['comments']:
-                raise exceptions.DataError(u"if comments is present, it must not be empty")
-            if 'comments_node' in mb_data or 'comments_service' in mb_data:
-                raise exceptions.DataError(u"You can't use comments_service/comments_node and comments at the same time")
+        if "comments" in mb_data:
+            if not mb_data["comments"]:
+                raise exceptions.DataError(
+                    u"if comments is present, it must not be empty"
+                )
+            if "comments_node" in mb_data or "comments_service" in mb_data:
+                raise exceptions.DataError(
+                    u"You can't use comments_service/comments_node and comments at the same time"
+                )
         else:
-            mb_data['comments'] = self._p.getNodeURI(comments_service, comments_node)
+            mb_data["comments"] = self._p.getNodeURI(comments_service, comments_node)
 
     def _mbSend(self, service, node, data, profile_key):
         service = jid.JID(service) if service else None
@@ -604,7 +764,7 @@
         if node is None:
             node = NS_MICROBLOG
 
-        item_id = data.get('id') or unicode(shortuuid.uuid())
+        item_id = data.get("id") or unicode(shortuuid.uuid())
 
         try:
             yield self._manageComments(client, data, service, node, item_id, access=None)
@@ -618,11 +778,25 @@
 
     def _mbRetract(self, service_jid_s, nodeIdentifier, itemIdentifier, profile_key):
         """Call self._p._retractItem, but use default node if node is empty"""
-        return self._p._retractItem(service_jid_s, nodeIdentifier or NS_MICROBLOG, itemIdentifier, True, profile_key)
+        return self._p._retractItem(
+            service_jid_s,
+            nodeIdentifier or NS_MICROBLOG,
+            itemIdentifier,
+            True,
+            profile_key,
+        )
 
     ## get ##
 
-    def _mbGet(self, service='', node='', max_items=10, item_ids=None, extra_dict=None, profile_key=C.PROF_KEY_NONE):
+    def _mbGet(
+        self,
+        service="",
+        node="",
+        max_items=10,
+        item_ids=None,
+        extra_dict=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
         @param item_ids (list[unicode]): list of item IDs
@@ -631,11 +805,27 @@
         service = jid.JID(service) if service else None
         max_items = None if max_items == C.NO_LIMIT else max_items
         extra = self._p.parseExtra(extra_dict)
-        return self.mbGet(client, service, node or None, max_items, item_ids, extra.rsm_request, extra.extra)
-
+        return self.mbGet(
+            client,
+            service,
+            node or None,
+            max_items,
+            item_ids,
+            extra.rsm_request,
+            extra.extra,
+        )
 
     @defer.inlineCallbacks
-    def mbGet(self, client, service=None, node=None, max_items=10, item_ids=None, rsm_request=None, extra=None):
+    def mbGet(
+        self,
+        client,
+        service=None,
+        node=None,
+        max_items=10,
+        item_ids=None,
+        rsm_request=None,
+        extra=None,
+    ):
         """Get some microblogs
 
         @param service(jid.JID, None): jid of the publisher
@@ -650,7 +840,15 @@
         """
         if node is None:
             node = NS_MICROBLOG
-        items_data = yield self._p.getItems(client, service, node, max_items=max_items, item_ids=item_ids, rsm_request=rsm_request, extra=extra)
+        items_data = yield self._p.getItems(
+            client,
+            service,
+            node,
+            max_items=max_items,
+            item_ids=item_ids,
+            rsm_request=rsm_request,
+            extra=extra,
+        )
         serialised = yield self._p.serItemsDataD(items_data, self.item2mbdata)
         defer.returnValue(serialised)
 
@@ -663,13 +861,13 @@
         will return(JID(u'sat-pubsub.example.net'), 'urn:xmpp:comments:_af43b363-3259-4b2a-ba4c-1bc33aa87634__urn:xmpp:groupblog:somebody@example.net')
         @return (tuple[jid.JID, unicode]): service and node
         """
-        parsed_url = urlparse.urlparse(node_url, 'xmpp')
+        parsed_url = urlparse.urlparse(node_url, "xmpp")
         service = jid.JID(parsed_url.path)
-        parsed_queries = urlparse.parse_qs(parsed_url.query.encode('utf-8'))
-        node = parsed_queries.get('node', [''])[0].decode('utf-8')
+        parsed_queries = urlparse.parse_qs(parsed_url.query.encode("utf-8"))
+        node = parsed_queries.get("node", [""])[0].decode("utf-8")
 
         if not node:
-            raise failure.Failure(exceptions.DataError('Invalid comments link'))
+            raise failure.Failure(exceptions.DataError("Invalid comments link"))
 
         return (service, node)
 
@@ -682,35 +880,49 @@
         @param access: Node access model, according to xep-0060 #4.5
         @param profile_key: profile key
         """
-        # FIXME: check if this mehtod is need, deprecate it if not
+        #  FIXME: check if this mehtod is need, deprecate it if not
         client = self.host.getClient(profile_key)
 
-        _options = {self._p.OPT_ACCESS_MODEL: access, self._p.OPT_PERSIST_ITEMS: 1, self._p.OPT_MAX_ITEMS: -1, self._p.OPT_DELIVER_PAYLOADS: 1, self._p.OPT_SEND_ITEM_SUBSCRIBE: 1}
+        _options = {
+            self._p.OPT_ACCESS_MODEL: access,
+            self._p.OPT_PERSIST_ITEMS: 1,
+            self._p.OPT_MAX_ITEMS: -1,
+            self._p.OPT_DELIVER_PAYLOADS: 1,
+            self._p.OPT_SEND_ITEM_SUBSCRIBE: 1,
+        }
 
         def cb(result):
-            #Node is created with right permission
+            # Node is created with right permission
             log.debug(_(u"Microblog node has now access %s") % access)
 
         def fatal_err(s_error):
-            #Something went wrong
+            # Something went wrong
             log.error(_(u"Can't set microblog access"))
             raise NodeAccessChangeException()
 
         def err_cb(s_error):
-            #If the node already exists, the condition is "conflict",
-            #else we have an unmanaged error
-            if s_error.value.condition == 'conflict':
-                #d = self.host.plugins["XEP-0060"].deleteNode(client, client.jid.userhostJID(), NS_MICROBLOG)
-                #d.addCallback(lambda x: create_node().addCallback(cb).addErrback(fatal_err))
+            # If the node already exists, the condition is "conflict",
+            # else we have an unmanaged error
+            if s_error.value.condition == "conflict":
+                # d = self.host.plugins["XEP-0060"].deleteNode(client, client.jid.userhostJID(), NS_MICROBLOG)
+                # d.addCallback(lambda x: create_node().addCallback(cb).addErrback(fatal_err))
                 change_node_options().addCallback(cb).addErrback(fatal_err)
             else:
                 fatal_err(s_error)
 
         def create_node():
-            return self._p.createNode(client, client.jid.userhostJID(), NS_MICROBLOG, _options)
+            return self._p.createNode(
+                client, client.jid.userhostJID(), NS_MICROBLOG, _options
+            )
 
         def change_node_options():
-            return self._p.setOptions(client.jid.userhostJID(), NS_MICROBLOG, client.jid.userhostJID(), _options, profile_key=profile_key)
+            return self._p.setOptions(
+                client.jid.userhostJID(),
+                NS_MICROBLOG,
+                client.jid.userhostJID(),
+                _options,
+                profile_key=profile_key,
+            )
 
         create_node().addCallback(cb).addErrback(err_cb)
 
@@ -735,12 +947,17 @@
             jids_set = client.roster.getJidsSet(publishers_type, publishers)
             if publishers_type == C.ALL:
                 try:  # display messages from salut-a-toi@libervia.org or other PEP services
-                    services = self.host.plugins["EXTRA-PEP"].getFollowedEntities(profile_key)
+                    services = self.host.plugins["EXTRA-PEP"].getFollowedEntities(
+                        profile_key
+                    )
                 except KeyError:
                     pass  # plugin is not loaded
                 else:
                     if services:
-                        log.debug("Extra PEP followed entities: %s" % ", ".join([unicode(service) for service in services]))
+                        log.debug(
+                            "Extra PEP followed entities: %s"
+                            % ", ".join([unicode(service) for service in services])
+                        )
                         jids_set.update(services)
 
         node_data = []
@@ -757,7 +974,11 @@
         """
         if publishers_type == C.ALL:
             if publishers:
-                raise failure.Failure(ValueError("Can't use publishers with {} type".format(publishers_type)))
+                raise failure.Failure(
+                    ValueError(
+                        "Can't use publishers with {} type".format(publishers_type)
+                    )
+                )
             else:
                 publishers = None
         elif publishers_type == C.JID:
@@ -785,8 +1006,12 @@
         @param profile: %(doc_profile)s
         @return (str): session id
         """
-        client, node_data = self._getClientAndNodeData(publishers_type, publishers, profile_key)
-        return self._p.subscribeToMany(node_data, client.jid.userhostJID(), profile_key=profile_key)
+        client, node_data = self._getClientAndNodeData(
+            publishers_type, publishers, profile_key
+        )
+        return self._p.subscribeToMany(
+            node_data, client.jid.userhostJID(), profile_key=profile_key
+        )
 
     # get #
 
@@ -804,32 +1029,65 @@
                 - items_metadata(dict): metadata as returned by [mbGet]
         @param profile_key: %(doc_profile_key)s
         """
+
         def onSuccess(items_data):
             """convert items elements to list of microblog data in items_data"""
             d = self._p.serItemsDataD(items_data, self.item2mbdata)
-            d.addCallback(lambda serialised:('', serialised))
+            d.addCallback(lambda serialised: ("", serialised))
             return d
 
         profile = self.host.getClient(profile_key).profile
-        d = self._p.getRTResults(session_id,
-                                 on_success = onSuccess,
-                                 on_error = lambda failure: (unicode(failure.value), ([],{})),
-                                 profile = profile)
-        d.addCallback(lambda ret: (ret[0],
-                                   [(service.full(), node, failure, items, metadata)
-                                    for (service, node), (success, (failure, (items, metadata))) in ret[1].iteritems()]))
+        d = self._p.getRTResults(
+            session_id,
+            on_success=onSuccess,
+            on_error=lambda failure: (unicode(failure.value), ([], {})),
+            profile=profile,
+        )
+        d.addCallback(
+            lambda ret: (
+                ret[0],
+                [
+                    (service.full(), node, failure, items, metadata)
+                    for (service, node), (success, (failure, (items, metadata))) in ret[
+                        1
+                    ].iteritems()
+                ],
+            )
+        )
         return d
 
-    def _mbGetFromMany(self, publishers_type, publishers, max_items=10, extra_dict=None, profile_key=C.PROF_KEY_NONE):
+    def _mbGetFromMany(
+        self,
+        publishers_type,
+        publishers,
+        max_items=10,
+        extra_dict=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
         """
         max_items = None if max_items == C.NO_LIMIT else max_items
         publishers_type, publishers = self._checkPublishers(publishers_type, publishers)
         extra = self._p.parseExtra(extra_dict)
-        return self.mbGetFromMany(publishers_type, publishers, max_items, extra.rsm_request, extra.extra, profile_key)
+        return self.mbGetFromMany(
+            publishers_type,
+            publishers,
+            max_items,
+            extra.rsm_request,
+            extra.extra,
+            profile_key,
+        )
 
-    def mbGetFromMany(self, publishers_type, publishers, max_items=None, rsm_request=None, extra=None, profile_key=C.PROF_KEY_NONE):
+    def mbGetFromMany(
+        self,
+        publishers_type,
+        publishers,
+        max_items=None,
+        rsm_request=None,
+        extra=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """Get the published microblogs for a list of groups or jids
 
         @param publishers_type (str): type of the list of publishers (one of "GROUP" or "JID" or "ALL")
@@ -841,12 +1099,18 @@
         @return (str): RT Deferred session id
         """
         # XXX: extra is unused here so far
-        client, node_data = self._getClientAndNodeData(publishers_type, publishers, profile_key)
-        return self._p.getFromMany(node_data, max_items, rsm_request, profile_key=profile_key)
+        client, node_data = self._getClientAndNodeData(
+            publishers_type, publishers, profile_key
+        )
+        return self._p.getFromMany(
+            node_data, max_items, rsm_request, profile_key=profile_key
+        )
 
     # comments #
 
-    def _mbGetFromManyWithCommentsRTResult(self, session_id, profile_key=C.PROF_KEY_DEFAULT):
+    def _mbGetFromManyWithCommentsRTResult(
+        self, session_id, profile_key=C.PROF_KEY_DEFAULT
+    ):
         """Get real-time results for [mbGetFromManyWithComments] session
 
         @param session_id: id of the real-time deferred session
@@ -869,12 +1133,29 @@
         """
         profile = self.host.getClient(profile_key).profile
         d = self.rt_sessions.getResults(session_id, profile=profile)
-        d.addCallback(lambda ret: (ret[0],
-                                   [(service.full(), node, failure, items, metadata)
-                                    for (service, node), (success, (failure, (items, metadata))) in ret[1].iteritems()]))
+        d.addCallback(
+            lambda ret: (
+                ret[0],
+                [
+                    (service.full(), node, failure, items, metadata)
+                    for (service, node), (success, (failure, (items, metadata))) in ret[
+                        1
+                    ].iteritems()
+                ],
+            )
+        )
         return d
 
-    def _mbGetFromManyWithComments(self, publishers_type, publishers, max_items=10, max_comments=C.NO_LIMIT, extra_dict=None, extra_comments_dict=None, profile_key=C.PROF_KEY_NONE):
+    def _mbGetFromManyWithComments(
+        self,
+        publishers_type,
+        publishers,
+        max_items=10,
+        max_comments=C.NO_LIMIT,
+        extra_dict=None,
+        extra_comments_dict=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
         @param max_comments(int): maximum number of comments to get, C.NO_LIMIT for no limit
@@ -884,14 +1165,30 @@
         publishers_type, publishers = self._checkPublishers(publishers_type, publishers)
         extra = self._p.parseExtra(extra_dict)
         extra_comments = self._p.parseExtra(extra_comments_dict)
-        return self.mbGetFromManyWithComments(publishers_type, publishers, max_items, max_comments or None,
-                                              extra.rsm_request,
-                                              extra.extra,
-                                              extra_comments.rsm_request,
-                                              extra_comments.extra,
-                                              profile_key)
+        return self.mbGetFromManyWithComments(
+            publishers_type,
+            publishers,
+            max_items,
+            max_comments or None,
+            extra.rsm_request,
+            extra.extra,
+            extra_comments.rsm_request,
+            extra_comments.extra,
+            profile_key,
+        )
 
-    def mbGetFromManyWithComments(self, publishers_type, publishers, max_items=None, max_comments=None, rsm_request=None, extra=None, rsm_comments=None, extra_comments=None, profile_key=C.PROF_KEY_NONE):
+    def mbGetFromManyWithComments(
+        self,
+        publishers_type,
+        publishers,
+        max_items=None,
+        max_comments=None,
+        rsm_request=None,
+        extra=None,
+        rsm_comments=None,
+        extra_comments=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         """Helper method to get the microblogs and their comments in one shot
 
         @param publishers_type (str): type of the list of publishers (one of "GROUP" or "JID" or "ALL")
@@ -909,7 +1206,9 @@
         #      to serialise and associate the data, but it make life in frontends side
         #      a lot easier
 
-        client, node_data = self._getClientAndNodeData(publishers_type, publishers, profile_key)
+        client, node_data = self._getClientAndNodeData(
+            publishers_type, publishers, profile_key
+        )
 
         def getComments(items_data):
             """Retrieve comments and add them to the items_data
@@ -919,29 +1218,50 @@
                 with a list of comments data (service, node, list of items, metadata)
             """
             items, metadata = items_data
-            items_dlist = [] # deferred list for items
+            items_dlist = []  # deferred list for items
             for item in items:
-                dlist = [] # deferred list for comments
+                dlist = []  # deferred list for comments
                 for key, value in item.iteritems():
                     # we look for comments
-                    if key.startswith('comments') and key.endswith('_service'):
-                        prefix = key[:key.find('_')]
+                    if key.startswith("comments") and key.endswith("_service"):
+                        prefix = key[: key.find("_")]
                         service_s = value
                         node = item["{}{}".format(prefix, "_node")]
                         # time to get the comments
-                        d = self._p.getItems(client, jid.JID(service_s), node, max_comments, rsm_request=rsm_comments, extra=extra_comments)
+                        d = self._p.getItems(
+                            client,
+                            jid.JID(service_s),
+                            node,
+                            max_comments,
+                            rsm_request=rsm_comments,
+                            extra=extra_comments,
+                        )
                         # then serialise
-                        d.addCallback(lambda items_data: self._p.serItemsDataD(items_data, self.item2mbdata))
+                        d.addCallback(
+                            lambda items_data: self._p.serItemsDataD(
+                                items_data, self.item2mbdata
+                            )
+                        )
                         # with failure handling
-                        d.addCallback(lambda serialised_items_data: ('',) + serialised_items_data)
+                        d.addCallback(
+                            lambda serialised_items_data: ("",) + serialised_items_data
+                        )
                         d.addErrback(lambda failure: (unicode(failure.value), [], {}))
                         # and associate with service/node (needed if there are several comments nodes)
-                        d.addCallback(lambda serialised, service_s=service_s, node=node: (service_s, node) + serialised)
+                        d.addCallback(
+                            lambda serialised, service_s=service_s, node=node: (
+                                service_s,
+                                node,
+                            )
+                            + serialised
+                        )
                         dlist.append(d)
                 # we get the comments
                 comments_d = defer.gatherResults(dlist)
                 # and add them to the item data
-                comments_d.addCallback(lambda comments_data, item=item: (item, comments_data))
+                comments_d.addCallback(
+                    lambda comments_data, item=item: (item, comments_data)
+                )
                 items_dlist.append(comments_d)
             # we gather the items + comments in a list
             items_d = defer.gatherResults(items_dlist)
@@ -951,11 +1271,15 @@
 
         deferreds = {}
         for service, node in node_data:
-            d = deferreds[(service, node)] = self._p.getItems(client, service, node, max_items, rsm_request=rsm_request, extra=extra)
-            d.addCallback(lambda items_data: self._p.serItemsDataD(items_data, self.item2mbdata))
+            d = deferreds[(service, node)] = self._p.getItems(
+                client, service, node, max_items, rsm_request=rsm_request, extra=extra
+            )
+            d.addCallback(
+                lambda items_data: self._p.serItemsDataD(items_data, self.item2mbdata)
+            )
             d.addCallback(getComments)
-            d.addCallback(lambda items_comments_data: ('', items_comments_data))
-            d.addErrback(lambda failure: (unicode(failure.value), ([],{})))
+            d.addCallback(lambda items_comments_data: ("", items_comments_data))
+            d.addErrback(lambda failure: (unicode(failure.value), ([], {})))
 
         return self.rt_sessions.newSession(deferreds, client.profile)
 
@@ -963,8 +1287,8 @@
 class XEP_0277_handler(XMPPHandler):
     implements(iwokkel.IDisco)
 
-    def getDiscoInfo(self, requestor, target, nodeIdentifier=''):
+    def getDiscoInfo(self, requestor, target, nodeIdentifier=""):
         return [disco.DiscoFeature(NS_MICROBLOG)]
 
-    def getDiscoItems(self, requestor, target, nodeIdentifier=''):
+    def getDiscoItems(self, requestor, target, nodeIdentifier=""):
         return []
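
One detail worth decoding from the addMethod registrations near the top of this diff: the in_sign/out_sign strings are D-Bus type signatures, describing the argument and return types the bridge exposes to frontends. A short reference sketch (the SIG_CODES name is illustrative, not part of the plugin):

    # Reference sketch, not part of the plugin: the signature codes used by the
    # addMethod calls in this file, in D-Bus notation.
    SIG_CODES = {
        "s": "string",
        "i": "32-bit signed integer",
        "u": "32-bit unsigned integer",
        "as": "array (list) of strings",
        "a{ss}": "dict mapping strings to strings",
        "(...)": "struct, i.e. a tuple of the enclosed types",
    }

    # Example: mbSend is registered with in_sign="ssa{ss}s", matching the Python
    # signature _mbSend(service, node, data, profile_key) -- two strings, one
    # string-to-string dict, then a final string; out_sign="" means no return value.

Read the same way, mbGet's out_sign="(aa{ss}a{ss})" declares a struct holding a list of string-to-string dicts (the items) and one string-to-string dict (the metadata), which is exactly what its docstring describes.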