diff sat/plugins/plugin_xep_0277.py @ 4037:524856bd7b19

massive refactoring to switch from camelCase to snake_case: historically, Libervia (formerly SàT) used camelCase, as PEP 8 allows when extending pre-PEP 8 code, in order to match the coding style of Twisted. However, snake_case is more readable and it is better to follow PEP 8 best practices, so the decision was made to move to full snake_case. Because Libervia has a huge codebase, the partial transition had left an ugly mix of camelCase and snake_case. To fix that, this patch does a big refactoring, renaming every function and method (including bridge methods) that does not come from Twisted or Wokkel so the code uses snake_case throughout. This is a massive change and may result in some bugs.
author Goffi <goffi@goffi.org>
date Sat, 08 Apr 2023 13:54:42 +0200
parents 78b5f356900c
children
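
For context, the mechanical part of such a rename can be reproduced with a small helper along these lines. This is a hypothetical sketch, not part of this changeset or of the Libervia tooling, and several renames in the patch also reorder words (e.g. mbSetAccess → mb_access_set, getFeatures → features_get), which no mechanical rule would produce:

import re

def camel_to_snake(name: str) -> str:
    """Convert a camelCase identifier to snake_case.

    Hypothetical helper, shown only to illustrate the naming convention
    applied by this patch; it is not the script that produced the changeset.
    """
    # insert "_" between a lowercase letter/digit and the following uppercase letter
    name = re.sub(r"(?<=[a-z0-9])(?=[A-Z])", "_", name)
    # split an uppercase run from a following capitalised word (e.g. "RTResult")
    name = re.sub(r"(?<=[A-Z])(?=[A-Z][a-z])", "_", name)
    return name.lower()

# renames visible in the diff below that are purely mechanical
assert camel_to_snake("registerNamespace") == "register_namespace"
assert camel_to_snake("mbSubscribeToMany") == "mb_subscribe_to_many"
assert camel_to_snake("mbGetFromManyRTResult") == "mb_get_from_many_rt_result"
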
--- a/sat/plugins/plugin_xep_0277.py	Fri Apr 07 15:18:39 2023 +0200
+++ b/sat/plugins/plugin_xep_0277.py	Sat Apr 08 13:54:42 2023 +0200
@@ -84,13 +84,13 @@
     def __init__(self, host):
         log.info(_("Microblogging plugin initialization"))
         self.host = host
-        host.registerNamespace("microblog", NS_MICROBLOG)
+        host.register_namespace("microblog", NS_MICROBLOG)
         self._p = self.host.plugins[
             "XEP-0060"
         ]  # this facilitate the access to pubsub plugin
         ps_cache = self.host.plugins.get("PUBSUB_CACHE")
         if ps_cache is not None:
-            ps_cache.registerAnalyser(
+            ps_cache.register_analyser(
                 {
                     "name": "XEP-0277",
                     "node": NS_MICROBLOG,
@@ -98,119 +98,119 @@
                     "type": "blog",
                     "to_sync": True,
                     "parser": self.item_2_mb_data,
-                    "match_cb": self._cacheNodeMatchCb,
+                    "match_cb": self._cache_node_match_cb,
                 }
             )
         self.rt_sessions = sat_defer.RTDeferredSessions()
-        self.host.plugins["XEP-0060"].addManagedNode(
-            NS_MICROBLOG, items_cb=self._itemsReceived
+        self.host.plugins["XEP-0060"].add_managed_node(
+            NS_MICROBLOG, items_cb=self._items_received
         )
 
-        host.bridge.addMethod(
-            "mbSend",
+        host.bridge.add_method(
+            "mb_send",
             ".plugin",
             in_sign="ssss",
             out_sign="s",
-            method=self._mbSend,
+            method=self._mb_send,
             async_=True,
         )
-        host.bridge.addMethod(
-            "mbRepeat",
+        host.bridge.add_method(
+            "mb_repeat",
             ".plugin",
             in_sign="sssss",
             out_sign="s",
-            method=self._mbRepeat,
+            method=self._mb_repeat,
             async_=True,
         )
-        host.bridge.addMethod(
-            "mbPreview",
+        host.bridge.add_method(
+            "mb_preview",
             ".plugin",
             in_sign="ssss",
             out_sign="s",
-            method=self._mbPreview,
+            method=self._mb_preview,
             async_=True,
         )
-        host.bridge.addMethod(
-            "mbRetract",
+        host.bridge.add_method(
+            "mb_retract",
             ".plugin",
             in_sign="ssss",
             out_sign="",
-            method=self._mbRetract,
+            method=self._mb_retract,
             async_=True,
         )
-        host.bridge.addMethod(
-            "mbGet",
+        host.bridge.add_method(
+            "mb_get",
             ".plugin",
             in_sign="ssiasss",
             out_sign="s",
-            method=self._mbGet,
+            method=self._mb_get,
             async_=True,
         )
-        host.bridge.addMethod(
-            "mbRename",
+        host.bridge.add_method(
+            "mb_rename",
             ".plugin",
             in_sign="sssss",
             out_sign="",
-            method=self._mbRename,
+            method=self._mb_rename,
             async_=True,
         )
-        host.bridge.addMethod(
-            "mbSetAccess",
+        host.bridge.add_method(
+            "mb_access_set",
             ".plugin",
             in_sign="ss",
             out_sign="",
-            method=self.mbSetAccess,
+            method=self.mb_access_set,
             async_=True,
         )
-        host.bridge.addMethod(
-            "mbSubscribeToMany",
+        host.bridge.add_method(
+            "mb_subscribe_to_many",
             ".plugin",
             in_sign="sass",
             out_sign="s",
-            method=self._mbSubscribeToMany,
+            method=self._mb_subscribe_to_many,
         )
-        host.bridge.addMethod(
-            "mbGetFromManyRTResult",
+        host.bridge.add_method(
+            "mb_get_from_many_rt_result",
             ".plugin",
             in_sign="ss",
             out_sign="(ua(sssasa{ss}))",
-            method=self._mbGetFromManyRTResult,
+            method=self._mb_get_from_many_rt_result,
             async_=True,
         )
-        host.bridge.addMethod(
-            "mbGetFromMany",
+        host.bridge.add_method(
+            "mb_get_from_many",
             ".plugin",
             in_sign="sasia{ss}s",
             out_sign="s",
-            method=self._mbGetFromMany,
+            method=self._mb_get_from_many,
         )
-        host.bridge.addMethod(
-            "mbGetFromManyWithCommentsRTResult",
+        host.bridge.add_method(
+            "mb_get_from_many_with_comments_rt_result",
             ".plugin",
             in_sign="ss",
             out_sign="(ua(sssa(sa(sssasa{ss}))a{ss}))",
-            method=self._mbGetFromManyWithCommentsRTResult,
+            method=self._mb_get_from_many_with_comments_rt_result,
             async_=True,
         )
-        host.bridge.addMethod(
-            "mbGetFromManyWithComments",
+        host.bridge.add_method(
+            "mb_get_from_many_with_comments",
             ".plugin",
             in_sign="sasiia{ss}a{ss}s",
             out_sign="s",
-            method=self._mbGetFromManyWithComments,
+            method=self._mb_get_from_many_with_comments,
         )
-        host.bridge.addMethod(
-            "mbIsCommentNode",
+        host.bridge.add_method(
+            "mb_is_comment_node",
             ".plugin",
             in_sign="s",
             out_sign="b",
-            method=self.isCommentNode,
+            method=self.is_comment_node,
         )
 
-    def getHandler(self, client):
+    def get_handler(self, client):
         return XEP_0277_handler()
 
-    def _cacheNodeMatchCb(
+    def _cache_node_match_cb(
         self,
         client: SatXMPPEntity,
         analyse: dict,
@@ -219,25 +219,25 @@
         if analyse["node"].startswith(NS_COMMENT_PREFIX):
             analyse["subtype"] = "comment"
 
-    def _checkFeaturesCb(self, available):
+    def _check_features_cb(self, available):
         return {"available": C.BOOL_TRUE}
 
-    def _checkFeaturesEb(self, fail):
+    def _check_features_eb(self, fail):
         return {"available": C.BOOL_FALSE}
 
-    def getFeatures(self, profile):
-        client = self.host.getClient(profile)
-        d = self.host.checkFeatures(client, [], identity=("pubsub", "pep"))
-        d.addCallbacks(self._checkFeaturesCb, self._checkFeaturesEb)
+    def features_get(self, profile):
+        client = self.host.get_client(profile)
+        d = self.host.check_features(client, [], identity=("pubsub", "pep"))
+        d.addCallbacks(self._check_features_cb, self._check_features_eb)
         return d
 
     ## plugin management methods ##
 
-    def _itemsReceived(self, client, itemsEvent):
+    def _items_received(self, client, itemsEvent):
         """Callback which manage items notifications (publish + retract)"""
 
-        def manageItem(data, event):
-            self.host.bridge.psEvent(
+        def manage_item(data, event):
+            self.host.bridge.ps_event(
                 C.PS_MICROBLOG,
                 itemsEvent.sender.full(),
                 itemsEvent.nodeIdentifier,
@@ -250,10 +250,10 @@
             if item.name == C.PS_ITEM:
                 # FIXME: service and node should be used here
                 self.item_2_mb_data(client, item, None, None).addCallbacks(
-                    manageItem, lambda failure: None, (C.PS_PUBLISH,)
+                    manage_item, lambda failure: None, (C.PS_PUBLISH,)
                 )
             elif item.name == C.PS_RETRACT:
-                manageItem({"id": item["id"]}, C.PS_RETRACT)
+                manage_item({"id": item["id"]}, C.PS_RETRACT)
             else:
                 raise exceptions.InternalError("Invalid event value")
 
@@ -334,7 +334,7 @@
                     )
                 key = check_conflict("{}_xhtml".format(elem.name))
                 data = data_elt.toXml()
-                microblog_data[key] = yield self.host.plugins["TEXT_SYNTAXES"].cleanXHTML(
+                microblog_data[key] = yield self.host.plugins["TEXT_SYNTAXES"].clean_xhtml(
                     data
                 )
             else:
@@ -360,7 +360,7 @@
         # FIXME: node should alway be set in the future, check FIXME in method signature
         if node is not None:
             microblog_data["node"] = node
-            microblog_data['uri'] = xmpp_uri.buildXMPPUri(
+            microblog_data['uri'] = xmpp_uri.build_xmpp_uri(
                 "pubsub",
                 path=service.full(),
                 node=node,
@@ -466,7 +466,7 @@
                     "uri": uri,
                 }
                 try:
-                    comment_service, comment_node = self.parseCommentUrl(uri)
+                    comment_service, comment_node = self.parse_comment_url(uri)
                 except Exception as e:
                     log.warning(f"Can't parse comments url: {e}")
                     continue
@@ -596,7 +596,7 @@
                 microblog_data["author_jid"] = publisher
                 microblog_data["author_jid_verified"] = True
             else:
-                iq_elt = xml_tools.findAncestor(item_elt, "iq", C.NS_STREAM)
+                iq_elt = xml_tools.find_ancestor(item_elt, "iq", C.NS_STREAM)
                 microblog_data["author_jid"] = iq_elt["from"]
                 microblog_data["author_jid_verified"] = False
 
@@ -644,7 +644,7 @@
                     if type_:
                         if type_ == "_rich":  # convert input from current syntax to XHTML
                             xml_content = await synt.convert(
-                                mb_data[attr], synt.getCurrentSyntax(client.profile), "XHTML"
+                                mb_data[attr], synt.get_current_syntax(client.profile), "XHTML"
                             )
                             if f"{elem_name}_xhtml" in mb_data:
                                 raise failure.Failure(
@@ -724,7 +724,7 @@
                     log.warning(f"non HTTP URL in attachment, ignoring: {attachment}")
                     continue
                 link_elt = entry_elt.addElement("link")
-                # XXX: "uri" is set in self._manageComments if not already existing
+                # XXX: "uri" is set in self._manage_comments if not already existing
                 link_elt["href"] = url
                 if attachment.get("external", False):
                     # this is a link to an external data such as a website
@@ -779,7 +779,7 @@
         ## id ##
         entry_id = mb_data.get(
             "id",
-            xmpp_uri.buildXMPPUri(
+            xmpp_uri.build_xmpp_uri(
                 "pubsub",
                 path=service.full() if service is not None else client.jid.userhost(),
                 node=node,
@@ -791,7 +791,7 @@
         ## comments ##
         for comments_data in mb_data.get('comments', []):
             link_elt = entry_elt.addElement("link")
-            # XXX: "uri" is set in self._manageComments if not already existing
+            # XXX: "uri" is set in self._manage_comments if not already existing
             link_elt["href"] = comments_data["uri"]
             link_elt["rel"] = "replies"
             link_elt["title"] = "comments"
@@ -820,20 +820,20 @@
 
     ## publish/preview ##
 
-    def isCommentNode(self, node: str) -> bool:
+    def is_comment_node(self, node: str) -> bool:
         """Indicate if the node is prefixed with comments namespace"""
         return node.startswith(NS_COMMENT_PREFIX)
 
-    def getParentItem(self, item_id: str) -> str:
+    def get_parent_item(self, item_id: str) -> str:
         """Return parent of a comment node
 
         @param item_id: a comment node
         """
-        if not self.isCommentNode(item_id):
+        if not self.is_comment_node(item_id):
             raise ValueError("This node is not a comment node")
         return item_id[len(NS_COMMENT_PREFIX):]
 
-    def getCommentsNode(self, item_id):
+    def get_comments_node(self, item_id):
         """Generate comment node
 
         @param item_id(unicode): id of the parent item
@@ -841,7 +841,7 @@
         """
         return f"{NS_COMMENT_PREFIX}{item_id}"
 
-    def getCommentsService(self, client, parent_service=None):
+    def get_comments_service(self, client, parent_service=None):
         """Get prefered PubSub service to create comment node
 
         @param pubsub_service(jid.JID, None): PubSub service of the parent item
@@ -855,7 +855,7 @@
                     pass
                 else:
                     # other server, let's try to find a non PEP service there
-                    d = self.host.findServiceEntity(
+                    d = self.host.find_service_entity(
                         client, "pubsub", "service", parent_service
                     )
                     d.addCallback(lambda entity: entity or parent_service)
@@ -867,7 +867,7 @@
             client.pubsub_service if client.pubsub_service is not None else parent_service
         )
 
-    async def _manageComments(self, client, mb_data, service, node, item_id, access=None):
+    async def _manage_comments(self, client, mb_data, service, node, item_id, access=None):
         """Check comments keys in mb_data and create comments node if necessary
 
         if a comments node metadata is set in the mb_data['comments'] list, it is used
@@ -931,7 +931,7 @@
                 comments_service = None
 
             if uri:
-                uri_service, uri_node = self.parseCommentUrl(uri)
+                uri_service, uri_node = self.parse_comment_url(uri)
                 if ((comments_node is not None and comments_node!=uri_node)
                      or (comments_service is not None and comments_service!=uri_service)):
                     raise ValueError(
@@ -941,15 +941,15 @@
                 comments_data['node'] = comments_node = uri_node
             else:
                 if not comments_node:
-                    comments_node = self.getCommentsNode(item_id)
+                    comments_node = self.get_comments_node(item_id)
                 comments_data['node'] = comments_node
                 if comments_service is None:
-                    comments_service = await self.getCommentsService(client, service)
+                    comments_service = await self.get_comments_service(client, service)
                     if comments_service is None:
                         comments_service = client.jid.userhostJID()
                 comments_data['service'] = comments_service
 
-                comments_data['uri'] = xmpp_uri.buildXMPPUri(
+                comments_data['uri'] = xmpp_uri.build_xmpp_uri(
                     "pubsub",
                     path=comments_service.full(),
                     node=comments_node,
@@ -969,7 +969,7 @@
             else:
                 if access == self._p.ACCESS_WHITELIST:
                     # for whitelist access we need to copy affiliations from parent item
-                    comments_affiliations = await self._p.getNodeAffiliations(
+                    comments_affiliations = await self._p.get_node_affiliations(
                         client, service, node
                     )
                     # …except for "member", that we transform to publisher
@@ -978,14 +978,14 @@
                         if affiliation == "member":
                             comments_affiliations[jid_] == "publisher"
 
-                    await self._p.setNodeAffiliations(
+                    await self._p.set_node_affiliations(
                         client, comments_service, comments_node, comments_affiliations
                     )
 
-    def friendlyId(self, data):
+    def friendly_id(self, data):
         """Generate a user friendly id from title or content"""
         # TODO: rich content should be converted to plain text
-        id_base = regex.urlFriendlyText(
+        id_base = regex.url_friendly_text(
             data.get('title')
             or data.get('title_rich')
             or data.get('content')
@@ -994,10 +994,10 @@
         )
         return f"{id_base}-{token_urlsafe(3)}"
 
-    def _mbSend(self, service, node, data, profile_key):
+    def _mb_send(self, service, node, data, profile_key):
         service = jid.JID(service) if service else None
         node = node if node else NS_MICROBLOG
-        client = self.host.getClient(profile_key)
+        client = self.host.get_client(profile_key)
         data = data_format.deserialise(data)
         return defer.ensureDeferred(self.send(client, data, service, node))
 
@@ -1028,17 +1028,17 @@
         item_id = data.get("id")
         if item_id is None:
             if data.get("user_friendly_id", True):
-                item_id = self.friendlyId(data)
+                item_id = self.friendly_id(data)
             else:
                 item_id = str(shortuuid.uuid())
 
         try:
-            await self._manageComments(client, data, service, node, item_id, access=None)
+            await self._manage_comments(client, data, service, node, item_id, access=None)
         except error.StanzaError:
             log.warning("Can't create comments node for item {}".format(item_id))
         item = await self.mb_data_2_entry_elt(client, data, item_id, service, node)
 
-        if not await self.host.trigger.asyncPoint(
+        if not await self.host.trigger.async_point(
             "XEP-0277_send", client, service, node, item, data
         ):
             return None
@@ -1052,7 +1052,7 @@
         await self._p.publish(client, service, node, [item], extra=extra)
         return item_id
 
-    def _mbRepeat(
+    def _mb_repeat(
             self,
             service_s: str,
             node: str,
@@ -1062,7 +1062,7 @@
     ) -> defer.Deferred:
         service = jid.JID(service_s) if service_s else None
         node = node if node else NS_MICROBLOG
-        client = self.host.getClient(profile_key)
+        client = self.host.get_client(profile_key)
         extra = data_format.deserialise(extra_s)
         d = defer.ensureDeferred(
             self.repeat(client, item, service, node, extra)
@@ -1088,7 +1088,7 @@
             service = client.jid.userhostJID()
 
         # we first get the post to repeat
-        items, __ = await self._p.getItems(
+        items, __ = await self._p.get_items(
             client,
             service,
             node,
@@ -1121,27 +1121,27 @@
             next(author_elt.elements(NS_ATOM, "uri"))
         except StopIteration:
             entry_elt.addElement(
-                "uri", content=xmpp_uri.buildXMPPUri(None, path=service.full())
+                "uri", content=xmpp_uri.build_xmpp_uri(None, path=service.full())
             )
 
         # we add the link indicating that it's a repeated post
         link_elt = entry_elt.addElement("link")
         link_elt["rel"] = "via"
-        link_elt["href"] = xmpp_uri.buildXMPPUri(
+        link_elt["href"] = xmpp_uri.build_xmpp_uri(
             "pubsub", path=service.full(), node=node, item=item
         )
 
-        return await self._p.sendItem(
+        return await self._p.send_item(
             client,
             client.jid.userhostJID(),
             NS_MICROBLOG,
             entry_elt
         )
 
-    def _mbPreview(self, service, node, data, profile_key):
+    def _mb_preview(self, service, node, data, profile_key):
         service = jid.JID(service) if service else None
         node = node if node else NS_MICROBLOG
-        client = self.host.getClient(profile_key)
+        client = self.host.get_client(profile_key)
         data = data_format.deserialise(data)
         d = defer.ensureDeferred(self.preview(client, data, service, node))
         d.addCallback(data_format.serialise)
@@ -1172,9 +1172,9 @@
 
     ## retract ##
 
-    def _mbRetract(self, service_jid_s, nodeIdentifier, itemIdentifier, profile_key):
-        """Call self._p._retractItem, but use default node if node is empty"""
-        return self._p._retractItem(
+    def _mb_retract(self, service_jid_s, nodeIdentifier, itemIdentifier, profile_key):
+        """Call self._p._retract_item, but use default node if node is empty"""
+        return self._p._retract_item(
             service_jid_s,
             nodeIdentifier or NS_MICROBLOG,
             itemIdentifier,
@@ -1184,29 +1184,29 @@
 
     ## get ##
 
-    def _mbGetSerialise(self, data):
+    def _mb_get_serialise(self, data):
         items, metadata = data
         metadata['items'] = items
         return data_format.serialise(metadata)
 
-    def _mbGet(self, service="", node="", max_items=10, item_ids=None, extra="",
+    def _mb_get(self, service="", node="", max_items=10, item_ids=None, extra="",
                profile_key=C.PROF_KEY_NONE):
         """
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
         @param item_ids (list[unicode]): list of item IDs
         """
-        client = self.host.getClient(profile_key)
+        client = self.host.get_client(profile_key)
         service = jid.JID(service) if service else None
         max_items = None if max_items == C.NO_LIMIT else max_items
-        extra = self._p.parseExtra(data_format.deserialise(extra))
+        extra = self._p.parse_extra(data_format.deserialise(extra))
         d = defer.ensureDeferred(
-            self.mbGet(client, service, node or None, max_items, item_ids,
+            self.mb_get(client, service, node or None, max_items, item_ids,
                        extra.rsm_request, extra.extra)
         )
-        d.addCallback(self._mbGetSerialise)
+        d.addCallback(self._mb_get_serialise)
         return d
 
-    async def mbGet(
+    async def mb_get(
         self,
         client: SatXMPPEntity,
         service: Optional[jid.JID] = None,
@@ -1233,7 +1233,7 @@
             node = NS_MICROBLOG
         if rsm_request:
             max_items = None
-        items_data = await self._p.getItems(
+        items_data = await self._p.get_items(
             client,
             service,
             node,
@@ -1242,7 +1242,7 @@
             rsm_request=rsm_request,
             extra=extra,
         )
-        mb_data_list, metadata = await self._p.transItemsDataD(
+        mb_data_list, metadata = await self._p.trans_items_data_d(
             items_data, partial(self.item_2_mb_data, client, service=service, node=node))
         encrypted = metadata.pop("encrypted", None)
         if encrypted is not None:
@@ -1253,16 +1253,16 @@
                     pass
         return (mb_data_list, metadata)
 
-    def _mbRename(self, service, node, item_id, new_id, profile_key):
-        return defer.ensureDeferred(self.mbRename(
-            self.host.getClient(profile_key),
+    def _mb_rename(self, service, node, item_id, new_id, profile_key):
+        return defer.ensureDeferred(self.mb_rename(
+            self.host.get_client(profile_key),
             jid.JID(service) if service else None,
             node or None,
             item_id,
             new_id
         ))
 
-    async def mbRename(
+    async def mb_rename(
         self,
         client: SatXMPPEntity,
         service: Optional[jid.JID],
@@ -1272,9 +1272,9 @@
     ) -> None:
         if not node:
             node = NS_MICROBLOG
-        await self._p.renameItem(client, service, node, item_id, new_id)
+        await self._p.rename_item(client, service, node, item_id, new_id)
 
-    def parseCommentUrl(self, node_url):
+    def parse_comment_url(self, node_url):
         """Parse a XMPP URI
 
         Determine the fields comments_service and comments_node of a microblog data
@@ -1284,7 +1284,7 @@
         @return (tuple[jid.JID, unicode]): service and node
         """
         try:
-            parsed_url = xmpp_uri.parseXMPPUri(node_url)
+            parsed_url = xmpp_uri.parse_xmpp_uri(node_url)
             service = jid.JID(parsed_url["path"])
             node = parsed_url["node"]
         except Exception as e:
@@ -1294,7 +1294,7 @@
 
     ## configure ##
 
-    def mbSetAccess(self, access="presence", profile_key=C.PROF_KEY_NONE):
+    def mb_access_set(self, access="presence", profile_key=C.PROF_KEY_NONE):
         """Create a microblog node on PEP with given access
 
         If the node already exists, it change options
@@ -1302,7 +1302,7 @@
         @param profile_key: profile key
         """
         #  FIXME: check if this mehtod is need, deprecate it if not
-        client = self.host.getClient(profile_key)
+        client = self.host.get_client(profile_key)
 
         _options = {
             self._p.OPT_ACCESS_MODEL: access,
@@ -1351,7 +1351,7 @@
 
     # common
 
-    def _getClientAndNodeData(self, publishers_type, publishers, profile_key):
+    def _get_client_and_node_data(self, publishers_type, publishers, profile_key):
         """Helper method to construct node_data from publishers_type/publishers
 
         @param publishers_type: type of the list of publishers, one of:
@@ -1362,15 +1362,15 @@
             list of groups or list of jids)
         @param profile_key: %(doc_profile_key)s
         """
-        client = self.host.getClient(profile_key)
+        client = self.host.get_client(profile_key)
         if publishers_type == C.JID:
             jids_set = set(publishers)
         else:
-            jids_set = client.roster.getJidsSet(publishers_type, publishers)
+            jids_set = client.roster.get_jids_set(publishers_type, publishers)
             if publishers_type == C.ALL:
                 try:
                     # display messages from salut-a-toi@libervia.org or other PEP services
-                    services = self.host.plugins["EXTRA-PEP"].getFollowedEntities(
+                    services = self.host.plugins["EXTRA-PEP"].get_followed_entities(
                         profile_key
                     )
                 except KeyError:
@@ -1388,7 +1388,7 @@
             node_data.append((jid_, NS_MICROBLOG))
         return client, node_data
 
-    def _checkPublishers(self, publishers_type, publishers):
+    def _check_publishers(self, publishers_type, publishers):
         """Helper method to deserialise publishers coming from bridge
 
         publishers_type(unicode): type of the list of publishers, one of:
@@ -1410,15 +1410,15 @@
 
     # subscribe #
 
-    def _mbSubscribeToMany(self, publishers_type, publishers, profile_key):
+    def _mb_subscribe_to_many(self, publishers_type, publishers, profile_key):
         """
 
         @return (str): session id: Use pubsub.getSubscribeRTResult to get the results
         """
-        publishers_type, publishers = self._checkPublishers(publishers_type, publishers)
-        return self.mbSubscribeToMany(publishers_type, publishers, profile_key)
+        publishers_type, publishers = self._check_publishers(publishers_type, publishers)
+        return self.mb_subscribe_to_many(publishers_type, publishers, profile_key)
 
-    def mbSubscribeToMany(self, publishers_type, publishers, profile_key):
+    def mb_subscribe_to_many(self, publishers_type, publishers, profile_key):
         """Subscribe microblogs for a list of groups or jids
 
         @param publishers_type: type of the list of publishers, one of:
@@ -1430,17 +1430,17 @@
         @param profile: %(doc_profile)s
         @return (str): session id
         """
-        client, node_data = self._getClientAndNodeData(
+        client, node_data = self._get_client_and_node_data(
             publishers_type, publishers, profile_key
         )
-        return self._p.subscribeToMany(
+        return self._p.subscribe_to_many(
             node_data, client.jid.userhostJID(), profile_key=profile_key
         )
 
     # get #
 
-    def _mbGetFromManyRTResult(self, session_id, profile_key=C.PROF_KEY_DEFAULT):
-        """Get real-time results for mbGetFromMany session
+    def _mb_get_from_many_rt_result(self, session_id, profile_key=C.PROF_KEY_DEFAULT):
+        """Get real-time results for mb_get_from_many session
 
         @param session_id: id of the real-time deferred session
         @param return (tuple): (remaining, results) where:
@@ -1449,16 +1449,16 @@
                 - service (unicode): pubsub service
                 - node (unicode): pubsub node
                 - failure (unicode): empty string in case of success, error message else
-                - items_data(list): data as returned by [mbGet]
-                - items_metadata(dict): metadata as returned by [mbGet]
+                - items_data(list): data as returned by [mb_get]
+                - items_metadata(dict): metadata as returned by [mb_get]
         @param profile_key: %(doc_profile_key)s
         """
 
-        client = self.host.getClient(profile_key)
+        client = self.host.get_client(profile_key)
 
         def onSuccess(items_data):
             """convert items elements to list of microblog data in items_data"""
-            d = self._p.transItemsDataD(
+            d = self._p.trans_items_data_d(
                 items_data,
                 # FIXME: service and node should be used here
                 partial(self.item_2_mb_data, client),
@@ -1467,7 +1467,7 @@
             d.addCallback(lambda serialised: ("", serialised))
             return d
 
-        d = self._p.getRTResults(
+        d = self._p.get_rt_results(
             session_id,
             on_success=onSuccess,
             on_error=lambda failure: (str(failure.value), ([], {})),
@@ -1486,15 +1486,15 @@
         )
         return d
 
-    def _mbGetFromMany(self, publishers_type, publishers, max_items=10, extra_dict=None,
+    def _mb_get_from_many(self, publishers_type, publishers, max_items=10, extra_dict=None,
                        profile_key=C.PROF_KEY_NONE):
         """
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
         """
         max_items = None if max_items == C.NO_LIMIT else max_items
-        publishers_type, publishers = self._checkPublishers(publishers_type, publishers)
-        extra = self._p.parseExtra(extra_dict)
-        return self.mbGetFromMany(
+        publishers_type, publishers = self._check_publishers(publishers_type, publishers)
+        extra = self._p.parse_extra(extra_dict)
+        return self.mb_get_from_many(
             publishers_type,
             publishers,
             max_items,
@@ -1503,7 +1503,7 @@
             profile_key,
         )
 
-    def mbGetFromMany(self, publishers_type, publishers, max_items=None, rsm_request=None,
+    def mb_get_from_many(self, publishers_type, publishers, max_items=None, rsm_request=None,
                       extra=None, profile_key=C.PROF_KEY_NONE):
         """Get the published microblogs for a list of groups or jids
 
@@ -1518,21 +1518,21 @@
         @return (str): RT Deferred session id
         """
         # XXX: extra is unused here so far
-        client, node_data = self._getClientAndNodeData(
+        client, node_data = self._get_client_and_node_data(
             publishers_type, publishers, profile_key
         )
-        return self._p.getFromMany(
+        return self._p.get_from_many(
             node_data, max_items, rsm_request, profile_key=profile_key
         )
 
     # comments #
 
-    def _mbGetFromManyWithCommentsRTResultSerialise(self, data):
+    def _mb_get_from_many_with_comments_rt_result_serialise(self, data):
         """Serialisation of result
 
         This is probably the longest method name of whole SàT ecosystem ^^
         @param data(dict): data as received by rt_sessions
-        @return (tuple): see [_mbGetFromManyWithCommentsRTResult]
+        @return (tuple): see [_mb_get_from_many_with_comments_rt_result]
         """
         ret = []
         data_iter = iter(data[1].items())
@@ -1550,9 +1550,9 @@
 
         return data[0], ret
 
-    def _mbGetFromManyWithCommentsRTResult(self, session_id,
+    def _mb_get_from_many_with_comments_rt_result(self, session_id,
                                            profile_key=C.PROF_KEY_DEFAULT):
-        """Get real-time results for [mbGetFromManyWithComments] session
+        """Get real-time results for [mb_get_from_many_with_comments] session
 
         @param session_id: id of the real-time deferred session
         @param return (tuple): (remaining, results) where:
@@ -1572,12 +1572,12 @@
                 - metadata(dict): original node metadata
         @param profile_key: %(doc_profile_key)s
         """
-        profile = self.host.getClient(profile_key).profile
-        d = self.rt_sessions.getResults(session_id, profile=profile)
-        d.addCallback(self._mbGetFromManyWithCommentsRTResultSerialise)
+        profile = self.host.get_client(profile_key).profile
+        d = self.rt_sessions.get_results(session_id, profile=profile)
+        d.addCallback(self._mb_get_from_many_with_comments_rt_result_serialise)
         return d
 
-    def _mbGetFromManyWithComments(self, publishers_type, publishers, max_items=10,
+    def _mb_get_from_many_with_comments(self, publishers_type, publishers, max_items=10,
                                    max_comments=C.NO_LIMIT, extra_dict=None,
                                    extra_comments_dict=None, profile_key=C.PROF_KEY_NONE):
         """
@@ -1587,10 +1587,10 @@
         """
         max_items = None if max_items == C.NO_LIMIT else max_items
         max_comments = None if max_comments == C.NO_LIMIT else max_comments
-        publishers_type, publishers = self._checkPublishers(publishers_type, publishers)
-        extra = self._p.parseExtra(extra_dict)
-        extra_comments = self._p.parseExtra(extra_comments_dict)
-        return self.mbGetFromManyWithComments(
+        publishers_type, publishers = self._check_publishers(publishers_type, publishers)
+        extra = self._p.parse_extra(extra_dict)
+        extra_comments = self._p.parse_extra(extra_comments_dict)
+        return self.mb_get_from_many_with_comments(
             publishers_type,
             publishers,
             max_items,
@@ -1602,7 +1602,7 @@
             profile_key,
         )
 
-    def mbGetFromManyWithComments(self, publishers_type, publishers, max_items=None,
+    def mb_get_from_many_with_comments(self, publishers_type, publishers, max_items=None,
                                   max_comments=None, rsm_request=None, extra=None,
                                   rsm_comments=None, extra_comments=None,
                                   profile_key=C.PROF_KEY_NONE):
@@ -1625,11 +1625,11 @@
         #      to serialise and associate the data, but it make life in frontends side
         #      a lot easier
 
-        client, node_data = self._getClientAndNodeData(
+        client, node_data = self._get_client_and_node_data(
             publishers_type, publishers, profile_key
         )
 
-        def getComments(items_data):
+        def get_comments(items_data):
             """Retrieve comments and add them to the items_data
 
             @param items_data: serialised items data
@@ -1649,7 +1649,7 @@
                         node = item["{}{}".format(prefix, "_node")]
                         # time to get the comments
                         d = defer.ensureDeferred(
-                            self._p.getItems(
+                            self._p.get_items(
                                 client,
                                 service,
                                 node,
@@ -1660,7 +1660,7 @@
                         )
                         # then serialise
                         d.addCallback(
-                            lambda items_data: self._p.transItemsDataD(
+                            lambda items_data: self._p.trans_items_data_d(
                                 items_data,
                                 partial(
                                     self.item_2_mb_data, client, service=service, node=node
@@ -1698,20 +1698,20 @@
 
         deferreds = {}
         for service, node in node_data:
-            d = deferreds[(service, node)] = defer.ensureDeferred(self._p.getItems(
+            d = deferreds[(service, node)] = defer.ensureDeferred(self._p.get_items(
                 client, service, node, max_items, rsm_request=rsm_request, extra=extra
             ))
             d.addCallback(
-                lambda items_data: self._p.transItemsDataD(
+                lambda items_data: self._p.trans_items_data_d(
                     items_data,
                     partial(self.item_2_mb_data, client, service=service, node=node),
                 )
             )
-            d.addCallback(getComments)
+            d.addCallback(get_comments)
             d.addCallback(lambda items_comments_data: ("", items_comments_data))
             d.addErrback(lambda failure: (str(failure.value), ([], {})))
 
-        return self.rt_sessions.newSession(deferreds, client.profile)
+        return self.rt_sessions.new_session(deferreds, client.profile)
 
 
 @implementer(iwokkel.IDisco)