changeset 2807:0b7ce5daee9b

plugin XEP-0277: blog items data are now entirely serialised before going to bridge: so far, and for historical reasons, blog items data were serialised as a unicode:unicode dict, which caused trouble for many types of values (timestamps, booleans, lists). This patch changes that by serialising each whole item before it goes to the bridge, and deserialising it on the way back, so complex data can be used easily in items. This impacts psEvent and the serItemsData* methods; the latter are renamed transItemsData* because they do not always serialise anymore (a new "serialise" argument specifies it). When editing a blog post in jp, metadata are now easier to manipulate, especially lists like tags.
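
A minimal sketch of the new round-trip, with illustrative values (it assumes data_format.serialise/deserialise are the plain JSON dump/load helpers used throughout this patch):

    from sat.tools.common import data_format

    # backend side: a whole item is serialised to a single string before the
    # bridge, so complex values (ints, bools, lists) survive the transport
    mb_data = {
        u"title": u"Hello",
        u"published": 1550937540,        # integer timestamp, no str() needed
        u"author_jid_verified": True,    # real boolean
        u"tags": [u"xmpp", u"pubsub"],   # plain list instead of tag#1, tag#2, ...
    }
    serialised = data_format.serialise(mb_data)

    # frontend side: deserialise once the data comes back from the bridge
    mb_data = data_format.deserialise(serialised)
    assert mb_data[u"tags"] == [u"xmpp", u"pubsub"]
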
author Goffi <goffi@goffi.org>
date Sat, 23 Feb 2019 18:59:00 +0100
parents 2400cad2dace
children 51c53fc4fc4a
files sat/plugins/plugin_exp_pubsub_schema.py sat/plugins/plugin_misc_groupblog.py sat/plugins/plugin_misc_merge_requests.py sat/plugins/plugin_xep_0060.py sat/plugins/plugin_xep_0163.py sat/plugins/plugin_xep_0277.py sat/tools/common/data_objects.py sat_frontends/jp/cmd_blog.py sat_frontends/quick_frontend/quick_app.py sat_frontends/quick_frontend/quick_blog.py
diffstat 10 files changed, 188 insertions(+), 184 deletions(-)
line diff
--- a/sat/plugins/plugin_exp_pubsub_schema.py	Wed Feb 20 19:42:35 2019 +0100
+++ b/sat/plugins/plugin_exp_pubsub_schema.py	Sat Feb 23 18:59:00 2019 +0100
@@ -251,7 +251,7 @@
             extra.extra,
             form_ns=form_ns or None,
         )
-        d.addCallback(self._p.serItemsData)
+        d.addCallback(self._p.transItemsData)
         return d
 
     @defer.inlineCallbacks
@@ -494,7 +494,7 @@
             form_ns=form_ns,
             filters=filters,
         )
-        d.addCallback(self._p.serItemsData)
+        d.addCallback(self._p.transItemsData)
         return d
 
     def prepareBridgeSet(self, service, node, schema, item_id, extra, profile_key):
--- a/sat/plugins/plugin_misc_groupblog.py	Wed Feb 20 19:42:35 2019 +0100
+++ b/sat/plugins/plugin_misc_groupblog.py	Sat Feb 23 18:59:00 2019 +0100
@@ -26,7 +26,6 @@
 from sat.core import exceptions
 from wokkel import disco, data_form, iwokkel
 from zope.interface import implements
-from sat.tools.common import data_format
 
 try:
     from twisted.words.protocols.xmlstream import XMPPHandler
@@ -107,11 +106,8 @@
             return
         access_model = config_form.get(self._p.OPT_ACCESS_MODEL, self._p.ACCESS_OPEN)
         if access_model == self._p.ACCESS_PUBLISHER_ROSTER:
-            data_format.iter2dict(
-                "group",
-                config_form.fields[self._p.OPT_ROSTER_GROUPS_ALLOWED].values,
-                microblog_data,
-            )
+            opt = self._p.OPT_ROSTER_GROUPS_ALLOWED
+            microblog_data['groups'] = config_form.fields[opt].values
 
     def _data2entryTrigger(self, client, mb_data, entry_elt, item_elt):
         """Build fine access permission if needed
@@ -119,7 +115,7 @@
         This trigger check if "group*" key are present,
         and create a fine item config to restrict view to these groups
         """
-        groups = list(data_format.dict2iter("group", mb_data))
+        groups = mb_data.get('groups', [])
         if not groups:
             return
         if not client.server_groupblog_available:
@@ -141,10 +137,7 @@
         """
         if "group" in mb_data:
             options[self._p.OPT_ACCESS_MODEL] = self._p.ACCESS_PUBLISHER_ROSTER
-            options[self._p.OPT_ROSTER_GROUPS_ALLOWED] = list(
-                data_format.dict2iter("group", mb_data)
-            )
-
+            options[self._p.OPT_ROSTER_GROUPS_ALLOWED] = mb_data['groups']
 
 class GroupBlog_handler(XMPPHandler):
     implements(iwokkel.IDisco)
--- a/sat/plugins/plugin_misc_merge_requests.py	Wed Feb 20 19:42:35 2019 +0100
+++ b/sat/plugins/plugin_misc_merge_requests.py	Sat Feb 23 18:59:00 2019 +0100
@@ -142,7 +142,7 @@
         d = self.get(client, service, node or None, max_items, item_ids, sub_id or None,
                      extra.rsm_request, extra.extra)
         d.addCallback(lambda (tickets, metadata, parsed_patches): (
-            self._p.serItemsData((tickets, metadata)) +
+            self._p.transItemsData((tickets, metadata)) +
             ([[{key: unicode(value) for key, value in p.iteritems()}
                 for p in patches] for patches in parsed_patches],)))
         return d
--- a/sat/plugins/plugin_xep_0060.py	Wed Feb 20 19:42:35 2019 +0100
+++ b/sat/plugins/plugin_xep_0060.py	Sat Feb 23 18:59:00 2019 +0100
@@ -24,6 +24,7 @@
 log = getLogger(__name__)
 from sat.core import exceptions
 from sat.tools import sat_defer
+from sat.tools.common import data_format
 
 from twisted.words.protocols.jabber import jid, error
 from twisted.internet import reactor, defer
@@ -280,7 +281,7 @@
 
         #  high level observer method
         host.bridge.addSignal(
-            "psEvent", ".plugin", signature="ssssa{ss}s"
+            "psEvent", ".plugin", signature="ssssss"
         )  # args: category, service(jid), node, type (C.PS_ITEMS, C.PS_DELETE), data, profile
 
         # low level observer method, used if service/node is in watching list (see psNodeWatch* methods)
@@ -568,7 +569,7 @@
             extra.rsm_request,
             extra.extra,
         )
-        d.addCallback(self.serItemsData)
+        d.addCallback(self.transItemsData)
         return d
 
     def getItems(self, client, service, node, max_items=None, item_ids=None, sub_id=None,
@@ -648,7 +649,7 @@
             )
 
         def doSubscribe(data):
-            self.subscribe(service, node, profile_key=client.profile).addErrback(
+            self.subscribe(client, service, node).addErrback(
                 subscribeEb, service, node
             )
             return data
@@ -1075,32 +1076,46 @@
     ):
         return self.rt_sessions.getResults(session_id, on_success, on_error, profile)
 
-    def serItemsData(self, items_data, item_cb=lambda item: item.toXml()):
-        """Helper method to serialise result from [getItems]
+    def transItemsData(self, items_data, item_cb=lambda item: item.toXml(),
+            serialise=False):
+        """Helper method to transform result from [getItems]
 
         the items_data must be a tuple(list[domish.Element], dict[unicode, unicode])
         as returned by [getItems]. metadata values are then casted to unicode and
-        each item is passed to items_cb
+        each item is passed to item_cb, then optionally serialised with
+            data_format.serialise.
         @param items_data(tuple): tuple returned by [getItems]
         @param item_cb(callable): method to transform each item
+        @param serialise(bool): if True, do a data_format.serialise
+            after applying item_cb
         @return (tuple): a serialised form ready to go throught bridge
         """
         items, metadata = items_data
+        if serialise:
+            items = [data_format.serialise(item_cb(item)) for item in items]
+        else:
+            items = [item_cb(item) for item in items]
+
         return (
-            [item_cb(item) for item in items],
+            items,
             {key: unicode(value) for key, value in metadata.iteritems()},
         )
 
-    def serItemsDataD(self, items_data, item_cb):
-        """Helper method to serialise result from [getItems], deferred version
+    def transItemsDataD(self, items_data, item_cb, serialise=False):
+        """Helper method to transform result from [getItems], deferred version
 
         the items_data must be a tuple(list[domish.Element], dict[unicode, unicode])
         as returned by [getItems]. metadata values are then casted to unicode and
-        each item is passed to items_cb
-        An errback is added to item_cb, and when it is fired the value is filtered from final items
+        each item is passed to item_cb, then optionally serialised with
+            data_format.serialise.
+        An errback is added to item_cb, and when it is fired the value is filtered out
+            of the final items
         @param items_data(tuple): tuple returned by [getItems]
         @param item_cb(callable): method to transform each item (must return a deferred)
-        @return (tuple): a deferred which fire a serialised form ready to go throught bridge
+        @param serialise(bool): if True, do a data_format.serialise
+            after applying item_cb
+        @return (tuple): a deferred which fires a serialised form ready to go through
+            the bridge
         """
         items, metadata = items_data
 
@@ -1111,9 +1126,14 @@
 
         d = defer.gatherResults([item_cb(item).addErrback(eb) for item in items])
 
-        def finishSerialisation(serialised_items):
+        def finishSerialisation(parsed_items):
+            if serialise:
+                items = [data_format.serialise(i) for i in parsed_items if i is not None]
+            else:
+                items = [i for i in parsed_items if i is not None]
+
             return (
-                [item for item in serialised_items if item is not None],
+                items,
                 {key: unicode(value) for key, value in metadata.iteritems()},
             )
 
@@ -1315,7 +1335,7 @@
         profile = self.host.getClient(profile_key).profile
         d = self.rt_sessions.getResults(
             session_id,
-            on_success=lambda result: ("", self.serItemsData(result)),
+            on_success=lambda result: ("", self.transItemsData(result)),
             on_error=lambda failure: (unicode(failure.value) or UNSPECIFIED, ([], {})),
             profile=profile,
         )
@@ -1348,14 +1368,8 @@
             profile_key,
         )
 
-    def getFromMany(
-        self,
-        node_data,
-        max_item=None,
-        rsm_request=None,
-        extra=None,
-        profile_key=C.PROF_KEY_NONE,
-    ):
+    def getFromMany(self, node_data, max_item=None, rsm_request=None, extra=None,
+                    profile_key=C.PROF_KEY_NONE):
         """Get items from many nodes at once
 
         @param node_data (iterable[tuple]): iterable of tuple (service, node) where:
--- a/sat/plugins/plugin_xep_0163.py	Wed Feb 20 19:42:35 2019 +0100
+++ b/sat/plugins/plugin_xep_0163.py	Sat Feb 23 18:59:00 2019 +0100
@@ -27,6 +27,7 @@
 
 from wokkel import disco, pubsub
 from wokkel.formats import Mood
+from sat.tools.common import data_format
 
 NS_USER_MOOD = "http://jabber.org/protocol/mood"
 
@@ -156,7 +157,7 @@
             itemsEvent.sender.full(),
             itemsEvent.nodeIdentifier,
             "MOOD",
-            {"mood": mood.value or "", "text": mood.text or ""},
+            data_format.serialise({"mood": mood.value or "", "text": mood.text or ""}),
             profile,
         )
 
--- a/sat/plugins/plugin_xep_0277.py	Wed Feb 20 19:42:35 2019 +0100
+++ b/sat/plugins/plugin_xep_0277.py	Sat Feb 23 18:59:00 2019 +0100
@@ -85,7 +85,7 @@
         host.bridge.addMethod(
             "mbSend",
             ".plugin",
-            in_sign="ssa{ss}s",
+            in_sign="ssss",
             out_sign="",
             method=self._mbSend,
             async=True,
@@ -102,7 +102,7 @@
             "mbGet",
             ".plugin",
             in_sign="ssiasa{ss}s",
-            out_sign="(aa{ss}a{ss})",
+            out_sign="(asa{ss})",
             method=self._mbGet,
             async=True,
         )
@@ -125,7 +125,7 @@
             "mbGetFromManyRTResult",
             ".plugin",
             in_sign="ss",
-            out_sign="(ua(sssaa{ss}a{ss}))",
+            out_sign="(ua(sssasa{ss}))",
             method=self._mbGetFromManyRTResult,
             async=True,
         )
@@ -140,7 +140,7 @@
             "mbGetFromManyWithCommentsRTResult",
             ".plugin",
             in_sign="ss",
-            out_sign="(ua(sssa(a{ss}a(sssaa{ss}a{ss}))a{ss}))",
+            out_sign="(ua(sssa(sa(sssasa{ss}))a{ss}))",
             method=self._mbGetFromManyWithCommentsRTResult,
             async=True,
         )
@@ -178,7 +178,7 @@
                 itemsEvent.sender.full(),
                 itemsEvent.nodeIdentifier,
                 event,
-                data,
+                data_format.serialise(data),
                 client.profile,
             )
 
@@ -196,7 +196,7 @@
 
     @defer.inlineCallbacks
     def item2mbdata(self, item_elt):
-        """Convert an XML Item to microblog data used in bridge API
+        """Convert an XML Item to microblog data
 
         @param item_elt: domish.Element of microblog item
         @return: microblog data (dictionary)
@@ -238,7 +238,8 @@
                 if data_elt is None:
                     raise failure.Failure(
                         exceptions.DataError(
-                            u"XHML content not wrapped in a <div/> element, this is not standard !"
+                            u"XHML content not wrapped in a <div/> element, this is not "
+                            u"standard !"
                         )
                     )
                 if data_elt.uri != C.NS_XHTML:
@@ -281,9 +282,8 @@
         try:
             id_elt = entry_elt.elements(NS_ATOM, "id").next()
         except StopIteration:
-            msg = u"No atom id found in the pubsub item {}, this is not standard !".format(
-                id_
-            )
+            msg = (u"No atom id found in the pubsub item {}, this is not standard !"
+                   .format(id_))
             log.warning(msg)
             microblog_data[u"atom_id"] = ""
         else:
@@ -343,18 +343,16 @@
         except StopIteration:
             msg = u"No atom updated element found in the pubsub item {}".format(id_)
             raise failure.Failure(exceptions.DataError(msg))
-        microblog_data[u"updated"] = unicode(
-            calendar.timegm(dateutil.parser.parse(unicode(updated_elt)).utctimetuple())
+        microblog_data[u"updated"] = calendar.timegm(
+            dateutil.parser.parse(unicode(updated_elt)).utctimetuple()
         )
         try:
             published_elt = entry_elt.elements(NS_ATOM, "published").next()
         except StopIteration:
             microblog_data[u"published"] = microblog_data[u"updated"]
         else:
-            microblog_data[u"published"] = unicode(
-                calendar.timegm(
-                    dateutil.parser.parse(unicode(published_elt)).utctimetuple()
-                )
+            microblog_data[u"published"] = calendar.timegm(
+                dateutil.parser.parse(unicode(published_elt)).utctimetuple()
             )
 
         # links
@@ -420,16 +418,17 @@
 
                 if not publisher:
                     log.debug(u"No publisher attribute, we can't verify author jid")
-                    microblog_data[u"author_jid_verified"] = C.BOOL_FALSE
+                    microblog_data[u"author_jid_verified"] = False
                 elif jid.JID(publisher).userhostJID() == jid.JID(uri).userhostJID():
-                    microblog_data[u"author_jid_verified"] = C.BOOL_TRUE
+                    microblog_data[u"author_jid_verified"] = True
                 else:
                     log.warning(
-                        u"item atom:uri differ from publisher attribute, spoofing attempt ? atom:uri = {} publisher = {}".format(
+                        u"item atom:uri differ from publisher attribute, spoofing "
+                        u"attempt ? atom:uri = {} publisher = {}".format(
                             uri, item_elt.getAttribute("publisher")
                         )
                     )
-                    microblog_data[u"author_jid_verified"] = C.BOOL_FALSE
+                    microblog_data[u"author_jid_verified"] = False
             # email
             try:
                 email_elt = author_elt.elements(NS_ATOM, "email").next()
@@ -439,11 +438,11 @@
                 microblog_data[u"author_email"] = unicode(email_elt)
 
             # categories
-            categories = (
+            categories = [
                 category_elt.getAttribute("term", "")
                 for category_elt in entry_elt.elements(NS_ATOM, "category")
-            )
-            data_format.iter2dict("tag", categories, microblog_data)
+            ]
+            microblog_data[u"tags"] = categories
 
         ## the trigger ##
         # if other plugins have things to add or change
@@ -572,7 +571,7 @@
         )
 
         ## categories ##
-        for tag in data_format.dict2iter("tag", data):
+        for tag in data.get('tags', []):
             category_elt = entry_elt.addElement("category")
             category_elt["term"] = tag
 
@@ -655,12 +654,16 @@
         @param access(unicode, None): access model
             None to use same access model as parent item
         """
-        # FIXME: if 'comments' already exists in mb_data, it is not used to create the Node
-        allow_comments = C.bool(mb_data.pop("allow_comments", "false"))
-        if not allow_comments:
+        # FIXME: if 'comments' already exists in mb_data,
+        #        it is not used to create the Node
+        allow_comments = mb_data.pop("allow_comments", None)
+        if allow_comments is None:
+            return
+        elif allow_comments is False:
             if "comments" in mb_data:
                 log.warning(
-                    u"comments are not allowed but there is already a comments node, it may be lost: {uri}".format(
+                    u"comments are not allowed but there is already a comments node, "
+                    u"it may be lost: {uri}".format(
                         uri=mb_data["comments"]
                     )
                 )
@@ -737,7 +740,8 @@
                 )
             if "comments_node" in mb_data or "comments_service" in mb_data:
                 raise exceptions.DataError(
-                    u"You can't use comments_service/comments_node and comments at the same time"
+                    u"You can't use comments_service/comments_node and comments at the "
+                    u"same time"
                 )
         else:
             mb_data["comments"] = self._p.getNodeURI(comments_service, comments_node)
@@ -746,6 +750,7 @@
         service = jid.JID(service) if service else None
         node = node if node else NS_MICROBLOG
         client = self.host.getClient(profile_key)
+        data = data_format.deserialise(data)
         return self.send(client, data, service, node)
 
     @defer.inlineCallbacks
@@ -760,7 +765,7 @@
             None is equivalend as using default value
         """
         # TODO: check that all data keys are used, this would avoid sending publicly a private message
-        #       by accident (e.g. if group pluging is not loaded, and "grou*" key are not used)
+        #       by accident (e.g. if group plugin is not loaded, and "group*" keys are not used)
         if node is None:
             node = NS_MICROBLOG
 
@@ -788,6 +793,11 @@
 
     ## get ##
 
+    def _mbGetSerialise(self, data):
+        items, metadata = data
+        items = [data_format.serialise(item) for item in items]
+        return items, metadata
+
     def _mbGet(self, service="", node="", max_items=10, item_ids=None, extra_dict=None,
                profile_key=C.PROF_KEY_NONE):
         """
@@ -798,15 +808,10 @@
         service = jid.JID(service) if service else None
         max_items = None if max_items == C.NO_LIMIT else max_items
         extra = self._p.parseExtra(extra_dict)
-        return self.mbGet(
-            client,
-            service,
-            node or None,
-            max_items,
-            item_ids,
-            extra.rsm_request,
-            extra.extra,
-        )
+        d = self.mbGet(client, service, node or None, max_items, item_ids,
+                       extra.rsm_request, extra.extra)
+        d.addCallback(self._mbGetSerialise)
+        return d
 
     @defer.inlineCallbacks
     def mbGet(self, client, service=None, node=None, max_items=10, item_ids=None,
@@ -834,8 +839,8 @@
             rsm_request=rsm_request,
             extra=extra,
         )
-        serialised = yield self._p.serItemsDataD(items_data, self.item2mbdata)
-        defer.returnValue(serialised)
+        mb_data = yield self._p.transItemsDataD(items_data, self.item2mbdata)
+        defer.returnValue(mb_data)
 
     def parseCommentUrl(self, node_url):
         """Parse a XMPP URI
@@ -922,7 +927,8 @@
             C.ALL: get all jids from roster, publishers is not used
             C.GROUP: get jids from groups
             C.JID: use publishers directly as list of jids
-        @param publishers: list of publishers, according to "publishers_type" (None, list of groups or list of jids)
+        @param publishers: list of publishers, according to "publishers_type" (None,
+            list of groups or list of jids)
         @param profile_key: %(doc_profile_key)s
         """
         client = self.host.getClient(profile_key)
@@ -931,7 +937,8 @@
         else:
             jids_set = client.roster.getJidsSet(publishers_type, publishers)
             if publishers_type == C.ALL:
-                try:  # display messages from salut-a-toi@libervia.org or other PEP services
+                try:
+                    # display messages from salut-a-toi@libervia.org or other PEP services
                     services = self.host.plugins["EXTRA-PEP"].getFollowedEntities(
                         profile_key
                     )
@@ -987,7 +994,8 @@
             C.ALL: get all jids from roster, publishers is not used
             C.GROUP: get jids from groups
             C.JID: use publishers directly as list of jids
-        @param publishers: list of publishers, according to "publishers_type" (None, list of groups or list of jids)
+        @param publishers: list of publishers, according to "publishers_type" (None, list
+            of groups or list of jids)
         @param profile: %(doc_profile)s
         @return (str): session id
         """
@@ -1017,7 +1025,7 @@
 
         def onSuccess(items_data):
             """convert items elements to list of microblog data in items_data"""
-            d = self._p.serItemsDataD(items_data, self.item2mbdata)
+            d = self._p.transItemsDataD(items_data, self.item2mbdata, serialise=True)
             d.addCallback(lambda serialised: ("", serialised))
             return d
 
@@ -1041,14 +1049,8 @@
         )
         return d
 
-    def _mbGetFromMany(
-        self,
-        publishers_type,
-        publishers,
-        max_items=10,
-        extra_dict=None,
-        profile_key=C.PROF_KEY_NONE,
-    ):
+    def _mbGetFromMany(self, publishers_type, publishers, max_items=10, extra_dict=None,
+                       profile_key=C.PROF_KEY_NONE):
         """
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
         """
@@ -1064,19 +1066,14 @@
             profile_key,
         )
 
-    def mbGetFromMany(
-        self,
-        publishers_type,
-        publishers,
-        max_items=None,
-        rsm_request=None,
-        extra=None,
-        profile_key=C.PROF_KEY_NONE,
-    ):
+    def mbGetFromMany(self, publishers_type, publishers, max_items=None, rsm_request=None,
+                      extra=None, profile_key=C.PROF_KEY_NONE):
         """Get the published microblogs for a list of groups or jids
 
-        @param publishers_type (str): type of the list of publishers (one of "GROUP" or "JID" or "ALL")
-        @param publishers (list): list of publishers, according to publishers_type (list of groups or list of jids)
+        @param publishers_type (str): type of the list of publishers (one of "GROUP" or
+            "JID" or "ALL")
+        @param publishers (list): list of publishers, according to publishers_type (list
+            of groups or list of jids)
         @param max_items (int): optional limit on the number of retrieved items.
         @param rsm_request (rsm.RSMRequest): RSM request data, common to all publishers
         @param extra (dict): Extra data
@@ -1093,9 +1090,31 @@
 
     # comments #
 
-    def _mbGetFromManyWithCommentsRTResult(
-        self, session_id, profile_key=C.PROF_KEY_DEFAULT
-    ):
+    def _mbGetFromManyWithCommentsRTResultSerialise(self, data):
+        """Serialisation of result
+
+        This is probably the longest method name of the whole SàT ecosystem ^^
+        @param data(dict): data as received by rt_sessions
+        @return (tuple): see [_mbGetFromManyWithCommentsRTResult]
+        """
+        ret = []
+        data_iter = data[1].iteritems()
+        for (service, node), (success, (failure_, (items_data, metadata))) in data_iter:
+            items = []
+            for item, item_metadata in items_data:
+                item = data_format.serialise(item)
+                items.append((item, item_metadata))
+            ret.append((
+                service.full(),
+                node,
+                failure_,
+                items,
+                metadata))
+
+        return data[0], ret
+
+    def _mbGetFromManyWithCommentsRTResult(self, session_id,
+                                           profile_key=C.PROF_KEY_DEFAULT):
         """Get real-time results for [mbGetFromManyWithComments] session
 
         @param session_id: id of the real-time deferred session
@@ -1118,32 +1137,16 @@
         """
         profile = self.host.getClient(profile_key).profile
         d = self.rt_sessions.getResults(session_id, profile=profile)
-        d.addCallback(
-            lambda ret: (
-                ret[0],
-                [
-                    (service.full(), node, failure, items, metadata)
-                    for (service, node), (success, (failure, (items, metadata))) in ret[
-                        1
-                    ].iteritems()
-                ],
-            )
-        )
+        d.addCallback(self._mbGetFromManyWithCommentsRTResultSerialise)
         return d
 
-    def _mbGetFromManyWithComments(
-        self,
-        publishers_type,
-        publishers,
-        max_items=10,
-        max_comments=C.NO_LIMIT,
-        extra_dict=None,
-        extra_comments_dict=None,
-        profile_key=C.PROF_KEY_NONE,
-    ):
+    def _mbGetFromManyWithComments(self, publishers_type, publishers, max_items=10,
+                                   max_comments=C.NO_LIMIT, extra_dict=None,
+                                   extra_comments_dict=None, profile_key=C.PROF_KEY_NONE):
         """
         @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
-        @param max_comments(int): maximum number of comments to get, C.NO_LIMIT for no limit
+        @param max_comments(int): maximum number of comments to get, C.NO_LIMIT for no
+            limit
         """
         max_items = None if max_items == C.NO_LIMIT else max_items
         max_comments = None if max_comments == C.NO_LIMIT else max_comments
@@ -1162,22 +1165,16 @@
             profile_key,
         )
 
-    def mbGetFromManyWithComments(
-        self,
-        publishers_type,
-        publishers,
-        max_items=None,
-        max_comments=None,
-        rsm_request=None,
-        extra=None,
-        rsm_comments=None,
-        extra_comments=None,
-        profile_key=C.PROF_KEY_NONE,
-    ):
+    def mbGetFromManyWithComments(self, publishers_type, publishers, max_items=None,
+                                  max_comments=None, rsm_request=None, extra=None,
+                                  rsm_comments=None, extra_comments=None,
+                                  profile_key=C.PROF_KEY_NONE):
         """Helper method to get the microblogs and their comments in one shot
 
-        @param publishers_type (str): type of the list of publishers (one of "GROUP" or "JID" or "ALL")
-        @param publishers (list): list of publishers, according to publishers_type (list of groups or list of jids)
+        @param publishers_type (str): type of the list of publishers (one of "GROUP" or
+            "JID" or "ALL")
+        @param publishers (list): list of publishers, according to publishers_type (list
+            of groups or list of jids)
         @param max_items (int): optional limit on the number of retrieved items.
         @param max_comments (int): maximum number of comments to retrieve
         @param rsm_request (rsm.RSMRequest): RSM request for initial items only
@@ -1223,8 +1220,8 @@
                         )
                         # then serialise
                         d.addCallback(
-                            lambda items_data: self._p.serItemsDataD(
-                                items_data, self.item2mbdata
+                            lambda items_data: self._p.transItemsDataD(
+                                items_data, self.item2mbdata, serialise=True
                             )
                         )
                         # with failure handling
@@ -1232,7 +1229,8 @@
                             lambda serialised_items_data: ("",) + serialised_items_data
                         )
                         d.addErrback(lambda failure: (unicode(failure.value), [], {}))
-                        # and associate with service/node (needed if there are several comments nodes)
+                        # and associate with service/node (needed if there are several
+                        # comments nodes)
                         d.addCallback(
                             lambda serialised, service_s=service_s, node=node: (
                                 service_s,
@@ -1260,7 +1258,7 @@
                 client, service, node, max_items, rsm_request=rsm_request, extra=extra
             )
             d.addCallback(
-                lambda items_data: self._p.serItemsDataD(items_data, self.item2mbdata)
+                lambda items_data: self._p.transItemsDataD(items_data, self.item2mbdata)
             )
             d.addCallback(getComments)
             d.addCallback(lambda items_comments_data: ("", items_comments_data))
--- a/sat/tools/common/data_objects.py	Wed Feb 20 19:42:35 2019 +0100
+++ b/sat/tools/common/data_objects.py	Sat Feb 23 18:59:00 2019 +0100
@@ -73,10 +73,8 @@
 
 class BlogItem(object):
     def __init__(self, mb_data, parent):
-        self.mb_data = mb_data
+        self.mb_data = data_format.deserialise(mb_data)
         self.parent = parent
-        self._tags = None
-        self._groups = None
         self._comments = None
         self._comments_items_list = None
 
@@ -126,15 +124,11 @@
 
     @property
     def tags(self):
-        if self._tags is None:
-            self._tags = list(data_format.dict2iter("tag", self.mb_data))
-        return self._tags
+        return self.mb_data.get(u'tags', [])
 
     @property
     def groups(self):
-        if self._groups is None:
-            self._groups = list(data_format.dict2iter("group", self.mb_data))
-        return self._groups
+        return self.mb_data.get(u'groups', [])
 
     @property
     def title(self):
--- a/sat_frontends/jp/cmd_blog.py	Wed Feb 20 19:42:35 2019 +0100
+++ b/sat_frontends/jp/cmd_blog.py	Sat Feb 23 18:59:00 2019 +0100
@@ -142,9 +142,10 @@
 
         if metadata already exist, it will be overwritten
         """
-        mb_data["allow_comments"] = C.boolConst(self.args.comments)
+        if self.args.comments is not None:
+            mb_data["allow_comments"] = self.args.comments
         if self.args.tag:
-            data_format.iter2dict("tag", self.args.tag, mb_data, check_conflict=False)
+            mb_data[u'tags'] = self.args.tag
         if self.args.title is not None:
             mb_data["title"] = self.args.title
 
@@ -179,7 +180,7 @@
         self.host.bridge.mbSend(
             self.args.service,
             self.args.node,
-            mb_data,
+            data_format.serialise(mb_data),
             self.profile,
             callback=self.exitCb,
             errback=partial(
@@ -239,7 +240,7 @@
         return u"\n".join(lines)
 
     def format_tags(self, item, keys):
-        tags = data_format.dict2iter("tag", item, pop=True)
+        tags = item.pop(u'tags', [])
         return u", ".join(tags)
 
     def format_updated(self, item, keys):
@@ -318,6 +319,8 @@
                         sep=u"\n" if "content" in k else u"",
                     )
                 value = k_cb[k](item, keys) if k in k_cb else item[k]
+                if isinstance(value, bool):
+                    value = unicode(value).lower()
                 self.disp(header + value)
             # we want a separation line after each item but the last one
             if idx < len(items) - 1:
@@ -353,7 +356,7 @@
             else:
                 author = published = updated = None
             if verbosity > 1:
-                tags = list(data_format.dict2iter("tag", item, pop=True))
+                tags = item.pop('tags', [])
             else:
                 tags = None
             content = item.get(u"content")
@@ -380,6 +383,9 @@
             print(u"\n" + sep + "\n")
 
     def mbGetCb(self, mb_result):
+        items, metadata = mb_result
+        items = [data_format.deserialise(i) for i in items]
+        mb_result = items, metadata
         self.output(mb_result)
         self.host.quit(C.EXIT_OK)
 
@@ -527,6 +533,8 @@
         if self.pubsub_item is not None:
             mb_data["id"] = self.pubsub_item
 
+        mb_data = data_format.serialise(mb_data)
+
         self.host.bridge.mbSend(
             self.pubsub_service, self.pubsub_node, mb_data, self.profile
         )
@@ -539,6 +547,7 @@
     def getItemData(self, service, node, item):
         items = [item] if item is not None else []
         mb_data = self.host.bridge.mbGet(service, node, 1, items, {}, self.profile)[0][0]
+        mb_data = data_format.deserialise(mb_data)
         try:
             content = mb_data["content_xhtml"]
         except KeyError:
--- a/sat_frontends/quick_frontend/quick_app.py	Wed Feb 20 19:42:35 2019 +0100
+++ b/sat_frontends/quick_frontend/quick_app.py	Sat Feb 23 18:59:00 2019 +0100
@@ -988,8 +988,9 @@
         @param service_s (unicode): pubsub service
         @param node (unicode): pubsub node
         @param event_type (unicode): event type (one of C.PUBLISH, C.RETRACT, C.DELETE)
-        @param data (dict): event data
+        @param data (unicode): serialised event data
         """
+        data = data_format.deserialise(data)
         service_s = jid.JID(service_s)
 
         if category == C.PS_MICROBLOG and self.MB_HANDLER:
@@ -997,9 +998,9 @@
                 if not "content" in data:
                     log.warning("No content found in microblog data")
                     return
-                _groups = (
-                    set(data_format.dict2iter("group", data)) or None
-                )  # FIXME: check if [] make sense (instead of None)
+
+                # FIXME: check if [] makes sense (instead of None)
+                _groups = data.get("groups")
 
                 for wid in self.widgets.getWidgets(quick_blog.QuickBlog):
                     wid.addEntryIfAccepted(service_s, node, data, _groups, profile)
--- a/sat_frontends/quick_frontend/quick_blog.py	Wed Feb 20 19:42:35 2019 +0100
+++ b/sat_frontends/quick_frontend/quick_blog.py	Sat Feb 23 18:59:00 2019 +0100
@@ -53,7 +53,7 @@
         self.title = data.get("title")
         self.title_rich = None
         self.title_xhtml = data.get("title_xhtml")
-        self.tags = list(data_format.dict2iter("tag", data))
+        self.tags = data.get('tags', [])
         self.content = data.get("content")
         self.content_rich = None
         self.content_xhtml = data.get("content_xhtml")
@@ -64,10 +64,7 @@
         except KeyError:
             self.author_jid = None
 
-        try:
-            self.author_verified = C.bool(data["author_jid_verified"])
-        except KeyError:
-            self.author_verified = False
+        self.author_verified = data.get("author_jid_verified", False)
 
         try:
             self.updated = float(
@@ -148,16 +145,8 @@
         for item, comments in items:
             self.addEntry(item, comments, service=service, node=node, with_update=False)
 
-    def addEntry(
-        self,
-        item=None,
-        comments=None,
-        service=None,
-        node=None,
-        with_update=True,
-        editable=False,
-        edit_entry=False,
-    ):
+    def addEntry(self, item=None, comments=None, service=None, node=None,
+                 with_update=True, editable=False, edit_entry=False):
         """Add a microblog entry
 
         @param editable (bool): True if the entry can be modified
@@ -286,21 +275,21 @@
                 if value is not None:
                     mb_data[name] = value
 
-        data_format.iter2dict("tag", self.item.tags, mb_data)
+        mb_data['tags'] = self.item.tags
 
         if self.blog.new_message_target not in (C.PUBLIC, C.GROUP):
             raise NotImplementedError
 
         if self.level == 0:
-            mb_data["allow_comments"] = C.BOOL_TRUE
+            mb_data["allow_comments"] = True
 
         if self.blog.new_message_target == C.GROUP:
-            data_format.iter2dict("group", self.blog.targets, mb_data)
+            mb_data['groups'] = list(self.blog.targets)
 
         self.blog.host.bridge.mbSend(
             unicode(self.service or ""),
             self.node or "",
-            mb_data,
+            data_format.serialise(mb_data),
             profile=self.blog.profile,
         )
 
@@ -398,9 +387,14 @@
             )
         )
         for result in results:
-            service, node, failure, items, metadata = result
+            service, node, failure, items_data, metadata = result
+            for item_data in items_data:
+                item_data[0] = data_format.deserialise(item_data[0])
+                for item_metadata in item_data[1]:
+                    item_metadata[3] = [data_format.deserialise(i)
+                                        for i in item_metadata[3]]
             if not failure:
-                self._addMBItemsWithComments((items, metadata), service=jid.JID(service))
+                self._addMBItemsWithComments((items_data, metadata),
+                                             service=jid.JID(service))
 
         self.update()
         if remaining: