# HG changeset patch # User Goffi # Date 1439670278 -7200 # Node ID 4e2fab4de195018eb2ea9acec8455686c72933e9 # Parent d5e72362ee91fb7f4f90188112877f4c1ebf8730 plugin XEP-0277: added mBGetFromManyWithComments to have items + comments in one method call diff -r d5e72362ee91 -r 4e2fab4de195 src/core/constants.py --- a/src/core/constants.py Sat Aug 15 22:22:36 2015 +0200 +++ b/src/core/constants.py Sat Aug 15 22:24:38 2015 +0200 @@ -207,6 +207,7 @@ IQ_SET = '/iq[@type="set"]' ENV_PREFIX = 'SAT_' # Prefix used for environment variables IGNORE = 'ignore' + NO_LIMIT = -1 # used in bridge when a integer value is expected ## ANSI escape sequences ## diff -r d5e72362ee91 -r 4e2fab4de195 src/plugins/plugin_misc_groupblog.py --- a/src/plugins/plugin_misc_groupblog.py Sat Aug 15 22:22:36 2015 +0200 +++ b/src/plugins/plugin_misc_groupblog.py Sat Aug 15 22:24:38 2015 +0200 @@ -108,10 +108,10 @@ method=self.sendGroupBlogComment, async=True) - host.bridge.addMethod("getGroupBlogs", ".plugin", - in_sign='sasa{ss}bs', out_sign='(aa{ss}a{ss})', - method=self.getGroupBlogs, - async=True) + # host.bridge.addMethod("getGroupBlogs", ".plugin", + # in_sign='sasa{ss}bs', out_sign='(aa{ss}a{ss})', + # method=self.getGroupBlogs, + # async=True) host.bridge.addMethod("getGroupBlogsWithComments", ".plugin", in_sign='sasa{ss}is', out_sign='(a(a{ss}(aa{ss}a{ss}))a{ss})', @@ -123,19 +123,19 @@ method=self.getGroupBlogsAtom, async=True) - host.bridge.addMethod("getMassiveGroupBlogs", ".plugin", - in_sign='sasa{ss}s', out_sign='a{s(aa{ss}a{ss})}', - method=self._getMassiveGroupBlogs, - async=True) + # host.bridge.addMethod("getMassiveGroupBlogs", ".plugin", + # in_sign='sasa{ss}s', out_sign='a{s(aa{ss}a{ss})}', + # method=self._getMassiveGroupBlogs, + # async=True) - host.bridge.addMethod("getGroupBlogComments", ".plugin", - in_sign='ssa{ss}s', out_sign='(aa{ss}a{ss})', - method=self.getGroupBlogComments, - async=True) + # host.bridge.addMethod("getGroupBlogComments", ".plugin", + # in_sign='ssa{ss}s', out_sign='(aa{ss}a{ss})', + # method=self.getGroupBlogComments, + # async=True) - host.bridge.addMethod("subscribeGroupBlog", ".plugin", in_sign='ss', out_sign='', - method=self.subscribeGroupBlog, - async=True) + # host.bridge.addMethod("subscribeGroupBlog", ".plugin", in_sign='ss', out_sign='', + # method=self.subscribeGroupBlog, + # async=True) host.trigger.add("PubSubItemsReceived", self.pubSubItemsReceivedTrigger) @@ -245,6 +245,7 @@ access_model = form.get(P.OPT_ACCESS_MODEL, 'open') if access_model == "roster": try: + # FIXME: groups are xs:string, so they can contain "\n" ! This code is bugged microblog_data["groups"] = '\n'.join(form.fields[P.OPT_ROSTER_GROUPS_ALLOWED].values) except KeyError: log.warning("No group found for roster access-model") @@ -553,22 +554,22 @@ return DeferredItems(self, cb, None, profile_key).get(self.getNodeName(pub_jid), item_ids, rsm_data=rsm_data) - def getGroupBlogs(self, pub_jid_s, item_ids=None, rsm_data=None, count_comments=True, profile_key=C.PROF_KEY_NONE): - """Get the published microblogs of the specified IDs. If item_ids is - None, the result would be the same than calling getGroupBlogs - with the default value for the attribute max_items. + # def getGroupBlogs(self, pub_jid_s, item_ids=None, rsm_data=None, count_comments=True, profile_key=C.PROF_KEY_NONE): + # """Get the published microblogs of the specified IDs. If item_ids is + # None, the result would be the same than calling getGroupBlogs + # with the default value for the attribute max_items. 
- @param pub_jid_s: jid of the publisher - @param item_ids: list of microblogs items IDs - @param rsm_data (dict): RSM request data - @param count_comments (bool): also count the comments if True - @param profile_key (str): %(doc_profile_key)s - @return: a deferred couple (list, dict) containing: - - list of microblog data - - RSM response data - """ - max_comments = 0 if count_comments else DO_NOT_COUNT_COMMENTS - return self._getGroupBlogs(pub_jid_s, item_ids=item_ids, rsm_data=rsm_data, max_comments=max_comments, profile_key=profile_key) + # @param pub_jid_s: jid of the publisher + # @param item_ids: list of microblogs items IDs + # @param rsm_data (dict): RSM request data + # @param count_comments (bool): also count the comments if True + # @param profile_key (str): %(doc_profile_key)s + # @return: a deferred couple (list, dict) containing: + # - list of microblog data + # - RSM response data + # """ + # max_comments = 0 if count_comments else DO_NOT_COUNT_COMMENTS + # return self._getGroupBlogs(pub_jid_s, item_ids=item_ids, rsm_data=rsm_data, max_comments=max_comments, profile_key=profile_key) def getGroupBlogsWithComments(self, pub_jid_s, item_ids=None, rsm_data=None, max_comments=None, profile_key=C.PROF_KEY_NONE): """Get the published microblogs of the specified IDs and their comments. If @@ -631,28 +632,12 @@ d = DeferredItems(self, cb, lambda dummy: [''], profile_key).get(self.getNodeName(pub_jid), rsm_data=rsm_data) return d.addCallback(lambda res: res[0]) - def getGroupBlogComments(self, service_s, node, rsm_data=None, profile_key=C.PROF_KEY_NONE): - """Get all comments of given node - @param service_s: service hosting the node - @param node: comments node - @param profile_key: profile key - @return: a deferred couple (list, dict) containing: - - list of microblog data - - RSM response data - """ - service = jid.JID(service_s) - - def cb(items, client): - return self._handleCommentsItems(items, service, node) - - return DeferredItems(self, cb, None, profile_key).get(node, rsm_data=rsm_data) - - def _getMassiveGroupBlogs(self, publishers_type, publishers, rsm_data=None, profile_key=C.PROF_KEY_NONE): - if publishers_type == 'JID': - publishers_jids = [jid.JID(publisher) for publisher in publishers] - else: - publishers_jids = publishers - return self.getMassiveGroupBlogs(publishers_type, publishers_jids, rsm_data, profile_key) + # def _getMassiveGroupBlogs(self, publishers_type, publishers, rsm_data=None, profile_key=C.PROF_KEY_NONE): + # if publishers_type == 'JID': + # publishers_jids = [jid.JID(publisher) for publisher in publishers] + # else: + # publishers_jids = publishers + # return self.getMassiveGroupBlogs(publishers_type, publishers_jids, rsm_data, profile_key) # def _getPublishersJIDs(self, publishers_type, publishers, client): # #TODO: custom exception @@ -674,36 +659,36 @@ # raise UnknownType # return jids - def getMassiveGroupBlogs(self, publishers_type, publishers, rsm_data=None, profile_key=C.PROF_KEY_NONE): - """Get the last published microblogs for a list of groups or jids - @param publishers_type (str): type of the list of publishers (one of "GROUP" or "JID" or "ALL") - @param publishers (list): list of publishers, according to publishers_type (list of groups or list of jids) - @param rsm_data (dict): RSM request data, common to all publishers - @param profile_key: profile key - @return: a deferred dict with: - - key: publisher (unicode) - - value: couple (list[dict], dict) with: - - the microblogs data - - RSM response data - """ - def cb(items, publisher, 
client): - d = self._itemsConstruction(items, publisher, client) - return d.addCallback(self._getOrCountComments, False, profile_key) + # def getMassiveGroupBlogs(self, publishers_type, publishers, rsm_data=None, profile_key=C.PROF_KEY_NONE): + # """Get the last published microblogs for a list of groups or jids + # @param publishers_type (str): type of the list of publishers (one of "GROUP" or "JID" or "ALL") + # @param publishers (list): list of publishers, according to publishers_type (list of groups or list of jids) + # @param rsm_data (dict): RSM request data, common to all publishers + # @param profile_key: profile key + # @return: a deferred dict with: + # - key: publisher (unicode) + # - value: couple (list[dict], dict) with: + # - the microblogs data + # - RSM response data + # """ + # def cb(items, publisher, client): + # d = self._itemsConstruction(items, publisher, client) + # return d.addCallback(self._getOrCountComments, False, profile_key) - #TODO: we need to use the server corresponding to the host of the jid - return DeferredItemsFromMany(self, cb, profile_key).get(publishers_type, publishers, rsm_data=rsm_data) + # #TODO: we need to use the server corresponding to the host of the jid + # return DeferredItemsFromMany(self, cb, profile_key).get(publishers_type, publishers, rsm_data=rsm_data) ## subscribe ## - def subscribeGroupBlog(self, pub_jid, profile_key=C.PROF_KEY_NONE): - def initialised(result): - profile, client = result - d = self.host.plugins["XEP-0060"].subscribe(client.item_access_pubsub, self.getNodeName(jid.JID(pub_jid)), - profile_key=profile_key) - return d + # def subscribeGroupBlog(self, pub_jid, profile_key=C.PROF_KEY_NONE): + # def initialised(result): + # profile, client = result + # d = self.host.plugins["XEP-0060"].subscribe(client.item_access_pubsub, self.getNodeName(jid.JID(pub_jid)), + # profile_key=profile_key) + # return d - #TODO: we need to use the server corresponding the the host of the jid - return self._initialise(profile_key).addCallback(initialised) + # #TODO: we need to use the server corresponding the the host of the jid + # return self._initialise(profile_key).addCallback(initialised) ## delete ## diff -r d5e72362ee91 -r 4e2fab4de195 src/plugins/plugin_xep_0277.py --- a/src/plugins/plugin_xep_0277.py Sat Aug 15 22:22:36 2015 +0200 +++ b/src/plugins/plugin_xep_0277.py Sat Aug 15 22:24:38 2015 +0200 @@ -26,6 +26,7 @@ from twisted.python import failure from sat.core import exceptions from sat.tools.xml_tools import ElementParser +from sat.tools import sat_defer from wokkel import pubsub from wokkel import rsm @@ -63,6 +64,7 @@ log.info(_("Microblogging plugin initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] # this facilitate the access to pubsub plugin + self.rt_sessions = sat_defer.RTDeferredSessions() self.host.plugins["XEP-0163"].addPEPEvent("MICROBLOG", NS_MICROBLOG, self.microblogCB, self.sendMicroblog, notify=False) host.bridge.addMethod("getLastMicroblogs", ".plugin", in_sign='sis', out_sign='(aa{ss}a{ss})', @@ -82,6 +84,9 @@ method=self._mBGetFromManyRTResult, async=True) host.bridge.addMethod("mBGetFromMany", ".plugin", in_sign='sasia{ss}s', out_sign='s', method=self._mBGetFromMany) + host.bridge.addMethod("mBGetFromManyWithCommentsRTResult", ".plugin", in_sign='ss', out_sign='(ua(sssa(a{ss}a(sssaa{ss}a{ss}))a{ss}))', + method=self._mBGetFromManyWithCommentsRTResult, async=True) + host.bridge.addMethod("mBGetFromManyWithComments", ".plugin", in_sign='sasiia{ss}a{ss}s', out_sign='s', 
+                              method=self._mBGetFromManyWithComments)
 
     ## plugin management methods ##
 
@@ -565,3 +570,116 @@
         """
         client, node_data = self._getClientAndNodeData(publishers_type, publishers, profile_key)
         return self._p.getFromMany(node_data, max_item, rsm_data, profile_key=profile_key)
+
+    # comments #
+
+    def _mBGetFromManyWithCommentsRTResult(self, session_id, profile_key=C.PROF_KEY_DEFAULT):
+        """Get real-time results for a [mBGetFromManyWithComments] session
+
+        @param session_id: id of the real-time deferred session
+        @param profile_key: %(doc_profile_key)s
+        @return (tuple): (remaining, results) where:
+            - remaining is the number of results still expected
+            - results is a list of tuples with
+                - service (unicode): pubsub service
+                - node (unicode): pubsub node
+                - failure (unicode): empty string in case of success, otherwise the error message
+                - items (list): list of items with:
+                    - item (dict): item microblog data
+                    - comments_list (list): list of comments with
+                        - service (unicode): pubsub service where the comments node is
+                        - node (unicode): comments node
+                        - failure (unicode): empty string in case of success, otherwise the error message
+                        - comments (list[dict]): list of microblog data
+                        - comments_metadata (dict): metadata of the comments node
+                - metadata (dict): original node metadata
+        """
+        profile = self.host.getClient(profile_key).profile
+        d = self.rt_sessions.getResults(session_id, profile=profile)
+        d.addCallback(lambda ret: (ret[0],
+                                   [(service.full(), node, failure, items, metadata)
+                                    for (service, node), (success, (failure, (items, metadata))) in ret[1].iteritems()]))
+        return d
+
+    def _mBGetFromManyWithComments(self, publishers_type, publishers, max_item=10, max_comments=C.NO_LIMIT, rsm_dict=None, rsm_comments_dict=None, profile_key=C.PROF_KEY_NONE):
+        """
+        @param max_item (int): maximum number of items to get, C.NO_LIMIT for no limit
+        @param max_comments (int): maximum number of comments to get, C.NO_LIMIT for no limit
+        """
+        max_item = None if max_item == C.NO_LIMIT else max_item
+        max_comments = None if max_comments == C.NO_LIMIT else max_comments
+        publishers_type, publishers = self._checkPublishers(publishers_type, publishers)
+        return self.mBGetFromManyWithComments(publishers_type, publishers, max_item, max_comments,
+                                              rsm.RSMRequest(**rsm_dict) if rsm_dict else None,
+                                              rsm.RSMRequest(**rsm_comments_dict) if rsm_comments_dict else None,
+                                              profile_key)
+
+    def mBGetFromManyWithComments(self, publishers_type, publishers, max_item=None, max_comments=None, rsm_request=None, rsm_comments=None, profile_key=C.PROF_KEY_NONE):
+        """Helper method to get the microblogs and their comments in one shot
+
+        @param publishers_type (str): type of the list of publishers (one of "GROUP", "JID" or "ALL")
+        @param publishers (list): list of publishers, according to publishers_type (list of groups or list of jids)
+        @param max_item (int): optional limit on the number of retrieved items
+        @param max_comments (int): maximum number of comments to retrieve
+        @param rsm_request (rsm.RSMRequest): RSM request for the initial items only
+        @param rsm_comments (rsm.RSMRequest): RSM request for the comments only
+        @param profile_key: profile key
+        @return (unicode): session id of the real-time deferred session; pass it
+            to [mBGetFromManyWithCommentsRTResult] to collect, for each publisher
+            node, the retrieved items together with:
+                - their comments data
+                - RSM response metadata
+        """
+        # XXX: this method seems complicated because it does a couple of treatments
+        #      to serialise and associate the data, but it makes life on the
+        #      frontends side a lot easier
+
+        def getComments(items_data):
+            """Retrieve comments and add them to the items_data
+
+            @param items_data: serialised items data
+            @return (defer.Deferred): list of items where each item is associated
+                with a list of comments data (service, node, failure, list of items, metadata)
+            """
+            items, metadata = items_data
+            items_dlist = []  # deferred list for items
+            for item in items:
+                dlist = []  # deferred list for comments
+                for key, value in item.iteritems():
+                    # we look for comments
+                    if key.startswith('comments') and key.endswith('_service'):
+                        prefix = key[:key.find('_')]
+                        service_s = value
+                        node = item["{}{}".format(prefix, "_node")]
+                        # time to get the comments
+                        d = self._p.getItems(jid.JID(service_s), node, max_comments, rsm_request=rsm_comments, profile_key=profile_key)
+                        # then serialise
+                        d.addCallback(lambda items_data: self._p.serItemsDataD(items_data, self.item2mbdata))
+                        # with failure handling
+                        d.addCallback(lambda serialised_items_data: ('',) + serialised_items_data)
+                        d.addErrback(lambda failure: (unicode(failure.value), [], {}))
+                        # and associate with service/node (needed if there are several comments nodes);
+                        # service_s and node are bound as default arguments because the deferred fires after the loop
+                        d.addCallback(lambda serialised, service_s=service_s, node=node: (service_s, node) + serialised)
+                        dlist.append(d)
+                # we get the comments
+                comments_d = defer.gatherResults(dlist)
+                # and add them to the item data (item is bound as a default argument for the same reason)
+                comments_d.addCallback(lambda comments_data, item=item: (item, comments_data))
+                items_dlist.append(comments_d)
+            # we gather the items + comments in a list
+            items_d = defer.gatherResults(items_dlist)
+            # and add the metadata
+            items_d.addCallback(lambda items: (items, metadata))
+            return items_d
+
+        client, node_data = self._getClientAndNodeData(publishers_type, publishers, profile_key)
+        deferreds = {}
+        for service, node in node_data:
+            d = deferreds[(service, node)] = self._p.getItems(service, node, max_item, rsm_request=rsm_request, profile_key=profile_key)
+            d.addCallback(lambda items_data: self._p.serItemsDataD(items_data, self.item2mbdata))
+            d.addCallback(getComments)
+            d.addCallback(lambda items_comments_data: ('', items_comments_data))
+            d.addErrback(lambda failure: (unicode(failure.value), ([], {})))
+
+        return self.rt_sessions.newSession(deferreds, client.profile)
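
Review note: below is a rough usage sketch (not part of the changeset) showing how the two new methods are meant to be chained from backend-side code: mBGetFromManyWithComments starts a real-time session and returns its id, and mBGetFromManyWithCommentsRTResult is then polled with that id until no result remains. The names show_blogs_with_comments, xep_0277 (the plugin instance) and profile are illustrative assumptions, not code from the repository; "ALL" with an empty publishers list follows the publishers_type convention documented for the other *FromMany methods.

# hypothetical caller, Python 2 like the rest of the code base
from sat.core.constants import Const as C  # same constants class as the plugins use

def show_blogs_with_comments(xep_0277, profile):
    # start the real-time session: 10 items per publisher, unlimited comments
    session_id = xep_0277._mBGetFromManyWithComments(
        "ALL", [], max_item=10, max_comments=C.NO_LIMIT, profile_key=profile)

    def got_results(result):
        remaining, results = result
        for service, node, failure, items, metadata in results:
            if failure:
                print u"error on {}/{}: {}".format(service, node, failure)
                continue
            for item, comments_list in items:
                print item.get("title", item.get("content", u""))
                for c_service, c_node, c_failure, comments, c_metadata in comments_list:
                    print u"    {} comment(s) on node {}".format(len(comments), c_node)
        if remaining:
            # some publishers are still pending: poll the session again
            # (a real frontend would typically do this after a delay or on user request)
            xep_0277._mBGetFromManyWithCommentsRTResult(session_id, profile).addCallback(got_results)

    # first poll of the session
    xep_0277._mBGetFromManyWithCommentsRTResult(session_id, profile).addCallback(got_results)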