Mercurial > libervia-backend
comparison sat/plugins/plugin_xep_0277.py @ 2624:56f94936df1e
code style reformatting using black
author | Goffi <goffi@goffi.org> |
---|---|
date | Wed, 27 Jun 2018 20:14:46 +0200 |
parents | 26edcf3a30eb |
children | 3480d4fdf83a |
comparison
equal
deleted
inserted
replaced
2623:49533de4540b | 2624:56f94936df1e |
---|---|
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. | 18 # along with this program. If not, see <http://www.gnu.org/licenses/>. |
19 | 19 |
20 from sat.core.i18n import _ | 20 from sat.core.i18n import _ |
21 from sat.core.constants import Const as C | 21 from sat.core.constants import Const as C |
22 from sat.core.log import getLogger | 22 from sat.core.log import getLogger |
23 | |
23 log = getLogger(__name__) | 24 log = getLogger(__name__) |
24 from twisted.words.protocols.jabber import jid, error | 25 from twisted.words.protocols.jabber import jid, error |
25 from twisted.words.protocols.jabber.xmlstream import XMPPHandler | 26 from twisted.words.protocols.jabber.xmlstream import XMPPHandler |
26 from twisted.words.xish import domish | 27 from twisted.words.xish import domish |
27 from twisted.internet import defer | 28 from twisted.internet import defer |
41 import time | 42 import time |
42 import dateutil | 43 import dateutil |
43 import calendar | 44 import calendar |
44 import urlparse | 45 import urlparse |
45 | 46 |
46 NS_MICROBLOG = 'urn:xmpp:microblog:0' | 47 NS_MICROBLOG = "urn:xmpp:microblog:0" |
47 NS_ATOM = 'http://www.w3.org/2005/Atom' | 48 NS_ATOM = "http://www.w3.org/2005/Atom" |
48 NS_PUBSUB_EVENT = "{}{}".format(pubsub.NS_PUBSUB, "#event") | 49 NS_PUBSUB_EVENT = "{}{}".format(pubsub.NS_PUBSUB, "#event") |
49 NS_COMMENT_PREFIX = '{}:comments/'.format(NS_MICROBLOG) | 50 NS_COMMENT_PREFIX = "{}:comments/".format(NS_MICROBLOG) |
50 | 51 |
51 | 52 |
52 PLUGIN_INFO = { | 53 PLUGIN_INFO = { |
53 C.PI_NAME: "Microblogging over XMPP Plugin", | 54 C.PI_NAME: "Microblogging over XMPP Plugin", |
54 C.PI_IMPORT_NAME: "XEP-0277", | 55 C.PI_IMPORT_NAME: "XEP-0277", |
56 C.PI_PROTOCOLS: ["XEP-0277"], | 57 C.PI_PROTOCOLS: ["XEP-0277"], |
57 C.PI_DEPENDENCIES: ["XEP-0163", "XEP-0060", "TEXT-SYNTAXES"], | 58 C.PI_DEPENDENCIES: ["XEP-0163", "XEP-0060", "TEXT-SYNTAXES"], |
58 C.PI_RECOMMENDATIONS: ["XEP-0059", "EXTRA-PEP"], | 59 C.PI_RECOMMENDATIONS: ["XEP-0059", "EXTRA-PEP"], |
59 C.PI_MAIN: "XEP_0277", | 60 C.PI_MAIN: "XEP_0277", |
60 C.PI_HANDLER: "yes", | 61 C.PI_HANDLER: "yes", |
61 C.PI_DESCRIPTION: _("""Implementation of microblogging Protocol""") | 62 C.PI_DESCRIPTION: _("""Implementation of microblogging Protocol"""), |
62 } | 63 } |
63 | 64 |
64 | 65 |
65 class NodeAccessChangeException(Exception): | 66 class NodeAccessChangeException(Exception): |
66 pass | 67 pass |
70 namespace = NS_MICROBLOG | 71 namespace = NS_MICROBLOG |
71 | 72 |
72 def __init__(self, host): | 73 def __init__(self, host): |
73 log.info(_(u"Microblogging plugin initialization")) | 74 log.info(_(u"Microblogging plugin initialization")) |
74 self.host = host | 75 self.host = host |
75 host.registerNamespace('microblog', NS_MICROBLOG) | 76 host.registerNamespace("microblog", NS_MICROBLOG) |
76 self._p = self.host.plugins["XEP-0060"] # this facilitate the access to pubsub plugin | 77 self._p = self.host.plugins[ |
78 "XEP-0060" | |
79 ] # this facilitate the access to pubsub plugin | |
77 self.rt_sessions = sat_defer.RTDeferredSessions() | 80 self.rt_sessions = sat_defer.RTDeferredSessions() |
78 self.host.plugins["XEP-0060"].addManagedNode(NS_MICROBLOG, items_cb=self._itemsReceived) | 81 self.host.plugins["XEP-0060"].addManagedNode( |
79 | 82 NS_MICROBLOG, items_cb=self._itemsReceived |
80 host.bridge.addMethod("mbSend", ".plugin", | 83 ) |
81 in_sign='ssa{ss}s', out_sign='', | 84 |
82 method=self._mbSend, | 85 host.bridge.addMethod( |
83 async=True) | 86 "mbSend", |
84 host.bridge.addMethod("mbRetract", ".plugin", | 87 ".plugin", |
85 in_sign='ssss', out_sign='', | 88 in_sign="ssa{ss}s", |
86 method=self._mbRetract, | 89 out_sign="", |
87 async=True) | 90 method=self._mbSend, |
88 host.bridge.addMethod("mbGet", ".plugin", | 91 async=True, |
89 in_sign='ssiasa{ss}s', out_sign='(aa{ss}a{ss})', | 92 ) |
90 method=self._mbGet, | 93 host.bridge.addMethod( |
91 async=True) | 94 "mbRetract", |
92 host.bridge.addMethod("mbSetAccess", ".plugin", in_sign='ss', out_sign='', | 95 ".plugin", |
93 method=self.mbSetAccess, | 96 in_sign="ssss", |
94 async=True) | 97 out_sign="", |
95 host.bridge.addMethod("mbSubscribeToMany", ".plugin", in_sign='sass', out_sign='s', | 98 method=self._mbRetract, |
96 method=self._mbSubscribeToMany) | 99 async=True, |
97 host.bridge.addMethod("mbGetFromManyRTResult", ".plugin", in_sign='ss', out_sign='(ua(sssaa{ss}a{ss}))', | 100 ) |
98 method=self._mbGetFromManyRTResult, async=True) | 101 host.bridge.addMethod( |
99 host.bridge.addMethod("mbGetFromMany", ".plugin", in_sign='sasia{ss}s', out_sign='s', | 102 "mbGet", |
100 method=self._mbGetFromMany) | 103 ".plugin", |
101 host.bridge.addMethod("mbGetFromManyWithCommentsRTResult", ".plugin", in_sign='ss', out_sign='(ua(sssa(a{ss}a(sssaa{ss}a{ss}))a{ss}))', | 104 in_sign="ssiasa{ss}s", |
102 method=self._mbGetFromManyWithCommentsRTResult, async=True) | 105 out_sign="(aa{ss}a{ss})", |
103 host.bridge.addMethod("mbGetFromManyWithComments", ".plugin", in_sign='sasiia{ss}a{ss}s', out_sign='s', | 106 method=self._mbGet, |
104 method=self._mbGetFromManyWithComments) | 107 async=True, |
108 ) | |
109 host.bridge.addMethod( | |
110 "mbSetAccess", | |
111 ".plugin", | |
112 in_sign="ss", | |
113 out_sign="", | |
114 method=self.mbSetAccess, | |
115 async=True, | |
116 ) | |
117 host.bridge.addMethod( | |
118 "mbSubscribeToMany", | |
119 ".plugin", | |
120 in_sign="sass", | |
121 out_sign="s", | |
122 method=self._mbSubscribeToMany, | |
123 ) | |
124 host.bridge.addMethod( | |
125 "mbGetFromManyRTResult", | |
126 ".plugin", | |
127 in_sign="ss", | |
128 out_sign="(ua(sssaa{ss}a{ss}))", | |
129 method=self._mbGetFromManyRTResult, | |
130 async=True, | |
131 ) | |
132 host.bridge.addMethod( | |
133 "mbGetFromMany", | |
134 ".plugin", | |
135 in_sign="sasia{ss}s", | |
136 out_sign="s", | |
137 method=self._mbGetFromMany, | |
138 ) | |
139 host.bridge.addMethod( | |
140 "mbGetFromManyWithCommentsRTResult", | |
141 ".plugin", | |
142 in_sign="ss", | |
143 out_sign="(ua(sssa(a{ss}a(sssaa{ss}a{ss}))a{ss}))", | |
144 method=self._mbGetFromManyWithCommentsRTResult, | |
145 async=True, | |
146 ) | |
147 host.bridge.addMethod( | |
148 "mbGetFromManyWithComments", | |
149 ".plugin", | |
150 in_sign="sasiia{ss}a{ss}s", | |
151 out_sign="s", | |
152 method=self._mbGetFromManyWithComments, | |
153 ) | |
105 | 154 |
106 def getHandler(self, client): | 155 def getHandler(self, client): |
107 return XEP_0277_handler() | 156 return XEP_0277_handler() |
108 | 157 |
109 def _checkFeaturesCb(self, available): | 158 def _checkFeaturesCb(self, available): |
110 return {'available': C.BOOL_TRUE} | 159 return {"available": C.BOOL_TRUE} |
111 | 160 |
112 def _checkFeaturesEb(self, fail): | 161 def _checkFeaturesEb(self, fail): |
113 return {'available': C.BOOL_FALSE} | 162 return {"available": C.BOOL_FALSE} |
114 | 163 |
115 def getFeatures(self, profile): | 164 def getFeatures(self, profile): |
116 client = self.host.getClient(profile) | 165 client = self.host.getClient(profile) |
117 d = self.host.checkFeatures(client, [], identity=('pubsub', 'pep')) | 166 d = self.host.checkFeatures(client, [], identity=("pubsub", "pep")) |
118 d.addCallbacks(self._checkFeaturesCb, self._checkFeaturesEb) | 167 d.addCallbacks(self._checkFeaturesCb, self._checkFeaturesEb) |
119 return d | 168 return d |
120 | 169 |
121 ## plugin management methods ## | 170 ## plugin management methods ## |
122 | 171 |
123 def _itemsReceived(self, client, itemsEvent): | 172 def _itemsReceived(self, client, itemsEvent): |
124 """Callback which manage items notifications (publish + retract)""" | 173 """Callback which manage items notifications (publish + retract)""" |
174 | |
125 def manageItem(data, event): | 175 def manageItem(data, event): |
126 self.host.bridge.psEvent(C.PS_MICROBLOG, itemsEvent.sender.full(), itemsEvent.nodeIdentifier, event, data, client.profile) | 176 self.host.bridge.psEvent( |
177 C.PS_MICROBLOG, | |
178 itemsEvent.sender.full(), | |
179 itemsEvent.nodeIdentifier, | |
180 event, | |
181 data, | |
182 client.profile, | |
183 ) | |
127 | 184 |
128 for item in itemsEvent.items: | 185 for item in itemsEvent.items: |
129 if item.name == C.PS_ITEM: | 186 if item.name == C.PS_ITEM: |
130 self.item2mbdata(item).addCallbacks(manageItem, lambda failure: None, (C.PS_PUBLISH,)) | 187 self.item2mbdata(item).addCallbacks( |
188 manageItem, lambda failure: None, (C.PS_PUBLISH,) | |
189 ) | |
131 elif item.name == C.PS_RETRACT: | 190 elif item.name == C.PS_RETRACT: |
132 manageItem({'id': item['id']}, C.PS_RETRACT) | 191 manageItem({"id": item["id"]}, C.PS_RETRACT) |
133 else: | 192 else: |
134 raise exceptions.InternalError("Invalid event value") | 193 raise exceptions.InternalError("Invalid event value") |
135 | |
136 | 194 |
137 ## data/item transformation ## | 195 ## data/item transformation ## |
138 | 196 |
139 @defer.inlineCallbacks | 197 @defer.inlineCallbacks |
140 def item2mbdata(self, item_elt): | 198 def item2mbdata(self, item_elt): |
154 @raise exceptions.DataError: the key already exists | 212 @raise exceptions.DataError: the key already exists |
155 (not raised if increment is True) | 213 (not raised if increment is True) |
156 """ | 214 """ |
157 if key in microblog_data: | 215 if key in microblog_data: |
158 if not increment: | 216 if not increment: |
159 raise failure.Failure(exceptions.DataError("key {} is already present for item {}").format(key, item_elt['id'])) | 217 raise failure.Failure( |
218 exceptions.DataError( | |
219 "key {} is already present for item {}" | |
220 ).format(key, item_elt["id"]) | |
221 ) | |
160 else: | 222 else: |
161 idx=1 # the idx 0 is the key without suffix | 223 idx = 1 # the idx 0 is the key without suffix |
162 fmt = "{}#{}" | 224 fmt = "{}#{}" |
163 new_key = fmt.format(key, idx) | 225 new_key = fmt.format(key, idx) |
164 while new_key in microblog_data: | 226 while new_key in microblog_data: |
165 idx+=1 | 227 idx += 1 |
166 new_key = fmt.format(key, idx) | 228 new_key = fmt.format(key, idx) |
167 key = new_key | 229 key = new_key |
168 return key | 230 return key |
169 | 231 |
170 @defer.inlineCallbacks | 232 @defer.inlineCallbacks |
171 def parseElement(elem): | 233 def parseElement(elem): |
172 """Parse title/content elements and fill microblog_data accordingly""" | 234 """Parse title/content elements and fill microblog_data accordingly""" |
173 type_ = elem.getAttribute('type') | 235 type_ = elem.getAttribute("type") |
174 if type_ == 'xhtml': | 236 if type_ == "xhtml": |
175 data_elt = elem.firstChildElement() | 237 data_elt = elem.firstChildElement() |
176 if data_elt is None: | 238 if data_elt is None: |
177 raise failure.Failure(exceptions.DataError(u"XHML content not wrapped in a <div/> element, this is not standard !")) | 239 raise failure.Failure( |
240 exceptions.DataError( | |
241 u"XHML content not wrapped in a <div/> element, this is not standard !" | |
242 ) | |
243 ) | |
178 if data_elt.uri != C.NS_XHTML: | 244 if data_elt.uri != C.NS_XHTML: |
179 raise failure.Failure(exceptions.DataError(_('Content of type XHTML must declare its namespace!'))) | 245 raise failure.Failure( |
180 key = check_conflict(u'{}_xhtml'.format(elem.name)) | 246 exceptions.DataError( |
247 _("Content of type XHTML must declare its namespace!") | |
248 ) | |
249 ) | |
250 key = check_conflict(u"{}_xhtml".format(elem.name)) | |
181 data = data_elt.toXml() | 251 data = data_elt.toXml() |
182 microblog_data[key] = yield self.host.plugins["TEXT-SYNTAXES"].cleanXHTML(data) | 252 microblog_data[key] = yield self.host.plugins["TEXT-SYNTAXES"].cleanXHTML( |
253 data | |
254 ) | |
183 else: | 255 else: |
184 key = check_conflict(elem.name) | 256 key = check_conflict(elem.name) |
185 microblog_data[key] = unicode(elem) | 257 microblog_data[key] = unicode(elem) |
186 | 258 |
187 | 259 id_ = item_elt.getAttribute("id", "") # there can be no id for transient nodes |
188 id_ = item_elt.getAttribute('id', '') # there can be no id for transient nodes | 260 microblog_data[u"id"] = id_ |
189 microblog_data[u'id'] = id_ | |
190 if item_elt.uri not in (pubsub.NS_PUBSUB, NS_PUBSUB_EVENT): | 261 if item_elt.uri not in (pubsub.NS_PUBSUB, NS_PUBSUB_EVENT): |
191 msg = u"Unsupported namespace {ns} in pubsub item {id_}".format(ns=item_elt.uri, id_=id_) | 262 msg = u"Unsupported namespace {ns} in pubsub item {id_}".format( |
263 ns=item_elt.uri, id_=id_ | |
264 ) | |
192 log.warning(msg) | 265 log.warning(msg) |
193 raise failure.Failure(exceptions.DataError(msg)) | 266 raise failure.Failure(exceptions.DataError(msg)) |
194 | 267 |
195 try: | 268 try: |
196 entry_elt = item_elt.elements(NS_ATOM, 'entry').next() | 269 entry_elt = item_elt.elements(NS_ATOM, "entry").next() |
197 except StopIteration: | 270 except StopIteration: |
198 msg = u'No atom entry found in the pubsub item {}'.format(id_) | 271 msg = u"No atom entry found in the pubsub item {}".format(id_) |
199 raise failure.Failure(exceptions.DataError(msg)) | 272 raise failure.Failure(exceptions.DataError(msg)) |
200 | 273 |
201 # language | 274 # language |
202 try: | 275 try: |
203 microblog_data[u'language'] = entry_elt[(C.NS_XML, u'lang')].strip() | 276 microblog_data[u"language"] = entry_elt[(C.NS_XML, u"lang")].strip() |
204 except KeyError: | 277 except KeyError: |
205 pass | 278 pass |
206 | 279 |
207 # atom:id | 280 # atom:id |
208 try: | 281 try: |
209 id_elt = entry_elt.elements(NS_ATOM, 'id').next() | 282 id_elt = entry_elt.elements(NS_ATOM, "id").next() |
210 except StopIteration: | 283 except StopIteration: |
211 msg = u'No atom id found in the pubsub item {}, this is not standard !'.format(id_) | 284 msg = u"No atom id found in the pubsub item {}, this is not standard !".format( |
285 id_ | |
286 ) | |
212 log.warning(msg) | 287 log.warning(msg) |
213 microblog_data[u'atom_id'] = "" | 288 microblog_data[u"atom_id"] = "" |
214 else: | 289 else: |
215 microblog_data[u'atom_id'] = unicode(id_elt) | 290 microblog_data[u"atom_id"] = unicode(id_elt) |
216 | 291 |
217 # title/content(s) | 292 # title/content(s) |
218 | 293 |
219 # FIXME: ATOM and XEP-0277 only allow 1 <title/> element | 294 # FIXME: ATOM and XEP-0277 only allow 1 <title/> element |
220 # but in the wild we have some blogs with several ones | 295 # but in the wild we have some blogs with several ones |
223 # try: | 298 # try: |
224 # title_elt = entry_elt.elements(NS_ATOM, 'title').next() | 299 # title_elt = entry_elt.elements(NS_ATOM, 'title').next() |
225 # except StopIteration: | 300 # except StopIteration: |
226 # msg = u'No atom title found in the pubsub item {}'.format(id_) | 301 # msg = u'No atom title found in the pubsub item {}'.format(id_) |
227 # raise failure.Failure(exceptions.DataError(msg)) | 302 # raise failure.Failure(exceptions.DataError(msg)) |
228 title_elts = list(entry_elt.elements(NS_ATOM, 'title')) | 303 title_elts = list(entry_elt.elements(NS_ATOM, "title")) |
229 if not title_elts: | 304 if not title_elts: |
230 msg = u'No atom title found in the pubsub item {}'.format(id_) | 305 msg = u"No atom title found in the pubsub item {}".format(id_) |
231 raise failure.Failure(exceptions.DataError(msg)) | 306 raise failure.Failure(exceptions.DataError(msg)) |
232 for title_elt in title_elts: | 307 for title_elt in title_elts: |
233 yield parseElement(title_elt) | 308 yield parseElement(title_elt) |
234 | 309 |
235 # FIXME: as for <title/>, Atom only authorise at most 1 content | 310 # FIXME: as for <title/>, Atom only authorise at most 1 content |
236 # but XEP-0277 allows several ones. So for no we handle as | 311 # but XEP-0277 allows several ones. So for no we handle as |
237 # if more than one can be present | 312 # if more than one can be present |
238 for content_elt in entry_elt.elements(NS_ATOM, 'content'): | 313 for content_elt in entry_elt.elements(NS_ATOM, "content"): |
239 yield parseElement(content_elt) | 314 yield parseElement(content_elt) |
240 | 315 |
241 # we check that text content is present | 316 # we check that text content is present |
242 for key in ('title', 'content'): | 317 for key in ("title", "content"): |
243 if key not in microblog_data and ('{}_xhtml'.format(key)) in microblog_data: | 318 if key not in microblog_data and ("{}_xhtml".format(key)) in microblog_data: |
244 log.warning(u"item {id_} provide a {key}_xhtml data but not a text one".format(id_=id_, key=key)) | 319 log.warning( |
320 u"item {id_} provide a {key}_xhtml data but not a text one".format( | |
321 id_=id_, key=key | |
322 ) | |
323 ) | |
245 # ... and do the conversion if it's not | 324 # ... and do the conversion if it's not |
246 microblog_data[key] = yield self.host.plugins["TEXT-SYNTAXES"].\ | 325 microblog_data[key] = yield self.host.plugins["TEXT-SYNTAXES"].convert( |
247 convert(microblog_data[u'{}_xhtml'.format(key)], | 326 microblog_data[u"{}_xhtml".format(key)], |
248 self.host.plugins["TEXT-SYNTAXES"].SYNTAX_XHTML, | 327 self.host.plugins["TEXT-SYNTAXES"].SYNTAX_XHTML, |
249 self.host.plugins["TEXT-SYNTAXES"].SYNTAX_TEXT, | 328 self.host.plugins["TEXT-SYNTAXES"].SYNTAX_TEXT, |
250 False) | 329 False, |
251 | 330 ) |
252 if 'content' not in microblog_data: | 331 |
332 if "content" not in microblog_data: | |
253 # use the atom title data as the microblog body content | 333 # use the atom title data as the microblog body content |
254 microblog_data[u'content'] = microblog_data[u'title'] | 334 microblog_data[u"content"] = microblog_data[u"title"] |
255 del microblog_data[u'title'] | 335 del microblog_data[u"title"] |
256 if 'title_xhtml' in microblog_data: | 336 if "title_xhtml" in microblog_data: |
257 microblog_data[u'content_xhtml'] = microblog_data[u'title_xhtml'] | 337 microblog_data[u"content_xhtml"] = microblog_data[u"title_xhtml"] |
258 del microblog_data[u'title_xhtml'] | 338 del microblog_data[u"title_xhtml"] |
259 | 339 |
260 # published/updated dates | 340 # published/updated dates |
261 try: | 341 try: |
262 updated_elt = entry_elt.elements(NS_ATOM, 'updated').next() | 342 updated_elt = entry_elt.elements(NS_ATOM, "updated").next() |
263 except StopIteration: | 343 except StopIteration: |
264 msg = u'No atom updated element found in the pubsub item {}'.format(id_) | 344 msg = u"No atom updated element found in the pubsub item {}".format(id_) |
265 raise failure.Failure(exceptions.DataError(msg)) | 345 raise failure.Failure(exceptions.DataError(msg)) |
266 microblog_data[u'updated'] = unicode(calendar.timegm(dateutil.parser.parse(unicode(updated_elt)).utctimetuple())) | 346 microblog_data[u"updated"] = unicode( |
267 try: | 347 calendar.timegm(dateutil.parser.parse(unicode(updated_elt)).utctimetuple()) |
268 published_elt = entry_elt.elements(NS_ATOM, 'published').next() | 348 ) |
349 try: | |
350 published_elt = entry_elt.elements(NS_ATOM, "published").next() | |
269 except StopIteration: | 351 except StopIteration: |
270 microblog_data[u'published'] = microblog_data[u'updated'] | 352 microblog_data[u"published"] = microblog_data[u"updated"] |
271 else: | 353 else: |
272 microblog_data[u'published'] = unicode(calendar.timegm(dateutil.parser.parse(unicode(published_elt)).utctimetuple())) | 354 microblog_data[u"published"] = unicode( |
355 calendar.timegm( | |
356 dateutil.parser.parse(unicode(published_elt)).utctimetuple() | |
357 ) | |
358 ) | |
273 | 359 |
274 # links | 360 # links |
275 for link_elt in entry_elt.elements(NS_ATOM, 'link'): | 361 for link_elt in entry_elt.elements(NS_ATOM, "link"): |
276 if link_elt.getAttribute('rel') == 'replies' and link_elt.getAttribute('title') == 'comments': | 362 if ( |
277 key = check_conflict('comments', True) | 363 link_elt.getAttribute("rel") == "replies" |
278 microblog_data[key] = link_elt['href'] | 364 and link_elt.getAttribute("title") == "comments" |
365 ): | |
366 key = check_conflict("comments", True) | |
367 microblog_data[key] = link_elt["href"] | |
279 try: | 368 try: |
280 service, node = self.parseCommentUrl(microblog_data[key]) | 369 service, node = self.parseCommentUrl(microblog_data[key]) |
281 except: | 370 except: |
282 log.warning(u"Can't parse url {}".format(microblog_data[key])) | 371 log.warning(u"Can't parse url {}".format(microblog_data[key])) |
283 del microblog_data[key] | 372 del microblog_data[key] |
284 else: | 373 else: |
285 microblog_data[u'{}_service'.format(key)] = service.full() | 374 microblog_data[u"{}_service".format(key)] = service.full() |
286 microblog_data[u'{}_node'.format(key)] = node | 375 microblog_data[u"{}_node".format(key)] = node |
287 else: | 376 else: |
288 rel = link_elt.getAttribute('rel','') | 377 rel = link_elt.getAttribute("rel", "") |
289 title = link_elt.getAttribute('title','') | 378 title = link_elt.getAttribute("title", "") |
290 href = link_elt.getAttribute('href','') | 379 href = link_elt.getAttribute("href", "") |
291 log.warning(u"Unmanaged link element: rel={rel} title={title} href={href}".format(rel=rel, title=title, href=href)) | 380 log.warning( |
381 u"Unmanaged link element: rel={rel} title={title} href={href}".format( | |
382 rel=rel, title=title, href=href | |
383 ) | |
384 ) | |
292 | 385 |
293 # author | 386 # author |
294 try: | 387 try: |
295 author_elt = entry_elt.elements(NS_ATOM, 'author').next() | 388 author_elt = entry_elt.elements(NS_ATOM, "author").next() |
296 except StopIteration: | 389 except StopIteration: |
297 log.debug(u"Can't find author element in item {}".format(id_)) | 390 log.debug(u"Can't find author element in item {}".format(id_)) |
298 else: | 391 else: |
299 publisher = item_elt.getAttribute("publisher") | 392 publisher = item_elt.getAttribute("publisher") |
300 # name | 393 # name |
301 try: | 394 try: |
302 name_elt = author_elt.elements(NS_ATOM, 'name').next() | 395 name_elt = author_elt.elements(NS_ATOM, "name").next() |
303 except StopIteration: | 396 except StopIteration: |
304 log.warning(u"No name element found in author element of item {}".format(id_)) | 397 log.warning( |
398 u"No name element found in author element of item {}".format(id_) | |
399 ) | |
305 else: | 400 else: |
306 microblog_data[u'author'] = unicode(name_elt) | 401 microblog_data[u"author"] = unicode(name_elt) |
307 # uri | 402 # uri |
308 try: | 403 try: |
309 uri_elt = author_elt.elements(NS_ATOM, 'uri').next() | 404 uri_elt = author_elt.elements(NS_ATOM, "uri").next() |
310 except StopIteration: | 405 except StopIteration: |
311 log.debug(u"No uri element found in author element of item {}".format(id_)) | 406 log.debug( |
407 u"No uri element found in author element of item {}".format(id_) | |
408 ) | |
312 if publisher: | 409 if publisher: |
313 microblog_data[u'author_jid'] = publisher | 410 microblog_data[u"author_jid"] = publisher |
314 else: | 411 else: |
315 uri = unicode(uri_elt) | 412 uri = unicode(uri_elt) |
316 if uri.startswith("xmpp:"): | 413 if uri.startswith("xmpp:"): |
317 uri = uri[5:] | 414 uri = uri[5:] |
318 microblog_data[u'author_jid'] = uri | 415 microblog_data[u"author_jid"] = uri |
319 else: | 416 else: |
320 microblog_data[u'author_jid'] = item_elt.getAttribute(u"publisher") or "" | 417 microblog_data[u"author_jid"] = ( |
418 item_elt.getAttribute(u"publisher") or "" | |
419 ) | |
321 | 420 |
322 if not publisher: | 421 if not publisher: |
323 log.debug(u"No publisher attribute, we can't verify author jid") | 422 log.debug(u"No publisher attribute, we can't verify author jid") |
324 microblog_data[u'author_jid_verified'] = C.BOOL_FALSE | 423 microblog_data[u"author_jid_verified"] = C.BOOL_FALSE |
325 elif jid.JID(publisher).userhostJID() == jid.JID(uri).userhostJID(): | 424 elif jid.JID(publisher).userhostJID() == jid.JID(uri).userhostJID(): |
326 microblog_data[u'author_jid_verified'] = C.BOOL_TRUE | 425 microblog_data[u"author_jid_verified"] = C.BOOL_TRUE |
327 else: | 426 else: |
328 log.warning(u"item atom:uri differ from publisher attribute, spoofing attempt ? atom:uri = {} publisher = {}".format(uri, item_elt.getAttribute("publisher"))) | 427 log.warning( |
329 microblog_data[u'author_jid_verified'] = C.BOOL_FALSE | 428 u"item atom:uri differ from publisher attribute, spoofing attempt ? atom:uri = {} publisher = {}".format( |
429 uri, item_elt.getAttribute("publisher") | |
430 ) | |
431 ) | |
432 microblog_data[u"author_jid_verified"] = C.BOOL_FALSE | |
330 # email | 433 # email |
331 try: | 434 try: |
332 email_elt = author_elt.elements(NS_ATOM, 'email').next() | 435 email_elt = author_elt.elements(NS_ATOM, "email").next() |
333 except StopIteration: | 436 except StopIteration: |
334 pass | 437 pass |
335 else: | 438 else: |
336 microblog_data[u'author_email'] = unicode(email_elt) | 439 microblog_data[u"author_email"] = unicode(email_elt) |
337 | 440 |
338 # categories | 441 # categories |
339 categories = (category_elt.getAttribute('term','') for category_elt in entry_elt.elements(NS_ATOM, 'category')) | 442 categories = ( |
340 data_format.iter2dict('tag', categories, microblog_data) | 443 category_elt.getAttribute("term", "") |
444 for category_elt in entry_elt.elements(NS_ATOM, "category") | |
445 ) | |
446 data_format.iter2dict("tag", categories, microblog_data) | |
341 | 447 |
342 ## the trigger ## | 448 ## the trigger ## |
343 # if other plugins have things to add or change | 449 # if other plugins have things to add or change |
344 yield self.host.trigger.point("XEP-0277_item2data", item_elt, entry_elt, microblog_data) | 450 yield self.host.trigger.point( |
451 "XEP-0277_item2data", item_elt, entry_elt, microblog_data | |
452 ) | |
345 | 453 |
346 defer.returnValue(microblog_data) | 454 defer.returnValue(microblog_data) |
347 | 455 |
348 @defer.inlineCallbacks | 456 @defer.inlineCallbacks |
349 def data2entry(self, client, data, item_id, service, node): | 457 def data2entry(self, client, data, item_id, service, node): |
355 Needed to construct Atom id | 463 Needed to construct Atom id |
356 @param node(unicode): pubsub node where the item is sent | 464 @param node(unicode): pubsub node where the item is sent |
357 Needed to construct Atom id | 465 Needed to construct Atom id |
358 @return: deferred which fire domish.Element | 466 @return: deferred which fire domish.Element |
359 """ | 467 """ |
360 entry_elt = domish.Element((NS_ATOM, 'entry')) | 468 entry_elt = domish.Element((NS_ATOM, "entry")) |
361 | 469 |
362 ##Â language ## | 470 ##Â language ## |
363 if u'language' in data: | 471 if u"language" in data: |
364 entry_elt[(C.NS_XML, u'lang')] = data[u'language'].strip() | 472 entry_elt[(C.NS_XML, u"lang")] = data[u"language"].strip() |
365 | 473 |
366 ## content and title ## | 474 ## content and title ## |
367 synt = self.host.plugins["TEXT-SYNTAXES"] | 475 synt = self.host.plugins["TEXT-SYNTAXES"] |
368 | 476 |
369 for elem_name in ('title', 'content'): | 477 for elem_name in ("title", "content"): |
370 for type_ in ['', '_rich', '_xhtml']: | 478 for type_ in ["", "_rich", "_xhtml"]: |
371 attr = "{}{}".format(elem_name, type_) | 479 attr = "{}{}".format(elem_name, type_) |
372 if attr in data: | 480 if attr in data: |
373 elem = entry_elt.addElement(elem_name) | 481 elem = entry_elt.addElement(elem_name) |
374 if type_: | 482 if type_: |
375 if type_ == '_rich': # convert input from current syntax to XHTML | 483 if type_ == "_rich": # convert input from current syntax to XHTML |
376 xml_content = yield synt.convert(data[attr], synt.getCurrentSyntax(client.profile), "XHTML") | 484 xml_content = yield synt.convert( |
377 if '{}_xhtml'.format(elem_name) in data: | 485 data[attr], synt.getCurrentSyntax(client.profile), "XHTML" |
378 raise failure.Failure(exceptions.DataError(_("Can't have xhtml and rich content at the same time"))) | 486 ) |
487 if "{}_xhtml".format(elem_name) in data: | |
488 raise failure.Failure( | |
489 exceptions.DataError( | |
490 _( | |
491 "Can't have xhtml and rich content at the same time" | |
492 ) | |
493 ) | |
494 ) | |
379 else: | 495 else: |
380 xml_content = data[attr] | 496 xml_content = data[attr] |
381 | 497 |
382 div_elt = xml_tools.ElementParser()(xml_content, namespace=C.NS_XHTML) | 498 div_elt = xml_tools.ElementParser()( |
383 if div_elt.name != 'div' or div_elt.uri != C.NS_XHTML or div_elt.attributes: | 499 xml_content, namespace=C.NS_XHTML |
500 ) | |
501 if ( | |
502 div_elt.name != "div" | |
503 or div_elt.uri != C.NS_XHTML | |
504 or div_elt.attributes | |
505 ): | |
384 # we need a wrapping <div/> at the top with XHTML namespace | 506 # we need a wrapping <div/> at the top with XHTML namespace |
385 wrap_div_elt = domish.Element((C.NS_XHTML, 'div')) | 507 wrap_div_elt = domish.Element((C.NS_XHTML, "div")) |
386 wrap_div_elt.addChild(div_elt) | 508 wrap_div_elt.addChild(div_elt) |
387 div_elt = wrap_div_elt | 509 div_elt = wrap_div_elt |
388 elem.addChild(div_elt) | 510 elem.addChild(div_elt) |
389 elem['type'] = 'xhtml' | 511 elem["type"] = "xhtml" |
390 if elem_name not in data: | 512 if elem_name not in data: |
391 # there is raw text content, which is mandatory | 513 # there is raw text content, which is mandatory |
392 # so we create one from xhtml content | 514 # so we create one from xhtml content |
393 elem_txt = entry_elt.addElement(elem_name) | 515 elem_txt = entry_elt.addElement(elem_name) |
394 text_content = yield self.host.plugins["TEXT-SYNTAXES"].convert(xml_content, | 516 text_content = yield self.host.plugins[ |
517 "TEXT-SYNTAXES" | |
518 ].convert( | |
519 xml_content, | |
395 self.host.plugins["TEXT-SYNTAXES"].SYNTAX_XHTML, | 520 self.host.plugins["TEXT-SYNTAXES"].SYNTAX_XHTML, |
396 self.host.plugins["TEXT-SYNTAXES"].SYNTAX_TEXT, | 521 self.host.plugins["TEXT-SYNTAXES"].SYNTAX_TEXT, |
397 False) | 522 False, |
523 ) | |
398 elem_txt.addContent(text_content) | 524 elem_txt.addContent(text_content) |
399 elem_txt['type'] = 'text' | 525 elem_txt["type"] = "text" |
400 | 526 |
401 else: # raw text only needs to be escaped to get HTML-safe sequence | 527 else: # raw text only needs to be escaped to get HTML-safe sequence |
402 elem.addContent(data[attr]) | 528 elem.addContent(data[attr]) |
403 elem['type'] = 'text' | 529 elem["type"] = "text" |
404 | 530 |
405 try: | 531 try: |
406 entry_elt.elements(NS_ATOM, 'title').next() | 532 entry_elt.elements(NS_ATOM, "title").next() |
407 except StopIteration: | 533 except StopIteration: |
408 # we have no title element which is mandatory | 534 # we have no title element which is mandatory |
409 # so we transform content element to title | 535 # so we transform content element to title |
410 elems = list(entry_elt.elements(NS_ATOM, 'content')) | 536 elems = list(entry_elt.elements(NS_ATOM, "content")) |
411 if not elems: | 537 if not elems: |
412 raise exceptions.DataError("There must be at least one content or title element") | 538 raise exceptions.DataError( |
539 "There must be at least one content or title element" | |
540 ) | |
413 for elem in elems: | 541 for elem in elems: |
414 elem.name = 'title' | 542 elem.name = "title" |
415 | 543 |
416 ## author ## | 544 ## author ## |
417 author_elt = entry_elt.addElement('author') | 545 author_elt = entry_elt.addElement("author") |
418 try: | 546 try: |
419 author_name = data['author'] | 547 author_name = data["author"] |
420 except KeyError: | 548 except KeyError: |
421 # FIXME: must use better name | 549 # FIXME: must use better name |
422 author_name = client.jid.user | 550 author_name = client.jid.user |
423 author_elt.addElement('name', content=author_name) | 551 author_elt.addElement("name", content=author_name) |
424 | 552 |
425 try: | 553 try: |
426 author_jid_s = data['author_jid'] | 554 author_jid_s = data["author_jid"] |
427 except KeyError: | 555 except KeyError: |
428 author_jid_s = client.jid.userhost() | 556 author_jid_s = client.jid.userhost() |
429 author_elt.addElement('uri', content="xmpp:{}".format(author_jid_s)) | 557 author_elt.addElement("uri", content="xmpp:{}".format(author_jid_s)) |
430 | 558 |
431 try: | 559 try: |
432 author_jid_s = data['author_email'] | 560 author_jid_s = data["author_email"] |
433 except KeyError: | 561 except KeyError: |
434 pass | 562 pass |
435 | 563 |
436 ## published/updated time ## | 564 ## published/updated time ## |
437 current_time = time.time() | 565 current_time = time.time() |
438 entry_elt.addElement('updated', | 566 entry_elt.addElement( |
439 content = utils.xmpp_date(float(data.get('updated', current_time)))) | 567 "updated", content=utils.xmpp_date(float(data.get("updated", current_time))) |
440 entry_elt.addElement('published', | 568 ) |
441 content = utils.xmpp_date(float(data.get('published', current_time)))) | 569 entry_elt.addElement( |
570 "published", | |
571 content=utils.xmpp_date(float(data.get("published", current_time))), | |
572 ) | |
442 | 573 |
443 ## categories ## | 574 ## categories ## |
444 for tag in data_format.dict2iter("tag", data): | 575 for tag in data_format.dict2iter("tag", data): |
445 category_elt = entry_elt.addElement("category") | 576 category_elt = entry_elt.addElement("category") |
446 category_elt['term'] = tag | 577 category_elt["term"] = tag |
447 | 578 |
448 ## id ## | 579 ## id ## |
449 entry_id = data.get('id', xmpp_uri.buildXMPPUri( | 580 entry_id = data.get( |
450 u'pubsub', | 581 "id", |
451 path=service.full() if service is not None else client.jid.userhost(), | 582 xmpp_uri.buildXMPPUri( |
452 node=node, | 583 u"pubsub", |
453 item=item_id)) | 584 path=service.full() if service is not None else client.jid.userhost(), |
454 entry_elt.addElement('id', content=entry_id) # | 585 node=node, |
586 item=item_id, | |
587 ), | |
588 ) | |
589 entry_elt.addElement("id", content=entry_id) # | |
455 | 590 |
456 ## comments ## | 591 ## comments ## |
457 if 'comments' in data: | 592 if "comments" in data: |
458 link_elt = entry_elt.addElement('link') | 593 link_elt = entry_elt.addElement("link") |
459 link_elt['href'] = data['comments'] | 594 link_elt["href"] = data["comments"] |
460 link_elt['rel'] = 'replies' | 595 link_elt["rel"] = "replies" |
461 link_elt['title'] = 'comments' | 596 link_elt["title"] = "comments" |
462 | 597 |
463 ## final item building ## | 598 ## final item building ## |
464 item_elt = pubsub.Item(id=item_id, payload=entry_elt) | 599 item_elt = pubsub.Item(id=item_id, payload=entry_elt) |
465 | 600 |
466 ## the trigger ## | 601 ## the trigger ## |
467 # if other plugins have things to add or change | 602 # if other plugins have things to add or change |
468 yield self.host.trigger.point("XEP-0277_data2entry", client, data, entry_elt, item_elt) | 603 yield self.host.trigger.point( |
604 "XEP-0277_data2entry", client, data, entry_elt, item_elt | |
605 ) | |
469 | 606 |
470 defer.returnValue(item_elt) | 607 defer.returnValue(item_elt) |
471 | 608 |
472 ## publish ## | 609 ## publish ## |
473 | 610 |
487 """ | 624 """ |
488 if parent_service is not None: | 625 if parent_service is not None: |
489 if parent_service.user: | 626 if parent_service.user: |
490 # we are on a PEP | 627 # we are on a PEP |
491 if parent_service.host == client.jid.host: | 628 if parent_service.host == client.jid.host: |
492 # it's our server, we use already found client.pubsub_service below | 629 # it's our server, we use already found client.pubsub_service below |
493 pass | 630 pass |
494 else: | 631 else: |
495 # other server, let's try to find a non PEP service there | 632 # other server, let's try to find a non PEP service there |
496 d = self.host.findServiceEntity(client, "pubsub", "service", parent_service) | 633 d = self.host.findServiceEntity( |
634 client, "pubsub", "service", parent_service | |
635 ) | |
497 d.addCallback(lambda entity: entity or parent_service) | 636 d.addCallback(lambda entity: entity or parent_service) |
498 else: | 637 else: |
499 # parent is already on a normal Pubsub service, we re-use it | 638 # parent is already on a normal Pubsub service, we re-use it |
500 return defer.succeed(parent_service) | 639 return defer.succeed(parent_service) |
501 | 640 |
502 return defer.succeed(client.pubsub_service if client.pubsub_service is not None else parent_service) | 641 return defer.succeed( |
642 client.pubsub_service if client.pubsub_service is not None else parent_service | |
643 ) | |
503 | 644 |
@defer.inlineCallbacks
def _manageComments(self, client, mb_data, service, node, item_id, access=None):
    """Check comments keys in mb_data and create comments node if necessary

    @param client: client session
    @param mb_data(dict): microblog data, modified in place
        ("allow_comments" is consumed, "comments" may be added)
    @param service(jid.JID, None): pubsub service of the parent item
    @param node(unicode): pubsub node of the parent item
    @param item_id(unicode): id of the parent item
    @param access(unicode, None): access model of the comments node,
        None to use same access model as parent item
    """
    # FIXME: if 'comments' already exists in mb_data, it is not used to create the Node
    allow_comments = C.bool(mb_data.pop("allow_comments", "false"))
    if not allow_comments:
        if "comments" in mb_data:
            log.warning(
                u"comments are not allowed but there is already a comments node, it may be lost: {uri}".format(
                    uri=mb_data["comments"]
                )
            )
            del mb_data["comments"]
        return

    if access is None:
        # TODO: cache access models per service/node
        parent_node_config = yield self._p.getConfiguration(client, service, node)
        access = parent_node_config.get(self._p.OPT_ACCESS_MODEL, self._p.ACCESS_OPEN)

    options = {
        self._p.OPT_ACCESS_MODEL: access,
        self._p.OPT_PERSIST_ITEMS: 1,
        self._p.OPT_MAX_ITEMS: -1,
        self._p.OPT_DELIVER_PAYLOADS: 1,
        self._p.OPT_SEND_ITEM_SUBSCRIBE: 1,
        # FIXME: would it make sense to restrict publish model to subscribers?
        self._p.OPT_PUBLISH_MODEL: self._p.ACCESS_OPEN,
    }

    # if other plugins need to change the options
    yield self.host.trigger.point("XEP-0277_comments", client, mb_data, options)

    try:
        comments_node = mb_data["comments_node"]
    except KeyError:
        comments_node = self.getCommentsNode(item_id)
    else:
        if not comments_node:
            raise exceptions.DataError(
                u"if comments_node is present, it must not be empty"
            )

    try:
        comments_service = jid.JID(mb_data["comments_service"])
    except KeyError:
        comments_service = yield self.getCommentsService(client, service)

    try:
        yield self._p.createNode(client, comments_service, comments_node, options)
    except error.StanzaError as e:
        if e.condition == "conflict":
            log.info(
                u"node {} already exists on service {}".format(
                    comments_node, comments_service
                )
            )
        else:
            raise e
    else:
        if access == self._p.ACCESS_WHITELIST:
            # for whitelist access we need to copy affiliations from parent item
            comments_affiliations = yield self._p.getNodeAffiliations(
                client, service, node
            )
            # …except for "member", that we transform to publisher
            # because we want members to be able to write to comments
            for jid_, affiliation in comments_affiliations.items():
                if affiliation == "member":
                    # FIXED: was "==" (a no-op comparison), so member
                    # affiliations were never actually promoted to publisher
                    comments_affiliations[jid_] = "publisher"

            yield self._p.setNodeAffiliations(
                client, comments_service, comments_node, comments_affiliations
            )

    if comments_service is None:
        comments_service = client.jid.userhostJID()

    if "comments" in mb_data:
        if not mb_data["comments"]:
            raise exceptions.DataError(u"if comments is present, it must not be empty")
        if "comments_node" in mb_data or "comments_service" in mb_data:
            raise exceptions.DataError(
                u"You can't use comments_service/comments_node and comments at the same time"
            )
    else:
        mb_data["comments"] = self._p.getNodeURI(comments_service, comments_node)
584 | 744 |
585 def _mbSend(self, service, node, data, profile_key): | 745 def _mbSend(self, service, node, data, profile_key): |
586 service = jid.JID(service) if service else None | 746 service = jid.JID(service) if service else None |
587 node = node if node else NS_MICROBLOG | 747 node = node if node else NS_MICROBLOG |
588 client = self.host.getClient(profile_key) | 748 client = self.host.getClient(profile_key) |
602 # TODO: check that all data keys are used, this would avoid sending publicly a private message | 762 # TODO: check that all data keys are used, this would avoid sending publicly a private message |
603 # by accident (e.g. if group pluging is not loaded, and "grou*" key are not used) | 763 # by accident (e.g. if group pluging is not loaded, and "grou*" key are not used) |
604 if node is None: | 764 if node is None: |
605 node = NS_MICROBLOG | 765 node = NS_MICROBLOG |
606 | 766 |
607 item_id = data.get('id') or unicode(shortuuid.uuid()) | 767 item_id = data.get("id") or unicode(shortuuid.uuid()) |
608 | 768 |
609 try: | 769 try: |
610 yield self._manageComments(client, data, service, node, item_id, access=None) | 770 yield self._manageComments(client, data, service, node, item_id, access=None) |
611 except error.StanzaError: | 771 except error.StanzaError: |
612 log.warning(u"Can't create comments node for item {}".format(item_id)) | 772 log.warning(u"Can't create comments node for item {}".format(item_id)) |
616 | 776 |
617 ## retract ## | 777 ## retract ## |
618 | 778 |
def _mbRetract(self, service_jid_s, nodeIdentifier, itemIdentifier, profile_key):
    """Retract a microblog item, falling back to the default microblog node.

    Thin wrapper around self._p._retractItem: when nodeIdentifier is empty,
    NS_MICROBLOG is used instead.
    """
    target_node = nodeIdentifier or NS_MICROBLOG
    return self._p._retractItem(
        service_jid_s, target_node, itemIdentifier, True, profile_key
    )
622 | 788 |
623 ## get ## | 789 ## get ## |
624 | 790 |
625 def _mbGet(self, service='', node='', max_items=10, item_ids=None, extra_dict=None, profile_key=C.PROF_KEY_NONE): | 791 def _mbGet( |
792 self, | |
793 service="", | |
794 node="", | |
795 max_items=10, | |
796 item_ids=None, | |
797 extra_dict=None, | |
798 profile_key=C.PROF_KEY_NONE, | |
799 ): | |
626 """ | 800 """ |
627 @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit | 801 @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit |
628 @param item_ids (list[unicode]): list of item IDs | 802 @param item_ids (list[unicode]): list of item IDs |
629 """ | 803 """ |
630 client = self.host.getClient(profile_key) | 804 client = self.host.getClient(profile_key) |
631 service = jid.JID(service) if service else None | 805 service = jid.JID(service) if service else None |
632 max_items = None if max_items == C.NO_LIMIT else max_items | 806 max_items = None if max_items == C.NO_LIMIT else max_items |
633 extra = self._p.parseExtra(extra_dict) | 807 extra = self._p.parseExtra(extra_dict) |
634 return self.mbGet(client, service, node or None, max_items, item_ids, extra.rsm_request, extra.extra) | 808 return self.mbGet( |
635 | 809 client, |
810 service, | |
811 node or None, | |
812 max_items, | |
813 item_ids, | |
814 extra.rsm_request, | |
815 extra.extra, | |
816 ) | |
636 | 817 |
@defer.inlineCallbacks
def mbGet(self, client, service=None, node=None, max_items=10, item_ids=None, rsm_request=None, extra=None):
    """Get some microblogs

    @param service(jid.JID, None): jid of the publisher,
        None to get profile's PEP
    @param node(unicode, None): node to get (or microblog node if None)
    @param max_items(int, None): maximum number of items to retrieve
        -- TODO confirm against original elided doc
    @param item_ids(list[unicode], None): list of item IDs
    @return: a deferred couple with the list of items and metadatas.
    """
    target_node = NS_MICROBLOG if node is None else node
    items_data = yield self._p.getItems(
        client,
        service,
        target_node,
        max_items=max_items,
        item_ids=item_ids,
        rsm_request=rsm_request,
        extra=extra,
    )
    serialised = yield self._p.serItemsDataD(items_data, self.item2mbdata)
    defer.returnValue(serialised)
656 | 854 |
def parseCommentUrl(self, node_url):
    """Parse an XMPP URI pointing to a comments node.

    Takes an "xmpp:" URI with a "node" query parameter, as found in the
    href attribute of an entry's link element, and extracts the pubsub
    service and node.
    @return (tuple[jid.JID, unicode]): service and node
    @raise exceptions.DataError: the URI has no node
    """
    parsed = urlparse.urlparse(node_url, 'xmpp')
    service = jid.JID(parsed.path)
    queries = urlparse.parse_qs(parsed.query.encode('utf-8'))
    node = queries.get('node', [''])[0].decode('utf-8')

    if not node:
        raise failure.Failure(exceptions.DataError('Invalid comments link'))

    return (service, node)
675 | 873 |
676 ## configure ## | 874 ## configure ## |
677 | 875 |
680 | 878 |
681 If the node already exists, it change options | 879 If the node already exists, it change options |
682 @param access: Node access model, according to xep-0060 #4.5 | 880 @param access: Node access model, according to xep-0060 #4.5 |
683 @param profile_key: profile key | 881 @param profile_key: profile key |
684 """ | 882 """ |
685 # FIXME: check if this method is needed, deprecate it if not | 883 # FIXME: check if this method is needed, deprecate it if not |
686 client = self.host.getClient(profile_key) | 884 client = self.host.getClient(profile_key) |
687 | 885 |
688 _options = {self._p.OPT_ACCESS_MODEL: access, self._p.OPT_PERSIST_ITEMS: 1, self._p.OPT_MAX_ITEMS: -1, self._p.OPT_DELIVER_PAYLOADS: 1, self._p.OPT_SEND_ITEM_SUBSCRIBE: 1} | 886 _options = { |
887 self._p.OPT_ACCESS_MODEL: access, | |
888 self._p.OPT_PERSIST_ITEMS: 1, | |
889 self._p.OPT_MAX_ITEMS: -1, | |
890 self._p.OPT_DELIVER_PAYLOADS: 1, | |
891 self._p.OPT_SEND_ITEM_SUBSCRIBE: 1, | |
892 } | |
689 | 893 |
690 def cb(result): | 894 def cb(result): |
691 #Node is created with right permission | 895 # Node is created with right permission |
692 log.debug(_(u"Microblog node has now access %s") % access) | 896 log.debug(_(u"Microblog node has now access %s") % access) |
693 | 897 |
694 def fatal_err(s_error): | 898 def fatal_err(s_error): |
695 #Something went wrong | 899 # Something went wrong |
696 log.error(_(u"Can't set microblog access")) | 900 log.error(_(u"Can't set microblog access")) |
697 raise NodeAccessChangeException() | 901 raise NodeAccessChangeException() |
698 | 902 |
699 def err_cb(s_error): | 903 def err_cb(s_error): |
700 #If the node already exists, the condition is "conflict", | 904 # If the node already exists, the condition is "conflict", |
701 #else we have an unmanaged error | 905 # else we have an unmanaged error |
702 if s_error.value.condition == 'conflict': | 906 if s_error.value.condition == "conflict": |
703 #d = self.host.plugins["XEP-0060"].deleteNode(client, client.jid.userhostJID(), NS_MICROBLOG) | 907 # d = self.host.plugins["XEP-0060"].deleteNode(client, client.jid.userhostJID(), NS_MICROBLOG) |
704 #d.addCallback(lambda x: create_node().addCallback(cb).addErrback(fatal_err)) | 908 # d.addCallback(lambda x: create_node().addCallback(cb).addErrback(fatal_err)) |
705 change_node_options().addCallback(cb).addErrback(fatal_err) | 909 change_node_options().addCallback(cb).addErrback(fatal_err) |
706 else: | 910 else: |
707 fatal_err(s_error) | 911 fatal_err(s_error) |
708 | 912 |
709 def create_node(): | 913 def create_node(): |
710 return self._p.createNode(client, client.jid.userhostJID(), NS_MICROBLOG, _options) | 914 return self._p.createNode( |
915 client, client.jid.userhostJID(), NS_MICROBLOG, _options | |
916 ) | |
711 | 917 |
712 def change_node_options(): | 918 def change_node_options(): |
713 return self._p.setOptions(client.jid.userhostJID(), NS_MICROBLOG, client.jid.userhostJID(), _options, profile_key=profile_key) | 919 return self._p.setOptions( |
920 client.jid.userhostJID(), | |
921 NS_MICROBLOG, | |
922 client.jid.userhostJID(), | |
923 _options, | |
924 profile_key=profile_key, | |
925 ) | |
714 | 926 |
715 create_node().addCallback(cb).addErrback(err_cb) | 927 create_node().addCallback(cb).addErrback(err_cb) |
716 | 928 |
717 ## methods to manage several stanzas/jids at once ## | 929 ## methods to manage several stanzas/jids at once ## |
718 | 930 |
733 jids_set = set(publishers) | 945 jids_set = set(publishers) |
734 else: | 946 else: |
735 jids_set = client.roster.getJidsSet(publishers_type, publishers) | 947 jids_set = client.roster.getJidsSet(publishers_type, publishers) |
736 if publishers_type == C.ALL: | 948 if publishers_type == C.ALL: |
737 try: # display messages from salut-a-toi@libervia.org or other PEP services | 949 try: # display messages from salut-a-toi@libervia.org or other PEP services |
738 services = self.host.plugins["EXTRA-PEP"].getFollowedEntities(profile_key) | 950 services = self.host.plugins["EXTRA-PEP"].getFollowedEntities( |
951 profile_key | |
952 ) | |
739 except KeyError: | 953 except KeyError: |
740 pass # plugin is not loaded | 954 pass # plugin is not loaded |
741 else: | 955 else: |
742 if services: | 956 if services: |
743 log.debug("Extra PEP followed entities: %s" % ", ".join([unicode(service) for service in services])) | 957 log.debug( |
958 "Extra PEP followed entities: %s" | |
959 % ", ".join([unicode(service) for service in services]) | |
960 ) | |
744 jids_set.update(services) | 961 jids_set.update(services) |
745 | 962 |
746 node_data = [] | 963 node_data = [] |
747 for jid_ in jids_set: | 964 for jid_ in jids_set: |
748 node_data.append((jid_, NS_MICROBLOG)) | 965 node_data.append((jid_, NS_MICROBLOG)) |
755 publishers: list of publishers according to type | 972 publishers: list of publishers according to type |
756 @return: deserialised (publishers_type, publishers) tuple | 973 @return: deserialised (publishers_type, publishers) tuple |
757 """ | 974 """ |
758 if publishers_type == C.ALL: | 975 if publishers_type == C.ALL: |
759 if publishers: | 976 if publishers: |
760 raise failure.Failure(ValueError("Can't use publishers with {} type".format(publishers_type))) | 977 raise failure.Failure( |
978 ValueError( | |
979 "Can't use publishers with {} type".format(publishers_type) | |
980 ) | |
981 ) | |
761 else: | 982 else: |
762 publishers = None | 983 publishers = None |
763 elif publishers_type == C.JID: | 984 elif publishers_type == C.JID: |
764 publishers[:] = [jid.JID(publisher) for publisher in publishers] | 985 publishers[:] = [jid.JID(publisher) for publisher in publishers] |
765 return publishers_type, publishers | 986 return publishers_type, publishers |
783 C.JID: use publishers directly as list of jids | 1004 C.JID: use publishers directly as list of jids |
784 @param publishers: list of publishers, according to "publishers_type" (None, list of groups or list of jids) | 1005 @param publishers: list of publishers, according to "publishers_type" (None, list of groups or list of jids) |
785 @param profile: %(doc_profile)s | 1006 @param profile: %(doc_profile)s |
786 @return (str): session id | 1007 @return (str): session id |
787 """ | 1008 """ |
788 client, node_data = self._getClientAndNodeData(publishers_type, publishers, profile_key) | 1009 client, node_data = self._getClientAndNodeData( |
789 return self._p.subscribeToMany(node_data, client.jid.userhostJID(), profile_key=profile_key) | 1010 publishers_type, publishers, profile_key |
1011 ) | |
1012 return self._p.subscribeToMany( | |
1013 node_data, client.jid.userhostJID(), profile_key=profile_key | |
1014 ) | |
790 | 1015 |
791 # get # | 1016 # get # |
792 | 1017 |
def _mbGetFromManyRTResult(self, session_id, profile_key=C.PROF_KEY_DEFAULT):
    """Get real-time results for mbGetFromMany session

    @param session_id: id of the real-time deferred session
    @param profile_key: %(doc_profile_key)s
    """
    def on_items_success(items_data):
        """convert items elements to list of microblog data in items_data"""
        ser_d = self._p.serItemsDataD(items_data, self.item2mbdata)
        # an empty failure string marks success for this node
        ser_d.addCallback(lambda serialised: ("", serialised))
        return ser_d

    def on_items_error(failure_):
        return (unicode(failure_.value), ([], {}))

    def flatten(ret):
        # turn the per-(service, node) result mapping into a flat,
        # bridge-friendly list of tuples
        remaining, results = ret
        flat = []
        for (service, node), (dummy, (failure, (items, metadata))) in results.iteritems():
            flat.append((service.full(), node, failure, items, metadata))
        return (remaining, flat)

    profile = self.host.getClient(profile_key).profile
    d = self._p.getRTResults(
        session_id,
        on_success=on_items_success,
        on_error=on_items_error,
        profile=profile,
    )
    d.addCallback(flatten)
    return d
822 | 1058 |
def _mbGetFromMany(self, publishers_type, publishers, max_items=10, extra_dict=None, profile_key=C.PROF_KEY_NONE):
    """Bridge wrapper for mbGetFromMany.

    @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
    """
    if max_items == C.NO_LIMIT:
        max_items = None
    publishers_type, publishers = self._checkPublishers(publishers_type, publishers)
    parsed_extra = self._p.parseExtra(extra_dict)
    return self.mbGetFromMany(
        publishers_type,
        publishers,
        max_items,
        parsed_extra.rsm_request,
        parsed_extra.extra,
        profile_key,
    )
def mbGetFromMany(self, publishers_type, publishers, max_items=None, rsm_request=None, extra=None, profile_key=C.PROF_KEY_NONE):
    """Get the published microblogs for a list of groups or jids

    @param publishers_type (str): type of the list of publishers (one of
        "GROUP" or "JID" or "ALL")
    @param publishers (list): list of publishers, according to
        publishers_type (list of groups or list of jids)
    @param max_items (int): optional limit on the number of retrieved items.
    @param extra (dict): Extra data
    @param profile_key: profile key
    @return (str): RT Deferred session id
    """
    # XXX: extra is unused here so far
    client, nodes = self._getClientAndNodeData(
        publishers_type, publishers, profile_key
    )
    return self._p.getFromMany(nodes, max_items, rsm_request, profile_key=profile_key)
846 | 1108 |
847 # comments # | 1109 # comments # |
848 | 1110 |
def _mbGetFromManyWithCommentsRTResult(self, session_id, profile_key=C.PROF_KEY_DEFAULT):
    """Get real-time results for [mbGetFromManyWithComments] session

    @param session_id: id of the real-time deferred session
    @param profile_key: %(doc_profile_key)s
    """
    def flatten(ret):
        # turn the per-(service, node) result mapping into a flat,
        # bridge-friendly list of tuples
        remaining, results = ret
        flat = []
        for (service, node), (dummy, (failure, (items, metadata))) in results.iteritems():
            flat.append((service.full(), node, failure, items, metadata))
        return (remaining, flat)

    profile = self.host.getClient(profile_key).profile
    d = self.rt_sessions.getResults(session_id, profile=profile)
    d.addCallback(flatten)
    return d
876 | 1148 |
def _mbGetFromManyWithComments(self, publishers_type, publishers, max_items=10, max_comments=C.NO_LIMIT, extra_dict=None, extra_comments_dict=None, profile_key=C.PROF_KEY_NONE):
    """Bridge wrapper for mbGetFromManyWithComments.

    @param max_items(int): maximum number of item to get, C.NO_LIMIT for no limit
    @param max_comments(int): maximum number of comments to get, C.NO_LIMIT for no limit
    """
    if max_items == C.NO_LIMIT:
        max_items = None
    if max_comments == C.NO_LIMIT:
        max_comments = None
    publishers_type, publishers = self._checkPublishers(publishers_type, publishers)
    parsed_extra = self._p.parseExtra(extra_dict)
    parsed_extra_comments = self._p.parseExtra(extra_comments_dict)
    return self.mbGetFromManyWithComments(
        publishers_type,
        publishers,
        max_items,
        max_comments or None,
        parsed_extra.rsm_request,
        parsed_extra.extra,
        parsed_extra_comments.rsm_request,
        parsed_extra_comments.extra,
        profile_key,
    )
894 def mbGetFromManyWithComments(self, publishers_type, publishers, max_items=None, max_comments=None, rsm_request=None, extra=None, rsm_comments=None, extra_comments=None, profile_key=C.PROF_KEY_NONE): | 1175 extra_comments.rsm_request, |
1176 extra_comments.extra, | |
1177 profile_key, | |
1178 ) | |
1179 | |
    def mbGetFromManyWithComments(
        self,
        publishers_type,
        publishers,
        max_items=None,
        max_comments=None,
        rsm_request=None,
        extra=None,
        rsm_comments=None,
        extra_comments=None,
        profile_key=C.PROF_KEY_NONE,
    ):
        """Helper method to get the microblogs and their comments in one shot

        @param publishers_type (str): type of the list of publishers (one of "GROUP" or "JID" or "ALL")
        @param publishers (list): list of publishers, according to publishers_type (list of groups or list of jids)
        @param max_items (int): optional limit on the number of retrieved items.
        @param max_comments (int): optional limit on the number of retrieved comments
            per comments node (None for no limit)
        @param rsm_request: RSM request data for the main items
        @param extra (dict): extra data for the main items request
        @param rsm_comments: RSM request data for the comments
        @param extra_comments (dict): extra data for the comments request
        @param profile_key: %(doc_profile_key)s
        @return: real-time session id usable with
            [_mbGetFromManyWithCommentsRTResult] to collect results progressively
        """
        # XXX: this method seems complicated because it do a couple of treatments
        # to serialise and associate the data, but it make life in frontends side
        # a lot easier

        client, node_data = self._getClientAndNodeData(
            publishers_type, publishers, profile_key
        )

        def getComments(items_data):
            """Retrieve comments and add them to the items_data

            @param items_data: serialised items data
            @return (defer.Deferred): list of items where each item is associated
                with a list of comments data (service, node, list of items, metadata)
            """
            items, metadata = items_data
            items_dlist = []  # deferred list for items
            for item in items:
                dlist = []  # deferred list for comments
                for key, value in item.iteritems():
                    # we look for comments
                    # keys like "comments*_service" mark a comments node location
                    if key.startswith("comments") and key.endswith("_service"):
                        prefix = key[: key.find("_")]
                        service_s = value
                        node = item["{}{}".format(prefix, "_node")]
                        # time to get the comments
                        d = self._p.getItems(
                            client,
                            jid.JID(service_s),
                            node,
                            max_comments,
                            rsm_request=rsm_comments,
                            extra=extra_comments,
                        )
                        # then serialise
                        d.addCallback(
                            lambda items_data: self._p.serItemsDataD(
                                items_data, self.item2mbdata
                            )
                        )
                        # with failure handling
                        # empty string in first slot means "no error" on success
                        d.addCallback(
                            lambda serialised_items_data: ("",) + serialised_items_data
                        )
                        d.addErrback(lambda failure: (unicode(failure.value), [], {}))
                        # and associate with service/node (needed if there are several comments nodes)
                        # NOTE: service_s/node are bound as lambda defaults so each
                        # callback keeps the values of its own loop iteration
                        d.addCallback(
                            lambda serialised, service_s=service_s, node=node: (
                                service_s,
                                node,
                            )
                            + serialised
                        )
                        dlist.append(d)
                # we get the comments
                comments_d = defer.gatherResults(dlist)
                # and add them to the item data
                comments_d.addCallback(
                    lambda comments_data, item=item: (item, comments_data)
                )
                items_dlist.append(comments_d)
            # we gather the items + comments in a list
            items_d = defer.gatherResults(items_dlist)
            # and add the metadata
            items_d.addCallback(lambda items_completed: (items_completed, metadata))
            return items_d

        deferreds = {}
        for service, node in node_data:
            # one deferred per (service, node); each ends as
            # (error_msg, (items_with_comments, metadata))
            d = deferreds[(service, node)] = self._p.getItems(
                client, service, node, max_items, rsm_request=rsm_request, extra=extra
            )
            d.addCallback(
                lambda items_data: self._p.serItemsDataD(items_data, self.item2mbdata)
            )
            d.addCallback(getComments)
            d.addCallback(lambda items_comments_data: ("", items_comments_data))
            d.addErrback(lambda failure: (unicode(failure.value), ([], {})))

        return self.rt_sessions.newSession(deferreds, client.profile)
961 | 1285 |
962 | 1286 |
class XEP_0277_handler(XMPPHandler):
    """XMPP stream handler advertising XEP-0277 (microblogging) support."""

    implements(iwokkel.IDisco)

    def getDiscoInfo(self, requestor, target, nodeIdentifier=""):
        # advertise the microblog namespace so peers discover XEP-0277 support
        return [disco.DiscoFeature(NS_MICROBLOG)]

    def getDiscoItems(self, requestor, target, nodeIdentifier=""):
        # this handler exposes no disco items
        return []