Mercurial > libervia-backend
comparison sat/plugins/plugin_comp_file_sharing.py @ 2624:56f94936df1e
code style reformatting using black
author | Goffi <goffi@goffi.org> |
---|---|
date | Wed, 27 Jun 2018 20:14:46 +0200 |
parents | 26edcf3a30eb |
children | 378188abe941 |
comparison
equal
deleted
inserted
replaced
2623:49533de4540b | 2624:56f94936df1e |
---|---|
19 | 19 |
20 from sat.core.i18n import _ | 20 from sat.core.i18n import _ |
21 from sat.core.constants import Const as C | 21 from sat.core.constants import Const as C |
22 from sat.core import exceptions | 22 from sat.core import exceptions |
23 from sat.core.log import getLogger | 23 from sat.core.log import getLogger |
24 | |
24 log = getLogger(__name__) | 25 log = getLogger(__name__) |
25 from sat.tools.common import regex | 26 from sat.tools.common import regex |
26 from sat.tools.common import uri | 27 from sat.tools.common import uri |
27 from sat.tools import stream | 28 from sat.tools import stream |
28 from twisted.internet import defer | 29 from twisted.internet import defer |
39 C.PI_NAME: "File sharing component", | 40 C.PI_NAME: "File sharing component", |
40 C.PI_IMPORT_NAME: "file_sharing", | 41 C.PI_IMPORT_NAME: "file_sharing", |
41 C.PI_MODES: [C.PLUG_MODE_COMPONENT], | 42 C.PI_MODES: [C.PLUG_MODE_COMPONENT], |
42 C.PI_TYPE: C.PLUG_TYPE_ENTRY_POINT, | 43 C.PI_TYPE: C.PLUG_TYPE_ENTRY_POINT, |
43 C.PI_PROTOCOLS: [], | 44 C.PI_PROTOCOLS: [], |
44 C.PI_DEPENDENCIES: ["FILE", "XEP-0231", "XEP-0234", "XEP-0260", "XEP-0261", "XEP-0264", "XEP-0329"], | 45 C.PI_DEPENDENCIES: [ |
46 "FILE", | |
47 "XEP-0231", | |
48 "XEP-0234", | |
49 "XEP-0260", | |
50 "XEP-0261", | |
51 "XEP-0264", | |
52 "XEP-0329", | |
53 ], | |
45 C.PI_RECOMMENDATIONS: [], | 54 C.PI_RECOMMENDATIONS: [], |
46 C.PI_MAIN: "FileSharing", | 55 C.PI_MAIN: "FileSharing", |
47 C.PI_HANDLER: C.BOOL_TRUE, | 56 C.PI_HANDLER: C.BOOL_TRUE, |
48 C.PI_DESCRIPTION: _(u"""Component hosting and sharing files""") | 57 C.PI_DESCRIPTION: _(u"""Component hosting and sharing files"""), |
49 } | 58 } |
50 | 59 |
51 HASH_ALGO = u'sha-256' | 60 HASH_ALGO = u"sha-256" |
52 NS_COMMENTS = 'org.salut-a-toi.comments' | 61 NS_COMMENTS = "org.salut-a-toi.comments" |
53 COMMENT_NODE_PREFIX = 'org.salut-a-toi.file_comments/' | 62 COMMENT_NODE_PREFIX = "org.salut-a-toi.file_comments/" |
54 | 63 |
55 | 64 |
56 class FileSharing(object): | 65 class FileSharing(object): |
57 | |
58 def __init__(self, host): | 66 def __init__(self, host): |
59 log.info(_(u"File Sharing initialization")) | 67 log.info(_(u"File Sharing initialization")) |
60 self.host = host | 68 self.host = host |
61 self._f = host.plugins['FILE'] | 69 self._f = host.plugins["FILE"] |
62 self._jf = host.plugins['XEP-0234'] | 70 self._jf = host.plugins["XEP-0234"] |
63 self._h = host.plugins['XEP-0300'] | 71 self._h = host.plugins["XEP-0300"] |
64 self._t = host.plugins['XEP-0264'] | 72 self._t = host.plugins["XEP-0264"] |
65 host.trigger.add("FILE_getDestDir", self._getDestDirTrigger) | 73 host.trigger.add("FILE_getDestDir", self._getDestDirTrigger) |
66 host.trigger.add("XEP-0234_fileSendingRequest", self._fileSendingRequestTrigger, priority=1000) | 74 host.trigger.add( |
75 "XEP-0234_fileSendingRequest", self._fileSendingRequestTrigger, priority=1000 | |
76 ) | |
67 host.trigger.add("XEP-0234_buildFileElement", self._addFileComments) | 77 host.trigger.add("XEP-0234_buildFileElement", self._addFileComments) |
68 host.trigger.add("XEP-0234_parseFileElement", self._getFileComments) | 78 host.trigger.add("XEP-0234_parseFileElement", self._getFileComments) |
69 host.trigger.add("XEP-0329_compGetFilesFromNode", self._addCommentsData) | 79 host.trigger.add("XEP-0329_compGetFilesFromNode", self._addCommentsData) |
70 self.files_path = host.getLocalPath(None, C.FILES_DIR, profile=False) | 80 self.files_path = host.getLocalPath(None, C.FILES_DIR, profile=False) |
71 | 81 |
72 def getHandler(self, client): | 82 def getHandler(self, client): |
73 return Comments_handler(self) | 83 return Comments_handler(self) |
74 | 84 |
75 def profileConnected(self, client): | 85 def profileConnected(self, client): |
76 path = client.file_tmp_dir = os.path.join( | 86 path = client.file_tmp_dir = os.path.join( |
77 self.host.memory.getConfig('', 'local_dir'), | 87 self.host.memory.getConfig("", "local_dir"), |
78 C.FILES_TMP_DIR, | 88 C.FILES_TMP_DIR, |
79 regex.pathEscape(client.profile)) | 89 regex.pathEscape(client.profile), |
90 ) | |
80 if not os.path.exists(path): | 91 if not os.path.exists(path): |
81 os.makedirs(path) | 92 os.makedirs(path) |
82 | 93 |
83 @defer.inlineCallbacks | 94 @defer.inlineCallbacks |
84 def _fileTransferedCb(self, dummy, client, peer_jid, file_data, file_path): | 95 def _fileTransferedCb(self, dummy, client, peer_jid, file_data, file_path): |
85 """post file reception tasks | 96 """post file reception tasks |
86 | 97 |
87 once the file is received, this method creates hash/thumbnails if necessary | 98 once the file is received, this method creates hash/thumbnails if necessary |
88 move the file to the right location, and create metadata entry in database | 99 move the file to the right location, and create metadata entry in database |
89 """ | 100 """ |
90 name = file_data[u'name'] | 101 name = file_data[u"name"] |
91 extra = {} | 102 extra = {} |
92 | 103 |
93 if file_data[u'hash_algo'] == HASH_ALGO: | 104 if file_data[u"hash_algo"] == HASH_ALGO: |
94 log.debug(_(u"Reusing already generated hash")) | 105 log.debug(_(u"Reusing already generated hash")) |
95 file_hash = file_data[u'hash_hasher'].hexdigest() | 106 file_hash = file_data[u"hash_hasher"].hexdigest() |
96 else: | 107 else: |
97 hasher = self._h.getHasher(HASH_ALGO) | 108 hasher = self._h.getHasher(HASH_ALGO) |
98 with open('file_path') as f: | 109 with open("file_path") as f: |
99 file_hash = yield self._h.calculateHash(f, hasher) | 110 file_hash = yield self._h.calculateHash(f, hasher) |
100 final_path = os.path.join(self.files_path, file_hash) | 111 final_path = os.path.join(self.files_path, file_hash) |
101 | 112 |
102 if os.path.isfile(final_path): | 113 if os.path.isfile(final_path): |
103 log.debug(u"file [{file_hash}] already exists, we can remove temporary one".format(file_hash = file_hash)) | 114 log.debug( |
115 u"file [{file_hash}] already exists, we can remove temporary one".format( | |
116 file_hash=file_hash | |
117 ) | |
118 ) | |
104 os.unlink(file_path) | 119 os.unlink(file_path) |
105 else: | 120 else: |
106 os.rename(file_path, final_path) | 121 os.rename(file_path, final_path) |
107 log.debug(u"file [{file_hash}] moved to {files_path}".format(file_hash=file_hash, files_path=self.files_path)) | 122 log.debug( |
108 | 123 u"file [{file_hash}] moved to {files_path}".format( |
109 mime_type = file_data.get(u'mime_type') | 124 file_hash=file_hash, files_path=self.files_path |
110 if not mime_type or mime_type == u'application/octet-stream': | 125 ) |
126 ) | |
127 | |
128 mime_type = file_data.get(u"mime_type") | |
129 if not mime_type or mime_type == u"application/octet-stream": | |
111 mime_type = mimetypes.guess_type(name)[0] | 130 mime_type = mimetypes.guess_type(name)[0] |
112 | 131 |
113 if mime_type is not None and mime_type.startswith(u'image'): | 132 if mime_type is not None and mime_type.startswith(u"image"): |
114 thumbnails = extra.setdefault(C.KEY_THUMBNAILS, []) | 133 thumbnails = extra.setdefault(C.KEY_THUMBNAILS, []) |
115 for max_thumb_size in (self._t.SIZE_SMALL, self._t.SIZE_MEDIUM): | 134 for max_thumb_size in (self._t.SIZE_SMALL, self._t.SIZE_MEDIUM): |
116 try: | 135 try: |
117 thumb_size, thumb_id = yield self._t.generateThumbnail(final_path, | 136 thumb_size, thumb_id = yield self._t.generateThumbnail( |
118 max_thumb_size, | 137 final_path, |
119 # we keep thumbnails for 6 months | 138 max_thumb_size, |
120 60*60*24*31*6) | 139 # we keep thumbnails for 6 months |
140 60 * 60 * 24 * 31 * 6, | |
141 ) | |
121 except Exception as e: | 142 except Exception as e: |
122 log.warning(_(u"Can't create thumbnail: {reason}").format(reason=e)) | 143 log.warning(_(u"Can't create thumbnail: {reason}").format(reason=e)) |
123 break | 144 break |
124 thumbnails.append({u'id': thumb_id, u'size': thumb_size}) | 145 thumbnails.append({u"id": thumb_id, u"size": thumb_size}) |
125 | 146 |
126 self.host.memory.setFile(client, | 147 self.host.memory.setFile( |
127 name=name, | 148 client, |
128 version=u'', | 149 name=name, |
129 file_hash=file_hash, | 150 version=u"", |
130 hash_algo=HASH_ALGO, | 151 file_hash=file_hash, |
131 size=file_data[u'size'], | 152 hash_algo=HASH_ALGO, |
132 path=file_data.get(u'path'), | 153 size=file_data[u"size"], |
133 namespace=file_data.get(u'namespace'), | 154 path=file_data.get(u"path"), |
134 mime_type=mime_type, | 155 namespace=file_data.get(u"namespace"), |
135 owner=peer_jid, | 156 mime_type=mime_type, |
136 extra=extra) | 157 owner=peer_jid, |
137 | 158 extra=extra, |
138 def _getDestDirTrigger(self, client, peer_jid, transfer_data, file_data, stream_object): | 159 ) |
160 | |
161 def _getDestDirTrigger( | |
162 self, client, peer_jid, transfer_data, file_data, stream_object | |
163 ): | |
139 """This trigger accept file sending request, and store file locally""" | 164 """This trigger accept file sending request, and store file locally""" |
140 if not client.is_component: | 165 if not client.is_component: |
141 return True, None | 166 return True, None |
142 assert stream_object | 167 assert stream_object |
143 assert 'stream_object' not in transfer_data | 168 assert "stream_object" not in transfer_data |
144 assert C.KEY_PROGRESS_ID in file_data | 169 assert C.KEY_PROGRESS_ID in file_data |
145 filename = file_data['name'] | 170 filename = file_data["name"] |
146 assert filename and not '/' in filename | 171 assert filename and not "/" in filename |
147 file_tmp_dir = self.host.getLocalPath(client, C.FILES_TMP_DIR, peer_jid.userhost(), component=True, profile=False) | 172 file_tmp_dir = self.host.getLocalPath( |
148 file_tmp_path = file_data['file_path'] = os.path.join(file_tmp_dir, file_data['name']) | 173 client, C.FILES_TMP_DIR, peer_jid.userhost(), component=True, profile=False |
149 | 174 ) |
150 transfer_data['finished_d'].addCallback(self._fileTransferedCb, client, peer_jid, file_data, file_tmp_path) | 175 file_tmp_path = file_data["file_path"] = os.path.join( |
151 | 176 file_tmp_dir, file_data["name"] |
152 self._f.openFileWrite(client, file_tmp_path, transfer_data, file_data, stream_object) | 177 ) |
178 | |
179 transfer_data["finished_d"].addCallback( | |
180 self._fileTransferedCb, client, peer_jid, file_data, file_tmp_path | |
181 ) | |
182 | |
183 self._f.openFileWrite( | |
184 client, file_tmp_path, transfer_data, file_data, stream_object | |
185 ) | |
153 return False, defer.succeed(True) | 186 return False, defer.succeed(True) |
154 | 187 |
155 @defer.inlineCallbacks | 188 @defer.inlineCallbacks |
156 def _retrieveFiles(self, client, session, content_data, content_name, file_data, file_elt): | 189 def _retrieveFiles( |
190 self, client, session, content_data, content_name, file_data, file_elt | |
191 ): | |
157 """This method retrieves a file on request, and sends it after checking permissions""" | 192 """This method retrieves a file on request, and sends it after checking permissions""" |
158 peer_jid = session[u'peer_jid'] | 193 peer_jid = session[u"peer_jid"] |
159 try: | 194 try: |
160 found_files = yield self.host.memory.getFiles(client, | 195 found_files = yield self.host.memory.getFiles( |
161 peer_jid=peer_jid, | 196 client, |
162 name=file_data.get(u'name'), | 197 peer_jid=peer_jid, |
163 file_hash=file_data.get(u'file_hash'), | 198 name=file_data.get(u"name"), |
164 hash_algo=file_data.get(u'hash_algo'), | 199 file_hash=file_data.get(u"file_hash"), |
165 path=file_data.get(u'path'), | 200 hash_algo=file_data.get(u"hash_algo"), |
166 namespace=file_data.get(u'namespace')) | 201 path=file_data.get(u"path"), |
202 namespace=file_data.get(u"namespace"), | |
203 ) | |
167 except exceptions.NotFound: | 204 except exceptions.NotFound: |
168 found_files = None | 205 found_files = None |
169 except exceptions.PermissionError: | 206 except exceptions.PermissionError: |
170 log.warning(_(u"{peer_jid} is trying to access an unauthorized file: {name}").format( | 207 log.warning( |
171 peer_jid=peer_jid, name=file_data.get(u'name'))) | 208 _(u"{peer_jid} is trying to access an unauthorized file: {name}").format( |
209 peer_jid=peer_jid, name=file_data.get(u"name") | |
210 ) | |
211 ) | |
172 defer.returnValue(False) | 212 defer.returnValue(False) |
173 | 213 |
174 if not found_files: | 214 if not found_files: |
175 log.warning(_(u"no matching file found ({file_data})").format(file_data=file_data)) | 215 log.warning( |
216 _(u"no matching file found ({file_data})").format(file_data=file_data) | |
217 ) | |
176 defer.returnValue(False) | 218 defer.returnValue(False) |
177 | 219 |
178 # we only use the first found file | 220 # we only use the first found file |
179 found_file = found_files[0] | 221 found_file = found_files[0] |
180 file_hash = found_file[u'file_hash'] | 222 file_hash = found_file[u"file_hash"] |
181 file_path = os.path.join(self.files_path, file_hash) | 223 file_path = os.path.join(self.files_path, file_hash) |
182 file_data[u'hash_hasher'] = hasher = self._h.getHasher(found_file[u'hash_algo']) | 224 file_data[u"hash_hasher"] = hasher = self._h.getHasher(found_file[u"hash_algo"]) |
183 size = file_data[u'size'] = found_file[u'size'] | 225 size = file_data[u"size"] = found_file[u"size"] |
184 file_data[u'file_hash'] = file_hash | 226 file_data[u"file_hash"] = file_hash |
185 file_data[u'hash_algo'] = found_file[u'hash_algo'] | 227 file_data[u"hash_algo"] = found_file[u"hash_algo"] |
186 | 228 |
187 # we complete file_elt so peer can have some details on the file | 229 # we complete file_elt so peer can have some details on the file |
188 if u'name' not in file_data: | 230 if u"name" not in file_data: |
189 file_elt.addElement(u'name', content=found_file[u'name']) | 231 file_elt.addElement(u"name", content=found_file[u"name"]) |
190 file_elt.addElement(u'size', content=unicode(size)) | 232 file_elt.addElement(u"size", content=unicode(size)) |
191 content_data['stream_object'] = stream.FileStreamObject( | 233 content_data["stream_object"] = stream.FileStreamObject( |
192 self.host, | 234 self.host, |
193 client, | 235 client, |
194 file_path, | 236 file_path, |
195 uid=self._jf.getProgressId(session, content_name), | 237 uid=self._jf.getProgressId(session, content_name), |
196 size=size, | 238 size=size, |
197 data_cb=lambda data: hasher.update(data), | 239 data_cb=lambda data: hasher.update(data), |
198 ) | 240 ) |
199 defer.returnValue(True) | 241 defer.returnValue(True) |
200 | 242 |
201 def _fileSendingRequestTrigger(self, client, session, content_data, content_name, file_data, file_elt): | 243 def _fileSendingRequestTrigger( |
244 self, client, session, content_data, content_name, file_data, file_elt | |
245 ): | |
202 if not client.is_component: | 246 if not client.is_component: |
203 return True, None | 247 return True, None |
204 else: | 248 else: |
205 return False, self._retrieveFiles(client, session, content_data, content_name, file_data, file_elt) | 249 return ( |
250 False, | |
251 self._retrieveFiles( | |
252 client, session, content_data, content_name, file_data, file_elt | |
253 ), | |
254 ) | |
206 | 255 |
207 ## comments triggers ## | 256 ## comments triggers ## |
208 | 257 |
209 def _addFileComments(self, file_elt, extra_args): | 258 def _addFileComments(self, file_elt, extra_args): |
210 try: | 259 try: |
211 comments_url = extra_args.pop('comments_url') | 260 comments_url = extra_args.pop("comments_url") |
212 except KeyError: | 261 except KeyError: |
213 return | 262 return |
214 | 263 |
215 comment_elt = file_elt.addElement((NS_COMMENTS, 'comments'), content=comments_url) | 264 comment_elt = file_elt.addElement((NS_COMMENTS, "comments"), content=comments_url) |
216 | 265 |
217 try: | 266 try: |
218 count = len(extra_args[u'extra'][u'comments']) | 267 count = len(extra_args[u"extra"][u"comments"]) |
219 except KeyError: | 268 except KeyError: |
220 count = 0 | 269 count = 0 |
221 | 270 |
222 comment_elt['count'] = unicode(count) | 271 comment_elt["count"] = unicode(count) |
223 return True | 272 return True |
224 | 273 |
225 def _getFileComments(self, file_elt, file_data): | 274 def _getFileComments(self, file_elt, file_data): |
226 try: | 275 try: |
227 comments_elt = next(file_elt.elements(NS_COMMENTS, 'comments')) | 276 comments_elt = next(file_elt.elements(NS_COMMENTS, "comments")) |
228 except StopIteration: | 277 except StopIteration: |
229 return | 278 return |
230 file_data['comments_url'] = unicode(comments_elt) | 279 file_data["comments_url"] = unicode(comments_elt) |
231 file_data['comments_count'] = comments_elt['count'] | 280 file_data["comments_count"] = comments_elt["count"] |
232 return True | 281 return True |
233 | 282 |
234 def _addCommentsData(self, client, iq_elt, owner, node_path, files_data): | 283 def _addCommentsData(self, client, iq_elt, owner, node_path, files_data): |
235 for file_data in files_data: | 284 for file_data in files_data: |
236 file_data['comments_url'] = uri.buildXMPPUri('pubsub', | 285 file_data["comments_url"] = uri.buildXMPPUri( |
237 path=client.jid.full(), | 286 "pubsub", |
238 node=COMMENT_NODE_PREFIX + file_data['id']) | 287 path=client.jid.full(), |
288 node=COMMENT_NODE_PREFIX + file_data["id"], | |
289 ) | |
239 return True | 290 return True |
240 | 291 |
241 | 292 |
242 class Comments_handler(pubsub.PubSubService): | 293 class Comments_handler(pubsub.PubSubService): |
243 """This class is a minimal Pubsub service handling virtual nodes for comments""" | 294 """This class is a minimal Pubsub service handling virtual nodes for comments""" |
244 | 295 |
245 def __init__(self, plugin_parent): | 296 def __init__(self, plugin_parent): |
246 super(Comments_handler, self).__init__() # PubsubVirtualResource()) | 297 super(Comments_handler, self).__init__() # PubsubVirtualResource()) |
247 self.host = plugin_parent.host | 298 self.host = plugin_parent.host |
248 self.plugin_parent = plugin_parent | 299 self.plugin_parent = plugin_parent |
249 self.discoIdentity = {'category': 'pubsub', | 300 self.discoIdentity = { |
250 'type': 'virtual', # FIXME: non standard, here to avoid this service being considered as main pubsub one | 301 "category": "pubsub", |
251 'name': 'files commenting service'} | 302 "type": "virtual", # FIXME: non standard, here to avoid this service being considered as main pubsub one |
303 "name": "files commenting service", | |
304 } | |
252 | 305 |
253 def _getFileId(self, nodeIdentifier): | 306 def _getFileId(self, nodeIdentifier): |
254 if not nodeIdentifier.startswith(COMMENT_NODE_PREFIX): | 307 if not nodeIdentifier.startswith(COMMENT_NODE_PREFIX): |
255 raise error.StanzaError('item-not-found') | 308 raise error.StanzaError("item-not-found") |
256 file_id = nodeIdentifier[len(COMMENT_NODE_PREFIX):] | 309 file_id = nodeIdentifier[len(COMMENT_NODE_PREFIX) :] |
257 if not file_id: | 310 if not file_id: |
258 raise error.StanzaError('item-not-found') | 311 raise error.StanzaError("item-not-found") |
259 return file_id | 312 return file_id |
260 | 313 |
261 @defer.inlineCallbacks | 314 @defer.inlineCallbacks |
262 def getFileData(self, requestor, nodeIdentifier): | 315 def getFileData(self, requestor, nodeIdentifier): |
263 file_id = self._getFileId(nodeIdentifier) | 316 file_id = self._getFileId(nodeIdentifier) |
264 try: | 317 try: |
265 files = yield self.host.memory.getFiles(self.parent, requestor, file_id) | 318 files = yield self.host.memory.getFiles(self.parent, requestor, file_id) |
266 except (exceptions.NotFound, exceptions.PermissionError): | 319 except (exceptions.NotFound, exceptions.PermissionError): |
267 # we don't differentiate between NotFound and PermissionError | 320 # we don't differentiate between NotFound and PermissionError |
268 # to avoid leaking information on existing files | 321 # to avoid leaking information on existing files |
269 raise error.StanzaError('item-not-found') | 322 raise error.StanzaError("item-not-found") |
270 if not files: | 323 if not files: |
271 raise error.StanzaError('item-not-found') | 324 raise error.StanzaError("item-not-found") |
272 if len(files) > 1: | 325 if len(files) > 1: |
273 raise error.InternalError('there should be only one file') | 326 raise error.InternalError("there should be only one file") |
274 defer.returnValue(files[0]) | 327 defer.returnValue(files[0]) |
275 | 328 |
276 def commentsUpdate(self, extra, new_comments, peer_jid): | 329 def commentsUpdate(self, extra, new_comments, peer_jid): |
277 """update comments (replace or insert new_comments) | 330 """update comments (replace or insert new_comments) |
278 | 331 |
279 @param extra(dict): extra data to update | 332 @param extra(dict): extra data to update |
280 @param new_comments(list[tuple(unicode, unicode, unicode)]): comments to update or insert | 333 @param new_comments(list[tuple(unicode, unicode, unicode)]): comments to update or insert |
281 @param peer_jid(unicode, None): bare jid of the requestor, or None if request is done by owner | 334 @param peer_jid(unicode, None): bare jid of the requestor, or None if request is done by owner |
282 """ | 335 """ |
283 current_comments = extra.setdefault('comments', []) | 336 current_comments = extra.setdefault("comments", []) |
284 new_comments_by_id = {c[0]:c for c in new_comments} | 337 new_comments_by_id = {c[0]: c for c in new_comments} |
285 updated = [] | 338 updated = [] |
286 # we now check every current comment, to see if one id in new ones | 339 # we now check every current comment, to see if one id in new ones |
287 # exists, in which case we must update | 340 # exists, in which case we must update |
288 for idx, comment in enumerate(current_comments): | 341 for idx, comment in enumerate(current_comments): |
289 comment_id = comment[0] | 342 comment_id = comment[0] |
304 | 357 |
305 current_comments.extend(new_comments) | 358 current_comments.extend(new_comments) |
306 | 359 |
307 def commentsDelete(self, extra, comments): | 360 def commentsDelete(self, extra, comments): |
308 try: | 361 try: |
309 comments_dict = extra['comments'] | 362 comments_dict = extra["comments"] |
310 except KeyError: | 363 except KeyError: |
311 return | 364 return |
312 for comment in comments: | 365 for comment in comments: |
313 try: | 366 try: |
314 comments_dict.remove(comment) | 367 comments_dict.remove(comment) |
322 @return (unicode): full jid as string | 375 @return (unicode): full jid as string |
323 """ | 376 """ |
324 iq_elt = item_elt | 377 iq_elt = item_elt |
325 while iq_elt.parent != None: | 378 while iq_elt.parent != None: |
326 iq_elt = iq_elt.parent | 379 iq_elt = iq_elt.parent |
327 return iq_elt['from'] | 380 return iq_elt["from"] |
328 | 381 |
329 @defer.inlineCallbacks | 382 @defer.inlineCallbacks |
330 def publish(self, requestor, service, nodeIdentifier, items): | 383 def publish(self, requestor, service, nodeIdentifier, items): |
331 # we retrieve file a first time to check authorisations | 384 # we retrieve file a first time to check authorisations |
332 file_data = yield self.getFileData(requestor, nodeIdentifier) | 385 file_data = yield self.getFileData(requestor, nodeIdentifier) |
333 file_id = file_data['id'] | 386 file_id = file_data["id"] |
334 comments = [(item['id'], self._getFrom(item), item.toXml()) for item in items] | 387 comments = [(item["id"], self._getFrom(item), item.toXml()) for item in items] |
335 if requestor.userhostJID() == file_data['owner']: | 388 if requestor.userhostJID() == file_data["owner"]: |
336 peer_jid = None | 389 peer_jid = None |
337 else: | 390 else: |
338 peer_jid = requestor.userhost() | 391 peer_jid = requestor.userhost() |
339 update_cb = partial(self.commentsUpdate, new_comments=comments, peer_jid=peer_jid) | 392 update_cb = partial(self.commentsUpdate, new_comments=comments, peer_jid=peer_jid) |
340 try: | 393 try: |
341 yield self.host.memory.fileUpdate(file_id, 'extra', update_cb) | 394 yield self.host.memory.fileUpdate(file_id, "extra", update_cb) |
342 except exceptions.PermissionError: | 395 except exceptions.PermissionError: |
343 raise error.StanzaError('not-authorized') | 396 raise error.StanzaError("not-authorized") |
344 | 397 |
345 @defer.inlineCallbacks | 398 @defer.inlineCallbacks |
346 def items(self, requestor, service, nodeIdentifier, maxItems, | 399 def items(self, requestor, service, nodeIdentifier, maxItems, itemIdentifiers): |
347 itemIdentifiers): | |
348 file_data = yield self.getFileData(requestor, nodeIdentifier) | 400 file_data = yield self.getFileData(requestor, nodeIdentifier) |
349 comments = file_data['extra'].get('comments', []) | 401 comments = file_data["extra"].get("comments", []) |
350 if itemIdentifiers: | 402 if itemIdentifiers: |
351 defer.returnValue([generic.parseXml(c[2]) for c in comments if c[0] in itemIdentifiers]) | 403 defer.returnValue( |
404 [generic.parseXml(c[2]) for c in comments if c[0] in itemIdentifiers] | |
405 ) | |
352 else: | 406 else: |
353 defer.returnValue([generic.parseXml(c[2]) for c in comments]) | 407 defer.returnValue([generic.parseXml(c[2]) for c in comments]) |
354 | 408 |
355 @defer.inlineCallbacks | 409 @defer.inlineCallbacks |
356 def retract(self, requestor, service, nodeIdentifier, itemIdentifiers): | 410 def retract(self, requestor, service, nodeIdentifier, itemIdentifiers): |
357 file_data = yield self.getFileData(requestor, nodeIdentifier) | 411 file_data = yield self.getFileData(requestor, nodeIdentifier) |
358 file_id = file_data['id'] | 412 file_id = file_data["id"] |
359 try: | 413 try: |
360 comments = file_data['extra']['comments'] | 414 comments = file_data["extra"]["comments"] |
361 except KeyError: | 415 except KeyError: |
362 raise error.StanzaError('item-not-found') | 416 raise error.StanzaError("item-not-found") |
363 | 417 |
364 to_remove = [] | 418 to_remove = [] |
365 for comment in comments: | 419 for comment in comments: |
366 comment_id = comment[0] | 420 comment_id = comment[0] |
367 if comment_id in itemIdentifiers: | 421 if comment_id in itemIdentifiers: |
370 if not itemIdentifiers: | 424 if not itemIdentifiers: |
371 break | 425 break |
372 | 426 |
373 if itemIdentifiers: | 427 if itemIdentifiers: |
374 # not all items have been removed, we can't continue | 428 # not all items have been removed, we can't continue |
375 raise error.StanzaError('item-not-found') | 429 raise error.StanzaError("item-not-found") |
376 | 430 |
377 if requestor.userhostJID() != file_data['owner']: | 431 if requestor.userhostJID() != file_data["owner"]: |
378 if not all([c[1] == requestor.userhost() for c in to_remove]): | 432 if not all([c[1] == requestor.userhost() for c in to_remove]): |
379 raise error.StanzaError('not-authorized') | 433 raise error.StanzaError("not-authorized") |
380 | 434 |
381 remove_cb = partial(self.commentsDelete, comments=to_remove) | 435 remove_cb = partial(self.commentsDelete, comments=to_remove) |
382 yield self.host.memory.fileUpdate(file_id, 'extra', remove_cb) | 436 yield self.host.memory.fileUpdate(file_id, "extra", remove_cb) |