comparison sat/plugins/plugin_comp_file_sharing.py @ 3028:ab2696e34d29

Python 3 port:

/!\ this is a huge commit
/!\ starting from this commit, SàT needs Python 3.6+
/!\ SàT may be unstable or some features may not work anymore; this will improve with time

This patch ports backend, bridge and frontends to Python 3. Roughly, this has been done this way:
- the 2to3 tool has been applied (with Python 3.7)
- all references to python2 have been replaced with python3 (notably shebangs)
- fixed files not handled by 2to3 (notably the shell script)
- several manual fixes
- fixed issues reported by Python 3 that were not handled in Python 2
- replaced "async" with "async_" where needed (it is a reserved word since Python 3.7)
- replaced zope's "implements" with the @implementer decorator
- temporary hack to handle data pickled in the database, as str or bytes may be returned; to be checked later
- fixed hash comparison for passwords
- removed some code which is not needed anymore with Python 3
- deactivated some code which needs to be checked (notably certificate validation)
- tested with jp, fixed reported issues until some basic commands worked
- ported Primitivus (after porting dependencies like urwid satext)
- more manual fixes
author Goffi <goffi@goffi.org>
date Tue, 13 Aug 2019 19:08:41 +0200
parents e0429ff7f6b6
children 9d0df638c8b4
comparison: 3027:ff5bcb12ae60 → 3028:ab2696e34d29
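
The commit message above describes mostly mechanical changes that repeat throughout the hunks below (u"" prefixes dropped, unicode() replaced by str(), shebangs updated). A minimal sketch of two of the listed patterns, assuming nothing from the SàT codebase: zope's implements() call replaced by the @implementer decorator, and an "async" keyword argument renamed to "async_".

    # Hypothetical example, not taken from SàT: the interface and class names
    # are made up to illustrate the two porting patterns named above.
    from zope.interface import Interface, implementer

    class IStreamHandler(Interface):
        """Hypothetical marker interface."""

    # Python 2 style, as removed by the port:
    #     class StreamHandler(object):
    #         implements(IStreamHandler)
    #         def send(self, data, async=False): ...

    @implementer(IStreamHandler)
    class StreamHandler(object):
        def send(self, data, async_=False):  # "async" is a reserved word since Python 3.7
            return data if not async_ else None
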
@@ -1,6 +1,6 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-

 # SAT plugin for parrot mode (experimental)
 # Copyright (C) 2009-2019 Jérôme Poisson (goffi@goffi.org)

53 "XEP-0329", 53 "XEP-0329",
54 ], 54 ],
55 C.PI_RECOMMENDATIONS: [], 55 C.PI_RECOMMENDATIONS: [],
56 C.PI_MAIN: "FileSharing", 56 C.PI_MAIN: "FileSharing",
57 C.PI_HANDLER: C.BOOL_TRUE, 57 C.PI_HANDLER: C.BOOL_TRUE,
58 C.PI_DESCRIPTION: _(u"""Component hosting and sharing files"""), 58 C.PI_DESCRIPTION: _("""Component hosting and sharing files"""),
59 } 59 }
60 60
61 HASH_ALGO = u"sha-256" 61 HASH_ALGO = "sha-256"
62 NS_COMMENTS = "org.salut-a-toi.comments" 62 NS_COMMENTS = "org.salut-a-toi.comments"
63 COMMENT_NODE_PREFIX = "org.salut-a-toi.file_comments/" 63 COMMENT_NODE_PREFIX = "org.salut-a-toi.file_comments/"
64 64
65 65
66 class FileSharing(object): 66 class FileSharing(object):
67 def __init__(self, host): 67 def __init__(self, host):
68 log.info(_(u"File Sharing initialization")) 68 log.info(_("File Sharing initialization"))
69 self.host = host 69 self.host = host
70 self._f = host.plugins["FILE"] 70 self._f = host.plugins["FILE"]
71 self._jf = host.plugins["XEP-0234"] 71 self._jf = host.plugins["XEP-0234"]
72 self._h = host.plugins["XEP-0300"] 72 self._h = host.plugins["XEP-0300"]
73 self._t = host.plugins["XEP-0264"] 73 self._t = host.plugins["XEP-0264"]
97 """post file reception tasks 97 """post file reception tasks
98 98
99 on file is received, this method create hash/thumbnails if necessary 99 on file is received, this method create hash/thumbnails if necessary
100 move the file to the right location, and create metadata entry in database 100 move the file to the right location, and create metadata entry in database
101 """ 101 """
102 name = file_data[u"name"] 102 name = file_data["name"]
103 extra = {} 103 extra = {}
104 104
105 if file_data[u"hash_algo"] == HASH_ALGO: 105 if file_data["hash_algo"] == HASH_ALGO:
106 log.debug(_(u"Reusing already generated hash")) 106 log.debug(_("Reusing already generated hash"))
107 file_hash = file_data[u"hash_hasher"].hexdigest() 107 file_hash = file_data["hash_hasher"].hexdigest()
108 else: 108 else:
109 hasher = self._h.getHasher(HASH_ALGO) 109 hasher = self._h.getHasher(HASH_ALGO)
110 with open("file_path") as f: 110 with open("file_path") as f:
111 file_hash = yield self._h.calculateHash(f, hasher) 111 file_hash = yield self._h.calculateHash(f, hasher)
112 final_path = os.path.join(self.files_path, file_hash) 112 final_path = os.path.join(self.files_path, file_hash)
113 113
114 if os.path.isfile(final_path): 114 if os.path.isfile(final_path):
115 log.debug( 115 log.debug(
116 u"file [{file_hash}] already exists, we can remove temporary one".format( 116 "file [{file_hash}] already exists, we can remove temporary one".format(
117 file_hash=file_hash 117 file_hash=file_hash
118 ) 118 )
119 ) 119 )
120 os.unlink(file_path) 120 os.unlink(file_path)
121 else: 121 else:
122 os.rename(file_path, final_path) 122 os.rename(file_path, final_path)
123 log.debug( 123 log.debug(
124 u"file [{file_hash}] moved to {files_path}".format( 124 "file [{file_hash}] moved to {files_path}".format(
125 file_hash=file_hash, files_path=self.files_path 125 file_hash=file_hash, files_path=self.files_path
126 ) 126 )
127 ) 127 )
128 128
129 mime_type = file_data.get(u"mime_type") 129 mime_type = file_data.get("mime_type")
130 if not mime_type or mime_type == u"application/octet-stream": 130 if not mime_type or mime_type == "application/octet-stream":
131 mime_type = mimetypes.guess_type(name)[0] 131 mime_type = mimetypes.guess_type(name)[0]
132 132
133 if mime_type is not None and mime_type.startswith(u"image"): 133 if mime_type is not None and mime_type.startswith("image"):
134 thumbnails = extra.setdefault(C.KEY_THUMBNAILS, []) 134 thumbnails = extra.setdefault(C.KEY_THUMBNAILS, [])
135 for max_thumb_size in (self._t.SIZE_SMALL, self._t.SIZE_MEDIUM): 135 for max_thumb_size in (self._t.SIZE_SMALL, self._t.SIZE_MEDIUM):
136 try: 136 try:
137 thumb_size, thumb_id = yield self._t.generateThumbnail( 137 thumb_size, thumb_id = yield self._t.generateThumbnail(
138 final_path, 138 final_path,
139 max_thumb_size, 139 max_thumb_size,
140 #  we keep thumbnails for 6 months 140 #  we keep thumbnails for 6 months
141 60 * 60 * 24 * 31 * 6, 141 60 * 60 * 24 * 31 * 6,
142 ) 142 )
143 except Exception as e: 143 except Exception as e:
144 log.warning(_(u"Can't create thumbnail: {reason}").format(reason=e)) 144 log.warning(_("Can't create thumbnail: {reason}").format(reason=e))
145 break 145 break
146 thumbnails.append({u"id": thumb_id, u"size": thumb_size}) 146 thumbnails.append({"id": thumb_id, "size": thumb_size})
147 147
148 self.host.memory.setFile( 148 self.host.memory.setFile(
149 client, 149 client,
150 name=name, 150 name=name,
151 version=u"", 151 version="",
152 file_hash=file_hash, 152 file_hash=file_hash,
153 hash_algo=HASH_ALGO, 153 hash_algo=HASH_ALGO,
154 size=file_data[u"size"], 154 size=file_data["size"],
155 path=file_data.get(u"path"), 155 path=file_data.get("path"),
156 namespace=file_data.get(u"namespace"), 156 namespace=file_data.get("namespace"),
157 mime_type=mime_type, 157 mime_type=mime_type,
158 owner=peer_jid, 158 owner=peer_jid,
159 extra=extra, 159 extra=extra,
160 ) 160 )
161 161
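The hunk above computes a hash of the received file when one is not already available (note that the fallback branch opens the literal string "file_path" rather than the file_path variable, and in text mode), then either deduplicates against an already stored copy or moves the file to a path named after its hash. A standalone sketch of that storage step using hashlib directly; store_received_file is a hypothetical helper, not part of the plugin:

    # Hypothetical helper mirroring the logic above (hashlib stands in for the
    # XEP-0300 hasher, which SàT resolves at runtime).
    import hashlib
    import os

    def store_received_file(tmp_path, files_path):
        hasher = hashlib.sha256()
        with open(tmp_path, "rb") as f:  # hash the actual file, in binary mode
            for chunk in iter(lambda: f.read(2 ** 16), b""):
                hasher.update(chunk)
        file_hash = hasher.hexdigest()
        final_path = os.path.join(files_path, file_hash)
        if os.path.isfile(final_path):
            os.unlink(tmp_path)  # identical content is already stored
        else:
            os.rename(tmp_path, final_path)
        return file_hash
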
@@ -189,53 +189,53 @@
     @defer.inlineCallbacks
     def _retrieveFiles(
         self, client, session, content_data, content_name, file_data, file_elt
     ):
         """This method retrieve a file on request, and send if after checking permissions"""
-        peer_jid = session[u"peer_jid"]
+        peer_jid = session["peer_jid"]
         try:
             found_files = yield self.host.memory.getFiles(
                 client,
                 peer_jid=peer_jid,
-                name=file_data.get(u"name"),
-                file_hash=file_data.get(u"file_hash"),
-                hash_algo=file_data.get(u"hash_algo"),
-                path=file_data.get(u"path"),
-                namespace=file_data.get(u"namespace"),
+                name=file_data.get("name"),
+                file_hash=file_data.get("file_hash"),
+                hash_algo=file_data.get("hash_algo"),
+                path=file_data.get("path"),
+                namespace=file_data.get("namespace"),
             )
         except exceptions.NotFound:
             found_files = None
         except exceptions.PermissionError:
             log.warning(
-                _(u"{peer_jid} is trying to access an unauthorized file: {name}").format(
-                    peer_jid=peer_jid, name=file_data.get(u"name")
+                _("{peer_jid} is trying to access an unauthorized file: {name}").format(
+                    peer_jid=peer_jid, name=file_data.get("name")
                 )
             )
             defer.returnValue(False)

         if not found_files:
             log.warning(
-                _(u"no matching file found ({file_data})").format(file_data=file_data)
+                _("no matching file found ({file_data})").format(file_data=file_data)
             )
             defer.returnValue(False)

         # we only use the first found file
         found_file = found_files[0]
-        if found_file[u'type'] != C.FILE_TYPE_FILE:
-            raise TypeError(u"a file was expected, type is {type_}".format(
-                type_=found_file[u'type']))
-        file_hash = found_file[u"file_hash"]
+        if found_file['type'] != C.FILE_TYPE_FILE:
+            raise TypeError("a file was expected, type is {type_}".format(
+                type_=found_file['type']))
+        file_hash = found_file["file_hash"]
         file_path = os.path.join(self.files_path, file_hash)
-        file_data[u"hash_hasher"] = hasher = self._h.getHasher(found_file[u"hash_algo"])
-        size = file_data[u"size"] = found_file[u"size"]
-        file_data[u"file_hash"] = file_hash
-        file_data[u"hash_algo"] = found_file[u"hash_algo"]
+        file_data["hash_hasher"] = hasher = self._h.getHasher(found_file["hash_algo"])
+        size = file_data["size"] = found_file["size"]
+        file_data["file_hash"] = file_hash
+        file_data["hash_algo"] = found_file["hash_algo"]

         # we complete file_elt so peer can have some details on the file
-        if u"name" not in file_data:
-            file_elt.addElement(u"name", content=found_file[u"name"])
-        file_elt.addElement(u"size", content=unicode(size))
+        if "name" not in file_data:
+            file_elt.addElement("name", content=found_file["name"])
+        file_elt.addElement("size", content=str(size))
         content_data["stream_object"] = stream.FileStreamObject(
             self.host,
             client,
             file_path,
             uid=self._jf.getProgressId(session, content_name),
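
_retrieveFiles above uses Twisted's generator-based coroutines: the method is decorated with @defer.inlineCallbacks, Deferreds are yielded, and early exits go through defer.returnValue(). A minimal, self-contained sketch of that pattern; lookup and its arguments are hypothetical and unrelated to the plugin:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def lookup(files_by_hash, file_hash):
        # defer.succeed stands in for a real asynchronous database query
        found = yield defer.succeed(files_by_hash.get(file_hash))
        if found is None:
            # early exit; on Python 3, a plain "return False" also works here
            defer.returnValue(False)
        defer.returnValue(found)
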
@@ -266,23 +266,23 @@
             return

         comment_elt = file_elt.addElement((NS_COMMENTS, "comments"), content=comments_url)

         try:
-            count = len(extra_args[u"extra"][u"comments"])
+            count = len(extra_args["extra"]["comments"])
         except KeyError:
             count = 0

-        comment_elt["count"] = unicode(count)
+        comment_elt["count"] = str(count)
         return True

     def _getFileComments(self, file_elt, file_data):
         try:
             comments_elt = next(file_elt.elements(NS_COMMENTS, "comments"))
         except StopIteration:
             return
-        file_data["comments_url"] = unicode(comments_elt)
+        file_data["comments_url"] = str(comments_elt)
         file_data["comments_count"] = comments_elt["count"]
         return True

     def _addCommentsData(self, client, iq_elt, owner, node_path, files_data):
         for file_data in files_data:
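
The last hunks attach and read a <comments/> hint on the file element: the element's text carries the URL of the comments node, and its count attribute the number of existing comments. A minimal sketch of that element built with Twisted's domish outside the plugin; the URL is a placeholder:

    from twisted.words.xish import domish

    NS_COMMENTS = "org.salut-a-toi.comments"

    file_elt = domish.Element((None, "file"))
    comment_elt = file_elt.addElement(
        (NS_COMMENTS, "comments"), content="https://files.example.net/comments-node"
    )
    comment_elt["count"] = str(2)
    print(file_elt.toXml())
    # prints something like:
    # <file><comments xmlns='org.salut-a-toi.comments' count='2'>https://files.example.net/comments-node</comments></file>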