Mercurial > libervia-backend
comparison sat_frontends/jp/common.py @ 3040:fee60f17ebac
jp: jp asyncio port:
/!\ this commit is huge. Jp is temporarily not working with `dbus` bridge /!\
This patch implements the port of jp to asyncio, so it is now correctly using the bridge
asynchronously, and it can be used with bridges like `pb`. This also simplifies the code,
notably for things which were previously implemented with many callbacks (like pagination
with RSM).
During the process, some behaviours have been modified/fixed, in jp and backends, check
diff for details.
author | Goffi <goffi@goffi.org> |
---|---|
date | Wed, 25 Sep 2019 08:56:41 +0200 |
parents | d314d4181f30 |
children | d9f328374473 |
comparison
equal
deleted
inserted
replaced
3039:a1bc34f90fa5 | 3040:fee60f17ebac |
---|---|
15 # GNU Affero General Public License for more details. | 15 # GNU Affero General Public License for more details. |
16 | 16 |
17 # You should have received a copy of the GNU Affero General Public License | 17 # You should have received a copy of the GNU Affero General Public License |
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. | 18 # along with this program. If not, see <http://www.gnu.org/licenses/>. |
19 | 19 |
20 import json | |
21 import os | |
22 import os.path | |
23 import time | |
24 import tempfile | |
25 import asyncio | |
26 import shlex | |
27 from pathlib import Path | |
20 from sat_frontends.jp.constants import Const as C | 28 from sat_frontends.jp.constants import Const as C |
21 from sat.core.i18n import _ | 29 from sat.core.i18n import _ |
22 from sat.core import exceptions | 30 from sat.core import exceptions |
23 from sat.tools.common import regex | 31 from sat.tools.common import regex |
24 from sat.tools.common.ansi import ANSI as A | 32 from sat.tools.common.ansi import ANSI as A |
25 from sat.tools.common import uri as xmpp_uri | 33 from sat.tools.common import uri as xmpp_uri |
26 from sat.tools import config | 34 from sat.tools import config |
27 from configparser import NoSectionError, NoOptionError | 35 from configparser import NoSectionError, NoOptionError |
28 from collections import namedtuple | 36 from collections import namedtuple |
29 from functools import partial | |
30 import json | |
31 import os | |
32 import os.path | |
33 import time | |
34 import tempfile | |
35 import subprocess | |
36 import glob | |
37 import shlex | |
38 | 37 |
39 # default arguments used for some known editors (editing with metadata) | 38 # default arguments used for some known editors (editing with metadata) |
40 VIM_SPLIT_ARGS = "-c 'set nospr|vsplit|wincmd w|next|wincmd w'" | 39 VIM_SPLIT_ARGS = "-c 'set nospr|vsplit|wincmd w|next|wincmd w'" |
41 EMACS_SPLIT_ARGS = '--eval "(split-window-horizontally)"' | 40 EMACS_SPLIT_ARGS = '--eval "(split-window-horizontally)"' |
42 EDITOR_ARGS_MAGIC = { | 41 EDITOR_ARGS_MAGIC = { |
74 | 73 |
def getTmpDir(sat_conf, cat_dir, sub_dir=None):
    """Return the directory used to store temporary files.

    @param sat_conf(ConfigParser.ConfigParser): instance opened on sat configuration
    @param cat_dir(str): directory of the category (e.g. "blog")
    @param sub_dir(str): sub directory where data need to be put
        profile can be used here, or special directory name
        sub_dir will be escaped to be usable in path (use regex.pathUnescape to find
        initial str)
    @return (Path): path to the dir
    """
    local_dir = config.getConfig(sat_conf, "", "local_dir", Exception)
    if sub_dir is None:
        return Path(local_dir, cat_dir)
    # sub_dir may contain characters unusable in a path (e.g. a profile name)
    return Path(local_dir, cat_dir, regex.pathEscape(sub_dir))
91 | 90 |
92 | 91 |
93 def parse_args(host, cmd_line, **format_kw): | 92 def parse_args(host, cmd_line, **format_kw): |
94 """Parse command arguments | 93 """Parse command arguments |
95 | 94 |
119 """ | 118 """ |
120 @param sat_conf(ConfigParser.ConfigParser): instance opened on sat configuration | 119 @param sat_conf(ConfigParser.ConfigParser): instance opened on sat configuration |
121 @param cat_dir(unicode): directory to use for drafts | 120 @param cat_dir(unicode): directory to use for drafts |
122 this will be a sub-directory of SàT's local_dir | 121 this will be a sub-directory of SàT's local_dir |
123 @param use_metadata(bool): True is edition need a second file for metadata | 122 @param use_metadata(bool): True is edition need a second file for metadata |
124 most of signature change with use_metadata with an additional metadata argument. | 123 most of signature change with use_metadata with an additional metadata |
125 This is done to raise error if a command needs metadata but forget the flag, and vice versa | 124 argument. |
125 This is done to raise error if a command needs metadata but forget the flag, | |
126 and vice versa | |
126 """ | 127 """ |
127 self.host = host | 128 self.host = host |
128 self.sat_conf = config.parseMainConf() | 129 self.sat_conf = config.parseMainConf() |
129 self.cat_dir_str = cat_dir.encode("utf-8") | 130 self.cat_dir = cat_dir |
130 self.use_metadata = use_metadata | 131 self.use_metadata = use_metadata |
131 | 132 |
132 def secureUnlink(self, path): | 133 def secureUnlink(self, path): |
133 """Unlink given path after keeping it for a while | 134 """Unlink given path after keeping it for a while |
134 | 135 |
135 This method is used to prevent accidental deletion of a draft | 136 This method is used to prevent accidental deletion of a draft |
136 If there are more file in SECURE_UNLINK_DIR than SECURE_UNLINK_MAX, | 137 If there are more file in SECURE_UNLINK_DIR than SECURE_UNLINK_MAX, |
137 older file are deleted | 138 older file are deleted |
138 @param path(str): file to unlink | 139 @param path(Path, str): file to unlink |
139 """ | 140 """ |
140 if not os.path.isfile(path): | 141 path = Path(path).resolve() |
142 if not path.is_file: | |
141 raise OSError("path must link to a regular file") | 143 raise OSError("path must link to a regular file") |
142 if not path.startswith(getTmpDir(self.sat_conf, self.cat_dir_str)): | 144 if path.parent != getTmpDir(self.sat_conf, self.cat_dir): |
143 self.disp( | 145 self.disp( |
144 "File {} is not in SàT temporary hierarchy, we do not remove it".format( | 146 f"File {path} is not in SàT temporary hierarchy, we do not remove " |
145 path | 147 f"it", |
146 ), | |
147 2, | 148 2, |
148 ) | 149 ) |
149 return | 150 return |
150 # we have 2 files per draft with use_metadata, so we double max | 151 # we have 2 files per draft with use_metadata, so we double max |
151 unlink_max = SECURE_UNLINK_MAX * 2 if self.use_metadata else SECURE_UNLINK_MAX | 152 unlink_max = SECURE_UNLINK_MAX * 2 if self.use_metadata else SECURE_UNLINK_MAX |
152 backup_dir = getTmpDir(self.sat_conf, self.cat_dir_str, SECURE_UNLINK_DIR) | 153 backup_dir = getTmpDir(self.sat_conf, self.cat_dir, SECURE_UNLINK_DIR) |
153 if not os.path.exists(backup_dir): | 154 if not os.path.exists(backup_dir): |
154 os.makedirs(backup_dir) | 155 os.makedirs(backup_dir) |
155 filename = os.path.basename(path) | 156 filename = os.path.basename(path) |
156 backup_path = os.path.join(backup_dir, filename) | 157 backup_path = os.path.join(backup_dir, filename) |
157 # we move file to backup dir | 158 # we move file to backup dir |
168 backup_files.sort(key=lambda path: os.stat(path).st_mtime) | 169 backup_files.sort(key=lambda path: os.stat(path).st_mtime) |
169 for path in backup_files[: len(backup_files) - unlink_max]: | 170 for path in backup_files[: len(backup_files) - unlink_max]: |
170 self.host.disp("Purging backup file {}".format(path), 2) | 171 self.host.disp("Purging backup file {}".format(path), 2) |
171 os.unlink(path) | 172 os.unlink(path) |
172 | 173 |
173 def runEditor( | 174 async def runEditor(self, editor_args_opt, content_file_path, content_file_obj, |
174 self, | 175 meta_file_path=None, meta_ori=None): |
175 editor_args_opt, | 176 """Run editor to edit content and metadata |
176 content_file_path, | |
177 content_file_obj, | |
178 meta_file_path=None, | |
179 meta_ori=None, | |
180 ): | |
181 """run editor to edit content and metadata | |
182 | 177 |
183 @param editor_args_opt(unicode): option in [jp] section in configuration for | 178 @param editor_args_opt(unicode): option in [jp] section in configuration for |
184 specific args | 179 specific args |
185 @param content_file_path(str): path to the content file | 180 @param content_file_path(str): path to the content file |
186 @param content_file_obj(file): opened file instance | 181 @param content_file_obj(file): opened file instance |
187 @param meta_file_path(str, None): metadata file path | 182 @param meta_file_path(str, Path, None): metadata file path |
188 if None metadata will not be used | 183 if None metadata will not be used |
189 @param meta_ori(dict, None): original content of metadata | 184 @param meta_ori(dict, None): original content of metadata |
190 can't be used if use_metadata is False | 185 can't be used if use_metadata is False |
191 """ | 186 """ |
192 if not self.use_metadata: | 187 if not self.use_metadata: |
221 args = parse_args(self.host, editor_args, **parse_kwargs) | 216 args = parse_args(self.host, editor_args, **parse_kwargs) |
222 if not args: | 217 if not args: |
223 args = [content_file_path] | 218 args = [content_file_path] |
224 | 219 |
225 # actual editing | 220 # actual editing |
226 editor_exit = subprocess.call([editor] + args) | 221 editor_process = await asyncio.create_subprocess_exec( |
222 editor, *[str(a) for a in args]) | |
223 editor_exit = await editor_process.wait() | |
227 | 224 |
228 # edition will now be checked, and data will be sent if it was a success | 225 # edition will now be checked, and data will be sent if it was a success |
229 if editor_exit != 0: | 226 if editor_exit != 0: |
230 self.disp( | 227 self.disp( |
231 "Editor exited with an error code, so temporary file has not be deleted, and item is not published.\nYou can find temporary file at {path}".format( | 228 f"Editor exited with an error code, so temporary file has not be " |
232 path=content_file_path | 229 f"deleted, and item is not published.\nYou can find temporary file " |
233 ), | 230 f"at {content_file_path}", |
234 error=True, | 231 error=True, |
235 ) | 232 ) |
236 else: | 233 else: |
237 # main content | 234 # main content |
238 try: | 235 try: |
239 with open(content_file_path, "rb") as f: | 236 with content_file_path.open("rb") as f: |
240 content = f.read() | 237 content = f.read() |
241 except (OSError, IOError): | 238 except (OSError, IOError): |
242 self.disp( | 239 self.disp( |
243 "Can read file at {content_path}, have it been deleted?\nCancelling edition".format( | 240 f"Can read file at {content_file_path}, have it been deleted?\n" |
244 content_path=content_file_path | 241 f"Cancelling edition", |
245 ), | |
246 error=True, | 242 error=True, |
247 ) | 243 ) |
248 self.host.quit(C.EXIT_NOT_FOUND) | 244 self.host.quit(C.EXIT_NOT_FOUND) |
249 | 245 |
250 # metadata | 246 # metadata |
251 if self.use_metadata: | 247 if self.use_metadata: |
252 try: | 248 try: |
253 with open(meta_file_path, "rb") as f: | 249 with meta_file_path.open("rb") as f: |
254 metadata = json.load(f) | 250 metadata = json.load(f) |
255 except (OSError, IOError): | 251 except (OSError, IOError): |
256 self.disp( | 252 self.disp( |
257 "Can read file at {meta_file_path}, have it been deleted?\nCancelling edition".format( | 253 f"Can read file at {meta_file_path}, have it been deleted?\n" |
258 content_path=content_file_path, meta_path=meta_file_path | 254 f"Cancelling edition", |
259 ), | |
260 error=True, | 255 error=True, |
261 ) | 256 ) |
262 self.host.quit(C.EXIT_NOT_FOUND) | 257 self.host.quit(C.EXIT_NOT_FOUND) |
263 except ValueError: | 258 except ValueError: |
264 self.disp( | 259 self.disp( |
265 "Can't parse metadata, please check it is correct JSON format. Cancelling edition.\n" | 260 f"Can't parse metadata, please check it is correct JSON format. " |
266 + "You can find tmp file at {content_path} and temporary meta file at {meta_path}.".format( | 261 f"Cancelling edition.\nYou can find tmp file at " |
267 content_path=content_file_path, meta_path=meta_file_path | 262 f"{content_file_path} and temporary meta file at " |
268 ), | 263 f"{meta_file_path}.", |
269 error=True, | 264 error=True, |
270 ) | 265 ) |
271 self.host.quit(C.EXIT_DATA_ERROR) | 266 self.host.quit(C.EXIT_DATA_ERROR) |
272 | 267 |
273 if self.use_metadata and not metadata.get("publish", True): | 268 if self.use_metadata and not metadata.get("publish", True): |
274 self.disp( | 269 self.disp( |
275 'Publication blocked by "publish" key in metadata, cancelling edition.\n\n' | 270 f'Publication blocked by "publish" key in metadata, cancelling ' |
276 + "temporary file path:\t{content_path}\nmetadata file path:\t{meta_path}".format( | 271 f'edition.\n\ntemporary file path:\t{content_file_path}\nmetadata ' |
277 content_path=content_file_path, meta_path=meta_file_path | 272 f'file path:\t{meta_file_path}', |
278 ), | |
279 error=True, | 273 error=True, |
280 ) | 274 ) |
281 self.host.quit() | 275 self.host.quit() |
282 | 276 |
283 if len(content) == 0: | 277 if len(content) == 0: |
284 self.disp("Content is empty, cancelling the edition") | 278 self.disp("Content is empty, cancelling the edition") |
285 if not content_file_path.startswith( | 279 if content_file_path.parent != getTmpDir(self.sat_conf, self.cat_dir): |
286 getTmpDir(self.sat_conf, self.cat_dir_str) | |
287 ): | |
288 self.disp( | 280 self.disp( |
289 "File are not in SàT temporary hierarchy, we do not remove them", | 281 "File are not in SàT temporary hierarchy, we do not remove them", |
290 2, | 282 2, |
291 ) | 283 ) |
292 self.host.quit() | 284 self.host.quit() |
293 self.disp("Deletion of {}".format(content_file_path), 2) | 285 self.disp(f"Deletion of {content_file_path}", 2) |
294 os.unlink(content_file_path) | 286 os.unlink(content_file_path) |
295 if self.use_metadata: | 287 if self.use_metadata: |
296 self.disp("Deletion of {}".format(meta_file_path), 2) | 288 self.disp(f"Deletion of {meta_file_path}".format(meta_file_path), 2) |
297 os.unlink(meta_file_path) | 289 os.unlink(meta_file_path) |
298 self.host.quit() | 290 self.host.quit() |
299 | 291 |
300 # time to re-check the hash | 292 # time to re-check the hash |
301 elif tmp_ori_hash == hashlib.sha1(content).digest() and ( | 293 elif tmp_ori_hash == hashlib.sha1(content).digest() and ( |
307 else: | 299 else: |
308 # we can now send the item | 300 # we can now send the item |
309 content = content.decode("utf-8-sig") # we use utf-8-sig to avoid BOM | 301 content = content.decode("utf-8-sig") # we use utf-8-sig to avoid BOM |
310 try: | 302 try: |
311 if self.use_metadata: | 303 if self.use_metadata: |
312 self.publish(content, metadata) | 304 await self.publish(content, metadata) |
313 else: | 305 else: |
314 self.publish(content) | 306 await self.publish(content) |
315 except Exception as e: | 307 except Exception as e: |
316 if self.use_metadata: | 308 if self.use_metadata: |
317 self.disp( | 309 self.disp( |
318 "Error while sending your item, the temporary files have been kept at {content_path} and {meta_path}: {reason}".format( | 310 f"Error while sending your item, the temporary files have " |
319 content_path=content_file_path, | 311 f"been kept at {content_file_path} and {meta_file_path}: " |
320 meta_path=meta_file_path, | 312 f"{e}", |
321 reason=e, | |
322 ), | |
323 error=True, | 313 error=True, |
324 ) | 314 ) |
325 else: | 315 else: |
326 self.disp( | 316 self.disp( |
327 "Error while sending your item, the temporary file has been kept at {content_path}: {reason}".format( | 317 f"Error while sending your item, the temporary file has been " |
328 content_path=content_file_path, reason=e | 318 f"kept at {content_file_path}: {e}", |
329 ), | |
330 error=True, | 319 error=True, |
331 ) | 320 ) |
332 self.host.quit(1) | 321 self.host.quit(1) |
333 | 322 |
334 self.secureUnlink(content_file_path) | 323 self.secureUnlink(content_file_path) |
335 if self.use_metadata: | 324 if self.use_metadata: |
336 self.secureUnlink(meta_file_path) | 325 self.secureUnlink(meta_file_path) |
337 | 326 |
    async def publish(self, content):
        """Send the edited content (abstract; must be implemented by subclasses)."""
        # if metadata is needed, publish will be called with it as last argument
        raise NotImplementedError
341 | 330 |
342 def getTmpFile(self): | 331 def getTmpFile(self): |
343 """Create a temporary file | 332 """Create a temporary file |
344 | 333 |
345 @param suff (str): suffix to use for the filename | 334 @return (tuple(file, Path)): opened (w+b) file object and file path |
346 @return (tuple(file, str)): opened (w+b) file object and file path | |
347 """ | 335 """ |
348 suff = "." + self.getTmpSuff() | 336 suff = "." + self.getTmpSuff() |
349 cat_dir_str = self.cat_dir_str | 337 cat_dir_str = self.cat_dir |
350 tmp_dir = getTmpDir(self.sat_conf, self.cat_dir_str, self.profile.encode("utf-8")) | 338 tmp_dir = getTmpDir(self.sat_conf, self.cat_dir, self.profile) |
351 if not os.path.exists(tmp_dir): | 339 if not tmp_dir.exists(): |
352 try: | 340 try: |
353 os.makedirs(tmp_dir) | 341 tmp_dir.mkdir(parents=True) |
354 except OSError as e: | 342 except OSError as e: |
355 self.disp( | 343 self.disp( |
356 "Can't create {path} directory: {reason}".format( | 344 f"Can't create {tmp_dir} directory: {e}", |
357 path=tmp_dir, reason=e | |
358 ), | |
359 error=True, | 345 error=True, |
360 ) | 346 ) |
361 self.host.quit(1) | 347 self.host.quit(1) |
362 try: | 348 try: |
363 fd, path = tempfile.mkstemp( | 349 fd, path = tempfile.mkstemp( |
364 suffix=suff.encode("utf-8"), | 350 suffix=suff, |
365 prefix=time.strftime(cat_dir_str + "_%Y-%m-%d_%H:%M:%S_"), | 351 prefix=time.strftime(cat_dir_str + "_%Y-%m-%d_%H:%M:%S_"), |
366 dir=tmp_dir, | 352 dir=tmp_dir, |
367 text=True, | 353 text=True, |
368 ) | 354 ) |
369 return os.fdopen(fd, "w+b"), path | 355 return os.fdopen(fd, "w+b"), Path(path) |
370 except OSError as e: | 356 except OSError as e: |
371 self.disp( | 357 self.disp( |
372 "Can't create temporary file: {reason}".format(reason=e), error=True | 358 f"Can't create temporary file: {e}", error=True |
373 ) | 359 ) |
374 self.host.quit(1) | 360 self.host.quit(1) |
375 | 361 |
376 def getCurrentFile(self, profile): | 362 def getCurrentFile(self, profile): |
377 """Get most recently edited file | 363 """Get most recently edited file |
378 | 364 |
379 @param profile(unicode): profile linked to the draft | 365 @param profile(unicode): profile linked to the draft |
380 @return(str): full path of current file | 366 @return(Path): full path of current file |
381 """ | 367 """ |
382 # we guess the item currently edited by choosing | 368 # we guess the item currently edited by choosing |
383 # the most recent file corresponding to temp file pattern | 369 # the most recent file corresponding to temp file pattern |
384 # in tmp_dir, excluding metadata files | 370 # in tmp_dir, excluding metadata files |
385 cat_dir_str = self.cat_dir_str | 371 tmp_dir = getTmpDir(self.sat_conf, self.cat_dir, profile) |
386 tmp_dir = getTmpDir(self.sat_conf, self.cat_dir_str, profile.encode("utf-8")) | |
387 available = [ | 372 available = [ |
388 path | 373 p |
389 for path in glob.glob(os.path.join(tmp_dir, cat_dir_str + "_*")) | 374 for p in tmp_dir.glob(f'{self.cat_dir}_*') |
390 if not path.endswith(METADATA_SUFF) | 375 if not p.match(f"*{METADATA_SUFF}") |
391 ] | 376 ] |
392 if not available: | 377 if not available: |
393 self.disp( | 378 self.disp( |
394 "Could not find any content draft in {path}".format(path=tmp_dir), | 379 f"Could not find any content draft in {tmp_dir}", |
395 error=True, | 380 error=True, |
396 ) | 381 ) |
397 self.host.quit(1) | 382 self.host.quit(1) |
398 return max(available, key=lambda path: os.stat(path).st_mtime) | 383 return max(available, key=lambda p: p.stat().st_mtime) |
399 | 384 |
    async def getItemData(self, service, node, item):
        """Retrieve item data for edition (abstract; implemented by subclasses).

        @param service: PubSub service of the item
        @param node: PubSub node of the item
        @param item: id of the item to retrieve
        @return (tuple): formatted content, metadata (only if use_metadata is
            set), and item id
        """
        raise NotImplementedError
403 | 388 |
404 def getTmpSuff(self): | 389 def getTmpSuff(self): |
405 """return suffix used for content file""" | 390 """return suffix used for content file""" |
406 return "xml" | 391 return "xml" |
407 | 392 |
408 def getItemPath(self): | 393 async def getItemPath(self): |
409 """retrieve item path (i.e. service and node) from item argument | 394 """Retrieve item path (i.e. service and node) from item argument |
410 | 395 |
411 This method is obviously only useful for edition of PubSub based features | 396 This method is obviously only useful for edition of PubSub based features |
412 """ | 397 """ |
413 service = self.args.service | 398 service = self.args.service |
414 node = self.args.node | 399 node = self.args.node |
417 | 402 |
418 if self.args.current: | 403 if self.args.current: |
419 # user wants to continue current draft | 404 # user wants to continue current draft |
420 content_file_path = self.getCurrentFile(self.profile) | 405 content_file_path = self.getCurrentFile(self.profile) |
421 self.disp("Continuing edition of current draft", 2) | 406 self.disp("Continuing edition of current draft", 2) |
422 content_file_obj = open(content_file_path, "r+b") | 407 content_file_obj = content_file_path.open("r+b") |
423 # we seek at the end of file in case of an item already exist | 408 # we seek at the end of file in case of an item already exist |
424 # this will write content of the existing item at the end of the draft. | 409 # this will write content of the existing item at the end of the draft. |
425 # This way no data should be lost. | 410 # This way no data should be lost. |
426 content_file_obj.seek(0, os.SEEK_END) | 411 content_file_obj.seek(0, os.SEEK_END) |
427 elif self.args.draft_path: | 412 elif self.args.draft_path: |
428 # there is an existing draft that we use | 413 # there is an existing draft that we use |
429 content_file_path = os.path.expanduser(self.args.draft_path) | 414 content_file_path = self.args.draft_path.expanduser() |
430 content_file_obj = open(content_file_path, "r+b") | 415 content_file_obj = content_file_path.open("r+b") |
431 # we seek at the end for the same reason as above | 416 # we seek at the end for the same reason as above |
432 content_file_obj.seek(0, os.SEEK_END) | 417 content_file_obj.seek(0, os.SEEK_END) |
433 else: | 418 else: |
434 # we need a temporary file | 419 # we need a temporary file |
435 content_file_obj, content_file_path = self.getTmpFile() | 420 content_file_obj, content_file_path = self.getTmpFile() |
436 | 421 |
437 if item or last_item: | 422 if item or last_item: |
438 self.disp("Editing requested published item", 2) | 423 self.disp("Editing requested published item", 2) |
439 try: | 424 try: |
440 if self.use_metadata: | 425 if self.use_metadata: |
441 content, metadata, item = self.getItemData(service, node, item) | 426 content, metadata, item = await self.getItemData(service, node, item) |
442 else: | 427 else: |
443 content, item = self.getItemData(service, node, item) | 428 content, item = await self.getItemData(service, node, item) |
444 except Exception as e: | 429 except Exception as e: |
445 # FIXME: ugly but we have not good may to check errors in bridge | 430 # FIXME: ugly but we have not good may to check errors in bridge |
446 if "item-not-found" in str(e): | 431 if "item-not-found" in str(e): |
447 # item doesn't exist, we create a new one with requested id | 432 # item doesn't exist, we create a new one with requested id |
448 metadata = None | 433 metadata = None |
449 if last_item: | 434 if last_item: |
450 self.disp(_("no item found at all, we create a new one"), 2) | 435 self.disp(_("no item found at all, we create a new one"), 2) |
451 else: | 436 else: |
452 self.disp( | 437 self.disp( |
453 _( | 438 _( |
454 'item "{item_id}" not found, we create a new item with this id' | 439 f'item "{item}" not found, we create a new item with' |
455 ).format(item_id=item), | 440 f'this id' |
441 ), | |
456 2, | 442 2, |
457 ) | 443 ) |
458 content_file_obj.seek(0) | 444 content_file_obj.seek(0) |
459 else: | 445 else: |
460 self.disp("Error while retrieving item: {}".format(e)) | 446 self.disp(f"Error while retrieving item: {e}") |
461 self.host.quit(C.EXIT_ERROR) | 447 self.host.quit(C.EXIT_ERROR) |
462 else: | 448 else: |
463 # item exists, we write content | 449 # item exists, we write content |
464 if content_file_obj.tell() != 0: | 450 if content_file_obj.tell() != 0: |
465 # we already have a draft, | 451 # we already have a draft, |
466 # we copy item content after it and add an indicator | 452 # we copy item content after it and add an indicator |
467 content_file_obj.write("\n*****\n") | 453 content_file_obj.write("\n*****\n") |
468 content_file_obj.write(content.encode("utf-8")) | 454 content_file_obj.write(content.encode("utf-8")) |
469 content_file_obj.seek(0) | 455 content_file_obj.seek(0) |
470 self.disp( | 456 self.disp( |
471 _('item "{item_id}" found, we edit it').format(item_id=item), 2 | 457 _(f'item "{item}" found, we edit it'), 2 |
472 ) | 458 ) |
473 else: | 459 else: |
474 self.disp("Editing a new item", 2) | 460 self.disp("Editing a new item", 2) |
475 if self.use_metadata: | 461 if self.use_metadata: |
476 metadata = None | 462 metadata = None |
755 kwargs_ = {"col_sep": " ", "head_line_sep": " ", "show_borders": False} | 741 kwargs_ = {"col_sep": " ", "head_line_sep": " ", "show_borders": False} |
756 kwargs_.update(kwargs) | 742 kwargs_.update(kwargs) |
757 return self.display(**kwargs_) | 743 return self.display(**kwargs_) |
758 | 744 |
759 | 745 |
async def fill_well_known_uri(command, path, key, meta_map=None):
    """Look for URIs in well-known location and fill appropriate args if suitable

    @param command(CommandBase): command instance
        args of this instance will be updated with found values
    @param path(unicode): absolute path to use as a starting point to look for URIs
    @param key(unicode): key to look for
    @param meta_map(dict, None): if not None, map metadata to arg name
        key is metadata used attribute name
        value is name to actually use, or None to ignore
        use empty dict to only retrieve URI
        possible keys are currently:
            - labels
    """
    args = command.args
    if args.service or args.node:
        # we only look for URIs if a service and a node are not already specified
        return

    host = command.host

    try:
        uris_data = await host.bridge.URIFind(path, [key])
    except Exception as e:
        host.disp(f"can't find {key} URI: {e}", error=True)
        host.quit(C.EXIT_BRIDGE_ERRBACK)

    try:
        uri_data = uris_data[key]
    except KeyError:
        host.disp(
            _(f"No {key} URI specified for this project, please specify service and "
              f"node"),
            error=True,
        )
        host.quit(C.EXIT_NOT_FOUND)

    uri = uri_data["uri"]

    # set extra metadata if they are specified
    for data_key in ['labels']:
        new_values_json = uri_data.get(data_key)
        # FIX: test the value just retrieved, not uri_data (uri_data is always
        # set at this point); otherwise a missing metadata key would reach
        # json.loads(None) below
        if new_values_json is not None:
            if meta_map is None:
                dest = data_key
            else:
                dest = meta_map.get(data_key)
                if dest is None:
                    # this metadata is deliberately ignored by the caller
                    continue

            try:
                values = getattr(args, data_key)
            except AttributeError:
                raise exceptions.InternalError(f'there is no {data_key!r} arguments')
            else:
                if values is None:
                    values = []
                values.extend(json.loads(new_values_json))
                setattr(args, dest, values)

    parsed_uri = xmpp_uri.parseXMPPUri(uri)
    try:
        args.service = parsed_uri["path"]
        args.node = parsed_uri["node"]
    except KeyError:
        host.disp(_(f"Invalid URI found: {uri}"), error=True)
        host.quit(C.EXIT_DATA_ERROR)