comparison sat_frontends/jp/common.py @ 3028:ab2696e34d29

Python 3 port: /!\ this is a huge commit /!\ Starting from this commit, SàT needs Python 3.6+. /!\ SàT may be unstable or some features may not work anymore; this will improve with time. This patch ports backend, bridge and frontends to Python 3. Roughly this has been done this way:
- the 2to3 tool has been applied (with Python 3.7)
- all references to python2 have been replaced with python3 (notably shebangs)
- fixed files not handled by 2to3 (notably the shell script)
- several manual fixes
- fixed issues reported by Python 3 that were not handled in Python 2
- replaced "async" with "async_" when needed (it's a reserved word since Python 3.7)
- replaced zope's "implements" with the @implementer decorator (a sketch of this change follows the changeset header below)
- temporary hack to handle data pickled in database, as str or bytes may be returned, to be checked later
- fixed hash comparison for password
- removed some code which is not needed anymore with Python 3
- deactivated some code which needs to be checked (notably certificate validation)
- tested with jp, fixed reported issues until some basic commands worked
- ported Primitivus (after porting dependencies like urwid satext)
- more manual fixes
author Goffi <goffi@goffi.org>
date Tue, 13 Aug 2019 19:08:41 +0200
parents d9491cb81726
children d314d4181f30
3027:ff5bcb12ae60 3028:ab2696e34d29
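
The commit message above mentions replacing zope's class-level "implements" call with the "@implementer" decorator, the zope.interface spelling that works on Python 3. A minimal sketch of that pattern, with a hypothetical interface and class (neither appears in this file):

    from zope.interface import Interface, implementer

    class IExampleService(Interface):
        """Hypothetical interface, only for illustration."""

    # Python 2 style (class advice), broken on Python 3:
    #     class ExampleService(object):
    #         implements(IExampleService)

    # Python 3 style, as described in the commit message:
    @implementer(IExampleService)
    class ExampleService(object):
        pass
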
22 from sat.core import exceptions 22 from sat.core import exceptions
23 from sat.tools.common import regex 23 from sat.tools.common import regex
24 from sat.tools.common.ansi import ANSI as A 24 from sat.tools.common.ansi import ANSI as A
25 from sat.tools.common import uri as xmpp_uri 25 from sat.tools.common import uri as xmpp_uri
26 from sat.tools import config 26 from sat.tools import config
27 from ConfigParser import NoSectionError, NoOptionError 27 from configparser import NoSectionError, NoOptionError
28 from collections import namedtuple 28 from collections import namedtuple
29 from functools import partial 29 from functools import partial
30 import json 30 import json
31 import os 31 import os
32 import os.path 32 import os.path
53 53
54 54
55 def ansi_ljust(s, width): 55 def ansi_ljust(s, width):
56 """ljust method handling ANSI escape codes""" 56 """ljust method handling ANSI escape codes"""
57 cleaned = regex.ansiRemove(s) 57 cleaned = regex.ansiRemove(s)
58 return s + u" " * (width - len(cleaned)) 58 return s + " " * (width - len(cleaned))
59 59
60 60
61 def ansi_center(s, width): 61 def ansi_center(s, width):
62 """ljust method handling ANSI escape codes""" 62 """center method handling ANSI escape codes"""
63 cleaned = regex.ansiRemove(s) 63 cleaned = regex.ansiRemove(s)
64 diff = width - len(cleaned) 64 diff = width - len(cleaned)
65 half = diff / 2 65 half = diff // 2
66 return half * u" " + s + (half + diff % 2) * u" " 66 return half * " " + s + (half + diff % 2) * " "
67 67
68 68
69 def ansi_rjust(s, width): 69 def ansi_rjust(s, width):
70 """ljust method handling ANSI escape codes""" 70 """rjust method handling ANSI escape codes"""
71 cleaned = regex.ansiRemove(s) 71 cleaned = regex.ansiRemove(s)
72 return u" " * (width - len(cleaned)) + s 72 return " " * (width - len(cleaned)) + s
73 73
74 74
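
The three helpers above pad a string to a given width based on its visible length: ANSI escape sequences are stripped (via regex.ansiRemove) before measuring, so colored table cells still line up. A stand-alone sketch of the same idea, using a rough ANSI-stripping regex as a stand-in for regex.ansiRemove:

    import re

    ANSI_RE = re.compile(r"\x1b\[[0-9;]*m")  # rough stand-in for regex.ansiRemove

    def visible_ljust(s, width):
        """Left-justify s to width visible columns, ignoring ANSI style codes."""
        visible_len = len(ANSI_RE.sub("", s))
        return s + " " * (width - visible_len)

    colored = "\x1b[31mred\x1b[0m"            # displays as "red" (3 visible chars)
    print(repr(visible_ljust(colored, 6)))    # padded by 3 spaces, despite the escape codes
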
75 def getTmpDir(sat_conf, cat_dir, sub_dir=None): 75 def getTmpDir(sat_conf, cat_dir, sub_dir=None):
76 """Return directory used to store temporary files 76 """Return directory used to store temporary files
77 77
101 # we split the arguments and add the known fields 101 # we split the arguments and add the known fields
102 # we split arguments first to avoid escaping issues in file names 102 # we split arguments first to avoid escaping issues in file names
103 return [a.format(**format_kw) for a in shlex.split(cmd_line)] 103 return [a.format(**format_kw) for a in shlex.split(cmd_line)]
104 except ValueError as e: 104 except ValueError as e:
105 host.disp( 105 host.disp(
106 u"Couldn't parse editor cmd [{cmd}]: {reason}".format(cmd=cmd_line, reason=e) 106 "Couldn't parse editor cmd [{cmd}]: {reason}".format(cmd=cmd_line, reason=e)
107 ) 107 )
108 return [] 108 return []
109 109
110 110
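
The hunk above splits the configured editor command line with shlex before substituting the known fields, so a file name containing spaces stays a single argument instead of being re-tokenised. A short sketch of that order of operations (the command line and file name are made up):

    import shlex

    # hypothetical values, only for illustration
    cmd_line = "vim -f {content_file}"
    format_kw = {"content_file": "/tmp/sat draft.xml"}   # note the space in the file name

    # split first, substitute after: the space stays inside one argument
    args = [a.format(**format_kw) for a in shlex.split(cmd_line)]
    print(args)   # ['vim', '-f', '/tmp/sat draft.xml']
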
111 class BaseEdit(object): 111 class BaseEdit(object):
112 u"""base class for editing commands 112 """base class for editing commands
113 113
114 This class allows to edit file for PubSub or something else. 114 This class allows editing a file for PubSub or something else.
115 It works with temporary files in SàT local_dir, in a "cat_dir" subdir 115 It works with temporary files in SàT local_dir, in a "cat_dir" subdir
116 """ 116 """
117 117
136 If there are more file in SECURE_UNLINK_DIR than SECURE_UNLINK_MAX, 136 If there are more files in SECURE_UNLINK_DIR than SECURE_UNLINK_MAX,
137 older file are deleted 137 older files are deleted
138 @param path(str): file to unlink 138 @param path(str): file to unlink
139 """ 139 """
140 if not os.path.isfile(path): 140 if not os.path.isfile(path):
141 raise OSError(u"path must link to a regular file") 141 raise OSError("path must link to a regular file")
142 if not path.startswith(getTmpDir(self.sat_conf, self.cat_dir_str)): 142 if not path.startswith(getTmpDir(self.sat_conf, self.cat_dir_str)):
143 self.disp( 143 self.disp(
144 u"File {} is not in SàT temporary hierarchy, we do not remove it".format( 144 "File {} is not in SàT temporary hierarchy, we do not remove it".format(
145 path.decode("utf-8") 145 path
146 ), 146 ),
147 2, 147 2,
148 ) 148 )
149 return 149 return
150 # we have 2 files per draft with use_metadata, so we double max 150 # we have 2 files per draft with use_metadata, so we double max
154 os.makedirs(backup_dir) 154 os.makedirs(backup_dir)
155 filename = os.path.basename(path) 155 filename = os.path.basename(path)
156 backup_path = os.path.join(backup_dir, filename) 156 backup_path = os.path.join(backup_dir, filename)
157 # we move file to backup dir 157 # we move file to backup dir
158 self.host.disp( 158 self.host.disp(
159 u"Backuping file {src} to {dst}".format( 159 "Backing up file {src} to {dst}".format(
160 src=path.decode("utf-8"), dst=backup_path.decode("utf-8") 160 src=path, dst=backup_path
161 ), 161 ),
162 1, 162 1,
163 ) 163 )
164 os.rename(path, backup_path) 164 os.rename(path, backup_path)
165 # and if we exceeded the limit, we remove older file 165 # and if we exceeded the limit, we remove older files
166 backup_files = [os.path.join(backup_dir, f) for f in os.listdir(backup_dir)] 166 backup_files = [os.path.join(backup_dir, f) for f in os.listdir(backup_dir)]
167 if len(backup_files) > unlink_max: 167 if len(backup_files) > unlink_max:
168 backup_files.sort(key=lambda path: os.stat(path).st_mtime) 168 backup_files.sort(key=lambda path: os.stat(path).st_mtime)
169 for path in backup_files[: len(backup_files) - unlink_max]: 169 for path in backup_files[: len(backup_files) - unlink_max]:
170 self.host.disp(u"Purging backup file {}".format(path.decode("utf-8")), 2) 170 self.host.disp("Purging backup file {}".format(path), 2)
171 os.unlink(path) 171 os.unlink(path)
172 172
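
The secure-unlink hunk above never deletes a draft directly: it refuses anything outside the SàT temporary hierarchy, moves the file into a backup directory, then purges the oldest backups once the directory holds more than the allowed number of files. A minimal sketch of the purge step, with backup_dir and unlink_max as assumed parameters:

    import os

    def purge_oldest(backup_dir, unlink_max):
        """Keep at most unlink_max files in backup_dir, deleting the oldest first."""
        backup_files = [os.path.join(backup_dir, f) for f in os.listdir(backup_dir)]
        if len(backup_files) > unlink_max:
            # sort oldest first, then drop everything beyond the allowed count
            backup_files.sort(key=lambda p: os.stat(p).st_mtime)
            for path in backup_files[: len(backup_files) - unlink_max]:
                os.unlink(path)
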
173 def runEditor( 173 def runEditor(
174 self, 174 self,
175 editor_args_opt, 175 editor_args_opt,
226 editor_exit = subprocess.call([editor] + args) 226 editor_exit = subprocess.call([editor] + args)
227 227
228 # edition will now be checked, and data will be sent if it was a success 228 # edition will now be checked, and data will be sent if it was a success
229 if editor_exit != 0: 229 if editor_exit != 0:
230 self.disp( 230 self.disp(
231 u"Editor exited with an error code, so temporary file has not be deleted, and item is not published.\nYou can find temporary file at {path}".format( 231 "Editor exited with an error code, so the temporary file has not been deleted and the item is not published.\nYou can find the temporary file at {path}".format(
232 path=content_file_path 232 path=content_file_path
233 ), 233 ),
234 error=True, 234 error=True,
235 ) 235 )
236 else: 236 else:
238 try: 238 try:
239 with open(content_file_path, "rb") as f: 239 with open(content_file_path, "rb") as f:
240 content = f.read() 240 content = f.read()
241 except (OSError, IOError): 241 except (OSError, IOError):
242 self.disp( 242 self.disp(
243 u"Can read file at {content_path}, have it been deleted?\nCancelling edition".format( 243 "Can't read file at {content_path}, has it been deleted?\nCancelling edition".format(
244 content_path=content_file_path 244 content_path=content_file_path
245 ), 245 ),
246 error=True, 246 error=True,
247 ) 247 )
248 self.host.quit(C.EXIT_NOT_FOUND) 248 self.host.quit(C.EXIT_NOT_FOUND)
252 try: 252 try:
253 with open(meta_file_path, "rb") as f: 253 with open(meta_file_path, "rb") as f:
254 metadata = json.load(f) 254 metadata = json.load(f)
255 except (OSError, IOError): 255 except (OSError, IOError):
256 self.disp( 256 self.disp(
257 u"Can read file at {meta_file_path}, have it been deleted?\nCancelling edition".format( 257 "Can't read file at {meta_path}, has it been deleted?\nCancelling edition".format(
258 content_path=content_file_path, meta_path=meta_file_path 258 content_path=content_file_path, meta_path=meta_file_path
259 ), 259 ),
260 error=True, 260 error=True,
261 ) 261 )
262 self.host.quit(C.EXIT_NOT_FOUND) 262 self.host.quit(C.EXIT_NOT_FOUND)
263 except ValueError: 263 except ValueError:
264 self.disp( 264 self.disp(
265 u"Can't parse metadata, please check it is correct JSON format. Cancelling edition.\n" 265 "Can't parse metadata, please check it is correct JSON format. Cancelling edition.\n"
266 + "You can find tmp file at {content_path} and temporary meta file at {meta_path}.".format( 266 + "You can find tmp file at {content_path} and temporary meta file at {meta_path}.".format(
267 content_path=content_file_path, meta_path=meta_file_path 267 content_path=content_file_path, meta_path=meta_file_path
268 ), 268 ),
269 error=True, 269 error=True,
270 ) 270 )
271 self.host.quit(C.EXIT_DATA_ERROR) 271 self.host.quit(C.EXIT_DATA_ERROR)
272 272
273 if self.use_metadata and not metadata.get("publish", True): 273 if self.use_metadata and not metadata.get("publish", True):
274 self.disp( 274 self.disp(
275 u'Publication blocked by "publish" key in metadata, cancelling edition.\n\n' 275 'Publication blocked by "publish" key in metadata, cancelling edition.\n\n'
276 + "temporary file path:\t{content_path}\nmetadata file path:\t{meta_path}".format( 276 + "temporary file path:\t{content_path}\nmetadata file path:\t{meta_path}".format(
277 content_path=content_file_path, meta_path=meta_file_path 277 content_path=content_file_path, meta_path=meta_file_path
278 ), 278 ),
279 error=True, 279 error=True,
280 ) 280 )
281 self.host.quit() 281 self.host.quit()
282 282
283 if len(content) == 0: 283 if len(content) == 0:
284 self.disp(u"Content is empty, cancelling the edition") 284 self.disp("Content is empty, cancelling the edition")
285 if not content_file_path.startswith( 285 if not content_file_path.startswith(
286 getTmpDir(self.sat_conf, self.cat_dir_str) 286 getTmpDir(self.sat_conf, self.cat_dir_str)
287 ): 287 ):
288 self.disp( 288 self.disp(
289 u"File are not in SàT temporary hierarchy, we do not remove them", 289 "Files are not in SàT temporary hierarchy, we do not remove them",
290 2, 290 2,
291 ) 291 )
292 self.host.quit() 292 self.host.quit()
293 self.disp(u"Deletion of {}".format(content_file_path.decode("utf-8")), 2) 293 self.disp("Deletion of {}".format(content_file_path), 2)
294 os.unlink(content_file_path) 294 os.unlink(content_file_path)
295 if self.use_metadata: 295 if self.use_metadata:
296 self.disp(u"Deletion of {}".format(meta_file_path.decode("utf-8")), 2) 296 self.disp("Deletion of {}".format(meta_file_path), 2)
297 os.unlink(meta_file_path) 297 os.unlink(meta_file_path)
298 self.host.quit() 298 self.host.quit()
299 299
300 # time to re-check the hash 300 # time to re-check the hash
301 elif tmp_ori_hash == hashlib.sha1(content).digest() and ( 301 elif tmp_ori_hash == hashlib.sha1(content).digest() and (
302 not self.use_metadata or meta_ori == metadata 302 not self.use_metadata or meta_ori == metadata
303 ): 303 ):
304 self.disp(u"The content has not been modified, cancelling the edition") 304 self.disp("The content has not been modified, cancelling the edition")
305 self.host.quit() 305 self.host.quit()
306 306
307 else: 307 else:
308 # we can now send the item 308 # we can now send the item
309 content = content.decode("utf-8-sig") # we use utf-8-sig to avoid BOM 309 content = content.decode("utf-8-sig") # we use utf-8-sig to avoid BOM
313 else: 313 else:
314 self.publish(content) 314 self.publish(content)
315 except Exception as e: 315 except Exception as e:
316 if self.use_metadata: 316 if self.use_metadata:
317 self.disp( 317 self.disp(
318 u"Error while sending your item, the temporary files have been kept at {content_path} and {meta_path}: {reason}".format( 318 "Error while sending your item, the temporary files have been kept at {content_path} and {meta_path}: {reason}".format(
319 content_path=content_file_path, 319 content_path=content_file_path,
320 meta_path=meta_file_path, 320 meta_path=meta_file_path,
321 reason=e, 321 reason=e,
322 ), 322 ),
323 error=True, 323 error=True,
324 ) 324 )
325 else: 325 else:
326 self.disp( 326 self.disp(
327 u"Error while sending your item, the temporary file has been kept at {content_path}: {reason}".format( 327 "Error while sending your item, the temporary file has been kept at {content_path}: {reason}".format(
328 content_path=content_file_path, reason=e 328 content_path=content_file_path, reason=e
329 ), 329 ),
330 error=True, 330 error=True,
331 ) 331 )
332 self.host.quit(1) 332 self.host.quit(1)
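
After the editor exits, runEditor above only publishes when something actually changed: it compares the SHA-1 of the edited content with the hash taken before edition (and, when use_metadata is set, compares the metadata too), then decodes with utf-8-sig so a BOM added by some editors does not end up in the published item. A self-contained sketch of that check, with made-up byte strings:

    import hashlib

    # made-up inputs: bytes read before and after the editor ran
    original_bytes = b"\xef\xbb\xbfhello"     # a UTF-8 BOM followed by the content
    edited_bytes = original_bytes             # unchanged in this example

    tmp_ori_hash = hashlib.sha1(original_bytes).digest()

    if tmp_ori_hash == hashlib.sha1(edited_bytes).digest():
        print("content not modified, cancelling the edition")
    else:
        content = edited_bytes.decode("utf-8-sig")   # strips the BOM if present
        print("publishing:", content)
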
351 if not os.path.exists(tmp_dir): 351 if not os.path.exists(tmp_dir):
352 try: 352 try:
353 os.makedirs(tmp_dir) 353 os.makedirs(tmp_dir)
354 except OSError as e: 354 except OSError as e:
355 self.disp( 355 self.disp(
356 u"Can't create {path} directory: {reason}".format( 356 "Can't create {path} directory: {reason}".format(
357 path=tmp_dir, reason=e 357 path=tmp_dir, reason=e
358 ), 358 ),
359 error=True, 359 error=True,
360 ) 360 )
361 self.host.quit(1) 361 self.host.quit(1)
367 text=True, 367 text=True,
368 ) 368 )
369 return os.fdopen(fd, "w+b"), path 369 return os.fdopen(fd, "w+b"), path
370 except OSError as e: 370 except OSError as e:
371 self.disp( 371 self.disp(
372 u"Can't create temporary file: {reason}".format(reason=e), error=True 372 "Can't create temporary file: {reason}".format(reason=e), error=True
373 ) 373 )
374 self.host.quit(1) 374 self.host.quit(1)
375 375
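
The hunk above creates the draft file with tempfile.mkstemp and wraps the returned low-level descriptor with os.fdopen so the rest of the class can use an ordinary binary file object. A short sketch with an assumed prefix and suffix (the real ones come from the command's configuration):

    import os
    import tempfile

    # assumed naming, only for illustration
    fd, path = tempfile.mkstemp(suffix=".xml", prefix="sat_draft_", text=True)

    # "w+b" matches the binary reads/writes done elsewhere in this class
    with os.fdopen(fd, "w+b") as f:
        f.write(b"<draft/>")

    print("draft written to", path)
    os.unlink(path)
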
376 def getCurrentFile(self, profile): 376 def getCurrentFile(self, profile):
377 """Get most recently edited file 377 """Get most recently edited file
389 for path in glob.glob(os.path.join(tmp_dir, cat_dir_str + "_*")) 389 for path in glob.glob(os.path.join(tmp_dir, cat_dir_str + "_*"))
390 if not path.endswith(METADATA_SUFF) 390 if not path.endswith(METADATA_SUFF)
391 ] 391 ]
392 if not available: 392 if not available:
393 self.disp( 393 self.disp(
394 u"Could not find any content draft in {path}".format(path=tmp_dir), 394 "Could not find any content draft in {path}".format(path=tmp_dir),
395 error=True, 395 error=True,
396 ) 396 )
397 self.host.quit(1) 397 self.host.quit(1)
398 return max(available, key=lambda path: os.stat(path).st_mtime) 398 return max(available, key=lambda path: os.stat(path).st_mtime)
399 399
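
getCurrentFile above looks for drafts whose name starts with the category prefix, skips the metadata companions, and returns the most recently modified one (by st_mtime). The selection logic, written out as a stand-alone sketch with assumed names:

    import glob
    import os

    # assumed values, only for illustration
    tmp_dir = "."
    cat_dir_str = "blog"
    METADATA_SUFF = "_metadata.json"

    available = [
        path
        for path in glob.glob(os.path.join(tmp_dir, cat_dir_str + "_*"))
        if not path.endswith(METADATA_SUFF)
    ]
    if available:
        current = max(available, key=lambda path: os.stat(path).st_mtime)
        print("most recent draft:", current)
    else:
        print("no content draft found")
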
401 """return formatted content, metadata (or not if use_metadata is false), and item id""" 401 """return formatted content, metadata (or not if use_metadata is false), and item id"""
402 raise NotImplementedError 402 raise NotImplementedError
403 403
404 def getTmpSuff(self): 404 def getTmpSuff(self):
405 """return suffix used for content file""" 405 """return suffix used for content file"""
406 return u"xml" 406 return "xml"
407 407
408 def getItemPath(self): 408 def getItemPath(self):
409 """retrieve item path (i.e. service and node) from item argument 409 """retrieve item path (i.e. service and node) from item argument
410 410
411 This method is obviously only useful for edition of PubSub based features 411 This method is obviously only useful for edition of PubSub based features
416 last_item = self.args.last_item 416 last_item = self.args.last_item
417 417
418 if self.args.current: 418 if self.args.current:
419 # user wants to continue current draft 419 # user wants to continue current draft
420 content_file_path = self.getCurrentFile(self.profile) 420 content_file_path = self.getCurrentFile(self.profile)
421 self.disp(u"Continuing edition of current draft", 2) 421 self.disp("Continuing edition of current draft", 2)
422 content_file_obj = open(content_file_path, "r+b") 422 content_file_obj = open(content_file_path, "r+b")
423 # we seek at the end of file in case of an item already exist 423 # we seek to the end of the file in case an item already exists
424 # this will write content of the existing item at the end of the draft. 424 # this will write content of the existing item at the end of the draft.
425 # This way no data should be lost. 425 # This way no data should be lost.
426 content_file_obj.seek(0, os.SEEK_END) 426 content_file_obj.seek(0, os.SEEK_END)
433 else: 433 else:
434 # we need a temporary file 434 # we need a temporary file
435 content_file_obj, content_file_path = self.getTmpFile() 435 content_file_obj, content_file_path = self.getTmpFile()
436 436
437 if item or last_item: 437 if item or last_item:
438 self.disp(u"Editing requested published item", 2) 438 self.disp("Editing requested published item", 2)
439 try: 439 try:
440 if self.use_metadata: 440 if self.use_metadata:
441 content, metadata, item = self.getItemData(service, node, item) 441 content, metadata, item = self.getItemData(service, node, item)
442 else: 442 else:
443 content, item = self.getItemData(service, node, item) 443 content, item = self.getItemData(service, node, item)
444 except Exception as e: 444 except Exception as e:
445 # FIXME: ugly but we have not good may to check errors in bridge 445 # FIXME: ugly but we have no good way to check errors in bridge
446 if u"item-not-found" in unicode(e): 446 if "item-not-found" in str(e):
447 #  item doesn't exist, we create a new one with requested id 447 #  item doesn't exist, we create a new one with requested id
448 metadata = None 448 metadata = None
449 if last_item: 449 if last_item:
450 self.disp(_(u"no item found at all, we create a new one"), 2) 450 self.disp(_("no item found at all, we create a new one"), 2)
451 else: 451 else:
452 self.disp( 452 self.disp(
453 _( 453 _(
454 u'item "{item_id}" not found, we create a new item with this id' 454 'item "{item_id}" not found, we create a new item with this id'
455 ).format(item_id=item), 455 ).format(item_id=item),
456 2, 456 2,
457 ) 457 )
458 content_file_obj.seek(0) 458 content_file_obj.seek(0)
459 else: 459 else:
460 self.disp(u"Error while retrieving item: {}".format(e)) 460 self.disp("Error while retrieving item: {}".format(e))
461 self.host.quit(C.EXIT_ERROR) 461 self.host.quit(C.EXIT_ERROR)
462 else: 462 else:
463 # item exists, we write content 463 # item exists, we write content
464 if content_file_obj.tell() != 0: 464 if content_file_obj.tell() != 0:
465 # we already have a draft, 465 # we already have a draft,
466 # we copy item content after it and add an indicator 466 # we copy item content after it and add an indicator
467 content_file_obj.write("\n*****\n") 467 content_file_obj.write(b"\n*****\n")
468 content_file_obj.write(content.encode("utf-8")) 468 content_file_obj.write(content.encode("utf-8"))
469 content_file_obj.seek(0) 469 content_file_obj.seek(0)
470 self.disp( 470 self.disp(
471 _(u'item "{item_id}" found, we edit it').format(item_id=item), 2 471 _('item "{item_id}" found, we edit it').format(item_id=item), 2
472 ) 472 )
473 else: 473 else:
474 self.disp(u"Editing a new item", 2) 474 self.disp("Editing a new item", 2)
475 if self.use_metadata: 475 if self.use_metadata:
476 metadata = None 476 metadata = None
477 477
478 if self.use_metadata: 478 if self.use_metadata:
479 return service, node, item, content_file_path, content_file_obj, metadata 479 return service, node, item, content_file_path, content_file_obj, metadata
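
When getItemPath above continues an existing draft and an already published item is fetched, the draft is opened in "r+b", the cursor is moved to the end, a separator is written and the item content is appended, so neither the draft nor the published content is lost. A self-contained sketch of that append step (the file name and contents are made up):

    import os

    # made-up draft, so the sketch can run on its own
    content_file_path = "blog_draft.xml"
    with open(content_file_path, "wb") as f:
        f.write(b"draft in progress")

    existing_item_content = "previously published content"

    with open(content_file_path, "r+b") as content_file_obj:
        content_file_obj.seek(0, os.SEEK_END)       # keep what is already in the draft
        if content_file_obj.tell() != 0:
            content_file_obj.write(b"\n*****\n")    # separator between draft and fetched item
        content_file_obj.write(existing_item_content.encode("utf-8"))
        content_file_obj.seek(0)

    os.unlink(content_file_path)
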
518 new_row = [] 518 new_row = []
519 row_data_list = list(row_data) 519 row_data_list = list(row_data)
520 for idx, value in enumerate(row_data_list): 520 for idx, value in enumerate(row_data_list):
521 if filters is not None and filters[idx] is not None: 521 if filters is not None and filters[idx] is not None:
522 filter_ = filters[idx] 522 filter_ = filters[idx]
523 if isinstance(filter_, basestring): 523 if isinstance(filter_, str):
524 col_value = filter_.format(value) 524 col_value = filter_.format(value)
525 else: 525 else:
526 try: 526 try:
527 col_value = filter_(value, row_cls(*row_data_list)) 527 col_value = filter_(value, row_cls(*row_data_list))
528 except TypeError: 528 except TypeError:
529 col_value = filter_(value) 529 col_value = filter_(value)
530 # we count size without ANSI code as they will change length of the string 530 # we count size without ANSI code as they will change length of the string
531 # when it's mostly style/color changes. 531 # when it's mostly style/color changes.
532 col_size = len(regex.ansiRemove(col_value)) 532 col_size = len(regex.ansiRemove(col_value))
533 else: 533 else:
534 col_value = unicode(value) 534 col_value = str(value)
535 col_size = len(col_value) 535 col_size = len(col_value)
536 new_row.append(col_value) 536 new_row.append(col_value)
537 if size is None: 537 if size is None:
538 self.sizes.append(col_size) 538 self.sizes.append(col_size)
539 else: 539 else:
540 self.sizes[idx] = max(self.sizes[idx], col_size) 540 self.sizes[idx] = max(self.sizes[idx], col_size)
541 if size is None: 541 if size is None:
542 size = len(new_row) 542 size = len(new_row)
543 if headers is not None and len(headers) != size: 543 if headers is not None and len(headers) != size:
544 raise exceptions.DataError(u"headers size is not coherent with rows") 544 raise exceptions.DataError("headers size is not coherent with rows")
545 else: 545 else:
546 if len(new_row) != size: 546 if len(new_row) != size:
547 raise exceptions.DataError(u"rows size is not coherent") 547 raise exceptions.DataError("rows size is not coherent")
548 self.rows.append(new_row) 548 self.rows.append(new_row)
549 549
550 if not data and headers is not None: 550 if not data and headers is not None:
551 #  the table is empty, we print headers at their lenght 551 #  the table is empty, we print headers at their length
552 self.sizes = [len(h) for h in headers] 552 self.sizes = [len(h) for h in headers]
553 553
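
In the table code above, each column can have a filter which is either a format string (applied with .format(value)) or a callable (tried first with the value and the whole row, then with the value alone), and column widths are measured on the ANSI-stripped result. A small sketch of the two filter kinds, keeping only the value-based call for brevity:

    # made-up filters, only for illustration
    filters = [
        "{:>8}",                       # format string: applied with .format(value)
        lambda value: value.upper(),   # callable: applied to the raw value
    ]

    row_data = [42, "pending"]
    new_row = []
    for idx, value in enumerate(row_data):
        filter_ = filters[idx]
        if isinstance(filter_, str):
            new_row.append(filter_.format(value))
        else:
            new_row.append(filter_(value))

    print(new_row)   # ['      42', 'PENDING']
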
554 @property 554 @property
555 def string(self): 555 def string(self):
556 if self._buffer is None: 556 if self._buffer is None:
557 raise exceptions.InternalError(u"buffer must be used to get a string") 557 raise exceptions.InternalError("buffer must be used to get a string")
558 return u"\n".join(self._buffer) 558 return "\n".join(self._buffer)
559 559
560 @staticmethod 560 @staticmethod
561 def readDictValues(data, keys, defaults=None): 561 def readDictValues(data, keys, defaults=None):
562 if defaults is None: 562 if defaults is None:
563 defaults = {} 563 defaults = {}
587 @param defaults(dict[unicode, unicode]): default value to use 587 @param defaults(dict[unicode, unicode]): default value to use
588 if None, an exception will be raised if not value is found 588 if None, an exception will be raised if no value is found
589 """ 589 """
590 if keys is None and headers is not None: 590 if keys is None and headers is not None:
591 # FIXME: keys are not needed with OrderedDict, 591 # FIXME: keys are not needed with OrderedDict,
592 raise exceptions.DataError(u"You must specify keys order to used headers") 592 raise exceptions.DataError("You must specify keys order to use headers")
593 if keys is None: 593 if keys is None:
594 keys = data[0].keys() 594 keys = list(data[0].keys())
595 if headers is None: 595 if headers is None:
596 headers = keys 596 headers = keys
597 filters = [filters.get(k) for k in keys] 597 filters = [filters.get(k) for k in keys]
598 return cls( 598 return cls(
599 host, (cls.readDictValues(d, keys, defaults) for d in data), headers, filters 599 host, (cls.readDictValues(d, keys, defaults) for d in data), headers, filters
600 ) 600 )
601 601
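
The dict-based constructor above builds the table from a list of dicts: keys fixes the column order (mandatory when headers are given), headers default to the keys, and each row goes through readDictValues, which per its docstring falls back to defaults for missing keys and raises otherwise. A plausible stand-alone equivalent of that per-row step (the body of readDictValues is not shown in this hunk, so this is an assumption based on the docstring):

    def read_dict_values(data, keys, defaults=None):
        """Pick values from data in keys order, falling back to defaults (assumed behaviour)."""
        if defaults is None:
            defaults = {}
        values = []
        for key in keys:
            if key in data:
                values.append(data[key])
            elif key in defaults:
                values.append(defaults[key])
            else:
                raise KeyError(key)
        return values

    row = {"id": "item1", "title": "hello"}
    print(read_dict_values(row, ["id", "title", "author"], defaults={"author": ""}))
    # ['item1', 'hello', '']
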
602 def _headers(self, head_sep, headers, sizes, alignment=u"left", style=None): 602 def _headers(self, head_sep, headers, sizes, alignment="left", style=None):
603 """Render headers 603 """Render headers
604 604
605 @param head_sep(unicode): sequence to use as separator 605 @param head_sep(unicode): sequence to use as separator
606 @param alignment(unicode): how to align, can be left, center or right 606 @param alignment(unicode): how to align, can be left, center or right
607 @param style(unicode, iterable[unicode], None): ANSI escape sequences to apply 607 @param style(unicode, iterable[unicode], None): ANSI escape sequences to apply
608 @param headers(list[unicode]): headers to show 608 @param headers(list[unicode]): headers to show
609 @param sizes(list[int]): sizes of columns 609 @param sizes(list[int]): sizes of columns
610 """ 610 """
611 rendered_headers = [] 611 rendered_headers = []
612 if isinstance(style, basestring): 612 if isinstance(style, str):
613 style = [style] 613 style = [style]
614 for idx, header in enumerate(headers): 614 for idx, header in enumerate(headers):
615 size = sizes[idx] 615 size = sizes[idx]
616 if alignment == u"left": 616 if alignment == "left":
617 rendered = header[:size].ljust(size) 617 rendered = header[:size].ljust(size)
618 elif alignment == u"center": 618 elif alignment == "center":
619 rendered = header[:size].center(size) 619 rendered = header[:size].center(size)
620 elif alignment == u"right": 620 elif alignment == "right":
621 rendered = header[:size].rjust(size) 621 rendered = header[:size].rjust(size)
622 else: 622 else:
623 raise exceptions.InternalError(u"bad alignment argument") 623 raise exceptions.InternalError("bad alignment argument")
624 if style: 624 if style:
625 args = style + [rendered] 625 args = style + [rendered]
626 rendered = A.color(*args) 626 rendered = A.color(*args)
627 rendered_headers.append(rendered) 627 rendered_headers.append(rendered)
628 return head_sep.join(rendered_headers) 628 return head_sep.join(rendered_headers)
634 else: 634 else:
635 self.host.disp(data) 635 self.host.disp(data)
636 636
637 def display( 637 def display(
638 self, 638 self,
639 head_alignment=u"left", 639 head_alignment="left",
640 columns_alignment=u"left", 640 columns_alignment="left",
641 head_style=None, 641 head_style=None,
642 show_header=True, 642 show_header=True,
643 show_borders=True, 643 show_borders=True,
644 hide_cols=None, 644 hide_cols=None,
645 col_sep=u" │ ", 645 col_sep=" │ ",
646 top_left=u"┌", 646 top_left="┌",
647 top=u"─", 647 top="─",
648 top_sep=u"─┬─", 648 top_sep="─┬─",
649 top_right=u"┐", 649 top_right="┐",
650 left=u"│", 650 left="│",
651 right=None, 651 right=None,
652 head_sep=None, 652 head_sep=None,
653 head_line=u"┄", 653 head_line="┄",
654 head_line_left=u"├", 654 head_line_left="├",
655 head_line_sep=u"┄┼┄", 655 head_line_sep="┄┼┄",
656 head_line_right=u"┤", 656 head_line_right="┤",
657 bottom_left=u"└", 657 bottom_left="└",
658 bottom=None, 658 bottom=None,
659 bottom_sep=u"─┴─", 659 bottom_sep="─┴─",
660 bottom_right=u"┘", 660 bottom_right="┘",
661 ): 661 ):
662 """Print the table 662 """Print the table
663 663
664 @param show_header(bool): True if header need no be shown 664 @param show_header(bool): True if header needs to be shown
665 @param show_borders(bool): True if borders need no be shown 665 @param show_borders(bool): True if borders need to be shown
698 if bottom is None: 698 if bottom is None:
699 bottom = top 699 bottom = top
700 if bottom_sep is None: 700 if bottom_sep is None:
701 bottom_sep = col_sep_size * bottom 701 bottom_sep = col_sep_size * bottom
702 if not show_borders: 702 if not show_borders:
703 left = right = head_line_left = head_line_right = u"" 703 left = right = head_line_left = head_line_right = ""
704 # top border 704 # top border
705 if show_borders: 705 if show_borders:
706 self._disp( 706 self._disp(
707 top_left + top_sep.join([top * size for size in sizes]) + top_right 707 top_left + top_sep.join([top * size for size in sizes]) + top_right
708 ) 708 )
720 + head_line_sep.join([head_line * size for size in sizes]) 720 + head_line_sep.join([head_line * size for size in sizes])
721 + head_line_right 721 + head_line_right
722 ) 722 )
723 723
724 # content 724 # content
725 if columns_alignment == u"left": 725 if columns_alignment == "left":
726 alignment = lambda idx, s: ansi_ljust(s, sizes[idx]) 726 alignment = lambda idx, s: ansi_ljust(s, sizes[idx])
727 elif columns_alignment == u"center": 727 elif columns_alignment == "center":
728 alignment = lambda idx, s: ansi_center(s, sizes[idx]) 728 alignment = lambda idx, s: ansi_center(s, sizes[idx])
729 elif columns_alignment == u"right": 729 elif columns_alignment == "right":
730 alignment = lambda idx, s: ansi_rjust(s, sizes[idx]) 730 alignment = lambda idx, s: ansi_rjust(s, sizes[idx])
731 else: 731 else:
732 raise exceptions.InternalError(u"bad columns alignment argument") 732 raise exceptions.InternalError("bad columns alignment argument")
733 733
734 for row in self.rows: 734 for row in self.rows:
735 if hide_cols: 735 if hide_cols:
736 row = [v for idx, v in enumerate(row) if idx not in ignore_idx] 736 row = [v for idx, v in enumerate(row) if idx not in ignore_idx]
737 self._disp( 737 self._disp(
750 #  we return self so string can be used after display (table.display().string) 750 #  we return self so string can be used after display (table.display().string)
751 return self 751 return self
752 752
753 def display_blank(self, **kwargs): 753 def display_blank(self, **kwargs):
754 """Display table without visible borders""" 754 """Display table without visible borders"""
755 kwargs_ = {"col_sep": u" ", "head_line_sep": u" ", "show_borders": False} 755 kwargs_ = {"col_sep": " ", "head_line_sep": " ", "show_borders": False}
756 kwargs_.update(kwargs) 756 kwargs_.update(kwargs)
757 return self.display(**kwargs_) 757 return self.display(**kwargs_)
758 758
759 759
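
display above draws the table with Unicode box-drawing characters by default (all of them overridable) and exposes alignment, style and hide_cols options, while display_blank simply swaps the separators for spaces and turns borders off. The way a border row is assembled from the column sizes, as a tiny sketch using the method's default characters:

    sizes = [4, 7, 2]                              # made-up column widths
    top_left, top, top_sep, top_right = "┌", "─", "─┬─", "┐"
    print(top_left + top_sep.join([top * size for size in sizes]) + top_right)
    # ┌────┬───────┬──┐
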
760 class URIFinder(object): 760 class URIFinder(object):
784 path, 784 path,
785 [key], 785 [key],
786 callback=self.URIFindCb, 786 callback=self.URIFindCb,
787 errback=partial( 787 errback=partial(
788 command.errback, 788 command.errback,
789 msg=_(u"can't find " + key + u" URI: {}"), 789 msg=_("can't find " + key + " URI: {}"),
790 exit_code=C.EXIT_BRIDGE_ERRBACK, 790 exit_code=C.EXIT_BRIDGE_ERRBACK,
791 ), 791 ),
792 ) 792 )
793 else: 793 else:
794 callback() 794 callback()
810 810
811 try: 811 try:
812 values = getattr(self.args, key) 812 values = getattr(self.args, key)
813 except AttributeError: 813 except AttributeError:
814 raise exceptions.InternalError( 814 raise exceptions.InternalError(
815 u'there is no "{key}" arguments'.format(key=key) 815 'there is no "{key}" argument'.format(key=key)
816 ) 816 )
817 else: 817 else:
818 if values is None: 818 if values is None:
819 values = [] 819 values = []
820 values.extend(json.loads(new_values_json)) 820 values.extend(json.loads(new_values_json))
824 try: 824 try:
825 uri_data = uris_data[self.key] 825 uri_data = uris_data[self.key]
826 except KeyError: 826 except KeyError:
827 self.host.disp( 827 self.host.disp(
828 _( 828 _(
829 u"No {key} URI specified for this project, please specify service and node" 829 "No {key} URI specified for this project, please specify service and node"
830 ).format(key=self.key), 830 ).format(key=self.key),
831 error=True, 831 error=True,
832 ) 832 )
833 self.host.quit(C.EXIT_NOT_FOUND) 833 self.host.quit(C.EXIT_NOT_FOUND)
834 else: 834 else:
835 uri = uri_data[u"uri"] 835 uri = uri_data["uri"]
836 836
837 self.setMetadataList(uri_data, u"labels") 837 self.setMetadataList(uri_data, "labels")
838 parsed_uri = xmpp_uri.parseXMPPUri(uri) 838 parsed_uri = xmpp_uri.parseXMPPUri(uri)
839 try: 839 try:
840 self.args.service = parsed_uri[u"path"] 840 self.args.service = parsed_uri["path"]
841 self.args.node = parsed_uri[u"node"] 841 self.args.node = parsed_uri["node"]
842 except KeyError: 842 except KeyError:
843 self.host.disp(_(u"Invalid URI found: {uri}").format(uri=uri), error=True) 843 self.host.disp(_("Invalid URI found: {uri}").format(uri=uri), error=True)
844 self.host.quit(C.EXIT_DATA_ERROR) 844 self.host.quit(C.EXIT_DATA_ERROR)
845 self.callback() 845 self.callback()
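
URIFinder above retrieves the URIs registered for a project through the bridge, then feeds the selected one to xmpp_uri.parseXMPPUri and uses its "path" and "node" components as the command's service and node; a missing component makes the URI invalid. A sketch of that last step, assuming only that the parse result is a dict exposing those keys (which is what the code above reads):

    # assumed parse result, only for illustration
    parsed_uri = {
        "path": "pubsub.example.org",    # becomes the PubSub service
        "node": "org.example.tickets",   # becomes the PubSub node
    }

    try:
        service = parsed_uri["path"]
        node = parsed_uri["node"]
    except KeyError:
        print("Invalid URI")
    else:
        print("service:", service, "node:", node)
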