libervia-backend: comparison of sat_frontends/jp/common.py @ 2562:26edcf3a30eb
core, setup: huge cleaning:
- moved directories from src and frontends/src to sat and sat_frontends, which is the recommended naming convention
- moved the twisted directory to the root
- removed all hacks from setup.py and added the missing dependencies; it is now clean
- use an https URL for the website in setup.py
- removed "Environment :: X11 Applications :: GTK", as wix is deprecated and removed
- renamed sat.sh to sat and fixed its installation
- added python_requires to specify the Python version needed (see the sketch after the changeset metadata below)
- replaced glib2reactor, which uses deprecated code, with gtk3reactor
sat can now be installed directly from a virtualenv without using --system-site-packages anymore \o/
author:    Goffi <goffi@goffi.org>
date:      Mon, 02 Apr 2018 19:44:50 +0200
parents:   frontends/src/jp/common.py@501b0f827f63
children:  c9dddf691d7b
comparison: 2561:bd30dc3ffe5a → 2562:26edcf3a30eb
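
A rough idea of what the python_requires and https changes mentioned in the commit message look like in setup.py (a minimal sketch under assumed values, not the actual file from this changeset; the name, URL and version bound shown here are illustrative):

    from setuptools import setup, find_packages

    setup(
        name="sat",
        url="https://salut-a-toi.org",   # https URL for the website, as noted above
        packages=find_packages(),
        python_requires="~=2.7",         # assumed bound: the backend still targets Python 2 here
    )
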
#!/usr/bin/env python2
# -*- coding: utf-8 -*-

# jp: a SàT command line tool
# Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from sat_frontends.jp.constants import Const as C
from sat.core.i18n import _
from sat.core import exceptions
from sat.tools.common import regex
from sat.tools.common.ansi import ANSI as A
from sat.tools.common import uri as xmpp_uri
from sat.tools import config
from ConfigParser import NoSectionError, NoOptionError
from collections import namedtuple
from functools import partial
import json
import os
import os.path
import time
import tempfile
import subprocess
import glob
import shlex

# default arguments used for some known editors (editing with metadata)
VIM_SPLIT_ARGS = "-c 'set nospr|vsplit|wincmd w|next|wincmd w'"
EMACS_SPLIT_ARGS = '--eval "(split-window-horizontally)"'
EDITOR_ARGS_MAGIC = {
    'vim': VIM_SPLIT_ARGS + ' {content_file} {metadata_file}',
    'gvim': VIM_SPLIT_ARGS + ' --nofork {content_file} {metadata_file}',
    'emacs': EMACS_SPLIT_ARGS + ' {content_file} {metadata_file}',
    'xemacs': EMACS_SPLIT_ARGS + ' {content_file} {metadata_file}',
    'nano': ' -F {content_file} {metadata_file}',
}

SECURE_UNLINK_MAX = 10
SECURE_UNLINK_DIR = ".backup"
METADATA_SUFF = '_metadata.json'


def ansi_ljust(s, width):
    """ljust method handling ANSI escape codes"""
    cleaned = regex.ansiRemove(s)
    return s + u' ' * (width - len(cleaned))


def ansi_center(s, width):
    """center method handling ANSI escape codes"""
    cleaned = regex.ansiRemove(s)
    diff = width - len(cleaned)
    half = diff/2
    return half * u' ' + s + (half + diff % 2) * u' '


def ansi_rjust(s, width):
    """rjust method handling ANSI escape codes"""
    cleaned = regex.ansiRemove(s)
    return u' ' * (width - len(cleaned)) + s


def getTmpDir(sat_conf, cat_dir, sub_dir=None):
    """Return directory used to store temporary files

    @param sat_conf(ConfigParser.ConfigParser): instance opened on sat configuration
    @param cat_dir(unicode): directory of the category (e.g. "blog")
    @param sub_dir(str): sub directory where data needs to be put
        profile can be used here, or a special directory name
        sub_dir will be escaped to be usable in a path (use regex.pathUnescape to find
        the initial str)
    @return (str): path to the dir
    """
    local_dir = config.getConfig(sat_conf, '', 'local_dir', Exception)
    path = [local_dir.encode('utf-8'), cat_dir.encode('utf-8')]
    if sub_dir is not None:
        path.append(regex.pathEscape(sub_dir))
    return os.path.join(*path)


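# Example (illustrative, assuming local_dir is set to ~/.local/share/sat in sat.conf):
#     getTmpDir(sat_conf, "blog", profile)
# returns a path of the form:
#     ~/.local/share/sat/blog/<profile escaped with regex.pathEscape>

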
def parse_args(host, cmd_line, **format_kw):
    """Parse command arguments

    @param cmd_line(unicode): command line as found in sat.conf
    @param format_kw: keywords used for formatting
    @return (list(unicode)): list of arguments to pass to subprocess function
    """
    try:
        # we split the arguments and add the known fields
        # we split arguments first to avoid escaping issues in file names
        return [a.format(**format_kw) for a in shlex.split(cmd_line)]
    except ValueError as e:
        host.disp(u"Couldn't parse editor cmd [{cmd}]: {reason}".format(cmd=cmd_line, reason=e))
        return []


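# Example (illustrative, with a hypothetical editor option in sat.conf such as
# "my_editor_args = -c 'set ft=markdown' {content_file}"):
#     parse_args(host, u"-c 'set ft=markdown' {content_file}", content_file=u'/tmp/draft')
# returns:
#     [u'-c', u'set ft=markdown', u'/tmp/draft']

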
class BaseEdit(object):
    u"""Base class for editing commands

    This class allows one to edit a file for PubSub or something else.
    It works with temporary files in SàT local_dir, in a "cat_dir" subdirectory.
    """

    def __init__(self, host, cat_dir, use_metadata=False):
        """
        @param sat_conf(ConfigParser.ConfigParser): instance opened on sat configuration
        @param cat_dir(unicode): directory to use for drafts
            this will be a sub-directory of SàT's local_dir
        @param use_metadata(bool): True if edition needs a second file for metadata
            most signatures change with use_metadata, taking an additional metadata
            argument. This is done to raise an error if a command needs metadata but
            forgets the flag, and vice versa.
        """
        self.host = host
        self.sat_conf = config.parseMainConf()
        self.cat_dir_str = cat_dir.encode('utf-8')
        self.use_metadata = use_metadata

    def secureUnlink(self, path):
        """Unlink given path after keeping it for a while

        This method is used to prevent accidental deletion of a draft.
        If there are more files in SECURE_UNLINK_DIR than SECURE_UNLINK_MAX,
        older files are deleted.
        @param path(str): file to unlink
        """
        if not os.path.isfile(path):
            raise OSError(u"path must link to a regular file")
        if not path.startswith(getTmpDir(self.sat_conf, self.cat_dir_str)):
            self.disp(u"File {} is not in SàT temporary hierarchy, we do not remove it".format(path.decode('utf-8')), 2)
            return
        # we have 2 files per draft with use_metadata, so we double the max
        unlink_max = SECURE_UNLINK_MAX * 2 if self.use_metadata else SECURE_UNLINK_MAX
        backup_dir = getTmpDir(self.sat_conf, self.cat_dir_str, SECURE_UNLINK_DIR)
        if not os.path.exists(backup_dir):
            os.makedirs(backup_dir)
        filename = os.path.basename(path)
        backup_path = os.path.join(backup_dir, filename)
        # we move the file to the backup dir
        self.host.disp(u"Backing up file {src} to {dst}".format(
            src=path.decode('utf-8'), dst=backup_path.decode('utf-8')), 1)
        os.rename(path, backup_path)
        # and if we exceeded the limit, we remove older files
        backup_files = [os.path.join(backup_dir, f) for f in os.listdir(backup_dir)]
        if len(backup_files) > unlink_max:
            backup_files.sort(key=lambda path: os.stat(path).st_mtime)
            for path in backup_files[:len(backup_files) - unlink_max]:
                self.host.disp(u"Purging backup file {}".format(path.decode('utf-8')), 2)
                os.unlink(path)

    def runEditor(self, editor_args_opt, content_file_path,
                  content_file_obj, meta_file_path=None, meta_ori=None):
        """Run editor to edit content and metadata

        @param editor_args_opt(unicode): option in [jp] section in configuration for
            specific args
        @param content_file_path(str): path to the content file
        @param content_file_obj(file): opened file instance
        @param meta_file_path(str, None): metadata file path
            if None, metadata will not be used
        @param meta_ori(dict, None): original content of metadata
            can't be used if use_metadata is False
        """
        if not self.use_metadata:
            assert meta_file_path is None
            assert meta_ori is None

        # we calculate hashes to check for modifications
        import hashlib
        content_file_obj.seek(0)
        tmp_ori_hash = hashlib.sha1(content_file_obj.read()).digest()
        content_file_obj.close()

        # we prepare arguments
        editor = config.getConfig(self.sat_conf, 'jp', 'editor') or os.getenv('EDITOR', 'vi')
        try:
            # are there custom arguments in sat.conf?
            editor_args = config.getConfig(self.sat_conf, 'jp', editor_args_opt, Exception)
        except (NoOptionError, NoSectionError):
            # no, we check if we know the editor and have special arguments
            if self.use_metadata:
                editor_args = EDITOR_ARGS_MAGIC.get(os.path.basename(editor), '')
            else:
                editor_args = ''
        parse_kwargs = {'content_file': content_file_path}
        if self.use_metadata:
            parse_kwargs['metadata_file'] = meta_file_path
        args = parse_args(self.host, editor_args, **parse_kwargs)
        if not args:
            args = [content_file_path]

        # actual editing
        editor_exit = subprocess.call([editor] + args)

        # edition will now be checked, and data will be sent if it was a success
        if editor_exit != 0:
            self.disp(u"Editor exited with an error code, so the temporary file has not been deleted, and the item is not published.\nYou can find the temporary file at {path}".format(
                path=content_file_path), error=True)
        else:
            # main content
            try:
                with open(content_file_path, 'rb') as f:
                    content = f.read()
            except (OSError, IOError):
                self.disp(u"Can't read file at {content_path}, has it been deleted?\nCancelling edition".format(
                    content_path=content_file_path), error=True)
                self.host.quit(C.EXIT_NOT_FOUND)

            # metadata
            if self.use_metadata:
                try:
                    with open(meta_file_path, 'rb') as f:
                        metadata = json.load(f)
                except (OSError, IOError):
                    self.disp(u"Can't read file at {meta_path}, has it been deleted?\nCancelling edition".format(
                        meta_path=meta_file_path), error=True)
                    self.host.quit(C.EXIT_NOT_FOUND)
                except ValueError:
                    self.disp(u"Can't parse metadata, please check it is in correct JSON format. Cancelling edition.\n" +
                              u"You can find the tmp file at {content_path} and the temporary meta file at {meta_path}.".format(
                              content_path=content_file_path,
                              meta_path=meta_file_path), error=True)
                    self.host.quit(C.EXIT_DATA_ERROR)

            if self.use_metadata and not C.bool(metadata.get('publish', "true")):
                self.disp(u'Publication blocked by "publish" key in metadata, cancelling edition.\n\n' +
                          u"temporary file path:\t{content_path}\nmetadata file path:\t{meta_path}".format(
                          content_path=content_file_path, meta_path=meta_file_path), error=True)
                self.host.quit()

            if len(content) == 0:
                self.disp(u"Content is empty, cancelling the edition")
                if not content_file_path.startswith(getTmpDir(self.sat_conf, self.cat_dir_str)):
                    self.disp(u"Files are not in SàT temporary hierarchy, we do not remove them", 2)
                    self.host.quit()
                self.disp(u"Deletion of {}".format(content_file_path.decode('utf-8')), 2)
                os.unlink(content_file_path)
                if self.use_metadata:
                    self.disp(u"Deletion of {}".format(meta_file_path.decode('utf-8')), 2)
                    os.unlink(meta_file_path)
                self.host.quit()

            # time to re-check the hash
            elif (tmp_ori_hash == hashlib.sha1(content).digest() and
                  (not self.use_metadata or meta_ori == metadata)):
                self.disp(u"The content has not been modified, cancelling the edition")
                self.host.quit()

            else:
                # we can now send the item
                content = content.decode('utf-8-sig')  # we use utf-8-sig to avoid BOM
                try:
                    if self.use_metadata:
                        self.publish(content, metadata)
                    else:
                        self.publish(content)
                except Exception as e:
                    if self.use_metadata:
                        self.disp(u"Error while sending your item, the temporary files have been kept at {content_path} and {meta_path}: {reason}".format(
                            content_path=content_file_path, meta_path=meta_file_path, reason=e), error=True)
                    else:
                        self.disp(u"Error while sending your item, the temporary file has been kept at {content_path}: {reason}".format(
                            content_path=content_file_path, reason=e), error=True)
                    self.host.quit(1)

            self.secureUnlink(content_file_path)
            if self.use_metadata:
                self.secureUnlink(meta_file_path)

    def publish(self, content):
        # if metadata is needed, publish will be called with it as last argument
        raise NotImplementedError

    def getTmpFile(self):
        """Create a temporary file

        the suffix of the filename is taken from self.getTmpSuff()
        @return (tuple(file, str)): opened (w+b) file object and file path
        """
        suff = '.' + self.getTmpSuff()
        cat_dir_str = self.cat_dir_str
        tmp_dir = getTmpDir(self.sat_conf, self.cat_dir_str, self.profile.encode('utf-8'))
        if not os.path.exists(tmp_dir):
            try:
                os.makedirs(tmp_dir)
            except OSError as e:
                self.disp(u"Can't create {path} directory: {reason}".format(
                    path=tmp_dir, reason=e), error=True)
                self.host.quit(1)
        try:
            fd, path = tempfile.mkstemp(suffix=suff.encode('utf-8'),
                                        prefix=time.strftime(cat_dir_str + '_%Y-%m-%d_%H:%M:%S_'),
                                        dir=tmp_dir, text=True)
            return os.fdopen(fd, 'w+b'), path
        except OSError as e:
            self.disp(u"Can't create temporary file: {reason}".format(reason=e), error=True)
            self.host.quit(1)

    def getCurrentFile(self, profile):
        """Get the most recently edited file

        @param profile(unicode): profile linked to the draft
        @return(str): full path of the current file
        """
        # we guess the item currently edited by choosing
        # the most recent file corresponding to the temp file pattern
        # in tmp_dir, excluding metadata files
        cat_dir_str = self.cat_dir_str
        tmp_dir = getTmpDir(self.sat_conf, self.cat_dir_str, profile.encode('utf-8'))
        available = [path for path in glob.glob(os.path.join(tmp_dir, cat_dir_str + '_*')) if not path.endswith(METADATA_SUFF)]
        if not available:
            self.disp(u"Could not find any content draft in {path}".format(path=tmp_dir), error=True)
            self.host.quit(1)
        return max(available, key=lambda path: os.stat(path).st_mtime)

    def getItemData(self, service, node, item):
        """Return formatted content, metadata (or not if use_metadata is False), and item id"""
        raise NotImplementedError

    def getTmpSuff(self):
        """Return the suffix used for the content file"""
        return u'xml'

    def getItemPath(self):
        """Retrieve item path (i.e. service and node) from the item argument

        This method is obviously only useful for edition of PubSub based features.
        """
        service = self.args.service
        node = self.args.node
        item = self.args.item
        last_item = self.args.last_item

        if self.args.current:
            # user wants to continue current draft
            content_file_path = self.getCurrentFile(self.profile)
            self.disp(u'Continuing edition of current draft', 2)
            content_file_obj = open(content_file_path, 'r+b')
            # we seek to the end of the file in case an item already exists:
            # the content of the existing item will be written at the end of the draft.
            # This way no data should be lost.
            content_file_obj.seek(0, os.SEEK_END)
        elif self.args.draft_path:
            # there is an existing draft that we use
            content_file_path = os.path.expanduser(self.args.item)
            content_file_obj = open(content_file_path, 'r+b')
            # we seek to the end for the same reason as above
            content_file_obj.seek(0, os.SEEK_END)
        else:
            # we need a temporary file
            content_file_obj, content_file_path = self.getTmpFile()

        if item or last_item:
            self.disp(u'Editing requested published item', 2)
            try:
                if self.use_metadata:
                    content, metadata, item = self.getItemData(service, node, item)
                else:
                    content, item = self.getItemData(service, node, item)
            except Exception as e:
                # FIXME: ugly, but we have no good way to check errors in bridge
                if u'item-not-found' in unicode(e):
                    # item doesn't exist, we create a new one with the requested id
                    metadata = None
                    if last_item:
                        self.disp(_(u'no item found at all, we create a new one'), 2)
                    else:
                        self.disp(_(u'item "{item_id}" not found, we create a new item with this id').format(item_id=item), 2)
                    content_file_obj.seek(0)
                else:
                    self.disp(u"Error while retrieving item: {}".format(e))
                    self.host.quit(C.EXIT_ERROR)
            else:
                # item exists, we write the content
                if content_file_obj.tell() != 0:
                    # we already have a draft,
                    # we copy the item content after it and add an indicator
                    content_file_obj.write('\n*****\n')
                content_file_obj.write(content.encode('utf-8'))
                content_file_obj.seek(0)
                self.disp(_(u'item "{item_id}" found, we edit it').format(item_id=item), 2)
        else:
            self.disp(u'Editing a new item', 2)
            if self.use_metadata:
                metadata = None

        if self.use_metadata:
            return service, node, item, content_file_path, content_file_obj, metadata
        else:
            return service, node, item, content_file_path, content_file_obj


class Table(object):

    def __init__(self, host, data, headers=None, filters=None, use_buffer=False):
        """
        @param data(iterable[list]): table data
            all lines must have the same number of columns
        @param headers(iterable[unicode], None): names/titles of the columns
            if not None, must have the same number of columns as data
        @param filters(iterable[(callable, unicode)], None): values filters
            the callable will get the col value as argument and must return a string
            if it's a unicode, it will be used with .format and must contain u'{}',
            which will be replaced with the string
            if not None, must have the same number of columns as data
        @param use_buffer(bool): if True, buffer the output instead of printing it directly
        """
        self.host = host
        self._buffer = [] if use_buffer else None
        # headers are columns names/titles, can be None
        self.headers = headers
        # sizes of columns without headers,
        # headers may be larger
        self.sizes = []
        # rows contains one list per row with columns values
        self.rows = []

        size = None
        if headers:
            row_cls = namedtuple('RowData', headers)
        else:
            row_cls = tuple

        for row_data in data:
            new_row = []
            row_data_list = list(row_data)
            for idx, value in enumerate(row_data_list):
                if filters is not None and filters[idx] is not None:
                    filter_ = filters[idx]
                    if isinstance(filter_, basestring):
                        col_value = filter_.format(value)
                    else:
                        col_value = filter_(value, row_cls(*row_data_list))
                    # we count the size without ANSI codes, as they change the length
                    # of the string while being mostly style/color changes.
                    col_size = len(regex.ansiRemove(col_value))
                else:
                    col_value = unicode(value)
                    col_size = len(col_value)
                new_row.append(col_value)
                if size is None:
                    self.sizes.append(col_size)
                else:
                    self.sizes[idx] = max(self.sizes[idx], col_size)
            if size is None:
                size = len(new_row)
                if headers is not None and len(headers) != size:
                    raise exceptions.DataError(u'headers size is not coherent with rows')
            else:
                if len(new_row) != size:
                    raise exceptions.DataError(u'rows size is not coherent')
            self.rows.append(new_row)

        if not data and headers is not None:
            # the table is empty, we print headers at their length
            self.sizes = [len(h) for h in headers]

    @property
    def string(self):
        if self._buffer is None:
            raise exceptions.InternalError(u'buffer must be used to get a string')
        return u'\n'.join(self._buffer)

    @staticmethod
    def readDictValues(data, keys, defaults=None):
        if defaults is None:
            defaults = {}
        for key in keys:
            try:
                yield data[key]
            except KeyError as e:
                default = defaults.get(key)
                if default is not None:
                    yield default
                else:
                    raise e

    @classmethod
    def fromDict(cls, host, data, keys=None, headers=None, filters=None, defaults=None):
        """Prepare a table to display it

        the whole data will be read and kept in memory,
        to be printed
        @param data(list[dict[unicode, unicode]]): data to create the table from
        @param keys(iterable[unicode], None): keys to get
            if None, all keys will be used
        @param headers(iterable[unicode], None): names of the columns
            names must be in the same order as keys
        @param filters(dict[unicode, (callable, unicode)], None): filters to use on values
            keys correspond to the keys to filter, and the value is a callable or unicode
            which will get the value as argument and must return a string
        @param defaults(dict[unicode, unicode]): default values to use
            if None, an exception will be raised if no value is found
        """
        if keys is None and headers is not None:
            # FIXME: keys are not needed with OrderedDict,
            raise exceptions.DataError(u'You must specify keys order to use headers')
        if keys is None:
            keys = data[0].keys()
        if headers is None:
            headers = keys
        if filters is None:
            filters = {}
        filters = [filters.get(k) for k in keys]
        return cls(host, (cls.readDictValues(d, keys, defaults) for d in data), headers, filters)

    def _headers(self, head_sep, headers, sizes, alignment=u'left', style=None):
        """Render headers

        @param head_sep(unicode): sequence to use as separator
        @param alignment(unicode): how to align, can be left, center or right
        @param style(unicode, iterable[unicode], None): ANSI escape sequences to apply
        @param headers(list[unicode]): headers to show
        @param sizes(list[int]): sizes of columns
        """
        rendered_headers = []
        if isinstance(style, basestring):
            style = [style]
        for idx, header in enumerate(headers):
            size = sizes[idx]
            if alignment == u'left':
                rendered = header[:size].ljust(size)
            elif alignment == u'center':
                rendered = header[:size].center(size)
            elif alignment == u'right':
                rendered = header[:size].rjust(size)
            else:
                raise exceptions.InternalError(u'bad alignment argument')
            if style:
                args = style + [rendered]
                rendered = A.color(*args)
            rendered_headers.append(rendered)
        return head_sep.join(rendered_headers)

    def _disp(self, data):
        """Output data (can be either buffered or printed)"""
        if self._buffer is not None:
            self._buffer.append(data)
        else:
            self.host.disp(data)

    def display(self,
                head_alignment=u'left',
                columns_alignment=u'left',
                head_style=None,
                show_header=True,
                show_borders=True,
                hide_cols=None,
                col_sep=u' │ ',
                top_left=u'┌',
                top=u'─',
                top_sep=u'─┬─',
                top_right=u'┐',
                left=u'│',
                right=None,
                head_sep=None,
                head_line=u'┄',
                head_line_left=u'├',
                head_line_sep=u'┄┼┄',
                head_line_right=u'┤',
                bottom_left=u'└',
                bottom=None,
                bottom_sep=u'─┴─',
                bottom_right=u'┘',
                ):
        """Print the table

        @param show_header(bool): True if the header needs to be shown
        @param show_borders(bool): True if the borders need to be shown
        @param hide_cols(None, iterable(unicode)): columns which should not be displayed
        @param head_alignment(unicode): how to align headers, can be left, center or right
        @param columns_alignment(unicode): how to align columns, can be left, center or right
        @param col_sep(unicode): separator between columns
        @param head_line(unicode): character to use to draw the line under the header
        """
        if not self.sizes:
            # the table is empty
            return
        col_sep_size = len(regex.ansiRemove(col_sep))

        # if we have columns to hide, we remove them from headers and sizes
        if not hide_cols:
            headers = self.headers
            sizes = self.sizes
        else:
            headers = list(self.headers)
            sizes = self.sizes[:]
            ignore_idx = [headers.index(to_hide) for to_hide in hide_cols]
            for to_hide in hide_cols:
                hide_idx = headers.index(to_hide)
                del headers[hide_idx]
                del sizes[hide_idx]

        if right is None:
            right = left
        if top_sep is None:
            top_sep = col_sep_size * top
        if head_sep is None:
            head_sep = col_sep
        if bottom is None:
            bottom = top
        if bottom_sep is None:
            bottom_sep = col_sep_size * bottom
        if not show_borders:
            left = right = head_line_left = head_line_right = u''
        # top border
        if show_borders:
            self._disp(
                top_left
                + top_sep.join([top * size for size in sizes])
                + top_right
            )

        # headers
        if show_header:
            self._disp(
                left
                + self._headers(head_sep, headers, sizes, head_alignment, head_style)
                + right
            )
            # header line
            self._disp(
                head_line_left
                + head_line_sep.join([head_line * size for size in sizes])
                + head_line_right
            )

        # content
        if columns_alignment == u'left':
            alignment = lambda idx, s: ansi_ljust(s, sizes[idx])
        elif columns_alignment == u'center':
            alignment = lambda idx, s: ansi_center(s, sizes[idx])
        elif columns_alignment == u'right':
            alignment = lambda idx, s: ansi_rjust(s, sizes[idx])
        else:
            raise exceptions.InternalError(u'bad columns alignment argument')

        for row in self.rows:
            if hide_cols:
                row = [v for idx, v in enumerate(row) if idx not in ignore_idx]
            self._disp(left + col_sep.join([alignment(idx, c) for idx, c in enumerate(row)]) + right)

        if show_borders:
            # bottom border
            self._disp(
                bottom_left
                + bottom_sep.join([bottom * size for size in sizes])
                + bottom_right
            )
        # we return self so string can be used after display (table.display().string)
        return self

    def display_blank(self, **kwargs):
        """Display the table without visible borders"""
        kwargs_ = {'col_sep': u' ', 'head_line_sep': u' ', 'show_borders': False}
        kwargs_.update(kwargs)
        return self.display(**kwargs_)


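# Example (illustrative; "host" stands for a jp command host instance):
#     data = [{u'jid': u'louise@example.net', u'status': u'online'},
#             {u'jid': u'pierre@example.org', u'status': u'away'}]
#     Table.fromDict(host, data,
#                    keys=(u'jid', u'status'),
#                    headers=(u'JID', u'Status')).display()
# prints a bordered table; display_blank() prints the same rows without borders.

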
class URIFinder(object):
    """Helper class to find URIs in well-known locations"""

    def __init__(self, command, path, key, callback, meta_map=None):
        """
        @param command(CommandBase): command instance
            args of this instance will be updated with found values
        @param path(unicode): absolute path to use as a starting point to look for URIs
        @param key(unicode): key to look for
        @param callback(callable): method to call once URIs are found (or not)
        @param meta_map(dict, None): if not None, map metadata to arg names
            key is the metadata attribute name
            value is the name to actually use, or None to ignore
            use an empty dict to only retrieve the URI
            possible keys are currently:
                - labels
        """
        if not command.args.service and not command.args.node:
            self.host = command.host
            self.args = command.args
            self.key = key
            self.callback = callback
            self.meta_map = meta_map
            self.host.bridge.URIFind(path,
                                     [key],
                                     callback=self.URIFindCb,
                                     errback=partial(command.errback,
                                                     msg=_(u"can't find " + key + u" URI: {}"),
                                                     exit_code=C.EXIT_BRIDGE_ERRBACK))
        else:
            callback()

    def setMetadataList(self, uri_data, key):
        """Helper method to set a list of values from metadata

        @param uri_data(dict): data of the found URI
        @param key(unicode): key of the value to retrieve
        """
        new_values_json = uri_data.get(key)
        if new_values_json is not None:
            if self.meta_map is None:
                dest = key
            else:
                dest = self.meta_map.get(key)
                if dest is None:
                    return

            try:
                values = getattr(self.args, key)
            except AttributeError:
                raise exceptions.InternalError(u'there is no "{key}" argument'.format(
                    key=key))
            else:
                if values is None:
                    values = []
                values.extend(json.loads(new_values_json))
                setattr(self.args, dest, values)

    def URIFindCb(self, uris_data):
        try:
            uri_data = uris_data[self.key]
        except KeyError:
            self.host.disp(_(u"No {key} URI specified for this project, please specify service and node").format(key=self.key), error=True)
            self.host.quit(C.EXIT_NOT_FOUND)
        else:
            uri = uri_data[u'uri']

            self.setMetadataList(uri_data, u'labels')
            parsed_uri = xmpp_uri.parseXMPPUri(uri)
            try:
                self.args.service = parsed_uri[u'path']
                self.args.node = parsed_uri[u'node']
            except KeyError:
                self.host.disp(_(u"Invalid URI found: {uri}").format(uri=uri), error=True)
                self.host.quit(C.EXIT_DATA_ERROR)
            self.callback()
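

# Example (illustrative; the key name and callback are hypothetical):
#     URIFinder(self, os.getcwd(), u'tickets', self.onUriFound)
# looks for a u'tickets' URI starting from the current directory, fills
# self.args.service / self.args.node from it, then calls self.onUriFound().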