diff sat_frontends/jp/common.py @ 2624:56f94936df1e

code style reformatting using black
author Goffi <goffi@goffi.org>
date Wed, 27 Jun 2018 20:14:46 +0200
parents c9dddf691d7b
children 003b8b4b56a7
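
The whole diff below is mechanical: black normalizes string quotes to double quotes, reflows long calls onto one argument per line, and adds trailing commas, without touching behaviour. One quick way to convince yourself that a reformat of this kind is behaviour-preserving is to compare the ASTs of the file before and after; a minimal sketch, assuming old.py/new.py are local copies of the two revisions and an interpreter that can parse them:

    import ast

    def same_ast(old_src, new_src):
        # ast.dump() ignores quoting style, line breaks and trailing commas,
        # so equal dumps mean only the layout changed
        return ast.dump(ast.parse(old_src)) == ast.dump(ast.parse(new_src))

    with open("old.py") as f_old, open("new.py") as f_new:
        assert same_ast(f_old.read(), f_new.read())
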
--- a/sat_frontends/jp/common.py	Wed Jun 27 07:51:29 2018 +0200
+++ b/sat_frontends/jp/common.py	Wed Jun 27 20:14:46 2018 +0200
@@ -40,36 +40,36 @@
 VIM_SPLIT_ARGS = "-c 'set nospr|vsplit|wincmd w|next|wincmd w'"
 EMACS_SPLIT_ARGS = '--eval "(split-window-horizontally)"'
 EDITOR_ARGS_MAGIC = {
-    'vim': VIM_SPLIT_ARGS + ' {content_file} {metadata_file}',
-    'gvim': VIM_SPLIT_ARGS + ' --nofork {content_file} {metadata_file}',
-    'emacs': EMACS_SPLIT_ARGS + ' {content_file} {metadata_file}',
-    'xemacs': EMACS_SPLIT_ARGS + ' {content_file} {metadata_file}',
-    'nano': ' -F {content_file} {metadata_file}',
-    }
+    "vim": VIM_SPLIT_ARGS + " {content_file} {metadata_file}",
+    "gvim": VIM_SPLIT_ARGS + " --nofork {content_file} {metadata_file}",
+    "emacs": EMACS_SPLIT_ARGS + " {content_file} {metadata_file}",
+    "xemacs": EMACS_SPLIT_ARGS + " {content_file} {metadata_file}",
+    "nano": " -F {content_file} {metadata_file}",
+}
 
 SECURE_UNLINK_MAX = 10
 SECURE_UNLINK_DIR = ".backup"
-METADATA_SUFF = '_metadata.json'
+METADATA_SUFF = "_metadata.json"
 
 
 def ansi_ljust(s, width):
     """ljust method handling ANSI escape codes"""
     cleaned = regex.ansiRemove(s)
-    return s + u' ' * (width - len(cleaned))
+    return s + u" " * (width - len(cleaned))
 
 
 def ansi_center(s, width):
     """center method handling ANSI escape codes"""
     cleaned = regex.ansiRemove(s)
     diff = width - len(cleaned)
-    half = diff/2
-    return half * u' ' + s + (half + diff % 2) * u' '
+    half = diff / 2
+    return half * u" " + s + (half + diff % 2) * u" "
 
 
 def ansi_rjust(s, width):
     """rjust method handling ANSI escape codes"""
     cleaned = regex.ansiRemove(s)
-    return u' ' * (width - len(cleaned)) + s
+    return u" " * (width - len(cleaned)) + s
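
These three helpers exist because str.ljust/center/rjust would count ANSI colour escapes as visible characters and under-pad coloured cells; the width is therefore measured on a copy with the escapes stripped (regex.ansiRemove). The same idea in isolation, assuming plain SGR escape sequences:

    import re

    ANSI_RE = re.compile(r"\x1b\[[0-9;]*m")       # assumption: simple SGR escapes only

    def visible_ljust(s, width):
        visible = len(ANSI_RE.sub("", s))         # measure what the terminal will display
        return s + " " * (width - visible)

    colored = "\x1b[31mred\x1b[0m"
    assert len(colored) == 12 and len(ANSI_RE.sub("", colored)) == 3   # only 3 visible columns
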
 
 
 def getTmpDir(sat_conf, cat_dir, sub_dir=None):
@@ -83,8 +83,8 @@
         initial str)
     @return (str): path to the dir
     """
-    local_dir = config.getConfig(sat_conf, '', 'local_dir', Exception)
-    path = [local_dir.encode('utf-8'), cat_dir.encode('utf-8')]
+    local_dir = config.getConfig(sat_conf, "", "local_dir", Exception)
+    path = [local_dir.encode("utf-8"), cat_dir.encode("utf-8")]
     if sub_dir is not None:
         path.append(regex.pathEscape(sub_dir))
     return os.path.join(*path)
@@ -102,7 +102,9 @@
         # we split arguments first to avoid escaping issues in file names
         return [a.format(**format_kw) for a in shlex.split(cmd_line)]
     except ValueError as e:
-        host.disp(u"Couldn't parse editor cmd [{cmd}]: {reason}".format(cmd=cmd_line, reason=e))
+        host.disp(
+            u"Couldn't parse editor cmd [{cmd}]: {reason}".format(cmd=cmd_line, reason=e)
+        )
         return []
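
parse_args splits the configured editor command line with shlex before substituting the file names, so a draft path containing spaces fills exactly one argument slot instead of being re-tokenized. A short illustration of why the order matters (the paths are made up):

    import shlex

    cmd_line = "gvim --nofork {content_file} {metadata_file}"
    format_kw = {"content_file": "/tmp/blog/my draft.xml",
                 "metadata_file": "/tmp/blog/my draft_metadata.json"}

    safe = [a.format(**format_kw) for a in shlex.split(cmd_line)]   # 4 arguments
    broken = shlex.split(cmd_line.format(**format_kw))              # 6 arguments, paths split in two
    assert len(safe) == 4 and len(broken) == 6
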
 
 
@@ -124,7 +126,7 @@
         """
         self.host = host
         self.sat_conf = config.parseMainConf()
-        self.cat_dir_str = cat_dir.encode('utf-8')
+        self.cat_dir_str = cat_dir.encode("utf-8")
         self.use_metadata = use_metadata
 
     def secureUnlink(self, path):
@@ -138,7 +140,12 @@
         if not os.path.isfile(path):
             raise OSError(u"path must link to a regular file")
         if not path.startswith(getTmpDir(self.sat_conf, self.cat_dir_str)):
-            self.disp(u"File {} is not in SàT temporary hierarchy, we do not remove it".format(path.decode('utf-8')), 2)
+            self.disp(
+                u"File {} is not in SàT temporary hierarchy, we do not remove it".format(
+                    path.decode("utf-8")
+                ),
+                2,
+            )
             return
         # we have 2 files per draft with use_metadata, so we double max
         unlink_max = SECURE_UNLINK_MAX * 2 if self.use_metadata else SECURE_UNLINK_MAX
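
secureUnlink never deletes a draft outright: the file is moved into the .backup directory and, once more than unlink_max entries accumulate (doubled here because every draft has a metadata companion), the oldest backups are purged by modification time, as the next hunk shows. The purge step in isolation, roughly:

    import os

    def purge_old_backups(backup_dir, keep):
        files = [os.path.join(backup_dir, f) for f in os.listdir(backup_dir)]
        if len(files) <= keep:
            return
        files.sort(key=lambda p: os.stat(p).st_mtime)    # oldest first
        for path in files[:len(files) - keep]:
            os.unlink(path)
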
@@ -148,19 +155,29 @@
         filename = os.path.basename(path)
         backup_path = os.path.join(backup_dir, filename)
         # we move file to backup dir
-        self.host.disp(u"Backing up file {src} to {dst}".format(
-            src=path.decode('utf-8'), dst=backup_path.decode('utf-8')), 1)
+        self.host.disp(
+            u"Backing up file {src} to {dst}".format(
+                src=path.decode("utf-8"), dst=backup_path.decode("utf-8")
+            ),
+            1,
+        )
         os.rename(path, backup_path)
         # and if we exceeded the limit, we remove older file
         backup_files = [os.path.join(backup_dir, f) for f in os.listdir(backup_dir)]
         if len(backup_files) > unlink_max:
             backup_files.sort(key=lambda path: os.stat(path).st_mtime)
-            for path in backup_files[:len(backup_files) - unlink_max]:
-                self.host.disp(u"Purging backup file {}".format(path.decode('utf-8')), 2)
+            for path in backup_files[: len(backup_files) - unlink_max]:
+                self.host.disp(u"Purging backup file {}".format(path.decode("utf-8")), 2)
                 os.unlink(path)
 
-    def runEditor(self, editor_args_opt, content_file_path,
-                 content_file_obj, meta_file_path=None, meta_ori=None):
+    def runEditor(
+        self,
+        editor_args_opt,
+        content_file_path,
+        content_file_obj,
+        meta_file_path=None,
+        meta_ori=None,
+    ):
         """run editor to edit content and metadata
 
         @param editor_args_opt(unicode): option in [jp] section in configuration for
@@ -178,24 +195,29 @@
 
         # we calculate hashes to check for modifications
         import hashlib
+
         content_file_obj.seek(0)
         tmp_ori_hash = hashlib.sha1(content_file_obj.read()).digest()
         content_file_obj.close()
 
         # we prepare arguments
-        editor = config.getConfig(self.sat_conf, 'jp', 'editor') or os.getenv('EDITOR', 'vi')
+        editor = config.getConfig(self.sat_conf, "jp", "editor") or os.getenv(
+            "EDITOR", "vi"
+        )
         try:
             # is there custom arguments in sat.conf ?
-            editor_args = config.getConfig(self.sat_conf, 'jp', editor_args_opt, Exception)
+            editor_args = config.getConfig(
+                self.sat_conf, "jp", editor_args_opt, Exception
+            )
         except (NoOptionError, NoSectionError):
             # no, we check if we know the editor and have special arguments
             if self.use_metadata:
-                editor_args = EDITOR_ARGS_MAGIC.get(os.path.basename(editor), '')
+                editor_args = EDITOR_ARGS_MAGIC.get(os.path.basename(editor), "")
             else:
-                editor_args = ''
-        parse_kwargs = {'content_file': content_file_path}
+                editor_args = ""
+        parse_kwargs = {"content_file": content_file_path}
         if self.use_metadata:
-            parse_kwargs['metadata_file'] = meta_file_path
+            parse_kwargs["metadata_file"] = meta_file_path
         args = parse_args(self.host, editor_args, **parse_kwargs)
         if not args:
             args = [content_file_path]
@@ -205,61 +227,86 @@
 
         # edition will now be checked, and data will be sent if it was a success
         if editor_exit != 0:
-            self.disp(u"Editor exited with an error code, so the temporary file has not been deleted and the item has not been published.\nYou can find the temporary file at {path}".format(
-                path=content_file_path), error=True)
+            self.disp(
+                u"Editor exited with an error code, so the temporary file has not been deleted and the item has not been published.\nYou can find the temporary file at {path}".format(
+                    path=content_file_path
+                ),
+                error=True,
+            )
         else:
             # main content
             try:
-                with open(content_file_path, 'rb') as f:
+                with open(content_file_path, "rb") as f:
                     content = f.read()
             except (OSError, IOError):
-                self.disp(u"Can't read file at {content_path}, has it been deleted?\nCancelling edition".format(
-                    content_path=content_file_path), error=True)
+                self.disp(
+                    u"Can't read file at {content_path}, has it been deleted?\nCancelling edition".format(
+                        content_path=content_file_path
+                    ),
+                    error=True,
+                )
                 self.host.quit(C.EXIT_NOT_FOUND)
 
             # metadata
             if self.use_metadata:
                 try:
-                    with open(meta_file_path, 'rb') as f:
+                    with open(meta_file_path, "rb") as f:
                         metadata = json.load(f)
                 except (OSError, IOError):
-                    self.disp(u"Can't read file at {meta_path}, has it been deleted?\nCancelling edition".format(
-                        content_path=content_file_path, meta_path=meta_file_path), error=True)
+                    self.disp(
+                        u"Can't read file at {meta_path}, has it been deleted?\nCancelling edition".format(
+                            content_path=content_file_path, meta_path=meta_file_path
+                        ),
+                        error=True,
+                    )
                     self.host.quit(C.EXIT_NOT_FOUND)
                 except ValueError:
-                    self.disp(u"Can't parse metadata, please check it is correct JSON format. Cancelling edition.\n" +
-                        "You can find tmp file at {content_path} and temporary meta file at {meta_path}.".format(
-                        content_path=content_file_path,
-                        meta_path=meta_file_path), error=True)
+                    self.disp(
+                        u"Can't parse metadata, please check it is correct JSON format. Cancelling edition.\n"
+                        + "You can find tmp file at {content_path} and temporary meta file at {meta_path}.".format(
+                            content_path=content_file_path, meta_path=meta_file_path
+                        ),
+                        error=True,
+                    )
                     self.host.quit(C.EXIT_DATA_ERROR)
 
-            if self.use_metadata and not C.bool(metadata.get('publish', "true")):
-                self.disp(u'Publication blocked by "publish" key in metadata, cancelling edition.\n\n' +
-                    "temporary file path:\t{content_path}\nmetadata file path:\t{meta_path}".format(
-                    content_path=content_file_path, meta_path=meta_file_path), error=True)
+            if self.use_metadata and not C.bool(metadata.get("publish", "true")):
+                self.disp(
+                    u'Publication blocked by "publish" key in metadata, cancelling edition.\n\n'
+                    + "temporary file path:\t{content_path}\nmetadata file path:\t{meta_path}".format(
+                        content_path=content_file_path, meta_path=meta_file_path
+                    ),
+                    error=True,
+                )
                 self.host.quit()
 
             if len(content) == 0:
                 self.disp(u"Content is empty, cancelling the edition")
-                if not content_file_path.startswith(getTmpDir(self.sat_conf, self.cat_dir_str)):
-                    self.disp(u"Files are not in the SàT temporary hierarchy, we do not remove them", 2)
+                if not content_file_path.startswith(
+                    getTmpDir(self.sat_conf, self.cat_dir_str)
+                ):
+                    self.disp(
+                        u"Files are not in the SàT temporary hierarchy, we do not remove them",
+                        2,
+                    )
                     self.host.quit()
-                self.disp(u"Deletion of {}".format(content_file_path.decode('utf-8')), 2)
+                self.disp(u"Deletion of {}".format(content_file_path.decode("utf-8")), 2)
                 os.unlink(content_file_path)
                 if self.use_metadata:
-                    self.disp(u"Deletion of {}".format(meta_file_path.decode('utf-8')), 2)
+                    self.disp(u"Deletion of {}".format(meta_file_path.decode("utf-8")), 2)
                     os.unlink(meta_file_path)
                 self.host.quit()
 
             # time to re-check the hash
-            elif (tmp_ori_hash == hashlib.sha1(content).digest() and
-                  (not self.use_metadata or meta_ori == metadata)):
+            elif tmp_ori_hash == hashlib.sha1(content).digest() and (
+                not self.use_metadata or meta_ori == metadata
+            ):
                 self.disp(u"The content has not been modified, cancelling the edition")
                 self.host.quit()
 
             else:
                 # we can now send the item
-                content = content.decode('utf-8-sig') # we use utf-8-sig to avoid BOM
+                content = content.decode("utf-8-sig")  # we use utf-8-sig to avoid BOM
                 try:
                     if self.use_metadata:
                         self.publish(content, metadata)
@@ -267,11 +314,21 @@
                         self.publish(content)
                 except Exception as e:
                     if self.use_metadata:
-                        self.disp(u"Error while sending your item, the temporary files have been kept at {content_path} and {meta_path}: {reason}".format(
-                            content_path=content_file_path, meta_path=meta_file_path, reason=e), error=True)
+                        self.disp(
+                            u"Error while sending your item, the temporary files have been kept at {content_path} and {meta_path}: {reason}".format(
+                                content_path=content_file_path,
+                                meta_path=meta_file_path,
+                                reason=e,
+                            ),
+                            error=True,
+                        )
                     else:
-                        self.disp(u"Error while sending your item, the temporary file has been kept at {content_path}: {reason}".format(
-                            content_path=content_file_path, reason=e), error=True)
+                        self.disp(
+                            u"Error while sending your item, the temporary file has been kept at {content_path}: {reason}".format(
+                                content_path=content_file_path, reason=e
+                            ),
+                            error=True,
+                        )
                     self.host.quit(1)
 
             self.secureUnlink(content_file_path)
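
The whole post-edit check rests on two details visible above: the draft is published only if its SHA-1 digest (or the metadata dict) differs from the original, and the edited bytes are decoded as utf-8-sig so a byte-order mark written by the editor never reaches the published item. Both in miniature:

    import hashlib

    original = b"first draft"
    edited = b"first draft"
    if hashlib.sha1(original).digest() == hashlib.sha1(edited).digest():
        print("unchanged, edition would be cancelled")

    # utf-8-sig silently drops a leading BOM that some editors add when saving
    assert b"\xef\xbb\xbfhello".decode("utf-8-sig") == u"hello"
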
@@ -288,23 +345,32 @@
         @param suff (str): suffix to use for the filename
         @return (tuple(file, str)): opened (w+b) file object and file path
         """
-        suff = '.' + self.getTmpSuff()
+        suff = "." + self.getTmpSuff()
         cat_dir_str = self.cat_dir_str
-        tmp_dir = getTmpDir(self.sat_conf, self.cat_dir_str, self.profile.encode('utf-8'))
+        tmp_dir = getTmpDir(self.sat_conf, self.cat_dir_str, self.profile.encode("utf-8"))
         if not os.path.exists(tmp_dir):
             try:
                 os.makedirs(tmp_dir)
             except OSError as e:
-                self.disp(u"Can't create {path} directory: {reason}".format(
-                    path=tmp_dir, reason=e), error=True)
+                self.disp(
+                    u"Can't create {path} directory: {reason}".format(
+                        path=tmp_dir, reason=e
+                    ),
+                    error=True,
+                )
                 self.host.quit(1)
         try:
-            fd, path = tempfile.mkstemp(suffix=suff.encode('utf-8'),
-                prefix=time.strftime(cat_dir_str + '_%Y-%m-%d_%H:%M:%S_'),
-                dir=tmp_dir, text=True)
-            return os.fdopen(fd, 'w+b'), path
+            fd, path = tempfile.mkstemp(
+                suffix=suff.encode("utf-8"),
+                prefix=time.strftime(cat_dir_str + "_%Y-%m-%d_%H:%M:%S_"),
+                dir=tmp_dir,
+                text=True,
+            )
+            return os.fdopen(fd, "w+b"), path
         except OSError as e:
-            self.disp(u"Can't create temporary file: {reason}".format(reason=e), error=True)
+            self.disp(
+                u"Can't create temporary file: {reason}".format(reason=e), error=True
+            )
             self.host.quit(1)
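
getTmpFile creates each draft with tempfile.mkstemp inside a per-profile directory, using a timestamped, category-prefixed name so the newest draft can later be found again by modification time. The naming pattern, sketched with a made-up "blog" category prefix:

    import os, tempfile, time

    tmp_dir = tempfile.mkdtemp()                    # stand-in for the SàT tmp hierarchy
    fd, path = tempfile.mkstemp(
        suffix=".xml",
        prefix=time.strftime("blog_%Y-%m-%d_%H:%M:%S_"),
        dir=tmp_dir,
        text=True,
    )
    draft = os.fdopen(fd, "w+b")                    # reuse the descriptor mkstemp opened
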
 
     def getCurrentFile(self, profile):
@@ -317,10 +383,17 @@
         # the most recent file corresponding to temp file pattern
         # in tmp_dir, excluding metadata files
         cat_dir_str = self.cat_dir_str
-        tmp_dir = getTmpDir(self.sat_conf, self.cat_dir_str, profile.encode('utf-8'))
-        available = [path for path in glob.glob(os.path.join(tmp_dir, cat_dir_str + '_*')) if not path.endswith(METADATA_SUFF)]
+        tmp_dir = getTmpDir(self.sat_conf, self.cat_dir_str, profile.encode("utf-8"))
+        available = [
+            path
+            for path in glob.glob(os.path.join(tmp_dir, cat_dir_str + "_*"))
+            if not path.endswith(METADATA_SUFF)
+        ]
         if not available:
-            self.disp(u"Could not find any content draft in {path}".format(path=tmp_dir), error=True)
+            self.disp(
+                u"Could not find any content draft in {path}".format(path=tmp_dir),
+                error=True,
+            )
             self.host.quit(1)
         return max(available, key=lambda path: os.stat(path).st_mtime)
 
@@ -330,7 +403,7 @@
 
     def getTmpSuff(self):
         """return suffix used for content file"""
-        return u'xml'
+        return u"xml"
 
     def getItemPath(self):
         """retrieve item path (i.e. service and node) from item argument
@@ -345,8 +418,8 @@
         if self.args.current:
             # user wants to continue current draft
             content_file_path = self.getCurrentFile(self.profile)
-            self.disp(u'Continuing edition of current draft', 2)
-            content_file_obj = open(content_file_path, 'r+b')
+            self.disp(u"Continuing edition of current draft", 2)
+            content_file_obj = open(content_file_path, "r+b")
             # we seek to the end of the file in case an item already exists
             # this will write content of the existing item at the end of the draft.
             # This way no data should be lost.
@@ -354,7 +427,7 @@
         elif self.args.draft_path:
             # there is an existing draft that we use
             content_file_path = os.path.expanduser(self.args.item)
-            content_file_obj = open(content_file_path, 'r+b')
+            content_file_obj = open(content_file_path, "r+b")
             # we seek at the end for the same reason as above
             content_file_obj.seek(0, os.SEEK_END)
         else:
@@ -362,7 +435,7 @@
             content_file_obj, content_file_path = self.getTmpFile()
 
         if item or last_item:
-            self.disp(u'Editing requested published item', 2)
+            self.disp(u"Editing requested published item", 2)
             try:
                 if self.use_metadata:
                     content, metadata, item = self.getItemData(service, node, item)
@@ -370,13 +443,18 @@
                     content, item = self.getItemData(service, node, item)
             except Exception as e:
                 # FIXME: ugly, but we have no good way to check errors in the bridge
-                if u'item-not-found' in unicode(e):
-                    # item doesn't exist, we create a new one with requested id
+                if u"item-not-found" in unicode(e):
+                    #  item doesn't exist, we create a new one with requested id
                     metadata = None
                     if last_item:
-                        self.disp(_(u'no item found at all, we create a new one'), 2)
+                        self.disp(_(u"no item found at all, we create a new one"), 2)
                     else:
-                        self.disp(_(u'item "{item_id}" not found, we create a new item with this id').format(item_id=item), 2)
+                        self.disp(
+                            _(
+                                u'item "{item_id}" not found, we create a new item with this id'
+                            ).format(item_id=item),
+                            2,
+                        )
                     content_file_obj.seek(0)
                 else:
                     self.disp(u"Error while retrieving item: {}".format(e))
@@ -386,12 +464,14 @@
                 if content_file_obj.tell() != 0:
                     # we already have a draft,
                     # we copy item content after it and add an indicator
-                    content_file_obj.write('\n*****\n')
-                content_file_obj.write(content.encode('utf-8'))
+                    content_file_obj.write("\n*****\n")
+                content_file_obj.write(content.encode("utf-8"))
                 content_file_obj.seek(0)
-                self.disp(_(u'item "{item_id}" found, we edit it').format(item_id=item), 2)
+                self.disp(
+                    _(u'item "{item_id}" found, we edit it').format(item_id=item), 2
+                )
         else:
-            self.disp(u'Editing a new item', 2)
+            self.disp(u"Editing a new item", 2)
             if self.use_metadata:
                 metadata = None
 
@@ -402,7 +482,6 @@
 
 
 class Table(object):
-
     def __init__(self, host, data, headers=None, filters=None, use_buffer=False):
         """
         @param data(iterable[list]): table data
@@ -421,17 +500,17 @@
         """
         self.host = host
         self._buffer = [] if use_buffer else None
-        # headers are columns names/titles, can be None
+        #  headers are columns names/titles, can be None
         self.headers = headers
-        # sizes of columns without headers,
+        #  sizes of columns without headers,
         # headers may be larger
         self.sizes = []
-        # rows contains one list per row with column values
+        #  rows contains one list per row with column values
         self.rows = []
 
         size = None
         if headers:
-            row_cls = namedtuple('RowData', headers)
+            row_cls = namedtuple("RowData", headers)
         else:
             row_cls = tuple
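
When headers are supplied, every row is stored as a namedtuple built from them, so later code can address cells by column name as well as by index (which also implies the headers must be valid Python identifiers). For example:

    from collections import namedtuple

    RowData = namedtuple("RowData", ["name", "status"])
    row = RowData(name="black", status="applied")
    assert row.status == row[1] == "applied"
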
 
@@ -462,21 +541,21 @@
             if size is None:
                 size = len(new_row)
                 if headers is not None and len(headers) != size:
-                    raise exceptions.DataError(u'headers size is not coherent with rows')
+                    raise exceptions.DataError(u"headers size is not coherent with rows")
             else:
                 if len(new_row) != size:
-                    raise exceptions.DataError(u'rows size is not coherent')
+                    raise exceptions.DataError(u"rows size is not coherent")
             self.rows.append(new_row)
 
         if not data and headers is not None:
-            # the table is empty, we print headers at their length
+            #  the table is empty, we print headers at their length
             self.sizes = [len(h) for h in headers]
 
     @property
     def string(self):
         if self._buffer is None:
-            raise exceptions.InternalError(u'buffer must be used to get a string')
-        return u'\n'.join(self._buffer)
+            raise exceptions.InternalError(u"buffer must be used to get a string")
+        return u"\n".join(self._buffer)
 
     @staticmethod
     def readDictValues(data, keys, defaults=None):
@@ -510,15 +589,17 @@
         """
         if keys is None and headers is not None:
             # FIXME: keys are not needed with OrderedDict,
-            raise exceptions.DataError(u'You must specify keys order to use headers')
+            raise exceptions.DataError(u"You must specify keys order to use headers")
         if keys is None:
             keys = data[0].keys()
         if headers is None:
             headers = keys
         filters = [filters.get(k) for k in keys]
-        return cls(host, (cls.readDictValues(d, keys, defaults) for d in data), headers, filters)
+        return cls(
+            host, (cls.readDictValues(d, keys, defaults) for d in data), headers, filters
+        )
 
-    def _headers(self, head_sep, headers, sizes, alignment=u'left', style=None):
+    def _headers(self, head_sep, headers, sizes, alignment=u"left", style=None):
         """Render headers
 
         @param head_sep(unicode): sequence to use as separator
@@ -532,14 +613,14 @@
             style = [style]
         for idx, header in enumerate(headers):
             size = sizes[idx]
-            if alignment == u'left':
+            if alignment == u"left":
                 rendered = header[:size].ljust(size)
-            elif alignment == u'center':
+            elif alignment == u"center":
                 rendered = header[:size].center(size)
-            elif alignment == u'right':
+            elif alignment == u"right":
                 rendered = header[:size].rjust(size)
             else:
-                raise exceptions.InternalError(u'bad alignment argument')
+                raise exceptions.InternalError(u"bad alignment argument")
             if style:
                 args = style + [rendered]
                 rendered = A.color(*args)
@@ -553,30 +634,31 @@
         else:
             self.host.disp(data)
 
-    def display(self,
-                head_alignment = u'left',
-                columns_alignment = u'left',
-                head_style = None,
-                show_header=True,
-                show_borders=True,
-                hide_cols=None,
-                col_sep=u' │ ',
-                top_left=u'┌',
-                top=u'─',
-                top_sep=u'─┬─',
-                top_right=u'┐',
-                left=u'│',
-                right=None,
-                head_sep=None,
-                head_line=u'┄',
-                head_line_left=u'├',
-                head_line_sep=u'┄┼┄',
-                head_line_right=u'┤',
-                bottom_left=u'└',
-                bottom=None,
-                bottom_sep=u'─┴─',
-                bottom_right=u'┘',
-                ):
+    def display(
+        self,
+        head_alignment=u"left",
+        columns_alignment=u"left",
+        head_style=None,
+        show_header=True,
+        show_borders=True,
+        hide_cols=None,
+        col_sep=u" │ ",
+        top_left=u"┌",
+        top=u"─",
+        top_sep=u"─┬─",
+        top_right=u"┐",
+        left=u"│",
+        right=None,
+        head_sep=None,
+        head_line=u"┄",
+        head_line_left=u"├",
+        head_line_sep=u"┄┼┄",
+        head_line_right=u"┤",
+        bottom_left=u"└",
+        bottom=None,
+        bottom_sep=u"─┴─",
+        bottom_right=u"┘",
+    ):
         """Print the table
 
         @param show_header(bool): True if the header needs to be shown
@@ -618,14 +700,12 @@
         if bottom_sep is None:
             bottom_sep = col_sep_size * bottom
         if not show_borders:
-            left = right = head_line_left = head_line_right = u''
+            left = right = head_line_left = head_line_right = u""
         # top border
         if show_borders:
             self._disp(
-                top_left
-                + top_sep.join([top*size for size in sizes])
-                + top_right
-                )
+                top_left + top_sep.join([top * size for size in sizes]) + top_right
+            )
 
         # headers
         if show_header:
@@ -633,42 +713,46 @@
                 left
                 + self._headers(head_sep, headers, sizes, head_alignment, head_style)
                 + right
-                )
+            )
             # header line
             self._disp(
                 head_line_left
-                + head_line_sep.join([head_line*size for size in sizes])
+                + head_line_sep.join([head_line * size for size in sizes])
                 + head_line_right
-                )
+            )
 
         # content
-        if columns_alignment == u'left':
+        if columns_alignment == u"left":
             alignment = lambda idx, s: ansi_ljust(s, sizes[idx])
-        elif columns_alignment == u'center':
+        elif columns_alignment == u"center":
             alignment = lambda idx, s: ansi_center(s, sizes[idx])
-        elif columns_alignment == u'right':
+        elif columns_alignment == u"right":
             alignment = lambda idx, s: ansi_rjust(s, sizes[idx])
         else:
-            raise exceptions.InternalError(u'bad columns alignment argument')
+            raise exceptions.InternalError(u"bad columns alignment argument")
 
         for row in self.rows:
             if hide_cols:
-                row = [v for idx,v in enumerate(row) if idx not in ignore_idx]
-            self._disp(left + col_sep.join([alignment(idx,c) for idx,c in enumerate(row)]) + right)
+                row = [v for idx, v in enumerate(row) if idx not in ignore_idx]
+            self._disp(
+                left
+                + col_sep.join([alignment(idx, c) for idx, c in enumerate(row)])
+                + right
+            )
 
         if show_borders:
             # bottom border
             self._disp(
                 bottom_left
-                + bottom_sep.join([bottom*size for size in sizes])
+                + bottom_sep.join([bottom * size for size in sizes])
                 + bottom_right
-                )
-        # we return self so string can be used after display (table.display().string)
+            )
+        #  we return self so string can be used after display (table.display().string)
         return self
 
     def display_blank(self, **kwargs):
         """Display table without visible borders"""
-        kwargs_ = {'col_sep':u' ', 'head_line_sep':u' ', 'show_borders':False}
+        kwargs_ = {"col_sep": u" ", "head_line_sep": u" ", "show_borders": False}
         kwargs_.update(kwargs)
         return self.display(**kwargs_)
 
@@ -696,12 +780,16 @@
             self.key = key
             self.callback = callback
             self.meta_map = meta_map
-            self.host.bridge.URIFind(path,
-                                     [key],
-                                     callback=self.URIFindCb,
-                                     errback=partial(command.errback,
-                                                     msg=_(u"can't find " + key + u" URI: {}"),
-                                                     exit_code=C.EXIT_BRIDGE_ERRBACK))
+            self.host.bridge.URIFind(
+                path,
+                [key],
+                callback=self.URIFindCb,
+                errback=partial(
+                    command.errback,
+                    msg=_(u"can't find " + key + u" URI: {}"),
+                    exit_code=C.EXIT_BRIDGE_ERRBACK,
+                ),
+            )
         else:
             callback()
 
@@ -723,29 +811,34 @@
             try:
                 values = getattr(self.args, key)
             except AttributeError:
-                raise exceptions.InternalError(u'there is no "{key}" arguments'.format(
-                    key=key))
+                raise exceptions.InternalError(
+                    u'there is no "{key}" arguments'.format(key=key)
+                )
             else:
                 if values is None:
                     values = []
                 values.extend(json.loads(new_values_json))
                 setattr(self.args, dest, values)
 
-
     def URIFindCb(self, uris_data):
         try:
             uri_data = uris_data[self.key]
         except KeyError:
-            self.host.disp(_(u"No {key} URI specified for this project, please specify service and node").format(key=self.key), error=True)
+            self.host.disp(
+                _(
+                    u"No {key} URI specified for this project, please specify service and node"
+                ).format(key=self.key),
+                error=True,
+            )
             self.host.quit(C.EXIT_NOT_FOUND)
         else:
-            uri = uri_data[u'uri']
+            uri = uri_data[u"uri"]
 
-        self.setMetadataList(uri_data, u'labels')
+        self.setMetadataList(uri_data, u"labels")
         parsed_uri = xmpp_uri.parseXMPPUri(uri)
         try:
-            self.args.service = parsed_uri[u'path']
-            self.args.node = parsed_uri[u'node']
+            self.args.service = parsed_uri[u"path"]
+            self.args.node = parsed_uri[u"node"]
         except KeyError:
             self.host.disp(_(u"Invalid URI found: {uri}").format(uri=uri), error=True)
             self.host.quit(C.EXIT_DATA_ERROR)