diff sat_frontends/jp/common.py @ 3568:04283582966f

core, frontends: fix invalid translatable strings. Some f-strings were used in translatable text; this has been fixed by using explicit `format()` calls (via a script based on `tokenize`). As `tokenize` messes with spaces, a reformatting tool (`black`) has been applied to some files afterwards.
author Goffi <goffi@goffi.org>
date Mon, 14 Jun 2021 18:35:12 +0200
parents be6d91572633
children 82e616b70a2a
line wrap: on
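
For context on the fix described above: gettext-style translation only works when `_()` receives a constant string literal, because extraction tools (xgettext, Babel) collect msgids statically and the runtime lookup must match a catalog entry exactly. With an f-string the interpolation happens before `_()` is even called, so nothing can be extracted and the lookup never matches. A minimal illustration of the pattern corrected throughout this diff (the message and the `item` variable mirror the hunks below; this snippet is not code from the changeset):

    from gettext import gettext as _

    item = "some-id"

    # Broken: the f-string is interpolated *before* _() runs, so _() receives
    # 'item "some-id" found, we edit it', a string that no .po catalog
    # contains and that xgettext cannot extract as a msgid.
    msg = _(f'item "{item}" found, we edit it')

    # Fixed: _() receives a constant msgid; the placeholder is substituted
    # only after the translated string has been looked up.
    msg = _('item "{item}" found, we edit it').format(item=item)

A sketch of how such f-strings can be located with `tokenize` follows the diff.
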
line diff
--- a/sat_frontends/jp/common.py	Mon Jun 14 12:19:21 2021 +0200
+++ b/sat_frontends/jp/common.py	Mon Jun 14 18:35:12 2021 +0200
@@ -143,12 +143,11 @@
             raise OSError("path must link to a regular file")
         if path.parent != getTmpDir(self.sat_conf, self.cat_dir):
             self.disp(
-                f"File {path} is not in SàT temporary hierarchy, we do not remove "
-                f"it",
+                f"File {path} is not in SàT temporary hierarchy, we do not remove " f"it",
                 2,
             )
             return
-        # we have 2 files per draft with use_metadata, so we double max
+            # we have 2 files per draft with use_metadata, so we double max
         unlink_max = SECURE_UNLINK_MAX * 2 if self.use_metadata else SECURE_UNLINK_MAX
         backup_dir = getTmpDir(self.sat_conf, self.cat_dir, SECURE_UNLINK_DIR)
         if not os.path.exists(backup_dir):
@@ -157,9 +156,7 @@
         backup_path = os.path.join(backup_dir, filename)
         # we move file to backup dir
         self.host.disp(
-            "Backuping file {src} to {dst}".format(
-                src=path, dst=backup_path
-            ),
+            "Backuping file {src} to {dst}".format(src=path, dst=backup_path),
             1,
         )
         os.rename(path, backup_path)
@@ -171,8 +168,14 @@
                 self.host.disp("Purging backup file {}".format(path), 2)
                 os.unlink(path)
 
-    async def runEditor(self, editor_args_opt, content_file_path, content_file_obj,
-                        meta_file_path=None, meta_ori=None):
+    async def runEditor(
+        self,
+        editor_args_opt,
+        content_file_path,
+        content_file_obj,
+        meta_file_path=None,
+        meta_ori=None,
+    ):
         """Run editor to edit content and metadata
 
         @param editor_args_opt(unicode): option in [jp] section in configuration for
@@ -188,7 +191,7 @@
             assert meta_file_path is None
             assert meta_ori is None
 
-        # we calculate hashes to check for modifications
+            # we calculate hashes to check for modifications
         import hashlib
 
         content_file_obj.seek(0)
@@ -217,9 +220,10 @@
         if not args:
             args = [content_file_path]
 
-        # actual editing
+            # actual editing
         editor_process = await asyncio.create_subprocess_exec(
-            editor, *[str(a) for a in args])
+            editor, *[str(a) for a in args]
+        )
         editor_exit = await editor_process.wait()
 
         # edition will now be checked, and data will be sent if it was a success
@@ -243,7 +247,7 @@
                 )
                 self.host.quit(C.EXIT_NOT_FOUND)
 
-            # metadata
+                # metadata
             if self.use_metadata:
                 try:
                     with meta_file_path.open("rb") as f:
@@ -268,8 +272,8 @@
             if self.use_metadata and not metadata.get("publish", True):
                 self.disp(
                     f'Publication blocked by "publish" key in metadata, cancelling '
-                    f'edition.\n\ntemporary file path:\t{content_file_path}\nmetadata '
-                    f'file path:\t{meta_file_path}',
+                    f"edition.\n\ntemporary file path:\t{content_file_path}\nmetadata "
+                    f"file path:\t{meta_file_path}",
                     error=True,
                 )
                 self.host.quit()
@@ -289,7 +293,7 @@
                     os.unlink(meta_file_path)
                 self.host.quit()
 
-            # time to re-check the hash
+                # time to re-check the hash
             elif tmp_ori_hash == hashlib.sha1(content).digest() and (
                 not self.use_metadata or meta_ori == metadata
             ):
@@ -354,9 +358,7 @@
             )
             return os.fdopen(fd, "w+b"), Path(path)
         except OSError as e:
-            self.disp(
-                f"Can't create temporary file: {e}", error=True
-            )
+            self.disp(f"Can't create temporary file: {e}", error=True)
             self.host.quit(1)
 
     def getCurrentFile(self, profile):
@@ -371,7 +373,7 @@
         tmp_dir = getTmpDir(self.sat_conf, self.cat_dir, profile)
         available = [
             p
-            for p in tmp_dir.glob(f'{self.cat_dir}_*')
+            for p in tmp_dir.glob(f"{self.cat_dir}_*")
             if not p.match(f"*{METADATA_SUFF}")
         ]
         if not available:
@@ -436,9 +438,9 @@
                     else:
                         self.disp(
                             _(
-                                f'item "{item}" not found, we create a new item with'
-                                f'this id'
-                            ),
+                                'item "{item}" not found, we create a new item with'
+                                "this id"
+                            ).format(item=item),
                             2,
                         )
                     content_file_obj.seek(0)
@@ -453,9 +455,7 @@
                     content_file_obj.write("\n*****\n")
                 content_file_obj.write(content.encode("utf-8"))
                 content_file_obj.seek(0)
-                self.disp(
-                    _(f'item "{item}" found, we edit it'), 2
-                )
+                self.disp(_('item "{item}" found, we edit it').format(item=item), 2)
         else:
             self.disp("Editing a new item", 2)
             if self.use_metadata:
@@ -498,7 +498,7 @@
         size = None
         if headers:
             # we use a namedtuple to make the value easily accessible from filters
-            headers_safe = [re.sub(r'[^a-zA-Z_]', '_', h) for h in headers]
+            headers_safe = [re.sub(r"[^a-zA-Z_]", "_", h) for h in headers]
             row_cls = namedtuple("RowData", headers_safe)
         else:
             row_cls = tuple
@@ -516,8 +516,8 @@
                             col_value = filter_(value, row_cls(*row_data_list))
                         except TypeError:
                             col_value = filter_(value)
-                    # we count size without ANSI code as they will change length of the
-                    # string when it's mostly style/color changes.
+                            # we count size without ANSI code as they will change length of the
+                            # string when it's mostly style/color changes.
                     col_size = len(regex.ansiRemove(col_value))
                 else:
                     col_value = str(value)
@@ -562,7 +562,8 @@
 
     @classmethod
     def fromListDict(
-        cls, host, data, keys=None, headers=None, filters=None, defaults=None):
+        cls, host, data, keys=None, headers=None, filters=None, defaults=None
+    ):
         """Create a table from a list of dictionaries
 
         each dictionary is a row of the table, keys being columns names.
@@ -694,13 +695,13 @@
             bottom_sep = col_sep_size * bottom
         if not show_borders:
             left = right = head_line_left = head_line_right = ""
-        # top border
+            # top border
         if show_borders:
             self._disp(
                 top_left + top_sep.join([top * size for size in sizes]) + top_right
             )
 
-        # headers
+            # headers
         if show_header and self.headers is not None:
             self._disp(
                 left
@@ -714,7 +715,7 @@
                 + head_line_right
             )
 
-        # content
+            # content
         if columns_alignment == "left":
             alignment = lambda idx, s: ansi_ljust(s, sizes[idx])
         elif columns_alignment == "center":
@@ -740,7 +741,7 @@
                 + bottom_sep.join([bottom * size for size in sizes])
                 + bottom_right
             )
-        #  we return self so string can be used after display (table.display().string)
+            #  we return self so string can be used after display (table.display().string)
         return self
 
     def display_blank(self, **kwargs):
@@ -781,8 +782,10 @@
         uri_data = uris_data[key]
     except KeyError:
         host.disp(
-            _(f"No {key} URI specified for this project, please specify service and "
-              f"node"),
+            _(
+                "No {key} URI specified for this project, please specify service and "
+                "node"
+            ).format(key=key),
             error=True,
         )
         host.quit(C.EXIT_NOT_FOUND)
@@ -790,7 +793,7 @@
     uri = uri_data["uri"]
 
     # set extra metadata if they are specified
-    for data_key in ['labels']:
+    for data_key in ["labels"]:
         new_values_json = uri_data.get(data_key)
         if uri_data is not None:
             if meta_map is None:
@@ -803,7 +806,7 @@
             try:
                 values = getattr(args, data_key)
             except AttributeError:
-                raise exceptions.InternalError(f'there is no {data_key!r} arguments')
+                raise exceptions.InternalError(f"there is no {data_key!r} arguments")
             else:
                 if values is None:
                     values = []
@@ -815,5 +818,5 @@
         args.service = parsed_uri["path"]
         args.node = parsed_uri["node"]
     except KeyError:
-        host.disp(_(f"Invalid URI found: {uri}"), error=True)
+        host.disp(_("Invalid URI found: {uri}").format(uri=uri), error=True)
         host.quit(C.EXIT_DATA_ERROR)
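
The changeset message mentions that the offending literals were found with a script based on `tokenize`. That script is not part of this diff; the following is only a minimal sketch of how such occurrences can be located (the function name `find_fstrings_in_gettext` is hypothetical, and it assumes Python < 3.12, where an f-string is still a single STRING token; the real script additionally rewrote the literals rather than merely reporting them):

    import io
    import tokenize

    def find_fstrings_in_gettext(source: str):
        """Yield ((row, col), literal) for f-string literals passed directly to _()."""
        tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))
        for name_tok, paren_tok, str_tok in zip(tokens, tokens[1:], tokens[2:]):
            if (
                name_tok.type == tokenize.NAME
                and name_tok.string == "_"
                and paren_tok.type == tokenize.OP
                and paren_tok.string == "("
                and str_tok.type == tokenize.STRING
            ):
                literal = str_tok.string
                # the string prefix is whatever precedes the opening quote
                prefix = literal[: len(literal) - len(literal.lstrip("bBfFrRuU"))]
                if "f" in prefix.lower():
                    yield str_tok.start, literal

    if __name__ == "__main__":
        sample = 'host.disp(_(f"Invalid URI found: {uri}"), error=True)\n'
        for (row, col), literal in find_fstrings_in_gettext(sample):
            print(f"f-string inside _() at {row}:{col} -> {literal}")

Run on the pre-change version of the last hunk above, this reports the `f"Invalid URI found: {uri}"` literal, which the changeset rewrites as `_("Invalid URI found: {uri}").format(uri=uri)`.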