Mercurial > libervia-backend
comparison sat_frontends/jp/cmd_blog.py @ 3028:ab2696e34d29
Python 3 port:
/!\ this is a huge commit
/!\ starting from this commit, SàT needs Python 3.6+
/!\ SàT may be unstable or some features may not work anymore, this will improve with time
This patch ports the backend, bridge and frontends to Python 3.
Roughly this has been done this way:
- the 2to3 tool has been applied (with python 3.7)
- all references to python2 have been replaced with python3 (notably shebangs)
- fixed files not handled by 2to3 (notably the shell script)
- several manual fixes
- fixed issues reported by Python 3 that were not handled in Python 2
- replaced "async" with "async_" when needed (it's a reserved word from Python 3.7)
- replaced zope's "implements" with @implementer decorator
- temporary hack to handle data pickled in database, as str or bytes may be returned,
to be checked later
- fixed hash comparison for password
- removed some code which is not needed anymore with Python 3
- deactivated some code which needs to be checked (notably certificate validation)
- tested with jp, fixed reported issues until some basic commands worked
- ported Primitivus (after porting dependencies like urwid satext)
- more manual fixes
author | Goffi <goffi@goffi.org> |
---|---|
date | Tue, 13 Aug 2019 19:08:41 +0200 |
parents | d603550d5e99 |
children | fee60f17ebac |
comparison
equal
deleted
inserted
replaced
3027:ff5bcb12ae60 | 3028:ab2696e34d29 |
---|---|
16 | 16 |
17 # You should have received a copy of the GNU Affero General Public License | 17 # You should have received a copy of the GNU Affero General Public License |
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. | 18 # along with this program. If not, see <http://www.gnu.org/licenses/>. |
19 | 19 |
20 | 20 |
21 import base | 21 from . import base |
22 from sat.core.i18n import _ | 22 from sat.core.i18n import _ |
23 from sat_frontends.jp.constants import Const as C | 23 from sat_frontends.jp.constants import Const as C |
24 from sat_frontends.jp import common | 24 from sat_frontends.jp import common |
25 from sat.tools.common.ansi import ANSI as A | 25 from sat.tools.common.ansi import ANSI as A |
26 from sat.tools.common import data_objects | 26 from sat.tools.common import data_objects |
27 from sat.tools.common import uri | 27 from sat.tools.common import uri |
28 from sat.tools import config | 28 from sat.tools import config |
29 from ConfigParser import NoSectionError, NoOptionError | 29 from configparser import NoSectionError, NoOptionError |
30 from functools import partial | 30 from functools import partial |
31 import json | 31 import json |
32 import sys | 32 import sys |
33 import os.path | 33 import os.path |
34 import os | 34 import os |
38 import codecs | 38 import codecs |
39 from sat.tools.common import data_format | 39 from sat.tools.common import data_format |
40 | 40 |
41 __commands__ = ["Blog"] | 41 __commands__ = ["Blog"] |
42 | 42 |
43 SYNTAX_XHTML = u"xhtml" | 43 SYNTAX_XHTML = "xhtml" |
44 # extensions to use with known syntaxes | 44 # extensions to use with known syntaxes |
45 SYNTAX_EXT = { | 45 SYNTAX_EXT = { |
46 # FIXME: default syntax doesn't sounds needed, there should always be a syntax set | 46 # FIXME: default syntax doesn't sounds needed, there should always be a syntax set |
47 # by the plugin. | 47 # by the plugin. |
48 "": "txt", # used when the syntax is not found | 48 "": "txt", # used when the syntax is not found |
49 SYNTAX_XHTML: "xhtml", | 49 SYNTAX_XHTML: "xhtml", |
50 "markdown": "md", | 50 "markdown": "md", |
51 } | 51 } |
52 | 52 |
53 | 53 |
54 CONF_SYNTAX_EXT = u"syntax_ext_dict" | 54 CONF_SYNTAX_EXT = "syntax_ext_dict" |
55 BLOG_TMP_DIR = u"blog" | 55 BLOG_TMP_DIR = "blog" |
56 # key to remove from metadata tmp file if they exist | 56 # key to remove from metadata tmp file if they exist |
57 KEY_TO_REMOVE_METADATA = ( | 57 KEY_TO_REMOVE_METADATA = ( |
58 "id", | 58 "id", |
59 "content", | 59 "content", |
60 "content_xhtml", | 60 "content_xhtml", |
64 ) | 64 ) |
65 | 65 |
66 URL_REDIRECT_PREFIX = "url_redirect_" | 66 URL_REDIRECT_PREFIX = "url_redirect_" |
67 INOTIFY_INSTALL = '"pip install inotify"' | 67 INOTIFY_INSTALL = '"pip install inotify"' |
68 MB_KEYS = ( | 68 MB_KEYS = ( |
69 u"id", | 69 "id", |
70 u"url", | 70 "url", |
71 u"atom_id", | 71 "atom_id", |
72 u"updated", | 72 "updated", |
73 u"published", | 73 "published", |
74 u"language", | 74 "language", |
75 u"comments", # this key is used for all comments* keys | 75 "comments", # this key is used for all comments* keys |
76 u"tags", # this key is used for all tag* keys | 76 "tags", # this key is used for all tag* keys |
77 u"author", | 77 "author", |
78 u"author_jid", | 78 "author_jid", |
79 u"author_email", | 79 "author_email", |
80 u"author_jid_verified", | 80 "author_jid_verified", |
81 u"content", | 81 "content", |
82 u"content_xhtml", | 82 "content_xhtml", |
83 u"title", | 83 "title", |
84 u"title_xhtml", | 84 "title_xhtml", |
85 ) | 85 ) |
86 OUTPUT_OPT_NO_HEADER = u"no-header" | 86 OUTPUT_OPT_NO_HEADER = "no-header" |
87 | 87 |
88 | 88 |
89 def guessSyntaxFromPath(host, sat_conf, path): | 89 def guessSyntaxFromPath(host, sat_conf, path): |
90 """Return syntax guessed according to filename extension | 90 """Return syntax guessed according to filename extension |
91 | 91 |
94 @return(unicode): syntax to use | 94 @return(unicode): syntax to use |
95 """ | 95 """ |
96 # we first try to guess syntax with extension | 96 # we first try to guess syntax with extension |
97 ext = os.path.splitext(path)[1][1:] # we get extension without the '.' | 97 ext = os.path.splitext(path)[1][1:] # we get extension without the '.' |
98 if ext: | 98 if ext: |
99 for k, v in SYNTAX_EXT.iteritems(): | 99 for k, v in SYNTAX_EXT.items(): |
100 if k and ext == v: | 100 if k and ext == v: |
101 return k | 101 return k |
102 | 102 |
103 # if not found, we use current syntax | 103 # if not found, we use current syntax |
104 return host.bridge.getParamA("Syntax", "Composition", "value", host.profile) | 104 return host.bridge.getParamA("Syntax", "Composition", "value", host.profile) |
115 ) | 115 ) |
116 return self._current_syntax | 116 return self._current_syntax |
117 | 117 |
118 def add_parser_options(self): | 118 def add_parser_options(self): |
119 self.parser.add_argument( | 119 self.parser.add_argument( |
120 "-T", "--title", type=base.unicode_decoder, help=_(u"title of the item") | 120 "-T", "--title", help=_("title of the item") |
121 ) | 121 ) |
122 self.parser.add_argument( | 122 self.parser.add_argument( |
123 "-t", | 123 "-t", |
124 "--tag", | 124 "--tag", |
125 type=base.unicode_decoder, | |
126 action="append", | 125 action="append", |
127 help=_(u"tag (category) of your item"), | 126 help=_("tag (category) of your item"), |
128 ) | 127 ) |
129 | 128 |
130 comments_group = self.parser.add_mutually_exclusive_group() | 129 comments_group = self.parser.add_mutually_exclusive_group() |
131 comments_group.add_argument( | 130 comments_group.add_argument( |
132 "-C", "--comments", action="store_const", const=True, dest="comments", | 131 "-C", "--comments", action="store_const", const=True, dest="comments", |
133 help=_(u"enable comments (default: comments not enabled except if they " | 132 help=_("enable comments (default: comments not enabled except if they " |
134 u"already exist)") | 133 "already exist)") |
135 ) | 134 ) |
136 comments_group.add_argument( | 135 comments_group.add_argument( |
137 "--no-comments", action="store_const", const=False, dest="comments", | 136 "--no-comments", action="store_const", const=False, dest="comments", |
138 help=_(u"disable comments (will remove comments node if it exist)") | 137 help=_("disable comments (will remove comments node if it exist)") |
139 ) | 138 ) |
140 | 139 |
141 self.parser.add_argument( | 140 self.parser.add_argument( |
142 "-S", | 141 "-S", |
143 "--syntax", | 142 "--syntax", |
144 type=base.unicode_decoder, | 143 help=_("syntax to use (default: get profile's default syntax)"), |
145 help=_(u"syntax to use (default: get profile's default syntax)"), | |
146 ) | 144 ) |
147 | 145 |
148 def setMbDataContent(self, content, mb_data): | 146 def setMbDataContent(self, content, mb_data): |
149 if self.args.syntax is None: | 147 if self.args.syntax is None: |
150 # default syntax has been used | 148 # default syntax has been used |
162 if metadata already exist, it will be overwritten | 160 if metadata already exist, it will be overwritten |
163 """ | 161 """ |
164 if self.args.comments is not None: | 162 if self.args.comments is not None: |
165 mb_data["allow_comments"] = self.args.comments | 163 mb_data["allow_comments"] = self.args.comments |
166 if self.args.tag: | 164 if self.args.tag: |
167 mb_data[u'tags'] = self.args.tag | 165 mb_data['tags'] = self.args.tag |
168 if self.args.title is not None: | 166 if self.args.title is not None: |
169 mb_data["title"] = self.args.title | 167 mb_data["title"] = self.args.title |
170 | 168 |
171 | 169 |
172 class Set(base.CommandBase, BlogPublishCommon): | 170 class Set(base.CommandBase, BlogPublishCommon): |
175 self, | 173 self, |
176 host, | 174 host, |
177 "set", | 175 "set", |
178 use_pubsub=True, | 176 use_pubsub=True, |
179 pubsub_flags={C.SINGLE_ITEM}, | 177 pubsub_flags={C.SINGLE_ITEM}, |
180 help=_(u"publish a new blog item or update an existing one"), | 178 help=_("publish a new blog item or update an existing one"), |
181 ) | 179 ) |
182 BlogPublishCommon.__init__(self) | 180 BlogPublishCommon.__init__(self) |
183 self.need_loop = True | 181 self.need_loop = True |
184 | 182 |
185 def add_parser_options(self): | 183 def add_parser_options(self): |
186 BlogPublishCommon.add_parser_options(self) | 184 BlogPublishCommon.add_parser_options(self) |
187 | 185 |
188 def mbSendCb(self): | 186 def mbSendCb(self): |
189 self.disp(u"Item published") | 187 self.disp("Item published") |
190 self.host.quit(C.EXIT_OK) | 188 self.host.quit(C.EXIT_OK) |
191 | 189 |
192 def start(self): | 190 def start(self): |
193 self._current_syntax = self.args.syntax | 191 self._current_syntax = self.args.syntax |
194 self.pubsub_item = self.args.item | 192 self.pubsub_item = self.args.item |
205 data_format.serialise(mb_data), | 203 data_format.serialise(mb_data), |
206 self.profile, | 204 self.profile, |
207 callback=self.exitCb, | 205 callback=self.exitCb, |
208 errback=partial( | 206 errback=partial( |
209 self.errback, | 207 self.errback, |
210 msg=_(u"can't send item: {}"), | 208 msg=_("can't send item: {}"), |
211 exit_code=C.EXIT_BRIDGE_ERRBACK, | 209 exit_code=C.EXIT_BRIDGE_ERRBACK, |
212 ), | 210 ), |
213 ) | 211 ) |
214 | 212 |
215 | 213 |
216 class Get(base.CommandBase): | 214 class Get(base.CommandBase): |
217 TEMPLATE = u"blog/articles.html" | 215 TEMPLATE = "blog/articles.html" |
218 | 216 |
219 def __init__(self, host): | 217 def __init__(self, host): |
220 extra_outputs = {"default": self.default_output, "fancy": self.fancy_output} | 218 extra_outputs = {"default": self.default_output, "fancy": self.fancy_output} |
221 base.CommandBase.__init__( | 219 base.CommandBase.__init__( |
222 self, | 220 self, |
225 use_verbose=True, | 223 use_verbose=True, |
226 use_pubsub=True, | 224 use_pubsub=True, |
227 pubsub_flags={C.MULTI_ITEMS}, | 225 pubsub_flags={C.MULTI_ITEMS}, |
228 use_output=C.OUTPUT_COMPLEX, | 226 use_output=C.OUTPUT_COMPLEX, |
229 extra_outputs=extra_outputs, | 227 extra_outputs=extra_outputs, |
230 help=_(u"get blog item(s)"), | 228 help=_("get blog item(s)"), |
231 ) | 229 ) |
232 self.need_loop = True | 230 self.need_loop = True |
233 | 231 |
234 def add_parser_options(self): | 232 def add_parser_options(self): |
235 # TODO: a key(s) argument to select keys to display | 233 # TODO: a key(s) argument to select keys to display |
236 self.parser.add_argument( | 234 self.parser.add_argument( |
237 "-k", | 235 "-k", |
238 "--key", | 236 "--key", |
239 type=base.unicode_decoder, | |
240 action="append", | 237 action="append", |
241 dest="keys", | 238 dest="keys", |
242 help=_(u"microblog data key(s) to display (default: depend of verbosity)"), | 239 help=_("microblog data key(s) to display (default: depend of verbosity)"), |
243 ) | 240 ) |
244 # TODO: add MAM filters | 241 # TODO: add MAM filters |
245 | 242 |
246 def template_data_mapping(self, data): | 243 def template_data_mapping(self, data): |
247 return {u"items": data_objects.BlogItems(data, deserialise=False)} | 244 return {"items": data_objects.BlogItems(data, deserialise=False)} |
248 | 245 |
249 def format_comments(self, item, keys): | 246 def format_comments(self, item, keys): |
250 comments_data = data_format.dict2iterdict( | 247 comments_data = data_format.dict2iterdict( |
251 u"comments", item, (u"node", u"service"), pop=True | 248 "comments", item, ("node", "service"), pop=True |
252 ) | 249 ) |
253 lines = [] | 250 lines = [] |
254 for data in comments_data: | 251 for data in comments_data: |
255 lines.append(data[u"comments"]) | 252 lines.append(data["comments"]) |
256 for k in (u"node", u"service"): | 253 for k in ("node", "service"): |
257 if OUTPUT_OPT_NO_HEADER in self.args.output_opts: | 254 if OUTPUT_OPT_NO_HEADER in self.args.output_opts: |
258 header = u"" | 255 header = "" |
259 else: | 256 else: |
260 header = C.A_HEADER + k + u": " + A.RESET | 257 header = C.A_HEADER + k + ": " + A.RESET |
261 lines.append(header + data[k]) | 258 lines.append(header + data[k]) |
262 return u"\n".join(lines) | 259 return "\n".join(lines) |
263 | 260 |
264 def format_tags(self, item, keys): | 261 def format_tags(self, item, keys): |
265 tags = item.pop(u'tags', []) | 262 tags = item.pop('tags', []) |
266 return u", ".join(tags) | 263 return ", ".join(tags) |
267 | 264 |
268 def format_updated(self, item, keys): | 265 def format_updated(self, item, keys): |
269 return self.format_time(item["updated"]) | 266 return self.format_time(item["updated"]) |
270 | 267 |
271 def format_published(self, item, keys): | 268 def format_published(self, item, keys): |
272 return self.format_time(item["published"]) | 269 return self.format_time(item["published"]) |
273 | 270 |
274 def format_url(self, item, keys): | 271 def format_url(self, item, keys): |
275 return uri.buildXMPPUri( | 272 return uri.buildXMPPUri( |
276 u"pubsub", | 273 "pubsub", |
277 subtype=u"microblog", | 274 subtype="microblog", |
278 path=self.metadata[u"service"], | 275 path=self.metadata["service"], |
279 node=self.metadata[u"node"], | 276 node=self.metadata["node"], |
280 item=item[u"id"], | 277 item=item["id"], |
281 ) | 278 ) |
282 | 279 |
283 def get_keys(self): | 280 def get_keys(self): |
284 """return keys to display according to verbosity or explicit key request""" | 281 """return keys to display according to verbosity or explicit key request""" |
285 verbosity = self.args.verbose | 282 verbosity = self.args.verbose |
286 if self.args.keys: | 283 if self.args.keys: |
287 if not set(MB_KEYS).issuperset(self.args.keys): | 284 if not set(MB_KEYS).issuperset(self.args.keys): |
288 self.disp( | 285 self.disp( |
289 u"following keys are invalid: {invalid}.\n" | 286 "following keys are invalid: {invalid}.\n" |
290 u"Valid keys are: {valid}.".format( | 287 "Valid keys are: {valid}.".format( |
291 invalid=u", ".join(set(self.args.keys).difference(MB_KEYS)), | 288 invalid=", ".join(set(self.args.keys).difference(MB_KEYS)), |
292 valid=u", ".join(sorted(MB_KEYS)), | 289 valid=", ".join(sorted(MB_KEYS)), |
293 ), | 290 ), |
294 error=True, | 291 error=True, |
295 ) | 292 ) |
296 self.host.quit(C.EXIT_BAD_ARG) | 293 self.host.quit(C.EXIT_BAD_ARG) |
297 return self.args.keys | 294 return self.args.keys |
298 else: | 295 else: |
299 if verbosity == 0: | 296 if verbosity == 0: |
300 return (u"title", u"content") | 297 return ("title", "content") |
301 elif verbosity == 1: | 298 elif verbosity == 1: |
302 return ( | 299 return ( |
303 u"title", | 300 "title", |
304 u"tags", | 301 "tags", |
305 u"author", | 302 "author", |
306 u"author_jid", | 303 "author_jid", |
307 u"author_email", | 304 "author_email", |
308 u"author_jid_verified", | 305 "author_jid_verified", |
309 u"published", | 306 "published", |
310 u"updated", | 307 "updated", |
311 u"content", | 308 "content", |
312 ) | 309 ) |
313 else: | 310 else: |
314 return MB_KEYS | 311 return MB_KEYS |
315 | 312 |
316 def default_output(self, data): | 313 def default_output(self, data): |
332 if k not in item and k not in k_cb: | 329 if k not in item and k not in k_cb: |
333 continue | 330 continue |
334 if OUTPUT_OPT_NO_HEADER in self.args.output_opts: | 331 if OUTPUT_OPT_NO_HEADER in self.args.output_opts: |
335 header = "" | 332 header = "" |
336 else: | 333 else: |
337 header = u"{k_fmt}{key}:{k_fmt_e} {sep}".format( | 334 header = "{k_fmt}{key}:{k_fmt_e} {sep}".format( |
338 k_fmt=C.A_HEADER, | 335 k_fmt=C.A_HEADER, |
339 key=k, | 336 key=k, |
340 k_fmt_e=A.RESET, | 337 k_fmt_e=A.RESET, |
341 sep=u"\n" if "content" in k else u"", | 338 sep="\n" if "content" in k else "", |
342 ) | 339 ) |
343 value = k_cb[k](item, keys) if k in k_cb else item[k] | 340 value = k_cb[k](item, keys) if k in k_cb else item[k] |
344 if isinstance(value, bool): | 341 if isinstance(value, bool): |
345 value = unicode(value).lower() | 342 value = str(value).lower() |
346 self.disp(header + value) | 343 self.disp(header + value) |
347 # we want a separation line after each item but the last one | 344 # we want a separation line after each item but the last one |
348 if idx < len(items) - 1: | 345 if idx < len(items) - 1: |
349 print(u"") | 346 print("") |
350 | 347 |
351 def format_time(self, timestamp): | 348 def format_time(self, timestamp): |
352 """return formatted date for timestamp | 349 """return formatted date for timestamp |
353 | 350 |
354 @param timestamp(str,int,float): unix timestamp | 351 @param timestamp(str,int,float): unix timestamp |
355 @return (unicode): formatted date | 352 @return (unicode): formatted date |
356 """ | 353 """ |
357 fmt = u"%d/%m/%Y %H:%M:%S" | 354 fmt = "%d/%m/%Y %H:%M:%S" |
358 return time.strftime(fmt, time.localtime(float(timestamp))) | 355 return time.strftime(fmt, time.localtime(float(timestamp))) |
359 | 356 |
360 def fancy_output(self, data): | 357 def fancy_output(self, data): |
361 """display blog is a nice to read way | 358 """display blog is a nice to read way |
362 | 359 |
363 this output doesn't use keys filter | 360 this output doesn't use keys filter |
364 """ | 361 """ |
365 # thanks to http://stackoverflow.com/a/943921 | 362 # thanks to http://stackoverflow.com/a/943921 |
366 rows, columns = map(int, os.popen("stty size", "r").read().split()) | 363 rows, columns = list(map(int, os.popen("stty size", "r").read().split())) |
367 items, metadata = data | 364 items, metadata = data |
368 verbosity = self.args.verbose | 365 verbosity = self.args.verbose |
369 sep = A.color(A.FG_BLUE, columns * u"▬") | 366 sep = A.color(A.FG_BLUE, columns * "▬") |
370 if items: | 367 if items: |
371 print(u"\n" + sep + "\n") | 368 print(("\n" + sep + "\n")) |
372 | 369 |
373 for idx, item in enumerate(items): | 370 for idx, item in enumerate(items): |
374 title = item.get(u"title") | 371 title = item.get("title") |
375 if verbosity > 0: | 372 if verbosity > 0: |
376 author = item[u"author"] | 373 author = item["author"] |
377 published, updated = item[u"published"], item.get("updated") | 374 published, updated = item["published"], item.get("updated") |
378 else: | 375 else: |
379 author = published = updated = None | 376 author = published = updated = None |
380 if verbosity > 1: | 377 if verbosity > 1: |
381 tags = item.pop('tags', []) | 378 tags = item.pop('tags', []) |
382 else: | 379 else: |
383 tags = None | 380 tags = None |
384 content = item.get(u"content") | 381 content = item.get("content") |
385 | 382 |
386 if title: | 383 if title: |
387 print(A.color(A.BOLD, A.FG_CYAN, item[u"title"])) | 384 print((A.color(A.BOLD, A.FG_CYAN, item["title"]))) |
388 meta = [] | 385 meta = [] |
389 if author: | 386 if author: |
390 meta.append(A.color(A.FG_YELLOW, author)) | 387 meta.append(A.color(A.FG_YELLOW, author)) |
391 if published: | 388 if published: |
392 meta.append(A.color(A.FG_YELLOW, u"on ", self.format_time(published))) | 389 meta.append(A.color(A.FG_YELLOW, "on ", self.format_time(published))) |
393 if updated != published: | 390 if updated != published: |
394 meta.append( | 391 meta.append( |
395 A.color(A.FG_YELLOW, u"(updated on ", self.format_time(updated), u")") | 392 A.color(A.FG_YELLOW, "(updated on ", self.format_time(updated), ")") |
396 ) | 393 ) |
397 print(u" ".join(meta)) | 394 print((" ".join(meta))) |
398 if tags: | 395 if tags: |
399 print(A.color(A.FG_MAGENTA, u", ".join(tags))) | 396 print((A.color(A.FG_MAGENTA, ", ".join(tags)))) |
400 if (title or tags) and content: | 397 if (title or tags) and content: |
401 print("") | 398 print("") |
402 if content: | 399 if content: |
403 self.disp(content) | 400 self.disp(content) |
404 | 401 |
405 print(u"\n" + sep + "\n") | 402 print(("\n" + sep + "\n")) |
406 | 403 |
407 def mbGetCb(self, mb_result): | 404 def mbGetCb(self, mb_result): |
408 items, metadata = mb_result | 405 items, metadata = mb_result |
409 items = [data_format.deserialise(i) for i in items] | 406 items = [data_format.deserialise(i) for i in items] |
410 mb_result = items, metadata | 407 mb_result = items, metadata |
411 self.output(mb_result) | 408 self.output(mb_result) |
412 self.host.quit(C.EXIT_OK) | 409 self.host.quit(C.EXIT_OK) |
413 | 410 |
414 def mbGetEb(self, failure_): | 411 def mbGetEb(self, failure_): |
415 self.disp(u"can't get blog items: {reason}".format(reason=failure_), error=True) | 412 self.disp("can't get blog items: {reason}".format(reason=failure_), error=True) |
416 self.host.quit(C.EXIT_BRIDGE_ERRBACK) | 413 self.host.quit(C.EXIT_BRIDGE_ERRBACK) |
417 | 414 |
418 def start(self): | 415 def start(self): |
419 self.host.bridge.mbGet( | 416 self.host.bridge.mbGet( |
420 self.args.service, | 417 self.args.service, |
436 "edit", | 433 "edit", |
437 use_pubsub=True, | 434 use_pubsub=True, |
438 pubsub_flags={C.SINGLE_ITEM}, | 435 pubsub_flags={C.SINGLE_ITEM}, |
439 use_draft=True, | 436 use_draft=True, |
440 use_verbose=True, | 437 use_verbose=True, |
441 help=_(u"edit an existing or new blog post"), | 438 help=_("edit an existing or new blog post"), |
442 ) | 439 ) |
443 BlogPublishCommon.__init__(self) | 440 BlogPublishCommon.__init__(self) |
444 common.BaseEdit.__init__(self, self.host, BLOG_TMP_DIR, use_metadata=True) | 441 common.BaseEdit.__init__(self, self.host, BLOG_TMP_DIR, use_metadata=True) |
445 | 442 |
446 def add_parser_options(self): | 443 def add_parser_options(self): |
447 BlogPublishCommon.add_parser_options(self) | 444 BlogPublishCommon.add_parser_options(self) |
448 self.parser.add_argument( | 445 self.parser.add_argument( |
449 "-P", | 446 "-P", |
450 "--preview", | 447 "--preview", |
451 action="store_true", | 448 action="store_true", |
452 help=_(u"launch a blog preview in parallel"), | 449 help=_("launch a blog preview in parallel"), |
453 ) | 450 ) |
454 | 451 |
455 def buildMetadataFile(self, content_file_path, mb_data=None): | 452 def buildMetadataFile(self, content_file_path, mb_data=None): |
456 """Build a metadata file using json | 453 """Build a metadata file using json |
457 | 454 |
463 """ | 460 """ |
464 # we first construct metadata from edited item ones and CLI argumments | 461 # we first construct metadata from edited item ones and CLI argumments |
465 # or re-use the existing one if it exists | 462 # or re-use the existing one if it exists |
466 meta_file_path = os.path.splitext(content_file_path)[0] + common.METADATA_SUFF | 463 meta_file_path = os.path.splitext(content_file_path)[0] + common.METADATA_SUFF |
467 if os.path.exists(meta_file_path): | 464 if os.path.exists(meta_file_path): |
468 self.disp(u"Metadata file already exists, we re-use it") | 465 self.disp("Metadata file already exists, we re-use it") |
469 try: | 466 try: |
470 with open(meta_file_path, "rb") as f: | 467 with open(meta_file_path, "rb") as f: |
471 mb_data = json.load(f) | 468 mb_data = json.load(f) |
472 except (OSError, IOError, ValueError) as e: | 469 except (OSError, IOError, ValueError) as e: |
473 self.disp( | 470 self.disp( |
474 u"Can't read existing metadata file at {path}, aborting: {reason}".format( | 471 "Can't read existing metadata file at {path}, aborting: {reason}".format( |
475 path=meta_file_path, reason=e | 472 path=meta_file_path, reason=e |
476 ), | 473 ), |
477 error=True, | 474 error=True, |
478 ) | 475 ) |
479 self.host.quit(1) | 476 self.host.quit(1) |
511 # we first create metadata file | 508 # we first create metadata file |
512 meta_ori, meta_file_path = self.buildMetadataFile(content_file_path, mb_data) | 509 meta_ori, meta_file_path = self.buildMetadataFile(content_file_path, mb_data) |
513 | 510 |
514 # do we need a preview ? | 511 # do we need a preview ? |
515 if self.args.preview: | 512 if self.args.preview: |
516 self.disp(u"Preview requested, launching it", 1) | 513 self.disp("Preview requested, launching it", 1) |
517 # we redirect outputs to /dev/null to avoid console pollution in editor | 514 # we redirect outputs to /dev/null to avoid console pollution in editor |
518 # if user wants to see messages, (s)he can call "blog preview" directly | 515 # if user wants to see messages, (s)he can call "blog preview" directly |
519 DEVNULL = open(os.devnull, "wb") | 516 DEVNULL = open(os.devnull, "wb") |
520 subprocess.Popen( | 517 subprocess.Popen( |
521 [ | 518 [ |
550 mb_data = data_format.serialise(mb_data) | 547 mb_data = data_format.serialise(mb_data) |
551 | 548 |
552 self.host.bridge.mbSend( | 549 self.host.bridge.mbSend( |
553 self.pubsub_service, self.pubsub_node, mb_data, self.profile | 550 self.pubsub_service, self.pubsub_node, mb_data, self.profile |
554 ) | 551 ) |
555 self.disp(u"Blog item published") | 552 self.disp("Blog item published") |
556 | 553 |
557 def getTmpSuff(self): | 554 def getTmpSuff(self): |
558 # we get current syntax to determine file extension | 555 # we get current syntax to determine file extension |
559 return SYNTAX_EXT.get(self.current_syntax, SYNTAX_EXT[""]) | 556 return SYNTAX_EXT.get(self.current_syntax, SYNTAX_EXT[""]) |
560 | 557 |
575 content, SYNTAX_XHTML, self.current_syntax, False, self.profile | 572 content, SYNTAX_XHTML, self.current_syntax, False, self.profile |
576 ) | 573 ) |
577 if content and self.current_syntax == SYNTAX_XHTML: | 574 if content and self.current_syntax == SYNTAX_XHTML: |
578 content = content.strip() | 575 content = content.strip() |
579 if not content.startswith('<div>'): | 576 if not content.startswith('<div>'): |
580 content = u'<div>' + content + u'</div>' | 577 content = '<div>' + content + '</div>' |
581 try: | 578 try: |
582 from lxml import etree | 579 from lxml import etree |
583 except ImportError: | 580 except ImportError: |
584 self.disp(_(u"You need lxml to edit pretty XHTML")) | 581 self.disp(_("You need lxml to edit pretty XHTML")) |
585 else: | 582 else: |
586 parser = etree.XMLParser(remove_blank_text=True) | 583 parser = etree.XMLParser(remove_blank_text=True) |
587 root = etree.fromstring(content, parser) | 584 root = etree.fromstring(content, parser) |
588 content = etree.tostring(root, encoding=unicode, pretty_print=True) | 585 content = etree.tostring(root, encoding=str, pretty_print=True) |
589 | 586 |
590 return content, mb_data, mb_data["id"] | 587 return content, mb_data, mb_data["id"] |
591 | 588 |
592 def start(self): | 589 def start(self): |
593 # if there are user defined extension, we use them | 590 # if there are user defined extension, we use them |
597 try: | 594 try: |
598 self._current_syntax = self.args.syntax = self.host.bridge.syntaxGet( | 595 self._current_syntax = self.args.syntax = self.host.bridge.syntaxGet( |
599 self.current_syntax | 596 self.current_syntax |
600 ) | 597 ) |
601 except Exception as e: | 598 except Exception as e: |
602 if "NotFound" in unicode( | 599 if "NotFound" in str( |
603 e | 600 e |
604 ): # FIXME: there is not good way to check bridge errors | 601 ): # FIXME: there is not good way to check bridge errors |
605 self.parser.error( | 602 self.parser.error( |
606 _(u"unknown syntax requested ({syntax})").format( | 603 _("unknown syntax requested ({syntax})").format( |
607 syntax=self.args.syntax | 604 syntax=self.args.syntax |
608 ) | 605 ) |
609 ) | 606 ) |
610 else: | 607 else: |
611 raise e | 608 raise e |
625 class Preview(base.CommandBase, common.BaseEdit): | 622 class Preview(base.CommandBase, common.BaseEdit): |
626 # TODO: need to be rewritten with template output | 623 # TODO: need to be rewritten with template output |
627 | 624 |
628 def __init__(self, host): | 625 def __init__(self, host): |
629 base.CommandBase.__init__( | 626 base.CommandBase.__init__( |
630 self, host, "preview", use_verbose=True, help=_(u"preview a blog content") | 627 self, host, "preview", use_verbose=True, help=_("preview a blog content") |
631 ) | 628 ) |
632 common.BaseEdit.__init__(self, self.host, BLOG_TMP_DIR, use_metadata=True) | 629 common.BaseEdit.__init__(self, self.host, BLOG_TMP_DIR, use_metadata=True) |
633 | 630 |
634 def add_parser_options(self): | 631 def add_parser_options(self): |
635 self.parser.add_argument( | 632 self.parser.add_argument( |
636 "--inotify", | 633 "--inotify", |
637 type=str, | 634 type=str, |
638 choices=("auto", "true", "false"), | 635 choices=("auto", "true", "false"), |
639 default=u"auto", | 636 default="auto", |
640 help=_(u"use inotify to handle preview"), | 637 help=_("use inotify to handle preview"), |
641 ) | 638 ) |
642 self.parser.add_argument( | 639 self.parser.add_argument( |
643 "file", | 640 "file", |
644 type=base.unicode_decoder, | |
645 nargs="?", | 641 nargs="?", |
646 default=u"current", | 642 default="current", |
647 help=_(u"path to the content file"), | 643 help=_("path to the content file"), |
648 ) | 644 ) |
649 | 645 |
650 def showPreview(self): | 646 def showPreview(self): |
651 # we implement showPreview here so we don't have to import webbrowser and urllib | 647 # we implement showPreview here so we don't have to import webbrowser and urllib |
652 # when preview is not used | 648 # when preview is not used |
658 args = common.parse_args( | 654 args = common.parse_args( |
659 self.host, cmd_line, url=url, preview_file=self.preview_file_path | 655 self.host, cmd_line, url=url, preview_file=self.preview_file_path |
660 ) | 656 ) |
661 if not args: | 657 if not args: |
662 self.disp( | 658 self.disp( |
663 u'Couln\'t find command in "{name}", abording'.format(name=opt_name), | 659 'Couln\'t find command in "{name}", abording'.format(name=opt_name), |
664 error=True, | 660 error=True, |
665 ) | 661 ) |
666 self.host.quit(1) | 662 self.host.quit(1) |
667 subprocess.Popen(args) | 663 subprocess.Popen(args) |
668 | 664 |
681 content = self.host.bridge.syntaxConvert( | 677 content = self.host.bridge.syntaxConvert( |
682 content, self.syntax, SYNTAX_XHTML, True, self.profile | 678 content, self.syntax, SYNTAX_XHTML, True, self.profile |
683 ) | 679 ) |
684 | 680 |
685 xhtml = ( | 681 xhtml = ( |
686 u'<html xmlns="http://www.w3.org/1999/xhtml">' | 682 '<html xmlns="http://www.w3.org/1999/xhtml">' |
687 u'<head><meta http-equiv="Content-Type" content="text/html;charset=utf-8" />' | 683 '<head><meta http-equiv="Content-Type" content="text/html;charset=utf-8" />' |
688 u"</head>" | 684 "</head>" |
689 u"<body>{}</body>" | 685 "<body>{}</body>" |
690 u"</html>" | 686 "</html>" |
691 ).format(content) | 687 ).format(content) |
692 | 688 |
693 with open(self.preview_file_path, "wb") as f: | 689 with open(self.preview_file_path, "wb") as f: |
694 f.write(xhtml.encode("utf-8")) | 690 f.write(xhtml.encode("utf-8")) |
695 | 691 |
696 def start(self): | 692 def start(self): |
697 import webbrowser | 693 import webbrowser |
698 import urllib | 694 import urllib.request, urllib.parse, urllib.error |
699 | 695 |
700 self.webbrowser, self.urllib = webbrowser, urllib | 696 self.webbrowser, self.urllib = webbrowser, urllib |
701 | 697 |
702 if self.args.inotify != "false": | 698 if self.args.inotify != "false": |
703 try: | 699 try: |
706 from inotify.calls import InotifyError | 702 from inotify.calls import InotifyError |
707 except ImportError: | 703 except ImportError: |
708 if self.args.inotify == "auto": | 704 if self.args.inotify == "auto": |
709 inotify = None | 705 inotify = None |
710 self.disp( | 706 self.disp( |
711 u"inotify module not found, deactivating feature. You can install" | 707 "inotify module not found, deactivating feature. You can install" |
712 u" it with {install}".format(install=INOTIFY_INSTALL) | 708 " it with {install}".format(install=INOTIFY_INSTALL) |
713 ) | 709 ) |
714 else: | 710 else: |
715 self.disp( | 711 self.disp( |
716 u"inotify not found, can't activate the feature! Please install " | 712 "inotify not found, can't activate the feature! Please install " |
717 u"it with {install}".format(install=INOTIFY_INSTALL), | 713 "it with {install}".format(install=INOTIFY_INSTALL), |
718 error=True, | 714 error=True, |
719 ) | 715 ) |
720 self.host.quit(1) | 716 self.host.quit(1) |
721 else: | 717 else: |
722 # we deactivate logging in inotify, which is quite annoying | 718 # we deactivate logging in inotify, which is quite annoying |
723 try: | 719 try: |
724 inotify.adapters._LOGGER.setLevel(40) | 720 inotify.adapters._LOGGER.setLevel(40) |
725 except AttributeError: | 721 except AttributeError: |
726 self.disp( | 722 self.disp( |
727 u"Logger doesn't exists, inotify may have chanded", error=True | 723 "Logger doesn't exists, inotify may have chanded", error=True |
728 ) | 724 ) |
729 else: | 725 else: |
730 inotify = None | 726 inotify = None |
731 | 727 |
732 sat_conf = config.parseMainConf() | 728 sat_conf = config.parseMainConf() |
766 | 762 |
767 if inotify is None: | 763 if inotify is None: |
768 # XXX: we don't delete file automatically because browser need it | 764 # XXX: we don't delete file automatically because browser need it |
769 # (and webbrowser.open can return before it is read) | 765 # (and webbrowser.open can return before it is read) |
770 self.disp( | 766 self.disp( |
771 u"temporary file created at {}\nthis file will NOT BE DELETED " | 767 "temporary file created at {}\nthis file will NOT BE DELETED " |
772 u"AUTOMATICALLY, please delete it yourself when you have finished".format( | 768 "AUTOMATICALLY, please delete it yourself when you have finished".format( |
773 self.preview_file_path | 769 self.preview_file_path |
774 ) | 770 ) |
775 ) | 771 ) |
776 open_cb() | 772 open_cb() |
777 else: | 773 else: |
791 add_watch() | 787 add_watch() |
792 | 788 |
793 try: | 789 try: |
794 for event in i.event_gen(): | 790 for event in i.event_gen(): |
795 if event is not None: | 791 if event is not None: |
796 self.disp(u"Content updated", 1) | 792 self.disp("Content updated", 1) |
797 if {"IN_DELETE_SELF", "IN_MOVE_SELF"}.intersection(event[1]): | 793 if {"IN_DELETE_SELF", "IN_MOVE_SELF"}.intersection(event[1]): |
798 self.disp( | 794 self.disp( |
799 u"{} event catched, changing the watch".format( | 795 "{} event catched, changing the watch".format( |
800 ", ".join(event[1]) | 796 ", ".join(event[1]) |
801 ), | 797 ), |
802 2, | 798 2, |
803 ) | 799 ) |
804 i.remove_watch(self.content_file_path) | 800 i.remove_watch(self.content_file_path) |
811 add_watch() | 807 add_watch() |
812 self.updateContent() | 808 self.updateContent() |
813 update_cb() | 809 update_cb() |
814 except InotifyError: | 810 except InotifyError: |
815 self.disp( | 811 self.disp( |
816 u"Can't catch inotify events, as the file been deleted?", error=True | 812 "Can't catch inotify events, as the file been deleted?", error=True |
817 ) | 813 ) |
818 finally: | 814 finally: |
819 os.unlink(self.preview_file_path) | 815 os.unlink(self.preview_file_path) |
820 try: | 816 try: |
821 i.remove_watch(self.content_file_path) | 817 i.remove_watch(self.content_file_path) |
828 super(Import, self).__init__( | 824 super(Import, self).__init__( |
829 host, | 825 host, |
830 "import", | 826 "import", |
831 use_pubsub=True, | 827 use_pubsub=True, |
832 use_progress=True, | 828 use_progress=True, |
833 help=_(u"import an external blog"), | 829 help=_("import an external blog"), |
834 ) | 830 ) |
835 self.need_loop = True | 831 self.need_loop = True |
836 | 832 |
837 def add_parser_options(self): | 833 def add_parser_options(self): |
838 self.parser.add_argument( | 834 self.parser.add_argument( |
839 "importer", | 835 "importer", |
840 type=base.unicode_decoder, | |
841 nargs="?", | 836 nargs="?", |
842 help=_(u"importer name, nothing to display importers list"), | 837 help=_("importer name, nothing to display importers list"), |
843 ) | 838 ) |
844 self.parser.add_argument( | 839 self.parser.add_argument( |
845 "--host", type=base.unicode_decoder, help=_(u"original blog host") | 840 "--host", help=_("original blog host") |
846 ) | 841 ) |
847 self.parser.add_argument( | 842 self.parser.add_argument( |
848 "--no-images-upload", | 843 "--no-images-upload", |
849 action="store_true", | 844 action="store_true", |
850 help=_(u"do *NOT* upload images (default: do upload images)"), | 845 help=_("do *NOT* upload images (default: do upload images)"), |
851 ) | 846 ) |
852 self.parser.add_argument( | 847 self.parser.add_argument( |
853 "--upload-ignore-host", | 848 "--upload-ignore-host", |
854 help=_(u"do not upload images from this host (default: upload all images)"), | 849 help=_("do not upload images from this host (default: upload all images)"), |
855 ) | 850 ) |
856 self.parser.add_argument( | 851 self.parser.add_argument( |
857 "--ignore-tls-errors", | 852 "--ignore-tls-errors", |
858 action="store_true", | 853 action="store_true", |
859 help=_("ignore invalide TLS certificate for uploads"), | 854 help=_("ignore invalide TLS certificate for uploads"), |
862 "-o", | 857 "-o", |
863 "--option", | 858 "--option", |
864 action="append", | 859 action="append", |
865 nargs=2, | 860 nargs=2, |
866 default=[], | 861 default=[], |
867 metavar=(u"NAME", u"VALUE"), | 862 metavar=("NAME", "VALUE"), |
868 help=_(u"importer specific options (see importer description)"), | 863 help=_("importer specific options (see importer description)"), |
869 ) | 864 ) |
870 self.parser.add_argument( | 865 self.parser.add_argument( |
871 "location", | 866 "location", |
872 type=base.unicode_decoder, | |
873 nargs="?", | 867 nargs="?", |
874 help=_( | 868 help=_( |
875 u"importer data location (see importer description), nothing to show " | 869 "importer data location (see importer description), nothing to show " |
876 u"importer description" | 870 "importer description" |
877 ), | 871 ), |
878 ) | 872 ) |
879 | 873 |
880 def onProgressStarted(self, metadata): | 874 def onProgressStarted(self, metadata): |
881 self.disp(_(u"Blog upload started"), 2) | 875 self.disp(_("Blog upload started"), 2) |
882 | 876 |
883 def onProgressFinished(self, metadata): | 877 def onProgressFinished(self, metadata): |
884 self.disp(_(u"Blog uploaded successfully"), 2) | 878 self.disp(_("Blog uploaded successfully"), 2) |
885 redirections = { | 879 redirections = { |
886 k[len(URL_REDIRECT_PREFIX) :]: v | 880 k[len(URL_REDIRECT_PREFIX) :]: v |
887 for k, v in metadata.iteritems() | 881 for k, v in metadata.items() |
888 if k.startswith(URL_REDIRECT_PREFIX) | 882 if k.startswith(URL_REDIRECT_PREFIX) |
889 } | 883 } |
890 if redirections: | 884 if redirections: |
891 conf = u"\n".join( | 885 conf = "\n".join( |
892 [ | 886 [ |
893 u"url_redirections_dict = {}".format( | 887 "url_redirections_dict = {}".format( |
894 # we need to add ' ' before each new line | 888 # we need to add ' ' before each new line |
895 # and to double each '%' for ConfigParser | 889 # and to double each '%' for ConfigParser |
896 u"\n ".join( | 890 "\n ".join( |
897 json.dumps(redirections, indent=1, separators=(",", ": ")) | 891 json.dumps(redirections, indent=1, separators=(",", ": ")) |
898 .replace(u"%", u"%%") | 892 .replace("%", "%%") |
899 .split(u"\n") | 893 .split("\n") |
900 ) | 894 ) |
901 ), | 895 ), |
902 ] | 896 ] |
903 ) | 897 ) |
904 self.disp( | 898 self.disp( |
905 _( | 899 _( |
906 u"\nTo redirect old URLs to new ones, put the following lines in your" | 900 "\nTo redirect old URLs to new ones, put the following lines in your" |
907 u" sat.conf file, in [libervia] section:\n\n{conf}".format(conf=conf) | 901 " sat.conf file, in [libervia] section:\n\n{conf}".format(conf=conf) |
908 ) | 902 ) |
909 ) | 903 ) |
910 | 904 |
911 def onProgressError(self, error_msg): | 905 def onProgressError(self, error_msg): |
912 self.disp(_(u"Error while uploading blog: {}").format(error_msg), error=True) | 906 self.disp(_("Error while uploading blog: {}").format(error_msg), error=True) |
913 | 907 |
914 def error(self, failure): | 908 def error(self, failure): |
915 self.disp( | 909 self.disp( |
916 _("Error while trying to upload a blog: {reason}").format(reason=failure), | 910 _("Error while trying to upload a blog: {reason}").format(reason=failure), |
917 error=True, | 911 error=True, |
922 if self.args.location is None: | 916 if self.args.location is None: |
923 for name in ("option", "service", "no_images_upload"): | 917 for name in ("option", "service", "no_images_upload"): |
924 if getattr(self.args, name): | 918 if getattr(self.args, name): |
925 self.parser.error( | 919 self.parser.error( |
926 _( | 920 _( |
927 u"{name} argument can't be used without location argument" | 921 "{name} argument can't be used without location argument" |
928 ).format(name=name) | 922 ).format(name=name) |
929 ) | 923 ) |
930 if self.args.importer is None: | 924 if self.args.importer is None: |
931 self.disp( | 925 self.disp( |
932 u"\n".join( | 926 "\n".join( |
933 [ | 927 [ |
934 u"{}: {}".format(name, desc) | 928 "{}: {}".format(name, desc) |
935 for name, desc in self.host.bridge.blogImportList() | 929 for name, desc in self.host.bridge.blogImportList() |
936 ] | 930 ] |
937 ) | 931 ) |
938 ) | 932 ) |
939 else: | 933 else: |
940 try: | 934 try: |
941 short_desc, long_desc = self.host.bridge.blogImportDesc( | 935 short_desc, long_desc = self.host.bridge.blogImportDesc( |
942 self.args.importer | 936 self.args.importer |
943 ) | 937 ) |
944 except Exception as e: | 938 except Exception as e: |
945 msg = [l for l in unicode(e).split("\n") if l][ | 939 msg = [l for l in str(e).split("\n") if l][ |
946 -1 | 940 -1 |
947 ] # we only keep the last line | 941 ] # we only keep the last line |
948 self.disp(msg) | 942 self.disp(msg) |
949 self.host.quit(1) | 943 self.host.quit(1) |
950 else: | 944 else: |
951 self.disp( | 945 self.disp( |
952 u"{name}: {short_desc}\n\n{long_desc}".format( | 946 "{name}: {short_desc}\n\n{long_desc}".format( |
953 name=self.args.importer, | 947 name=self.args.importer, |
954 short_desc=short_desc, | 948 short_desc=short_desc, |
955 long_desc=long_desc, | 949 long_desc=long_desc, |
956 ) | 950 ) |
957 ) | 951 ) |
965 options["ignore_tls_errors"] = C.BOOL_TRUE | 959 options["ignore_tls_errors"] = C.BOOL_TRUE |
966 if self.args.no_images_upload: | 960 if self.args.no_images_upload: |
967 options["upload_images"] = C.BOOL_FALSE | 961 options["upload_images"] = C.BOOL_FALSE |
968 if self.args.upload_ignore_host: | 962 if self.args.upload_ignore_host: |
969 self.parser.error( | 963 self.parser.error( |
970 u"upload-ignore-host option can't be used when no-images-upload " | 964 "upload-ignore-host option can't be used when no-images-upload " |
971 u"is set" | 965 "is set" |
972 ) | 966 ) |
973 elif self.args.upload_ignore_host: | 967 elif self.args.upload_ignore_host: |
974 options["upload_ignore_host"] = self.args.upload_ignore_host | 968 options["upload_ignore_host"] = self.args.upload_ignore_host |
975 | 969 |
976 def gotId(id_): | 970 def gotId(id_): |