changeset 3028:ab2696e34d29
Python 3 port:
/!\ this is a huge commit
/!\ starting from this commit, SàT needs Python 3.6+
/!\ SàT may be unstable or some features may not work anymore; this will improve with time
This patch ports the backend, bridge and frontends to Python 3.
Roughly, this has been done this way:
- the 2to3 tool has been applied (with Python 3.7)
- all references to python2 have been replaced with python3 (notably shebangs)
- fixed files not handled by 2to3 (notably the shell script)
- several manual fixes
- fixed issues reported by Python 3 that were not handled in Python 2
- replaced "async" with "async_" where needed (it's a reserved keyword since Python 3.7; a sketch follows this list)
- replaced zope's "implements" with the @implementer decorator (sketch below)
- temporary hack to handle data pickled in the database, as str or bytes may be returned; to be checked later (sketch below)
- fixed hash comparison for passwords (sketch below)
- removed some code which is not needed anymore with Python 3
- deactivated some code which needs to be checked (notably certificate validation)
- tested with jp, fixed reported issues until some basic commands worked
- ported Primitivus (after porting dependencies like urwid satext)
- more manual fixes
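
For illustration, here is a minimal sketch of the "async" → "async_" renaming combined with the getargspec → getfullargspec switch used in the bridge code; the helper below is simplified and not part of the repository:

    import inspect

    def add_method_args(method, async_=False):
        # "async" is a reserved keyword since Python 3.7, hence the "async_" name;
        # inspect.getargspec() is deprecated in Python 3, getfullargspec() replaces it
        spec = inspect.getfullargspec(method)
        arguments = list(spec.args)
        defaults = list(spec.defaults or [])
        if async_:
            # asynchronous bridge methods receive extra callback/errback arguments
            arguments.extend(["callback", "errback"])
            defaults.extend([None, None])
        return arguments, defaults

    def example(jid, profile_key="@DEFAULT@"):
        return jid

    print(add_method_args(example, async_=True))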
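
The zope.interface change boils down to the following; the interface and class names are made up for illustration, and zope.interface must be installed:

    from zope.interface import Interface, implementer

    class IExample(Interface):
        """Hypothetical marker interface, for illustration only."""

    # Python 2 style, which relies on a metaclass hack and breaks under Python 3:
    #     class Handler(object):
    #         implements(IExample)

    @implementer(IExample)
    class Handler(object):
        pass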
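
The pickled-data hack lives in the storage code, which is not quoted on this page; roughly, it has to cope with values coming back from the database as either str or bytes, along the lines of this sketch (the real code may differ):

    import pickle

    def load_pickled(value):
        # the SQLite layer may return text or bytes depending on how the value
        # was stored by the Python 2 backend
        if isinstance(value, str):
            value = value.encode("utf-8")
        # "latin-1" lets Python 3 unpickle raw 8-bit strings written by Python 2
        return pickle.loads(value, encoding="latin-1")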
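
The password hash fix is about comparing values of the same type: under Python 3, hashlib digests are bytes and hex digests are str, so a mixed str/bytes comparison silently returns False. A minimal sketch, not SàT's actual hashing scheme:

    import hashlib

    def check_password(given, stored_hex_digest):
        # encode the text password before hashing, then compare str with str
        digest = hashlib.sha256(given.encode("utf-8")).hexdigest()
        return digest == stored_hex_digest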
--- a/CHANGELOG	Wed Jul 31 11:31:22 2019 +0200
+++ b/CHANGELOG	Tue Aug 13 19:08:41 2019 +0200
@@ -1,5 +1,8 @@
 All theses changelogs are not exhaustive, please check the Mercurial repository for more details.
+v 0.8.0 « La Cecília » (NOT RELEASED YET):
+    - Python 3 port
+
 v 0.7.0 « La Commune » (24/07/19):
     This version is a huge gap with previous one, changelog only show a part of novelties.
     This is also the first "general audience" version.
--- a/bin/sat	Wed Jul 31 11:31:22 2019 +0200
+++ b/bin/sat	Tue Aug 13 19:08:41 2019 +0200
@@ -2,7 +2,7 @@
 DEBUG=""
 DAEMON=""
-PYTHON="python2"
+PYTHON="python3"
 TWISTD="$(which twistd)"
 kill_process() {
@@ -29,17 +29,13 @@
 eval `"$PYTHON" << PYTHONEND
 from sat.core.constants import Const as C
 from sat.memory.memory import fixLocalDir
-from ConfigParser import SafeConfigParser
+from configparser import ConfigParser
 from os.path import expanduser, join
 import sys
-import codecs
-import locale
-
-sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
 fixLocalDir() # XXX: tmp update code, will be removed in the future
-config = SafeConfigParser(defaults=C.DEFAULT_CONFIG)
+config = ConfigParser(defaults=C.DEFAULT_CONFIG)
 try:
     config.read(C.CONFIG_FILES)
 except:
@@ -52,7 +48,7 @@
 env.append("LOG_DIR='%s'" % join(expanduser(config.get('DEFAULT', 'log_dir')),''))
 env.append("APP_NAME='%s'" % C.APP_NAME)
 env.append("APP_NAME_FILE='%s'" % C.APP_NAME_FILE)
-print ";".join(env)
+print (";".join(env))
 PYTHONEND
 `
 APP_NAME="$APP_NAME"
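
As a side note, Python 3's ConfigParser already provides the safe interpolation that SafeConfigParser added in Python 2, and the SafeConfigParser alias is deprecated, so the bootstrap keeps the same behaviour. A minimal equivalent read, with illustrative paths and defaults:

    from configparser import ConfigParser

    defaults = {"local_dir": "~/.local/share/sat", "log_dir": "%(local_dir)s"}
    config = ConfigParser(defaults=defaults)
    config.read(["/etc/sat.conf"])  # missing files are silently skipped
    print(config.get("DEFAULT", "log_dir"))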
--- a/sat/__init__.py	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/__init__.py	Tue Aug 13 19:08:41 2019 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # SàT: a XMPP client
--- a/sat/bridge/bridge_constructor/base_constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/base_constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -20,7 +20,7 @@ """base constructor class""" from sat.bridge.bridge_constructor.constants import Const as C -from ConfigParser import NoOptionError +from configparser import NoOptionError import sys import os import os.path @@ -191,7 +191,7 @@ for arg in self.argumentsParser(signature): attr_string.append( ( - "unicode(%(name)s)%(default)s" + "str(%(name)s)%(default)s" if (unicode_protect and arg == "s") else "%(name)s%(default)s" ) @@ -240,7 +240,7 @@ method = self.generateCoreSide elif side == "frontend": if not self.FRONTEND_ACTIVATE: - print(u"This constructor only handle core, please use core side") + print("This constructor only handle core, please use core side") sys.exit(1) method = self.generateFrontendSide except AttributeError: @@ -271,7 +271,7 @@ sections.sort() for section in sections: function = self.getValues(section) - print("Adding %s %s" % (section, function["type"])) + print(("Adding %s %s" % (section, function["type"]))) default = self.getDefault(section) arg_doc = self.getArgumentsDoc(section) async_ = "async" in self.getFlags(section) @@ -291,7 +291,7 @@ ) extend_method(completion, function, default, arg_doc, async_) - for part, fmt in FORMATS.iteritems(): + for part, fmt in FORMATS.items(): if part.startswith(function["type"]): parts[part.upper()].append(fmt.format(**completion)) @@ -300,7 +300,7 @@ bridge = [] const_override = { env[len(C.ENV_OVERRIDE) :]: v - for env, v in os.environ.iteritems() + for env, v in os.environ.items() if env.startswith(C.ENV_OVERRIDE) } template_path = self.getTemplatePath(TEMPLATE) @@ -308,7 +308,7 @@ with open(template_path) as template: for line in template: - for part, extend_list in parts.iteritems(): + for part, extend_list in parts.items(): if line.startswith("##{}_PART##".format(part)): bridge.extend(extend_list) break @@ -317,7 +317,7 @@ if line.startswith("const_"): const_name = line[len("const_") : line.find(" = ")].strip() if const_name in const_override: - print("const {} overriden".format(const_name)) + print(("const {} overriden".format(const_name))) bridge.append( "const_{} = {}".format( const_name, const_override[const_name] @@ -326,7 +326,7 @@ continue bridge.append(line.replace("\n", "")) except IOError: - print("can't open template file [{}]".format(template_path)) + print(("can't open template file [{}]".format(template_path))) sys.exit(1) # now we write to final file @@ -348,15 +348,15 @@ os.mkdir(self.args.dest_dir) full_path = os.path.join(self.args.dest_dir, filename) if os.path.exists(full_path) and not self.args.force: - print( + print(( "The destination file [%s] already exists ! Use --force to overwrite it" % full_path - ) + )) try: with open(full_path, "w") as dest_file: dest_file.write("\n".join(file_buf)) except IOError: - print("Can't open destination file [%s]" % full_path) + print(("Can't open destination file [%s]" % full_path)) except OSError: print("It's not possible to generate the file, check your permissions") exit(1)
--- a/sat/bridge/bridge_constructor/bridge_constructor.py	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/bridge/bridge_constructor/bridge_constructor.py	Tue Aug 13 19:08:41 2019 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # SàT: a XMPP client
@@ -22,7 +22,7 @@
 from sat.bridge.bridge_constructor.constants import Const as C
 from sat.bridge.bridge_constructor import constructors, base_constructor
 import argparse
-from ConfigParser import SafeConfigParser as Parser
+from configparser import ConfigParser as Parser
 from importlib import import_module
 import os
 import os.path
@@ -87,7 +87,7 @@
     parser.add_argument(
         "-t",
         "--template",
-        type=file,
+        type=argparse.FileType(),
         default=default_template,
         help="use TEMPLATE to generate bridge (default: %(default)s)",
     )
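
The old type=file argument relied on the Python 2 file builtin, which no longer exists; argparse.FileType() is the standard replacement and opens the given path for reading. Usage sketch, with an illustrative file name:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-t", "--template",
        type=argparse.FileType(),  # opens the path and passes the file object
        help="use TEMPLATE to generate bridge",
    )
    args = parser.parse_args(["--template", "setup.py"])  # any readable file
    print(args.template.name)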
--- a/sat/bridge/bridge_constructor/bridge_template.ini	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/bridge/bridge_constructor/bridge_template.ini	Tue Aug 13 19:08:41 2019 +0200
@@ -507,7 +507,7 @@
 sig_out=s
 doc=Get XMLUI to manage trust for given encryption algorithm
 doc_param_0=to_jid: bare JID of entity to manage
-doc_param_0=namespace: namespace of the algorithm to manage
+doc_param_1=namespace: namespace of the algorithm to manage
 doc_param_2=%(doc_profile_key)s
 doc_return=(XMLUI) UI of the trust management
@@ -811,7 +811,7 @@
 sig_out=(asa(sss)a{sa(a{ss}as)})
 param_1_default=u''
 param_2_default=True
-param_3_default=u"@DEFAULT@"
+param_3_default="@DEFAULT@"
 doc=Discover infos on an entity
 doc_param_0=entity_jid: JID to discover
 doc_param_1=node: node to use
@@ -837,7 +837,7 @@
 sig_out=a(sss)
 param_1_default=u''
 param_2_default=True
-param_3_default=u"@DEFAULT@"
+param_3_default="@DEFAULT@"
 doc=Discover items of an entity
 doc_param_0=entity_jid: JID to discover
 doc_param_1=node: node to use
@@ -856,7 +856,7 @@
 param_4_default=True
 param_5_default=True
 param_6_default=False
-param_7_default=u"@DEFAULT@"
+param_7_default="@DEFAULT@"
 doc=Discover items of an entity
 doc_param_0=namespaces: namespaces of the features to check
 doc_param_1=identities: identities to filter
--- a/sat/bridge/bridge_constructor/constants.py	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/bridge/bridge_constructor/constants.py	Tue Aug 13 19:08:41 2019 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # SàT: a XMPP client
@@ -22,9 +22,9 @@
 class Const(constants.Const):
-    NAME = u"bridge_constructor"
+    NAME = "bridge_constructor"
     DEST_DIR_DEFAULT = "generated"
-    DESCRIPTION = u"""{name} Copyright (C) 2009-2019 Jérôme Poisson (aka Goffi)
+    DESCRIPTION = """{name} Copyright (C) 2009-2019 Jérôme Poisson (aka Goffi)
 This script construct a SàT bridge using the given protocol
--- a/sat/bridge/bridge_constructor/constructors/dbus-xml/constructor.py	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/bridge/bridge_constructor/constructors/dbus-xml/constructor.py	Tue Aug 13 19:08:41 2019 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # SàT: a XMPP client
@@ -53,7 +53,7 @@
         sections.sort()
         for section in sections:
             function = self.getValues(section)
-            print("Adding %s %s" % (section, function["type"]))
+            print(("Adding %s %s" % (section, function["type"])))
             new_elt = doc.createElement(
                 "method" if function["type"] == "method" else "signal"
             )
--- a/sat/bridge/bridge_constructor/constructors/dbus/constructor.py	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/bridge/bridge_constructor/constructors/dbus/constructor.py	Tue Aug 13 19:08:41 2019 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # SàT: a XMPP client
@@ -114,5 +114,5 @@
             % completion
         )
         completion["result"] = (
-            "unicode(%s)" if self.args.unicode and function["sig_out"] == "s" else "%s"
+            "str(%s)" if self.args.unicode and function["sig_out"] == "s" else "%s"
         ) % result
--- a/sat/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -101,11 +101,11 @@ raise InternalError callback = kwargs.pop("callback") errback = kwargs.pop("errback") - async = True + async_ = True else: - async = False + async_ = False result = self.cb[name](*args, **kwargs) - if async: + if async_: if not isinstance(result, Deferred): log.error("Asynchronous method [%s] does not return a Deferred." % name) raise AsyncNotDeferred @@ -170,9 +170,9 @@ i += 1 return attr - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False): """Dynamically add a method to Dbus Bridge""" - inspect_args = inspect.getargspec(method) + inspect_args = inspect.getfullargspec(method) _arguments = inspect_args.args _defaults = list(inspect_args.defaults or []) @@ -186,12 +186,12 @@ [repr(name)] + ( (_arguments + ["callback=callback", "errback=errback"]) - if async + if async_ else _arguments ) ) - if async: + if async_: _arguments.extend(["callback", "errback"]) _defaults.extend([None, None]) @@ -213,7 +213,7 @@ ) exec(code) # FIXME: to the same thing in a cleaner way, without compile/exec method = locals()[name] - async_callbacks = ("callback", "errback") if async else None + async_callbacks = ("callback", "errback") if async_ else None setattr( DbusObject, name, @@ -265,7 +265,7 @@ if e._dbus_error_name == "org.freedesktop.DBus.Error.NotSupported": log.error( _( - u"D-Bus is not launched, please see README to see instructions on how to launch it" + "D-Bus is not launched, please see README to see instructions on how to launch it" ) ) raise BridgeInitError @@ -277,11 +277,11 @@ log.debug("registering DBus bridge method [%s]" % name) self.dbus_bridge.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to Dbus Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [%s] to DBus bridge" % name) - self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async) + self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async_) self.register_method(name, method) def addSignal(self, name, int_suffix, signature, doc={}):
--- a/sat/bridge/bridge_constructor/constructors/dbus/dbus_frontend_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus/dbus_frontend_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 #-*- coding: utf-8 -*- # SAT communication bridge @@ -70,12 +70,12 @@ dbus_interface=const_INT_PREFIX + const_CORE_SUFFIX) self.db_plugin_iface = dbus.Interface(self.db_object, dbus_interface=const_INT_PREFIX + const_PLUGIN_SUFFIX) - except dbus.exceptions.DBusException, e: + except dbus.exceptions.DBusException as e: if e._dbus_error_name in ('org.freedesktop.DBus.Error.ServiceUnknown', 'org.freedesktop.DBus.Error.Spawn.ExecFailed'): errback(BridgeExceptionNoService()) elif e._dbus_error_name == 'org.freedesktop.DBus.Error.NotSupported': - log.error(_(u"D-Bus is not launched, please see README to see instructions on how to launch it")) + log.error(_("D-Bus is not launched, please see README to see instructions on how to launch it")) errback(BridgeInitError) else: errback(e) @@ -102,14 +102,14 @@ # - if we have the 'callback' and 'errback' keyword arguments # - or if the last two arguments are callable - async = False + async_ = False args = list(args) if kwargs: if 'callback' in kwargs: - async = True + async_ = True _callback = kwargs.pop('callback') - _errback = kwargs.pop('errback', lambda failure: log.error(unicode(failure))) + _errback = kwargs.pop('errback', lambda failure: log.error(str(failure))) try: args.append(kwargs.pop('profile')) except KeyError: @@ -119,15 +119,15 @@ pass # at this point, kwargs should be empty if kwargs: - log.warnings(u"unexpected keyword arguments, they will be ignored: {}".format(kwargs)) + log.warnings("unexpected keyword arguments, they will be ignored: {}".format(kwargs)) elif len(args) >= 2 and callable(args[-1]) and callable(args[-2]): - async = True + async_ = True _errback = args.pop() _callback = args.pop() method = getattr(self.db_plugin_iface, name) - if async: + if async_: kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = _callback kwargs['error_handler'] = lambda err: _errback(dbus_to_bridge_exception(err))
--- a/sat/bridge/bridge_constructor/constructors/embedded/constructor.py	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/bridge/bridge_constructor/constructors/embedded/constructor.py	Tue Aug 13 19:08:41 2019 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # SàT: a XMPP client
--- a/sat/bridge/bridge_constructor/constructors/embedded/embedded_frontend_template.py	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/bridge/bridge_constructor/constructors/embedded/embedded_frontend_template.py	Tue Aug 13 19:08:41 2019 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # SàT: a XMPP client
--- a/sat/bridge/bridge_constructor/constructors/embedded/embedded_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/embedded/embedded_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -25,7 +25,7 @@ class _Bridge(object): def __init__(self): - log.debug(u"Init embedded bridge...") + log.debug("Init embedded bridge...") self._methods_cbs = {} self._signals_cbs = {"core": {}, "plugin": {}} @@ -33,16 +33,16 @@ callback() def register_method(self, name, callback): - log.debug(u"registering embedded bridge method [{}]".format(name)) + log.debug("registering embedded bridge method [{}]".format(name)) if name in self._methods_cbs: - raise exceptions.ConflictError(u"method {} is already regitered".format(name)) + raise exceptions.ConflictError("method {} is already regitered".format(name)) self._methods_cbs[name] = callback def register_signal(self, functionName, handler, iface="core"): iface_dict = self._signals_cbs[iface] if functionName in iface_dict: raise exceptions.ConflictError( - u"signal {name} is already regitered for interface {iface}".format( + "signal {name} is already regitered for interface {iface}".format( name=functionName, iface=iface ) ) @@ -81,11 +81,11 @@ try: cb = self._signals_cbs["plugin"][name] except KeyError: - log.debug(u"ignoring signal {}: no callback registered".format(name)) + log.debug("ignoring signal {}: no callback registered".format(name)) else: cb(*args, **kwargs) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [{}] to embedded bridge".format(name)) self.register_method(name, method)
--- a/sat/bridge/bridge_constructor/constructors/mediawiki/constructor.py	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/bridge/bridge_constructor/constructors/mediawiki/constructor.py	Tue Aug 13 19:08:41 2019 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # SàT: a XMPP client
@@ -85,7 +85,7 @@
         sections.sort()
         for section in sections:
             function = self.getValues(section)
-            print("Adding %s %s" % (section, function["type"]))
+            print(("Adding %s %s" % (section, function["type"])))
             async_msg = """<br />'''This method is asynchronous'''"""
             deprecated_msg = """<br />'''<font color="#FF0000">/!\ WARNING /!\ : This method is deprecated, please don't use it !</font>'''"""
             signature_signal = (
@@ -161,7 +161,7 @@
                 else:
                     core_bridge.append(line.replace("\n", ""))
         except IOError:
-            print("Can't open template file [%s]" % template_path)
+            print(("Can't open template file [%s]" % template_path))
             sys.exit(1)
         # now we write to final file
--- a/sat/bridge/bridge_constructor/constructors/pb/constructor.py	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/bridge/bridge_constructor/constructors/pb/constructor.py	Tue Aug 13 19:08:41 2019 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # SàT: a XMPP client
--- a/sat/bridge/bridge_constructor/constructors/pb/pb_core_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/pb/pb_core_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -46,11 +46,11 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def sendSignalEb(self, failure, signal_name): log.error( - u"Error while sending signal {name}: {msg}".format( + "Error while sending signal {name}: {msg}".format( name=signal_name, msg=failure ) ) @@ -66,27 +66,27 @@ d.addErrback(self.sendSignalEb, name) if to_remove: for handler in to_remove: - log.debug(u"Removing signal handler for dead frontend") + log.debug("Removing signal handler for dead frontend") self.signals_handlers.remove(handler) def _bridgeDeactivateSignals(self): if hasattr(self, "signals_paused"): - log.warning(u"bridge signals already deactivated") + log.warning("bridge signals already deactivated") if self.signals_handler: self.signals_paused.extend(self.signals_handler) else: self.signals_paused = self.signals_handlers self.signals_handlers = [] - log.debug(u"bridge signals have been deactivated") + log.debug("bridge signals have been deactivated") def _bridgeReactivateSignals(self): try: self.signals_handlers = self.signals_paused except AttributeError: - log.debug(u"signals were already activated") + log.debug("signals were already activated") else: del self.signals_paused - log.debug(u"bridge signals have been reactivated") + log.debug("bridge signals have been reactivated") ##METHODS_PART## @@ -102,14 +102,14 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def register_method(self, name, callback): log.debug("registering PB bridge method [%s]" % name) setattr(self.root, "remote_" + name, callback) # self.root.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to PB Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method {name} to PB bridge".format(name=name))
--- a/sat/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py	Wed Jul 31 11:31:22 2019 +0200
+++ b/sat/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py	Tue Aug 13 19:08:41 2019 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # SAT communication bridge
@@ -28,7 +28,7 @@
 class SignalsHandler(pb.Referenceable):
     def __getattr__(self, name):
         if name.startswith("remote_"):
-            log.debug(u"calling an unregistered signal: {name}".format(name=name[7:]))
+            log.debug("calling an unregistered signal: {name}".format(name=name[7:]))
             return lambda *args, **kwargs: None
         else:
@@ -43,7 +43,7 @@
             pass
         else:
             raise exceptions.InternalError(
-                u"{name} signal handler has been registered twice".format(
+                "{name} signal handler has been registered twice".format(
                     name=method_name
                 )
             )
@@ -99,7 +99,7 @@
         d.addErrback(errback)
     def _initBridgeEb(self, failure):
-        log.error(u"Can't init bridge: {msg}".format(msg=failure))
+        log.error("Can't init bridge: {msg}".format(msg=failure))
     def _set_root(self, root):
         """set remote root object
@@ -112,7 +112,7 @@
         return d
     def _generic_errback(self, failure):
-        log.error(u"bridge failure: {}".format(failure))
+        log.error("bridge failure: {}".format(failure))
     def bridgeConnect(self, callback, errback):
         factory = pb.PBClientFactory()
--- a/sat/bridge/dbus_bridge.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/dbus_bridge.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -101,11 +101,11 @@ raise InternalError callback = kwargs.pop("callback") errback = kwargs.pop("errback") - async = True + async_ = True else: - async = False + async_ = False result = self.cb[name](*args, **kwargs) - if async: + if async_: if not isinstance(result, Deferred): log.error("Asynchronous method [%s] does not return a Deferred." % name) raise AsyncNotDeferred @@ -214,73 +214,73 @@ in_signature='s', out_signature='a(a{ss}si)', async_callbacks=None) def actionsGet(self, profile_key="@DEFAULT@"): - return self._callback("actionsGet", unicode(profile_key)) + return self._callback("actionsGet", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='', async_callbacks=None) def addContact(self, entity_jid, profile_key="@DEFAULT@"): - return self._callback("addContact", unicode(entity_jid), unicode(profile_key)) + return self._callback("addContact", str(entity_jid), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=('callback', 'errback')) def asyncDeleteProfile(self, profile, callback=None, errback=None): - return self._callback("asyncDeleteProfile", unicode(profile), callback=callback, errback=errback) + return self._callback("asyncDeleteProfile", str(profile), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sssis', out_signature='s', async_callbacks=('callback', 'errback')) def asyncGetParamA(self, name, category, attribute="value", security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("asyncGetParamA", unicode(name), unicode(category), unicode(attribute), security_limit, unicode(profile_key), callback=callback, errback=errback) + return self._callback("asyncGetParamA", str(name), str(category), str(attribute), security_limit, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sis', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def asyncGetParamsValuesFromCategory(self, category, security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("asyncGetParamsValuesFromCategory", unicode(category), security_limit, unicode(profile_key), callback=callback, errback=errback) + return self._callback("asyncGetParamsValuesFromCategory", str(category), security_limit, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssa{ss}', out_signature='b', async_callbacks=('callback', 'errback')) def connect(self, profile_key="@DEFAULT@", password='', options={}, callback=None, errback=None): - return self._callback("connect", unicode(profile_key), unicode(password), options, callback=callback, errback=errback) + return self._callback("connect", str(profile_key), str(password), options, callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='', async_callbacks=('callback', 'errback')) def delContact(self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("delContact", unicode(entity_jid), unicode(profile_key), callback=callback, 
errback=errback) + return self._callback("delContact", str(entity_jid), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='asa(ss)bbbbbs', out_signature='(a{sa(sss)}a{sa(sss)}a{sa(sss)})', async_callbacks=('callback', 'errback')) - def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoFindByFeatures", namespaces, identities, bare_jid, service, roster, own_jid, local_device, unicode(profile_key), callback=callback, errback=errback) + def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key="@DEFAULT@", callback=None, errback=None): + return self._callback("discoFindByFeatures", namespaces, identities, bare_jid, service, roster, own_jid, local_device, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssbs', out_signature='(asa(sss)a{sa(a{ss}as)})', async_callbacks=('callback', 'errback')) - def discoInfos(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoInfos", unicode(entity_jid), unicode(node), use_cache, unicode(profile_key), callback=callback, errback=errback) + def discoInfos(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None): + return self._callback("discoInfos", str(entity_jid), str(node), use_cache, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssbs', out_signature='a(sss)', async_callbacks=('callback', 'errback')) - def discoItems(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoItems", unicode(entity_jid), unicode(node), use_cache, unicode(profile_key), callback=callback, errback=errback) + def discoItems(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None): + return self._callback("discoItems", str(entity_jid), str(node), use_cache, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=('callback', 'errback')) def disconnect(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("disconnect", unicode(profile_key), callback=callback, errback=errback) + return self._callback("disconnect", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='s', async_callbacks=None) def encryptionNamespaceGet(self, arg_0): - return self._callback("encryptionNamespaceGet", unicode(arg_0)) + return self._callback("encryptionNamespaceGet", str(arg_0)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='aa{ss}', @@ -291,56 +291,56 @@ @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sss', out_signature='s', async_callbacks=('callback', 'errback')) - def encryptionTrustUIGet(self, namespace, arg_1, profile_key, callback=None, errback=None): - return self._callback("encryptionTrustUIGet", unicode(namespace), unicode(arg_1), unicode(profile_key), callback=callback, errback=errback) + def 
encryptionTrustUIGet(self, to_jid, namespace, profile_key, callback=None, errback=None): + return self._callback("encryptionTrustUIGet", str(to_jid), str(namespace), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def getConfig(self, section, name): - return self._callback("getConfig", unicode(section), unicode(name)) + return self._callback("getConfig", str(section), str(name)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a(sa{ss}as)', async_callbacks=('callback', 'errback')) def getContacts(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("getContacts", unicode(profile_key), callback=callback, errback=errback) + return self._callback("getContacts", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='as', async_callbacks=None) def getContactsFromGroup(self, group, profile_key="@DEFAULT@"): - return self._callback("getContactsFromGroup", unicode(group), unicode(profile_key)) + return self._callback("getContactsFromGroup", str(group), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='asass', out_signature='a{sa{ss}}', async_callbacks=None) def getEntitiesData(self, jids, keys, profile): - return self._callback("getEntitiesData", jids, keys, unicode(profile)) + return self._callback("getEntitiesData", jids, keys, str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sass', out_signature='a{ss}', async_callbacks=None) def getEntityData(self, jid, keys, profile): - return self._callback("getEntityData", unicode(jid), keys, unicode(profile)) + return self._callback("getEntityData", str(jid), keys, str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{ss}}', async_callbacks=('callback', 'errback')) def getFeatures(self, profile_key, callback=None, errback=None): - return self._callback("getFeatures", unicode(profile_key), callback=callback, errback=errback) + return self._callback("getFeatures", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def getMainResource(self, contact_jid, profile_key="@DEFAULT@"): - return self._callback("getMainResource", unicode(contact_jid), unicode(profile_key)) + return self._callback("getMainResource", str(contact_jid), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssss', out_signature='s', async_callbacks=None) def getParamA(self, name, category, attribute="value", profile_key="@DEFAULT@"): - return self._callback("getParamA", unicode(name), unicode(category), unicode(attribute), unicode(profile_key)) + return self._callback("getParamA", str(name), str(category), str(attribute), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='as', @@ -352,13 +352,13 @@ in_signature='iss', out_signature='s', async_callbacks=('callback', 'errback')) def getParamsUI(self, security_limit=-1, app='', profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("getParamsUI", security_limit, unicode(app), unicode(profile_key), callback=callback, errback=errback) + return self._callback("getParamsUI", security_limit, str(app), 
str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{s(sia{ss})}}', async_callbacks=None) def getPresenceStatuses(self, profile_key="@DEFAULT@"): - return self._callback("getPresenceStatuses", unicode(profile_key)) + return self._callback("getPresenceStatuses", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='', @@ -376,73 +376,73 @@ in_signature='s', out_signature='a{ss}', async_callbacks=None) def getWaitingSub(self, profile_key="@DEFAULT@"): - return self._callback("getWaitingSub", unicode(profile_key)) + return self._callback("getWaitingSub", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssiba{ss}s', out_signature='a(sdssa{ss}a{ss}sa{ss})', async_callbacks=('callback', 'errback')) def historyGet(self, from_jid, to_jid, limit, between=True, filters='', profile="@NONE@", callback=None, errback=None): - return self._callback("historyGet", unicode(from_jid), unicode(to_jid), limit, between, filters, unicode(profile), callback=callback, errback=errback) + return self._callback("historyGet", str(from_jid), str(to_jid), limit, between, filters, str(profile), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def isConnected(self, profile_key="@DEFAULT@"): - return self._callback("isConnected", unicode(profile_key)) + return self._callback("isConnected", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sa{ss}s', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def launchAction(self, callback_id, data, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("launchAction", unicode(callback_id), data, unicode(profile_key), callback=callback, errback=errback) + return self._callback("launchAction", str(callback_id), data, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def loadParamsTemplate(self, filename): - return self._callback("loadParamsTemplate", unicode(filename)) + return self._callback("loadParamsTemplate", str(filename)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def menuHelpGet(self, menu_id, language): - return self._callback("menuHelpGet", unicode(menu_id), unicode(language)) + return self._callback("menuHelpGet", str(menu_id), str(language)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sasa{ss}is', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def menuLaunch(self, menu_type, path, data, security_limit, profile_key, callback=None, errback=None): - return self._callback("menuLaunch", unicode(menu_type), path, data, security_limit, unicode(profile_key), callback=callback, errback=errback) + return self._callback("menuLaunch", str(menu_type), path, data, security_limit, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='si', out_signature='a(ssasasa{ss})', async_callbacks=None) def menusGet(self, language, security_limit): - return self._callback("menusGet", unicode(language), security_limit) + return self._callback("menusGet", str(language), security_limit) 
@dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def messageEncryptionGet(self, to_jid, profile_key): - return self._callback("messageEncryptionGet", unicode(to_jid), unicode(profile_key)) + return self._callback("messageEncryptionGet", str(to_jid), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssbs', out_signature='', async_callbacks=('callback', 'errback')) def messageEncryptionStart(self, to_jid, namespace='', replace=False, profile_key="@NONE@", callback=None, errback=None): - return self._callback("messageEncryptionStart", unicode(to_jid), unicode(namespace), replace, unicode(profile_key), callback=callback, errback=errback) + return self._callback("messageEncryptionStart", str(to_jid), str(namespace), replace, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='', async_callbacks=('callback', 'errback')) def messageEncryptionStop(self, to_jid, profile_key, callback=None, errback=None): - return self._callback("messageEncryptionStop", unicode(to_jid), unicode(profile_key), callback=callback, errback=errback) + return self._callback("messageEncryptionStop", str(to_jid), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sa{ss}a{ss}sa{ss}s', out_signature='', async_callbacks=('callback', 'errback')) def messageSend(self, to_jid, message, subject={}, mess_type="auto", extra={}, profile_key="@NONE@", callback=None, errback=None): - return self._callback("messageSend", unicode(to_jid), message, subject, unicode(mess_type), extra, unicode(profile_key), callback=callback, errback=errback) + return self._callback("messageSend", str(to_jid), message, subject, str(mess_type), extra, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='a{ss}', @@ -454,37 +454,37 @@ in_signature='sis', out_signature='', async_callbacks=None) def paramsRegisterApp(self, xml, security_limit=-1, app=''): - return self._callback("paramsRegisterApp", unicode(xml), security_limit, unicode(app)) + return self._callback("paramsRegisterApp", str(xml), security_limit, str(app)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sss', out_signature='', async_callbacks=('callback', 'errback')) def profileCreate(self, profile, password='', component='', callback=None, errback=None): - return self._callback("profileCreate", unicode(profile), unicode(password), unicode(component), callback=callback, errback=errback) + return self._callback("profileCreate", str(profile), str(password), str(component), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def profileIsSessionStarted(self, profile_key="@DEFAULT@"): - return self._callback("profileIsSessionStarted", unicode(profile_key)) + return self._callback("profileIsSessionStarted", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='s', async_callbacks=None) def profileNameGet(self, profile_key="@DEFAULT@"): - return self._callback("profileNameGet", unicode(profile_key)) + return self._callback("profileNameGet", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=None) 
def profileSetDefault(self, profile): - return self._callback("profileSetDefault", unicode(profile)) + return self._callback("profileSetDefault", str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='b', async_callbacks=('callback', 'errback')) def profileStartSession(self, password='', profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("profileStartSession", unicode(password), unicode(profile_key), callback=callback, errback=errback) + return self._callback("profileStartSession", str(password), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='bb', out_signature='as', @@ -496,61 +496,61 @@ in_signature='ss', out_signature='a{ss}', async_callbacks=None) def progressGet(self, id, profile): - return self._callback("progressGet", unicode(id), unicode(profile)) + return self._callback("progressGet", str(id), str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{sa{ss}}}', async_callbacks=None) def progressGetAll(self, profile): - return self._callback("progressGetAll", unicode(profile)) + return self._callback("progressGetAll", str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{sa{ss}}}', async_callbacks=None) def progressGetAllMetadata(self, profile): - return self._callback("progressGetAllMetadata", unicode(profile)) + return self._callback("progressGetAllMetadata", str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=('callback', 'errback')) def rosterResync(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("rosterResync", unicode(profile_key), callback=callback, errback=errback) + return self._callback("rosterResync", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def saveParamsTemplate(self, filename): - return self._callback("saveParamsTemplate", unicode(filename)) + return self._callback("saveParamsTemplate", str(filename)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def sessionInfosGet(self, profile_key, callback=None, errback=None): - return self._callback("sessionInfosGet", unicode(profile_key), callback=callback, errback=errback) + return self._callback("sessionInfosGet", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sssis', out_signature='', async_callbacks=None) def setParam(self, name, value, category, security_limit=-1, profile_key="@DEFAULT@"): - return self._callback("setParam", unicode(name), unicode(value), unicode(category), security_limit, unicode(profile_key)) + return self._callback("setParam", str(name), str(value), str(category), security_limit, str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssa{ss}s', out_signature='', async_callbacks=None) def setPresence(self, to_jid='', show='', statuses={}, profile_key="@DEFAULT@"): - return self._callback("setPresence", unicode(to_jid), unicode(show), statuses, unicode(profile_key)) + return self._callback("setPresence", str(to_jid), str(show), statuses, str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, 
in_signature='sss', out_signature='', async_callbacks=None) def subscription(self, sub_type, entity, profile_key="@DEFAULT@"): - return self._callback("subscription", unicode(sub_type), unicode(entity), unicode(profile_key)) + return self._callback("subscription", str(sub_type), str(entity), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssass', out_signature='', async_callbacks=('callback', 'errback')) def updateContact(self, entity_jid, name, groups, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("updateContact", unicode(entity_jid), unicode(name), groups, unicode(profile_key), callback=callback, errback=errback) + return self._callback("updateContact", str(entity_jid), str(name), groups, str(profile_key), callback=callback, errback=errback) def __attributes(self, in_sign): """Return arguments to user given a in_sign @@ -590,9 +590,9 @@ i += 1 return attr - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False): """Dynamically add a method to Dbus Bridge""" - inspect_args = inspect.getargspec(method) + inspect_args = inspect.getfullargspec(method) _arguments = inspect_args.args _defaults = list(inspect_args.defaults or []) @@ -606,12 +606,12 @@ [repr(name)] + ( (_arguments + ["callback=callback", "errback=errback"]) - if async + if async_ else _arguments ) ) - if async: + if async_: _arguments.extend(["callback", "errback"]) _defaults.extend([None, None]) @@ -633,7 +633,7 @@ ) exec(code) # FIXME: to the same thing in a cleaner way, without compile/exec method = locals()[name] - async_callbacks = ("callback", "errback") if async else None + async_callbacks = ("callback", "errback") if async_ else None setattr( DbusObject, name, @@ -685,7 +685,7 @@ if e._dbus_error_name == "org.freedesktop.DBus.Error.NotSupported": log.error( _( - u"D-Bus is not launched, please see README to see instructions on how to launch it" + "D-Bus is not launched, please see README to see instructions on how to launch it" ) ) raise BridgeInitError @@ -744,11 +744,11 @@ log.debug("registering DBus bridge method [%s]" % name) self.dbus_bridge.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to Dbus Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [%s] to DBus bridge" % name) - self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async) + self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async_) self.register_method(name, method) def addSignal(self, name, int_suffix, signature, doc={}):
--- a/sat/bridge/pb.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/pb.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -46,11 +46,11 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def sendSignalEb(self, failure, signal_name): log.error( - u"Error while sending signal {name}: {msg}".format( + "Error while sending signal {name}: {msg}".format( name=signal_name, msg=failure ) ) @@ -66,27 +66,27 @@ d.addErrback(self.sendSignalEb, name) if to_remove: for handler in to_remove: - log.debug(u"Removing signal handler for dead frontend") + log.debug("Removing signal handler for dead frontend") self.signals_handlers.remove(handler) def _bridgeDeactivateSignals(self): if hasattr(self, "signals_paused"): - log.warning(u"bridge signals already deactivated") + log.warning("bridge signals already deactivated") if self.signals_handler: self.signals_paused.extend(self.signals_handler) else: self.signals_paused = self.signals_handlers self.signals_handlers = [] - log.debug(u"bridge signals have been deactivated") + log.debug("bridge signals have been deactivated") def _bridgeReactivateSignals(self): try: self.signals_handlers = self.signals_paused except AttributeError: - log.debug(u"signals were already activated") + log.debug("signals were already activated") else: del self.signals_paused - log.debug(u"bridge signals have been reactivated") + log.debug("bridge signals have been reactivated") ##METHODS_PART## @@ -102,14 +102,14 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def register_method(self, name, callback): log.debug("registering PB bridge method [%s]" % name) setattr(self.root, "remote_" + name, callback) # self.root.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to PB Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method {name} to PB bridge".format(name=name))
--- a/sat/core/constants.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/constants.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -28,21 +28,21 @@ class Const(object): ## Application ## - APP_NAME = u"Salut à Toi" - APP_NAME_SHORT = u"SàT" - APP_NAME_FILE = u"sat" - APP_NAME_FULL = u"{name_short} ({name})".format( + APP_NAME = "Salut à Toi" + APP_NAME_SHORT = "SàT" + APP_NAME_FILE = "sat" + APP_NAME_FULL = "{name_short} ({name})".format( name_short=APP_NAME_SHORT, name=APP_NAME ) APP_VERSION = ( sat.__version__ ) # Please add 'D' at the end of version in sat/VERSION for dev versions - APP_RELEASE_NAME = u"La Cecília" - APP_URL = u"https://salut-a-toi.org" + APP_RELEASE_NAME = "La Cecília" + APP_URL = "https://salut-a-toi.org" ## Runtime ## PLUGIN_EXT = "py" - HISTORY_SKIP = u"skip" + HISTORY_SKIP = "skip" ## Main config ## DEFAULT_BRIDGE = "dbus" @@ -122,15 +122,15 @@ ) MESS_TYPE_ALL = MESS_TYPE_STANDARD + (MESS_TYPE_INFO, MESS_TYPE_AUTO) - MESS_EXTRA_INFO = u"info_type" - EXTRA_INFO_DECR_ERR = u"DECRYPTION_ERROR" - EXTRA_INFO_ENCR_ERR = u"ENCRYPTION_ERROR" + MESS_EXTRA_INFO = "info_type" + EXTRA_INFO_DECR_ERR = "DECRYPTION_ERROR" + EXTRA_INFO_ENCR_ERR = "ENCRYPTION_ERROR" # encryption is a key for plugins - MESS_KEY_ENCRYPTION = u"ENCRYPTION" + MESS_KEY_ENCRYPTION = "ENCRYPTION" # encrypted is a key for frontends - MESS_KEY_ENCRYPTED = u"encrypted" - MESS_KEY_TRUSTED = u"trusted" + MESS_KEY_ENCRYPTED = "encrypted" + MESS_KEY_TRUSTED = "trusted" ## Chat ## CHAT_ONE2ONE = "one2one" @@ -162,110 +162,44 @@ ## Directories ## # directory for components specific data - COMPONENTS_DIR = u"components" - CACHE_DIR = u"cache" + COMPONENTS_DIR = "components" + CACHE_DIR = "cache" # files in file dir are stored for long term # files dir is global, i.e. for all profiles - FILES_DIR = u"files" + FILES_DIR = "files" # FILES_LINKS_DIR is a directory where files owned by a specific profile # are linked to the global files directory. This way the directory can be # shared per profiles while keeping global directory where identical files # shared between different profiles are not duplicated. - FILES_LINKS_DIR = u"files_links" + FILES_LINKS_DIR = "files_links" # FILES_TMP_DIR is where profile's partially transfered files are put. 
# Once transfer is completed, they are moved to FILES_DIR - FILES_TMP_DIR = u"files_tmp" - - ## Configuration ## - if ( - BaseDirectory - ): # skipped when xdg module is not available (should not happen in backend) - if "org.salutatoi.cagou" in BaseDirectory.__file__: - # FIXME: hack to make config read from the right location on Android - # TODO: fix it in a more proper way - - # we need to use Android API to get downloads directory - import os.path - from jnius import autoclass - - Environment = autoclass("android.os.Environment") - - BaseDirectory = None - DEFAULT_CONFIG = { - "local_dir": "/data/data/org.salutatoi.cagou/app", - "media_dir": "/data/data/org.salutatoi.cagou/files/app/media", - # FIXME: temporary location for downloads, need to call API properly - "downloads_dir": os.path.join( - Environment.getExternalStoragePublicDirectory( - Environment.DIRECTORY_DOWNLOADS - ).getAbsolutePath(), - APP_NAME_FILE, - ), - "pid_dir": "%(local_dir)s", - "log_dir": "%(local_dir)s", - } - CONFIG_FILES = [ - "/data/data/org.salutatoi.cagou/files/app/android/" - + APP_NAME_FILE - + ".conf" - ] - else: - import os - CONFIG_PATHS = ( - ["/etc/", "~/", "~/.", "", "."] - + [ - "%s/" % path - for path in list(BaseDirectory.load_config_paths(APP_NAME_FILE)) - ] - ) - - # on recent versions of Flatpak, FLATPAK_ID is set at run time - # it seems that this is not the case on older versions, - # but FLATPAK_SANDBOX_DIR seems set then - if os.getenv('FLATPAK_ID') or os.getenv('FLATPAK_SANDBOX_DIR'): - # for Flatpak, the conf can't be set in /etc or $HOME, so we have - # to add /app - CONFIG_PATHS.append('/app/') - - ## Configuration ## - DEFAULT_CONFIG = { - "media_dir": "/usr/share/" + APP_NAME_FILE + "/media", - "local_dir": BaseDirectory.save_data_path(APP_NAME_FILE), - "downloads_dir": "~/Downloads/" + APP_NAME_FILE, - "pid_dir": "%(local_dir)s", - "log_dir": "%(local_dir)s", - } - - # List of the configuration filenames sorted by ascending priority - CONFIG_FILES = [ - realpath(expanduser(path) + APP_NAME_FILE + ".conf") - for path in CONFIG_PATHS - ] + FILES_TMP_DIR = "files_tmp" ## Templates ## - TEMPLATE_TPL_DIR = u"templates" - TEMPLATE_THEME_DEFAULT = u"default" - TEMPLATE_STATIC_DIR = u"static" - KEY_LANG = u"lang" # templates i18n + TEMPLATE_TPL_DIR = "templates" + TEMPLATE_THEME_DEFAULT = "default" + TEMPLATE_STATIC_DIR = "static" + KEY_LANG = "lang" # templates i18n ## Plugins ## # PLUGIN_INFO keys # XXX: we use PI instead of PLUG_INFO which would normally be used # to make the header more readable - PI_NAME = u"name" - PI_IMPORT_NAME = u"import_name" - PI_MAIN = u"main" - PI_HANDLER = u"handler" + PI_NAME = "name" + PI_IMPORT_NAME = "import_name" + PI_MAIN = "main" + PI_HANDLER = "handler" PI_TYPE = ( - u"type" + "type" ) # FIXME: should be types, and should handle single unicode type or tuple of types (e.g. 
"blog" and "import") - PI_MODES = u"modes" - PI_PROTOCOLS = u"protocols" - PI_DEPENDENCIES = u"dependencies" - PI_RECOMMENDATIONS = u"recommendations" - PI_DESCRIPTION = u"description" - PI_USAGE = u"usage" + PI_MODES = "modes" + PI_PROTOCOLS = "protocols" + PI_DEPENDENCIES = "dependencies" + PI_RECOMMENDATIONS = "recommendations" + PI_DESCRIPTION = "description" + PI_USAGE = "usage" # Types PLUG_TYPE_XEP = "XEP" @@ -387,8 +321,8 @@ META_TYPE_OVERWRITE = "overwrite" ## HARD-CODED ACTIONS IDS (generated with uuid.uuid4) ## - AUTHENTICATE_PROFILE_ID = u"b03bbfa8-a4ae-4734-a248-06ce6c7cf562" - CHANGE_XMPP_PASSWD_ID = u"878b9387-de2b-413b-950f-e424a147bcd0" + AUTHENTICATE_PROFILE_ID = "b03bbfa8-a4ae-4734-a248-06ce6c7cf562" + CHANGE_XMPP_PASSWD_ID = "878b9387-de2b-413b-950f-e424a147bcd0" ## Text values ## BOOL_TRUE = "true" @@ -399,32 +333,32 @@ HISTORY_LIMIT_NONE = -2 ## Progress error special values ## - PROGRESS_ERROR_DECLINED = u"declined" # session has been declined by peer user + PROGRESS_ERROR_DECLINED = "declined" # session has been declined by peer user ## Files ## FILE_TYPE_DIRECTORY = "directory" FILE_TYPE_FILE = "file" ## Permissions management ## - ACCESS_PERM_READ = u"read" - ACCESS_PERM_WRITE = u"write" + ACCESS_PERM_READ = "read" + ACCESS_PERM_WRITE = "write" ACCESS_PERMS = {ACCESS_PERM_READ, ACCESS_PERM_WRITE} - ACCESS_TYPE_PUBLIC = u"public" - ACCESS_TYPE_WHITELIST = u"whitelist" + ACCESS_TYPE_PUBLIC = "public" + ACCESS_TYPE_WHITELIST = "whitelist" ACCESS_TYPES = (ACCESS_TYPE_PUBLIC, ACCESS_TYPE_WHITELIST) ## Common data keys ## - KEY_THUMBNAILS = u"thumbnails" - KEY_PROGRESS_ID = u"progress_id" + KEY_THUMBNAILS = "thumbnails" + KEY_PROGRESS_ID = "progress_id" ## Common extra keys/values ## - KEY_ORDER_BY = u"order_by" + KEY_ORDER_BY = "order_by" - ORDER_BY_CREATION = u'creation' - ORDER_BY_MODIFICATION = u'modification' + ORDER_BY_CREATION = 'creation' + ORDER_BY_MODIFICATION = 'modification' # internationalisation - DEFAULT_LOCALE = u"en_GB" + DEFAULT_LOCALE = "en_GB" ## Misc ## SAVEFILE_DATABASE = APP_NAME_FILE + ".db" @@ -434,11 +368,11 @@ NO_LIMIT = -1 # used in bridge when a integer value is expected DEFAULT_MAX_AGE = 1209600 # default max age of cached files, in seconds HASH_SHA1_EMPTY = "da39a3ee5e6b4b0d3255bfef95601890afd80709" - STANZA_NAMES = (u"iq", u"message", u"presence") + STANZA_NAMES = ("iq", "message", "presence") # Stream Hooks - STREAM_HOOK_SEND = u"send" - STREAM_HOOK_RECEIVE = u"receive" + STREAM_HOOK_SEND = "send" + STREAM_HOOK_RECEIVE = "receive" @classmethod def LOG_OPTIONS(cls): @@ -456,7 +390,7 @@ @classmethod def bool(cls, value): """@return (bool): bool value for associated constant""" - assert isinstance(value, basestring) + assert isinstance(value, str) return value.lower() in (cls.BOOL_TRUE, "1", "yes", "on") @classmethod @@ -464,3 +398,72 @@ """@return (str): constant associated to bool value""" assert isinstance(value, bool) return cls.BOOL_TRUE if value else cls.BOOL_FALSE + + + +## Configuration ## +if ( + BaseDirectory +): # skipped when xdg module is not available (should not happen in backend) + if "org.salutatoi.cagou" in BaseDirectory.__file__: + # FIXME: hack to make config read from the right location on Android + # TODO: fix it in a more proper way + + # we need to use Android API to get downloads directory + import os.path + from jnius import autoclass + + Environment = autoclass("android.os.Environment") + + BaseDirectory = None + Const.DEFAULT_CONFIG = { + "local_dir": "/data/data/org.salutatoi.cagou/app", + "media_dir": 
"/data/data/org.salutatoi.cagou/files/app/media", + # FIXME: temporary location for downloads, need to call API properly + "downloads_dir": os.path.join( + Environment.getExternalStoragePublicDirectory( + Environment.DIRECTORY_DOWNLOADS + ).getAbsolutePath(), + Const.APP_NAME_FILE, + ), + "pid_dir": "%(local_dir)s", + "log_dir": "%(local_dir)s", + } + Const.CONFIG_FILES = [ + "/data/data/org.salutatoi.cagou/files/app/android/" + + Const.APP_NAME_FILE + + ".conf" + ] + else: + import os + Const.CONFIG_PATHS = ( + ["/etc/", "~/", "~/.", "", "."] + + [ + "%s/" % path + for path in list(BaseDirectory.load_config_paths(Const.APP_NAME_FILE)) + ] + ) + + # on recent versions of Flatpak, FLATPAK_ID is set at run time + # it seems that this is not the case on older versions, + # but FLATPAK_SANDBOX_DIR seems set then + if os.getenv('FLATPAK_ID') or os.getenv('FLATPAK_SANDBOX_DIR'): + # for Flatpak, the conf can't be set in /etc or $HOME, so we have + # to add /app + Const.CONFIG_PATHS.append('/app/') + + ## Configuration ## + Const.DEFAULT_CONFIG = { + "media_dir": "/usr/share/" + Const.APP_NAME_FILE + "/media", + "local_dir": BaseDirectory.save_data_path(Const.APP_NAME_FILE), + "downloads_dir": "~/Downloads/" + Const.APP_NAME_FILE, + "pid_dir": "%(local_dir)s", + "log_dir": "%(local_dir)s", + } + + # List of the configuration filenames sorted by ascending priority + Const.CONFIG_FILES = [ + realpath(expanduser(path) + Const.APP_NAME_FILE + ".conf") + for path in Const.CONFIG_PATHS + ] +
--- a/sat/core/exceptions.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/exceptions.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT Exceptions
--- a/sat/core/i18n.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/i18n.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -26,7 +26,7 @@ import gettext - _ = gettext.translation("sat", "i18n", fallback=True).ugettext + _ = gettext.translation("sat", "i18n", fallback=True).gettext _translators = {None: gettext.NullTranslations()} def languageSwitch(lang=None): @@ -34,7 +34,7 @@ _translators[lang] = gettext.translation( "sat", languages=[lang], fallback=True ) - _translators[lang].install(unicode=True) + _translators[lang].install() except ImportError:
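The change above follows the Python 3 gettext API: translation objects no longer expose ugettext() (gettext() already returns str), and install() lost its unicode flag. A minimal sketch of the same pattern, which silently falls back to an identity translation when no compiled catalogue is found:

    import gettext

    # gettext() returns str in Python 3; ugettext() no longer exists
    _ = gettext.translation("sat", "i18n", fallback=True).gettext
    print(_("hello"))

    # install() takes no unicode flag anymore; it binds _() into builtins
    translator = gettext.translation("sat", languages=["fr"], fallback=True)
    translator.install()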
--- a/sat/core/log.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/log.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -67,7 +67,7 @@ """ if kwargs.get('exc_info', False): message = self.addTraceback(message) - print message + print(message) def log(self, level, message, **kwargs): """Print message @@ -197,7 +197,7 @@ """update existing logger to the class needed for this backend""" if self.LOGGER_CLASS is None: return - for name, logger in _loggers.items(): + for name, logger in list(_loggers.items()): _loggers[name] = self.LOGGER_CLASS(logger) def preTreatment(self): @@ -235,7 +235,7 @@ def configureColors(self, colors, force_colors, levels_taints_dict): if colors: # if color are used, we need to handle levels_taints_dict - for level in levels_taints_dict.keys(): + for level in list(levels_taints_dict.keys()): # we wants levels in uppercase to correspond to contstants levels_taints_dict[level.upper()] = levels_taints_dict[level] taints = self.__class__.taints = {} @@ -283,7 +283,7 @@ options = None if output not in (C.LOG_OPT_OUTPUT_DEFAULT, C.LOG_OPT_OUTPUT_FILE, C.LOG_OPT_OUTPUT_MEMORY): - raise ValueError(u"Invalid output [%s]" % output) + raise ValueError("Invalid output [%s]" % output) if output == C.LOG_OPT_OUTPUT_DEFAULT: # no option for defaut handler @@ -303,7 +303,7 @@ handlers[output] = limit if options: # we should not have unparsed options - raise ValueError(u"options [{options}] are not supported for {handler} output".format(options=options, handler=output)) + raise ValueError("options [{options}] are not supported for {handler} output".format(options=options, handler=output)) @staticmethod def memoryGet(size=None):
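Two recurring Python 3 patterns show up in this file: print becomes a function, and dictionary views returned by items() or keys() are wrapped in list() so the loop works on a snapshot while the dictionary itself is modified. A small self-contained sketch of the second pattern:

    # Sketch of the list(dict.items()) pattern above: taking a snapshot lets the
    # loop modify the dictionary it iterates over instead of a live view.
    loggers = {"core": "logger-a", "plugin": "logger-b"}

    for name, logger in list(loggers.items()):
        loggers[name] = "wrapped({})".format(logger)

    print(loggers)  # {'core': 'wrapped(logger-a)', 'plugin': 'wrapped(logger-b)'}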
--- a/sat/core/log_config.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/log_config.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -125,7 +125,7 @@ ) or self.LOGGER_CLASS.force_colors: message = event.get("message", tuple()) if message: - event["message"] = ("".join(message),) # must be a tuple + event["message"] = (b"".join(message),) # must be a tuple observer(event) # we can now call the original observer return observer_hook @@ -158,7 +158,7 @@ observer = self.changeObserver(observer, can_colors=True) else: # we use print because log system is not fully initialized - print("Unmanaged observer [%s]" % observer) + print(("Unmanaged observer [%s]" % observer)) return observer self.observers[ori] = observer return observer @@ -202,10 +202,10 @@ import types # see https://stackoverflow.com/a/4267590 (thx Chris Morgan/aaronasterling) twisted_log.addObserver = types.MethodType( - addObserverObserver, self.log_publisher, twisted_log.LogPublisher + addObserverObserver, self.log_publisher ) twisted_log.removeObserver = types.MethodType( - removeObserverObserver, self.log_publisher, twisted_log.LogPublisher + removeObserverObserver, self.log_publisher ) # we now change existing observers @@ -282,7 +282,7 @@ if event.get("isError", False) else twisted_logger.info ) - log_method(text.decode("utf-8")) + log_method(text) self.log_publisher._originalAddObserver(twistedObserver) @@ -336,7 +336,7 @@ import sys class SatFormatter(logging.Formatter): - u"""Formatter which manage SàT specificities""" + """Formatter which manage SàT specificities""" _format = fmt _with_profile = "%(profile)s" in fmt @@ -395,7 +395,7 @@ root_logger = logging.getLogger() if len(root_logger.handlers) == 0: - for handler, options in log.handlers.items(): + for handler, options in list(log.handlers.items()): if handler == C.LOG_OPT_OUTPUT_DEFAULT: hdlr = logging.StreamHandler() try: @@ -426,7 +426,7 @@ else: raise ValueError("Unknown handler type") else: - root_logger.warning(u"Handlers already set on root logger") + root_logger.warning("Handlers already set on root logger") @staticmethod def memoryGet(size=None):
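The types.MethodType calls above lose their third argument: in Python 3 a bound method is built from just the function and the instance. A short sketch of the two-argument form, with illustrative names only:

    import types

    class LogPublisher:
        pass

    def addObserverObserver(self, observer):
        print("observer registered on", self)

    publisher = LogPublisher()
    # Python 2 needed MethodType(func, instance, cls); Python 3 takes two arguments
    publisher.addObserver = types.MethodType(addObserverObserver, publisher)
    publisher.addObserver(lambda event: None)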
--- a/sat/core/patches.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/patches.py Tue Aug 13 19:08:41 2019 +0200 @@ -84,7 +84,7 @@ def addHook(self, hook_type, callback): """Add a send or receive hook""" - conflict_msg = (u"Hook conflict: can't add {hook_type} hook {callback}" + conflict_msg = ("Hook conflict: can't add {hook_type} hook {callback}" .format(hook_type=hook_type, callback=callback)) if hook_type == C.STREAM_HOOK_RECEIVE: if callback not in self._onElementHooks: @@ -97,7 +97,7 @@ else: log.warning(conflict_msg) else: - raise ValueError(u"Invalid hook type: {hook_type}" + raise ValueError("Invalid hook type: {hook_type}" .format(hook_type=hook_type)) def onElement(self, element): @@ -161,9 +161,10 @@ def apply(): - # certificate validation - xmlstream.TLSInitiatingInitializer = TLSInitiatingInitializer - client.XMPPClient = XMPPClient + # FIXME: certificate validation is now implemented in Twisted trunk, to be removed + # # certificate validation + # xmlstream.TLSInitiatingInitializer = TLSInitiatingInitializer + # client.XMPPClient = XMPPClient # XmlStream triggers xmlstream.XmlStreamFactory.protocol = XmlStream # jid fix
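Aside from the commented-out certificate code, the addHook() logic above keeps one callback list per hook type, warns on duplicate registrations and rejects unknown types. The following is a stripped-down, standalone sketch of that logic, using plain functions instead of the patched XmlStream class:

    import logging

    STREAM_HOOK_RECEIVE, STREAM_HOOK_SEND = "receive", "send"
    hooks = {STREAM_HOOK_RECEIVE: [], STREAM_HOOK_SEND: []}

    def addHook(hook_type, callback):
        try:
            registered = hooks[hook_type]
        except KeyError:
            raise ValueError("Invalid hook type: {hook_type}".format(hook_type=hook_type))
        if callback in registered:
            logging.warning("Hook conflict: can't add %s hook %s", hook_type, callback)
        else:
            registered.append(callback)

    addHook(STREAM_HOOK_SEND, print)
    addHook(STREAM_HOOK_SEND, print)  # duplicate: warned about, not added twice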
--- a/sat/core/sat_main.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/sat_main.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -66,8 +66,8 @@ self.plugins = {} # map for short name to whole namespace, self.ns_map = { - u"x-data": xmpp.NS_X_DATA, - u"disco#info": xmpp.NS_DISCO_INFO, + "x-data": xmpp.NS_X_DATA, + "disco#info": xmpp.NS_DISCO_INFO, } # extended by plugins with registerNamespace self.memory = memory.Memory(self) @@ -79,13 +79,13 @@ bridge_module = dynamic_import.bridge(bridge_name) if bridge_module is None: - log.error(u"Can't find bridge module of name {}".format(bridge_name)) + log.error("Can't find bridge module of name {}".format(bridge_name)) sys.exit(1) - log.info(u"using {} bridge".format(bridge_name)) + log.info("using {} bridge".format(bridge_name)) try: self.bridge = bridge_module.Bridge() except exceptions.BridgeInitError: - log.error(u"Bridge can't be initialised, can't start SàT core") + log.error("Bridge can't be initialised, can't start SàT core") sys.exit(1) self.bridge.register_method("getReady", lambda: self.initialised) self.bridge.register_method("getVersion", lambda: self.full_version) @@ -181,7 +181,7 @@ try: return self._version_cache except AttributeError: - self._version_cache = u"{} « {} » ({})".format( + self._version_cache = "{} « {} » ({})".format( version, C.APP_RELEASE_NAME, utils.getRepositoryData(sat) ) return self._version_cache @@ -202,14 +202,14 @@ ui_profile_manager.ProfileManager(self) except Exception as e: log.error( - _(u"Could not initialize backend: {reason}").format( + _("Could not initialize backend: {reason}").format( reason=str(e).decode("utf-8", "ignore") ) ) sys.exit(1) self._addBaseMenus() self.initialised.callback(None) - log.info(_(u"Backend is ready")) + log.info(_("Backend is ready")) def _addBaseMenus(self): """Add base menus""" @@ -246,15 +246,15 @@ except exceptions.MissingModule as e: self._unimport_plugin(plugin_path) log.warning( - u"Can't import plugin [{path}] because of an unavailale third party " - u"module:\n{msg}".format( + "Can't import plugin [{path}] because of an unavailale third party " + "module:\n{msg}".format( path=plugin_path, msg=e ) ) continue except exceptions.CancelError as e: log.info( - u"Plugin [{path}] cancelled its own import: {msg}".format( + "Plugin [{path}] cancelled its own import: {msg}".format( path=plugin_path, msg=e ) ) @@ -264,7 +264,7 @@ import traceback log.error( - _(u"Can't import plugin [{path}]:\n{error}").format( + _("Can't import plugin [{path}]:\n{error}").format( path=plugin_path, error=traceback.format_exc() ) ) @@ -274,17 +274,17 @@ plugin_info = mod.PLUGIN_INFO import_name = plugin_info["import_name"] - plugin_modes = plugin_info[u"modes"] = set( - plugin_info.setdefault(u"modes", C.PLUG_MODE_DEFAULT) + plugin_modes = plugin_info["modes"] = set( + plugin_info.setdefault("modes", C.PLUG_MODE_DEFAULT) ) # if the plugin is an entry point, it must work in component mode - if plugin_info[u"type"] == C.PLUG_TYPE_ENTRY_POINT: + if plugin_info["type"] == C.PLUG_TYPE_ENTRY_POINT: # if plugin is an entrypoint, we cache it if C.PLUG_MODE_COMPONENT not in plugin_modes: log.error( _( - u"{type} type must be used with {mode} mode, ignoring plugin" + "{type} type must be used with {mode} mode, ignoring plugin" ).format(type=C.PLUG_TYPE_ENTRY_POINT, mode=C.PLUG_MODE_COMPONENT) ) self._unimport_plugin(plugin_path) @@ -293,8 +293,8 @@ if import_name in plugins_to_import: log.error( _( - 
u"Name conflict for import name [{import_name}], can't import " - u"plugin [{name}]" + "Name conflict for import name [{import_name}], can't import " + "plugin [{name}]" ).format(**plugin_info) ) continue @@ -320,7 +320,7 @@ is raised """ if import_name in self.plugins: - log.debug(u"Plugin {} already imported, passing".format(import_name)) + log.debug("Plugin {} already imported, passing".format(import_name)) return if not import_name: import_name, (plugin_path, mod, plugin_info) = plugins_to_import.popitem() @@ -328,10 +328,10 @@ if not import_name in plugins_to_import: if optional: log.warning( - _(u"Recommended plugin not found: {}").format(import_name) + _("Recommended plugin not found: {}").format(import_name) ) return - msg = u"Dependency not found: {}".format(import_name) + msg = "Dependency not found: {}".format(import_name) log.error(msg) raise ImportError(msg) plugin_path, mod, plugin_info = plugins_to_import.pop(import_name) @@ -340,7 +340,7 @@ for to_import in dependencies + recommendations: if to_import not in self.plugins: log.debug( - u"Recursively import dependency of [%s]: [%s]" + "Recursively import dependency of [%s]: [%s]" % (import_name, to_import) ) try: @@ -349,7 +349,7 @@ ) except ImportError as e: log.warning( - _(u"Can't import plugin {name}: {error}").format( + _("Can't import plugin {name}: {error}").format( name=plugin_info["name"], error=e ) ) @@ -362,13 +362,13 @@ self.plugins[import_name] = getattr(mod, plugin_info["main"])(self) except Exception as e: log.warning( - u'Error while loading plugin "{name}", ignoring it: {error}'.format( + 'Error while loading plugin "{name}", ignoring it: {error}'.format( name=plugin_info["name"], error=e ) ) if optional: return - raise ImportError(u"Error during initiation") + raise ImportError("Error during initiation") if C.bool(plugin_info.get(C.PI_HANDLER, C.BOOL_FALSE)): self.plugins[import_name].is_handler = True else: @@ -386,7 +386,7 @@ # pluging depending on the unloaded one should be unloaded too # for now, just a basic call on plugin.unload is done defers_list = [] - for plugin in self.plugins.itervalues(): + for plugin in self.plugins.values(): try: unload = plugin.unload except AttributeError: @@ -419,7 +419,7 @@ def connectProfile(__=None): if self.isConnected(profile): - log.info(_(u"already connected !")) + log.info(_("already connected !")) return True if self.memory.isComponent(profile): @@ -439,7 +439,7 @@ if not self.isConnected(profile_key): # isConnected is checked here and not on client # because client is deleted when session is ended - log.info(_(u"not connected !")) + log.info(_("not connected !")) return defer.succeed(None) client = self.getClient(profile_key) return client.entityDisconnect() @@ -468,7 +468,7 @@ pass features = [] - for import_name, plugin in self.plugins.iteritems(): + for import_name, plugin in self.plugins.items(): try: features_d = defer.maybeDeferred(plugin.getFeatures, profile_key) except AttributeError: @@ -485,14 +485,14 @@ ret[name] = data else: log.warning( - u"Error while getting features for {name}: {failure}".format( + "Error while getting features for {name}: {failure}".format( name=name, failure=data ) ) ret[name] = {} return ret - d_list.addCallback(buildFeatures, self.plugins.keys()) + d_list.addCallback(buildFeatures, list(self.plugins.keys())) return d_list def getContacts(self, profile_key): @@ -527,10 +527,10 @@ self.memory.purgeProfileSession(profile) def startService(self): - log.info(u"Salut à toi ô mon frère !") + log.info("Salut à toi ô mon frère !") def 
stopService(self): - log.info(u"Salut aussi à Rantanplan") + log.info("Salut aussi à Rantanplan") return self.pluginsUnload() def run(self): @@ -576,13 +576,13 @@ @return: list of clients """ if not profile_key: - raise exceptions.DataError(_(u"profile_key must not be empty")) + raise exceptions.DataError(_("profile_key must not be empty")) try: profile = self.memory.getProfileName(profile_key, True) except exceptions.ProfileUnknownError: return [] if profile == C.PROF_KEY_ALL: - return self.profiles.values() + return list(self.profiles.values()) elif profile[0] == "@": # only profile keys can start with "@" raise exceptions.ProfileKeyUnknown return [self.profiles[profile]] @@ -594,9 +594,9 @@ @param name: name of the option @return: unicode representation of the option """ - return unicode(self.memory.getConfig(section, name, "")) + return str(self.memory.getConfig(section, name, "")) - def logErrback(self, failure_, msg=_(u"Unexpected error: {failure_}")): + def logErrback(self, failure_, msg=_("Unexpected error: {failure_}")): """Generic errback logging @param msg(unicode): error message ("failure_" key will be use for format) @@ -610,7 +610,7 @@ def registerNamespace(self, short_name, namespace): """associate a namespace to a short name""" if short_name in self.ns_map: - raise exceptions.ConflictError(u"this short name is already used") + raise exceptions.ConflictError("this short name is already used") self.ns_map[short_name] = namespace def getNamespaces(self): @@ -620,7 +620,7 @@ try: return self.ns_map[short_name] except KeyError: - raise exceptions.NotFound(u"namespace {short_name} is not registered" + raise exceptions.NotFound("namespace {short_name} is not registered" .format(short_name=short_name)) def getSessionInfos(self, profile_key): @@ -628,7 +628,7 @@ client = self.getClient(profile_key) data = { "jid": client.jid.full(), - "started": unicode(int(client.started)) + "started": str(int(client.started)) } return defer.succeed(data) @@ -714,9 +714,9 @@ ret = [] for p in plugins: ret.append({ - u"name": p.name, - u"namespace": p.namespace, - u"priority": unicode(p.priority), + "name": p.name, + "namespace": p.namespace, + "priority": str(p.priority), }) return ret @@ -740,7 +740,7 @@ message, subject, mess_type, - {unicode(key): unicode(value) for key, value in extra.items()}, + {str(key): str(value) for key, value in list(extra.items())}, ) def _setPresence(self, to="", show="", statuses=None, profile_key=C.PROF_KEY_NONE): @@ -774,7 +774,7 @@ assert profile to_jid = jid.JID(raw_jid) log.debug( - _(u"subsciption request [%(subs_type)s] for %(jid)s") + _("subsciption request [%(subs_type)s] for %(jid)s") % {"subs_type": subs_type, "jid": to_jid.full()} ) if subs_type == "subscribe": @@ -901,15 +901,15 @@ service_jid = services_jids[idx] if not success: log.warning( - _(u"Can't find features for service {service_jid}, ignoring") + _("Can't find features for service {service_jid}, ignoring") .format(service_jid=service_jid.full())) continue if (identities is not None and not set(infos.identities.keys()).issuperset(identities)): continue found_identities = [ - (cat, type_, name or u"") - for (cat, type_), name in infos.identities.iteritems() + (cat, type_, name or "") + for (cat, type_), name in infos.identities.items() ] found_service[service_jid.full()] = found_identities @@ -960,7 +960,7 @@ full_jid = full_jids[idx] if not success: log.warning( - _(u"Can't retrieve {full_jid} infos, ignoring") + _("Can't retrieve {full_jid} infos, ignoring") .format(full_jid=full_jid.full())) 
continue if infos.features.issuperset(namespaces): @@ -969,8 +969,8 @@ ).issuperset(identities): continue found_identities = [ - (cat, type_, name or u"") - for (cat, type_), name in infos.identities.iteritems() + (cat, type_, name or "") + for (cat, type_), name in infos.identities.items() ] found[full_jid.full()] = found_identities @@ -979,7 +979,7 @@ ## Generic HMI ## def _killAction(self, keep_id, client): - log.debug(u"Killing action {} for timeout".format(keep_id)) + log.debug("Killing action {} for timeout".format(keep_id)) client.actions[keep_id] def actionNew( @@ -998,7 +998,7 @@ Action will be deleted after 30 min. @param profile: %(doc_profile)s """ - id_ = unicode(uuid.uuid4()) + id_ = str(uuid.uuid4()) if keep_id is not None: client = self.getClient(profile) action_timer = reactor.callLater(60 * 30, self._killAction, keep_id, client) @@ -1012,7 +1012,7 @@ @param profile: %(doc_profile)s """ client = self.getClient(profile) - return [action_tuple[:-1] for action_tuple in client.actions.itervalues()] + return [action_tuple[:-1] for action_tuple in client.actions.values()] def registerProgressCb( self, progress_id, callback, metadata=None, profile=C.PROF_KEY_NONE @@ -1022,7 +1022,7 @@ metadata = {} client = self.getClient(profile) if progress_id in client._progress_cb: - raise exceptions.ConflictError(u"Progress ID is not unique !") + raise exceptions.ConflictError("Progress ID is not unique !") client._progress_cb[progress_id] = (callback, metadata) def removeProgressCb(self, progress_id, profile): @@ -1031,11 +1031,11 @@ try: del client._progress_cb[progress_id] except KeyError: - log.error(_(u"Trying to remove an unknow progress callback")) + log.error(_("Trying to remove an unknow progress callback")) def _progressGet(self, progress_id, profile): data = self.progressGet(progress_id, profile) - return {k: unicode(v) for k, v in data.iteritems()} + return {k: str(v) for k, v in data.items()} def progressGet(self, progress_id, profile): """Return a dict with progress information @@ -1057,10 +1057,10 @@ def _progressGetAll(self, profile_key): progress_all = self.progressGetAll(profile_key) - for profile, progress_dict in progress_all.iteritems(): - for progress_id, data in progress_dict.iteritems(): - for key, value in data.iteritems(): - data[key] = unicode(value) + for profile, progress_dict in progress_all.items(): + for progress_id, data in progress_dict.items(): + for key, value in data.items(): + data[key] = str(value) return progress_all def progressGetAllMetadata(self, profile_key): @@ -1082,7 +1082,7 @@ for ( progress_id, (__, progress_metadata), - ) in client._progress_cb.iteritems(): + ) in client._progress_cb.items(): progress_dict[progress_id] = progress_metadata return progress_all @@ -1101,7 +1101,7 @@ profile = client.profile progress_dict = {} progress_all[profile] = progress_dict - for progress_id, (progress_cb, __) in client._progress_cb.iteritems(): + for progress_id, (progress_cb, __) in client._progress_cb.items(): progress_dict[progress_id] = progress_cb(progress_id, profile) return progress_all @@ -1121,7 +1121,7 @@ callback_id = str(uuid.uuid4()) else: if callback_id in self._cb_map: - raise exceptions.ConflictError(_(u"id already registered")) + raise exceptions.ConflictError(_("id already registered")) self._cb_map[callback_id] = (callback, args, kwargs) if "one_shot" in kwargs: # One Shot callback are removed after 30 min @@ -1163,7 +1163,7 @@ profile = self.memory.getProfileName(profile_key) if not profile: raise exceptions.ProfileUnknownError( - 
_(u"trying to launch action with a non-existant profile") + _("trying to launch action with a non-existant profile") ) else: profile = client.profile @@ -1179,7 +1179,7 @@ try: callback, args, kwargs = self._cb_map[callback_id] except KeyError: - raise exceptions.DataError(u"Unknown callback id {}".format(callback_id)) + raise exceptions.DataError("Unknown callback id {}".format(callback_id)) if kwargs.get("with_data", False): if data is None: @@ -1210,7 +1210,7 @@ def importMenu(self, path, callback, security_limit=C.NO_SECURITY_LIMIT, help_string="", type_=C.MENU_GLOBAL): - """register a new menu for frontends + r"""register a new menu for frontends @param path(iterable[unicode]): path to go to the menu (category/subcategory/.../item) (e.g.: ("File", "Open")) @@ -1245,7 +1245,7 @@ if callable(callback): callback_id = self.registerCallback(callback, with_data=True) - elif isinstance(callback, basestring): + elif isinstance(callback, str): # The callback is already registered callback_id = callback try: @@ -1256,7 +1256,7 @@ else: raise exceptions.DataError("Unknown callback type") - for menu_data in self._menus.itervalues(): + for menu_data in self._menus.values(): if menu_data["path"] == path and menu_data["type"] == type_: raise exceptions.ConflictError( _("A menu with the same path and type already exists") @@ -1267,7 +1267,7 @@ if menu_key in self._menus_paths: raise exceptions.ConflictError( - u"this menu path is already used: {path} ({menu_key})".format( + "this menu path is already used: {path} ({menu_key})".format( path=path_canonical, menu_key=menu_key ) ) @@ -1300,7 +1300,7 @@ - help_url: link to a page with more complete documentation (TODO) """ ret = [] - for menu_id, menu_data in self._menus.iteritems(): + for menu_id, menu_data in self._menus.items(): type_ = menu_data["type"] path = menu_data["path"] menu_security_limit = menu_data["security_limit"] @@ -1339,7 +1339,7 @@ callback_id = self._menus_paths[menu_key] except KeyError: raise exceptions.NotFound( - u"Can't find menu {path} ({menu_type})".format( + "Can't find menu {path} ({menu_type})".format( path=canonical_path, menu_type=menu_type ) )
--- a/sat/core/xmpp.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/xmpp.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -40,18 +40,18 @@ from sat.memory import encryption from sat.memory import persistent from sat.tools import xml_tools -from zope.interface import implements +from zope.interface import implementer log = getLogger(__name__) -NS_X_DATA = u"jabber:x:data" -NS_DISCO_INFO = u"http://jabber.org/protocol/disco#info" -NS_XML_ELEMENT = u"urn:xmpp:xml-element" -NS_ROSTER_VER = u"urn:xmpp:features:rosterver" +NS_X_DATA = "jabber:x:data" +NS_DISCO_INFO = "http://jabber.org/protocol/disco#info" +NS_XML_ELEMENT = "urn:xmpp:xml-element" +NS_ROSTER_VER = "urn:xmpp:features:rosterver" # we use 2 "@" which is illegal in a jid, to be sure we are not mixing keys # with roster jids -ROSTER_VER_KEY = u"@version@" +ROSTER_VER_KEY = "@version@" class SatXMPPEntity(object): @@ -65,9 +65,9 @@ clientConnectionFailed_ori = factory.clientConnectionFailed clientConnectionLost_ori = factory.clientConnectionLost factory.clientConnectionFailed = partial( - self.connectionTerminated, term_type=u"failed", cb=clientConnectionFailed_ori) + self.connectionTerminated, term_type="failed", cb=clientConnectionFailed_ori) factory.clientConnectionLost = partial( - self.connectionTerminated, term_type=u"lost", cb=clientConnectionLost_ori) + self.connectionTerminated, term_type="lost", cb=clientConnectionLost_ori) factory.maxRetries = max_retries factory.maxDelay = 30 @@ -87,7 +87,7 @@ self.encryption = encryption.EncryptionHandler(self) def __unicode__(self): - return u"Client instance for profile {profile}".format(profile=self.profile) + return "Client instance for profile {profile}".format(profile=self.profile) def __str__(self): return self.__unicode__.encode('utf-8') @@ -206,11 +206,11 @@ def logPluginResults(results): all_succeed = all([success for success, result in results]) if not all_succeed: - log.error(_(u"Plugins initialisation error")) + log.error(_("Plugins initialisation error")) for idx, (success, result) in enumerate(results): if not success: log.error( - u"error (plugin %(name)s): %(failure)s" + "error (plugin %(name)s): %(failure)s" % { "name": plugin_conn_cb[idx][0]._info["import_name"], "failure": result, @@ -226,11 +226,11 @@ self._connected_d = None def _disconnectionEb(self, failure_): - log.error(_(u"Error while disconnecting: {}".format(failure_))) + log.error(_("Error while disconnecting: {}".format(failure_))) def _authd(self, xmlstream): super(SatXMPPEntity, self)._authd(xmlstream) - log.debug(_(u"{profile} identified").format(profile=self.profile)) + log.debug(_("{profile} identified").format(profile=self.profile)) self.streamInitialized() def _finish_connection(self, __): @@ -238,7 +238,7 @@ def streamInitialized(self): """Called after _authd""" - log.debug(_(u"XML stream is initialized")) + log.debug(_("XML stream is initialized")) if not self.host_app.trigger.point("xml_init", self): return self.postStreamInit() @@ -246,7 +246,7 @@ def postStreamInit(self): """Workflow after stream initalisation.""" log.info( - _(u"********** [{profile}] CONNECTED **********").format(profile=self.profile) + _("********** [{profile}] CONNECTED **********").format(profile=self.profile) ) # the following Deferred is used to know when we are connected @@ -273,7 +273,7 @@ def initializationFailed(self, reason): log.error( _( - u"ERROR: XMPP connection failed for profile '%(profile)s': %(reason)s" + "ERROR: 
XMPP connection failed for profile '%(profile)s': %(reason)s" % {"profile": self.profile, "reason": reason} ) ) @@ -306,17 +306,17 @@ if reason is not None and not isinstance(reason.value, internet_error.ConnectionDone): try: - reason_str = unicode(reason.value) + reason_str = str(reason.value) except Exception: # FIXME: workaround for Android were p4a strips docstrings # while Twisted use docstring in __str__ # TODO: create a ticket upstream, Twisted should work when optimization # is used - reason_str = unicode(reason.value.__class__) - log.warning(u"Connection {term_type}: {reason}".format( + reason_str = str(reason.value.__class__) + log.warning("Connection {term_type}: {reason}".format( term_type = term_type, reason=reason_str)) - if not self.host_app.trigger.point(u"connection_" + term_type, connector, reason): + if not self.host_app.trigger.point("connection_" + term_type, connector, reason): return return cb(connector, reason) @@ -327,7 +327,7 @@ Retrying is disabled too, as it makes no sense to try without network, and it may use resources (notably battery on mobiles). """ - log.info(_(u"stopping connection because of network disabled")) + log.info(_("stopping connection because of network disabled")) self.factory.continueTrying = 0 self._network_disabled = True if self.xmlstream is not None: @@ -344,13 +344,13 @@ except AttributeError: # connection has not been stopped by networkDisabled # we don't have to restart it - log.debug(u"no connection to restart") + log.debug("no connection to restart") return else: del self._network_disabled if not network_disabled: - raise exceptions.InternalError(u"network_disabled should be True") - log.info(_(u"network is available, trying to connect")) + raise exceptions.InternalError("network_disabled should be True") + log.info(_("network is available, trying to connect")) # we want to be sure to start fresh self.factory.resetDelay() # we have a saved connector, meaning the connection has been stopped previously @@ -378,23 +378,23 @@ self.profile ) # and we remove references to this client log.info( - _(u"********** [{profile}] DISCONNECTED **********").format( + _("********** [{profile}] DISCONNECTED **********").format( profile=self.profile ) ) if not self.conn_deferred.called: if reason is None: - err = error.StreamError(u"Server unexpectedly closed the connection") + err = error.StreamError("Server unexpectedly closed the connection") else: err = reason try: if err.value.args[0][0][2] == "certificate verify failed": err = exceptions.InvalidCertificate( - _(u"Your server certificate is not valid " - u"(its identity can't be checked).\n\n" - u"This should never happen and may indicate that " - u"somebody is trying to spy on you.\n" - u"Please contact your server administrator.")) + _("Your server certificate is not valid " + "(its identity can't be checked).\n\n" + "This should never happen and may indicate that " + "somebody is trying to spy on you.\n" + "Please contact your server administrator.")) self.factory.stopTrying() try: # with invalid certificate, we should not retry to connect @@ -434,7 +434,7 @@ def entityDisconnect(self): if not self.host_app.trigger.point("disconnecting", self): return - log.info(_(u"Disconnecting...")) + log.info(_("Disconnecting...")) self.stopService() if self._connected_d is not None: return self._connected_d @@ -443,7 +443,7 @@ ## sending ## - def IQ(self, type_=u"set", timeout=60): + def IQ(self, type_="set", timeout=60): """shortcut to create an IQ element managing deferred @param type_(unicode): IQ type 
('set' or 'get') @@ -486,11 +486,11 @@ if data["uid"]: # key must be present but can be set to '' # by a plugin to avoid id on purpose message_elt["id"] = data["uid"] - for lang, subject in data["subject"].iteritems(): + for lang, subject in data["subject"].items(): subject_elt = message_elt.addElement("subject", content=subject) if lang: subject_elt[(C.NS_XML, "lang")] = lang - for lang, message in data["message"].iteritems(): + for lang, message in data["message"].items(): body_elt = message_elt.addElement("body", content=message) if lang: body_elt[(C.NS_XML, "lang")] = lang @@ -499,7 +499,7 @@ except KeyError: if "thread_parent" in data["extra"]: raise exceptions.InternalError( - u"thread_parent found while there is not associated thread" + "thread_parent found while there is not associated thread" ) else: thread_elt = message_elt.addElement("thread", content=thread) @@ -546,7 +546,7 @@ data = { # dict is similar to the one used in client.onMessage "from": self.jid, "to": to_jid, - "uid": uid or unicode(uuid.uuid4()), + "uid": uid or str(uuid.uuid4()), "message": message, "subject": subject, "type": mess_type, @@ -599,15 +599,15 @@ ): return defer.succeed(None) - log.debug(_(u"Sending message (type {type}, to {to})") + log.debug(_("Sending message (type {type}, to {to})") .format(type=data["type"], to=to_jid.full())) pre_xml_treatments.addCallback(lambda __: self.generateMessageXML(data)) pre_xml_treatments.chainDeferred(post_xml_treatments) post_xml_treatments.addCallback(self.sendMessageData) if send_only: - log.debug(_(u"Triggers, storage and echo have been inhibited by the " - u"'send_only' parameter")) + log.debug(_("Triggers, storage and echo have been inhibited by the " + "'send_only' parameter")) else: self.addPostXmlCallbacks(post_xml_treatments) post_xml_treatments.addErrback(self._cancelErrorTrap) @@ -625,22 +625,22 @@ @param data: message data dictionnary @param client: profile's client """ - if data[u"type"] != C.MESS_TYPE_GROUPCHAT: + if data["type"] != C.MESS_TYPE_GROUPCHAT: # we don't add groupchat message to history, as we get them back # and they will be added then - if data[u"message"] or data[u"subject"]: # we need a message to store + if data["message"] or data["subject"]: # we need a message to store self.host_app.memory.addToHistory(self, data) else: log.warning( - u"No message found" + "No message found" ) # empty body should be managed by plugins before this point return data def messageGetBridgeArgs(self, data): """Generate args to use with bridge from data dict""" - return (data[u"uid"], data[u"timestamp"], data[u"from"].full(), - data[u"to"].full(), data[u"message"], data[u"subject"], - data[u"type"], data[u"extra"]) + return (data["uid"], data["timestamp"], data["from"].full(), + data["to"].full(), data["message"], data["subject"], + data["type"], data["extra"]) def messageSendToBridge(self, data): @@ -649,10 +649,10 @@ @param data: message data dictionnary @param client: profile's client """ - if data[u"type"] != C.MESS_TYPE_GROUPCHAT: + if data["type"] != C.MESS_TYPE_GROUPCHAT: # we don't send groupchat message to bridge, as we get them back # and they will be added the - if (data[u"message"] or data[u"subject"]): # we need a message to send + if (data["message"] or data["subject"]): # we need a message to send # something # We send back the message, so all frontends are aware of it @@ -661,12 +661,12 @@ profile=self.profile ) else: - log.warning(_(u"No message found")) + log.warning(_("No message found")) return data +@implementer(iwokkel.IDisco) class 
SatXMPPClient(SatXMPPEntity, wokkel_client.XMPPClient): - implements(iwokkel.IDisco) trigger_suffix = "" is_component = False @@ -681,34 +681,34 @@ # with a web frontend, # etc., we should implement a way to dynamically update identities through the # bridge - self.identities = [disco.DiscoIdentity(u"client", u"pc", C.APP_NAME)] + self.identities = [disco.DiscoIdentity("client", "pc", C.APP_NAME)] if sys.platform == "android": # FIXME: temporary hack as SRV is not working on android # TODO: remove this hack and fix SRV - log.info(u"FIXME: Android hack, ignoring SRV") + log.info("FIXME: Android hack, ignoring SRV") if host is None: host = user_jid.host # for now we consider Android devices to be always phones - self.identities = [disco.DiscoIdentity(u"client", u"phone", C.APP_NAME)] + self.identities = [disco.DiscoIdentity("client", "phone", C.APP_NAME)] hosts_map = host_app.memory.getConfig(None, "hosts_dict", {}) if host is None and user_jid.host in hosts_map: host_data = hosts_map[user_jid.host] - if isinstance(host_data, basestring): + if isinstance(host_data, str): host = host_data elif isinstance(host_data, dict): - if u"host" in host_data: - host = host_data[u"host"] - if u"port" in host_data: - port = host_data[u"port"] + if "host" in host_data: + host = host_data["host"] + if "port" in host_data: + port = host_data["port"] else: log.warning( - _(u"invalid data used for host: {data}").format(data=host_data) + _("invalid data used for host: {data}").format(data=host_data) ) host_data = None if host_data is not None: log.info( - u"using {host}:{port} for host {host_ori} as requested in config" + "using {host}:{port} for host {host_ori} as requested in config" .format(host_ori=user_jid.host, host=host, port=port) ) @@ -717,22 +717,22 @@ wokkel_client.XMPPClient.__init__( self, user_jid, password, host or None, port or C.XMPP_C2S_PORT, - check_certificate = self.check_certificate + # check_certificate = self.check_certificate # FIXME: currently disabled with Python 3 port ) SatXMPPEntity.__init__(self, host_app, profile, max_retries) if not self.check_certificate: - msg = (_(u"Certificate validation is deactivated, this is unsecure and " - u"somebody may be spying on you. If you have no good reason to disable " - u"certificate validation, please activate \"Check certificate\" in your " - u"settings in \"Connection\" tab.")) - xml_tools.quickNote(host_app, self, msg, _(u"Security notice"), + msg = (_("Certificate validation is deactivated, this is unsecure and " + "somebody may be spying on you. 
If you have no good reason to disable " + "certificate validation, please activate \"Check certificate\" in your " + "settings in \"Connection\" tab.")) + xml_tools.quickNote(host_app, self, msg, _("Security notice"), level = C.XMLUI_DATA_LVL_WARNING) def _getPluginsList(self): - for p in self.host_app.plugins.itervalues(): - if C.PLUG_MODE_CLIENT in p._info[u"modes"]: + for p in self.host_app.plugins.values(): + if C.PLUG_MODE_CLIENT in p._info["modes"]: yield p def _createSubProtocols(self): @@ -795,7 +795,7 @@ # This trigger point can't cancel the method yield self.host_app.trigger.asyncPoint("sendMessageData", self, mess_data, triggers_no_cancel=True) - self.send(mess_data[u"xml"]) + self.send(mess_data["xml"]) defer.returnValue(mess_data) def feedback(self, to_jid, message, extra=None): @@ -811,11 +811,11 @@ if extra is None: extra = {} self.host_app.bridge.messageNew( - uid=unicode(uuid.uuid4()), + uid=str(uuid.uuid4()), timestamp=time.time(), from_jid=self.jid.full(), to_jid=to_jid.full(), - message={u"": message}, + message={"": message}, subject={}, mess_type=C.MESS_TYPE_INFO, extra=extra, @@ -827,6 +827,7 @@ d.addCallback(lambda __: super(SatXMPPClient, self)._finish_connection(__)) +@implementer(iwokkel.IDisco) class SatXMPPComponent(SatXMPPEntity, component.Component): """XMPP component @@ -835,7 +836,6 @@ Component need to instantiate MessageProtocol itself """ - implements(iwokkel.IDisco) trigger_suffix = ( "Component" ) # used for to distinguish some trigger points set in SatXMPPEntity @@ -857,19 +857,19 @@ self.entry_plugin = host_app.plugins[entry_point] except KeyError: raise exceptions.NotFound( - _(u"The requested entry point ({entry_point}) is not available").format( + _("The requested entry point ({entry_point}) is not available").format( entry_point=entry_point ) ) - self.identities = [disco.DiscoIdentity(u"component", u"generic", C.APP_NAME)] + self.identities = [disco.DiscoIdentity("component", "generic", C.APP_NAME)] # jid is set automatically on bind by Twisted for Client, but not for Component self.jid = component_jid if host is None: try: - host = component_jid.host.split(u".", 1)[1] + host = component_jid.host.split(".", 1)[1] except IndexError: - raise ValueError(u"Can't guess host from jid, please specify a host") + raise ValueError("Can't guess host from jid, please specify a host") # XXX: component.Component expect unicode jid, while Client expect jid.JID. 
# this is not consistent, so we use jid.JID for SatXMPP* component.Component.__init__(self, host, port, component_jid.full(), password) @@ -890,20 +890,20 @@ @raise KeyError: one plugin should be present in self.host_app.plugins but it is not """ - if C.PLUG_MODE_COMPONENT not in current._info[u"modes"]: + if C.PLUG_MODE_COMPONENT not in current._info["modes"]: if not required: return else: log.error( _( - u"Plugin {current_name} is needed for {entry_name}, " - u"but it doesn't handle component mode" + "Plugin {current_name} is needed for {entry_name}, " + "but it doesn't handle component mode" ).format( - current_name=current._info[u"import_name"], - entry_name=self.entry_plugin._info[u"import_name"], + current_name=current._info["import_name"], + entry_name=self.entry_plugin._info["import_name"], ) ) - raise exceptions.InternalError(_(u"invalid plugin mode")) + raise exceptions.InternalError(_("invalid plugin mode")) for import_name in current._info.get(C.PI_DEPENDENCIES, []): # plugins are already loaded as dependencies @@ -960,9 +960,9 @@ if None, mapping will not be done @return(dict): message data """ - if message_elt.name != u"message": + if message_elt.name != "message": log.warning(_( - u"parseMessage used with a non <message/> stanza, ignoring: {xml}" + "parseMessage used with a non <message/> stanza, ignoring: {xml}" .format(xml=message_elt.toXml()))) return {} @@ -974,31 +974,31 @@ c.uri = C.NS_CLIENT elif message_elt.uri != C.NS_CLIENT: log.warning(_( - u"received <message> with a wrong namespace: {xml}" + "received <message> with a wrong namespace: {xml}" .format(xml=message_elt.toXml()))) client = self.parent - if not message_elt.hasAttribute(u'to'): + if not message_elt.hasAttribute('to'): message_elt['to'] = client.jid.full() message = {} subject = {} extra = {} data = { - u"from": jid.JID(message_elt["from"]), - u"to": jid.JID(message_elt["to"]), - u"uid": message_elt.getAttribute( - u"uid", unicode(uuid.uuid4()) + "from": jid.JID(message_elt["from"]), + "to": jid.JID(message_elt["to"]), + "uid": message_elt.getAttribute( + "uid", str(uuid.uuid4()) ), # XXX: uid is not a standard attribute but may be added by plugins - u"message": message, - u"subject": subject, - u"type": message_elt.getAttribute(u"type", u"normal"), - u"extra": extra, + "message": message, + "subject": subject, + "type": message_elt.getAttribute("type", "normal"), + "extra": extra, } try: - message_id = data[u"extra"][u"message_id"] = message_elt[u"id"] + message_id = data["extra"]["message_id"] = message_elt["id"] except KeyError: pass else: @@ -1006,11 +1006,11 @@ # message for e in message_elt.elements(C.NS_CLIENT, "body"): - message[e.getAttribute((C.NS_XML, "lang"), "")] = unicode(e) + message[e.getAttribute((C.NS_XML, "lang"), "")] = str(e) # subject for e in message_elt.elements(C.NS_CLIENT, "subject"): - subject[e.getAttribute((C.NS_XML, "lang"), "")] = unicode(e) + subject[e.getAttribute((C.NS_XML, "lang"), "")] = str(e) # delay and timestamp try: @@ -1018,12 +1018,12 @@ except AttributeError: # message_elt._received_timestamp should have been set in onMessage # but if parseMessage is called directly, it can be missing - log.debug(u"missing received timestamp for {message_elt}".format( + log.debug("missing received timestamp for {message_elt}".format( message_elt=message_elt)) received_timestamp = time.time() try: - delay_elt = message_elt.elements(delay.NS_DELAY, "delay").next() + delay_elt = next(message_elt.elements(delay.NS_DELAY, "delay")) except StopIteration: data["timestamp"] = 
received_timestamp else: @@ -1060,7 +1060,7 @@ client = self.parent if not "from" in message_elt.attributes: message_elt["from"] = client.jid.host - log.debug(_(u"got message from: {from_}").format(from_=message_elt["from"])) + log.debug(_("got message from: {from_}").format(from_=message_elt["from"])) # plugin can add their treatments to this deferred post_treat = defer.Deferred() @@ -1077,24 +1077,24 @@ return data def addToHistory(self, data): - if data.pop(u"history", None) == C.HISTORY_SKIP: - log.info(u"history is skipped as requested") - data[u"extra"][u"history"] = C.HISTORY_SKIP + if data.pop("history", None) == C.HISTORY_SKIP: + log.info("history is skipped as requested") + data["extra"]["history"] = C.HISTORY_SKIP else: - if data[u"message"] or data[u"subject"]: # we need a message to store + if data["message"] or data["subject"]: # we need a message to store return self.host.memory.addToHistory(self.parent, data) else: - log.debug(u"not storing empty message to history: {data}" + log.debug("not storing empty message to history: {data}" .format(data=data)) def bridgeSignal(self, __, data): try: - data["extra"]["received_timestamp"] = unicode(data["received_timestamp"]) + data["extra"]["received_timestamp"] = str(data["received_timestamp"]) data["extra"]["delay_sender"] = data["delay_sender"] except KeyError: pass if C.MESS_KEY_ENCRYPTION in data: - data[u"extra"][u"encrypted"] = C.BOOL_TRUE + data["extra"]["encrypted"] = C.BOOL_TRUE if data is not None: if data["message"] or data["subject"] or data["type"] == C.MESS_TYPE_INFO: self.host.bridge.messageNew( @@ -1109,7 +1109,7 @@ profile=self.parent.profile, ) else: - log.debug(u"Discarding bridge signal for empty message: {data}".format( + log.debug("Discarding bridge signal for empty message: {data}".format( data=data)) return data @@ -1131,7 +1131,7 @@ @property def versioning(self): """True if server support roster versioning""" - return (NS_ROSTER_VER, u'ver') in self.parent.xmlstream.features + return (NS_ROSTER_VER, 'ver') in self.parent.xmlstream.features @property def roster_cache(self): @@ -1148,23 +1148,23 @@ item must be already registered in self._jids before this method is called @param item (RosterIem): item added """ - log.debug(u"registering item: {}".format(item.entity.full())) + log.debug("registering item: {}".format(item.entity.full())) if item.entity.resource: log.warning( - u"Received a roster item with a resource, this is not common but not " - u"restricted by RFC 6121, this case may be not well tested." + "Received a roster item with a resource, this is not common but not " + "restricted by RFC 6121, this case may be not well tested." 
) if not item.subscriptionTo: if not item.subscriptionFrom: log.info( - _(u"There's no subscription between you and [{}]!").format( + _("There's no subscription between you and [{}]!").format( item.entity.full() ) ) else: - log.info(_(u"You are not subscribed to [{}]!").format(item.entity.full())) + log.info(_("You are not subscribed to [{}]!").format(item.entity.full())) if not item.subscriptionFrom: - log.info(_(u"[{}] is not subscribed to you!").format(item.entity.full())) + log.info(_("[{}] is not subscribed to you!").format(item.entity.full())) for group in item.groups: self._groups.setdefault(group, set()).add(item.entity) @@ -1178,7 +1178,7 @@ roster_cache = self.roster_cache yield roster_cache.clear() roster_cache[ROSTER_VER_KEY] = version - for roster_jid, roster_item in self._jids.iteritems(): + for roster_jid, roster_item in self._jids.items(): roster_jid_s = roster_jid.full() roster_item_elt = roster_item.toElement().toXml() roster_cache[roster_jid_s] = roster_item_elt @@ -1200,19 +1200,19 @@ def requestRoster(self): """Ask the server for Roster list """ if self.versioning: - log.info(_(u"our server support roster versioning, we use it")) + log.info(_("our server support roster versioning, we use it")) roster_cache = self.roster_cache yield roster_cache.load() try: version = roster_cache[ROSTER_VER_KEY] except KeyError: - log.info(_(u"no roster in cache, we start fresh")) + log.info(_("no roster in cache, we start fresh")) # u"" means we use versioning without valid roster in cache - version = u"" + version = "" else: - log.info(_(u"We have roster v{version} in cache").format(version=version)) + log.info(_("We have roster v{version} in cache").format(version=version)) # we deserialise cached roster to our local cache - for roster_jid_s, roster_item_elt_s in roster_cache.iteritems(): + for roster_jid_s, roster_item_elt_s in roster_cache.items(): if roster_jid_s == ROSTER_VER_KEY: continue roster_jid = jid.JID(roster_jid_s) @@ -1221,26 +1221,26 @@ self._jids[roster_jid] = roster_item self._registerItem(roster_item) else: - log.warning(_(u"our server doesn't support roster versioning")) + log.warning(_("our server doesn't support roster versioning")) version = None log.debug("requesting roster") roster = yield self.getRoster(version=version) if roster is None: - log.debug(u"empty roster result received, we'll get roster item with roster " - u"pushes") + log.debug("empty roster result received, we'll get roster item with roster " + "pushes") else: # a full roster is received self._groups.clear() self._jids = roster - for item in roster.itervalues(): + for item in roster.values(): if not item.subscriptionTo and not item.subscriptionFrom and not item.ask: # XXX: current behaviour: we don't want contact in our roster list # if there is no presence subscription # may change in the future log.info( - u"Removing contact {} from roster because there is no presence " - u"subscription".format( + "Removing contact {} from roster because there is no presence " + "subscription".format( item.jid ) ) @@ -1267,9 +1267,9 @@ @return: dictionary of attributes """ item_attr = { - "to": unicode(item.subscriptionTo), - "from": unicode(item.subscriptionFrom), - "ask": unicode(item.ask), + "to": str(item.subscriptionTo), + "from": str(item.subscriptionFrom), + "ask": str(item.ask), } if item.name: item_attr["name"] = item.name @@ -1278,7 +1278,7 @@ def setReceived(self, request): item = request.item entity = item.entity - log.info(_(u"adding {entity} to roster").format(entity=entity.full())) + 
log.info(_("adding {entity} to roster").format(entity=entity.full())) if request.version is not None: # we update the cache in storage roster_cache = self.roster_cache @@ -1302,7 +1302,7 @@ def removeReceived(self, request): entity = request.item.entity - log.info(_(u"removing {entity} from roster").format(entity=entity.full())) + log.info(_("removing {entity} from roster").format(entity=entity.full())) if request.version is not None: # we update the cache in storage roster_cache = self.roster_cache @@ -1319,7 +1319,7 @@ item = self._jids.pop(entity) except KeyError: log.error( - u"Received a roster remove event for an item not in cache ({})".format( + "Received a roster remove event for an item not in cache ({})".format( entity ) ) @@ -1332,8 +1332,8 @@ del self._groups[group] except KeyError: log.warning( - u"there is no cache for the group [{group}] of the removed roster " - u"item [{jid_}]".format(group=group, jid=entity) + "there is no cache for the group [{group}] of the removed roster " + "item [{jid_}]".format(group=group, jid=entity) ) # then we send the bridge signal @@ -1341,7 +1341,7 @@ def getGroups(self): """Return a list of groups""" - return self._groups.keys() + return list(self._groups.keys()) def getItem(self, entity_jid): """Return RosterItem for a given jid @@ -1354,7 +1354,7 @@ def getJids(self): """Return all jids of the roster""" - return self._jids.keys() + return list(self._jids.keys()) def isJidInRoster(self, entity_jid): """Return True if jid is in roster""" @@ -1370,7 +1370,7 @@ def getItems(self): """Return all items of the roster""" - return self._jids.values() + return list(self._jids.values()) def getJidsFromGroup(self, group): try: @@ -1398,7 +1398,7 @@ jids.update(self.getJidsFromGroup(group)) return jids else: - raise ValueError(u"Unexpected type_ {}".format(type_)) + raise ValueError("Unexpected type_ {}".format(type_)) def getNick(self, entity_jid): """Return a nick name for an entity @@ -1447,7 +1447,7 @@ def unavailableReceived(self, entity, statuses=None): log.debug( - _(u"presence update for [%(entity)s] (unavailable, statuses=%(statuses)s)") + _("presence update for [%(entity)s] (unavailable, statuses=%(statuses)s)") % {"entity": entity, C.PRESENCE_STATUSES: statuses} ) @@ -1539,16 +1539,16 @@ self.host.memory.delWaitingSub(entity.userhost(), self.parent.profile) def subscribedReceived(self, entity): - log.debug(_(u"subscription approved for [%s]") % entity.userhost()) + log.debug(_("subscription approved for [%s]") % entity.userhost()) self.host.bridge.subscribe("subscribed", entity.userhost(), self.parent.profile) def unsubscribedReceived(self, entity): - log.debug(_(u"unsubscription confirmed for [%s]") % entity.userhost()) + log.debug(_("unsubscription confirmed for [%s]") % entity.userhost()) self.host.bridge.subscribe("unsubscribed", entity.userhost(), self.parent.profile) @defer.inlineCallbacks def subscribeReceived(self, entity): - log.debug(_(u"subscription request from [%s]") % entity.userhost()) + log.debug(_("subscription request from [%s]") % entity.userhost()) yield self.parent.roster.got_roster item = self.parent.roster.getItem(entity) if item and item.subscriptionTo: @@ -1566,7 +1566,7 @@ @defer.inlineCallbacks def unsubscribeReceived(self, entity): - log.debug(_(u"unsubscription asked for [%s]") % entity.userhost()) + log.debug(_("unsubscription asked for [%s]") % entity.userhost()) yield self.parent.roster.got_roster item = self.parent.roster.getItem(entity) if item and item.subscriptionFrom: # we automatically remove contact @@ 
-1575,8 +1575,8 @@ self.host.bridge.subscribe("unsubscribe", entity.userhost(), self.parent.profile) +@implementer(iwokkel.IDisco) class SatDiscoProtocol(disco.DiscoClientProtocol): - implements(iwokkel.IDisco) def __init__(self, host): disco.DiscoClientProtocol.__init__(self) @@ -1599,7 +1599,7 @@ def iqFallback(self, iq): if iq.handled is True: return - log.debug(u"iqFallback: xml = [%s]" % (iq.toXml())) + log.debug("iqFallback: xml = [%s]" % (iq.toXml())) generic.FallbackHandler.iqFallback(self, iq) @@ -1615,9 +1615,9 @@ return generic.VersionHandler.getDiscoInfo(self, requestor, target, None) +@implementer(iwokkel.IDisco) class SatIdentityHandler(XMPPHandler): """Manage disco Identity of SàT.""" - implements(iwokkel.IDisco) # TODO: dynamic identity update (see docstring). Note that a XMPP entity can have # several identities
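The interface declarations above switch from zope's class-body implements(...) call, which does not work under Python 3, to the @implementer class decorator. A minimal sketch of the decorator form, assuming zope.interface is installed and using a stand-in interface rather than wokkel's iwokkel.IDisco:

    from zope.interface import Interface, implementer

    class IDisco(Interface):        # stand-in for iwokkel.IDisco
        pass

    @implementer(IDisco)
    class SatDiscoLikeProtocol(object):
        pass

    print(IDisco.implementedBy(SatDiscoLikeProtocol))   # True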
--- a/sat/memory/cache.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/cache.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -23,7 +23,7 @@ from sat.tools.common import regex from sat.core import exceptions from sat.core.constants import Const as C -import cPickle as pickle +import pickle as pickle import mimetypes import os.path import time @@ -42,9 +42,9 @@ self.profile = profile path_elts = [host.memory.getConfig("", "local_dir"), C.CACHE_DIR] if profile: - path_elts.extend([u"profiles", regex.pathEscape(profile)]) + path_elts.extend(["profiles", regex.pathEscape(profile)]) else: - path_elts.append(u"common") + path_elts.append("common") self.cache_dir = os.path.join(*path_elts) if not os.path.exists(self.cache_dir): @@ -55,11 +55,11 @@ @param filename(unicode): cached file name (cache data or actual file) """ - if not filename or u"/" in filename: + if not filename or "/" in filename: log.error( - u"invalid char found in file name, hack attempt? name:{}".format(filename) + "invalid char found in file name, hack attempt? name:{}".format(filename) ) - raise exceptions.DataError(u"Invalid char found") + raise exceptions.DataError("Invalid char found") return os.path.join(self.cache_dir, filename) def getMetadata(self, uid): @@ -73,7 +73,7 @@ uid = uid.strip() if not uid: - raise exceptions.InternalError(u"uid must not be empty") + raise exceptions.InternalError("uid must not be empty") cache_url = self.getPath(uid) if not os.path.exists(cache_url): return None @@ -82,20 +82,20 @@ with open(cache_url, "rb") as f: cache_data = pickle.load(f) except IOError: - log.warning(u"can't read cache at {}".format(cache_url)) + log.warning("can't read cache at {}".format(cache_url)) return None except pickle.UnpicklingError: - log.warning(u"invalid cache found at {}".format(cache_url)) + log.warning("invalid cache found at {}".format(cache_url)) return None try: eol = cache_data["eol"] except KeyError: - log.warning(u"no End Of Life found for cached file {}".format(uid)) + log.warning("no End Of Life found for cached file {}".format(uid)) eol = 0 if eol < time.time(): log.debug( - u"removing expired cache (expired for {}s)".format(time.time() - eol) + "removing expired cache (expired for {}s)".format(time.time() - eol) ) return None @@ -135,11 +135,11 @@ ext = mimetypes.guess_extension(mime_type, strict=False) if ext is None: log.warning( - u"can't find extension for MIME type {}".format(mime_type) + "can't find extension for MIME type {}".format(mime_type) ) ext = DEFAULT_EXT - elif ext == u".jpe": - ext = u".jpg" + elif ext == ".jpe": + ext = ".jpg" else: ext = DEFAULT_EXT mime_type = None @@ -147,10 +147,10 @@ if max_age is None: max_age = C.DEFAULT_MAX_AGE cache_data = { - u"source": source, - u"filename": filename, - u"eol": int(time.time()) + max_age, - u"mime_type": mime_type, + "source": source, + "filename": filename, + "eol": int(time.time()) + max_age, + "mime_type": mime_type, } file_path = self.getPath(filename)
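cache.py keeps pickling its metadata, but the standard pickle module replaces cPickle and the file must be opened in binary mode. A standalone sketch of the cache-entry round trip with the expiry check, using an illustrative path and values (1209600 is the DEFAULT_MAX_AGE defined earlier):

    import pickle
    import time

    cache_data = {
        "source": "some_plugin",          # illustrative values
        "filename": "abcd.jpg",
        "eol": int(time.time()) + 1209600,
        "mime_type": "image/jpeg",
    }

    with open("/tmp/cache_uid", "wb") as f:   # binary mode is mandatory for pickle
        pickle.dump(cache_data, f)

    with open("/tmp/cache_uid", "rb") as f:
        loaded = pickle.load(f)

    if loaded["eol"] < time.time():
        print("cache expired")
    else:
        print("cache valid, MIME type:", loaded["mime_type"])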
--- a/sat/memory/crypto.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/crypto.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -92,7 +92,7 @@ # a decrypted empty value and a decryption failure... both return # the empty value. Fortunately, we detect empty passwords beforehand # thanks to the "leave_empty" parameter which is used by default. - d.addCallback(lambda text: text.decode("utf-8") if text else None) + d.addCallback(lambda text: text if text else None) return d @classmethod @@ -114,11 +114,12 @@ def pad(self, s): """Method from http://stackoverflow.com/a/12525165""" bs = BlockCipher.BLOCK_SIZE - return s + (bs - len(s) % bs) * chr(bs - len(s) % bs) + return s + (bs - len(s) % bs) * (chr(bs - len(s) % bs)).encode('utf-8') @classmethod def unpad(self, s): """Method from http://stackoverflow.com/a/12525165""" + s = s.decode('utf-8') return s[0 : -ord(s[-1])] @@ -136,7 +137,7 @@ @return: Deferred: base-64 encoded str """ if leave_empty and password == "": - return succeed(password) + return succeed(b"") salt = ( b64decode(salt)[: PasswordHasher.SALT_LEN] if salt @@ -147,6 +148,11 @@ return d @classmethod + def compare_hash(cls, hashed_attempt, hashed): + assert isinstance(hashed, bytes) + return hashed_attempt == hashed + + @classmethod def verify(cls, attempt, hashed): """Verify a password attempt. @@ -156,5 +162,5 @@ """ leave_empty = hashed == "" d = PasswordHasher.hash(attempt, hashed, leave_empty) - d.addCallback(lambda hashed_attempt: hashed_attempt == hashed) + d.addCallback(cls.compare_hash, hashed=hashed.encode('utf-8')) return d
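The pad()/unpad() change above adapts the PKCS#7-style padding to bytes, as required once the cipher layer works on bytes in Python 3. A standalone sketch of the same two helpers, with BLOCK_SIZE hard-coded for the example:

    BLOCK_SIZE = 16

    def pad(s):
        # s is bytes; the pad byte also encodes the pad length
        n = BLOCK_SIZE - len(s) % BLOCK_SIZE
        return s + n * chr(n).encode("utf-8")

    def unpad(s):
        # decode first, then strip as many chars as the last one indicates
        s = s.decode("utf-8")
        return s[0:-ord(s[-1])]

    padded = pad("secret".encode("utf-8"))
    print(len(padded))     # 16: padded up to the block size
    print(unpad(padded))   # secret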
--- a/sat/memory/disco.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/disco.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -50,11 +50,11 @@ assert isinstance(identity, disco.DiscoIdentity) self.category = identity.category.encode("utf-8") self.idType = identity.type.encode("utf-8") - self.name = identity.name.encode("utf-8") if identity.name else "" - self.lang = lang.encode("utf-8") if lang is not None else "" + self.name = identity.name.encode("utf-8") if identity.name else b"" + self.lang = lang.encode("utf-8") if lang is not None else b"" - def __str__(self): - return "%s/%s/%s/%s" % (self.category, self.idType, self.lang, self.name) + def __bytes__(self): + return b"%s/%s/%s/%s" % (self.category, self.idType, self.lang, self.name) class HashManager(object): @@ -74,7 +74,7 @@ def __setitem__(self, hash_, disco_info): if hash_ in self.hashes: - log.debug(u"ignoring hash set: it is already known") + log.debug("ignoring hash set: it is already known") return self.hashes[hash_] = disco_info self.persistent[hash_] = disco_info.toElement().toXml() @@ -84,19 +84,19 @@ def load(self): def fillHashes(hashes): - for hash_, xml in hashes.iteritems(): + for hash_, xml in hashes.items(): element = xml_tools.ElementParser()(xml) disco_info = disco.DiscoInfo.fromElement(element) if not disco_info.features and not disco_info.identities: log.warning( _( - u"no feature/identity found in disco element (hash: {cap_hash}), ignoring: {xml}" + "no feature/identity found in disco element (hash: {cap_hash}), ignoring: {xml}" ).format(cap_hash=hash_, xml=xml) ) else: self.hashes[hash_] = disco_info - log.info(u"Disco hashes loaded") + log.info("Disco hashes loaded") d = self.persistent.load() d.addCallback(fillHashes) @@ -116,7 +116,7 @@ return self.hashes.load() @defer.inlineCallbacks - def hasFeature(self, client, feature, jid_=None, node=u""): + def hasFeature(self, client, feature, jid_=None, node=""): """Tell if an entity has the required feature @param feature: feature namespace @@ -128,7 +128,7 @@ defer.returnValue(feature in disco_infos.features) @defer.inlineCallbacks - def checkFeature(self, client, feature, jid_=None, node=u""): + def checkFeature(self, client, feature, jid_=None, node=""): """Like hasFeature, but raise an exception is feature is not Found @param feature: feature namespace @@ -142,7 +142,7 @@ raise failure.Failure(exceptions.FeatureNotFound) @defer.inlineCallbacks - def checkFeatures(self, client, features, jid_=None, identity=None, node=u""): + def checkFeatures(self, client, features, jid_=None, identity=None, node=""): """Like checkFeature, but check several features at once, and check also identity @param features(iterable[unicode]): features to check @@ -159,7 +159,7 @@ if identity is not None and identity not in disco_infos.identities: raise failure.Failure(exceptions.FeatureNotFound()) - def getInfos(self, client, jid_=None, node=u"", use_cache=True): + def getInfos(self, client, jid_=None, node="", use_cache=True): """get disco infos from jid_, filling capability hash if needed @param jid_: jid of the target, or None for profile's server @@ -188,16 +188,16 @@ def infosEb(fail): if fail.check(defer.CancelledError): - reason = u"request time-out" + reason = "request time-out" fail = failure.Failure(exceptions.TimeOutError(fail.message)) else: try: - reason = unicode(fail.value) + reason = str(fail.value) except AttributeError: - reason = unicode(fail) + reason = str(fail) 
log.warning( - u"Error while requesting disco infos from {jid}: {reason}".format( + "Error while requesting disco infos from {jid}: {reason}".format( jid=jid_.full(), reason=reason ) ) @@ -218,7 +218,7 @@ return defer.succeed(disco_infos) @defer.inlineCallbacks - def getItems(self, client, jid_=None, node=u"", use_cache=True): + def getItems(self, client, jid_=None, node="", use_cache=True): """get disco items from jid_, cache them for our own server @param jid_(jid.JID): jid of the target, or None for profile's server @@ -236,12 +236,12 @@ items = self.host.memory.getEntityData( jid_, ["DISCO_ITEMS"], client.profile )["DISCO_ITEMS"] - log.debug(u"[%s] disco items are in cache" % jid_.full()) + log.debug("[%s] disco items are in cache" % jid_.full()) if not use_cache: # we ignore cache, so we pretend we haven't found it raise KeyError except (KeyError, exceptions.UnknownEntityError): - log.debug(u"Caching [%s] disco items" % jid_.full()) + log.debug("Caching [%s] disco items" % jid_.full()) items = yield client.disco.requestItems(jid_, nodeIdentifier=node) self.host.memory.updateEntityData( jid_, "DISCO_ITEMS", items, profile_key=client.profile @@ -251,7 +251,7 @@ items = yield client.disco.requestItems(jid_, nodeIdentifier=node) except StanzaError as e: log.warning( - u"Error while requesting items for {jid}: {reason}".format( + "Error while requesting items for {jid}: {reason}".format( jid=jid_.full(), reason=e.condition ) ) @@ -262,7 +262,7 @@ def _infosEb(self, failure_, entity_jid): failure_.trap(StanzaError) log.warning( - _(u"Error while requesting [%(jid)s]: %(error)s") + _("Error while requesting [%(jid)s]: %(error)s") % {"jid": entity_jid.full(), "error": failure_.getErrorMessage()} ) @@ -326,7 +326,7 @@ def infosCb(infos, entity): if entity is None: - log.warning(_(u"received an item without jid")) + log.warning(_("received an item without jid")) return if identity is not None and identity not in infos.identities: return @@ -367,8 +367,8 @@ byte_identities.sort(key=lambda i: i.idType) byte_identities.sort(key=lambda i: i.category) for identity in byte_identities: - s.append(str(identity)) - s.append("<") + s.append(bytes(identity)) + s.append(b"<") # features byte_features = [ service.encode("utf-8") @@ -378,32 +378,32 @@ byte_features.sort() # XXX: the default sort has the same behaviour as the requested RFC 4790 i;octet sort for feature in byte_features: s.append(feature) - s.append("<") + s.append(b"<") # extensions - ext = services.extensions.values() + ext = list(services.extensions.values()) ext.sort(key=lambda f: f.formNamespace.encode('utf-8')) for extension in ext: s.append(extension.formNamespace.encode('utf-8')) - s.append("<") + s.append(b"<") fields = extension.fieldList fields.sort(key=lambda f: f.var.encode('utf-8')) for field in fields: s.append(field.var.encode('utf-8')) - s.append("<") + s.append(b"<") values = [v.encode('utf-8') for v in field.values] values.sort() for value in values: s.append(value) - s.append("<") + s.append(b"<") - cap_hash = b64encode(sha1("".join(s)).digest()) - log.debug(_(u"Capability hash generated: [{cap_hash}]").format(cap_hash=cap_hash)) + cap_hash = b64encode(sha1(b"".join(s)).digest()).decode('utf-8') + log.debug(_("Capability hash generated: [{cap_hash}]").format(cap_hash=cap_hash)) return cap_hash @defer.inlineCallbacks def _discoInfos( - self, entity_jid_s, node=u"", use_cache=True, profile_key=C.PROF_KEY_NONE + self, entity_jid_s, node="", use_cache=True, profile_key=C.PROF_KEY_NONE ): """ Discovery method for the bridge 
@param entity_jid_s: entity we want to discover @@ -417,7 +417,7 @@ disco_infos = yield self.getInfos(client, entity, node, use_cache) extensions = {} # FIXME: should extensions be serialised using tools.common.data_format? - for form_type, form in disco_infos.extensions.items(): + for form_type, form in list(disco_infos.extensions.items()): fields = [] for field in form.fieldList: data = {"type": field.fieldType} @@ -427,7 +427,7 @@ data[attr] = value values = [field.value] if field.value is not None else field.values - if field.fieldType == u"boolean": + if field.fieldType == "boolean": values = [C.boolConst(v) for v in values] fields.append((data, values)) @@ -436,7 +436,7 @@ defer.returnValue(( disco_infos.features, [(cat, type_, name or "") - for (cat, type_), name in disco_infos.identities.items()], + for (cat, type_), name in list(disco_infos.identities.items())], extensions)) def items2tuples(self, disco_items): @@ -447,13 +447,13 @@ """ for item in disco_items: if not item.entity: - log.warning(_(u"invalid item (no jid)")) + log.warning(_("invalid item (no jid)")) continue yield (item.entity.full(), item.nodeIdentifier or "", item.name or "") @defer.inlineCallbacks def _discoItems( - self, entity_jid_s, node=u"", use_cache=True, profile_key=C.PROF_KEY_NONE + self, entity_jid_s, node="", use_cache=True, profile_key=C.PROF_KEY_NONE ): """ Discovery method for the bridge
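The disco.py hunk rebuilds the XEP-0115 capability hash over bytes and decodes the base64 result back to str. A stripped-down sketch of that pattern follows; the real method first sorts identities, features and extension form fields, whereas here the input is simply assumed to be an already-sorted list of str:

    from base64 import b64encode
    from hashlib import sha1

    def cap_hash(sorted_parts):
        # each part is terminated by "<", and hashing is done on the UTF-8 bytes
        joined = b"".join(part.encode("utf-8") + b"<" for part in sorted_parts)
        # b64encode returns bytes in Python 3; decode to get the str used in presence stanzas
        return b64encode(sha1(joined).digest()).decode("utf-8")

    print(cap_hash(["client/pc//Example 0.1", "http://jabber.org/protocol/disco#info"]))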
--- a/sat/memory/encryption.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/encryption.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -91,7 +91,7 @@ directed=directed) cls.plugins.append(plugin) cls.plugins.sort(key=lambda p: p.priority) - log.info(_(u"Encryption plugin registered: {name}").format(name=name)) + log.info(_("Encryption plugin registered: {name}").format(name=name)) @classmethod def getPlugins(cls): @@ -103,7 +103,7 @@ return next(p for p in cls.plugins if p.namespace == namespace) except StopIteration: raise exceptions.NotFound(_( - u"Can't find requested encryption plugin: {namespace}").format( + "Can't find requested encryption plugin: {namespace}").format( namespace=namespace)) @classmethod @@ -123,7 +123,7 @@ if p.name.lower() == name.lower(): return p.namespace raise exceptions.NotFound(_( - u"Can't find a plugin with the name \"{name}\".".format( + "Can't find a plugin with the name \"{name}\".".format( name=name))) def getBridgeData(self, session): @@ -133,12 +133,12 @@ @return (unicode): serialized data for bridge """ if session is None: - return u'' - plugin = session[u'plugin'] + return '' + plugin = session['plugin'] bridge_data = {'name': plugin.name, 'namespace': plugin.namespace} - if u'directed_devices' in session: - bridge_data[u'directed_devices'] = session[u'directed_devices'] + if 'directed_devices' in session: + bridge_data['directed_devices'] = session['directed_devices'] return data_format.serialise(bridge_data) @@ -151,7 +151,7 @@ try: start_encryption = plugin.instance.startEncryption except AttributeError: - log.debug(u"No startEncryption method found for {plugin}".format( + log.debug("No startEncryption method found for {plugin}".format( plugin = plugin.namespace)) return defer.succeed(None) else: @@ -167,7 +167,7 @@ try: stop_encryption = plugin.instance.stopEncryption except AttributeError: - log.debug(u"No stopEncryption method found for {plugin}".format( + log.debug("No stopEncryption method found for {plugin}".format( plugin = plugin.namespace)) return defer.succeed(None) else: @@ -187,8 +187,8 @@ it will be replaced by the new one """ if not self.plugins: - raise exceptions.NotFound(_(u"No encryption plugin is registered, " - u"an encryption session can't be started")) + raise exceptions.NotFound(_("No encryption plugin is registered, " + "an encryption session can't be started")) if namespace is None: plugin = self.plugins[0] @@ -198,10 +198,10 @@ bare_jid = entity.userhostJID() if bare_jid in self._sessions: # we have already an encryption session with this contact - former_plugin = self._sessions[bare_jid][u"plugin"] + former_plugin = self._sessions[bare_jid]["plugin"] if former_plugin.namespace == namespace: - log.info(_(u"Session with {bare_jid} is already encrypted with {name}. " - u"Nothing to do.").format( + log.info(_("Session with {bare_jid} is already encrypted with {name}. " + "Nothing to do.").format( bare_jid=bare_jid, name=former_plugin.name)) return @@ -211,8 +211,8 @@ del self._sessions[bare_jid] yield self._stopEncryption(former_plugin, entity) else: - msg = (_(u"Session with {bare_jid} is already encrypted with {name}. " - u"Please stop encryption session before changing algorithm.") + msg = (_("Session with {bare_jid} is already encrypted with {name}. 
" + "Please stop encryption session before changing algorithm.") .format(bare_jid=bare_jid, name=plugin.name)) log.warning(msg) raise exceptions.ConflictError(msg) @@ -223,34 +223,34 @@ entity.resource = self.host.memory.getMainResource(self.client, entity) if not entity.resource: raise exceptions.NotFound( - _(u"No resource found for {destinee}, can't encrypt with {name}") + _("No resource found for {destinee}, can't encrypt with {name}") .format(destinee=entity.full(), name=plugin.name)) - log.info(_(u"No resource specified to encrypt with {name}, using " - u"{destinee}.").format(destinee=entity.full(), + log.info(_("No resource specified to encrypt with {name}, using " + "{destinee}.").format(destinee=entity.full(), name=plugin.name)) # indicate that we encrypt only for some devices - directed_devices = data[u'directed_devices'] = [entity.resource] + directed_devices = data['directed_devices'] = [entity.resource] elif entity.resource: - raise ValueError(_(u"{name} encryption must be used with bare jids.")) + raise ValueError(_("{name} encryption must be used with bare jids.")) yield self._startEncryption(plugin, entity) self._sessions[entity.userhostJID()] = data - log.info(_(u"Encryption session has been set for {entity_jid} with " - u"{encryption_name}").format( + log.info(_("Encryption session has been set for {entity_jid} with " + "{encryption_name}").format( entity_jid=entity.full(), encryption_name=plugin.name)) self.host.bridge.messageEncryptionStarted( entity.full(), self.getBridgeData(data), self.client.profile) - msg = D_(u"Encryption session started: your messages with {destinee} are " - u"now end to end encrypted using {name} algorithm.").format( + msg = D_("Encryption session started: your messages with {destinee} are " + "now end to end encrypted using {name} algorithm.").format( destinee=entity.full(), name=plugin.name) - directed_devices = data.get(u'directed_devices') + directed_devices = data.get('directed_devices') if directed_devices: - msg += u"\n" + D_(u"Message are encrypted only for {nb_devices} device(s): " - u"{devices_list}.").format( + msg += "\n" + D_("Message are encrypted only for {nb_devices} device(s): " + "{devices_list}.").format( nb_devices=len(directed_devices), - devices_list = u', '.join(directed_devices)) + devices_list = ', '.join(directed_devices)) self.client.feedback(bare_jid, msg) @@ -266,29 +266,29 @@ session = self.getSession(entity.userhostJID()) if not session: raise failure.Failure( - exceptions.NotFound(_(u"There is no encryption session with this " - u"entity."))) + exceptions.NotFound(_("There is no encryption session with this " + "entity."))) plugin = session['plugin'] if namespace is not None and plugin.namespace != namespace: raise exceptions.InternalError(_( - u"The encryption session is not run with the expected plugin: encrypted " - u"with {current_name} and was expecting {expected_name}").format( - current_name=session[u'plugin'].namespace, + "The encryption session is not run with the expected plugin: encrypted " + "with {current_name} and was expecting {expected_name}").format( + current_name=session['plugin'].namespace, expected_name=namespace)) if entity.resource: try: - directed_devices = session[u'directed_devices'] + directed_devices = session['directed_devices'] except KeyError: raise exceptions.NotFound(_( - u"There is a session for the whole entity (i.e. all devices of the " - u"entity), not a directed one. 
Please use bare jid if you want to " - u"stop the whole encryption with this entity.")) + "There is a session for the whole entity (i.e. all devices of the " + "entity), not a directed one. Please use bare jid if you want to " + "stop the whole encryption with this entity.")) try: directed_devices.remove(entity.resource) except ValueError: - raise exceptions.NotFound(_(u"There is no directed session with this " - u"entity.")) + raise exceptions.NotFound(_("There is no directed session with this " + "entity.")) else: if not directed_devices: # if we have no more directed device sessions, @@ -302,7 +302,7 @@ del self._sessions[entity.userhostJID()] yield self._stopEncryption(plugin, entity) - log.info(_(u"encryption session stopped with entity {entity}").format( + log.info(_("encryption session stopped with entity {entity}").format( entity=entity.full())) self.host.bridge.messageEncryptionStopped( entity.full(), @@ -310,9 +310,9 @@ 'namespace': plugin.namespace, }, self.client.profile) - msg = D_(u"Encryption session finished: your messages with {destinee} are " - u"NOT end to end encrypted anymore.\nYour server administrators or " - u"{destinee} server administrators will be able to read them.").format( + msg = D_("Encryption session finished: your messages with {destinee} are " + "NOT end to end encrypted anymore.\nYour server administrators or " + "{destinee} server administrators will be able to read them.").format( destinee=entity.full()) self.client.feedback(entity, msg) @@ -326,7 +326,7 @@ None if there is not encryption for this session with this jid """ if entity.resource: - raise ValueError(u"Full jid given when expecting bare jid") + raise ValueError("Full jid given when expecting bare jid") return self._sessions.get(entity) def getTrustUI(self, entity_jid, namespace=None): @@ -346,7 +346,7 @@ session = self.getSession(entity_jid) if not session: raise exceptions.NotFound( - u"No encryption session currently active for {entity_jid}" + "No encryption session currently active for {entity_jid}" .format(entity_jid=entity_jid.full())) plugin = session['plugin'] else: @@ -355,7 +355,7 @@ get_trust_ui = plugin.instance.getTrustUI except AttributeError: raise NotImplementedError( - u"Encryption plugin doesn't handle trust management UI") + "Encryption plugin doesn't handle trust management UI") else: return defer.maybeDeferred(get_trust_ui, self.client, entity_jid) @@ -364,32 +364,32 @@ @classmethod def _importMenus(cls, host): host.importMenu( - (D_(u"Encryption"), D_(u"unencrypted (plain text)")), + (D_("Encryption"), D_("unencrypted (plain text)")), partial(cls._onMenuUnencrypted, host=host), security_limit=0, - help_string=D_(u"End encrypted session"), + help_string=D_("End encrypted session"), type_=C.MENU_SINGLE, ) for plg in cls.getPlugins(): host.importMenu( - (D_(u"Encryption"), plg.name), + (D_("Encryption"), plg.name), partial(cls._onMenuName, host=host, plg=plg), security_limit=0, - help_string=D_(u"Start {name} session").format(name=plg.name), + help_string=D_("Start {name} session").format(name=plg.name), type_=C.MENU_SINGLE, ) host.importMenu( - (D_(u"Encryption"), D_(u"⛨ {name} trust").format(name=plg.name)), + (D_("Encryption"), D_("⛨ {name} trust").format(name=plg.name)), partial(cls._onMenuTrust, host=host, plg=plg), security_limit=0, - help_string=D_(u"Manage {name} trust").format(name=plg.name), + help_string=D_("Manage {name} trust").format(name=plg.name), type_=C.MENU_SINGLE, ) @classmethod def _onMenuUnencrypted(cls, data, host, profile): client = 
host.getClient(profile) - peer_jid = jid.JID(data[u'jid']).userhostJID() + peer_jid = jid.JID(data['jid']).userhostJID() d = client.encryption.stop(peer_jid) d.addCallback(lambda __: {}) return d @@ -397,7 +397,7 @@ @classmethod def _onMenuName(cls, data, host, plg, profile): client = host.getClient(profile) - peer_jid = jid.JID(data[u'jid']) + peer_jid = jid.JID(data['jid']) if not plg.directed: peer_jid = peer_jid.userhostJID() d = client.encryption.start(peer_jid, plg.namespace, replace=True) @@ -408,9 +408,9 @@ @defer.inlineCallbacks def _onMenuTrust(cls, data, host, plg, profile): client = host.getClient(profile) - peer_jid = jid.JID(data[u'jid']).userhostJID() + peer_jid = jid.JID(data['jid']).userhostJID() ui = yield client.encryption.getTrustUI(peer_jid, plg.namespace) - defer.returnValue({u'xmlui': ui.toXml()}) + defer.returnValue({'xmlui': ui.toXml()}) ## Triggers ##
--- a/sat/memory/memory.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/memory.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -26,7 +26,7 @@ import os.path import copy from collections import namedtuple -from ConfigParser import SafeConfigParser, NoOptionError, NoSectionError +from configparser import SafeConfigParser, NoOptionError, NoSectionError from uuid import uuid4 from twisted.python import failure from twisted.internet import defer, reactor, error @@ -76,7 +76,7 @@ session_id = str(uuid4()) elif session_id in self._sessions: raise exceptions.ConflictError( - u"Session id {} is already used".format(session_id) + "Session id {} is already used".format(session_id) ) timer = reactor.callLater(self.timeout, self._purgeSession, session_id) if session_data is None: @@ -99,9 +99,9 @@ pass del self._sessions[session_id] log.debug( - u"Session {} purged{}".format( + "Session {} purged{}".format( session_id, - u" (profile {})".format(profile) if profile is not None else u"", + " (profile {})".format(profile) if profile is not None else "", ) ) @@ -147,10 +147,10 @@ self._purgeSession(session_id) def keys(self): - return self._sessions.keys() + return list(self._sessions.keys()) def iterkeys(self): - return self._sessions.iterkeys() + return iter(self._sessions.keys()) class ProfileSessions(Sessions): @@ -165,7 +165,7 @@ @return: a list containing the sessions ids """ ret = [] - for session_id in self._sessions.iterkeys(): + for session_id in self._sessions.keys(): try: timer, session_data, profile_set = self._sessions[session_id] except ValueError: @@ -245,7 +245,7 @@ if not silent: log.warning( _( - u"A database has been found in the default local_dir for previous versions (< 0.5)" + "A database has been found in the default local_dir for previous versions (< 0.5)" ) ) tools_config.fixConfigOption("", "local_dir", old_default, silent) @@ -306,10 +306,10 @@ if os.path.exists(filename): try: self.params.load_xml(filename) - log.debug(_(u"Parameters loaded from file: %s") % filename) + log.debug(_("Parameters loaded from file: %s") % filename) return True except Exception as e: - log.error(_(u"Can't load parameters from file: %s") % e) + log.error(_("Can't load parameters from file: %s") % e) return False def save_xml(self, filename): @@ -324,10 +324,10 @@ filename = os.path.expanduser(filename) try: self.params.save_xml(filename) - log.debug(_(u"Parameters saved to file: %s") % filename) + log.debug(_("Parameters saved to file: %s") % filename) return True except Exception as e: - log.error(_(u"Can't save parameters to file: %s") % e) + log.error(_("Can't save parameters to file: %s") % e) return False def load(self): @@ -356,7 +356,7 @@ def createSession(__): """Called once params are loaded.""" self._entities_cache[profile] = {} - log.info(u"[{}] Profile session started".format(profile)) + log.info("[{}] Profile session started".format(profile)) return False def backendInitialised(__): @@ -392,13 +392,13 @@ @param profile: %(doc_profile)s """ if self.host.isConnected(profile): - log.debug(u"Disconnecting profile because of session stop") + log.debug("Disconnecting profile because of session stop") self.host.disconnect(profile) self.auth_sessions.profileDelUnique(profile) try: self._entities_cache[profile] except KeyError: - log.warning(u"Profile was not in cache") + log.warning("Profile was not in cache") def _isSessionStarted(self, profile_key): return 
self.isSessionStarted(self.getProfileName(profile_key)) @@ -428,10 +428,10 @@ def check_result(result): if not result: - log.warning(u"Authentication failure of profile {}".format(profile)) + log.warning("Authentication failure of profile {}".format(profile)) raise failure.Failure( exceptions.PasswordError( - u"The provided profile password doesn't match." + "The provided profile password doesn't match." ) ) if ( @@ -460,7 +460,7 @@ self.auth_sessions.newSession( {C.MEMORY_CRYPTO_KEY: personal_key}, profile=profile ) - log.debug(u"auth session created for profile %s" % profile) + log.debug("auth session created for profile %s" % profile) d = PersistentDict(C.MEMORY_CRYPTO_NAMESPACE, profile).load() d.addCallback(lambda data: BlockCipher.decrypt(key, data[C.MEMORY_CRYPTO_KEY])) @@ -476,7 +476,7 @@ except KeyError: log.error( _( - u"Trying to purge roster status cache for a profile not in memory: [%s]" + "Trying to purge roster status cache for a profile not in memory: [%s]" ) % profile ) @@ -489,7 +489,7 @@ @return (list[unicode]): selected profiles """ if not clients and not components: - log.warning(_(u"requesting no profiles at all")) + log.warning(_("requesting no profiles at all")) return [] profiles = self.storage.getProfilesList() if clients and components: @@ -533,20 +533,20 @@ @raise exceptions.NotFound: component is not a known plugin import name """ if not name: - raise ValueError(u"Empty profile name") + raise ValueError("Empty profile name") if name[0] == "@": - raise ValueError(u"A profile name can't start with a '@'") + raise ValueError("A profile name can't start with a '@'") if "\n" in name: - raise ValueError(u"A profile name can't contain line feed ('\\n')") + raise ValueError("A profile name can't contain line feed ('\\n')") if name in self._entities_cache: - raise exceptions.ConflictError(u"A session for this profile exists") + raise exceptions.ConflictError("A session for this profile exists") if component: if not component in self.host.plugins: raise exceptions.NotFound( _( - u"Can't find component {component} entry point".format( + "Can't find component {component} entry point".format( component=component ) ) @@ -664,7 +664,7 @@ def _getPresenceStatuses(self, profile_key): ret = self.getPresenceStatuses(profile_key) - return {entity.full(): data for entity, data in ret.iteritems()} + return {entity.full(): data for entity, data in ret.items()} def getPresenceStatuses(self, profile_key): """Get all the presence statuses of a profile @@ -676,8 +676,8 @@ profile_cache = self._getProfileCache(client) entities_presence = {} - for entity_jid, entity_data in profile_cache.iteritems(): - for resource, resource_data in entity_data.iteritems(): + for entity_jid, entity_data in profile_cache.items(): + for resource, resource_data in entity_data.items(): full_jid = copy.copy(entity_jid) full_jid.resource = resource try: @@ -736,7 +736,7 @@ entity_data = profile_cache[entity_jid.userhostJID()] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(entity_jid) + "Entity {} not in cache".format(entity_jid) ) resources = set(entity_data.keys()) resources.discard(None) @@ -758,7 +758,7 @@ try: presence_data = self.getEntityDatum(full_jid, "presence", client.profile) except KeyError: - log.debug(u"Can't get presence data for {}".format(full_jid)) + log.debug("Can't get presence data for {}".format(full_jid)) else: if presence_data.show != C.PRESENCE_UNAVAILABLE: available.append(resource) @@ -787,7 +787,7 @@ try: resources = 
self.getAllResources(client, entity_jid) except exceptions.UnknownEntityError: - log.warning(u"Entity is not in cache, we can't find any resource") + log.warning("Entity is not in cache, we can't find any resource") return None priority_resources = [] for resource in resources: @@ -796,13 +796,13 @@ try: presence_data = self.getEntityDatum(full_jid, "presence", client.profile) except KeyError: - log.debug(u"No presence information for {}".format(full_jid)) + log.debug("No presence information for {}".format(full_jid)) continue priority_resources.append((resource, presence_data.priority)) try: return max(priority_resources, key=lambda res_tuple: res_tuple[1])[0] except ValueError: - log.warning(u"No resource found at all for {}".format(entity_jid)) + log.warning("No resource found at all for {}".format(entity_jid)) return None ## Entities data ## @@ -835,8 +835,8 @@ """ profile_cache = self._getProfileCache(client) # we construct a list of all known full jids (bare jid of entities x resources) - for bare_jid, entity_data in profile_cache.iteritems(): - for resource in entity_data.iterkeys(): + for bare_jid, entity_data in profile_cache.items(): + for resource in entity_data.keys(): if resource is None: continue full_jid = copy.copy(bare_jid) @@ -871,9 +871,9 @@ entity_data[key] = value if key in self._key_signals and not silent: - if not isinstance(value, basestring): + if not isinstance(value, str): log.error( - u"Setting a non string value ({}) for a key ({}) which has a signal flag".format( + "Setting a non string value ({}) for a key ({}) which has a signal flag".format( value, key ) ) @@ -905,7 +905,7 @@ entity_data = profile_cache[jid_.userhostJID()][jid_.resource] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(jid_) + "Entity {} not in cache".format(jid_) ) try: del entity_data[key] @@ -919,7 +919,7 @@ ret = self.getEntitiesData( [jid.JID(jid_) for jid_ in entities_jids], keys_list, profile_key ) - return {jid_.full(): data for jid_, data in ret.iteritems()} + return {jid_.full(): data for jid_, data in ret.items()} def getEntitiesData(self, entities_jids, keys_list=None, profile_key=C.PROF_KEY_NONE): """Get a list of cached values for several entities at once @@ -961,8 +961,8 @@ continue ret_data[entity.full()] = fillEntityData(entity_cache_data, keys_list) else: - for bare_jid, data in profile_cache.iteritems(): - for resource, entity_cache_data in data.iteritems(): + for bare_jid, data in profile_cache.items(): + for resource, entity_cache_data in data.items(): full_jid = copy.copy(bare_jid) full_jid.resource = resource ret_data[full_jid] = fillEntityData(entity_cache_data) @@ -987,7 +987,7 @@ entity_data = profile_cache[entity_jid.userhostJID()][entity_jid.resource] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache (was requesting {})".format( + "Entity {} not in cache (was requesting {})".format( entity_jid, keys_list ) ) @@ -1030,14 +1030,14 @@ del profile_cache[entity_jid] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(entity_jid) + "Entity {} not in cache".format(entity_jid) ) else: try: del profile_cache[entity_jid.userhostJID()][entity_jid.resource] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(entity_jid) + "Entity {} not in cache".format(entity_jid) ) ## Encryption ## @@ -1103,7 +1103,7 @@ def done(__): log.debug( - _(u"Personal data (%(ns)s, %(key)s) has been successfuly encrypted") + _("Personal data (%(ns)s, 
%(key)s) has been successfuly encrypted") % {"ns": C.MEMORY_CRYPTO_NAMESPACE, "key": data_key} ) @@ -1225,21 +1225,21 @@ # the owner has all rights return if not C.ACCESS_PERMS.issuperset(perms_to_check): - raise exceptions.InternalError(_(u"invalid permission")) + raise exceptions.InternalError(_("invalid permission")) for perm in perms_to_check: # we check each perm and raise PermissionError as soon as one condition is not valid # we must never return here, we only return after the loop if nothing was blocking the access try: - perm_data = file_data[u"access"][perm] - perm_type = perm_data[u"type"] + perm_data = file_data["access"][perm] + perm_type = perm_data["type"] except KeyError: raise exceptions.PermissionError() if perm_type == C.ACCESS_TYPE_PUBLIC: continue elif perm_type == C.ACCESS_TYPE_WHITELIST: try: - jids = perm_data[u"jids"] + jids = perm_data["jids"] except KeyError: raise exceptions.PermissionError() if peer_jid.full() in jids: @@ -1248,7 +1248,7 @@ raise exceptions.PermissionError() else: raise exceptions.InternalError( - _(u"unknown access type: {type}").format(type=perm_type) + _("unknown access type: {type}").format(type=perm_type) ) @defer.inlineCallbacks @@ -1257,7 +1257,7 @@ current = file_data while True: self.checkFilePermission(current, peer_jid, perms_to_check) - parent = current[u"parent"] + parent = current["parent"] if not parent: break files_data = yield self.getFile( @@ -1266,7 +1266,7 @@ try: current = files_data[0] except IndexError: - raise exceptions.DataError(u"Missing parent") + raise exceptions.DataError("Missing parent") @defer.inlineCallbacks def _getParentDir( @@ -1283,15 +1283,15 @@ # if path is set, we have to retrieve parent directory of the file(s) from it if parent is not None: raise exceptions.ConflictError( - _(u"You can't use path and parent at the same time") + _("You can't use path and parent at the same time") ) - path_elts = filter(None, path.split(u"/")) - if {u"..", u"."}.intersection(path_elts): - raise ValueError(_(u'".." or "." can\'t be used in path')) + path_elts = [_f for _f in path.split("/") if _f] + if {"..", "."}.intersection(path_elts): + raise ValueError(_('".." or "." 
can\'t be used in path')) # we retrieve all directories from path until we get the parent container # non existing directories will be created - parent = u"" + parent = "" for idx, path_elt in enumerate(path_elts): directories = yield self.storage.getFiles( client, @@ -1306,12 +1306,12 @@ # from this point, directories don't exist anymore, we have to create them elif len(directories) > 1: raise exceptions.InternalError( - _(u"Several directories found, this should not happen") + _("Several directories found, this should not happen") ) else: directory = directories[0] self.checkFilePermission(directory, peer_jid, perms_to_check) - parent = directory[u"id"] + parent = directory["id"] defer.returnValue((parent, [])) @defer.inlineCallbacks @@ -1357,8 +1357,8 @@ """ if peer_jid is None and perms_to_check or perms_to_check is None and peer_jid: raise exceptions.InternalError( - u"if you want to disable permission check, both peer_jid and " - u"perms_to_check must be None" + "if you want to disable permission check, both peer_jid and " + "perms_to_check must be None" ) if owner is not None: owner = owner.userhostJID() @@ -1378,7 +1378,7 @@ try: parent_data = parent_data[0] except IndexError: - raise exceptions.DataError(u"mising parent") + raise exceptions.DataError("mising parent") yield self.checkPermissionToRoot( client, parent_data, peer_jid, perms_to_check ) @@ -1414,7 +1414,7 @@ @defer.inlineCallbacks def setFile( - self, client, name, file_id=None, version=u"", parent=None, path=None, + self, client, name, file_id=None, version="", parent=None, path=None, type_=C.FILE_TYPE_FILE, file_hash=None, hash_algo=None, size=None, namespace=None, mime_type=None, created=None, modified=None, owner=None, access=None, extra=None, peer_jid=None, perms_to_check=(C.ACCESS_PERM_WRITE,) @@ -1481,7 +1481,7 @@ if type_ == C.FILE_TYPE_DIRECTORY: if any(version, file_hash, size, mime_type): raise ValueError( - u"version, file_hash, size and mime_type can't be set for a directory" + "version, file_hash, size and mime_type can't be set for a directory" ) if owner is not None: owner = owner.userhostJID() @@ -1498,7 +1498,7 @@ client, name=new_dir, file_id=new_dir_id, - version=u"", + version="", parent=parent, type_=C.FILE_TYPE_DIRECTORY, namespace=namespace, @@ -1509,7 +1509,7 @@ ) parent = new_dir_id elif parent is None: - parent = u"" + parent = "" yield self.storage.setFile( client, @@ -1552,35 +1552,35 @@ @param files_path(unicode): path of the directory containing the actual files @param file_data(dict): data of the file to delete """ - if file_data[u'owner'] != peer_jid: + if file_data['owner'] != peer_jid: raise exceptions.PermissionError( - u"file {file_name} can't be deleted, {peer_jid} is not the owner" - .format(file_name=file_data[u'name'], peer_jid=peer_jid.full())) - if file_data[u'type'] == C.FILE_TYPE_DIRECTORY: - sub_files = yield self.getFiles(client, peer_jid, parent=file_data[u'id']) + "file {file_name} can't be deleted, {peer_jid} is not the owner" + .format(file_name=file_data['name'], peer_jid=peer_jid.full())) + if file_data['type'] == C.FILE_TYPE_DIRECTORY: + sub_files = yield self.getFiles(client, peer_jid, parent=file_data['id']) if sub_files and not recursive: - raise exceptions.DataError(_(u"Can't delete directory, it is not empty")) + raise exceptions.DataError(_("Can't delete directory, it is not empty")) # we first delete the sub-files for sub_file_data in sub_files: yield self._deleteFile(client, peer_jid, recursive, sub_file_data) # then the directory itself - yield 
self.storage.fileDelete(file_data[u'id']) - elif file_data[u'type'] == C.FILE_TYPE_FILE: - log.info(_(u"deleting file {name} with hash {file_hash}").format( - name=file_data[u'name'], file_hash=file_data[u'file_hash'])) - yield self.storage.fileDelete(file_data[u'id']) + yield self.storage.fileDelete(file_data['id']) + elif file_data['type'] == C.FILE_TYPE_FILE: + log.info(_("deleting file {name} with hash {file_hash}").format( + name=file_data['name'], file_hash=file_data['file_hash'])) + yield self.storage.fileDelete(file_data['id']) references = yield self.getFiles( - client, peer_jid, file_hash=file_data[u'file_hash']) + client, peer_jid, file_hash=file_data['file_hash']) if references: - log.debug(u"there are still references to the file, we keep it") + log.debug("there are still references to the file, we keep it") else: - file_path = os.path.join(files_path, file_data[u'file_hash']) - log.info(_(u"no reference left to {file_path}, deleting").format( + file_path = os.path.join(files_path, file_data['file_hash']) + log.info(_("no reference left to {file_path}, deleting").format( file_path=file_path)) os.unlink(file_path) else: - raise exceptions.InternalError(u'Unexpected file type: {file_type}' - .format(file_type=file_data[u'type'])) + raise exceptions.InternalError('Unexpected file type: {file_type}' + .format(file_type=file_data['type'])) @defer.inlineCallbacks def fileDelete(self, client, peer_jid, file_id, recursive=False): @@ -1595,11 +1595,11 @@ # should be checked too files_data = yield self.getFiles(client, peer_jid, file_id) if not files_data: - raise exceptions.NotFound(u"Can't find the file with id {file_id}".format( + raise exceptions.NotFound("Can't find the file with id {file_id}".format( file_id=file_id)) file_data = files_data[0] - if file_data[u"type"] != C.FILE_TYPE_DIRECTORY and recursive: - raise ValueError(u"recursive can only be set for directories") + if file_data["type"] != C.FILE_TYPE_DIRECTORY and recursive: + raise ValueError("recursive can only be set for directories") files_path = self.host.getLocalPath(None, C.FILES_DIR, profile=False) yield self._deleteFile(client, peer_jid, recursive, files_path, file_data) @@ -1618,6 +1618,6 @@ try: presence_data = self.getEntityDatum(entity_jid, "presence", client.profile) except KeyError: - log.debug(u"No presence information for {}".format(entity_jid)) + log.debug("No presence information for {}".format(entity_jid)) return False return presence_data.show != C.PRESENCE_UNAVAILABLE
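Most of the memory.py changes follow the same dict-iteration rule: iteritems()/iterkeys() are gone in Python 3, and items()/keys() return live views rather than lists. A small sketch of the two idioms used throughout the hunk, with list() only where a real list is needed or the dict may change during iteration (the data below is made up for illustration):

    presence = {"alice@example.org": "online", "bob@example.org": "away"}

    # plain iteration: a view is enough
    for entity, show in presence.items():
        print(entity, show)

    # materialise a list when the dict will be modified while iterating
    for entity in list(presence.keys()):
        if presence[entity] == "away":
            del presence[entity]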
--- a/sat/memory/params.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/params.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -56,7 +56,7 @@ # TODO: when priority is changed, a new presence stanza must be emitted # TODO: int type (Priority should be int instead of string) - default_xml = u""" + default_xml = """ <params> <general> </general> @@ -80,20 +80,20 @@ </individual> </params> """ % { - u"category_general": D_(u"General"), - u"category_connection": D_(u"Connection"), - u"history_param": C.HISTORY_LIMIT, - u"history_label": D_(u"Chat history limit"), - u"show_offline_contacts": C.SHOW_OFFLINE_CONTACTS, - u"show_offline_contacts_label": D_(u"Show offline contacts"), - u"show_empty_groups": C.SHOW_EMPTY_GROUPS, - u"show_empty_groups_label": D_(u"Show empty groups"), - u"force_server_param": C.FORCE_SERVER_PARAM, - u"force_port_param": C.FORCE_PORT_PARAM, - u"new_account_label": D_(u"Register new account"), - u"autoconnect_label": D_(u"Connect on frontend startup"), - u"autodisconnect_label": D_(u"Disconnect on frontend closure"), - u"check_certificate_label": D_(u"Check certificate (don't uncheck if unsure)"), + "category_general": D_("General"), + "category_connection": D_("Connection"), + "history_param": C.HISTORY_LIMIT, + "history_label": D_("Chat history limit"), + "show_offline_contacts": C.SHOW_OFFLINE_CONTACTS, + "show_offline_contacts_label": D_("Show offline contacts"), + "show_empty_groups": C.SHOW_EMPTY_GROUPS, + "show_empty_groups_label": D_("Show empty groups"), + "force_server_param": C.FORCE_SERVER_PARAM, + "force_port_param": C.FORCE_PORT_PARAM, + "new_account_label": D_("Register new account"), + "autoconnect_label": D_("Connect on frontend startup"), + "autodisconnect_label": D_("Disconnect on frontend closure"), + "check_certificate_label": D_("Check certificate (don't uncheck if unsure)"), } def load_default_params(self): @@ -158,7 +158,7 @@ del self.params[profile] except KeyError: log.error( - _(u"Trying to purge cache of a profile not in memory: [%s]") % profile + _("Trying to purge cache of a profile not in memory: [%s]") % profile ) def save_xml(self, filename): @@ -238,7 +238,7 @@ elif return_profile_keys and profile_key in [C.PROF_KEY_ALL]: return profile_key # this value must be managed by the caller if not self.storage.hasProfile(profile_key): - log.error(_(u"Trying to access an unknown profile (%s)") % profile_key) + log.error(_("Trying to access an unknown profile (%s)") % profile_key) raise exceptions.ProfileUnknownError(profile_key) return profile_key @@ -294,7 +294,7 @@ if ( len(cat_node.childNodes) == to_remove_count ): # remove empty category - for __ in xrange(0, to_remove_count): + for __ in range(0, to_remove_count): to_remove.pop() to_remove.append(cat_node) for node in to_remove: @@ -333,7 +333,7 @@ if not app: log.warning( _( - u"Trying to register frontends parameters with no specified app: aborted" + "Trying to register frontends parameters with no specified app: aborted" ) ) return @@ -342,14 +342,14 @@ if app in self.frontends_cache: log.debug( _( - u"Trying to register twice frontends parameters for %(app)s: aborted" + "Trying to register twice frontends parameters for %(app)s: aborted" % {"app": app} ) ) return self.frontends_cache.append(app) self.updateParams(xml, security_limit, app) - log.debug(u"Frontends parameters registered for %(app)s" % {"app": app}) + log.debug("Frontends parameters registered for %(app)s" % {"app": app}) 
def __default_ok(self, value, name, category): # FIXME: will not work with individual parameters @@ -357,7 +357,7 @@ def __default_ko(self, failure, name, category): log.error( - _(u"Can't determine default value for [%(category)s/%(name)s]: %(reason)s") + _("Can't determine default value for [%(category)s/%(name)s]: %(reason)s") % {"category": category, "name": name, "reason": str(failure.value)} ) @@ -380,7 +380,7 @@ if not node: log.error( _( - u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + "Requested param [%(name)s] in category [%(category)s] doesn't exist !" ) % {"name": name, "category": category} ) @@ -443,14 +443,14 @@ if len(selected) == 0: log.error( _( - u"Parameter (%(cat)s, %(param)s) of type list has no default option!" + "Parameter (%(cat)s, %(param)s) of type list has no default option!" ) % {"cat": cat, "param": param} ) else: log.error( _( - u"Parameter (%(cat)s, %(param)s) of type list has more than one default option!" + "Parameter (%(cat)s, %(param)s) of type list has more than one default option!" ) % {"cat": cat, "param": param} ) @@ -468,7 +468,7 @@ jids[idx] = jid.JID(value) except (RuntimeError, jid.InvalidFormat, AttributeError): log.warning( - u"Incorrect jid value found in jids list: [{}]".format(value) + "Incorrect jid value found in jids list: [{}]".format(value) ) to_delete.append(value) for value in to_delete: @@ -564,7 +564,7 @@ if not node: log.error( _( - u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + "Requested param [%(name)s] in category [%(category)s] doesn't exist !" ) % {"name": name, "category": category} ) @@ -630,7 +630,7 @@ if not node: log.error( _( - u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + "Requested param [%(name)s] in category [%(category)s] doesn't exist !" ) % {"name": name, "category": category} ) @@ -639,7 +639,7 @@ if not self.checkSecurityLimit(node[1], security_limit): log.warning( _( - u"Trying to get parameter '%(param)s' in category '%(cat)s' without authorization!!!" + "Trying to get parameter '%(param)s' in category '%(cat)s' without authorization!!!" % {"param": name, "cat": category} ) ) @@ -697,7 +697,7 @@ name = param_node.getAttribute("name") if not name: log.warning( - u"ignoring attribute without name: {}".format( + "ignoring attribute without name: {}".format( param_node.toxml() ) ) @@ -850,7 +850,7 @@ AttributeError, ): log.warning( - u"Incorrect jid value found in jids list: [{}]".format( + "Incorrect jid value found in jids list: [{}]".format( jid_ ) ) @@ -982,13 +982,13 @@ if profile_key != C.PROF_KEY_NONE: profile = self.getProfileName(profile_key) if not profile: - log.error(_(u"Trying to set parameter for an unknown profile")) + log.error(_("Trying to set parameter for an unknown profile")) raise exceptions.ProfileUnknownError(profile_key) node = self._getParamNode(name, category, "@ALL@") if not node: log.error( - _(u"Requesting an unknown parameter (%(category)s/%(name)s)") + _("Requesting an unknown parameter (%(category)s/%(name)s)") % {"category": category, "name": name} ) return defer.succeed(None) @@ -996,7 +996,7 @@ if not self.checkSecurityLimit(node[1], security_limit): log.warning( _( - u"Trying to set parameter '%(param)s' in category '%(cat)s' without authorization!!!" + "Trying to set parameter '%(param)s' in category '%(cat)s' without authorization!!!" 
% {"param": name, "cat": category} ) ) @@ -1012,7 +1012,7 @@ except ValueError: log.debug( _( - u"Trying to set parameter '%(param)s' in category '%(cat)s' with an non-integer value" + "Trying to set parameter '%(param)s' in category '%(cat)s' with an non-integer value" % {"param": name, "cat": category} ) ) @@ -1051,7 +1051,7 @@ assert profile_key != C.PROF_KEY_NONE if type_ == "button": - log.debug(u"Clicked param button %s" % node.toxml()) + log.debug("Clicked param button %s" % node.toxml()) return defer.succeed(None) elif type_ == "password": try:
--- a/sat/memory/persistent.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/persistent.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -63,10 +63,10 @@ return d def iteritems(self): - return self._cache.iteritems() + return iter(self._cache.items()) def items(self): - return self._cache.items() + return list(self._cache.items()) def __repr__(self): return self._cache.__repr__() @@ -98,8 +98,8 @@ def __hash__(self): return self._cache.__hash__() - def __nonzero__(self): - return self._cache.__len__() + def __bool__(self): + return self._cache.__len__() != 0 def __contains__(self, key): return self._cache.__contains__(key) @@ -149,7 +149,7 @@ class LazyPersistentBinaryDict(PersistentBinaryDict): - ur"""PersistentBinaryDict which get key/value when needed + r"""PersistentBinaryDict which get key/value when needed This Persistent need more database access, it is suitable for largest data, to save memory. @@ -160,7 +160,7 @@ def load(self): # we show a warning as calling load on LazyPersistentBinaryDict sounds like a code mistake - log.warning(_(u"Calling load on LazyPersistentBinaryDict while it's not needed")) + log.warning(_("Calling load on LazyPersistentBinaryDict while it's not needed")) def iteritems(self): raise NotImplementedError @@ -196,9 +196,9 @@ raise NotImplementedError def __hash__(self): - return hash(unicode(self.__class__) + self.namespace + (self.profile or u'')) + return hash(str(self.__class__) + self.namespace + (self.profile or '')) - def __nonzero__(self): + def __bool__(self): raise NotImplementedError def __contains__(self, key):
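The persistent.py hunk shows two Python 3 protocol changes: the ur"" string prefix is invalid syntax (hence the switch to r""), and truth testing calls __bool__ instead of __nonzero__, with __bool__ required to return an actual bool (hence the explicit != 0). A minimal sketch of the delegation pattern, using a hypothetical in-memory cache attribute rather than the project's classes:

    class CacheBackedDict:
        def __init__(self):
            self._cache = {}  # hypothetical in-memory cache

        def __bool__(self):
            # Python 3 requires a bool here; returning a plain int raises TypeError
            return len(self._cache) != 0

    d = CacheBackedDict()
    print(bool(d))           # False
    d._cache["key"] = "value"
    print(bool(d))           # True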
--- a/sat/memory/sqlite.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/sqlite.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -31,7 +31,7 @@ import sys import re import os.path -import cPickle as pickle +import pickle as pickle import hashlib import sqlite3 import json @@ -152,12 +152,12 @@ # Sqlite integration, probably with high level library retry -= 1 if retry == 0: - log.error(_(u'too many db tries, we abandon! Error message: {msg}\n' - u'query was {query}' - .format(msg=e, query=u' '.join([unicode(a) for a in args])))) + log.error(_('too many db tries, we abandon! Error message: {msg}\n' + 'query was {query}' + .format(msg=e, query=' '.join([str(a) for a in args])))) raise e log.warning( - _(u'exception while running query, retrying ({try_}): {msg}').format( + _('exception while running query, retrying ({try_}): {msg}').format( try_ = 6 - retry, msg = e)) kw['query_retry'] = retry @@ -175,14 +175,14 @@ retry -= 1 if retry == 0: log.error( - _(u'too many interaction tries, we abandon! Error message: {msg}\n' - u'interaction method was: {interaction}\n' - u'interaction arguments were: {args}' + _('too many interaction tries, we abandon! Error message: {msg}\n' + 'interaction method was: {interaction}\n' + 'interaction arguments were: {args}' .format(msg=e, interaction=interaction, - args=u', '.join([unicode(a) for a in args])))) + args=', '.join([str(a) for a in args])))) raise e log.warning( - _(u'exception while running interaction, retrying ({try_}): {msg}') + _('exception while running interaction, retrying ({try_}): {msg}') .format(try_ = 4 - retry, msg = e)) kw['interaction_retry'] = retry return self._runInteraction(interaction, *args, **kw) @@ -204,7 +204,7 @@ if new_base: # the dir may not exist if it's not the XDG recommended one dir_ = os.path.dirname(db_filename) if not os.path.exists(dir_): - os.makedirs(dir_, 0700) + os.makedirs(dir_, 0o700) def foreignKeysOn(sqlite): sqlite.execute('PRAGMA foreign_keys = ON') @@ -240,7 +240,7 @@ if statements is None: return defer.succeed(None) - log.debug(u"\n===== COMMITTING STATEMENTS =====\n%s\n============\n\n" % '\n'.join(statements)) + log.debug("\n===== COMMITTING STATEMENTS =====\n%s\n============\n\n" % '\n'.join(statements)) d = self.dbpool.runInteraction(self._updateDb, tuple(statements)) return d @@ -270,7 +270,7 @@ def getProfilesList(self): """"Return list of all registered profiles""" - return self.profiles.keys() + return list(self.profiles.keys()) def hasProfile(self, profile_name): """return True if profile_name exists @@ -283,13 +283,13 @@ try: return self.profiles[profile_name] in self.components except KeyError: - raise exceptions.NotFound(u"the requested profile doesn't exists") + raise exceptions.NotFound("the requested profile doesn't exists") def getEntryPoint(self, profile_name): try: return self.components[self.profiles[profile_name]] except KeyError: - raise exceptions.NotFound(u"the requested profile doesn't exists or is not a component") + raise exceptions.NotFound("the requested profile doesn't exists or is not a component") def createProfile(self, name, component=None): """Create a new profile @@ -326,7 +326,7 @@ @return: deferred triggered once profile is actually deleted """ def deletionError(failure_): - log.error(_(u"Can't delete profile [%s]") % name) + log.error(_("Can't delete profile [%s]") % name) return failure_ def delete(txn): @@ -359,7 +359,7 @@ for param in result: category, name, value = 
param params_gen[(category, name)] = value - log.debug(_(u"loading general parameters from database")) + log.debug(_("loading general parameters from database")) return self.dbpool.runQuery("SELECT category,name,value FROM param_gen").addCallback(fillParams) def loadIndParams(self, params_ind, profile): @@ -374,7 +374,7 @@ for param in result: category, name, value = param params_ind[(category, name)] = value - log.debug(_(u"loading individual parameters from database")) + log.debug(_("loading individual parameters from database")) d = self.dbpool.runQuery("SELECT category,name,value FROM param_ind WHERE profile_id=?", (self.profiles[profile], )) d.addCallback(fillParams) return d @@ -399,7 +399,7 @@ @param value: value to set @return: deferred""" d = self.dbpool.runQuery("REPLACE INTO param_gen(category,name,value) VALUES (?,?,?)", (category, name, value)) - d.addErrback(lambda ignore: log.error(_(u"Can't set general parameter (%(category)s/%(name)s) in database" % {"category": category, "name": name}))) + d.addErrback(lambda ignore: log.error(_("Can't set general parameter (%(category)s/%(name)s) in database" % {"category": category, "name": name}))) return d def setIndParam(self, category, name, value, profile): @@ -412,7 +412,7 @@ @return: deferred """ d = self.dbpool.runQuery("REPLACE INTO param_ind(category,name,profile_id,value) VALUES (?,?,?,?)", (category, name, self.profiles[profile], value)) - d.addErrback(lambda ignore: log.error(_(u"Can't set individual parameter (%(category)s/%(name)s) for [%(profile)s] in database" % {"category": category, "name": name, "profile": profile}))) + d.addErrback(lambda ignore: log.error(_("Can't set individual parameter (%(category)s/%(name)s) for [%(profile)s] in database" % {"category": category, "name": name, "profile": profile}))) return d ## History @@ -423,14 +423,14 @@ uid = data['uid'] d_list = [] for key in ('message', 'subject'): - for lang, value in data[key].iteritems(): + for lang, value in data[key].items(): d = self.dbpool.runQuery( "INSERT INTO {key}(history_uid, {key}, language) VALUES (?,?,?)" .format(key=key), (uid, value, lang or None)) d.addErrback(lambda __: log.error( - _(u"Can't save following {key} in history (uid: {uid}, lang:{lang}):" - u" {value}").format( + _("Can't save following {key} in history (uid: {uid}, lang:{lang}):" + " {value}").format( key=key, uid=uid, lang=lang, value=value))) d_list.append(d) try: @@ -443,8 +443,8 @@ "INSERT INTO thread(history_uid, thread_id, parent_id) VALUES (?,?,?)", (uid, thread, thread_parent)) d.addErrback(lambda __: log.error( - _(u"Can't save following thread in history (uid: {uid}): thread: " - u"{thread}), parent:{parent}").format( + _("Can't save following thread in history (uid: {uid}): thread: " + "{thread}), parent:{parent}").format( uid=uid, thread=thread, parent=thread_parent))) d_list.append(d) return defer.DeferredList(d_list) @@ -453,24 +453,24 @@ failure_.trap(sqlite3.IntegrityError) sqlite_msg = failure_.value.args[0] if "UNIQUE constraint failed" in sqlite_msg: - log.debug(u"message {} is already in history, not storing it again" + log.debug("message {} is already in history, not storing it again" .format(data['uid'])) if 'received_timestamp' not in data: log.warning( - u"duplicate message is not delayed, this is maybe a bug: data={}" + "duplicate message is not delayed, this is maybe a bug: data={}" .format(data)) # we cancel message to avoid sending duplicate message to frontends raise failure.Failure(exceptions.CancelError("Cancelled duplicated message")) else: 
- log.error(u"Can't store message in history: {}".format(failure_)) + log.error("Can't store message in history: {}".format(failure_)) def _logHistoryError(self, failure_, from_jid, to_jid, data): if failure_.check(exceptions.CancelError): # we propagate CancelError to avoid sending message to frontends raise failure_ log.error(_( - u"Can't save following message in history: from [{from_jid}] to [{to_jid}] " - u"(uid: {uid})") + "Can't save following message in history: from [{from_jid}] to [{to_jid}] " + "(uid: {uid})") .format(from_jid=from_jid.full(), to_jid=to_jid.full(), uid=data['uid'])) def addToHistory(self, data, profile): @@ -478,14 +478,14 @@ @param data(dict): message data as build by SatMessageProtocol.onMessage """ - extra = pickle.dumps({k: v for k, v in data['extra'].iteritems() + extra = pickle.dumps({k: v for k, v in data['extra'].items() if k not in NOT_IN_EXTRA}, 0) from_jid = data['from'] to_jid = data['to'] d = self.dbpool.runQuery( - u"INSERT INTO history(uid, stanza_id, update_uid, profile_id, source, dest, " - u"source_res, dest_res, timestamp, received_timestamp, type, extra) VALUES " - u"(?,?,?,?,?,?,?,?,?,?,?,?)", + "INSERT INTO history(uid, stanza_id, update_uid, profile_id, source, dest, " + "source_res, dest_res, timestamp, received_timestamp, type, extra) VALUES " + "(?,?,?,?,?,?,?,?,?,?,?,?)", (data['uid'], data['extra'].get('stanza_id'), data['extra'].get('update_uid'), self.profiles[profile], data['from'].userhost(), to_jid.userhost(), from_jid.resource, to_jid.resource, data['timestamp'], @@ -508,7 +508,7 @@ if uid != current['uid']: # new message try: - extra = pickle.loads(str(extra or "")) + extra = pickle.loads(extra or b"") except EOFError: extra = {} current = { @@ -543,8 +543,8 @@ else: if thread_parent is not None: log.error( - u"Database inconsistency: thread parent without thread (uid: " - u"{uid}, thread_parent: {parent})" + "Database inconsistency: thread parent without thread (uid: " + "{uid}, thread_parent: {parent})" .format(uid=uid, parent=thread_parent)) return result @@ -575,7 +575,7 @@ if limit == 0: return defer.succeed([]) - query_parts = [u"SELECT uid, stanza_id, update_uid, source, dest, source_res, dest_res, timestamp, received_timestamp,\ + query_parts = ["SELECT uid, stanza_id, update_uid, source, dest, source_res, dest_res, timestamp, received_timestamp,\ type, extra, message, message.language, subject, subject.language, thread_id, thread.parent_id\ FROM history LEFT JOIN message ON history.uid = message.history_uid\ LEFT JOIN subject ON history.uid=subject.history_uid\ @@ -587,8 +587,8 @@ values.append(jid_.userhost()) if jid_.resource: values.append(jid_.resource) - return u'({type_}=? AND {type_}_res=?)'.format(type_=type_) - return u'{type_}=?'.format(type_=type_) + return '({type_}=? 
AND {type_}_res=?)'.format(type_=type_) + return '{type_}=?'.format(type_=type_) if not from_jid and not to_jid: # not jid specified, we want all one2one communications @@ -598,15 +598,15 @@ # we only have one jid specified, we check all messages # from or to this jid jid_ = from_jid or to_jid - query_parts.append(u"AND ({source} OR {dest})".format( - source=test_jid(u'source', jid_), - dest=test_jid(u'dest' , jid_))) + query_parts.append("AND ({source} OR {dest})".format( + source=test_jid('source', jid_), + dest=test_jid('dest' , jid_))) else: # we have 2 jids specified, we check all communications between # those 2 jids query_parts.append( - u"AND (({source_from} AND {dest_to}) " - u"OR ({source_to} AND {dest_from}))".format( + "AND (({source_from} AND {dest_to}) " + "OR ({source_to} AND {dest_from}))".format( source_from=test_jid('source', from_jid), dest_to=test_jid('dest', to_jid), source_to=test_jid('source', to_jid), @@ -619,47 +619,47 @@ q.append(test_jid('source', from_jid)) if to_jid is not None: q.append(test_jid('dest', to_jid)) - query_parts.append(u"AND " + u" AND ".join(q)) + query_parts.append("AND " + " AND ".join(q)) if filters: - if u'timestamp_start' in filters: - query_parts.append(u"AND timestamp>= ?") - values.append(float(filters[u'timestamp_start'])) - if u'body' in filters: + if 'timestamp_start' in filters: + query_parts.append("AND timestamp>= ?") + values.append(float(filters['timestamp_start'])) + if 'body' in filters: # TODO: use REGEXP (function to be defined) instead of GLOB: https://www.sqlite.org/lang_expr.html - query_parts.append(u"AND message LIKE ?") - values.append(u"%{}%".format(filters['body'])) - if u'search' in filters: - query_parts.append(u"AND (message LIKE ? OR source_res LIKE ?)") - values.extend([u"%{}%".format(filters['search'])] * 2) - if u'types' in filters: + query_parts.append("AND message LIKE ?") + values.append("%{}%".format(filters['body'])) + if 'search' in filters: + query_parts.append("AND (message LIKE ? OR source_res LIKE ?)") + values.extend(["%{}%".format(filters['search'])] * 2) + if 'types' in filters: types = filters['types'].split() - query_parts.append(u"AND type IN ({})".format(u','.join("?"*len(types)))) + query_parts.append("AND type IN ({})".format(','.join("?"*len(types)))) values.extend(types) - if u'not_types' in filters: + if 'not_types' in filters: types = filters['not_types'].split() - query_parts.append(u"AND type NOT IN ({})".format(u','.join("?"*len(types)))) + query_parts.append("AND type NOT IN ({})".format(','.join("?"*len(types)))) values.extend(types) - if u'last_stanza_id' in filters: + if 'last_stanza_id' in filters: # this request get the last message with a "stanza_id" that we # have in history. This is mainly used to retrieve messages sent # while we were offline, using MAM (XEP-0313). 
- if (filters[u'last_stanza_id'] is not True + if (filters['last_stanza_id'] is not True or limit != 1): - raise ValueError(u"Unexpected values for last_stanza_id filter") - query_parts.append(u"AND stanza_id IS NOT NULL") + raise ValueError("Unexpected values for last_stanza_id filter") + query_parts.append("AND stanza_id IS NOT NULL") # timestamp may be identical for 2 close messages (specially when delay is # used) that's why we order ties by received_timestamp # We'll reverse the order in sqliteHistoryToList # we use DESC here so LIMIT keep the last messages - query_parts.append(u"ORDER BY timestamp DESC, history.received_timestamp DESC") + query_parts.append("ORDER BY timestamp DESC, history.received_timestamp DESC") if limit is not None: - query_parts.append(u"LIMIT ?") + query_parts.append("LIMIT ?") values.append(limit) - d = self.dbpool.runQuery(u" ".join(query_parts), values) + d = self.dbpool.runQuery(" ".join(query_parts), values) d.addCallback(self.sqliteHistoryToList) d.addCallback(self.listDict2listTuple) return d @@ -668,32 +668,41 @@ def _privateDataEb(self, failure_, operation, namespace, key=None, profile=None): """generic errback for data queries""" - log.error(_(u"Can't {operation} data in database for namespace {namespace}{and_key}{for_profile}: {msg}").format( + log.error(_("Can't {operation} data in database for namespace {namespace}{and_key}{for_profile}: {msg}").format( operation = operation, namespace = namespace, - and_key = (u" and key " + key) if key is not None else u"", - for_profile = (u' [' + profile + u']') if profile is not None else u'', + and_key = (" and key " + key) if key is not None else "", + for_profile = (' [' + profile + ']') if profile is not None else '', msg = failure_)) + def _load_pickle(self, v): + # FIXME: workaround for Python 3 port, some pickled data are byte while other are strings + try: + return pickle.loads(v) + except TypeError: + data = pickle.loads(v.encode('utf-8')) + log.warning(f"encoding issue in pickled data: {data}") + return data + def _generateDataDict(self, query_result, binary): if binary: - return {k: pickle.loads(str(v)) for k,v in query_result} + return {k: self._load_pickle(v) for k,v in query_result} else: return dict(query_result) def _getPrivateTable(self, binary, profile): """Get table to use for private values""" - table = [u'private'] + table = ['private'] if profile is None: - table.append(u'gen') + table.append('gen') else: - table.append(u'ind') + table.append('ind') if binary: - table.append(u'bin') + table.append('bin') - return u'_'.join(table) + return '_'.join(table) def getPrivates(self, namespace, keys=None, binary=False, profile=None): """Get private value(s) from databases @@ -706,27 +715,27 @@ None to use general values @return (dict[unicode, object]): gotten keys/values """ - log.debug(_(u"getting {type}{binary} private values from database for namespace {namespace}{keys}".format( - type = u"general" if profile is None else "individual", - binary = u" binary" if binary else u"", + log.debug(_("getting {type}{binary} private values from database for namespace {namespace}{keys}".format( + type = "general" if profile is None else "individual", + binary = " binary" if binary else "", namespace = namespace, - keys = u" with keys {}".format(u", ".join(keys)) if keys is not None else u""))) + keys = " with keys {}".format(", ".join(keys)) if keys is not None else ""))) table = self._getPrivateTable(binary, profile) - query_parts = [u"SELECT key,value FROM", table, "WHERE namespace=?"] + query_parts = 
["SELECT key,value FROM", table, "WHERE namespace=?"] args = [namespace] if keys is not None: - placeholders = u','.join(len(keys) * u'?') - query_parts.append(u'AND key IN (' + placeholders + u')') + placeholders = ','.join(len(keys) * '?') + query_parts.append('AND key IN (' + placeholders + ')') args.extend(keys) if profile is not None: - query_parts.append(u'AND profile_id=?') + query_parts.append('AND profile_id=?') args.append(self.profiles[profile]) - d = self.dbpool.runQuery(u" ".join(query_parts), args) + d = self.dbpool.runQuery(" ".join(query_parts), args) d.addCallback(self._generateDataDict, binary) - d.addErrback(self._privateDataEb, u"get", namespace, profile=profile) + d.addErrback(self._privateDataEb, "get", namespace, profile=profile) return d def setPrivateValue(self, namespace, key, value, binary=False, profile=None): @@ -741,7 +750,7 @@ if None, it's a general value """ table = self._getPrivateTable(binary, profile) - query_values_names = [u'namespace', u'key', u'value'] + query_values_names = ['namespace', 'key', 'value'] query_values = [namespace, key] if binary: @@ -750,14 +759,14 @@ query_values.append(value) if profile is not None: - query_values_names.append(u'profile_id') + query_values_names.append('profile_id') query_values.append(self.profiles[profile]) - query_parts = [u"REPLACE INTO", table, u'(', u','.join(query_values_names), u')', - u"VALUES (", u",".join(u'?'*len(query_values_names)), u')'] + query_parts = ["REPLACE INTO", table, '(', ','.join(query_values_names), ')', + "VALUES (", ",".join('?'*len(query_values_names)), ')'] - d = self.dbpool.runQuery(u" ".join(query_parts), query_values) - d.addErrback(self._privateDataEb, u"set", namespace, key, profile=profile) + d = self.dbpool.runQuery(" ".join(query_parts), query_values) + d.addErrback(self._privateDataEb, "set", namespace, key, profile=profile) return d def delPrivateValue(self, namespace, key, binary=False, profile=None): @@ -770,13 +779,13 @@ if None, it's a general value """ table = self._getPrivateTable(binary, profile) - query_parts = [u"DELETE FROM", table, u"WHERE namespace=? AND key=?"] + query_parts = ["DELETE FROM", table, "WHERE namespace=? 
AND key=?"] args = [namespace, key] if profile is not None: - query_parts.append(u"AND profile_id=?") + query_parts.append("AND profile_id=?") args.append(self.profiles[profile]) - d = self.dbpool.runQuery(u" ".join(query_parts), args) - d.addErrback(self._privateDataEb, u"delete", namespace, key, profile=profile) + d = self.dbpool.runQuery(" ".join(query_parts), args) + d.addErrback(self._privateDataEb, "delete", namespace, key, profile=profile) return d def delPrivateNamespace(self, namespace, binary=False, profile=None): @@ -787,19 +796,19 @@ Params are the same as for delPrivateValue """ table = self._getPrivateTable(binary, profile) - query_parts = [u"DELETE FROM", table, u"WHERE namespace=?"] + query_parts = ["DELETE FROM", table, "WHERE namespace=?"] args = [namespace] if profile is not None: - query_parts.append(u"AND profile_id=?") + query_parts.append("AND profile_id=?") args.append(self.profiles[profile]) - d = self.dbpool.runQuery(u" ".join(query_parts), args) - d.addErrback(self._privateDataEb, u"delete namespace", namespace, profile=profile) + d = self.dbpool.runQuery(" ".join(query_parts), args) + d.addErrback(self._privateDataEb, "delete namespace", namespace, profile=profile) return d ## Files @defer.inlineCallbacks - def getFiles(self, client, file_id=None, version=u'', parent=None, type_=None, + def getFiles(self, client, file_id=None, version='', parent=None, type_=None, file_hash=None, hash_algo=None, name=None, namespace=None, mime_type=None, owner=None, access=None, projection=None, unique=False): """retrieve files with with given filters @@ -831,45 +840,45 @@ args = [self.profiles[client.profile]] if file_id is not None: - filters.append(u'id=?') + filters.append('id=?') args.append(file_id) if version is not None: - filters.append(u'version=?') + filters.append('version=?') args.append(version) if parent is not None: - filters.append(u'parent=?') + filters.append('parent=?') args.append(parent) if type_ is not None: - filters.append(u'type=?') + filters.append('type=?') args.append(type_) if file_hash is not None: - filters.append(u'file_hash=?') + filters.append('file_hash=?') args.append(file_hash) if hash_algo is not None: - filters.append(u'hash_algo=?') + filters.append('hash_algo=?') args.append(hash_algo) if name is not None: - filters.append(u'name=?') + filters.append('name=?') args.append(name) if namespace is not None: - filters.append(u'namespace=?') + filters.append('namespace=?') args.append(namespace) if mime_type is not None: - filters.append(u'mime_type=?') + filters.append('mime_type=?') args.append(mime_type) if owner is not None: - filters.append(u'owner=?') + filters.append('owner=?') args.append(owner.full()) if access is not None: raise NotImplementedError('Access check is not implemented yet') # a JSON comparison is needed here - filters = u' AND '.join(filters) + filters = ' AND '.join(filters) query_parts.append(filters) - query = u' '.join(query_parts) + query = ' '.join(query_parts) result = yield self.dbpool.runQuery(query, args) - files_data = [dict(zip(projection, row)) for row in result] + files_data = [dict(list(zip(projection, row))) for row in result] to_parse = {'access', 'extra'}.intersection(projection) to_filter = {'owner'}.intersection(projection) if to_parse or to_filter: @@ -882,7 +891,7 @@ file_data['owner'] = jid.JID(owner) defer.returnValue(files_data) - def setFile(self, client, name, file_id, version=u'', parent=None, type_=C.FILE_TYPE_FILE, + def setFile(self, client, name, file_id, version='', parent=None, 
type_=C.FILE_TYPE_FILE, file_hash=None, hash_algo=None, size=None, namespace=None, mime_type=None, created=None, modified=None, owner=None, access=None, extra=None): """set a file metadata @@ -921,12 +930,12 @@ json.dumps(access) if access else None, json.dumps(extra) if extra else None, self.profiles[client.profile])) - d.addErrback(lambda failure: log.error(_(u"Can't save file metadata for [{profile}]: {reason}".format(profile=client.profile, reason=failure)))) + d.addErrback(lambda failure: log.error(_("Can't save file metadata for [{profile}]: {reason}".format(profile=client.profile, reason=failure)))) return d def _fileUpdate(self, cursor, file_id, column, update_cb): query = 'SELECT {column} FROM files where id=?'.format(column=column) - for i in xrange(5): + for i in range(5): cursor.execute(query, [file_id]) try: older_value_raw = cursor.fetchone()[0] @@ -951,9 +960,9 @@ else: if cursor.rowcount == 1: break; - log.warning(_(u"table not updated, probably due to race condition, trying again ({tries})").format(tries=i+1)) + log.warning(_("table not updated, probably due to race condition, trying again ({tries})").format(tries=i+1)) else: - log.error(_(u"Can't update file table")) + log.error(_("Can't update file table")) def fileUpdate(self, file_id, column, update_cb): """Update a column value using a method to avoid race conditions @@ -1072,17 +1081,17 @@ update_raw = yield self.update2raw(update_data, True) defer.returnValue(update_raw) else: - log.error(_(u"schema version is up-to-date, but local schema differ from expected current schema")) + log.error(_("schema version is up-to-date, but local schema differ from expected current schema")) update_data = self.generateUpdateData(local_sch, current_sch, True) update_raw = yield self.update2raw(update_data) - log.warning(_(u"Here are the commands that should fix the situation, use at your own risk (do a backup before modifying database), you can go to SàT's MUC room at sat@chat.jabberfr.org for help\n### SQL###\n%s\n### END SQL ###\n") % u'\n'.join("%s;" % statement for statement in update_raw)) + log.warning(_("Here are the commands that should fix the situation, use at your own risk (do a backup before modifying database), you can go to SàT's MUC room at sat@chat.jabberfr.org for help\n### SQL###\n%s\n### END SQL ###\n") % '\n'.join("%s;" % statement for statement in update_raw)) raise exceptions.DatabaseError("Database mismatch") else: if local_version > CURRENT_DB_VERSION: log.error(_( - u"You database version is higher than the one used in this SàT " - u"version, are you using several version at the same time? We " - u"can't run SàT with this database.")) + "You database version is higher than the one used in this SàT " + "version, are you using several version at the same time? 
We " + "can't run SàT with this database.")) sys.exit(1) # Database is not up-to-date, we'll do the update @@ -1091,7 +1100,7 @@ else: log.info(_("Database schema has changed, local database will be updated")) update_raw = [] - for version in xrange(local_version + 1, CURRENT_DB_VERSION + 1): + for version in range(local_version + 1, CURRENT_DB_VERSION + 1): try: update_data = DATABASE_SCHEMAS[version] except KeyError: @@ -1150,17 +1159,17 @@ ret = [] assert isinstance(data, tuple) for table, col_data in data: - assert isinstance(table, basestring) + assert isinstance(table, str) assert isinstance(col_data, tuple) for cols in col_data: if isinstance(cols, tuple): - assert all([isinstance(c, basestring) for c in cols]) - indexed_cols = u','.join(cols) - elif isinstance(cols, basestring): + assert all([isinstance(c, str) for c in cols]) + indexed_cols = ','.join(cols) + elif isinstance(cols, str): indexed_cols = cols else: - raise exceptions.InternalError(u"unexpected index columns value") - index_name = table + u'__' + indexed_cols.replace(u',', u'_') + raise exceptions.InternalError("unexpected index columns value") + index_name = table + '__' + indexed_cols.replace(',', '_') ret.append(Updater.INDEX_SQL % (index_name, table, indexed_cols)) return ret @@ -1173,7 +1182,7 @@ @return: hash as string """ hash_ = hashlib.sha1() - tables = data.keys() + tables = list(data.keys()) tables.sort() def stmnts2str(stmts): @@ -1181,7 +1190,9 @@ for table in tables: col_defs, col_constr = data[table] - hash_.update("%s:%s:%s" % (table, stmnts2str(col_defs), stmnts2str(col_constr))) + hash_.update( + ("%s:%s:%s" % (table, stmnts2str(col_defs), stmnts2str(col_constr))) + .encode('utf-8')) return hash_.digest() def rawStatements2data(self, raw_statements): @@ -1324,7 +1335,7 @@ def update_v8(self): """Update database from v7 to v8 (primary keys order changes + indexes)""" - log.info(u"Database update to v8") + log.info("Database update to v8") statements = ["PRAGMA foreign_keys = OFF"] # here is a copy of create and index data, we can't use "current" table @@ -1357,11 +1368,11 @@ schema = {table: create[table]} cols = [d.split()[0] for d in schema[table][0]] statements.extend(Updater.createData2Raw(schema)) - statements.append(u"INSERT INTO {table}({cols}) " - u"SELECT {cols} FROM {table}_old".format( + statements.append("INSERT INTO {table}({cols}) " + "SELECT {cols} FROM {table}_old".format( table=table, - cols=u','.join(cols))) - statements.append(u"DROP TABLE {}_old".format(table)) + cols=','.join(cols))) + statements.append("DROP TABLE {}_old".format(table)) statements.extend(Updater.indexData2Raw(index)) statements.append("PRAGMA foreign_keys = ON") @@ -1370,48 +1381,48 @@ @defer.inlineCallbacks def update_v7(self): """Update database from v6 to v7 (history unique constraint change)""" - log.info(u"Database update to v7, this may be long depending on your history " - u"size, please be patient.") + log.info("Database update to v7, this may be long depending on your history " + "size, please be patient.") - log.info(u"Some cleaning first") + log.info("Some cleaning first") # we need to fix duplicate stanza_id, as it can result in conflicts with the new schema # normally database should not contain any, but better safe than sorry. 
rows = yield self.dbpool.runQuery( - u"SELECT stanza_id, COUNT(*) as c FROM history WHERE stanza_id is not NULL " - u"GROUP BY stanza_id HAVING c>1") + "SELECT stanza_id, COUNT(*) as c FROM history WHERE stanza_id is not NULL " + "GROUP BY stanza_id HAVING c>1") if rows: count = sum([r[1] for r in rows]) - len(rows) - log.info(u"{count} duplicate stanzas found, cleaning".format(count=count)) + log.info("{count} duplicate stanzas found, cleaning".format(count=count)) for stanza_id, count in rows: - log.info(u"cleaning duplicate stanza {stanza_id}".format(stanza_id=stanza_id)) + log.info("cleaning duplicate stanza {stanza_id}".format(stanza_id=stanza_id)) row_uids = yield self.dbpool.runQuery( "SELECT uid FROM history WHERE stanza_id = ? LIMIT ?", (stanza_id, count-1)) uids = [r[0] for r in row_uids] yield self.dbpool.runQuery( - "DELETE FROM history WHERE uid IN ({})".format(u",".join(u"?"*len(uids))), + "DELETE FROM history WHERE uid IN ({})".format(",".join("?"*len(uids))), uids) def deleteInfo(txn): # with foreign_keys on, the delete takes ages, so we deactivate it here # the time to delete info messages from history. txn.execute("PRAGMA foreign_keys = OFF") - txn.execute(u"DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM subject WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM thread WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM history WHERE type='info'") + txn.execute("DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM subject WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM thread WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM history WHERE type='info'") # not sure that is is necessary to reactivate here, but in doubt… txn.execute("PRAGMA foreign_keys = ON") - log.info(u'Deleting "info" messages (this can take a while)') + log.info('Deleting "info" messages (this can take a while)') yield self.dbpool.runInteraction(deleteInfo) - log.info(u"Cleaning done") + log.info("Cleaning done") # we have to rename table we will replace # tables referencing history need to be replaced to, else reference would @@ -1423,68 +1434,68 @@ yield self.dbpool.runQuery("ALTER TABLE thread RENAME TO thread_old") # history - query = (u"CREATE TABLE history (uid TEXT PRIMARY KEY, stanza_id TEXT, " - u"update_uid TEXT, profile_id INTEGER, source TEXT, dest TEXT, " - u"source_res TEXT, dest_res TEXT, timestamp DATETIME NOT NULL, " - u"received_timestamp DATETIME, type TEXT, extra BLOB, " - u"FOREIGN KEY(profile_id) REFERENCES profiles(id) ON DELETE CASCADE, " - u"FOREIGN KEY(type) REFERENCES message_types(type), " - u"UNIQUE (profile_id, stanza_id, source, dest))") + query = ("CREATE TABLE history (uid TEXT PRIMARY KEY, stanza_id TEXT, " + "update_uid TEXT, profile_id INTEGER, source TEXT, dest TEXT, " + "source_res TEXT, dest_res TEXT, timestamp DATETIME NOT NULL, " + "received_timestamp DATETIME, type TEXT, extra BLOB, " + "FOREIGN KEY(profile_id) REFERENCES profiles(id) ON DELETE CASCADE, " + "FOREIGN KEY(type) REFERENCES message_types(type), " + "UNIQUE 
(profile_id, stanza_id, source, dest))") yield self.dbpool.runQuery(query) # message - query = (u"CREATE TABLE message (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" - u", message TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " - u"history(uid) ON DELETE CASCADE)") + query = ("CREATE TABLE message (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" + ", message TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " + "history(uid) ON DELETE CASCADE)") yield self.dbpool.runQuery(query) # subject - query = (u"CREATE TABLE subject (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" - u", subject TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " - u"history(uid) ON DELETE CASCADE)") + query = ("CREATE TABLE subject (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" + ", subject TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " + "history(uid) ON DELETE CASCADE)") yield self.dbpool.runQuery(query) # thread - query = (u"CREATE TABLE thread (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" - u", thread_id TEXT, parent_id TEXT, FOREIGN KEY(history_uid) REFERENCES " - u"history(uid) ON DELETE CASCADE)") + query = ("CREATE TABLE thread (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" + ", thread_id TEXT, parent_id TEXT, FOREIGN KEY(history_uid) REFERENCES " + "history(uid) ON DELETE CASCADE)") yield self.dbpool.runQuery(query) - log.info(u"Now transfering old data to new tables, please be patient.") + log.info("Now transfering old data to new tables, please be patient.") - log.info(u"\nTransfering table history") - query = (u"INSERT INTO history (uid, stanza_id, update_uid, profile_id, source, " - u"dest, source_res, dest_res, timestamp, received_timestamp, type, extra" - u") SELECT uid, stanza_id, update_uid, profile_id, source, dest, " - u"source_res, dest_res, timestamp, received_timestamp, type, extra " - u"FROM history_old") + log.info("\nTransfering table history") + query = ("INSERT INTO history (uid, stanza_id, update_uid, profile_id, source, " + "dest, source_res, dest_res, timestamp, received_timestamp, type, extra" + ") SELECT uid, stanza_id, update_uid, profile_id, source, dest, " + "source_res, dest_res, timestamp, received_timestamp, type, extra " + "FROM history_old") yield self.dbpool.runQuery(query) - log.info(u"\nTransfering table message") - query = (u"INSERT INTO message (id, history_uid, message, language) SELECT id, " - u"history_uid, message, language FROM message_old") + log.info("\nTransfering table message") + query = ("INSERT INTO message (id, history_uid, message, language) SELECT id, " + "history_uid, message, language FROM message_old") yield self.dbpool.runQuery(query) - log.info(u"\nTransfering table subject") - query = (u"INSERT INTO subject (id, history_uid, subject, language) SELECT id, " - u"history_uid, subject, language FROM subject_old") + log.info("\nTransfering table subject") + query = ("INSERT INTO subject (id, history_uid, subject, language) SELECT id, " + "history_uid, subject, language FROM subject_old") yield self.dbpool.runQuery(query) - log.info(u"\nTransfering table thread") - query = (u"INSERT INTO thread (id, history_uid, thread_id, parent_id) SELECT id" - u", history_uid, thread_id, parent_id FROM thread_old") + log.info("\nTransfering table thread") + query = ("INSERT INTO thread (id, history_uid, thread_id, parent_id) SELECT id" + ", history_uid, thread_id, parent_id FROM thread_old") yield self.dbpool.runQuery(query) - log.info(u"\nRemoving old tables") + log.info("\nRemoving old tables") # because of foreign 
keys, tables referencing history_old # must be deleted first yield self.dbpool.runQuery("DROP TABLE thread_old") yield self.dbpool.runQuery("DROP TABLE subject_old") yield self.dbpool.runQuery("DROP TABLE message_old") yield self.dbpool.runQuery("DROP TABLE history_old") - log.info(u"\nReducing database size (this can take a while)") + log.info("\nReducing database size (this can take a while)") yield self.dbpool.runQuery("VACUUM") - log.info(u"Database update done :)") + log.info("Database update done :)") @defer.inlineCallbacks def update_v3(self): @@ -1494,7 +1505,7 @@ # big database for tests. If issues are happening, we can cut it # in smaller transactions using LIMIT and by deleting already updated # messages - log.info(u"Database update to v3, this may take a while") + log.info("Database update to v3, this may take a while") # we need to fix duplicate timestamp, as it can result in conflicts with the new schema rows = yield self.dbpool.runQuery("SELECT timestamp, COUNT(*) as c FROM history GROUP BY timestamp HAVING c>1") @@ -1506,10 +1517,10 @@ for idx, (id_,) in enumerate(ids_rows): fixed.append(id_) yield self.dbpool.runQuery("UPDATE history SET timestamp=? WHERE id=?", (float(timestamp) + idx * 0.001, id_)) - log.info(u"fixed messages with ids {}".format(u', '.join([unicode(id_) for id_ in fixed]))) + log.info("fixed messages with ids {}".format(', '.join([str(id_) for id_ in fixed]))) def historySchema(txn): - log.info(u"History schema update") + log.info("History schema update") txn.execute("ALTER TABLE history RENAME TO tmp_sat_update") txn.execute("CREATE TABLE history (uid TEXT PRIMARY KEY, update_uid TEXT, profile_id INTEGER, source TEXT, dest TEXT, source_res TEXT, dest_res TEXT, timestamp DATETIME NOT NULL, received_timestamp DATETIME, type TEXT, extra BLOB, FOREIGN KEY(profile_id) REFERENCES profiles(id) ON DELETE CASCADE, FOREIGN KEY(type) REFERENCES message_types(type), UNIQUE (profile_id, timestamp, source, dest, source_res, dest_res))") txn.execute("INSERT INTO history (uid, profile_id, source, dest, source_res, dest_res, timestamp, type, extra) SELECT id, profile_id, source, dest, source_res, dest_res, timestamp, type, extra FROM tmp_sat_update") @@ -1517,17 +1528,17 @@ yield self.dbpool.runInteraction(historySchema) def newTables(txn): - log.info(u"Creating new tables") + log.info("Creating new tables") txn.execute("CREATE TABLE message (id INTEGER PRIMARY KEY ASC, history_uid INTEGER, message TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES history(uid) ON DELETE CASCADE)") txn.execute("CREATE TABLE thread (id INTEGER PRIMARY KEY ASC, history_uid INTEGER, thread_id TEXT, parent_id TEXT, FOREIGN KEY(history_uid) REFERENCES history(uid) ON DELETE CASCADE)") txn.execute("CREATE TABLE subject (id INTEGER PRIMARY KEY ASC, history_uid INTEGER, subject TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES history(uid) ON DELETE CASCADE)") yield self.dbpool.runInteraction(newTables) - log.info(u"inserting new message type") + log.info("inserting new message type") yield self.dbpool.runQuery("INSERT INTO message_types VALUES (?)", ('info',)) - log.info(u"messages update") + log.info("messages update") rows = yield self.dbpool.runQuery("SELECT id, timestamp, message, extra FROM tmp_sat_update") total = len(rows) @@ -1545,7 +1556,7 @@ except EOFError: extra = {} except Exception: - log.warning(u"Can't handle extra data for message id {}, ignoring it".format(id_)) + log.warning("Can't handle extra data for message id {}, ignoring it".format(id_)) extra = {} 
queries.append(("INSERT INTO message(history_uid, message) VALUES (?,?)", (id_, message))) @@ -1556,9 +1567,9 @@ pass else: try: - subject = subject.decode('utf-8') + subject = subject except UnicodeEncodeError: - log.warning(u"Error while decoding subject, ignoring it") + log.warning("Error while decoding subject, ignoring it") del extra['subject'] else: queries.append(("INSERT INTO subject(history_uid, subject) VALUES (?,?)", (id_, subject))) @@ -1597,7 +1608,7 @@ try: id_ = result[0][0] except IndexError: - log.error(u"Profile of id %d is referenced in 'param_ind' but it doesn't exist!" % profile_id) + log.error("Profile of id %d is referenced in 'param_ind' but it doesn't exist!" % profile_id) return defer.succeed(None) sat_password = xmpp_password
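Note on the _load_pickle() workaround added above: under Python 3, sqlite3 returns str for TEXT columns and bytes for BLOB columns, while pickle.loads() only accepts bytes, so rows pickled by the old Python 2 code may come back as either type. A minimal standalone sketch of the same fallback (simplified table and names, not the real schema):

    import pickle
    import sqlite3

    def load_pickle(value):
        """Accept bytes (BLOB rows) or str (legacy rows) and unpickle either."""
        try:
            return pickle.loads(value)
        except TypeError:
            # legacy value came back as str: re-encode before unpickling
            return pickle.loads(value.encode('utf-8'))

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE private_ind_bin (key TEXT, value BLOB)")
    conn.execute("INSERT INTO private_ind_bin VALUES (?, ?)",
                 ("demo", pickle.dumps({"a": 1}, 0)))
    stored = conn.execute("SELECT value FROM private_ind_bin").fetchone()[0]
    print(load_pickle(stored))                                      # {'a': 1}
    print(load_pickle(pickle.dumps({"b": 2}, 0).decode('utf-8')))   # str input path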
--- a/sat/plugins/plugin_adhoc_dbus.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_adhoc_dbus.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for adding D-Bus to Ad-Hoc Commands @@ -30,8 +30,8 @@ from lxml import etree except ImportError: etree = None - log.warning(u"Missing module lxml, please download/install it from http://lxml.de/ ." - u"Auto D-Bus discovery will be disabled") + log.warning("Missing module lxml, please download/install it from http://lxml.de/ ." + "Auto D-Bus discovery will be disabled") from collections import OrderedDict import os.path import uuid @@ -40,8 +40,8 @@ from dbus.mainloop.glib import DBusGMainLoop except ImportError: dbus = None - log.warning(u"Missing module dbus, please download/install it" - u"auto D-Bus discovery will be disabled") + log.warning("Missing module dbus, please download/install it" + "auto D-Bus discovery will be disabled") else: DBusGMainLoop(set_as_default=True) @@ -50,18 +50,18 @@ FD_NAME = "org.freedesktop.DBus" FD_PATH = "/org/freedekstop/DBus" INTROSPECT_IFACE = "org.freedesktop.DBus.Introspectable" -MPRIS_PREFIX = u"org.mpris.MediaPlayer2" -CMD_GO_BACK = u"GoBack" -CMD_GO_FWD = u"GoFW" +MPRIS_PREFIX = "org.mpris.MediaPlayer2" +CMD_GO_BACK = "GoBack" +CMD_GO_FWD = "GoFW" SEEK_OFFSET = 5 * 1000 * 1000 -MPRIS_COMMANDS = [u"org.mpris.MediaPlayer2.Player." + cmd for cmd in ( - u"Previous", CMD_GO_BACK, u"PlayPause", CMD_GO_FWD, u"Next")] -MPRIS_PATH = u"/org/mpris/MediaPlayer2" +MPRIS_COMMANDS = ["org.mpris.MediaPlayer2.Player." + cmd for cmd in ( + "Previous", CMD_GO_BACK, "PlayPause", CMD_GO_FWD, "Next")] +MPRIS_PATH = "/org/mpris/MediaPlayer2" MPRIS_PROPERTIES = OrderedDict(( - (u"org.mpris.MediaPlayer2", ( + ("org.mpris.MediaPlayer2", ( "Identity", )), - (u"org.mpris.MediaPlayer2.Player", ( + ("org.mpris.MediaPlayer2.Player", ( "Metadata", "PlaybackStatus", "Volume", @@ -69,7 +69,7 @@ )) MPRIS_METADATA_KEY = "Metadata" MPRIS_METADATA_MAP = OrderedDict(( - ("xesam:title", u"Title"), + ("xesam:title", "Title"), )) INTROSPECT_METHOD = "Introspect" @@ -88,7 +88,7 @@ C.PI_DEPENDENCIES: ["XEP-0050"], C.PI_MAIN: "AdHocDBus", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Add D-Bus management to Ad-Hoc commands"""), + C.PI_DESCRIPTION: _("""Add D-Bus management to Ad-Hoc commands"""), } @@ -104,7 +104,7 @@ in_sign="sasasasasasass", out_sign="(sa(sss))", method=self._adHocDBusAddAuto, - async=True, + async_=True, ) host.bridge.addMethod( "adHocRemotesGet", @@ -112,10 +112,10 @@ in_sign="s", out_sign="a(sss)", method=self._adHocRemotesGet, - async=True, + async_=True, ) self._c = host.plugins["XEP-0050"] - host.registerNamespace(u"mediaplayer", NS_MEDIA_PLAYER) + host.registerNamespace("mediaplayer", NS_MEDIA_PLAYER) if dbus is not None: self.session_bus = dbus.SessionBus() self.fd_object = self.session_bus.get_object( @@ -124,7 +124,7 @@ def profileConnected(self, client): if dbus is not None: self._c.addAdHocCommand( - client, self.localMediaCb, D_(u"Media Players"), + client, self.localMediaCb, D_("Media Players"), node=NS_MEDIA_PLAYER, timeout=60*60*6 # 6 hours timeout, to avoid breaking remote # in the middle of a movie @@ -151,7 +151,7 @@ def _DBusGetProperty(self, proxy, interface, name): return self._DBusAsyncCall( - proxy, u"Get", interface, name, interface=u"org.freedesktop.DBus.Properties") + proxy, "Get", interface, name, interface="org.freedesktop.DBus.Properties") def _DBusListNames(self): @@ -271,7 +271,7 @@ elif len(actions) == 2: 
# we should have the answer here try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) answer_form = data_form.Form.fromElement(x_elt) command = answer_form["command"] except (KeyError, StopIteration): @@ -295,11 +295,11 @@ return DBusCallback( client, None, session_data, self._c.ACTION.EXECUTE, node ) - form = data_form.Form("form", title=_(u"Updated")) - form.addField(data_form.Field("fixed", u"Command sent")) + form = data_form.Form("form", title=_("Updated")) + form.addField(data_form.Field("fixed", "Command sent")) status = self._c.STATUS.COMPLETED payload = None - note = (self._c.NOTE.INFO, _(u"Command sent")) + note = (self._c.NOTE.INFO, _("Command sent")) else: raise self._c.AdHocError(self._c.ERROR.INTERNAL) @@ -363,18 +363,18 @@ opt.label or opt.value)) except Exception as e: log.warning(_( - u"Can't retrieve remote controllers on {device_jid}: " - u"{reason}".format(device_jid=device_jid, reason=e))) + "Can't retrieve remote controllers on {device_jid}: " + "{reason}".format(device_jid=device_jid, reason=e))) break defer.returnValue(remotes) def doMPRISCommand(self, proxy, command): - iface, command = command.rsplit(u".", 1) + iface, command = command.rsplit(".", 1) if command == CMD_GO_BACK: - command = u'Seek' + command = 'Seek' args = [-SEEK_OFFSET] elif command == CMD_GO_FWD: - command = u'Seek' + command = 'Seek' args = [SEEK_OFFSET] else: args = [] @@ -382,17 +382,17 @@ def addMPRISMetadata(self, form, metadata): """Serialise MRPIS Metadata according to MPRIS_METADATA_MAP""" - for mpris_key, name in MPRIS_METADATA_MAP.iteritems(): + for mpris_key, name in MPRIS_METADATA_MAP.items(): if mpris_key in metadata: - value = unicode(metadata[mpris_key]) - form.addField(data_form.Field(fieldType=u"fixed", + value = str(metadata[mpris_key]) + form.addField(data_form.Field(fieldType="fixed", var=name, value=value)) @defer.inlineCallbacks def localMediaCb(self, client, command_elt, session_data, action, node): try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None @@ -402,16 +402,16 @@ bus_names = yield self._DBusListNames() bus_names = [b for b in bus_names if b.startswith(MPRIS_PREFIX)] if len(bus_names) == 0: - note = (self._c.NOTE.INFO, D_(u"No media player found.")) + note = (self._c.NOTE.INFO, D_("No media player found.")) defer.returnValue((None, self._c.STATUS.COMPLETED, None, note)) options = [] status = self._c.STATUS.EXECUTING - form = data_form.Form("form", title=D_(u"Media Player Selection"), + form = data_form.Form("form", title=D_("Media Player Selection"), formNamespace=NS_MEDIA_PLAYER) for bus in bus_names: player_name = bus[len(MPRIS_PREFIX)+1:] if not player_name: - log.warning(_(u"Ignoring MPRIS bus without suffix")) + log.warning(_("Ignoring MPRIS bus without suffix")) continue options.append(data_form.Option(bus, player_name)) field = data_form.Field( @@ -423,53 +423,53 @@ else: # player request try: - bus_name = command_form[u"media_player"] + bus_name = command_form["media_player"] except KeyError: - raise ValueError(_(u"missing media_player value")) + raise ValueError(_("missing media_player value")) if not bus_name.startswith(MPRIS_PREFIX): - log.warning(_(u"Media player ad-hoc command trying to use non MPRIS bus. " - u"Hack attempt? 
Refused bus: {bus_name}").format( + log.warning(_("Media player ad-hoc command trying to use non MPRIS bus. " + "Hack attempt? Refused bus: {bus_name}").format( bus_name=bus_name)) - note = (self._c.NOTE.ERROR, D_(u"Invalid player name.")) + note = (self._c.NOTE.ERROR, D_("Invalid player name.")) defer.returnValue((None, self._c.STATUS.COMPLETED, None, note)) try: proxy = self.session_bus.get_object(bus_name, MPRIS_PATH) except dbus.exceptions.DBusException as e: - log.warning(_(u"Can't get D-Bus proxy: {reason}").format(reason=e)) - note = (self._c.NOTE.ERROR, D_(u"Media player is not available anymore")) + log.warning(_("Can't get D-Bus proxy: {reason}").format(reason=e)) + note = (self._c.NOTE.ERROR, D_("Media player is not available anymore")) defer.returnValue((None, self._c.STATUS.COMPLETED, None, note)) try: - command = command_form[u"command"] + command = command_form["command"] except KeyError: pass else: yield self.doMPRISCommand(proxy, command) # we construct the remote control form - form = data_form.Form("form", title=D_(u"Media Player Selection")) - form.addField(data_form.Field(fieldType=u"hidden", - var=u"media_player", + form = data_form.Form("form", title=D_("Media Player Selection")) + form.addField(data_form.Field(fieldType="hidden", + var="media_player", value=bus_name)) - for iface, properties_names in MPRIS_PROPERTIES.iteritems(): + for iface, properties_names in MPRIS_PROPERTIES.items(): for name in properties_names: try: value = yield self._DBusGetProperty(proxy, iface, name) except Exception as e: - log.warning(_(u"Can't retrieve attribute {name}: {reason}") + log.warning(_("Can't retrieve attribute {name}: {reason}") .format(name=name, reason=e)) continue if name == MPRIS_METADATA_KEY: self.addMPRISMetadata(form, value) else: - form.addField(data_form.Field(fieldType=u"fixed", + form.addField(data_form.Field(fieldType="fixed", var=name, - value=unicode(value))) + value=str(value))) - commands = [data_form.Option(c, c.rsplit(u".", 1)[1]) for c in MPRIS_COMMANDS] - form.addField(data_form.Field(fieldType=u"list-single", - var=u"command", + commands = [data_form.Option(c, c.rsplit(".", 1)[1]) for c in MPRIS_COMMANDS] + form.addField(data_form.Field(fieldType="list-single", + var="command", options=commands, required=True))
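The changes to plugin_adhoc_dbus.py above repeat two port-wide patterns: generators lost their .next() method (the next() builtin is used instead), and "async" became a reserved word in Python 3.7, hence the async_ keyword in bridge.addMethod(). A minimal sketch with illustrative stand-in names:

    def elements(namespace, name):
        # stand-in for command_elt.elements(...), which returns a generator
        yield "<x/>"

    x_iter = elements("jabber:x:data", "x")
    # Python 2 wrote x_iter.next(); generators have no .next() method on Python 3:
    x_elt = next(x_iter)

    def add_method(name, in_sign="", out_sign="", method=None, async_=False):
        # a keyword argument literally named "async" is a SyntaxError on 3.7+,
        # which is why the bridge API now takes async_
        return (name, async_)

    print(x_elt, add_method("adHocRemotesGet", in_sign="s", out_sign="a(sss)",
                            async_=True))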
--- a/sat/plugins/plugin_blog_import.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_blog_import.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for import external blogs @@ -31,7 +31,7 @@ import os import os.path import tempfile -import urlparse +import urllib.parse import shortuuid @@ -43,7 +43,7 @@ C.PI_MAIN: "BlogImportPlugin", C.PI_HANDLER: "no", C.PI_DESCRIPTION: _( - u"""Blog import management: + """Blog import management: This plugin manage the different blog importers which can register to it, and handle generic importing tasks.""" ), } @@ -67,7 +67,7 @@ self._p = host.plugins["XEP-0060"] self._m = host.plugins["XEP-0277"] self._s = self.host.plugins["TEXT_SYNTAXES"] - host.plugins["IMPORT"].initialize(self, u"blog") + host.plugins["IMPORT"].initialize(self, "blog") def importItem( self, client, item_import_data, session, options, return_data, service, node @@ -107,7 +107,7 @@ try: item_id = mb_data["id"] except KeyError: - item_id = mb_data["id"] = unicode(shortuuid.uuid()) + item_id = mb_data["id"] = str(shortuuid.uuid()) try: # we keep the link between old url and new blog item @@ -121,7 +121,7 @@ node or self._m.namespace, item_id, ) - log.info(u"url link from {old} to {new}".format(old=old_uri, new=new_uri)) + log.info("url link from {old} to {new}".format(old=old_uri, new=new_uri)) return mb_data @@ -129,7 +129,7 @@ def importSubItems(self, client, item_import_data, mb_data, session, options): # comments data if len(item_import_data["comments"]) != 1: - raise NotImplementedError(u"can't manage multiple comment links") + raise NotImplementedError("can't manage multiple comment links") allow_comments = C.bool(mb_data.get("allow_comments", C.BOOL_FALSE)) if allow_comments: comments_service = yield self._m.getCommentsService(client) @@ -145,13 +145,13 @@ else: if item_import_data["comments"][0]: raise exceptions.DataError( - u"allow_comments set to False, but comments are there" + "allow_comments set to False, but comments are there" ) defer.returnValue(None) def publishItem(self, client, mb_data, service, node, session): log.debug( - u"uploading item [{id}]: {title}".format( + "uploading item [{id}]: {title}".format( id=mb_data["id"], title=mb_data.get("title", "") ) ) @@ -182,7 +182,7 @@ else: if "{}_xhtml".format(prefix) in mb_data: raise exceptions.DataError( - u"importer gave {prefix}_rich and {prefix}_xhtml at the same time, this is not allowed".format( + "importer gave {prefix}_rich and {prefix}_xhtml at the same time, this is not allowed".format( prefix=prefix ) ) @@ -200,14 +200,14 @@ else: if "{}_xhtml".format(prefix) in mb_data: log.warning( - u"{prefix}_text will be replaced by converted {prefix}_xhtml, so filters can be handled".format( + "{prefix}_text will be replaced by converted {prefix}_xhtml, so filters can be handled".format( prefix=prefix ) ) del mb_data["{}_text".format(prefix)] else: log.warning( - u"importer gave a text {prefix}, blog filters don't work on text {prefix}".format( + "importer gave a text {prefix}, blog filters don't work on text {prefix}".format( prefix=prefix ) ) @@ -225,8 +225,8 @@ opt_host = options.get(OPT_HOST) if opt_host: # we normalise the domain - parsed_host = urlparse.urlsplit(opt_host) - opt_host = urlparse.urlunsplit( + parsed_host = urllib.parse.urlsplit(opt_host) + opt_host = urllib.parse.urlunsplit( ( parsed_host.scheme or "http", parsed_host.netloc or parsed_host.path, @@ -239,7 +239,7 @@ tmp_dir = tempfile.mkdtemp() try: # 
TODO: would be nice to also update the hyperlinks to these images, e.g. when you have <a href="{url}"><img src="{url}"></a> - for img_elt in xml_tools.findAll(top_elt, names=[u"img"]): + for img_elt in xml_tools.findAll(top_elt, names=["img"]): yield self.imgFilters(client, img_elt, options, opt_host, tmp_dir) finally: os.rmdir(tmp_dir) # XXX: tmp_dir should be empty, or something went wrong @@ -260,21 +260,21 @@ """ try: url = img_elt["src"] - if url[0] == u"/": + if url[0] == "/": if not opt_host: log.warning( - u"host was not specified, we can't deal with src without host ({url}) and have to ignore the following <img/>:\n{xml}".format( + "host was not specified, we can't deal with src without host ({url}) and have to ignore the following <img/>:\n{xml}".format( url=url, xml=img_elt.toXml() ) ) return else: - url = urlparse.urljoin(opt_host, url) + url = urllib.parse.urljoin(opt_host, url) filename = url.rsplit("/", 1)[-1].strip() if not filename: raise KeyError except (KeyError, IndexError): - log.warning(u"ignoring invalid img element: {}".format(img_elt.toXml())) + log.warning("ignoring invalid img element: {}".format(img_elt.toXml())) return # we change the url for the normalized one @@ -288,10 +288,10 @@ pass else: # host is the ignored one, we skip - parsed_url = urlparse.urlsplit(url) + parsed_url = urllib.parse.urlsplit(url) if ignore_host in parsed_url.hostname: log.info( - u"Don't upload image at {url} because of {opt} option".format( + "Don't upload image at {url} because of {opt} option".format( url=url, opt=OPT_UPLOAD_IGNORE_HOST ) ) @@ -304,7 +304,7 @@ try: yield web_client.downloadPage(url.encode("utf-8"), tmp_file) filename = filename.replace( - u"%", u"_" + "%", "_" ) # FIXME: tmp workaround for a bug in prosody http upload __, download_d = yield self._u.upload( client, tmp_file, filename, options=upload_options @@ -312,7 +312,7 @@ download_url = yield download_d except Exception as e: log.warning( - u"can't download image at {url}: {reason}".format(url=url, reason=e) + "can't download image at {url}: {reason}".format(url=url, reason=e) ) else: img_elt["src"] = download_url
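In plugin_blog_import.py the Python 2 urlparse module becomes urllib.parse, with the same urlsplit/urlunsplit/urljoin functions. A small sketch of the host normalisation shown above (the example URL is made up):

    import urllib.parse

    parsed_host = urllib.parse.urlsplit("salut-a-toi.org/blog")
    opt_host = urllib.parse.urlunsplit(
        (parsed_host.scheme or "http",
         parsed_host.netloc or parsed_host.path,
         "", "", ""))
    print(opt_host)                                   # http://salut-a-toi.org/blog
    print(urllib.parse.urljoin(opt_host, "/images/logo.png"))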
--- a/sat/plugins/plugin_blog_import_dokuwiki.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_blog_import_dokuwiki.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin to import dokuwiki blogs @@ -28,8 +28,8 @@ from twisted.internet import threads from collections import OrderedDict import calendar -import urllib -import urlparse +import urllib.request, urllib.parse, urllib.error +import urllib.parse import tempfile import re import time @@ -39,13 +39,13 @@ from dokuwiki import DokuWiki, DokuWikiError # this is a new dependency except ImportError: raise exceptions.MissingModule( - u'Missing module dokuwiki, please install it with "pip install dokuwiki"' + 'Missing module dokuwiki, please install it with "pip install dokuwiki"' ) try: from PIL import Image # this is already needed by plugin XEP-0054 except: raise exceptions.MissingModule( - u"Missing module pillow, please download/install it from https://python-pillow.github.io" + "Missing module pillow, please download/install it from https://python-pillow.github.io" ) PLUGIN_INFO = { @@ -58,10 +58,10 @@ C.PI_DESCRIPTION: _("""Blog importer for Dokuwiki blog engine."""), } -SHORT_DESC = D_(u"import posts from Dokuwiki blog engine") +SHORT_DESC = D_("import posts from Dokuwiki blog engine") LONG_DESC = D_( - u"""This importer handle Dokuwiki blog engine. + """This importer handle Dokuwiki blog engine. To use it, you need an admin access to a running Dokuwiki website (local or on the Internet). The importer retrieves the data using @@ -129,7 +129,7 @@ @param post(dict): parsed post data @return (unicode): post unique item id """ - return unicode(post["id"]) + return str(post["id"]) def getPostUpdated(self, post): """Return the update date. @@ -137,7 +137,7 @@ @param post(dict): parsed post data @return (unicode): update date """ - return unicode(post["mtime"]) + return str(post["mtime"]) def getPostPublished(self, post): """Try to parse the date from the message ID, else use "mtime". @@ -148,7 +148,7 @@ @param post (dict): parsed post data @return (unicode): publication date """ - id_, default = unicode(post["id"]), unicode(post["mtime"]) + id_, default = str(post["id"]), str(post["mtime"]) try: date = id_.split(":")[-1].split("_")[0] except KeyError: @@ -160,7 +160,7 @@ time_struct = time.strptime(date, "%Y%m%d") except ValueError: return default - return unicode(calendar.timegm(time_struct)) + return str(calendar.timegm(time_struct)) def processPost(self, post, profile_jid): """Process a single page. @@ -235,7 +235,7 @@ if count >= self.limit: break - return (self.posts_data.itervalues(), len(self.posts_data)) + return (iter(self.posts_data.values()), len(self.posts_data)) def processContent(self, text, backlinks, profile_jid): """Do text substitutions and file copy. 
@@ -243,7 +243,7 @@ @param text (unicode): message content @param backlinks (list[unicode]): list of backlinks """ - text = text.strip(u"\ufeff") # this is at the beginning of the file (BOM) + text = text.strip("\ufeff") # this is at the beginning of the file (BOM) for backlink in backlinks: src = '/doku.php?id=%s"' % backlink @@ -261,9 +261,9 @@ if self.media_repo: self.moveMedia(link, subs) elif link not in subs: - subs[link] = urlparse.urljoin(self.url, link) + subs[link] = urllib.parse.urljoin(self.url, link) - for url, new_url in subs.iteritems(): + for url, new_url in subs.items(): text = text.replace(url, new_url) return text @@ -274,12 +274,12 @@ @param link (unicode): media link @param subs (dict): substitutions data """ - url = urlparse.urljoin(self.url, link) + url = urllib.parse.urljoin(self.url, link) user_media = re.match(r"(/lib/exe/\w+.php\?)(.*)", link) thumb_width = None if user_media: # media that has been added by the user - params = urlparse.parse_qs(urlparse.urlparse(url).query) + params = urllib.parse.parse_qs(urllib.parse.urlparse(url).query) try: media = params["media"][0] except KeyError: @@ -295,7 +295,7 @@ filename = media.replace(":", "/") # XXX: avoid "precondition failed" error (only keep the media parameter) - url = urlparse.urljoin(self.url, "/lib/exe/fetch.php?media=%s" % media) + url = urllib.parse.urljoin(self.url, "/lib/exe/fetch.php?media=%s" % media) elif link.startswith("/lib/plugins/"): # other link added by a plugin or something else @@ -324,7 +324,7 @@ if not os.path.exists(dest): if not os.path.exists(dirname): os.makedirs(dirname) - urllib.urlretrieve(source, dest) + urllib.request.urlretrieve(source, dest) log.debug("DokuWiki media file copied to %s" % dest) def createThumbnail(self, source, dest, width):
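For the Dokuwiki importer, the flat urllib/urlparse modules are split across urllib.request and urllib.parse; urlretrieve() in particular now lives in urllib.request. A short sketch (the wiki address and media name are invented):

    import urllib.parse
    import urllib.request   # urlretrieve() moved here

    url = urllib.parse.urljoin("https://wiki.example.net",
                               "/lib/exe/fetch.php?media=blog:2019:photo.png")
    params = urllib.parse.parse_qs(urllib.parse.urlparse(url).query)
    print(params["media"][0])            # blog:2019:photo.png

    # the actual download would then be (needs network access, so left commented):
    # urllib.request.urlretrieve(url, "/tmp/photo.png")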
--- a/sat/plugins/plugin_blog_import_dotclear.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_blog_import_dotclear.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for import external blogs @@ -42,10 +42,10 @@ C.PI_DESCRIPTION: _("""Blog importer for Dotclear blog engine."""), } -SHORT_DESC = D_(u"import posts from Dotclear blog engine") +SHORT_DESC = D_("import posts from Dotclear blog engine") LONG_DESC = D_( - u"""This importer handle Dotclear blog engine. + """This importer handle Dotclear blog engine. To use it, you'll need to export your blog to a flat file. You must go in your admin interface and select Plugins/Maintenance then Backup. @@ -55,7 +55,7 @@ location: you must use the absolute path to your backup for the location parameter """ ) -POST_ID_PREFIX = u"sat_dc_" +POST_ID_PREFIX = "sat_dc_" KNOWN_DATA_TYPES = ( "link", "setting", @@ -66,7 +66,7 @@ "comment", "captcha", ) -ESCAPE_MAP = {"r": u"\r", "n": u"\n", '"': u'"', "\\": u"\\"} +ESCAPE_MAP = {"r": "\r", "n": "\n", '"': '"', "\\": "\\"} class DotclearParser(object): @@ -83,7 +83,7 @@ @param post(dict): parsed post data @return (unicode): post unique item id """ - return u"{}_{}_{}_{}:{}".format( + return "{}_{}_{}_{}:{}".format( POST_ID_PREFIX, post["blog_id"], post["user_id"], @@ -99,7 +99,7 @@ """ post_id = comment["post_id"] parent_item_id = self.posts_data[post_id]["blog"]["id"] - return u"{}_comment_{}".format(parent_item_id, comment["comment_id"]) + return "{}_comment_{}".format(parent_item_id, comment["comment_id"]) def getTime(self, data, key): """Parse time as given by dotclear, with timezone handling @@ -125,18 +125,18 @@ if char == '"': # we have reached the end of this field, # we try to parse a new one - yield u"".join(buf) + yield "".join(buf) buf = [] idx += 1 try: separator = fields_data[idx] except IndexError: return - if separator != u",": + if separator != ",": raise exceptions.ParsingError("Field separator was expeceted") idx += 1 break # we have a new field - elif char == u"\\": + elif char == "\\": idx += 1 try: char = ESCAPE_MAP[fields_data[idx]] @@ -144,22 +144,22 @@ raise exceptions.ParsingError("Escaped char was expected") except KeyError: char = fields_data[idx] - log.warning(u"Unknown key to escape: {}".format(char)) + log.warning("Unknown key to escape: {}".format(char)) buf.append(char) def parseFields(self, headers, data): - return dict(itertools.izip(headers, self.readFields(data))) + return dict(zip(headers, self.readFields(data))) def postHandler(self, headers, data, index): post = self.parseFields(headers, data) - log.debug(u"({}) post found: {}".format(index, post["post_title"])) + log.debug("({}) post found: {}".format(index, post["post_title"])) mb_data = { "id": self.getPostId(post), "published": self.getTime(post, "post_creadt"), "updated": self.getTime(post, "post_upddt"), "author": post["user_id"], # there use info are not in the archive # TODO: option to specify user info - "content_xhtml": u"{}{}".format( + "content_xhtml": "{}{}".format( post["post_content_xhtml"], post["post_excerpt_xhtml"] ), "title": post["post_title"], @@ -168,7 +168,7 @@ self.posts_data[post["post_id"]] = { "blog": mb_data, "comments": [[]], - "url": u"/post/{}".format(post["post_url"]), + "url": "/post/{}".format(post["post_url"]), } def metaHandler(self, headers, data, index): @@ -178,7 +178,7 @@ tags.add(meta["meta_id"]) def metaFinishedHandler(self): - for post_id, tags in self.tags.iteritems(): + for 
post_id, tags in self.tags.items(): data_format.iter2dict("tag", tags, self.posts_data[post_id]["blog"]) del self.tags @@ -186,9 +186,9 @@ comment = self.parseFields(headers, data) if comment["comment_site"]: # we don't use atom:uri because it's used for jid in XMPP - content = u'{}\n<hr>\n<a href="{}">author website</a>'.format( + content = '{}\n<hr>\n<a href="{}">author website</a>'.format( comment["comment_content"], - cgi.escape(comment["comment_site"]).replace('"', u"%22"), + cgi.escape(comment["comment_site"]).replace('"', "%22"), ) else: content = comment["comment_content"] @@ -208,24 +208,24 @@ def parse(self, db_path): with open(db_path) as f: - signature = f.readline().decode("utf-8") + signature = f.readline() try: version = signature.split("|")[1] except IndexError: version = None - log.debug(u"Dotclear version: {}".format(version)) + log.debug("Dotclear version: {}".format(version)) data_type = None data_headers = None index = None while True: - buf = f.readline().decode("utf-8") + buf = f.readline() if not buf: break if buf.startswith("["): header = buf.split(" ", 1) data_type = header[0][1:] if data_type not in KNOWN_DATA_TYPES: - log.warning(u"unkown data type: {}".format(data_type)) + log.warning("unkown data type: {}".format(data_type)) index = 0 try: data_headers = header[1].split(",") @@ -233,7 +233,7 @@ last_header = data_headers[-1] data_headers[-1] = last_header[: last_header.rfind("]")] except IndexError: - log.warning(u"Can't read data)") + log.warning("Can't read data)") else: if data_type is None: continue @@ -247,7 +247,7 @@ pass else: finished_handler() - log.debug(u"{} data finished".format(data_type)) + log.debug("{} data finished".format(data_type)) data_type = None continue assert data_type @@ -258,7 +258,7 @@ else: fields_handler(data_headers, buf, index) index += 1 - return (self.posts_data.itervalues(), len(self.posts_data)) + return (iter(self.posts_data.values()), len(self.posts_data)) class DotclearImport(object): @@ -272,7 +272,7 @@ def DcImport(self, client, location, options=None): if not os.path.isabs(location): raise exceptions.DataError( - u"An absolute path to backup data need to be given as location" + "An absolute path to backup data need to be given as location" ) dc_parser = DotclearParser() d = threads.deferToThread(dc_parser.parse, location)
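Two patterns from the Dotclear importer above: itertools.izip disappears because zip() is already an iterator on Python 3, and files opened in text mode yield str, so the explicit .decode("utf-8") on readline() goes away. A small sketch (the backup signature line is made up):

    import io

    headers = ["comment_id", "post_id", "comment_content"]
    fields = ["1", "42", "nice post!"]
    print(dict(zip(headers, fields)))    # zip() is lazy, no itertools.izip needed

    backup = io.StringIO("///DOTCLEAR|2.12|full\n[post post_id,post_title]\n")
    signature = backup.readline()        # text-mode reads return str, no .decode()
    print(signature.split("|")[1])       # 2.12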
--- a/sat/plugins/plugin_comp_file_sharing.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_comp_file_sharing.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for parrot mode (experimental) @@ -55,17 +55,17 @@ C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "FileSharing", C.PI_HANDLER: C.BOOL_TRUE, - C.PI_DESCRIPTION: _(u"""Component hosting and sharing files"""), + C.PI_DESCRIPTION: _("""Component hosting and sharing files"""), } -HASH_ALGO = u"sha-256" +HASH_ALGO = "sha-256" NS_COMMENTS = "org.salut-a-toi.comments" COMMENT_NODE_PREFIX = "org.salut-a-toi.file_comments/" class FileSharing(object): def __init__(self, host): - log.info(_(u"File Sharing initialization")) + log.info(_("File Sharing initialization")) self.host = host self._f = host.plugins["FILE"] self._jf = host.plugins["XEP-0234"] @@ -99,12 +99,12 @@ on file is received, this method create hash/thumbnails if necessary move the file to the right location, and create metadata entry in database """ - name = file_data[u"name"] + name = file_data["name"] extra = {} - if file_data[u"hash_algo"] == HASH_ALGO: - log.debug(_(u"Reusing already generated hash")) - file_hash = file_data[u"hash_hasher"].hexdigest() + if file_data["hash_algo"] == HASH_ALGO: + log.debug(_("Reusing already generated hash")) + file_hash = file_data["hash_hasher"].hexdigest() else: hasher = self._h.getHasher(HASH_ALGO) with open("file_path") as f: @@ -113,7 +113,7 @@ if os.path.isfile(final_path): log.debug( - u"file [{file_hash}] already exists, we can remove temporary one".format( + "file [{file_hash}] already exists, we can remove temporary one".format( file_hash=file_hash ) ) @@ -121,16 +121,16 @@ else: os.rename(file_path, final_path) log.debug( - u"file [{file_hash}] moved to {files_path}".format( + "file [{file_hash}] moved to {files_path}".format( file_hash=file_hash, files_path=self.files_path ) ) - mime_type = file_data.get(u"mime_type") - if not mime_type or mime_type == u"application/octet-stream": + mime_type = file_data.get("mime_type") + if not mime_type or mime_type == "application/octet-stream": mime_type = mimetypes.guess_type(name)[0] - if mime_type is not None and mime_type.startswith(u"image"): + if mime_type is not None and mime_type.startswith("image"): thumbnails = extra.setdefault(C.KEY_THUMBNAILS, []) for max_thumb_size in (self._t.SIZE_SMALL, self._t.SIZE_MEDIUM): try: @@ -141,19 +141,19 @@ 60 * 60 * 24 * 31 * 6, ) except Exception as e: - log.warning(_(u"Can't create thumbnail: {reason}").format(reason=e)) + log.warning(_("Can't create thumbnail: {reason}").format(reason=e)) break - thumbnails.append({u"id": thumb_id, u"size": thumb_size}) + thumbnails.append({"id": thumb_id, "size": thumb_size}) self.host.memory.setFile( client, name=name, - version=u"", + version="", file_hash=file_hash, hash_algo=HASH_ALGO, - size=file_data[u"size"], - path=file_data.get(u"path"), - namespace=file_data.get(u"namespace"), + size=file_data["size"], + path=file_data.get("path"), + namespace=file_data.get("namespace"), mime_type=mime_type, owner=peer_jid, extra=extra, @@ -191,49 +191,49 @@ self, client, session, content_data, content_name, file_data, file_elt ): """This method retrieve a file on request, and send if after checking permissions""" - peer_jid = session[u"peer_jid"] + peer_jid = session["peer_jid"] try: found_files = yield self.host.memory.getFiles( client, peer_jid=peer_jid, - name=file_data.get(u"name"), - file_hash=file_data.get(u"file_hash"), - 
hash_algo=file_data.get(u"hash_algo"), - path=file_data.get(u"path"), - namespace=file_data.get(u"namespace"), + name=file_data.get("name"), + file_hash=file_data.get("file_hash"), + hash_algo=file_data.get("hash_algo"), + path=file_data.get("path"), + namespace=file_data.get("namespace"), ) except exceptions.NotFound: found_files = None except exceptions.PermissionError: log.warning( - _(u"{peer_jid} is trying to access an unauthorized file: {name}").format( - peer_jid=peer_jid, name=file_data.get(u"name") + _("{peer_jid} is trying to access an unauthorized file: {name}").format( + peer_jid=peer_jid, name=file_data.get("name") ) ) defer.returnValue(False) if not found_files: log.warning( - _(u"no matching file found ({file_data})").format(file_data=file_data) + _("no matching file found ({file_data})").format(file_data=file_data) ) defer.returnValue(False) # we only use the first found file found_file = found_files[0] - if found_file[u'type'] != C.FILE_TYPE_FILE: - raise TypeError(u"a file was expected, type is {type_}".format( - type_=found_file[u'type'])) - file_hash = found_file[u"file_hash"] + if found_file['type'] != C.FILE_TYPE_FILE: + raise TypeError("a file was expected, type is {type_}".format( + type_=found_file['type'])) + file_hash = found_file["file_hash"] file_path = os.path.join(self.files_path, file_hash) - file_data[u"hash_hasher"] = hasher = self._h.getHasher(found_file[u"hash_algo"]) - size = file_data[u"size"] = found_file[u"size"] - file_data[u"file_hash"] = file_hash - file_data[u"hash_algo"] = found_file[u"hash_algo"] + file_data["hash_hasher"] = hasher = self._h.getHasher(found_file["hash_algo"]) + size = file_data["size"] = found_file["size"] + file_data["file_hash"] = file_hash + file_data["hash_algo"] = found_file["hash_algo"] # we complete file_elt so peer can have some details on the file - if u"name" not in file_data: - file_elt.addElement(u"name", content=found_file[u"name"]) - file_elt.addElement(u"size", content=unicode(size)) + if "name" not in file_data: + file_elt.addElement("name", content=found_file["name"]) + file_elt.addElement("size", content=str(size)) content_data["stream_object"] = stream.FileStreamObject( self.host, client, @@ -268,11 +268,11 @@ comment_elt = file_elt.addElement((NS_COMMENTS, "comments"), content=comments_url) try: - count = len(extra_args[u"extra"][u"comments"]) + count = len(extra_args["extra"]["comments"]) except KeyError: count = 0 - comment_elt["count"] = unicode(count) + comment_elt["count"] = str(count) return True def _getFileComments(self, file_elt, file_data): @@ -280,7 +280,7 @@ comments_elt = next(file_elt.elements(NS_COMMENTS, "comments")) except StopIteration: return - file_data["comments_url"] = unicode(comments_elt) + file_data["comments_url"] = str(comments_elt) file_data["comments_count"] = comments_elt["count"] return True
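
The upload handler above reuses an already computed hash when the algorithms match and otherwise recomputes it through the plugin's getHasher() helper. Since hashing in Python 3 only accepts bytes, a stand-alone sketch of the recompute step would read the file in binary mode; this uses hashlib directly and a hypothetical test file, not the plugin's hasher API:

import hashlib
from pathlib import Path

def file_sha256(path):
    """Return the hex sha-256 of a file, reading it in binary chunks."""
    hasher = hashlib.sha256()
    with open(path, "rb") as f:               # bytes, not text, in Python 3
        for chunk in iter(lambda: f.read(64 * 1024), b""):
            hasher.update(chunk)
    return hasher.hexdigest()

sample = Path("sample.bin")                   # hypothetical test file
sample.write_bytes(b"hello world")
print(file_sha256(sample))
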
--- a/sat/plugins/plugin_comp_file_sharing_management.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_comp_file_sharing_management.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -31,25 +31,25 @@ PLUGIN_INFO = { - C.PI_NAME: u"File Sharing Management", - C.PI_IMPORT_NAME: u"FILE_SHARING_MANAGEMENT", + C.PI_NAME: "File Sharing Management", + C.PI_IMPORT_NAME: "FILE_SHARING_MANAGEMENT", C.PI_MODES: [C.PLUG_MODE_COMPONENT], - C.PI_TYPE: u"EXP", + C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0050", u"XEP-0264"], + C.PI_DEPENDENCIES: ["XEP-0050", "XEP-0264"], C.PI_RECOMMENDATIONS: [], - C.PI_MAIN: u"FileSharingManagement", - C.PI_HANDLER: u"no", + C.PI_MAIN: "FileSharingManagement", + C.PI_HANDLER: "no", C.PI_DESCRIPTION: _( - u"Experimental handling of file management for file sharing. This plugins allows " - u"to change permissions of stored files/directories or remove them." + "Experimental handling of file management for file sharing. This plugins allows " + "to change permissions of stored files/directories or remove them." ), } -NS_FILE_MANAGEMENT = u"https://salut-a-toi.org/protocol/file-management:0" -NS_FILE_MANAGEMENT_PERM = u"https://salut-a-toi.org/protocol/file-management:0#perm" -NS_FILE_MANAGEMENT_DELETE = u"https://salut-a-toi.org/protocol/file-management:0#delete" -NS_FILE_MANAGEMENT_THUMB = u"https://salut-a-toi.org/protocol/file-management:0#thumb" +NS_FILE_MANAGEMENT = "https://salut-a-toi.org/protocol/file-management:0" +NS_FILE_MANAGEMENT_PERM = "https://salut-a-toi.org/protocol/file-management:0#perm" +NS_FILE_MANAGEMENT_DELETE = "https://salut-a-toi.org/protocol/file-management:0#delete" +NS_FILE_MANAGEMENT_THUMB = "https://salut-a-toi.org/protocol/file-management:0#thumb" class WorkflowError(Exception): @@ -68,7 +68,7 @@ # syntax?) should be elaborated and proposed as a standard. 
def __init__(self, host): - log.info(_(u"File Sharing Management plugin initialization")) + log.info(_("File Sharing Management plugin initialization")) self.host = host self._c = host.plugins["XEP-0050"] self._t = host.plugins["XEP-0264"] @@ -76,17 +76,17 @@ def profileConnected(self, client): self._c.addAdHocCommand( - client, self._onChangeFile, u"Change Permissions of File(s)", + client, self._onChangeFile, "Change Permissions of File(s)", node=NS_FILE_MANAGEMENT_PERM, allowed_magics=C.ENTITY_ALL, ) self._c.addAdHocCommand( - client, self._onDeleteFile, u"Delete File(s)", + client, self._onDeleteFile, "Delete File(s)", node=NS_FILE_MANAGEMENT_DELETE, allowed_magics=C.ENTITY_ALL, ) self._c.addAdHocCommand( - client, self._onGenThumbnails, u"Generate Thumbnails", + client, self._onGenThumbnails, "Generate Thumbnails", node=NS_FILE_MANAGEMENT_THUMB, allowed_magics=C.ENTITY_ALL, ) @@ -109,7 +109,7 @@ @return (tuple): arguments to use in defer.returnValue """ status = self._c.STATUS.EXECUTING - form = data_form.Form("form", title=u"File Management", + form = data_form.Form("form", title="File Management", formNamespace=NS_FILE_MANAGEMENT) field = data_form.Field( @@ -136,17 +136,17 @@ """ fields = command_form.fields try: - path = fields[u'path'].value.strip() - namespace = fields[u'namespace'].value or None + path = fields['path'].value.strip() + namespace = fields['namespace'].value or None except KeyError: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) if not path: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] requestor_bare = requestor.userhostJID() - path = path.rstrip(u'/') + path = path.rstrip('/') parent_path, basename = os.path.split(path) # TODO: if parent_path and basename are empty, we ask for root directory @@ -158,31 +158,31 @@ namespace=namespace) found_file = found_files[0] except (exceptions.NotFound, IndexError): - raise WorkflowError(self._err(_(u"file not found"))) + raise WorkflowError(self._err(_("file not found"))) except exceptions.PermissionError: - raise WorkflowError(self._err(_(u"forbidden"))) + raise WorkflowError(self._err(_("forbidden"))) if found_file['owner'] != requestor_bare: # only owner can manage files - log.warning(_(u"Only owner can manage files")) - raise WorkflowError(self._err(_(u"forbidden"))) + log.warning(_("Only owner can manage files")) + raise WorkflowError(self._err(_("forbidden"))) - session_data[u'found_file'] = found_file - session_data[u'namespace'] = namespace + session_data['found_file'] = found_file + session_data['namespace'] = namespace defer.returnValue(found_file) def _updateReadPermission(self, access, allowed_jids): if not allowed_jids: if C.ACCESS_PERM_READ in access: del access[C.ACCESS_PERM_READ] - elif allowed_jids == u'PUBLIC': + elif allowed_jids == 'PUBLIC': access[C.ACCESS_PERM_READ] = { - u"type": C.ACCESS_TYPE_PUBLIC + "type": C.ACCESS_TYPE_PUBLIC } else: access[C.ACCESS_PERM_READ] = { - u"type": C.ACCESS_TYPE_WHITELIST, - u"jids": [j.full() for j in allowed_jids] + "type": C.ACCESS_TYPE_WHITELIST, + "jids": [j.full() for j in allowed_jids] } @defer.inlineCallbacks @@ -192,30 +192,30 @@ @param file_data(dict): metadata of the file @param allowed_jids(list[jid.JID]): list of entities allowed to read the file """ - assert file_data[u'type'] == C.FILE_TYPE_DIRECTORY + assert file_data['type'] == C.FILE_TYPE_DIRECTORY files_data = yield self.host.memory.getFiles( - client, requestor, parent=file_data[u'id'], namespace=namespace) + client, 
requestor, parent=file_data['id'], namespace=namespace) for file_data in files_data: - if not file_data[u'access'].get(C.ACCESS_PERM_READ, {}): - log.debug(u"setting {perm} read permission for {name}".format( - perm=allowed_jids, name=file_data[u'name'])) + if not file_data['access'].get(C.ACCESS_PERM_READ, {}): + log.debug("setting {perm} read permission for {name}".format( + perm=allowed_jids, name=file_data['name'])) yield self.host.memory.fileUpdate( - file_data[u'id'], u'access', + file_data['id'], 'access', partial(self._updateReadPermission, allowed_jids=allowed_jids)) - if file_data[u'type'] == C.FILE_TYPE_DIRECTORY: - yield self._updateDir(client, requestor, namespace, file_data, u'PUBLIC') + if file_data['type'] == C.FILE_TYPE_DIRECTORY: + yield self._updateDir(client, requestor, namespace, file_data, 'PUBLIC') @defer.inlineCallbacks def _onChangeFile(self, client, command_elt, session_data, action, node): try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None found_file = session_data.get('found_file') - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] requestor_bare = requestor.userhostJID() if command_form is None or len(command_form.fields) == 0: @@ -230,31 +230,31 @@ defer.returnValue(e.err_args) # management request - if found_file[u'type'] == C.FILE_TYPE_DIRECTORY: - instructions = D_(u"Please select permissions for this directory") + if found_file['type'] == C.FILE_TYPE_DIRECTORY: + instructions = D_("Please select permissions for this directory") else: - instructions = D_(u"Please select permissions for this file") + instructions = D_("Please select permissions for this file") - form = data_form.Form("form", title=u"File Management", + form = data_form.Form("form", title="File Management", instructions=[instructions], formNamespace=NS_FILE_MANAGEMENT) field = data_form.Field( "text-multi", "read_allowed", required=False, - desc=u'list of jids allowed to read this file (beside yourself), or ' - u'"PUBLIC" to let a public access' + desc='list of jids allowed to read this file (beside yourself), or ' + '"PUBLIC" to let a public access' ) - read_access = found_file[u"access"].get(C.ACCESS_PERM_READ, {}) - access_type = read_access.get(u'type', C.ACCESS_TYPE_WHITELIST) + read_access = found_file["access"].get(C.ACCESS_PERM_READ, {}) + access_type = read_access.get('type', C.ACCESS_TYPE_WHITELIST) if access_type == C.ACCESS_TYPE_PUBLIC: - field.values = [u'PUBLIC'] + field.values = ['PUBLIC'] else: field.values = read_access.get('jids', []) form.addField(field) - if found_file[u'type'] == C.FILE_TYPE_DIRECTORY: + if found_file['type'] == C.FILE_TYPE_DIRECTORY: field = data_form.Field( "boolean", "recursive", value=False, required=False, - desc=u"Files under it will be made public to follow this dir " - u"permission (only if they don't have already a permission set)." + desc="Files under it will be made public to follow this dir " + "permission (only if they don't have already a permission set)." 
) form.addField(field) @@ -269,22 +269,22 @@ except KeyError: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) - if read_allowed.value == u'PUBLIC': - allowed_jids = u'PUBLIC' - elif read_allowed.value.strip() == u'': + if read_allowed.value == 'PUBLIC': + allowed_jids = 'PUBLIC' + elif read_allowed.value.strip() == '': allowed_jids = None else: try: allowed_jids = [jid.JID(v.strip()) for v in read_allowed.values if v.strip()] except RuntimeError as e: - log.warning(_(u"Can't use read_allowed values: {reason}").format( + log.warning(_("Can't use read_allowed values: {reason}").format( reason=e)) self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) - if found_file[u'type'] == C.FILE_TYPE_FILE: + if found_file['type'] == C.FILE_TYPE_FILE: yield self.host.memory.fileUpdate( - found_file[u'id'], u'access', + found_file['id'], 'access', partial(self._updateReadPermission, allowed_jids=allowed_jids)) else: try: @@ -292,32 +292,32 @@ except KeyError: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) yield self.host.memory.fileUpdate( - found_file[u'id'], u'access', + found_file['id'], 'access', partial(self._updateReadPermission, allowed_jids=allowed_jids)) if recursive: # we set all file under the directory as public (if they haven't # already a permission set), so allowed entities of root directory # can read them. - namespace = session_data[u'namespace'] + namespace = session_data['namespace'] yield self._updateDir( - client, requestor_bare, namespace, found_file, u'PUBLIC') + client, requestor_bare, namespace, found_file, 'PUBLIC') # job done, we can end the session status = self._c.STATUS.COMPLETED payload = None - note = (self._c.NOTE.INFO, _(u"management session done")) + note = (self._c.NOTE.INFO, _("management session done")) defer.returnValue((payload, status, None, note)) @defer.inlineCallbacks def _onDeleteFile(self, client, command_elt, session_data, action, node): try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None found_file = session_data.get('found_file') - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] requestor_bare = requestor.userhostJID() if command_form is None or len(command_form.fields) == 0: @@ -330,18 +330,18 @@ found_file = yield self._getFileData(client, session_data, command_form) except WorkflowError as e: defer.returnValue(e.err_args) - if found_file[u'type'] == C.FILE_TYPE_DIRECTORY: - msg = D_(u"Are you sure to delete directory {name} and all files and " - u"directories under it?").format(name=found_file[u'name']) + if found_file['type'] == C.FILE_TYPE_DIRECTORY: + msg = D_("Are you sure to delete directory {name} and all files and " + "directories under it?").format(name=found_file['name']) else: - msg = D_(u"Are you sure to delete file {name}?" - .format(name=found_file[u'name'])) - form = data_form.Form("form", title=u"File Management", + msg = D_("Are you sure to delete file {name}?" 
+ .format(name=found_file['name'])) + form = data_form.Form("form", title="File Management", instructions = [msg], formNamespace=NS_FILE_MANAGEMENT) field = data_form.Field( "boolean", "confirm", value=False, required=True, - desc=u"check this box to confirm" + desc="check this box to confirm" ) form.addField(field) status = self._c.STATUS.EXECUTING @@ -357,10 +357,10 @@ if not confirmed: note = None else: - recursive = found_file[u'type'] == C.FILE_TYPE_DIRECTORY + recursive = found_file['type'] == C.FILE_TYPE_DIRECTORY yield self.host.memory.fileDelete( - client, requestor_bare, found_file[u'id'], recursive) - note = (self._c.NOTE.INFO, _(u"file deleted")) + client, requestor_bare, found_file['id'], recursive) + note = (self._c.NOTE.INFO, _("file deleted")) status = self._c.STATUS.COMPLETED payload = None defer.returnValue((payload, status, None, note)) @@ -374,16 +374,16 @@ @param file_data(dict): metadata of the file """ - if file_data[u'type'] == C.FILE_TYPE_DIRECTORY: + if file_data['type'] == C.FILE_TYPE_DIRECTORY: sub_files_data = yield self.host.memory.getFiles( - client, requestor, parent=file_data[u'id'], namespace=namespace) + client, requestor, parent=file_data['id'], namespace=namespace) for sub_file_data in sub_files_data: yield self._genThumbs(client, requestor, namespace, sub_file_data) - elif file_data[u'type'] == C.FILE_TYPE_FILE: - mime_type = file_data[u'mime_type'] - file_path = os.path.join(self.files_path, file_data[u'file_hash']) - if mime_type is not None and mime_type.startswith(u"image"): + elif file_data['type'] == C.FILE_TYPE_FILE: + mime_type = file_data['mime_type'] + file_path = os.path.join(self.files_path, file_data['file_hash']) + if mime_type is not None and mime_type.startswith("image"): thumbnails = [] for max_thumb_size in (self._t.SIZE_SMALL, self._t.SIZE_MEDIUM): @@ -395,31 +395,31 @@ 60 * 60 * 24 * 31 * 6, ) except Exception as e: - log.warning(_(u"Can't create thumbnail: {reason}") + log.warning(_("Can't create thumbnail: {reason}") .format(reason=e)) break - thumbnails.append({u"id": thumb_id, u"size": thumb_size}) + thumbnails.append({"id": thumb_id, "size": thumb_size}) yield self.host.memory.fileUpdate( - file_data[u'id'], u'extra', + file_data['id'], 'extra', partial(self._updateThumbs, thumbnails=thumbnails)) - log.info(u"thumbnails for [{file_name}] generated" - .format(file_name=file_data[u'name'])) + log.info("thumbnails for [{file_name}] generated" + .format(file_name=file_data['name'])) else: - log.warning(u"unmanaged file type: {type_}".format(type_=file_data[u'type'])) + log.warning("unmanaged file type: {type_}".format(type_=file_data['type'])) @defer.inlineCallbacks def _onGenThumbnails(self, client, command_elt, session_data, action, node): try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None found_file = session_data.get('found_file') - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] if command_form is None or len(command_form.fields) == 0: # root request @@ -432,11 +432,11 @@ except WorkflowError as e: defer.returnValue(e.err_args) - log.info(u"Generating thumbnails as requested") - yield self._genThumbs(client, requestor, found_file[u'namespace'], found_file) + log.info("Generating thumbnails as requested") + yield self._genThumbs(client, requestor, found_file['namespace'], found_file) # job done, we can end the session status = 
self._c.STATUS.COMPLETED payload = None - note = (self._c.NOTE.INFO, _(u"thumbnails generated")) + note = (self._c.NOTE.INFO, _("thumbnails generated")) defer.returnValue((payload, status, None, note))
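
Several handlers in this file switch from the Python 2 iterator method call command_elt.elements(...).next() to the built-in next(...). A small, self-contained illustration of that change (toy generator, unrelated to the XML API):

def items():
    yield "first"
    yield "second"

gen = items()
# Python 2: gen.next()    ->    Python 3: next(gen)
print(next(gen))                 # first
print(next(gen))                 # second
print(next(gen, "<exhausted>"))  # default value instead of raising StopIteration
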
--- a/sat/plugins/plugin_dbg_manhole.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_dbg_manhole.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for debugging, using a manhole @@ -29,14 +29,14 @@ log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Manhole debug plugin", - C.PI_IMPORT_NAME: u"manhole", - C.PI_TYPE: u"DEBUG", + C.PI_NAME: "Manhole debug plugin", + C.PI_IMPORT_NAME: "manhole", + C.PI_TYPE: "DEBUG", C.PI_PROTOCOLS: [], C.PI_DEPENDENCIES: [], - C.PI_MAIN: u"Manhole", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: _(u"""Debug plugin to have a telnet server"""), + C.PI_MAIN: "Manhole", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: _("""Debug plugin to have a telnet server"""), } @@ -50,14 +50,14 @@ self.startManhole(port) def startManhole(self, port): - log.warning(_(u"/!\\ Manhole debug server activated, be sure to not use it in " - u"production, this is dangerous /!\\")) - log.info(_(u"You can connect to manhole server using telnet on port {port}") + log.warning(_("/!\\ Manhole debug server activated, be sure to not use it in " + "production, this is dangerous /!\\")) + log.info(_("You can connect to manhole server using telnet on port {port}") .format(port=port)) f = protocol.ServerFactory() namespace = { - u"host": self.host, - u"jid": jid, + "host": self.host, + "jid": jid, } f.protocol = lambda: TelnetTransport(TelnetBootstrapProtocol, insults.ServerProtocol,
--- a/sat/plugins/plugin_exp_command_export.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_command_export.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to export commands (experimental) @@ -52,7 +52,7 @@ def _clean(self, data): if not data: log.error("data should not be empty !") - return u"" + return "" decoded = data.decode("utf-8", "ignore")[: -1 if data[-1] == "\n" else None] return clean_ustr(decoded) @@ -66,7 +66,7 @@ self.client.sendMessage(self.target, {"": self._clean(data)}, no_trigger=True) def processEnded(self, reason): - log.info(u"process finished: %d" % (reason.value.exitCode,)) + log.info("process finished: %d" % (reason.value.exitCode,)) self.parent.removeProcess(self.target, self) def write(self, message): @@ -120,12 +120,12 @@ if spawned_key in self.spawned: try: - body = message_elt.elements(C.NS_CLIENT, "body").next() + body = next(message_elt.elements(C.NS_CLIENT, "body")) except StopIteration: # do not block message without body (chat state notification...) return True - mess_data = unicode(body) + "\n" + mess_data = str(body) + "\n" processes_set = self.spawned[spawned_key] _continue = False exclusive = False @@ -158,7 +158,7 @@ raise jid.InvalidFormat _jid = _jid.userhostJID() except (RuntimeError, jid.InvalidFormat, AttributeError): - log.info(u"invalid target ignored: %s" % (target,)) + log.info("invalid target ignored: %s" % (target,)) continue process_prot = ExportCommandProtocol(self, client, _jid, options) self.spawned.setdefault((_jid, client.profile), set()).add(process_prot)
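
In the command-export plugin, the data handled by _clean() comes from a spawned process and is therefore bytes under Python 3, where indexing bytes yields integers rather than one-character strings. A hedged, stand-alone sketch of stripping a trailing newline before decoding (a simplified stand-in, not the plugin's _clean() itself):

def clean_process_output(data: bytes) -> str:
    """Decode raw process output and drop a single trailing newline if present."""
    # bytes indexing returns ints in Python 3, so compare against a bytes value
    if data.endswith(b"\n"):
        data = data[:-1]
    return data.decode("utf-8", "ignore")

print(repr(clean_process_output(b"hello\n")))   # 'hello'
print(repr(clean_process_output(b"world")))     # 'world'
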
--- a/sat/plugins/plugin_exp_events.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_events.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -29,7 +29,7 @@ from twisted.words.protocols.jabber import jid, error from twisted.words.xish import domish from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber.xmlstream import XMPPHandler from wokkel import pubsub @@ -41,11 +41,11 @@ C.PI_IMPORT_NAME: "EVENTS", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"INVITATION", u"LIST_INTEREST"], + C.PI_DEPENDENCIES: ["XEP-0060", "INVITATION", "LIST_INTEREST"], C.PI_RECOMMENDATIONS: ["XEP-0277", "EMAIL_INVITATION"], C.PI_MAIN: "Events", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"""Experimental implementation of XMPP events management"""), + C.PI_DESCRIPTION: _("""Experimental implementation of XMPP events management"""), } NS_EVENT = "org.salut-a-toi.event:0" @@ -55,13 +55,13 @@ """Q&D module to handle event attendance answer, experimentation only""" def __init__(self, host): - log.info(_(u"Event plugin initialization")) + log.info(_("Event plugin initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] self._i = self.host.plugins.get("EMAIL_INVITATION") self._b = self.host.plugins.get("XEP-0277") - self.host.registerNamespace(u"event", NS_EVENT) - self.host.plugins[u"INVITATION"].registerNamespace(NS_EVENT, + self.host.registerNamespace("event", NS_EVENT) + self.host.plugins["INVITATION"].registerNamespace(NS_EVENT, self.register) host.bridge.addMethod( "eventGet", @@ -69,7 +69,7 @@ in_sign="ssss", out_sign="(ia{ss})", method=self._eventGet, - async=True, + async_=True, ) host.bridge.addMethod( "eventCreate", @@ -77,7 +77,7 @@ in_sign="ia{ss}ssss", out_sign="s", method=self._eventCreate, - async=True, + async_=True, ) host.bridge.addMethod( "eventModify", @@ -85,7 +85,7 @@ in_sign="sssia{ss}s", out_sign="", method=self._eventModify, - async=True, + async_=True, ) host.bridge.addMethod( "eventsList", @@ -93,7 +93,7 @@ in_sign="sss", out_sign="aa{ss}", method=self._eventsList, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteeGet", @@ -101,7 +101,7 @@ in_sign="sss", out_sign="a{ss}", method=self._eventInviteeGet, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteeSet", @@ -109,7 +109,7 @@ in_sign="ssa{ss}s", out_sign="", method=self._eventInviteeSet, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteesList", @@ -117,7 +117,7 @@ in_sign="sss", out_sign="a{sa{ss}}", method=self._eventInviteesList, - async=True, + async_=True, ), host.bridge.addMethod( "eventInvite", @@ -125,7 +125,7 @@ in_sign="sssss", out_sign="", method=self._invite, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteByEmail", @@ -133,7 +133,7 @@ in_sign="ssssassssssss", out_sign="", method=self._inviteByEmail, - async=True, + async_=True, ) def getHandler(self, client): @@ -152,56 +152,56 @@ data = {} - for key in (u"name",): + for key in ("name",): try: data[key] = event_elt[key] except KeyError: continue - for elt_name in (u"description",): + for elt_name in ("description",): try: elt = next(event_elt.elements(NS_EVENT, elt_name)) except StopIteration: continue else: - data[elt_name] = unicode(elt) + data[elt_name] = str(elt) - for elt_name in (u"image", "background-image"): + for elt_name in 
("image", "background-image"): try: image_elt = next(event_elt.elements(NS_EVENT, elt_name)) data[elt_name] = image_elt["src"] except StopIteration: continue except KeyError: - log.warning(_(u"no src found for image")) + log.warning(_("no src found for image")) - for uri_type in (u"invitees", u"blog"): + for uri_type in ("invitees", "blog"): try: elt = next(event_elt.elements(NS_EVENT, uri_type)) - uri = data[uri_type + u"_uri"] = elt["uri"] + uri = data[uri_type + "_uri"] = elt["uri"] uri_data = xmpp_uri.parseXMPPUri(uri) - if uri_data[u"type"] != u"pubsub": + if uri_data["type"] != "pubsub": raise ValueError except StopIteration: - log.warning(_(u"no {uri_type} element found!").format(uri_type=uri_type)) + log.warning(_("no {uri_type} element found!").format(uri_type=uri_type)) except KeyError: - log.warning(_(u"incomplete {uri_type} element").format(uri_type=uri_type)) + log.warning(_("incomplete {uri_type} element").format(uri_type=uri_type)) except ValueError: - log.warning(_(u"bad {uri_type} element").format(uri_type=uri_type)) + log.warning(_("bad {uri_type} element").format(uri_type=uri_type)) else: - data[uri_type + u"_service"] = uri_data[u"path"] - data[uri_type + u"_node"] = uri_data[u"node"] + data[uri_type + "_service"] = uri_data["path"] + data[uri_type + "_node"] = uri_data["node"] for meta_elt in event_elt.elements(NS_EVENT, "meta"): - key = meta_elt[u"name"] + key = meta_elt["name"] if key in data: log.warning( - u"Ignoring conflicting meta element: {xml}".format( + "Ignoring conflicting meta element: {xml}".format( xml=meta_elt.toXml() ) ) continue - data[key] = unicode(meta_elt) + data[key] = str(meta_elt) if event_elt.link: link_elt = event_elt.link data["service"] = link_elt["service"] @@ -225,11 +225,11 @@ id_ = NS_EVENT items, metadata = yield self._p.getItems(client, service, node, item_ids=[id_]) try: - event_elt = next(items[0].elements(NS_EVENT, u"event")) + event_elt = next(items[0].elements(NS_EVENT, "event")) except StopIteration: - raise exceptions.NotFound(_(u"No event element has been found")) + raise exceptions.NotFound(_("No event element has been found")) except IndexError: - raise exceptions.NotFound(_(u"No event with this id has been found")) + raise exceptions.NotFound(_("No event with this id has been found")) defer.returnValue(event_elt) def register(self, client, name, extra, service, node, event_id, item_elt, @@ -249,16 +249,16 @@ link_elt["node"] = node link_elt["item"] = event_id __, event_data = self._parseEventElt(event_elt) - name = event_data.get(u'name') - if u'image' in event_data: - extra = {u'thumb_url': event_data[u'image']} + name = event_data.get('name') + if 'image' in event_data: + extra = {'thumb_url': event_data['image']} else: extra = None - return self.host.plugins[u'LIST_INTEREST'].registerPubsub( + return self.host.plugins['LIST_INTEREST'].registerPubsub( client, NS_EVENT, service, node, event_id, creator, name=name, element=event_elt, extra=extra) - def _eventGet(self, service, node, id_=u"", profile_key=C.PROF_KEY_NONE): + def _eventGet(self, service, node, id_="", profile_key=C.PROF_KEY_NONE): service = jid.JID(service) if service else None node = node if node else NS_EVENT client = self.host.getClient(profile_key) @@ -283,12 +283,12 @@ defer.returnValue(self._parseEventElt(event_elt)) def _eventCreate( - self, timestamp, data, service, node, id_=u"", profile_key=C.PROF_KEY_NONE + self, timestamp, data, service, node, id_="", profile_key=C.PROF_KEY_NONE ): service = jid.JID(service) if service else None node = node or None 
client = self.host.getClient(profile_key) - data[u"register"] = C.bool(data.get(u"register", C.BOOL_FALSE)) + data["register"] = C.bool(data.get("register", C.BOOL_FALSE)) return self.eventCreate(client, timestamp, data, service, node, id_ or NS_EVENT) @defer.inlineCallbacks @@ -311,32 +311,32 @@ @return (unicode): created node """ if not event_id: - raise ValueError(_(u"event_id must be set")) + raise ValueError(_("event_id must be set")) if not service: service = client.jid.userhostJID() if not node: - node = NS_EVENT + u"__" + shortuuid.uuid() + node = NS_EVENT + "__" + shortuuid.uuid() event_elt = domish.Element((NS_EVENT, "event")) if timestamp is not None and timestamp != -1: formatted_date = utils.xmpp_date(timestamp) event_elt.addElement((NS_EVENT, "date"), content=formatted_date) register = data.pop("register", False) - for key in (u"name",): + for key in ("name",): if key in data: event_elt[key] = data.pop(key) - for key in (u"description",): + for key in ("description",): if key in data: event_elt.addElement((NS_EVENT, key), content=data.pop(key)) - for key in (u"image", u"background-image"): + for key in ("image", "background-image"): if key in data: elt = event_elt.addElement((NS_EVENT, key)) elt["src"] = data.pop(key) # we first create the invitees and blog nodes (if not specified in data) - for uri_type in (u"invitees", u"blog"): - key = uri_type + u"_uri" - for to_delete in (u"service", u"node"): - k = uri_type + u"_" + to_delete + for uri_type in ("invitees", "blog"): + key = uri_type + "_uri" + for to_delete in ("service", "node"): + k = uri_type + "_" + to_delete if k in data: del data[k] if key not in data: @@ -352,12 +352,12 @@ else: uri = data.pop(key) uri_data = xmpp_uri.parseXMPPUri(uri) - if uri_data[u"type"] != u"pubsub": + if uri_data["type"] != "pubsub": raise ValueError( - _(u"The given URI is not valid: {uri}").format(uri=uri) + _("The given URI is not valid: {uri}").format(uri=uri) ) - uri_service = jid.JID(uri_data[u"path"]) - uri_node = uri_data[u"node"] + uri_service = jid.JID(uri_data["path"]) + uri_node = uri_data["node"] elt = event_elt.addElement((NS_EVENT, uri_type)) elt["uri"] = xmpp_uri.buildXMPPUri( @@ -365,7 +365,7 @@ ) # remaining data are put in <meta> elements - for key in data.keys(): + for key in list(data.keys()): elt = event_elt.addElement((NS_EVENT, "meta"), content=data.pop(key)) elt["name"] = key @@ -374,8 +374,8 @@ # TODO: check auto-create, no need to create node first if available node = yield self._p.createNode(client, service, nodeIdentifier=node) except error.StanzaError as e: - if e.condition == u"conflict": - log.debug(_(u"requested node already exists")) + if e.condition == "conflict": + log.debug(_("requested node already exists")) yield self._p.publish(client, service, node, items=[item_elt]) @@ -388,7 +388,7 @@ profile_key=C.PROF_KEY_NONE): service = jid.JID(service) if service else None if not node: - raise ValueError(_(u"missing node")) + raise ValueError(_("missing node")) client = self.host.getClient(profile_key) return self.eventModify( client, service, node, id_ or NS_EVENT, timestamp_update or None, data_update @@ -407,13 +407,13 @@ new_timestamp = event_timestamp if timestamp_update is None else timestamp_update new_data = event_metadata if data_update: - for k, v in data_update.iteritems(): + for k, v in data_update.items(): new_data[k] = v yield self.eventCreate(client, new_timestamp, new_data, service, node, id_) def _eventsListSerialise(self, events): for timestamp, data in events: - data["date"] = 
unicode(timestamp) + data["date"] = str(timestamp) data["creator"] = C.boolConst(data.get("creator", False)) return [e[1] for e in events] @@ -431,15 +431,15 @@ @return list(tuple(int, dict)): list of events (timestamp + metadata) """ - items, metadata = yield self.host.plugins[u'LIST_INTEREST'].listInterests( + items, metadata = yield self.host.plugins['LIST_INTEREST'].listInterests( client, service, node, namespace=NS_EVENT) events = [] for item in items: try: - event_elt = next(item.interest.pubsub.elements(NS_EVENT, u"event")) + event_elt = next(item.interest.pubsub.elements(NS_EVENT, "event")) except StopIteration: log.warning( - _(u"No event found in item {item_id}, ignoring").format( + _("No event found in item {item_id}, ignoring").format( item_id=item["id"]) ) else: @@ -466,12 +466,12 @@ items, metadata = yield self._p.getItems( client, service, node, item_ids=[client.jid.userhost()] ) - event_elt = next(items[0].elements(NS_EVENT, u"invitee")) + event_elt = next(items[0].elements(NS_EVENT, "invitee")) except (exceptions.NotFound, IndexError): # no item found, event data are not set yet defer.returnValue({}) data = {} - for key in (u"attend", u"guests"): + for key in ("attend", "guests"): try: data[key] = event_elt[key] except KeyError: @@ -495,7 +495,7 @@ guests: an int """ event_elt = domish.Element((NS_EVENT, "invitee")) - for key in (u"attend", u"guests"): + for key in ("attend", "guests"): try: event_elt[key] = data.pop(key) except KeyError: @@ -522,15 +522,15 @@ invitees = {} for item in items: try: - event_elt = next(item.elements(NS_EVENT, u"invitee")) + event_elt = next(item.elements(NS_EVENT, "invitee")) except StopIteration: # no item found, event data are not set yet log.warning(_( - u"no data found for {item_id} (service: {service}, node: {node})" + "no data found for {item_id} (service: {service}, node: {node})" .format(item_id=item["id"], service=service, node=node))) else: data = {} - for key in (u"attend", u"guests"): + for key in ("attend", "guests"): try: data[key] = event_elt[key] except KeyError: @@ -558,30 +558,30 @@ @param item_id(unicode): event id """ # FIXME: handle name and extra - name = u'' + name = '' extra = {} if self._b is None: raise exceptions.FeatureNotFound( - _(u'"XEP-0277" (blog) plugin is needed for this feature') + _('"XEP-0277" (blog) plugin is needed for this feature') ) if item_id is None: item_id = NS_EVENT # first we authorize our invitee to see the nodes of interest - yield self._p.setNodeAffiliations(client, service, node, {invitee_jid: u"member"}) - log.debug(_(u"affiliation set on event node")) + yield self._p.setNodeAffiliations(client, service, node, {invitee_jid: "member"}) + log.debug(_("affiliation set on event node")) __, event_data = yield self.eventGet(client, service, node, item_id) - log.debug(_(u"got event data")) + log.debug(_("got event data")) invitees_service = jid.JID(event_data["invitees_service"]) invitees_node = event_data["invitees_node"] blog_service = jid.JID(event_data["blog_service"]) blog_node = event_data["blog_node"] yield self._p.setNodeAffiliations( - client, invitees_service, invitees_node, {invitee_jid: u"publisher"} + client, invitees_service, invitees_node, {invitee_jid: "publisher"} ) - log.debug(_(u"affiliation set on invitee node")) + log.debug(_("affiliation set on invitee node")) yield self._p.setNodeAffiliations( - client, blog_service, blog_node, {invitee_jid: u"member"} + client, blog_service, blog_node, {invitee_jid: "member"} ) blog_items, __ = yield self._b.mbGet(client, blog_service, 
blog_node, None) @@ -591,29 +591,29 @@ comments_node = item["comments_node"] except KeyError: log.debug( - u"no comment service set for item {item_id}".format( + "no comment service set for item {item_id}".format( item_id=item["id"] ) ) else: yield self._p.setNodeAffiliations( - client, comments_service, comments_node, {invitee_jid: u"publisher"} + client, comments_service, comments_node, {invitee_jid: "publisher"} ) - log.debug(_(u"affiliation set on blog and comments nodes")) + log.debug(_("affiliation set on blog and comments nodes")) # now we send the invitation - pubsub_invitation = self.host.plugins[u'INVITATION'] + pubsub_invitation = self.host.plugins['INVITATION'] pubsub_invitation.sendPubsubInvitation(client, invitee_jid, service, node, item_id, name, extra) - def _inviteByEmail(self, service, node, id_=NS_EVENT, email=u"", emails_extra=None, - name=u"", host_name=u"", language=u"", url_template=u"", - message_subject=u"", message_body=u"", + def _inviteByEmail(self, service, node, id_=NS_EVENT, email="", emails_extra=None, + name="", host_name="", language="", url_template="", + message_subject="", message_body="", profile_key=C.PROF_KEY_NONE): client = self.host.getClient(profile_key) kwargs = { - u"profile": client.profile, - u"emails_extra": [unicode(e) for e in emails_extra], + "profile": client.profile, + "emails_extra": [str(e) for e in emails_extra], } for key in ( "email", @@ -625,7 +625,7 @@ "message_body", ): value = locals()[key] - kwargs[key] = unicode(value) + kwargs[key] = str(value) return self.inviteByEmail( client, jid.JID(service) if service else None, node, id_ or NS_EVENT, **kwargs ) @@ -640,26 +640,26 @@ """ if self._i is None: raise exceptions.FeatureNotFound( - _(u'"Invitations" plugin is needed for this feature') + _('"Invitations" plugin is needed for this feature') ) if self._b is None: raise exceptions.FeatureNotFound( - _(u'"XEP-0277" (blog) plugin is needed for this feature') + _('"XEP-0277" (blog) plugin is needed for this feature') ) service = service or client.jid.userhostJID() event_uri = xmpp_uri.buildXMPPUri( "pubsub", path=service.full(), node=node, item=id_ ) - kwargs["extra"] = {u"event_uri": event_uri} + kwargs["extra"] = {"event_uri": event_uri} invitation_data = yield self._i.create(**kwargs) - invitee_jid = invitation_data[u"jid"] - log.debug(_(u"invitation created")) + invitee_jid = invitation_data["jid"] + log.debug(_("invitation created")) # now that we have a jid, we can send normal invitation yield self.invite(client, invitee_jid, service, node, id_) +@implementer(iwokkel.IDisco) class EventsHandler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
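
The events plugin replaces zope's class-level implements(iwokkel.IDisco) with the @implementer decorator, and its bridge.addMethod registrations now pass async_=True instead of async=True. A self-contained illustration of the decorator form, using a toy interface rather than wokkel's IDisco:

from zope.interface import Interface, implementer
from zope.interface.verify import verifyObject

class IGreeter(Interface):
    def greet(name):
        """Return a greeting for name."""

@implementer(IGreeter)      # Python 2 used a class-body call: implements(IGreeter)
class Greeter(object):
    def greet(self, name):
        return "hello {}".format(name)

verifyObject(IGreeter, Greeter())   # passes: the interface is provided
print(Greeter().greet("world"))
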
--- a/sat/plugins/plugin_exp_invitation.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_invitation.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -24,7 +24,7 @@ from twisted.internet import defer from twisted.words.protocols.jabber import jid from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber.xmlstream import XMPPHandler log = getLogger(__name__) @@ -35,24 +35,24 @@ C.PI_IMPORT_NAME: "INVITATION", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"XEP-0329"], + C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0329"], C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "Invitation", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"Experimental handling of invitations"), + C.PI_DESCRIPTION: _("Experimental handling of invitations"), } -NS_INVITATION = u"https://salut-a-toi/protocol/invitation:0" +NS_INVITATION = "https://salut-a-toi/protocol/invitation:0" INVITATION = '/message/invitation[@xmlns="{ns_invit}"]'.format( ns_invit=NS_INVITATION ) -NS_INVITATION_LIST = NS_INVITATION + u"#list" +NS_INVITATION_LIST = NS_INVITATION + "#list" class Invitation(object): def __init__(self, host): - log.info(_(u"Invitation plugin initialization")) + log.info(_("Invitation plugin initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] # map from namespace of the invitation to callback handling it @@ -88,7 +88,7 @@ """ if namespace in self._ns_cb: raise exceptions.ConflictError( - u"invitation namespace {namespace} is already register with {callback}" + "invitation namespace {namespace} is already register with {callback}" .format(namespace=namespace, callback=self._ns_cb[namespace])) self._ns_cb[namespace] = callback @@ -113,15 +113,15 @@ client.generateMessageXML(mess_data) invitation_elt = mess_data["xml"].addElement("invitation", NS_INVITATION) if name is not None: - invitation_elt[u"name"] = name - thumb_url = extra.get(u'thumb_url') + invitation_elt["name"] = name + thumb_url = extra.get('thumb_url') if thumb_url: - if not thumb_url.startswith(u'http'): + if not thumb_url.startswith('http'): log.warning( - u"only http URLs are allowed for thumbnails, got {url}, ignoring" + "only http URLs are allowed for thumbnails, got {url}, ignoring" .format(url=thumb_url)) else: - invitation_elt[u'thumb_url'] = thumb_url + invitation_elt['thumb_url'] = thumb_url return mess_data, invitation_elt def sendPubsubInvitation(self, client, invitee_jid, service, node, @@ -139,11 +139,11 @@ extra = {} mess_data, invitation_elt = self._generateBaseInvitation( client, invitee_jid, name, extra) - pubsub_elt = invitation_elt.addElement(u"pubsub") - pubsub_elt[u"service"] = service.full() - pubsub_elt[u"node"] = node - pubsub_elt[u"item"] = item_id - return client.send(mess_data[u"xml"]) + pubsub_elt = invitation_elt.addElement("pubsub") + pubsub_elt["service"] = service.full() + pubsub_elt["node"] = node + pubsub_elt["item"] = item_id + return client.send(mess_data["xml"]) def sendFileSharingInvitation(self, client, invitee_jid, service, repos_type=None, namespace=None, path=None, name=None, extra=None): @@ -163,20 +163,20 @@ extra = {} mess_data, invitation_elt = self._generateBaseInvitation( client, invitee_jid, name, extra) - file_sharing_elt = invitation_elt.addElement(u"file_sharing") - file_sharing_elt[u"service"] = service.full() + file_sharing_elt = 
invitation_elt.addElement("file_sharing") + file_sharing_elt["service"] = service.full() if repos_type is not None: - if repos_type not in (u"files", "photos"): - msg = u"unknown repository type: {repos_type}".format( + if repos_type not in ("files", "photos"): + msg = "unknown repository type: {repos_type}".format( repos_type=repos_type) log.warning(msg) raise exceptions.DateError(msg) - file_sharing_elt[u"type"] = repos_type + file_sharing_elt["type"] = repos_type if namespace is not None: - file_sharing_elt[u"namespace"] = namespace + file_sharing_elt["namespace"] = namespace if path is not None: - file_sharing_elt[u"path"] = path - return client.send(mess_data[u"xml"]) + file_sharing_elt["path"] = path + return client.send(mess_data["xml"]) @defer.inlineCallbacks def _parsePubsubElt(self, client, pubsub_elt): @@ -185,25 +185,25 @@ node = pubsub_elt["node"] item_id = pubsub_elt.getAttribute("item") except (RuntimeError, KeyError): - log.warning(_(u"Bad invitation, ignoring")) + log.warning(_("Bad invitation, ignoring")) raise exceptions.DataError try: items, metadata = yield self._p.getItems(client, service, node, item_ids=[item_id]) except Exception as e: - log.warning(_(u"Can't get item linked with invitation: {reason}").format( + log.warning(_("Can't get item linked with invitation: {reason}").format( reason=e)) try: item_elt = items[0] except IndexError: - log.warning(_(u"Invitation was linking to a non existing item")) + log.warning(_("Invitation was linking to a non existing item")) raise exceptions.DataError try: namespace = item_elt.firstChildElement().uri except Exception as e: - log.warning(_(u"Can't retrieve namespace of invitation: {reason}").format( + log.warning(_("Can't retrieve namespace of invitation: {reason}").format( reason = e)) raise exceptions.DataError @@ -214,41 +214,41 @@ try: service = jid.JID(file_sharing_elt["service"]) except (RuntimeError, KeyError): - log.warning(_(u"Bad invitation, ignoring")) + log.warning(_("Bad invitation, ignoring")) raise exceptions.DataError - repos_type = file_sharing_elt.getAttribute(u"type", u"files") - namespace = file_sharing_elt.getAttribute(u"namespace") - path = file_sharing_elt.getAttribute(u"path") + repos_type = file_sharing_elt.getAttribute("type", "files") + namespace = file_sharing_elt.getAttribute("namespace") + path = file_sharing_elt.getAttribute("path") args = [service, repos_type, namespace, path] - ns_fis = self.host.getNamespace(u"fis") + ns_fis = self.host.getNamespace("fis") return ns_fis, args @defer.inlineCallbacks def onInvitation(self, message_elt, client): - log.debug(u"invitation received [{profile}]".format(profile=client.profile)) + log.debug("invitation received [{profile}]".format(profile=client.profile)) invitation_elt = message_elt.invitation - name = invitation_elt.getAttribute(u"name") + name = invitation_elt.getAttribute("name") extra = {} - if invitation_elt.hasAttribute(u"thumb_url"): - extra[u'thumb_url'] = invitation_elt[u'thumb_url'] + if invitation_elt.hasAttribute("thumb_url"): + extra['thumb_url'] = invitation_elt['thumb_url'] for elt in invitation_elt.elements(): if elt.uri != NS_INVITATION: - log.warning(u"unexpected element: {xml}".format(xml=elt.toXml())) + log.warning("unexpected element: {xml}".format(xml=elt.toXml())) continue - if elt.name == u"pubsub": + if elt.name == "pubsub": method = self._parsePubsubElt - elif elt.name == u"file_sharing": + elif elt.name == "file_sharing": method = self._parseFileSharingElt else: - log.warning(u"not implemented invitation element: 
{xml}".format( + log.warning("not implemented invitation element: {xml}".format( xml = elt.toXml())) continue try: namespace, args = yield method(client, elt) except exceptions.DataError: - log.warning(u"Can't parse invitation element: {xml}".format( + log.warning("Can't parse invitation element: {xml}".format( xml = elt.toXml())) continue @@ -256,14 +256,14 @@ cb = self._ns_cb[namespace] except KeyError: log.warning(_( - u'No handler for namespace "{namespace}", invitation ignored') + 'No handler for namespace "{namespace}", invitation ignored') .format(namespace=namespace)) else: cb(client, name, extra, *args) +@implementer(iwokkel.IDisco) class PubsubInvitationHandler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_exp_invitation_file.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_invitation_file.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -31,21 +31,21 @@ C.PI_IMPORT_NAME: "FILE_SHARING_INVITATION", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: ["XEP-0329", u"INVITATION"], + C.PI_DEPENDENCIES: ["XEP-0329", "INVITATION"], C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "FileSharingInvitation", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"Experimental handling of invitations for file sharing"), + C.PI_DESCRIPTION: _("Experimental handling of invitations for file sharing"), } class FileSharingInvitation(object): def __init__(self, host): - log.info(_(u"File Sharing Invitation plugin initialization")) + log.info(_("File Sharing Invitation plugin initialization")) self.host = host - ns_fis = host.getNamespace(u"fis") - host.plugins[u"INVITATION"].registerNamespace(ns_fis, self.onInvitation) + ns_fis = host.getNamespace("fis") + host.plugins["INVITATION"].registerNamespace(ns_fis, self.onInvitation) host.bridge.addMethod( "FISInvite", ".plugin", @@ -56,30 +56,30 @@ def _sendFileSharingInvitation( self, invitee_jid_s, service_s, repos_type=None, namespace=None, path=None, - name=None, extra_s=u'', profile_key=C.PROF_KEY_NONE): + name=None, extra_s='', profile_key=C.PROF_KEY_NONE): client = self.host.getClient(profile_key) invitee_jid = jid.JID(invitee_jid_s) service = jid.JID(service_s) extra = data_format.deserialise(extra_s) - return self.host.plugins[u"INVITATION"].sendFileSharingInvitation( + return self.host.plugins["INVITATION"].sendFileSharingInvitation( client, invitee_jid, service, repos_type=repos_type or None, namespace=namespace or None, path=path or None, name=name or None, extra=extra) def onInvitation(self, client, name, extra, service, repos_type, namespace, path): - if repos_type == u"files": - type_human = _(u"file sharing") - elif repos_type == u"photos": - type_human = _(u"photos album") + if repos_type == "files": + type_human = _("file sharing") + elif repos_type == "photos": + type_human = _("photos album") else: - log.warning(u"Unknown repository type: {repos_type}".format( + log.warning("Unknown repository type: {repos_type}".format( repos_type=repos_type)) - repos_type = u"file" - type_human = _(u"file sharing") + repos_type = "file" + type_human = _("file sharing") log.info(_( - u'{profile} has received an invitation for a files repository ({type_human}) ' - u'with namespace "{namespace}" at path [{path}]').format( + '{profile} has received an invitation for a files repository ({type_human}) ' + 'with namespace "{namespace}" at path [{path}]').format( profile=client.profile, type_human=type_human, namespace=namespace, path=path) ) - return self.host.plugins[u'LIST_INTEREST'].registerFileSharing( + return self.host.plugins['LIST_INTEREST'].registerFileSharing( client, service, repos_type, namespace, path, name, extra)
--- a/sat/plugins/plugin_exp_jingle_stream.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_jingle_stream.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing pipes (experimental) @@ -51,8 +51,8 @@ C.PI_DESCRIPTION: _("""Jingle Stream plugin"""), } -CONFIRM = D_(u"{peer} wants to send you a stream, do you accept ?") -CONFIRM_TITLE = D_(u"Stream Request") +CONFIRM = D_("{peer} wants to send you a stream, do you accept ?") +CONFIRM_TITLE = D_("Stream Request") class StreamProtocol(protocol.Protocol): @@ -119,7 +119,7 @@ def startStream(self, consumer): if self.consumer is not None: raise exceptions.InternalError( - _(u"stream can't be used with multiple consumers") + _("stream can't be used with multiple consumers") ) assert self.deferred is None self.consumer = consumer @@ -166,7 +166,7 @@ try: self.client_conn.sendData(data) except AttributeError: - log.warning(_(u"No client connected, can't send data")) + log.warning(_("No client connected, can't send data")) def writeToConsumer(self, data): self.consumer.write(data) @@ -186,7 +186,7 @@ in_sign="ss", out_sign="s", method=self._streamOut, - async=True, + async_=True, ) # jingle callbacks @@ -227,7 +227,7 @@ } ], ) - defer.returnValue(unicode(port)) + defer.returnValue(str(port)) def jingleSessionInit(self, client, session, content_name, stream_object): content_data = session["contents"][content_name] @@ -245,7 +245,7 @@ self._j.ROLE_INITIATOR, self._j.ROLE_RESPONDER, ): - log.warning(u"Bad sender, assuming initiator") + log.warning("Bad sender, assuming initiator") content_data["senders"] = self._j.ROLE_INITIATOR confirm_data = yield xml_tools.deferDialog( @@ -266,7 +266,7 @@ try: port = int(confirm_data["port"]) except (ValueError, KeyError): - raise exceptions.DataError(_(u"given port is invalid")) + raise exceptions.DataError(_("given port is invalid")) endpoint = endpoints.TCP4ClientEndpoint(reactor, "localhost", port) factory = StreamFactory() yield endpoint.connect(factory) @@ -288,16 +288,16 @@ args = [client, session, content_name, content_data] finished_d.addCallbacks(self._finishedCb, self._finishedEb, args, None, args) else: - log.warning(u"FIXME: unmanaged action {}".format(action)) + log.warning("FIXME: unmanaged action {}".format(action)) return desc_elt def _finishedCb(self, __, client, session, content_name, content_data): - log.info(u"Pipe transfer completed") + log.info("Pipe transfer completed") self._j.contentTerminate(client, session, content_name) content_data["stream_object"].stopStream() def _finishedEb(self, failure, client, session, content_name, content_data): - log.warning(u"Error while streaming pipe: {}".format(failure)) + log.warning("Error while streaming pipe: {}".format(failure)) self._j.contentTerminate( client, session, content_name, reason=self._j.REASON_FAILED_TRANSPORT )
--- a/sat/plugins/plugin_exp_lang_detect.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_lang_detect.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -28,7 +28,7 @@ from langid.langid import LanguageIdentifier, model except ImportError: raise exceptions.MissingModule( - u'Missing module langid, please download/install it with "pip install langid")' + 'Missing module langid, please download/install it with "pip install langid")' ) identifier = LanguageIdentifier.from_modelstring(model, norm_probs=False) @@ -45,9 +45,9 @@ C.PI_DESCRIPTION: _("""Detect and set message language when unknown"""), } -CATEGORY = D_(u"Misc") -NAME = u"lang_detect" -LABEL = D_(u"language detection") +CATEGORY = D_("Misc") +NAME = "lang_detect" +LABEL = D_("language detection") PARAMS = """ <params> <individual> @@ -63,7 +63,7 @@ class LangDetect(object): def __init__(self, host): - log.info(_(u"Language detection plugin initialization")) + log.info(_("Language detection plugin initialization")) self.host = host host.memory.updateParams(PARAMS) host.trigger.add("MessageReceived", self.MessageReceivedTrigger) @@ -71,8 +71,8 @@ def addLanguage(self, mess_data): message = mess_data["message"] - if len(message) == 1 and message.keys()[0] == "": - msg = message.values()[0] + if len(message) == 1 and list(message.keys())[0] == "": + msg = list(message.values())[0] lang = identifier.classify(msg)[0] mess_data["message"] = {lang: msg} return mess_data
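
addLanguage() above wraps message.keys() and message.values() in list() because Python 3 dict views are not indexable. A tiny sketch of the same single-entry check, where next(iter(...)) is an equivalent way to get the first element without building a list (sample data is made up):

message = {"": "bonjour tout le monde"}    # made-up single-entry message dict

# Python 2: message.keys()[0]; Python 3 views are not indexable, hence list(...)
if len(message) == 1 and next(iter(message)) == "":
    text = next(iter(message.values()))    # same result as list(message.values())[0]
    print(text)
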
--- a/sat/plugins/plugin_exp_list_of_interest.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_list_of_interest.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -21,7 +21,7 @@ from sat.core.constants import Const as C from sat.core.log import getLogger from wokkel import disco, iwokkel, pubsub -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.words.protocols.jabber import error as jabber_error, jid from twisted.words.protocols.jabber.xmlstream import XMPPHandler @@ -35,11 +35,11 @@ C.PI_IMPORT_NAME: "LIST_INTEREST", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"XEP-0329"], + C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0329"], C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "ListInterest", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"Experimental handling of interesting XMPP locations"), + C.PI_DESCRIPTION: _("Experimental handling of interesting XMPP locations"), } NS_LIST_INTEREST = "https://salut-a-toi/protocol/list-interest:0" @@ -49,7 +49,7 @@ namespace = NS_LIST_INTEREST def __init__(self, host): - log.info(_(u"List of Interest plugin initialization")) + log.info(_("List of Interest plugin initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] host.bridge.addMethod( @@ -58,7 +58,7 @@ in_sign="ssss", out_sign="aa{ss}", method=self._listInterests, - async=True, + async_=True, ) def getHandler(self, client): @@ -76,8 +76,8 @@ options=options, ) except jabber_error.StanzaError as e: - if e.condition == u"conflict": - log.debug(_(u"requested node already exists")) + if e.condition == "conflict": + log.debug(_("requested node already exists")) @defer.inlineCallbacks def registerPubsub(self, client, namespace, service, node, item_id=None, @@ -101,20 +101,20 @@ if extra is None: extra = {} yield self.createNode(client) - interest_elt = domish.Element((NS_LIST_INTEREST, u"interest")) - interest_elt[u"namespace"] = namespace + interest_elt = domish.Element((NS_LIST_INTEREST, "interest")) + interest_elt["namespace"] = namespace if name is not None: - interest_elt[u'name'] = name - thumb_url = extra.get(u'thumb_url') + interest_elt['name'] = name + thumb_url = extra.get('thumb_url') if thumb_url: - interest_elt[u'thumb_url'] = thumb_url - pubsub_elt = interest_elt.addElement(u"pubsub") - pubsub_elt[u"service"] = service.full() - pubsub_elt[u"node"] = node + interest_elt['thumb_url'] = thumb_url + pubsub_elt = interest_elt.addElement("pubsub") + pubsub_elt["service"] = service.full() + pubsub_elt["node"] = node if item_id is not None: - pubsub_elt[u"item"] = item_id + pubsub_elt["item"] = item_id if creator: - pubsub_elt[u"creator"] = C.BOOL_TRUE + pubsub_elt["creator"] = C.BOOL_TRUE if element is not None: pubsub_elt.addChild(element) item_elt = pubsub.Item(payload=interest_elt) @@ -138,21 +138,21 @@ if extra is None: extra = {} yield self.createNode(client) - interest_elt = domish.Element((NS_LIST_INTEREST, u"interest")) - interest_elt[u"namespace"] = self.host.getNamespace(u"fis") + interest_elt = domish.Element((NS_LIST_INTEREST, "interest")) + interest_elt["namespace"] = self.host.getNamespace("fis") if name is not None: - interest_elt[u'name'] = name - thumb_url = extra.get(u'thumb_url') + interest_elt['name'] = name + thumb_url = extra.get('thumb_url') if thumb_url: - interest_elt[u'thumb_url'] = thumb_url - file_sharing_elt = 
interest_elt.addElement(u"file_sharing") - file_sharing_elt[u"service"] = service.full() + interest_elt['thumb_url'] = thumb_url + file_sharing_elt = interest_elt.addElement("file_sharing") + file_sharing_elt["service"] = service.full() if repos_type is not None: - file_sharing_elt[u"type"] = repos_type + file_sharing_elt["type"] = repos_type if namespace is not None: - file_sharing_elt[u"namespace"] = namespace + file_sharing_elt["namespace"] = namespace if path is not None: - file_sharing_elt[u"path"] = path + file_sharing_elt["path"] = path item_elt = pubsub.Item(payload=interest_elt) yield self._p.publish( client, client.jid.userhostJID(), NS_LIST_INTEREST, items=[item_elt] @@ -163,38 +163,38 @@ for item_elt in interests_data[0]: interest_data = {} interest_elt = item_elt.interest - if interest_elt.hasAttribute(u'namespace'): - interest_data[u'namespace'] = interest_elt.getAttribute(u'namespace') - if interest_elt.hasAttribute(u'name'): - interest_data[u'name'] = interest_elt.getAttribute(u'name') - if interest_elt.hasAttribute(u'thumb_url'): - interest_data[u'thumb_url'] = interest_elt.getAttribute(u'thumb_url') + if interest_elt.hasAttribute('namespace'): + interest_data['namespace'] = interest_elt.getAttribute('namespace') + if interest_elt.hasAttribute('name'): + interest_data['name'] = interest_elt.getAttribute('name') + if interest_elt.hasAttribute('thumb_url'): + interest_data['thumb_url'] = interest_elt.getAttribute('thumb_url') elt = interest_elt.firstChildElement() if elt.uri != NS_LIST_INTEREST: - log.warning(u"unexpected child element, ignoring: {xml}".format( + log.warning("unexpected child element, ignoring: {xml}".format( xml = elt.toXml())) continue - if elt.name == u'pubsub': + if elt.name == 'pubsub': interest_data.update({ - u"type": u"pubsub", - u"service": elt[u'service'], - u"node": elt[u'node'], + "type": "pubsub", + "service": elt['service'], + "node": elt['node'], }) - for attr in (u'item', u'creator'): + for attr in ('item', 'creator'): if elt.hasAttribute(attr): interest_data[attr] = elt[attr] - elif elt.name == u'file_sharing': + elif elt.name == 'file_sharing': interest_data.update({ - u"type": u"file_sharing", - u"service": elt[u'service'], + "type": "file_sharing", + "service": elt['service'], }) - if elt.hasAttribute(u'type'): - interest_data[u'subtype'] = elt[u'type'] - for attr in (u'namespace', u'path'): + if elt.hasAttribute('type'): + interest_data['subtype'] = elt['type'] + for attr in ('namespace', 'path'): if elt.hasAttribute(attr): interest_data[attr] = elt[attr] else: - log.warning(u"unknown element, ignoring: {xml}".format(xml=elt.toXml())) + log.warning("unknown element, ignoring: {xml}".format(xml=elt.toXml())) continue interests.append(interest_data) @@ -229,20 +229,20 @@ filtered_items = [] for item in items: try: - interest_elt = next(item.elements(NS_LIST_INTEREST, u"interest")) + interest_elt = next(item.elements(NS_LIST_INTEREST, "interest")) except StopIteration: - log.warning(_(u"Missing interest element: {xml}").format( + log.warning(_("Missing interest element: {xml}").format( xml=interest_elt.toXml())) continue - if interest_elt.getAttribute(u"namespace") == namespace: + if interest_elt.getAttribute("namespace") == namespace: filtered_items.append(item) items = filtered_items defer.returnValue((items, metadata)) +@implementer(iwokkel.IDisco) class ListInterestHandler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_exp_parrot.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_parrot.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for parrot mode (experimental) @@ -38,7 +38,7 @@ C.PI_MAIN: "Exp_Parrot", C.PI_HANDLER: "no", C.PI_DESCRIPTION: _( - u"""Implementation of parrot mode (repeat messages between 2 entities)""" + """Implementation of parrot mode (repeat messages between 2 entities)""" ), } @@ -60,7 +60,7 @@ try: self.host.plugins[C.TEXT_CMDS].registerTextCommands(self) except KeyError: - log.info(_(u"Text commands not available")) + log.info(_("Text commands not available")) # def sendMessageTrigger(self, client, mess_data, treatments): # """ Deactivate other triggers if recipient is in parrot links """ @@ -90,7 +90,7 @@ message = {} for e in message_elt.elements(C.NS_CLIENT, "body"): - body = unicode(e) + body = str(e) lang = e.getAttribute("lang") or "" try: @@ -107,12 +107,12 @@ return True else: src_txt = from_jid.user - message[lang] = u"[{}] {}".format(src_txt, body) + message[lang] = "[{}] {}".format(src_txt, body) linked = _links[from_jid.userhostJID()] client.sendMessage( - jid.JID(unicode(linked)), message, None, "auto", no_trigger=True + jid.JID(str(linked)), message, None, "auto", no_trigger=True ) return True @@ -130,8 +130,8 @@ _links[source_jid.userhostJID()] = dest_jid log.info( - u"Parrot mode: %s will be repeated to %s" - % (source_jid.userhost(), unicode(dest_jid)) + "Parrot mode: %s will be repeated to %s" + % (source_jid.userhost(), str(dest_jid)) ) def removeParrot(self, client, source_jid): @@ -166,7 +166,7 @@ txt_cmd.feedBack( client, - "Parrot mode activated for {}".format(unicode(link_left_jid)), + "Parrot mode activated for {}".format(str(link_left_jid)), mess_data, ) @@ -183,7 +183,7 @@ raise jid.InvalidFormat except jid.InvalidFormat: txt_cmd.feedBack( - client, u"Can't deactivate Parrot mode for invalid jid", mess_data + client, "Can't deactivate Parrot mode for invalid jid", mess_data ) return False @@ -194,8 +194,8 @@ txt_cmd.feedBack( client, - u"Parrot mode deactivated for {} and {}".format( - unicode(link_left_jid), unicode(link_right_jid) + "Parrot mode deactivated for {} and {}".format( + str(link_left_jid), str(link_right_jid) ), mess_data, )
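The parrot plugin used unicode() both to extract the text of a <body> element and to stringify JIDs; with Python 3's single text type this becomes str(). A short sketch with twisted.words' domish, assuming (as the ported code above does) that str() on an Element yields its character data; the body text is illustrative:

    from twisted.words.xish import domish

    body_elt = domish.Element((None, "body"))
    body_elt.addContent("coucou")

    # Python 2 code called unicode(body_elt); under Python 3 str() plays that role.
    body = str(body_elt)
    message = {"": "[romeo] {}".format(body)}
    assert message == {"": "[romeo] coucou"}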
--- a/sat/plugins/plugin_exp_pubsub_admin.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_pubsub_admin.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to send pubsub requests with administrator privilege @@ -29,20 +29,20 @@ log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Pubsub Administrator", - C.PI_IMPORT_NAME: u"PUBSUB_ADMIN", + C.PI_NAME: "Pubsub Administrator", + C.PI_IMPORT_NAME: "PUBSUB_ADMIN", C.PI_TYPE: C.PLUG_TYPE_EXP, C.PI_PROTOCOLS: [], C.PI_DEPENDENCIES: [], C.PI_RECOMMENDATIONS: [], - C.PI_MAIN: u"PubsubAdmin", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: _(u"""\Implementation of Pubsub Administrator + C.PI_MAIN: "PubsubAdmin", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: _("""\Implementation of Pubsub Administrator This allows a pubsub administrator to overwrite completly items, including publisher. Specially useful when importing a node."""), } -NS_PUBSUB_ADMIN = u"https://salut-a-toi.org/spec/pubsub_admin:0" +NS_PUBSUB_ADMIN = "https://salut-a-toi.org/spec/pubsub_admin:0" class PubsubAdmin(object): @@ -55,7 +55,7 @@ in_sign="ssasss", out_sign="as", method=self._publish, - async=True, + async_=True, ) def _publish(self, service, nodeIdentifier, items, extra=None, @@ -71,22 +71,22 @@ def _sendCb(self, iq_result): publish_elt = iq_result.admin.pubsub.publish ids = [] - for item_elt in publish_elt.elements(pubsub.NS_PUBSUB, u'item'): - ids.append(item_elt[u'id']) + for item_elt in publish_elt.elements(pubsub.NS_PUBSUB, 'item'): + ids.append(item_elt['id']) return ids def publish(self, client, service, nodeIdentifier, items, extra=None): for item in items: - if item.name != u'item' or item.uri != pubsub.NS_PUBSUB: + if item.name != 'item' or item.uri != pubsub.NS_PUBSUB: raise exceptions.DataError( - u'Invalid element, a pubsub item is expected: {xml}'.format( + 'Invalid element, a pubsub item is expected: {xml}'.format( xml=item.toXml())) iq_elt = client.IQ() iq_elt['to'] = service.full() if service else client.jid.userhost() - admin_elt = iq_elt.addElement((NS_PUBSUB_ADMIN, u'admin')) - pubsub_elt = admin_elt.addElement((pubsub.NS_PUBSUB, u'pubsub')) + admin_elt = iq_elt.addElement((NS_PUBSUB_ADMIN, 'admin')) + pubsub_elt = admin_elt.addElement((pubsub.NS_PUBSUB, 'pubsub')) publish_elt = pubsub_elt.addElement('publish') - publish_elt[u'node'] = nodeIdentifier + publish_elt['node'] = nodeIdentifier for item in items: publish_elt.addChild(item) d = iq_elt.send()
--- a/sat/plugins/plugin_exp_pubsub_hook.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_pubsub_hook.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Pubsub Hooks @@ -43,17 +43,17 @@ } # python module -HOOK_TYPE_PYTHON = u"python" +HOOK_TYPE_PYTHON = "python" # python file path -HOOK_TYPE_PYTHON_FILE = u"python_file" +HOOK_TYPE_PYTHON_FILE = "python_file" # python code directly -HOOK_TYPE_PYTHON_CODE = u"python_code" +HOOK_TYPE_PYTHON_CODE = "python_code" HOOK_TYPES = (HOOK_TYPE_PYTHON, HOOK_TYPE_PYTHON_FILE, HOOK_TYPE_PYTHON_CODE) class PubsubHook(object): def __init__(self, host): - log.info(_(u"PubSub Hook initialization")) + log.info(_("PubSub Hook initialization")) self.host = host self.node_hooks = {} # keep track of the number of hooks per node (for all profiles) host.bridge.addMethod( @@ -90,7 +90,7 @@ def _installNodeManager(self, client, node): if node in self.node_hooks: - log.debug(_(u"node manager already set for {node}").format(node=node)) + log.debug(_("node manager already set for {node}").format(node=node)) self.node_hooks[node] += 1 else: # first hook on this node @@ -98,29 +98,29 @@ node, items_cb=self._itemsReceived ) self.node_hooks[node] = 0 - log.info(_(u"node manager installed on {node}").format(node=node)) + log.info(_("node manager installed on {node}").format(node=node)) def _removeNodeManager(self, client, node): try: self.node_hooks[node] -= 1 except KeyError: - log.error(_(u"trying to remove a {node} without hook").format(node=node)) + log.error(_("trying to remove a {node} without hook").format(node=node)) else: if self.node_hooks[node] == 0: del self.node_hooks[node] self.host.plugins["XEP-0060"].removeManagedNode(node, self._itemsReceived) - log.debug(_(u"hook removed")) + log.debug(_("hook removed")) else: - log.debug(_(u"node still needed for an other hook")) + log.debug(_("node still needed for an other hook")) def installHook(self, client, service, node, hook_type, hook_arg, persistent): if hook_type not in HOOK_TYPES: raise exceptions.DataError( - _(u"{hook_type} is not handled").format(hook_type=hook_type) + _("{hook_type} is not handled").format(hook_type=hook_type) ) if hook_type != HOOK_TYPE_PYTHON_FILE: raise NotImplementedError( - _(u"{hook_type} hook type not implemented yet").format( + _("{hook_type} hook type not implemented yet").format( hook_type=hook_type ) ) @@ -136,8 +136,8 @@ hooks_list.append(hook_data) log.info( - _(u"{persistent} hook installed on {node} for {profile}").format( - persistent=_(u"persistent") if persistent else _(u"temporary"), + _("{persistent} hook installed on {node} for {profile}").format( + persistent=_("persistent") if persistent else _("temporary"), node=node, profile=client.profile, ) @@ -160,18 +160,18 @@ try: if hook_type == HOOK_TYPE_PYTHON_FILE: hook_globals = {} - execfile(hook_data["arg"], hook_globals) + exec(compile(open(hook_data["arg"], "rb").read(), hook_data["arg"], 'exec'), hook_globals) callback = hook_globals["hook"] else: raise NotImplementedError( - _(u"{hook_type} hook type not implemented yet").format( + _("{hook_type} hook type not implemented yet").format( hook_type=hook_type ) ) except Exception as e: log.warning( _( - u"Can't load Pubsub hook at node {node}, it will be removed: {reason}" + "Can't load Pubsub hook at node {node}, it will be removed: {reason}" ).format(node=node, reason=e) ) hooks_list.remove(hook_data) @@ -183,7 +183,7 @@ except Exception as e: log.warning( _( - u"Error 
while running Pubsub hook for node {node}: {msg}" + "Error while running Pubsub hook for node {node}: {msg}" ).format(node=node, msg=e) ) @@ -193,9 +193,9 @@ return self.addHook( client, service, - unicode(node), - unicode(hook_type), - unicode(hook_arg), + str(node), + str(hook_type), + str(hook_arg), persistent, ) @@ -242,11 +242,11 @@ if node in hooks: for hook_data in hooks[node]: if ( - service != hook_data[u"service"] + service != hook_data["service"] or hook_type is not None - and hook_type != hook_data[u"type"] + and hook_type != hook_data["type"] or hook_arg is not None - and hook_arg != hook_data[u"arg"] + and hook_arg != hook_data["arg"] ): continue hooks[node].remove(hook_data) @@ -263,8 +263,8 @@ def _listHooks(self, profile): hooks_list = self.listHooks(self.host.getClient(profile)) for hook in hooks_list: - hook[u"service"] = hook[u"service"].full() - hook[u"persistent"] = C.boolConst(hook[u"persistent"]) + hook["service"] = hook["service"].full() + hook["persistent"] = C.boolConst(hook["persistent"]) return hooks_list def listHooks(self, client): @@ -272,15 +272,15 @@ hooks_list = [] for hooks in (client._hooks, client._hooks_temporary): persistent = hooks is client._hooks - for node, hooks_data in hooks.iteritems(): + for node, hooks_data in hooks.items(): for hook_data in hooks_data: hooks_list.append( { - u"service": hook_data[u"service"], - u"node": node, - u"type": hook_data[u"type"], - u"arg": hook_data[u"arg"], - u"persistent": persistent, + "service": hook_data["service"], + "node": node, + "type": hook_data["type"], + "arg": hook_data["arg"], + "persistent": persistent, } ) return hooks_list
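execfile() no longer exists in Python 3; the hook loader above inlines the usual replacement, compiling the file's bytes and exec()ing them into a fresh globals dict so the module-level hook callable can be retrieved. A standalone sketch of the same mechanism; the hook file content and path are illustrative:

    import os
    import tempfile

    # write an illustrative hook file defining a module-level hook() callable
    hook_src = "def hook(host, item, profile):\n    return 'hooked: ' + item\n"
    with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as f:
        f.write(hook_src)
        hook_path = f.name

    # Python 3 replacement for execfile(hook_path, hook_globals)
    hook_globals = {}
    with open(hook_path, "rb") as hook_file:
        exec(compile(hook_file.read(), hook_path, "exec"), hook_globals)

    callback = hook_globals["hook"]
    assert callback(None, "item-1", "default") == "hooked: item-1"
    os.unlink(hook_path)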
--- a/sat/plugins/plugin_exp_pubsub_schema.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_pubsub_schema.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Pubsub Schemas @@ -20,7 +20,7 @@ from collections import Iterable import copy import itertools -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber import jid from twisted.words.protocols.jabber.xmlstream import XMPPHandler from twisted.internet import defer @@ -38,23 +38,23 @@ log = getLogger(__name__) -NS_SCHEMA = u"https://salut-a-toi/protocol/schema:0" +NS_SCHEMA = "https://salut-a-toi/protocol/schema:0" PLUGIN_INFO = { - C.PI_NAME: u"PubSub Schema", - C.PI_IMPORT_NAME: u"PUBSUB_SCHEMA", - C.PI_TYPE: u"EXP", + C.PI_NAME: "PubSub Schema", + C.PI_IMPORT_NAME: "PUBSUB_SCHEMA", + C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"IDENTITY"], - C.PI_MAIN: u"PubsubSchema", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: _(u"""Handle Pubsub data schemas"""), + C.PI_DEPENDENCIES: ["XEP-0060", "IDENTITY"], + C.PI_MAIN: "PubsubSchema", + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: _("""Handle Pubsub data schemas"""), } class PubsubSchema(object): def __init__(self, host): - log.info(_(u"PubSub Schema initialization")) + log.info(_("PubSub Schema initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] self._i = self.host.plugins["IDENTITY"] @@ -64,7 +64,7 @@ in_sign="sss", out_sign="s", method=self._getSchema, - async=True, + async_=True, ) host.bridge.addMethod( "psSchemaSet", @@ -72,15 +72,16 @@ in_sign="ssss", out_sign="", method=self._setSchema, - async=True, + async_=True, ) host.bridge.addMethod( "psSchemaUIGet", ".plugin", in_sign="sss", out_sign="s", - method=utils.partial(self._getUISchema, default_node=None), - async=True, + method=lambda service, nodeIdentifier, profile_key: self._getUISchema( + service, nodeIdentifier, default_node=None, profile_key=profile_key), + async_=True, ) host.bridge.addMethod( "psItemsFormGet", @@ -88,7 +89,7 @@ in_sign="ssssiassa{ss}s", out_sign="(asa{ss})", method=self._getDataFormItems, - async=True, + async_=True, ) host.bridge.addMethod( "psItemFormSend", @@ -96,7 +97,7 @@ in_sign="ssa{sas}ssa{ss}s", out_sign="s", method=self._sendDataFormItem, - async=True, + async_=True, ) def getHandler(self, client): @@ -104,7 +105,7 @@ def _getSchemaBridgeCb(self, schema_elt): if schema_elt is None: - return u"" + return "" return schema_elt.toXml() def _getSchema(self, service, nodeIdentifier, profile_key=C.PROF_KEY_NONE): @@ -135,7 +136,7 @@ @return (domish.Element, None): schema (<x> element) None if not schema has been set on this node """ - iq_elt = client.IQ(u"get") + iq_elt = client.IQ("get") if service is not None: iq_elt["to"] = service.full() pubsub_elt = iq_elt.addElement((NS_SCHEMA, "pubsub")) @@ -163,12 +164,12 @@ the form should not be modified if copy_form is not set """ if schema is None: - log.debug(_(u"unspecified schema, we need to request it")) + log.debug(_("unspecified schema, we need to request it")) schema = yield self.getSchema(client, service, nodeIdentifier) if schema is None: raise exceptions.DataError( _( - u"no schema specified, and this node has no schema either, we can't construct the data form" + "no schema specified, and this node has no schema either, we can't construct the data form" ) ) elif isinstance(schema, data_form.Form): @@ -179,7 +180,7 @@ try: form = 
data_form.Form.fromElement(schema) except data_form.Error as e: - raise exceptions.DataError(_(u"Invalid Schema: {msg}").format(msg=e)) + raise exceptions.DataError(_("Invalid Schema: {msg}").format(msg=e)) form.formType = form_type defer.returnValue(form) @@ -192,7 +193,7 @@ profile_key=C.PROF_KEY_NONE): if not nodeIdentifier: if not default_node: - raise ValueError(_(u"nodeIndentifier needs to be set")) + raise ValueError(_("nodeIndentifier needs to be set")) nodeIdentifier = default_node client = self.host.getClient(profile_key) service = None if not service else jid.JID(service) @@ -233,7 +234,7 @@ client = self.host.getClient(profile_key) service = jid.JID(service) if service else None if not node: - raise exceptions.DataError(_(u"empty node is not allowed")) + raise exceptions.DataError(_("empty node is not allowed")) if schema: schema = generic.parseXml(schema.encode("utf-8")) else: @@ -276,7 +277,7 @@ if not nodeIdentifier: if not default_node: raise ValueError( - _(u"default_node must be set if nodeIdentifier is not set") + _("default_node must be set if nodeIdentifier is not set") ) nodeIdentifier = default_node # we need the initial form to get options of fields when suitable @@ -296,7 +297,7 @@ items, metadata = items_data items_xmlui = [] for item_elt in items: - for x_elt in item_elt.elements((data_form.NS_X_DATA, u"x")): + for x_elt in item_elt.elements((data_form.NS_X_DATA, "x")): form = data_form.Form.fromElement(x_elt) if form_ns and form.formNamespace != form_ns: continue @@ -307,9 +308,9 @@ # are not checked prepend=( ("label", "id"), - ("text", item_elt["id"], u"id"), + ("text", item_elt["id"], "id"), ("label", "publisher"), - ("text", item_elt.getAttribute("publisher", ""), u"publisher"), + ("text", item_elt.getAttribute("publisher", ""), "publisher"), ), filters=filters, read_only=False, @@ -336,7 +337,7 @@ extra, deserialise=True, ) - d.addCallback(lambda ret: ret or u"") + d.addCallback(lambda ret: ret or "") return d @defer.inlineCallbacks @@ -361,22 +362,22 @@ client, service, nodeIdentifier, schema, form_type="submit" ) - for name, values_list in values.iteritems(): + for name, values_list in values.items(): try: field = form.fields[name] except KeyError: log.warning( - _(u"field {name} doesn't exist, ignoring it").format(name=name) + _("field {name} doesn't exist, ignoring it").format(name=name) ) continue - if isinstance(values_list, basestring) or not isinstance( + if isinstance(values_list, str) or not isinstance( values_list, Iterable ): values_list = [values_list] if deserialise: - if field.fieldType == u"boolean": + if field.fieldType == "boolean": values_list = [C.bool(v) for v in values_list] - elif field.fieldType == u"text-multi": + elif field.fieldType == "text-multi": # for text-multi, lines must be put on separate values values_list = list( itertools.chain(*[v.splitlines() for v in values_list]) @@ -384,9 +385,9 @@ elif xml_tools.isXHTMLField(field): values_list = [generic.parseXml(v.encode("utf-8")) for v in values_list] - elif u"jid" in (field.fieldType or u""): + elif "jid" in (field.fieldType or ""): values_list = [jid.JID(v) for v in values_list] - if u"list" in (field.fieldType or u""): + if "list" in (field.fieldType or ""): # for lists, we check that given values are allowed in form allowed_values = [o.value for o in field.options] values_list = [v for v in values_list if v in allowed_values] @@ -419,26 +420,26 @@ main use case is using a textbox for labels """ - if widget_type != u"textbox": + if widget_type != "textbox": return 
widget_type, args, kwargs - widget_type = u"list" - options = [o for o in args.pop(0).split(u"\n") if o] + widget_type = "list" + options = [o for o in args.pop(0).split("\n") if o] kwargs = { "options": options, "name": kwargs.get("name"), - "styles": (u"noselect", u"extensible", u"reducible"), + "styles": ("noselect", "extensible", "reducible"), } return widget_type, args, kwargs def dateFilter(self, form_xmlui, widget_type, args, kwargs): """Convert a string with a date to a unix timestamp""" - if widget_type != u"string" or not args[0]: + if widget_type != "string" or not args[0]: return widget_type, args, kwargs # we convert XMPP date to timestamp try: - args[0] = unicode(date_utils.date_parse(args[0])) + args[0] = str(date_utils.date_parse(args[0])) except Exception as e: - log.warning(_(u"Can't parse date field: {msg}").format(msg=e)) + log.warning(_("Can't parse date field: {msg}").format(msg=e)) return widget_type, args, kwargs ## Helper methods ## @@ -479,7 +480,7 @@ # have to modify them if C.bool(extra.get("labels_as_list", C.BOOL_FALSE)): filters = filters.copy() - filters[u"labels"] = self.textbox2ListFilter + filters["labels"] = self.textbox2ListFilter client, service, node, max_items, extra, sub_id = self.prepareBridgeGet( service, node, max_items, sub_id, extra, profile_key ) @@ -536,7 +537,7 @@ item_elt = items_data[0][0] except Exception as e: log.warning( - _(u"Can't get previous item, update ignored: {reason}").format( + _("Can't get previous item, update ignored: {reason}").format( reason=e ) ) @@ -546,13 +547,13 @@ if form is None: log.warning( _( - u"Can't parse previous item, update ignored: data form not found" + "Can't parse previous item, update ignored: data form not found" ).format(reason=e) ) else: - for name, field in form.fields.iteritems(): + for name, field in form.fields.items(): if name not in values: - values[name] = u"\n".join(unicode(v) for v in field.values) + values[name] = "\n".join(str(v) for v in field.values) def _set(self, service, node, values, schema=None, item_id=None, extra=None, default_node=None, form_ns=None, fill_author=True, @@ -578,7 +579,7 @@ default_node=default_node, fill_author=fill_author, ) - d.addCallback(lambda ret: ret or u"") + d.addCallback(lambda ret: ret or "") return d @defer.inlineCallbacks @@ -605,15 +606,15 @@ extra = {} if not node: if default_node is None: - raise ValueError(_(u"default_node must be set if node is not set")) + raise ValueError(_("default_node must be set if node is not set")) node = default_node now = utils.xmpp_date() if not item_id: values["created"] = now - elif extra.get(u"update", False): + elif extra.get("update", False): if item_id is None: raise exceptions.DataError( - _(u'if extra["update"] is set, item_id must be set too') + _('if extra["update"] is set, item_id must be set too') ) yield self.copyMissingValues(client, service, node, item_id, form_ns, values) @@ -630,8 +631,8 @@ defer.returnValue(item_id) +@implementer(iwokkel.IDisco) class SchemaHandler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, service, nodeIdentifier=""): return [disco.DiscoFeature(NS_SCHEMA)]
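The form-handling code above keeps a type check that changed shape in the port: basestring is gone, so a plain str test is combined with an Iterable check to wrap scalar field values into a list. Under Python 3 the Iterable ABC lives in collections.abc (the plain collections alias is deprecated and was removed in Python 3.10). A small sketch of that normalisation, with illustrative field values:

    from collections.abc import Iterable

    def as_value_list(values):
        """Wrap a scalar (including a str) into a list, keep real iterables as lists."""
        if isinstance(values, str) or not isinstance(values, Iterable):
            return [values]
        return list(values)

    assert as_value_list("true") == ["true"]          # str is iterable but stays atomic
    assert as_value_list(42) == [42]                  # non-iterable scalar
    assert as_value_list(["a", "b"]) == ["a", "b"]    # already a list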
--- a/sat/plugins/plugin_import.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_import.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for generic data import handling @@ -38,7 +38,7 @@ C.PI_DEPENDENCIES: [], C.PI_MAIN: "ImportPlugin", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Generic import plugin, base for specialized importers"""), + C.PI_DESCRIPTION: _("""Generic import plugin, base for specialized importers"""), } Importer = collections.namedtuple("Importer", ("callback", "short_desc", "long_desc")) @@ -64,7 +64,7 @@ @param name(unicode): import handler name """ assert name == name.lower().strip() - log.info(_(u"initializing {name} import handler").format(name=name)) + log.info(_("initializing {name} import handler").format(name=name)) import_handler.name = name import_handler.register = partial(self.register, import_handler) import_handler.unregister = partial(self.unregister, import_handler) @@ -93,7 +93,7 @@ in_sign="ssa{ss}sss", out_sign="s", method=_import, - async=True, + async_=True, ) self.host.bridge.addMethod( name + "ImportList", @@ -115,7 +115,7 @@ return client._import[import_handler.name][progress_id] def listImporters(self, import_handler): - importers = import_handler.importers.keys() + importers = list(import_handler.importers.keys()) importers.sort() return [ (name, import_handler.importers[name].short_desc) @@ -132,7 +132,7 @@ importer = import_handler.importers[name] except KeyError: raise exceptions.NotFound( - u"{handler_name} importer not found [{name}]".format( + "{handler_name} importer not found [{name}]".format( handler_name=import_handler.name, name=name ) ) @@ -150,7 +150,7 @@ profile=C.PROF_KEY_NONE, ): client = self.host.getClient(profile) - options = {key: unicode(value) for key, value in options.iteritems()} + options = {key: str(value) for key, value in options.items()} for option in import_handler.BOOL_OPTIONS: try: options[option] = C.bool(options[option]) @@ -161,14 +161,14 @@ options[option] = json.loads(options[option]) except ValueError: raise exceptions.DataError( - _(u"invalid json option: {name}").format(name=option) + _("invalid json option: {name}").format(name=option) ) pubsub_service = jid.JID(pubsub_service) if pubsub_service else None return self.doImport( client, import_handler, - unicode(name), - unicode(location), + str(name), + str(location), options, pubsub_service, pubsub_node or None, @@ -202,7 +202,7 @@ if options is None: options = {} else: - for opt_name, opt_default in import_handler.OPT_DEFAULTS.iteritems(): + for opt_name, opt_default in import_handler.OPT_DEFAULTS.items(): # we want a filled options dict, with all empty or False values removed try: value = options[opt_name] @@ -216,21 +216,21 @@ try: importer = import_handler.importers[name] except KeyError: - raise exceptions.NotFound(u"Importer [{}] not found".format(name)) + raise exceptions.NotFound("Importer [{}] not found".format(name)) items_import_data, items_count = yield importer.callback( client, location, options ) - progress_id = unicode(uuid.uuid4()) + progress_id = str(uuid.uuid4()) try: _import = client._import except AttributeError: _import = client._import = {} progress_data = _import.setdefault(import_handler.name, {}) - progress_data[progress_id] = {u"position": "0"} + progress_data[progress_id] = {"position": "0"} if items_count is not None: - progress_data[progress_id]["size"] = unicode(items_count) + progress_data[progress_id]["size"] = 
str(items_count) metadata = { - "name": u"{}: {}".format(name, location), + "name": "{}: {}".format(name, location), "direction": "out", "type": import_handler.name.upper() + "_IMPORT", } @@ -242,8 +242,8 @@ ) self.host.bridge.progressStarted(progress_id, metadata, client.profile) session = { # session data, can be used by importers - u"root_service": pubsub_service, - u"root_node": pubsub_node, + "root_service": pubsub_service, + "root_node": pubsub_node, } self.recursiveImport( client, @@ -306,11 +306,11 @@ recurse_kwargs.setdefault("options", options) recurse_kwargs["return_data"] = return_data recurse_kwargs["depth"] = depth + 1 - log.debug(_(u"uploading subitems")) + log.debug(_("uploading subitems")) yield self.recursiveImport(**recurse_kwargs) if depth == 0: - client._import[import_handler.name][progress_id]["position"] = unicode( + client._import[import_handler.name][progress_id]["position"] = str( idx + 1 ) @@ -338,7 +338,7 @@ if name in import_handler.importers: raise exceptions.ConflictError( _( - u"An {handler_name} importer with the name {name} already exist" + "An {handler_name} importer with the name {name} already exist" ).format(handler_name=import_handler.name, name=name) ) import_handler.importers[name] = Importer(callback, short_desc, long_desc)
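listImporters() above shows another consequence of dict views: keys() no longer returns a list, so it has no sort() method and the keys are copied into a list first (sorted() would be the one-liner alternative). A tiny sketch with illustrative importer names:

    importers = {"dotclear": object(), "dokuwiki": object(), "wordpress": object()}

    # Python 2: names = importers.keys(); names.sort()
    names = list(importers.keys())
    names.sort()
    assert names == sorted(importers) == ["dokuwiki", "dotclear", "wordpress"]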
--- a/sat/plugins/plugin_merge_req_mercurial.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_merge_req_mercurial.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for import external blogs @@ -35,16 +35,16 @@ C.PI_DEPENDENCIES: ["MERGE_REQUESTS"], C.PI_MAIN: "MercurialHandler", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Merge request handler for Mercurial""") + C.PI_DESCRIPTION: _("""Merge request handler for Mercurial""") } -SHORT_DESC = D_(u"handle Mercurial repository") -CLEAN_RE = re.compile(ur'[^\w -._]', flags=re.UNICODE) +SHORT_DESC = D_("handle Mercurial repository") +CLEAN_RE = re.compile(r'[^\w -._]', flags=re.UNICODE) class MercurialProtocol(async_process.CommandProtocol): """handle hg commands""" - name = u"Mercurial" + name = "Mercurial" command = None @classmethod @@ -54,7 +54,7 @@ @param path(unicode): path to the repository @param command(unicode): hg command to run """ - assert u"path" not in kwargs + assert "path" not in kwargs kwargs["path"] = path # FIXME: we have to use this workaround because Twisted's protocol.ProcessProtocol # is not using new style classes. This can be removed once moved to @@ -65,15 +65,15 @@ class MercurialHandler(object): - data_types = (u'mercurial_changeset',) + data_types = ('mercurial_changeset',) def __init__(self, host): - log.info(_(u"Mercurial merge request handler initialization")) + log.info(_("Mercurial merge request handler initialization")) try: MercurialProtocol.command = which('hg')[0] except IndexError: - raise exceptions.NotFound(_(u"Mercurial executable (hg) not found, " - u"can't use Mercurial handler")) + raise exceptions.NotFound(_("Mercurial executable (hg) not found, " + "can't use Mercurial handler")) self.host = host self._m = host.plugins['MERGE_REQUESTS'] self._m.register('mercurial', self, self.data_types, SHORT_DESC) @@ -92,11 +92,11 @@ def import_(self, repository, data, data_type, item_id, service, node, extra): parsed_data = self.parse(data) try: - parsed_name = parsed_data[0][u'commit_msg'].split(u'\n')[0] - parsed_name = CLEAN_RE.sub(u'', parsed_name)[:40] + parsed_name = parsed_data[0]['commit_msg'].split('\n')[0] + parsed_name = CLEAN_RE.sub('', parsed_name)[:40] except Exception: - parsed_name = u'' - name = u'mr_{item_id}_{parsed_name}'.format(item_id=CLEAN_RE.sub(u'', item_id), + parsed_name = '' + name = 'mr_{item_id}_{parsed_name}'.format(item_id=CLEAN_RE.sub('', item_id), parsed_name=parsed_name) return MercurialProtocol.run(repository, 'qimport', '-g', '--name', name, '--encoding=utf-8', '-', stdin=data) @@ -111,51 +111,51 @@ diff = [] state = 'init' if lines[0] != '# HG changeset patch': - raise exceptions.DataError(_(u'invalid changeset signature')) + raise exceptions.DataError(_('invalid changeset signature')) # line index of this patch in the whole data patch_idx = total_lines - len(lines) del lines[0] for idx, line in enumerate(lines): if state == 'init': - if line.startswith(u'# '): - if line.startswith(u'# User '): + if line.startswith('# '): + if line.startswith('# User '): elems = line[7:].split() if not elems: continue last = elems[-1] - if (last.startswith(u'<') and last.endswith(u'>') - and u'@' in last): + if (last.startswith('<') and last.endswith('>') + and '@' in last): patch[self._m.META_EMAIL] = elems.pop()[1:-1] - patch[self._m.META_AUTHOR] = u' '.join(elems) - elif line.startswith(u'# Date '): + patch[self._m.META_AUTHOR] = ' '.join(elems) + elif line.startswith('# Date '): time_data = 
line[7:].split() if len(time_data) != 2: - log.warning(_(u'unexpected time data: {data}') + log.warning(_('unexpected time data: {data}') .format(data=line[7:])) continue patch[self._m.META_TIMESTAMP] = (int(time_data[0]) + int(time_data[1])) - elif line.startswith(u'# Node ID '): + elif line.startswith('# Node ID '): patch[self._m.META_HASH] = line[10:] - elif line.startswith(u'# Parent '): + elif line.startswith('# Parent '): patch[self._m.META_PARENT_HASH] = line[10:] else: state = 'commit_msg' if state == 'commit_msg': - if line.startswith(u'diff --git a/'): + if line.startswith('diff --git a/'): state = 'diff' patch[self._m.META_DIFF_IDX] = patch_idx + idx + 1 else: commit_msg.append(line) if state == 'diff': - if line.startswith(u'# ') or idx == len(lines)-1: + if line.startswith('# ') or idx == len(lines)-1: # a new patch is starting or we have reached end of patches if idx == len(lines)-1: # end of patches, we need to keep the line diff.append(line) - patch[self._m.META_COMMIT_MSG] = u'\n'.join(commit_msg) - patch[self._m.META_DIFF] = u'\n'.join(diff) + patch[self._m.META_COMMIT_MSG] = '\n'.join(commit_msg) + patch[self._m.META_DIFF] = '\n'.join(diff) patches.append(patch) if idx == len(lines)-1: del lines[:]
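The ur'' prefix used for CLEAN_RE is a syntax error in Python 3: a raw string literal is already text, and re.UNICODE is the default for str patterns, so r'' with the explicit flag (kept above for clarity) behaves the same. A quick sketch with an illustrative commit message:

    import re

    # same pattern as above; str patterns match Unicode word characters by default
    CLEAN_RE = re.compile(r'[^\w -._]', flags=re.UNICODE)

    parsed_name = CLEAN_RE.sub('', 'révision: fix /tmp handling')[:40]
    assert parsed_name == 'révision fix tmp handling'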
--- a/sat/plugins/plugin_misc_account.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_account.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for account creation (experimental) @@ -26,7 +26,7 @@ from sat.memory.memory import Sessions from sat.memory.crypto import PasswordHasher from sat.core.constants import Const as C -import ConfigParser +import configparser from twisted.internet import defer from twisted.python.failure import Failure from twisted.words.protocols.jabber import jid @@ -45,7 +45,7 @@ C.PI_RECOMMENDATIONS: ["GROUPBLOG"], C.PI_MAIN: "MiscAccount", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""SàT account creation"""), + C.PI_DESCRIPTION: _("""SàT account creation"""), } CONFIG_SECTION = "plugin account" @@ -71,7 +71,7 @@ } WELCOME_MSG = D_( - u"""Welcome to Libervia, the web interface of Salut à Toi. + """Welcome to Libervia, the web interface of Salut à Toi. Your account on {domain} has been successfully created. This is a demonstration version to show you the current status of the project. @@ -94,7 +94,7 @@ """ ) -DEFAULT_DOMAIN = u"example.net" +DEFAULT_DOMAIN = "example.net" class MiscAccount(object): @@ -104,7 +104,7 @@ # TODO: cleaning, separate email handling, more configuration/tests, fixes def __init__(self, host): - log.info(_(u"Plugin Account initialization")) + log.info(_("Plugin Account initialization")) self.host = host host.bridge.addMethod( "registerSatAccount", @@ -112,7 +112,7 @@ in_sign="sss", out_sign="", method=self._registerAccount, - async=True, + async_=True, ) host.bridge.addMethod( "getNewAccountDomain", @@ -120,7 +120,7 @@ in_sign="", out_sign="s", method=self.getNewAccountDomain, - async=False, + async_=False, ) host.bridge.addMethod( "getAccountDialogUI", @@ -128,7 +128,7 @@ in_sign="s", out_sign="s", method=self._getAccountDialogUI, - async=False, + async_=False, ) host.bridge.addMethod( "asyncConnectWithXMPPCredentials", @@ -136,7 +136,7 @@ in_sign="ss", out_sign="b", method=self.asyncConnectWithXMPPCredentials, - async=True, + async_=True, ) self.fixEmailAdmins() @@ -175,13 +175,13 @@ if not admin_email: return log.warning( - u"admin_email parameter is deprecated, please use email_admins_list instead" + "admin_email parameter is deprecated, please use email_admins_list instead" ) param_name = "email_admins_list" try: section = "" value = self.host.memory.getConfig(section, param_name, Exception) - except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): + except (configparser.NoOptionError, configparser.NoSectionError): section = CONFIG_SECTION value = self.host.memory.getConfig( section, param_name, default_conf[param_name] @@ -198,7 +198,7 @@ # they can now be in [DEFAULT] section try: value = self.host.memory.getConfig(None, name, Exception) - except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): + except (configparser.NoOptionError, configparser.NoSectionError): pass else: return value @@ -263,7 +263,7 @@ d = defer.succeed(None) jid_ = jid.JID(jid_s) else: - jid_s = profile + u"@" + self.getNewAccountDomain() + jid_s = profile + "@" + self.getNewAccountDomain() jid_ = jid.JID(jid_s) d = self.host.plugins["XEP-0077"].registerNewAccount(jid_, password) @@ -289,7 +289,7 @@ def _sendEmailEb(self, failure_, email): # TODO: return error code to user log.error( - _(u"Failed to send account creation confirmation to {email}: {msg}").format( + _("Failed to send account creation confirmation to {email}: {msg}").format( 
email=email, msg=failure_ ) ) @@ -303,27 +303,27 @@ admins_emails = self.getConfig("email_admins_list") if not admins_emails: log.warning( - u"No known admin email, we can't send email to administrator(s).\nPlease fill email_admins_list parameter" + "No known admin email, we can't send email to administrator(s).\nPlease fill email_admins_list parameter" ) d_admin = defer.fail(exceptions.DataError("no admin email")) else: - subject = _(u"New Libervia account created") - body = u"""New account created: {profile} [{email}]""".format( + subject = _("New Libervia account created") + body = """New account created: {profile} [{email}]""".format( profile=profile, # there is no email when an existing XMPP account is used - email=email or u"<no email>", + email=email or "<no email>", ) d_admin = sat_email.sendEmail(self.host, admins_emails, subject, body) - admins_emails_txt = u", ".join([u"<" + addr + u">" for addr in admins_emails]) + admins_emails_txt = ", ".join(["<" + addr + ">" for addr in admins_emails]) d_admin.addCallbacks( lambda __: log.debug( - u"Account creation notification sent to admin(s) {}".format( + "Account creation notification sent to admin(s) {}".format( admins_emails_txt ) ), lambda __: log.error( - u"Failed to send account creation notification to admin {}".format( + "Failed to send account creation notification to admin {}".format( admins_emails_txt ) ), @@ -333,9 +333,9 @@ return d_admin jid_s = self.host.memory.getParamA( - u"JabberID", u"Connection", profile_key=profile + "JabberID", "Connection", profile_key=profile ) - subject = _(u"Your Libervia account has been created") + subject = _("Your Libervia account has been created") body = _(WELCOME_MSG).format(profile=profile, jid=jid_s, domain=domain) # XXX: this will not fail when the email address doesn't exist @@ -344,7 +344,7 @@ d_user = sat_email.sendEmail(self.host, [email], subject, body) d_user.addCallbacks( lambda __: log.debug( - u"Account creation confirmation sent to <{}>".format(email) + "Account creation confirmation sent to <{}>".format(email) ), self._sendEmailEb, ) @@ -359,7 +359,7 @@ if not domain: log.warning( _( - u'xmpp_domain needs to be set in sat.conf. Using "{default}" meanwhile' + 'xmpp_domain needs to be set in sat.conf. Using "{default}" meanwhile' ).format(default=DEFAULT_DOMAIN) ) return DEFAULT_DOMAIN
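The ConfigParser module was renamed configparser in Python 3 (the exception classes keep their names), which is why the fallback logic above now catches configparser.NoOptionError / NoSectionError. A minimal sketch; section and option names are illustrative:

    import configparser

    config = configparser.ConfigParser()
    config.read_string("[plugin account]\nnew_account_domain = example.net\n")

    try:
        admins = config.get("plugin account", "email_admins_list")
    except (configparser.NoOptionError, configparser.NoSectionError):
        admins = None          # fall back, as the plugin does with its defaults
    assert admins is None
    assert config.get("plugin account", "new_account_domain") == "example.net"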
--- a/sat/plugins/plugin_misc_android.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_android.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for file tansfer @@ -36,7 +36,7 @@ C.PI_NAME: "Android ", C.PI_IMPORT_NAME: "android", C.PI_TYPE: C.PLUG_TYPE_MISC, - C.PI_RECOMMENDATIONS: [u"XEP-0352"], + C.PI_RECOMMENDATIONS: ["XEP-0352"], C.PI_MAIN: "AndroidPlugin", C.PI_HANDLER: "no", C.PI_DESCRIPTION: D_( @@ -45,7 +45,7 @@ } if sys.platform != "android": - raise exceptions.CancelError(u"this module is not needed on this platform") + raise exceptions.CancelError("this module is not needed on this platform") from plyer import notification, vibrator @@ -60,7 +60,7 @@ PARAM_VIBRATE_CATEGORY = "Notifications" PARAM_VIBRATE_NAME = "vibrate" -PARAM_VIBRATE_LABEL = D_(u"Vibrate on notifications") +PARAM_VIBRATE_LABEL = D_("Vibrate on notifications") SOCKET_DIR = "/data/data/org.salutatoi.cagou/" SOCKET_FILE = ".socket" STATE_RUNNING = "running" @@ -101,9 +101,9 @@ f.seek(0, os.SEEK_END) size = f.tell() if size == 10: - log.info(u"seek() bug not present anymore, workaround code can be removed") + log.info("seek() bug not present anymore, workaround code can be removed") else: - log.warning(u"seek() bug detected, applying a workaround") + log.warning("seek() bug detected, applying a workaround") web_client.FileBodyProducer._determineLength = determineLength_workaround patch_seek_bug() @@ -118,7 +118,7 @@ if data in STATES: self.android_plugin.state = data else: - log.warning(u"Unexpected data: {data}".format(data=data)) + log.warning("Unexpected data: {data}".format(data=data)) class FrontendStateFactory(protocol.Factory): @@ -149,13 +149,13 @@ ) def __init__(self, host): - log.info(_(u"plugin Android initialization")) + log.info(_("plugin Android initialization")) self.host = host - self._csi = host.plugins.get(u'XEP-0352') + self._csi = host.plugins.get('XEP-0352') self._csi_timer = None host.memory.updateParams(self.params) try: - os.mkdir(SOCKET_DIR, 0700) + os.mkdir(SOCKET_DIR, 0o700) except OSError as e: if e.errno == 17: # dir already exists @@ -188,7 +188,7 @@ self.br = BroadcastReceiver( callback=lambda *args, **kwargs: reactor.callLater(0, self.onConnectivityChange), - actions=[u"android.net.conn.CONNECTIVITY_CHANGE"]) + actions=["android.net.conn.CONNECTIVITY_CHANGE"]) self.br.start() @@ -198,7 +198,7 @@ @state.setter def state(self, new_state): - log.debug(u"frontend state has changed: {state}".format(state=new_state)) + log.debug("frontend state has changed: {state}".format(state=new_state)) previous_state = self._state self._state = new_state if new_state == STATE_RUNNING: @@ -233,11 +233,11 @@ """ if (mess_data["message"] and mess_data["type"] != C.MESS_TYPE_GROUPCHAT and not mess_data["from"].userhostJID() == client.jid.userhostJID()): - message = mess_data["message"].itervalues().next() + message = next(iter(mess_data["message"].values())) try: - subject = mess_data["subject"].itervalues().next() + subject = next(iter(mess_data["subject"].values())) except StopIteration: - subject = u"Cagou new message" + subject = "Cagou new message" notification.notify(title=subject, message=message) if self.host.memory.getParamA( @@ -248,7 +248,7 @@ except Exception as e: # FIXME: vibrator is currently not working, # cf. 
https://github.com/kivy/plyer/issues/509 - log.warning(u"Can't use vibrator: {e}".format(e=e)) + log.warning("Can't use vibrator: {e}".format(e=e)) return mess_data def messageReceivedTrigger(self, client, message_elt, post_treat): @@ -306,24 +306,24 @@ else: net_type = NET_TYPE_OTHER if net_type != self._net_type: - log.info(u"connectivity has changed") + log.info("connectivity has changed") previous = self._net_type self._net_type = net_type if net_type == NET_TYPE_NONE: - log.info(u"no network active") + log.info("no network active") elif net_type == NET_TYPE_WIFI: - log.info(u"WIFI activated") + log.info("WIFI activated") elif net_type == NET_TYPE_MOBILE: - log.info(u"mobile data activated") + log.info("mobile data activated") else: - log.info(u"network activated (type={net_type_android})" + log.info("network activated (type={net_type_android})" .format(net_type_android=net_type_android)) self._handleNetworkChange(previous, net_type) else: - log.debug(u"_checkConnectivity called without network change ({net_type})" + log.debug("_checkConnectivity called without network change ({net_type})" .format(net_type = net_type)) def onConnectivityChange(self): - log.debug(u"onConnectivityChange called") + log.debug("onConnectivityChange called") self._checkConnectivity()
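Two more Python 3 details show up in the Android plugin: octal literals need the 0o prefix (os.mkdir(SOCKET_DIR, 0o700)), and dict views have neither itervalues() nor next(), so the first value of the message dict is fetched with next(iter(...)), still guarded by StopIteration for the empty case. A standalone sketch with an illustrative message payload:

    import stat

    mode = 0o700                       # was written 0700 under Python 2
    assert mode == stat.S_IRWXU

    mess_data = {"message": {"": "nouveau message"}, "subject": {}}
    message = next(iter(mess_data["message"].values()))
    try:
        subject = next(iter(mess_data["subject"].values()))
    except StopIteration:
        subject = "Cagou new message"
    assert (message, subject) == ("nouveau message", "Cagou new message")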
--- a/sat/plugins/plugin_misc_debug.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_debug.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing raw XML log
--- a/sat/plugins/plugin_misc_email_invitation.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_email_invitation.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for file tansfer @@ -41,23 +41,23 @@ C.PI_RECOMMENDATIONS: ["IDENTITY"], C.PI_MAIN: "InvitationsPlugin", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""invitation of people without XMPP account""") + C.PI_DESCRIPTION: _("""invitation of people without XMPP account""") } SUFFIX_MAX = 5 -INVITEE_PROFILE_TPL = u"guest@@{uuid}" -KEY_ID = u'id' -KEY_JID = u'jid' -KEY_CREATED = u'created' -KEY_LAST_CONNECTION = u'last_connection' -KEY_GUEST_PROFILE = u'guest_profile' -KEY_PASSWORD = u'password' -KEY_EMAILS_EXTRA = u'emails_extra' -EXTRA_RESERVED = {KEY_ID, KEY_JID, KEY_CREATED, u'jid_', u'jid', KEY_LAST_CONNECTION, +INVITEE_PROFILE_TPL = "guest@@{uuid}" +KEY_ID = 'id' +KEY_JID = 'jid' +KEY_CREATED = 'created' +KEY_LAST_CONNECTION = 'last_connection' +KEY_GUEST_PROFILE = 'guest_profile' +KEY_PASSWORD = 'password' +KEY_EMAILS_EXTRA = 'emails_extra' +EXTRA_RESERVED = {KEY_ID, KEY_JID, KEY_CREATED, 'jid_', 'jid', KEY_LAST_CONNECTION, KEY_GUEST_PROFILE, KEY_PASSWORD, KEY_EMAILS_EXTRA} -DEFAULT_SUBJECT = D_(u"You have been invited by {host_name} to {app_name}") -DEFAULT_BODY = D_(u"""Hello {name}! +DEFAULT_SUBJECT = D_("You have been invited by {host_name} to {app_name}") +DEFAULT_BODY = D_("""Hello {name}! You have received an invitation from {host_name} to participate to "{app_name}". To join, you just have to click on the following URL: @@ -73,34 +73,34 @@ class InvitationsPlugin(object): def __init__(self, host): - log.info(_(u"plugin Invitations initialization")) + log.info(_("plugin Invitations initialization")) self.host = host - self.invitations = persistent.LazyPersistentBinaryDict(u'invitations') + self.invitations = persistent.LazyPersistentBinaryDict('invitations') host.bridge.addMethod("invitationCreate", ".plugin", in_sign='sasssssssssa{ss}s', out_sign='a{ss}', method=self._create, - async=True) + async_=True) host.bridge.addMethod("invitationGet", ".plugin", in_sign='s', out_sign='a{ss}', method=self.get, - async=True) + async_=True) host.bridge.addMethod("invitationModify", ".plugin", in_sign='sa{ss}b', out_sign='', method=self._modify, - async=True) + async_=True) host.bridge.addMethod("invitationList", ".plugin", in_sign='s', out_sign='a{sa{ss}}', method=self._list, - async=True) + async_=True) def checkExtra(self, extra): if EXTRA_RESERVED.intersection(extra): raise ValueError( - _(u"You can't use following key(s) in extra, they are reserved: {}") - .format(u', '.join(EXTRA_RESERVED.intersection(extra)))) + _("You can't use following key(s) in extra, they are reserved: {}") + .format(', '.join(EXTRA_RESERVED.intersection(extra)))) - def _create(self, email=u'', emails_extra=None, jid_=u'', password=u'', name=u'', - host_name=u'', language=u'', url_template=u'', message_subject=u'', - message_body=u'', extra=None, profile=u''): + def _create(self, email='', emails_extra=None, jid_='', password='', name='', + host_name='', language='', url_template='', message_subject='', + message_body='', extra=None, profile=''): # XXX: we don't use **kwargs here to keep arguments name for introspection with # D-Bus bridge if emails_extra is None: @@ -109,10 +109,10 @@ if extra is None: extra = {} else: - extra = {unicode(k): unicode(v) for k,v in extra.iteritems()} + extra = {str(k): str(v) for k,v in extra.items()} kwargs = {"extra": extra, 
- KEY_EMAILS_EXTRA: [unicode(e) for e in emails_extra] + KEY_EMAILS_EXTRA: [str(e) for e in emails_extra] } # we need to be sure that values are unicode, else they won't be pickled correctly @@ -121,7 +121,7 @@ "url_template", "message_subject", "message_body", "profile"): value = locals()[key] if value: - kwargs[key] = unicode(value) + kwargs[key] = str(value) d = self.create(**kwargs) def serialize(data): data[KEY_JID] = data[KEY_JID].full() @@ -131,7 +131,7 @@ @defer.inlineCallbacks def create(self, **kwargs): - ur"""Create an invitation + r"""Create an invitation This will create an XMPP account and a profile, and use a UUID to retrieve them. The profile is automatically generated in the form guest@@[UUID], this way they @@ -194,29 +194,29 @@ extra = kwargs.pop('extra', {}) if set(kwargs).intersection(extra): raise ValueError( - _(u"You can't use following key(s) in both args and extra: {}").format( - u', '.join(set(kwargs).intersection(extra)))) + _("You can't use following key(s) in both args and extra: {}").format( + ', '.join(set(kwargs).intersection(extra)))) self.checkExtra(extra) - email = kwargs.pop(u'email', None) - emails_extra = kwargs.pop(u'emails_extra', []) + email = kwargs.pop('email', None) + emails_extra = kwargs.pop('emails_extra', []) if not email and emails_extra: raise ValueError( - _(u'You need to provide a main email address before using emails_extra')) + _('You need to provide a main email address before using emails_extra')) if (email is not None and not 'url_template' in kwargs and not 'message_body' in kwargs): raise ValueError( - _(u"You need to provide url_template if you use default message body")) + _("You need to provide url_template if you use default message body")) ## uuid - log.info(_(u"creating an invitation")) - id_ = unicode(shortuuid.uuid()) + log.info(_("creating an invitation")) + id_ = str(shortuuid.uuid()) ## XMPP account creation - password = kwargs.pop(u'password', None) + password = kwargs.pop('password', None) if password is None: password = utils.generatePassword() assert password @@ -228,13 +228,13 @@ # saved and could be used to encrypt profile password. 
extra[KEY_PASSWORD] = password - jid_ = kwargs.pop(u'jid_', None) + jid_ = kwargs.pop('jid_', None) if not jid_: domain = self.host.memory.getConfig(None, 'xmpp_domain') if not domain: # TODO: fallback to profile's domain - raise ValueError(_(u"You need to specify xmpp_domain in sat.conf")) - jid_ = u"invitation-{uuid}@{domain}".format(uuid=shortuuid.uuid(), + raise ValueError(_("You need to specify xmpp_domain in sat.conf")) + jid_ = "invitation-{uuid}@{domain}".format(uuid=shortuuid.uuid(), domain=domain) jid_ = jid.JID(jid_) if jid_.user: @@ -245,11 +245,11 @@ except error.StanzaError as e: prefix = jid_.user idx = 0 - while e.condition == u'conflict': + while e.condition == 'conflict': if idx >= SUFFIX_MAX: - raise exceptions.ConflictError(_(u"Can't create XMPP account")) - jid_.user = prefix + '_' + unicode(idx) - log.info(_(u"requested jid already exists, trying with {}".format( + raise exceptions.ConflictError(_("Can't create XMPP account")) + jid_.user = prefix + '_' + str(idx) + log.info(_("requested jid already exists, trying with {}".format( jid_.full()))) try: yield self.host.plugins['XEP-0077'].registerNewAccount(jid_, @@ -258,10 +258,10 @@ idx += 1 else: break - if e.condition != u'conflict': + if e.condition != 'conflict': raise e - log.info(_(u"account {jid_} created").format(jid_=jid_.full())) + log.info(_("account {jid_} created").format(jid_=jid_.full())) ## profile creation @@ -273,66 +273,66 @@ profile_key=guest_profile) yield self.host.memory.setParam("Password", password, "Connection", profile_key=guest_profile) - name = kwargs.pop(u'name', None) + name = kwargs.pop('name', None) if name is not None: - extra[u'name'] = name + extra['name'] = name try: - id_plugin = self.host.plugins[u'IDENTITY'] + id_plugin = self.host.plugins['IDENTITY'] except KeyError: pass else: yield self.host.connect(guest_profile, password) guest_client = self.host.getClient(guest_profile) - yield id_plugin.setIdentity(guest_client, {u'nick': name}) + yield id_plugin.setIdentity(guest_client, {'nick': name}) yield self.host.disconnect(guest_profile) ## email - language = kwargs.pop(u'language', None) + language = kwargs.pop('language', None) if language is not None: - extra[u'language'] = language.strip() + extra['language'] = language.strip() if email is not None: - extra[u'email'] = email + extra['email'] = email data_format.iter2dict(KEY_EMAILS_EXTRA, extra) - url_template = kwargs.pop(u'url_template', '') + url_template = kwargs.pop('url_template', '') format_args = { - u'uuid': id_, - u'app_name': C.APP_NAME, - u'app_url': C.APP_URL} + 'uuid': id_, + 'app_name': C.APP_NAME, + 'app_url': C.APP_URL} if name is None: - format_args[u'name'] = email + format_args['name'] = email else: - format_args[u'name'] = name + format_args['name'] = name - profile = kwargs.pop(u'profile', None) + profile = kwargs.pop('profile', None) if profile is None: - format_args[u'profile'] = u'' + format_args['profile'] = '' else: - format_args[u'profile'] = extra[u'profile'] = profile + format_args['profile'] = extra['profile'] = profile - host_name = kwargs.pop(u'host_name', None) + host_name = kwargs.pop('host_name', None) if host_name is None: - format_args[u'host_name'] = profile or _(u"somebody") + format_args['host_name'] = profile or _("somebody") else: - format_args[u'host_name'] = extra[u'host_name'] = host_name + format_args['host_name'] = extra['host_name'] = host_name invite_url = url_template.format(**format_args) - format_args[u'url'] = invite_url + format_args['url'] = invite_url yield 
sat_email.sendEmail( self.host, [email] + emails_extra, - (kwargs.pop(u'message_subject', None) or DEFAULT_SUBJECT).format( + (kwargs.pop('message_subject', None) or DEFAULT_SUBJECT).format( **format_args), - (kwargs.pop(u'message_body', None) or DEFAULT_BODY).format(**format_args), + (kwargs.pop('message_body', None) or DEFAULT_BODY).format(**format_args), ) ## extra data saving self.invitations[id_] = extra if kwargs: - log.warning(_(u"Not all arguments have been consumed: {}").format(kwargs)) + log.warning(_("Not all arguments have been consumed: {}").format(kwargs)) extra[KEY_ID] = id_ extra[KEY_JID] = jid_ @@ -348,7 +348,7 @@ return self.invitations[id_] def _modify(self, id_, new_extra, replace): - return self.modify(id_, {unicode(k): unicode(v) for k,v in new_extra.iteritems()}, + return self.modify(id_, {str(k): str(v) for k,v in new_extra.items()}, replace) def modify(self, id_, new_extra, replace=False): @@ -372,9 +372,9 @@ continue else: new_data = current_data - for k,v in new_extra.iteritems(): + for k,v in new_extra.items(): if k in EXTRA_RESERVED: - log.warning(_(u"Skipping reserved key {key}".format(k))) + log.warning(_("Skipping reserved key {key}".format(k))) continue if v: new_data[k] = v @@ -401,9 +401,9 @@ C.PROF_KEY_NONE: don't filter invitations @return list(unicode): invitations uids """ - invitations = yield self.invitations.items() + invitations = yield list(self.invitations.items()) if profile != C.PROF_KEY_NONE: - invitations = {id_:data for id_, data in invitations.iteritems() - if data.get(u'profile') == profile} + invitations = {id_:data for id_, data in invitations.items() + if data.get('profile') == profile} defer.returnValue(invitations)
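invitationCreate and invitationModify coerce every extra key and value with str() (formerly unicode()) before persisting them, so the stored invitation data stays homogeneous text. A tiny sketch of that coercion; the keys and values are illustrative:

    raw_extra = {"campaign": "atelier-2019", "seats": 3}

    # formerly {unicode(k): unicode(v) for k, v in raw_extra.iteritems()}
    extra = {str(k): str(v) for k, v in raw_extra.items()}
    assert extra == {"campaign": "atelier-2019", "seats": "3"}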
--- a/sat/plugins/plugin_misc_extra_pep.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_extra_pep.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for displaying messages from extra PEP services @@ -35,13 +35,13 @@ C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "ExtraPEP", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Display messages from extra PEP services"""), + C.PI_DESCRIPTION: _("""Display messages from extra PEP services"""), } -PARAM_KEY = u"Misc" -PARAM_NAME = u"blogs" -PARAM_LABEL = u"Blog authors following list" +PARAM_KEY = "Misc" +PARAM_NAME = "blogs" +PARAM_LABEL = "Blog authors following list" PARAM_DEFAULT = (jid.JID("salut-a-toi@libervia.org"),) @@ -62,11 +62,11 @@ "category_label": D_(PARAM_KEY), "param_name": PARAM_NAME, "param_label": D_(PARAM_LABEL), - "jids": u"\n".join({elt.toXml() for elt in params.createJidElts(PARAM_DEFAULT)}), + "jids": "\n".join({elt.toXml() for elt in params.createJidElts(PARAM_DEFAULT)}), } def __init__(self, host): - log.info(_(u"Plugin Extra PEP initialization")) + log.info(_("Plugin Extra PEP initialization")) self.host = host host.memory.updateParams(self.params)
--- a/sat/plugins/plugin_misc_file.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_file.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for file tansfer @@ -45,14 +45,14 @@ } -SENDING = D_(u"Please select a file to send to {peer}") -SENDING_TITLE = D_(u"File sending") +SENDING = D_("Please select a file to send to {peer}") +SENDING_TITLE = D_("File sending") CONFIRM = D_( - u'{peer} wants to send the file "{name}" to you:\n{desc}\n\nThe file has a size of {size_human}\n\nDo you accept ?' + '{peer} wants to send the file "{name}" to you:\n{desc}\n\nThe file has a size of {size_human}\n\nDo you accept ?' ) -CONFIRM_TITLE = D_(u"Confirm file transfer") -CONFIRM_OVERWRITE = D_(u"File {} already exists, are you sure you want to overwrite ?") -CONFIRM_OVERWRITE_TITLE = D_(u"File exists") +CONFIRM_TITLE = D_("Confirm file transfer") +CONFIRM_OVERWRITE = D_("File {} already exists, are you sure you want to overwrite ?") +CONFIRM_OVERWRITE_TITLE = D_("File exists") SECURITY_LIMIT = 30 PROGRESS_ID_KEY = "progress_id" @@ -70,7 +70,7 @@ in_sign="ssssa{ss}s", out_sign="a{ss}", method=self._fileSend, - async=True, + async_=True, ) self._file_callbacks = [] host.importMenu( @@ -109,14 +109,14 @@ @return (dict): action dictionary, with progress id in case of success, else xmlui message """ if not os.path.isfile(filepath): - raise exceptions.DataError(u"The given path doesn't link to a file") + raise exceptions.DataError("The given path doesn't link to a file") if not filename: filename = os.path.basename(filepath) or "_" for namespace, callback, priority, method_name in self._file_callbacks: has_feature = yield self.host.hasFeature(client, namespace, peer_jid) if has_feature: log.info( - u"{name} method will be used to send the file".format( + "{name} method will be used to send the file".format( name=method_name ) ) @@ -124,12 +124,12 @@ client, peer_jid, filepath, filename, file_desc, extra ) defer.returnValue({"progress": progress_id}) - msg = u"Can't find any method to send file to {jid}".format(jid=peer_jid.full()) + msg = "Can't find any method to send file to {jid}".format(jid=peer_jid.full()) log.warning(msg) defer.returnValue( { "xmlui": xml_tools.note( - u"Can't transfer file", msg, C.XMLUI_DATA_LVL_WARNING + "Can't transfer file", msg, C.XMLUI_DATA_LVL_WARNING ).toXml() } ) @@ -181,7 +181,7 @@ for data in self._file_callbacks: if namespace == data[0]: raise exceptions.ConflictError( - u"A method with this namespace is already registered" + "A method with this namespace is already registered" ) self._file_callbacks.append( (namespace, callback, priority, method_name or namespace) @@ -193,7 +193,7 @@ if data[0] == namespace: del [idx] return - raise exceptions.NotFound(u"The namespace to unregister doesn't exist") + raise exceptions.NotFound("The namespace to unregister doesn't exist") # Dialogs with user # the overwrite check is done here @@ -241,7 +241,7 @@ return False path = data["path"] file_data["file_path"] = file_path = os.path.join(path, file_data["name"]) - log.debug(u"destination file path set to {}".format(file_path)) + log.debug("destination file path set to {}".format(file_path)) # we manage case where file already exists if os.path.exists(file_path): @@ -309,7 +309,7 @@ assert filename and not "/" in filename assert PROGRESS_ID_KEY in file_data # human readable size - file_data["size_human"] = u"{:.6n} Mio".format( + file_data["size_human"] = "{:.6n} Mio".format( 
float(file_data["size"]) / (1024 ** 2) ) d = xml_tools.deferDialog(
--- a/sat/plugins/plugin_misc_forums.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_forums.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for pubsub forums @@ -29,8 +29,8 @@ import json log = getLogger(__name__) -NS_FORUMS = u'org.salut-a-toi.forums:0' -NS_FORUMS_TOPICS = NS_FORUMS + u'#topics' +NS_FORUMS = 'org.salut-a-toi.forums:0' +NS_FORUMS_TOPICS = NS_FORUMS + '#topics' PLUGIN_INFO = { C.PI_NAME: _("forums management"), @@ -42,16 +42,16 @@ C.PI_HANDLER: "no", C.PI_DESCRIPTION: _("""forums management plugin""") } -FORUM_ATTR = {u'title', u'name', u'main-language', u'uri'} -FORUM_SUB_ELTS = (u'short-desc', u'desc') -FORUM_TOPICS_NODE_TPL = u'{node}#topics_{uuid}' -FORUM_TOPIC_NODE_TPL = u'{node}_{uuid}' +FORUM_ATTR = {'title', 'name', 'main-language', 'uri'} +FORUM_SUB_ELTS = ('short-desc', 'desc') +FORUM_TOPICS_NODE_TPL = '{node}#topics_{uuid}' +FORUM_TOPIC_NODE_TPL = '{node}_{uuid}' class forums(object): def __init__(self, host): - log.info(_(u"forums plugin initialization")) + log.info(_("forums plugin initialization")) self.host = host self._m = self.host.plugins['XEP-0277'] self._p = self.host.plugins['XEP-0060'] @@ -67,19 +67,19 @@ host.bridge.addMethod("forumsGet", ".plugin", in_sign='ssss', out_sign='s', method=self._get, - async=True) + async_=True) host.bridge.addMethod("forumsSet", ".plugin", in_sign='sssss', out_sign='', method=self._set, - async=True) + async_=True) host.bridge.addMethod("forumTopicsGet", ".plugin", in_sign='ssa{ss}s', out_sign='(aa{ss}a{ss})', method=self._getTopics, - async=True) + async_=True) host.bridge.addMethod("forumTopicCreate", ".plugin", in_sign='ssa{ss}s', out_sign='', method=self._createTopic, - async=True) + async_=True) @defer.inlineCallbacks def _createForums(self, client, forums, service, node, forums_elt=None, names=None): @@ -94,48 +94,48 @@ @return (domish.Element): created forums """ if not isinstance(forums, list): - raise ValueError(_(u"forums arguments must be a list of forums")) + raise ValueError(_("forums arguments must be a list of forums")) if forums_elt is None: - forums_elt = domish.Element((NS_FORUMS, u'forums')) + forums_elt = domish.Element((NS_FORUMS, 'forums')) assert names is None names = set() else: - if names is None or forums_elt.name != u'forums': - raise exceptions.InternalError(u'invalid forums or names') + if names is None or forums_elt.name != 'forums': + raise exceptions.InternalError('invalid forums or names') assert names is not None for forum in forums: if not isinstance(forum, dict): - raise ValueError(_(u"A forum item must be a dictionary")) + raise ValueError(_("A forum item must be a dictionary")) forum_elt = forums_elt.addElement('forum') - for key, value in forum.iteritems(): - if key == u'name' and key in names: - raise exceptions.ConflictError(_(u"following forum name is not unique: {name}").format(name=key)) - if key == u'uri' and not value.strip(): - log.info(_(u"creating missing forum node")) + for key, value in forum.items(): + if key == 'name' and key in names: + raise exceptions.ConflictError(_("following forum name is not unique: {name}").format(name=key)) + if key == 'uri' and not value.strip(): + log.info(_("creating missing forum node")) forum_node = FORUM_TOPICS_NODE_TPL.format(node=node, uuid=shortuuid.uuid()) yield self._p.createNode(client, service, forum_node, self._node_options) - value = uri.buildXMPPUri(u'pubsub', + value = uri.buildXMPPUri('pubsub', path=service.full(), 
node=forum_node) if key in FORUM_ATTR: forum_elt[key] = value.strip() elif key in FORUM_SUB_ELTS: forum_elt.addElement(key, content=value) - elif key == u'sub-forums': - sub_forums_elt = forum_elt.addElement(u'forums') + elif key == 'sub-forums': + sub_forums_elt = forum_elt.addElement('forums') yield self._createForums(client, value, service, node, sub_forums_elt, names=names) else: - log.warning(_(u"Unknown forum attribute: {key}").format(key=key)) - if not forum_elt.getAttribute(u'title'): - name = forum_elt.getAttribute(u'name') + log.warning(_("Unknown forum attribute: {key}").format(key=key)) + if not forum_elt.getAttribute('title'): + name = forum_elt.getAttribute('name') if name: - forum_elt[u'title'] = name + forum_elt['title'] = name else: - raise ValueError(_(u"forum need a title or a name")) - if not forum_elt.getAttribute(u'uri') and not forum_elt.children: - raise ValueError(_(u"forum need uri or sub-forums")) + raise ValueError(_("forum need a title or a name")) + if not forum_elt.getAttribute('uri') and not forum_elt.children: + raise ValueError(_("forum need uri or sub-forums")) defer.returnValue(forums_elt) def _parseForums(self, parent_elt=None, forums=None): @@ -146,18 +146,18 @@ @return (list): parsed data @raise ValueError: item is invalid """ - if parent_elt.name == u'item': + if parent_elt.name == 'item': forums = [] try: - forums_elt = next(parent_elt.elements(NS_FORUMS, u'forums')) + forums_elt = next(parent_elt.elements(NS_FORUMS, 'forums')) except StopIteration: - raise ValueError(_(u"missing <forums> element")) + raise ValueError(_("missing <forums> element")) else: forums_elt = parent_elt if forums is None: - raise exceptions.InternalError(u'expected forums') + raise exceptions.InternalError('expected forums') if forums_elt.name != 'forums': - raise ValueError(_(u'Unexpected element: {xml}').format(xml=forums_elt.toXml())) + raise ValueError(_('Unexpected element: {xml}').format(xml=forums_elt.toXml())) for forum_elt in forums_elt.elements(): if forum_elt.name == 'forum': data = {} @@ -165,19 +165,19 @@ data[attrib] = forum_elt[attrib] unknown = set(forum_elt.attributes).difference(FORUM_ATTR) if unknown: - log.warning(_(u"Following attributes are unknown: {unknown}").format(unknown=unknown)) + log.warning(_("Following attributes are unknown: {unknown}").format(unknown=unknown)) for elt in forum_elt.elements(): if elt.name in FORUM_SUB_ELTS: - data[elt.name] = unicode(elt) - elif elt.name == u'forums': - sub_forums = data[u'sub-forums'] = [] + data[elt.name] = str(elt) + elif elt.name == 'forums': + sub_forums = data['sub-forums'] = [] self._parseForums(elt, sub_forums) - if not u'title' in data or not {u'uri', u'sub-forums'}.intersection(data): - log.warning(_(u"invalid forum, ignoring: {xml}").format(xml=forum_elt.toXml())) + if not 'title' in data or not {'uri', 'sub-forums'}.intersection(data): + log.warning(_("invalid forum, ignoring: {xml}").format(xml=forum_elt.toXml())) else: forums.append(data) else: - log.warning(_(u"unkown forums sub element: {xml}").format(xml=forum_elt)) + log.warning(_("unkown forums sub element: {xml}").format(xml=forum_elt)) return forums @@ -200,7 +200,7 @@ if node is None: node = NS_FORUMS if forums_key is None: - forums_key = u'default' + forums_key = 'default' items_data = yield self._p.getItems(client, service, node, item_ids=[forums_key]) item = items_data[0][0] # we have the item and need to convert it to json @@ -241,7 +241,7 @@ if node is None: node = NS_FORUMS if forums_key is None: - forums_key = u'default' + 
forums_key = 'default' forums_elt = yield self._createForums(client, forums, service, node) yield self._p.sendItem(client, service, node, forums_elt, item_id=forums_key) @@ -249,7 +249,7 @@ client = self.host.getClient(profile_key) extra = self._p.parseExtra(extra) d = self.getTopics(client, jid.JID(service), node, rsm_request=extra.rsm_request, extra=extra.extra) - d.addCallback(lambda(topics, metadata): (topics, {k: unicode(v) for k,v in metadata.iteritems()})) + d.addCallback(lambda topics_metadata: (topics_metadata[0], {k: str(v) for k,v in topics_metadata[1].items()})) return d @defer.inlineCallbacks @@ -262,11 +262,11 @@ topics = [] item_elts, metadata = topics_data for item_elt in item_elts: - topic_elt = next(item_elt.elements(NS_FORUMS, u'topic')) - title_elt = next(topic_elt.elements(NS_FORUMS, u'title')) - topic = {u'uri': topic_elt[u'uri'], - u'author': topic_elt[u'author'], - u'title': unicode(title_elt)} + topic_elt = next(item_elt.elements(NS_FORUMS, 'topic')) + title_elt = next(topic_elt.elements(NS_FORUMS, 'title')) + topic = {'uri': topic_elt['uri'], + 'author': topic_elt['author'], + 'title': str(title_elt)} topics.append(topic) defer.returnValue((topics, metadata)) @@ -277,21 +277,21 @@ @defer.inlineCallbacks def createTopic(self, client, service, node, mb_data): try: - title = mb_data[u'title'] - if not u'content' in mb_data: - raise KeyError(u'content') + title = mb_data['title'] + if not 'content' in mb_data: + raise KeyError('content') except KeyError as e: - raise exceptions.DataError(u"missing mandatory data: {key}".format(key=e.args[0])) + raise exceptions.DataError("missing mandatory data: {key}".format(key=e.args[0])) topic_node = FORUM_TOPIC_NODE_TPL.format(node=node, uuid=shortuuid.uuid()) yield self._p.createNode(client, service, topic_node, self._node_options) self._m.send(client, mb_data, service, topic_node) - topic_uri = uri.buildXMPPUri(u'pubsub', - subtype=u'microblog', + topic_uri = uri.buildXMPPUri('pubsub', + subtype='microblog', path=service.full(), node=topic_node) topic_elt = domish.Element((NS_FORUMS, 'topic')) - topic_elt[u'uri'] = topic_uri - topic_elt[u'author'] = client.jid.userhost() - topic_elt.addElement(u'title', content = title) + topic_elt['uri'] = topic_uri + topic_elt['author'] = client.jid.userhost() + topic_elt.addElement('title', content = title) yield self._p.sendItem(client, service, node, topic_elt)
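Note on the _getTopics callback rewrite above: Python 3 removed tuple parameter unpacking in function and lambda signatures (PEP 3113), so "lambda (topics, metadata): ..." is now a syntax error and the single-argument form with explicit indexing is used instead. A self-contained sketch with made-up data:

    result = ([{"title": "a topic"}], {"count": 1})

    callback = lambda topics_metadata: (
        topics_metadata[0],
        {k: str(v) for k, v in topics_metadata[1].items()},
    )
    print(callback(result))   # ([{'title': 'a topic'}], {'count': '1'})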
--- a/sat/plugins/plugin_misc_groupblog.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_groupblog.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for microbloging with roster access @@ -25,7 +25,7 @@ from twisted.internet import defer from sat.core import exceptions from wokkel import disco, data_form, iwokkel -from zope.interface import implements +from zope.interface import implementer try: from twisted.words.protocols.xmlstream import XMPPHandler @@ -78,12 +78,12 @@ client.server_groupblog_available = False log.warning( _( - u"Server is not able to manage item-access pubsub, we can't use group blog" + "Server is not able to manage item-access pubsub, we can't use group blog" ) ) else: client.server_groupblog_available = True - log.info(_(u"Server can manage group blogs")) + log.info(_("Server can manage group blogs")) def getFeatures(self, profile): try: @@ -119,8 +119,8 @@ if not groups: return if not client.server_groupblog_available: - raise exceptions.CancelError(u"GroupBlog is not available") - log.debug(u"This entry use group blog") + raise exceptions.CancelError("GroupBlog is not available") + log.debug("This entry use group blog") form = data_form.Form("submit", formNamespace=NS_PUBSUB_ITEM_CONFIG) access = data_form.Field( None, self._p.OPT_ACCESS_MODEL, value=self._p.ACCESS_PUBLISHER_ROSTER @@ -139,8 +139,8 @@ options[self._p.OPT_ACCESS_MODEL] = self._p.ACCESS_PUBLISHER_ROSTER options[self._p.OPT_ROSTER_GROUPS_ALLOWED] = mb_data['groups'] +@implementer(iwokkel.IDisco) class GroupBlog_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_GROUPBLOG)]
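Note on the "implements" to "@implementer" change above: zope.interface's class-body implements() call relies on frame magic that is not supported on Python 3, so interface declarations move to the class decorator. A minimal sketch (the IDisco interface below is a local stand-in for wokkel's iwokkel.IDisco; it assumes zope.interface is installed, which the backend already requires):

    from zope.interface import Interface, implementer

    class IDisco(Interface):
        """Stand-in for wokkel's iwokkel.IDisco."""

    @implementer(IDisco)              # replaces the old class-body implements(IDisco)
    class GroupBlogHandler(object):
        pass

    print(IDisco.implementedBy(GroupBlogHandler))   # True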
--- a/sat/plugins/plugin_misc_identity.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_identity.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0054 @@ -44,24 +44,24 @@ class Identity(object): def __init__(self, host): - log.info(_(u"Plugin Identity initialization")) + log.info(_("Plugin Identity initialization")) self.host = host - self._v = host.plugins[u"XEP-0054"] + self._v = host.plugins["XEP-0054"] host.bridge.addMethod( - u"identityGet", - u".plugin", - in_sign=u"ss", - out_sign=u"a{ss}", + "identityGet", + ".plugin", + in_sign="ss", + out_sign="a{ss}", method=self._getIdentity, - async=True, + async_=True, ) host.bridge.addMethod( - u"identitySet", - u".plugin", - in_sign=u"a{ss}s", - out_sign=u"", + "identitySet", + ".plugin", + in_sign="a{ss}s", + out_sign="", method=self._setIdentity, - async=True, + async_=True, ) def _getIdentity(self, jid_str, profile): @@ -84,30 +84,30 @@ # we first check roster roster_item = yield client.roster.getItem(jid_.userhostJID()) if roster_item is not None and roster_item.name: - id_data[u"nick"] = roster_item.name + id_data["nick"] = roster_item.name elif jid_.resource and self._v.isRoom(client, jid_): - id_data[u"nick"] = jid_.resource + id_data["nick"] = jid_.resource else: # and finally then vcard nick = yield self._v.getNick(client, jid_) if nick: - id_data[u"nick"] = nick + id_data["nick"] = nick elif jid_.user: - id_data[u"nick"] = jid_.user.capitalize() + id_data["nick"] = jid_.user.capitalize() else: - id_data[u"nick"] = jid_.userhost() + id_data["nick"] = jid_.userhost() try: - avatar_path = id_data[u"avatar"] = yield self._v.getAvatar( + avatar_path = id_data["avatar"] = yield self._v.getAvatar( client, jid_, cache_only=False ) except exceptions.NotFound: pass else: if avatar_path: - id_data[u"avatar_basename"] = os.path.basename(avatar_path) + id_data["avatar_basename"] = os.path.basename(avatar_path) else: - del id_data[u"avatar"] + del id_data["avatar"] defer.returnValue(id_data) @@ -122,7 +122,7 @@ - nick: nickname the vCard will be updated """ - if id_data.keys() != [u"nick"]: - raise NotImplementedError(u"Only nick can be updated for now") - if u"nick" in id_data: - return self._v.setNick(client, id_data[u"nick"]) + if list(id_data.keys()) != ["nick"]: + raise NotImplementedError("Only nick can be updated for now") + if "nick" in id_data: + return self._v.setNick(client, id_data["nick"])
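Note on the "list(id_data.keys()) != [...]" change above: on Python 3, dict.keys() returns a view object that never compares equal to a list, so the Python 2 comparison would have become always-true and raised NotImplementedError on every call. A toy illustration:

    id_data = {"nick": "louise"}
    print(id_data.keys() == ["nick"])         # False: a view never equals a list
    print(list(id_data.keys()) == ["nick"])   # True: the ported comparison
    print(id_data.keys() == {"nick"})         # True: views do compare equal to sets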
--- a/sat/plugins/plugin_misc_imap.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_imap.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing imap server @@ -29,10 +29,10 @@ from twisted.python import failure from email.parser import Parser import os -from cStringIO import StringIO +from io import StringIO from twisted.internet import reactor -from zope.interface import implements +from zope.interface import implementer PLUGIN_INFO = { C.PI_NAME: "IMAP server Plugin", @@ -75,8 +75,8 @@ reactor.listenTCP(port, self.server_factory) +@implementer(imap4.IMessage) class Message(object): - implements(imap4.IMessage) def __init__(self, uid, flags, mess_fp): log.debug("Message Init") @@ -112,10 +112,10 @@ should be omitted from the return value, rather than included. @return: A mapping of header field names to header field values """ - log.debug(u"getHeaders %s - %s" % (negate, names)) + log.debug("getHeaders %s - %s" % (negate, names)) final_dict = {} to_check = [name.lower() for name in names] - for header in self.message.keys(): + for header in list(self.message.keys()): if (negate and not header.lower() in to_check) or ( not negate and header.lower() in to_check ): @@ -150,13 +150,13 @@ return TypeError +@implementer(imap4.IMailbox) class SatMailbox(object): - implements(imap4.IMailbox) def __init__(self, host, name, profile): self.host = host self.listeners = set() - log.debug(u"Mailbox init (%s)" % name) + log.debug("Mailbox init (%s)" % name) if name != "INBOX": raise imap4.MailboxException("Only INBOX is managed for the moment") self.mailbox = self.host.plugins["Maildir"].accessMessageBox( @@ -187,7 +187,7 @@ @param message: The message sequence number @return: The UID of the message. """ - log.debug(u"getUID (%i)" % message) + log.debug("getUID (%i)" % message) # return self.mailbox.getUid(message-1) #XXX: it seems that this method get uid and not message sequence number return message @@ -243,7 +243,7 @@ @param listener: An object to add to the set of those which will be notified when the contents of this mailbox change. 
""" - log.debug(u"addListener %s" % listener) + log.debug("addListener %s" % listener) self.listeners.add(listener) def removeListener(self, listener): @@ -288,7 +288,7 @@ about @param uid: If true, the IDs specified in the query are UIDs; """ - log.debug(u"fetch (%s, %s)" % (messages, uid)) + log.debug("fetch (%s, %s)" % (messages, uid)) if uid: messages.last = self.mailbox.getMaxUid() messages.getnext = self.mailbox.getNextExistingUid @@ -412,20 +412,21 @@ return SatMailbox(self.host, name, self.profile) +@implementer(portal.IRealm) class ImapRealm(object): - implements(portal.IRealm) def __init__(self, host): self.host = host def requestAvatar(self, avatarID, mind, *interfaces): log.debug("requestAvatar") - profile = avatarID.decode("utf-8") + profile = avatarID if imap4.IAccount not in interfaces: raise NotImplementedError return imap4.IAccount, ImapSatAccount(self.host, profile), lambda: None +@implementer(checkers.ICredentialsChecker) class SatProfileCredentialChecker(object): """ This credential checker check against SàT's profile and associated jabber's password @@ -433,7 +434,6 @@ Return the profile as avatarId """ - implements(checkers.ICredentialsChecker) credentialInterfaces = ( credentials.IUsernamePassword, credentials.IUsernameHashedPassword, @@ -470,7 +470,7 @@ log.debug(_("IMAP server connection started")) def clientConnectionLost(self, connector, reason): - log.debug(_(u"IMAP server connection lost (reason: %s)"), reason) + log.debug(_("IMAP server connection lost (reason: %s)"), reason) def buildProtocol(self, addr): log.debug("Building protocol")
--- a/sat/plugins/plugin_misc_ip.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_ip.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for IP address discovery @@ -17,12 +17,12 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. +import urllib.parse from sat.core.i18n import _, D_ from sat.core.constants import Const as C from sat.core.log import getLogger - -log = getLogger(__name__) from sat.tools import xml_tools +from wokkel import disco, iwokkel from twisted.web import client as webclient from twisted.web import error as web_error from twisted.internet import defer @@ -30,17 +30,17 @@ from twisted.internet import protocol from twisted.internet import endpoints from twisted.internet import error as internet_error -from zope.interface import implements -from wokkel import disco, iwokkel +from zope.interface import implementer from twisted.words.protocols.jabber.xmlstream import XMPPHandler from twisted.words.protocols.jabber.error import StanzaError -import urlparse + +log = getLogger(__name__) try: import netifaces except ImportError: log.warning( - u"netifaces is not available, it help discovering IPs, you can install it on https://pypi.python.org/pypi/netifaces" + "netifaces is not available, it help discovering IPs, you can install it on https://pypi.python.org/pypi/netifaces" ) netifaces = None @@ -61,12 +61,12 @@ GET_IP_PAGE = ( "http://salut-a-toi.org/whereami/" ) # This page must only return external IP of the requester -GET_IP_LABEL = D_(u"Allow external get IP") +GET_IP_LABEL = D_("Allow external get IP") GET_IP_CATEGORY = "General" GET_IP_NAME = "allow_get_ip" -GET_IP_CONFIRM_TITLE = D_(u"Confirm external site request") +GET_IP_CONFIRM_TITLE = D_("Confirm external site request") GET_IP_CONFIRM = D_( - u"""To facilitate data transfer, we need to contact a website. + """To facilitate data transfer, we need to contact a website. A request will be done on {page} That means that administrators of {domain} can know that you use "{app_name}" and your IP Address. @@ -75,7 +75,7 @@ Do you agree to do this request ? 
""" ).format( - page=GET_IP_PAGE, domain=urlparse.urlparse(GET_IP_PAGE).netloc, app_name=C.APP_NAME + page=GET_IP_PAGE, domain=urllib.parse.urlparse(GET_IP_PAGE).netloc, app_name=C.APP_NAME ) NS_IP_CHECK = "urn:xmpp:sic:1" @@ -105,7 +105,7 @@ try: self._nat = host.plugins["NAT-PORT"] except KeyError: - log.debug(u"NAT port plugin not available") + log.debug("NAT port plugin not available") self._nat = None # XXX: cache is kept until SàT is restarted @@ -180,7 +180,7 @@ @param ext_utl(str): url to connect to @return (D(str)): return local IP """ - url = urlparse.urlparse(ext_url) + url = urllib.parse.urlparse(ext_url) port = url.port if port is None: if url.scheme == "http": @@ -188,10 +188,10 @@ elif url.scheme == "https": port = 443 else: - log.error(u"Unknown url scheme: {}".format(url.scheme)) + log.error("Unknown url scheme: {}".format(url.scheme)) defer.returnValue(None) if url.hostname is None: - log.error(u"Can't find url hostname for {}".format(GET_IP_PAGE)) + log.error("Can't find url hostname for {}".format(GET_IP_PAGE)) point = endpoints.TCP4ClientEndpoint(reactor, url.hostname, port) @@ -257,7 +257,7 @@ try: ip_tuple = yield self._getIPFromExternal(GET_IP_PAGE) except (internet_error.DNSLookupError, internet_error.TimeoutError): - log.warning(u"Can't access Domain Name System") + log.warning("Can't access Domain Name System") defer.returnValue(addresses or localhost) self._insertFirst(addresses, ip_tuple.local) defer.returnValue(addresses) @@ -274,24 +274,25 @@ # we first try with XEP-0279 ip_check = yield self.host.hasFeature(client, NS_IP_CHECK) if ip_check: - log.debug(u"Server IP Check available, we use it to retrieve our IP") + log.debug("Server IP Check available, we use it to retrieve our IP") iq_elt = client.IQ("get") + iq_elt['to'] = client.host iq_elt.addElement((NS_IP_CHECK, "address")) try: result_elt = yield iq_elt.send() - address_elt = result_elt.elements(NS_IP_CHECK, "address").next() - ip_elt = address_elt.elements(NS_IP_CHECK, "ip").next() + address_elt = next(result_elt.elements(NS_IP_CHECK, "address")) + ip_elt = next(address_elt.elements(NS_IP_CHECK, "ip")) except StopIteration: log.warning( - u"Server returned invalid result on XEP-0279 request, we ignore it" + "Server returned invalid result on XEP-0279 request, we ignore it" ) except StanzaError as e: - log.warning(u"error while requesting ip to server: {}".format(e)) + log.warning("error while requesting ip to server: {}".format(e)) else: # FIXME: server IP may not be the same as external IP (server can be on local machine or network) # IP should be checked to see if we have a local one, and rejected in this case external_ip = str(ip_elt) - log.debug(u"External IP found: {}".format(external_ip)) + log.debug("External IP found: {}".format(external_ip)) self._external_ip_cache = external_ip defer.returnValue(self._external_ip_cache) @@ -305,13 +306,14 @@ # and finally by requesting external website allow_get_ip = yield self._externalAllowed(client) try: - ip = (yield webclient.getPage(GET_IP_PAGE)) if allow_get_ip else None + ip = ((yield webclient.getPage(GET_IP_PAGE.encode('utf-8'))) + if allow_get_ip else None) except (internet_error.DNSLookupError, internet_error.TimeoutError): - log.warning(u"Can't access Domain Name System") + log.warning("Can't access Domain Name System") ip = None except web_error.Error as e: log.warning( - u"Error while retrieving IP on {url}: {message}".format( + "Error while retrieving IP on {url}: {message}".format( url=GET_IP_PAGE, message=e ) ) @@ -321,8 +323,8 @@ 
defer.returnValue(ip) +@implementer(iwokkel.IDisco) class IPPlugin_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_IP_CHECK)]
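Note on the "urlparse" to "urllib.parse" move above (the URL below is only an example); the parsing API itself is unchanged. Separately, the added GET_IP_PAGE.encode('utf-8') reflects that twisted.web.client.getPage has expected a bytes URL on Python 3, and that helper is deprecated in recent Twisted in favour of Agent or treq:

    from urllib.parse import urlparse

    url = urlparse("http://salut-a-toi.org/whereami/")
    print(url.scheme, url.hostname, url.port)   # http salut-a-toi.org None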
--- a/sat/plugins/plugin_misc_maildir.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_maildir.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing Maildir type mail boxes @@ -139,7 +139,7 @@ @param mailboxUser: MailboxUser instance""" if boxname not in self.__mailboxes: err_msg = _("Trying to remove an mailboxUser not referenced") - log.error(_(u"INTERNAL ERROR: ") + err_msg) + log.error(_("INTERNAL ERROR: ") + err_msg) raise MaildirError(err_msg) assert self.__mailboxes[profile][boxname] == mailboxUser del self.__mailboxes[profile][boxname] @@ -158,7 +158,7 @@ return self.data[profile][boxname] # the boxname MUST exist in the data except KeyError: err_msg = _("Boxname doesn't exist in internal data") - log.error(_(u"INTERNAL ERROR: ") + err_msg) + log.error(_("INTERNAL ERROR: ") + err_msg) raise MaildirError(err_msg) def getUid(self, boxname, message_id, profile): @@ -211,7 +211,7 @@ @param message_uid: unique integer identifier @return: unique id of the message as given by MaildirMailbox or None if not found""" box_data = self.__getBoxData(boxname, profile) - for message_id in box_data.keys(): # TODO: this is highly inefficient on big mailbox, must be replaced in the future + for message_id in list(box_data.keys()): # TODO: this is highly inefficient on big mailbox, must be replaced in the future if message_id == 'cur_idx': continue if box_data[message_id][0] == message_uid: @@ -248,7 +248,7 @@ @param flag: flag to check @return: list of id (as given by MaildirMailbox)""" box_data = self.__getBoxData(boxname, profile) - assert(isinstance(flag, basestring)) + assert(isinstance(flag, str)) flag = flag.upper() result = [] for key in box_data: @@ -296,22 +296,22 @@ @param boxname: name of the box which was observed @param signal: which signal was observed by the caller""" if (profile, boxname) not in self.__observed: - err_msg = _(u"Trying to remove an observer for an inexistant mailbox") - log.error(_(u"INTERNAL ERROR: ") + err_msg) + err_msg = _("Trying to remove an observer for an inexistant mailbox") + log.error(_("INTERNAL ERROR: ") + err_msg) raise MaildirError(err_msg) if signal not in self.__observed[(profile, boxname)]: - err_msg = _(u"Trying to remove an inexistant observer, no observer for this signal") - log.error(_(u"INTERNAL ERROR: ") + err_msg) + err_msg = _("Trying to remove an inexistant observer, no observer for this signal") + log.error(_("INTERNAL ERROR: ") + err_msg) raise MaildirError(err_msg) if not callback in self.__observed[(profile, boxname)][signal]: - err_msg = _(u"Trying to remove an inexistant observer") - log.error(_(u"INTERNAL ERROR: ") + err_msg) + err_msg = _("Trying to remove an inexistant observer") + log.error(_("INTERNAL ERROR: ") + err_msg) raise MaildirError(err_msg) self.__observed[(profile, boxname)][signal].remove(callback) def emitSignal(self, profile, boxname, signal_name): """Emit the signal to observer""" - log.debug(u'emitSignal %s %s %s' % (profile, boxname, signal_name)) + log.debug('emitSignal %s %s %s' % (profile, boxname, signal_name)) try: for observer_cb in self.__observed[(profile, boxname)][signal_name]: observer_cb() @@ -347,7 +347,7 @@ @param profile: real profile (ie not a profile_key) THIS OBJECT MUST NOT BE USED DIRECTLY: use MaildirBox.accessMessageBox instead""" if _maildir._checkBoxReference(name, profile): - log.error(u"INTERNAL ERROR: MailboxUser MUST NOT be instancied directly") + log.error("INTERNAL ERROR: 
MailboxUser MUST NOT be instancied directly") raise MaildirError('double MailboxUser instanciation') if name != "INBOX": raise NotImplementedError @@ -357,7 +357,7 @@ profile_path = self.maildir._getProfilePath(profile) full_profile_path = os.path.join(self.maildir.host.memory.getConfig('', 'local_dir'), 'maildir', profile_path) if not os.path.exists(full_profile_path): - os.makedirs(full_profile_path, 0700) + os.makedirs(full_profile_path, 0o700) mailbox_path = os.path.join(full_profile_path, MAILDIR_PATH) self.mailbox_path = mailbox_path self.mailbox = maildir.MaildirMailbox(mailbox_path) @@ -365,7 +365,7 @@ self.__uid_table_update() if observer: - log.debug(u"adding observer for %s (%s)" % (name, profile)) + log.debug("adding observer for %s (%s)" % (name, profile)) self.maildir.addObserver(observer, profile, name, "NEW_MESSAGE") def __uid_table_update(self): @@ -378,7 +378,7 @@ def __del__(self): if self.observer: - log.debug(u"removing observer for %s" % self.name) + log.debug("removing observer for %s" % self.name) self._maildir.removeObserver(self.observer, self.name, "NEW_MESSAGE") self.maildir._removeBoxAccess(self.name, self, profile=self.profile) @@ -490,7 +490,7 @@ Also purge the internal data of these messages """ for mess_id in self.getMessageIdsWithFlag("\\Deleted"): - print ("Deleting %s" % mess_id) + print(("Deleting %s" % mess_id)) self.mailbox.deleteMessage(self.getIdxFromId(mess_id)) self.mailbox = maildir.MaildirMailbox(self.mailbox_path) # We need to reparse the dir to have coherent indexing self.maildir.purgeDeleted(self.name, profile=self.profile)
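Note on two Python 3 details in the hunk above, shown standalone (paths and data are examples): the octal literal spelling "0700" is a syntax error on Python 3 and must be written "0o700", and "list(dict.keys())" is 2to3's conservative snapshot, which only matters when the loop mutates the dictionary:

    import os, tempfile

    target = os.path.join(tempfile.mkdtemp(), "maildir")
    os.makedirs(target, 0o700)                  # "0700" would be a SyntaxError

    box_data = {"cur_idx": 3, "msg-1": (10, []), "msg-2": (11, [])}
    for message_id in list(box_data.keys()):    # snapshot, safe even if the dict shrinks
        if message_id != "cur_idx":
            del box_data[message_id]
    print(box_data)                             # {'cur_idx': 3}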
--- a/sat/plugins/plugin_misc_merge_requests.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_merge_requests.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Pubsub Schemas @@ -23,8 +23,8 @@ from twisted.internet import defer from twisted.words.protocols.jabber import jid from collections import namedtuple -from sat.tools import utils from sat.core.log import getLogger + log = getLogger(__name__) NS_MERGE_REQUESTS = 'org.salut-a-toi.merge_requests:0' @@ -40,8 +40,8 @@ C.PI_DESCRIPTION: _("""Merge requests management plugin""") } -FIELD_DATA_TYPE = u'type' -FIELD_DATA = u'request_data' +FIELD_DATA_TYPE = 'type' +FIELD_DATA = 'request_data' MergeRequestHandler = namedtuple("MergeRequestHandler", ['name', @@ -52,49 +52,52 @@ class MergeRequests(object): - META_AUTHOR = u'author' - META_EMAIL = u'email' - META_TIMESTAMP = u'timestamp' - META_HASH = u'hash' - META_PARENT_HASH = u'parent_hash' - META_COMMIT_MSG = u'commit_msg' - META_DIFF = u'diff' + META_AUTHOR = 'author' + META_EMAIL = 'email' + META_TIMESTAMP = 'timestamp' + META_HASH = 'hash' + META_PARENT_HASH = 'parent_hash' + META_COMMIT_MSG = 'commit_msg' + META_DIFF = 'diff' # index of the diff in the whole data # needed to retrieve comments location - META_DIFF_IDX = u'diff_idx' + META_DIFF_IDX = 'diff_idx' def __init__(self, host): - log.info(_(u"Merge requests plugin initialization")) + log.info(_("Merge requests plugin initialization")) self.host = host host.registerNamespace('merge_requests', NS_MERGE_REQUESTS) - self._p = self.host.plugins[u"XEP-0060"] - self._s = self.host.plugins[u"PUBSUB_SCHEMA"] - self._t = self.host.plugins[u"TICKETS"] + self._p = self.host.plugins["XEP-0060"] + self._s = self.host.plugins["PUBSUB_SCHEMA"] + self._t = self.host.plugins["TICKETS"] self._handlers = {} self._handlers_list = [] # handlers sorted by priority self._type_handlers = {} # data type => handler map host.bridge.addMethod("mergeRequestsGet", ".plugin", in_sign='ssiassa{ss}s', out_sign='(asa{ss}aaa{ss})', method=self._get, - async=True + async_=True ) host.bridge.addMethod("mergeRequestSet", ".plugin", in_sign='ssssa{sas}ssss', out_sign='s', method=self._set, - async=True) + async_=True) host.bridge.addMethod("mergeRequestsSchemaGet", ".plugin", in_sign='sss', out_sign='s', - method=utils.partial(self._s._getUISchema, - default_node=NS_MERGE_REQUESTS), - async=True) + method=lambda service, nodeIdentifier, profile_key: + self._s._getUISchema(service, + nodeIdentifier, + default_node=NS_MERGE_REQUESTS, + profile_key=profile_key), + async_=True) host.bridge.addMethod("mergeRequestParseData", ".plugin", in_sign='ss', out_sign='aa{ss}', method=self._parseData, - async=True) + async_=True) host.bridge.addMethod("mergeRequestsImport", ".plugin", in_sign='ssssa{ss}s', out_sign='', method=self._import, - async=True + async_=True ) def register(self, name, handler, data_types, short_desc, priority=0): @@ -112,8 +115,8 @@ @aram data_types(list[unicode]): data types that his handler can generate or parse """ if name in self._handlers: - raise exceptions.ConflictError(_(u"a handler with name {name} already " - u"exists!").format(name = name)) + raise exceptions.ConflictError(_("a handler with name {name} already " + "exists!").format(name = name)) self._handlers[name] = MergeRequestHandler(name, handler, data_types, @@ -121,12 +124,12 @@ priority) self._handlers_list.append(name) self._handlers_list.sort(key=lambda name: 
self._handlers[name].priority) - if isinstance(data_types, basestring): + if isinstance(data_types, str): data_types = [data_types] for data_type in data_types: if data_type in self._type_handlers: - log.warning(_(u'merge requests of type {type} are already handled by ' - u'{old_handler}, ignoring {new_handler}').format( + log.warning(_('merge requests of type {type} are already handled by ' + '{old_handler}, ignoring {new_handler}').format( type = data_type, old_handler = self._type_handlers[data_type].name, new_handler = name)) @@ -141,10 +144,10 @@ service, node, max_items, sub_id, extra_dict, profile_key) d = self.get(client, service, node or None, max_items, item_ids, sub_id or None, extra.rsm_request, extra.extra) - d.addCallback(lambda (tickets, metadata, parsed_patches): ( - self._p.transItemsData((tickets, metadata)) + - ([[{key: unicode(value) for key, value in p.iteritems()} - for p in patches] for patches in parsed_patches],))) + d.addCallback(lambda tickets_metadata_parsed_patches: ( + self._p.transItemsData((tickets_metadata_parsed_patches[0], tickets_metadata_parsed_patches[1])) + + ([[{key: str(value) for key, value in p.items()} + for p in patches] for patches in tickets_metadata_parsed_patches[2]],))) return d @defer.inlineCallbacks @@ -167,7 +170,7 @@ # XXX: Q&D way to get list for labels when displaying them, but text when we # have to modify them if C.bool(extra.get('labels_as_list', C.BOOL_FALSE)): - filters = {u'labels': self._s.textbox2ListFilter} + filters = {'labels': self._s.textbox2ListFilter} else: filters = {} tickets_xmlui, metadata = yield self._s.getDataFormItems( @@ -191,16 +194,16 @@ defer.returnValue((tickets_xmlui, metadata, parsed_patches)) def _set(self, service, node, repository, method, values, schema=None, item_id=None, - extra=u"", profile_key=C.PROF_KEY_NONE): + extra="", profile_key=C.PROF_KEY_NONE): client, service, node, schema, item_id, extra = self._s.prepareBridgeSet( service, node, schema, item_id, extra, profile_key) d = self.set(client, service, node, repository, method, values, schema, item_id or None, extra, deserialise=True) - d.addCallback(lambda ret: ret or u'') + d.addCallback(lambda ret: ret or '') return d @defer.inlineCallbacks - def set(self, client, service, node, repository, method=u'auto', values=None, + def set(self, client, service, node, repository, method='auto', values=None, schema=None, item_id=None, extra=None, deserialise=False): """Publish a tickets @@ -221,51 +224,51 @@ if not repository and not update: # in case of update, we may re-user former patches data # so repository is not mandatory - raise exceptions.DataError(_(u"repository must be specified")) + raise exceptions.DataError(_("repository must be specified")) if FIELD_DATA in values: - raise exceptions.DataError(_(u"{field} is set by backend, you must not set " - u"it in frontend").format(field = FIELD_DATA)) + raise exceptions.DataError(_("{field} is set by backend, you must not set " + "it in frontend").format(field = FIELD_DATA)) if repository: - if method == u'auto': + if method == 'auto': for name in self._handlers_list: handler = self._handlers[name].handler can_handle = yield handler.check(repository) if can_handle: - log.info(_(u"{name} handler will be used").format(name=name)) + log.info(_("{name} handler will be used").format(name=name)) break else: - log.warning(_(u"repository {path} can't be handled by any installed " - u"handler").format( + log.warning(_("repository {path} can't be handled by any installed " + "handler").format( path = 
repository)) - raise exceptions.NotFound(_(u"no handler for this repository has " - u"been found")) + raise exceptions.NotFound(_("no handler for this repository has " + "been found")) else: try: handler = self._handlers[name].handler except KeyError: - raise exceptions.NotFound(_(u"No handler of this name found")) + raise exceptions.NotFound(_("No handler of this name found")) data = yield handler.export(repository) if not data.strip(): - raise exceptions.DataError(_(u'export data is empty, do you have any ' - u'change to send?')) + raise exceptions.DataError(_('export data is empty, do you have any ' + 'change to send?')) - if not values.get(u'title') or not values.get(u'body'): + if not values.get('title') or not values.get('body'): patches = yield handler.parse(data, values.get(FIELD_DATA_TYPE)) commits_msg = patches[-1][self.META_COMMIT_MSG] msg_lines = commits_msg.splitlines() - if not values.get(u'title'): - values[u'title'] = msg_lines[0] - if not values.get(u'body'): + if not values.get('title'): + values['title'] = msg_lines[0] + if not values.get('body'): ts = self.host.plugins['TEXT_SYNTAXES'] xhtml = yield ts.convert( - u'\n'.join(msg_lines[1:]), + '\n'.join(msg_lines[1:]), syntax_from = ts.SYNTAX_TEXT, syntax_to = ts.SYNTAX_XHTML, profile = client.profile) - values[u'body'] = '<div xmlns="{ns}">{xhtml}</div>'.format( + values['body'] = '<div xmlns="{ns}">{xhtml}</div>'.format( ns=C.NS_XHTML, xhtml=xhtml) values[FIELD_DATA] = data @@ -277,7 +280,7 @@ def _parseData(self, data_type, data): d = self.parseData(data_type, data) d.addCallback(lambda parsed_patches: - {key: unicode(value) for key, value in parsed_patches.iteritems()}) + {key: str(value) for key, value in parsed_patches.items()}) return d def parseData(self, data_type, data): @@ -292,7 +295,7 @@ try: handler = self._type_handlers[data_type] except KeyError: - raise exceptions.NotFound(_(u'No handler can handle data type "{type}"') + raise exceptions.NotFound(_('No handler can handle data type "{type}"') .format(type=data_type)) return defer.maybeDeferred(handler.handler.parse, data, data_type) @@ -326,9 +329,9 @@ try: handler = self._type_handlers[data_type] except KeyError: - raise exceptions.NotFound(_(u'No handler found to import {data_type}') + raise exceptions.NotFound(_('No handler found to import {data_type}') .format(data_type=data_type)) - log.info(_(u"Importing patch [{item_id}] using {name} handler").format( + log.info(_("Importing patch [{item_id}] using {name} handler").format( item_id = item, name = handler.name)) yield handler.handler.import_(repository, data, data_type, item, service, node,
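Note on the "basestring" to "str" check above: Python 3 has no basestring; a plain str test covers what used to be both str and unicode, while bytes deliberately stays excluded. A tiny self-test (function name and values invented):

    def normalise_data_types(data_types):
        if isinstance(data_types, str):        # was basestring on Python 2
            data_types = [data_types]
        return list(data_types)

    print(normalise_data_types("mercurial"))            # ['mercurial']
    print(normalise_data_types(["mercurial", "git"]))   # ['mercurial', 'git']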
--- a/sat/plugins/plugin_misc_nat-port.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_nat-port.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for NAT port mapping @@ -32,7 +32,7 @@ import miniupnpc except ImportError: raise exceptions.MissingModule( - u"Missing module MiniUPnPc, please download/install it (and its Python binding) at http://miniupnp.free.fr/ (or use pip install miniupnpc)" + "Missing module MiniUPnPc, please download/install it (and its Python binding) at http://miniupnp.free.fr/ (or use pip install miniupnpc)" ) @@ -47,7 +47,7 @@ STARTING_PORT = 6000 # starting point to automatically find a port DEFAULT_DESC = ( - u"SaT port mapping" + "SaT port mapping" ) # we don't use "à" here as some bugged NAT don't manage charset correctly @@ -74,23 +74,23 @@ def unload(self): if self._to_unmap: - log.info(u"Cleaning mapped ports") + log.info("Cleaning mapped ports") return threads.deferToThread(self._unmapPortsBlocking) def _init_failed(self, failure_): e = failure_.trap(exceptions.NotFound, exceptions.FeatureNotFound) if e == exceptions.FeatureNotFound: - log.info(u"UPnP-IGD seems to be not activated on the device") + log.info("UPnP-IGD seems to be not activated on the device") else: - log.info(u"UPnP-IGD not available") + log.info("UPnP-IGD not available") self._upnp = None def _discover(self): devices = self._upnp.discover() if devices: - log.info(u"{nb} UPnP-IGD device(s) found".format(nb=devices)) + log.info("{nb} UPnP-IGD device(s) found".format(nb=devices)) else: - log.info(u"Can't find UPnP-IGD device on the local network") + log.info("Can't find UPnP-IGD device on the local network") raise failure.Failure(exceptions.NotFound()) self._upnp.selectigd() try: @@ -119,7 +119,7 @@ self._mutex.acquire() try: for port, protocol in self._to_unmap: - log.info(u"Unmapping port {}".format(port)) + log.info("Unmapping port {}".format(port)) unmapping = self._upnp.deleteportmapping( # the last parameter is remoteHost, we don't use it port, @@ -129,7 +129,7 @@ if not unmapping: log.error( - u"Can't unmap port {port} ({protocol})".format( + "Can't unmap port {port} ({protocol})".format( port=port, protocol=protocol ) ) @@ -174,7 +174,7 @@ "", ) except Exception as e: - log.error(_(u"addportmapping error: {msg}").format(msg=e)) + log.error(_("addportmapping error: {msg}").format(msg=e)) raise failure.Failure(MappingError()) if not mapping: @@ -201,7 +201,7 @@ def mappingCb(ext_port): log.info( - u"{protocol} mapping from {int_port} to {ext_port} successful".format( + "{protocol} mapping from {int_port} to {ext_port} successful".format( protocol=protocol, int_port=int_port, ext_port=ext_port ) ) @@ -209,10 +209,10 @@ def mappingEb(failure_): failure_.trap(MappingError) - log.warning(u"Can't map internal {int_port}".format(int_port=int_port)) + log.warning("Can't map internal {int_port}".format(int_port=int_port)) def mappingUnknownEb(failure_): - log.error(_(u"error while trying to map ports: {msg}").format(msg=failure_)) + log.error(_("error while trying to map ports: {msg}").format(msg=failure_)) d = threads.deferToThread( self._mapPortBlocking, int_port, ext_port, protocol, desc
--- a/sat/plugins/plugin_misc_quiz.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_quiz.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing Quiz game @@ -190,7 +190,7 @@ """Convert a domish element with game_data to a dict""" game_data = {} for data_elt in game_data_elt.elements(): - game_data[data_elt.name] = unicode(data_elt) + game_data[data_elt.name] = str(data_elt) return game_data def __answer_result_to_signal_args(self, answer_result_elt): @@ -255,7 +255,7 @@ """Called when a player give an answer""" client = self.host.getClient(profile_key) log.debug( - u"new player answer (%(profile)s): %(answer)s" + "new player answer (%(profile)s): %(answer)s" % {"profile": client.profile, "answer": answer} ) mess = self.createGameElt(jid.JID(referee)) @@ -309,7 +309,7 @@ mess = self.createGameElt(room_jid) mess.firstChildElement().addChild( self.__ask_question( - game_data["question_id"], u"Quel est l'âge du capitaine ?", timer + game_data["question_id"], "Quel est l'âge du capitaine ?", timer ) ) client.send(mess) @@ -343,7 +343,7 @@ common_data = {"game_score": 0} new_game_data = { "instructions": _( - u"""Bienvenue dans cette partie rapide de quizz, le premier à atteindre le score de 9 remporte le jeu + """Bienvenue dans cette partie rapide de quizz, le premier à atteindre le score de 9 remporte le jeu Attention, tu es prêt ?""" ) @@ -366,7 +366,7 @@ if elt.name == "started": # new game created players = [] for player in elt.elements(): - players.append(unicode(player)) + players.append(str(player)) self.host.bridge.quizGameStarted( room_jid.userhost(), from_jid.full(), players, profile ) @@ -377,11 +377,11 @@ nb_players = len(self.games[room_jid]["players"]) status[player] = "ready" log.debug( - _(u"Player %(player)s is ready to start [status: %(status)s]") + _("Player %(player)s is ready to start [status: %(status)s]") % {"player": player, "status": status} ) if ( - status.values().count("ready") == nb_players + list(status.values()).count("ready") == nb_players ): # everybody is ready, we can start the game self.newGame(room_jid, profile) @@ -394,7 +394,7 @@ self.host.bridge.quizGameQuestion( room_jid.userhost(), elt["id"], - unicode(elt), + str(elt), int(elt["timer"]), profile, ) @@ -414,7 +414,7 @@ self.pauseTimer(room_jid) # and we send the player answer mess = self.createGameElt(room_jid) - _answer = unicode(elt) + _answer = str(elt) say_elt = mess.firstChildElement().addElement("player_says") say_elt["player"] = player say_elt.addContent(_answer) @@ -433,7 +433,7 @@ self.host.bridge.quizGamePlayerSays( room_jid.userhost(), elt["player"], - unicode(elt), + str(elt), int(elt["delay"]), profile, ) @@ -453,4 +453,4 @@ ) else: - log.error(_(u"Unmanaged game element: %s") % elt.name) + log.error(_("Unmanaged game element: %s") % elt.name)
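Note on the "list(status.values()).count(...)" change above: Python 3's dict.values() returns a view with no count() method, so the view has to be materialised first, or counted with a generator. Toy data:

    status = {"alice": "ready", "bob": "ready", "carol": "joined"}
    # status.values().count("ready")               # AttributeError on Python 3
    print(list(status.values()).count("ready"))    # 2, the ported form
    print(sum(1 for s in status.values() if s == "ready"))   # same count, no copy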
--- a/sat/plugins/plugin_misc_radiocol.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_radiocol.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing Radiocol @@ -39,7 +39,7 @@ from mutagen.id3 import ID3NoHeaderError except ImportError: raise exceptions.MissingModule( - u"Missing module Mutagen, please download/install from https://bitbucket.org/lazka/mutagen" + "Missing module Mutagen, please download/install from https://bitbucket.org/lazka/mutagen" ) @@ -95,7 +95,7 @@ in_sign="asss", out_sign="", method=self._prepareRoom, - async=True, + async_=True, ) host.bridge.addMethod( "radiocolCreate", @@ -110,7 +110,7 @@ in_sign="sss", out_sign="", method=self._radiocolSongAdded, - async=True, + async_=True, ) host.bridge.addSignal( "radiocolPlayers", ".plugin", signature="ssass" @@ -207,7 +207,7 @@ # ==> unlink done the Q&D way with the same host trick (see above) radio_data = self.games[room_jid] if len(radio_data["players"]) == 0: - log.debug(_(u"No more participants in the radiocol: cleaning data")) + log.debug(_("No more participants in the radiocol: cleaning data")) radio_data["queue"] = [] for filename in radio_data["to_delete"]: self.deleteFile(filename, radio_data) @@ -246,7 +246,7 @@ file_to_delete = radio_data["to_delete"][filename] except KeyError: log.error( - _(u"INTERNAL ERROR: can't find full path of the song to delete") + _("INTERNAL ERROR: can't find full path of the song to delete") ) return False else: @@ -255,7 +255,7 @@ unlink(file_to_delete) except OSError: log.error( - _(u"INTERNAL ERROR: can't find %s on the file system" % file_to_delete) + _("INTERNAL ERROR: can't find %s on the file system" % file_to_delete) ) return False return True @@ -285,7 +285,7 @@ ): # new game created and/or players list updated players = [] for player in elt.elements(): - players.append(unicode(player)) + players.append(str(player)) signal = ( self.host.bridge.radiocolStarted if elt.name == "started" @@ -350,7 +350,7 @@ # songs in queue. We can now start the party :) self.playNext(room_jid, profile) else: - log.error(_(u"Unmanaged game element: %s") % elt.name) + log.error(_("Unmanaged game element: %s") % elt.name) def getSyncDataForPlayer(self, room_jid, nick): game_data = self.games[room_jid]
--- a/sat/plugins/plugin_misc_register_account.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_register_account.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for registering a new XMPP account @@ -38,7 +38,7 @@ C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "RegisterAccount", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Register XMPP account"""), + C.PI_DESCRIPTION: _("""Register XMPP account"""), } @@ -46,7 +46,7 @@ # FIXME: this plugin is messy and difficult to read, it needs to be cleaned up and documented def __init__(self, host): - log.info(_(u"Plugin Register Account initialization")) + log.info(_("Plugin Register Account initialization")) self.host = host self._sessions = Sessions() host.registerCallback( @@ -62,22 +62,22 @@ # FIXME: following loop is overcomplicated, hard to read # FIXME: while used with parameters, hashed password is used and overwrite clear one - for param in (u"JabberID", u"Password", C.FORCE_PORT_PARAM, C.FORCE_SERVER_PARAM): + for param in ("JabberID", "Password", C.FORCE_PORT_PARAM, C.FORCE_SERVER_PARAM): try: session_data[param] = data[ - SAT_FORM_PREFIX + u"Connection" + SAT_PARAM_SEPARATOR + param + SAT_FORM_PREFIX + "Connection" + SAT_PARAM_SEPARATOR + param ] except KeyError: if param in (C.FORCE_PORT_PARAM, C.FORCE_SERVER_PARAM): session_data[param] = "" - for param in (u"JabberID", u"Password"): + for param in ("JabberID", "Password"): if not session_data[param]: - form_ui = xml_tools.XMLUI(u"popup", title=D_(u"Missing values")) + form_ui = xml_tools.XMLUI("popup", title=D_("Missing values")) form_ui.addText( - D_(u"No user JID or password given: can't register new account.") + D_("No user JID or password given: can't register new account.") ) - return {u"xmlui": form_ui.toXml()} + return {"xmlui": form_ui.toXml()} session_data["user"], host, resource = jid.parse(session_data["JabberID"]) session_data["server"] = session_data[C.FORCE_SERVER_PARAM] or host @@ -89,7 +89,7 @@ session_id=session_id, ) form_ui.addText( - D_(u"Do you want to register a new XMPP account {jid}?").format( + D_("Do you want to register a new XMPP account {jid}?").format( jid=session_data["JabberID"] ) ) @@ -130,7 +130,7 @@ def _registerNewAccount(self, client, jid_, password, email, server): # FIXME: port is not set here def registeredCb(__): - xmlui = xml_tools.XMLUI(u"popup", title=D_(u"Confirmation")) + xmlui = xml_tools.XMLUI("popup", title=D_("Confirmation")) xmlui.addText(D_("Registration successful.")) return {"xmlui": xmlui.toXml()}
--- a/sat/plugins/plugin_misc_room_game.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_room_game.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -27,7 +27,7 @@ from twisted.internet import defer from time import time from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer import copy try: @@ -64,13 +64,13 @@ """ # Values for self.invite_mode (who can invite after the game creation) - FROM_ALL, FROM_NONE, FROM_REFEREE, FROM_PLAYERS = xrange(0, 4) + FROM_ALL, FROM_NONE, FROM_REFEREE, FROM_PLAYERS = range(0, 4) # Values for self.wait_mode (for who we should wait before creating the game) - FOR_ALL, FOR_NONE = xrange(0, 2) + FOR_ALL, FOR_NONE = range(0, 2) # Values for self.join_mode (who can join the game - NONE means solo game) - ALL, INVITED, NONE = xrange(0, 3) + ALL, INVITED, NONE = range(0, 3) # Values for ready_mode (how to turn a MUC user into a player) - ASK, FORCE = xrange(0, 2) + ASK, FORCE = range(0, 2) MESSAGE = "/message" REQUEST = '%s/%s[@xmlns="%s"]' @@ -218,7 +218,7 @@ if not auth and (verbose or _DEBUG): log.debug( - _(u"%(user)s not allowed to join the game %(game)s in %(room)s") + _("%(user)s not allowed to join the game %(game)s in %(room)s") % { "user": user_jid.userhost() or nick, "game": self.name, @@ -377,7 +377,7 @@ auth = self.isPlayer(room_jid, nick) if not auth and (verbose or _DEBUG): log.debug( - _(u"%(user)s not allowed to invite for the game %(game)s in %(room)s") + _("%(user)s not allowed to invite for the game %(game)s in %(room)s") % {"user": nick, "game": self.name, "room": room_jid.userhost()} ) return auth @@ -431,7 +431,7 @@ if not result[0] and (verbose or _DEBUG): log.debug( _( - u"Still waiting for %(users)s before starting the game %(game)s in %(room)s" + "Still waiting for %(users)s before starting the game %(game)s in %(room)s" ) % { "users": result[2], @@ -469,7 +469,7 @@ """ # FIXME: need to be refactored client = self.host.getClient(profile_key) - log.debug(_(u"Preparing room for %s game") % self.name) + log.debug(_("Preparing room for %s game") % self.name) profile = self.host.memory.getProfileName(profile_key) if not profile: log.error(_("Unknown profile")) @@ -515,7 +515,7 @@ batch = len(self.invitations[room_jid]) - 1 if batch < 0: log.error( - u"Invitations from %s to play %s in %s have been lost!" + "Invitations from %s to play %s in %s have been lost!" 
% (profile_nick, self.name, room_jid.userhost()) ) return True @@ -572,7 +572,7 @@ user_nick = self.host.plugins["XEP-0045"].getRoomNick(room_jid, profile) if not user_nick: log.error( - u"Internal error: profile %s has not joined the room %s" + "Internal error: profile %s has not joined the room %s" % (profile, room_jid.userhost()) ) return False, False @@ -580,13 +580,13 @@ is_referee = self.isReferee(room_jid, user_nick) if self._gameExists(room_jid, started=True): log.info( - _(u"%(game)s game already created in room %(room)s") + _("%(game)s game already created in room %(room)s") % {"game": self.name, "room": room_jid.userhost()} ) return False, is_referee elif not is_referee: log.info( - _(u"%(game)s game in room %(room)s can only be created by %(user)s") + _("%(game)s game in room %(room)s can only be created by %(user)s") % {"game": self.name, "room": room_jid.userhost(), "user": user_nick} ) return False, False @@ -607,12 +607,12 @@ @param profile_key (unicode): %(doc_profile_key)s """ log.debug( - _(u"Creating %(game)s game in room %(room)s") + _("Creating %(game)s game in room %(room)s") % {"game": self.name, "room": room_jid} ) profile = self.host.memory.getProfileName(profile_key) if not profile: - log.error(_(u"profile %s is unknown") % profile_key) + log.error(_("profile %s is unknown") % profile_key) return (create, sync) = self._checkCreateGameAndInit(room_jid, profile) if nicks is None: @@ -643,9 +643,9 @@ """ profile = self.host.memory.getProfileName(profile_key) if not profile: - log.error(_(u"profile %s is unknown") % profile_key) + log.error(_("profile %s is unknown") % profile_key) return - log.debug(u"new player ready: %s" % profile) + log.debug("new player ready: %s" % profile) # TODO: we probably need to add the game and room names in the sent message self.send(referee_jid, "player_ready", {"player": player_nick}, profile=profile) @@ -658,7 +658,7 @@ - msg_elts: dict to map each user to his specific initialization message @param profile """ - log.debug(_(u"new round for %s game") % self.name) + log.debug(_("new round for %s game") % self.name) game_data = self.games[room_jid] players = game_data["players"] players_data = game_data["players_data"] @@ -763,8 +763,8 @@ return RoomGameHandler(self) +@implementer(iwokkel.IDisco) class RoomGameHandler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
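Note on the "xrange" to "range" change above: range() objects unpack exactly like Python 2's xrange() did for these enum-like constants, so the tuple assignments keep working unchanged; only iteration becomes lazy, which is irrelevant for fixed-size unpacking:

    FROM_ALL, FROM_NONE, FROM_REFEREE, FROM_PLAYERS = range(0, 4)
    print(FROM_ALL, FROM_REFEREE)   # 0 2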
--- a/sat/plugins/plugin_misc_smtp.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_smtp.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing smtp server @@ -33,7 +33,7 @@ from twisted.internet import reactor import sys -from zope.interface import implements +from zope.interface import implementer PLUGIN_INFO = { C.PI_NAME: "SMTP server Plugin", @@ -75,8 +75,8 @@ reactor.listenTCP(port, self.server_factory) +@implementer(smtp.IMessage) class SatSmtpMessage(object): - implements(smtp.IMessage) def __init__(self, host, profile): self.host = host @@ -103,7 +103,7 @@ except: exc_type, exc_value, exc_traceback = sys.exc_info() log.error( - _(u"Can't send message: %s") % exc_value + _("Can't send message: %s") % exc_value ) # The email is invalid or incorreclty parsed return defer.fail() self.message = None @@ -114,8 +114,8 @@ raise smtp.SMTPError +@implementer(smtp.IMessageDelivery) class SatSmtpDelivery(object): - implements(smtp.IMessageDelivery) def __init__(self, host, profile): self.host = host @@ -156,20 +156,21 @@ return origin +@implementer(portal.IRealm) class SmtpRealm(object): - implements(portal.IRealm) def __init__(self, host): self.host = host def requestAvatar(self, avatarID, mind, *interfaces): log.debug("requestAvatar") - profile = avatarID.decode("utf-8") + profile = avatarID if smtp.IMessageDelivery not in interfaces: raise NotImplementedError return smtp.IMessageDelivery, SatSmtpDelivery(self.host, profile), lambda: None +@implementer(checkers.ICredentialsChecker) class SatProfileCredentialChecker(object): """ This credential checker check against SàT's profile and associated jabber's password @@ -177,7 +178,6 @@ Return the profile as avatarId """ - implements(checkers.ICredentialsChecker) credentialInterfaces = ( credentials.IUsernamePassword, credentials.IUsernameHashedPassword, @@ -217,7 +217,7 @@ smtp.SMTPFactory.startedConnecting(self, connector) def clientConnectionLost(self, connector, reason): - log.debug(_(u"SMTP server connection lost (reason: %s)"), reason) + log.debug(_("SMTP server connection lost (reason: %s)"), reason) smtp.SMTPFactory.clientConnectionLost(self, connector, reason) def buildProtocol(self, addr):
--- a/sat/plugins/plugin_misc_static_blog.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_static_blog.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for static blogs @@ -47,7 +47,7 @@ class StaticBlog(object): - params = u""" + params = """ <params> <individual> <category name="{category_name}" label="{category_label}"> @@ -66,8 +66,8 @@ title_label=D_("Page title"), banner_name=C.STATIC_BLOG_PARAM_BANNER, banner_label=D_("Banner URL"), - background_name=u"Background", - background_label=D_(u"Background image URL"), + background_name="Background", + background_label=D_("Background image URL"), keywords_name=C.STATIC_BLOG_PARAM_KEYWORDS, keywords_label=D_("Keywords"), description_name=C.STATIC_BLOG_PARAM_DESCRIPTION,
--- a/sat/plugins/plugin_misc_tarot.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_tarot.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing French Tarot game @@ -87,7 +87,7 @@ in_sign="asss", out_sign="", method=self._prepareRoom, - async=True, + async_=True, ) # args: players, room_jid, profile host.bridge.addMethod( "tarotGameCreate", @@ -138,10 +138,10 @@ "tarotGameInvalidCards", ".plugin", signature="ssa(ss)a(ss)s" ) # args: room_jid, game phase, played_cards, invalid_cards, profile self.deck_ordered = [] - for value in ["excuse"] + map(str, range(1, 22)): + for value in ["excuse"] + list(map(str, list(range(1, 22)))): self.deck_ordered.append(TarotCard(("atout", value))) for suit in ["pique", "coeur", "carreau", "trefle"]: - for value in map(str, range(1, 11)) + ["valet", "cavalier", "dame", "roi"]: + for value in list(map(str, list(range(1, 11)))) + ["valet", "cavalier", "dame", "roi"]: self.deck_ordered.append(TarotCard((suit, value))) self.__choose_contrat_id = host.registerCallback( self._contratChoosed, with_data=True @@ -172,7 +172,7 @@ field = data_form.Field( "list-single", "contrat", - options=map(data_form.Option, self.contrats), + options=list(map(data_form.Option, self.contrats)), required=True, ) form.addField(field) @@ -271,7 +271,7 @@ players_data[pl_waiting]["levees"].append(card) log.debug( _( - u"Player %(excuse_owner)s give %(card_waited)s to %(player_waiting)s for Excuse compensation" + "Player %(excuse_owner)s give %(card_waited)s to %(player_waiting)s for Excuse compensation" ) % { "excuse_owner": player, @@ -311,7 +311,7 @@ players_data[winner]["levees"].append(low_card) log.debug( _( - u"Player %(excuse_owner)s give %(card_waited)s to %(player_waiting)s for Excuse compensation" + "Player %(excuse_owner)s give %(card_waited)s to %(player_waiting)s for Excuse compensation" ) % { "excuse_owner": excuse_player, @@ -325,7 +325,7 @@ players_data[excuse_player]["wait_for_low"] = winner log.debug( _( - u"%(excuse_owner)s keep the Excuse but has not card to give, %(winner)s is waiting for one" + "%(excuse_owner)s keep the Excuse but has not card to give, %(winner)s is waiting for one" ) % {"excuse_owner": excuse_player, "winner": winner} ) @@ -339,7 +339,7 @@ scores_str += "\n" for player in game_data["players"]: scores_str += _( - u"\n--\n%(player)s:\nscore for this game ==> %(score_game)i\ntotal score ==> %(total_score)i" + "\n--\n%(player)s:\nscore for this game ==> %(score_game)i\ntotal score ==> %(total_score)i" ) % { "player": player, "score_game": 0, @@ -420,7 +420,7 @@ loosers.append(player) scores_str = _( - u"The attacker (%(attaquant)s) makes %(points)i and needs to make %(point_limit)i (%(nb_bouts)s oulder%(plural)s%(separator)s%(bouts)s): (s)he %(victory)s" + "The attacker (%(attaquant)s) makes %(points)i and needs to make %(point_limit)i (%(nb_bouts)s oulder%(plural)s%(separator)s%(bouts)s): (s)he %(victory)s" ) % { "attaquant": game_data["attaquant"], "points": score, @@ -434,7 +434,7 @@ scores_str += "\n" for player in game_data["players"]: scores_str += _( - u"\n--\n%(player)s:\nscore for this game ==> %(score_game)i\ntotal score ==> %(total_score)i" + "\n--\n%(player)s:\nscore for this game ==> %(score_game)i\ntotal score ==> %(total_score)i" ) % { "player": player, "score_game": player_score[player], @@ -537,7 +537,7 @@ data = xml_tools.XMLUIResult2DataFormResult(raw_data) contrat = data["contrat"] log.debug( - _(u"contrat [%(contrat)s] 
choosed by %(profile)s") + _("contrat [%(contrat)s] choosed by %(profile)s") % {"contrat": contrat, "profile": profile} ) d = self.send( @@ -578,10 +578,10 @@ """ profile = self.host.memory.getProfileName(profile_key) if not profile: - log.error(_(u"profile %s is unknown") % profile_key) + log.error(_("profile %s is unknown") % profile_key) return log.debug( - _(u"Cards played by %(profile)s: [%(cards)s]") + _("Cards played by %(profile)s: [%(cards)s]") % {"profile": profile, "cards": cards} ) elem = self.__card_list_to_xml(TarotCard.from_tuples(cards), "cards_played") @@ -647,7 +647,7 @@ ): # new game created and/or players list updated players = [] for player in elt.elements(): - players.append(unicode(player)) + players.append(str(player)) signal = ( self.host.bridge.tarotGameStarted if elt.name == "started" @@ -661,11 +661,11 @@ nb_players = len(self.games[room_jid]["players"]) status[player] = "ready" log.debug( - _(u"Player %(player)s is ready to start [status: %(status)s]") + _("Player %(player)s is ready to start [status: %(status)s]") % {"player": player, "status": status} ) if ( - status.values().count("ready") == nb_players + list(status.values()).count("ready") == nb_players ): # everybody is ready, we can start the game self.newRound(room_jid, profile) @@ -689,7 +689,7 @@ # TODO: check we receive the contrat from the right person # TODO: use proper XEP-0004 way for answering form player = elt["player"] - players_data[player]["contrat"] = unicode(elt) + players_data[player]["contrat"] = str(elt) contrats = [players_data[p]["contrat"] for p in game_data["players"]] if contrats.count(None): # not everybody has choosed his contrat, it's next one turn @@ -720,7 +720,7 @@ game_data["status"][player] = "init" return log.debug( - _(u"%(player)s win the bid with %(contrat)s") + _("%(player)s win the bid with %(contrat)s") % {"player": best_contrat[0], "contrat": best_contrat[1]} ) game_data["contrat"] = best_contrat[1] @@ -825,7 +825,7 @@ if all(played): # everybody has played winner = self.__winner(game_data) - log.debug(_(u"The winner of this trick is %s") % winner) + log.debug(_("The winner of this trick is %s") % winner) # the winner win the trick self.__excuse_hack(game_data, played, winner) players_data[elt["player"]]["levees"].extend(played) @@ -861,13 +861,13 @@ self.host.bridge.tarotGameYourTurn(room_jid.userhost(), profile) elif elt.name == "score": - form_elt = elt.elements(name="x", uri="jabber:x:data").next() + form_elt = next(elt.elements(name="x", uri="jabber:x:data")) winners = [] loosers = [] for winner in elt.elements(name="winner", uri=NS_CG): - winners.append(unicode(winner)) + winners.append(str(winner)) for looser in elt.elements(name="looser", uri=NS_CG): - loosers.append(unicode(looser)) + loosers.append(str(looser)) form = data_form.Form.fromElement(form_elt) session_id, session_data = self._sessions.newSession(profile=profile) session_data["room_jid"] = room_jid @@ -880,10 +880,10 @@ elif elt.name == "error": if elt["type"] == "invalid_cards": played_cards = self.__xml_to_list( - elt.elements(name="played", uri=NS_CG).next() + next(elt.elements(name="played", uri=NS_CG)) ) invalid_cards = self.__xml_to_list( - elt.elements(name="invalid", uri=NS_CG).next() + next(elt.elements(name="invalid", uri=NS_CG)) ) self.host.bridge.tarotGameInvalidCards( room_jid.userhost(), @@ -893,9 +893,9 @@ profile, ) else: - log.error(_(u"Unmanaged error type: %s") % elt["type"]) + log.error(_("Unmanaged error type: %s") % elt["type"]) else: - log.error(_(u"Unmanaged card game 
element: %s") % elt.name) + log.error(_("Unmanaged card game element: %s") % elt.name) def getSyncDataForPlayer(self, room_jid, nick): return []
--- a/sat/plugins/plugin_misc_text_commands.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_text_commands.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing text commands @@ -57,8 +57,8 @@ # should be downloadable independently) HELP_SUGGESTION = _( - u"Type '/help' to get a list of the available commands. If you didn't want to " - u"use a command, please start your message with '//' to escape the slash." + "Type '/help' to get a list of the available commands. If you didn't want to " + "use a command, please start your message with '//' to escape the slash." ) def __init__(self, host): @@ -87,7 +87,7 @@ data = OrderedDict([("doc_short_help", ""), ("type", "all"), ("args", "")]) docstring = cmd.__doc__ if docstring is None: - log.warning(u"No docstring found for command {}".format(cmd_name)) + log.warning("No docstring found for command {}".format(cmd_name)) docstring = "" doc_data = docstring.split("\n") @@ -137,7 +137,7 @@ data["args"] = stripped[colon_idx + 1 :].strip() except InvalidCommandSyntax as e: log.warning( - u"Invalid command syntax for command {command}: {message}".format( + "Invalid command syntax for command {command}: {message}".format( command=cmd_name, message=e.message ) ) @@ -153,7 +153,7 @@ if attr.startswith("cmd_"): cmd = getattr(instance, attr) if not callable(cmd): - log.warning(_(u"Skipping not callable [%s] attribute") % attr) + log.warning(_("Skipping not callable [%s] attribute") % attr) continue cmd_name = attr[4:] if not cmd_name: @@ -165,7 +165,7 @@ new_name = cmd_name + str(suff) log.warning( _( - u"Conflict for command [{old_name}], renaming it to [{new_name}]" + "Conflict for command [{old_name}], renaming it to [{new_name}]" ).format(old_name=cmd_name, new_name=new_name) ) cmd_name = new_name @@ -216,9 +216,9 @@ except KeyError: try: # we have not default message, we try to take the first found - msg_lang, msg = mess_data["message"].iteritems().next() + msg_lang, msg = next(iter(mess_data["message"].items())) except StopIteration: - log.debug(u"No message found, skipping text commands") + log.debug("No message found, skipping text commands") return mess_data try: @@ -244,15 +244,15 @@ if ret: return mess_data else: - log.debug(u"text command detected ({})".format(command)) + log.debug("text command detected ({})".format(command)) raise failure.Failure(exceptions.CancelError()) def genericErrback(failure): try: - msg = u"with condition {}".format(failure.value.condition) + msg = "with condition {}".format(failure.value.condition) except AttributeError: - msg = u"with error {}".format(failure.value) - self.feedBack(client, u"Command failed {}".format(msg), mess_data) + msg = "with error {}".format(failure.value) + self.feedBack(client, "Command failed {}".format(msg), mess_data) return False mess_data["unparsed"] = msg[ @@ -280,7 +280,7 @@ command=command, context=context_txt ) self.feedBack( - client, u"{} {}".format(feedback, self.HELP_SUGGESTION), mess_data + client, "{} {}".format(feedback, self.HELP_SUGGESTION), mess_data ) log.debug("text command invalid message") raise failure.Failure(exceptions.CancelError()) @@ -318,7 +318,7 @@ if arg[-1] != "@": return jid.JID(arg) return jid.JID(arg + service_jid) - return jid.JID(u"%s@%s" % (arg, service_jid)) + return jid.JID("%s@%s" % (arg, service_jid)) def feedBack(self, client, message, mess_data, info_type=FEEDBACK_INFO_TYPE): """Give a message back to the user""" @@ -350,7 +350,7 @@ room = 
mess_data["to"].userhostJID() try: if self.host.plugins["XEP-0045"].isNickInRoom(client, room, entity): - entity = u"%s/%s" % (room, entity) + entity = "%s/%s" % (room, entity) except KeyError: log.warning("plugin XEP-0045 is not present") @@ -368,7 +368,7 @@ if not target_jid.resource: target_jid.resource = self.host.memory.getMainResource(client, target_jid) - whois_msg = [_(u"whois for %(jid)s") % {"jid": target_jid}] + whois_msg = [_("whois for %(jid)s") % {"jid": target_jid}] d = defer.succeed(None) for ignore, callback in self._whois: @@ -377,7 +377,7 @@ ) def feedBack(ignore): - self.feedBack(client, u"\n".join(whois_msg), mess_data) + self.feedBack(client, "\n".join(whois_msg), mess_data) return False d.addCallback(feedBack) @@ -390,11 +390,11 @@ @return (list[unicode]): help strings """ strings = [] - for doc_name, doc_help in cmd_data.iteritems(): + for doc_name, doc_help in cmd_data.items(): if doc_name.startswith("doc_arg_"): arg_name = doc_name[8:] strings.append( - u"- {name}: {doc_help}".format(name=arg_name, doc_help=_(doc_help)) + "- {name}: {doc_help}".format(name=arg_name, doc_help=_(doc_help)) ) return strings @@ -424,7 +424,7 @@ cmd_name = cmd_name[1:] if cmd_name and cmd_name not in self._commands: self.feedBack( - client, _(u"Invalid command name [{}]\n".format(cmd_name)), mess_data + client, _("Invalid command name [{}]\n".format(cmd_name)), mess_data ) cmd_name = "" if not cmd_name: @@ -445,17 +445,17 @@ ) ) - help_mess = _(u"Text commands available:\n%s") % (u"\n".join(help_cmds),) + help_mess = _("Text commands available:\n%s") % ("\n".join(help_cmds),) else: # we show detailled help for a command cmd_data = self._commands[cmd_name] syntax = cmd_data["args"] - help_mess = _(u"/{name}: {short_help}\n{syntax}{args_help}").format( + help_mess = _("/{name}: {short_help}\n{syntax}{args_help}").format( name=cmd_name, short_help=cmd_data["doc_short_help"], syntax=_(" " * 4 + "syntax: {}\n").format(syntax) if syntax else "", - args_help=u"\n".join( - [u" " * 8 + "{}".format(line) for line in self._getArgsHelp(cmd_data)] + args_help="\n".join( + [" " * 8 + "{}".format(line) for line in self._getArgsHelp(cmd_data)] ), )
--- a/sat/plugins/plugin_misc_text_syntaxes.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_text_syntaxes.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing various text syntaxes @@ -35,7 +35,7 @@ from lxml import etree except ImportError: raise exceptions.MissingModule( - u"Missing module lxml, please download/install it from http://lxml.de/" + "Missing module lxml, please download/install it from http://lxml.de/" ) log = getLogger(__name__) @@ -218,38 +218,38 @@ partial(markdown.markdown, extensions=[ EscapeHTML(), - u'nl2br', - u'codehilite', - u'fenced_code', - u'sane_lists', - u'tables', + 'nl2br', + 'codehilite', + 'fenced_code', + 'sane_lists', + 'tables', ], extension_configs = { - u"codehilite": { - u"css_class": "highlight", + "codehilite": { + "css_class": "highlight", } }), _html2text, [TextSyntaxes.OPT_DEFAULT], ) except ImportError: - log.warning(u"markdown or html2text not found, can't use Markdown syntax") + log.warning("markdown or html2text not found, can't use Markdown syntax") log.info( - u"You can download/install them from https://pythonhosted.org/Markdown/ and https://github.com/Alir3z4/html2text/" + "You can download/install them from https://pythonhosted.org/Markdown/ and https://github.com/Alir3z4/html2text/" ) host.bridge.addMethod( "syntaxConvert", ".plugin", in_sign="sssbs", out_sign="s", - async=True, + async_=True, method=self.convert, ) host.bridge.addMethod( "syntaxGet", ".plugin", in_sign="s", out_sign="s", method=self.getSyntax ) if xml_tools.cleanXHTML is None: - log.debug(u"Installing cleaning method") + log.debug("Installing cleaning method") xml_tools.cleanXHTML = self.cleanXHTML def _updateParamOptions(self): @@ -257,7 +257,7 @@ default_synt = TextSyntaxes.default_syntax syntaxes = [] - for syntax in data_synt.keys(): + for syntax in list(data_synt.keys()): flags = data_synt[syntax]["flags"] if TextSyntaxes.OPT_HIDDEN not in flags: syntaxes.append(syntax) @@ -267,9 +267,9 @@ for syntax in syntaxes: selected = 'selected="true"' if syntax == default_synt else "" - options.append(u'<option value="%s" %s/>' % (syntax, selected)) + options.append('<option value="%s" %s/>' % (syntax, selected)) - TextSyntaxes.params_data["options"] = u"\n".join(options) + TextSyntaxes.params_data["options"] = "\n".join(options) self.host.memory.updateParams(TextSyntaxes.params % TextSyntaxes.params_data) def getCurrentSyntax(self, profile): @@ -280,9 +280,9 @@ """ return self.host.memory.getParamA(NAME, CATEGORY, profile_key=profile) - def _logError(self, failure, action=u"converting syntax"): + def _logError(self, failure, action="converting syntax"): log.error( - u"Error while {action}: {failure}".format(action=action, failure=failure) + "Error while {action}: {failure}".format(action=action, failure=failure) ) return failure @@ -320,13 +320,13 @@ @return (unicode): cleaned XHTML """ - if isinstance(xhtml, basestring): + if isinstance(xhtml, str): try: xhtml_elt = html.fromstring(xhtml) except etree.ParserError as e: if not xhtml.strip(): - return u"" - log.error(u"Can't clean XHTML: {xhtml}".format(xhtml=xhtml)) + return "" + log.error("Can't clean XHTML: {xhtml}".format(xhtml=xhtml)) raise e elif isinstance(xhtml, html.HtmlElement): xhtml_elt = xhtml @@ -345,8 +345,8 @@ if element.tag in VOID_ELEMENTS: element.text = None else: - element.text = u'' - return html.tostring(xhtml_elt, encoding=unicode, method="xml") + element.text = '' + return 
html.tostring(xhtml_elt, encoding=str, method="xml") def convert( self, text, syntax_from, syntax_to=_SYNTAX_XHTML, safe=True, profile=None @@ -413,7 +413,7 @@ flags = flags if flags is not None else [] if TextSyntaxes.OPT_HIDDEN in flags and TextSyntaxes.OPT_DEFAULT in flags: raise ValueError( - u"{} and {} are mutually exclusive".format( + "{} and {} are mutually exclusive".format( TextSyntaxes.OPT_HIDDEN, TextSyntaxes.OPT_DEFAULT ) ) @@ -422,7 +422,7 @@ key = name.lower().strip() if key in syntaxes: raise exceptions.ConflictError( - u"This syntax key already exists: {}".format(key) + "This syntax key already exists: {}".format(key) ) syntaxes[key] = { "name": name, @@ -453,4 +453,4 @@ """ cleaner = clean.Cleaner(kill_tags=["style"]) cleaned = cleaner.clean_html(html.fromstring(xhtml)) - return html.tostring(cleaned, encoding=unicode, method="text") + return html.tostring(cleaned, encoding=str, method="text")
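Two recurring fixes show up in the text syntaxes plugin: `basestring` no longer exists, so XHTML input is tested against `str`, and lxml's `tostring(..., encoding=unicode)` becomes `encoding=str`, the sentinel that makes lxml return text instead of bytes. A quick check of that behaviour (requires lxml, which the plugin already depends on):

```python
from lxml import html

doc = html.fromstring("<p>Hello <b>world</b></p>")

# Passing the str type as encoding asks lxml for a text string...
as_text = html.tostring(doc, encoding=str, method="xml")
assert isinstance(as_text, str)

# ...whereas the default serialisation is bytes.
as_bytes = html.tostring(doc, method="xml")
assert isinstance(as_bytes, bytes)
```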
--- a/sat/plugins/plugin_misc_tickets.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_tickets.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Pubsub Schemas @@ -21,7 +21,6 @@ from sat.core.constants import Const as C from twisted.internet import defer from sat.tools.common import uri -from sat.tools import utils import shortuuid from sat.core.log import getLogger @@ -30,41 +29,47 @@ NS_TICKETS = "org.salut-a-toi.tickets:0" PLUGIN_INFO = { - C.PI_NAME: _(u"Tickets management"), - C.PI_IMPORT_NAME: u"TICKETS", - C.PI_TYPE: u"EXP", + C.PI_NAME: _("Tickets management"), + C.PI_IMPORT_NAME: "TICKETS", + C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"PUBSUB_SCHEMA", u"XEP-0277", u"IDENTITY"], - C.PI_MAIN: u"Tickets", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: _(u"""Tickets management plugin"""), + C.PI_DEPENDENCIES: ["XEP-0060", "PUBSUB_SCHEMA", "XEP-0277", "IDENTITY"], + C.PI_MAIN: "Tickets", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: _("""Tickets management plugin"""), } class Tickets(object): def __init__(self, host): - log.info(_(u"Tickets plugin initialization")) + log.info(_("Tickets plugin initialization")) self.host = host - host.registerNamespace(u"tickets", NS_TICKETS) - self._p = self.host.plugins[u"XEP-0060"] - self._s = self.host.plugins[u"PUBSUB_SCHEMA"] - self._m = self.host.plugins[u"XEP-0277"] + host.registerNamespace("tickets", NS_TICKETS) + self._p = self.host.plugins["XEP-0060"] + self._s = self.host.plugins["PUBSUB_SCHEMA"] + self._m = self.host.plugins["XEP-0277"] host.bridge.addMethod( - u"ticketsGet", - u".plugin", - in_sign=u"ssiassa{ss}s", - out_sign=u"(asa{ss})", - method=utils.partial( - self._s._get, + "ticketsGet", + ".plugin", + in_sign="ssiassa{ss}s", + out_sign="(asa{ss})", + method=lambda service, node, max_items, items_ids, sub_id, extra, profile_key: + self._s._get( + service, + node, + max_items, + items_ids, + sub_id, + extra, default_node=NS_TICKETS, form_ns=NS_TICKETS, filters={ - u"author": self._s.valueOrPublisherFilter, - u"created": self._s.dateFilter, - u"updated": self._s.dateFilter, + "author": self._s.valueOrPublisherFilter, + "created": self._s.dateFilter, + "updated": self._s.dateFilter, }, - ), - async=True, + profile_key=profile_key), + async_=True, ) host.bridge.addMethod( "ticketSet", @@ -72,18 +77,20 @@ in_sign="ssa{sas}ssss", out_sign="s", method=self._set, - async=True, + async_=True, ) host.bridge.addMethod( "ticketsSchemaGet", ".plugin", in_sign="sss", out_sign="s", - method=utils.partial(self._s._getUISchema, default_node=NS_TICKETS), - async=True, + method=lambda service, nodeIdentifier, profile_key: self._s._getUISchema( + service, nodeIdentifier, default_node=NS_TICKETS, + profile_key=profile_key), + async_=True, ) - def _set(self, service, node, values, schema=None, item_id=None, extra=u'', + def _set(self, service, node, values, schema=None, item_id=None, extra='', profile_key=C.PROF_KEY_NONE): client, service, node, schema, item_id, extra = self._s.prepareBridgeSet( service, node, schema, item_id, extra, profile_key @@ -91,7 +98,7 @@ d = self.set( client, service, node, values, schema, item_id, extra, deserialise=True ) - d.addCallback(lambda ret: ret or u"") + d.addCallback(lambda ret: ret or "") return d @defer.inlineCallbacks @@ -123,7 +130,7 @@ # advance (we don't want to set it ourselves to let the server choose, so we # can have a nicer id if serial ids is activated) comments_node = 
self._m.getCommentsNode( - node + u"_" + unicode(shortuuid.uuid()) + node + "_" + str(shortuuid.uuid()) ) options = { self._p.OPT_ACCESS_MODEL: self._p.ACCESS_OPEN, @@ -134,8 +141,8 @@ self._p.OPT_PUBLISH_MODEL: self._p.ACCESS_OPEN, } yield self._p.createNode(client, comments_service, comments_node, options) - values[u"comments_uri"] = uri.buildXMPPUri( - u"pubsub", + values["comments_uri"] = uri.buildXMPPUri( + "pubsub", subtype="microblog", path=comments_service.full(), node=comments_node,
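`async` is a reserved keyword from Python 3.7 on, so every bridge registration in these hunks passes `async_=True` instead of `async=True`. A sketch with a stand-in registration function (not the real bridge API):

```python
# "async" can no longer be a parameter name:
#     def add_method(name, async=False): ...   -> SyntaxError on Python 3.7+
def add_method(name, in_sign="", out_sign="", method=None, async_=False):
    """Illustrative stand-in for host.bridge.addMethod."""
    return {"name": name, "async": async_, "in": in_sign, "out": out_sign}

entry = add_method("ticketsGet", "ssiassa{ss}s", "(asa{ss})",
                   method=lambda *args: None, async_=True)
assert entry["async"] is True
```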
--- a/sat/plugins/plugin_misc_upload.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_upload.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for file tansfer @@ -41,8 +41,8 @@ } -UPLOADING = D_(u"Please select a file to upload") -UPLOADING_TITLE = D_(u"File upload") +UPLOADING = D_("Please select a file to upload") +UPLOADING_TITLE = D_("File upload") BOOL_OPTIONS = ("ignore_tls_errors",) @@ -58,7 +58,7 @@ in_sign="sssa{ss}s", out_sign="a{ss}", method=self._fileUpload, - async=True, + async_=True, ) self._upload_callbacks = [] @@ -97,12 +97,12 @@ and fail.value.condition == 'not-acceptable'): reason = fail.value.text else: - reason = unicode(fail.value) - msg = D_(u"Can't upload file: {reason}").format(reason=reason) + reason = str(fail.value) + msg = D_("Can't upload file: {reason}").format(reason=reason) log.warning(msg) return { "xmlui": xml_tools.note( - msg, D_(u"Can't upload file"), C.XMLUI_DATA_LVL_WARNING + msg, D_("Can't upload file"), C.XMLUI_DATA_LVL_WARNING ).toXml() } @@ -130,7 +130,7 @@ if options is None: options = {} if not os.path.isfile(filepath): - raise exceptions.DataError(u"The given path doesn't link to a file") + raise exceptions.DataError("The given path doesn't link to a file") for method_name, available_cb, upload_cb, priority in self._upload_callbacks: try: upload_jid = yield available_cb(upload_jid, client.profile) @@ -138,7 +138,7 @@ continue # no entity managing this extension found log.info( - u"{name} method will be used to upload the file".format(name=method_name) + "{name} method will be used to upload the file".format(name=method_name) ) progress_id_d, download_d = yield upload_cb( filepath, filename, upload_jid, options, client.profile @@ -146,7 +146,7 @@ progress_id = yield progress_id_d defer.returnValue((progress_id, download_d)) - raise exceptions.NotFound(u"Can't find any method to upload a file") + raise exceptions.NotFound("Can't find any method to upload a file") def register(self, method_name, available_cb, upload_cb, priority=0): """Register a fileUploading method @@ -167,7 +167,7 @@ for data in self._upload_callbacks: if method_name == data[0]: raise exceptions.ConflictError( - u"A method with this name is already registered" + "A method with this name is already registered" ) self._upload_callbacks.append((method_name, available_cb, upload_cb, priority)) self._upload_callbacks.sort(key=lambda data: data[3], reverse=True) @@ -177,4 +177,4 @@ if data[0] == method_name: del [idx] return - raise exceptions.NotFound(u"The name to unregister doesn't exist") + raise exceptions.NotFound("The name to unregister doesn't exist")
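Upload methods are stored as `(method_name, available_cb, upload_cb, priority)` tuples, sorted by priority with `reverse=True`, and the upload routine walks them until an availability callback accepts. A self-contained sketch of that selection logic with invented method names:

```python
callbacks = []

def register(name, available_cb, upload_cb, priority=0):
    """Illustrative registry mirroring the plugin's callback tuples."""
    if any(entry[0] == name for entry in callbacks):
        raise ValueError("A method with this name is already registered")
    callbacks.append((name, available_cb, upload_cb, priority))
    callbacks.sort(key=lambda entry: entry[3], reverse=True)

register("jingle", lambda jid: False, lambda path: None, priority=0)
register("http_upload", lambda jid: True, lambda path: "https://example.org/up",
         priority=10)

# Highest priority first; the first method whose availability callback accepts wins.
chosen = next(name for name, available, _cb, _prio in callbacks
              if available("upload.example.org"))
assert chosen == "http_upload"
assert [entry[0] for entry in callbacks] == ["http_upload", "jingle"]
```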
--- a/sat/plugins/plugin_misc_uri_finder.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_uri_finder.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to find URIs @@ -36,7 +36,7 @@ C.PI_DEPENDENCIES: [], C.PI_MAIN: "URIFinder", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: textwrap.dedent(_(u"""\ + C.PI_DESCRIPTION: textwrap.dedent(_("""\ Plugin to find URIs in well know location. This allows to retrieve settings to work with a project (e.g. pubsub node used for merge-requests). """)) @@ -49,12 +49,12 @@ class URIFinder(object): def __init__(self, host): - log.info(_(u"URI finder plugin initialization")) + log.info(_("URI finder plugin initialization")) self.host = host host.bridge.addMethod("URIFind", ".plugin", in_sign='sas', out_sign='a{sa{ss}}', method=self.find, - async=True) + async_=True) def find(self, path, keys): """Look for URI in well known locations @@ -64,31 +64,31 @@ e.g.: "tickets", "merge-requests" @return (dict[unicode, unicode]): map from key to found uri """ - keys_re = u'|'.join(keys) + keys_re = '|'.join(keys) label_re = r'"(?P<label>[^"]+)"' - uri_re = re.compile(ur'(?P<key>{keys_re})[ :]? +(?P<uri>xmpp:\S+)(?:.*use {label_re} label)?'.format( + uri_re = re.compile(r'(?P<key>{keys_re})[ :]? +(?P<uri>xmpp:\S+)(?:.*use {label_re} label)?'.format( keys_re=keys_re, label_re = label_re)) path = os.path.normpath(path) if not os.path.isdir(path) or not os.path.isabs(path): - raise ValueError(u'path must be an absolute path to a directory') + raise ValueError('path must be an absolute path to a directory') found_uris = {} - while path != u'/': + while path != '/': for filename in os.listdir(path): name, __ = os.path.splitext(filename) if name.lower() in SEARCH_FILES: file_path = os.path.join(path, filename) with open(file_path) as f: for m in uri_re.finditer(f.read()): - key = m.group(u'key') - uri = m.group(u'uri') - label = m.group(u'label') + key = m.group('key') + uri = m.group('uri') + label = m.group('label') if key in found_uris: - log.warning(_(u"Ignoring already found uri for key \"{key}\"").format(key=key)) + log.warning(_("Ignoring already found uri for key \"{key}\"").format(key=key)) else: - uri_data = found_uris[key] = {u'uri': uri} + uri_data = found_uris[key] = {'uri': uri} if label is not None: - uri_data[u'labels'] = json.dumps([label]) + uri_data['labels'] = json.dumps([label]) if found_uris: break path = os.path.dirname(path)
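`ur'...'` literals are a syntax error on Python 3 (the raw-unicode prefix was not brought back by PEP 414), so the URI pattern loses its `u`. A simplified, self-contained variant of the same kind of search; the regex below drops the optional label part of the plugin's pattern:

```python
import re

# ur'...' would be a SyntaxError on Python 3; r'...' is already text.
keys_re = "|".join(["tickets", "merge-requests"])
uri_re = re.compile(
    r'(?P<key>{keys_re})[ :]? +(?P<uri>xmpp:\S+)'.format(keys_re=keys_re))

readme = "tickets: xmpp:pubsub.example.org?;node=org.example.tickets"
match = uri_re.search(readme)
assert match.group("key") == "tickets"
assert match.group("uri").startswith("xmpp:")
```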
--- a/sat/plugins/plugin_misc_watched.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_watched.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin to be notified on some entities presence
--- a/sat/plugins/plugin_misc_welcome.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_welcome.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for file tansfer @@ -39,12 +39,12 @@ WELCOME_PARAM_CATEGORY = "General" WELCOME_PARAM_NAME = "welcome" -WELCOME_PARAM_LABEL = D_(u"Display welcome message") -WELCOME_MSG_TITLE = D_(u"Welcome to Libervia/Salut à Toi") +WELCOME_PARAM_LABEL = D_("Display welcome message") +WELCOME_MSG_TITLE = D_("Welcome to Libervia/Salut à Toi") # XXX: this message is mainly targetting libervia new users for now # (i.e.: it may look weird on other frontends) WELCOME_MSG = D_( - u"""Welcome to a free (as in freedom) network! + """Welcome to a free (as in freedom) network! If you have any trouble, or you want to help us for the bug hunting, you can contact us in real time chat by using the “Help / Official chat room” menu.
--- a/sat/plugins/plugin_misc_xmllog.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_xmllog.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing raw XML log @@ -33,7 +33,7 @@ C.PI_DEPENDENCIES: [], C.PI_MAIN: "XmlLog", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Send raw XML logs to bridge"""), + C.PI_DESCRIPTION: _("""Send raw XML logs to bridge"""), } @@ -66,17 +66,17 @@ if self.do_log: receive_hooks.append(partial(self.onReceive, client=client)) send_hooks.append(partial(self.onSend, client=client)) - log.info(_(u"XML log activated")) + log.info(_("XML log activated")) return True def onReceive(self, element, client): self.host.bridge.xmlLog("IN", element.toXml(), client.profile) def onSend(self, obj, client): - if isinstance(obj, basestring): - log = unicode(obj) + if isinstance(obj, str): + log = str(obj) elif isinstance(obj, domish.Element): log = obj.toXml() else: - log.error(_(u"INTERNAL ERROR: Unmanaged XML type")) + log.error(_("INTERNAL ERROR: Unmanaged XML type")) self.host.bridge.xmlLog("OUT", log, client.profile)
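`basestring` disappears here too. Note that `onSend` keeps the serialised stanza in a local variable named `log`, which shadows the module-level logger for the whole function, so the `else` branch would raise `UnboundLocalError` rather than log. A sketch of the same dispatch with a separately named result:

```python
from twisted.words.xish import domish

def serialise_for_log(obj):
    """Sketch: keep the serialised form in its own variable so the module
    logger is not shadowed; raise instead of logging for the unknown case."""
    if isinstance(obj, str):
        return obj
    elif isinstance(obj, domish.Element):
        return obj.toXml()
    raise TypeError("Unmanaged XML type: %r" % type(obj))

elt = domish.Element((None, "presence"))
assert serialise_for_log(elt).startswith("<presence")
assert serialise_for_log("<presence/>") == "<presence/>"
```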
--- a/sat/plugins/plugin_sec_otr.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_sec_otr.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for OTR encryption @@ -21,49 +21,50 @@ # (https://blog.darmasoft.net/2013/06/30/using-pure-python-otr.html) # this implentation is based on it +import copy +import time +import uuid +from binascii import hexlify, unhexlify from sat.core.i18n import _, D_ from sat.core.constants import Const as C from sat.core.log import getLogger from sat.core import exceptions - -log = getLogger(__name__) from sat.tools import xml_tools from twisted.words.protocols.jabber import jid from twisted.python import failure from twisted.internet import defer from sat.memory import persistent import potr -import copy -import time -import uuid + +log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"OTR", - C.PI_IMPORT_NAME: u"OTR", - C.PI_TYPE: u"SEC", - C.PI_PROTOCOLS: [u"XEP-0364"], - C.PI_DEPENDENCIES: [u"XEP-0280", u"XEP-0334"], - C.PI_MAIN: u"OTR", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: _(u"""Implementation of OTR"""), + C.PI_NAME: "OTR", + C.PI_IMPORT_NAME: "OTR", + C.PI_TYPE: "SEC", + C.PI_PROTOCOLS: ["XEP-0364"], + C.PI_DEPENDENCIES: ["XEP-0280", "XEP-0334"], + C.PI_MAIN: "OTR", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: _("""Implementation of OTR"""), } NS_OTR = "urn:xmpp:otr:0" PRIVATE_KEY = "PRIVATE KEY" -OTR_MENU = D_(u"OTR") +OTR_MENU = D_("OTR") AUTH_TXT = D_( - u"To authenticate your correspondent, you need to give your below fingerprint " - u"*BY AN EXTERNAL CANAL* (i.e. not in this chat), and check that the one he gives " - u"you is the same as below. If there is a mismatch, there can be a spy between you!" + "To authenticate your correspondent, you need to give your below fingerprint " + "*BY AN EXTERNAL CANAL* (i.e. not in this chat), and check that the one he gives " + "you is the same as below. If there is a mismatch, there can be a spy between you!" ) DROP_TXT = D_( - u"You private key is used to encrypt messages for your correspondent, nobody except " - u"you must know it, if you are in doubt, you should drop it!\n\nAre you sure you " - u"want to drop your private key?" + "You private key is used to encrypt messages for your correspondent, nobody except " + "you must know it, if you are in doubt, you should drop it!\n\nAre you sure you " + "want to drop your private key?" 
) # NO_LOG_AND = D_(u"/!\\Your history is not logged anymore, and") # FIXME: not used at the moment -NO_ADV_FEATURES = D_(u"Some of advanced features are disabled !") +NO_ADV_FEATURES = D_("Some of advanced features are disabled !") DEFAULT_POLICY_FLAGS = {"ALLOW_V1": False, "ALLOW_V2": True, "REQUIRE_ENCRYPTION": True} @@ -107,14 +108,14 @@ message data when an encrypted message is going to be sent """ assert isinstance(self.peer, jid.JID) - msg = msg_str.decode("utf-8") + msg = msg_str client = self.user.client - log.debug(u"injecting encrypted message to {to}".format(to=self.peer)) + log.debug("injecting encrypted message to {to}".format(to=self.peer)) if appdata is None: mess_data = { "from": client.jid, "to": self.peer, - "uid": unicode(uuid.uuid4()), + "uid": str(uuid.uuid4()), "message": {"": msg}, "subject": {}, "type": "chat", @@ -122,15 +123,15 @@ "timestamp": time.time(), } client.generateMessageXML(mess_data) - xml = mess_data[u'xml'] + xml = mess_data['xml'] self._p_carbons.setPrivate(xml) self._p_hints.addHintElements(xml, [ self._p_hints.HINT_NO_COPY, self._p_hints.HINT_NO_PERMANENT_STORE]) client.send(mess_data["xml"]) else: - message_elt = appdata[u"xml"] - assert message_elt.name == u"message" + message_elt = appdata["xml"] + assert message_elt.name == "message" message_elt.addElement("body", content=msg) def stopCb(self, __, feedback): @@ -143,7 +144,7 @@ def stopEb(self, failure_): # encryption may be already stopped in case of manual stop if not failure_.check(exceptions.NotFound): - log.error(u"Error while stopping OTR encryption: {msg}".format(msg=failure_)) + log.error("Error while stopping OTR encryption: {msg}".format(msg=failure_)) def isTrusted(self): # we have to check value because potr code says that a 2-tuples should be @@ -151,10 +152,10 @@ trusted = self.getCurrentTrust() if trusted is None: return False - elif trusted == u'trusted': + elif trusted == 'trusted': return True else: - log.error(u"Unexpected getCurrentTrust() value: {value}".format( + log.error("Unexpected getCurrentTrust() value: {value}".format( value=trusted)) return False @@ -162,10 +163,10 @@ client = self.user.client old_state = self.state super(Context, self).setState(state) - log.debug(u"setState: %s (old_state=%s)" % (state, old_state)) + log.debug("setState: %s (old_state=%s)" % (state, old_state)) if state == potr.context.STATE_PLAINTEXT: - feedback = _(u"/!\\ conversation with %(other_jid)s is now UNENCRYPTED") % { + feedback = _("/!\\ conversation with %(other_jid)s is now UNENCRYPTED") % { "other_jid": self.peer.full() } d = client.encryption.stop(self.peer, NS_OTR) @@ -178,16 +179,16 @@ trusted = self.isTrusted() except TypeError: trusted = False - trusted_str = _(u"trusted") if trusted else _(u"untrusted") + trusted_str = _("trusted") if trusted else _("untrusted") if old_state == potr.context.STATE_ENCRYPTED: feedback = D_( - u"{trusted} OTR conversation with {other_jid} REFRESHED" + "{trusted} OTR conversation with {other_jid} REFRESHED" ).format(trusted=trusted_str, other_jid=self.peer.full()) else: feedback = D_( - u"{trusted} encrypted OTR conversation started with {other_jid}\n" - u"{extra_info}" + "{trusted} encrypted OTR conversation started with {other_jid}\n" + "{extra_info}" ).format( trusted=trusted_str, other_jid=self.peer.full(), @@ -197,7 +198,7 @@ OTR_STATE_ENCRYPTED, self.peer.full(), client.profile ) elif state == potr.context.STATE_FINISHED: - feedback = D_(u"OTR conversation with {other_jid} is FINISHED").format( + feedback = D_("OTR conversation with 
{other_jid} is FINISHED").format( other_jid=self.peer.full() ) d = client.encryption.stop(self.peer, NS_OTR) @@ -205,7 +206,7 @@ d.addErrback(self.stopEb) return else: - log.error(D_(u"Unknown OTR state")) + log.error(D_("Unknown OTR state")) return client.feedback(self.peer, feedback) @@ -231,22 +232,22 @@ # TODO: manage explicit message encryption def __init__(self, host, client): - log.debug(u"new account: %s" % client.jid) + log.debug("new account: %s" % client.jid) if not client.jid.resource: log.warning("Account created without resource") - super(Account, self).__init__(unicode(client.jid), "xmpp", 1024) + super(Account, self).__init__(str(client.jid), "xmpp", 1024) self.host = host self.client = client def loadPrivkey(self): - log.debug(u"loadPrivkey") + log.debug("loadPrivkey") return self.privkey def savePrivkey(self): - log.debug(u"savePrivkey") + log.debug("savePrivkey") if self.privkey is None: - raise exceptions.InternalError(_(u"Save is called but privkey is None !")) - priv_key = self.privkey.serializePrivateKey().encode("hex") + raise exceptions.InternalError(_("Save is called but privkey is None !")) + priv_key = hexlify(self.privkey.serializePrivateKey()) d = self.host.memory.encryptValue(priv_key, self.client.profile) def save_encrypted_key(encrypted_priv_key): @@ -256,18 +257,18 @@ def loadTrusts(self): trust_data = self.client._otr_data.get("trust", {}) - for jid_, jid_data in trust_data.iteritems(): - for fingerprint, trust_level in jid_data.iteritems(): + for jid_, jid_data in trust_data.items(): + for fingerprint, trust_level in jid_data.items(): log.debug( - u'setting trust for {jid}: [{fingerprint}] = "{trust_level}"'.format( + 'setting trust for {jid}: [{fingerprint}] = "{trust_level}"'.format( jid=jid_, fingerprint=fingerprint, trust_level=trust_level ) ) self.trusts.setdefault(jid.JID(jid_), {})[fingerprint] = trust_level def saveTrusts(self): - log.debug(u"saving trusts for {profile}".format(profile=self.client.profile)) - log.debug(u"trusts = {}".format(self.client._otr_data["trust"])) + log.debug("saving trusts for {profile}".format(profile=self.client.profile)) + log.debug("trusts = {}".format(self.client._otr_data["trust"])) self.client._otr_data.force("trust") def setTrust(self, other_jid, fingerprint, trustLevel): @@ -299,23 +300,23 @@ return context def getContextForUser(self, other): - log.debug(u"getContextForUser [%s]" % other) + log.debug("getContextForUser [%s]" % other) if not other.resource: - log.warning(u"getContextForUser called with a bare jid: %s" % other.full()) + log.warning("getContextForUser called with a bare jid: %s" % other.full()) return self.startContext(other) class OTR(object): def __init__(self, host): - log.info(_(u"OTR plugin initialization")) + log.info(_("OTR plugin initialization")) self.host = host self.context_managers = {} self.skipped_profiles = ( set() ) # FIXME: OTR should not be skipped per profile, this need to be refactored - self._p_hints = host.plugins[u"XEP-0334"] - self._p_carbons = host.plugins[u"XEP-0280"] + self._p_hints = host.plugins["XEP-0334"] + self._p_carbons = host.plugins["XEP-0280"] host.trigger.add("MessageReceived", self.MessageReceivedTrigger, priority=100000) host.trigger.add("sendMessage", self.sendMessageTrigger, priority=100000) host.trigger.add("sendMessageData", self._sendMessageDataTrigger) @@ -355,7 +356,7 @@ # type_=C.MENU_SINGLE, # ) host.trigger.add("presence_received", self._presenceReceivedTrigger) - self.host.registerEncryptionPlugin(self, u"OTR", NS_OTR, directed=True) + 
self.host.registerEncryptionPlugin(self, "OTR", NS_OTR, directed=True) def _skipOTR(self, profile): """Tell the backend to not handle OTR for this profile. @@ -380,7 +381,7 @@ encrypted_priv_key, client.profile ) ctxMng.account.privkey = potr.crypt.PK.parsePrivateKey( - priv_key.decode("hex") + unhexlify(priv_key.encode('utf-8')) )[0] else: ctxMng.account.privkey = None @@ -390,7 +391,7 @@ if client.profile in self.skipped_profiles: self.skipped_profiles.remove(client.profile) return - for context in client._otr_context_manager.contexts.values(): + for context in list(client._otr_context_manager.contexts.values()): context.disconnect() del client._otr_context_manager @@ -419,12 +420,12 @@ dialog_opt={ C.XMLUI_DATA_TYPE: C.XMLUI_DIALOG_MESSAGE, C.XMLUI_DATA_MESS: _( - u"You have no private key yet, start an OTR conversation to " - u"have one" + "You have no private key yet, start an OTR conversation to " + "have one" ), C.XMLUI_DATA_LVL: C.XMLUI_DATA_LVL_WARNING, }, - title=_(u"No private key"), + title=_("No private key"), ) return dialog @@ -437,12 +438,12 @@ dialog_opt={ C.XMLUI_DATA_TYPE: C.XMLUI_DIALOG_MESSAGE, C.XMLUI_DATA_MESS: _( - u"Your fingerprint is:\n{fingerprint}\n\n" - u"Start an OTR conversation to have your correspondent one." + "Your fingerprint is:\n{fingerprint}\n\n" + "Start an OTR conversation to have your correspondent one." ).format(fingerprint=priv_key), C.XMLUI_DATA_LVL: C.XMLUI_DATA_LVL_INFO, }, - title=_(u"Fingerprint"), + title=_("Fingerprint"), ) return dialog @@ -453,13 +454,13 @@ data = xml_tools.XMLUIResult2DataFormResult(raw_data) if data["match"] == "yes": otrctx.setCurrentTrust(OTR_STATE_TRUSTED) - note_msg = _(u"Your correspondent {correspondent} is now TRUSTED") + note_msg = _("Your correspondent {correspondent} is now TRUSTED") self.host.bridge.otrState( OTR_STATE_TRUSTED, entity_jid.full(), client.profile ) else: otrctx.setCurrentTrust("") - note_msg = _(u"Your correspondent {correspondent} is now UNTRUSTED") + note_msg = _("Your correspondent {correspondent} is now UNTRUSTED") self.host.bridge.otrState( OTR_STATE_UNTRUSTED, entity_jid.full(), client.profile ) @@ -477,22 +478,22 @@ xmlui = xml_tools.XMLUI( C.XMLUI_FORM, - title=_(u"Authentication ({entity_jid})").format(entity_jid=entity_jid.full()), + title=_("Authentication ({entity_jid})").format(entity_jid=entity_jid.full()), submit_id=submit_id, ) xmlui.addText(_(AUTH_TXT)) xmlui.addDivider() xmlui.addText( - D_(u"Your own fingerprint is:\n{fingerprint}").format(fingerprint=priv_key) + D_("Your own fingerprint is:\n{fingerprint}").format(fingerprint=priv_key) ) xmlui.addText( - D_(u"Your correspondent fingerprint should be:\n{fingerprint}").format( + D_("Your correspondent fingerprint should be:\n{fingerprint}").format( fingerprint=other_fingerprint ) ) xmlui.addDivider("blank") xmlui.changeContainer("pairs") - xmlui.addLabel(D_(u"Is your correspondent fingerprint the same as here ?")) + xmlui.addLabel(D_("Is your correspondent fingerprint the same as here ?")) xmlui.addList( "match", [("yes", _("yes")), ("no", _("no"))], ["yes" if trusted else "no"] ) @@ -508,7 +509,7 @@ try: to_jid = jid.JID(menu_data["jid"]) except KeyError: - log.error(_(u"jid key is not present !")) + log.error(_("jid key is not present !")) return defer.fail(exceptions.DataError) self.startRefresh(client, to_jid) return {} @@ -519,10 +520,10 @@ @param to_jid(jid.JID): jid to start encrypted session with """ encrypted_session = client.encryption.getSession(to_jid.userhostJID()) - if encrypted_session and 
encrypted_session[u'plugin'].namespace != NS_OTR: + if encrypted_session and encrypted_session['plugin'].namespace != NS_OTR: raise exceptions.ConflictError(_( - u"Can't start an OTR session, there is already an encrypted session " - u"with {name}").format(name=encrypted_session[u'plugin'].name)) + "Can't start an OTR session, there is already an encrypted session " + "with {name}").format(name=encrypted_session['plugin'].name)) if not to_jid.resource: to_jid.resource = self.host.memory.getMainResource( client, to_jid @@ -542,7 +543,7 @@ try: to_jid = jid.JID(menu_data["jid"]) except KeyError: - log.error(_(u"jid key is not present !")) + log.error(_("jid key is not present !")) return defer.fail(exceptions.DataError) self.endSession(client, to_jid) return {} @@ -568,7 +569,7 @@ try: to_jid = jid.JID(menu_data["jid"]) except KeyError: - log.error(_(u"jid key is not present !")) + log.error(_("jid key is not present !")) return defer.fail(exceptions.DataError) return self.authenticate(client, to_jid) @@ -592,26 +593,26 @@ ) # FIXME: temporary and unsecure, must be changed when frontends # are refactored except KeyError: - log.error(_(u"jid key is not present !")) + log.error(_("jid key is not present !")) return defer.fail(exceptions.DataError) ctxMng = client._otr_context_manager if ctxMng.account.privkey is None: return { - "xmlui": xml_tools.note(_(u"You don't have a private key yet !")).toXml() + "xmlui": xml_tools.note(_("You don't have a private key yet !")).toXml() } def dropKey(data, profile): if C.bool(data["answer"]): # we end all sessions - for context in ctxMng.contexts.values(): + for context in list(ctxMng.contexts.values()): context.disconnect() ctxMng.account.privkey = None ctxMng.account.getPrivkey() # as account.privkey is None, getPrivkey # will generate a new key, and save it return { "xmlui": xml_tools.note( - D_(u"Your private key has been dropped") + D_("Your private key has been dropped") ).toXml() } return {} @@ -620,7 +621,7 @@ confirm = xml_tools.XMLUI( C.XMLUI_DIALOG, - title=_(u"Confirm private key drop"), + title=_("Confirm private key drop"), dialog_opt={"type": C.XMLUI_DIALOG_CONFIRM, "message": _(DROP_TXT)}, submit_id=submit_id, ) @@ -628,43 +629,43 @@ def _receivedTreatment(self, data, client): from_jid = data["from"] - log.debug(u"_receivedTreatment [from_jid = %s]" % from_jid) + log.debug("_receivedTreatment [from_jid = %s]" % from_jid) otrctx = client._otr_context_manager.getContextForUser(from_jid) try: message = ( - data["message"].itervalues().next() + next(iter(data["message"].values())) ) # FIXME: Q&D fix for message refactoring, message is now a dict res = otrctx.receiveMessage(message.encode("utf-8")) except potr.context.UnencryptedMessage: encrypted = False if otrctx.state == potr.context.STATE_ENCRYPTED: log.warning( - u"Received unencrypted message in an encrypted context (from {jid})" + "Received unencrypted message in an encrypted context (from {jid})" .format(jid=from_jid.full()) ) feedback = ( D_( - u"WARNING: received unencrypted data in a supposedly encrypted " - u"context" + "WARNING: received unencrypted data in a supposedly encrypted " + "context" ), ) client.feedback(from_jid, feedback) except potr.context.NotEncryptedError: - msg = D_(u"WARNING: received OTR encrypted data in an unencrypted context") + msg = D_("WARNING: received OTR encrypted data in an unencrypted context") log.warning(msg) feedback = msg client.feedback(from_jid, msg) raise failure.Failure(exceptions.CancelError(msg)) except potr.context.ErrorReceived as e: - msg 
= D_(u"WARNING: received OTR error message: {msg}".format(msg=e)) + msg = D_("WARNING: received OTR error message: {msg}".format(msg=e)) log.warning(msg) feedback = msg client.feedback(from_jid, msg) raise failure.Failure(exceptions.CancelError(msg)) except potr.crypt.InvalidParameterError as e: - msg = D_(u"Error while trying de decrypt OTR message: {msg}".format(msg=e)) + msg = D_("Error while trying de decrypt OTR message: {msg}".format(msg=e)) log.warning(msg) feedback = msg client.feedback(from_jid, msg) @@ -680,12 +681,12 @@ # receiveMessage() will return a tuple, # the first part of which will be the decrypted message data["message"] = { - "": res[0].decode("utf-8") + "": res[0] } # FIXME: Q&D fix for message refactoring, message is now a dict try: # we want to keep message in history, even if no store is # requested in message hints - del data[u"history"] + del data["history"] except KeyError: pass # TODO: add skip history as an option, but by default we don't skip it @@ -715,7 +716,7 @@ # XXX: FIXME: this should not be done on a per-profile basis, but per-message try: message = ( - data["message"].itervalues().next().encode("utf-8") + iter(data["message"].values()).next().encode("utf-8") ) # FIXME: Q&D fix for message refactoring, message is now a dict except StopIteration: return data @@ -726,7 +727,7 @@ # other frontends # if they are used at the same time as Libervia. # Hard to avoid with decryption on Libervia though. - data[u"history"] = C.HISTORY_SKIP + data["history"] = C.HISTORY_SKIP return data def MessageReceivedTrigger(self, client, message_elt, post_treat): @@ -755,7 +756,7 @@ otrctx = client._otr_context_manager.getContextForUser(to_jid) message_elt = mess_data["xml"] if otrctx.state == potr.context.STATE_ENCRYPTED: - log.debug(u"encrypting message") + log.debug("encrypting message") body = None for child in list(message_elt.children): if child.name == "body": @@ -768,22 +769,22 @@ # we don't want any XHTML-IM element message_elt.children.remove(child) if body is None: - log.warning(u"No message found") + log.warning("No message found") else: self._p_carbons.setPrivate(message_elt) self._p_hints.addHintElements(message_elt, [ self._p_hints.HINT_NO_COPY, self._p_hints.HINT_NO_PERMANENT_STORE]) - otrctx.sendMessage(0, unicode(body).encode("utf-8"), appdata=mess_data) + otrctx.sendMessage(0, str(body).encode("utf-8"), appdata=mess_data) else: feedback = D_( - u"Your message was not sent because your correspondent closed the " - u"encrypted conversation on his/her side. " - u"Either close your own side, or refresh the session." + "Your message was not sent because your correspondent closed the " + "encrypted conversation on his/her side. " + "Either close your own side, or refresh the session." ) - log.warning(_(u"Message discarded because closed encryption channel")) + log.warning(_("Message discarded because closed encryption channel")) client.feedback(to_jid, feedback) - raise failure.Failure(exceptions.CancelError(u"Cancelled by OTR plugin")) + raise failure.Failure(exceptions.CancelError("Cancelled by OTR plugin")) def sendMessageTrigger(self, client, mess_data, pre_xml_treatments, post_xml_treatments):
--- a/sat/plugins/plugin_syntax_wiki_dotclear.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_syntax_wiki_dotclear.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for Dotclear Wiki Syntax @@ -40,9 +40,9 @@ C.PI_DESCRIPTION: _("""Implementation of Dotclear wiki syntax"""), } -NOTE_TPL = u"[{}]" # Note template -NOTE_A_REV_TPL = u"rev_note_{}" -NOTE_A_TPL = u"note_{}" +NOTE_TPL = "[{}]" # Note template +NOTE_A_REV_TPL = "rev_note_{}" +NOTE_A_TPL = "note_{}" ESCAPE_CHARS_BASE = r"(?P<escape_char>[][{}%|\\/*#@{{}}~$-])" ESCAPE_CHARS_EXTRA = ( r"!?_+'()" @@ -90,7 +90,7 @@ class DCWikiParser(object): def __init__(self): self._footnotes = None - for i in xrange(5): + for i in range(5): setattr( self, "parser_h{}_title".format(i), @@ -108,7 +108,7 @@ try: div_elt = xml_tools.ElementParser()(wrapped_html) except domish.ParserError as e: - log.warning(u"Error while parsing HTML content, ignoring it: {}".format(e)) + log.warning("Error while parsing HTML content, ignoring it: {}".format(e)) return children = list(div_elt.elements()) if len(children) == 1 and children[0].name == "div": @@ -132,7 +132,7 @@ string = string[depth:].lstrip() - for i in xrange(depth + 1): + for i in range(depth + 1): list_elt = getattr(parent, list_type) if not list_elt: parent = parent.addElement(list_type) @@ -165,7 +165,7 @@ if p_elt is None: p_elt = blockquote_elt.addElement("p") else: - string = u"\n" + string + string = "\n" + string self._parse(string, p_elt) @@ -189,7 +189,7 @@ self._parse(string, del_elt) def parser_link(self, string, parent): - url_data = string.split(u"|") + url_data = string.split("|") a_elt = parent.addElement("a") length = len(url_data) if length == 1: @@ -206,10 +206,10 @@ if length >= 4: a_elt["title"] = url_data[3] if length > 4: - log.warning(u"too much data for url, ignoring extra data") + log.warning("too much data for url, ignoring extra data") def parser_image(self, string, parent): - image_data = string.split(u"|") + image_data = string.split("|") img_elt = parent.addElement("img") for idx, attribute in enumerate(("src", "alt", "position", "longdesc")): @@ -231,19 +231,19 @@ "style" ] = "display:block; margin-left:auto; margin-right:auto" else: - log.warning(u"bad position argument for image, ignoring it") + log.warning("bad position argument for image, ignoring it") def parser_anchor(self, string, parent): a_elt = parent.addElement("a") a_elt["id"] = string def parser_acronym(self, string, parent): - acronym, title = string.split(u"|", 1) + acronym, title = string.split("|", 1) acronym_elt = parent.addElement("acronym", content=acronym) acronym_elt["title"] = title def parser_inline_quote(self, string, parent): - quote_data = string.split(u"|") + quote_data = string.split("|") quote = quote_data[0] q_elt = parent.addElement("q", content=quote) for idx, attribute in enumerate(("lang", "cite"), 1): @@ -263,12 +263,12 @@ sup_elt["class"] = "note" a_elt = sup_elt.addElement("a", content=note_txt) a_elt["id"] = NOTE_A_REV_TPL.format(idx) - a_elt["href"] = u"#{}".format(NOTE_A_TPL.format(idx)) + a_elt["href"] = "#{}".format(NOTE_A_TPL.format(idx)) p_elt = domish.Element((None, "p")) a_elt = p_elt.addElement("a", content=note_txt) a_elt["id"] = NOTE_A_TPL.format(idx) - a_elt["href"] = u"#{}".format(NOTE_A_REV_TPL.format(idx)) + a_elt["href"] = "#{}".format(NOTE_A_REV_TPL.format(idx)) self._parse(string, p_elt) # footnotes are actually added at the end of the parsing 
self._footnotes.append(p_elt) @@ -287,7 +287,7 @@ try: parser = getattr(self, "parser_{}".format(match.lastgroup)) except AttributeError: - log.warning(u"No parser found for {}".format(match.lastgroup)) + log.warning("No parser found for {}".format(match.lastgroup)) # parent.addContent(string) continue parser(matched, parent) @@ -313,7 +313,7 @@ self.flags = None self.toto = 0 self.footnotes = None # will hold a map from url to buffer id - for i in xrange(1, 6): + for i in range(1, 6): setattr( self, "parser_h{}".format(i), @@ -333,11 +333,11 @@ except KeyError: self.parserGeneric(elt, buf) else: - buf.append(u"~~{}~~".format(id_)) + buf.append("~~{}~~".format(id_)) return link_data = [url] - name = unicode(elt) + name = str(elt) if name != url: link_data.insert(0, name) @@ -346,46 +346,46 @@ if lang is not None: link_data.append(lang) elif title is not None: - link_data.appand(u"") + link_data.appand("") if title is not None: link_data.append(title) - buf.append(u"[") - buf.append(u"|".join(link_data)) - buf.append(u"]") + buf.append("[") + buf.append("|".join(link_data)) + buf.append("]") def parser_acronym(self, elt, buf): try: title = elt["title"] except KeyError: - log.debug(u"Acronyme without title, using generic parser") + log.debug("Acronyme without title, using generic parser") self.parserGeneric(elt, buf) return - buf.append(u"??{}|{}??".format(unicode(elt), title)) + buf.append("??{}|{}??".format(str(elt), title)) def parser_blockquote(self, elt, buf): # we remove wrapping <p> to avoid empty line with "> " children = list( - [child for child in elt.children if unicode(child).strip() not in ("", "\n")] + [child for child in elt.children if str(child).strip() not in ("", "\n")] ) if len(children) == 1 and children[0].name == "p": elt = children[0] tmp_buf = [] self.parseChildren(elt, tmp_buf) - blockquote = u"> " + u"\n> ".join(u"".join(tmp_buf).split("\n")) + blockquote = "> " + "\n> ".join("".join(tmp_buf).split("\n")) buf.append(blockquote) def parser_br(self, elt, buf): - buf.append(u"%%%") + buf.append("%%%") def parser_code(self, elt, buf): - buf.append(u"@@") + buf.append("@@") self.parseChildren(elt, buf) - buf.append(u"@@") + buf.append("@@") def parser_del(self, elt, buf): - buf.append(u"--") + buf.append("--") self.parseChildren(elt, buf) - buf.append(u"--") + buf.append("--") def parser_div(self, elt, buf): if elt.getAttribute("class") == "footnotes": @@ -394,9 +394,9 @@ self.parseChildren(elt, buf, block=True) def parser_em(self, elt, buf): - buf.append(u"''") + buf.append("''") self.parseChildren(elt, buf) - buf.append(u"''") + buf.append("''") def parser_h6(self, elt, buf): # XXX: <h6/> heading is not managed by wiki syntax @@ -406,13 +406,13 @@ self._parse(elt, buf) def parser_hr(self, elt, buf): - buf.append(u"\n----\n") + buf.append("\n----\n") def parser_img(self, elt, buf): try: url = elt["src"] except KeyError: - log.warning(u"Ignoring <img/> without src") + log.warning("Ignoring <img/> without src") return image_data = [url] @@ -433,24 +433,24 @@ if alt: image_data.append(alt) elif position or desc: - image_data.append(u"") + image_data.append("") if position: image_data.append(position) elif desc: - image_data.append(u"") + image_data.append("") if desc: image_data.append(desc) - buf.append(u"((") - buf.append(u"|".join(image_data)) - buf.append(u"))") + buf.append("((") + buf.append("|".join(image_data)) + buf.append("))") def parser_ins(self, elt, buf): - buf.append(u"++") + buf.append("++") self.parseChildren(elt, buf) - buf.append(u"++") + 
buf.append("++") def parser_li(self, elt, buf): flag = None @@ -461,11 +461,11 @@ if current_flag is None: current_flag = flag if flag == current_flag: - bullets.append(u"*" if flag == FLAG_UL else u"#") + bullets.append("*" if flag == FLAG_UL else "#") else: break - if flag != current_flag and buf[-1] == u" ": + if flag != current_flag and buf[-1] == " ": # this trick is to avoid a space when we switch # from (un)ordered to the other type on the same row # e.g. *# unorder + ordered item @@ -473,29 +473,29 @@ buf.extend(bullets) - buf.append(u" ") + buf.append(" ") self.parseChildren(elt, buf) - buf.append(u"\n") + buf.append("\n") def parser_ol(self, elt, buf): self.parserList(elt, buf, FLAG_OL) def parser_p(self, elt, buf): self.parseChildren(elt, buf) - buf.append(u"\n\n") + buf.append("\n\n") def parser_pre(self, elt, buf): - pre = u"".join( + pre = "".join( [ - child.toXml() if domish.IElement.providedBy(child) else unicode(child) + child.toXml() if domish.IElement.providedBy(child) else str(child) for child in elt.children ] ) - pre = u" " + u"\n ".join(pre.split("\n")) + pre = " " + "\n ".join(pre.split("\n")) buf.append(pre) def parser_q(self, elt, buf): - quote_data = [unicode(elt)] + quote_data = [str(elt)] lang = elt.getAttribute("lang") cite = elt.getAttribute("url") @@ -503,27 +503,27 @@ if lang: quote_data.append(lang) elif cite: - quote_data.append(u"") + quote_data.append("") if cite: quote_data.append(cite) - buf.append(u"{{") - buf.append(u"|".join(quote_data)) - buf.append(u"}}") + buf.append("{{") + buf.append("|".join(quote_data)) + buf.append("}}") def parser_span(self, elt, buf): self.parseChildren(elt, buf, block=True) def parser_strong(self, elt, buf): - buf.append(u"__") + buf.append("__") self.parseChildren(elt, buf) - buf.append(u"__") + buf.append("__") def parser_sup(self, elt, buf): # sup is mainly used for footnotes, so we check if we have an anchor inside children = list( - [child for child in elt.children if unicode(child).strip() not in ("", "\n")] + [child for child in elt.children if str(child).strip() not in ("", "\n")] ) if ( len(children) == 1 @@ -538,10 +538,10 @@ self.parserGeneric(elt, buf) return # this looks like a footnote - buf.append(u"$$") - buf.append(u" ") # placeholder + buf.append("$$") + buf.append(" ") # placeholder self.footnotes[note_id] = len(buf) - 1 - buf.append(u"$$") + buf.append("$$") else: self.parserGeneric(elt, buf) @@ -559,14 +559,14 @@ break if idx == 0: - raise exceptions.InternalError(u"flag has been removed by an other parser") + raise exceptions.InternalError("flag has been removed by an other parser") def parserHeading(self, elt, buf, level): - buf.append((6 - level) * u"!") + buf.append((6 - level) * "!") for child in elt.children: # we ignore other elements for a Hx title self.parserText(child, buf) - buf.append(u"\n") + buf.append("\n") def parserFootnote(self, elt, buf): for elt in elt.elements(): @@ -575,13 +575,13 @@ a_elt = elt.a if a_elt is None: log.warning( - u"<p/> element doesn't contain <a/> in footnote, ignoring it" + "<p/> element doesn't contain <a/> in footnote, ignoring it" ) continue try: note_idx = self.footnotes[a_elt["id"]] except KeyError: - log.warning(u"Note id doesn't match any known note, ignoring it") + log.warning("Note id doesn't match any known note, ignoring it") # we create a dummy element to parse all children after the <a/> dummy_elt = domish.Element((None, "note")) a_idx = elt.children.index(a_elt) @@ -589,13 +589,13 @@ note_buf = [] self.parseChildren(dummy_elt, note_buf) # now we 
can replace the placeholder - buf[note_idx] = u"".join(note_buf) + buf[note_idx] = "".join(note_buf) def parserText(self, txt, buf, keep_whitespaces=False): - txt = unicode(txt) + txt = str(txt) if not keep_whitespaces: # we get text and only let one inter word space - txt = u" ".join(txt.split()) + txt = " ".join(txt.split()) txt = re.sub(ESCAPE_CHARS, r"\\\1", txt) if txt: buf.append(txt) @@ -604,14 +604,14 @@ def parserGeneric(self, elt, buf): # as dotclear wiki syntax handle arbitrary XHTML code # we use this feature to add elements that we don't know - buf.append(u"\n\n///html\n{}\n///\n\n".format(elt.toXml())) + buf.append("\n\n///html\n{}\n///\n\n".format(elt.toXml())) def parseChildren(self, elt, buf, block=False): first_visible = True for child in elt.children: if not block and not first_visible and buf and buf[-1][-1] not in (" ", "\n"): # we add separation if it isn't already there - buf.append(u" ") + buf.append(" ") if domish.IElement.providedBy(child): self._parse(child, buf) first_visible = False @@ -642,14 +642,14 @@ self.footnotes = {} buf = [] self._parse(elt, buf) - return u"".join(buf) + return "".join(buf) def parseString(self, string): - wrapped_html = u"<div>{}</div>".format(string) + wrapped_html = "<div>{}</div>".format(string) try: div_elt = xml_tools.ElementParser()(wrapped_html) except domish.ParserError as e: - log.warning(u"Error while parsing HTML content: {}".format(e)) + log.warning("Error while parsing HTML content: {}".format(e)) return children = list(div_elt.elements()) if len(children) == 1 and children[0].name == "div": @@ -661,7 +661,7 @@ SYNTAX_NAME = "wiki_dotclear" def __init__(self, host): - log.info(_(u"Dotclear wiki syntax plugin initialization")) + log.info(_("Dotclear wiki syntax plugin initialization")) self.host = host self._dc_parser = DCWikiParser() self._xhtml_parser = XHTMLParser()
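The footnote handling above reserves a placeholder slot in the output buffer when a <sup> reference is met and fills that slot once the footnote body is parsed at the end of the document. A minimal self-contained sketch of that placeholder technique, with an illustrative token format rather than the plugin's domish API:

    # Sketch only: "tokens" stands in for the parsed XHTML children.
    def render_with_footnotes(tokens):
        buf = []
        placeholders = {}                 # note id -> index of its slot in buf
        for kind, *data in tokens:
            if kind == "text":
                buf.append(data[0])
            elif kind == "note_ref":      # reference met first, body not known yet
                note_id = data[0]
                buf.append("$$")
                buf.append("")            # placeholder, patched later
                placeholders[note_id] = len(buf) - 1
                buf.append("$$")
            elif kind == "note_body":     # body parsed later, patches the slot
                note_id, text = data
                buf[placeholders[note_id]] = text
        return "".join(buf)

    print(render_with_footnotes([
        ("text", "some claim"),
        ("note_ref", "n1"),
        ("text", " and more text. "),
        ("note_body", "n1", "reference for the claim"),
    ]))
    # -> "some claim$$reference for the claim$$ and more text. "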
--- a/sat/plugins/plugin_tickets_import.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_tickets_import.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for import external ticketss @@ -36,14 +36,14 @@ C.PI_MAIN: "TicketsImportPlugin", C.PI_HANDLER: "no", C.PI_DESCRIPTION: _( - u"""Tickets import management: + """Tickets import management: This plugin manage the different tickets importers which can register to it, and handle generic importing tasks.""" ), } OPT_MAPPING = "mapping" -FIELDS_LIST = (u"labels", u"cc_emails") # fields which must have a list as value -FIELDS_DATE = (u"created", u"updated") +FIELDS_LIST = ("labels", "cc_emails") # fields which must have a list as value +FIELDS_DATE = ("created", "updated") NS_TICKETS = "org.salut-a-toi.tickets:0" @@ -60,7 +60,7 @@ self._p = host.plugins["XEP-0060"] self._m = host.plugins["XEP-0277"] self._s = host.plugins["PUBSUB_SCHEMA"] - host.plugins["IMPORT"].initialize(self, u"tickets") + host.plugins["IMPORT"].initialize(self, "tickets") @defer.inlineCallbacks def importItem( @@ -108,21 +108,21 @@ """ if "comments_uri" in item_import_data: raise exceptions.DataError( - _(u"comments_uri key will be generated and must not be used by importer") + _("comments_uri key will be generated and must not be used by importer") ) for key in FIELDS_LIST: if not isinstance(item_import_data.get(key, []), list): - raise exceptions.DataError(_(u"{key} must be a list").format(key=key)) + raise exceptions.DataError(_("{key} must be a list").format(key=key)) for key in FIELDS_DATE: try: item_import_data[key] = utils.xmpp_date(item_import_data[key]) except KeyError: continue - if session[u"root_node"] is None: - session[u"root_node"] = NS_TICKETS + if session["root_node"] is None: + session["root_node"] = NS_TICKETS if not "schema" in session: session["schema"] = yield self._s.getSchemaForm( - client, service, node or session[u"root_node"] + client, service, node or session["root_node"] ) defer.returnValue(item_import_data) @@ -133,7 +133,7 @@ # TODO: node access/publish model should be customisable comments = ticket_data.get("comments", []) service = yield self._m.getCommentsService(client) - node = self._m.getCommentsNode(session["root_node"] + u"_" + ticket_data["id"]) + node = self._m.getCommentsNode(session["root_node"] + "_" + ticket_data["id"]) node_options = { self._p.OPT_ACCESS_MODEL: self._p.ACCESS_OPEN, self._p.OPT_PERSIST_ITEMS: 1, @@ -144,7 +144,7 @@ } yield self._p.createIfNewNode(client, service, node, options=node_options) ticket_data["comments_uri"] = uri.buildXMPPUri( - u"pubsub", subtype="microblog", path=service.full(), node=node + "pubsub", subtype="microblog", path=service.full(), node=node ) for comment in comments: if "updated" not in comment and "published" in comment: @@ -157,7 +157,7 @@ node = NS_TICKETS id_ = ticket_data.pop("id", None) log.debug( - u"uploading item [{id}]: {title}".format( + "uploading item [{id}]: {title}".format( id=id_, title=ticket_data.get("title", "") ) ) @@ -169,13 +169,13 @@ mapping = options.get(OPT_MAPPING) if mapping is not None: if not isinstance(mapping, dict): - raise exceptions.DataError(_(u"mapping option must be a dictionary")) + raise exceptions.DataError(_("mapping option must be a dictionary")) - for source, dest in mapping.iteritems(): - if not isinstance(source, unicode) or not isinstance(dest, unicode): + for source, dest in mapping.items(): + if not isinstance(source, str) or not isinstance(dest, 
str): raise exceptions.DataError( _( - u"keys and values of mapping must be sources and destinations ticket fields" + "keys and values of mapping must be sources and destinations ticket fields" ) ) if source in ticket_data: @@ -185,6 +185,6 @@ values.append(value) else: if dest in ticket_data: - ticket_data[dest] = ticket_data[dest] + u"\n" + value + ticket_data[dest] = ticket_data[dest] + "\n" + value else: ticket_data[dest] = value
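The mapping option above copies values from source ticket fields into destination fields, appending to list fields and concatenating text fields with a newline. A simplified standalone sketch, assuming illustrative field names and leaving aside details such as whether the source field is removed afterwards:

    FIELDS_LIST = ("labels", "cc_emails")   # fields whose value must be a list

    def apply_mapping(ticket_data, mapping):
        for source, dest in mapping.items():
            if not isinstance(source, str) or not isinstance(dest, str):
                raise ValueError("mapping keys and values must be field names")
            if source not in ticket_data:
                continue
            value = ticket_data.pop(source)
            if dest in FIELDS_LIST:
                ticket_data.setdefault(dest, []).append(value)
            elif dest in ticket_data:
                ticket_data[dest] = ticket_data[dest] + "\n" + value
            else:
                ticket_data[dest] = value
        return ticket_data

    print(apply_mapping(
        {"component": "backend", "summary": "crash on start"},
        {"component": "labels", "summary": "title"},
    ))
    # -> {'labels': ['backend'], 'title': 'crash on start'}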
--- a/sat/plugins/plugin_tickets_import_bugzilla.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_tickets_import_bugzilla.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for import external blogs @@ -41,10 +41,10 @@ C.PI_DESCRIPTION: _("""Tickets importer for Bugzilla"""), } -SHORT_DESC = D_(u"import tickets from Bugzilla xml export file") +SHORT_DESC = D_("import tickets from Bugzilla xml export file") LONG_DESC = D_( - u"""This importer handle Bugzilla xml export file. + """This importer handle Bugzilla xml export file. To use it, you'll need to export tickets using XML. Tickets will be uploaded with the same ID as for Bugzilla, any existing ticket with this ID will be replaced. @@ -83,7 +83,7 @@ : reporter_elt.text.find("@") ].title() else: - ticket["author"] = u"no name" + ticket["author"] = "no name" ticket["author_email"] = reporter_elt.text assigned_to_elt = bug.find("assigned_to") ticket["assigned_to_name"] = assigned_to_elt.get("name") @@ -125,7 +125,7 @@ class BugzillaImport(object): def __init__(self, host): - log.info(_(u"Bugilla Import plugin initialization")) + log.info(_("Bugilla Import plugin initialization")) self.host = host host.plugins["TICKETS_IMPORT"].register( "bugzilla", self.Import, SHORT_DESC, LONG_DESC @@ -134,7 +134,7 @@ def Import(self, client, location, options=None): if not os.path.isabs(location): raise exceptions.DataError( - u"An absolute path to XML data need to be given as location" + "An absolute path to XML data need to be given as location" ) bugzilla_parser = BugzillaParser() # d = threads.deferToThread(bugzilla_parser.parse, location)
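The reporter handling above falls back to the local part of the reporter's e-mail address (title-cased) when Bugzilla's name attribute is missing, and to "no name" as a last resort. A standalone sketch using the standard-library ElementTree; the exact condition ordering in the plugin may differ:

    from xml.etree import ElementTree as ET

    def reporter_info(bug_xml):
        bug = ET.fromstring(bug_xml)
        reporter_elt = bug.find("reporter")
        name = reporter_elt.get("name")
        if name:
            author = name
        elif "@" in reporter_elt.text:
            # derive a display name from the e-mail local part
            author = reporter_elt.text[:reporter_elt.text.find("@")].title()
        else:
            author = "no name"
        return author, reporter_elt.text

    print(reporter_info('<bug><reporter>jane.doe@example.net</reporter></bug>'))
    # -> ('Jane.Doe', 'jane.doe@example.net')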
--- a/sat/plugins/plugin_tmp_directory_subscription.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_tmp_directory_subscription.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for directory subscription
--- a/sat/plugins/plugin_xep_0020.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0020.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0020 @@ -25,7 +25,7 @@ from sat.core import exceptions from twisted.words.xish import domish -from zope.interface import implements +from zope.interface import implementer try: from twisted.words.protocols.xmlstream import XMPPHandler @@ -62,7 +62,7 @@ @raise exceptions.NotFound: no feature element found """ try: - feature_elt = elt.elements(NS_FEATURE_NEG, "feature").next() + feature_elt = next(elt.elements(NS_FEATURE_NEG, "feature")) except StopIteration: raise exceptions.NotFound return feature_elt @@ -76,7 +76,7 @@ """ if namespace is None: try: - form_elt = elt.elements(data_form.NS_X_DATA).next() + form_elt = next(elt.elements(data_form.NS_X_DATA)) except StopIteration: return None else: @@ -102,7 +102,7 @@ if len(values) > 1: log.warning( _( - u"More than one value choosed for {}, keeping the first one" + "More than one value choosed for {}, keeping the first one" ).format(field) ) return result @@ -156,8 +156,8 @@ return feature_elt +@implementer(iwokkel.IDisco) class XEP_0020_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_FEATURE_NEG)]
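The recurring ".next()" to "next()" rewrite in this hunk comes from Python 3 dropping the .next() method on generators; only the next() builtin (and __next__) remain, with StopIteration still signalling "no matching child". A minimal illustration of the pattern, using plain dicts as stand-ins for domish elements:

    def first_child(children, name):
        matching = (c for c in children if c["name"] == name)  # like elt.elements(ns, name)
        try:
            return next(matching)       # Python 2 code called matching.next()
        except StopIteration:
            raise LookupError("no <{}/> child found".format(name))

    children = [{"name": "feature"}, {"name": "x"}]
    print(first_child(children, "x"))   # -> {'name': 'x'}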
--- a/sat/plugins/plugin_xep_0033.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0033.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Extended Stanza Addressing (xep-0033) @@ -24,7 +24,7 @@ log = getLogger(__name__) from sat.core import exceptions from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber.jid import JID from twisted.python import failure import copy @@ -97,19 +97,19 @@ _("XEP-0033 is being used but the server doesn't support it!") ) raise failure.Failure( - exceptions.CancelError(u"Cancelled by XEP-0033") + exceptions.CancelError("Cancelled by XEP-0033") ) if mess_data["to"] not in entities: expected = _(" or ").join([entity.userhost() for entity in entities]) log.warning( _( - u"Stanzas using XEP-0033 should be addressed to %(expected)s, not %(current)s!" + "Stanzas using XEP-0033 should be addressed to %(expected)s, not %(current)s!" ) % {"expected": expected, "current": mess_data["to"]} ) log.warning( _( - u"TODO: addressing has been fixed by the backend... fix it in the frontend!" + "TODO: addressing has been fixed by the backend... fix it in the frontend!" ) ) mess_data["to"] = list(entities)[0].userhostJID() @@ -128,7 +128,7 @@ # when the prosody plugin is completed, we can immediately return mess_data from here self.sendAndStoreMessage(mess_data, entries, profile) log.debug("XEP-0033 took over") - raise failure.Failure(exceptions.CancelError(u"Cancelled by XEP-0033")) + raise failure.Failure(exceptions.CancelError("Cancelled by XEP-0033")) d = self.host.findFeaturesSet(client, [NS_ADDRESS]) d.addCallbacks(discoCallback, lambda __: discoCallback(None)) @@ -213,7 +213,7 @@ return data try: - addresses = message.elements(NS_ADDRESS, "addresses").next() + addresses = next(message.elements(NS_ADDRESS, "addresses")) except StopIteration: pass # no addresses else: @@ -224,8 +224,8 @@ return XEP_0033_handler(self, client.profile) +@implementer(iwokkel.IDisco) class XEP_0033_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent, profile): self.plugin_parent = plugin_parent
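zope.interface's class-advice form "implements(IFoo)" relies on Python 2 frame magic and does not work under Python 3, hence the switch to the @implementer class decorator on each handler above. A small self-contained example (assumes zope.interface is installed, as it is alongside Twisted):

    from zope.interface import Interface, implementer

    class IGreeter(Interface):
        def greet():
            """Return a greeting."""

    @implementer(IGreeter)              # Python 2 form: "implements(IGreeter)" in the body
    class Greeter(object):
        def greet(self):
            return "hello"

    print(IGreeter.providedBy(Greeter()))   # -> True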
--- a/sat/plugins/plugin_xep_0045.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0045.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0045 @@ -34,7 +34,7 @@ from wokkel import muc, disco, iwokkel from sat.tools import xml_tools -from zope.interface import implements +from zope.interface import implementer # XXX: mam and rsm come from sat_tmp.wokkel from wokkel import rsm @@ -47,7 +47,7 @@ C.PI_TYPE: "XEP", C.PI_PROTOCOLS: ["XEP-0045"], C.PI_DEPENDENCIES: ["XEP-0359"], - C.PI_RECOMMENDATIONS: [C.TEXT_CMDS, u"XEP-0313"], + C.PI_RECOMMENDATIONS: [C.TEXT_CMDS, "XEP-0313"], C.PI_MAIN: "XEP_0045", C.PI_HANDLER: "yes", C.PI_DESCRIPTION: _("""Implementation of Multi-User Chat""") @@ -62,13 +62,13 @@ ROOM_STATE_SELF_PRESENCE = "self-presence" ROOM_STATE_LIVE = "live" ROOM_STATES = (ROOM_STATE_OCCUPANTS, ROOM_STATE_SELF_PRESENCE, ROOM_STATE_LIVE) -HISTORY_LEGACY = u"legacy" -HISTORY_MAM = u"mam" +HISTORY_LEGACY = "legacy" +HISTORY_MAM = "mam" -CONFIG_SECTION = u'plugin muc' +CONFIG_SECTION = 'plugin muc' -default_conf = {"default_muc": u'sat@chat.jabberfr.org'} +default_conf = {"default_muc": 'sat@chat.jabberfr.org'} class AlreadyJoined(exceptions.ConflictError): @@ -86,17 +86,17 @@ log.info(_("Plugin XEP_0045 initialization")) self.host = host self._sessions = memory.Sessions() - host.bridge.addMethod("mucJoin", ".plugin", in_sign='ssa{ss}s', out_sign='(bsa{sa{ss}}sss)', method=self._join, async=True) # return same arguments as mucRoomJoined + a boolean set to True is the room was already joined (first argument) + host.bridge.addMethod("mucJoin", ".plugin", in_sign='ssa{ss}s', out_sign='(bsa{sa{ss}}sss)', method=self._join, async_=True) # return same arguments as mucRoomJoined + a boolean set to True is the room was already joined (first argument) host.bridge.addMethod("mucNick", ".plugin", in_sign='sss', out_sign='', method=self._nick) host.bridge.addMethod("mucNickGet", ".plugin", in_sign='ss', out_sign='s', method=self._getRoomNick) - host.bridge.addMethod("mucLeave", ".plugin", in_sign='ss', out_sign='', method=self._leave, async=True) + host.bridge.addMethod("mucLeave", ".plugin", in_sign='ss', out_sign='', method=self._leave, async_=True) host.bridge.addMethod("mucOccupantsGet", ".plugin", in_sign='ss', out_sign='a{sa{ss}}', method=self._getRoomOccupants) host.bridge.addMethod("mucSubject", ".plugin", in_sign='sss', out_sign='', method=self._subject) host.bridge.addMethod("mucGetRoomsJoined", ".plugin", in_sign='s', out_sign='a(sa{sa{ss}}ss)', method=self._getRoomsJoined) host.bridge.addMethod("mucGetUniqueRoomName", ".plugin", in_sign='ss', out_sign='s', method=self._getUniqueName) - host.bridge.addMethod("mucConfigureRoom", ".plugin", in_sign='ss', out_sign='s', method=self._configureRoom, async=True) + host.bridge.addMethod("mucConfigureRoom", ".plugin", in_sign='ss', out_sign='s', method=self._configureRoom, async_=True) host.bridge.addMethod("mucGetDefaultService", ".plugin", in_sign='', out_sign='s', method=self.getDefaultMUC) - host.bridge.addMethod("mucGetService", ".plugin", in_sign='ss', out_sign='s', method=self._getMUCService, async=True) + host.bridge.addMethod("mucGetService", ".plugin", in_sign='ss', out_sign='s', method=self._getMUCService, async_=True) host.bridge.addSignal("mucRoomJoined", ".plugin", signature='sa{sa{ss}}sss') # args: room_jid, occupants, user_nick, subject, profile host.bridge.addSignal("mucRoomLeft", ".plugin", signature='ss') # args: 
room_jid, profile host.bridge.addSignal("mucRoomUserChangedNick", ".plugin", signature='ssss') # args: room_jid, old_nick, new_nick, profile @@ -107,13 +107,13 @@ try: self.text_cmds = self.host.plugins[C.TEXT_CMDS] except KeyError: - log.info(_(u"Text commands not available")) + log.info(_("Text commands not available")) else: self.text_cmds.registerTextCommands(self) self.text_cmds.addWhoIsCb(self._whois, 100) - self._mam = self.host.plugins.get(u"XEP-0313") - self._si = self.host.plugins[u"XEP-0359"] + self._mam = self.host.plugins.get("XEP-0313") + self._si = self.host.plugins["XEP-0359"] host.trigger.add("presence_available", self.presenceTrigger) host.trigger.add("presence_received", self.presenceReceivedTrigger) @@ -127,15 +127,15 @@ def _message_parseTrigger(self, client, message_elt, data): """Add stanza-id from the room if present""" - if message_elt.getAttribute(u"type") != C.MESS_TYPE_GROUPCHAT: + if message_elt.getAttribute("type") != C.MESS_TYPE_GROUPCHAT: return True # stanza_id will not be filled by parseMessage because the emitter # is the room and not our server, so we have to parse it here - room_jid = data[u"from"].userhostJID() + room_jid = data["from"].userhostJID() stanza_id = self._si.getStanzaId(message_elt, room_jid) if stanza_id: - data[u"extra"][u"stanza_id"] = stanza_id + data["extra"]["stanza_id"] = stanza_id def messageReceivedTrigger(self, client, message_elt, post_treat): if message_elt.getAttribute("type") == C.MESS_TYPE_GROUPCHAT: @@ -152,15 +152,15 @@ # but an expected case. # On the other hand, with legacy history, it's not normal. log.warning(_( - u"Received non delayed message in a room before its " - u"initialisation: state={state}, msg={msg}").format( + "Received non delayed message in a room before its " + "initialisation: state={state}, msg={msg}").format( state=room.state, msg=message_elt.toXml())) room._cache.append(message_elt) return False else: - log.warning(u"Received groupchat message for a room which has not been " - u"joined, ignoring it: {}".format(message_elt.toXml())) + log.warning("Received groupchat message for a room which has not been " + "joined, ignoring it: {}".format(message_elt.toXml())) return False return True @@ -173,7 +173,7 @@ try: return client._muc_client.joined_rooms[room_jid] except KeyError: - raise exceptions.NotFound(_(u"This room has not been joined")) + raise exceptions.NotFound(_("This room has not been joined")) def checkRoomJoined(self, client, room_jid): """Check that given room has been joined in current session @@ -181,7 +181,7 @@ @param room_jid (JID): room JID """ if room_jid not in client._muc_client.joined_rooms: - raise exceptions.NotFound(_(u"This room has not been joined")) + raise exceptions.NotFound(_("This room has not been joined")) def isJoinedRoom(self, client, room_jid): """Tell if a jid is a known and joined room @@ -213,8 +213,8 @@ def _passwordUICb(self, data, client, room_jid, nick): """Called when the user has given room password (or cancelled)""" if C.bool(data.get(C.XMLUI_DATA_CANCELLED, "false")): - log.info(u"room join for {} is cancelled".format(room_jid.userhost())) - raise failure.Failure(exceptions.CancelError(D_(u"Room joining cancelled by user"))) + log.info("room join for {} is cancelled".format(room_jid.userhost())) + raise failure.Failure(exceptions.CancelError(D_("Room joining cancelled by user"))) password = data[xml_tools.formEscape('password')] return client._muc_client.join(room_jid, nick, password).addCallbacks(self._joinCb, self._joinEb, (client, room_jid, nick), 
errbackArgs=(client, room_jid, nick, password)) @@ -234,10 +234,10 @@ # FIXME: the current behaviour is to create an instant room # and send the signal only when the room is unlocked # a proper configuration management should be done - log.debug(_(u"room locked !")) + log.debug(_("room locked !")) d = client._muc_client.configure(room.roomJID, {}) d.addErrback(self.host.logErrback, - msg=_(u'Error while configuring the room: {failure_}')) + msg=_('Error while configuring the room: {failure_}')) return room.fully_joined def _joinEb(self, failure, client, room_jid, nick, password): @@ -253,7 +253,7 @@ return client._muc_client.join(room_jid, nick, password).addCallbacks(self._joinCb, self._joinEb, (client, room_jid, nick), errbackArgs=(client, room_jid, nick, password)) elif condition == 'not-allowed': # room is restricted, we need a password - password_ui = xml_tools.XMLUI("form", title=D_(u'Room {} is restricted').format(room_jid.userhost()), submit_id='') + password_ui = xml_tools.XMLUI("form", title=D_('Room {} is restricted').format(room_jid.userhost()), submit_id='') password_ui.addText(D_("This room is restricted, please enter the password")) password_ui.addPassword('password') d = xml_tools.deferXMLUI(self.host, password_ui, profile=client.profile) @@ -262,16 +262,16 @@ msg_suffix = ' with condition "{}"'.format(failure.value.condition) - mess = D_(u"Error while joining the room {room}{suffix}".format( + mess = D_("Error while joining the room {room}{suffix}".format( room = room_jid.userhost(), suffix = msg_suffix)) log.error(mess) - xmlui = xml_tools.note(mess, D_(u"Group chat error"), level=C.XMLUI_DATA_LVL_ERROR) + xmlui = xml_tools.note(mess, D_("Group chat error"), level=C.XMLUI_DATA_LVL_ERROR) self.host.actionNew({'xmlui': xmlui.toXml()}, profile=client.profile) @staticmethod def _getOccupants(room): """Get occupants of a room in a form suitable for bridge""" - return {u.nick: {k:unicode(getattr(u,k) or '') for k in OCCUPANT_KEYS} for u in room.roster.values()} + return {u.nick: {k:str(getattr(u,k) or '') for k in OCCUPANT_KEYS} for u in list(room.roster.values())} def _getRoomOccupants(self, room_jid_s, profile_key): client = self.host.getClient(profile_key) @@ -289,7 +289,7 @@ def getRoomsJoined(self, client): """Return rooms where user is""" result = [] - for room in client._muc_client.joined_rooms.values(): + for room in list(client._muc_client.joined_rooms.values()): if room.state == ROOM_STATE_LIVE: result.append((room.roomJID.userhost(), self._getOccupants(room), room.nick, room.subject)) return result @@ -330,7 +330,7 @@ def xmluiReceived(xmlui): if not xmlui: - msg = D_(u"No configuration available for this room") + msg = D_("No configuration available for this room") return {"xmlui": xml_tools.note(msg).toXml()} return {"xmlui": xmlui.toXml()} return self.configureRoom(client, room_jid).addCallback(xmluiReceived) @@ -385,7 +385,7 @@ def _getMUCService(self, jid_=None, profile=C.PROF_KEY_NONE): client = self.host.getClient(profile) d = self.getMUCService(client, jid_ or None) - d.addCallback(lambda service_jid: service_jid.full() if service_jid is not None else u'') + d.addCallback(lambda service_jid: service_jid.full() if service_jid is not None else '') return d @defer.inlineCallbacks @@ -426,18 +426,18 @@ @return: jid.JID (unique room bare JID) """ # TODO: we should use #RFC-0045 10.1.4 when available here - room_name = unicode(uuid.uuid4()) + room_name = str(uuid.uuid4()) if muc_service is None: try: muc_service = client.muc_service except AttributeError: - raise 
exceptions.NotReady(u"Main server MUC service has not been checked yet") + raise exceptions.NotReady("Main server MUC service has not been checked yet") if muc_service is None: log.warning(_("No MUC service found on main server")) raise exceptions.FeatureNotFound muc_service = muc_service.userhost() - return jid.JID(u"{}@{}".format(room_name, muc_service)) + return jid.JID("{}@{}".format(room_name, muc_service)) def getDefaultMUC(self): """Return the default MUC. @@ -462,9 +462,9 @@ try: room_jid = jid.JID(room_jid_s) except (RuntimeError, jid.InvalidFormat, AttributeError): - return defer.fail(jid.InvalidFormat(_(u"Invalid room identifier: {room_id}'. Please give a room short or full identifier like 'room' or 'room@{muc_service}'.").format( + return defer.fail(jid.InvalidFormat(_("Invalid room identifier: {room_id}'. Please give a room short or full identifier like 'room' or 'room@{muc_service}'.").format( room_id=room_jid_s, - muc_service=unicode(muc_service)))) + muc_service=str(muc_service)))) if not room_jid.user: room_jid.user, room_jid.host = room_jid.host, muc_service else: @@ -482,10 +482,10 @@ options = {} if room_jid in client._muc_client.joined_rooms: room = client._muc_client.joined_rooms[room_jid] - log.info(_(u'{profile} is already in room {room_jid}').format( + log.info(_('{profile} is already in room {room_jid}').format( profile=client.profile, room_jid = room_jid.userhost())) return defer.fail(AlreadyJoined(room)) - log.info(_(u"[{profile}] is joining room {room} with nick {nick}").format( + log.info(_("[{profile}] is joining room {room} with nick {nick}").format( profile=client.profile, room=room_jid.userhost(), nick=nick)) password = options.get("password") @@ -504,7 +504,7 @@ This list can be used directly (unpacked) with self.join """ args_list = [] - for room in client._muc_client.joined_rooms.values(): + for room in list(client._muc_client.joined_rooms.values()): client._muc_client._removeRoom(room.roomJID) args_list.append((client, room.roomJID, room.nick)) return args_list @@ -642,16 +642,16 @@ nick = options[0] assert self.isNickInRoom(client, mess_data["to"], nick) except (IndexError, AssertionError): - feedback = _(u"You must provide a member's nick to kick.") + feedback = _("You must provide a member's nick to kick.") self.text_cmds.feedBack(client, feedback, mess_data) return False d = self.kick(client, nick, mess_data["to"], {} if len(options) == 1 else {'reason': options[1]}) def cb(__): - feedback_msg = _(u'You have kicked {}').format(nick) + feedback_msg = _('You have kicked {}').format(nick) if len(options) > 1: - feedback_msg += _(u' for the following reason: {}').format(options[1]) + feedback_msg += _(' for the following reason: {}').format(options[1]) self.text_cmds.feedBack(client, feedback_msg, mess_data) return True d.addCallback(cb) @@ -671,16 +671,16 @@ assert(entity_jid.user) assert(entity_jid.host) except (RuntimeError, jid.InvalidFormat, AttributeError, IndexError, AssertionError): - feedback = _(u"You must provide a valid JID to ban, like in '/ban contact@example.net'") + feedback = _("You must provide a valid JID to ban, like in '/ban contact@example.net'") self.text_cmds.feedBack(client, feedback, mess_data) return False d = self.ban(client, entity_jid, mess_data["to"], {} if len(options) == 1 else {'reason': options[1]}) def cb(__): - feedback_msg = _(u'You have banned {}').format(entity_jid) + feedback_msg = _('You have banned {}').format(entity_jid) if len(options) > 1: - feedback_msg += _(u' for the following reason: 
{}').format(options[1]) + feedback_msg += _(' for the following reason: {}').format(options[1]) self.text_cmds.feedBack(client, feedback_msg, mess_data) return True d.addCallback(cb) @@ -704,20 +704,20 @@ assert(entity_jid.user) assert(entity_jid.host) except (RuntimeError, jid.InvalidFormat, AttributeError, IndexError, AssertionError): - feedback = _(u"You must provide a valid JID to affiliate, like in '/affiliate contact@example.net member'") + feedback = _("You must provide a valid JID to affiliate, like in '/affiliate contact@example.net member'") self.text_cmds.feedBack(client, feedback, mess_data) return False affiliation = options[1] if len(options) > 1 else 'none' if affiliation not in AFFILIATIONS: - feedback = _(u"You must provide a valid affiliation: %s") % ' '.join(AFFILIATIONS) + feedback = _("You must provide a valid affiliation: %s") % ' '.join(AFFILIATIONS) self.text_cmds.feedBack(client, feedback, mess_data) return False d = self.affiliate(client, entity_jid, mess_data["to"], {'affiliation': affiliation}) def cb(__): - feedback_msg = _(u'New affiliation for %(entity)s: %(affiliation)s').format(entity=entity_jid, affiliation=affiliation) + feedback_msg = _('New affiliation for %(entity)s: %(affiliation)s').format(entity=entity_jid, affiliation=affiliation) self.text_cmds.feedBack(client, feedback_msg, mess_data) return True d.addCallback(cb) @@ -763,11 +763,11 @@ elif client.muc_service is not None: service = client.muc_service else: - msg = D_(u"No known default MUC service".format(unparsed)) + msg = D_("No known default MUC service".format(unparsed)) self.text_cmds.feedBack(client, msg, mess_data) return False except jid.InvalidFormat: - msg = D_(u"{} is not a valid JID!".format(unparsed)) + msg = D_("{} is not a valid JID!".format(unparsed)) self.text_cmds.feedBack(client, msg, mess_data) return False d = self.host.getDiscoItems(client, service) @@ -801,7 +801,7 @@ # FIXME: should we add a privacy parameters in settings to activate before # broadcasting the presence to all MUC rooms ? 
muc_client = client._muc_client - for room_jid, room in muc_client.joined_rooms.iteritems(): + for room_jid, room in muc_client.joined_rooms.items(): elt = xml_tools.elementCopy(presence_elt) elt['to'] = room_jid.userhost() + '/' + room.nick client.presence.send(elt) @@ -816,15 +816,15 @@ return True +@implementer(iwokkel.IDisco) class SatMUCClient(muc.MUCClient): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent muc.MUCClient.__init__(self) self._changing_nicks = set() # used to keep trace of who is changing nick, # and to discard userJoinedRoom signal in this case - print "init SatMUCClient OK" + print("init SatMUCClient OK") @property def joined_rooms(self): @@ -861,8 +861,8 @@ expected_state = ROOM_STATES[new_state_idx-1] if room.state != expected_state: log.error(_( - u"room {room} is not in expected state: room is in state {current_state} " - u"while we were expecting {expected_state}").format( + "room {room} is not in expected state: room is in state {current_state} " + "while we were expecting {expected_state}").format( room=room.roomJID.userhost(), current_state=room.state, expected_state=expected_state)) @@ -919,19 +919,19 @@ limit=1, between=False, filters={ - u'types': C.MESS_TYPE_GROUPCHAT, - u'last_stanza_id': True}, + 'types': C.MESS_TYPE_GROUPCHAT, + 'last_stanza_id': True}, profile=client.profile) if last_mess: - stanza_id = last_mess[0][-1][u'stanza_id'] + stanza_id = last_mess[0][-1]['stanza_id'] rsm_req = rsm.RSMRequest(max_=100, after=stanza_id) no_loop=False else: - log.info(u"We have no MAM archive for room {room_jid}.".format( + log.info("We have no MAM archive for room {room_jid}.".format( room_jid=room_jid)) # we don't want the whole archive if we have no archive yet # as it can be huge - rsm_req = rsm.RSMRequest(max_=50, before=u'') + rsm_req = rsm.RSMRequest(max_=50, before='') no_loop=True mam_req = mam.MAMRequest(rsm_=rsm_req) @@ -941,7 +941,7 @@ mam_data = yield self._mam.getArchives(client, mam_req, service=room_jid) elt_list, rsm_response, mam_response = mam_data - complete = True if no_loop else mam_response[u"complete"] + complete = True if no_loop else mam_response["complete"] # we update MAM request for next iteration mam_req.rsm.after = rsm_response.last @@ -956,11 +956,11 @@ client, mess_elt, mam_req, service=room_jid) except exceptions.DataError: continue - if fwd_message_elt.getAttribute(u"to"): + if fwd_message_elt.getAttribute("to"): log.warning( - u'Forwarded message element has a "to" attribute while it is ' - u'forbidden by specifications') - fwd_message_elt[u"to"] = client.jid.full() + 'Forwarded message element has a "to" attribute while it is ' + 'forbidden by specifications') + fwd_message_elt["to"] = client.jid.full() mess_data = client.messageProt.parseMessage(fwd_message_elt) # we attache parsed message data to element, to avoid parsing # again in _addToHistory @@ -969,12 +969,12 @@ client._muc_client._onGroupChat(fwd_message_elt) if not count: - log.info(_(u"No message received while offline in {room_jid}".format( + log.info(_("No message received while offline in {room_jid}".format( room_jid=room_jid))) else: log.info( - _(u"We have received {num_mess} message(s) in {room_jid} while " - u"offline.") + _("We have received {num_mess} message(s) in {room_jid} while " + "offline.") .format(num_mess=count, room_jid=room_jid)) # for legacy history, the following steps are done in receivedSubject but for MAM @@ -1017,7 +1017,7 @@ if user is None: nick = presence.sender.resource if not nick: - 
log.warning(_(u"missing nick in presence: {xml}").format( + log.warning(_("missing nick in presence: {xml}").format( xml = presence.toElement().toXml())) return user = muc.User(nick, presence.entity) @@ -1061,7 +1061,7 @@ # we have received our own nick, # this mean that the full room roster was received self.changeRoomState(room, ROOM_STATE_SELF_PRESENCE) - log.debug(u"room {room} joined with nick {nick}".format( + log.debug("room {room} joined with nick {nick}".format( room=room.occupantJID.userhost(), nick=user.nick)) # we set type so we don't have to use a deferred # with disco to check entity type @@ -1070,9 +1070,9 @@ profile_key=self.client.profile) elif room.state not in (ROOM_STATE_OCCUPANTS, ROOM_STATE_LIVE): log.warning( - u"Received user presence data in a room before its initialisation " - u"(current state: {state})," - u"this is not standard! Ignoring it: {room} ({nick})".format( + "Received user presence data in a room before its initialisation " + "(current state: {state})," + "this is not standard! Ignoring it: {room} ({nick})".format( state=room.state, room=room.roomJID.userhost(), nick=user.nick)) @@ -1084,7 +1084,7 @@ self._changing_nicks.remove(user.nick) except KeyError: # this is a new user - log.debug(_(u"user {nick} has joined room {room_id}").format( + log.debug(_("user {nick} has joined room {room_id}").format( nick=user.nick, room_id=room.occupantJID.userhost())) if not self.host.trigger.point( "MUC user joined", room, user, self.client.profile): @@ -1100,8 +1100,8 @@ mess_data = { # dict is similar to the one used in client.onMessage "from": room.roomJID, "to": self.client.jid, - "uid": unicode(uuid.uuid4()), - "message": {'': D_(u"=> {} has joined the room").format(user.nick)}, + "uid": str(uuid.uuid4()), + "message": {'': D_("=> {} has joined the room").format(user.nick)}, "subject": {}, "type": C.MESS_TYPE_INFO, "extra": extra, @@ -1120,12 +1120,12 @@ if user.nick == room.nick: # we left the room room_jid_s = room.roomJID.userhost() - log.info(_(u"Room ({room}) left ({profile})").format( + log.info(_("Room ({room}) left ({profile})").format( room = room_jid_s, profile = self.client.profile)) self.host.memory.delEntityCache(room.roomJID, profile_key=self.client.profile) self.host.bridge.mucRoomLeft(room.roomJID.userhost(), self.client.profile) elif room.state != ROOM_STATE_LIVE: - log.warning(u"Received user presence data in a room before its initialisation (current state: {state})," + log.warning("Received user presence data in a room before its initialisation (current state: {state})," "this is not standard! 
Ignoring it: {room} ({nick})".format( state=room.state, room=room.roomJID.userhost(), @@ -1134,7 +1134,7 @@ else: if not room.fully_joined.called: return - log.debug(_(u"user {nick} left room {room_id}").format(nick=user.nick, room_id=room.occupantJID.userhost())) + log.debug(_("user {nick} left room {room_id}").format(nick=user.nick, room_id=room.occupantJID.userhost())) extra = {'info_type': ROOM_USER_LEFT, 'user_affiliation': user.affiliation, 'user_role': user.role, @@ -1145,8 +1145,8 @@ mess_data = { # dict is similar to the one used in client.onMessage "from": room.roomJID, "to": self.client.jid, - "uid": unicode(uuid.uuid4()), - "message": {'': D_(u"<= {} has left the room").format(user.nick)}, + "uid": str(uuid.uuid4()), + "message": {'': D_("<= {} has left the room").format(user.nick)}, "subject": {}, "type": C.MESS_TYPE_INFO, "extra": extra, @@ -1181,7 +1181,7 @@ ## messages ## def receivedGroupChat(self, room, user, body): - log.debug(u'receivedGroupChat: room=%s user=%s body=%s' % (room.roomJID.full(), user, body)) + log.debug('receivedGroupChat: room=%s user=%s body=%s' % (room.roomJID.full(), user, body)) def _addToHistory(self, __, user, message): try: @@ -1190,7 +1190,7 @@ mess_data = message.element._mess_data except AttributeError: mess_data = self.client.messageProt.parseMessage(message.element) - if mess_data[u'message'] or mess_data[u'subject']: + if mess_data['message'] or mess_data['subject']: return self.host.memory.addToHistory(self.client, mess_data) else: return defer.succeed(None) @@ -1210,14 +1210,14 @@ """ if room.state != ROOM_STATE_SELF_PRESENCE: log.warning(_( - u"received history in unexpected state in room {room} (state: " - u"{state})").format(room = room.roomJID.userhost(), + "received history in unexpected state in room {room} (state: " + "{state})").format(room = room.roomJID.userhost(), state = room.state)) if not hasattr(room, "_history_d"): # XXX: this hack is due to buggy behaviour seen in the wild because of the # "mod_delay" prosody module being activated. This module add an # unexpected <delay> elements which break our workflow. 
- log.warning(_(u"storing the unexpected message anyway, to avoid loss")) + log.warning(_("storing the unexpected message anyway, to avoid loss")) # we have to restore URI which are stripped by wokkel parsing for c in message.element.elements(): if c.uri is None: @@ -1225,7 +1225,7 @@ mess_data = self.client.messageProt.parseMessage(message.element) message.element._mess_data = mess_data self._addToHistory(None, user, message) - if mess_data[u'message'] or mess_data[u'subject']: + if mess_data['message'] or mess_data['subject']: self.host.bridge.messageNew( *self.client.messageGetBridgeArgs(mess_data), profile=self.client.profile @@ -1248,7 +1248,7 @@ room, user = self._getRoomUser(message) if room is None: - log.warning(u"No room found for message: {message}" + log.warning("No room found for message: {message}" .format(message=message.toElement().toXml())) return @@ -1282,8 +1282,8 @@ del room._cache_presence for elem in cache: self.client.xmlstream.dispatch(elem) - for presence_data in cache_presence.itervalues(): - if not presence_data[u'show'] and not presence_data[u'status']: + for presence_data in cache_presence.values(): + if not presence_data['show'] and not presence_data['status']: # occupants are already sent in mucRoomJoined, so if we don't have # extra information like show or statuses, we can discard the signal continue @@ -1291,7 +1291,7 @@ self.userUpdatedStatus(**presence_data) def _historyEb(self, failure_, room): - log.error(u"Error while managing history: {}".format(failure_)) + log.error("Error while managing history: {}".format(failure_)) self._historyCb(None, room) def receivedSubject(self, room, user, subject): @@ -1304,7 +1304,7 @@ room._history_d.addCallbacks(self._historyCb, self._historyEb, [room], errbackArgs=[room]) else: # the subject has been changed - log.debug(_(u"New subject for room ({room_id}): {subject}").format(room_id = room.roomJID.full(), subject = subject)) + log.debug(_("New subject for room ({room_id}): {subject}").format(room_id = room.roomJID.full(), subject = subject)) self.host.bridge.mucRoomNewSubject(room.roomJID.userhost(), subject, self.client.profile) ## disco ##
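The bridge registrations above rename the keyword argument "async" to "async_" because "async" is a hard keyword from Python 3.7 on: both defining and passing a parameter with that name are now syntax errors. A simplified stand-in for the bridge method registration, showing why the rename is required rather than cosmetic:

    registered = {}

    def add_method(name, in_sign="", out_sign="", method=None, async_=False):
        # Under Python 2 this parameter was simply called "async"; with
        # Python 3.7+ both "def f(async=False)" and "f(async=True)" fail to parse.
        registered[name] = {"method": method, "async": async_}   # plain string key is fine

    add_method("mucLeave", in_sign="ss", method=lambda *a: None, async_=True)
    print(registered["mucLeave"]["async"])   # -> True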
--- a/sat/plugins/plugin_xep_0047.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0047.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing gateways (xep-0047) @@ -32,7 +32,7 @@ from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer import base64 @@ -84,7 +84,7 @@ @param client: %(doc_client)s """ log.info( - u"In-Band Bytestream: TimeOut reached for id {sid} [{profile}]".format( + "In-Band Bytestream: TimeOut reached for id {sid} [{profile}]".format( sid=sid, profile=client.profile ) ) @@ -101,7 +101,7 @@ try: session = client.xep_0047_current_stream[sid] except KeyError: - log.warning(u"kill id called on a non existant id") + log.warning("kill id called on a non existant id") return try: @@ -141,7 +141,7 @@ @return (dict): session data """ if sid in client.xep_0047_current_stream: - raise exceptions.ConflictError(u"A session with this id already exists !") + raise exceptions.ConflictError("A session with this id already exists !") session_data = client.xep_0047_current_stream[sid] = { "id": sid, DEFER_KEY: defer.Deferred(), @@ -159,16 +159,16 @@ @param iq_elt(domish.Element): the whole <iq> stanza """ - log.debug(_(u"IBB stream opening")) + log.debug(_("IBB stream opening")) iq_elt.handled = True - open_elt = iq_elt.elements(NS_IBB, "open").next() + open_elt = next(iq_elt.elements(NS_IBB, "open")) block_size = open_elt.getAttribute("block-size") sid = open_elt.getAttribute("sid") stanza = open_elt.getAttribute("stanza", "iq") if not sid or not block_size or int(block_size) > 65535: return self._sendError("not-acceptable", sid or None, iq_elt, client) if not sid in client.xep_0047_current_stream: - log.warning(_(u"Ignoring unexpected IBB transfer: %s" % sid)) + log.warning(_("Ignoring unexpected IBB transfer: %s" % sid)) return self._sendError("not-acceptable", sid or None, iq_elt, client) session_data = client.xep_0047_current_stream[sid] if session_data["to"] != jid.JID(iq_elt["from"]): @@ -204,7 +204,7 @@ """ iq_elt.handled = True log.debug(_("IBB stream closing")) - close_elt = iq_elt.elements(NS_IBB, "close").next() + close_elt = next(iq_elt.elements(NS_IBB, "close")) # XXX: this observer is only triggered on valid sid, so we don't need to check it sid = close_elt["sid"] @@ -219,13 +219,13 @@ @param element(domish.Element): <iq> or <message> stanza """ element.handled = True - data_elt = element.elements(NS_IBB, "data").next() + data_elt = next(element.elements(NS_IBB, "data")) sid = data_elt["sid"] try: session_data = client.xep_0047_current_stream[sid] except KeyError: - log.warning(_(u"Received data for an unknown session id")) + log.warning(_("Received data for an unknown session id")) return self._sendError("item-not-found", None, element, client) from_jid = session_data["to"] @@ -234,7 +234,7 @@ if from_jid.full() != element["from"]: log.warning( _( - u"sended jid inconsistency (man in the middle attack attempt ?)\ninitial={initial}\ngiven={given}" + "sended jid inconsistency (man in the middle attack attempt ?)\ninitial={initial}\ngiven={given}" ).format(initial=from_jid, given=element["from"]) ) if element.name == "iq": @@ -243,7 +243,7 @@ session_data["seq"] = (session_data["seq"] + 1) % 65535 if int(data_elt.getAttribute("seq", -1)) != session_data["seq"]: - log.warning(_(u"Sequence error")) + log.warning(_("Sequence error")) if element.name == "iq": reason = "not-acceptable" self._sendError(reason, sid, 
element, client) @@ -258,7 +258,7 @@ stream_object.write(base64.b64decode(str(data_elt))) except TypeError: # The base64 data is invalid - log.warning(_(u"Invalid base64 data")) + log.warning(_("Invalid base64 data")) if element.name == "iq": self._sendError("not-acceptable", sid, element, client) self.terminateStream(session_data, client, reason) @@ -279,7 +279,7 @@ """ iq_elt = error.StanzaError(error_condition).toResponse(iq_elt) log.warning( - u"Error while managing in-band bytestream session, cancelling: {}".format( + "Error while managing in-band bytestream session, cancelling: {}".format( error_condition ) ) @@ -334,7 +334,7 @@ next_iq_elt["to"] = session_data["to"].full() data_elt = next_iq_elt.addElement((NS_IBB, "data")) seq = session_data["seq"] = (session_data["seq"] + 1) % 65535 - data_elt["seq"] = unicode(seq) + data_elt["seq"] = str(seq) data_elt["sid"] = session_data["id"] data_elt.addContent(base64.b64encode(buffer_)) args = [session_data, client] @@ -345,9 +345,9 @@ def _IQDataStreamEb(self, failure, session_data, client): if failure.check(error.StanzaError): - log.warning(u"IBB transfer failed: {}".format(failure.value)) + log.warning("IBB transfer failed: {}".format(failure.value)) else: - log.error(u"IBB transfer failed: {}".format(failure.value)) + log.error("IBB transfer failed: {}".format(failure.value)) self.terminateStream(session_data, client, "IQ_ERROR") def terminateStream(self, session_data, client, failure_reason=None): @@ -366,8 +366,8 @@ self._killSession(session_data["id"], client, failure_reason) +@implementer(iwokkel.IDisco) class XEP_0047_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, parent): self.plugin_parent = parent
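In Python 3 the base64 helpers used for IBB <data/> chunks work on bytes: b64encode() takes and returns bytes, b64decode() accepts str or bytes and returns bytes. A sketch of the round trip, including the .decode("ascii") step that is needed whenever the encoded chunk has to be inserted as str into an XML text node (whether the plugin needs that extra step at its addContent() call is left to the actual code):

    import base64

    chunk = b"\x00\x01binary payload\xff"
    encoded = base64.b64encode(chunk).decode("ascii")   # str, safe as element content
    decoded = base64.b64decode(encoded)                 # bytes again, written to the stream object
    assert decoded == chunk
    print(len(encoded), encoded.endswith("="))          # -> 24 True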
--- a/sat/plugins/plugin_xep_0048.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0048.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Bookmarks (xep-0048) @@ -74,7 +74,7 @@ in_sign="sss", out_sign="a{sa{sa{ss}}}", method=self._bookmarksList, - async=True, + async_=True, ) host.bridge.addMethod( "bookmarksRemove", @@ -82,7 +82,7 @@ in_sign="ssss", out_sign="", method=self._bookmarksRemove, - async=True, + async_=True, ) host.bridge.addMethod( "bookmarksAdd", @@ -90,7 +90,7 @@ in_sign="ssa{ss}ss", out_sign="", method=self._bookmarksAdd, - async=True, + async_=True, ) try: self.private_plg = self.host.plugins["XEP-0049"] @@ -115,7 +115,7 @@ for bookmarks in (local, private, pubsub): if bookmarks is not None: - for (room_jid, data) in bookmarks[XEP_0048.MUC_TYPE].items(): + for (room_jid, data) in list(bookmarks[XEP_0048.MUC_TYPE].items()): if data.get("autojoin", "false") == "true": nick = data.get("nick", client.jid.user) self.host.plugins["XEP-0045"].join(client, room_jid, nick, {}) @@ -196,8 +196,8 @@ if conference_elt.hasAttribute(attr): data[attr] = conference_elt[attr] try: - data["nick"] = unicode( - conference_elt.elements(NS_BOOKMARKS, "nick").next() + data["nick"] = str( + next(conference_elt.elements(NS_BOOKMARKS, "nick")) ) except StopIteration: pass @@ -264,7 +264,7 @@ d = self.host.plugins["XEP-0045"].join(client, room_jid, nick, {}) def join_eb(failure): - log.warning(u"Error while trying to join room: {}".format(failure)) + log.warning("Error while trying to join room: {}".format(failure)) # FIXME: failure are badly managed in plugin XEP-0045. Plugin XEP-0045 need to be fixed before managing errors correctly here return {} @@ -292,12 +292,12 @@ if bookmarks is None: continue for (room_jid, data) in sorted( - bookmarks[XEP_0048.MUC_TYPE].items(), + list(bookmarks[XEP_0048.MUC_TYPE].items()), key=lambda item: item[1].get("name", item[0].user), ): room_jid_s = room_jid.full() adv_list.setRowIndex( - u"%s %s" % (room_jid_s, data.get("nick") or client.jid.user) + "%s %s" % (room_jid_s, data.get("nick") or client.jid.user) ) xmlui.addText(data.get("name", "")) xmlui.addJid(room_jid) @@ -354,7 +354,7 @@ @param profile_key: %(doc_profile_key)s """ assert storage_type in ("auto", "pubsub", "private", "local") - if type_ == XEP_0048.URL_TYPE and {"autojoin", "nick"}.intersection(data.keys()): + if type_ == XEP_0048.URL_TYPE and {"autojoin", "nick"}.intersection(list(data.keys())): raise ValueError("autojoin or nick can't be used with URLs") client = self.host.getClient(profile_key) if storage_type == "auto":
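2to3 wraps dict.items()/keys()/values() in list(...) throughout this commit, as in the bookmark loops above. In Python 3 these methods return lazy views, and the copy is only actually needed when the dictionary is mutated during iteration or the result is indexed; sorted() already makes its own copy. Illustration with hypothetical bookmark data:

    bookmarks = {"room1@muc.example.org": {"autojoin": "true"},
                 "room2@muc.example.org": {"autojoin": "false"}}

    # a view is enough for read-only iteration; sorted() copies anyway
    for room, data in sorted(bookmarks.items()):
        pass

    # a real copy is needed as soon as the loop mutates the dict
    for room, data in list(bookmarks.items()):
        if data["autojoin"] == "false":
            del bookmarks[room]         # would raise RuntimeError over a plain view

    print(list(bookmarks))              # -> ['room1@muc.example.org']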
--- a/sat/plugins/plugin_xep_0049.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0049.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0049 @@ -74,7 +74,7 @@ query_elt.addElement(node_name, namespace) def getCb(answer_iq_elt): - answer_query_elt = answer_iq_elt.elements(XEP_0049.NS_PRIVATE, "query").next() + answer_query_elt = next(answer_iq_elt.elements(XEP_0049.NS_PRIVATE, "query")) return answer_query_elt.firstChildElement() d = iq_elt.send()
--- a/sat/plugins/plugin_xep_0050.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0050.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Ad-Hoc Commands (XEP-0050) @@ -32,7 +32,7 @@ from uuid import uuid4 from sat.tools import xml_tools -from zope.interface import implements +from zope.interface import implementer try: from twisted.words.protocols.xmlstream import XMPPHandler @@ -71,7 +71,7 @@ C.PI_PROTOCOLS: ["XEP-0050"], C.PI_MAIN: "XEP_0050", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"""Implementation of Ad-Hoc Commands"""), + C.PI_DESCRIPTION: _("""Implementation of Ad-Hoc Commands"""), } @@ -84,8 +84,8 @@ self.callback_error = error_const +@implementer(iwokkel.IDisco) class AdHocCommand(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, callback, label, node, features, timeout, allowed_jids, allowed_groups, allowed_magics, forbidden_jids, @@ -130,7 +130,7 @@ try: allowed.update(self.client.roster.getJidsFromGroup(group)) except exceptions.UnknownGroupError: - log.warning(_(u"The groups [{group}] is unknown for profile [{profile}])") + log.warning(_("The groups [{group}] is unknown for profile [{profile}])") .format(group=group, profile=self.client.profile)) if requestor.userhostJID() in allowed: return True @@ -204,7 +204,7 @@ xmpp_condition, cmd_condition = error_constant iq_elt = jabber.error.StanzaError(xmpp_condition).toResponse(request) if cmd_condition: - error_elt = iq_elt.elements(None, "error").next() + error_elt = next(iq_elt.elements(None, "error")) error_elt.addElement(cmd_condition, NS_COMMANDS) self.client.send(iq_elt) del self.sessions[session_id] @@ -293,7 +293,7 @@ in_sign="sss", out_sign="s", method=self._run, - async=True, + async_=True, ) host.bridge.addMethod( "adHocList", @@ -301,7 +301,7 @@ in_sign="ss", out_sign="s", method=self._listUI, - async=True, + async_=True, ) self.__requesting_id = host.registerCallback( self._requestingEntity, with_data=True @@ -312,7 +312,7 @@ security_limit=2, help_string=D_("Execute ad-hoc commands"), ) - host.registerNamespace(u'commands', NS_COMMANDS) + host.registerNamespace('commands', NS_COMMANDS) def getHandler(self, client): return XEP_0050_handler(self) @@ -354,9 +354,9 @@ def getCommandElt(self, iq_elt): try: - return iq_elt.elements(NS_COMMANDS, "command").next() + return next(iq_elt.elements(NS_COMMANDS, "command")) except StopIteration: - raise exceptions.NotFound(_(u"Missing command element")) + raise exceptions.NotFound(_("Missing command element")) def adHocError(self, error_type): """Shortcut to raise an AdHocError @@ -389,7 +389,7 @@ return C.XMLUI_DATA_LVL_WARNING else: if type_ != "info": - log.warning(_(u"Invalid note type [%s], using info") % type_) + log.warning(_("Invalid note type [%s], using info") % type_) return C.XMLUI_DATA_LVL_INFO def _mergeNotes(self, notes): @@ -403,7 +403,7 @@ C.XMLUI_DATA_LVL_WARNING: "%s: " % _("WARNING"), C.XMLUI_DATA_LVL_ERROR: "%s: " % _("ERROR"), } - return [u"%s%s" % (lvl_map[lvl], msg) for lvl, msg in notes] + return ["%s%s" % (lvl_map[lvl], msg) for lvl, msg in notes] def _commandsAnswer2XMLUI(self, iq_elt, session_id, session_data): """Convert command answer to an ui for frontend @@ -429,7 +429,7 @@ notes.append( ( self._getDataLvl(note_elt.getAttribute("type", "info")), - unicode(note_elt), + str(note_elt), ) ) for data_elt in command_elt.elements(data_form.NS_X_DATA, "x"): @@ -461,7 +461,7 @@ C.XMLUI_DIALOG, dialog_opt={ C.XMLUI_DATA_TYPE: 
C.XMLUI_DIALOG_NOTE, - C.XMLUI_DATA_MESS: u"\n".join(self._mergeNotes(notes)), + C.XMLUI_DATA_MESS: "\n".join(self._mergeNotes(notes)), C.XMLUI_DATA_LVL: dlg_level, }, session_id=session_id, @@ -565,7 +565,7 @@ status = XEP_0050.STATUS.EXECUTING form = data_form.Form("form", title=_("status selection")) show_options = [ - data_form.Option(name, label) for name, label in SHOWS.items() + data_form.Option(name, label) for name, label in list(SHOWS.items()) ] field = data_form.Field( "list-single", "show", options=show_options, required=True @@ -578,7 +578,7 @@ elif len(actions) == 2: # we should have the answer here try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) answer_form = data_form.Form.fromElement(x_elt) show = answer_form["show"] except (KeyError, StopIteration): @@ -593,7 +593,7 @@ # job done, we can end the session status = XEP_0050.STATUS.COMPLETED payload = None - note = (self.NOTE.INFO, _(u"Status updated")) + note = (self.NOTE.INFO, _("Status updated")) else: self.adHocError(XEP_0050.ERROR.INTERNAL) @@ -730,24 +730,24 @@ def onCmdRequest(self, request, client): request.handled = True requestor = jid.JID(request["from"]) - command_elt = request.elements(NS_COMMANDS, "command").next() + command_elt = next(request.elements(NS_COMMANDS, "command")) action = command_elt.getAttribute("action", self.ACTION.EXECUTE) node = command_elt.getAttribute("node") if not node: - client.sendError(request, u"bad-request") + client.sendError(request, "bad-request") return sessionid = command_elt.getAttribute("sessionid") commands = client._XEP_0050_commands try: command = commands[node] except KeyError: - client.sendError(request, u"item-not-found") + client.sendError(request, "item-not-found") return command.onRequest(command_elt, requestor, action, sessionid) +@implementer(iwokkel.IDisco) class XEP_0050_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent @@ -772,7 +772,7 @@ ret = [] if nodeIdentifier == NS_COMMANDS: commands = self.client._XEP_0050_commands - for command in commands.values(): + for command in list(commands.values()): if command.isAuthorised(requestor): ret.append( disco.DiscoItem(self.parent.jid, command.node, command.getName())
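The ad-hoc command answer handling above reduces each <note/> element to a (level, text) pair, then merges the levels into the text before joining everything for a single dialog. A simplified sketch with inlined constants (the real values live in sat.core.constants and the XMLUI helpers):

    LVL_INFO, LVL_WARNING, LVL_ERROR = "info", "warning", "error"

    def data_level(note_type):
        if note_type == "error":
            return LVL_ERROR
        if note_type == "warn":          # XEP-0050 uses "warn", the UI level is "warning"
            return LVL_WARNING
        return LVL_INFO                  # unknown types fall back to info

    def merge_notes(notes):
        prefix = {LVL_INFO: "", LVL_WARNING: "WARNING: ", LVL_ERROR: "ERROR: "}
        return "\n".join("%s%s" % (prefix[lvl], msg) for lvl, msg in notes)

    notes = [(data_level("info"), "Status updated"),
             (data_level("warn"), "session will expire soon")]
    print(merge_notes(notes))
    # Status updated
    # WARNING: session will expire soon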
--- a/sat/plugins/plugin_xep_0054.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0054.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0054 @@ -28,7 +28,7 @@ from twisted.words.xish import domish from twisted.python.failure import Failure -from zope.interface import implements +from zope.interface import implementer from wokkel import disco, iwokkel @@ -42,9 +42,9 @@ from PIL import Image except: raise exceptions.MissingModule( - u"Missing module pillow, please download/install it from https://python-pillow.github.io" + "Missing module pillow, please download/install it from https://python-pillow.github.io" ) -from cStringIO import StringIO +from io import StringIO try: from twisted.words.protocols.xmlstream import XMPPHandler @@ -84,27 +84,27 @@ # - get missing values def __init__(self, host): - log.info(_(u"Plugin XEP_0054 initialization")) + log.info(_("Plugin XEP_0054 initialization")) self.host = host host.bridge.addMethod( - u"avatarGet", - u".plugin", - in_sign=u"sbbs", - out_sign=u"s", + "avatarGet", + ".plugin", + in_sign="sbbs", + out_sign="s", method=self._getAvatar, - async=True, + async_=True, ) host.bridge.addMethod( - u"avatarSet", - u".plugin", - in_sign=u"ss", - out_sign=u"", + "avatarSet", + ".plugin", + in_sign="ss", + out_sign="", method=self._setAvatar, - async=True, + async_=True, ) - host.trigger.add(u"presence_available", self.presenceAvailableTrigger) - host.memory.setSignalOnUpdate(u"avatar") - host.memory.setSignalOnUpdate(u"nick") + host.trigger.add("presence_available", self.presenceAvailableTrigger) + host.memory.setSignalOnUpdate("avatar") + host.memory.setSignalOnUpdate("nick") def getHandler(self, client): return XEP_0054_handler(self) @@ -143,7 +143,7 @@ try: avatar_hash = client._cache_0054[client.jid.userhost()]["avatar"] except KeyError: - log.info(u"No avatar in cache for {}".format(client.jid.userhost())) + log.info("No avatar in cache for {}".format(client.jid.userhost())) return True x_elt = domish.Element((NS_VCARD_UPDATE, "x")) x_elt.addElement("photo", content=avatar_hash) @@ -165,14 +165,14 @@ # this is not possible with vcard-tmp, but it is with XEP-0084). # Loading avatar on demand per jid may be an option to investigate. client = self.host.getClient(profile) - for jid_s, data in client._cache_0054.iteritems(): + for jid_s, data in client._cache_0054.items(): jid_ = jid.JID(jid_s) for name in CACHED_DATA: try: value = data[name] if value is None: log.error( - u"{name} value for {jid_} is None, ignoring".format( + "{name} value for {jid_} is None, ignoring".format( name=name, jid_=jid_ ) ) @@ -232,7 +232,7 @@ """Parse a <PHOTO> photo_elt and save the picture""" # XXX: this method is launched in a separate thread try: - mime_type = unicode(photo_elt.elements(NS_VCARD, "TYPE").next()) + mime_type = str(next(photo_elt.elements(NS_VCARD, "TYPE"))) except StopIteration: mime_type = None else: @@ -248,7 +248,7 @@ else: # TODO: handle other image formats (svg?) 
log.warning( - u"following avatar image format is not handled: {type} [{jid}]".format( + "following avatar image format is not handled: {type} [{jid}]".format( type=mime_type, jid=entity_jid.full() ) ) @@ -256,31 +256,31 @@ ext = mimetypes.guess_extension(mime_type, strict=False) assert ext is not None - if ext == u".jpe": - ext = u".jpg" + if ext == ".jpe": + ext = ".jpg" log.debug( - u"photo of type {type} with extension {ext} found [{jid}]".format( + "photo of type {type} with extension {ext} found [{jid}]".format( type=mime_type, ext=ext, jid=entity_jid.full() ) ) try: - buf = str(photo_elt.elements(NS_VCARD, "BINVAL").next()) + buf = str(next(photo_elt.elements(NS_VCARD, "BINVAL"))) except StopIteration: - log.warning(u"BINVAL element not found") + log.warning("BINVAL element not found") raise Failure(exceptions.NotFound()) if not buf: - log.warning(u"empty avatar for {jid}".format(jid=entity_jid.full())) + log.warning("empty avatar for {jid}".format(jid=entity_jid.full())) raise Failure(exceptions.NotFound()) if mime_type is None: - log.warning(_(u"no MIME type found for {entity}'s avatar, assuming image/png") + log.warning(_("no MIME type found for {entity}'s avatar, assuming image/png") .format(entity=entity_jid.full())) if buf[:8] != b'\x89\x50\x4e\x47\x0d\x0a\x1a\x0a': - log.warning(u"this is not a PNG file, ignoring it") + log.warning("this is not a PNG file, ignoring it") raise Failure(exceptions.DataError()) else: - mime_type = u"image/png" + mime_type = "image/png" - log.debug(_(u"Decoding binary")) + log.debug(_("Decoding binary")) decoded = b64decode(buf) del buf image_hash = sha1(decoded).hexdigest() @@ -297,21 +297,21 @@ @defer.inlineCallbacks def vCard2Dict(self, client, vcard, entity_jid): """Convert a VCard to a dict, and save binaries""" - log.debug((u"parsing vcard")) + log.debug(("parsing vcard")) vcard_dict = {} for elem in vcard.elements(): if elem.name == "FN": - vcard_dict["fullname"] = unicode(elem) + vcard_dict["fullname"] = str(elem) elif elem.name == "NICKNAME": - vcard_dict["nick"] = unicode(elem) + vcard_dict["nick"] = str(elem) self.updateCache(client, entity_jid, "nick", vcard_dict["nick"]) elif elem.name == "URL": - vcard_dict["website"] = unicode(elem) + vcard_dict["website"] = str(elem) elif elem.name == "EMAIL": - vcard_dict["email"] = unicode(elem) + vcard_dict["email"] = str(elem) elif elem.name == "BDAY": - vcard_dict["birthday"] = unicode(elem) + vcard_dict["birthday"] = str(elem) elif elem.name == "PHOTO": # TODO: handle EXTVAL try: @@ -322,20 +322,20 @@ avatar_hash = "" vcard_dict["avatar"] = avatar_hash except Exception as e: - log.error(u"avatar saving error: {}".format(e)) + log.error("avatar saving error: {}".format(e)) avatar_hash = None else: vcard_dict["avatar"] = avatar_hash self.updateCache(client, entity_jid, "avatar", avatar_hash) else: - log.debug(u"FIXME: [{}] VCard tag is not managed yet".format(elem.name)) + log.debug("FIXME: [{}] VCard tag is not managed yet".format(elem.name)) # if a data in cache doesn't exist anymore, we need to delete it # so we check CACHED_DATA no gotten (i.e. 
not in vcard_dict keys) # and we reset them - for datum in CACHED_DATA.difference(vcard_dict.keys()): + for datum in CACHED_DATA.difference(list(vcard_dict.keys())): log.debug( - u"reseting vcard datum [{datum}] for {entity}".format( + "reseting vcard datum [{datum}] for {entity}".format( datum=datum, entity=entity_jid.full() ) ) @@ -357,14 +357,14 @@ def _vCardEb(self, failure_, to_jid, client): """Called when something is wrong with registration""" log.warning( - u"Can't get vCard for {jid}: {failure}".format( + "Can't get vCard for {jid}: {failure}".format( jid=to_jid.full, failure=failure_ ) ) self.updateCache(client, to_jid, "avatar", None) def _getVcardElt(self, iq_elt): - return iq_elt.elements(NS_VCARD, "vCard").next() + return next(iq_elt.elements(NS_VCARD, "vCard")) def getCardRaw(self, client, entity_jid): """get raw vCard XML @@ -372,7 +372,7 @@ params are as in [getCard] """ entity_jid = self.getBareOrFull(client, entity_jid) - log.debug(u"Asking for {}'s VCard".format(entity_jid.full())) + log.debug("Asking for {}'s VCard".format(entity_jid.full())) reg_request = client.IQ("get") reg_request["from"] = client.jid.full() reg_request["to"] = entity_jid.full() @@ -435,7 +435,7 @@ raise KeyError else: # avatar has already been checked but it is not set - full_path = u"" + full_path = "" except KeyError: # avatar is not in cache if cache_only: @@ -462,11 +462,11 @@ @param entity(jid.JID): entity to get nick from @return(unicode, None): nick or None if not found """ - nick = self.getCache(client, entity, u"nick") + nick = self.getCache(client, entity, "nick") if nick is not None: defer.returnValue(nick) yield self.getCard(client, entity) - defer.returnValue(self.getCache(client, entity, u"nick")) + defer.returnValue(self.getCache(client, entity, "nick")) @defer.inlineCallbacks def setNick(self, client, nick): @@ -483,24 +483,24 @@ else: raise e try: - nickname_elt = next(vcard_elt.elements(NS_VCARD, u"NICKNAME")) + nickname_elt = next(vcard_elt.elements(NS_VCARD, "NICKNAME")) except StopIteration: pass else: vcard_elt.children.remove(nickname_elt) - nickname_elt = vcard_elt.addElement((NS_VCARD, u"NICKNAME"), content=nick) + nickname_elt = vcard_elt.addElement((NS_VCARD, "NICKNAME"), content=nick) iq_elt = client.IQ() vcard_elt = iq_elt.addChild(vcard_elt) yield iq_elt.send() - self.updateCache(client, jid_, u"nick", unicode(nick)) + self.updateCache(client, jid_, "nick", str(nick)) def _buildSetAvatar(self, client, vcard_elt, file_path): # XXX: this method is executed in a separate thread try: img = Image.open(file_path) except IOError: - return Failure(exceptions.DataError(u"Can't open image")) + return Failure(exceptions.DataError("Can't open image")) if img.size != AVATAR_DIM: img.thumbnail(AVATAR_DIM) @@ -549,7 +549,7 @@ else: # the vcard exists, we need to remove PHOTO element as we'll make a new one try: - photo_elt = next(vcard_elt.elements(NS_VCARD, u"PHOTO")) + photo_elt = next(vcard_elt.elements(NS_VCARD, "PHOTO")) except StopIteration: pass else: @@ -567,8 +567,8 @@ client.presence.available() # FIXME: should send the current presence, not always "available" ! 
+@implementer(iwokkel.IDisco) class XEP_0054_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent @@ -590,7 +590,7 @@ computed_hash = self.plugin_parent.getCache(client, entity, "avatar") if computed_hash != given_hash: log.warning( - u"computed hash differs from given hash for {entity}:\n" + "computed hash differs from given hash for {entity}:\n" "computed: {computed}\ngiven: {given}".format( entity=entity, computed=computed_hash, given=given_hash ) @@ -606,18 +606,18 @@ entity_jid = self.plugin_parent.getBareOrFull(client, jid.JID(presence["from"])) # FIXME: wokkel's data_form should be used here try: - x_elt = presence.elements(NS_VCARD_UPDATE, "x").next() + x_elt = next(presence.elements(NS_VCARD_UPDATE, "x")) except StopIteration: return try: - photo_elt = x_elt.elements(NS_VCARD_UPDATE, "photo").next() + photo_elt = next(x_elt.elements(NS_VCARD_UPDATE, "photo")) except StopIteration: return - hash_ = unicode(photo_elt).strip() + hash_ = str(photo_elt).strip() if hash_ == C.HASH_SHA1_EMPTY: - hash_ = u"" + hash_ = "" old_avatar = self.plugin_parent.getCache(client, entity_jid, "avatar") if old_avatar == hash_: @@ -627,13 +627,13 @@ file_path = client.cache.getFilePath(hash_) if file_path is None: log.error( - u"Avatar for [{}] should be in cache but it is not! We get it".format( + "Avatar for [{}] should be in cache but it is not! We get it".format( entity_jid.full() ) ) self.plugin_parent.getCard(client, entity_jid) else: - log.debug(u"avatar for {} already in cache".format(entity_jid.full())) + log.debug("avatar for {} already in cache".format(entity_jid.full())) return if not hash_: @@ -646,14 +646,14 @@ file_path = client.cache.getFilePath(hash_) if file_path is not None: log.debug( - u"New avatar found for [{}], it's already in cache, we use it".format( + "New avatar found for [{}], it's already in cache, we use it".format( entity_jid.full() ) ) self.plugin_parent.updateCache(client, entity_jid, "avatar", hash_) else: log.debug( - u"New avatar found for [{}], requesting vcard".format(entity_jid.full()) + "New avatar found for [{}], requesting vcard".format(entity_jid.full()) ) d = self.plugin_parent.getCard(client, entity_jid) d.addCallback(self._checkAvatarHash, client, entity_jid, hash_)
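The same two conversions come back throughout this plugin: element lookups move from the removed generator method elt.elements(...).next() to the next() builtin, and zope's implements() call inside the class body becomes the @implementer class decorator. A minimal standalone sketch, assuming wokkel and zope.interface are installed; the helper name, the handler class and the sample vCard are illustrative only:

    from twisted.words.xish import domish
    from zope.interface import implementer
    from wokkel import disco, iwokkel
    from wokkel.subprotocols import XMPPHandler

    NS_VCARD = "vcard-temp"

    def first_child(parent, ns, name):
        # .elements() returns a generator; Python 3 removes its .next() method,
        # so the next() builtin (here with a default) replaces the old call
        return next(parent.elements(ns, name), None)

    @implementer(iwokkel.IDisco)  # replaces "implements(iwokkel.IDisco)" in the class body
    class ExampleHandler(XMPPHandler):
        def getDiscoInfo(self, requestor, target, nodeIdentifier=""):
            return [disco.DiscoFeature(NS_VCARD)]

        def getDiscoItems(self, requestor, target, nodeIdentifier=""):
            return []

    vcard_elt = domish.Element((NS_VCARD, "vCard"))
    vcard_elt.addElement("NICKNAME", content="louise")
    print(str(first_child(vcard_elt, NS_VCARD, "NICKNAME")))  # unicode(elt) becomes str(elt)
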
--- a/sat/plugins/plugin_xep_0055.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0055.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Jabber Search (xep-0055) @@ -36,7 +36,7 @@ from twisted.words.protocols.xmlstream import XMPPHandler except ImportError: from wokkel.subprotocols import XMPPHandler -from zope.interface import implements +from zope.interface import implementer NS_SEARCH = "jabber:iq:search" @@ -84,7 +84,7 @@ in_sign="ss", out_sign="s", method=self._getFieldsUI, - async=True, + async_=True, ) host.bridge.addMethod( "searchRequest", @@ -92,7 +92,7 @@ in_sign="sa{ss}s", out_sign="s", method=self._searchRequest, - async=True, + async_=True, ) self.__search_menu_id = host.registerCallback(self._getMainUI, with_data=True) @@ -193,7 +193,7 @@ main_ui.addDivider("blank") # a blank line again after the button simple_data = { - key: value for key, value in data.iteritems() if key in (FIELD_SINGLE,) + key: value for key, value in data.items() if key in (FIELD_SINGLE,) } if simple_data: log.debug("Simple search with %s on %s" % (simple_data, service_jid)) @@ -234,11 +234,11 @@ # get the selected service service_jid_s = data.get("service_jid_extra", "") if not service_jid_s: - service_jid_s = data.get("service_jid", unicode(services[0])) + service_jid_s = data.get("service_jid", str(services[0])) log.debug("Refreshing search fields for %s" % service_jid_s) else: - service_jid_s = data.get(FIELD_CURRENT_SERVICE, unicode(services[0])) - services_s = [unicode(service) for service in services] + service_jid_s = data.get(FIELD_CURRENT_SERVICE, str(services[0])) + services_s = [str(service) for service in services] if service_jid_s not in services_s: services_s.append(service_jid_s) @@ -286,7 +286,7 @@ """ field_list = data_form.Form.fromElement(form_elt).fieldList adv_fields = [field.var for field in field_list if field.var] - adv_data = {key: value for key, value in data.iteritems() if key in adv_fields} + adv_data = {key: value for key, value in data.items() if key in adv_fields} xml_tools.dataForm2Widgets(main_ui, data_form.Form.fromElement(form_elt)) @@ -332,13 +332,13 @@ if "jid" in headers: # use XMLUI JidsListWidget to display the results values = {} for i in range(len(xmlui_data)): - header = headers.keys()[i % len(headers)] + header = list(headers.keys())[i % len(headers)] widget_type, widget_args, widget_kwargs = xmlui_data[i] value = widget_args[0] values.setdefault(header, []).append( jid.JID(value) if header == "jid" else value ) - main_ui.addJidsList(jids=values["jid"], name=D_(u"Search results")) + main_ui.addJidsList(jids=values["jid"], name=D_("Search results")) # TODO: also display the values other than JID else: xml_tools.XMLUIData2AdvancedList(main_ui, headers, xmlui_data) @@ -381,12 +381,12 @@ @return: domish.Element """ try: - query_elts = answer.elements("jabber:iq:search", "query").next() + query_elts = next(answer.elements("jabber:iq:search", "query")) except StopIteration: log.info(_("No query element found")) raise DataError # FIXME: StanzaError is probably more appropriate, check the RFC try: - form_elt = query_elts.elements(data_form.NS_X_DATA, "x").next() + form_elt = next(query_elts.elements(data_form.NS_X_DATA, "x")) except StopIteration: log.info(_("No data form found")) raise NotImplementedError( @@ -400,7 +400,7 @@ @param failure (defer.failure.Failure): twisted failure @raise: the unchanged defer.failure.Failure """ - log.info(_("Fields request 
failure: %s") % unicode(failure.getErrorMessage())) + log.info(_("Fields request failure: %s") % str(failure.getErrorMessage())) raise failure ## Do the search ## @@ -488,12 +488,12 @@ @return: domish.Element """ try: - query_elts = answer.elements("jabber:iq:search", "query").next() + query_elts = next(answer.elements("jabber:iq:search", "query")) except StopIteration: log.info(_("No query element found")) raise DataError # FIXME: StanzaError is probably more appropriate, check the RFC try: - form_elt = query_elts.elements(data_form.NS_X_DATA, "x").next() + form_elt = next(query_elts.elements(data_form.NS_X_DATA, "x")) except StopIteration: log.info(_("No data form found")) raise NotImplementedError( @@ -507,12 +507,12 @@ @param failure (defer.failure.Failure): twisted failure @raise: the unchanged defer.failure.Failure """ - log.info(_("Search request failure: %s") % unicode(failure.getErrorMessage())) + log.info(_("Search request failure: %s") % str(failure.getErrorMessage())) raise failure +@implementer(iwokkel.IDisco) class XEP_0055_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent, profile): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_xep_0059.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0059.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Result Set Management (XEP-0059) @@ -29,27 +29,27 @@ from wokkel import rsm from twisted.words.protocols.jabber import xmlstream -from zope.interface import implements +from zope.interface import implementer PLUGIN_INFO = { - C.PI_NAME: u"Result Set Management", - C.PI_IMPORT_NAME: u"XEP-0059", - C.PI_TYPE: u"XEP", - C.PI_PROTOCOLS: [u"XEP-0059"], - C.PI_MAIN: u"XEP_0059", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: _(u"""Implementation of Result Set Management"""), + C.PI_NAME: "Result Set Management", + C.PI_IMPORT_NAME: "XEP-0059", + C.PI_TYPE: "XEP", + C.PI_PROTOCOLS: ["XEP-0059"], + C.PI_MAIN: "XEP_0059", + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: _("""Implementation of Result Set Management"""), } -RSM_PREFIX = u"rsm_" +RSM_PREFIX = "rsm_" class XEP_0059(object): # XXX: RSM management is done directly in Wokkel. def __init__(self, host): - log.info(_(u"Result Set Management plugin initialization")) + log.info(_("Result Set Management plugin initialization")) def getHandler(self, client): return XEP_0059_handler() @@ -61,13 +61,13 @@ @return (rsm.RSMRequest, None): request with parsed arguments or None if no RSM arguments have been found """ - if int(extra.get(RSM_PREFIX + u'max', 0)) < 0: - raise ValueError(_(u"rsm_max can't be negative")) + if int(extra.get(RSM_PREFIX + 'max', 0)) < 0: + raise ValueError(_("rsm_max can't be negative")) rsm_args = {} - for arg in (u"max", u"after", u"before", u"index"): + for arg in ("max", "after", "before", "index"): try: - argname = "max_" if arg == u"max" else arg + argname = "max_" if arg == "max" else arg rsm_args[argname] = extra.pop(RSM_PREFIX + arg) except KeyError: continue @@ -95,18 +95,18 @@ if data is None: data = {} if rsm_response.first is not None: - data[u"rsm_first"] = rsm_response.first + data["rsm_first"] = rsm_response.first if rsm_response.last is not None: - data[u"rsm_last"] = rsm_response.last + data["rsm_last"] = rsm_response.last if rsm_response.index is not None: - data[u"rsm_index"] = unicode(rsm_response.index) + data["rsm_index"] = str(rsm_response.index) if rsm_response.index is not None: - data[u"rsm_index"] = unicode(rsm_response.index) + data["rsm_index"] = str(rsm_response.index) return data +@implementer(iwokkel.IDisco) class XEP_0059_handler(xmlstream.XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(rsm.NS_RSM)]
--- a/sat/plugins/plugin_xep_0060.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0060.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Publish-Subscribe (xep-0060) @@ -31,9 +31,9 @@ from wokkel import disco from wokkel import data_form from wokkel import generic -from zope.interface import implements +from zope.interface import implementer from collections import namedtuple -import urllib +import urllib.request, urllib.parse, urllib.error # XXX: sat_tmp.wokkel.pubsub is actually use instead of wokkel version # mam and rsm come from sat_tmp.wokkel too @@ -43,15 +43,15 @@ PLUGIN_INFO = { - C.PI_NAME: u"Publish-Subscribe", - C.PI_IMPORT_NAME: u"XEP-0060", - C.PI_TYPE: u"XEP", - C.PI_PROTOCOLS: [u"XEP-0060"], + C.PI_NAME: "Publish-Subscribe", + C.PI_IMPORT_NAME: "XEP-0060", + C.PI_TYPE: "XEP", + C.PI_PROTOCOLS: ["XEP-0060"], C.PI_DEPENDENCIES: [], - C.PI_RECOMMENDATIONS: [u"XEP-0059", u"XEP-0313"], - C.PI_MAIN: u"XEP_0060", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: _(u"""Implementation of PubSub Protocol"""), + C.PI_RECOMMENDATIONS: ["XEP-0059", "XEP-0313"], + C.PI_MAIN: "XEP_0060", + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: _("""Implementation of PubSub Protocol"""), } UNSPECIFIED = "unspecified error" @@ -82,10 +82,10 @@ ID_SINGLETON = "current" def __init__(self, host): - log.info(_(u"PubSub plugin initialization")) + log.info(_("PubSub plugin initialization")) self.host = host - self._rsm = host.plugins.get(u"XEP-0059") - self._mam = host.plugins.get(u"XEP-0313") + self._rsm = host.plugins.get("XEP-0059") + self._mam = host.plugins.get("XEP-0313") self._node_cb = {} # dictionnary of callbacks for node (key: node, value: list of callbacks) self.rt_sessions = sat_defer.RTDeferredSessions() host.bridge.addMethod( @@ -94,7 +94,7 @@ in_sign="ssa{ss}s", out_sign="s", method=self._createNode, - async=True, + async_=True, ) host.bridge.addMethod( "psNodeConfigurationGet", @@ -102,7 +102,7 @@ in_sign="sss", out_sign="a{ss}", method=self._getNodeConfiguration, - async=True, + async_=True, ) host.bridge.addMethod( "psNodeConfigurationSet", @@ -110,7 +110,7 @@ in_sign="ssa{ss}s", out_sign="", method=self._setNodeConfiguration, - async=True, + async_=True, ) host.bridge.addMethod( "psNodeAffiliationsGet", @@ -118,7 +118,7 @@ in_sign="sss", out_sign="a{ss}", method=self._getNodeAffiliations, - async=True, + async_=True, ) host.bridge.addMethod( "psNodeAffiliationsSet", @@ -126,7 +126,7 @@ in_sign="ssa{ss}s", out_sign="", method=self._setNodeAffiliations, - async=True, + async_=True, ) host.bridge.addMethod( "psNodeSubscriptionsGet", @@ -134,7 +134,7 @@ in_sign="sss", out_sign="a{ss}", method=self._getNodeSubscriptions, - async=True, + async_=True, ) host.bridge.addMethod( "psNodeSubscriptionsSet", @@ -142,7 +142,7 @@ in_sign="ssa{ss}s", out_sign="", method=self._setNodeSubscriptions, - async=True, + async_=True, ) host.bridge.addMethod( "psNodePurge", @@ -150,7 +150,7 @@ in_sign="sss", out_sign="", method=self._purgeNode, - async=True, + async_=True, ) host.bridge.addMethod( "psNodeDelete", @@ -158,7 +158,7 @@ in_sign="sss", out_sign="", method=self._deleteNode, - async=True, + async_=True, ) host.bridge.addMethod( "psNodeWatchAdd", @@ -166,7 +166,7 @@ in_sign="sss", out_sign="", method=self._addWatch, - async=False, + async_=False, ) host.bridge.addMethod( "psNodeWatchRemove", @@ -174,7 +174,7 @@ in_sign="sss", out_sign="", method=self._removeWatch, - async=False, + async_=False, ) 
host.bridge.addMethod( "psAffiliationsGet", @@ -182,7 +182,7 @@ in_sign="sss", out_sign="a{ss}", method=self._getAffiliations, - async=True, + async_=True, ) host.bridge.addMethod( "psItemsGet", @@ -190,7 +190,7 @@ in_sign="ssiassa{ss}s", out_sign="(asa{ss})", method=self._getItems, - async=True, + async_=True, ) host.bridge.addMethod( "psItemSend", @@ -198,7 +198,7 @@ in_sign="ssssa{ss}s", out_sign="s", method=self._sendItem, - async=True, + async_=True, ) host.bridge.addMethod( "psItemsSend", @@ -206,7 +206,7 @@ in_sign="ssasa{ss}s", out_sign="as", method=self._sendItems, - async=True, + async_=True, ) host.bridge.addMethod( "psRetractItem", @@ -214,7 +214,7 @@ in_sign="sssbs", out_sign="", method=self._retractItem, - async=True, + async_=True, ) host.bridge.addMethod( "psRetractItems", @@ -222,7 +222,7 @@ in_sign="ssasbs", out_sign="", method=self._retractItems, - async=True, + async_=True, ) host.bridge.addMethod( "psSubscribe", @@ -230,7 +230,7 @@ in_sign="ssa{ss}s", out_sign="s", method=self._subscribe, - async=True, + async_=True, ) host.bridge.addMethod( "psUnsubscribe", @@ -238,7 +238,7 @@ in_sign="sss", out_sign="", method=self._unsubscribe, - async=True, + async_=True, ) host.bridge.addMethod( "psSubscriptionsGet", @@ -246,7 +246,7 @@ in_sign="sss", out_sign="aa{ss}", method=self._subscriptions, - async=True, + async_=True, ) host.bridge.addMethod( "psSubscribeToMany", @@ -261,7 +261,7 @@ in_sign="ss", out_sign="(ua(sss))", method=self._manySubscribeRTResult, - async=True, + async_=True, ) host.bridge.addMethod( "psGetFromMany", @@ -276,7 +276,7 @@ in_sign="ss", out_sign="(ua(sssasa{ss}))", method=self._getFromManyRTResult, - async=True, + async_=True, ) # high level observer method @@ -303,7 +303,7 @@ except RuntimeError: log.info( _( - u"Can't retrieve pubsub_service from conf, we'll use first one that we find" + "Can't retrieve pubsub_service from conf, we'll use first one that we find" ) ) client.pubsub_service = yield self.host.findServiceEntity( @@ -359,8 +359,8 @@ mam_request = self._mam.parseExtra(extra, with_rsm=False) if mam_request is not None: - assert u"mam" not in extra - extra[u"mam"] = mam_request + assert "mam" not in extra + extra["mam"] = mam_request return Extra(rsm_request, extra) @@ -377,7 +377,7 @@ assert node is not None assert kwargs callbacks = self._node_cb.setdefault(node, {}) - for event, cb in kwargs.iteritems(): + for event, cb in kwargs.items(): event_name = event[:-3] assert event_name in C.PS_EVENTS callbacks.setdefault(event_name, []).append(cb) @@ -395,14 +395,14 @@ pass else: for callback in args: - for event, cb_list in registred_cb.iteritems(): + for event, cb_list in registred_cb.items(): try: cb_list.remove(callback) except ValueError: pass else: log.debug( - u"removed callback {cb} for event {event} on node {node}".format( + "removed callback {cb} for event {event} on node {node}".format( cb=callback, event=event, node=node ) ) @@ -412,7 +412,7 @@ del self._node_cb[node] return log.error( - u"Trying to remove inexistant callback {cb} for node {node}".format( + "Trying to remove inexistant callback {cb} for node {node}".format( cb=callback, node=node ) ) @@ -454,7 +454,7 @@ d = self.sendItem( client, service, nodeIdentifier, payload, item_id or None, extra ) - d.addCallback(lambda ret: ret or u"") + d.addCallback(lambda ret: ret or "") return d def _sendItems(self, service, nodeIdentifier, items, extra=None, @@ -464,7 +464,7 @@ try: items = [generic.parseXml(item.encode('utf-8')) for item in items] except Exception as e: - raise 
exceptions.DataError(_(u"Can't parse items: {msg}").format( + raise exceptions.DataError(_("Can't parse items: {msg}").format( msg=e)) d = self.sendItems( client, service, nodeIdentifier, items, extra @@ -504,7 +504,7 @@ """Parse publish result, and return ids given by pubsub service""" try: item_ids = [item['id'] - for item in iq_result.pubsub.publish.elements(pubsub.NS_PUBSUB, u'item')] + for item in iq_result.pubsub.publish.elements(pubsub.NS_PUBSUB, 'item')] except AttributeError: return [] return item_ids @@ -522,9 +522,9 @@ """ parsed_items = [] for item in items: - if item.name != u'item': - raise exceptions.DataError(_(u"Invalid item: {xml}").format(item.toXml())) - item_id = item.getAttribute(u"id") + if item.name != 'item': + raise exceptions.DataError(_("Invalid item: {xml}").format(item.toXml())) + item_id = item.getAttribute("id") parsed_items.append(pubsub.Item(id=item_id, payload=item.firstChildElement())) d = self.publish(client, service, nodeIdentifier, parsed_items) d.addCallback(self._publishCb) @@ -538,15 +538,15 @@ def _unwrapMAMMessage(self, message_elt): try: item_elt = ( - message_elt.elements(mam.NS_MAM, "result").next() + next(message_elt.elements(mam.NS_MAM, "result").next() .elements(C.NS_FORWARD, "forwarded").next() .elements(C.NS_CLIENT, "message").next() .elements("http://jabber.org/protocol/pubsub#event", "event").next() .elements("http://jabber.org/protocol/pubsub#event", "items").next() - .elements("http://jabber.org/protocol/pubsub#event", "item").next() + .elements("http://jabber.org/protocol/pubsub#event", "item")) ) except StopIteration: - raise exceptions.DataError(u"Can't find Item in MAM message element") + raise exceptions.DataError("Can't find Item in MAM message element") return item_elt def _getItems(self, service="", node="", max_items=10, item_ids=None, sub_id=None, @@ -593,7 +593,7 @@ if item_ids and max_items is not None: max_items = None if rsm_request and item_ids: - raise ValueError(u"items_id can't be used with rsm") + raise ValueError("items_id can't be used with rsm") if extra is None: extra = {} try: @@ -616,23 +616,23 @@ else: # if mam is requested, we have to do a totally different query if self._mam is None: - raise exceptions.NotFound(u"MAM (XEP-0313) plugin is not available") + raise exceptions.NotFound("MAM (XEP-0313) plugin is not available") if max_items is not None: - raise exceptions.DataError(u"max_items parameter can't be used with MAM") + raise exceptions.DataError("max_items parameter can't be used with MAM") if item_ids: - raise exceptions.DataError(u"items_ids parameter can't be used with MAM") + raise exceptions.DataError("items_ids parameter can't be used with MAM") if mam_query.node is None: mam_query.node = node elif mam_query.node != node: raise exceptions.DataError( - u"MAM query node is incoherent with getItems's node" + "MAM query node is incoherent with getItems's node" ) if mam_query.rsm is None: mam_query.rsm = rsm_request else: if mam_query.rsm != rsm_request: raise exceptions.DataError( - u"Conflict between RSM request and MAM's RSM request" + "Conflict between RSM request and MAM's RSM request" ) d = self._mam.getArchives(client, mam_query, service, self._unwrapMAMMessage) @@ -644,8 +644,8 @@ def subscribeEb(failure, service, node): failure.trap(error.StanzaError) log.warning( - u"Could not subscribe to node {} on service {}: {}".format( - node, unicode(service), unicode(failure.value) + "Could not subscribe to node {} on service {}: {}".format( + node, str(service), str(failure.value) ) ) @@ -670,13 
+670,13 @@ if rsm_request is not None and rsm_response is not None: metadata.update( { - u"rsm_" + key: value - for key, value in rsm_response.toDict().iteritems() + "rsm_" + key: value + for key, value in rsm_response.toDict().items() } ) if mam_response is not None: - for key, value in mam_response.iteritems(): - metadata[u"mam_" + key] = value + for key, value in mam_response.items(): + metadata["mam_" + key] = value return (items, metadata) d.addCallback(addMetadata) @@ -758,7 +758,7 @@ def serialize(form): # FIXME: better more generic dataform serialisation should be available in SàT - return {f.var: unicode(f.value) for f in form.fields.values()} + return {f.var: str(f.value) for f in list(form.fields.values())} d.addCallback(serialize) return d @@ -822,7 +822,7 @@ ) except StopIteration: raise ValueError( - _(u"Invalid result: missing <affiliations> element: {}").format( + _("Invalid result: missing <affiliations> element: {}").format( iq_elt.toXml ) ) @@ -833,7 +833,7 @@ } except KeyError: raise ValueError( - _(u"Invalid result: bad <affiliation> element: {}").format( + _("Invalid result: bad <affiliation> element: {}").format( iq_elt.toXml ) ) @@ -848,7 +848,7 @@ client, jid.JID(service_s) if service_s else None, nodeIdentifier ) d.addCallback( - lambda affiliations: {j.full(): a for j, a in affiliations.iteritems()} + lambda affiliations: {j.full(): a for j, a in affiliations.items()} ) return d @@ -865,7 +865,7 @@ ) except StopIteration: raise ValueError( - _(u"Invalid result: missing <affiliations> element: {}").format( + _("Invalid result: missing <affiliations> element: {}").format( iq_elt.toXml ) ) @@ -878,7 +878,7 @@ } except KeyError: raise ValueError( - _(u"Invalid result: bad <affiliation> element: {}").format( + _("Invalid result: bad <affiliation> element: {}").format( iq_elt.toXml ) ) @@ -892,7 +892,7 @@ ): client = self.host.getClient(profile_key) affiliations = { - jid.JID(jid_): affiliation for jid_, affiliation in affiliations.iteritems() + jid.JID(jid_): affiliation for jid_, affiliation in affiliations.items() } d = self.setNodeAffiliations( client, @@ -986,7 +986,7 @@ client = self.host.getClient(profile_key) service = None if not service else jid.JID(service) d = self.subscribe(client, service, nodeIdentifier, options=options or None) - d.addCallback(lambda subscription: subscription.subscriptionIdentifier or u"") + d.addCallback(lambda subscription: subscription.subscriptionIdentifier or "") return d def subscribe(self, client, service, nodeIdentifier, sub_jid=None, options=None): @@ -1065,8 +1065,8 @@ if item is not None: query_data.append(("item", item.encode("utf-8"))) return "xmpp:{service}?;{query}".format( - service=service.userhost(), query=urllib.urlencode(query_data) - ).decode("utf-8") + service=service.userhost(), query=urllib.parse.urlencode(query_data) + ) ## methods to manage several stanzas/jids at once ## @@ -1099,7 +1099,7 @@ return ( items, - {key: unicode(value) for key, value in metadata.iteritems()}, + {key: str(value) for key, value in metadata.items()}, ) def transItemsDataD(self, items_data, item_cb, serialise=False): @@ -1122,7 +1122,7 @@ def eb(failure): log.warning( - "Error while serialising/parsing item: {}".format(unicode(failure.value)) + "Error while serialising/parsing item: {}".format(str(failure.value)) ) d = defer.gatherResults([item_cb(item).addErrback(eb) for item in items]) @@ -1135,7 +1135,7 @@ return ( items, - {key: unicode(value) for key, value in metadata.iteritems()}, + {key: str(value) for key, value in 
metadata.items()}, ) d.addCallback(finishSerialisation) @@ -1156,7 +1156,7 @@ return [ ("", result) if success - else (unicode(result.result) or UNSPECIFIED, failure_result) + else (str(result.result) or UNSPECIFIED, failure_result) for success, result in results ] @@ -1168,7 +1168,7 @@ client, jid.JID(service_s) if service_s else None, nodeIdentifier ) d.addCallback( - lambda subscriptions: {j.full(): a for j, a in subscriptions.iteritems()} + lambda subscriptions: {j.full(): a for j, a in subscriptions.items()} ) return d @@ -1190,12 +1190,12 @@ ) except StopIteration: raise ValueError( - _(u"Invalid result: missing <subscriptions> element: {}").format( + _("Invalid result: missing <subscriptions> element: {}").format( iq_elt.toXml ) ) except AttributeError as e: - raise ValueError(_(u"Invalid result: {}").format(e)) + raise ValueError(_("Invalid result: {}").format(e)) try: return { jid.JID(s["jid"]): s["subscription"] @@ -1205,7 +1205,7 @@ } except KeyError: raise ValueError( - _(u"Invalid result: bad <subscription> element: {}").format( + _("Invalid result: bad <subscription> element: {}").format( iq_elt.toXml ) ) @@ -1220,7 +1220,7 @@ client = self.host.getClient(profile_key) subscriptions = { jid.JID(jid_): subscription - for jid_, subscription in subscriptions.iteritems() + for jid_, subscription in subscriptions.items() } d = self.setNodeSubscriptions( client, @@ -1241,7 +1241,7 @@ request.nodeIdentifier = nodeIdentifier request.subscriptions = { pubsub.Subscription(nodeIdentifier, jid_, state) - for jid_, state in subscriptions.iteritems() + for jid_, state in subscriptions.items() } d = request.send(client.xmlstream) return d @@ -1262,7 +1262,7 @@ d = self.rt_sessions.getResults( session_id, on_success=lambda result: "", - on_error=lambda failure: unicode(failure.value), + on_error=lambda failure: str(failure.value), profile=profile, ) # we need to convert jid.JID to unicode with full() to serialise it for the bridge @@ -1271,7 +1271,7 @@ ret[0], [ (service.full(), node, "" if success else failure or UNSPECIFIED) - for (service, node), (success, failure) in ret[1].iteritems() + for (service, node), (success, failure) in ret[1].items() ], ) ) @@ -1281,7 +1281,7 @@ self, node_data, subscriber=None, options=None, profile_key=C.PROF_KEY_NONE ): return self.subscribeToMany( - [(jid.JID(service), unicode(node)) for service, node in node_data], + [(jid.JID(service), str(node)) for service, node in node_data], jid.JID(subscriber), options, profile_key, @@ -1337,7 +1337,7 @@ d = self.rt_sessions.getResults( session_id, on_success=lambda result: ("", self.transItemsData(result)), - on_error=lambda failure: (unicode(failure.value) or UNSPECIFIED, ([], {})), + on_error=lambda failure: (str(failure.value) or UNSPECIFIED, ([], {})), profile=profile, ) d.addCallback( @@ -1347,7 +1347,7 @@ (service.full(), node, failure, items, metadata) for (service, node), (success, (failure, (items, metadata))) in ret[ 1 - ].iteritems() + ].items() ], ) ) @@ -1362,7 +1362,7 @@ max_item = None if max_item == C.NO_LIMIT else max_item extra = self.parseExtra(extra_dict) return self.getFromMany( - [(jid.JID(service), unicode(node)) for service, node in node_data], + [(jid.JID(service), str(node)) for service, node in node_data], max_item, extra.rsm_request, extra.extra, @@ -1390,8 +1390,8 @@ return self.rt_sessions.newSession(deferreds, client.profile) +@implementer(disco.IDisco) class SatPubSubClient(rsm.PubSubClient): - implements(disco.IDisco) def __init__(self, host, parent_plugin): self.host = host @@ 
-1409,7 +1409,7 @@ @param event(unicode): one of C.PS_ITEMS, C.PS_RETRACT, C.PS_DELETE @return (iterator[callable]): callbacks for this node/event """ - for registered_node, callbacks_dict in self.parent_plugin._node_cb.iteritems(): + for registered_node, callbacks_dict in self.parent_plugin._node_cb.items(): if not node.startswith(registered_node): continue try: @@ -1420,7 +1420,7 @@ def itemsReceived(self, event): - log.debug(u"Pubsub items received") + log.debug("Pubsub items received") for callback in self._getNodeCallbacks(event.nodeIdentifier, C.PS_ITEMS): callback(self.parent, event) client = self.parent @@ -1435,7 +1435,7 @@ ) def deleteReceived(self, event): - log.debug((u"Publish node deleted")) + log.debug(("Publish node deleted")) for callback in self._getNodeCallbacks(event.nodeIdentifier, C.PS_DELETE): callback(self.parent, event) client = self.parent
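Unwrapping a MAM result down to the pubsub <item> is the deepest of these element lookups; in Python 3 the next() builtin has to be applied at every level of the chain, since each .elements() call returns a plain generator. A standalone sketch of that traversal, where every namespace constant except the pubsub#event one is a placeholder for the value the plugin takes from mam and C, and ValueError stands in for exceptions.DataError:

    NS_MAM = "urn:xmpp:mam:2"          # placeholder: the plugin uses mam.NS_MAM
    NS_FORWARD = "urn:xmpp:forward:0"  # placeholder: the plugin uses C.NS_FORWARD
    NS_CLIENT = "jabber:client"        # placeholder: the plugin uses C.NS_CLIENT
    NS_PS_EVENT = "http://jabber.org/protocol/pubsub#event"

    def unwrap_mam_item(message_elt):
        # each .elements() call returns a generator, so next() is applied per level
        try:
            result_elt = next(message_elt.elements(NS_MAM, "result"))
            forwarded_elt = next(result_elt.elements(NS_FORWARD, "forwarded"))
            inner_message_elt = next(forwarded_elt.elements(NS_CLIENT, "message"))
            event_elt = next(inner_message_elt.elements(NS_PS_EVENT, "event"))
            items_elt = next(event_elt.elements(NS_PS_EVENT, "items"))
            return next(items_elt.elements(NS_PS_EVENT, "item"))
        except StopIteration:
            # ValueError stands in for the exceptions.DataError used by the plugin
            raise ValueError("Can't find Item in MAM message element")
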
--- a/sat/plugins/plugin_xep_0065.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0065.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0065 @@ -73,7 +73,7 @@ import hashlib import uuid -from zope.interface import implements +from zope.interface import implementer try: from twisted.words.protocols.xmlstream import XMPPHandler @@ -144,7 +144,7 @@ STATE_CLIENT_INITIAL, STATE_CLIENT_AUTH, STATE_CLIENT_REQUEST, -) = xrange(8) +) = range(8) SOCKS5_VER = 0x05 @@ -198,7 +198,7 @@ """ assert isinstance(jid_, jid.JID) self.host, self.port, self.type, self.jid = (host, int(port), type_, jid_) - self.id = id_ if id_ is not None else unicode(uuid.uuid4()) + self.id = id_ if id_ is not None else str(uuid.uuid4()) if priority_local: self._local_priority = int(priority) self._priority = self.calculatePriority() @@ -212,7 +212,7 @@ Used to disconnect tryed client when they are discarded """ - log.debug(u"Discarding {}".format(self)) + log.debug("Discarding {}".format(self)) try: self.factory.discard() except AttributeError: @@ -230,14 +230,14 @@ # similar to __unicode__ but we don't show jid and we encode id return "Candidate ({0.priority}): host={0.host} port={0.port} type={0.type}{id}".format( self, - id=u" id={}".format(self.id if self.id is not None else u"").encode( + id=" id={}".format(self.id if self.id is not None else "").encode( "utf-8", "ignore" ), ) def __unicode__(self): - return u"Candidate ({0.priority}): host={0.host} port={0.port} jid={0.jid} type={0.type}{id}".format( - self, id=u" id={}".format(self.id if self.id is not None else u"") + return "Candidate ({0.priority}): host={0.host} port={0.port} jid={0.jid} type={0.type}{id}".format( + self, id=" id={}".format(self.id if self.id is not None else "") ) def __eq__(self, other): @@ -270,7 +270,7 @@ elif self.type == XEP_0065.TYPE_PROXY: multiplier = 10 else: - raise exceptions.InternalError(u"Unknown {} type !".format(self.type)) + raise exceptions.InternalError("Unknown {} type !".format(self.type)) return 2 ** 16 * multiplier + self._local_priority def activate(self, client, sid, peer_jid, local_jid): @@ -322,13 +322,14 @@ """ self.connection = defer.Deferred() # called when connection/auth is done if session_hash is not None: + assert isinstance(session_hash, str) self.server_mode = False self._session_hash = session_hash self.state = STATE_CLIENT_INITIAL else: self.server_mode = True self.state = STATE_INITIAL - self.buf = "" + self.buf = b"" self.supportedAuthMechs = [AUTHMECH_ANON] self.supportedAddrs = [ADDR_DOMAINNAME] self.enabledCommands = [CMD_CONNECT] @@ -390,7 +391,7 @@ return # No supported mechs found, notify client and close the connection - log.warning(u"Unsupported authentication mechanism") + log.warning("Unsupported authentication mechanism") self.transport.write(struct.pack("!BB", SOCKS5_VER, AUTHMECH_INVALID)) self.transport.loseConnection() except struct.error: @@ -438,7 +439,7 @@ addr, port = struct.unpack("!IH", self.buf[4:10]) self.buf = self.buf[10:] elif self.addressType == ADDR_DOMAINNAME: - nlen = ord(self.buf[4]) + nlen = self.buf[4] addr, port = struct.unpack("!%dsH" % nlen, self.buf[5:]) self.buf = self.buf[7 + len(addr) :] else: @@ -466,7 +467,7 @@ return None def _makeRequest(self): - hash_ = self._session_hash + hash_ = self._session_hash.encode('utf-8') request = struct.pack( "!5B%dsH" % len(hash_), SOCKS5_VER, @@ -493,7 +494,7 @@ addr, port = struct.unpack("!IH", self.buf[4:10]) 
self.buf = self.buf[10:] elif self.addressType == ADDR_DOMAINNAME: - nlen = ord(self.buf[4]) + nlen = self.buf[4] addr, port = struct.unpack("!%dsH" % nlen, self.buf[5:]) self.buf = self.buf[7 + len(addr) :] else: @@ -515,7 +516,7 @@ def connectionMade(self): log.debug( - u"Socks5 connectionMade (mode = {})".format( + "Socks5 connectionMade (mode = {})".format( "server" if self.state == STATE_INITIAL else "client" ) ) @@ -524,15 +525,15 @@ def connectRequested(self, addr, port): # Check that this session is expected - if not self.factory.addToSession(addr, self): + if not self.factory.addToSession(addr.decode('utf-8'), self): self.sendErrorReply(REPLY_CONN_REFUSED) log.warning( - u"Unexpected connection request received from {host}".format( + "Unexpected connection request received from {host}".format( host=self.transport.getPeer().host ) ) return - self._session_hash = addr + self._session_hash = addr.decode('utf-8') self.connectCompleted(addr, 0) def startTransfer(self, chunk_size): @@ -543,7 +544,7 @@ self.active = True if chunk_size is not None: self.CHUNK_SIZE = chunk_size - log.debug(u"Starting file transfer") + log.debug("Starting file transfer") d = self.stream_object.startStream(self.transport) d.addCallback(self.streamFinished) @@ -575,7 +576,7 @@ def authenticateUserPass(self, user, passwd): # FIXME: implement authentication and remove the debug printing a password - log.debug(u"User/pass: %s/%s" % (user, passwd)) + log.debug("User/pass: %s/%s" % (user, passwd)) return True def dataReceived(self, buf): @@ -605,7 +606,7 @@ self._makeRequest() def connectionLost(self, reason): - log.debug(u"Socks5 connection lost: {}".format(reason.value)) + log.debug("Socks5 connection lost: {}".format(reason.value)) if self.state != STATE_READY: self.connection.errback(reason) if self.server_mode: @@ -629,7 +630,7 @@ try: protocol = session["protocols"][0] except (KeyError, IndexError): - log.error(u"Can't start file transfer, can't find protocol") + log.error("Can't start file transfer, can't find protocol") else: session[TIMER_KEY].cancel() protocol.startTransfer(chunk_size) @@ -642,6 +643,7 @@ @param protocol(SOCKSv5): protocol instance @param return(bool): True if hash was valid (i.e. 
expected), False else """ + assert isinstance(session_hash, str) try: session_data = self.getSession(session_hash) except KeyError: @@ -663,7 +665,7 @@ protocols = self.getSession(session_hash)["protocols"] protocols.remove(protocol) except (KeyError, ValueError): - log.error(u"Protocol not found in session while it should be there") + log.error("Protocol not found in session while it should be there") else: if protocol.active: # The active protocol has been removed, session is finished @@ -705,11 +707,11 @@ self._protocol_instance.startTransfer(chunk_size) def clientConnectionFailed(self, connector, reason): - log.debug(u"Connection failed") + log.debug("Connection failed") self.connection.errback(reason) def clientConnectionLost(self, connector, reason): - log.debug(_(u"Socks 5 client connection lost (reason: %s)") % reason.value) + log.debug(_("Socks 5 client connection lost (reason: %s)") % reason.value) if self._protocol_instance.active: # This one was used for the transfer, than mean that # the Socks5 session is finished @@ -753,7 +755,7 @@ try: self._np = self.host.plugins["NAT-PORT"] except KeyError: - log.debug(u"NAT Port plugin not available") + log.debug("NAT Port plugin not available") self._np = None # parameters @@ -779,15 +781,15 @@ if self._server_factory is None: self._server_factory = Socks5ServerFactory(self) - for port in xrange(SERVER_STARTING_PORT, 65356): + for port in range(SERVER_STARTING_PORT, 65356): try: listening_port = reactor.listenTCP(port, self._server_factory) except internet_error.CannotListenError as e: log.debug( - u"Cannot listen on port {port}: {err_msg}{err_num}".format( + "Cannot listen on port {port}: {err_msg}{err_num}".format( port=port, err_msg=e.socketError.strerror, - err_num=u" (error code: {})".format(e.socketError.errno), + err_num=" (error code: {})".format(e.socketError.errno), ) ) else: @@ -813,7 +815,7 @@ """ def notFound(server): - log.info(u"No proxy found on this server") + log.info("No proxy found on this server") self._cache_proxies[server] = None raise exceptions.NotFound @@ -837,15 +839,15 @@ result_elt = yield iq_elt.send() except jabber_error.StanzaError as failure: log.warning( - u"Error while requesting proxy info on {jid}: {error}".format( + "Error while requesting proxy info on {jid}: {error}".format( proxy.full(), failure ) ) notFound(server) try: - query_elt = result_elt.elements(NS_BS, "query").next() - streamhost_elt = query_elt.elements(NS_BS, "streamhost").next() + query_elt = next(result_elt.elements(NS_BS, "query")) + streamhost_elt = next(query_elt.elements(NS_BS, "streamhost")) host = streamhost_elt["host"] jid_ = streamhost_elt["jid"] port = streamhost_elt["port"] @@ -853,11 +855,11 @@ raise KeyError jid_ = jid.JID(jid_) except (StopIteration, KeyError, RuntimeError, jid.InvalidFormat, AttributeError): - log.warning(u"Invalid proxy data received from {}".format(proxy.full())) + log.warning("Invalid proxy data received from {}".format(proxy.full())) notFound(server) proxy_infos = self._cache_proxies[server] = ProxyInfos(host, jid_, port) - log.info(u"Proxy found: {}".format(proxy_infos)) + log.info("Proxy found: {}".format(proxy_infos)) defer.returnValue(proxy_infos) @defer.inlineCallbacks @@ -874,15 +876,15 @@ if external_ip is not None and self._external_port is None: if external_ip != local_ips[0]: - log.info(u"We are probably behind a NAT") + log.info("We are probably behind a NAT") if self._np is None: - log.warning(u"NAT port plugin not available, we can't map port") + log.warning("NAT port plugin not 
available, we can't map port") else: ext_port = yield self._np.mapPort( - local_port, desc=u"SaT socks5 stream" + local_port, desc="SaT socks5 stream" ) if ext_port is None: - log.warning(u"Can't map NAT port") + log.warning("Can't map NAT port") else: self._external_port = ext_port @@ -1053,28 +1055,28 @@ defer_candidates = None def connectionCb(client, candidate): - log.info(u"Connection of {} successful".format(unicode(candidate))) + log.info("Connection of {} successful".format(str(candidate))) for idx, other_candidate in enumerate(candidates): try: if other_candidate.priority < candidate.priority: - log.debug(u"Cancelling {}".format(other_candidate)) + log.debug("Cancelling {}".format(other_candidate)) defer_candidates[idx].cancel() except AttributeError: assert other_candidate is None def connectionEb(failure, client, candidate): if failure.check(defer.CancelledError): - log.debug(u"Connection of {} has been cancelled".format(candidate)) + log.debug("Connection of {} has been cancelled".format(candidate)) else: log.info( - u"Connection of {candidate} Failed: {error}".format( + "Connection of {candidate} Failed: {error}".format( candidate=candidate, error=failure.value ) ) candidates[candidates.index(candidate)] = None def allTested(self): - log.debug(u"All candidates have been tested") + log.debug("All candidates have been tested") good_candidates = [c for c in candidates if c] return good_candidates[0] if good_candidates else None @@ -1096,7 +1098,7 @@ @param session_hash(str): hash as returned by getSessionHash @param client: %(doc_client)s """ - log.info(u"Socks5 Bytestream: TimeOut reached") + log.info("Socks5 Bytestream: TimeOut reached") session = self.getSession(client, session_hash) session[DEFER_KEY].errback(exceptions.TimeOutError) @@ -1111,10 +1113,10 @@ @return (None, failure.Failure): failure_ is returned """ log.debug( - u"Cleaning session with hash {hash}{id}: {reason}".format( + "Cleaning session with hash {hash}{id}: {reason}".format( hash=session_hash, reason="" if failure_ is None else failure_.value, - id="" if sid is None else u" (id: {})".format(sid), + id="" if sid is None else " (id: {})".format(sid), ) ) @@ -1128,12 +1130,12 @@ try: del client.xep_0065_sid_session[sid] except KeyError: - log.warning(u"Session id {} is unknown".format(sid)) + log.warning("Session id {} is unknown".format(sid)) try: session_data = client._s5b_sessions[session_hash] except KeyError: - log.warning(u"There is no session with this hash") + log.warning("There is no session with this hash") return else: del client._s5b_sessions[session_hash] @@ -1175,7 +1177,7 @@ streamhost["host"] = candidate.host streamhost["port"] = str(candidate.port) streamhost["jid"] = candidate.jid.full() - log.debug(u"Candidate proposed: {}".format(candidate)) + log.debug("Candidate proposed: {}".format(candidate)) d = iq_elt.send() args = [client, session_data, local_jid] @@ -1192,28 +1194,28 @@ @param iq_elt(domish.Element): <iq> result """ try: - query_elt = iq_elt.elements(NS_BS, "query").next() - streamhost_used_elt = query_elt.elements(NS_BS, "streamhost-used").next() + query_elt = next(iq_elt.elements(NS_BS, "query")) + streamhost_used_elt = next(query_elt.elements(NS_BS, "streamhost-used")) except StopIteration: - log.warning(u"No streamhost found in stream query") + log.warning("No streamhost found in stream query") # FIXME: must clean session return streamhost_jid = jid.JID(streamhost_used_elt["jid"]) try: - candidate = ( + candidate = next(( c for c in session_data["candidates"] if c.jid == 
streamhost_jid - ).next() + )) except StopIteration: log.warning( - u"Candidate [{jid}] is unknown !".format(jid=streamhost_jid.full()) + "Candidate [{jid}] is unknown !".format(jid=streamhost_jid.full()) ) return else: - log.info(u"Candidate choosed by target: {}".format(candidate)) + log.info("Candidate choosed by target: {}".format(candidate)) if candidate.type == XEP_0065.TYPE_PROXY: - log.info(u"A Socks5 proxy is used") + log.info("A Socks5 proxy is used") d = self.connectCandidate(client, candidate, session_data["hash"]) d.addCallback( lambda __: candidate.activate( @@ -1227,10 +1229,10 @@ d.addCallback(lambda __: candidate.startTransfer(session_data["hash"])) def _activationEb(self, failure): - log.warning(u"Proxy activation error: {}".format(failure.value)) + log.warning("Proxy activation error: {}".format(failure.value)) def _IQNegotiationEb(self, stanza_err, client, session_data, local_jid): - log.warning(u"Socks5 transfer failed: {}".format(stanza_err.value)) + log.warning("Socks5 transfer failed: {}".format(stanza_err.value)) # FIXME: must clean session def createSession(self, *args, **kwargs): @@ -1252,7 +1254,7 @@ @return (dict): session data """ if sid in client.xep_0065_sid_session: - raise exceptions.ConflictError(u"A session with this id already exists !") + raise exceptions.ConflictError("A session with this id already exists !") if requester: session_hash = getSessionHash(local_jid, to_jid, sid) session_data = self._registerHash(client, session_hash, stream_object) @@ -1291,11 +1293,12 @@ See comments below for details @return (dict): session data """ + assert isinstance(session_hash, str) if client is None: try: client = self.hash_clients_map[session_hash] except KeyError as e: - log.warning(u"The requested session doesn't exists !") + log.warning("The requested session doesn't exists !") raise e return client._s5b_sessions[session_hash] @@ -1336,15 +1339,15 @@ session_data["stream_object"] = stream_object def streamQuery(self, iq_elt, client): - log.debug(u"BS stream query") + log.debug("BS stream query") iq_elt.handled = True - query_elt = iq_elt.elements(NS_BS, "query").next() + query_elt = next(iq_elt.elements(NS_BS, "query")) try: sid = query_elt["sid"] except KeyError: - log.warning(u"Invalid bystreams request received") + log.warning("Invalid bystreams request received") return client.sendError(iq_elt, "bad-request") streamhost_elts = list(query_elt.elements(NS_BS, "streamhost")) @@ -1354,7 +1357,7 @@ try: session_data = client.xep_0065_sid_session[sid] except KeyError: - log.warning(u"Ignoring unexpected BS transfer: {}".format(sid)) + log.warning("Ignoring unexpected BS transfer: {}".format(sid)) return client.sendError(iq_elt, "not-acceptable") peer_jid = session_data["peer_jid"] = jid.JID(iq_elt["from"]) @@ -1365,7 +1368,7 @@ try: host, port, jid_ = sh_elt["host"], sh_elt["port"], jid.JID(sh_elt["jid"]) except KeyError: - log.warning(u"malformed streamhost element") + log.warning("malformed streamhost element") return client.sendError(iq_elt, "bad-request") priority = nb_sh - idx if jid_.userhostJID() != peer_jid.userhostJID(): @@ -1375,7 +1378,7 @@ candidates.append(Candidate(host, port, type_, priority, jid_)) for candidate in candidates: - log.info(u"Candidate proposed: {}".format(candidate)) + log.info("Candidate proposed: {}".format(candidate)) d = self.getBestCandidate(client, candidates, session_data["hash"]) d.addCallback(self._ackStream, iq_elt, session_data, client) @@ -1384,7 +1387,7 @@ if candidate is None: log.info("No streamhost candidate 
worked, we have to end negotiation") return client.sendError(iq_elt, "item-not-found") - log.info(u"We choose: {}".format(candidate)) + log.info("We choose: {}".format(candidate)) result_elt = xmlstream.toResponse(iq_elt, "result") query_elt = result_elt.addElement((NS_BS, "query")) query_elt["sid"] = session_data["id"] @@ -1393,8 +1396,8 @@ client.send(result_elt) +@implementer(iwokkel.IDisco) class XEP_0065_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
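The SOCKS5 code is where the bytes/str split shows up most: the read buffer becomes b"", indexing a bytes object already yields an int (so ord() goes away), and the session hash has to be encoded before being packed into the request. A small self-contained sketch with made-up values:

    import struct
    from hashlib import sha1

    SOCKS5_VER = 0x05
    CMD_CONNECT = 0x01
    ADDR_DOMAINNAME = 0x03

    session_hash = sha1(b"sid" + b"requester@example.net" + b"target@example.org").hexdigest()

    # client side: the domain-name field carries the session hash as bytes
    hash_bytes = session_hash.encode("utf-8")
    request = struct.pack(
        "!5B%dsH" % len(hash_bytes),
        SOCKS5_VER, CMD_CONNECT, 0, ADDR_DOMAINNAME, len(hash_bytes),
        hash_bytes, 0,
    )

    # server side: buf is bytes, and buf[4] is already an int in Python 3
    buf = request
    nlen = buf[4]                      # was ord(buf[4]) in Python 2
    addr, port = struct.unpack("!%dsH" % nlen, buf[5:])
    print(addr.decode("utf-8"), port)  # the hash comes back as bytes and is decoded
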
--- a/sat/plugins/plugin_xep_0070.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0070.py Tue Aug 13 19:08:41 2019 +0200 @@ -26,7 +26,7 @@ from sat.tools import xml_tools from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer try: from twisted.words.protocols.xmlstream import XMPPHandler @@ -63,7 +63,7 @@ """ def __init__(self, host): - log.info(_(u"Plugin XEP_0070 initialization")) + log.info(_("Plugin XEP_0070 initialization")) self.host = host self._dictRequest = dict() @@ -90,15 +90,15 @@ def _treatHttpAuthRequest(self, elt, stanzaType, client): elt.handled = True - auth_elt = elt.elements(NS_HTTP_AUTH, "confirm").next() + auth_elt = next(elt.elements(NS_HTTP_AUTH, "confirm")) auth_id = auth_elt["id"] auth_method = auth_elt["method"] auth_url = auth_elt["url"] self._dictRequest[client] = (auth_id, auth_method, auth_url, stanzaType, elt) - title = D_(u"Auth confirmation") - message = D_(u"{auth_url} needs to validate your identity, do you agree?\n" - u"Validation code : {auth_id}\n\n" - u"Please check that this code is the same as on {auth_url}" + title = D_("Auth confirmation") + message = D_("{auth_url} needs to validate your identity, do you agree?\n" + "Validation code : {auth_id}\n\n" + "Please check that this code is the same as on {auth_url}" ).format(auth_url=auth_url, auth_id=auth_id) d = xml_tools.deferConfirm(self.host, message=message, title=title, profile=client.profile) @@ -114,23 +114,23 @@ if authorized: if stanzaType == IQ: # iq - log.debug(_(u"XEP-0070 reply iq")) + log.debug(_("XEP-0070 reply iq")) iq_result_elt = xmlstream.toResponse(elt, "result") client.send(iq_result_elt) elif stanzaType == MSG: # message - log.debug(_(u"XEP-0070 reply message")) + log.debug(_("XEP-0070 reply message")) msg_result_elt = xmlstream.toResponse(elt, "result") - msg_result_elt.addChild(elt.elements(NS_HTTP_AUTH, "confirm").next()) + msg_result_elt.addChild(next(elt.elements(NS_HTTP_AUTH, "confirm"))) client.send(msg_result_elt) else: - log.debug(_(u"XEP-0070 reply error")) + log.debug(_("XEP-0070 reply error")) result_elt = jabber.error.StanzaError("not-authorized").toResponse(elt) client.send(result_elt) +@implementer(iwokkel.IDisco) class XEP_0070_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent, profile): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_xep_0071.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0071.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Publish-Subscribe (xep-0071) @@ -27,14 +27,14 @@ from twisted.internet import defer from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer # from lxml import etree try: from lxml import html except ImportError: raise exceptions.MissingModule( - u"Missing module lxml, please download/install it from http://lxml.de/" + "Missing module lxml, please download/install it from http://lxml.de/" ) try: from twisted.words.protocols.xmlstream import XMPPHandler @@ -176,14 +176,14 @@ syntax = self._s.getCurrentSyntax(client.profile) defers = [] - if u"xhtml" in data["extra"]: + if "xhtml" in data["extra"]: # we have directly XHTML for lang, xhtml in data_format.getSubDict("xhtml", data["extra"]): self._check_body_text(data, lang, xhtml, self._s.SYNTAX_XHTML, defers) d = self._s.convert(xhtml, self._s.SYNTAX_XHTML, self.SYNTAX_XHTML_IM) d.addCallback(syntax_converted, lang) defers.append(d) - elif u"rich" in data["extra"]: + elif "rich" in data["extra"]: # we have rich syntax to convert for lang, rich_data in data_format.getSubDict("rich", data["extra"]): self._check_body_text(data, lang, rich_data, syntax, defers) @@ -191,7 +191,7 @@ d.addCallback(syntax_converted, lang) defers.append(d) else: - exceptions.InternalError(u"xhtml or rich should be present at this point") + exceptions.InternalError("xhtml or rich should be present at this point") d_list = defer.DeferredList(defers) d_list.addCallback(lambda __: data) return d_list @@ -200,7 +200,7 @@ """ Check presence of XHTML-IM in message """ try: - html_elt = message.elements(NS_XHTML_IM, "html").next() + html_elt = next(message.elements(NS_XHTML_IM, "html")) except StopIteration: # No XHTML-IM pass @@ -213,14 +213,14 @@ """ Check presence of rich text in extra """ rich = {} xhtml = {} - for key, value in data["extra"].iteritems(): + for key, value in data["extra"].items(): if key.startswith("rich"): rich[key[5:]] = value elif key.startswith("xhtml"): xhtml[key[6:]] = value if rich and xhtml: raise exceptions.DataError( - _(u"Can't have XHTML and rich content at the same time") + _("Can't have XHTML and rich content at the same time") ) if rich or xhtml: if rich: @@ -247,7 +247,7 @@ continue purged.append((name, value.strip())) - return u"; ".join([u"%s: %s" % data for data in purged]) + return "; ".join(["%s: %s" % data for data in purged]) def XHTML2XHTML_IM(self, xhtml): """ Convert XHTML document to XHTML_IM subset @@ -265,7 +265,7 @@ else: body_elt.attrib.clear() - allowed_tags = allowed.keys() + allowed_tags = list(allowed.keys()) to_strip = [] for elem in body_elt.iter(): if elem.tag not in allowed_tags: @@ -282,7 +282,7 @@ for elem in to_strip: if elem.tag in blacklist: # we need to remove the element and all descendants - log.debug(u"removing black listed tag: %s" % (elem.tag)) + log.debug("removing black listed tag: %s" % (elem.tag)) elem.drop_tree() else: elem.drop_tag() @@ -295,8 +295,8 @@ return html.tostring(root_elt, encoding="unicode", method="xml") +@implementer(iwokkel.IDisco) class XEP_0071_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
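XHTML2XHTML_IM keeps only a whitelist of tags and attributes; the port only touches the dict handling (allowed.keys() wrapped in list(), although a plain membership test on the dict itself also works). A reduced, standalone illustration of the same filtering idea with lxml, where the allowed/blacklist sets are toy examples rather than the real XHTML-IM profile:

    from lxml import html

    allowed = {"div": set(), "p": {"style"}, "a": {"href", "style"}}
    blacklist = {"script"}

    doc = html.fromstring(
        '<div><p onclick="x()">hello <b>world</b></p><script>evil()</script></div>'
    )
    for elem in list(doc.iter()):
        if elem.tag in allowed:
            # keep the tag but strip attributes outside the whitelist
            for attr in [a for a in elem.attrib if a not in allowed[elem.tag]]:
                del elem.attrib[attr]
        elif elem.tag in blacklist:
            elem.drop_tree()   # remove the element and everything below it
        else:
            elem.drop_tag()    # keep children and text, drop only the tag itself
    print(html.tostring(doc, encoding="unicode", method="xml"))
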
--- a/sat/plugins/plugin_xep_0077.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0077.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0077 @@ -51,7 +51,7 @@ namespace = 'jabber:client' def __init__(self, jid_, password, email=None, check_certificate=True): - log.debug(_(u"Registration asked for {jid}").format(jid=jid_)) + log.debug(_("Registration asked for {jid}").format(jid=jid_)) xmlstream.ConnectAuthenticator.__init__(self, jid_.host) self.jid = jid_ self.password = password @@ -70,18 +70,18 @@ xs.initializers.append(tls_init) def register(self, xmlstream): - log.debug(_(u"Stream started with {server}, now registering" + log.debug(_("Stream started with {server}, now registering" .format(server=self.jid.host))) iq = XEP_0077.buildRegisterIQ(self.xmlstream, self.jid, self.password, self.email) d = iq.send(self.jid.host).addCallbacks(self.registrationCb, self.registrationEb) d.chainDeferred(self.registered) def registrationCb(self, answer): - log.debug(_(u"Registration answer: {}").format(answer.toXml())) + log.debug(_("Registration answer: {}").format(answer.toXml())) self.xmlstream.sendFooter() def registrationEb(self, failure_): - log.info(_("Registration failure: {}").format(unicode(failure_.value))) + log.info(_("Registration failure: {}").format(str(failure_.value))) self.xmlstream.sendFooter() raise failure_ @@ -97,7 +97,7 @@ def _disconnected(self, reason): if not self.authenticator.registered.called: - err = jabber_error.StreamError(u"Server unexpectedly closed the connection") + err = jabber_error.StreamError("Server unexpectedly closed the connection") try: if reason.value.args[0][0][2] == "certificate verify failed": err = exceptions.InvalidCertificate() @@ -116,7 +116,7 @@ in_sign="ss", out_sign="", method=self._inBandRegister, - async=True, + async_=True, ) host.bridge.addMethod( "inBandAccountNew", @@ -124,7 +124,7 @@ in_sign="ssssi", out_sign="", method=self._registerNewAccount, - async=True, + async_=True, ) host.bridge.addMethod( "inBandUnregister", @@ -132,7 +132,7 @@ in_sign="ss", out_sign="", method=self._unregister, - async=True, + async_=True, ) host.bridge.addMethod( "inBandPasswordChange", @@ -140,7 +140,7 @@ in_sign="ss", out_sign="", method=self._changePassword, - async=True, + async_=True, ) @staticmethod @@ -160,12 +160,12 @@ def _regCb(self, answer, client, post_treat_cb): """Called after the first get IQ""" try: - query_elt = answer.elements(NS_REG, "query").next() + query_elt = next(answer.elements(NS_REG, "query")) except StopIteration: raise exceptions.DataError("Can't find expected query element") try: - x_elem = query_elt.elements(data_form.NS_X_DATA, "x").next() + x_elem = next(query_elt.elements(data_form.NS_X_DATA, "x")) except StopIteration: # XXX: it seems we have an old service which doesn't manage data forms log.warning(_("Can't find data form")) @@ -194,17 +194,17 @@ def _regEb(self, failure, client): """Called when something is wrong with registration""" - log.info(_("Registration failure: %s") % unicode(failure.value)) + log.info(_("Registration failure: %s") % str(failure.value)) raise failure def _regSuccess(self, answer, client, post_treat_cb): - log.debug(_(u"registration answer: %s") % answer.toXml()) + log.debug(_("registration answer: %s") % answer.toXml()) if post_treat_cb is not None: post_treat_cb(jid.JID(answer["from"]), client.profile) return {} def _regFailure(self, failure, client): - log.info(_(u"Registration 
failure: %s") % unicode(failure.value)) + log.info(_("Registration failure: %s") % str(failure.value)) if failure.value.condition == "conflict": raise exceptions.ConflictError( _("Username already exists, please choose an other one") @@ -221,8 +221,8 @@ """ # FIXME: this post_treat_cb arguments seems wrong, check it client = self.host.getClient(profile_key) - log.debug(_(u"Asking registration for {}").format(to_jid.full())) - reg_request = client.IQ(u"get") + log.debug(_("Asking registration for {}").format(to_jid.full())) + reg_request = client.IQ("get") reg_request["from"] = client.jid.full() reg_request["to"] = to_jid.full() reg_request.addElement("query", NS_REG) @@ -245,7 +245,7 @@ return self.registerNewAccount(jid.JID(jid_), password, **kwargs) def registerNewAccount( - self, jid_, password, email=None, host=u"127.0.0.1", port=C.XMPP_C2S_PORT + self, jid_, password, email=None, host="127.0.0.1", port=C.XMPP_C2S_PORT ): """register a new account on a XMPP server @@ -255,7 +255,7 @@ @param host(unicode): host of the server to register to @param port(int): port of the server to register to """ - check_certificate = host != u"127.0.0.1" + check_certificate = host != "127.0.0.1" authenticator = RegisteringAuthenticator( jid_, password, email, check_certificate=check_certificate) registered_d = authenticator.registered @@ -290,6 +290,6 @@ """ iq_elt = client.IQ() iq_elt["to"] = to_jid.full() - query_elt = iq_elt.addElement((NS_REG, u"query")) - query_elt.addElement(u"remove") + query_elt = iq_elt.addElement((NS_REG, "query")) + query_elt.addElement("remove") return iq_elt.send()
--- a/sat/plugins/plugin_xep_0085.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0085.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Chat State Notifications Protocol (xep-0085) @@ -24,7 +24,7 @@ log = getLogger(__name__) from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber.jid import JID try: @@ -185,9 +185,9 @@ else: # update entity data for one2one chat # assert from_jid.resource # FIXME: assert doesn't work on normal message from server (e.g. server announce), because there is no resource try: - domish.generateElementsNamed(message.elements(), name="body").next() + next(domish.generateElementsNamed(message.elements(), name="body")) try: - domish.generateElementsNamed(message.elements(), name="active").next() + next(domish.generateElementsNamed(message.elements(), name="active")) # contact enabled Chat State Notifications self.updateCache(from_jid, True, profile=profile) except StopIteration: @@ -233,7 +233,7 @@ return mess_data try: # message with a body always mean active state - domish.generateElementsNamed(message.elements(), name="body").next() + next(domish.generateElementsNamed(message.elements(), name="body")) message.addElement("active", NS_CHAT_STATES) # launch the chat state machine (init the timer) if self._isMUC(to_jid, profile): @@ -385,7 +385,7 @@ if state != "gone" or self.mess_type != "groupchat": # send a new message without body log.debug( - u"sending state '{state}' to {jid}".format( + "sending state '{state}' to {jid}".format( state=state, jid=self.to_jid.full() ) ) @@ -415,8 +415,8 @@ ) +@implementer(iwokkel.IDisco) class XEP_0085_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent, profile): self.plugin_parent = plugin_parent
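The handler change above is the recurring zope.interface port: the class-body call implements(iwokkel.IDisco) does not work on Python 3 and is replaced by the @implementer class decorator. A self-contained sketch (IDisco below is a stand-in interface, not wokkel's):

    from zope.interface import Interface, implementer

    class IDisco(Interface):
        """Stand-in interface so the sketch runs without wokkel."""

    @implementer(IDisco)
    class ExampleHandler(object):
        # The Python 2 version declared "implements(IDisco)" inside the class
        # body; that call relies on frame inspection that fails under Python 3.
        pass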
--- a/sat/plugins/plugin_xep_0092.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0092.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for Software Version (XEP-0092) @@ -53,7 +53,7 @@ in_sign="ss", out_sign="(sss)", method=self._getVersion, - async=True, + async_=True, ) try: self.host.plugins[C.TEXT_CMDS].addWhoIsCb(self._whois, 50) @@ -97,14 +97,14 @@ def _gotVersion(self, iq_elt): try: - query_elt = iq_elt.elements(NS_VERSION, "query").next() + query_elt = next(iq_elt.elements(NS_VERSION, "query")) except StopIteration: raise exceptions.DataError ret = [] for name in ("name", "version", "os"): try: - data_elt = query_elt.elements(NS_VERSION, name).next() - ret.append(unicode(data_elt)) + data_elt = next(query_elt.elements(NS_VERSION, name)) + ret.append(str(data_elt)) except StopIteration: ret.append(None)
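As in the hunk above, iterators lost their .next() method in Python 3; the port calls the next() builtin instead, keeping the existing StopIteration handling. A minimal equivalent, with a plain generator standing in for domish's elements():

    def elements():
        # stand-in for domish.Element.elements(), which is a generator
        yield "name"
        yield "version"

    it = elements()
    try:
        first = next(it)        # Python 3: builtin next(); it.next() is gone
    except StopIteration:
        first = None
    # next(it, None) would achieve the same without the try/except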
--- a/sat/plugins/plugin_xep_0095.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0095.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0095 @@ -25,7 +25,7 @@ from sat.core import exceptions from twisted.words.protocols.jabber import xmlstream from twisted.words.protocols.jabber import error -from zope.interface import implements +from zope.interface import implementer from wokkel import disco from wokkel import iwokkel import uuid @@ -71,7 +71,7 @@ del self.si_profiles[si_profile] except KeyError: log.error( - u"Trying to unregister SI profile [{}] which was not registered".format( + "Trying to unregister SI profile [{}] which was not registered".format( si_profile ) ) @@ -83,7 +83,7 @@ """ log.info(_("XEP-0095 Stream initiation")) iq_elt.handled = True - si_elt = iq_elt.elements(NS_SI, "si").next() + si_elt = next(iq_elt.elements(NS_SI, "si")) si_id = si_elt["id"] si_mime_type = iq_elt.getAttribute("mime-type", "application/octet-stream") si_profile = si_elt["profile"] @@ -136,9 +136,9 @@ def _parseOfferResult(self, iq_elt): try: - si_elt = iq_elt.elements(NS_SI, "si").next() + si_elt = next(iq_elt.elements(NS_SI, "si")) except StopIteration: - log.warning(u"No <si/> element found in result while expected") + log.warning("No <si/> element found in result while expected") raise exceptions.DataError return (iq_elt, si_elt) @@ -165,7 +165,7 @@ """ offer = client.IQ() sid = str(uuid.uuid4()) - log.debug(_(u"Stream Session ID: %s") % offer["id"]) + log.debug(_("Stream Session ID: %s") % offer["id"]) offer["from"] = client.jid.full() offer["to"] = to_jid.full() @@ -182,8 +182,8 @@ return sid, offer_d +@implementer(iwokkel.IDisco) class XEP_0095_handler(xmlstream.XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent @@ -197,7 +197,7 @@ def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_SI)] + [ disco.DiscoFeature( - u"http://jabber.org/protocol/si/profile/{}".format(profile_name) + "http://jabber.org/protocol/si/profile/{}".format(profile_name) ) for profile_name in self.plugin_parent.si_profiles ]
--- a/sat/plugins/plugin_xep_0096.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0096.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0096 @@ -60,7 +60,7 @@ ] # Stream methods managed self._f = self.host.plugins["FILE"] self._f.register( - NS_SI_FT, self.sendFile, priority=0, method_name=u"Stream Initiation" + NS_SI_FT, self.sendFile, priority=0, method_name="Stream Initiation" ) self._si = self.host.plugins["XEP-0095"] self._si.registerSIProfile(SI_PROFILE_NAME, self._transferRequest) @@ -91,7 +91,7 @@ - range_length """ try: - range_elt = parent_elt.elements(NS_SI_FT, "range").next() + range_elt = next(parent_elt.elements(NS_SI_FT, "range")) except StopIteration: range_ = False range_offset = None @@ -126,7 +126,7 @@ peer_jid = jid.JID(iq_elt["from"]) try: - file_elt = si_elt.elements(NS_SI_FT, "file").next() + file_elt = next(si_elt.elements(NS_SI_FT, "file")) except StopIteration: return self._badRequest( client, iq_elt, "No <file/> element found in SI File Transfer request" @@ -149,13 +149,13 @@ file_hash = file_elt.getAttribute("hash") log.info( - u"File proposed: name=[{name}] size={size}".format( + "File proposed: name=[{name}] size={size}".format( name=filename, size=file_size ) ) try: - file_desc = unicode(file_elt.elements(NS_SI_FT, "desc").next()) + file_desc = str(next(file_elt.elements(NS_SI_FT, "desc"))) except StopIteration: file_desc = "" @@ -178,10 +178,10 @@ plugin = self.host.plugins["XEP-0047"] else: log.error( - u"Unknown stream method, this should not happen at this stage, cancelling transfer" + "Unknown stream method, this should not happen at this stage, cancelling transfer" ) else: - log.warning(u"Can't find a valid stream method") + log.warning("Can't find a valid stream method") self._si.sendError(client, iq_elt, "not-acceptable") return @@ -213,7 +213,7 @@ @param data(dict): session data """ if not accepted: - log.info(u"File transfer declined") + log.info("File transfer declined") self._si.sendError(client, iq_elt, "forbidden") return # data, timeout, stream_method, failed_methods = client._xep_0096_waiting_for_approval[sid] @@ -264,7 +264,7 @@ """ # TODO: check hash data["stream_object"].close() - log.info(u"Transfer {si_id} successfuly finished".format(**data)) + log.info("Transfer {si_id} successfuly finished".format(**data)) def _transferEb(self, failure, client, data): """Called when something went wrong with the transfer @@ -273,8 +273,8 @@ @param data: session data """ log.warning( - u"Transfer {si_id} failed: {reason}".format( - reason=unicode(failure.value), **data + "Transfer {si_id} failed: {reason}".format( + reason=str(failure.value), **data ) ) data["stream_object"].close() @@ -327,7 +327,7 @@ try: feature_elt = self.host.plugins["XEP-0020"].getFeatureElt(si_elt) except exceptions.NotFound: - log.warning(u"No <feature/> element found in result while expected") + log.warning("No <feature/> element found in result while expected") return choosed_options = self.host.plugins["XEP-0020"].getChoosedOptions( @@ -336,11 +336,11 @@ try: stream_method = choosed_options["stream-method"] except KeyError: - log.warning(u"No stream method choosed") + log.warning("No stream method choosed") return try: - file_elt = si_elt.elements(NS_SI_FT, "file").next() + file_elt = next(si_elt.elements(NS_SI_FT, "file")) except StopIteration: pass else: @@ -351,7 +351,7 @@ elif stream_method == self.host.plugins["XEP-0047"].NAMESPACE: plugin = 
self.host.plugins["XEP-0047"] else: - log.warning(u"Invalid stream method received") + log.warning("Invalid stream method received") return stream_object = stream.FileStreamObject( @@ -367,25 +367,25 @@ stanza_err = failure.value if stanza_err.code == "403" and stanza_err.condition == "forbidden": from_s = stanza_err.stanza["from"] - log.info(u"File transfer refused by {}".format(from_s)) - msg = D_(u"The contact {} has refused your file").format(from_s) - title = D_(u"File refused") + log.info("File transfer refused by {}".format(from_s)) + msg = D_("The contact {} has refused your file").format(from_s) + title = D_("File refused") xml_tools.quickNote(self.host, client, msg, title, C.XMLUI_DATA_LVL_INFO) else: - log.warning(_(u"Error during file transfer")) + log.warning(_("Error during file transfer")) msg = D_( - u"Something went wrong during the file transfer session initialisation: {reason}" - ).format(reason=unicode(stanza_err)) - title = D_(u"File transfer error") + "Something went wrong during the file transfer session initialisation: {reason}" + ).format(reason=str(stanza_err)) + title = D_("File transfer error") xml_tools.quickNote(self.host, client, msg, title, C.XMLUI_DATA_LVL_ERROR) elif failure.check(exceptions.DataError): - log.warning(u"Invalid stanza received") + log.warning("Invalid stanza received") else: - log.error(u"Error while proposing stream: {}".format(failure)) + log.error("Error while proposing stream: {}".format(failure)) def _sendCb(self, __, client, sid, stream_object): log.info( - _(u"transfer {sid} successfuly finished [{profile}]").format( + _("transfer {sid} successfuly finished [{profile}]").format( sid=sid, profile=client.profile ) ) @@ -393,8 +393,8 @@ def _sendEb(self, failure, client, sid, stream_object): log.warning( - _(u"transfer {sid} failed [{profile}]: {reason}").format( - sid=sid, profile=client.profile, reason=unicode(failure.value) + _("transfer {sid} failed [{profile}]: {reason}").format( + sid=sid, profile=client.profile, reason=str(failure.value) ) ) stream_object.close()
--- a/sat/plugins/plugin_xep_0100.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0100.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing gateways (xep-0100) @@ -38,7 +38,7 @@ } WARNING_MSG = D_( - u"""Be careful ! Gateways allow you to use an external IM (legacy IM), so you can see your contact as XMPP contacts. + """Be careful ! Gateways allow you to use an external IM (legacy IM), so you can see your contact as XMPP contacts. But when you do this, all your messages go throught the external legacy IM server, it is a huge privacy issue (i.e.: all your messages throught the gateway can be monitored, recorded, analysed by the external server, most of time a private company).""" ) @@ -133,7 +133,7 @@ xmlui.changeContainer("advanced_list", columns=3) xmlui.addLabel(_("Use external XMPP server")) xmlui.addString("external_jid") - xmlui.addButton(self.__menu_id, _(u"Go !"), fields_back=("external_jid",)) + xmlui.addButton(self.__menu_id, _("Go !"), fields_back=("external_jid",)) return xmlui def _gatewaySelectedCb(self, data, profile): @@ -156,7 +156,7 @@ if category != "gateway": log.error( _( - u'INTERNAL ERROR: identity category should always be "gateway" in _getTypeString, got "%s"' + 'INTERNAL ERROR: identity category should always be "gateway" in _getTypeString, got "%s"' ) % category ) @@ -208,7 +208,7 @@ ] if gateways: log.info( - _(u"Found gateway [%(jid)s]: %(identity_name)s") + _("Found gateway [%(jid)s]: %(identity_name)s") % { "jid": entity.full(), "identity_name": " - ".join( @@ -219,7 +219,7 @@ ret.append((success, (entity, gateways))) else: log.info( - _(u"Skipping [%(jid)s] which is not a gateway") + _("Skipping [%(jid)s] which is not a gateway") % {"jid": entity.full()} ) return ret @@ -233,7 +233,7 @@ _defers = [] for item in disco._items: - log.debug(_(u"item found: %s") % item.entity) + log.debug(_("item found: %s") % item.entity) _defers.append(client.disco.requestInfo(item.entity)) dl = defer.DeferredList(_defers) dl.addCallback( @@ -257,7 +257,7 @@ """ client = self.host.getClient(profile) log.debug( - _(u"find gateways (target = %(target)s, profile = %(profile)s)") + _("find gateways (target = %(target)s, profile = %(profile)s)") % {"target": target.full(), "profile": profile} ) d = client.disco.requestItems(target)
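The XEP-0100 hunk is almost entirely the removal of u string prefixes. This is purely cosmetic: every string literal is text (str) on Python 3, and the u prefix, re-accepted since Python 3.3, changes nothing:

    # the prefix is legal (since Python 3.3) but meaningless on Python 3
    assert type(u"gateway") is type("gateway") is str
    assert u"gateway" == "gateway"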
--- a/sat/plugins/plugin_xep_0106.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0106.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Explicit Message Encryption @@ -21,24 +21,24 @@ from sat.core.constants import Const as C from sat.core.log import getLogger from twisted.words.protocols.jabber import xmlstream -from zope.interface import implements +from zope.interface import implementer from wokkel import disco log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"JID Escaping", - C.PI_IMPORT_NAME: u"XEP-0106", - C.PI_TYPE: u"XEP", + C.PI_NAME: "JID Escaping", + C.PI_IMPORT_NAME: "XEP-0106", + C.PI_TYPE: "XEP", C.PI_MODES: C.PLUG_MODE_BOTH, - C.PI_PROTOCOLS: [u"XEP-0106"], + C.PI_PROTOCOLS: ["XEP-0106"], C.PI_DEPENDENCIES: [], - C.PI_MAIN: u"XEP_0106", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: _(u"""(Un)escape JID to use disallowed chars in local parts"""), + C.PI_MAIN: "XEP_0106", + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: _("""(Un)escape JID to use disallowed chars in local parts"""), } -NS_JID_ESCAPING = ur"jid\20escaping" +NS_JID_ESCAPING = r"jid\20escaping" ESCAPE_MAP = { ' ': r'\20', '"': r'\22', @@ -56,7 +56,7 @@ class XEP_0106(object): def __init__(self, host): - self.reverse_map = {v:k for k,v in ESCAPE_MAP.iteritems()} + self.reverse_map = {v:k for k,v in ESCAPE_MAP.items()} def getHandler(self, client): return XEP_0106_handler() @@ -69,14 +69,14 @@ @raise ValueError: text can't be escaped """ if not text or text[0] == ' ' or text[-1] == ' ': - raise ValueError(u"text must not be empty, or start or end with a whitespace") + raise ValueError("text must not be empty, or start or end with a whitespace") escaped = [] for c in text: if c in ESCAPE_MAP: escaped.append(ESCAPE_MAP[c]) else: escaped.append(c) - return u''.join(escaped) + return ''.join(escaped) def unescape(self, escaped): """Unescape text @@ -86,8 +86,8 @@ @raise ValueError: text can't be unescaped """ if not escaped or escaped.startswith(r'\27') or escaped.endswith(r'\27'): - raise ValueError(u"escaped value must not be empty, or start or end with a " - u"whitespace") + raise ValueError("escaped value must not be empty, or start or end with a " + "whitespace") unescaped = [] idx = 0 while idx < len(escaped): @@ -98,11 +98,11 @@ else: unescaped.append(escaped[idx]) idx += 1 - return u''.join(unescaped) + return ''.join(unescaped) +@implementer(disco.IDisco) class XEP_0106_handler(xmlstream.XMPPHandler): - implements(disco.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_JID_ESCAPING)]
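Two incompatibilities show up in the XEP-0106 hunk: the ur"..." literal prefix is a SyntaxError on Python 3 (a raw literal is already text, so r"..." is enough), and dict.iteritems() is gone, dict.items() returning a view instead. Sketch with a shortened copy of the escape map:

    NS_JID_ESCAPING = r"jid\20escaping"        # ur"..." no longer parses on Python 3
    ESCAPE_MAP = {' ': r'\20', '"': r'\22'}    # shortened copy for the example

    # items() returns a lightweight view, which is all the reverse mapping needs
    reverse_map = {v: k for k, v in ESCAPE_MAP.items()}
    print(reverse_map)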
--- a/sat/plugins/plugin_xep_0115.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0115.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0115 @@ -25,7 +25,7 @@ from twisted.words.xish import domish from twisted.words.protocols.jabber import jid from twisted.internet import defer, error -from zope.interface import implements +from zope.interface import implementer from wokkel import disco, iwokkel try: @@ -70,10 +70,10 @@ # optimize check client._caps_optimize = yield self.host.hasFeature(client, NS_CAPS_OPTIMIZE) if client._caps_optimize: - log.info(_(u"Caps optimisation enabled")) + log.info(_("Caps optimisation enabled")) client._caps_sent = False else: - log.warning(_(u"Caps optimisation not available")) + log.warning(_("Caps optimisation not available")) # hash generation _infos = yield client.discoHandler.info(client.jid, client.jid, "") @@ -82,11 +82,11 @@ disco_infos.append(item) cap_hash = client._caps_hash = self.host.memory.disco.generateHash(disco_infos) log.info( - u"Our capability hash has been generated: [{cap_hash}]".format( + "Our capability hash has been generated: [{cap_hash}]".format( cap_hash=cap_hash ) ) - log.debug(u"Generating capability domish.Element") + log.debug("Generating capability domish.Element") c_elt = domish.Element((NS_ENTITY_CAPABILITY, "c")) c_elt["hash"] = "sha-1" c_elt["node"] = C.APP_URL @@ -115,8 +115,8 @@ return True +@implementer(iwokkel.IDisco) class XEP_0115_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent, profile): self.plugin_parent = plugin_parent @@ -142,19 +142,19 @@ Check if we know the version of this capabilities and get the capabilities if necessary """ from_jid = jid.JID(presence["from"]) - c_elem = presence.elements(NS_ENTITY_CAPABILITY, "c").next() + c_elem = next(presence.elements(NS_ENTITY_CAPABILITY, "c")) try: c_ver = c_elem["ver"] c_hash = c_elem["hash"] c_node = c_elem["node"] except KeyError: - log.warning(_(u"Received invalid capabilities tag: %s") % c_elem.toXml()) + log.warning(_("Received invalid capabilities tag: %s") % c_elem.toXml()) return if c_ver in self.host.memory.disco.hashes: # we already know the hash, we update the jid entity log.debug( - u"hash [%(hash)s] already in cache, updating entity [%(jid)s]" + "hash [%(hash)s] already in cache, updating entity [%(jid)s]" % {"hash": c_ver, "jid": from_jid.full()} ) self.host.memory.updateEntityData( @@ -165,8 +165,8 @@ if c_hash != "sha-1": # unknown hash method log.warning( _( - u"Unknown hash method for entity capabilities: [{hash_method}] " - u"(entity: {entity_jid}, node: {node})" + "Unknown hash method for entity capabilities: [{hash_method}] " + "(entity: {entity_jid}, node: {node})" ) .format(hash_method = c_hash, entity_jid = from_jid, node = c_node) ) @@ -178,10 +178,10 @@ if computed_hash != c_ver: log.warning( _( - u"Computed hash differ from given hash:\n" - u"given: [{given}]\n" - u"computed: [{computed}]\n" - u"(entity: {entity_jid}, node: {node})" + "Computed hash differ from given hash:\n" + "given: [{given}]\n" + "computed: [{computed}]\n" + "(entity: {entity_jid}, node: {node})" ).format( given = c_ver, computed = computed_hash, @@ -199,7 +199,7 @@ else failure.getErrorMessage() ) log.error( - _(u"Couldn't retrieve disco info for {jid}: {error}").format( + _("Couldn't retrieve disco info for {jid}: {error}").format( jid=from_jid.full(), error=msg ) )
--- a/sat/plugins/plugin_xep_0163.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0163.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Personal Eventing Protocol (xep-0163) @@ -56,7 +56,7 @@ in_sign="sa{ss}s", out_sign="", method=self.PEPSend, - async=True, + async_=True, ) # args: type(MOOD, TUNE, etc), data, profile_key; self.addPEPEvent("MOOD", NS_USER_MOOD, self.userMoodCB, self.sendMood) @@ -67,7 +67,7 @@ will be filled with PEP features @param profile: profile we are handling """ - disco_info.extend(map(disco.DiscoFeature, self.pep_events)) + disco_info.extend(list(map(disco.DiscoFeature, self.pep_events))) return True def addPEPEvent(self, event_type, node, in_callback, out_callback=None, notify=True): @@ -128,11 +128,11 @@ profile = self.host.memory.getProfileName(profile_key) if not profile: log.error( - _(u"Trying to send personal event with an unknown profile key [%s]") + _("Trying to send personal event with an unknown profile key [%s]") % profile_key ) raise exceptions.ProfileUnknownError - if not event_type in self.pep_out_cb.keys(): + if not event_type in list(self.pep_out_cb.keys()): log.error(_("Trying to send personal event for an unknown type")) raise exceptions.DataError("Type unknown") return self.pep_out_cb[event_type](data, profile)
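In the XEP-0163 hunk, 2to3 wraps map() and keys() in list() because both are lazy on Python 3. The wrappers are harmless but not strictly needed here: extend() accepts any iterable and membership tests work on the dict itself. Illustration with hypothetical values:

    pep_events = ["urn:xmpp:mood:0", "urn:xmpp:tune:0"]   # hypothetical namespaces
    disco_info = []
    # extend() consumes any iterable, so list(map(...)) could simply be map(...)
    disco_info.extend(map(str.upper, pep_events))

    pep_out_cb = {"MOOD": None}
    # "x in d" tests the keys directly; list(d.keys()) is redundant
    assert "MOOD" in pep_out_cb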
--- a/sat/plugins/plugin_xep_0166.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0166.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Jingle (XEP-0166) @@ -35,7 +35,7 @@ import uuid import time -from zope.interface import implements +from zope.interface import implementer IQ_SET = '/iq[@type="set"]' @@ -113,9 +113,9 @@ try: del client.jingle_sessions[sid] except KeyError: - log.debug(u"Jingle session id [{}] is unknown, nothing to delete".format(sid)) + log.debug("Jingle session id [{}] is unknown, nothing to delete".format(sid)) else: - log.debug(u"Jingle session id [{}] deleted".format(sid)) + log.debug("Jingle session id [{}] deleted".format(sid)) ## helpers methods to build stanzas ## @@ -142,14 +142,14 @@ if error.STANZA_CONDITIONS[error_condition]["type"] == "cancel" and sid: self._delSession(client, sid) log.warning( - u"Error while managing jingle session, cancelling: {condition}".format( + "Error while managing jingle session, cancelling: {condition}".format( condition=error_condition ) ) client.send(iq_elt) def _terminateEb(self, failure_): - log.warning(_(u"Error while terminating session: {msg}").format(msg=failure_)) + log.warning(_("Error while terminating session: {msg}").format(msg=failure_)) def terminate(self, client, reason, session): """Terminate the session @@ -163,7 +163,7 @@ client, session, XEP_0166.A_SESSION_TERMINATE ) reason_elt = jingle_elt.addElement("reason") - if isinstance(reason, basestring): + if isinstance(reason, str): reason_elt.addElement(reason) else: for elt in reason: @@ -182,7 +182,7 @@ @param sid(unicode): jingle session id """ log.warning( - u"Error while sending jingle <iq/> stanza: {failure_}".format( + "Error while sending jingle <iq/> stanza: {failure_}".format( failure_=failure_.value ) ) @@ -227,12 +227,12 @@ """ if namespace in self._applications: raise exceptions.ConflictError( - u"Trying to register already registered namespace {}".format(namespace) + "Trying to register already registered namespace {}".format(namespace) ) self._applications[namespace] = ApplicationData( namespace=namespace, handler=handler ) - log.debug(u"new jingle application registered") + log.debug("new jingle application registered") def registerTransport(self, namespace, transport_type, handler, priority=0): """Register a transport plugin @@ -252,7 +252,7 @@ ) if namespace in self._transports: raise exceptions.ConflictError( - u"Trying to register already registered namespace {}".format(namespace) + "Trying to register already registered namespace {}".format(namespace) ) transport_data = TransportData( namespace=namespace, handler=handler, priority=priority @@ -262,7 +262,7 @@ key=lambda transport_data: transport_data.priority, reverse=True ) self._transports[namespace] = transport_data - log.debug(u"new jingle transport registered") + log.debug("new jingle transport registered") @defer.inlineCallbacks def transportReplace(self, client, transport_ns, session, content_name): @@ -280,7 +280,7 @@ try: transport = self._transports[transport_ns] except KeyError: - raise exceptions.InternalError(u"Unkown transport") + raise exceptions.InternalError("Unkown transport") yield content_data["transport"].handler.jingleHandler( client, XEP_0166.A_DESTROY, session, content_name, None ) @@ -322,7 +322,7 @@ ) transport_elt["sid"] = content_data["transport_data"]["sid"] else: - raise exceptions.InternalError(u"unmanaged action {}".format(action)) + raise 
exceptions.InternalError("unmanaged action {}".format(action)) return iq_elt, context_elt @@ -355,9 +355,9 @@ assert contents # there must be at least one content if (peer_jid == client.jid or client.is_component and peer_jid.host == client.jid.host): - raise ValueError(_(u"You can't do a jingle session with yourself")) + raise ValueError(_("You can't do a jingle session with yourself")) initiator = client.jid - sid = unicode(uuid.uuid4()) + sid = str(uuid.uuid4()) # TODO: session cleaning after timeout ? session = client.jingle_sessions[sid] = { "id": sid, @@ -383,7 +383,7 @@ application = self._applications[app_ns] except KeyError: raise exceptions.InternalError( - u"No application registered for {}".format(app_ns) + "No application registered for {}".format(app_ns) ) # and the transport plugin @@ -392,7 +392,7 @@ transport = self._type_transports[transport_type][0] except IndexError: raise exceptions.InternalError( - u"No transport registered for {}".format(transport_type) + "No transport registered for {}".format(transport_type) ) # we build the session data @@ -407,7 +407,7 @@ try: content_name = content["name"] except KeyError: - content_name = unicode(uuid.uuid4()) + content_name = str(uuid.uuid4()) else: if content_name in contents_dict: raise exceptions.InternalError( @@ -471,7 +471,7 @@ self, client, action, session, content_name, desc_elt ): """This method request confirmation for a jingle session""" - log.debug(u"Using generic jingle confirmation method") + log.debug("Using generic jingle confirmation method") return xml_tools.deferConfirm( self.host, _(CONFIRM_TXT).format(entity=session["peer_jid"].full()), @@ -489,7 +489,7 @@ @param request(domish.Element): received IQ request """ request.handled = True - jingle_elt = request.elements(NS_JINGLE, "jingle").next() + jingle_elt = next(request.elements(NS_JINGLE, "jingle")) # first we need the session id try: @@ -497,7 +497,7 @@ if not sid: raise KeyError except KeyError: - log.warning(u"Received jingle request has no sid attribute") + log.warning("Received jingle request has no sid attribute") self.sendError(client, "bad-request", None, request) return @@ -507,7 +507,7 @@ if not action: raise KeyError except KeyError: - log.warning(u"Received jingle request has no action") + log.warning("Received jingle request has no action") self.sendError(client, "bad-request", None, request) return @@ -521,14 +521,14 @@ pass elif action == XEP_0166.A_SESSION_TERMINATE: log.debug( - u"ignoring session terminate action (inexisting session id): {request_id} [{profile}]".format( + "ignoring session terminate action (inexisting session id): {request_id} [{profile}]".format( request_id=sid, profile=client.profile ) ) return else: log.warning( - u"Received request for an unknown session id: {request_id} [{profile}]".format( + "Received request for an unknown session id: {request_id} [{profile}]".format( request_id=sid, profile=client.profile ) ) @@ -549,14 +549,14 @@ else: if session["peer_jid"] != peer_jid: log.warning( - u"sid conflict ({}), the jid doesn't match. Can be a collision, a hack attempt, or a bad sid generation".format( + "sid conflict ({}), the jid doesn't match. 
Can be a collision, a hack attempt, or a bad sid generation".format( sid ) ) self.sendError(client, "service-unavailable", sid, request) return if session["id"] != sid: - log.error(u"session id doesn't match") + log.error("session id doesn't match") self.sendError(client, "service-unavailable", sid, request) raise exceptions.InternalError @@ -577,7 +577,7 @@ elif action == XEP_0166.A_TRANSPORT_REJECT: self.onTransportReject(client, request, jingle_elt, session) else: - raise exceptions.InternalError(u"Unknown action {}".format(action)) + raise exceptions.InternalError("Unknown action {}".format(action)) ## Actions callbacks ## @@ -626,7 +626,7 @@ try: content_data = contents_dict[name] except KeyError: - log.warning(u"Other peer try to access an unknown content") + log.warning("Other peer try to access an unknown content") self.sendError(client, "bad-request", session["id"], request) raise exceptions.CancelError @@ -648,7 +648,7 @@ application = self._applications[app_ns] except KeyError: log.warning( - u"Unmanaged application namespace [{}]".format(app_ns) + "Unmanaged application namespace [{}]".format(app_ns) ) self.sendError( client, "service-unavailable", session["id"], request @@ -661,7 +661,7 @@ # the content exists, we check that we have not a former desc_elt if "desc_elt" in content_data: raise exceptions.InternalError( - u"desc_elt should not exist at this point" + "desc_elt should not exist at this point" ) content_data["desc_elt"] = desc_elt @@ -684,7 +684,7 @@ transport = self._transports[transport_ns] except KeyError: raise exceptions.InternalError( - u"No transport registered for namespace {}".format( + "No transport registered for namespace {}".format( transport_ns ) ) @@ -694,7 +694,7 @@ # the content exists, we check that we have not a former transport_elt if "transport_elt" in content_data: raise exceptions.InternalError( - u"transport_elt should not exist at this point" + "transport_elt should not exist at this point" ) content_data["transport_elt"] = transport_elt @@ -743,7 +743,7 @@ """ contents_dict = session["contents"] defers_list = [] - for content_name, content_data in contents_dict.iteritems(): + for content_name, content_data in contents_dict.items(): for method_name, handler_key, default_cb, elt_name in ( (app_method_name, "application", app_default_cb, "desc_elt"), (transp_method_name, "transport", transp_default_cb, "transport_elt"), @@ -757,7 +757,7 @@ except AttributeError: if default_cb is None: raise exceptions.NotFound( - u"{} not implemented !".format(method_name) + "{} not implemented !".format(method_name) ) else: method = default_cb @@ -846,7 +846,7 @@ defers_list = [] - for content_name, content_data in session["contents"].iteritems(): + for content_name, content_data in session["contents"].items(): content_elt = jingle_elt.addElement("content") content_elt["creator"] = XEP_0166.ROLE_INITIATOR content_elt["name"] = content_name @@ -907,9 +907,9 @@ # TODO: check reason, display a message to user if needed log.debug("Jingle Session {} terminated".format(session["id"])) try: - reason_elt = jingle_elt.elements(NS_JINGLE, "reason").next() + reason_elt = next(jingle_elt.elements(NS_JINGLE, "reason")) except StopIteration: - log.warning(u"No reason given for session termination") + log.warning("No reason given for session termination") reason_elt = jingle_elt.addElement("reason") terminate_defers = self._callPlugins( @@ -937,7 +937,7 @@ @param jingle_elt(domish.Element): the <jingle> element @param session(dict): session data """ - log.debug(u"Jingle 
session {} has been accepted".format(session["id"])) + log.debug("Jingle session {} has been accepted".format(session["id"])) try: self._parseElements(jingle_elt, session, request, client) @@ -965,7 +965,7 @@ client.send(xmlstream.toResponse(request, "result")) def _onSessionEb(self, failure_, client, request, jingle_elt, session): - log.error(u"Error while handling onSessionInfo: {}".format(failure_.value)) + log.error("Error while handling onSessionInfo: {}".format(failure_.value)) # XXX: only error managed so far, maybe some applications/transports need more self.sendError( client, "feature-not-implemented", None, request, "unsupported-info" @@ -1015,7 +1015,7 @@ @param jingle_elt(domish.Element): the <jingle> element @param session(dict): session data """ - log.debug(u"Other peer wants to replace the transport") + log.debug("Other peer wants to replace the transport") try: self._parseElements( jingle_elt, session, request, client, with_application=False @@ -1028,7 +1028,7 @@ content_name = None to_replace = [] - for content_name, content_data in session["contents"].iteritems(): + for content_name, content_data in session["contents"].items(): try: transport_elt = content_data.pop("transport_elt") except KeyError: @@ -1038,7 +1038,7 @@ transport = self._transports[transport_ns] except KeyError: log.warning( - u"Other peer want to replace current transport with an unknown one: {}".format( + "Other peer want to replace current transport with an unknown one: {}".format( transport_ns ) ) @@ -1093,7 +1093,7 @@ @param jingle_elt(domish.Element): the <jingle> element @param session(dict): session data """ - log.debug(u"new transport has been accepted") + log.debug("new transport has been accepted") try: self._parseElements( @@ -1140,7 +1140,7 @@ @param jingle_elt(domish.Element): the <jingle> element @param session(dict): session data """ - log.debug(u"Jingle session {} has been accepted".format(session["id"])) + log.debug("Jingle session {} has been accepted".format(session["id"])) try: self._parseElements( @@ -1152,7 +1152,7 @@ # The parsing was OK, we send the <iq> result client.send(xmlstream.toResponse(request, "result")) - for content_name, content_data in session["contents"].iteritems(): + for content_name, content_data in session["contents"].items(): try: transport_elt = content_data.pop("transport_elt") except KeyError: @@ -1167,8 +1167,8 @@ ) +@implementer(iwokkel.IDisco) class XEP_0166_handler(xmlstream.XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
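The jingle plugin gathers a few more recurring substitutions: basestring no longer exists (isinstance checks use str, which now also rejects bytes), unicode(uuid.uuid4()) becomes str(...), and iteritems() becomes items(). A compact sketch:

    import uuid

    sid = str(uuid.uuid4())        # was: unicode(uuid.uuid4())

    def reason_elements(reason):
        # basestring is gone; str is the single text type on Python 3.
        # bytes would now fail this check, which is normally what is wanted.
        if isinstance(reason, str):
            return [reason]
        return list(reason)

    print(sid, reason_elements("success"), reason_elements(["success", "gone"]))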
--- a/sat/plugins/plugin_xep_0184.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0184.py Tue Aug 13 19:08:41 2019 +0200 @@ -26,7 +26,7 @@ log = getLogger(__name__) from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer try: from twisted.words.protocols.xmlstream import XMPPHandler @@ -163,7 +163,7 @@ @param msg_elt: message element @param client: %(doc_client)s""" msg_elt.handled = True - rcv_elt = msg_elt.elements(NS_MESSAGE_DELIVERY_RECEIPTS, "received").next() + rcv_elt = next(msg_elt.elements(NS_MESSAGE_DELIVERY_RECEIPTS, "received")) msg_id = rcv_elt["id"] try: @@ -195,8 +195,8 @@ return self.host.memory.getParamA(PARAM_NAME, PARAM_KEY, profile_key=profile) +@implementer(iwokkel.IDisco) class XEP_0184_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent, profile): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_xep_0198.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0198.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing raw XML log @@ -28,26 +28,26 @@ from twisted.internet import task, reactor from functools import partial from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer import collections import time log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Stream Management", - C.PI_IMPORT_NAME: u"XEP-0198", - C.PI_TYPE: u"XEP", + C.PI_NAME: "Stream Management", + C.PI_IMPORT_NAME: "XEP-0198", + C.PI_TYPE: "XEP", C.PI_MODES: C.PLUG_MODE_BOTH, - C.PI_PROTOCOLS: [u"XEP-0198"], + C.PI_PROTOCOLS: ["XEP-0198"], C.PI_DEPENDENCIES: [], - C.PI_RECOMMENDATIONS: [u"XEP-0045", u"XEP-0313"], - C.PI_MAIN: u"XEP_0198", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: _(u"""Implementation of Stream Management"""), + C.PI_RECOMMENDATIONS: ["XEP-0045", "XEP-0313"], + C.PI_MAIN: "XEP_0198", + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: _("""Implementation of Stream Management"""), } -NS_SM = u"urn:xmpp:sm:3" +NS_SM = "urn:xmpp:sm:3" SM_ENABLED = '/enabled[@xmlns="' + NS_SM + '"]' SM_RESUMED = '/resumed[@xmlns="' + NS_SM + '"]' SM_FAILED = '/failed[@xmlns="' + NS_SM + '"]' @@ -94,7 +94,7 @@ if enabled: if self._enabled: raise exceptions.InternalError( - u"Stream Management can't be enabled twice") + "Stream Management can't be enabled twice") self._enabled = True callback, kw = self.callback_data self.timer = task.LoopingCall(callback, **kw) @@ -119,7 +119,7 @@ self.last_ack_r = 0 if self.req_timer is not None: if self.req_timer.active(): - log.error(u"req_timer has been called/cancelled but not reset") + log.error("req_timer has been called/cancelled but not reset") else: self.req_timer.cancel() self.req_timer = None @@ -134,7 +134,7 @@ def __init__(self, host): log.info(_("Plugin Stream Management initialization")) self.host = host - host.registerNamespace(u'sm', NS_SM) + host.registerNamespace('sm', NS_SM) host.trigger.add("stream_hooks", self.addHooks) host.trigger.add("xml_init", self._XMLInitTrigger) host.trigger.add("disconnecting", self._disconnectingTrigger) @@ -142,12 +142,12 @@ try: self._ack_timeout = int(host.memory.getConfig("", "ack_timeout", ACK_TIMEOUT)) except ValueError: - log.error(_(u"Invalid ack_timeout value, please check your configuration")) + log.error(_("Invalid ack_timeout value, please check your configuration")) self._ack_timeout = ACK_TIMEOUT if not self._ack_timeout: - log.info(_(u"Ack timeout disabled")) + log.info(_("Ack timeout disabled")) else: - log.info(_(u"Ack timeout set to {timeout}s").format( + log.info(_("Ack timeout set to {timeout}s").format( timeout=self._ack_timeout)) def profileConnecting(self, client): @@ -165,11 +165,11 @@ def _XMLInitTrigger(self, client): """Enable or resume a stream mangement""" - if not (NS_SM, u'sm') in client.xmlstream.features: + if not (NS_SM, 'sm') in client.xmlstream.features: log.warning(_( - u"Your server doesn't support stream management ({namespace}), this is " - u"used to improve connection problems detection (like network outages). " - u"Please ask your server administrator to enable this feature.".format( + "Your server doesn't support stream management ({namespace}), this is " + "used to improve connection problems detection (like network outages). 
" + "Please ask your server administrator to enable this feature.".format( namespace=NS_SM))) return True session = client._xep_0198_session @@ -187,7 +187,7 @@ if session.resume_enabled: # we are resuming a session resume_elt = domish.Element((NS_SM, 'resume')) - resume_elt['h'] = unicode(session.in_counter) + resume_elt['h'] = str(session.in_counter) resume_elt['previd'] = session.session_id client.send(resume_elt) session.resuming = True @@ -197,7 +197,7 @@ # we start a new session assert session.out_counter == 0 enable_elt = domish.Element((NS_SM, 'enable')) - enable_elt[u'resume'] = u'true' + enable_elt['resume'] = 'true' client.send(enable_elt) session.enabled = True return True @@ -246,14 +246,14 @@ if server_acked > session.buffer_idx: diff = server_acked - session.buffer_idx try: - for i in xrange(diff): + for i in range(diff): session.buffer.pop() except IndexError: log.error( - u"error while cleaning buffer, invalid index (buffer is empty):\n" - u"diff = {diff}\n" - u"server_acked = {server_acked}\n" - u"buffer_idx = {buffer_id}".format( + "error while cleaning buffer, invalid index (buffer is empty):\n" + "diff = {diff}\n" + "server_acked = {server_acked}\n" + "buffer_idx = {buffer_id}".format( diff=diff, server_acked=server_acked, buffer_id=session.buffer_idx)) session.buffer_idx += diff @@ -272,15 +272,15 @@ break else: if ((discard_results - and stanza.name == u'iq' - and stanza.getAttribute(u'type') == 'result')): + and stanza.name == 'iq' + and stanza.getAttribute('type') == 'result')): continue client.send(stanza) def sendAck(self, client): """Send an answer element with current IN counter""" a_elt = domish.Element((NS_SM, 'a')) - a_elt['h'] = unicode(client._xep_0198_session.in_counter) + a_elt['h'] = str(client._xep_0198_session.in_counter) client.send(a_elt) def requestAck(self, client): @@ -298,9 +298,9 @@ normal_host, normal_port = connector.normal_location del connector.normal_location log.warning(_( - u"Connection failed using location given by server (host: {host}, port: " - u"{port}), switching to normal host and port (host: {normal_host}, port: " - u"{normal_port})".format(host=connector.host, port=connector.port, + "Connection failed using location given by server (host: {host}, port: " + "{port}), switching to normal host and port (host: {normal_host}, port: " + "{normal_port})".format(host=connector.host, port=connector.port, normal_host=normal_host, normal_port=normal_port))) connector.host, connector.port = normal_host, normal_port connector.connectionFailed = connector.connectionFailed_ori @@ -312,14 +312,14 @@ session.in_counter = 0 # we check that resuming is possible and that we have a session id - resume = C.bool(enabled_elt.getAttribute(u'resume')) - session_id = enabled_elt.getAttribute(u'id') + resume = C.bool(enabled_elt.getAttribute('resume')) + session_id = enabled_elt.getAttribute('id') if not session_id: - log.warning(_(u'Incorrect <enabled/> element received, no "id" attribute')) + log.warning(_('Incorrect <enabled/> element received, no "id" attribute')) if not resume or not session_id: log.warning(_( - u"You're server doesn't support session resuming with stream management, " - u"please contact your server administrator to enable it")) + "You're server doesn't support session resuming with stream management, " + "please contact your server administrator to enable it")) return session.session_id = session_id @@ -330,7 +330,7 @@ # location, in case server want resuming session to be elsewhere try: - location = enabled_elt[u'location'] + 
location = enabled_elt['location'] except KeyError: pass else: @@ -339,7 +339,7 @@ domain, port = location.split(':', 1) port = int(port) except ValueError: - log.warning(_(u"Invalid location received: {location}") + log.warning(_("Invalid location received: {location}") .format(location=location)) else: session.location = (domain, port) @@ -354,17 +354,17 @@ # resuming time try: - max_s = int(enabled_elt[u'max']) + max_s = int(enabled_elt['max']) except (ValueError, KeyError) as e: if isinstance(e, ValueError): - log.warning(_(u'Invalid "max" attribute')) + log.warning(_('Invalid "max" attribute')) max_s = RESUME_MAX - log.info(_(u"Using default session max value ({max_s} s).".format( + log.info(_("Using default session max value ({max_s} s).".format( max_s=max_s))) - log.info(_(u"Stream Management enabled")) + log.info(_("Stream Management enabled")) else: log.info(_( - u"Stream Management enabled, with a resumption time of {res_m} min" + "Stream Management enabled, with a resumption time of {res_m} min" .format(res_m = max_s/60))) session.session_max = max_s @@ -380,8 +380,8 @@ # now we can continue the session session.enabled = True d_time = time.time() - session.disconnected_time - log.info(_(u"Stream session resumed (disconnected for {d_time} s, {count} " - u"stanza(s) resent)").format(d_time=int(d_time), count=resend_count)) + log.info(_("Stream session resumed (disconnected for {d_time} s, {count} " + "stanza(s) resent)").format(d_time=int(d_time), count=resend_count)) def onFailed(self, failed_elt, client): session = client._xep_0198_session @@ -393,11 +393,11 @@ del session.resuming except AttributeError: # stream management can't be started at all - msg = _(u"Can't use stream management") + msg = _("Can't use stream management") if condition_elt is None: - log.error(msg + u'.') + log.error(msg + '.') else: - log.error(_(u"{msg}: {reason}").format( + log.error(_("{msg}: {reason}").format( msg=msg, reason=condition_elt.name)) else: # only stream resumption failed, we can try full session init @@ -406,12 +406,12 @@ # jid. This is experimental and may not be safe. It may be more # secured to abord the connection and restart everything with a fresh # client. - msg = _(u"stream resumption not possible, restarting full session") + msg = _("stream resumption not possible, restarting full session") if condition_elt is None: - log.warning(u'{msg}.'.format(msg=msg)) + log.warning('{msg}.'.format(msg=msg)) else: - log.warning(u"{msg}: {reason}".format( + log.warning("{msg}: {reason}".format( msg=msg, reason=condition_elt.name)) # stream resumption failed, but we still can do normal stream management # we restore attributes as if the session was new, and init stream @@ -420,9 +420,9 @@ if client.conn_deferred.called: client.conn_deferred = defer.Deferred() else: - log.error(u"conn_deferred should be called at this point") - plg_0045 = self.host.plugins.get(u'XEP-0045') - plg_0313 = self.host.plugins.get(u'XEP-0313') + log.error("conn_deferred should be called at this point") + plg_0045 = self.host.plugins.get('XEP-0045') + plg_0313 = self.host.plugins.get('XEP-0313') # FIXME: we should call all loaded plugins with generic callbacks # (e.g. 
prepareResume and resume), so a hot resuming can be done @@ -493,14 +493,14 @@ try: server_acked = int(a_elt['h']) except ValueError: - log.warning(_(u"Server returned invalid ack element, disabling stream " - u"management: {xml}").format(xml=a_elt)) + log.warning(_("Server returned invalid ack element, disabling stream " + "management: {xml}").format(xml=a_elt)) session.enabled = False return if server_acked > session.out_counter: - log.error(_(u"Server acked more stanzas than we have sent, disabling stream " - u"management.")) + log.error(_("Server acked more stanzas than we have sent, disabling stream " + "management.")) session.reset() return @@ -509,17 +509,17 @@ def onAckTimeOut(self, client): """Called when a requested ACK has not been received in time""" - log.info(_(u"Ack was not received in time, aborting connection")) + log.info(_("Ack was not received in time, aborting connection")) transport = client.xmlstream.transport if transport is None: - log.warning(u"transport was already removed") + log.warning("transport was already removed") else: transport.abortConnection() client._xep_0198_session.req_timer = None +@implementer(iwokkel.IDisco) class XEP_0198_handler(xmlstream.XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
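Besides xrange() becoming range() (already a lazy sequence on Python 3), one detail in the XEP-0198 hunk may be worth double-checking: "/" is true division on Python 3, so the resumption-time log (max_s/60) now formats as a float; "//" would keep the former integer result:

    max_s = 600
    print(max_s / 60)     # 10.0 on Python 3 (true division)
    print(max_s // 60)    # 10, the result the Python 2 code produced

    # xrange() is gone; range() is lazy, so no list is built here
    for i in range(3):
        pass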
--- a/sat/plugins/plugin_xep_0199.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0199.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Delayed Delivery (XEP-0199) @@ -25,21 +25,21 @@ from sat.core.constants import Const as C from wokkel import disco, iwokkel from twisted.words.protocols.jabber import xmlstream, jid -from zope.interface import implements +from zope.interface import implementer import time PLUGIN_INFO = { - C.PI_NAME: u"XMPP PING", - C.PI_IMPORT_NAME: u"XEP-0199", - C.PI_TYPE: u"XEP", - C.PI_PROTOCOLS: [u"XEP-199"], + C.PI_NAME: "XMPP PING", + C.PI_IMPORT_NAME: "XEP-0199", + C.PI_TYPE: "XEP", + C.PI_PROTOCOLS: ["XEP-199"], C.PI_MAIN: "XEP_0199", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: D_(u"""Implementation of XMPP Ping"""), + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: D_("""Implementation of XMPP Ping"""), } -NS_PING = u"urn:xmpp:ping" +NS_PING = "urn:xmpp:ping" PING_REQUEST = C.IQ_GET + '/ping[@xmlns="' + NS_PING + '"]' @@ -49,11 +49,11 @@ log.info(_("XMPP Ping plugin initialization")) self.host = host host.bridge.addMethod( - "ping", ".plugin", in_sign='ss', out_sign='d', method=self._ping, async=True) + "ping", ".plugin", in_sign='ss', out_sign='d', method=self._ping, async_=True) try: self.text_cmds = self.host.plugins[C.TEXT_CMDS] except KeyError: - log.info(_(u"Text commands not available")) + log.info(_("Text commands not available")) else: self.text_cmds.registerTextCommands(self) @@ -62,7 +62,7 @@ def _pingRaiseIfFailure(self, pong): """If ping didn't succeed, raise the failure, else return pong delay""" - if pong[0] != u"PONG": + if pong[0] != "PONG": raise pong[0] return pong[1] @@ -75,7 +75,7 @@ def _pingCb(self, iq_result, send_time): receive_time = time.time() - return (u"PONG", receive_time - send_time) + return ("PONG", receive_time - send_time) def _pingEb(self, failure_, send_time): receive_time = time.time() @@ -102,11 +102,11 @@ """Send feedback to client when pong data is received""" txt_cmd = self.host.plugins[C.TEXT_CMDS] - if pong[0] == u"PONG": - txt_cmd.feedBack(client, u"PONG ({time} s)".format(time=pong[1]), mess_data) + if pong[0] == "PONG": + txt_cmd.feedBack(client, "PONG ({time} s)".format(time=pong[1]), mess_data) else: txt_cmd.feedBack( - client, _(u"ping error ({err_msg}). Response time: {time} s") + client, _("ping error ({err_msg}). Response time: {time} s") .format(err_msg=pong[0], time=pong[1]), mess_data) def cmd_ping(self, client, mess_data): @@ -120,7 +120,7 @@ entity_jid = jid.JID(mess_data["unparsed"].strip()) except RuntimeError: txt_cmd = self.host.plugins[C.TEXT_CMDS] - txt_cmd.feedBack(client, _(u'Invalid jid: "{entity_jid}"').format( + txt_cmd.feedBack(client, _('Invalid jid: "{entity_jid}"').format( entity_jid=mess_data["unparsed"].strip()), mess_data) return False else: @@ -131,15 +131,15 @@ return False def onPingRequest(self, iq_elt, client): - log.info(_(u"XMPP PING received from {from_jid} [{profile}]").format( + log.info(_("XMPP PING received from {from_jid} [{profile}]").format( from_jid=iq_elt["from"], profile=client.profile)) iq_elt.handled = True iq_result_elt = xmlstream.toResponse(iq_elt, "result") client.send(iq_result_elt) +@implementer(iwokkel.IDisco) class XEP_0199_handler(xmlstream.XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_xep_0203.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0203.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Delayed Delivery (XEP-0203) @@ -30,7 +30,7 @@ from twisted.words.protocols.xmlstream import XMPPHandler except ImportError: from wokkel.subprotocols import XMPPHandler -from zope.interface import implements +from zope.interface import implementer NS_DD = "urn:xmpp:delay" @@ -71,8 +71,8 @@ return elt +@implementer(iwokkel.IDisco) class XEP_0203_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent, profile): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_xep_0231.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0231.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Bit of Binary handling (XEP-0231) @@ -25,7 +25,7 @@ log = getLogger(__name__) from sat.tools import xml_tools from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.python import failure from twisted.words.protocols.jabber import xmlstream from twisted.words.protocols.jabber import jid @@ -49,13 +49,13 @@ ), } -NS_BOB = u"urn:xmpp:bob" +NS_BOB = "urn:xmpp:bob" IQ_BOB_REQUEST = C.IQ_GET + '/data[@xmlns="' + NS_BOB + '"]' class XEP_0231(object): def __init__(self, host): - log.info(_(u"plugin Bits of Binary initialization")) + log.info(_("plugin Bits of Binary initialization")) self.host = host host.registerNamespace("bob", NS_BOB) host.trigger.add("xhtml_post_treat", self.XHTMLTrigger) @@ -65,7 +65,7 @@ in_sign="sss", out_sign="s", method=self._getFile, - async=True, + async_=True, ) def dumpData(self, cache, data_elt, cid): @@ -83,7 +83,7 @@ if max_age < 0: raise ValueError except (KeyError, ValueError): - log.warning(u"invalid max-age found") + log.warning("invalid max-age found") max_age = None with cache.cacheData( @@ -99,13 +99,13 @@ return XEP_0231_handler(self) def _requestCb(self, iq_elt, cache, cid): - for data_elt in iq_elt.elements(NS_BOB, u"data"): + for data_elt in iq_elt.elements(NS_BOB, "data"): if data_elt.getAttribute("cid") == cid: file_path = self.dumpData(cache, data_elt, cid) return file_path log.warning( - u"invalid data stanza received, requested cid was not found:\n{iq_elt}\nrequested cid: {cid}".format( + "invalid data stanza received, requested cid was not found:\n{iq_elt}\nrequested cid: {cid}".format( iq_elt=iq_elt, cid=cid ) ) @@ -113,7 +113,7 @@ def _requestEb(self, failure_): """Log the error and continue errback chain""" - log.warning(u"Can't get requested data:\n{reason}".format(reason=failure_)) + log.warning("Can't get requested data:\n{reason}".format(reason=failure_)) return failure_ def requestData(self, client, to_jid, cid, cache=None): @@ -137,24 +137,24 @@ return d def _setImgEltSrc(self, path, img_elt): - img_elt[u"src"] = u"file://{}".format(path) + img_elt["src"] = "file://{}".format(path) def XHTMLTrigger(self, client, message_elt, body_elt, lang, treat_d): - for img_elt in xml_tools.findAll(body_elt, C.NS_XHTML, u"img"): - source = img_elt.getAttribute(u"src", "") - if source.startswith(u"cid:"): + for img_elt in xml_tools.findAll(body_elt, C.NS_XHTML, "img"): + source = img_elt.getAttribute("src", "") + if source.startswith("cid:"): cid = source[4:] file_path = client.cache.getFilePath(cid) if file_path is not None: # image is in cache, we change the url - img_elt[u"src"] = u"file://{}".format(file_path) + img_elt["src"] = "file://{}".format(file_path) continue else: # image is not in cache, is it given locally? 
- for data_elt in message_elt.elements(NS_BOB, u"data"): + for data_elt in message_elt.elements(NS_BOB, "data"): if data_elt.getAttribute("cid") == cid: file_path = self.dumpData(client.cache, data_elt, cid) - img_elt[u"src"] = u"file://{}".format(file_path) + img_elt["src"] = "file://{}".format(file_path) break else: # cid not found locally, we need to request it @@ -172,7 +172,7 @@ iq_elt.handled = True data_elt = next(iq_elt.elements(NS_BOB, "data")) try: - cid = data_elt[u"cid"] + cid = data_elt["cid"] except KeyError: error_elt = jabber_error.StanzaError("not-acceptable").toResponse(iq_elt) client.send(error_elt) @@ -189,9 +189,9 @@ result_elt = xmlstream.toResponse(iq_elt, "result") data_elt = result_elt.addElement((NS_BOB, "data"), content=data.encode("base64")) - data_elt[u"cid"] = cid - data_elt[u"type"] = metadata[u"mime_type"] - data_elt[u"max-age"] = unicode(int(max(0, metadata["eol"] - time.time()))) + data_elt["cid"] = cid + data_elt["type"] = metadata["mime_type"] + data_elt["max-age"] = str(int(max(0, metadata["eol"] - time.time()))) client.send(result_elt) def _getFile(self, peer_jid_s, cid, profile): @@ -217,7 +217,7 @@ else: # file not in cache, is it given locally? if parent_elt is not None: - for data_elt in parent_elt.elements(NS_BOB, u"data"): + for data_elt in parent_elt.elements(NS_BOB, "data"): if data_elt.getAttribute("cid") == cid: return defer.succeed(self.dumpData(client.cache, data_elt, cid)) @@ -226,8 +226,8 @@ return self.requestData(client, peer_jid, cid) +@implementer(iwokkel.IDisco) class XEP_0231_handler(xmlstream.XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
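One spot in the XEP-0231 hunk looks like a leftover to verify: data.encode("base64") is kept, but on Python 3 bytes have no encode() and the "base64" text codec was removed, so this line would likely fail at runtime. If so, the usual replacement is base64.b64encode (names below are illustrative):

    import base64

    data = b"\x00binary payload\xff"                  # cache content read as bytes
    b64_text = base64.b64encode(data).decode("ascii")
    # b64_text can then be used as the text content of the <data/> element,
    # e.g. result_elt.addElement((NS_BOB, "data"), content=b64_text)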
--- a/sat/plugins/plugin_xep_0234.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0234.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Jingle File Transfer (XEP-0234) @@ -24,7 +24,7 @@ log = getLogger(__name__) from sat.core import exceptions from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from sat.tools import utils from sat.tools import stream from sat.tools.common import date_utils @@ -55,7 +55,7 @@ C.PI_DESCRIPTION: _("""Implementation of Jingle File Transfer"""), } -EXTRA_ALLOWED = {u"path", u"namespace", u"file_desc", u"file_hash"} +EXTRA_ALLOWED = {"path", "namespace", "file_desc", "file_hash"} Range = namedtuple("Range", ("offset", "length")) @@ -72,7 +72,7 @@ self._j.registerApplication(NS_JINGLE_FT, self) self._f = host.plugins["FILE"] self._f.register( - NS_JINGLE_FT, self.fileJingleSend, priority=10000, method_name=u"Jingle" + NS_JINGLE_FT, self.fileJingleSend, priority=10000, method_name="Jingle" ) self._hash = self.host.plugins["XEP-0300"] host.bridge.addMethod( @@ -81,7 +81,7 @@ in_sign="ssssa{ss}s", out_sign="", method=self._fileJingleSend, - async=True, + async_=True, ) host.bridge.addMethod( "fileJingleRequest", @@ -89,7 +89,7 @@ in_sign="sssssa{ss}s", out_sign="s", method=self._fileJingleRequest, - async=True, + async_=True, ) def getHandler(self, client): @@ -102,7 +102,7 @@ @param content_name(unicode): name of the content @return (unicode): unique progress id """ - return u"{}_{}".format(session["id"], content_name) + return "{}_{}".format(session["id"], content_name) # generic methods @@ -127,32 +127,32 @@ @trigger XEP-0234_buildFileElement(file_elt, extra_args): can be used to extend elements to add """ if file_elt is None: - file_elt = domish.Element((NS_JINGLE_FT, u"file")) + file_elt = domish.Element((NS_JINGLE_FT, "file")) for name, value in ( - (u"name", name), - (u"size", size), + ("name", name), + ("size", size), ("media-type", mime_type), - (u"desc", desc), - (u"path", path), - (u"namespace", namespace), + ("desc", desc), + ("path", path), + ("namespace", namespace), ): if value is not None: - file_elt.addElement(name, content=unicode(value)) + file_elt.addElement(name, content=str(value)) if modified is not None: if isinstance(modified, int): - file_elt.addElement(u"date", utils.xmpp_date(modified or None)) + file_elt.addElement("date", utils.xmpp_date(modified or None)) else: - file_elt.addElement(u"date", modified) + file_elt.addElement("date", modified) elif "created" in kwargs: - file_elt.addElement(u"date", utils.xmpp_date(kwargs.pop("created"))) + file_elt.addElement("date", utils.xmpp_date(kwargs.pop("created"))) - range_elt = file_elt.addElement(u"range") + range_elt = file_elt.addElement("range") if transfer_range is not None: if transfer_range.offset is not None: - range_elt[u"offset"] = transfer_range.offset + range_elt["offset"] = transfer_range.offset if transfer_range.length is not None: - range_elt[u"length"] = transfer_range.length + range_elt["length"] = transfer_range.length if file_hash is not None: if not file_hash: file_elt.addChild(self._hash.buildHashUsedElt()) @@ -160,7 +160,7 @@ file_elt.addChild(self._hash.buildHashElt(file_hash, hash_algo)) elif hash_algo is not None: file_elt.addChild(self._hash.buildHashUsedElt(hash_algo)) - self.host.trigger.point(u"XEP-0234_buildFileElement", file_elt, extra_args=kwargs) + self.host.trigger.point("XEP-0234_buildFileElement", 
file_elt, extra_args=kwargs) if kwargs: for kw in kwargs: log.debug("ignored keyword: {}".format(kw)) @@ -204,68 +204,68 @@ if parent_elt is not None: if file_elt is not None: raise exceptions.InternalError( - u"file_elt must be None if parent_elt is set" + "file_elt must be None if parent_elt is set" ) try: - file_elt = next(parent_elt.elements(NS_JINGLE_FT, u"file")) + file_elt = next(parent_elt.elements(NS_JINGLE_FT, "file")) except StopIteration: raise exceptions.NotFound() else: if not file_elt or file_elt.uri != NS_JINGLE_FT: raise exceptions.DataError( - u"invalid <file> element: {stanza}".format(stanza=file_elt.toXml()) + "invalid <file> element: {stanza}".format(stanza=file_elt.toXml()) ) if file_data is None: file_data = {} - for name in (u"name", u"desc", u"path", u"namespace"): + for name in ("name", "desc", "path", "namespace"): try: - file_data[name] = unicode(next(file_elt.elements(NS_JINGLE_FT, name))) + file_data[name] = str(next(file_elt.elements(NS_JINGLE_FT, name))) except StopIteration: pass - name = file_data.get(u"name") - if name == u"..": + name = file_data.get("name") + if name == "..": # we don't want to go to parent dir when joining to a path - name = u"--" - file_data[u"name"] = name - elif name is not None and u"/" in name or u"\\" in name: - file_data[u"name"] = regex.pathEscape(name) + name = "--" + file_data["name"] = name + elif name is not None and "/" in name or "\\" in name: + file_data["name"] = regex.pathEscape(name) try: - file_data[u"mime_type"] = unicode( - next(file_elt.elements(NS_JINGLE_FT, u"media-type")) + file_data["mime_type"] = str( + next(file_elt.elements(NS_JINGLE_FT, "media-type")) ) except StopIteration: pass try: - file_data[u"size"] = int( - unicode(next(file_elt.elements(NS_JINGLE_FT, u"size"))) + file_data["size"] = int( + str(next(file_elt.elements(NS_JINGLE_FT, "size"))) ) except StopIteration: pass try: - file_data[u"modified"] = date_utils.date_parse( - next(file_elt.elements(NS_JINGLE_FT, u"date")) + file_data["modified"] = date_utils.date_parse( + next(file_elt.elements(NS_JINGLE_FT, "date")) ) except StopIteration: pass try: - range_elt = file_elt.elements(NS_JINGLE_FT, u"range").next() + range_elt = next(file_elt.elements(NS_JINGLE_FT, "range")) except StopIteration: pass else: offset = range_elt.getAttribute("offset") length = range_elt.getAttribute("length") if offset or length or keep_empty_range: - file_data[u"transfer_range"] = Range(offset=offset, length=length) + file_data["transfer_range"] = Range(offset=offset, length=length) - prefix = u"given_" if given else u"" - hash_algo_key, hash_key = u"hash_algo", prefix + u"file_hash" + prefix = "given_" if given else "" + hash_algo_key, hash_key = "hash_algo", prefix + "file_hash" try: file_data[hash_algo_key], file_data[hash_key] = self._hash.parseHashElt( file_elt @@ -273,7 +273,7 @@ except exceptions.NotFound: pass - self.host.trigger.point(u"XEP-0234_parseFileElement", file_elt, file_data) + self.host.trigger.point("XEP-0234_parseFileElement", file_elt, file_data) return file_data @@ -379,12 +379,12 @@ extra = {} if file_hash is not None: if hash_algo is None: - raise ValueError(_(u"hash_algo must be set if file_hash is set")) + raise ValueError(_("hash_algo must be set if file_hash is set")) extra["file_hash"] = file_hash extra["hash_algo"] = hash_algo else: if hash_algo is not None: - raise ValueError(_(u"file_hash must be set if hash_algo is set")) + raise ValueError(_("file_hash must be set if hash_algo is set")) yield self._j.initiate( client, peer_jid, @@ -414,8 
+414,8 @@ else: if not EXTRA_ALLOWED.issuperset(extra): raise ValueError( - _(u"only the following keys are allowed in extra: {keys}").format( - keys=u", ".join(EXTRA_ALLOWED) + _("only the following keys are allowed in extra: {keys}").format( + keys=", ".join(EXTRA_ALLOWED) ) ) progress_id_d.callback(self.getProgressId(session, content_name)) @@ -427,38 +427,38 @@ desc_elt = domish.Element((NS_JINGLE_FT, "description")) file_elt = desc_elt.addElement("file") - if content_data[u"senders"] == self._j.ROLE_INITIATOR: + if content_data["senders"] == self._j.ROLE_INITIATOR: # we send a file if name is None: name = os.path.basename(filepath) - file_data[u"date"] = utils.xmpp_date() - file_data[u"desc"] = extra.pop(u"file_desc", u"") - file_data[u"name"] = name + file_data["date"] = utils.xmpp_date() + file_data["desc"] = extra.pop("file_desc", "") + file_data["name"] = name mime_type = mimetypes.guess_type(name, strict=False)[0] if mime_type is not None: - file_data[u"mime_type"] = mime_type - file_data[u"size"] = os.path.getsize(filepath) - if u"namespace" in extra: - file_data[u"namespace"] = extra[u"namespace"] - if u"path" in extra: - file_data[u"path"] = extra[u"path"] - self.buildFileElementFromDict(file_data, file_elt=file_elt, file_hash=u"") + file_data["mime_type"] = mime_type + file_data["size"] = os.path.getsize(filepath) + if "namespace" in extra: + file_data["namespace"] = extra["namespace"] + if "path" in extra: + file_data["path"] = extra["path"] + self.buildFileElementFromDict(file_data, file_elt=file_elt, file_hash="") else: # we request a file - file_hash = extra.pop(u"file_hash", u"") + file_hash = extra.pop("file_hash", "") if not name and not file_hash: - raise ValueError(_(u"you need to provide at least name or file hash")) + raise ValueError(_("you need to provide at least name or file hash")) if name: - file_data[u"name"] = name + file_data["name"] = name if file_hash: - file_data[u"file_hash"] = file_hash - file_data[u"hash_algo"] = extra[u"hash_algo"] + file_data["file_hash"] = file_hash + file_data["hash_algo"] = extra["hash_algo"] else: - file_data[u"hash_algo"] = self._hash.getDefaultAlgo() - if u"namespace" in extra: - file_data[u"namespace"] = extra[u"namespace"] - if u"path" in extra: - file_data[u"path"] = extra[u"path"] + file_data["hash_algo"] = self._hash.getDefaultAlgo() + if "namespace" in extra: + file_data["namespace"] = extra["namespace"] + if "path" in extra: + file_data["path"] = extra["path"] self.buildFileElementFromDict(file_data, file_elt=file_elt) return desc_elt @@ -466,13 +466,13 @@ def jingleRequestConfirmation(self, client, action, session, content_name, desc_elt): """This method request confirmation for a jingle session""" content_data = session["contents"][content_name] - senders = content_data[u"senders"] + senders = content_data["senders"] if senders not in (self._j.ROLE_INITIATOR, self._j.ROLE_RESPONDER): - log.warning(u"Bad sender, assuming initiator") - senders = content_data[u"senders"] = self._j.ROLE_INITIATOR + log.warning("Bad sender, assuming initiator") + senders = content_data["senders"] = self._j.ROLE_INITIATOR # first we grab file informations try: - file_elt = desc_elt.elements(NS_JINGLE_FT, "file").next() + file_elt = next(desc_elt.elements(NS_JINGLE_FT, "file")) except StopIteration: raise failure.Failure(exceptions.DataError) file_data = {"progress_id": self.getProgressId(session, content_name)} @@ -516,7 +516,7 @@ ) defer.returnValue(confirmed) - log.warning(_(u"File continue is not implemented yet")) + log.warning(_("File 
continue is not implemented yet")) defer.returnValue(False) def _fileReceivingRequestConf( @@ -545,7 +545,7 @@ name = file_data["name"] if "/" in name or "\\" in name: log.warning( - u"File name contain path characters, we replace them: {}".format(name) + "File name contain path characters, we replace them: {}".format(name) ) file_data["name"] = name.replace("/", "_").replace("\\", "_") @@ -574,9 +574,9 @@ if action in (self._j.A_ACCEPTED_ACK,): pass elif action == self._j.A_SESSION_INITIATE: - file_elt = desc_elt.elements(NS_JINGLE_FT, "file").next() + file_elt = next(desc_elt.elements(NS_JINGLE_FT, "file")) try: - file_elt.elements(NS_JINGLE_FT, "range").next() + next(file_elt.elements(NS_JINGLE_FT, "range")) except StopIteration: # initiator doesn't manage <range>, but we do so we advertise it # FIXME: to be checked @@ -586,14 +586,14 @@ assert not "stream_object" in content_data file_data = application_data["file_data"] file_path = application_data["file_path"] - senders = content_data[u"senders"] - if senders != session[u"role"]: + senders = content_data["senders"] + if senders != session["role"]: # we are receiving the file try: # did the responder specified the size of the file? - file_elt = next(desc_elt.elements(NS_JINGLE_FT, u"file")) - size_elt = next(file_elt.elements(NS_JINGLE_FT, u"size")) - size = int(unicode(size_elt)) + file_elt = next(desc_elt.elements(NS_JINGLE_FT, "file")) + size_elt = next(file_elt.elements(NS_JINGLE_FT, "size")) + size = int(str(size_elt)) except (StopIteration, ValueError): size = None # XXX: hash security is not critical here, so we just take the higher mandatory one @@ -624,7 +624,7 @@ args = [client, session, content_name, content_data] finished_d.addCallbacks(self._finishedCb, self._finishedEb, args, None, args) else: - log.warning(u"FIXME: unmanaged action {}".format(action)) + log.warning("FIXME: unmanaged action {}".format(action)) return desc_elt def jingleSessionInfo(self, client, action, session, content_name, jingle_elt): @@ -644,7 +644,7 @@ # we have received the file hash, we need to parse it if content_data["senders"] == session["role"]: log.warning( - u"unexpected checksum received while we are the file sender" + "unexpected checksum received while we are the file sender" ) raise exceptions.DataError info_content_name = elt["name"] @@ -653,13 +653,13 @@ return file_data = content_data["application_data"]["file_data"] try: - file_elt = elt.elements((NS_JINGLE_FT, "file")).next() + file_elt = next(elt.elements((NS_JINGLE_FT, "file"))) except StopIteration: raise exceptions.DataError algo, file_data["given_file_hash"] = self._hash.parseHashElt(file_elt) if algo != file_data.get("hash_algo"): log.warning( - u"Hash algorithm used in given hash ({peer_algo}) doesn't correspond to the one we have used ({our_algo}) [{profile}]".format( + "Hash algorithm used in given hash ({peer_algo}) doesn't correspond to the one we have used ({our_algo}) [{profile}]".format( peer_algo=algo, our_algo=file_data.get("hash_algo"), profile=client.profile, @@ -685,7 +685,7 @@ file_data = content_data["application_data"]["file_data"] hasher = file_data["hash_hasher"] hash_ = hasher.hexdigest() - log.debug(u"Calculated hash: {}".format(hash_)) + log.debug("Calculated hash: {}".format(hash_)) iq_elt, jingle_elt = self._j.buildSessionInfo(client, session) checksum_elt = jingle_elt.addElement((NS_JINGLE_FT, "checksum")) checksum_elt["creator"] = content_data["creator"] @@ -712,7 +712,7 @@ if given_hash is None: if last_try: log.warning( - u"sender didn't sent hash 
checksum, we can't check the file [{profile}]".format( + "sender didn't sent hash checksum, we can't check the file [{profile}]".format( profile=client.profile ) ) @@ -721,10 +721,10 @@ return True return False hasher = file_data["hash_hasher"] - hash_ = hasher.hexdigest() + hash_ = hasher.hexdigest().encode('utf-8') if hash_ == given_hash: - log.info(u"Hash checked, file was successfully transfered: {}".format(hash_)) + log.info("Hash checked, file was successfully transfered: {}".format(hash_)) progress_metadata = { "hash": hash_, "hash_algo": file_data["hash_algo"], @@ -732,9 +732,9 @@ } error = None else: - log.warning(u"Hash mismatch, the file was not transfered correctly") + log.warning("Hash mismatch, the file was not transfered correctly") progress_metadata = None - error = u"Hash mismatch: given={algo}:{given}, calculated={algo}:{our}".format( + error = "Hash mismatch: given={algo}:{given}, calculated={algo}:{our}".format( algo=file_data["hash_algo"], given=given_hash, our=hash_ ) @@ -748,7 +748,7 @@ return True def _finishedCb(self, __, client, session, content_name, content_data): - log.info(u"File transfer terminated") + log.info("File transfer terminated") if content_data["senders"] != session["role"]: # we terminate the session only if we are the receiver, # as recommanded in XEP-0234 §2 (after example 6) @@ -772,15 +772,15 @@ content_data["stream_object"].close() def _finishedEb(self, failure, client, session, content_name, content_data): - log.warning(u"Error while streaming file: {}".format(failure)) + log.warning("Error while streaming file: {}".format(failure)) content_data["stream_object"].close() self._j.contentTerminate( client, session, content_name, reason=self._j.REASON_FAILED_TRANSPORT ) +@implementer(iwokkel.IDisco) class XEP_0234_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_JINGLE_FT)]
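A note on the checksum comparison rewritten above: parseHashElt() now hands the peer's digest back as bytes (it comes out of base64.b64decode()), while hexdigest() stays a str, so the locally computed value is encoded before the equality test. A minimal, self-contained sketch of why the untouched comparison would silently fail under Python 3 (illustration only, not code from the patch):

    import base64
    import hashlib

    # Digest computed locally while streaming the file: hexdigest() returns str.
    hasher = hashlib.sha256()
    hasher.update(b"file content chunk")
    local_hash = hasher.hexdigest()

    # Simulated peer digest: the <hash/> payload is decoded with base64.b64decode(),
    # which returns bytes under Python 3.
    given_hash = base64.b64decode(base64.b64encode(local_hash.encode('utf-8')))

    # Python 2 treated equal str/unicode values as equal; Python 3 never equates
    # str and bytes, so the transfer would always be reported as corrupted.
    assert local_hash != given_hash                    # str vs bytes: never equal
    assert local_hash.encode('utf-8') == given_hash    # explicit encoding restores the check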
--- a/sat/plugins/plugin_xep_0249.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0249.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0249 @@ -27,7 +27,7 @@ from twisted.words.xish import domish from twisted.words.protocols.jabber import jid -from zope.interface import implements +from zope.interface import implementer from wokkel import disco, iwokkel @@ -122,9 +122,9 @@ message["to"] = guest.full() x_elt = message.addElement((NS_X_CONFERENCE, "x")) x_elt["jid"] = room.userhost() - for key, value in options.iteritems(): + for key, value in options.items(): if key not in ("password", "reason", "thread"): - log.warning(u"Ignoring invalid invite option: {}".format(key)) + log.warning("Ignoring invalid invite option: {}".format(key)) continue x_elt[key] = value # there is not body in this message, so we can use directly send() @@ -137,7 +137,7 @@ """ client = self.host.getClient(profile_key) log.info( - _(u"Invitation accepted for room %(room)s [%(profile)s]") + _("Invitation accepted for room %(room)s [%(profile)s]") % {"room": room_jid.userhost(), "profile": client.profile} ) d = self.host.plugins["XEP-0045"].join(client, room_jid, client.jid.user, {}) @@ -150,13 +150,13 @@ return True try: - room_jid_s = x_elt[u"jid"] + room_jid_s = x_elt["jid"] except KeyError: - log.warning(_(u"invalid invitation received: {xml}").format( + log.warning(_("invalid invitation received: {xml}").format( xml=message_elt.toXml())) return False log.info( - _(u"Invitation received for room %(room)s [%(profile)s]") + _("Invitation received for room %(room)s [%(profile)s]") % {"room": room_jid_s, "profile": client.profile} ) from_jid_s = message_elt["from"] @@ -167,7 +167,7 @@ pass else: log.info( - _(u"Invitation silently discarded because user is already in the room.") + _("Invitation silently discarded because user is already in the room.") ) return @@ -179,15 +179,15 @@ self._accept(room_jid, client.profile) elif autojoin == "never": msg = D_( - u"An invitation from %(user)s to join the room %(room)s has been " - u"declined according to your personal settings." + "An invitation from %(user)s to join the room %(room)s has been " + "declined according to your personal settings." ) % {"user": from_jid_s, "room": room_jid_s} title = D_("MUC invitation") xml_tools.quickNote(self.host, client, msg, title, C.XMLUI_DATA_LVL_INFO) else: # leave the default value here confirm_msg = D_( - u"You have been invited by %(user)s to join the room %(room)s. " - u"Do you accept?" + "You have been invited by %(user)s to join the room %(room)s. " + "Do you accept?" ) % {"user": from_jid_s, "room": room_jid_s} confirm_title = D_("MUC invitation") d = xml_tools.deferConfirm( @@ -213,8 +213,8 @@ contact_jid = jid.JID(contact_jid_s) except (RuntimeError, jid.InvalidFormat, AttributeError): feedback = _( - u"You must provide a valid JID to invite, like in '/invite " - u"contact@{host}'" + "You must provide a valid JID to invite, like in '/invite " + "contact@{host}'" ).format(host=my_host) self.host.plugins[C.TEXT_CMDS].feedBack(client, feedback, mess_data) return False @@ -224,8 +224,8 @@ return False +@implementer(iwokkel.IDisco) class XEP_0249_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_X_CONFERENCE)]
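The handler classes in these plugins all follow the same zope.interface migration: the implements() class advice, which relies on a Python 2 only mechanism, becomes the @implementer class decorator applied to the handler class. A self-contained sketch of the pattern (IGreeter is an invented stand-in for iwokkel.IDisco so the example runs with zope.interface alone):

    from zope.interface import Interface, implementer

    class IGreeter(Interface):
        """Toy interface standing in for iwokkel.IDisco."""
        def greet(name):
            """Return a greeting for name."""

    # Python 2 spelling (no longer works under Python 3):
    #     class Greeter(object):
    #         implements(IGreeter)
    #         ...

    # Python 3 spelling used throughout this changeset:
    @implementer(IGreeter)
    class Greeter:
        def greet(self, name):
            return "hello {}".format(name)

    assert IGreeter.implementedBy(Greeter)
    assert IGreeter.providedBy(Greeter())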
--- a/sat/plugins/plugin_xep_0260.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0260.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Jingle (XEP-0260) @@ -24,7 +24,7 @@ log = getLogger(__name__) from sat.core import exceptions from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.xish import domish from twisted.words.protocols.jabber import jid from twisted.internet import defer @@ -122,15 +122,15 @@ transport_elt["mode"] = "tcp" # XXX: we only manage tcp for now for candidate in candidates: - log.debug(u"Adding candidate: {}".format(candidate)) + log.debug("Adding candidate: {}".format(candidate)) candidate_elt = transport_elt.addElement("candidate", NS_JINGLE_S5B) if candidate.id is None: - candidate.id = unicode(uuid.uuid4()) + candidate.id = str(uuid.uuid4()) candidate_elt["cid"] = candidate.id candidate_elt["host"] = candidate.host candidate_elt["jid"] = candidate.jid.full() - candidate_elt["port"] = unicode(candidate.port) - candidate_elt["priority"] = unicode(candidate.priority) + candidate_elt["port"] = str(candidate.port) + candidate_elt["priority"] = str(candidate.priority) candidate_elt["type"] = candidate.type return transport_elt @@ -138,12 +138,12 @@ def jingleSessionInit(self, client, session, content_name): content_data = session["contents"][content_name] transport_data = content_data["transport_data"] - sid = transport_data["sid"] = unicode(uuid.uuid4()) + sid = transport_data["sid"] = str(uuid.uuid4()) session_hash = transport_data["session_hash"] = self._s5b.getSessionHash( - session[u"local_jid"], session["peer_jid"], sid + session["local_jid"], session["peer_jid"], sid ) transport_data["peer_session_hash"] = self._s5b.getSessionHash( - session["peer_jid"], session[u"local_jid"], sid + session["peer_jid"], session["local_jid"], sid ) # requester and target are inversed for peer candidates transport_data["stream_d"] = self._s5b.registerHash(client, session_hash, None) candidates = transport_data["candidates"] = yield self._s5b.getCandidates( @@ -181,7 +181,7 @@ transport_elt.addElement("proxy-error") iq_elt.send() log.warning( - u"Can't activate proxy, we need to fallback to IBB: {reason}".format( + "Can't activate proxy, we need to fallback to IBB: {reason}".format( reason=stanza_error.value.condition ) ) @@ -211,10 +211,10 @@ client, self._j.A_TRANSPORT_INFO, session, content_name ) if candidate is None: - log.warning(u"Can't connect to any peer candidate") + log.warning("Can't connect to any peer candidate") candidate_elt = transport_elt.addElement("candidate-error") else: - log.info(u"Found best peer candidate: {}".format(unicode(candidate))) + log.info("Found best peer candidate: {}".format(str(candidate))) candidate_elt = transport_elt.addElement("candidate-used") candidate_elt["cid"] = candidate.id iq_elt.send() # TODO: check result stanza @@ -248,9 +248,9 @@ if best_candidate.priority == peer_best_candidate.priority: # same priority, we choose initiator one according to XEP-0260 §2.4 #4 log.debug( - u"Candidates have same priority, we select the one choosed by initiator" + "Candidates have same priority, we select the one choosed by initiator" ) - if session["initiator"] == session[u"local_jid"]: + if session["initiator"] == session["local_jid"]: choosed_candidate = best_candidate else: choosed_candidate = peer_best_candidate @@ -260,7 +260,7 @@ ) if choosed_candidate is None: - 
log.warning(u"Socks5 negociation failed, we need to fallback to IBB") + log.warning("Socks5 negociation failed, we need to fallback to IBB") self.doFallback(session, content_name, client) else: if choosed_candidate == peer_best_candidate: @@ -276,8 +276,8 @@ our_candidate = False log.info( - u"Socks5 negociation successful, {who} candidate will be used: {candidate}".format( - who=u"our" if our_candidate else u"other peer", + "Socks5 negociation successful, {who} candidate will be used: {candidate}".format( + who="our" if our_candidate else "other peer", candidate=choosed_candidate, ) ) @@ -321,10 +321,10 @@ Will try to fallback to IBB """ try: - reason = unicode(fail.value) + reason = str(fail.value) except AttributeError: - reason = unicode(fail) - log.warning(u"Cant start transfert, we'll try fallback method: {}".format(reason)) + reason = str(fail) + log.warning("Cant start transfert, we'll try fallback method: {}".format(reason)) self.doFallback(session, content_name, client) def _candidateInfo( @@ -347,25 +347,25 @@ try: cid = candidate_elt.attributes["cid"] except KeyError: - log.warning(u"No cid found in <candidate-used>") + log.warning("No cid found in <candidate-used>") raise exceptions.DataError try: - candidate = ( + candidate = next(( c for c in transport_data["candidates"] if c.id == cid - ).next() + )) except StopIteration: - log.warning(u"Given cid doesn't correspond to any known candidate !") + log.warning("Given cid doesn't correspond to any known candidate !") raise exceptions.DataError # TODO: send an error to other peer, and use better exception except KeyError: # a transport-info can also be intentionaly sent too early by other peer # but there is little probability log.error( - u'"candidates" key doesn\'t exists in transport_data, it should at this point' + '"candidates" key doesn\'t exists in transport_data, it should at this point' ) raise exceptions.InternalError # at this point we have the candidate choosed by other peer transport_data["peer_best_candidate"] = candidate - log.info(u"Other peer best candidate: {}".format(candidate)) + log.info("Other peer best candidate: {}".format(candidate)) del transport_data["candidates"] self._checkCandidates(session, content_name, transport_data, client) @@ -385,7 +385,7 @@ try: activation_d = transport_data.pop("activation_d") except KeyError: - log.warning(u"Received unexpected transport-info for proxy activation") + log.warning("Received unexpected transport-info for proxy activation") if proxy_elt.name == "activated": activation_d.callback(None) @@ -467,7 +467,7 @@ ): for name in names: try: - candidate_elt = transport_elt.elements(NS_JINGLE_S5B, name).next() + candidate_elt = next(transport_elt.elements(NS_JINGLE_S5B, name)) except StopIteration: continue else: @@ -478,20 +478,20 @@ if candidate_elt is None: log.warning( - u"Unexpected transport element: {}".format(transport_elt.toXml()) + "Unexpected transport element: {}".format(transport_elt.toXml()) ) elif action == self._j.A_DESTROY: # the transport is replaced (fallback ?), We need mainly to kill XEP-0065 session. 
# note that sid argument is not necessary for sessions created by this plugin self._s5b.killSession(None, transport_data["session_hash"], None, client) else: - log.warning(u"FIXME: unmanaged action {}".format(action)) + log.warning("FIXME: unmanaged action {}".format(action)) defer.returnValue(transport_elt) def jingleTerminate(self, client, action, session, content_name, reason_elt): if reason_elt.decline: - log.debug(u"Session declined, deleting S5B session") + log.debug("Session declined, deleting S5B session") # we just need to clean the S5B session if it is declined content_data = session["contents"][content_name] transport_data = content_data["transport_data"] @@ -504,7 +504,7 @@ """ if not feature_checked: log.warning( - u"Other peer can't manage jingle IBB, be have to terminate the session" + "Other peer can't manage jingle IBB, be have to terminate the session" ) self._j.terminate(client, self._j.REASON_CONNECTIVITY_ERROR, session) else: @@ -524,7 +524,7 @@ return if self._jingle_ibb is None: log.warning( - u"Jingle IBB (XEP-0261) plugin is not available, we have to close the session" + "Jingle IBB (XEP-0261) plugin is not available, we have to close the session" ) self._j.terminate(client, self._j.REASON_CONNECTIVITY_ERROR, session) else: @@ -535,8 +535,8 @@ return d +@implementer(iwokkel.IDisco) class XEP_0260_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_JINGLE_S5B)]
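Another recurring change visible above is the iterator protocol: Python 3 renamed the generator method next() to __next__(), so every some_iterator.next() call becomes the builtin next(some_iterator), including the generator expression used to look up the candidate by cid in _candidateInfo(). A small sketch with plain dicts standing in for the plugin's candidate objects (names and values are illustrative):

    candidates = [
        {"cid": "abc1", "host": "192.0.2.1"},
        {"cid": "def2", "host": "192.0.2.2"},
    ]

    cid = "def2"
    try:
        # Python 2: (c for c in ...).next()  —  Python 3: next(generator)
        candidate = next(c for c in candidates if c["cid"] == cid)
    except StopIteration:
        candidate = None        # unknown cid, treated as a protocol error in the plugin
    print(candidate)

    # next() also accepts a default, which avoids the try/except entirely:
    print(next((c for c in candidates if c["cid"] == "missing"), None))    # None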
--- a/sat/plugins/plugin_xep_0261.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0261.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Jingle (XEP-0261) @@ -23,7 +23,7 @@ log = getLogger(__name__) from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.xish import domish import uuid @@ -68,8 +68,8 @@ content_data = session["contents"][content_name] transport_data = content_data["transport_data"] transport_data["block_size"] = self._ibb.BLOCK_SIZE - transport_elt["block-size"] = unicode(transport_data["block_size"]) - transport_elt["sid"] = transport_data["sid"] = unicode(uuid.uuid4()) + transport_elt["block-size"] = str(transport_data["block_size"]) + transport_elt["sid"] = transport_data["sid"] = str(uuid.uuid4()) return transport_elt def jingleHandler(self, client, action, session, content_name, transport_elt): @@ -99,12 +99,12 @@ client, stream_object, local_jid, peer_jid, sid) d.chainDeferred(content_data["finished_d"]) else: - log.warning(u"FIXME: unmanaged action {}".format(action)) + log.warning("FIXME: unmanaged action {}".format(action)) return transport_elt +@implementer(iwokkel.IDisco) class XEP_0261_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_JINGLE_IBB)]
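The unicode() → str() substitutions above ("block-size", "sid", and the candidate ports and priorities in the previous file) all serve the same purpose: domish attribute values must be text, and Python 3 has no unicode() builtin. A minimal sketch with Twisted's domish (the namespace value and block size are assumptions made for the example):

    import uuid
    from twisted.words.xish import domish

    NS_JINGLE_IBB = "urn:xmpp:jingle:transports:ibb:1"   # namespace as defined by XEP-0261
    BLOCK_SIZE = 4096                                    # arbitrary value for the sketch

    transport_elt = domish.Element((NS_JINGLE_IBB, "transport"))
    # Attribute values have to be text, so numbers and UUID objects go through str():
    transport_elt["block-size"] = str(BLOCK_SIZE)
    transport_elt["sid"] = str(uuid.uuid4())
    print(transport_elt.toXml())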
--- a/sat/plugins/plugin_xep_0264.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0264.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0264 @@ -26,7 +26,7 @@ from twisted.internet import threads from twisted.python.failure import Failure -from zope.interface import implements +from zope.interface import implementer from wokkel import disco, iwokkel @@ -37,7 +37,7 @@ from PIL import Image except: raise exceptions.MissingModule( - u"Missing module pillow, please download/install it from https://python-pillow.github.io" + "Missing module pillow, please download/install it from https://python-pillow.github.io" ) # cf. https://stackoverflow.com/a/23575424 @@ -51,8 +51,8 @@ from wokkel.subprotocols import XMPPHandler -MIME_TYPE = u"image/jpeg" -SAVE_FORMAT = u"JPEG" # (cf. Pillow documentation) +MIME_TYPE = "image/jpeg" +SAVE_FORMAT = "JPEG" # (cf. Pillow documentation) NS_THUMBS = "urn:xmpp:thumbs:1" @@ -74,7 +74,7 @@ SIZE_MEDIUM = (1024, 1024) def __init__(self, host): - log.info(_(u"Plugin XEP_0264 initialization")) + log.info(_("Plugin XEP_0264 initialization")) self.host = host host.trigger.add("XEP-0234_buildFileElement", self._addFileThumbnails) host.trigger.add("XEP-0234_parseFileElement", self._getFileThumbnails) @@ -86,21 +86,21 @@ def _addFileThumbnails(self, file_elt, extra_args): try: - thumbnails = extra_args[u"extra"][C.KEY_THUMBNAILS] + thumbnails = extra_args["extra"][C.KEY_THUMBNAILS] except KeyError: return for thumbnail in thumbnails: - thumbnail_elt = file_elt.addElement((NS_THUMBS, u"thumbnail")) - thumbnail_elt["uri"] = u"cid:" + thumbnail["id"] + thumbnail_elt = file_elt.addElement((NS_THUMBS, "thumbnail")) + thumbnail_elt["uri"] = "cid:" + thumbnail["id"] thumbnail_elt["media-type"] = MIME_TYPE width, height = thumbnail["size"] - thumbnail_elt["width"] = unicode(width) - thumbnail_elt["height"] = unicode(height) + thumbnail_elt["width"] = str(width) + thumbnail_elt["height"] = str(height) return True def _getFileThumbnails(self, file_elt, file_data): thumbnails = [] - for thumbnail_elt in file_elt.elements(NS_THUMBS, u"thumbnail"): + for thumbnail_elt in file_elt.elements(NS_THUMBS, "thumbnail"): uri = thumbnail_elt["uri"] if uri.startswith("cid:"): thumbnail = {"id": uri[4:]} @@ -144,7 +144,7 @@ try: img = Image.open(source_path) except IOError: - return Failure(exceptions.DataError(u"Can't open image")) + return Failure(exceptions.DataError("Can't open image")) img.thumbnail(size) uid = self.getThumbId(image_uid or source_path, size) @@ -176,13 +176,13 @@ self._blockingGenThumb, source_path, size, max_age, image_uid=image_uid ) d.addErrback( - lambda failure_: log.error(u"thumbnail generation error: {}".format(failure_)) + lambda failure_: log.error("thumbnail generation error: {}".format(failure_)) ) return d +@implementer(iwokkel.IDisco) class XEP_0264_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_THUMBS)]
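For context on the Pillow calls left unchanged above, the thumbnail generation boils down to roughly the following; the sketch builds a test image in memory instead of calling Image.open(source_path), and only uses the SIZE_MEDIUM and SAVE_FORMAT constants visible in the diff:

    import io
    from PIL import Image

    SAVE_FORMAT = "JPEG"
    SIZE_MEDIUM = (1024, 1024)

    # A generated image stands in for Image.open(source_path).
    img = Image.new("RGB", (2048, 1536), color=(120, 60, 30))

    # thumbnail() resizes in place, keeps the aspect ratio and never upscales.
    img.thumbnail(SIZE_MEDIUM)

    thumb_buf = io.BytesIO()
    img.save(thumb_buf, SAVE_FORMAT)
    print(img.size, len(thumb_buf.getvalue()))   # (1024, 768) and the JPEG byte count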
--- a/sat/plugins/plugin_xep_0277.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0277.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for microblogging over XMPP (xep-0277) @@ -37,12 +37,12 @@ # XXX: sat_tmp.wokkel.pubsub is actually used instead of wokkel version from wokkel import pubsub from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer import shortuuid import time import dateutil import calendar -import urlparse +import urllib.parse NS_MICROBLOG = "urn:xmpp:microblog:0" NS_ATOM = "http://www.w3.org/2005/Atom" @@ -71,7 +71,7 @@ namespace = NS_MICROBLOG def __init__(self, host): - log.info(_(u"Microblogging plugin initialization")) + log.info(_("Microblogging plugin initialization")) self.host = host host.registerNamespace("microblog", NS_MICROBLOG) self._p = self.host.plugins[ @@ -88,7 +88,7 @@ in_sign="ssss", out_sign="", method=self._mbSend, - async=True, + async_=True, ) host.bridge.addMethod( "mbRetract", @@ -96,7 +96,7 @@ in_sign="ssss", out_sign="", method=self._mbRetract, - async=True, + async_=True, ) host.bridge.addMethod( "mbGet", @@ -104,7 +104,7 @@ in_sign="ssiasa{ss}s", out_sign="(asa{ss})", method=self._mbGet, - async=True, + async_=True, ) host.bridge.addMethod( "mbSetAccess", @@ -112,7 +112,7 @@ in_sign="ss", out_sign="", method=self.mbSetAccess, - async=True, + async_=True, ) host.bridge.addMethod( "mbSubscribeToMany", @@ -127,7 +127,7 @@ in_sign="ss", out_sign="(ua(sssasa{ss}))", method=self._mbGetFromManyRTResult, - async=True, + async_=True, ) host.bridge.addMethod( "mbGetFromMany", @@ -142,7 +142,7 @@ in_sign="ss", out_sign="(ua(sssa(sa(sssasa{ss}))a{ss}))", method=self._mbGetFromManyWithCommentsRTResult, - async=True, + async_=True, ) host.bridge.addMethod( "mbGetFromManyWithComments", @@ -238,8 +238,8 @@ if data_elt is None: raise failure.Failure( exceptions.DataError( - u"XHML content not wrapped in a <div/> element, this is not " - u"standard !" + "XHML content not wrapped in a <div/> element, this is not " + "standard !" 
) ) if data_elt.uri != C.NS_XHTML: @@ -248,46 +248,46 @@ _("Content of type XHTML must declare its namespace!") ) ) - key = check_conflict(u"{}_xhtml".format(elem.name)) + key = check_conflict("{}_xhtml".format(elem.name)) data = data_elt.toXml() microblog_data[key] = yield self.host.plugins["TEXT_SYNTAXES"].cleanXHTML( data ) else: key = check_conflict(elem.name) - microblog_data[key] = unicode(elem) + microblog_data[key] = str(elem) id_ = item_elt.getAttribute("id", "") # there can be no id for transient nodes - microblog_data[u"id"] = id_ + microblog_data["id"] = id_ if item_elt.uri not in (pubsub.NS_PUBSUB, NS_PUBSUB_EVENT): - msg = u"Unsupported namespace {ns} in pubsub item {id_}".format( + msg = "Unsupported namespace {ns} in pubsub item {id_}".format( ns=item_elt.uri, id_=id_ ) log.warning(msg) raise failure.Failure(exceptions.DataError(msg)) try: - entry_elt = item_elt.elements(NS_ATOM, "entry").next() + entry_elt = next(item_elt.elements(NS_ATOM, "entry")) except StopIteration: - msg = u"No atom entry found in the pubsub item {}".format(id_) + msg = "No atom entry found in the pubsub item {}".format(id_) raise failure.Failure(exceptions.DataError(msg)) # language try: - microblog_data[u"language"] = entry_elt[(C.NS_XML, u"lang")].strip() + microblog_data["language"] = entry_elt[(C.NS_XML, "lang")].strip() except KeyError: pass # atom:id try: - id_elt = entry_elt.elements(NS_ATOM, "id").next() + id_elt = next(entry_elt.elements(NS_ATOM, "id")) except StopIteration: - msg = (u"No atom id found in the pubsub item {}, this is not standard !" + msg = ("No atom id found in the pubsub item {}, this is not standard !" .format(id_)) log.warning(msg) - microblog_data[u"atom_id"] = "" + microblog_data["atom_id"] = "" else: - microblog_data[u"atom_id"] = unicode(id_elt) + microblog_data["atom_id"] = str(id_elt) # title/content(s) @@ -302,7 +302,7 @@ # raise failure.Failure(exceptions.DataError(msg)) title_elts = list(entry_elt.elements(NS_ATOM, "title")) if not title_elts: - msg = u"No atom title found in the pubsub item {}".format(id_) + msg = "No atom title found in the pubsub item {}".format(id_) raise failure.Failure(exceptions.DataError(msg)) for title_elt in title_elts: yield parseElement(title_elt) @@ -317,13 +317,13 @@ for key in ("title", "content"): if key not in microblog_data and ("{}_xhtml".format(key)) in microblog_data: log.warning( - u"item {id_} provide a {key}_xhtml data but not a text one".format( + "item {id_} provide a {key}_xhtml data but not a text one".format( id_=id_, key=key ) ) # ... 
and do the conversion if it's not microblog_data[key] = yield self.host.plugins["TEXT_SYNTAXES"].convert( - microblog_data[u"{}_xhtml".format(key)], + microblog_data["{}_xhtml".format(key)], self.host.plugins["TEXT_SYNTAXES"].SYNTAX_XHTML, self.host.plugins["TEXT_SYNTAXES"].SYNTAX_TEXT, False, @@ -331,28 +331,28 @@ if "content" not in microblog_data: # use the atom title data as the microblog body content - microblog_data[u"content"] = microblog_data[u"title"] - del microblog_data[u"title"] + microblog_data["content"] = microblog_data["title"] + del microblog_data["title"] if "title_xhtml" in microblog_data: - microblog_data[u"content_xhtml"] = microblog_data[u"title_xhtml"] - del microblog_data[u"title_xhtml"] + microblog_data["content_xhtml"] = microblog_data["title_xhtml"] + del microblog_data["title_xhtml"] # published/updated dates try: - updated_elt = entry_elt.elements(NS_ATOM, "updated").next() + updated_elt = next(entry_elt.elements(NS_ATOM, "updated")) except StopIteration: - msg = u"No atom updated element found in the pubsub item {}".format(id_) + msg = "No atom updated element found in the pubsub item {}".format(id_) raise failure.Failure(exceptions.DataError(msg)) - microblog_data[u"updated"] = calendar.timegm( - dateutil.parser.parse(unicode(updated_elt)).utctimetuple() + microblog_data["updated"] = calendar.timegm( + dateutil.parser.parse(str(updated_elt)).utctimetuple() ) try: - published_elt = entry_elt.elements(NS_ATOM, "published").next() + published_elt = next(entry_elt.elements(NS_ATOM, "published")) except StopIteration: - microblog_data[u"published"] = microblog_data[u"updated"] + microblog_data["published"] = microblog_data["updated"] else: - microblog_data[u"published"] = calendar.timegm( - dateutil.parser.parse(unicode(published_elt)).utctimetuple() + microblog_data["published"] = calendar.timegm( + dateutil.parser.parse(str(published_elt)).utctimetuple() ) # links @@ -366,83 +366,83 @@ try: service, node = self.parseCommentUrl(microblog_data[key]) except: - log.warning(u"Can't parse url {}".format(microblog_data[key])) + log.warning("Can't parse url {}".format(microblog_data[key])) del microblog_data[key] else: - microblog_data[u"{}_service".format(key)] = service.full() - microblog_data[u"{}_node".format(key)] = node + microblog_data["{}_service".format(key)] = service.full() + microblog_data["{}_node".format(key)] = node else: rel = link_elt.getAttribute("rel", "") title = link_elt.getAttribute("title", "") href = link_elt.getAttribute("href", "") log.warning( - u"Unmanaged link element: rel={rel} title={title} href={href}".format( + "Unmanaged link element: rel={rel} title={title} href={href}".format( rel=rel, title=title, href=href ) ) # author try: - author_elt = entry_elt.elements(NS_ATOM, "author").next() + author_elt = next(entry_elt.elements(NS_ATOM, "author")) except StopIteration: - log.debug(u"Can't find author element in item {}".format(id_)) + log.debug("Can't find author element in item {}".format(id_)) else: publisher = item_elt.getAttribute("publisher") # name try: - name_elt = author_elt.elements(NS_ATOM, "name").next() + name_elt = next(author_elt.elements(NS_ATOM, "name")) except StopIteration: log.warning( - u"No name element found in author element of item {}".format(id_) + "No name element found in author element of item {}".format(id_) ) else: - microblog_data[u"author"] = unicode(name_elt) + microblog_data["author"] = str(name_elt) # uri try: - uri_elt = author_elt.elements(NS_ATOM, "uri").next() + uri_elt = 
next(author_elt.elements(NS_ATOM, "uri")) except StopIteration: log.debug( - u"No uri element found in author element of item {}".format(id_) + "No uri element found in author element of item {}".format(id_) ) if publisher: - microblog_data[u"author_jid"] = publisher + microblog_data["author_jid"] = publisher else: - uri = unicode(uri_elt) + uri = str(uri_elt) if uri.startswith("xmpp:"): uri = uri[5:] - microblog_data[u"author_jid"] = uri + microblog_data["author_jid"] = uri else: - microblog_data[u"author_jid"] = ( - item_elt.getAttribute(u"publisher") or "" + microblog_data["author_jid"] = ( + item_elt.getAttribute("publisher") or "" ) if not publisher: - log.debug(u"No publisher attribute, we can't verify author jid") - microblog_data[u"author_jid_verified"] = False + log.debug("No publisher attribute, we can't verify author jid") + microblog_data["author_jid_verified"] = False elif jid.JID(publisher).userhostJID() == jid.JID(uri).userhostJID(): - microblog_data[u"author_jid_verified"] = True + microblog_data["author_jid_verified"] = True else: log.warning( - u"item atom:uri differ from publisher attribute, spoofing " - u"attempt ? atom:uri = {} publisher = {}".format( + "item atom:uri differ from publisher attribute, spoofing " + "attempt ? atom:uri = {} publisher = {}".format( uri, item_elt.getAttribute("publisher") ) ) - microblog_data[u"author_jid_verified"] = False + microblog_data["author_jid_verified"] = False # email try: - email_elt = author_elt.elements(NS_ATOM, "email").next() + email_elt = next(author_elt.elements(NS_ATOM, "email")) except StopIteration: pass else: - microblog_data[u"author_email"] = unicode(email_elt) + microblog_data["author_email"] = str(email_elt) # categories categories = [ category_elt.getAttribute("term", "") for category_elt in entry_elt.elements(NS_ATOM, "category") ] - microblog_data[u"tags"] = categories + microblog_data["tags"] = categories ## the trigger ## # if other plugins have things to add or change @@ -467,8 +467,8 @@ entry_elt = domish.Element((NS_ATOM, "entry")) ## language ## - if u"language" in data: - entry_elt[(C.NS_XML, u"lang")] = data[u"language"].strip() + if "language" in data: + entry_elt[(C.NS_XML, "lang")] = data["language"].strip() ## content and title ## synt = self.host.plugins["TEXT_SYNTAXES"] @@ -528,7 +528,7 @@ elem["type"] = "text" try: - entry_elt.elements(NS_ATOM, "title").next() + next(entry_elt.elements(NS_ATOM, "title")) except StopIteration: # we have no title element which is mandatory # so we transform content element to title @@ -579,7 +579,7 @@ entry_id = data.get( "id", xmpp_uri.buildXMPPUri( - u"pubsub", + "pubsub", path=service.full() if service is not None else client.jid.userhost(), node=node, item=item_id, @@ -613,7 +613,7 @@ @param item_id(unicode): id of the parent item @return (unicode): comment node to use """ - return u"{}{}".format(NS_COMMENT_PREFIX, item_id) + return "{}{}".format(NS_COMMENT_PREFIX, item_id) def getCommentsService(self, client, parent_service=None): """Get prefered PubSub service to create comment node @@ -662,8 +662,8 @@ elif allow_comments == False: if "comments" in mb_data: log.warning( - u"comments are not allowed but there is already a comments node, " - u"it may be lost: {uri}".format( + "comments are not allowed but there is already a comments node, " + "it may be lost: {uri}".format( uri=mb_data["comments"] ) ) @@ -695,7 +695,7 @@ else: if not comments_node: raise exceptions.DataError( - u"if comments_node is present, it must not be empty" + "if comments_node is present, 
it must not be empty" ) try: @@ -708,7 +708,7 @@ except error.StanzaError as e: if e.condition == "conflict": log.info( - u"node {} already exists on service {}".format( + "node {} already exists on service {}".format( comments_node, comments_service ) ) @@ -722,7 +722,7 @@ ) # …except for "member", that we transform to publisher # because we wants members to be able to write to comments - for jid_, affiliation in comments_affiliations.items(): + for jid_, affiliation in list(comments_affiliations.items()): if affiliation == "member": comments_affiliations[jid_] == "publisher" @@ -736,12 +736,12 @@ if "comments" in mb_data: if not mb_data["comments"]: raise exceptions.DataError( - u"if comments is present, it must not be empty" + "if comments is present, it must not be empty" ) if "comments_node" in mb_data or "comments_service" in mb_data: raise exceptions.DataError( - u"You can't use comments_service/comments_node and comments at the " - u"same time" + "You can't use comments_service/comments_node and comments at the " + "same time" ) else: mb_data["comments"] = self._p.getNodeURI(comments_service, comments_node) @@ -769,12 +769,12 @@ if node is None: node = NS_MICROBLOG - item_id = data.get("id") or unicode(shortuuid.uuid()) + item_id = data.get("id") or str(shortuuid.uuid()) try: yield self._manageComments(client, data, service, node, item_id, access=None) except error.StanzaError: - log.warning(u"Can't create comments node for item {}".format(item_id)) + log.warning("Can't create comments node for item {}".format(item_id)) item = yield self.data2entry(client, data, item_id, service, node) ret = yield self._p.publish(client, service, node, [item]) defer.returnValue(ret) @@ -851,10 +851,10 @@ will return(JID(u'sat-pubsub.example.net'), 'urn:xmpp:comments:_af43b363-3259-4b2a-ba4c-1bc33aa87634__urn:xmpp:groupblog:somebody@example.net') @return (tuple[jid.JID, unicode]): service and node """ - parsed_url = urlparse.urlparse(node_url, "xmpp") + parsed_url = urllib.parse.urlparse(node_url, "xmpp") service = jid.JID(parsed_url.path) - parsed_queries = urlparse.parse_qs(parsed_url.query.encode("utf-8")) - node = parsed_queries.get("node", [""])[0].decode("utf-8") + parsed_queries = urllib.parse.parse_qs(parsed_url.query.encode("utf-8")) + node = parsed_queries.get("node", [""])[0] if not node: raise failure.Failure(exceptions.DataError("Invalid comments link")) @@ -883,11 +883,11 @@ def cb(result): # Node is created with right permission - log.debug(_(u"Microblog node has now access %s") % access) + log.debug(_("Microblog node has now access %s") % access) def fatal_err(s_error): # Something went wrong - log.error(_(u"Can't set microblog access")) + log.error(_("Can't set microblog access")) raise NodeAccessChangeException() def err_cb(s_error): @@ -948,7 +948,7 @@ if services: log.debug( "Extra PEP followed entities: %s" - % ", ".join([unicode(service) for service in services]) + % ", ".join([str(service) for service in services]) ) jids_set.update(services) @@ -1033,7 +1033,7 @@ d = self._p.getRTResults( session_id, on_success=onSuccess, - on_error=lambda failure: (unicode(failure.value), ([], {})), + on_error=lambda failure: (str(failure.value), ([], {})), profile=profile, ) d.addCallback( @@ -1043,7 +1043,7 @@ (service.full(), node, failure, items, metadata) for (service, node), (success, (failure, (items, metadata))) in ret[ 1 - ].iteritems() + ].items() ], ) ) @@ -1098,7 +1098,7 @@ @return (tuple): see [_mbGetFromManyWithCommentsRTResult] """ ret = [] - data_iter = data[1].iteritems() + 
data_iter = iter(data[1].items()) for (service, node), (success, (failure_, (items_data, metadata))) in data_iter: items = [] for item, item_metadata in items_data: @@ -1203,7 +1203,7 @@ items_dlist = [] # deferred list for items for item in items: dlist = [] # deferred list for comments - for key, value in item.iteritems(): + for key, value in item.items(): # we look for comments if key.startswith("comments") and key.endswith("_service"): prefix = key[: key.find("_")] @@ -1228,7 +1228,7 @@ d.addCallback( lambda serialised_items_data: ("",) + serialised_items_data ) - d.addErrback(lambda failure: (unicode(failure.value), [], {})) + d.addErrback(lambda failure: (str(failure.value), [], {})) # and associate with service/node (needed if there are several # comments nodes) d.addCallback( @@ -1262,13 +1262,13 @@ ) d.addCallback(getComments) d.addCallback(lambda items_comments_data: ("", items_comments_data)) - d.addErrback(lambda failure: (unicode(failure.value), ([], {}))) + d.addErrback(lambda failure: (str(failure.value), ([], {}))) return self.rt_sessions.newSession(deferreds, client.profile) +@implementer(iwokkel.IDisco) class XEP_0277_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_MICROBLOG)]
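The comment-link parsing above moves from the removed urlparse module to urllib.parse. A minimal sketch of handling such a node URL purely with str values under Python 3 (the URL and node name are made up for the example; the real method also wraps the service in jid.JID and raises DataError when no node is found):

    import urllib.parse

    node_url = "xmpp:sat-pubsub.example.net?node=urn%3Axmpp%3Acomments%3A_some-item-id"

    parsed_url = urllib.parse.urlparse(node_url, "xmpp")
    service = parsed_url.path                              # "sat-pubsub.example.net"

    # parse_qs() on a str query returns str keys and values, so no encode()/decode()
    # round-trip is needed; percent-encoding is decoded automatically.
    parsed_queries = urllib.parse.parse_qs(parsed_url.query)
    node = parsed_queries.get("node", [""])[0]
    print(service, node)                                   # ... urn:xmpp:comments:_some-item-id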
--- a/sat/plugins/plugin_xep_0280.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0280.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0280 @@ -26,7 +26,7 @@ from twisted.words.protocols.jabber.error import StanzaError from twisted.internet import defer from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer try: from twisted.words.protocols.xmlstream import XMPPHandler @@ -36,18 +36,18 @@ PARAM_CATEGORY = "Misc" PARAM_NAME = "carbon" -PARAM_LABEL = D_(u"Message carbons") +PARAM_LABEL = D_("Message carbons") NS_CARBONS = "urn:xmpp:carbons:2" PLUGIN_INFO = { - C.PI_NAME: u"XEP-0280 Plugin", - C.PI_IMPORT_NAME: u"XEP-0280", - C.PI_TYPE: u"XEP", - C.PI_PROTOCOLS: [u"XEP-0280"], + C.PI_NAME: "XEP-0280 Plugin", + C.PI_IMPORT_NAME: "XEP-0280", + C.PI_TYPE: "XEP", + C.PI_PROTOCOLS: ["XEP-0280"], C.PI_DEPENDENCIES: [], - C.PI_MAIN: u"XEP_0280", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: D_(u"""Implementation of Message Carbons"""), + C.PI_MAIN: "XEP_0280", + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: D_("""Implementation of Message Carbons"""), } @@ -87,10 +87,10 @@ (in particular end 2 end encryption plugins) @param message_elt(domish.Element): <message> stanza """ - if message_elt.name != u"message": - log.error(u"addPrivateElt must be used with <message> stanzas") + if message_elt.name != "message": + log.error("addPrivateElt must be used with <message> stanzas") return - message_elt.addElement((NS_CARBONS, u"private")) + message_elt.addElement((NS_CARBONS, "private")) @defer.inlineCallbacks def profileConnected(self, client): @@ -99,22 +99,22 @@ PARAM_NAME, PARAM_CATEGORY, profile_key=client.profile ) if not activate: - log.info(_(u"Not activating message carbons as requested in params")) + log.info(_("Not activating message carbons as requested in params")) return try: yield self.host.checkFeatures(client, (NS_CARBONS,)) except exceptions.FeatureNotFound: - log.warning(_(u"server doesn't handle message carbons")) + log.warning(_("server doesn't handle message carbons")) else: - log.info(_(u"message carbons available, enabling it")) + log.info(_("message carbons available, enabling it")) iq_elt = client.IQ() iq_elt.addElement((NS_CARBONS, "enable")) try: yield iq_elt.send() except StanzaError as e: - log.warning(u"Can't activate message carbons: {}".format(e)) + log.warning("Can't activate message carbons: {}".format(e)) else: - log.info(_(u"message carbons activated")) + log.info(_("message carbons activated")) def messageReceivedTrigger(self, client, message_elt, post_treat): """get message and handle it if carbons namespace is present""" @@ -131,7 +131,7 @@ if message_elt["from"] != client.jid.userhost(): log.warning( - u"The message carbon received is not from our server, hack attempt?\n{xml}".format( + "The message carbon received is not from our server, hack attempt?\n{xml}".format( xml=message_elt.toXml() ) ) @@ -147,7 +147,7 @@ message_elt["to"] = cc_message_elt["to"] else: log.warning( - u"invalid message carbons received:\n{xml}".format( + "invalid message carbons received:\n{xml}".format( xml=message_elt.toXml() ) ) @@ -159,8 +159,8 @@ return True +@implementer(iwokkel.IDisco) class XEP_0280_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_CARBONS)]
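For readers unfamiliar with the stanzas involved, the carbons plugin above essentially deals with two small fragments: the <enable/> IQ sent once the server advertises urn:xmpp:carbons:2, and the <private/> hint that addPrivateElt() adds so a message is not copied to other resources. A standalone sketch built with domish (no running client; the id and JID are invented):

    from twisted.words.xish import domish

    NS_CARBONS = "urn:xmpp:carbons:2"

    # 1) Enable carbons on the server.
    iq_elt = domish.Element((None, "iq"))
    iq_elt["type"] = "set"
    iq_elt["id"] = "carbons-enable-1"
    iq_elt.addElement((NS_CARBONS, "enable"))
    print(iq_elt.toXml())

    # 2) Mark a message as private so it is never carbon-copied.
    message_elt = domish.Element((None, "message"))
    message_elt["to"] = "louise@example.net"
    message_elt.addElement((NS_CARBONS, "private"))
    message_elt.addElement("body", content="not for carbons")
    print(message_elt.toXml())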
--- a/sat/plugins/plugin_xep_0297.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0297.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Stanza Forwarding (XEP-0297) @@ -30,18 +30,18 @@ from twisted.words.protocols.xmlstream import XMPPHandler except ImportError: from wokkel.subprotocols import XMPPHandler -from zope.interface import implements +from zope.interface import implementer from twisted.words.xish import domish PLUGIN_INFO = { - C.PI_NAME: u"Stanza Forwarding", - C.PI_IMPORT_NAME: u"XEP-0297", - C.PI_TYPE: u"XEP", - C.PI_PROTOCOLS: [u"XEP-0297"], + C.PI_NAME: "Stanza Forwarding", + C.PI_IMPORT_NAME: "XEP-0297", + C.PI_TYPE: "XEP", + C.PI_PROTOCOLS: ["XEP-0297"], C.PI_MAIN: "XEP_0297", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: D_(u"""Implementation of Stanza Forwarding"""), + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: D_("""Implementation of Stanza Forwarding"""), } @@ -49,7 +49,7 @@ # FIXME: check this implementation which doesn't seems to be used def __init__(self, host): - log.info(_(u"Stanza Forwarding plugin initialization")) + log.info(_("Stanza Forwarding plugin initialization")) self.host = host def getHandler(self, client): @@ -83,7 +83,7 @@ # FIXME: this method is not used and doesn't use mess_data which should be used for client.sendMessageData # should it be deprecated? A method constructing the element without sending it seems more natural log.warning( - u"THIS METHOD IS DEPRECATED" + "THIS METHOD IS DEPRECATED" ) # FIXME: we use this warning until we check the method msg = domish.Element((None, "message")) msg["to"] = to_jid.full() @@ -104,11 +104,11 @@ msg.addChild(forwarded_elt) client = self.host.getClient(profile_key) - return client.sendMessageData({u"xml": msg}) + return client.sendMessageData({"xml": msg}) +@implementer(iwokkel.IDisco) class XEP_0297_handler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent, profile): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_xep_0300.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0300.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Hash functions (XEP-0300) @@ -27,7 +27,7 @@ from twisted.words.protocols.jabber.xmlstream import XMPPHandler from twisted.internet import threads from twisted.internet import defer -from zope.interface import implements +from zope.interface import implementer from wokkel import disco, iwokkel from collections import OrderedDict import hashlib @@ -46,7 +46,7 @@ } NS_HASHES = "urn:xmpp:hashes:2" -NS_HASHES_FUNCTIONS = u"urn:xmpp:hash-function-text-names:{}" +NS_HASHES_FUNCTIONS = "urn:xmpp:hash-function-text-names:{}" BUFFER_SIZE = 2 ** 12 ALGO_DEFAULT = "sha-256" @@ -55,10 +55,10 @@ # TODO: add blake after moving to Python 3 ALGOS = OrderedDict( ( - (u"md5", hashlib.md5), - (u"sha-1", hashlib.sha1), - (u"sha-256", hashlib.sha256), - (u"sha-512", hashlib.sha512), + ("md5", hashlib.md5), + ("sha-1", hashlib.sha1), + ("sha-256", hashlib.sha256), + ("sha-512", hashlib.sha512), ) ) @@ -98,7 +98,7 @@ ) if has_feature: log.debug( - u"Best hashing algorithm found for {jid}: {algo}".format( + "Best hashing algorithm found for {jid}: {algo}".format( jid=to_jid.full(), algo=algo ) ) @@ -156,7 +156,7 @@ hash_used_elt = next(parent.elements(NS_HASHES, "hash-used")) except StopIteration: raise exceptions.NotFound - algo = hash_used_elt[u"algo"] + algo = hash_used_elt["algo"] if not algo: raise exceptions.DataError return algo @@ -172,7 +172,8 @@ assert algo hash_elt = domish.Element((NS_HASHES, "hash")) if hash_ is not None: - hash_elt.addContent(base64.b64encode(hash_)) + b64_hash = base64.b64encode(hash_.encode('utf-8')).decode('utf-8') + hash_elt.addContent(b64_hash) hash_elt["algo"] = algo return hash_elt @@ -181,12 +182,12 @@ if multiple elements are found, the strongest managed one is returned @param (domish.Element): parent of <hash/> element - @return (tuple[unicode, str]): (algo, hash) tuple + @return (tuple[str, bytes]): (algo, hash) tuple both values can be None if <hash/> is empty @raise exceptions.NotFound: the element is not present @raise exceptions.DataError: the element is invalid """ - algos = XEP_0300.ALGOS.keys() + algos = list(XEP_0300.ALGOS.keys()) hash_elt = None best_algo = None best_value = None @@ -195,13 +196,13 @@ try: idx = algos.index(algo) except ValueError: - log.warning(u"Proposed {} algorithm is not managed".format(algo)) + log.warning("Proposed {} algorithm is not managed".format(algo)) algo = None continue if best_algo is None or algos.index(best_algo) < idx: best_algo = algo - best_value = base64.b64decode(unicode(hash_elt)) + best_value = base64.b64decode(str(hash_elt)) if not hash_elt: raise exceptions.NotFound @@ -210,8 +211,8 @@ return best_algo, best_value +@implementer(iwokkel.IDisco) class XEP_0300_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): hash_functions_names = [
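The buildHashElt()/parseHashElt() changes above are a typical Python 3 base64 adjustment: b64encode() only accepts bytes and returns bytes, while the element's text content must be str, and b64decode() hands bytes back (hence the (str, bytes) return type now documented for parseHashElt). A short sketch of the round-trip (the digest value is arbitrary):

    import base64
    import hashlib

    hash_ = hashlib.sha256(b"some file content").hexdigest()     # str

    # Element text must be str: encode to feed b64encode(), decode its bytes result.
    b64_hash = base64.b64encode(hash_.encode('utf-8')).decode('utf-8')
    print(b64_hash)

    # Parsing side: b64decode() returns bytes, which is what parseHashElt now yields.
    decoded = base64.b64decode(b64_hash)
    assert decoded == hash_.encode('utf-8')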
--- a/sat/plugins/plugin_xep_0313.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0313.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Message Archive Management (XEP-0313) @@ -25,7 +25,7 @@ from sat.tools.common import data_format from twisted.words.protocols.jabber import jid from twisted.internet import defer -from zope.interface import implements +from zope.interface import implementer from datetime import datetime from dateutil import tz from wokkel import disco @@ -40,19 +40,19 @@ log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Message Archive Management", - C.PI_IMPORT_NAME: u"XEP-0313", - C.PI_TYPE: u"XEP", - C.PI_PROTOCOLS: [u"XEP-0313"], - C.PI_DEPENDENCIES: [u"XEP-0059", u"XEP-0359"], - C.PI_MAIN: u"XEP_0313", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: _(u"""Implementation of Message Archive Management"""), + C.PI_NAME: "Message Archive Management", + C.PI_IMPORT_NAME: "XEP-0313", + C.PI_TYPE: "XEP", + C.PI_PROTOCOLS: ["XEP-0313"], + C.PI_DEPENDENCIES: ["XEP-0059", "XEP-0359"], + C.PI_MAIN: "XEP_0313", + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: _("""Implementation of Message Archive Management"""), } -MAM_PREFIX = u"mam_" +MAM_PREFIX = "mam_" FILTER_PREFIX = MAM_PREFIX + "filter_" -KEY_LAST_STANZA_ID = u"last_stanza_id" +KEY_LAST_STANZA_ID = "last_stanza_id" MESSAGE_RESULT = "/message/result[@xmlns='{mam_ns}' and @queryid='{query_id}']" MESSAGE_STANZA_ID = '/message/stanza-id[@xmlns="{ns_stanza_id}"]' @@ -61,16 +61,16 @@ def __init__(self, host): log.info(_("Message Archive Management plugin initialization")) self.host = host - self.host.registerNamespace(u"mam", mam.NS_MAM) - self._rsm = host.plugins[u"XEP-0059"] - self._sid = host.plugins[u"XEP-0359"] + self.host.registerNamespace("mam", mam.NS_MAM) + self._rsm = host.plugins["XEP-0059"] + self._sid = host.plugins["XEP-0359"] # Deferred used to store last stanza id in order of reception self._last_stanza_id_d = defer.Deferred() self._last_stanza_id_d.callback(None) host.bridge.addMethod( "MAMGet", ".plugin", in_sign='sss', out_sign='(a(sdssa{ss}a{ss}sa{ss})a{ss}s)', method=self._getArchives, - async=True) + async_=True) @defer.inlineCallbacks def resume(self, client): @@ -79,15 +79,15 @@ mam.NS_MAM, [KEY_LAST_STANZA_ID], profile=client.profile) stanza_id = stanza_id_data.get(KEY_LAST_STANZA_ID) if stanza_id is None: - log.info(u"can't retrieve last stanza ID, checking history") + log.info("can't retrieve last stanza ID, checking history") last_mess = yield self.host.memory.historyGet( - None, None, limit=1, filters={u'not_types': C.MESS_TYPE_GROUPCHAT, - u'last_stanza_id': True}, + None, None, limit=1, filters={'not_types': C.MESS_TYPE_GROUPCHAT, + 'last_stanza_id': True}, profile=client.profile) if not last_mess: - log.info(_(u"It seems that we have no MAM history yet")) + log.info(_("It seems that we have no MAM history yet")) return - stanza_id = last_mess[0][-1][u'stanza_id'] + stanza_id = last_mess[0][-1]['stanza_id'] rsm_req = rsm.RSMRequest(max_=100, after=stanza_id) mam_req = mam.MAMRequest(rsm_=rsm_req) complete = False @@ -96,7 +96,7 @@ mam_data = yield self.getArchives(client, mam_req, service=client.jid.userhostJID()) elt_list, rsm_response, mam_response = mam_data - complete = mam_response[u"complete"] + complete = mam_response["complete"] # we update MAM request for next iteration mam_req.rsm.after = rsm_response.last if not elt_list: @@ -114,7 +114,7 @@ try: destinee = 
jid.JID(fwd_message_elt['to']) except KeyError: - log.warning(_(u'missing "to" attribute in forwarded message')) + log.warning(_('missing "to" attribute in forwarded message')) destinee = client.jid if destinee.userhostJID() == client.jid.userhostJID(): # message to use, we insert the forwarded message in the normal @@ -125,12 +125,12 @@ try: from_jid = jid.JID(fwd_message_elt['from']) except KeyError: - log.warning(_(u'missing "from" attribute in forwarded message')) + log.warning(_('missing "from" attribute in forwarded message')) from_jid = client.jid if from_jid.userhostJID() != client.jid.userhostJID(): log.warning(_( - u'was expecting a message sent by our jid, but this one if ' - u'from {from_jid}, ignoring\n{xml}').format( + 'was expecting a message sent by our jid, but this one if ' + 'from {from_jid}, ignoring\n{xml}').format( from_jid=from_jid.full(), xml=mess_elt.toXml())) continue # adding message to history @@ -139,16 +139,16 @@ yield client.messageProt.addToHistory(mess_data) except exceptions.CancelError as e: log.warning( - u"message has not been added to history: {e}".format(e=e)) + "message has not been added to history: {e}".format(e=e)) except Exception as e: log.error( - u"can't add message to history: {e}\n{xml}" + "can't add message to history: {e}\n{xml}" .format(e=e, xml=mess_elt.toXml())) if not count: - log.info(_(u"We have received no message while offline")) + log.info(_("We have received no message while offline")) else: - log.info(_(u"We have received {num_mess} message(s) while offline.") + log.info(_("We have received {num_mess} message(s) while offline.") .format(num_mess=count)) def profileConnected(self, client): @@ -168,31 +168,31 @@ """ mam_args = {} form_args = {} - for arg in (u"start", u"end"): + for arg in ("start", "end"): try: value = extra.pop(MAM_PREFIX + arg) form_args[arg] = datetime.fromtimestamp(float(value), tz.tzutc()) except (TypeError, ValueError): - log.warning(u"Bad value for {arg} filter ({value}), ignoring".format( + log.warning("Bad value for {arg} filter ({value}), ignoring".format( arg=arg, value=value)) except KeyError: continue try: - form_args[u"with_jid"] = jid.JID(extra.pop( - MAM_PREFIX + u"with")) + form_args["with_jid"] = jid.JID(extra.pop( + MAM_PREFIX + "with")) except (jid.InvalidFormat): - log.warning(u"Bad value for jid filter") + log.warning("Bad value for jid filter") except KeyError: pass - for name, value in extra.iteritems(): + for name, value in extra.items(): if name.startswith(FILTER_PREFIX): var = name[len(FILTER_PREFIX):] - extra_fields = form_args.setdefault(u"extra_fields", []) + extra_fields = form_args.setdefault("extra_fields", []) extra_fields.append(data_form.Field(var=var, value=value)) - for arg in (u"node", u"query_id"): + for arg in ("node", "query_id"): try: value = extra.pop(MAM_PREFIX + arg) mam_args[arg] = value @@ -209,8 +209,8 @@ # we only set orderBy if we have other MAM args # else we would make a MAM query while it's not expected - if u"order_by" in extra and mam_args: - order_by = extra.pop(u"order_by") + if "order_by" in extra and mam_args: + order_by = extra.pop("order_by") assert isinstance(order_by, list) mam_args["orderBy"] = order_by @@ -230,8 +230,8 @@ """ if data is None: data = {} - data[u"mam_complete"] = C.boolConst(mam_response[u'complete']) - data[u"mam_stable"] = C.boolConst(mam_response[u'stable']) + data["mam_complete"] = C.boolConst(mam_response['complete']) + data["mam_stable"] = C.boolConst(mam_response['stable']) return data def getMessageFromResult(self, client, 
mess_elt, mam_req, service=None): @@ -245,38 +245,38 @@ None if it's user server @return domish.Element): <message/> that can be used directly with onMessage """ - if mess_elt.name != u"message": - log.warning(u"unexpected stanza in archive: {xml}".format( + if mess_elt.name != "message": + log.warning("unexpected stanza in archive: {xml}".format( xml=mess_elt.toXml())) - raise exceptions.DataError(u"Invalid element") + raise exceptions.DataError("Invalid element") service_jid = client.jid.userhostJID() if service is None else service - mess_from = mess_elt[u"from"] + mess_from = mess_elt["from"] # we check that the message has been sent by the right service # if service is None (i.e. message expected from our own server) # from can be server jid or user's bare jid if (mess_from != service_jid.full() and not (service is None and mess_from == client.jid.host)): - log.error(u"Message is not from our server, something went wrong: " - u"{xml}".format(xml=mess_elt.toXml())) - raise exceptions.DataError(u"Invalid element") + log.error("Message is not from our server, something went wrong: " + "{xml}".format(xml=mess_elt.toXml())) + raise exceptions.DataError("Invalid element") try: - result_elt = next(mess_elt.elements(mam.NS_MAM, u"result")) - forwarded_elt = next(result_elt.elements(C.NS_FORWARD, u"forwarded")) + result_elt = next(mess_elt.elements(mam.NS_MAM, "result")) + forwarded_elt = next(result_elt.elements(C.NS_FORWARD, "forwarded")) try: - delay_elt = next(forwarded_elt.elements(C.NS_DELAY, u"delay")) + delay_elt = next(forwarded_elt.elements(C.NS_DELAY, "delay")) except StopIteration: # delay_elt is not mandatory delay_elt = None - fwd_message_elt = next(forwarded_elt.elements(C.NS_CLIENT, u"message")) + fwd_message_elt = next(forwarded_elt.elements(C.NS_CLIENT, "message")) except StopIteration: - log.warning(u"Invalid message received from MAM: {xml}".format( + log.warning("Invalid message received from MAM: {xml}".format( xml=mess_elt.toXml())) - raise exceptions.DataError(u"Invalid element") + raise exceptions.DataError("Invalid element") else: - if not result_elt[u"queryid"] == mam_req.query_id: - log.error(u"Unexpected query id (was expecting {query_id}): {xml}" + if not result_elt["queryid"] == mam_req.query_id: + log.error("Unexpected query id (was expecting {query_id}): {xml}" .format(query_id=mam.query_id, xml=mess_elt.toXml())) - raise exceptions.DataError(u"Invalid element") + raise exceptions.DataError("Invalid element") stanza_id = self._sid.getStanzaId(fwd_message_elt, service_jid) if stanza_id is None: @@ -284,11 +284,11 @@ # will be archived with it, and we won't request several times # the same MAM achive try: - stanza_id = result_elt[u"id"] + stanza_id = result_elt["id"] except AttributeError: - log.warning(u'Invalid MAM result: missing "id" attribute: {xml}' + log.warning('Invalid MAM result: missing "id" attribute: {xml}' .format(xml=result_elt.toXml())) - raise exceptions.DataError(u"Invalid element") + raise exceptions.DataError("Invalid element") self._sid.addStanzaId(client, fwd_message_elt, stanza_id, by=service_jid) if delay_elt is not None: @@ -323,12 +323,12 @@ def _queryFinished(self, iq_result, client, elt_list, event): client.xmlstream.removeObserver(event, self._appendMessage) try: - fin_elt = iq_result.elements(mam.NS_MAM, "fin").next() + fin_elt = next(iq_result.elements(mam.NS_MAM, "fin")) except StopIteration: - raise exceptions.DataError(u"Invalid MAM result") + raise exceptions.DataError("Invalid MAM result") - mam_response = {u"complete": 
C.bool(fin_elt.getAttribute(u"complete", C.BOOL_FALSE)), - u"stable": C.bool(fin_elt.getAttribute(u"stable", C.BOOL_TRUE))} + mam_response = {"complete": C.bool(fin_elt.getAttribute("complete", C.BOOL_FALSE)), + "stable": C.bool(fin_elt.getAttribute("stable", C.BOOL_TRUE))} try: rsm_response = rsm.RSMResponse.fromElement(fin_elt) @@ -383,7 +383,7 @@ - stable: a boolean which is False if items order may be changed """ if query.query_id is None: - query.query_id = unicode(uuid.uuid4()) + query.query_id = str(uuid.uuid4()) elt_list = [] event = MESSAGE_RESULT.format(mam_ns=mam.NS_MAM, query_id=query.query_id) client.xmlstream.addObserver(event, self._appendMessage, 0, elt_list, message_cb) @@ -431,7 +431,7 @@ service_jid = client.jid.userhostJID() stanza_id = self._sid.getStanzaId(message_elt, service_jid) if stanza_id is None: - log.debug(u"Ignoring <message>, stanza id is not from our server") + log.debug("Ignoring <message>, stanza id is not from our server") else: # we use self._last_stanza_id_d do be sure that last_stanza_id is stored in # the order of reception @@ -443,8 +443,8 @@ profile=client.profile)) +@implementer(disco.IDisco) class SatMAMClient(mam.MAMClient): - implements(disco.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent @@ -455,7 +455,7 @@ def connectionInitialized(self): observer_xpath = MESSAGE_STANZA_ID.format( - ns_stanza_id=self.host.ns_map[u'stanza_id']) + ns_stanza_id=self.host.ns_map['stanza_id']) self.xmlstream.addObserver( observer_xpath, self.plugin_parent.onMessageStanzaId, client=self.parent )
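The MAMGet bridge registration above renames its keyword argument async to async_: async is a reserved word since Python 3.7 and can no longer be used as an identifier or keyword argument. A tiny illustration of the constraint, with a made-up function standing in for the bridge API:

    # def add_method(name, async=False): ...   # SyntaxError on Python 3.7+

    def add_method(name, async_=False):
        """Stand-in for a bridge-style registration call (illustrative only)."""
        return name, async_

    print(add_method("MAMGet", async_=True))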
--- a/sat/plugins/plugin_xep_0329.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0329.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for File Information Sharing (XEP-0329) @@ -26,7 +26,7 @@ from sat.tools import stream from sat.tools.common import regex from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber import xmlstream from twisted.words.protocols.jabber import jid from twisted.words.protocols.jabber import error as jabber_error @@ -45,17 +45,17 @@ C.PI_DEPENDENCIES: ["XEP-0234", "XEP-0300", "XEP-0106"], C.PI_MAIN: "XEP_0329", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"""Implementation of File Information Sharing"""), + C.PI_DESCRIPTION: _("""Implementation of File Information Sharing"""), } NS_FIS = "urn:xmpp:fis:0" IQ_FIS_REQUEST = C.IQ_GET + '/query[@xmlns="' + NS_FIS + '"]' -SINGLE_FILES_DIR = u"files" -TYPE_VIRTUAL = u"virtual" -TYPE_PATH = u"path" +SINGLE_FILES_DIR = "files" +TYPE_VIRTUAL = "virtual" +TYPE_PATH = "path" SHARE_TYPES = (TYPE_PATH, TYPE_VIRTUAL) -KEY_TYPE = u"type" +KEY_TYPE = "type" class ShareNode(object): @@ -66,14 +66,14 @@ def __init__(self, name, parent, type_, access, path=None): assert type_ in SHARE_TYPES if name is not None: - if name == u".." or u"/" in name or u"\\" in name: + if name == ".." or "/" in name or "\\" in name: log.warning( - _(u"path change chars found in name [{name}], hack attempt?").format( + _("path change chars found in name [{name}], hack attempt?").format( name=name ) ) - if name == u"..": - name = u"--" + if name == "..": + name = "--" else: name = regex.pathEscape(name) self.name = name @@ -89,7 +89,7 @@ assert name is None if path is not None: if type_ != TYPE_PATH: - raise exceptions.InternalError(_(u"path can only be set on path nodes")) + raise exceptions.InternalError(_("path can only be set on path nodes")) self._path = path @property @@ -106,10 +106,10 @@ return self.children.__iter__() def iteritems(self): - return self.children.iteritems() + return iter(self.children.items()) def itervalues(self): - return self.children.itervalues() + return iter(self.children.values()) def getOrCreate(self, name, type_=TYPE_VIRTUAL, access=None): """Get a node or create a virtual node and return it""" @@ -123,7 +123,7 @@ def addChild(self, node): if node.parent is not None: - raise exceptions.ConflictError(_(u"a node can't have several parents")) + raise exceptions.ConflictError(_("a node can't have several parents")) node.parent = self self.children[node.name] = node @@ -132,10 +132,10 @@ del self.parent.children[self.name] except TypeError: raise exceptions.InternalError( - u"trying to remove a node from inexisting parent" + "trying to remove a node from inexisting parent" ) except KeyError: - raise exceptions.InternalError(u"node not found in parent's children") + raise exceptions.InternalError("node not found in parent's children") self.parent = None def _checkNodePermission(self, client, node, perms, peer_jid): @@ -146,7 +146,7 @@ @param peer_jid(jid.JID): entity which try to access the node @return (bool): True if entity can access """ - file_data = {u"access": self.access, u"owner": client.jid.userhostJID()} + file_data = {"access": self.access, "owner": client.jid.userhostJID()} try: self.host.memory.checkFilePermission(file_data, peer_jid, perms) except exceptions.PermissionError: @@ -189,16 +189,16 @@ @raise exceptions.DataError: 
path is invalid @raise NotFound: path lead to a non existing file/directory """ - path_elts = filter(None, path.split(u"/")) + path_elts = [_f for _f in path.split("/") if _f] - if u".." in path_elts: + if ".." in path_elts: log.warning(_( - u'parent dir ("..") found in path, hack attempt? path is {path} ' - u'[{profile}]').format(path=path, profile=client.profile)) - raise exceptions.PermissionError(u"illegal path elements") + 'parent dir ("..") found in path, hack attempt? path is {path} ' + '[{profile}]').format(path=path, profile=client.profile)) + raise exceptions.PermissionError("illegal path elements") if not path_elts: - raise exceptions.DataError(_(u"path is invalid: {path}").format(path=path)) + raise exceptions.DataError(_("path is invalid: {path}").format(path=path)) node = client._XEP_0329_root_node @@ -212,9 +212,9 @@ break if not node.checkPermissions(client, peer_jid, perms=perms): - raise exceptions.PermissionError(u"permission denied") + raise exceptions.PermissionError("permission denied") - return node, u"/".join(path_elts) + return node, "/".join(path_elts) def findByLocalPath(self, path): """retrieve nodes linking to local path @@ -230,13 +230,13 @@ def _getSharedPaths(self, node, paths): if node.type == TYPE_VIRTUAL: - for node in node.itervalues(): + for node in node.values(): self._getSharedPaths(node, paths) elif node.type == TYPE_PATH: paths.setdefault(node.path, []).append(node) else: raise exceptions.InternalError( - u"unknown node type: {type}".format(type=node.type) + "unknown node type: {type}".format(type=node.type) ) def getSharedPaths(self): @@ -267,7 +267,7 @@ in_sign="ssa{ss}s", out_sign="aa{ss}", method=self._listFiles, - async=True, + async_=True, ) host.bridge.addMethod( "FISLocalSharesGet", @@ -320,21 +320,21 @@ return True, None try: - name = file_data[u"name"] + name = file_data["name"] except KeyError: return True, None - assert u"/" not in name + assert "/" not in name - path = file_data.get(u"path") + path = file_data.get("path") if path is not None: # we have a path, we can follow it to find node try: - node, rem_path = ShareNode.find(client, path, session[u"peer_jid"]) + node, rem_path = ShareNode.find(client, path, session["peer_jid"]) except (exceptions.PermissionError, exceptions.NotFound): # no file, or file not allowed, we continue normal workflow return True, None except exceptions.DataError: - log.warning(_(u"invalid path: {path}").format(path=path)) + log.warning(_("invalid path: {path}").format(path=path)) return True, None if node.type == TYPE_VIRTUAL: @@ -348,7 +348,7 @@ path = os.path.join(node.path, rem_path, name) else: raise exceptions.InternalError( - u"unknown type: {type}".format(type=node.type) + "unknown type: {type}".format(type=node.type) ) if not os.path.exists(path): return True, None @@ -360,25 +360,25 @@ except KeyError: return True, None - for path, shared_file in name_data.iteritems(): + for path, shared_file in name_data.items(): if True: # FIXME: filters are here break else: return True, None - parent_node = shared_file[u"parent"] - if not parent_node.checkPermissions(client, session[u"peer_jid"]): + parent_node = shared_file["parent"] + if not parent_node.checkPermissions(client, session["peer_jid"]): log.warning( _( - u"{peer_jid} requested a file (s)he can't access [{profile}]" - ).format(peer_jid=session[u"peer_jid"], profile=client.profile) + "{peer_jid} requested a file (s)he can't access [{profile}]" + ).format(peer_jid=session["peer_jid"], profile=client.profile) ) return True, None - size = 
shared_file[u"size"] + size = shared_file["size"] - file_data[u"size"] = size - file_elt.addElement(u"size", content=unicode(size)) - hash_algo = file_data[u"hash_algo"] = self._h.getDefaultAlgo() - hasher = file_data[u"hash_hasher"] = self._h.getHasher(hash_algo) + file_data["size"] = size + file_elt.addElement("size", content=str(size)) + hash_algo = file_data["hash_algo"] = self._h.getDefaultAlgo() + hasher = file_data["hash_hasher"] = self._h.getHasher(hash_algo) file_elt.addChild(self._h.buildHashUsedElt(hash_algo)) content_data["stream_object"] = stream.FileStreamObject( self.host, @@ -401,7 +401,7 @@ d = defer.maybeDeferred(files_from_node_cb, client, iq_elt, node) d.addErrback( lambda failure_: log.error( - _(u"error while retrieving files: {msg}").format(msg=failure_) + _("error while retrieving files: {msg}").format(msg=failure_) ) ) @@ -452,7 +452,7 @@ name = name.decode("utf-8", "strict") except UnicodeDecodeError as e: log.warning( - _(u"ignoring invalid unicode name ({name}): {msg}").format( + _("ignoring invalid unicode name ({name}): {msg}").format( name=name.decode("utf-8", "replace"), msg=e ) ) @@ -462,7 +462,7 @@ def _virtualNodeHandler(self, client, peer_jid, iq_elt, query_elt, node): """Fill query_elt for virtual nodes""" - for name, child_node in node.iteritems(): + for name, child_node in node.items(): if not child_node.checkPermissions(client, peer_jid, check_parents=False): continue node_type = child_node.type @@ -473,14 +473,14 @@ self._addPathData(client, query_elt, child_node.path, child_node) else: raise exceptions.InternalError( - _(u"unexpected type: {type}").format(type=node_type) + _("unexpected type: {type}").format(type=node_type) ) def _getRootNodesCb(self, client, iq_elt): peer_jid = jid.JID(iq_elt["from"]) iq_result_elt = xmlstream.toResponse(iq_elt, "result") query_elt = iq_result_elt.addElement((NS_FIS, "query")) - for name, node in client._XEP_0329_root_node.iteritems(): + for name, node in client._XEP_0329_root_node.items(): if not node.checkPermissions(client, peer_jid, check_parents=False): continue directory_elt = query_elt.addElement("directory") @@ -489,7 +489,7 @@ def _getFilesFromNodeCb(self, client, iq_elt, node_path): """Main method to retrieve files/directories from a node_path""" - peer_jid = jid.JID(iq_elt[u"from"]) + peer_jid = jid.JID(iq_elt["from"]) try: node, path = ShareNode.find(client, node_path, peer_jid) except (exceptions.PermissionError, exceptions.NotFound): @@ -501,7 +501,7 @@ peer_jid = jid.JID(iq_elt["from"]) iq_result_elt = xmlstream.toResponse(iq_elt, "result") query_elt = iq_result_elt.addElement((NS_FIS, "query")) - query_elt[u"node"] = node_path + query_elt["node"] = node_path # we now fill query_elt according to node_type if node_type == TYPE_PATH: @@ -512,7 +512,7 @@ self._virtualNodeHandler(client, peer_jid, iq_elt, query_elt, node) else: raise exceptions.InternalError( - _(u"unknown node type: {type}").format(type=node_type) + _("unknown node type: {type}").format(type=node_type) ) client.send(iq_result_elt) @@ -533,7 +533,7 @@ to_jid = jid.JID(iq_elt['to']) if to_jid.user: user = self.host.plugins['XEP-0106'].unescape(to_jid.user) - if u'@' in user: + if '@' in user: # a full jid is specified owner = jid.JID(user) else: @@ -551,16 +551,16 @@ files_data = yield self.host.memory.getFiles( client, peer_jid=peer_jid, - parent=u"", + parent="", type_=C.FILE_TYPE_DIRECTORY, owner=owner, ) iq_result_elt = xmlstream.toResponse(iq_elt, "result") query_elt = iq_result_elt.addElement((NS_FIS, "query")) for file_data in 
files_data: - name = file_data[u"name"] - directory_elt = query_elt.addElement(u"directory") - directory_elt[u"name"] = name + name = file_data["name"] + directory_elt = query_elt.addElement("directory") + directory_elt["name"] = name client.send(iq_result_elt) @defer.inlineCallbacks @@ -584,19 +584,19 @@ self._iqError(client, iq_elt, condition='not-allowed') return except Exception as e: - log.error(u"internal server error: {e}".format(e=e)) + log.error("internal server error: {e}".format(e=e)) self._iqError(client, iq_elt, condition='internal-server-error') return iq_result_elt = xmlstream.toResponse(iq_elt, "result") query_elt = iq_result_elt.addElement((NS_FIS, "query")) - query_elt[u"node"] = node_path + query_elt["node"] = node_path if not self.host.trigger.point( - u"XEP-0329_compGetFilesFromNode", client, iq_elt, owner, node_path, files_data + "XEP-0329_compGetFilesFromNode", client, iq_elt, owner, node_path, files_data ): return for file_data in files_data: file_elt = self._jf.buildFileElementFromDict( - file_data, modified=file_data.get(u"modified", file_data[u"created"]) + file_data, modified=file_data.get("modified", file_data["created"]) ) query_elt.addChild(file_elt) client.send(iq_result_elt) @@ -617,14 +617,14 @@ file_data = self._jf.parseFileElement(elt) except exceptions.DataError: continue - file_data[u"type"] = C.FILE_TYPE_FILE + file_data["type"] = C.FILE_TYPE_FILE elif elt.name == "directory" and elt.uri == NS_FIS: # we have a directory file_data = {"name": elt["name"], "type": C.FILE_TYPE_DIRECTORY} else: log.warning( - _(u"unexpected element, ignoring: {elt}").format(elt=elt.toXml()) + _("unexpected element, ignoring: {elt}").format(elt=elt.toXml()) ) continue files.append(file_data) @@ -634,9 +634,9 @@ def _serializeData(self, files_data): for file_data in files_data: - for key, value in file_data.iteritems(): + for key, value in file_data.items(): file_data[key] = ( - json.dumps(value) if key in ("extra",) else unicode(value) + json.dumps(value) if key in ("extra",) else str(value) ) return files_data @@ -670,7 +670,7 @@ return self.localSharesGet(client) def localSharesGet(self, client): - return client._XEP_0329_root_node.getSharedPaths().keys() + return list(client._XEP_0329_root_node.getSharedPaths().keys()) def _sharePath(self, name, path, access, profile): client = self.host.getClient(profile) @@ -681,11 +681,11 @@ if client.is_component: raise exceptions.ClientTypeError if not os.path.exists(path): - raise ValueError(_(u"This path doesn't exist!")) - if not path or not path.strip(u" /"): - raise ValueError(_(u"A path need to be specified")) + raise ValueError(_("This path doesn't exist!")) + if not path or not path.strip(" /"): + raise ValueError(_("A path need to be specified")) if not isinstance(access, dict): - raise ValueError(_(u"access must be a dict")) + raise ValueError(_("access must be a dict")) node = client._XEP_0329_root_node node_type = TYPE_PATH @@ -695,20 +695,20 @@ node = node.getOrCreate(SINGLE_FILES_DIR) if not name: - name = os.path.basename(path.rstrip(u" /")) + name = os.path.basename(path.rstrip(" /")) if not name: - raise exceptions.InternalError(_(u"Can't find a proper name")) + raise exceptions.InternalError(_("Can't find a proper name")) if name in node or name == SINGLE_FILES_DIR: idx = 1 - new_name = name + "_" + unicode(idx) + new_name = name + "_" + str(idx) while new_name in node: idx += 1 - new_name = name + "_" + unicode(idx) + new_name = name + "_" + str(idx) name = new_name log.info(_( - u"A directory with this name is 
already shared, renamed to {new_name} " - u"[{profile}]".format( new_name=new_name, profile=client.profile))) + "A directory with this name is already shared, renamed to {new_name} " + "[{profile}]".format( new_name=new_name, profile=client.profile))) ShareNode(name=name, parent=node, type_=node_type, access=access, path=path) self.host.bridge.FISSharedPathNew(path, name, client.profile) @@ -725,8 +725,8 @@ self.host.bridge.FISSharedPathRemoved(path, client.profile) +@implementer(iwokkel.IDisco) class XEP_0329_handler(xmlstream.XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
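Several rewrites in this file follow the same Python 3 rule: filter(), dict.keys(), dict.items() and their relatives return lazy iterators or views instead of lists, so code that needs indexing, a stable snapshot, or repeated passes must materialise them explicitly (hence the list comprehension replacing filter(None, ...) in ShareNode.find and the list(...) wrapped around getSharedPaths().keys()). A small self-contained sketch of those rewrites, using throwaway data:

    path = "photos//2019/"

    # Python 2: filter(None, path.split("/")) returned a list.
    # Python 3: filter() is lazy, so a comprehension is the usual rewrite.
    path_elts = [elt for elt in path.split("/") if elt]   # ['photos', '2019']

    shares = {"/tmp/a": ["node_a"], "/tmp/b": ["node_b"]}

    # iteritems()/itervalues() are gone; items()/values() return views.
    for local_path, nodes in shares.items():
        print(local_path, nodes)

    # keys() is a view, not a list; wrap it when a real list is needed.
    paths = list(shares.keys())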
--- a/sat/plugins/plugin_xep_0334.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0334.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Delayed Delivery (XEP-0334) @@ -29,21 +29,21 @@ from wokkel import disco, iwokkel from twisted.words.protocols.jabber import xmlstream -from zope.interface import implements +from zope.interface import implementer from textwrap import dedent PLUGIN_INFO = { - C.PI_NAME: u"Message Processing Hints", - C.PI_IMPORT_NAME: u"XEP-0334", - C.PI_TYPE: u"XEP", - C.PI_PROTOCOLS: [u"XEP-0334"], + C.PI_NAME: "Message Processing Hints", + C.PI_IMPORT_NAME: "XEP-0334", + C.PI_TYPE: "XEP", + C.PI_PROTOCOLS: ["XEP-0334"], C.PI_MAIN: "XEP_0334", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: D_(u"""Implementation of Message Processing Hints"""), + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: D_("""Implementation of Message Processing Hints"""), C.PI_USAGE: dedent( D_( - u"""\ + """\ Frontends can use HINT_* constants in mess_data['extra'] in a serialized 'hints' dict. Internal plugins can use directly addHint([HINT_* constant]). Will set mess_data['extra']['history'] to 'skipped' when no store is requested and message is not saved in history.""" @@ -51,14 +51,14 @@ ), } -NS_HINTS = u"urn:xmpp:hints" +NS_HINTS = "urn:xmpp:hints" class XEP_0334(object): - HINT_NO_PERMANENT_STORE = u"no-permanent-store" - HINT_NO_STORE = u"no-store" - HINT_NO_COPY = u"no-copy" - HINT_STORE = u"store" + HINT_NO_PERMANENT_STORE = "no-permanent-store" + HINT_NO_STORE = "no-store" + HINT_NO_COPY = "no-copy" + HINT_STORE = "store" HINTS = (HINT_NO_PERMANENT_STORE, HINT_NO_STORE, HINT_NO_COPY, HINT_STORE) def __init__(self, host): @@ -73,14 +73,14 @@ def addHint(self, mess_data, hint): if hint == self.HINT_NO_COPY and not mess_data["to"].resource: log.error( - u"{hint} can only be used with full jids! Ignoring it.".format(hint=hint) + "{hint} can only be used with full jids! 
Ignoring it.".format(hint=hint) ) return hints = mess_data.setdefault("hints", set()) if hint in self.HINTS: hints.add(hint) else: - log.error(u"Unknown hint: {}".format(hint)) + log.error("Unknown hint: {}".format(hint)) def addHintElements(self, message_elt, hints): """Add hints elements to message stanza @@ -93,22 +93,22 @@ def _sendPostXmlTreatment(self, mess_data): if "hints" in mess_data: - self.addHintElements(mess_data[u"xml"], mess_data[u"hints"]) + self.addHintElements(mess_data["xml"], mess_data["hints"]) return mess_data def sendMessageTrigger( self, client, mess_data, pre_xml_treatments, post_xml_treatments ): """Add the hints element to the message to be sent""" - if u"hints" in mess_data[u"extra"]: - for hint in data_format.dict2iter(u"hints", mess_data[u"extra"], pop=True): + if "hints" in mess_data["extra"]: + for hint in data_format.dict2iter("hints", mess_data["extra"], pop=True): self.addHint(hint) post_xml_treatments.addCallback(self._sendPostXmlTreatment) return True def _receivedSkipHistory(self, mess_data): - mess_data[u"history"] = C.HISTORY_SKIP + mess_data["history"] = C.HISTORY_SKIP return mess_data def messageReceivedTrigger(self, client, message_elt, post_treat): @@ -118,14 +118,14 @@ self.HINT_NO_PERMANENT_STORE, self.HINT_NO_STORE, ): - log.debug(u"history will be skipped for this message, as requested") + log.debug("history will be skipped for this message, as requested") post_treat.addCallback(self._receivedSkipHistory) break return True +@implementer(iwokkel.IDisco) class XEP_0334_handler(xmlstream.XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_HINTS)]
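Most of the remaining lines in this file, as in much of the commit, simply drop the u string prefix: in Python 3 every bare string literal is already text (unicode), the u"" form is still accepted but redundant, and byte strings are the separate b"" type. In short:

    s = "urn:xmpp:hints"            # text (str) by default in Python 3
    assert s == u"urn:xmpp:hints"   # u"" still parses, same type and value
    raw = b"urn:xmpp:hints"         # bytes, a distinct type
    print(type(s), type(raw))       # <class 'str'> <class 'bytes'>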
--- a/sat/plugins/plugin_xep_0352.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0352.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Explicit Message Encryption @@ -25,26 +25,26 @@ log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Client State Indication", - C.PI_IMPORT_NAME: u"XEP-0352", + C.PI_NAME: "Client State Indication", + C.PI_IMPORT_NAME: "XEP-0352", C.PI_TYPE: C.PLUG_TYPE_XEP, - C.PI_PROTOCOLS: [u"XEP-0352"], + C.PI_PROTOCOLS: ["XEP-0352"], C.PI_DEPENDENCIES: [], - C.PI_MAIN: u"XEP_0352", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: D_(u"Notify server when frontend is not actively used, to limit " - u"traffic and save bandwidth and battery life"), + C.PI_MAIN: "XEP_0352", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: D_("Notify server when frontend is not actively used, to limit " + "traffic and save bandwidth and battery life"), } -NS_CSI = u"urn:xmpp:csi:0" +NS_CSI = "urn:xmpp:csi:0" class XEP_0352(object): def __init__(self, host): - log.info(_(u"Client State Indication plugin initialization")) + log.info(_("Client State Indication plugin initialization")) self.host = host - host.registerNamespace(u"csi", NS_CSI) + host.registerNamespace("csi", NS_CSI) def isActive(self, client): try: @@ -54,30 +54,30 @@ except AttributeError: # _xep_0352_active can not be set if isActive is called before # profileConnected has been called - log.debug(u"isActive called when XEP-0352 plugin has not yet set the " - u"attributes") + log.debug("isActive called when XEP-0352 plugin has not yet set the " + "attributes") return True def profileConnected(self, client): - if (NS_CSI, u'csi') in client.xmlstream.features: - log.info(_(u"Client State Indication is available on this server")) + if (NS_CSI, 'csi') in client.xmlstream.features: + log.info(_("Client State Indication is available on this server")) client._xep_0352_enabled = True client._xep_0352_active = True else: - log.warning(_(u"Client State Indication is not available on this server, some" - u" bandwidth optimisations can't be used.")) + log.warning(_("Client State Indication is not available on this server, some" + " bandwidth optimisations can't be used.")) client._xep_0352_enabled = False def setInactive(self, client): if self.isActive(client): - inactive_elt = domish.Element((NS_CSI, u'inactive')) + inactive_elt = domish.Element((NS_CSI, 'inactive')) client.send(inactive_elt) client._xep_0352_active = False - log.info(u"inactive state set") + log.info("inactive state set") def setActive(self, client): if not self.isActive(client): - active_elt = domish.Element((NS_CSI, u'active')) + active_elt = domish.Element((NS_CSI, 'active')) client.send(active_elt) client._xep_0352_active = True - log.info(u"active state set") + log.info("active state set")
--- a/sat/plugins/plugin_xep_0359.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0359.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Message Archive Management (XEP-0359) @@ -23,38 +23,38 @@ from sat.core.i18n import _ from sat.core.log import getLogger from twisted.words.protocols.jabber import xmlstream -from zope.interface import implements +from zope.interface import implementer from wokkel import disco log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Unique and Stable Stanza IDs", - C.PI_IMPORT_NAME: u"XEP-0359", - C.PI_TYPE: u"XEP", - C.PI_PROTOCOLS: [u"XEP-0359"], - C.PI_MAIN: u"XEP_0359", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: _(u"""Implementation of Unique and Stable Stanza IDs"""), + C.PI_NAME: "Unique and Stable Stanza IDs", + C.PI_IMPORT_NAME: "XEP-0359", + C.PI_TYPE: "XEP", + C.PI_PROTOCOLS: ["XEP-0359"], + C.PI_MAIN: "XEP_0359", + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: _("""Implementation of Unique and Stable Stanza IDs"""), } -NS_SID = u"urn:xmpp:sid:0" +NS_SID = "urn:xmpp:sid:0" class XEP_0359(object): def __init__(self, host): - log.info(_(u"Unique and Stable Stanza IDs plugin initialization")) + log.info(_("Unique and Stable Stanza IDs plugin initialization")) self.host = host - host.registerNamespace(u"stanza_id", NS_SID) - host.trigger.add(u"message_parse", self._message_parseTrigger) + host.registerNamespace("stanza_id", NS_SID) + host.trigger.add("message_parse", self._message_parseTrigger) def _message_parseTrigger(self, client, message_elt, mess_data): """Check if message has a stanza-id""" stanza_id = self.getStanzaId(message_elt, client.jid.userhostJID()) if stanza_id is not None: - mess_data[u'extra'][u'stanza_id'] = stanza_id + mess_data['extra']['stanza_id'] = stanza_id return True def getStanzaId(self, element, by): @@ -65,13 +65,13 @@ @return (unicode, None): stanza-id if found """ stanza_id = None - for stanza_elt in element.elements(NS_SID, u"stanza-id"): - if stanza_elt.getAttribute(u"by") == by.full(): + for stanza_elt in element.elements(NS_SID, "stanza-id"): + if stanza_elt.getAttribute("by") == by.full(): if stanza_id is not None: # we must not have more than one element (§3 #4) raise exceptions.DataError( - u"More than one corresponding stanza-id found!") - stanza_id = stanza_elt.getAttribute(u"id") + "More than one corresponding stanza-id found!") + stanza_id = stanza_elt.getAttribute("id") # we don't break to be sure that there is no more than one element # with this "by" attribute @@ -84,16 +84,16 @@ @param stanza_id(unicode): id to use @param by(jid.JID, None): jid to use or None to use client.jid """ - sid_elt = element.addElement((NS_SID, u"stanza-id")) - sid_elt[u"by"] = client.jid.userhost() if by is None else by.userhost() - sid_elt[u"id"] = stanza_id + sid_elt = element.addElement((NS_SID, "stanza-id")) + sid_elt["by"] = client.jid.userhost() if by is None else by.userhost() + sid_elt["id"] = stanza_id def getHandler(self, client): return XEP_0359_handler() +@implementer(disco.IDisco) class XEP_0359_handler(xmlstream.XMPPHandler): - implements(disco.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_SID)]
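XEP_0359_handler above, like the other handlers in this commit, moves from zope.interface's implements() call inside the class body to the @implementer class decorator, the form supported on Python 3 (the old call relied on stack-frame magic that no longer works there). A minimal sketch with placeholder interface and class names:

    from zope.interface import Interface, implementer

    class IGreeter(Interface):
        """Illustrative interface, not part of the codebase."""

    # Python 2 style, broken on Python 3:
    #     class Greeter(object):
    #         implements(IGreeter)

    @implementer(IGreeter)
    class Greeter(object):
        pass

    print(IGreeter.implementedBy(Greeter))   # True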
--- a/sat/plugins/plugin_xep_0363.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0363.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for HTTP File Upload (XEP-0363) @@ -24,7 +24,7 @@ log = getLogger(__name__) from sat.core import exceptions from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber import jid from twisted.words.protocols.jabber.xmlstream import XMPPHandler from twisted.internet import reactor @@ -50,7 +50,7 @@ C.PI_DEPENDENCIES: ["FILE", "UPLOAD"], C.PI_MAIN: "XEP_0363", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"""Implementation of HTTP File Upload"""), + C.PI_DESCRIPTION: _("""Implementation of HTTP File Upload"""), } NS_HTTP_UPLOAD = "urn:xmpp:http:upload:0" @@ -82,7 +82,7 @@ def creatorForNetloc(self, hostname, port): log.warning( - u"TLS check disabled for {host} on port {port}".format( + "TLS check disabled for {host} on port {port}".format( host=hostname, port=port ) ) @@ -107,10 +107,10 @@ in_sign="sisss", out_sign="(ss)", method=self._getSlot, - async=True, + async_=True, ) host.plugins["UPLOAD"].register( - u"HTTP Upload", self.getHTTPUploadEntity, self.fileHTTPUpload + "HTTP Upload", self.getHTTPUploadEntity, self.fileHTTPUpload ) def getHandler(self, client): @@ -131,12 +131,12 @@ except AttributeError: found_entities = yield self.host.findFeaturesSet(client, (NS_HTTP_UPLOAD,)) try: - entity = client.http_upload_service = iter(found_entities).next() + entity = client.http_upload_service = next(iter(found_entities)) except StopIteration: entity = client.http_upload_service = None if entity is None: - raise failure.Failure(exceptions.NotFound(u"No HTTP upload entity found")) + raise failure.Failure(exceptions.NotFound("No HTTP upload entity found")) defer.returnValue(entity) @@ -187,7 +187,7 @@ def _getSlotEb(self, fail, client, progress_id_d, download_d): """an error happened while trying to get slot""" - log.warning(u"Can't get upload slot: {reason}".format(reason=fail.value)) + log.warning("Can't get upload slot: {reason}".format(reason=fail.value)) progress_id_d.errback(fail) download_d.errback(fail) @@ -204,7 +204,7 @@ @param ignore_tls_errors(bool): ignore TLS certificate is True @return (tuple """ - log.debug(u"Got upload slot: {}".format(slot)) + log.debug("Got upload slot: {}".format(slot)) sat_file = self.host.plugins["FILE"].File( self.host, client, path, size=size, auto_end_signals=False ) @@ -243,7 +243,7 @@ should be closed, be is needed to send the progressFinished signal @param slot(Slot): put/get urls """ - log.info(u"HTTP upload finished") + log.info("HTTP upload finished") sat_file.progressFinished({"url": slot.get}) download_d.callback(slot.get) @@ -257,15 +257,15 @@ try: wrapped_fail = fail.value.reasons[0] except (AttributeError, IndexError) as e: - log.warning(_(u"upload failed: {reason}").format(reason=e)) - sat_file.progressError(unicode(fail)) + log.warning(_("upload failed: {reason}").format(reason=e)) + sat_file.progressError(str(fail)) raise fail else: if wrapped_fail.check(SSL.Error): - msg = u"TLS validation error, can't connect to HTTPS server" + msg = "TLS validation error, can't connect to HTTPS server" else: - msg = u"can't upload file" - log.warning(msg + ": " + unicode(wrapped_fail.value)) + msg = "can't upload file" + log.warning(msg + ": " + str(wrapped_fail.value)) sat_file.progressError(msg) def _gotSlot(self, iq_elt, client): @@ 
-275,26 +275,26 @@ @param iq_elt(domish.Element): <IQ/> result as specified in XEP-0363 """ try: - slot_elt = iq_elt.elements(NS_HTTP_UPLOAD, "slot").next() - put_elt = slot_elt.elements(NS_HTTP_UPLOAD, "put").next() + slot_elt = next(iq_elt.elements(NS_HTTP_UPLOAD, "slot")) + put_elt = next(slot_elt.elements(NS_HTTP_UPLOAD, "put")) put_url = put_elt['url'] - get_elt = slot_elt.elements(NS_HTTP_UPLOAD, "get").next() + get_elt = next(slot_elt.elements(NS_HTTP_UPLOAD, "get")) get_url = get_elt['url'] except (StopIteration, KeyError): - raise exceptions.DataError(u"Incorrect stanza received from server") + raise exceptions.DataError("Incorrect stanza received from server") headers = [] for header_elt in put_elt.elements(NS_HTTP_UPLOAD, "header"): try: name = header_elt["name"] - value = unicode(header_elt) + value = str(header_elt) except KeyError: - log.warning(_(u"Invalid header element: {xml}").format( + log.warning(_("Invalid header element: {xml}").format( iq_elt.toXml())) continue name = name.replace('\n', '') value = value.replace('\n', '') if name.lower() not in ALLOWED_HEADERS: - log.warning(_(u'Ignoring unauthorised header "{name}": {xml}') + log.warning(_('Ignoring unauthorised header "{name}": {xml}') .format(name=name, xml = iq_elt.toXml())) continue headers.append((name, value)) @@ -351,14 +351,14 @@ else: if upload_jid is None: raise failure.Failure( - exceptions.NotFound(u"No HTTP upload entity found") + exceptions.NotFound("No HTTP upload entity found") ) iq_elt = client.IQ("get") iq_elt["to"] = upload_jid.full() request_elt = iq_elt.addElement((NS_HTTP_UPLOAD, "request")) request_elt["filename"] = filename - request_elt["size"] = unicode(size) + request_elt["size"] = str(size) if content_type is not None: request_elt["content-type"] = content_type @@ -368,8 +368,8 @@ return d +@implementer(iwokkel.IDisco) class XEP_0363_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_HTTP_UPLOAD)]
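The slot-parsing changes above apply the Python 3 iterator rename: the .next() method became __next__(), and the portable spelling is the next() builtin, which can also take a default instead of raising StopIteration. A quick reminder with a placeholder iterable:

    entities = iter(["upload.example.org"])   # placeholder service list

    first = next(entities)           # Python 2 wrote: entities.next()
    leftover = next(entities, None)  # default avoids StopIteration
    print(first, leftover)           # upload.example.org None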
--- a/sat/plugins/plugin_xep_0380.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0380.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Explicit Message Encryption @@ -25,21 +25,21 @@ log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Explicit Message Encryption", - C.PI_IMPORT_NAME: u"XEP-0380", - C.PI_TYPE: u"SEC", - C.PI_PROTOCOLS: [u"XEP-0380"], + C.PI_NAME: "Explicit Message Encryption", + C.PI_IMPORT_NAME: "XEP-0380", + C.PI_TYPE: "SEC", + C.PI_PROTOCOLS: ["XEP-0380"], C.PI_DEPENDENCIES: [], - C.PI_MAIN: u"XEP_0380", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: _(u"""Implementation of Explicit Message Encryption"""), + C.PI_MAIN: "XEP_0380", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: _("""Implementation of Explicit Message Encryption"""), } -NS_EME = u"urn:xmpp:eme:0" +NS_EME = "urn:xmpp:eme:0" KNOWN_NAMESPACES = { - u"urn:xmpp:otr:0": u"OTR", - u"jabber:x:encrypted": u"Legacy OpenPGP", - u"urn:xmpp:openpgp:0": u"OpenPGP for XMPP", + "urn:xmpp:otr:0": "OTR", + "jabber:x:encrypted": "Legacy OpenPGP", + "urn:xmpp:openpgp:0": "OpenPGP for XMPP", } @@ -49,14 +49,14 @@ self.host = host host.trigger.add("sendMessage", self._sendMessageTrigger) host.trigger.add("MessageReceived", self._MessageReceivedTrigger, priority=100) - host.registerNamespace(u"eme", NS_EME) + host.registerNamespace("eme", NS_EME) def _addEMEElement(self, mess_data, namespace, name): - message_elt = mess_data[u'xml'] - encryption_elt = message_elt.addElement((NS_EME, u'encryption')) - encryption_elt[u'namespace'] = namespace + message_elt = mess_data['xml'] + encryption_elt = message_elt.addElement((NS_EME, 'encryption')) + encryption_elt['namespace'] = namespace if name is not None: - encryption_elt[u'name'] = name + encryption_elt['name'] = name return mess_data def _sendMessageTrigger(self, client, mess_data, __, post_xml_treatments): @@ -64,7 +64,7 @@ if encryption is not None: namespace = encryption['plugin'].namespace if namespace not in KNOWN_NAMESPACES: - name = encryption[u'plugin'].name + name = encryption['plugin'].name else: name = None post_xml_treatments.addCallback( @@ -73,7 +73,7 @@ def _MessageReceivedTrigger(self, client, message_elt, post_treat): try: - encryption_elt = next(message_elt.elements(NS_EME, u'encryption')) + encryption_elt = next(message_elt.elements(NS_EME, 'encryption')) except StopIteration: return True @@ -82,20 +82,20 @@ # message is encrypted and we can decrypt it return True - name = KNOWN_NAMESPACES.get(namespace, encryption_elt.getAttribute(u"name")) + name = KNOWN_NAMESPACES.get(namespace, encryption_elt.getAttribute("name")) # at this point, message is encrypted but we know that we can't decrypt it, # we need to notify the user - sender_s = message_elt[u'from'] - to_jid = jid.JID(message_elt[u'from']) - algorithm = u"{} [{}]".format(name, namespace) if name else namespace + sender_s = message_elt['from'] + to_jid = jid.JID(message_elt['from']) + algorithm = "{} [{}]".format(name, namespace) if name else namespace log.warning( - _(u"Message from {sender} is encrypted with {algorithm} and we can't " - u"decrypt it.".format(sender=message_elt['from'], algorithm=algorithm))) + _("Message from {sender} is encrypted with {algorithm} and we can't " + "decrypt it.".format(sender=message_elt['from'], algorithm=algorithm))) user_msg = D_( - u"User {sender} sent you an encrypted message (encrypted with {algorithm}), " - u"and we can't decrypt it.").format(sender=sender_s, 
algorithm=algorithm) + "User {sender} sent you an encrypted message (encrypted with {algorithm}), " + "and we can't decrypt it.").format(sender=sender_s, algorithm=algorithm) extra = {C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR} client.feedback(to_jid, user_msg, extra)
--- a/sat/plugins/plugin_xep_0384.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_xep_0384.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for OMEMO encryption @@ -39,21 +39,21 @@ # from omemo import wireformat except ImportError as e: raise exceptions.MissingModule( - u'Missing module omemo, please download/install it. You can use ' - u'"pip install omemo"' + 'Missing module omemo, please download/install it. You can use ' + '"pip install omemo"' ) log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"OMEMO", - C.PI_IMPORT_NAME: u"XEP-0384", - C.PI_TYPE: u"SEC", - C.PI_PROTOCOLS: [u"XEP-0384"], - C.PI_DEPENDENCIES: [u"XEP-0163", u"XEP-0280", u"XEP-0334", u"XEP-0060"], - C.PI_MAIN: u"OMEMO", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: _(u"""Implementation of OMEMO"""), + C.PI_NAME: "OMEMO", + C.PI_IMPORT_NAME: "XEP-0384", + C.PI_TYPE: "SEC", + C.PI_PROTOCOLS: ["XEP-0384"], + C.PI_DEPENDENCIES: ["XEP-0163", "XEP-0280", "XEP-0334", "XEP-0060"], + C.PI_MAIN: "OMEMO", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: _("""Implementation of OMEMO"""), } OMEMO_MIN_VER = (0, 10, 4) @@ -159,53 +159,53 @@ self.setCb(d, callback) def loadSession(self, callback, bare_jid, device_id): - key = u'\n'.join([KEY_SESSION, bare_jid, unicode(device_id)]) + key = '\n'.join([KEY_SESSION, bare_jid, str(device_id)]) d = self.data.get(key) self.setCb(d, callback) def storeSession(self, callback, bare_jid, device_id, session): - key = u'\n'.join([KEY_SESSION, bare_jid, unicode(device_id)]) + key = '\n'.join([KEY_SESSION, bare_jid, str(device_id)]) d = self.data.force(key, session) self.setCb(d, callback) def deleteSession(self, callback, bare_jid, device_id): - key = u'\n'.join([KEY_SESSION, bare_jid, unicode(device_id)]) + key = '\n'.join([KEY_SESSION, bare_jid, str(device_id)]) d = self.data.remove(key) self.setCb(d, callback) def loadActiveDevices(self, callback, bare_jid): - key = u'\n'.join([KEY_ACTIVE_DEVICES, bare_jid]) + key = '\n'.join([KEY_ACTIVE_DEVICES, bare_jid]) d = self.data.get(key, {}) if callback is not None: self.setCb(d, callback) return d def loadInactiveDevices(self, callback, bare_jid): - key = u'\n'.join([KEY_INACTIVE_DEVICES, bare_jid]) + key = '\n'.join([KEY_INACTIVE_DEVICES, bare_jid]) d = self.data.get(key, {}) if callback is not None: self.setCb(d, callback) return d def storeActiveDevices(self, callback, bare_jid, devices): - key = u'\n'.join([KEY_ACTIVE_DEVICES, bare_jid]) + key = '\n'.join([KEY_ACTIVE_DEVICES, bare_jid]) d = self._checkJid(bare_jid) d.addCallback(lambda _: self.data.force(key, devices)) self.setCb(d, callback) def storeInactiveDevices(self, callback, bare_jid, devices): - key = u'\n'.join([KEY_INACTIVE_DEVICES, bare_jid]) + key = '\n'.join([KEY_INACTIVE_DEVICES, bare_jid]) d = self._checkJid(bare_jid) d.addCallback(lambda _: self.data.force(key, devices)) self.setCb(d, callback) def storeTrust(self, callback, bare_jid, device_id, trust): - key = u'\n'.join([KEY_TRUST, bare_jid, unicode(device_id)]) + key = '\n'.join([KEY_TRUST, bare_jid, str(device_id)]) d = self.data.force(key, trust) self.setCb(d, callback) def loadTrust(self, callback, bare_jid, device_id): - key = u'\n'.join([KEY_TRUST, bare_jid, unicode(device_id)]) + key = '\n'.join([KEY_TRUST, bare_jid, str(device_id)]) d = self.data.get(key) if callback is not None: self.setCb(d, callback) @@ -221,11 +221,11 @@ failed = [success for success, __ in results if not success] if failed: log.warning( - u"delete JID failed 
for {failed_count} on {total_count} operations" + "delete JID failed for {failed_count} on {total_count} operations" .format(failed_count=len(failed), total_count=len(results))) else: log.info( - u"Delete JID operation succeed ({total_count} operations)." + "Delete JID operation succeed ({total_count} operations)." .format(total_count=len(results))) def _deleteJID_gotDevices(self, results, bare_jid): @@ -240,7 +240,7 @@ else: for device_id in devices: for key in (KEY_SESSION, KEY_TRUST): - k = u'\n'.join([key, bare_jid, unicode(device_id)]) + k = '\n'.join([key, bare_jid, str(device_id)]) d_list.append(self.data.remove(k)) d_list.append(self.data.remove(KEY_ACTIVE_DEVICES, bare_jid)) @@ -257,10 +257,10 @@ """Retrieve all (in)actives of bare_jid, and delete all related keys""" d_list = [] - key = u'\n'.join([KEY_ACTIVE_DEVICES, bare_jid]) + key = '\n'.join([KEY_ACTIVE_DEVICES, bare_jid]) d_list.append(self.data.get(key, [])) - key = u'\n'.join([KEY_INACTIVE_DEVICES, bare_jid]) + key = '\n'.join([KEY_INACTIVE_DEVICES, bare_jid]) d_inactive = self.data.get(key, {}) # inactive devices are returned as a dict mapping from devices_id to timestamp # but we only need devices ids @@ -332,7 +332,7 @@ else: bare_jids = [e.userhost() for e in bare_jids] if bundles is not None: - bundles = {e.userhost(): v for e, v in bundles.iteritems()} + bundles = {e.userhost(): v for e, v in bundles.items()} encrypt_mess_p = self._session.encryptMessage( bare_jids=bare_jids, plaintext=message.encode('utf-8'), @@ -380,21 +380,21 @@ class OMEMO(object): def __init__(self, host): - log.info(_(u"OMEMO plugin initialization (omemo module v{version})").format( + log.info(_("OMEMO plugin initialization (omemo module v{version})").format( version=omemo.__version__)) - version = tuple(map(int, omemo.__version__.split(u'.')[:3])) + version = tuple(map(int, omemo.__version__.split('.')[:3])) if version < OMEMO_MIN_VER: log.warning(_( - u"Your version of omemo module is too old: {v[0]}.{v[1]}.{v[2]} is " - u"minimum required), please update.").format(v=OMEMO_MIN_VER)) + "Your version of omemo module is too old: {v[0]}.{v[1]}.{v[2]} is " + "minimum required), please update.").format(v=OMEMO_MIN_VER)) raise exceptions.CancelError("module is too old") self.host = host - self._p_hints = host.plugins[u"XEP-0334"] - self._p_carbons = host.plugins[u"XEP-0280"] - self._p = host.plugins[u"XEP-0060"] + self._p_hints = host.plugins["XEP-0334"] + self._p_carbons = host.plugins["XEP-0280"] + self._p = host.plugins["XEP-0060"] host.trigger.add("MessageReceived", self._messageReceivedTrigger, priority=100050) host.trigger.add("sendMessageData", self._sendMessageDataTrigger) - self.host.registerEncryptionPlugin(self, u"OMEMO", NS_OMEMO, 100) + self.host.registerEncryptionPlugin(self, "OMEMO", NS_OMEMO, 100) pep = host.plugins['XEP-0163'] pep.addPEPEvent("OMEMO_DEVICES", NS_OMEMO_DEVICES, self.onNewDevices) @@ -406,21 +406,21 @@ client = self.host.getClient(profile) session = client._xep_0384_session answer = xml_tools.XMLUIResult2DataFormResult(xmlui_data) - for key, value in answer.iteritems(): - if key.startswith(u'trust_'): + for key, value in answer.items(): + if key.startswith('trust_'): trust_id = key[6:] else: continue data = trust_data[trust_id] trust = C.bool(value) if trust: - yield session.trust(data[u"jid"], - data[u"device"], - data[u"ik"]) + yield session.trust(data["jid"], + data["device"], + data["ik"]) else: - yield session.distrust(data[u"jid"], - data[u"device"], - data[u"ik"]) + yield session.distrust(data["jid"], + 
data["device"], + data["ik"]) if expect_problems is not None: expect_problems.setdefault(data.bare_jid, set()).add(data.device) defer.returnValue({}) @@ -449,7 +449,7 @@ # we need entity_jid xor trust_data assert entity_jid and not trust_data or not entity_jid and trust_data if entity_jid and entity_jid.resource: - raise ValueError(u"A bare jid is expected") + raise ValueError("A bare jid is expected") session = client._xep_0384_session @@ -458,7 +458,7 @@ trust_data = {} trust_session_data = yield session.getTrustForJID(entity_jid) bare_jid_s = entity_jid.userhost() - for device_id, trust_info in trust_session_data['active'].iteritems(): + for device_id, trust_info in trust_session_data['active'].items(): if trust_info is None: # device has never been (un)trusted, we have to retrieve its # fingerprint (i.e. identity key or "ik") through public bundle @@ -468,25 +468,25 @@ [device_id]) if device_id not in bundles: log.warning(_( - u"Can't find bundle for device {device_id} of user " - u"{bare_jid}, ignoring").format(device_id=device_id, + "Can't find bundle for device {device_id} of user " + "{bare_jid}, ignoring").format(device_id=device_id, bare_jid=bare_jid_s)) continue cache[device_id] = bundles[device_id] # TODO: replace False below by None when undecided # trusts are handled trust_info = { - u"key": cache[device_id].ik, - u"trusted": False + "key": cache[device_id].ik, + "trusted": False } ik = trust_info["key"] - trust_id = unicode(hash((bare_jid_s, device_id, ik))) + trust_id = str(hash((bare_jid_s, device_id, ik))) trust_data[trust_id] = { - u"jid": entity_jid, - u"device": device_id, - u"ik": ik, - u"trusted": trust_info[u"trusted"], + "jid": entity_jid, + "device": device_id, + "ik": ik, + "trusted": trust_info["trusted"], } if submit_id is None: @@ -496,41 +496,41 @@ one_shot=True) xmlui = xml_tools.XMLUI( panel_type = C.XMLUI_FORM, - title = D_(u"OMEMO trust management"), + title = D_("OMEMO trust management"), submit_id = submit_id ) xmlui.addText(D_( - u"This is OMEMO trusting system. You'll see below the devices of your " - u"contacts, and a checkbox to trust them or not. A trusted device " - u"can read your messages in plain text, so be sure to only validate " - u"devices that you are sure are belonging to your contact. It's better " - u"to do this when you are next to your contact and her/his device, so " - u"you can check the \"fingerprint\" (the number next to the device) " - u"yourself. Do *not* validate a device if the fingerprint is wrong!")) + "This is OMEMO trusting system. You'll see below the devices of your " + "contacts, and a checkbox to trust them or not. A trusted device " + "can read your messages in plain text, so be sure to only validate " + "devices that you are sure are belonging to your contact. It's better " + "to do this when you are next to your contact and her/his device, so " + "you can check the \"fingerprint\" (the number next to the device) " + "yourself. 
Do *not* validate a device if the fingerprint is wrong!")) xmlui.changeContainer("label") - xmlui.addLabel(D_(u"This device ID")) - xmlui.addText(unicode(client._xep_0384_device_id)) - xmlui.addLabel(D_(u"This device fingerprint")) + xmlui.addLabel(D_("This device ID")) + xmlui.addText(str(client._xep_0384_device_id)) + xmlui.addLabel(D_("This device fingerprint")) ik_hex = session.public_bundle.ik.encode('hex').upper() - fp_human = u' '.join([ik_hex[i:i+8] for i in range(0, len(ik_hex), 8)]) + fp_human = ' '.join([ik_hex[i:i+8] for i in range(0, len(ik_hex), 8)]) xmlui.addText(fp_human) xmlui.addEmpty() xmlui.addEmpty() - for trust_id, data in trust_data.iteritems(): - xmlui.addLabel(D_(u"Contact")) - xmlui.addJid(data[u'jid']) - xmlui.addLabel(D_(u"Device ID")) - xmlui.addText(unicode(data[u'device'])) - xmlui.addLabel(D_(u"Fingerprint")) - ik_hex = data[u'ik'].encode('hex').upper() - fp_human = u' '.join([ik_hex[i:i+8] for i in range(0, len(ik_hex), 8)]) + for trust_id, data in trust_data.items(): + xmlui.addLabel(D_("Contact")) + xmlui.addJid(data['jid']) + xmlui.addLabel(D_("Device ID")) + xmlui.addText(str(data['device'])) + xmlui.addLabel(D_("Fingerprint")) + ik_hex = data['ik'].encode('hex').upper() + fp_human = ' '.join([ik_hex[i:i+8] for i in range(0, len(ik_hex), 8)]) xmlui.addText(fp_human) - xmlui.addLabel(D_(u"Trust this device?")) - xmlui.addBool(u"trust_{}".format(trust_id), - value=C.boolConst(data.get(u'trusted', False))) + xmlui.addLabel(D_("Trust this device?")) + xmlui.addBool("trust_{}".format(trust_id), + value=C.boolConst(data.get('trusted', False))) xmlui.addEmpty() xmlui.addEmpty() @@ -549,7 +549,7 @@ # and our own device id device_id = yield persistent_dict.get(KEY_DEVICE_ID) if device_id is None: - log.info(_(u"We have no identity for this device yet, let's generate one")) + log.info(_("We have no identity for this device yet, let's generate one")) # we have a new device, we create device_id device_id = random.randint(1, 2**31-1) # we check that it's really unique @@ -571,7 +571,7 @@ client._xep_0384_device_id = device_id yield omemo_session.newDeviceList(client.jid, devices) if omemo_session.republish_bundle: - log.info(_(u"Saving public bundle for this device ({device_id})").format( + log.info(_("Saving public bundle for this device ({device_id})").format( device_id=device_id)) yield self.setBundle(client, omemo_session.public_bundle, device_id) client._xep_0384_ready.callback(None) @@ -589,22 +589,22 @@ """ devices = set() if len(items) > 1: - log.warning(_(u"OMEMO devices list is stored in more that one items, " - u"this is not expected")) + log.warning(_("OMEMO devices list is stored in more that one items, " + "this is not expected")) if items: try: list_elt = next(items[0].elements(NS_OMEMO, 'list')) except StopIteration: - log.warning(_(u"no list element found in OMEMO devices list")) + log.warning(_("no list element found in OMEMO devices list")) return for device_elt in list_elt.elements(NS_OMEMO, 'device'): try: device_id = int(device_elt['id']) except KeyError: - log.warning(_(u'device element is missing "id" attribute: {elt}') + log.warning(_('device element is missing "id" attribute: {elt}') .format(elt=device_elt.toXml())) except ValueError: - log.warning(_(u'invalid device id: {device_id}').format( + log.warning(_('invalid device id: {device_id}').format( device_id=device_elt['id'])) else: devices.add(device_id) @@ -624,7 +624,7 @@ items, metadata = yield self._p.getItems(client, entity_jid, NS_OMEMO_DEVICES) except error.StanzaError as e: if 
e.condition == 'item-not-found': - log.info(_(u"there is no node to handle OMEMO devices")) + log.info(_("there is no node to handle OMEMO devices")) defer.returnValue(set()) raise e @@ -632,13 +632,13 @@ defer.returnValue(devices) def setDevicesEb(self, failure_): - log.warning(_(u"Can't set devices: {reason}").format(reason=failure_)) + log.warning(_("Can't set devices: {reason}").format(reason=failure_)) def setDevices(self, client, devices): list_elt = domish.Element((NS_OMEMO, 'list')) for device in devices: device_elt = list_elt.addElement('device') - device_elt['id'] = unicode(device) + device_elt['id'] = str(device) d = self._p.sendItem( client, None, NS_OMEMO_DEVICES, list_elt, item_id=self._p.ID_SINGLETON) d.addErrback(self.setDevicesEb) @@ -666,23 +666,23 @@ try: items, metadata = yield self._p.getItems(client, entity_jid, node) except error.StanzaError as e: - if e.condition == u"item-not-found": - log.warning(_(u"Bundle missing for device {device_id}") + if e.condition == "item-not-found": + log.warning(_("Bundle missing for device {device_id}") .format(device_id=device_id)) missing.add(device_id) continue else: - log.warning(_(u"Can't get bundle for device {device_id}: {reason}") + log.warning(_("Can't get bundle for device {device_id}: {reason}") .format(device_id=device_id, reason=e)) continue if not items: - log.warning(_(u"no item found in node {node}, can't get public bundle " - u"for device {device_id}").format(node=node, + log.warning(_("no item found in node {node}, can't get public bundle " + "for device {device_id}").format(node=node, device_id=device_id)) continue if len(items) > 1: - log.warning(_(u"more than one item found in {node}, " - u"this is not expected").format(node=node)) + log.warning(_("more than one item found in {node}, " + "this is not expected").format(node=node)) item = items[0] try: bundle_elt = next(item.elements(NS_OMEMO, 'bundle')) @@ -695,23 +695,23 @@ prekeys_elt = next(bundle_elt.elements( NS_OMEMO, 'prekeys')) except StopIteration: - log.warning(_(u"invalid bundle for device {device_id}, ignoring").format( + log.warning(_("invalid bundle for device {device_id}, ignoring").format( device_id=device_id)) continue try: - spkPublic = base64.b64decode(unicode(signedPreKeyPublic_elt)) + spkPublic = base64.b64decode(str(signedPreKeyPublic_elt)) spkSignature = base64.b64decode( - unicode(signedPreKeySignature_elt)) + str(signedPreKeySignature_elt)) - ik = base64.b64decode(unicode(identityKey_elt)) + ik = base64.b64decode(str(identityKey_elt)) spk = { "key": spkPublic, "id": int(signedPreKeyPublic_elt['signedPreKeyId']) } otpks = [] for preKeyPublic_elt in prekeys_elt.elements(NS_OMEMO, 'preKeyPublic'): - preKeyPublic = base64.b64decode(unicode(preKeyPublic_elt)) + preKeyPublic = base64.b64decode(str(preKeyPublic_elt)) otpk = { "key": preKeyPublic, "id": int(preKeyPublic_elt['preKeyId']) @@ -719,7 +719,7 @@ otpks.append(otpk) except Exception as e: - log.warning(_(u"error while decoding key for device {device_id}: {msg}") + log.warning(_("error while decoding key for device {device_id}: {msg}") .format(device_id=device_id, msg=e)) continue @@ -729,20 +729,20 @@ defer.returnValue((bundles, missing)) def setBundleEb(self, failure_): - log.warning(_(u"Can't set bundle: {reason}").format(reason=failure_)) + log.warning(_("Can't set bundle: {reason}").format(reason=failure_)) def setBundle(self, client, bundle, device_id): """Set public bundle for this device. 
@param bundle(ExtendedPublicBundle): bundle to publish """ - log.debug(_(u"updating bundle for {device_id}").format(device_id=device_id)) + log.debug(_("updating bundle for {device_id}").format(device_id=device_id)) bundle = bundle.serialize(omemo_backend) bundle_elt = domish.Element((NS_OMEMO, 'bundle')) signedPreKeyPublic_elt = bundle_elt.addElement( "signedPreKeyPublic", content=b64enc(bundle["spk"]['key'])) - signedPreKeyPublic_elt['signedPreKeyId'] = unicode(bundle["spk"]['id']) + signedPreKeyPublic_elt['signedPreKeyId'] = str(bundle["spk"]['id']) bundle_elt.addElement( "signedPreKeySignature", @@ -757,7 +757,7 @@ preKeyPublic_elt = prekeys_elt.addElement( 'preKeyPublic', content=b64enc(otpk["key"])) - preKeyPublic_elt['preKeyId'] = unicode(otpk['id']) + preKeyPublic_elt['preKeyId'] = str(otpk['id']) node = NS_OMEMO_BUNDLE.format(device_id=device_id) d = self._p.sendItem(client, None, node, bundle_elt, item_id=self._p.ID_SINGLETON) @@ -782,7 +782,7 @@ if entity == client.jid.userhostJID(): own_device = client._xep_0384_device_id if own_device not in devices: - log.warning(_(u"Our own device is missing from devices list, fixing it")) + log.warning(_("Our own device is missing from devices list, fixing it")) devices.add(own_device) yield self.setDevices(client, devices) @@ -809,7 +809,7 @@ cache = client._xep_0384_cache for problem in problems: if isinstance(problem, omemo_excpt.TrustException): - untrusted[unicode(hash(problem))] = problem + untrusted[str(hash(problem))] = problem elif isinstance(problem, omemo_excpt.MissingBundleException): pb_entity = jid.JID(problem.bare_jid) entity_cache = cache.setdefault(pb_entity, {}) @@ -835,39 +835,39 @@ else: raise problem - for peer_jid, devices in missing_bundles.iteritems(): - devices_s = [unicode(d) for d in devices] + for peer_jid, devices in missing_bundles.items(): + devices_s = [str(d) for d in devices] log.warning( - _(u"Can't retrieve bundle for device(s) {devices} of entity {peer}, " - u"the message will not be readable on this/those device(s)").format( - devices=u", ".join(devices_s), peer=peer_jid.full())) + _("Can't retrieve bundle for device(s) {devices} of entity {peer}, " + "the message will not be readable on this/those device(s)").format( + devices=", ".join(devices_s), peer=peer_jid.full())) client.feedback( entity, - D_(u"You're destinee {peer} has missing encryption data on some of " - u"his/her device(s) (bundle on device {devices}), the message won't " - u"be readable on this/those device.").format( - peer=peer_jid.full(), devices=u", ".join(devices_s))) + D_("You're destinee {peer} has missing encryption data on some of " + "his/her device(s) (bundle on device {devices}), the message won't " + "be readable on this/those device.").format( + peer=peer_jid.full(), devices=", ".join(devices_s))) if untrusted: trust_data = {} - for trust_id, data in untrusted.iteritems(): + for trust_id, data in untrusted.items(): trust_data[trust_id] = { 'jid': jid.JID(data.bare_jid), 'device': data.device, 'ik': data.ik} - user_msg = D_(u"Not all destination devices are trusted, we can't encrypt " - u"message in such a situation. Please indicate if you trust " - u"those devices or not in the trust manager before we can " + user_msg = D_("Not all destination devices are trusted, we can't encrypt " + "message in such a situation. 
Please indicate if you trust " + "those devices or not in the trust manager before we can " "send this message") client.feedback(entity, user_msg) - xmlui = yield self.getTrustUI(client, trust_data=trust_data, submit_id=u"") + xmlui = yield self.getTrustUI(client, trust_data=trust_data, submit_id="") answer = yield xml_tools.deferXMLUI( self.host, xmlui, action_extra={ - u"meta_encryption_trust": NS_OMEMO, + "meta_encryption_trust": NS_OMEMO, }, profile=client.profile) yield self.trustUICb(answer, trust_data, expect_problems, client.profile) @@ -881,7 +881,7 @@ try: while True: if loop_idx > 10: - msg = _(u"Too many iterations in encryption loop") + msg = _("Too many iterations in encryption loop") log.error(msg) raise exceptions.InternalError(msg) # encryptMessage may fail, in case of e.g. trust issue or missing bundle @@ -903,7 +903,7 @@ else: break except Exception as e: - msg = _(u"Can't encrypt message for {entity}: {reason}".format( + msg = _("Can't encrypt message for {entity}: {reason}".format( entity=entity_bare_jid.full(), reason=str(e).decode('utf-8', 'replace'))) log.warning(msg) extra = {C.MESS_EXTRA_INFO: C.EXTRA_INFO_ENCR_ERR} @@ -917,7 +917,7 @@ if message_elt.getAttribute("type") == C.MESS_TYPE_GROUPCHAT: defer.returnValue(True) try: - encrypted_elt = next(message_elt.elements(NS_OMEMO, u"encrypted")) + encrypted_elt = next(message_elt.elements(NS_OMEMO, "encrypted")) except StopIteration: # no OMEMO message here defer.returnValue(True) @@ -938,40 +938,40 @@ device_id = client._xep_0384_device_id try: - header_elt = next(encrypted_elt.elements(NS_OMEMO, u'header')) - iv_elt = next(header_elt.elements(NS_OMEMO, u'iv')) + header_elt = next(encrypted_elt.elements(NS_OMEMO, 'header')) + iv_elt = next(header_elt.elements(NS_OMEMO, 'iv')) except StopIteration: - log.warning(_(u"Invalid OMEMO encrypted stanza, ignoring: {xml}") + log.warning(_("Invalid OMEMO encrypted stanza, ignoring: {xml}") .format(xml=message_elt.toXml())) defer.returnValue(False) try: s_device_id = header_elt['sid'] except KeyError: - log.warning(_(u"Invalid OMEMO encrypted stanza, missing sender device ID, " - u"ignoring: {xml}") + log.warning(_("Invalid OMEMO encrypted stanza, missing sender device ID, " + "ignoring: {xml}") .format(xml=message_elt.toXml())) defer.returnValue(False) try: - key_elt = next((e for e in header_elt.elements(NS_OMEMO, u'key') - if int(e[u'rid']) == device_id)) + key_elt = next((e for e in header_elt.elements(NS_OMEMO, 'key') + if int(e['rid']) == device_id)) except StopIteration: - log.warning(_(u"This OMEMO encrypted stanza has not been encrypted " - u"for our device (device_id: {device_id}, fingerprint: " - u"{fingerprint}): {xml}").format( + log.warning(_("This OMEMO encrypted stanza has not been encrypted " + "for our device (device_id: {device_id}, fingerprint: " + "{fingerprint}): {xml}").format( device_id=device_id, fingerprint=omemo_session.public_bundle.ik.encode('hex'), xml=encrypted_elt.toXml())) - user_msg = (D_(u"An OMEMO message from {sender} has not been encrypted for " - u"our device, we can't decrypt it").format( + user_msg = (D_("An OMEMO message from {sender} has not been encrypted for " + "our device, we can't decrypt it").format( sender=from_jid.full())) extra = {C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR} client.feedback(feedback_jid, user_msg, extra) defer.returnValue(False) except ValueError as e: - log.warning(_(u"Invalid recipient ID: {msg}".format(msg=e))) + log.warning(_("Invalid recipient ID: {msg}".format(msg=e))) defer.returnValue(False) is_pre_key = 
C.bool(key_elt.getAttribute('prekey', 'false')) - payload_elt = next(encrypted_elt.elements(NS_OMEMO, u'payload'), None) + payload_elt = next(encrypted_elt.elements(NS_OMEMO, 'payload'), None) additional_information = { "from_storage": bool(message_elt.delay) } @@ -996,9 +996,9 @@ else: post_treat.addCallback(client.encryption.markAsTrusted) except Exception as e: - log.warning(_(u"Can't decrypt message: {reason}\n{xml}").format( + log.warning(_("Can't decrypt message: {reason}\n{xml}").format( reason=e, xml=message_elt.toXml())) - user_msg = (D_(u"An OMEMO message from {sender} can't be decrypted: {reason}") + user_msg = (D_("An OMEMO message from {sender} can't be decrypted: {reason}") .format(sender=from_jid.full(), reason=e)) extra = {C.MESS_EXTRA_INFO: C.EXTRA_INFO_DECR_ERR} client.feedback(feedback_jid, user_msg, extra) @@ -1012,7 +1012,7 @@ message_elt.children.remove(encrypted_elt) if plaintext: - message_elt.addElement("body", content=plaintext.decode('utf-8')) + message_elt.addElement("body", content=plaintext) post_treat.addCallback(client.encryption.markAsEncrypted) defer.returnValue(True) @@ -1023,7 +1023,7 @@ return message_elt = mess_data["xml"] to_jid = mess_data["to"].userhostJID() - log.debug(u"encrypting message") + log.debug("encrypting message") body = None for child in list(message_elt.children): if child.name == "body": @@ -1037,21 +1037,21 @@ message_elt.children.remove(child) if body is None: - log.warning(u"No message found") + log.warning("No message found") return - encryption_data = yield self.encryptMessage(client, to_jid, unicode(body)) + encryption_data = yield self.encryptMessage(client, to_jid, str(body)) encrypted_elt = message_elt.addElement((NS_OMEMO, 'encrypted')) header_elt = encrypted_elt.addElement('header') - header_elt['sid'] = unicode(encryption_data['sid']) + header_elt['sid'] = str(encryption_data['sid']) bare_jid_s = to_jid.userhost() - for rid, data in encryption_data['keys'][bare_jid_s].iteritems(): + for rid, data in encryption_data['keys'][bare_jid_s].items(): key_elt = header_elt.addElement( 'key', content=b64enc(data['data'])) - key_elt['rid'] = unicode(rid) + key_elt['rid'] = str(rid) if data['pre_key']: key_elt['prekey'] = 'true'
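Note: two Python 2 idioms survive in the OMEMO hunks above and would fail under Python 3: ik.encode('hex') (bytes have no .encode() method and the 'hex' codec is gone) and str(e).decode('utf-8', 'replace') (str has no .decode()). A minimal sketch of the Python 3 equivalents, with a hypothetical key value:

    ik = b"\x12\x34\xab\xcd\x56\x78\x9a\xbc"   # hypothetical identity key bytes

    # Python 2: ik.encode('hex').upper()  ->  Python 3: bytes.hex()
    ik_hex = ik.hex().upper()
    fp_human = " ".join(ik_hex[i:i+8] for i in range(0, len(ik_hex), 8))
    print(fp_human)   # 1234ABCD 56789ABC

    # Python 2: str(e).decode('utf-8', 'replace')  ->  Python 3: str(e) is already text
    try:
        raise RuntimeError("encryption failed")
    except Exception as e:
        reason = str(e)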
--- a/sat/stdui/ui_contact_list.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/stdui/ui_contact_list.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT standard user interface for managing contacts
--- a/sat/stdui/ui_profile_manager.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/stdui/ui_profile_manager.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT standard user interface for managing contacts @@ -70,9 +70,9 @@ dialog = xml_tools.XMLUI("popup", title=D_("Connection error")) dialog.addText(D_("The provided profile password doesn't match.")) else: - log.error(u"Unexpected exceptions: {}".format(fail)) + log.error("Unexpected exceptions: {}".format(fail)) dialog = xml_tools.XMLUI("popup", title=D_("Internal error")) - dialog.addText(D_(u"Internal error: {}".format(fail))) + dialog.addText(D_("Internal error: {}".format(fail))) return {"xmlui": dialog.toXml(), "validated": C.BOOL_FALSE} def _authenticateProfile(self, data, profile):
--- a/sat/test/constants.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/constants.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Primitivus: a SAT frontend @@ -33,19 +33,19 @@ "test_profile5", ] JID_STR = [ - u"test@example.org/SàT", - u"sender@example.net/house", - u"sender@example.net/work", - u"sender@server.net/res", - u"xxx@server.net/res", + "test@example.org/SàT", + "sender@example.net/house", + "sender@example.net/work", + "sender@server.net/res", + "xxx@server.net/res", ] JID = [jid.JID(jid_s) for jid_s in JID_STR] PROFILE_DICT = {} - for i in xrange(0, len(PROFILE)): + for i in range(0, len(PROFILE)): PROFILE_DICT[PROFILE[i]] = JID[i] - MUC_STR = [u"room@chat.server.domain", u"sat_game@chat.server.domain"] + MUC_STR = ["room@chat.server.domain", "sat_game@chat.server.domain"] MUC = [jid.JID(jid_s) for jid_s in MUC_STR] NO_SECURITY_LIMIT = -1
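The index loop building PROFILE_DICT above is a direct 2to3 translation; a zip-based construction reads the same under both Python versions (stand-in values below, not the full test constants):

    PROFILE = ["test_profile", "test_profile2"]
    JID_STR = ["test@example.org/SàT", "sender@example.net/house"]
    PROFILE_DICT = dict(zip(PROFILE, JID_STR))
    assert PROFILE_DICT["test_profile"] == "test@example.org/SàT"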
--- a/sat/test/helpers.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/helpers.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -28,7 +28,7 @@ from sat.core import exceptions from sat.tools import config as tools_config -from constants import Const as C +from .constants import Const as C from wokkel.xmppim import RosterItem from wokkel.generic import parseXml from sat.core.xmpp import SatRosterProtocol @@ -49,7 +49,7 @@ @return: unicode conversion, according to bridge convention """ - return u"True" if value else u"False" + return "True" if value else "False" def muteLogging(): @@ -232,11 +232,11 @@ def checkCall(*args, **kwargs): if args != check_args or kwargs != check_kwargs: - print "\n\n--------------------" - print "Args are not equals:" - print "args\n----\n%s (sent)\n%s (wanted)" % (args, check_args) - print "kwargs\n------\n%s (sent)\n%s (wanted)" % (kwargs, check_kwargs) - print "--------------------\n\n" + print("\n\n--------------------") + print("Args are not equals:") + print("args\n----\n%s (sent)\n%s (wanted)" % (args, check_args)) + print("kwargs\n------\n%s (sent)\n%s (wanted)" % (kwargs, check_kwargs)) + print("--------------------\n\n") raise DifferentArgsException delattr(self, name) @@ -248,7 +248,7 @@ setattr(self, name, checkCall) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc=None): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc=None): pass def addSignal(self, name, int_suffix, signature): @@ -386,7 +386,7 @@ @param obj (domish.Element, str or unicode): message to send """ if not isinstance(obj, domish.Element): - assert(isinstance(obj, str) or isinstance(obj, unicode)) + assert(isinstance(obj, str) or isinstance(obj, str)) obj = parseXml(obj) if obj.name == 'iq': @@ -432,21 +432,21 @@ value = None setattr(elt, attr, value) if (got_elt.tag != exp_elt.tag): - print "XML are not equals (elt %s/%s):" % (got_elt, exp_elt) - print "tag: got [%s] expected: [%s]" % (got_elt.tag, exp_elt.tag) + print("XML are not equals (elt %s/%s):" % (got_elt, exp_elt)) + print("tag: got [%s] expected: [%s]" % (got_elt.tag, exp_elt.tag)) return False if (got_elt.attrib != exp_elt.attrib): - print "XML are not equals (elt %s/%s):" % (got_elt, exp_elt) - print "attribs: got %s expected %s" % (got_elt.attrib, exp_elt.attrib) + print("XML are not equals (elt %s/%s):" % (got_elt, exp_elt)) + print("attribs: got %s expected %s" % (got_elt.attrib, exp_elt.attrib)) return False if (got_elt.tail != exp_elt.tail or got_elt.text != exp_elt.text): - print "XML are not equals (elt %s/%s):" % (got_elt, exp_elt) - print "text: got [%s] expected: [%s]" % (got_elt.text, exp_elt.text) - print "tail: got [%s] expected: [%s]" % (got_elt.tail, exp_elt.tail) + print("XML are not equals (elt %s/%s):" % (got_elt, exp_elt)) + print("text: got [%s] expected: [%s]" % (got_elt.text, exp_elt.text)) + print("tail: got [%s] expected: [%s]" % (got_elt.tail, exp_elt.tail)) return False if (len(got_elt) != len(exp_elt)): - print "XML are not equals (elt %s/%s):" % (got_elt, exp_elt) - print "children len: got %d expected: %d" % (len(got_elt), len(exp_elt)) + print("XML are not equals (elt %s/%s):" % (got_elt, exp_elt)) + print("children len: got %d expected: %d" % (len(got_elt), len(exp_elt))) return False for idx, child in enumerate(got_elt): if not equalElt(child, exp_elt[idx]): @@ -461,22 +461,22 @@ expected_elt = 
etree.fromstring(remove_blank(expected) if ignore_blank else expected) if not equalElt(xml_elt, expected_elt): - print "---" - print "XML are not equals:" - print "got:\n-\n%s\n-\n\n" % etree.tostring(xml_elt, encoding='utf-8') - print "was expecting:\n-\n%s\n-\n\n" % etree.tostring(expected_elt, encoding='utf-8') - print "---" + print("---") + print("XML are not equals:") + print("got:\n-\n%s\n-\n\n" % etree.tostring(xml_elt, encoding='utf-8')) + print("was expecting:\n-\n%s\n-\n\n" % etree.tostring(expected_elt, encoding='utf-8')) + print("---") raise DifferentXMLException def assertEqualUnsortedList(self, a, b, msg): counter_a = Counter(a) counter_b = Counter(b) if counter_a != counter_b: - print "---" - print "Unsorted lists are not equals:" - print "got : %s" % counter_a - print "was expecting: %s" % counter_b + print("---") + print("Unsorted lists are not equals:") + print("got : %s" % counter_a) + print("was expecting: %s" % counter_b) if msg: - print msg - print "---" + print(msg) + print("---") raise DifferentListException
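In the helpers.py hunk above, 2to3 turned isinstance(obj, str) or isinstance(obj, unicode) into the same test twice; with str as the only text type in Python 3, one check is enough. A minimal illustration:

    def is_text(obj) -> bool:
        # Python 2:  isinstance(obj, str) or isinstance(obj, unicode)
        # 2to3:      isinstance(obj, str) or isinstance(obj, str)   (redundant)
        return isinstance(obj, str)

    assert is_text("<message/>")
    assert not is_text(b"<message/>")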
--- a/sat/test/helpers_plugins.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/helpers_plugins.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -29,7 +29,7 @@ # temporary until the changes are integrated to Wokkel from sat_tmp.wokkel.rsm import RSMResponse -from constants import Const as C +from .constants import Const as C from sat.plugins import plugin_xep_0045 from collections import OrderedDict @@ -52,7 +52,7 @@ roster = {} # ask the other profiles to fill our roster - for i in xrange(0, len(C.PROFILE)): + for i in range(0, len(C.PROFILE)): other_profile = C.PROFILE[i] if other_profile == profile: continue @@ -69,7 +69,7 @@ pass # rename our nick if it already exists - while nick in roster.keys(): + while nick in list(roster.keys()): if C.PROFILE_DICT[profile].userhost() == roster[nick].entity.userhost(): break # same user with different resource --> same nickname nick = nick + "_" @@ -79,7 +79,7 @@ self.joined_rooms[room_jid] = room # fill the other rosters with the new entry - for i in xrange(0, len(C.PROFILE)): + for i in range(0, len(C.PROFILE)): other_profile = C.PROFILE[i] if other_profile == profile: continue @@ -104,7 +104,7 @@ profile = self.host.memory.getProfileName(profile_key) room = self.joined_rooms[roomJID] # remove ourself from the other rosters - for i in xrange(0, len(C.PROFILE)): + for i in range(0, len(C.PROFILE)): other_profile = C.PROFILE[i] if other_profile == profile: continue @@ -252,7 +252,7 @@ return False for item in items: - item_obj = parseXml(item) if isinstance(item, unicode) else item + item_obj = parseXml(item) if isinstance(item, str) else item if not replace(item_obj): node.append(item_obj) return defer.succeed(None) @@ -296,6 +296,6 @@ def service_getDiscoItems(self, service, nodeIdentifier, profile_key=C.PROF_KEY_NONE): items = DiscoItems() - for item in self.__items.keys(): + for item in list(self.__items.keys()): items.append(DiscoItem(service, item)) return defer.succeed(items)
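2to3 wraps .keys() in list() conservatively; for the nickname membership test in helpers_plugins.py the dictionary itself can be tested directly, which avoids building a copy on every loop iteration:

    roster = {"alice": "alice@example.org/res1"}
    nick = "alice"
    while nick in roster:          # same result as: nick in list(roster.keys())
        nick = nick + "_"
    assert nick == "alice_"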
--- a/sat/test/test_core_xmpp.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_core_xmpp.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. from sat.test import helpers -from constants import Const +from .constants import Const from twisted.trial import unittest from sat.core import xmpp from twisted.words.protocols.jabber.jid import JID @@ -35,7 +35,7 @@ def test_init(self): """Check that init values are correctly initialised""" self.assertEqual(self.client.profile, Const.PROFILE[0]) - print self.client.host + print(self.client.host) self.assertEqual(self.client.host_app, self.host) @@ -53,7 +53,7 @@ </message> """ stanza = parseXml(xml) - self.host.bridge.expectCall("messageNew", u"sender@example.net/house", u"test", u"chat", u"test@example.org/SàT", {}, profile=Const.PROFILE[0]) + self.host.bridge.expectCall("messageNew", "sender@example.net/house", "test", "chat", "test@example.org/SàT", {}, profile=Const.PROFILE[0]) self.message.onMessage(stanza) @@ -66,12 +66,12 @@ def test__registerItem(self): roster_item = RosterItem(Const.JID[0]) - roster_item.name = u"Test Man" + roster_item.name = "Test Man" roster_item.subscriptionTo = True roster_item.subscriptionFrom = True roster_item.ask = False - roster_item.groups = set([u"Test Group 1", u"Test Group 2", u"Test Group 3"]) - self.host.bridge.expectCall("newContact", Const.JID_STR[0], {'to': 'True', 'from': 'True', 'ask': 'False', 'name': u'Test Man'}, set([u"Test Group 1", u"Test Group 2", u"Test Group 3"]), Const.PROFILE[0]) + roster_item.groups = set(["Test Group 1", "Test Group 2", "Test Group 3"]) + self.host.bridge.expectCall("newContact", Const.JID_STR[0], {'to': 'True', 'from': 'True', 'ask': 'False', 'name': 'Test Man'}, set(["Test Group 1", "Test Group 2", "Test Group 3"]), Const.PROFILE[0]) self.roster._registerItem(roster_item)
--- a/sat/test/test_helpers_plugins.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_helpers_plugins.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client
--- a/sat/test/test_memory.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_memory.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -21,7 +21,7 @@ from sat.test import helpers from twisted.trial import unittest import traceback -from constants import Const +from .constants import Const from xml.dom import minidom
--- a/sat/test/test_memory_crypto.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_memory_crypto.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -30,7 +30,7 @@ def getRandomUnicode(len): """Return a random unicode string""" - return u"".join(random.choice(string.letters + u"éáúóâêûôßüöä") for i in xrange(len)) + return "".join(random.choice(string.letters + "éáúóâêûôßüöä") for i in range(len)) class CryptoTest(helpers.SatTestCase):
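Note: string.letters was removed in Python 3 (string.ascii_letters replaces it), so the ported getRandomUnicode above still raises AttributeError. A sketch of a Python 3 version:

    import random
    import string

    def get_random_unicode(length):
        """Return a random unicode string of the given length."""
        alphabet = string.ascii_letters + "éáúóâêûôßüöä"
        return "".join(random.choice(alphabet) for _ in range(length))

    print(get_random_unicode(10))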
--- a/sat/test/test_plugin_misc_groupblog.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_misc_groupblog.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -20,7 +20,7 @@ """ Plugin groupblogs """ -from constants import Const as C +from .constants import Const as C from sat.test import helpers, helpers_plugins from sat.plugins import plugin_misc_groupblog from sat.plugins import plugin_xep_0060 @@ -29,30 +29,31 @@ from sat.plugins import plugin_misc_text_syntaxes from twisted.internet import defer from twisted.words.protocols.jabber import jid +import importlib NS_PUBSUB = "http://jabber.org/protocol/pubsub" DO_NOT_COUNT_COMMENTS = -1 -SERVICE = u"pubsub.example.com" -PUBLISHER = u"test@example.org" -OTHER_PUBLISHER = u"other@xmpp.net" -NODE_ID = u"urn:xmpp:groupblog:{publisher}".format(publisher=PUBLISHER) -OTHER_NODE_ID = u"urn:xmpp:groupblog:{publisher}".format(publisher=OTHER_PUBLISHER) -ITEM_ID_1 = u"c745a688-9b02-11e3-a1a3-c0143dd4fe51" -COMMENT_ID_1 = u"d745a688-9b02-11e3-a1a3-c0143dd4fe52" -COMMENT_ID_2 = u"e745a688-9b02-11e3-a1a3-c0143dd4fe53" +SERVICE = "pubsub.example.com" +PUBLISHER = "test@example.org" +OTHER_PUBLISHER = "other@xmpp.net" +NODE_ID = "urn:xmpp:groupblog:{publisher}".format(publisher=PUBLISHER) +OTHER_NODE_ID = "urn:xmpp:groupblog:{publisher}".format(publisher=OTHER_PUBLISHER) +ITEM_ID_1 = "c745a688-9b02-11e3-a1a3-c0143dd4fe51" +COMMENT_ID_1 = "d745a688-9b02-11e3-a1a3-c0143dd4fe52" +COMMENT_ID_2 = "e745a688-9b02-11e3-a1a3-c0143dd4fe53" def COMMENTS_NODE_ID(publisher=PUBLISHER): - return u"urn:xmpp:comments:_{id}__urn:xmpp:groupblog:{publisher}".format( + return "urn:xmpp:comments:_{id}__urn:xmpp:groupblog:{publisher}".format( id=ITEM_ID_1, publisher=publisher ) def COMMENTS_NODE_URL(publisher=PUBLISHER): - return u"xmpp:{service}?node={node}".format( + return "xmpp:{service}?node={node}".format( service=SERVICE, id=ITEM_ID_1, node=COMMENTS_NODE_ID(publisher).replace(":", "%3A").replace("@", "%40"), @@ -60,7 +61,7 @@ def ITEM(publisher=PUBLISHER): - return u""" + return """ <item id='{id}' xmlns='{ns}'> <entry> <title type='text'>The Uses of This World</title> @@ -82,7 +83,7 @@ def COMMENT(id_=COMMENT_ID_1): - return u""" + return """ <item id='{id}' xmlns='{ns}'> <entry> <title type='text'>The Uses of This World</title> @@ -113,7 +114,7 @@ "comments_node": COMMENTS_NODE_ID_1, } if count != DO_NOT_COUNT_COMMENTS: - res.update({"comments_count": unicode(count)}) + res.update({"comments_count": str(count)}) return res @@ -154,7 +155,7 @@ self.host = helpers.FakeSAT() self.host.plugins["XEP-0060"] = plugin_xep_0060.XEP_0060(self.host) self.host.plugins["XEP-0163"] = plugin_xep_0163.XEP_0163(self.host) - reload(plugin_misc_text_syntaxes) # reload the plugin to avoid conflict error + importlib.reload(plugin_misc_text_syntaxes) # reload the plugin to avoid conflict error self.host.plugins["TEXT_SYNTAXES"] = plugin_misc_text_syntaxes.TextSyntaxes( self.host ) @@ -215,7 +216,7 @@ def test_updateGroupBlog(self): pub_data = (SERVICE, NODE_ID, ITEM_ID_1) - new_text = u"silfu23RFWUP)IWNOEIOEFÖ" + new_text = "silfu23RFWUP)IWNOEIOEFÖ" self._initialise(C.PROFILE[0]) d = self.psclient.publish(SERVICE, NODE_ID, [ITEM_1]) @@ -370,7 +371,7 @@ ) def cb(atom): - self.assertIsInstance(atom, unicode) + self.assertIsInstance(atom, str) self.assertTrue(atom.startswith('<?xml version="1.0" encoding="utf-8"?>')) return d.addCallback(cb)
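The builtin reload() is gone in Python 3; importlib.reload() is the replacement used in the test setUp above. For reference:

    import importlib
    import json

    json = importlib.reload(json)   # Python 2: reload(json)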
--- a/sat/test/test_plugin_misc_radiocol.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_misc_radiocol.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -24,7 +24,7 @@ from sat.test import helpers, helpers_plugins from sat.plugins import plugin_misc_radiocol as plugin from sat.plugins import plugin_misc_room_game as plugin_room_game -from constants import Const +from .constants import Const from twisted.words.protocols.jabber.jid import JID from twisted.words.xish import domish @@ -40,7 +40,7 @@ from mutagen.id3 import ID3NoHeaderError except ImportError: raise exceptions.MissingModule( - u"Missing module Mutagen, please download/install from https://bitbucket.org/lazka/mutagen" + "Missing module Mutagen, please download/install from https://bitbucket.org/lazka/mutagen" ) import uuid @@ -91,7 +91,7 @@ content += "/>" else: content += ">" - for i in xrange(0, len(players)): + for i in range(0, len(players)): content += "<player index='%s'>%s</player>" % (i, players[i]) content += "</started>" return content @@ -168,10 +168,10 @@ """ sent = self.host.getSentMessage(0) attrs["sender"] = self.plugin_0045.getNick(0, profile_index) - radiocol_elt = domish.generateElementsNamed(sent.elements(), "radiocol").next() - preload_elt = domish.generateElementsNamed( + radiocol_elt = next(domish.generateElementsNamed(sent.elements(), "radiocol")) + preload_elt = next(domish.generateElementsNamed( radiocol_elt.elements(), "preload" - ).next() + )) attrs["timestamp"] = preload_elt["timestamp"] # we could not guess it... content = "<preload xmlns='' %s/>" % " ".join( ["%s='%s'" % (attr, attrs[attr]) for attr in attrs] @@ -301,7 +301,7 @@ else room.roster[recipient].entity.full() ) - for index in xrange(0, len(Const.PROFILE)): + for index in range(0, len(Const.PROFILE)): nick = self.plugin_0045.getNick(0, index) if nick: if not recipient or nick == recipient: @@ -410,7 +410,7 @@ @param profile_index: index of the uploader's profile """ if song_index is None: - dst_filepath = unicode(uuid.uuid1()) + dst_filepath = str(uuid.uuid1()) expect_io_error = True else: song_index = song_index % len(self.songs) @@ -483,7 +483,7 @@ # another songs are added by Const.JID[1] until the radio starts + 1 to fill the queue # when the first song starts + 1 to be rejected because the queue is full - for song_index in xrange(1, plugin.QUEUE_TO_START + 1): + for song_index in range(1, plugin.QUEUE_TO_START + 1): self._uploadSong(song_index, 1) self.plugin.playNext(Const.MUC[0], PROFILE) # simulate the end of the first song
--- a/sat/test/test_plugin_misc_room_game.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_misc_room_game.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -21,7 +21,7 @@ """ Tests for the plugin room game (base class for MUC games) """ from sat.core.i18n import _ -from constants import Const +from .constants import Const from sat.test import helpers, helpers_plugins from sat.plugins import plugin_misc_room_game as plugin from twisted.words.protocols.jabber.jid import JID @@ -77,7 +77,7 @@ content += "/>" else: content += ">" - for i in xrange(0, len(players)): + for i in range(0, len(players)): content += "<player index='%s'>%s</player>" % (i, players[i]) content += "</%s>" % tag return "<message to='%s' type='%s'><%s xmlns='%s'>%s</dummy></message>" % (
--- a/sat/test/test_plugin_misc_text_syntaxes.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_misc_text_syntaxes.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -23,6 +23,7 @@ from sat.plugins import plugin_misc_text_syntaxes from twisted.trial.unittest import SkipTest import re +import importlib class SanitisationTest(helpers.SatTestCase): @@ -58,11 +59,11 @@ def setUp(self): self.host = helpers.FakeSAT() - reload(plugin_misc_text_syntaxes) # reload the plugin to avoid conflict error + importlib.reload(plugin_misc_text_syntaxes) # reload the plugin to avoid conflict error self.text_syntaxes = plugin_misc_text_syntaxes.TextSyntaxes(self.host) def test_xhtml_sanitise(self): - expected = u"""<div> + expected = """<div> <style>/* deleted */</style> <body> <a href="">a link</a> @@ -82,7 +83,7 @@ return d def test_styles_sanitise(self): - expected = u"""<p style="color: blue">test <strong>retest</strong><br/><span style="color: #cf2828; font-size: 3px; color: red; color: red !important; font-size: 100px !important; font-size: 100%; font-size: 100px; font-size: 100; font-size: 100 %; color: rgba(0, 0, 0, 0.1); color: rgb(35,79,255); background-color: no-repeat"> toto </span></p>""" + expected = """<p style="color: blue">test <strong>retest</strong><br/><span style="color: #cf2828; font-size: 3px; color: red; color: red !important; font-size: 100px !important; font-size: 100%; font-size: 100px; font-size: 100; font-size: 100 %; color: rgba(0, 0, 0, 0.1); color: rgb(35,79,255); background-color: no-repeat"> toto </span></p>""" d = self.text_syntaxes.cleanXHTML(self.EVIL_HTML2) d.addCallback(self.assertEqualXML, expected) @@ -105,10 +106,10 @@ return d def test_removeXHTMLMarkups(self): - expected = u""" a link another link a paragraph secret EVIL! of EVIL! Password: annoying EVIL! spam spam SPAM! """ + expected = """ a link another link a paragraph secret EVIL! of EVIL! Password: annoying EVIL! spam spam SPAM! """ result = self.text_syntaxes._removeMarkups(self.EVIL_HTML1) self.assertEqual(re.sub(r"\s+", " ", result).rstrip(), expected.rstrip()) - expected = u"""test retest toto""" + expected = """test retest toto""" result = self.text_syntaxes._removeMarkups(self.EVIL_HTML2) self.assertEqual(re.sub(r"\s+", " ", result).rstrip(), expected.rstrip())
--- a/sat/test/test_plugin_xep_0033.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_xep_0033.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -20,7 +20,7 @@ """ Plugin extended addressing stanzas """ -from constants import Const +from .constants import Const from sat.test import helpers from sat.plugins import plugin_xep_0033 as plugin from sat.core.exceptions import CancelError @@ -46,7 +46,7 @@ def test_messageReceived(self): self.host.memory.reinit() - xml = u""" + xml = """ <message type="chat" from="%s" to="%s" id="test_1"> <body>test</body> <addresses xmlns='http://jabber.org/protocol/address'> @@ -87,7 +87,7 @@ "extra": {}, } mess_data["extra"]["address"] = "%s:%s\n%s:%s\n%s:%s\n" % ADDRS - original_stanza = u""" + original_stanza = """ <message type="chat" from="%s" to="%s" id="test_1"> <body>content</body> </message>
--- a/sat/test/test_plugin_xep_0085.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_xep_0085.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -20,7 +20,7 @@ """ Plugin chat states notification tests """ -from constants import Const +from .constants import Const from sat.test import helpers from sat.core.constants import Const as C from sat.plugins import plugin_xep_0085 as plugin @@ -43,7 +43,7 @@ def test_messageReceived(self): for state in plugin.CHAT_STATES: - xml = u""" + xml = """ <message type="chat" from="%s" to="%s" id="test_1"> %s <%s xmlns='%s'/> @@ -77,7 +77,7 @@ "message": "content", "extra": {} if state == "active" else {"chat_state": state}, } - stanza = u""" + stanza = """ <message type="chat" from="%s" to="%s" id="test_1"> %s </message>
--- a/sat/test/test_plugin_xep_0203.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_xep_0203.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client
--- a/sat/test/test_plugin_xep_0277.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_xep_0277.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -25,12 +25,13 @@ from sat.plugins import plugin_misc_text_syntaxes from sat.tools.xml_tools import ElementParser from wokkel.pubsub import NS_PUBSUB +import importlib class XEP_0277Test(helpers.SatTestCase): PUBSUB_ENTRY_1 = ( - u""" + """ <item id="c745a688-9b02-11e3-a1a3-c0143dd4fe51"> <entry xmlns="%s"> <title type="text"><span>titre</span></title> @@ -49,7 +50,7 @@ ) PUBSUB_ENTRY_2 = ( - u""" + """ <item id="c745a688-9b02-11e3-a1a3-c0143dd4fe51"> <entry xmlns='%s'> <title type="text"><div>titre</div></title> @@ -81,7 +82,7 @@ self.host.plugins["XEP-0060"] = plugin_xep_0060.XEP_0060(self.host) self.host.plugins["XEP-0163"] = XEP_0163(self.host) - reload(plugin_misc_text_syntaxes) # reload the plugin to avoid conflict error + importlib.reload(plugin_misc_text_syntaxes) # reload the plugin to avoid conflict error self.host.plugins["TEXT_SYNTAXES"] = plugin_misc_text_syntaxes.TextSyntaxes( self.host ) @@ -89,17 +90,17 @@ def test_item2mbdata_1(self): expected = { - u"id": u"c745a688-9b02-11e3-a1a3-c0143dd4fe51", - u"atom_id": u"c745a688-9b02-11e3-a1a3-c0143dd4fe51", - u"title": u"<span>titre</span>", - u"updated": u"1392992199.0", - u"published": u"1392992198.0", - u"content": u"<p>contenu</p>texte sans balise<p>autre contenu</p>", - u"content_xhtml": u"<div><p>contenu</p>texte sans balise<p>autre contenu</p></div>", - u"author": u"test1@souliane.org", + "id": "c745a688-9b02-11e3-a1a3-c0143dd4fe51", + "atom_id": "c745a688-9b02-11e3-a1a3-c0143dd4fe51", + "title": "<span>titre</span>", + "updated": "1392992199.0", + "published": "1392992198.0", + "content": "<p>contenu</p>texte sans balise<p>autre contenu</p>", + "content_xhtml": "<div><p>contenu</p>texte sans balise<p>autre contenu</p></div>", + "author": "test1@souliane.org", } item_elt = ( - ElementParser()(self.PUBSUB_ENTRY_1, namespace=NS_PUBSUB).elements().next() + next(ElementParser()(self.PUBSUB_ENTRY_1, namespace=NS_PUBSUB).elements()) ) d = self.plugin.item2mbdata(item_elt) d.addCallback(self.assertEqual, expected) @@ -107,18 +108,18 @@ def test_item2mbdata_2(self): expected = { - u"id": u"c745a688-9b02-11e3-a1a3-c0143dd4fe51", - u"atom_id": u"c745a688-9b02-11e3-a1a3-c0143dd4fe51", - u"title": u"<div>titre</div>", - u"title_xhtml": u'<div><div style="">titre</div></div>', - u"updated": u"1392992199.0", - u"published": u"1392992198.0", - u"content": u"<div><p>contenu</p>texte dans balise<p>autre contenu</p></div>", - u"content_xhtml": u"<div><p>contenu</p>texte dans balise<p>autre contenu</p></div>", - u"author": u"test1@souliane.org", + "id": "c745a688-9b02-11e3-a1a3-c0143dd4fe51", + "atom_id": "c745a688-9b02-11e3-a1a3-c0143dd4fe51", + "title": "<div>titre</div>", + "title_xhtml": '<div><div style="">titre</div></div>', + "updated": "1392992199.0", + "published": "1392992198.0", + "content": "<div><p>contenu</p>texte dans balise<p>autre contenu</p></div>", + "content_xhtml": "<div><p>contenu</p>texte dans balise<p>autre contenu</p></div>", + "author": "test1@souliane.org", } item_elt = ( - ElementParser()(self.PUBSUB_ENTRY_2, namespace=NS_PUBSUB).elements().next() + next(ElementParser()(self.PUBSUB_ENTRY_2, namespace=NS_PUBSUB).elements()) ) d = self.plugin.item2mbdata(item_elt) d.addCallback(self.assertEqual, expected)
--- a/sat/test/test_plugin_xep_0297.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_xep_0297.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -20,7 +20,7 @@ """ Plugin XEP-0297 """ -from constants import Const as C +from .constants import Const as C from sat.test import helpers from sat.plugins.plugin_xep_0203 import XEP_0203 from sat.plugins.plugin_xep_0297 import XEP_0297
--- a/sat/test/test_plugin_xep_0313.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_xep_0313.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -20,7 +20,7 @@ """ Plugin XEP-0313 """ -from constants import Const as C +from .constants import Const as C from sat.test import helpers from sat.plugins.plugin_xep_0313 import XEP_0313 from twisted.words.protocols.jabber.jid import JID @@ -209,7 +209,7 @@ ) start = datetime.datetime(2010, 8, 7, 0, 0, 0, tzinfo=tzutc()) form = buildForm(start=start) - rsm = RSMRequest(max_=10, after=u"09af3-cc343-b409f") + rsm = RSMRequest(max_=10, after="09af3-cc343-b409f") d = self.plugin.queryArchive(self.client, MAMRequest(form, rsm), SERVICE_JID) d.addCallback( lambda __: self.assertEqualXML(self.host.getSentMessageXml(0), xml, True)
--- a/sat/test/test_plugin_xep_0334.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/test/test_plugin_xep_0334.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -20,7 +20,7 @@ """ Plugin XEP-0334 """ -from constants import Const as C +from .constants import Const as C from sat.test import helpers from sat.plugins.plugin_xep_0334 import XEP_0334 from twisted.internet import defer
--- a/sat/tools/async_trigger.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/async_trigger.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client
--- a/sat/tools/common/ansi.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/ansi.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -26,7 +26,7 @@ RESET = "\033[0m" NORMAL_WEIGHT = "\033[22m" FG_BLACK, FG_RED, FG_GREEN, FG_YELLOW, FG_BLUE, FG_MAGENTA, FG_CYAN, FG_WHITE = ( - "\033[3%dm" % nb for nb in xrange(8) + "\033[3%dm" % nb for nb in range(8) ) BOLD = "\033[1m" BLINK = "\033[5m" @@ -42,7 +42,7 @@ if args[-1] != cls.RESET: args = list(args) args.append(cls.RESET) - return u"".join(args) + return "".join(args) try: @@ -55,6 +55,6 @@ if not tty: # we don't want ANSI escape codes if we are not outputing to a tty! for attr in dir(ANSI): - if isinstance(getattr(ANSI, attr), basestring): - setattr(ANSI, attr, u"") + if isinstance(getattr(ANSI, attr), str): + setattr(ANSI, attr, "") del tty
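A minimal illustration of the tty check ported in ansi.py above: when stdout is not a terminal, every string attribute of the ANSI class is blanked so no escape codes leak into redirected output (simplified class, only two codes shown):

    import sys

    class ANSI:
        RESET = "\033[0m"
        FG_RED = "\033[31m"

    if not sys.stdout.isatty():
        for attr in dir(ANSI):
            if isinstance(getattr(ANSI, attr), str):
                setattr(ANSI, attr, "")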
--- a/sat/tools/common/async_process.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/async_process.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -56,7 +56,7 @@ return os.path.splitext(os.path.basename(self.command))[0].decode('utf-8', 'ignore') else: - return u'' + return '' def connectionMade(self): if self._stdin is not None: @@ -74,22 +74,22 @@ self.err_data.append(data) def processEnded(self, reason): - data = ''.join(self.data) + data = b''.join(self.data) if (reason.value.exitCode == 0): - log.debug(_(u'{name} command succeed').format(name=self.command_name)) + log.debug(_('{name} command succeed').format(name=self.command_name)) # we don't use "replace" on purpose, we want an exception if decoding # is not working properly - self._deferred.callback(data.encode('utf-8')) + self._deferred.callback(data) else: - err_data = u''.join(self.err_data) + err_data = b''.join(self.err_data) - msg = (_(u"Can't complete {name} command (error code: {code}):\n" - u"stderr:\n{stderr}\n{stdout}\n") + msg = (_("Can't complete {name} command (error code: {code}):\n" + "stderr:\n{stderr}\n{stdout}\n") .format(name = self.command_name, code = reason.value.exitCode, stderr= err_data.encode('utf-8', 'replace'), - stdout = "stdout: " + data.encode('utf-8', 'replace') - if data else u'', + stdout = "stdout: " + data.decode('utf-8', 'replace') + if data else '', )) self._deferred.errback(Failure(exceptions.CommandException( msg, data, err_data))) @@ -114,10 +114,10 @@ if stdin is not None: stdin = stdin.encode('utf-8') verbose = kwargs.pop('verbose', False) - if u'path' in kwargs: - kwargs[u'path'] = kwargs[u'path'].encode('utf-8') + if 'path' in kwargs: + kwargs['path'] = kwargs['path'].encode('utf-8') args = [a.encode('utf-8') for a in args] - kwargs = {k:v.encode('utf-8') for k,v in kwargs.items()} + kwargs = {k:v.encode('utf-8') for k,v in list(kwargs.items())} d = defer.Deferred() prot = cls(d, stdin=stdin) if verbose: @@ -125,12 +125,12 @@ if cls.command is None: if not args: raise ValueError( - u"You must either specify cls.command or use a full path to command " - u"to execute as first argument") + "You must either specify cls.command or use a full path to command " + "to execute as first argument") command = args.pop(0) if prot.name is None: name = os.path.splitext(os.path.basename(command))[0] - prot.name = name.encode(u'utf-8', u'ignore') + prot.name = name else: command = cls.command cmd_args = [os.path.basename(command)] + args
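Note: in the async_process.py hunk above the collected output is now bytes (b''.join), and the stdout branch of the error message was switched to .decode(), but the stderr branch still calls err_data.encode('utf-8', 'replace'), which bytes don't have in Python 3. Presumably both need decoding, roughly:

    err_data = b"command failed: \xc3\xa9"        # hypothetical captured stderr
    data = b""                                    # hypothetical captured stdout

    msg = ("Can't complete command (error code: {code}):\n"
           "stderr:\n{stderr}\n{stdout}\n").format(
        code=1,
        stderr=err_data.decode("utf-8", "replace"),
        stdout="stdout: " + data.decode("utf-8", "replace") if data else "",
    )
    print(msg)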
--- a/sat/tools/common/data_format.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/data_format.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -50,7 +50,7 @@ idx = 1 while True: try: - yield get(dict_, u"{}#{}".format(name, idx)) + yield get(dict_, "{}#{}".format(name, idx)) except KeyError: return else: @@ -73,7 +73,7 @@ ret = {name: main_value} for k in extra_keys: ret[k] = get( - dict_, u"{}{}_{}".format(name, (u"#" + unicode(idx)) if idx else u"", k) + dict_, "{}{}_{}".format(name, ("#" + str(idx)) if idx else "", k) ) yield ret @@ -98,14 +98,14 @@ if idx == 0: key = name else: - key = u"{}#{}".format(name, idx) + key = "{}#{}".format(name, idx) if check_conflict and key in dict_: raise exceptions.ConflictError dict_[key] = value return dict -def getSubDict(name, dict_, sep=u"_"): +def getSubDict(name, dict_, sep="_"): """get a sub dictionary from a serialised dictionary look for keys starting with name, and create a dict with it @@ -116,7 +116,7 @@ @param sep(unicode): separator used between name and subkey @return iter: iterate through the deserialised items """ - for k, v in dict_.iteritems(): + for k, v in dict_.items(): if k.startswith(name): if k == name: yield None, v @@ -131,7 +131,7 @@ @return(unicode): serialised data, can be transmitted as string to the bridge """ - return json.dumps(data, ensure_ascii=False, default=unicode) + return json.dumps(data, ensure_ascii=False, default=str) def deserialise(serialised_data, default=None, type_check=dict): """Deserialize data from bridge @@ -142,10 +142,10 @@ @return(object): deserialised data @raise ValueError: serialised_data is of wrong type """ - if serialised_data == u"": + if serialised_data == "": return default ret = json.loads(serialised_data) if type_check is not None and not isinstance(ret, type_check): - raise ValueError(u"Bad data type, was expecting {type_check}, got {real_type}" + raise ValueError("Bad data type, was expecting {type_check}, got {real_type}" .format(type_check=type_check, real_type=type(ret))) return ret
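The default=str argument added to json.dumps above makes any value json cannot encode natively fall back to its text form, which lets bridge serialisation handle objects such as JIDs or datetimes. A small example with an illustrative value:

    import json
    from datetime import datetime

    data = {"stamp": datetime(2019, 8, 13, 19, 8, 41)}
    serialised = json.dumps(data, ensure_ascii=False, default=str)
    print(serialised)   # {"stamp": "2019-08-13 19:08:41"}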
--- a/sat/tools/common/data_objects.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/data_objects.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -25,12 +25,12 @@ try: from jinja2 import Markup as safe except ImportError: - safe = unicode + safe = str from sat.tools.common import uri as xmpp_uri -import urllib +import urllib.request, urllib.parse, urllib.error -q = lambda value: urllib.quote(value.encode("utf-8"), safe="@") +q = lambda value: urllib.parse.quote(value.encode("utf-8"), safe="@") def parsePubSubMetadata(metadata, items): @@ -49,25 +49,25 @@ data = {} assert "complete" not in metadata - for key, value in metadata.iteritems(): - if key in (u"rsm_index", u"rsm_count"): + for key, value in metadata.items(): + if key in ("rsm_index", "rsm_count"): value = int(value) - elif key == u"mam_stable": + elif key == "mam_stable": value = C.bool(value) - elif key == u"mam_complete": - key = u"complete" + elif key == "mam_complete": + key = "complete" value = C.bool(value) data[key] = value - if u"complete" not in data: - index = data.get(u"rsm_index") - count = data.get(u"rsm_count") + if "complete" not in data: + index = data.get("rsm_index") + count = data.get("rsm_count") if index is None or count is None: # we don't have enough information to know if the data is complete or not - data[u"complete"] = None + data["complete"] = None else: # normally we have a strict equality here but XEP-0059 states # that index MAY be approximative, so just in case… - data[u"complete"] = index + len(items) >= count + data["complete"] = index + len(items) >= count return data @@ -88,75 +88,75 @@ @property def id(self): - return self.mb_data.get(u"id") + return self.mb_data.get("id") @property def atom_id(self): - return self.mb_data.get(u"atom_id") + return self.mb_data.get("atom_id") @property def uri(self): node = self.parent.node service = self.parent.service return xmpp_uri.buildXMPPUri( - u"pubsub", subtype=u"microblog", path=service, node=node, item=self.id + "pubsub", subtype="microblog", path=service, node=node, item=self.id ) @property def published(self): - return self.mb_data.get(u"published") + return self.mb_data.get("published") @property def updated(self): - return self.mb_data.get(u"updated") + return self.mb_data.get("updated") @property def language(self): - return self.mb_data.get(u"language") + return self.mb_data.get("language") @property def author(self): - return self.mb_data.get(u"author") + return self.mb_data.get("author") @property def author_jid(self): - return self.mb_data.get(u"author_jid") + return self.mb_data.get("author_jid") @property def author_jid_verified(self): - return self.mb_data.get(u"author_jid_verified") + return self.mb_data.get("author_jid_verified") @property def author_email(self): - return self.mb_data.get(u"author_email") + return self.mb_data.get("author_email") @property def tags(self): - return self.mb_data.get(u'tags', []) + return self.mb_data.get('tags', []) @property def groups(self): - return self.mb_data.get(u'groups', []) + return self.mb_data.get('groups', []) @property def title(self): - return self.mb_data.get(u"title") + return self.mb_data.get("title") @property def title_xhtml(self): try: - return safe(self.mb_data[u"title_xhtml"]) + return safe(self.mb_data["title_xhtml"]) except KeyError: return None @property def content(self): - return self.mb_data.get(u"content") + return self.mb_data.get("content") @property def 
content_xhtml(self): try: - return safe(self.mb_data[u"content_xhtml"]) + return safe(self.mb_data["content_xhtml"]) except KeyError: return None @@ -164,17 +164,17 @@ def comments(self): if self._comments is None: self._comments = data_format.dict2iterdict( - u"comments", self.mb_data, (u"node", u"service") + "comments", self.mb_data, ("node", "service") ) return self._comments @property def comments_service(self): - return self.mb_data.get(u"comments_service") + return self.mb_data.get("comments_service") @property def comments_node(self): - return self.mb_data.get(u"comments_node") + return self.mb_data.get("comments_node") @property def comments_items_list(self): @@ -199,40 +199,40 @@ @property def service(self): - return self.metadata[u"service"] + return self.metadata["service"] @property def node(self): - return self.metadata[u"node"] + return self.metadata["node"] @property def uri(self): - return self.metadata[u"uri"] + return self.metadata["uri"] @property def with_rsm(self): """Return True if RSM is activated on this request""" - return u"rsm_first" in self.metadata + return "rsm_first" in self.metadata @property def rsm_first(self): - return self.metadata[u"rsm_first"] + return self.metadata["rsm_first"] @property def rsm_last(self): - return self.metadata[u"rsm_last"] + return self.metadata["rsm_last"] @property def rsm_index(self): - return self.metadata[u"rsm_index"] + return self.metadata["rsm_index"] @property def rsm_count(self): - return self.metadata[u"rsm_count"] + return self.metadata["rsm_count"] @property def complete(self): - return self.metadata[u"complete"] + return self.metadata["complete"] def __len__(self): return self.items.__len__() @@ -282,14 +282,14 @@ try: return self._message_data[""] except KeyError: - return next(self._message_data.itervalues()) + return next(iter(self._message_data.values())) @property def subject(self): try: return self._subject_data[""] except KeyError: - return next(self._subject_data.itervalues()) + return next(iter(self._subject_data.values())) @property def type(self): @@ -322,7 +322,7 @@ try: return safe(self._html[""]) except KeyError: - return safe(next(self._html.itervalues())) + return safe(next(iter(self._html.values()))) class Messages(object): @@ -419,5 +419,5 @@ values will be quoted before being used """ return self.url.format( - *[q(a) for a in args], **{k: ObjectQuoter(v) for k, v in kwargs.iteritems()} + *[q(a) for a in args], **{k: ObjectQuoter(v) for k, v in kwargs.items()} )
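dict.itervalues() no longer exists in Python 3; next(iter(d.values())) gives the same "first available value" behaviour used by the Message properties above, without building a list:

    message_data = {"fr": "salut", "": "hello"}
    try:
        body = message_data[""]
    except KeyError:
        body = next(iter(message_data.values()))
    assert body == "hello"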
--- a/sat/tools/common/date_utils.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/date_utils.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -30,14 +30,14 @@ import time import re -RELATIVE_RE = re.compile(ur"(?P<date>.*?)(?P<direction>[-+]?) *(?P<quantity>\d+) *" - ur"(?P<unit>(second|minute|hour|day|week|month|year))s?" - ur"(?P<ago> +ago)?", re.I) -YEAR_FIRST_RE = re.compile(ur"\d{4}[^\d]+") +RELATIVE_RE = re.compile(r"(?P<date>.*?)(?P<direction>[-+]?) *(?P<quantity>\d+) *" + r"(?P<unit>(second|minute|hour|day|week|month|year))s?" + r"(?P<ago> +ago)?", re.I) +YEAR_FIRST_RE = re.compile(r"\d{4}[^\d]+") TZ_UTC = tz.tzutc() TZ_LOCAL = tz.gettz() # used to replace values when something is missing -DEFAULT_DATETIME = datetime.datetime(2000, 01, 01) +DEFAULT_DATETIME = datetime.datetime(2000, 0o1, 0o1) def date_parse(value, default_tz=TZ_UTC): @@ -47,7 +47,7 @@ @param default_tz(datetime.tzinfo): default timezone @return (int): timestamp """ - value = unicode(value).strip() + value = str(value).strip() dayfirst = False if YEAR_FIRST_RE.match(value) else True dt = default_tzinfo( @@ -71,23 +71,23 @@ if m is None: return date_parse(value, default_tz=default_tz) - if m.group(u"direction") and m.group(u"ago"): + if m.group("direction") and m.group("ago"): raise ValueError( - _(u"You can't use a direction (+ or -) and \"ago\" at the same time")) + _("You can't use a direction (+ or -) and \"ago\" at the same time")) - if m.group(u"direction") == u'-' or m.group(u"ago"): + if m.group("direction") == '-' or m.group("ago"): direction = -1 else: direction = 1 - date = m.group(u"date").strip().lower() - if not date or date == u"now": + date = m.group("date").strip().lower() + if not date or date == "now": dt = datetime.datetime.now(tz.tzutc()) else: dt = default_tzinfo(parser.parse(date, dayfirst=True)) - quantity = int(m.group(u"quantity")) - key = m.group(u"unit").lower() + u"s" + quantity = int(m.group("quantity")) + key = m.group("unit").lower() + "s" delta_kw = {key: direction * quantity} dt = dt + relativedelta(**delta_kw) return calendar.timegm(dt.utctimetuple())
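2to3 converts the old literals 01 into 0o1 in date_utils.py; the value is unchanged, but plain integers are the clearer spelling for the default date:

    import datetime

    DEFAULT_DATETIME = datetime.datetime(2000, 1, 1)
    assert DEFAULT_DATETIME == datetime.datetime(2000, 0o1, 0o1)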
--- a/sat/tools/common/dynamic_import.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/dynamic_import.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP
--- a/sat/tools/common/email.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/email.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a jabber client @@ -19,14 +19,14 @@ """email sending facilities""" -from __future__ import absolute_import + from email.mime.text import MIMEText from twisted.mail import smtp from sat.core.constants import Const as C from sat.tools import config as tools_config -def sendEmail(config, to_emails, subject=u"", body=u"", from_email=None): +def sendEmail(config, to_emails, subject="", body="", from_email=None): """Send an email using SàT configuration @param config (SafeConfigParser): the configuration instance @@ -37,26 +37,26 @@ @param from_email(unicode): address of the sender @return (D): same as smtp.sendmail """ - if isinstance(to_emails, basestring): + if isinstance(to_emails, str): to_emails = to_emails.split() - email_host = tools_config.getConfig(config, None, u"email_server") or u"localhost" - email_from = tools_config.getConfig(config, None, u"email_from") + email_host = tools_config.getConfig(config, None, "email_server") or "localhost" + email_from = tools_config.getConfig(config, None, "email_from") if email_from is None: # we suppose that email domain and XMPP domain are identical - domain = tools_config.getConfig(config, None, u"xmpp_domain", u"example.net") - email_from = u"no_reply@" + domain - email_sender_domain = tools_config.getConfig(config, None, u"email_sender_domain") - email_port = int(tools_config.getConfig(config, None, u"email_port", 25)) - email_username = tools_config.getConfig(config, None, u"email_username") - email_password = tools_config.getConfig(config, None, u"email_password") + domain = tools_config.getConfig(config, None, "xmpp_domain", "example.net") + email_from = "no_reply@" + domain + email_sender_domain = tools_config.getConfig(config, None, "email_sender_domain") + email_port = int(tools_config.getConfig(config, None, "email_port", 25)) + email_username = tools_config.getConfig(config, None, "email_username") + email_password = tools_config.getConfig(config, None, "email_password") email_auth = C.bool(tools_config.getConfig(config, None, "email_auth", C.BOOL_FALSE)) email_starttls = C.bool(tools_config.getConfig(config, None, "email_starttls", C.BOOL_FALSE)) msg = MIMEText(body, "plain", "UTF-8") - msg[u"Subject"] = subject - msg[u"From"] = email_from - msg[u"To"] = u", ".join(to_emails) + msg["Subject"] = subject + msg["From"] = email_from + msg["To"] = ", ".join(to_emails) return smtp.sendmail( email_host.encode("utf-8"),
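basestring is gone in Python 3, so the recipient check in sendEmail above now tests str only. A minimal sketch of the "single address or list of addresses" convention it implements:

    def normalise_recipients(to_emails):
        if isinstance(to_emails, str):      # Python 2: isinstance(to_emails, basestring)
            to_emails = to_emails.split()
        return list(to_emails)

    assert normalise_recipients("a@example.org b@example.org") == [
        "a@example.org", "b@example.org"]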
--- a/sat/tools/common/files_utils.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/files_utils.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -30,6 +30,6 @@ ori_path = path idx = 1 while os.path.exists(path): - path = ori_path + u"_" + unicode(idx) + path = ori_path + "_" + str(idx) idx += 1 return path
--- a/sat/tools/common/regex.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/regex.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Salut à Toi: an XMPP client @@ -22,8 +22,8 @@ import re path_escape = {"%": "%25", "/": "%2F", "\\": "%5c"} -path_escape_rev = {re.escape(v): k for k, v in path_escape.iteritems()} -path_escape = {re.escape(k): v for k, v in path_escape.iteritems()} +path_escape_rev = {re.escape(v): k for k, v in path_escape.items()} +path_escape = {re.escape(k): v for k, v in path_escape.items()} # thanks to Martijn Pieters (https://stackoverflow.com/a/14693789) RE_ANSI_REMOVE = re.compile(r"\x1b[^m]*m") @@ -44,8 +44,8 @@ return pattern.sub(lambda m: repl_dict[re.escape(m.group(0))], string) -path_escape_re = reJoin(path_escape.keys()) -path_escape_rev_re = reJoin(path_escape_rev.keys()) +path_escape_re = reJoin(list(path_escape.keys())) +path_escape_rev_re = reJoin(list(path_escape_rev.keys())) def pathEscape(string):
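The escape tables in regex.py are now built with .items() (the iteritems method is gone). A small round trip with the same table, using a local pattern rather than the module's helpers:

    import re

    path_escape = {"%": "%25", "/": "%2F", "\\": "%5c"}
    escape_re = re.compile("|".join(re.escape(k) for k in path_escape))

    def path_escape_str(string):
        return escape_re.sub(lambda m: path_escape[m.group(0)], string)

    assert path_escape_str("blog/2019") == "blog%2F2019"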
--- a/sat/tools/common/template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -41,8 +41,8 @@ import sat_templates except ImportError: raise exceptions.MissingModule( - u"sat_templates module is not available, please install it or check your path to " - u"use template engine" + "sat_templates module is not available, please install it or check your path to " + "use template engine" ) else: sat_templates # to avoid pyflakes warning @@ -51,8 +51,8 @@ import jinja2 except: raise exceptions.MissingModule( - u"Missing module jinja2, please install it from http://jinja.pocoo.org or with " - u"pip install jinja2" + "Missing module jinja2, please install it from http://jinja.pocoo.org or with " + "pip install jinja2" ) from jinja2 import Markup as safe @@ -67,9 +67,9 @@ HTML_EXT = ("html", "xhtml") RE_ATTR_ESCAPE = re.compile(r"[^a-z_-]") -SITE_RESERVED_NAMES = (u"sat",) -TPL_RESERVED_CHARS = ur"()/." -RE_TPL_RESERVED_CHARS = re.compile(u"[" + TPL_RESERVED_CHARS + u"]") +SITE_RESERVED_NAMES = ("sat",) +TPL_RESERVED_CHARS = r"()/." +RE_TPL_RESERVED_CHARS = re.compile("[" + TPL_RESERVED_CHARS + "]") TemplateData = namedtuple("TemplateData", ['site', 'theme', 'path']) @@ -85,8 +85,8 @@ as this allow the template to open any file on the system that the launching user can access. """ - if not sites_paths or not u"" in sites_paths: - raise exceptions.InternalError(u"Invalid sites_paths") + if not sites_paths or not "" in sites_paths: + raise exceptions.InternalError("Invalid sites_paths") super(jinja2.BaseLoader, self).__init__() self.sites_paths = sites_paths self.trusted = trusted @@ -108,49 +108,49 @@ site and theme can be both None if absolute path is used Relative path is the path from theme root dir e.g. 
blog/articles.html """ - if template.startswith(u"("): + if template.startswith("("): # site and/or theme are specified try: - theme_end = template.index(u")") + theme_end = template.index(")") except IndexError: - raise ValueError(u"incorrect site/theme in template") + raise ValueError("incorrect site/theme in template") theme_data = template[1:theme_end] - theme_splitted = theme_data.split(u'/') + theme_splitted = theme_data.split('/') if len(theme_splitted) == 1: - site, theme = u"", theme_splitted[0] + site, theme = "", theme_splitted[0] elif len(theme_splitted) == 2: site, theme = theme_splitted else: - raise ValueError(u"incorrect site/theme in template") + raise ValueError("incorrect site/theme in template") template_path = template[theme_end+1:] - if not template_path or template_path.startswith(u"/"): - raise ValueError(u"incorrect template path") - elif template.startswith(u"/"): + if not template_path or template_path.startswith("/"): + raise ValueError("incorrect template path") + elif template.startswith("/"): # this is an absolute path, so we have no site and no theme site = None theme = None template_path = template else: # a default template - site = u"" + site = "" theme = C.TEMPLATE_THEME_DEFAULT template_path = template if site is not None: site = site.strip() if not site: - site = u"" + site = "" elif site in SITE_RESERVED_NAMES: - raise ValueError(_(u"{site} can't be used as site name, " - u"it's reserved.").format(site=site)) + raise ValueError(_("{site} can't be used as site name, " + "it's reserved.").format(site=site)) if theme is not None: theme = theme.strip() if not theme: theme = C.TEMPLATE_THEME_DEFAULT if RE_TPL_RESERVED_CHARS.search(theme): - raise ValueError(_(u"{theme} contain forbidden char. Following chars " - u"are forbidden: {reserved}").format( + raise ValueError(_("{theme} contain forbidden char. Following chars " + "are forbidden: {reserved}").format( theme=theme, reserved=TPL_RESERVED_CHARS)) return TemplateData(site, theme, template_path) @@ -169,7 +169,7 @@ sites_and_themes.append([site, C.TEMPLATE_THEME_DEFAULT]) if site: # the site is not the default one, so we add default at the end - sites_and_themes.append([u'', C.TEMPLATE_THEME_DEFAULT]) + sites_and_themes.append(['', C.TEMPLATE_THEME_DEFAULT]) return sites_and_themes def _get_template_f(self, site, theme, path_elts): @@ -185,15 +185,15 @@ """ if site is None: raise exceptions.InternalError( - u"_get_template_f must not be used with absolute path") + "_get_template_f must not be used with absolute path") for site, theme in self.getSitesAndThemes(site, theme): try: base_path = self.sites_paths[site] except KeyError: - log.warning(_(u"Unregistered site requested: {site}").format( + log.warning(_("Unregistered site requested: {site}").format( site=site)) filepath = os.path.join(base_path, C.TEMPLATE_TPL_DIR, theme, *path_elts) - f = utils.open_if_exists(filepath) + f = utils.open_if_exists(filepath, 'r') if f is not None: return f, filepath return None, None @@ -211,14 +211,14 @@ if site is None: # we have an abolute template if theme is not None: - raise exceptions.InternalError(u"We can't have a theme with absolute " - u"template.") + raise exceptions.InternalError("We can't have a theme with absolute " + "template.") if not self.trusted: - log.error(_(u"Absolute template used while unsecure is disabled, hack " - u"attempt? 
Template: {template}").format(template=template)) - raise exceptions.PermissionError(u"absolute template is not allowed") + log.error(_("Absolute template used while unsecure is disabled, hack " + "attempt? Template: {template}").format(template=template)) + raise exceptions.PermissionError("absolute template is not allowed") filepath = template_path - f = utils.open_if_exists(filepath) + f = utils.open_if_exists(filepath, 'r') else: # relative path, we have to deal with site and theme assert theme and template_path @@ -227,19 +227,19 @@ f, filepath = self._get_template_f(site, theme, path_elts) if f is None: - if (site is not None and path_elts[0] == u"error" + if (site is not None and path_elts[0] == "error" and os.path.splitext(template_path)[1][1:] in HTML_EXT): # if an HTML error is requested but doesn't exist, we try again # with base error. f, filepath = self._get_template_f( site, theme, ("error", "base.html")) if f is None: - raise exceptions.InternalError(u"error/base.html should exist") + raise exceptions.InternalError("error/base.html should exist") else: raise TemplateNotFound(template) try: - contents = f.read().decode('utf-8') + contents = f.read() finally: f.close() @@ -285,15 +285,15 @@ @param library_name(unicode): name of the library to import @param loading: """ - if attribute not in (u"defer", u"async", u""): + if attribute not in ("defer", "async", ""): raise exceptions.DataError( - _(u'Invalid attribute, please use one of "defer", "async" or ""') + _('Invalid attribute, please use one of "defer", "async" or ""') ) - if not library_name.endswith(u".js"): - library_name = library_name + u".js" + if not library_name.endswith(".js"): + library_name = library_name + ".js" if (library_name, attribute) not in self.scripts: self.scripts.append((library_name, attribute)) - return u"" + return "" def generate_scripts(self): """Generate the <script> elements @@ -301,28 +301,28 @@ @return (unicode): <scripts> HTML tags """ scripts = [] - tpl = u"<script src={src} {attribute}></script>" + tpl = "<script src={src} {attribute}></script>" for library, attribute in self.scripts: library_path = self.renderer.getStaticPath(self.template_data, library) if library_path is None: - log.warning(_(u"Can't find {libary} javascript library").format( + log.warning(_("Can't find {libary} javascript library").format( library=library)) continue path = self.renderer.getFrontURL(library_path) scripts.append(tpl.format(src=quoteattr(path), attribute=attribute)) - return safe(u"\n".join(scripts)) + return safe("\n".join(scripts)) class Environment(jinja2.Environment): def get_template(self, name, parent=None, globals=None): - if name[0] not in (u'/', u'('): + if name[0] not in ('/', '('): # if name is not an absolute path or a full template name (this happen on # extend or import during rendering), we convert it to a full template name. # This is needed to handle cache correctly when a base template is overriden. 
# Without that, we could not distinguish something like base/base.html if # it's launched from some_site/some_theme or from [default]/default - name = u"({site}/{theme}){template}".format( + name = "({site}/{theme}){template}".format( site=self._template_data.site, theme=self._template_data.theme, template=name) @@ -348,26 +348,26 @@ self.host = host self.trusted = trusted self.sites_paths = { - u"": os.path.dirname(sat_templates.__file__), + "": os.path.dirname(sat_templates.__file__), } conf = config.parseMainConf() - public_sites = config.getConfig(conf, None, u"sites_path_public_dict", {}) + public_sites = config.getConfig(conf, None, "sites_path_public_dict", {}) sites_data = [public_sites] if private: - private_sites = config.getConfig(conf, None, u"sites_path_private_dict", {}) + private_sites = config.getConfig(conf, None, "sites_path_private_dict", {}) sites_data.append(private_sites) for sites in sites_data: normalised = {} - for name, path in sites.iteritems(): + for name, path in sites.items(): if RE_TPL_RESERVED_CHARS.search(name): - log.warning(_(u"Can't add \"{name}\" site, it contains forbidden " - u"characters. Forbidden characters are {forbidden}.") + log.warning(_("Can't add \"{name}\" site, it contains forbidden " + "characters. Forbidden characters are {forbidden}.") .format(name=name, forbidden=TPL_RESERVED_CHARS)) continue path = os.path.expanduser(os.path.normpath(path)) - if not path or not path.startswith(u"/"): - log.warning(_(u"Can't add \"{name}\" site, it should map to an " - u"absolute path").format(name=name)) + if not path or not path.startswith("/"): + log.warning(_("Can't add \"{name}\" site, it should map to an " + "absolute path").format(name=name)) continue normalised[name] = path self.sites_paths.update(normalised) @@ -385,26 +385,26 @@ self.installTranslations() # we want to have access to SàT constants in templates - self.env.globals[u"C"] = C + self.env.globals["C"] = C # custom filters - self.env.filters[u"next_gidx"] = self._next_gidx - self.env.filters[u"cur_gidx"] = self._cur_gidx - self.env.filters[u"date_fmt"] = self._date_fmt - self.env.filters[u"xmlui_class"] = self._xmlui_class - self.env.filters[u"attr_escape"] = self.attr_escape - self.env.filters[u"item_filter"] = self._item_filter - self.env.filters[u"adv_format"] = self._adv_format - self.env.filters[u"dict_ext"] = self._dict_ext - self.env.filters[u"highlight"] = self.highlight - self.env.filters[u"front_url"] = (self._front_url if front_url_filter is None + self.env.filters["next_gidx"] = self._next_gidx + self.env.filters["cur_gidx"] = self._cur_gidx + self.env.filters["date_fmt"] = self._date_fmt + self.env.filters["xmlui_class"] = self._xmlui_class + self.env.filters["attr_escape"] = self.attr_escape + self.env.filters["item_filter"] = self._item_filter + self.env.filters["adv_format"] = self._adv_format + self.env.filters["dict_ext"] = self._dict_ext + self.env.filters["highlight"] = self.highlight + self.env.filters["front_url"] = (self._front_url if front_url_filter is None else front_url_filter) # custom tests - self.env.tests[u"in_the_past"] = self._in_the_past - self.icons_path = os.path.join(host.media_dir, u"fonts/fontello/svg") + self.env.tests["in_the_past"] = self._in_the_past + self.icons_path = os.path.join(host.media_dir, "fonts/fontello/svg") # policies - self.env.policies[u"ext.i18n.trimmed"] = True + self.env.policies["ext.i18n.trimmed"] = True def getFrontURL(self, template_data, path=None): """Give front URL (i.e. 
URL seen by end-user) of a path @@ -413,15 +413,15 @@ @param path(unicode, None): relative path of file to get, if set, will remplate template_data.path """ - return self.env.filters[u"front_url"]({u"template_data": template_data}, + return self.env.filters["front_url"]({"template_data": template_data}, path or template_data.path) def installTranslations(self): # TODO: support multi translation # for now, only translations in sat_templates are handled self.translations = {} - for site_key, site_path in self.sites_paths.iteritems(): - site_prefix = u"[{}] ".format(site_key) if site_key else u'' + for site_key, site_path in self.sites_paths.items(): + site_prefix = "[{}] ".format(site_key) if site_key else '' i18n_dir = os.path.join(site_path, "i18n") for lang_dir in os.listdir(i18n_dir): lang_path = os.path.join(i18n_dir, lang_dir) @@ -439,13 +439,13 @@ translations.merge(support.Translations(f, "sat")) except EnvironmentError: log.error( - _(u"Can't find template translation at {path}").format( + _("Can't find template translation at {path}").format( path=po_path)) except UnknownLocaleError as e: - log.error(_(u"{site}Invalid locale name: {msg}").format( + log.error(_("{site}Invalid locale name: {msg}").format( site=site_prefix, msg=e)) else: - log.info(_(u"{site}loaded {lang} templates translations").format( + log.info(_("{site}loaded {lang} templates translations").format( site = site_prefix, lang=lang_dir)) @@ -458,7 +458,7 @@ self.env.install_null_translations(True) # we generate a tuple of locales ordered by display name that templates can access # through the "locales" variable - self.locales = tuple(sorted(self.translations.keys(), + self.locales = tuple(sorted(list(self.translations.keys()), key=lambda l: l.language_name.lower())) @@ -476,21 +476,21 @@ try: locale = Locale.parse(locale_str) except ValueError as e: - log.warning(_(u"invalid locale value: {msg}").format(msg=e)) + log.warning(_("invalid locale value: {msg}").format(msg=e)) locale_str = self._locale_str = C.DEFAULT_LOCALE locale = Locale.parse(locale_str) - locale_str = unicode(locale) + locale_str = str(locale) if locale_str != C.DEFAULT_LOCALE: try: translations = self.translations[locale] except KeyError: - log.warning(_(u"Can't find locale {locale}".format(locale=locale))) + log.warning(_("Can't find locale {locale}".format(locale=locale))) locale_str = C.DEFAULT_LOCALE locale = Locale.parse(self._locale_str) else: self.env.install_gettext_translations(translations, True) - log.debug(_(u"Switched to {lang}").format(lang=locale.english_name)) + log.debug(_("Switched to {lang}").format(lang=locale.english_name)) if locale_str == C.DEFAULT_LOCALE: self.env.install_null_translations(True) @@ -509,7 +509,7 @@ site, theme, __ = self.env.loader.parse_template(template) if site is None: # absolute template - return u"", os.path.dirname(template) + return "", os.path.dirname(template) try: site_root_dir = self.sites_paths[site] except KeyError: @@ -533,9 +533,9 @@ if template_data.site is None: # we have and absolue path if (not template_data.theme is None - or not template_data.path.startswith(u'/')): + or not template_data.path.startswith('/')): raise exceptions.InternalError( - u"invalid template data, was expecting absolute URL") + "invalid template data, was expecting absolute URL") static_dir = os.path.dirname(template_data.path) file_path = os.path.join(static_dir, filename) if os.path.exists(file_path): @@ -562,11 +562,11 @@ @param css_files_noscript(list): list to fill of relative path to found css file with 
"_noscript" suffix """ - name = name_root + u".css" + name = name_root + ".css" css_path = self.getStaticPath(template_data, name) if css_path is not None: css_files.append(self.getFrontURL(css_path)) - noscript_name = name_root + u"_noscript.css" + noscript_name = name_root + "_noscript.css" noscript_path = self.getStaticPath(template_data, noscript_name) if noscript_path is not None: css_files_noscript.append(self.getFrontURL(noscript_path)) @@ -600,18 +600,18 @@ # TODO: some caching would be nice css_files = [] css_files_noscript = [] - path_elems = template_data.path.split(u'/') + path_elems = template_data.path.split('/') path_elems[-1] = os.path.splitext(path_elems[-1])[0] - css_path = self.getStaticPath(template_data, u'fonts.css') + css_path = self.getStaticPath(template_data, 'fonts.css') if css_path is not None: css_files.append(self.getFrontURL(css_path)) - for name_root in (u'styles', u'styles_extra', u'highlight'): + for name_root in ('styles', 'styles_extra', 'highlight'): self._appendCSSPaths(template_data, css_files, css_files_noscript, name_root) - for idx in xrange(len(path_elems)): - name_root = u"_".join(path_elems[:idx+1]) + for idx in range(len(path_elems)): + name_root = "_".join(path_elems[:idx+1]) self._appendCSSPaths(template_data, css_files, css_files_noscript, name_root) return css_files, css_files_noscript @@ -624,10 +624,10 @@ This default method return absolute full path """ - template_data = ctx[u'template_data'] + template_data = ctx['template_data'] if template_data.site is None: assert template_data.theme is None - assert template_data.path.startswith(u"/") + assert template_data.path.startswith("/") return os.path.join(os.path.dirname(template_data.path, relative_url)) site_root_dir = self.sites_paths[template_data.site] @@ -638,25 +638,25 @@ def _next_gidx(self, ctx, value): """Use next current global index as suffix""" next_ = ctx["gidx"].next(value) - return value if next_ == 0 else u"{}_{}".format(value, next_) + return value if next_ == 0 else "{}_{}".format(value, next_) @contextfilter def _cur_gidx(self, ctx, value): """Use current current global index as suffix""" current = ctx["gidx"].current(value) - return value if not current else u"{}_{}".format(value, current) + return value if not current else "{}_{}".format(value, current) def _date_fmt(self, timestamp, fmt="short", date_only=False, auto_limit=None, auto_old_fmt=None): if is_undefined(fmt): - fmt = u"short" + fmt = "short" try: return date_utils.date_fmt( timestamp, fmt, date_only, auto_limit, auto_old_fmt, locale_str = self._locale_str ) except Exception as e: - log.warning(_(u"Can't parse date: {msg}").format(msg=e)) + log.warning(_("Can't parse date: {msg}").format(msg=e)) return timestamp def attr_escape(self, text): @@ -664,7 +664,7 @@ remove spaces, and put in lower case """ - return RE_ATTR_ESCAPE.sub(u"_", text.strip().lower())[:50] + return RE_ATTR_ESCAPE.sub("_", text.strip().lower())[:50] def _xmlui_class(self, xmlui_item, fields): """return classes computed from XMLUI fields name @@ -683,10 +683,10 @@ classes.append(escaped_name + "_" + self.attr_escape(value)) except KeyError: log.debug( - _(u'ignoring field "{name}": it doesn\'t exists').format(name=name) + _('ignoring field "{name}": it doesn\'t exists').format(name=name) ) continue - return u" ".join(classes) or None + return " ".join(classes) or None @contextfilter def _item_filter(self, ctx, item, filters): @@ -709,8 +709,8 @@ if filter_ is None: return value elif isinstance(filter_, dict): - filters_args = 
filter_.get(u"filters_args") - for idx, f_name in enumerate(filter_.get(u"filters", [])): + filters_args = filter_.get("filters_args") + for idx, f_name in enumerate(filter_.get("filters", [])): kwargs = filters_args[idx] if filters_args is not None else {} filter_func = self.env.filters[f_name] try: @@ -722,7 +722,7 @@ value = filter_func(ctx.eval_ctx, value, **kwargs) else: value = filter_func(value, **kwargs) - template = filter_.get(u"template") + template = filter_.get("template") if template: # format will return a string, so we need to check first # if the value is safe or not, and re-mark it after formatting @@ -745,7 +745,7 @@ if template is None: return value # jinja use string when no special char is used, so we have to convert to unicode - return unicode(template).format(value=value, **kwargs) + return str(template).format(value=value, **kwargs) def _dict_ext(self, source_dict, extra_dict, key=None): """extend source_dict with extra dict and return the result @@ -813,23 +813,23 @@ ) defs_elt = etree.SubElement(svg_elt, "defs") for name in names: - path = os.path.join(self.icons_path, name + u".svg") + path = os.path.join(self.icons_path, name + ".svg") icon_svg_elt = etree.parse(path).getroot() # we use icon name as id, so we can retrieve them easily icon_svg_elt.set("id", name) if not icon_svg_elt.tag == "{http://www.w3.org/2000/svg}svg": - raise exceptions.DataError(u"invalid SVG element") + raise exceptions.DataError("invalid SVG element") defs_elt.append(icon_svg_elt) return safe(etree.tostring(svg_elt, encoding="unicode")) def _icon_use(self, name, cls=""): - return safe(u'<svg class="svg-icon{cls}" xmlns="http://www.w3.org/2000/svg" ' - u'viewBox="0 0 100 100">\n' - u' <use href="#{name}"/>' - u'</svg>\n'.format(name=name, cls=(" " + cls) if cls else "")) + return safe('<svg class="svg-icon{cls}" xmlns="http://www.w3.org/2000/svg" ' + 'viewBox="0 0 100 100">\n' + ' <use href="#{name}"/>' + '</svg>\n'.format(name=name, cls=(" " + cls) if cls else "")) def render(self, template, site=None, theme=None, locale=C.DEFAULT_LOCALE, - media_path=u"", css_files=None, css_inline=False, **kwargs): + media_path="", css_files=None, css_inline=False, **kwargs): """Render a template @param template(unicode): template to render (e.g. 
blog/articles.html) @@ -847,21 +847,21 @@ @param **kwargs: variable to transmit to the template """ if not template: - raise ValueError(u"template can't be empty") + raise ValueError("template can't be empty") if site is not None or theme is not None: # user wants to set site and/or theme, so we add it to the template path if site is None: - site = u'' + site = '' if theme is None: theme = C.TEMPLATE_THEME_DEFAULT - if template[0] == u"(": + if template[0] == "(": raise ValueError( - u"you can't specify site or theme in template path and in argument " - u"at the same time" + "you can't specify site or theme in template path and in argument " + "at the same time" ) template_data = TemplateData(site, theme, template) - template = u"({site}/{theme}){template}".format( + template = "({site}/{theme}){template}".format( site=site, theme=theme, template=template) else: template_data = self.env.loader.parse_template(template) @@ -883,15 +883,15 @@ if css_inline: css_contents = [] - for files, suffix in ((css_files, u""), - (css_files_noscript, u"_noscript")): + for files, suffix in ((css_files, ""), + (css_files_noscript, "_noscript")): site_root_dir = self.sites_paths[template_data.site] for css_file in files: css_file_path = os.path.join(site_root_dir, css_file) with open(css_file_path) as f: css_contents.append(f.read()) if css_contents: - kwargs[u"css_content" + suffix] = u"\n".join(css_contents) + kwargs["css_content" + suffix] = "\n".join(css_contents) scripts_handler = ScriptsHandler(self, template_data) self.setLocale(locale)
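Note on the template loader change: passing 'r' to jinja2's open_if_exists opens the file in text mode, so f.read() already returns str and the former .decode('utf-8') call is dropped (the file's encoding then falls back to the locale default). A minimal sketch with a hypothetical path:

    from jinja2.utils import open_if_exists

    f = open_if_exists("/tmp/some_template.html", "r")  # hypothetical path
    if f is not None:
        try:
            contents = f.read()  # already str under Python 3, no .decode() needed
        finally:
            f.close()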
--- a/sat/tools/common/template_xmlui.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/template_xmlui.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -39,7 +39,7 @@ class Widget(object): - category = u"widget" + category = "widget" type = None enabled = True read_only = True @@ -103,11 +103,11 @@ @property def inline(self): - return u"inline" in self.style + return "inline" in self.style @property def no_select(self): - return u"noselect" in self.style + return "noselect" in self.style class EmptyWidget(xmlui.EmptyWidget, Widget): @@ -116,11 +116,11 @@ class TextWidget(xmlui.TextWidget, ValueWidget): - type = u"text" + type = "text" class LabelWidget(xmlui.LabelWidget, ValueWidget): - type = u"label" + type = "label" @property def for_name(self): @@ -131,19 +131,19 @@ class StringWidget(xmlui.StringWidget, InputWidget): - type = u"string" + type = "string" class JidInputWidget(xmlui.JidInputWidget, StringWidget): - type = u"jid" + type = "jid" class TextBoxWidget(xmlui.TextWidget, InputWidget): - type = u"textbox" + type = "textbox" class XHTMLBoxWidget(xmlui.XHTMLBoxWidget, InputWidget): - type = u"xhtmlbox" + type = "xhtmlbox" def __init__(self, xmlui_parent, value, read_only=False): # XXX: XHTMLBoxWidget value must be cleaned (harmful XHTML must be removed) @@ -154,14 +154,14 @@ class ListWidget(xmlui.ListWidget, OptionsWidget): - type = u"list" + type = "list" ## Containers ## class Container(object): - category = u"container" + category = "container" type = None def __init__(self, xmlui_parent): @@ -179,15 +179,15 @@ class VerticalContainer(xmlui.VerticalContainer, Container): - type = u"vertical" + type = "vertical" class PairsContainer(xmlui.PairsContainer, Container): - type = u"pairs" + type = "pairs" class LabelContainer(xmlui.PairsContainer, Container): - type = u"label" + type = "label" ## Factory ##
--- a/sat/tools/common/uri.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/common/uri.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -19,8 +19,8 @@ """ XMPP uri parsing tools """ -import urlparse -import urllib +import urllib.parse +import urllib.request, urllib.parse, urllib.error # FIXME: basic implementation, need to follow RFC 5122 @@ -40,64 +40,64 @@ id: id of the element (item for pubsub) @raise ValueError: the scheme is not xmpp """ - uri_split = urlparse.urlsplit(uri.encode("utf-8")) + uri_split = urllib.parse.urlsplit(uri) if uri_split.scheme != "xmpp": - raise ValueError(u"this is not a XMPP URI") + raise ValueError("this is not a XMPP URI") # XXX: we don't use jid.JID for path as it can be used both in backend and frontend # which may use different JID classes - data = {u"path": urllib.unquote(uri_split.path).decode("utf-8")} + data = {"path": urllib.parse.unquote(uri_split.path)} query_end = uri_split.query.find(";") query_type = uri_split.query[:query_end] if query_end == -1 or "=" in query_type: raise ValueError("no query type, invalid XMPP URI") - pairs = urlparse.parse_qs(uri_split.geturl()) - for k, v in pairs.items(): + pairs = urllib.parse.parse_qs(uri_split.geturl()) + for k, v in list(pairs.items()): if len(v) != 1: - raise NotImplementedError(u"multiple values not managed") + raise NotImplementedError("multiple values not managed") if k in ("path", "type", "sub_type"): - raise NotImplementedError(u"reserved key used in URI, this is not supported") - data[k.decode("utf-8")] = urllib.unquote(v[0]).decode("utf-8") + raise NotImplementedError("reserved key used in URI, this is not supported") + data[k] = urllib.parse.unquote(v[0]) if query_type: - data[u"type"] = query_type.decode("utf-8") - elif u"node" in data: - data[u"type"] = u"pubsub" + data["type"] = query_type + elif "node" in data: + data["type"] = "pubsub" else: - data[u"type"] = "" + data["type"] = "" - if u"node" in data: - if data[u"node"].startswith(u"urn:xmpp:microblog:"): - data[u"sub_type"] = "microblog" + if "node" in data: + if data["node"].startswith("urn:xmpp:microblog:"): + data["sub_type"] = "microblog" return data def addPairs(uri, pairs): - for k, v in pairs.iteritems(): + for k, v in pairs.items(): uri.append( - u";" - + urllib.quote_plus(k.encode("utf-8")) - + u"=" - + urllib.quote_plus(v.encode("utf-8")) + ";" + + urllib.parse.quote_plus(k.encode("utf-8")) + + "=" + + urllib.parse.quote_plus(v.encode("utf-8")) ) def buildXMPPUri(type_, **kwargs): - uri = [u"xmpp:"] + uri = ["xmpp:"] subtype = kwargs.pop("subtype", None) path = kwargs.pop("path") - uri.append(urllib.quote_plus(path.encode("utf-8")).replace(u"%40", "@")) + uri.append(urllib.parse.quote_plus(path.encode("utf-8")).replace("%40", "@")) - if type_ == u"pubsub": + if type_ == "pubsub": if subtype == "microblog" and not kwargs.get("node"): - kwargs[u"node"] = "urn:xmpp:microblog:0" + kwargs["node"] = "urn:xmpp:microblog:0" if kwargs: - uri.append(u"?") + uri.append("?") addPairs(uri, kwargs) else: - raise NotImplementedError(u"{type_} URI are not handled yet".format(type_=type_)) + raise NotImplementedError("{type_} URI are not handled yet".format(type_=type_)) - return u"".join(uri) + return "".join(uri)
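Note: urlparse and urllib merge into urllib.parse, and quote/unquote now work on str, which removes the systematic encode("utf-8")/decode("utf-8") calls when parsing. A quick sketch with a sample URI:

    import urllib.parse

    uri = "xmpp:louise%40example.net?message"  # sample URI, for illustration only
    split = urllib.parse.urlsplit(uri)
    assert split.scheme == "xmpp"
    assert urllib.parse.unquote(split.path) == "louise@example.net"

    # building: quote_plus still takes str, the "@" is restored afterwards as in buildXMPPUri
    path = urllib.parse.quote_plus("louise@example.net").replace("%40", "@")
    assert "xmpp:" + path + "?message" == uri.replace("%40", "@")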
--- a/sat/tools/config.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/config.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -23,7 +23,7 @@ import os import csv import json -from ConfigParser import SafeConfigParser, DEFAULTSECT, NoOptionError, NoSectionError +from configparser import SafeConfigParser, DEFAULTSECT, NoOptionError, NoSectionError from xdg import BaseDirectory from sat.core.log import getLogger from sat.core.constants import Const as C @@ -48,7 +48,7 @@ # we will eventually update the existing file with the highest priority, # if it's a user personal file... if not silent: - log.debug(_(u"Testing file %s") % file_) + log.debug(_("Testing file %s") % file_) if os.path.isfile(file_): if file_.startswith(os.path.expanduser("~")): config.read([file_]) @@ -65,8 +65,8 @@ if not silent: if option in ("passphrase",): # list here the options storing a password value = "******" - log.warning(_(u"Config auto-update: {option} set to {value} in the file " - u"{config_file}.").format(option=option, value=value, + log.warning(_("Config auto-update: {option} set to {value} in the file " + "{config_file}.").format(option=option, value=value, config_file=target_file)) @@ -76,7 +76,7 @@ try: config.read(C.CONFIG_FILES) except Exception as e: - log.error(_(u"Can't read main config: {msg}").format(msg=e), exc_info=True) + log.error(_("Can't read main config: {msg}").format(msg=e), exc_info=True) return config @@ -96,7 +96,7 @@ section = DEFAULTSECT try: - value = config.get(section, name).decode("utf-8") + value = config.get(section, name) except (NoOptionError, NoSectionError) as e: if default is Exception: raise e @@ -106,21 +106,21 @@ value = os.path.expanduser(value) # thx to Brian (http://stackoverflow.com/questions/186857/splitting-a-semicolon-separated-string-to-a-dictionary-in-python/186873#186873) elif name.endswith("_list"): - value = csv.reader( + value = next(csv.reader( [value], delimiter=",", quotechar='"', skipinitialspace=True - ).next() + )) elif name.endswith("_dict"): try: value = json.loads(value) except ValueError as e: - raise exceptions.ParsingError(u"Error while parsing data: {}".format(e)) + raise exceptions.ParsingError("Error while parsing data: {}".format(e)) if not isinstance(value, dict): raise exceptions.ParsingError( - u"{name} value is not a dict: {value}".format(name=name, value=value) + "{name} value is not a dict: {value}".format(name=name, value=value) ) elif name.endswith("_json"): try: value = json.loads(value) except ValueError as e: - raise exceptions.ParsingError(u"Error while parsing data: {}".format(e)) + raise exceptions.ParsingError("Error while parsing data: {}".format(e)) return value
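Note: ConfigParser.get() already returns str (hence the dropped .decode("utf-8")), and the csv reader's .next() method becomes the next() builtin. SafeConfigParser still imports from configparser, but only as a deprecated alias of ConfigParser (it was eventually removed in Python 3.12). Sketch of the *_list option parsing with a hypothetical value:

    import csv

    raw = 'item 1, item 2, "item, with comma"'  # hypothetical *_list option value
    value = next(csv.reader([raw], delimiter=",", quotechar='"', skipinitialspace=True))
    print(value)  # ['item 1', 'item 2', 'item, with comma']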
--- a/sat/tools/sat_defer.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/sat_defer.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -38,7 +38,7 @@ def stanza2NotFound(failure_): """Convert item-not-found StanzaError to exceptions.NotFound""" failure_.trap(jabber_error.StanzaError) - if failure_.value.condition == u'item-not-found': + if failure_.value.condition == 'item-not-found': raise exceptions.NotFound(failure_.value.text or failure_.value.condition) return failure_ @@ -107,8 +107,8 @@ data, profile=profile ) if isinstance(deferreds, dict): - session_data[KEY_DEFERREDS] = deferreds.values() - iterator = deferreds.iteritems() + session_data[KEY_DEFERREDS] = list(deferreds.values()) + iterator = iter(deferreds.items()) else: session_data[KEY_DEFERREDS] = deferreds iterator = enumerate(deferreds) @@ -121,7 +121,7 @@ return session_id def _purgeSession( - self, session_id, reason=u"timeout", no_warning=False, got_result=False + self, session_id, reason="timeout", no_warning=False, got_result=False ): """Purge the session @@ -137,7 +137,7 @@ timer, session_data, profile = self._sessions[session_id] except ValueError: raise exceptions.InternalError( - u"was expecting timer, session_data and profile; is profile set ?" + "was expecting timer, session_data and profile; is profile set ?" ) # next_defer must be called before deferreds, @@ -152,7 +152,7 @@ if not no_warning: log.warning( - u"RTDeferredList cancelled: {} (profile {})".format(reason, profile) + "RTDeferredList cancelled: {} (profile {})".format(reason, profile) ) super(RTDeferredSessions, self)._purgeSession(session_id) @@ -175,7 +175,7 @@ deferred._RTDeferred_return = (False, failure) self._gotResult(session_id, profile) - def cancel(self, session_id, reason=u"timeout", no_log=False): + def cancel(self, session_id, reason="timeout", no_log=False): """Stop this RTDeferredList Cancel all remaining deferred, and call self.final_defer.errback
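Note: dictionary .values() now returns a live view; since session_data keeps the collection around while the deferreds fire, the port snapshots it with list(), and iteritems() becomes a plain items() iterator. Generic illustration of the difference:

    d = {"a": 1}
    view = d.values()            # Python 3: a live view over the dict
    snapshot = list(d.values())  # a fixed copy, like Python 2's .values()
    d["b"] = 2
    assert len(view) == 2 and len(snapshot) == 1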
--- a/sat/tools/stream.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/stream.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -65,7 +65,7 @@ """ self.host = host self.profile = client.profile - self.uid = uid or unicode(uuid.uuid4()) + self.uid = uid or str(uuid.uuid4()) self._file = open(path, mode) self.size = size self.data_cb = data_cb @@ -104,7 +104,7 @@ except exceptions.NotFound: size_ok = True if not size_ok: - error = u"declared and actual size mismatch" + error = "declared and actual size mismatch" log.warning(error) progress_metadata = None
--- a/sat/tools/trigger.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/trigger.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -63,10 +63,10 @@ trigger_tuple[0] for trigger_tuple in self.__triggers[point_name] ]: if priority in (self.MIN_PRIORITY, self.MAX_PRIORITY): - log.warning(_(u"There is already a bound priority [%s]") % point_name) + log.warning(_("There is already a bound priority [%s]") % point_name) else: log.debug( - _(u"There is already a trigger with the same priority [%s]") + _("There is already a trigger with the same priority [%s]") % point_name ) self.__triggers[point_name].append((priority, callback))
--- a/sat/tools/utils.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/utils.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -36,7 +36,7 @@ log = getLogger(__name__) -NO_REPOS_DATA = u"repository data unknown" +NO_REPOS_DATA = "repository data unknown" repos_cache_dict = None repos_cache = None @@ -99,10 +99,10 @@ @param with_time(bool): if True include the time @return(unicode): XEP-0082 formatted date and time """ - template_date = u"%Y-%m-%d" - template_time = u"%H:%M:%SZ" + template_date = "%Y-%m-%d" + template_time = "%H:%M:%SZ" template = ( - u"{}T{}".format(template_date, template_time) if with_time else template_date + "{}T{}".format(template_date, template_time) if with_time else template_date ) return datetime.datetime.utcfromtimestamp( time.time() if timestamp is None else timestamp @@ -119,9 +119,9 @@ random.seed() if vocabulary is None: vocabulary = [ - chr(i) for i in range(0x30, 0x3A) + range(0x41, 0x5B) + range(0x61, 0x7B) + chr(i) for i in list(range(0x30, 0x3A)) + list(range(0x41, 0x5B)) + list(range(0x61, 0x7B)) ] - return u"".join([random.choice(vocabulary) for i in range(15)]) + return "".join([random.choice(vocabulary) for i in range(15)]) def getRepositoryData(module, as_string=True, is_path=False): @@ -157,7 +157,7 @@ if sys.platform == "android": # FIXME: workaround to avoid trouble on android, need to be fixed properly - repos_cache = u"Cagou android build" + repos_cache = "Cagou android build" return repos_cache KEYS = ("node", "node_short", "branch", "date", "tag", "distance") @@ -171,7 +171,7 @@ try: hg_path = procutils.which("hg")[0] except IndexError: - log.warning(u"Can't find hg executable") + log.warning("Can't find hg executable") hg_path = None hg_data = {} @@ -191,14 +191,15 @@ "{date|isodate}\n" "{latesttag}\n" "{latesttagdistance}", - ] + ], + text=True ) except subprocess.CalledProcessError: hg_data = {} else: - hg_data = dict(zip(KEYS, hg_data_raw.split("\n"))) + hg_data = dict(list(zip(KEYS, hg_data_raw.split("\n")))) try: - hg_data["modified"] = "+" in subprocess.check_output(["hg", "id", "-i"]) + hg_data["modified"] = "+" in subprocess.check_output(["hg", "id", "-i"], text=True) except subprocess.CalledProcessError: pass else: @@ -206,7 +207,7 @@ if not hg_data: # .hg/dirstate method - log.debug(u"trying dirstate method") + log.debug("trying dirstate method") if is_path: os.chdir(repos_root) else: @@ -216,13 +217,13 @@ hg_data["node"] = hg_dirstate.read(20).encode("hex") hg_data["node_short"] = hg_data["node"][:12] except IOError: - log.debug(u"Can't access repository data") + log.debug("Can't access repository data") # we restore original working dir os.chdir(ori_cwd) if not hg_data: - log.debug(u"Mercurial not available or working, trying package version") + log.debug("Mercurial not available or working, trying package version") try: import pkg_resources @@ -234,7 +235,7 @@ log.warning("can't retrieve package data") except ValueError: log.info( - u"no local version id in package: {pkg_version}".format( + "no local version id in package: {pkg_version}".format( pkg_version=pkg_version ) ) @@ -259,21 +260,21 @@ if not hg_data: repos_cache = NO_REPOS_DATA else: - strings = [u"rev", hg_data["node_short"]] + strings = ["rev", hg_data["node_short"]] try: if hg_data["modified"]: - strings.append(u"[M]") + strings.append("[M]") except KeyError: pass try: - strings.extend([u"({branch} {date})".format(**hg_data)]) + strings.extend(["({branch} 
{date})".format(**hg_data)]) except KeyError: pass try: - strings.extend([u"+{distance}".format(**hg_data)]) + strings.extend(["+{distance}".format(**hg_data)]) except KeyError: pass - repos_cache = u" ".join(strings) + repos_cache = " ".join(strings) return repos_cache else: return hg_data
--- a/sat/tools/xml_tools.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/tools/xml_tools.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -30,8 +30,7 @@ from twisted.internet import defer from sat.core import exceptions from collections import OrderedDict -from copy import deepcopy -import htmlentitydefs +import html.entities import re """This library help manage XML used in SàT (parameters, registration, etc)""" @@ -64,26 +63,26 @@ widget_kwargs = {} if field.fieldType is None and field.ext_type is not None: # we have an extended field - if field.ext_type == u"xml": + if field.ext_type == "xml": element = field.value if element.uri == C.NS_XHTML: widget_type = "xhtmlbox" widget_args[0] = element.toXml() widget_kwargs["read_only"] = read_only else: - log.warning(u"unknown XML element, falling back to textbox") + log.warning("unknown XML element, falling back to textbox") widget_type = "textbox" widget_args[0] = element.toXml() widget_kwargs["read_only"] = read_only else: - raise exceptions.DataError(u"unknown extended type {ext_type}".format( + raise exceptions.DataError("unknown extended type {ext_type}".format( ext_type = field.ext_type)) elif field.fieldType == "fixed" or field.fieldType is None: widget_type = "text" if field.value is None: if field.label is None: - log.warning(_(u"Fixed field has neither value nor label, ignoring it")) + log.warning(_("Fixed field has neither value nor label, ignoring it")) field.value = "" else: field.value = field.label @@ -97,7 +96,7 @@ widget_kwargs["read_only"] = read_only elif field.fieldType == "text-multi": widget_type = "textbox" - widget_args[0] = u"\n".join(field.values) + widget_args[0] = "\n".join(field.values) widget_kwargs["read_only"] = read_only elif field.fieldType == "hidden": widget_type = "hidden" @@ -121,7 +120,7 @@ widget_args = [] else: log.error( - u"FIXME FIXME FIXME: Type [%s] is not managed yet by SàT" % field.fieldType + "FIXME FIXME FIXME: Type [%s] is not managed yet by SàT" % field.fieldType ) widget_type = "string" widget_kwargs["read_only"] = read_only @@ -206,7 +205,7 @@ """ headers = OrderedDict() try: - reported_elt = form_xml.elements("jabber:x:data", "reported").next() + reported_elt = next(form_xml.elements("jabber:x:data", "reported")) except StopIteration: raise exceptions.DataError( "Couldn't find expected <reported> tag in %s" % form_xml.toXml() @@ -229,7 +228,7 @@ for item_elt in item_elts: for elt in item_elt.elements(): if elt.name != "field": - log.warning(u"Unexpected tag (%s)" % elt.name) + log.warning("Unexpected tag (%s)" % elt.name) continue field = data_form.Field.fromElement(elt) @@ -303,9 +302,9 @@ # we deepcopy the form because _dataFormField2XMLUIData can modify the value # FIXME: check if it's really important, the only modified value seems to be # the replacement of None by "" on fixed fields - form = deepcopy(result_form) + # form = deepcopy(result_form) form = result_form - for name, field in form.fields.iteritems(): + for name, field in form.fields.items(): try: base_field = base_form.fields[name] except KeyError: @@ -324,8 +323,8 @@ """ # XXX: must be removed when DBus types will no cause problems anymore # FIXME: should be cleaned inside D-Bus bridge itself - if isinstance(value, basestring): - return unicode(value) + if isinstance(value, str): + return str(value) return value @@ -336,12 +335,12 @@ @return: dict of data usable by Wokkel's data form """ ret = {} - for key, value in 
xmlui_data.iteritems(): + for key, value in xmlui_data.items(): if not key.startswith(SAT_FORM_PREFIX): continue - if isinstance(value, basestring) and u'\n' in value: + if isinstance(value, str) and '\n' in value: # data form expects multi-lines text to be in separated values - value = value.split(u'\n') + value = value.split('\n') ret[key[len(SAT_FORM_PREFIX) :]] = _cleanValue(value) return ret @@ -352,12 +351,12 @@ @param name (unicode): form name @return: unicode """ - return u"%s%s" % (SAT_FORM_PREFIX, name) + return "%s%s" % (SAT_FORM_PREFIX, name) def isXMLUICancelled(raw_xmlui): """Tell if an XMLUI has been cancelled by checking raw XML""" - return C.bool(raw_xmlui.get(u'cancelled', C.BOOL_FALSE)) + return C.bool(raw_xmlui.get('cancelled', C.BOOL_FALSE)) def XMLUIResultToElt(xmlui_data): @@ -618,7 +617,7 @@ """ assert isinstance(parent, ListWidget) super(OptionElement, self).__init__(parent.xmlui, parent) - if isinstance(option, basestring): + if isinstance(option, str): value, label = option, option elif isinstance(option, tuple): value, label = option @@ -643,8 +642,8 @@ super(JidElement, self).__init__(parent.xmlui, parent) if isinstance(jid_, jid.JID): value = jid_.full() - elif isinstance(jid_, basestring): - value = unicode(jid_) + elif isinstance(jid_, str): + value = str(jid_) else: raise NotImplementedError jid_txt = self.xmlui.doc.createTextNode(value) @@ -879,7 +878,7 @@ self.elem.setAttribute("name", name) if name in xmlui.named_widgets: raise exceptions.ConflictError( - _(u'A widget with the name "{name}" already exists.').format( + _('A widget with the name "{name}" already exists.').format( name=name ) ) @@ -973,7 +972,7 @@ try: self.elem.setAttribute("value", jid.full()) except AttributeError: - self.elem.setAttribute("value", unicode(jid)) + self.elem.setAttribute("value", str(jid)) class DividerWidget(Widget): @@ -1049,7 +1048,7 @@ if clean: if cleanXHTML is None: raise exceptions.NotFound( - u"No cleaning method set, can't clean the XHTML") + "No cleaning method set, can't clean the XHTML") value = cleanXHTML(value) super(XHTMLBoxWidget, self).__init__( @@ -1114,7 +1113,7 @@ class ListWidget(InputWidget): type = "list" - STYLES = (u"multi", u"noselect", u"extensible", u"reducible", u"inline") + STYLES = ("multi", "noselect", "extensible", "reducible", "inline") def __init__( self, xmlui, options, selected=None, styles=None, name=None, parent=None @@ -1144,11 +1143,11 @@ styles = set() else: styles = set(styles) - if u"noselect" in styles and (u"multi" in styles or selected): + if "noselect" in styles and ("multi" in styles or selected): raise exceptions.DataError( _( - u'"multi" flag and "selected" option are not compatible with ' - u'"noselect" flag' + '"multi" flag and "selected" option are not compatible with ' + '"noselect" flag' ) ) if not options: @@ -1163,18 +1162,18 @@ def addOptions(self, options, selected=None): """Add options to a multi-values element (e.g. 
list) """ if selected: - if isinstance(selected, basestring): + if isinstance(selected, str): selected = [selected] else: selected = [] for option in options: - assert isinstance(option, basestring) or isinstance(option, tuple) - value = option if isinstance(option, basestring) else option[0] + assert isinstance(option, str) or isinstance(option, tuple) + value = option if isinstance(option, str) else option[0] OptionElement(self, option, value in selected) def setStyles(self, styles): if not styles.issubset(self.STYLES): - raise exceptions.DataError(_(u"invalid styles")) + raise exceptions.DataError(_("invalid styles")) for style in styles: self.elem.setAttribute(style, "yes") # TODO: check flags incompatibily (noselect and multi) like in __init__ @@ -1186,9 +1185,9 @@ def value(self): """Return the value of first selected option""" for child in self.elem.childNodes: - if child.tagName == u"option" and child.getAttribute(u"selected") == u"true": - return child.getAttribute(u"value") - return u"" + if child.tagName == "option" and child.getAttribute("selected") == "true": + return child.getAttribute("value") + return "" class JidsListWidget(InputWidget): @@ -1358,7 +1357,7 @@ raise exceptions.DataError(_("Unknown panel type [%s]") % panel_type) if panel_type == C.XMLUI_FORM and submit_id is None: raise exceptions.DataError(_("form XMLUI need a submit_id")) - if not isinstance(container, basestring): + if not isinstance(container, str): raise exceptions.DataError(_("container argument must be a string")) if dialog_opt is not None and panel_type != C.XMLUI_DIALOG: raise exceptions.DataError( @@ -1410,13 +1409,13 @@ # log.debug(u'introspecting XMLUI widgets and containers') cls._containers = {} cls._widgets = {} - for obj in globals().values(): + for obj in list(globals().values()): try: if issubclass(obj, Widget): if obj.__name__ == "Widget": continue cls._widgets[obj.type] = obj - creator_name = u"add" + obj.__name__ + creator_name = "add" + obj.__name__ if creator_name.endswith('Widget'): creator_name = creator_name[:-6] is_input = issubclass(obj, InputWidget) @@ -1525,7 +1524,7 @@ @param container: either container type (container it then created), or an Container instance""" - if isinstance(container, basestring): + if isinstance(container, str): self.current_container = self._createContainer( container, self.current_container.getParentContainer() or self.main_container, @@ -1633,7 +1632,7 @@ return xmlui_d -def deferDialog(host, message, title=u"Please confirm", type_=C.XMLUI_DIALOG_CONFIRM, +def deferDialog(host, message, title="Please confirm", type_=C.XMLUI_DIALOG_CONFIRM, options=None, action_extra=None, security_limit=C.NO_SECURITY_LIMIT, chained=False, profile=C.PROF_KEY_NONE): """Create a submitable dialog and manage it with a deferred @@ -1689,7 +1688,7 @@ def isXHTMLField(field): """Check if a data_form.Field is an XHTML one""" - return (field.fieldType is None and field.ext_type == u"xml" and + return (field.fieldType is None and field.ext_type == "xml" and field.value.uri == C.NS_XHTML) @@ -1705,10 +1704,10 @@ return matchobj.group(0) else: try: - return unichr(htmlentitydefs.name2codepoint[entity]) + return chr(html.entities.name2codepoint[entity]) except KeyError: - log.warning(u"removing unknown entity {}".format(entity)) - return u"" + log.warning("removing unknown entity {}".format(entity)) + return "" def __call__(self, raw_xml, force_spaces=False, namespace=None): """ @@ -1721,9 +1720,9 @@ # we need to wrap element in case # there is not a unique one on the top if 
namespace is not None: - raw_xml = u"<div xmlns='{}'>{}</div>".format(namespace, raw_xml) + raw_xml = "<div xmlns='{}'>{}</div>".format(namespace, raw_xml) else: - raw_xml = u"<div>{}</div>".format(raw_xml) + raw_xml = "<div>{}</div>".format(raw_xml) # avoid ParserError on HTML escaped chars raw_xml = html_entity_re.sub(self._escapeHTML, raw_xml) @@ -1767,7 +1766,7 @@ for child in node.childNodes: if child.nodeType == child.TEXT_NODE: data.append(child.wholeText) - return u"".join(data) + return "".join(data) def findAll(elt, namespaces=None, names=None): @@ -1780,9 +1779,9 @@ None to accept every namespaces @return ((G)domish.Element): found elements """ - if isinstance(namespaces, basestring): + if isinstance(namespaces, str): namespaces = tuple((namespaces,)) - if isinstance(names, basestring): + if isinstance(names, str): names = tuple((names,)) for child in elt.elements():
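Note: htmlentitydefs becomes html.entities and unichr() becomes chr(); generator .next() calls become the next() builtin. A self-contained sketch of the entity-resolution callback (the entity regex here is illustrative, the module defines its own html_entity_re):

    import re
    import html.entities

    entity_re = re.compile(r"&([a-zA-Z]+?);")

    def resolve_entity(match):
        entity = match.group(1)
        try:
            # unichr(htmlentitydefs.name2codepoint[...]) becomes chr(html.entities...)
            return chr(html.entities.name2codepoint[entity])
        except KeyError:
            return match.group(0)

    assert entity_re.sub(resolve_entity, "caf&eacute; &amp; more") == "café & more"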
--- a/sat_frontends/bridge/bridge_frontend.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/bridge/bridge_frontend.py Tue Aug 13 19:08:41 2019 +0200 @@ -29,10 +29,10 @@ @param condition (str) : error condition """ Exception.__init__(self) - self.fullname = unicode(name) - self.message = unicode(message) - self.condition = unicode(condition) if condition else "" - self.module, __, self.classname = unicode(self.fullname).rpartition(".") + self.fullname = str(name) + self.message = str(message) + self.condition = str(condition) if condition else "" + self.module, __, self.classname = str(self.fullname).rpartition(".") def __str__(self): message = (": %s" % self.message) if self.message else ""
--- a/sat_frontends/bridge/dbus_bridge.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/bridge/dbus_bridge.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 #-*- coding: utf-8 -*- # SAT communication bridge @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. from sat.core.i18n import _ -from bridge_frontend import BridgeException +from .bridge_frontend import BridgeException import dbus from sat.core.log import getLogger log = getLogger(__name__) @@ -70,12 +70,12 @@ dbus_interface=const_INT_PREFIX + const_CORE_SUFFIX) self.db_plugin_iface = dbus.Interface(self.db_object, dbus_interface=const_INT_PREFIX + const_PLUGIN_SUFFIX) - except dbus.exceptions.DBusException, e: + except dbus.exceptions.DBusException as e: if e._dbus_error_name in ('org.freedesktop.DBus.Error.ServiceUnknown', 'org.freedesktop.DBus.Error.Spawn.ExecFailed'): errback(BridgeExceptionNoService()) elif e._dbus_error_name == 'org.freedesktop.DBus.Error.NotSupported': - log.error(_(u"D-Bus is not launched, please see README to see instructions on how to launch it")) + log.error(_("D-Bus is not launched, please see README to see instructions on how to launch it")) errback(BridgeInitError) else: errback(e) @@ -102,14 +102,14 @@ # - if we have the 'callback' and 'errback' keyword arguments # - or if the last two arguments are callable - async = False + async_ = False args = list(args) if kwargs: if 'callback' in kwargs: - async = True + async_ = True _callback = kwargs.pop('callback') - _errback = kwargs.pop('errback', lambda failure: log.error(unicode(failure))) + _errback = kwargs.pop('errback', lambda failure: log.error(str(failure))) try: args.append(kwargs.pop('profile')) except KeyError: @@ -119,15 +119,15 @@ pass # at this point, kwargs should be empty if kwargs: - log.warnings(u"unexpected keyword arguments, they will be ignored: {}".format(kwargs)) + log.warnings("unexpected keyword arguments, they will be ignored: {}".format(kwargs)) elif len(args) >= 2 and callable(args[-1]) and callable(args[-2]): - async = True + async_ = True _errback = args.pop() _callback = args.pop() method = getattr(self.db_plugin_iface, name) - if async: + if async_: kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = _callback kwargs['error_handler'] = lambda err: _errback(dbus_to_bridge_exception(err)) @@ -180,7 +180,7 @@ if errback is None: errback = log.error error_handler = lambda err:errback(dbus_to_bridge_exception(err)) - return unicode(self.db_core_iface.asyncGetParamA(name, category, attribute, security_limit, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)) + return str(self.db_core_iface.asyncGetParamA(name, category, attribute, security_limit, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)) def asyncGetParamsValuesFromCategory(self, category, security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None): if callback is None: @@ -209,7 +209,7 @@ error_handler = lambda err:errback(dbus_to_bridge_exception(err)) return self.db_core_iface.delContact(entity_jid, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler) - def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key=u"@DEFAULT@", callback=None, errback=None): + def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, 
local_device=False, profile_key="@DEFAULT@", callback=None, errback=None): if callback is None: error_handler = None else: @@ -218,7 +218,7 @@ error_handler = lambda err:errback(dbus_to_bridge_exception(err)) return self.db_core_iface.discoFindByFeatures(namespaces, identities, bare_jid, service, roster, own_jid, local_device, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler) - def discoInfos(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): + def discoInfos(self, entity_jid, node='', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None): if callback is None: error_handler = None else: @@ -227,7 +227,7 @@ error_handler = lambda err:errback(dbus_to_bridge_exception(err)) return self.db_core_iface.discoInfos(entity_jid, node, use_cache, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler) - def discoItems(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): + def discoItems(self, entity_jid, node='', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None): if callback is None: error_handler = None else: @@ -257,7 +257,7 @@ kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = callback kwargs['error_handler'] = error_handler - return unicode(self.db_core_iface.encryptionNamespaceGet(arg_0, **kwargs)) + return str(self.db_core_iface.encryptionNamespaceGet(arg_0, **kwargs)) def encryptionPluginsGet(self, callback=None, errback=None): if callback is None: @@ -273,14 +273,14 @@ kwargs['error_handler'] = error_handler return self.db_core_iface.encryptionPluginsGet(**kwargs) - def encryptionTrustUIGet(self, namespace, arg_1, profile_key, callback=None, errback=None): + def encryptionTrustUIGet(self, to_jid, namespace, profile_key, callback=None, errback=None): if callback is None: error_handler = None else: if errback is None: errback = log.error error_handler = lambda err:errback(dbus_to_bridge_exception(err)) - return unicode(self.db_core_iface.encryptionTrustUIGet(namespace, arg_1, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)) + return str(self.db_core_iface.encryptionTrustUIGet(to_jid, namespace, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)) def getConfig(self, section, name, callback=None, errback=None): if callback is None: @@ -294,7 +294,7 @@ kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = callback kwargs['error_handler'] = error_handler - return unicode(self.db_core_iface.getConfig(section, name, **kwargs)) + return str(self.db_core_iface.getConfig(section, name, **kwargs)) def getContacts(self, profile_key="@DEFAULT@", callback=None, errback=None): if callback is None: @@ -368,7 +368,7 @@ kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = callback kwargs['error_handler'] = error_handler - return unicode(self.db_core_iface.getMainResource(contact_jid, profile_key, **kwargs)) + return str(self.db_core_iface.getMainResource(contact_jid, profile_key, **kwargs)) def getParamA(self, name, category, attribute="value", profile_key="@DEFAULT@", callback=None, errback=None): if callback is None: @@ -382,7 +382,7 @@ kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = callback kwargs['error_handler'] = error_handler - return unicode(self.db_core_iface.getParamA(name, category, attribute, profile_key, **kwargs)) + return str(self.db_core_iface.getParamA(name, category, attribute, 
profile_key, **kwargs)) def getParamsCategories(self, callback=None, errback=None): if callback is None: @@ -405,7 +405,7 @@ if errback is None: errback = log.error error_handler = lambda err:errback(dbus_to_bridge_exception(err)) - return unicode(self.db_core_iface.getParamsUI(security_limit, app, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)) + return str(self.db_core_iface.getParamsUI(security_limit, app, profile_key, timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler)) def getPresenceStatuses(self, profile_key="@DEFAULT@", callback=None, errback=None): if callback is None: @@ -442,7 +442,7 @@ kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = callback kwargs['error_handler'] = error_handler - return unicode(self.db_core_iface.getVersion(**kwargs)) + return str(self.db_core_iface.getVersion(**kwargs)) def getWaitingSub(self, profile_key="@DEFAULT@", callback=None, errback=None): if callback is None: @@ -516,7 +516,7 @@ kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = callback kwargs['error_handler'] = error_handler - return unicode(self.db_core_iface.menuHelpGet(menu_id, language, **kwargs)) + return str(self.db_core_iface.menuHelpGet(menu_id, language, **kwargs)) def menuLaunch(self, menu_type, path, data, security_limit, profile_key, callback=None, errback=None): if callback is None: @@ -553,7 +553,7 @@ kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = callback kwargs['error_handler'] = error_handler - return unicode(self.db_core_iface.messageEncryptionGet(to_jid, profile_key, **kwargs)) + return str(self.db_core_iface.messageEncryptionGet(to_jid, profile_key, **kwargs)) def messageEncryptionStart(self, to_jid, namespace='', replace=False, profile_key="@NONE@", callback=None, errback=None): if callback is None: @@ -645,7 +645,7 @@ kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = callback kwargs['error_handler'] = error_handler - return unicode(self.db_core_iface.profileNameGet(profile_key, **kwargs)) + return str(self.db_core_iface.profileNameGet(profile_key, **kwargs)) def profileSetDefault(self, profile, callback=None, errback=None): if callback is None:
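Note: besides the "except ... as e" syntax, the local async flag is renamed to async_ because async is a hard keyword starting with Python 3.7. Minimal illustration:

    # "async" can no longer be used as a variable or parameter name in 3.7+,
    # so the port appends an underscore
    def call_method(*args, async_=False, **kwargs):
        return "asynchronous" if async_ else "blocking"

    assert call_method(async_=True) == "asynchronous"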
--- a/sat_frontends/bridge/pb.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/bridge/pb.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT communication bridge @@ -28,7 +28,7 @@ class SignalsHandler(pb.Referenceable): def __getattr__(self, name): if name.startswith("remote_"): - log.debug(u"calling an unregistered signal: {name}".format(name=name[7:])) + log.debug("calling an unregistered signal: {name}".format(name=name[7:])) return lambda *args, **kwargs: None else: @@ -43,7 +43,7 @@ pass else: raise exceptions.InternalError( - u"{name} signal handler has been registered twice".format( + "{name} signal handler has been registered twice".format( name=method_name ) ) @@ -99,7 +99,7 @@ d.addErrback(errback) def _initBridgeEb(self, failure): - log.error(u"Can't init bridge: {msg}".format(msg=failure)) + log.error("Can't init bridge: {msg}".format(msg=failure)) def _set_root(self, root): """set remote root object @@ -112,7 +112,7 @@ return d def _generic_errback(self, failure): - log.error(u"bridge failure: {}".format(failure)) + log.error("bridge failure: {}".format(failure)) def bridgeConnect(self, callback, errback): factory = pb.PBClientFactory() @@ -182,7 +182,7 @@ errback = self._generic_errback d.addErrback(errback) - def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key=u"@DEFAULT@", callback=None, errback=None): + def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key="@DEFAULT@", callback=None, errback=None): d = self.root.callRemote("discoFindByFeatures", namespaces, identities, bare_jid, service, roster, own_jid, local_device, profile_key) if callback is not None: d.addCallback(callback) @@ -190,7 +190,7 @@ errback = self._generic_errback d.addErrback(errback) - def discoInfos(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): + def discoInfos(self, entity_jid, node='', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None): d = self.root.callRemote("discoInfos", entity_jid, node, use_cache, profile_key) if callback is not None: d.addCallback(callback) @@ -198,7 +198,7 @@ errback = self._generic_errback d.addErrback(errback) - def discoItems(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): + def discoItems(self, entity_jid, node='', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None): d = self.root.callRemote("discoItems", entity_jid, node, use_cache, profile_key) if callback is not None: d.addCallback(callback) @@ -230,8 +230,8 @@ errback = self._generic_errback d.addErrback(errback) - def encryptionTrustUIGet(self, namespace, arg_1, profile_key, callback=None, errback=None): - d = self.root.callRemote("encryptionTrustUIGet", namespace, arg_1, profile_key) + def encryptionTrustUIGet(self, to_jid, namespace, profile_key, callback=None, errback=None): + d = self.root.callRemote("encryptionTrustUIGet", to_jid, namespace, profile_key) if callback is not None: d.addCallback(callback) if errback is None:
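Porting note (illustration, not code from the changeset): most of the pb.py changes are u'' prefix removals. In Python 3 every string literal is already text, so the prefix is a no-op; the snippet below uses the same formatting call as SignalsHandler.__getattr__ above, with a made-up signal name:

assert "bridge failure: {}" == u"bridge failure: {}"   # the u prefix changes nothing in Python 3

def describe_unregistered_signal(name):
    # same formatting as the log.debug() call in SignalsHandler.__getattr__,
    # minus the now-redundant u prefix
    return "calling an unregistered signal: {name}".format(name=name[len("remote_"):])

print(describe_unregistered_signal("remote_someSignal"))
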
--- a/sat_frontends/jp/arg_tools.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/arg_tools.py Tue Aug 13 19:08:41 2019 +0200 @@ -28,7 +28,7 @@ """ if smart and not " " in arg and not '"' in arg: return arg - return u'"' + arg.replace(u'"', u'\\"') + u'"' + return '"' + arg.replace('"', '\\"') + '"' def get_cmd_choices(cmd=None, parser=None): @@ -76,23 +76,23 @@ opt_args = [] pos_args = [] actions = {a.dest: a for a in parser._actions} - for arg, value in use.iteritems(): + for arg, value in use.items(): try: - if arg == u"item" and not u"item" in actions: + if arg == "item" and not "item" in actions: # small hack when --item is appended to a --items list - arg = u"items" + arg = "items" action = actions[arg] except KeyError: if verbose: host.disp( _( - u"ignoring {name}={value}, not corresponding to any argument (in USE)" + "ignoring {name}={value}, not corresponding to any argument (in USE)" ).format(name=arg, value=escape(value)) ) else: if verbose: host.disp( - _(u"arg {name}={value} (in USE)").format( + _("arg {name}={value} (in USE)").format( name=arg, value=escape(value) ) )
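Porting note: arg_tools.py keeps its logic and only drops u prefixes and switches iteritems() to items(). The escape() helper shown at the top of the hunk, rewritten below as a standalone runnable Python 3 sketch with a couple of usage checks:

def escape(arg, smart=True):
    """Add double quotes around an argument, only when needed if smart is True."""
    if smart and " " not in arg and '"' not in arg:
        return arg
    return '"' + arg.replace('"', '\\"') + '"'

assert escape("simple") == "simple"
assert escape("two words") == '"two words"'
assert escape('say "hi"') == '"say \\"hi\\""'
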
--- a/sat_frontends/jp/base.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/base.py Tue Aug 13 19:08:41 2019 +0200 @@ -78,7 +78,7 @@ GLib.timeout_add(delay, callback, *args) else: - print u"can't start jp: only D-Bus bridge is currently handled" + print("can't start jp: only D-Bus bridge is currently handled") sys.exit(C.EXIT_ERROR) # FIXME: twisted loop can be used when jp can handle fully async bridges # from twisted.internet import reactor @@ -104,28 +104,20 @@ from sat.core import sat_main sat = sat_main.SAT() -if sys.version_info < (2, 7, 3): - # XXX: shlex.split only handle unicode since python 2.7.3 - # this is a workaround for older versions - old_split = shlex.split - new_split = (lambda s, *a, **kw: [t.decode('utf-8') for t in old_split(s.encode('utf-8'), *a, **kw)] - if isinstance(s, unicode) else old_split(s, *a, **kw)) - shlex.split = new_split - try: import progressbar except ImportError: - msg = (_(u'ProgressBar not available, please download it at http://pypi.python.org/pypi/progressbar\n') + - _(u'Progress bar deactivated\n--\n')) - print >>sys.stderr,msg.encode('utf-8') + msg = (_('ProgressBar not available, please download it at http://pypi.python.org/pypi/progressbar\n') + + _('Progress bar deactivated\n--\n')) + print(msg.encode('utf-8'), file=sys.stderr) progressbar=None #consts -PROG_NAME = u"jp" +PROG_NAME = "jp" DESCRIPTION = """This software is a command line tool for XMPP. Get the latest version at """ + C.APP_URL -COPYLEFT = u"""Copyright (C) 2009-2019 Jérôme Poisson, Adrien Cossa +COPYLEFT = """Copyright (C) 2009-2019 Jérôme Poisson, Adrien Cossa This program comes with ABSOLUTELY NO WARRANTY; This is free software, and you are welcome to redistribute it under certain conditions. """ @@ -133,11 +125,6 @@ PROGRESS_DELAY = 10 # the progression will be checked every PROGRESS_DELAY ms -def unicode_decoder(arg): - # Needed to have unicode strings from arguments - return arg.decode(locale.getpreferredencoding()) - - def date_decoder(arg): return date_utils.date_parse_ext(arg, default_tz=date_utils.TZ_LOCAL) @@ -167,7 +154,7 @@ # loop will always be needed bridge_module = dynamic_import.bridge(bridge_name, 'sat_frontends.bridge') if bridge_module is None: - log.error(u"Can't import {} bridge".format(bridge_name)) + log.error("Can't import {} bridge".format(bridge_name)) sys.exit(1) self.bridge = bridge_module.Bridge() @@ -178,7 +165,7 @@ description=DESCRIPTION) self._make_parents() self.add_parser_options() - self.subparsers = self.parser.add_subparsers(title=_(u'Available commands'), dest='subparser_name') + self.subparsers = self.parser.add_subparsers(title=_('Available commands'), dest='subparser_name') self._auto_loop = False # when loop is used for internal reasons self._need_loop = False @@ -196,11 +183,11 @@ def _bridgeEb(self, failure): if isinstance(failure, exceptions.BridgeExceptionNoService): - print(_(u"Can't connect to SàT backend, are you sure it's launched ?")) + print((_("Can't connect to SàT backend, are you sure it's launched ?"))) elif isinstance(failure, exceptions.BridgeInitError): - print(_(u"Can't init bridge")) + print((_("Can't init bridge"))) else: - print(_(u"Error while initialising bridge: {}".format(failure))) + print((_("Error while initialising bridge: {}".format(failure)))) sys.exit(C.EXIT_BRIDGE_ERROR) @property @@ -264,14 +251,14 @@ if self.verbosity >= verbosity: if error: if no_lf: - print >>sys.stderr,msg.encode('utf-8'), + print(msg, end=' ', file=sys.stderr) else: - print >>sys.stderr,msg.encode('utf-8') + print(msg, 
file=sys.stderr) else: if no_lf: - print msg.encode('utf-8'), + print(msg, end=' ') else: - print msg.encode('utf-8') + print(msg) def output(self, type_, name, extra_outputs, data): if name in extra_outputs: @@ -297,7 +284,7 @@ @param output_type: True for default, else can be any registered type """ - return self._outputs[output_type].keys() + return list(self._outputs[output_type].keys()) def _make_parents(self): self.parents = {} @@ -307,11 +294,11 @@ for parent_name in ('profile', 'profile_session'): parent = self.parents[parent_name] = argparse.ArgumentParser(add_help=False) parent.add_argument("-p", "--profile", action="store", type=str, default='@DEFAULT@', help=_("Use PROFILE profile key (default: %(default)s)")) - parent.add_argument("--pwd", action="store", type=unicode_decoder, default='', metavar='PASSWORD', help=_("Password used to connect profile, if necessary")) + parent.add_argument("--pwd", action="store", default='', metavar='PASSWORD', help=_("Password used to connect profile, if necessary")) profile_parent, profile_session_parent = self.parents['profile'], self.parents['profile_session'] - connect_short, connect_long, connect_action, connect_help = "-c", "--connect", "store_true", _(u"Connect the profile before doing anything else") + connect_short, connect_long, connect_action, connect_help = "-c", "--connect", "store_true", _("Connect the profile before doing anything else") profile_parent.add_argument(connect_short, connect_long, action=connect_action, help=connect_help) profile_session_connect_group = profile_session_parent.add_mutually_exclusive_group() @@ -323,12 +310,12 @@ progress_parent.add_argument("-P", "--progress", action="store_true", help=_("Show progress bar")) verbose_parent = self.parents['verbose'] = argparse.ArgumentParser(add_help=False) - verbose_parent.add_argument('--verbose', '-v', action='count', default=0, help=_(u"Add a verbosity level (can be used multiple times)")) + verbose_parent.add_argument('--verbose', '-v', action='count', default=0, help=_("Add a verbosity level (can be used multiple times)")) draft_parent = self.parents['draft'] = argparse.ArgumentParser(add_help=False) draft_group = draft_parent.add_argument_group(_('draft handling')) - draft_group.add_argument("-D", "--current", action="store_true", help=_(u"load current draft")) - draft_group.add_argument("-F", "--draft-path", type=unicode_decoder, help=_(u"path to a draft file to retrieve")) + draft_group.add_argument("-D", "--current", action="store_true", help=_("load current draft")) + draft_group.add_argument("-F", "--draft-path", help=_("path to a draft file to retrieve")) def make_pubsub_group(self, flags, defaults): @@ -343,71 +330,71 @@ flags = misc.FlagsHandler(flags) parent = argparse.ArgumentParser(add_help=False) pubsub_group = parent.add_argument_group('pubsub') - pubsub_group.add_argument("-u", "--pubsub-url", type=unicode_decoder, - help=_(u"Pubsub URL (xmpp or http)")) + pubsub_group.add_argument("-u", "--pubsub-url", + help=_("Pubsub URL (xmpp or http)")) - service_help = _(u"JID of the PubSub service") + service_help = _("JID of the PubSub service") if not flags.service: - default = defaults.pop(u'service', _(u'PEP service')) + default = defaults.pop('service', _('PEP service')) if default is not None: - service_help += _(u" (DEFAULT: {default})".format(default=default)) - pubsub_group.add_argument("-s", "--service", type=unicode_decoder, default=u'', + service_help += _(" (DEFAULT: {default})".format(default=default)) + pubsub_group.add_argument("-s", 
"--service", default='', help=service_help) - node_help = _(u"node to request") + node_help = _("node to request") if not flags.node: - default = defaults.pop(u'node', _(u'standard node')) + default = defaults.pop('node', _('standard node')) if default is not None: - node_help += _(u" (DEFAULT: {default})".format(default=default)) - pubsub_group.add_argument("-n", "--node", type=unicode_decoder, default=u'', help=node_help) + node_help += _(" (DEFAULT: {default})".format(default=default)) + pubsub_group.add_argument("-n", "--node", default='', help=node_help) if flags.single_item: - item_help = (u"item to retrieve") + item_help = ("item to retrieve") if not flags.item: - default = defaults.pop(u'item', _(u'last item')) + default = defaults.pop('item', _('last item')) if default is not None: - item_help += _(u" (DEFAULT: {default})".format(default=default)) - pubsub_group.add_argument("-i", "--item", type=unicode_decoder, default=u'', + item_help += _(" (DEFAULT: {default})".format(default=default)) + pubsub_group.add_argument("-i", "--item", default='', help=item_help) - pubsub_group.add_argument("-L", "--last-item", action='store_true', help=_(u'retrieve last item')) + pubsub_group.add_argument("-L", "--last-item", action='store_true', help=_('retrieve last item')) elif flags.multi_items: # mutiple items, this activate several features: max-items, RSM, MAM # and Orbder-by - pubsub_group.add_argument("-i", "--item", type=unicode_decoder, action='append', dest='items', default=[], help=_(u"items to retrieve (DEFAULT: all)")) + pubsub_group.add_argument("-i", "--item", action='append', dest='items', default=[], help=_("items to retrieve (DEFAULT: all)")) if not flags.no_max: max_group = pubsub_group.add_mutually_exclusive_group() # XXX: defaut value for --max-items or --max is set in parse_pubsub_args max_group.add_argument( "-M", "--max-items", dest="max", type=int, - help=_(u"maximum number of items to get ({no_limit} to get all items)" + help=_("maximum number of items to get ({no_limit} to get all items)" .format(no_limit=C.NO_LIMIT))) # FIXME: it could be possible to no duplicate max (between pubsub # max-items and RSM max)should not be duplicated, RSM could be # used when available and pubsub max otherwise max_group.add_argument( "-m", "--max", dest="rsm_max", type=int, - help=_(u"maximum number of items to get per page (DEFAULT: 10)")) + help=_("maximum number of items to get per page (DEFAULT: 10)")) # RSM rsm_page_group = pubsub_group.add_mutually_exclusive_group() rsm_page_group.add_argument( - "-a", "--after", dest="rsm_after", type=unicode_decoder, - help=_(u"find page after this item"), metavar='ITEM_ID') + "-a", "--after", dest="rsm_after", + help=_("find page after this item"), metavar='ITEM_ID') rsm_page_group.add_argument( - "-b", "--before", dest="rsm_before", type=unicode_decoder, - help=_(u"find page before this item"), metavar='ITEM_ID') + "-b", "--before", dest="rsm_before", + help=_("find page before this item"), metavar='ITEM_ID') rsm_page_group.add_argument( "--index", dest="rsm_index", type=int, - help=_(u"index of the page to retrieve")) + help=_("index of the page to retrieve")) # MAM pubsub_group.add_argument( - "-f", "--filter", dest='mam_filters', type=unicode_decoder, nargs=2, - action='append', default=[], help=_(u"MAM filters to use"), - metavar=(u"FILTER_NAME", u"VALUE") + "-f", "--filter", dest='mam_filters', nargs=2, + action='append', default=[], help=_("MAM filters to use"), + metavar=("FILTER_NAME", "VALUE") ) # Order-By @@ -419,10 +406,10 @@ 
pubsub_group.add_argument( "-o", "--order-by", choices=[C.ORDER_BY_CREATION, C.ORDER_BY_MODIFICATION], - help=_(u"how items should be ordered")) + help=_("how items should be ordered")) if not flags.all_used: - raise exceptions.InternalError('unknown flags: {flags}'.format(flags=u', '.join(flags.unused))) + raise exceptions.InternalError('unknown flags: {flags}'.format(flags=', '.join(flags.unused))) if defaults: raise exceptions.InternalError('unused defaults: {defaults}'.format(defaults=defaults)) @@ -433,14 +420,14 @@ def register_output(self, type_, name, callback, description="", default=False): if type_ not in C.OUTPUT_TYPES: - log.error(u"Invalid output type {}".format(type_)) + log.error("Invalid output type {}".format(type_)) return self._outputs[type_][name] = {'callback': callback, 'description': description } if default: if type_ in self.default_output: - self.disp(_(u'there is already a default output for {}, ignoring new one').format(type_)) + self.disp(_('there is already a default output for {}, ignoring new one').format(type_)) else: self.default_output[type_] = name @@ -450,7 +437,7 @@ options_dict = {} for option in options: try: - key, value = option.split(u'=', 1) + key, value = option.split('=', 1) except ValueError: key, value = option, None options_dict[key.strip()] = value.strip() if value is not None else None @@ -458,8 +445,8 @@ def check_output_options(self, accepted_set, options): if not accepted_set.issuperset(options): - self.disp(u"The following output options are invalid: {invalid_options}".format( - invalid_options = u', '.join(set(options).difference(accepted_set))), + self.disp("The following output options are invalid: {invalid_options}".format( + invalid_options = ', '.join(set(options).difference(accepted_set))), error=True) self.quit(C.EXIT_BAD_ARG) @@ -478,13 +465,13 @@ module = import_module(module_path) self.import_plugin_module(module, type_) except ImportError as e: - self.disp(_(u"Can't import {module_path} plugin, ignoring it: {msg}".format( + self.disp(_("Can't import {module_path} plugin, ignoring it: {msg}".format( module_path = module_path, msg = e)), error=True) except exceptions.CancelError: continue except exceptions.MissingModule as e: - self.disp(_(u"Missing module for plugin {name}: {missing}".format( + self.disp(_("Missing module for plugin {name}: {missing}".format( name = module_path, missing = e)), error=True) @@ -498,7 +485,7 @@ try: class_names = getattr(module, '__{}__'.format(type_)) except AttributeError: - log.disp(_(u"Invalid plugin module [{type}] {module}").format(type=type_, module=module), error=True) + log.disp(_("Invalid plugin module [{type}] {module}").format(type=type_, module=module), error=True) raise ImportError else: for class_name in class_names: @@ -508,29 +495,29 @@ def get_xmpp_uri_from_http(self, http_url): """parse HTML page at http(s) URL, and looks for xmpp: uri""" if http_url.startswith('https'): - scheme = u'https' + scheme = 'https' elif http_url.startswith('http'): - scheme = u'http' + scheme = 'http' else: - raise exceptions.InternalError(u'An HTTP scheme is expected in this method') - self.disp(u"{scheme} URL found, trying to find associated xmpp: URI".format(scheme=scheme.upper()),1) + raise exceptions.InternalError('An HTTP scheme is expected in this method') + self.disp("{scheme} URL found, trying to find associated xmpp: URI".format(scheme=scheme.upper()),1) # HTTP URL, we try to find xmpp: links try: from lxml import etree except ImportError: - self.disp(u"lxml module must be installed to 
use http(s) scheme, please install it with \"pip install lxml\"", error=True) + self.disp("lxml module must be installed to use http(s) scheme, please install it with \"pip install lxml\"", error=True) self.quit(1) - import urllib2 + import urllib.request, urllib.error, urllib.parse parser = etree.HTMLParser() try: - root = etree.parse(urllib2.urlopen(http_url), parser) + root = etree.parse(urllib.request.urlopen(http_url), parser) except etree.XMLSyntaxError as e: - self.disp(_(u"Can't parse HTML page : {msg}").format(msg=e)) + self.disp(_("Can't parse HTML page : {msg}").format(msg=e)) links = [] else: links = root.xpath("//link[@rel='alternate' and starts-with(@href, 'xmpp:')]") if not links: - self.disp(u'Could not find alternate "xmpp:" URI, can\'t find associated XMPP PubSub node/item', error=True) + self.disp('Could not find alternate "xmpp:" URI, can\'t find associated XMPP PubSub node/item', error=True) self.quit(1) xmpp_uri = links[0].get('href') return xmpp_uri @@ -546,15 +533,15 @@ try: uri_data = uri.parseXMPPUri(url) except ValueError: - self.parser.error(_(u'invalid XMPP URL: {url}').format(url=url)) + self.parser.error(_('invalid XMPP URL: {url}').format(url=url)) else: - if uri_data[u'type'] == 'pubsub': + if uri_data['type'] == 'pubsub': # URL is alright, we only set data not already set by other options if not self.args.service: - self.args.service = uri_data[u'path'] + self.args.service = uri_data['path'] if not self.args.node: - self.args.node = uri_data[u'node'] - uri_item = uri_data.get(u'item') + self.args.node = uri_data['node'] + uri_item = uri_data.get('item') if uri_item: # there is an item in URI # we use it only if item is not already set @@ -565,7 +552,7 @@ try: items = self.args.items except AttributeError: - self.disp(_(u"item specified in URL but not needed in command, ignoring it"), error=True) + self.disp(_("item specified in URL but not needed in command, ignoring it"), error=True) else: if not items: self.args.items = [uri_item] @@ -578,22 +565,22 @@ if not item_last: self.args.item = uri_item else: - self.parser.error(_(u'XMPP URL is not a pubsub one: {url}').format(url=url)) + self.parser.error(_('XMPP URL is not a pubsub one: {url}').format(url=url)) flags = self.args._cmd._pubsub_flags # we check required arguments here instead of using add_arguments' required option # because the required argument can be set in URL if C.SERVICE in flags and not self.args.service: - self.parser.error(_(u"argument -s/--service is required")) + self.parser.error(_("argument -s/--service is required")) if C.NODE in flags and not self.args.node: - self.parser.error(_(u"argument -n/--node is required")) + self.parser.error(_("argument -n/--node is required")) if C.ITEM in flags and not self.args.item: - self.parser.error(_(u"argument -i/--item is required")) + self.parser.error(_("argument -i/--item is required")) # FIXME: mutually groups can't be nested in a group and don't support title # so we check conflict here. 
This may be fixed in Python 3, to be checked try: if self.args.item and self.args.item_last: - self.parser.error(_(u"--item and --item-last can't be used at the same time")) + self.parser.error(_("--item and --item-last can't be used at the same time")) except AttributeError: pass @@ -639,10 +626,10 @@ except AttributeError: pass - def confirmOrQuit(self, message, cancel_message=_(u"action cancelled by user")): + def confirmOrQuit(self, message, cancel_message=_("action cancelled by user")): """Request user to confirm action, and quit if he doesn't""" - res = raw_input("{} (y/N)? ".format(message)) + res = input("{} (y/N)? ".format(message)) if res not in ("y", "Y"): self.disp(cancel_message) self.quit(C.EXIT_USER_CANCELLED) @@ -728,11 +715,11 @@ # FIXME: need better exit codes def cant_connect(failure): - log.error(_(u"Can't connect profile: {reason}").format(reason=failure)) + log.error(_("Can't connect profile: {reason}").format(reason=failure)) self.quit(1) def cant_start_session(failure): - log.error(_(u"Can't start {profile}'s session: {reason}").format(profile=self.profile, reason=failure)) + log.error(_("Can't start {profile}'s session: {reason}").format(profile=self.profile, reason=failure)) self.quit(1) self.profile = self.bridge.profileNameGet(self.args.profile) @@ -752,7 +739,7 @@ return elif not self.bridge.profileIsSessionStarted(self.profile): if not self.args.connect: - log.error(_(u"Session for [{profile}] is not started, please start it before using jp, or use either --start-session or --connect option").format(profile=self.profile)) + log.error(_("Session for [{profile}] is not started, please start it before using jp, or use either --start-session or --connect option").format(profile=self.profile)) self.quit(1) elif not getattr(self.args, "connect", False): callback() @@ -768,7 +755,7 @@ return else: if not self.bridge.isConnected(self.profile): - log.error(_(u"Profile [{profile}] is not connected, please connect it before using jp, or use --connect option").format(profile=self.profile)) + log.error(_("Profile [{profile}] is not connected, please connect it before using jp, or use --connect option").format(profile=self.profile)) self.quit(1) callback() @@ -855,14 +842,14 @@ try: default = self.host.default_output[use_output] except KeyError: - if u'default' in choices: - default = u'default' - elif u'simple' in choices: - default = u'simple' + if 'default' in choices: + default = 'default' + elif 'simple' in choices: + default = 'simple' else: default = list(choices)[0] - output_parent.add_argument('--output', '-O', choices=sorted(choices), default=default, help=_(u"select output format (default: {})".format(default))) - output_parent.add_argument('--output-option', '--oo', type=unicode_decoder, action="append", dest='output_opts', default=[], help=_(u"output specific option")) + output_parent.add_argument('--output', '-O', choices=sorted(choices), default=default, help=_("select output format (default: {})".format(default))) + output_parent.add_argument('--output-option', '--oo', action="append", dest='output_opts', default=[], help=_("output specific option")) parents.add(output_parent) else: assert extra_outputs is None @@ -875,11 +862,11 @@ self._pubsub_flags = flags # other common options - use_opts = {k:v for k,v in kwargs.iteritems() if k.startswith('use_')} - for param, do_use in use_opts.iteritems(): + use_opts = {k:v for k,v in kwargs.items() if k.startswith('use_')} + for param, do_use in use_opts.items(): opt=param[4:] # if param is use_verbose, opt is 
verbose if opt not in self.host.parents: - raise exceptions.InternalError(u"Unknown parent option {}".format(opt)) + raise exceptions.InternalError("Unknown parent option {}".format(opt)) del kwargs[param] if do_use: parents.add(self.host.parents[opt]) @@ -957,12 +944,12 @@ try: size = data['size'] except KeyError: - self.disp(_(u"file size is not known, we can't show a progress bar"), 1, error=True) + self.disp(_("file size is not known, we can't show a progress bar"), 1, error=True) return False if self.host.pbar is None: #first answer, we must construct the bar self.host.pbar = progressbar.ProgressBar(max_value=int(size), - widgets=[_(u"Progress: "),progressbar.Percentage(), + widgets=[_("Progress: "),progressbar.Percentage(), " ", progressbar.Bar(), " ", @@ -986,7 +973,7 @@ can be overidden by a command @param metadata(dict): metadata as sent by bridge.progressStarted """ - self.disp(_(u"Operation started"), 2) + self.disp(_("Operation started"), 2) def onProgressUpdate(self, metadata): """Method called on each progress updata @@ -1002,14 +989,14 @@ can be overidden by a command @param metadata(dict): metadata as sent by bridge.progressFinished """ - self.disp(_(u"Operation successfully finished"), 2) + self.disp(_("Operation successfully finished"), 2) def onProgressError(self, error_msg): """Called when a progress failed @param error_msg(unicode): error message as sent by bridge.progressError """ - self.disp(_(u"Error while doing operation: {}").format(error_msg), error=True) + self.disp(_("Error while doing operation: {}").format(error_msg), error=True) def disp(self, msg, verbosity=0, error=False, no_lf=False): return self.host.disp(msg, verbosity, error, no_lf) @@ -1018,7 +1005,7 @@ try: output_type = self._output_type except AttributeError: - raise exceptions.InternalError(_(u'trying to use output when use_output has not been set')) + raise exceptions.InternalError(_('trying to use output when use_output has not been set')) return self.host.output(output_type, self.args.output, self.extra_outputs, data) def exitCb(self, msg=None): @@ -1041,7 +1028,7 @@ @param exit_code(int): shell exit code """ if msg is None: - msg = _(u"error: {}") + msg = _("error: {}") self.disp(msg.format(failure_), error=True) self.host.quit(exit_code) @@ -1054,31 +1041,31 @@ if extra is None: extra = {} else: - intersection = {C.KEY_ORDER_BY}.intersection(extra.keys()) + intersection = {C.KEY_ORDER_BY}.intersection(list(extra.keys())) if intersection: raise exceptions.ConflictError( - u"given extra dict has conflicting keys with pubsub keys " - u"{intersection}".format(intersection=intersection)) + "given extra dict has conflicting keys with pubsub keys " + "{intersection}".format(intersection=intersection)) # RSM - for attribute in (u'max', u'after', u'before', 'index'): - key = u'rsm_' + attribute + for attribute in ('max', 'after', 'before', 'index'): + key = 'rsm_' + attribute if key in extra: raise exceptions.ConflictError( - u"This key already exists in extra: u{key}".format(key=key)) + "This key already exists in extra: u{key}".format(key=key)) value = getattr(self.args, key, None) if value is not None: - extra[key] = unicode(value) + extra[key] = str(value) # MAM - if hasattr(self.args, u'mam_filters'): + if hasattr(self.args, 'mam_filters'): for key, value in self.args.mam_filters: - key = u'filter_' + key + key = 'filter_' + key if key in extra: raise exceptions.ConflictError( - u"This key already exists in extra: u{key}".format(key=key)) + "This key already exists in extra: 
u{key}".format(key=key)) extra[key] = value # Order-By
--- a/sat_frontends/jp/cmd_account.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_account.py Tue Aug 13 19:08:41 2019 +0200 @@ -37,45 +37,42 @@ "create", use_profile=False, use_verbose=True, - help=_(u"create a XMPP account"), + help=_("create a XMPP account"), ) self.need_loop = True def add_parser_options(self): self.parser.add_argument( - "jid", type=base.unicode_decoder, help=_(u"jid to create") + "jid", help=_("jid to create") ) self.parser.add_argument( - "password", type=base.unicode_decoder, help=_(u"password of the account") + "password", help=_("password of the account") ) self.parser.add_argument( "-p", "--profile", - type=base.unicode_decoder, help=_( - u"create a profile to use this account (default: don't create profile)" + "create a profile to use this account (default: don't create profile)" ), ) self.parser.add_argument( "-e", "--email", - type=base.unicode_decoder, default="", - help=_(u"email (usage depends of XMPP server)"), + help=_("email (usage depends of XMPP server)"), ) self.parser.add_argument( "-H", "--host", - type=base.unicode_decoder, default="", - help=_(u"server host (IP address or domain, default: use localhost)"), + help=_("server host (IP address or domain, default: use localhost)"), ) self.parser.add_argument( "-P", "--port", type=int, default=0, - help=_(u"server port (IP address or domain, default: use localhost)"), + help=_("server port (IP address or domain, default: use localhost)"), ) def _setParamCb(self): @@ -99,7 +96,7 @@ ) def _profileCreateCb(self): - self.disp(_(u"profile created"), 1) + self.disp(_("profile created"), 1) self.host.bridge.profileStartSession( self.args.password, self.args.profile, @@ -110,16 +107,16 @@ def _profileCreateEb(self, failure_): self.disp( _( - u"Can't create profile {profile} to associate with jid {jid}: {msg}" + "Can't create profile {profile} to associate with jid {jid}: {msg}" ).format(profile=self.args.profile, jid=self.args.jid, msg=failure_), error=True, ) self.host.quit(C.EXIT_BRIDGE_ERRBACK) def accountNewCb(self): - self.disp(_(u"XMPP account created"), 1) + self.disp(_("XMPP account created"), 1) if self.args.profile is not None: - self.disp(_(u"creating profile"), 2) + self.disp(_("creating profile"), 2) self.host.bridge.profileCreate( self.args.profile, self.args.password, @@ -132,8 +129,8 @@ def accountNewEb(self, failure_): self.disp( - _(u"Can't create new account on server {host} with jid {jid}: {msg}").format( - host=self.args.host or u"localhost", jid=self.args.jid, msg=failure_ + _("Can't create new account on server {host} with jid {jid}: {msg}").format( + host=self.args.host or "localhost", jid=self.args.jid, msg=failure_ ), error=True, ) @@ -154,13 +151,13 @@ class AccountModify(base.CommandBase): def __init__(self, host): super(AccountModify, self).__init__( - host, "modify", help=_(u"change password for XMPP account") + host, "modify", help=_("change password for XMPP account") ) self.need_loop = True def add_parser_options(self): self.parser.add_argument( - "password", type=base.unicode_decoder, help=_(u"new XMPP password") + "password", help=_("new XMPP password") ) def start(self): @@ -175,7 +172,7 @@ class AccountDelete(base.CommandBase): def __init__(self, host): super(AccountDelete, self).__init__( - host, "delete", help=_(u"delete a XMPP account") + host, "delete", help=_("delete a XMPP account") ) self.need_loop = True @@ -184,22 +181,22 @@ "-f", "--force", action="store_true", - help=_(u"delete account without confirmation"), + help=_("delete account without 
confirmation"), ) def _got_jid(self, jid_str): jid_ = jid.JID(jid_str) if not self.args.force: message = ( - u"You are about to delete the XMPP account with jid {jid_}\n" - u'This is the XMPP account of profile "{profile}"\n' - u"Are you sure that you want to delete this account ?".format( + "You are about to delete the XMPP account with jid {jid_}\n" + 'This is the XMPP account of profile "{profile}"\n' + "Are you sure that you want to delete this account ?".format( jid_=jid_, profile=self.profile ) ) - res = raw_input("{} (y/N)? ".format(message)) + res = input("{} (y/N)? ".format(message)) if res not in ("y", "Y"): - self.disp(_(u"Account deletion cancelled")) + self.disp(_("Account deletion cancelled")) self.host.quit(2) self.host.bridge.inBandUnregister( jid_.domain, self.args.profile, callback=self.host.quit, errback=self.errback @@ -220,5 +217,5 @@ def __init__(self, host): super(Account, self).__init__( - host, "account", use_profile=False, help=(u"XMPP account management") + host, "account", use_profile=False, help=("XMPP account management") )
--- a/sat_frontends/jp/cmd_adhoc.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_adhoc.py Tue Aug 13 19:08:41 2019 +0200 @@ -17,7 +17,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from functools import partial from sat_frontends.jp.constants import Const as C @@ -32,43 +32,39 @@ class Remote(base.CommandBase): def __init__(self, host): super(Remote, self).__init__( - host, "remote", use_verbose=True, help=_(u"remote control a software") + host, "remote", use_verbose=True, help=_("remote control a software") ) def add_parser_options(self): - self.parser.add_argument("software", type=str, help=_(u"software name")) + self.parser.add_argument("software", type=str, help=_("software name")) self.parser.add_argument( "-j", "--jids", - type=base.unicode_decoder, nargs="*", default=[], - help=_(u"jids allowed to use the command"), + help=_("jids allowed to use the command"), ) self.parser.add_argument( "-g", "--groups", - type=base.unicode_decoder, nargs="*", default=[], - help=_(u"groups allowed to use the command"), + help=_("groups allowed to use the command"), ) self.parser.add_argument( "--forbidden-groups", - type=base.unicode_decoder, nargs="*", default=[], - help=_(u"groups that are *NOT* allowed to use the command"), + help=_("groups that are *NOT* allowed to use the command"), ) self.parser.add_argument( "--forbidden-jids", - type=base.unicode_decoder, nargs="*", default=[], - help=_(u"jids that are *NOT* allowed to use the command"), + help=_("jids that are *NOT* allowed to use the command"), ) self.parser.add_argument( - "-l", "--loop", action="store_true", help=_(u"loop on the commands") + "-l", "--loop", action="store_true", help=_("loop on the commands") ) def start(self): @@ -109,7 +105,7 @@ def __init__(self, host): super(Run, self).__init__( - host, "run", use_verbose=True, help=_(u"run an Ad-Hoc command") + host, "run", use_verbose=True, help=_("run an Ad-Hoc command") ) self.need_loop = True @@ -117,9 +113,8 @@ self.parser.add_argument( "-j", "--jid", - type=base.unicode_decoder, - default=u"", - help=_(u"jid of the service (default: profile's server"), + default="", + help=_("jid of the service (default: profile's server"), ) self.parser.add_argument( "-S", @@ -127,24 +122,22 @@ action="append_const", const=xmlui_manager.SUBMIT, dest="workflow", - help=_(u"submit form/page"), + help=_("submit form/page"), ) self.parser.add_argument( "-f", "--field", - type=base.unicode_decoder, action="append", nargs=2, dest="workflow", - metavar=(u"KEY", u"VALUE"), - help=_(u"field value"), + metavar=("KEY", "VALUE"), + help=_("field value"), ) self.parser.add_argument( "node", - type=base.unicode_decoder, nargs="?", - default=u"", - help=_(u"node of the command (default: list commands)"), + default="", + help=_("node of the command (default: list commands)"), ) def adHocRunCb(self, xmlui_raw): @@ -165,7 +158,7 @@ callback=self.adHocRunCb, errback=partial( self.errback, - msg=_(u"can't get ad-hoc commands list: {}"), + msg=_("can't get ad-hoc commands list: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -176,7 +169,7 @@ def __init__(self, host): super(List, self).__init__( - host, "list", use_verbose=True, help=_(u"list Ad-Hoc commands of a service") + host, "list", use_verbose=True, help=_("list Ad-Hoc commands of a service") ) self.need_loop = True @@ -184,9 +177,8 @@ self.parser.add_argument( "-j", "--jid", - 
type=base.unicode_decoder, - default=u"", - help=_(u"jid of the service (default: profile's server"), + default="", + help=_("jid of the service (default: profile's server"), ) def adHocListCb(self, xmlui_raw): @@ -202,7 +194,7 @@ callback=self.adHocListCb, errback=partial( self.errback, - msg=_(u"can't get ad-hoc commands list: {}"), + msg=_("can't get ad-hoc commands list: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), )
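Porting note: cmd_adhoc.py (like the other jp command modules below) replaces the implicit relative "import base" with the explicit "from . import base", the only relative-import form Python 3 accepts. The throw-away package built below demonstrates the point; jp_demo and cmd_demo are hypothetical names, not the real sat_frontends.jp modules:

import os
import sys
import tempfile
import textwrap

# build a tiny package mirroring the jp layout (package/__init__.py, base.py, command module)
pkg_root = tempfile.mkdtemp()
pkg_dir = os.path.join(pkg_root, "jp_demo")            # hypothetical package name
os.mkdir(pkg_dir)
open(os.path.join(pkg_dir, "__init__.py"), "w").close()
with open(os.path.join(pkg_dir, "base.py"), "w") as f:
    f.write("PROG_NAME = 'jp'\n")
with open(os.path.join(pkg_dir, "cmd_demo.py"), "w") as f:
    f.write(textwrap.dedent("""\
        from . import base        # Python 3 spelling; bare "import base" would fail here
        def prog():
            return base.PROG_NAME
    """))

sys.path.insert(0, pkg_root)
from jp_demo import cmd_demo
assert cmd_demo.prog() == "jp"
print("explicit relative import works:", cmd_demo.prog())
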
--- a/sat_frontends/jp/cmd_avatar.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_avatar.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base import os import os.path from sat.core.i18n import _ @@ -47,7 +47,7 @@ """Send files to jabber contact""" path = self.args.image_path if not os.path.exists(path): - self.disp(_(u"file [{}] doesn't exist !").format(path), error=True) + self.disp(_("file [{}] doesn't exist !").format(path), error=True) self.host.quit(1) path = os.path.abspath(path) self.host.bridge.avatarSet( @@ -73,9 +73,9 @@ self.need_loop = True def add_parser_options(self): - self.parser.add_argument("jid", type=base.unicode_decoder, help=_("entity")) + self.parser.add_argument("jid", help=_("entity")) self.parser.add_argument( - "-s", "--show", action="store_true", help=_(u"show avatar") + "-s", "--show", action="store_true", help=_("show avatar") ) def showImage(self, path): @@ -100,7 +100,7 @@ def _avatarGetCb(self, avatar_path): if not avatar_path: - self.disp(_(u"No avatar found."), 1) + self.disp(_("No avatar found."), 1) self.host.quit(C.EXIT_NOT_FOUND) self.disp(avatar_path)
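Porting note: cmd_avatar.py only loses u prefixes and gains the explicit relative import; the existence check done before calling avatarSet is unchanged. A tiny runnable sketch of that check, with the error text taken from the hunk and exit handling simplified:

import os
import sys

def check_image_path(path):
    if not os.path.exists(path):
        print("file [{}] doesn't exist !".format(path), file=sys.stderr)
        sys.exit(1)
    return os.path.abspath(path)

# use this very file as a stand-in for an avatar image path
print(check_image_path(__file__))
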
--- a/sat_frontends/jp/cmd_blog.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_blog.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat_frontends.jp.constants import Const as C from sat_frontends.jp import common @@ -26,7 +26,7 @@ from sat.tools.common import data_objects from sat.tools.common import uri from sat.tools import config -from ConfigParser import NoSectionError, NoOptionError +from configparser import NoSectionError, NoOptionError from functools import partial import json import sys @@ -40,7 +40,7 @@ __commands__ = ["Blog"] -SYNTAX_XHTML = u"xhtml" +SYNTAX_XHTML = "xhtml" # extensions to use with known syntaxes SYNTAX_EXT = { # FIXME: default syntax doesn't sounds needed, there should always be a syntax set @@ -51,8 +51,8 @@ } -CONF_SYNTAX_EXT = u"syntax_ext_dict" -BLOG_TMP_DIR = u"blog" +CONF_SYNTAX_EXT = "syntax_ext_dict" +BLOG_TMP_DIR = "blog" # key to remove from metadata tmp file if they exist KEY_TO_REMOVE_METADATA = ( "id", @@ -66,24 +66,24 @@ URL_REDIRECT_PREFIX = "url_redirect_" INOTIFY_INSTALL = '"pip install inotify"' MB_KEYS = ( - u"id", - u"url", - u"atom_id", - u"updated", - u"published", - u"language", - u"comments", # this key is used for all comments* keys - u"tags", # this key is used for all tag* keys - u"author", - u"author_jid", - u"author_email", - u"author_jid_verified", - u"content", - u"content_xhtml", - u"title", - u"title_xhtml", + "id", + "url", + "atom_id", + "updated", + "published", + "language", + "comments", # this key is used for all comments* keys + "tags", # this key is used for all tag* keys + "author", + "author_jid", + "author_email", + "author_jid_verified", + "content", + "content_xhtml", + "title", + "title_xhtml", ) -OUTPUT_OPT_NO_HEADER = u"no-header" +OUTPUT_OPT_NO_HEADER = "no-header" def guessSyntaxFromPath(host, sat_conf, path): @@ -96,7 +96,7 @@ # we first try to guess syntax with extension ext = os.path.splitext(path)[1][1:] # we get extension without the '.' 
if ext: - for k, v in SYNTAX_EXT.iteritems(): + for k, v in SYNTAX_EXT.items(): if k and ext == v: return k @@ -117,32 +117,30 @@ def add_parser_options(self): self.parser.add_argument( - "-T", "--title", type=base.unicode_decoder, help=_(u"title of the item") + "-T", "--title", help=_("title of the item") ) self.parser.add_argument( "-t", "--tag", - type=base.unicode_decoder, action="append", - help=_(u"tag (category) of your item"), + help=_("tag (category) of your item"), ) comments_group = self.parser.add_mutually_exclusive_group() comments_group.add_argument( "-C", "--comments", action="store_const", const=True, dest="comments", - help=_(u"enable comments (default: comments not enabled except if they " - u"already exist)") + help=_("enable comments (default: comments not enabled except if they " + "already exist)") ) comments_group.add_argument( "--no-comments", action="store_const", const=False, dest="comments", - help=_(u"disable comments (will remove comments node if it exist)") + help=_("disable comments (will remove comments node if it exist)") ) self.parser.add_argument( "-S", "--syntax", - type=base.unicode_decoder, - help=_(u"syntax to use (default: get profile's default syntax)"), + help=_("syntax to use (default: get profile's default syntax)"), ) def setMbDataContent(self, content, mb_data): @@ -164,7 +162,7 @@ if self.args.comments is not None: mb_data["allow_comments"] = self.args.comments if self.args.tag: - mb_data[u'tags'] = self.args.tag + mb_data['tags'] = self.args.tag if self.args.title is not None: mb_data["title"] = self.args.title @@ -177,7 +175,7 @@ "set", use_pubsub=True, pubsub_flags={C.SINGLE_ITEM}, - help=_(u"publish a new blog item or update an existing one"), + help=_("publish a new blog item or update an existing one"), ) BlogPublishCommon.__init__(self) self.need_loop = True @@ -186,7 +184,7 @@ BlogPublishCommon.add_parser_options(self) def mbSendCb(self): - self.disp(u"Item published") + self.disp("Item published") self.host.quit(C.EXIT_OK) def start(self): @@ -207,14 +205,14 @@ callback=self.exitCb, errback=partial( self.errback, - msg=_(u"can't send item: {}"), + msg=_("can't send item: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) class Get(base.CommandBase): - TEMPLATE = u"blog/articles.html" + TEMPLATE = "blog/articles.html" def __init__(self, host): extra_outputs = {"default": self.default_output, "fancy": self.fancy_output} @@ -227,7 +225,7 @@ pubsub_flags={C.MULTI_ITEMS}, use_output=C.OUTPUT_COMPLEX, extra_outputs=extra_outputs, - help=_(u"get blog item(s)"), + help=_("get blog item(s)"), ) self.need_loop = True @@ -236,34 +234,33 @@ self.parser.add_argument( "-k", "--key", - type=base.unicode_decoder, action="append", dest="keys", - help=_(u"microblog data key(s) to display (default: depend of verbosity)"), + help=_("microblog data key(s) to display (default: depend of verbosity)"), ) # TODO: add MAM filters def template_data_mapping(self, data): - return {u"items": data_objects.BlogItems(data, deserialise=False)} + return {"items": data_objects.BlogItems(data, deserialise=False)} def format_comments(self, item, keys): comments_data = data_format.dict2iterdict( - u"comments", item, (u"node", u"service"), pop=True + "comments", item, ("node", "service"), pop=True ) lines = [] for data in comments_data: - lines.append(data[u"comments"]) - for k in (u"node", u"service"): + lines.append(data["comments"]) + for k in ("node", "service"): if OUTPUT_OPT_NO_HEADER in self.args.output_opts: - header = u"" + header = "" else: - header = C.A_HEADER + k + 
u": " + A.RESET + header = C.A_HEADER + k + ": " + A.RESET lines.append(header + data[k]) - return u"\n".join(lines) + return "\n".join(lines) def format_tags(self, item, keys): - tags = item.pop(u'tags', []) - return u", ".join(tags) + tags = item.pop('tags', []) + return ", ".join(tags) def format_updated(self, item, keys): return self.format_time(item["updated"]) @@ -273,11 +270,11 @@ def format_url(self, item, keys): return uri.buildXMPPUri( - u"pubsub", - subtype=u"microblog", - path=self.metadata[u"service"], - node=self.metadata[u"node"], - item=item[u"id"], + "pubsub", + subtype="microblog", + path=self.metadata["service"], + node=self.metadata["node"], + item=item["id"], ) def get_keys(self): @@ -286,10 +283,10 @@ if self.args.keys: if not set(MB_KEYS).issuperset(self.args.keys): self.disp( - u"following keys are invalid: {invalid}.\n" - u"Valid keys are: {valid}.".format( - invalid=u", ".join(set(self.args.keys).difference(MB_KEYS)), - valid=u", ".join(sorted(MB_KEYS)), + "following keys are invalid: {invalid}.\n" + "Valid keys are: {valid}.".format( + invalid=", ".join(set(self.args.keys).difference(MB_KEYS)), + valid=", ".join(sorted(MB_KEYS)), ), error=True, ) @@ -297,18 +294,18 @@ return self.args.keys else: if verbosity == 0: - return (u"title", u"content") + return ("title", "content") elif verbosity == 1: return ( - u"title", - u"tags", - u"author", - u"author_jid", - u"author_email", - u"author_jid_verified", - u"published", - u"updated", - u"content", + "title", + "tags", + "author", + "author_jid", + "author_email", + "author_jid_verified", + "published", + "updated", + "content", ) else: return MB_KEYS @@ -334,19 +331,19 @@ if OUTPUT_OPT_NO_HEADER in self.args.output_opts: header = "" else: - header = u"{k_fmt}{key}:{k_fmt_e} {sep}".format( + header = "{k_fmt}{key}:{k_fmt_e} {sep}".format( k_fmt=C.A_HEADER, key=k, k_fmt_e=A.RESET, - sep=u"\n" if "content" in k else u"", + sep="\n" if "content" in k else "", ) value = k_cb[k](item, keys) if k in k_cb else item[k] if isinstance(value, bool): - value = unicode(value).lower() + value = str(value).lower() self.disp(header + value) # we want a separation line after each item but the last one if idx < len(items) - 1: - print(u"") + print("") def format_time(self, timestamp): """return formatted date for timestamp @@ -354,7 +351,7 @@ @param timestamp(str,int,float): unix timestamp @return (unicode): formatted date """ - fmt = u"%d/%m/%Y %H:%M:%S" + fmt = "%d/%m/%Y %H:%M:%S" return time.strftime(fmt, time.localtime(float(timestamp))) def fancy_output(self, data): @@ -363,46 +360,46 @@ this output doesn't use keys filter """ # thanks to http://stackoverflow.com/a/943921 - rows, columns = map(int, os.popen("stty size", "r").read().split()) + rows, columns = list(map(int, os.popen("stty size", "r").read().split())) items, metadata = data verbosity = self.args.verbose - sep = A.color(A.FG_BLUE, columns * u"▬") + sep = A.color(A.FG_BLUE, columns * "▬") if items: - print(u"\n" + sep + "\n") + print(("\n" + sep + "\n")) for idx, item in enumerate(items): - title = item.get(u"title") + title = item.get("title") if verbosity > 0: - author = item[u"author"] - published, updated = item[u"published"], item.get("updated") + author = item["author"] + published, updated = item["published"], item.get("updated") else: author = published = updated = None if verbosity > 1: tags = item.pop('tags', []) else: tags = None - content = item.get(u"content") + content = item.get("content") if title: - print(A.color(A.BOLD, A.FG_CYAN, item[u"title"])) + 
print((A.color(A.BOLD, A.FG_CYAN, item["title"]))) meta = [] if author: meta.append(A.color(A.FG_YELLOW, author)) if published: - meta.append(A.color(A.FG_YELLOW, u"on ", self.format_time(published))) + meta.append(A.color(A.FG_YELLOW, "on ", self.format_time(published))) if updated != published: meta.append( - A.color(A.FG_YELLOW, u"(updated on ", self.format_time(updated), u")") + A.color(A.FG_YELLOW, "(updated on ", self.format_time(updated), ")") ) - print(u" ".join(meta)) + print((" ".join(meta))) if tags: - print(A.color(A.FG_MAGENTA, u", ".join(tags))) + print((A.color(A.FG_MAGENTA, ", ".join(tags)))) if (title or tags) and content: print("") if content: self.disp(content) - print(u"\n" + sep + "\n") + print(("\n" + sep + "\n")) def mbGetCb(self, mb_result): items, metadata = mb_result @@ -412,7 +409,7 @@ self.host.quit(C.EXIT_OK) def mbGetEb(self, failure_): - self.disp(u"can't get blog items: {reason}".format(reason=failure_), error=True) + self.disp("can't get blog items: {reason}".format(reason=failure_), error=True) self.host.quit(C.EXIT_BRIDGE_ERRBACK) def start(self): @@ -438,7 +435,7 @@ pubsub_flags={C.SINGLE_ITEM}, use_draft=True, use_verbose=True, - help=_(u"edit an existing or new blog post"), + help=_("edit an existing or new blog post"), ) BlogPublishCommon.__init__(self) common.BaseEdit.__init__(self, self.host, BLOG_TMP_DIR, use_metadata=True) @@ -449,7 +446,7 @@ "-P", "--preview", action="store_true", - help=_(u"launch a blog preview in parallel"), + help=_("launch a blog preview in parallel"), ) def buildMetadataFile(self, content_file_path, mb_data=None): @@ -465,13 +462,13 @@ # or re-use the existing one if it exists meta_file_path = os.path.splitext(content_file_path)[0] + common.METADATA_SUFF if os.path.exists(meta_file_path): - self.disp(u"Metadata file already exists, we re-use it") + self.disp("Metadata file already exists, we re-use it") try: with open(meta_file_path, "rb") as f: mb_data = json.load(f) except (OSError, IOError, ValueError) as e: self.disp( - u"Can't read existing metadata file at {path}, aborting: {reason}".format( + "Can't read existing metadata file at {path}, aborting: {reason}".format( path=meta_file_path, reason=e ), error=True, @@ -513,7 +510,7 @@ # do we need a preview ? 
if self.args.preview: - self.disp(u"Preview requested, launching it", 1) + self.disp("Preview requested, launching it", 1) # we redirect outputs to /dev/null to avoid console pollution in editor # if user wants to see messages, (s)he can call "blog preview" directly DEVNULL = open(os.devnull, "wb") @@ -552,7 +549,7 @@ self.host.bridge.mbSend( self.pubsub_service, self.pubsub_node, mb_data, self.profile ) - self.disp(u"Blog item published") + self.disp("Blog item published") def getTmpSuff(self): # we get current syntax to determine file extension @@ -577,15 +574,15 @@ if content and self.current_syntax == SYNTAX_XHTML: content = content.strip() if not content.startswith('<div>'): - content = u'<div>' + content + u'</div>' + content = '<div>' + content + '</div>' try: from lxml import etree except ImportError: - self.disp(_(u"You need lxml to edit pretty XHTML")) + self.disp(_("You need lxml to edit pretty XHTML")) else: parser = etree.XMLParser(remove_blank_text=True) root = etree.fromstring(content, parser) - content = etree.tostring(root, encoding=unicode, pretty_print=True) + content = etree.tostring(root, encoding=str, pretty_print=True) return content, mb_data, mb_data["id"] @@ -599,11 +596,11 @@ self.current_syntax ) except Exception as e: - if "NotFound" in unicode( + if "NotFound" in str( e ): # FIXME: there is not good way to check bridge errors self.parser.error( - _(u"unknown syntax requested ({syntax})").format( + _("unknown syntax requested ({syntax})").format( syntax=self.args.syntax ) ) @@ -627,7 +624,7 @@ def __init__(self, host): base.CommandBase.__init__( - self, host, "preview", use_verbose=True, help=_(u"preview a blog content") + self, host, "preview", use_verbose=True, help=_("preview a blog content") ) common.BaseEdit.__init__(self, self.host, BLOG_TMP_DIR, use_metadata=True) @@ -636,15 +633,14 @@ "--inotify", type=str, choices=("auto", "true", "false"), - default=u"auto", - help=_(u"use inotify to handle preview"), + default="auto", + help=_("use inotify to handle preview"), ) self.parser.add_argument( "file", - type=base.unicode_decoder, nargs="?", - default=u"current", - help=_(u"path to the content file"), + default="current", + help=_("path to the content file"), ) def showPreview(self): @@ -660,7 +656,7 @@ ) if not args: self.disp( - u'Couln\'t find command in "{name}", abording'.format(name=opt_name), + 'Couln\'t find command in "{name}", abording'.format(name=opt_name), error=True, ) self.host.quit(1) @@ -683,11 +679,11 @@ ) xhtml = ( - u'<html xmlns="http://www.w3.org/1999/xhtml">' - u'<head><meta http-equiv="Content-Type" content="text/html;charset=utf-8" />' - u"</head>" - u"<body>{}</body>" - u"</html>" + '<html xmlns="http://www.w3.org/1999/xhtml">' + '<head><meta http-equiv="Content-Type" content="text/html;charset=utf-8" />' + "</head>" + "<body>{}</body>" + "</html>" ).format(content) with open(self.preview_file_path, "wb") as f: @@ -695,7 +691,7 @@ def start(self): import webbrowser - import urllib + import urllib.request, urllib.parse, urllib.error self.webbrowser, self.urllib = webbrowser, urllib @@ -708,13 +704,13 @@ if self.args.inotify == "auto": inotify = None self.disp( - u"inotify module not found, deactivating feature. You can install" - u" it with {install}".format(install=INOTIFY_INSTALL) + "inotify module not found, deactivating feature. You can install" + " it with {install}".format(install=INOTIFY_INSTALL) ) else: self.disp( - u"inotify not found, can't activate the feature! 
Please install " - u"it with {install}".format(install=INOTIFY_INSTALL), + "inotify not found, can't activate the feature! Please install " + "it with {install}".format(install=INOTIFY_INSTALL), error=True, ) self.host.quit(1) @@ -724,7 +720,7 @@ inotify.adapters._LOGGER.setLevel(40) except AttributeError: self.disp( - u"Logger doesn't exists, inotify may have chanded", error=True + "Logger doesn't exists, inotify may have chanded", error=True ) else: inotify = None @@ -768,8 +764,8 @@ # XXX: we don't delete file automatically because browser need it # (and webbrowser.open can return before it is read) self.disp( - u"temporary file created at {}\nthis file will NOT BE DELETED " - u"AUTOMATICALLY, please delete it yourself when you have finished".format( + "temporary file created at {}\nthis file will NOT BE DELETED " + "AUTOMATICALLY, please delete it yourself when you have finished".format( self.preview_file_path ) ) @@ -793,10 +789,10 @@ try: for event in i.event_gen(): if event is not None: - self.disp(u"Content updated", 1) + self.disp("Content updated", 1) if {"IN_DELETE_SELF", "IN_MOVE_SELF"}.intersection(event[1]): self.disp( - u"{} event catched, changing the watch".format( + "{} event catched, changing the watch".format( ", ".join(event[1]) ), 2, @@ -813,7 +809,7 @@ update_cb() except InotifyError: self.disp( - u"Can't catch inotify events, as the file been deleted?", error=True + "Can't catch inotify events, as the file been deleted?", error=True ) finally: os.unlink(self.preview_file_path) @@ -830,28 +826,27 @@ "import", use_pubsub=True, use_progress=True, - help=_(u"import an external blog"), + help=_("import an external blog"), ) self.need_loop = True def add_parser_options(self): self.parser.add_argument( "importer", - type=base.unicode_decoder, nargs="?", - help=_(u"importer name, nothing to display importers list"), + help=_("importer name, nothing to display importers list"), ) self.parser.add_argument( - "--host", type=base.unicode_decoder, help=_(u"original blog host") + "--host", help=_("original blog host") ) self.parser.add_argument( "--no-images-upload", action="store_true", - help=_(u"do *NOT* upload images (default: do upload images)"), + help=_("do *NOT* upload images (default: do upload images)"), ) self.parser.add_argument( "--upload-ignore-host", - help=_(u"do not upload images from this host (default: upload all images)"), + help=_("do not upload images from this host (default: upload all images)"), ) self.parser.add_argument( "--ignore-tls-errors", @@ -864,52 +859,51 @@ action="append", nargs=2, default=[], - metavar=(u"NAME", u"VALUE"), - help=_(u"importer specific options (see importer description)"), + metavar=("NAME", "VALUE"), + help=_("importer specific options (see importer description)"), ) self.parser.add_argument( "location", - type=base.unicode_decoder, nargs="?", help=_( - u"importer data location (see importer description), nothing to show " - u"importer description" + "importer data location (see importer description), nothing to show " + "importer description" ), ) def onProgressStarted(self, metadata): - self.disp(_(u"Blog upload started"), 2) + self.disp(_("Blog upload started"), 2) def onProgressFinished(self, metadata): - self.disp(_(u"Blog uploaded successfully"), 2) + self.disp(_("Blog uploaded successfully"), 2) redirections = { k[len(URL_REDIRECT_PREFIX) :]: v - for k, v in metadata.iteritems() + for k, v in metadata.items() if k.startswith(URL_REDIRECT_PREFIX) } if redirections: - conf = u"\n".join( + conf = "\n".join( [ - 
u"url_redirections_dict = {}".format( + "url_redirections_dict = {}".format( # we need to add ' ' before each new line # and to double each '%' for ConfigParser - u"\n ".join( + "\n ".join( json.dumps(redirections, indent=1, separators=(",", ": ")) - .replace(u"%", u"%%") - .split(u"\n") + .replace("%", "%%") + .split("\n") ) ), ] ) self.disp( _( - u"\nTo redirect old URLs to new ones, put the following lines in your" - u" sat.conf file, in [libervia] section:\n\n{conf}".format(conf=conf) + "\nTo redirect old URLs to new ones, put the following lines in your" + " sat.conf file, in [libervia] section:\n\n{conf}".format(conf=conf) ) ) def onProgressError(self, error_msg): - self.disp(_(u"Error while uploading blog: {}").format(error_msg), error=True) + self.disp(_("Error while uploading blog: {}").format(error_msg), error=True) def error(self, failure): self.disp( @@ -924,14 +918,14 @@ if getattr(self.args, name): self.parser.error( _( - u"{name} argument can't be used without location argument" + "{name} argument can't be used without location argument" ).format(name=name) ) if self.args.importer is None: self.disp( - u"\n".join( + "\n".join( [ - u"{}: {}".format(name, desc) + "{}: {}".format(name, desc) for name, desc in self.host.bridge.blogImportList() ] ) @@ -942,14 +936,14 @@ self.args.importer ) except Exception as e: - msg = [l for l in unicode(e).split("\n") if l][ + msg = [l for l in str(e).split("\n") if l][ -1 ] # we only keep the last line self.disp(msg) self.host.quit(1) else: self.disp( - u"{name}: {short_desc}\n\n{long_desc}".format( + "{name}: {short_desc}\n\n{long_desc}".format( name=self.args.importer, short_desc=short_desc, long_desc=long_desc, @@ -967,8 +961,8 @@ options["upload_images"] = C.BOOL_FALSE if self.args.upload_ignore_host: self.parser.error( - u"upload-ignore-host option can't be used when no-images-upload " - u"is set" + "upload-ignore-host option can't be used when no-images-upload " + "is set" ) elif self.args.upload_ignore_host: options["upload_ignore_host"] = self.args.upload_ignore_host
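Note on the getTmpContent hunk above: lxml's Python 2 idiom etree.tostring(root, encoding=unicode) becomes encoding=str, which asks lxml for text output instead of bytes. A minimal sketch of the difference, assuming lxml is installed (the <div> content is invented):

    from lxml import etree

    parser = etree.XMLParser(remove_blank_text=True)
    root = etree.fromstring("<div><p>hello</p></div>", parser)

    as_text = etree.tostring(root, encoding=str, pretty_print=True)   # -> str (text)
    as_bytes = etree.tostring(root, pretty_print=True)                # -> bytes (default)
    assert isinstance(as_text, str) and isinstance(as_bytes, bytes)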
--- a/sat_frontends/jp/cmd_bookmarks.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_bookmarks.py Tue Aug 13 19:08:41 2019 +0200 @@ -17,7 +17,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ __commands__ = ["Bookmarks"] @@ -33,7 +33,7 @@ self.parser.add_argument('-t', '--type', type=str, choices=TYPES, default=TYPES[0], help=_("bookmarks type (default: %(default)s)")) def _errback(self, failure): - print (("Something went wrong: [%s]") % failure) + print((("Something went wrong: [%s]") % failure)) self.host.quit(1) class BookmarksList(BookmarksCommon): @@ -48,20 +48,20 @@ if not data[location]: continue loc_mess = [] - loc_mess.append(u"%s:" % location) + loc_mess.append("%s:" % location) book_mess = [] - for book_link, book_data in data[location].items(): + for book_link, book_data in list(data[location].items()): name = book_data.get('name') autojoin = book_data.get('autojoin', 'false') == 'true' nick = book_data.get('nick') - book_mess.append(u"\t%s[%s%s]%s" % ((name+' ') if name else '', + book_mess.append("\t%s[%s%s]%s" % ((name+' ') if name else '', book_link, - u' (%s)' % nick if nick else '', - u' (*)' if autojoin else '')) - loc_mess.append(u'\n'.join(book_mess)) - mess.append(u'\n'.join(loc_mess)) + ' (%s)' % nick if nick else '', + ' (*)' if autojoin else '')) + loc_mess.append('\n'.join(book_mess)) + mess.append('\n'.join(loc_mess)) - print u'\n\n'.join(mess) + print('\n\n'.join(mess)) class BookmarksRemove(BookmarksCommon): @@ -72,7 +72,7 @@ def add_parser_options(self): super(BookmarksRemove, self).add_parser_options() - self.parser.add_argument('bookmark', type=base.unicode_decoder, help=_('jid (for muc bookmark) or url of to remove')) + self.parser.add_argument('bookmark', help=_('jid (for muc bookmark) or url of to remove')) def start(self): self.host.bridge.bookmarksRemove(self.args.type, self.args.bookmark, self.args.location, self.host.profile, callback = lambda: self.host.quit(), errback=self._errback) @@ -86,16 +86,16 @@ def add_parser_options(self): super(BookmarksAdd, self).add_parser_options(location_default='auto') - self.parser.add_argument('bookmark', type=base.unicode_decoder, help=_('jid (for muc bookmark) or url of to remove')) - self.parser.add_argument('-n', '--name', type=base.unicode_decoder, help=_("bookmark name")) + self.parser.add_argument('bookmark', help=_('jid (for muc bookmark) or url of to remove')) + self.parser.add_argument('-n', '--name', help=_("bookmark name")) muc_group = self.parser.add_argument_group(_('MUC specific options')) - muc_group.add_argument('-N', '--nick', type=base.unicode_decoder, help=_('nickname')) + muc_group.add_argument('-N', '--nick', help=_('nickname')) muc_group.add_argument('-a', '--autojoin', action='store_true', help=_('join room on profile connection')) def start(self): if self.args.type == 'url' and (self.args.autojoin or self.args.nick is not None): # XXX: Argparse doesn't seem to manage this case, any better way ? - print _(u"You can't use --autojoin or --nick with --type url") + print(_("You can't use --autojoin or --nick with --type url")) self.host.quit(1) data = {} if self.args.autojoin:
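The cmd_bookmarks.py hunk above shows two rewrites 2to3 applies mechanically: the print statement becomes the print() function, and .items() calls get wrapped in list(). The list() copy is only required when the dict is mutated during iteration; a plain view iteration gives the same result in this read-only loop. Sketch with an invented bookmark entry:

    bookmarks = {"room@muc.example.net": {"name": "SàT", "autojoin": "true"}}  # invented

    book_mess = []
    for book_link, book_data in bookmarks.items():   # the list(...) wrapper is optional here
        star = " (*)" if book_data.get("autojoin") == "true" else ""
        name = book_data.get("name")
        book_mess.append("\t%s[%s]%s" % ((name + " ") if name else "", book_link, star))
    print("\n".join(book_mess))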
--- a/sat_frontends/jp/cmd_debug.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_debug.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat_frontends.jp.constants import Const as C from sat.tools.common.ansi import ANSI as A @@ -31,11 +31,11 @@ def evalArgs(self): if self.args.arg: try: - return eval(u"[{}]".format(u",".join(self.args.arg))) + return eval("[{}]".format(",".join(self.args.arg))) except SyntaxError as e: self.disp( - u"Can't evaluate arguments: {mess}\n{text}\n{offset}^".format( - mess=e, text=e.text.decode("utf-8"), offset=u" " * (e.offset - 1) + "Can't evaluate arguments: {mess}\n{text}\n{offset}^".format( + mess=e, text=e.text, offset=" " * (e.offset - 1) ), error=True, ) @@ -46,26 +46,26 @@ class Method(base.CommandBase, BridgeCommon): def __init__(self, host): - base.CommandBase.__init__(self, host, "method", help=_(u"call a bridge method")) + base.CommandBase.__init__(self, host, "method", help=_("call a bridge method")) BridgeCommon.__init__(self) self.need_loop = True def add_parser_options(self): self.parser.add_argument( - "method", type=str, help=_(u"name of the method to execute") + "method", type=str, help=_("name of the method to execute") ) self.parser.add_argument( - "arg", type=base.unicode_decoder, nargs="*", help=_(u"argument of the method") + "arg", nargs="*", help=_("argument of the method") ) def method_cb(self, ret=None): if ret is not None: - self.disp(unicode(ret)) + self.disp(str(ret)) self.host.quit() def method_eb(self, failure): self.disp( - _(u"Error while executing {}: {}".format(self.args.method, failure)), + _("Error while executing {}: {}".format(self.args.method, failure)), error=True, ) self.host.quit(C.EXIT_ERROR) @@ -85,22 +85,22 @@ try: method(*args, callback=self.method_cb, errback=self.method_eb) except TypeError: - self.method_eb(_(u"bad arguments")) + self.method_eb(_("bad arguments")) class Signal(base.CommandBase, BridgeCommon): def __init__(self, host): base.CommandBase.__init__( - self, host, "signal", help=_(u"send a fake signal from backend") + self, host, "signal", help=_("send a fake signal from backend") ) BridgeCommon.__init__(self) def add_parser_options(self): self.parser.add_argument( - "signal", type=str, help=_(u"name of the signal to send") + "signal", type=str, help=_("name of the signal to send") ) self.parser.add_argument( - "arg", type=base.unicode_decoder, nargs="*", help=_(u"argument of the signal") + "arg", nargs="*", help=_("argument of the signal") ) def start(self): @@ -138,7 +138,7 @@ "--direction", choices=("in", "out", "both"), default="both", - help=_(u"stream direction filter"), + help=_("stream direction filter"), ) def printXML(self, direction, xml_data, profile): @@ -155,7 +155,7 @@ whiteping = False if verbosity: - profile_disp = u" ({})".format(profile) if verbosity > 1 else u"" + profile_disp = " ({})".format(profile) if verbosity > 1 else "" if direction == "IN": self.disp( A.color( @@ -180,7 +180,7 @@ # but importing lxml directly here is not clean # should be wrapped in a custom Exception self.disp(xml_data) - self.disp(u"") + self.disp("") def start(self): self.host.bridge.register_signal("xmlLog", self.printXML, "plugin")
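In cmd_debug.py the SyntaxError handler drops e.text.decode("utf-8"): under Python 3, SyntaxError.text is already a str. A stripped-down stand-in for BridgeCommon.evalArgs showing the ported error display (the sample arguments are invented):

    def eval_args(raw_args):
        try:
            return eval("[{}]".format(",".join(raw_args)))
        except SyntaxError as e:
            # e.text is already text in Python 3, no .decode("utf-8") needed
            print("Can't evaluate arguments: {mess}\n{text}\n{offset}^".format(
                mess=e, text=e.text, offset=" " * (e.offset - 1)))
            return []

    print(eval_args(['"hello"', "42"]))   # ['hello', 42]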
--- a/sat_frontends/jp/cmd_encryption.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_encryption.py Tue Aug 13 19:08:41 2019 +0200 @@ -48,10 +48,10 @@ def default_output(self, plugins): if not plugins: - self.disp(_(u"No encryption plugin registered!")) + self.disp(_("No encryption plugin registered!")) self.host.quit(C.EXIT_NOT_FOUND) else: - self.disp(_(u"Following encryption algorithms are available: {algos}").format( + self.disp(_("Following encryption algorithms are available: {algos}").format( algos=', '.join([p['name'] for p in plugins]))) self.host.quit() @@ -60,7 +60,7 @@ callback=self.encryptionPluginsGetCb, errback=partial( self.errback, - msg=_(u"can't retrieve plugins: {}"), + msg=_("can't retrieve plugins: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -72,20 +72,20 @@ super(EncryptionGet, self).__init__( host, "get", use_output=C.OUTPUT_DICT, - help=_(u"get encryption session data")) + help=_("get encryption session data")) self.need_loop = True def add_parser_options(self): self.parser.add_argument( - "jid", type=base.unicode_decoder, - help=_(u"jid of the entity to check") + "jid", + help=_("jid of the entity to check") ) def messageEncryptionGetCb(self, serialised): session_data = data_format.deserialise(serialised) if session_data is None: self.disp( - u"No encryption session found, the messages are sent in plain text.") + "No encryption session found, the messages are sent in plain text.") self.host.quit(C.EXIT_NOT_FOUND) self.output(session_data) self.host.quit() @@ -98,7 +98,7 @@ callback=self.messageEncryptionGetCb, errback=partial( self.errback, - msg=_(u"can't get session: {}"), + msg=_("can't get session: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -109,23 +109,23 @@ def __init__(self, host): super(EncryptionStart, self).__init__( host, "start", - help=_(u"start encrypted session with an entity")) + help=_("start encrypted session with an entity")) self.need_loop = True def add_parser_options(self): self.parser.add_argument( "--encrypt-noreplace", action="store_true", - help=_(u"don't replace encryption algorithm if an other one is already used")) + help=_("don't replace encryption algorithm if an other one is already used")) algorithm = self.parser.add_mutually_exclusive_group() algorithm.add_argument( - "-n", "--name", help=_(u"algorithm name (DEFAULT: choose automatically)")) + "-n", "--name", help=_("algorithm name (DEFAULT: choose automatically)")) algorithm.add_argument( "-N", "--namespace", - help=_(u"algorithm namespace (DEFAULT: choose automatically)")) + help=_("algorithm namespace (DEFAULT: choose automatically)")) self.parser.add_argument( - "jid", type=base.unicode_decoder, - help=_(u"jid of the entity to stop encrypted session with") + "jid", + help=_("jid of the entity to stop encrypted session with") ) def encryptionNamespaceGetCb(self, namespace): @@ -136,7 +136,7 @@ self.profile, callback=self.host.quit, errback=partial(self.errback, - msg=_(u"Can't start encryption session: {}"), + msg=_("Can't start encryption session: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, )) @@ -145,13 +145,13 @@ self.host.bridge.encryptionNamespaceGet(self.args.name, callback=self.encryptionNamespaceGetCb, errback=partial(self.errback, - msg=_(u"Can't get encryption namespace: {}"), + msg=_("Can't get encryption namespace: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, )) elif self.args.namespace is not None: self.encryptionNamespaceGetCb(self.args.namespace) else: - self.encryptionNamespaceGetCb(u"") + self.encryptionNamespaceGetCb("") class 
EncryptionStop(base.CommandBase): @@ -159,13 +159,13 @@ def __init__(self, host): super(EncryptionStop, self).__init__( host, "stop", - help=_(u"stop encrypted session with an entity")) + help=_("stop encrypted session with an entity")) self.need_loop = True def add_parser_options(self): self.parser.add_argument( - "jid", type=base.unicode_decoder, - help=_(u"jid of the entity to stop encrypted session with") + "jid", + help=_("jid of the entity to stop encrypted session with") ) def start(self): @@ -176,7 +176,7 @@ callback=self.host.quit, errback=partial( self.errback, - msg=_(u"can't end encrypted session: {}"), + msg=_("can't end encrypted session: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -187,20 +187,20 @@ def __init__(self, host): super(TrustUI, self).__init__( host, "ui", - help=_(u"get UI to manage trust")) + help=_("get UI to manage trust")) self.need_loop = True def add_parser_options(self): self.parser.add_argument( - "jid", type=base.unicode_decoder, - help=_(u"jid of the entity to stop encrypted session with") + "jid", + help=_("jid of the entity to stop encrypted session with") ) algorithm = self.parser.add_mutually_exclusive_group() algorithm.add_argument( - "-n", "--name", help=_(u"algorithm name (DEFAULT: current algorithm)")) + "-n", "--name", help=_("algorithm name (DEFAULT: current algorithm)")) algorithm.add_argument( "-N", "--namespace", - help=_(u"algorithm namespace (DEFAULT: current algorithm)")) + help=_("algorithm namespace (DEFAULT: current algorithm)")) def encryptionTrustUIGetCb(self, xmlui_raw): xmlui = xmlui_manager.create(self.host, xmlui_raw) @@ -215,7 +215,7 @@ callback=self.encryptionTrustUIGetCb, errback=partial( self.errback, - msg=_(u"can't end encrypted session: {}"), + msg=_("can't end encrypted session: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -225,13 +225,13 @@ self.host.bridge.encryptionNamespaceGet(self.args.name, callback=self.encryptionNamespaceGetCb, errback=partial(self.errback, - msg=_(u"Can't get encryption namespace: {}"), + msg=_("Can't get encryption namespace: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, )) elif self.args.namespace is not None: self.encryptionNamespaceGetCb(self.args.namespace) else: - self.encryptionNamespaceGetCb(u"") + self.encryptionNamespaceGetCb("") class EncryptionTrust(base.CommandBase): @@ -239,7 +239,7 @@ def __init__(self, host): super(EncryptionTrust, self).__init__( - host, "trust", use_profile=False, help=_(u"trust manangement") + host, "trust", use_profile=False, help=_("trust manangement") ) @@ -249,5 +249,5 @@ def __init__(self, host): super(Encryption, self).__init__( - host, "encryption", use_profile=False, help=_(u"encryption sessions handling") + host, "encryption", use_profile=False, help=_("encryption sessions handling") )
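Most add_argument() calls in cmd_encryption.py (and in the other jp commands) change only by losing type=base.unicode_decoder, the helper used under Python 2 to decode command-line byte strings; Python 3's argparse already returns str. Minimal sketch with a made-up JID:

    import argparse

    parser = argparse.ArgumentParser(prog="jp encryption get")
    parser.add_argument("jid", help="jid of the entity to check")
    args = parser.parse_args(["juliet@example.org"])   # made-up JID
    assert isinstance(args.jid, str)   # already text, no unicode_decoder needed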
--- a/sat_frontends/jp/cmd_event.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_event.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat.tools.common.ansi import ANSI as A from sat_frontends.jp.constants import Const as C @@ -30,7 +30,7 @@ __commands__ = ["Event"] -OUTPUT_OPT_TABLE = u"table" +OUTPUT_OPT_TABLE = "table" # TODO: move date parsing to base, it may be useful for other commands @@ -45,7 +45,7 @@ use_pubsub=True, pubsub_flags={C.SINGLE_ITEM}, use_verbose=True, - help=_(u"get event data"), + help=_("get event data"), ) self.need_loop = True @@ -67,7 +67,7 @@ callback=self.eventInviteeGetCb, errback=partial( self.errback, - msg=_(u"can't get event data: {}"), + msg=_("can't get event data: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -78,22 +78,20 @@ self.parser.add_argument( "-i", "--id", - type=base.unicode_decoder, - default=u"", - help=_(u"ID of the PubSub Item"), + default="", + help=_("ID of the PubSub Item"), ) self.parser.add_argument( - "-d", "--date", type=unicode, help=_(u"date of the event") + "-d", "--date", type=str, help=_("date of the event") ) self.parser.add_argument( "-f", "--field", - type=base.unicode_decoder, action="append", nargs=2, dest="fields", - metavar=(u"KEY", u"VALUE"), - help=_(u"configuration field to set"), + metavar=("KEY", "VALUE"), + help=_("configuration field to set"), ) def parseFields(self): @@ -106,10 +104,10 @@ except ValueError: try: date_time = du_parser.parse( - self.args.date, dayfirst=not (u"-" in self.args.date) + self.args.date, dayfirst=not ("-" in self.args.date) ) except ValueError as e: - self.parser.error(_(u"Can't parse date: {msg}").format(msg=e)) + self.parser.error(_("Can't parse date: {msg}").format(msg=e)) if date_time.tzinfo is None: date = calendar.timegm(date_time.timetuple()) else: @@ -131,7 +129,7 @@ self.need_loop = True def eventCreateCb(self, node): - self.disp(_(u"Event created successfuly on node {node}").format(node=node)) + self.disp(_("Event created successfuly on node {node}").format(node=node)) self.host.quit() def start(self): @@ -147,7 +145,7 @@ callback=self.eventCreateCb, errback=partial( self.errback, - msg=_(u"can't create event: {}"), + msg=_("can't create event: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -178,7 +176,7 @@ callback=self.host.quit, errback=partial( self.errback, - msg=_(u"can't update event data: {}"), + msg=_("can't update event data: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -194,7 +192,7 @@ use_pubsub=True, pubsub_flags={C.NODE, C.ITEM, C.SINGLE_ITEM}, use_verbose=True, - help=_(u"get event attendance"), + help=_("get event attendance"), ) self.need_loop = True @@ -213,7 +211,7 @@ callback=self.eventInviteeGetCb, errback=partial( self.errback, - msg=_(u"can't get event data: {}"), + msg=_("can't get event data: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -235,12 +233,11 @@ self.parser.add_argument( "-f", "--field", - type=base.unicode_decoder, action="append", nargs=2, dest="fields", - metavar=(u"KEY", u"VALUE"), - help=_(u"configuration field to set"), + metavar=("KEY", "VALUE"), + help=_("configuration field to set"), ) def start(self): @@ -253,7 +250,7 @@ callback=self.host.quit, errback=partial( self.errback, - msg=_(u"can't set event data: {}"), + msg=_("can't set event data: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -271,7 +268,7 @@ use_pubsub=True, pubsub_flags={C.NODE}, use_verbose=True, - help=_(u"get 
event attendance"), + help=_("get event attendance"), ) self.need_loop = True @@ -280,26 +277,26 @@ "-m", "--missing", action="store_true", - help=_(u"show missing people (invited but no R.S.V.P. so far)"), + help=_("show missing people (invited but no R.S.V.P. so far)"), ) self.parser.add_argument( "-R", "--no-rsvp", action="store_true", - help=_(u"don't show people which gave R.S.V.P."), + help=_("don't show people which gave R.S.V.P."), ) def _attend_filter(self, attend, row): - if attend == u"yes": + if attend == "yes": attend_color = C.A_SUCCESS - elif attend == u"no": + elif attend == "no": attend_color = C.A_FAILURE else: attend_color = A.FG_WHITE return A.color(attend_color, attend) def _guests_filter(self, guests): - return u"(" + unicode(guests) + ")" if guests else u"" + return "(" + str(guests) + ")" if guests else "" def default_output(self, event_data): data = [] @@ -309,13 +306,13 @@ attendees_missing = 0 guests = 0 guests_maybe = 0 - for jid_, jid_data in event_data.iteritems(): - jid_data[u"jid"] = jid_ + for jid_, jid_data in event_data.items(): + jid_data["jid"] = jid_ try: guests_int = int(jid_data["guests"]) except (ValueError, KeyError): pass - attend = jid_data.get(u"attend", u"") + attend = jid_data.get("attend", "") if attend == "yes": attendees_yes += 1 guests += guests_int @@ -324,10 +321,10 @@ guests_maybe += guests_int elif attend == "no": attendees_no += 1 - jid_data[u"guests"] = "" + jid_data["guests"] = "" else: attendees_missing += 1 - jid_data[u"guests"] = "" + jid_data["guests"] = "" data.append(jid_data) show_table = OUTPUT_OPT_TABLE in self.args.output_opts @@ -335,60 +332,60 @@ table = common.Table.fromDict( self.host, data, - (u"nick",) - + ((u"jid",) if self.host.verbosity else ()) - + (u"attend", "guests"), + ("nick",) + + (("jid",) if self.host.verbosity else ()) + + ("attend", "guests"), headers=None, filters={ - u"nick": A.color(C.A_HEADER, u"{}" if show_table else u"{} "), - u"jid": u"{}" if show_table else u"{} ", - u"attend": self._attend_filter, - u"guests": u"{}" if show_table else self._guests_filter, + "nick": A.color(C.A_HEADER, "{}" if show_table else "{} "), + "jid": "{}" if show_table else "{} ", + "attend": self._attend_filter, + "guests": "{}" if show_table else self._guests_filter, }, - defaults={u"nick": u"", u"attend": u"", u"guests": 1}, + defaults={"nick": "", "attend": "", "guests": 1}, ) if show_table: table.display() else: - table.display_blank(show_header=False, col_sep=u"") + table.display_blank(show_header=False, col_sep="") if not self.args.no_rsvp: - self.disp(u"") + self.disp("") self.disp( A.color( C.A_SUBHEADER, - _(u"Attendees: "), + _("Attendees: "), A.RESET, - unicode(len(data)), - _(u" ("), + str(len(data)), + _(" ("), C.A_SUCCESS, - _(u"yes: "), - unicode(attendees_yes), + _("yes: "), + str(attendees_yes), A.FG_WHITE, - _(u", maybe: "), - unicode(attendees_maybe), - u", ", + _(", maybe: "), + str(attendees_maybe), + ", ", C.A_FAILURE, - _(u"no: "), - unicode(attendees_no), + _("no: "), + str(attendees_no), A.RESET, - u")", + ")", ) ) self.disp( - A.color(C.A_SUBHEADER, _(u"confirmed guests: "), A.RESET, unicode(guests)) + A.color(C.A_SUBHEADER, _("confirmed guests: "), A.RESET, str(guests)) ) self.disp( A.color( C.A_SUBHEADER, - _(u"unconfirmed guests: "), + _("unconfirmed guests: "), A.RESET, - unicode(guests_maybe), + str(guests_maybe), ) ) self.disp( A.color( - C.A_SUBHEADER, _(u"total: "), A.RESET, unicode(guests + guests_maybe) + C.A_SUBHEADER, _("total: "), A.RESET, str(guests + guests_maybe) ) ) if 
attendees_missing: @@ -396,9 +393,9 @@ self.disp( A.color( C.A_SUBHEADER, - _(u"missing people (no reply): "), + _("missing people (no reply): "), A.RESET, - unicode(attendees_missing), + str(attendees_missing), ) ) @@ -420,9 +417,9 @@ prefilled_data.update(event_data) # we get nicknames for everybody, make it easier for organisers - for jid_, data in prefilled_data.iteritems(): + for jid_, data in prefilled_data.items(): id_data = self.host.bridge.identityGet(jid_, self.profile) - data[u"nick"] = id_data.get(u"nick", u"") + data["nick"] = id_data.get("nick", "") self.output(prefilled_data) self.host.quit() @@ -435,7 +432,7 @@ callback=partial(self.eventInviteesListCb, prefilled_data=prefilled_data), errback=partial( self.errback, - msg=_(u"can't get event data: {}"), + msg=_("can't get event data: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -446,14 +443,14 @@ # we only consider people with "publisher" affiliation as invited, creators are not, and members can just observe prefilled = { jid_: {} - for jid_, affiliation in affiliations.iteritems() - if affiliation in (u"publisher",) + for jid_, affiliation in affiliations.items() + if affiliation in ("publisher",) } self.getList(prefilled) def start(self): if self.args.no_rsvp and not self.args.missing: - self.parser.error(_(u"you need to use --missing if you use --no-rsvp")) + self.parser.error(_("you need to use --missing if you use --no-rsvp")) if self.args.missing: self.host.bridge.psNodeAffiliationsGet( self.args.service, @@ -462,7 +459,7 @@ callback=self.psNodeAffiliationsGetCb, errback=partial( self.errback, - msg=_(u"can't get event data: {}"), + msg=_("can't get event data: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -478,7 +475,7 @@ "invite", use_pubsub=True, pubsub_flags={C.NODE, C.SINGLE_ITEM}, - help=_(u"invite someone to the event through email"), + help=_("invite someone to the event through email"), ) self.need_loop = True @@ -487,49 +484,42 @@ "-e", "--email", action="append", - type=base.unicode_decoder, default=[], help="email(s) to send the invitation to", ) self.parser.add_argument( "-N", "--name", - type=base.unicode_decoder, default="", help="name of the invitee", ) self.parser.add_argument( "-H", "--host-name", - type=base.unicode_decoder, default="", help="name of the host", ) self.parser.add_argument( "-l", "--lang", - type=base.unicode_decoder, default="", help="main language spoken by the invitee", ) self.parser.add_argument( "-U", "--url-template", - type=base.unicode_decoder, default="", help="template to construct the URL", ) self.parser.add_argument( "-S", "--subject", - type=base.unicode_decoder, default="", help="subject of the invitation email (default: generic subject)", ) self.parser.add_argument( "-b", "--body", - type=base.unicode_decoder, default="", help="body of the invitation email (default: generic body)", ) @@ -554,7 +544,7 @@ callback=self.host.quit, errback=partial( self.errback, - msg=_(u"can't create invitation: {}"), + msg=_("can't create invitation: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -565,7 +555,7 @@ def __init__(self, host): super(Invitee, self).__init__( - host, "invitee", use_profile=False, help=_(u"manage invities") + host, "invitee", use_profile=False, help=_("manage invities") )
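The attendance output in cmd_event.py illustrates two substitutions applied throughout the port: dict.iteritems() becomes dict.items() (Python 3 has no iteritems(); items() returns a view), and every unicode(x) becomes str(x), since str is the text type in Python 3. A tiny sketch with invented counters:

    attendees_yes, attendees_maybe, attendees_no = 3, 1, 2   # invented counters
    total = attendees_yes + attendees_maybe + attendees_no
    print("Attendees: " + str(total)
          + " (yes: " + str(attendees_yes)
          + ", maybe: " + str(attendees_maybe)
          + ", no: " + str(attendees_no) + ")")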
--- a/sat_frontends/jp/cmd_file.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_file.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base import sys import os import os.path @@ -50,32 +50,29 @@ def add_parser_options(self): self.parser.add_argument( - "files", type=str, nargs="+", metavar="file", help=_(u"a list of file") + "files", type=str, nargs="+", metavar="file", help=_("a list of file") ) self.parser.add_argument( - "jid", type=base.unicode_decoder, help=_(u"the destination jid") + "jid", help=_("the destination jid") ) self.parser.add_argument( - "-b", "--bz2", action="store_true", help=_(u"make a bzip2 tarball") + "-b", "--bz2", action="store_true", help=_("make a bzip2 tarball") ) self.parser.add_argument( "-d", "--path", - type=base.unicode_decoder, - help=(u"path to the directory where the file must be stored"), + help=("path to the directory where the file must be stored"), ) self.parser.add_argument( "-N", "--namespace", - type=base.unicode_decoder, - help=(u"namespace of the file"), + help=("namespace of the file"), ) self.parser.add_argument( "-n", "--name", - type=base.unicode_decoder, - default=u"", - help=(u"name to use (DEFAULT: use source file name)"), + default="", + help=("name to use (DEFAULT: use source file name)"), ) def start(self): @@ -83,16 +80,16 @@ self.send_files() def onProgressStarted(self, metadata): - self.disp(_(u"File copy started"), 2) + self.disp(_("File copy started"), 2) def onProgressFinished(self, metadata): - self.disp(_(u"File sent successfully"), 2) + self.disp(_("File sent successfully"), 2) def onProgressError(self, error_msg): if error_msg == C.PROGRESS_ERROR_DECLINED: - self.disp(_(u"The file has been refused by your contact")) + self.disp(_("The file has been refused by your contact")) else: - self.disp(_(u"Error while sending file: {}").format(error_msg), error=True) + self.disp(_("Error while sending file: {}").format(error_msg), error=True) def gotId(self, data, file_): """Called when a progress id has been received @@ -101,13 +98,13 @@ @param file_(str): file path """ # FIXME: this show progress only for last progress_id - self.disp(_(u"File request sent to {jid}".format(jid=self.full_dest_jid)), 1) + self.disp(_("File request sent to {jid}".format(jid=self.full_dest_jid)), 1) try: self.progress_id = data["progress"] except KeyError: # TODO: if 'xmlui' key is present, manage xmlui message display self.disp( - _(u"Can't send file to {jid}".format(jid=self.full_dest_jid)), error=True + _("Can't send file to {jid}".format(jid=self.full_dest_jid)), error=True ) self.host.quit(2) @@ -121,12 +118,12 @@ def send_files(self): for file_ in self.args.files: if not os.path.exists(file_): - self.disp(_(u"file [{}] doesn't exist !").format(file_), error=True) + self.disp(_("file [{}] doesn't exist !").format(file_), error=True) self.host.quit(1) if not self.args.bz2 and os.path.isdir(file_): self.disp( _( - u"[{}] is a dir ! Please send files inside or use compression" + "[{}] is a dir ! 
Please send files inside or use compression" ).format(file_) ) self.host.quit(1) @@ -134,26 +131,26 @@ self.full_dest_jid = self.host.get_full_jid(self.args.jid) extra = {} if self.args.path: - extra[u"path"] = self.args.path + extra["path"] = self.args.path if self.args.namespace: - extra[u"namespace"] = self.args.namespace + extra["namespace"] = self.args.namespace if self.args.bz2: with tempfile.NamedTemporaryFile("wb", delete=False) as buf: self.host.addOnQuitCallback(os.unlink, buf.name) - self.disp(_(u"bz2 is an experimental option, use with caution")) + self.disp(_("bz2 is an experimental option, use with caution")) # FIXME: check free space - self.disp(_(u"Starting compression, please wait...")) + self.disp(_("Starting compression, please wait...")) sys.stdout.flush() bz2 = tarfile.open(mode="w:bz2", fileobj=buf) - archive_name = u"{}.tar.bz2".format( - os.path.basename(self.args.files[0]) or u"compressed_files" + archive_name = "{}.tar.bz2".format( + os.path.basename(self.args.files[0]) or "compressed_files" ) for file_ in self.args.files: - self.disp(_(u"Adding {}").format(file_), 1) + self.disp(_("Adding {}").format(file_), 1) bz2.add(file_) bz2.close() - self.disp(_(u"Done !"), 1) + self.disp(_("Done !"), 1) self.host.bridge.fileSend( self.full_dest_jid, @@ -193,71 +190,65 @@ @property def filename(self): - return self.args.name or self.args.hash or u"output" + return self.args.name or self.args.hash or "output" def add_parser_options(self): self.parser.add_argument( - "jid", type=base.unicode_decoder, help=_(u"the destination jid") + "jid", help=_("the destination jid") ) self.parser.add_argument( "-D", "--dest", - type=base.unicode_decoder, help=_( - u"destination path where the file will be saved (default: [current_dir]/[name|hash])" + "destination path where the file will be saved (default: [current_dir]/[name|hash])" ), ) self.parser.add_argument( "-n", "--name", - type=base.unicode_decoder, - default=u"", - help=_(u"name of the file"), + default="", + help=_("name of the file"), ) self.parser.add_argument( "-H", "--hash", - type=base.unicode_decoder, - default=u"", - help=_(u"hash of the file"), + default="", + help=_("hash of the file"), ) self.parser.add_argument( "-a", "--hash-algo", - type=base.unicode_decoder, - default=u"sha-256", - help=_(u"hash algorithm use for --hash (default: sha-256)"), + default="sha-256", + help=_("hash algorithm use for --hash (default: sha-256)"), ) self.parser.add_argument( "-d", "--path", - type=base.unicode_decoder, - help=(u"path to the directory containing the file"), + help=("path to the directory containing the file"), ) self.parser.add_argument( "-N", "--namespace", - type=base.unicode_decoder, - help=(u"namespace of the file"), + help=("namespace of the file"), ) self.parser.add_argument( "-f", "--force", action="store_true", - help=_(u"overwrite existing file without confirmation"), + help=_("overwrite existing file without confirmation"), ) def onProgressStarted(self, metadata): - self.disp(_(u"File copy started"), 2) + self.disp(_("File copy started"), 2) def onProgressFinished(self, metadata): - self.disp(_(u"File received successfully"), 2) + self.disp(_("File received successfully"), 2) def onProgressError(self, error_msg): if error_msg == C.PROGRESS_ERROR_DECLINED: - self.disp(_(u"The file request has been refused")) + self.disp(_("The file request has been refused")) else: - self.disp(_(u"Error while requesting file: {}").format(error_msg), error=True) + self.disp(_("Error while requesting file: {}").format(error_msg), 
error=True) def gotId(self, progress_id): """Called when a progress id has been received @@ -275,7 +266,7 @@ def start(self): if not self.args.name and not self.args.hash: - self.parser.error(_(u"at least one of --name or --hash must be provided")) + self.parser.error(_("at least one of --name or --hash must be provided")) # extra = dict(self.args.extra) if self.args.dest: path = os.path.abspath(os.path.expanduser(self.args.dest)) @@ -285,32 +276,32 @@ path = os.path.abspath(self.filename) if os.path.exists(path) and not self.args.force: - message = _(u"File {path} already exists! Do you want to overwrite?").format( + message = _("File {path} already exists! Do you want to overwrite?").format( path=path ) - confirm = raw_input(u"{} (y/N) ".format(message).encode("utf-8")) - if confirm not in (u"y", u"Y"): - self.disp(_(u"file request cancelled")) + confirm = input("{} (y/N) ".format(message).encode("utf-8")) + if confirm not in ("y", "Y"): + self.disp(_("file request cancelled")) self.host.quit(2) self.full_dest_jid = self.host.get_full_jid(self.args.jid) extra = {} if self.args.path: - extra[u"path"] = self.args.path + extra["path"] = self.args.path if self.args.namespace: - extra[u"namespace"] = self.args.namespace + extra["namespace"] = self.args.namespace self.host.bridge.fileJingleRequest( self.full_dest_jid, path, self.args.name, self.args.hash, - self.args.hash_algo if self.args.hash else u"", + self.args.hash_algo if self.args.hash else "", extra, self.profile, callback=self.gotId, errback=partial( self.errback, - msg=_(u"can't request file: {}"), + msg=_("can't request file: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -332,25 +323,25 @@ } def onProgressStarted(self, metadata): - self.disp(_(u"File copy started"), 2) + self.disp(_("File copy started"), 2) def onProgressFinished(self, metadata): - self.disp(_(u"File received successfully"), 2) + self.disp(_("File received successfully"), 2) if metadata.get("hash_verified", False): try: self.disp( - _(u"hash checked: {algo}:{checksum}").format( + _("hash checked: {algo}:{checksum}").format( algo=metadata["hash_algo"], checksum=metadata["hash"] ), 1, ) except KeyError: - self.disp(_(u"hash is checked but hash value is missing", 1), error=True) + self.disp(_("hash is checked but hash value is missing", 1), error=True) else: - self.disp(_(u"hash can't be verified"), 1) + self.disp(_("hash can't be verified"), 1) def onProgressError(self, error_msg): - self.disp(_(u"Error while receiving file: {}").format(error_msg), error=True) + self.disp(_("Error while receiving file: {}").format(error_msg), error=True) def getXmluiId(self, action_data): # FIXME: we temporarily use ElementTree, but a real XMLUI managing module @@ -359,12 +350,12 @@ try: xml_ui = action_data["xmlui"] except KeyError: - self.disp(_(u"Action has no XMLUI"), 1) + self.disp(_("Action has no XMLUI"), 1) else: ui = ET.fromstring(xml_ui.encode("utf-8")) xmlui_id = ui.get("submit") if not xmlui_id: - self.disp(_(u"Invalid XMLUI received"), error=True) + self.disp(_("Invalid XMLUI received"), error=True) return xmlui_id def onFileAction(self, action_data, action_id, security_limit, profile): @@ -374,17 +365,17 @@ try: from_jid = jid.JID(action_data["meta_from_jid"]) except KeyError: - self.disp(_(u"Ignoring action without from_jid data"), 1) + self.disp(_("Ignoring action without from_jid data"), 1) return try: progress_id = action_data["meta_progress_id"] except KeyError: - self.disp(_(u"ignoring action without progress id"), 1) + self.disp(_("ignoring action without 
progress id"), 1) return if not self.bare_jids or from_jid.bare in self.bare_jids: if self._overwrite_refused: - self.disp(_(u"File refused because overwrite is needed"), error=True) + self.disp(_("File refused because overwrite is needed"), error=True) self.host.bridge.launchAction( xmlui_id, {"cancelled": C.BOOL_TRUE}, profile_key=profile ) @@ -400,15 +391,15 @@ try: progress_id = action_data["meta_progress_id"] except KeyError: - self.disp(_(u"ignoring action without progress id"), 1) + self.disp(_("ignoring action without progress id"), 1) return - self.disp(_(u"Overwriting needed"), 1) + self.disp(_("Overwriting needed"), 1) if progress_id == self.progress_id: if self.args.force: - self.disp(_(u"Overwrite accepted"), 2) + self.disp(_("Overwrite accepted"), 2) else: - self.disp(_(u"Refused to overwrite"), 2) + self.disp(_("Refused to overwrite"), 2) self._overwrite_refused = True xmlui_data = {"answer": C.boolConst(self.args.force)} @@ -417,40 +408,39 @@ def add_parser_options(self): self.parser.add_argument( "jids", - type=base.unicode_decoder, nargs="*", - help=_(u"jids accepted (accept everything if none is specified)"), + help=_("jids accepted (accept everything if none is specified)"), ) self.parser.add_argument( "-m", "--multiple", action="store_true", - help=_(u"accept multiple files (you'll have to stop manually)"), + help=_("accept multiple files (you'll have to stop manually)"), ) self.parser.add_argument( "-f", "--force", action="store_true", help=_( - u"force overwritting of existing files (/!\\ name is choosed by sender)" + "force overwritting of existing files (/!\\ name is choosed by sender)" ), ) self.parser.add_argument( "--path", default=".", metavar="DIR", - help=_(u"destination path (default: working directory)"), + help=_("destination path (default: working directory)"), ) def start(self): self.bare_jids = [jid.JID(jid_).bare for jid_ in self.args.jids] self.path = os.path.abspath(self.args.path) if not os.path.isdir(self.path): - self.disp(_(u"Given path is not a directory !", error=True)) + self.disp(_("Given path is not a directory !", error=True)) self.host.quit(2) if self.args.multiple: self.host.quit_on_progress_end = False - self.disp(_(u"waiting for incoming file request"), 2) + self.disp(_("waiting for incoming file request"), 2) class Upload(base.CommandBase): @@ -464,7 +454,6 @@ self.parser.add_argument("file", type=str, help=_("file to upload")) self.parser.add_argument( "jid", - type=base.unicode_decoder, nargs="?", help=_("jid of upload component (nothing to autodetect)"), ) @@ -475,21 +464,21 @@ ) def onProgressStarted(self, metadata): - self.disp(_(u"File upload started"), 2) + self.disp(_("File upload started"), 2) def onProgressFinished(self, metadata): - self.disp(_(u"File uploaded successfully"), 2) + self.disp(_("File uploaded successfully"), 2) try: url = metadata["url"] except KeyError: - self.disp(u"download URL not found in metadata") + self.disp("download URL not found in metadata") else: - self.disp(_(u"URL to retrieve the file:"), 1) + self.disp(_("URL to retrieve the file:"), 1) # XXX: url is display alone on a line to make parsing easier self.disp(url) def onProgressError(self, error_msg): - self.disp(_(u"Error while uploading file: {}").format(error_msg), error=True) + self.disp(_("Error while uploading file: {}").format(error_msg), error=True) def gotId(self, data, file_): """Called when a progress id has been received @@ -501,7 +490,7 @@ self.progress_id = data["progress"] except KeyError: # TODO: if 'xmlui' key is present, manage 
xmlui message display - self.disp(_(u"Can't upload file"), error=True) + self.disp(_("Can't upload file"), error=True) self.host.quit(2) def error(self, failure): @@ -514,10 +503,10 @@ def start(self): file_ = self.args.file if not os.path.exists(file_): - self.disp(_(u"file [{}] doesn't exist !").format(file_), error=True) + self.disp(_("file [{}] doesn't exist !").format(file_), error=True) self.host.quit(1) if os.path.isdir(file_): - self.disp(_(u"[{}] is a dir! Can't upload a dir").format(file_)) + self.disp(_("[{}] is a dir! Can't upload a dir").format(file_)) self.host.quit(1) self.full_dest_jid = ( @@ -547,7 +536,7 @@ "list", use_output=C.OUTPUT_LIST_DICT, extra_outputs=extra_outputs, - help=_(u"retrieve files shared by an entity"), + help=_("retrieve files shared by an entity"), use_verbose=True, ) self.need_loop = True @@ -556,12 +545,11 @@ self.parser.add_argument( "-d", "--path", - default=u"", - help=_(u"path to the directory containing the files"), + default="", + help=_("path to the directory containing the files"), ) self.parser.add_argument( "jid", - type=base.unicode_decoder, nargs="?", default="", help=_("jid of sharing entity (nothing to check our own jid)"), @@ -569,9 +557,9 @@ def file_gen(self, files_data): for file_data in files_data: - yield file_data[u"name"] - yield file_data.get(u"size", "") - yield file_data.get(u"hash", "") + yield file_data["name"] + yield file_data.get("size", "") + yield file_data.get("hash", "") def _name_filter(self, name, row): if row.type == C.FILE_TYPE_DIRECTORY: @@ -579,39 +567,39 @@ elif row.type == C.FILE_TYPE_FILE: return A.color(C.A_FILE, name) else: - self.disp(_(u"unknown file type: {type}").format(type=row.type), error=True) + self.disp(_("unknown file type: {type}").format(type=row.type), error=True) return name def _size_filter(self, size, row): if not size: - return u"" + return "" size = int(size) # cf. 
https://stackoverflow.com/a/1094933 (thanks) - suffix = u"o" - for unit in [u"", u"Ki", u"Mi", u"Gi", u"Ti", u"Pi", u"Ei", u"Zi"]: + suffix = "o" + for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]: if abs(size) < 1024.0: - return A.color(A.BOLD, u"{:.2f}".format(size), unit, suffix) + return A.color(A.BOLD, "{:.2f}".format(size), unit, suffix) size /= 1024.0 - return A.color(A.BOLD, u"{:.2f}".format(size), u"Yi", suffix) + return A.color(A.BOLD, "{:.2f}".format(size), "Yi", suffix) def default_output(self, files_data): """display files a way similar to ls""" files_data.sort(key=lambda d: d["name"].lower()) show_header = False if self.verbosity == 0: - headers = (u"name", u"type") + headers = ("name", "type") elif self.verbosity == 1: - headers = (u"name", u"type", u"size") + headers = ("name", "type", "size") elif self.verbosity > 1: show_header = True - headers = (u"name", u"type", u"size", u"hash") + headers = ("name", "type", "size", "hash") table = common.Table.fromDict( self.host, files_data, headers, - filters={u"name": self._name_filter, u"size": self._size_filter}, - defaults={u"size": u"", u"hash": u""}, + filters={"name": self._name_filter, "size": self._size_filter}, + defaults={"size": "", "hash": ""}, ) table.display_blank(show_header=show_header, hide_cols=["type"]) @@ -628,7 +616,7 @@ callback=self._FISListCb, errback=partial( self.errback, - msg=_(u"can't retrieve shared files: {}"), + msg=_("can't retrieve shared files: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -637,7 +625,7 @@ class SharePath(base.CommandBase): def __init__(self, host): super(SharePath, self).__init__( - host, "path", help=_(u"share a file or directory"), use_verbose=True + host, "path", help=_("share a file or directory"), use_verbose=True ) self.need_loop = True @@ -645,47 +633,44 @@ self.parser.add_argument( "-n", "--name", - type=base.unicode_decoder, - default=u"", - help=_(u"virtual name to use (default: use directory/file name)"), + default="", + help=_("virtual name to use (default: use directory/file name)"), ) perm_group = self.parser.add_mutually_exclusive_group() perm_group.add_argument( "-j", "--jid", - type=base.unicode_decoder, action="append", dest="jids", default=[], - help=_(u"jid of contacts allowed to retrieve the files"), + help=_("jid of contacts allowed to retrieve the files"), ) perm_group.add_argument( "--public", action="store_true", help=_( - u"share publicly the file(s) (/!\\ *everybody* will be able to access them)" + "share publicly the file(s) (/!\\ *everybody* will be able to access them)" ), ) self.parser.add_argument( "path", - type=base.unicode_decoder, - help=_(u"path to a file or directory to share"), + help=_("path to a file or directory to share"), ) def _FISSharePathCb(self, name): self.disp( - _(u'{path} shared under the name "{name}"').format(path=self.path, name=name) + _('{path} shared under the name "{name}"').format(path=self.path, name=name) ) self.host.quit() def start(self): self.path = os.path.abspath(self.args.path) if self.args.public: - access = {u"read": {u"type": u"public"}} + access = {"read": {"type": "public"}} else: jids = self.args.jids if jids: - access = {u"read": {u"type": "whitelist", u"jids": jids}} + access = {"read": {"type": "whitelist", "jids": jids}} else: access = {} self.host.bridge.FISSharePath( @@ -696,7 +681,7 @@ callback=self._FISSharePathCb, errback=partial( self.errback, - msg=_(u"can't share path: {}"), + msg=_("can't share path: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -705,7 +690,7 @@ class 
ShareInvite(base.CommandBase): def __init__(self, host): super(ShareInvite, self).__init__( - host, "invite", help=_(u"send invitation for a shared repository") + host, "invite", help=_("send invitation for a shared repository") ) self.need_loop = True @@ -713,61 +698,55 @@ self.parser.add_argument( "-n", "--name", - type=base.unicode_decoder, - default=u"", - help=_(u"name of the repository"), + default="", + help=_("name of the repository"), ) self.parser.add_argument( "-N", "--namespace", - type=base.unicode_decoder, - default=u"", - help=_(u"namespace of the repository"), + default="", + help=_("namespace of the repository"), ) self.parser.add_argument( "-P", "--path", - type=base.unicode_decoder, - help=_(u"path to the repository"), + help=_("path to the repository"), ) self.parser.add_argument( "-t", "--type", - choices=[u"files", u"photos"], - default=u"files", - help=_(u"type of the repository"), + choices=["files", "photos"], + default="files", + help=_("type of the repository"), ) self.parser.add_argument( "-T", "--thumbnail", - type=base.unicode_decoder, - help=_(u"https URL of a image to use as thumbnail"), + help=_("https URL of a image to use as thumbnail"), ) self.parser.add_argument( "service", - type=base.unicode_decoder, - help=_(u"jid of the file sharing service hosting the repository"), + help=_("jid of the file sharing service hosting the repository"), ) self.parser.add_argument( "jid", - type=base.unicode_decoder, - help=_(u"jid of the person to invite"), + help=_("jid of the person to invite"), ) def _FISInviteCb(self): self.disp( - _(u'invitation sent to {entity}').format(entity=self.args.jid) + _('invitation sent to {entity}').format(entity=self.args.jid) ) self.host.quit() def start(self): - self.path = os.path.normpath(self.args.path) if self.args.path else u"" + self.path = os.path.normpath(self.args.path) if self.args.path else "" extra = {} if self.args.thumbnail is not None: - if not self.args.thumbnail.startswith(u'http'): - self.parser.error(_(u"only http(s) links are allowed with --thumbnail")) + if not self.args.thumbnail.startswith('http'): + self.parser.error(_("only http(s) links are allowed with --thumbnail")) else: - extra[u'thumb_url'] = self.args.thumbnail + extra['thumb_url'] = self.args.thumbnail self.host.bridge.FISInvite( self.args.jid, self.args.service, @@ -780,7 +759,7 @@ callback=self._FISInviteCb, errback=partial( self.errback, - msg=_(u"can't send invitation: {}"), + msg=_("can't send invitation: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -791,7 +770,7 @@ def __init__(self, host): super(Share, self).__init__( - host, "share", use_profile=False, help=_(u"files sharing management") + host, "share", use_profile=False, help=_("files sharing management") ) @@ -800,5 +779,5 @@ def __init__(self, host): super(File, self).__init__( - host, "file", use_profile=False, help=_(u"files sending/receiving/management") + host, "file", use_profile=False, help=_("files sending/receiving/management") )
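One spot in the file get command above may deserve a second pass: raw_input() was turned into input(), but the prompt is still passed through .encode("utf-8"), so Python 3 would show the bytes repr (b'...') as the prompt. A possible follow-up fix, not part of this changeset (the path is invented):

    path = "/tmp/example.ogg"   # invented destination path
    message = "File {path} already exists! Do you want to overwrite?".format(path=path)
    confirm = input("{} (y/N) ".format(message))   # keep the prompt as str
    if confirm not in ("y", "Y"):
        print("file request cancelled")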
--- a/sat_frontends/jp/cmd_forums.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_forums.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat_frontends.jp.constants import Const as C from sat_frontends.jp import common @@ -29,7 +29,7 @@ __commands__ = ["Forums"] -FORUMS_TMP_DIR = u"forums" +FORUMS_TMP_DIR = "forums" class Edit(base.CommandBase, common.BaseEdit): @@ -43,7 +43,7 @@ use_pubsub=True, use_draft=True, use_verbose=True, - help=_(u"edit forums"), + help=_("edit forums"), ) common.BaseEdit.__init__(self, self.host, FORUMS_TMP_DIR) self.need_loop = True @@ -52,17 +52,16 @@ self.parser.add_argument( "-k", "--key", - type=base.unicode_decoder, - default=u"", - help=_(u"forum key (DEFAULT: default forums)"), + default="", + help=_("forum key (DEFAULT: default forums)"), ) def getTmpSuff(self): """return suffix used for content file""" - return u"json" + return "json" def forumsSetCb(self): - self.disp(_(u"forums have been edited"), 1) + self.disp(_("forums have been edited"), 1) self.host.quit() def publish(self, forums_raw): @@ -75,7 +74,7 @@ callback=self.forumsSetCb, errback=partial( self.errback, - msg=_(u"can't set forums: {}"), + msg=_("can't set forums: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -94,12 +93,12 @@ def forumsGetEb(self, failure_): # FIXME: error handling with bridge is broken, need to be properly fixed - if failure_.condition == u"item-not-found": - self.forumsGetCb(u"") + if failure_.condition == "item-not-found": + self.forumsGetCb("") else: self.errback( failure_, - msg=_(u"can't get forums structure: {}"), + msg=_("can't get forums structure: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ) @@ -125,7 +124,7 @@ extra_outputs=extra_outputs, use_pubsub=True, use_verbose=True, - help=_(u"get forums structure"), + help=_("get forums structure"), ) self.need_loop = True @@ -133,9 +132,8 @@ self.parser.add_argument( "-k", "--key", - type=base.unicode_decoder, - default=u"", - help=_(u"forum key (DEFAULT: default forums)"), + default="", + help=_("forum key (DEFAULT: default forums)"), ) def default_output(self, forums, level=0): @@ -143,33 +141,33 @@ keys = list(forum.keys()) keys.sort() try: - keys.remove(u"title") + keys.remove("title") except ValueError: pass else: - keys.insert(0, u"title") + keys.insert(0, "title") try: - keys.remove(u"sub-forums") + keys.remove("sub-forums") except ValueError: pass else: - keys.append(u"sub-forums") + keys.append("sub-forums") for key in keys: value = forum[key] if key == "sub-forums": self.default_output(value, level + 1) else: - if self.host.verbosity < 1 and key != u"title": + if self.host.verbosity < 1 and key != "title": continue head_color = C.A_LEVEL_COLORS[level % len(C.A_LEVEL_COLORS)] self.disp( - A.color(level * 4 * u" ", head_color, key, A.RESET, u": ", value) + A.color(level * 4 * " ", head_color, key, A.RESET, ": ", value) ) def forumsGetCb(self, forums_raw): if not forums_raw: - self.disp(_(u"no schema found"), 1) + self.disp(_("no schema found"), 1) self.host.quit(1) forums = json.loads(forums_raw) self.output(forums) @@ -184,7 +182,7 @@ callback=self.forumsGetCb, errback=partial( self.errback, - msg=_(u"can't get forums: {}"), + msg=_("can't get forums: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -195,5 +193,5 @@ def __init__(self, host): super(Forums, self).__init__( - host, "forums", use_profile=False, help=_(u"Forums structure edition") + host, "forums", 
use_profile=False, help=_("Forums structure edition") )
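The forums commands move their structure through JSON; with Python 3 both directions operate on str (json.loads() accepts str, json.dumps() returns str), so no extra encoding step is needed around forumsGet/forumsSet. Round-trip sketch with an invented structure:

    import json

    forums_raw = '[{"title": "General", "sub-forums": []}]'   # invented structure
    forums = json.loads(forums_raw)        # str in -> Python objects out
    print(forums[0]["title"])              # General
    edited_raw = json.dumps(forums)        # str out, ready to publish again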
--- a/sat_frontends/jp/cmd_identity.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_identity.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat_frontends.jp.constants import Const as C from functools import partial @@ -36,13 +36,13 @@ "get", use_output=C.OUTPUT_DICT, use_verbose=True, - help=_(u"get identity data"), + help=_("get identity data"), ) self.need_loop = True def add_parser_options(self): self.parser.add_argument( - "jid", type=base.unicode_decoder, help=_(u"entity to check") + "jid", help=_("entity to check") ) def identityGetCb(self, data): @@ -57,7 +57,7 @@ callback=self.identityGetCb, errback=partial( self.errback, - msg=_(u"can't get identity data: {}"), + msg=_("can't get identity data: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -71,13 +71,12 @@ self.parser.add_argument( "-f", "--field", - type=base.unicode_decoder, action="append", nargs=2, dest="fields", - metavar=(u"KEY", u"VALUE"), + metavar=("KEY", "VALUE"), required=True, - help=_(u"identity field(s) to set"), + help=_("identity field(s) to set"), ) self.need_loop = True @@ -89,7 +88,7 @@ callback=self.host.quit, errback=partial( self.errback, - msg=_(u"can't set identity data data: {}"), + msg=_("can't set identity data data: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), )
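The -f/--field option of cmd_identity.py keeps its argparse shape through the port: action="append" with nargs=2 collects [KEY, VALUE] pairs, which a dict() call (in code outside the shown hunk) can turn into the mapping sent over the bridge. Sketch with an invented value:

    import argparse

    parser = argparse.ArgumentParser(prog="jp identity set")
    parser.add_argument("-f", "--field", action="append", nargs=2, dest="fields",
                        metavar=("KEY", "VALUE"), required=True,
                        help="identity field(s) to set")
    args = parser.parse_args(["-f", "nick", "Louise"])   # invented value
    print(args.fields)         # [['nick', 'Louise']]
    print(dict(args.fields))   # {'nick': 'Louise'}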
--- a/sat_frontends/jp/cmd_info.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_info.py Tue Aug 13 19:08:41 2019 +0200 @@ -17,7 +17,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat.tools.common.ansi import ANSI as A from sat.tools.common import date_utils @@ -35,10 +35,10 @@ self.need_loop=True def add_parser_options(self): - self.parser.add_argument(u"jid", type=base.unicode_decoder, help=_(u"entity to discover")) - self.parser.add_argument(u"-t", u"--type", type=str, choices=('infos', 'items', 'both'), default='both', help=_(u"type of data to discover")) - self.parser.add_argument(u"-n", u"--node", type=base.unicode_decoder, default=u'', help=_(u"node to use")) - self.parser.add_argument(u"-C", u"--no-cache", dest='use_cache', action="store_false", help=_(u"ignore cache")) + self.parser.add_argument("jid", help=_("entity to discover")) + self.parser.add_argument("-t", "--type", type=str, choices=('infos', 'items', 'both'), default='both', help=_("type of data to discover")) + self.parser.add_argument("-n", "--node", default='', help=_("node to use")) + self.parser.add_argument("-C", "--no-cache", dest='use_cache', action="store_false", help=_("ignore cache")) def start(self): self.get_infos = self.args.type in ('infos', 'both') @@ -51,7 +51,7 @@ self.host.bridge.discoInfos(jid, node=self.args.node, use_cache=self.args.use_cache, profile_key=self.host.profile, callback=lambda infos: self.gotInfos(infos, jid), errback=self.error) def error(self, failure): - print (_("Error while doing discovery [%s]") % failure) + print((_("Error while doing discovery [%s]") % failure)) self.host.quit(1) def gotInfos(self, infos, jid): @@ -68,78 +68,78 @@ features.sort() identities.sort(key=lambda identity: identity[2]) data.update({ - u'features': features, - u'identities': identities, - u'extensions': extensions}) + 'features': features, + 'identities': identities, + 'extensions': extensions}) if self.get_items: items.sort(key=lambda item: item[2]) - data[u'items'] = items + data['items'] = items self.output(data) self.host.quit() def default_output(self, data): - features = data.get(u'features', []) - identities = data.get(u'identities', []) - extensions = data.get(u'extensions', {}) - items = data.get(u'items', []) + features = data.get('features', []) + identities = data.get('identities', []) + extensions = data.get('extensions', {}) + items = data.get('items', []) identities_table = common.Table(self.host, identities, - headers=(_(u'category'), - _(u'type'), - _(u'name')), + headers=(_('category'), + _('type'), + _('name')), use_buffer=True) extensions_tpl = [] - extensions_types = extensions.keys() + extensions_types = list(extensions.keys()) extensions_types.sort() for type_ in extensions_types: fields = [] for field in extensions[type_]: field_lines = [] data, values = field - data_keys = data.keys() + data_keys = list(data.keys()) data_keys.sort() for key in data_keys: - field_lines.append(A.color(u'\t', C.A_SUBHEADER, key, A.RESET, u': ', + field_lines.append(A.color('\t', C.A_SUBHEADER, key, A.RESET, ': ', data[key])) if len(values) == 1: - field_lines.append(A.color(u'\t', C.A_SUBHEADER, u"value", A.RESET, - u': ', values[0] or (A.BOLD + u"UNSET"))) + field_lines.append(A.color('\t', C.A_SUBHEADER, "value", A.RESET, + ': ', values[0] or (A.BOLD + "UNSET"))) elif len(values) > 1: - 
field_lines.append(A.color(u'\t', C.A_SUBHEADER, u"values", A.RESET, - u': ')) + field_lines.append(A.color('\t', C.A_SUBHEADER, "values", A.RESET, + ': ')) for value in values: - field_lines.append(A.color(u'\t - ', A.BOLD, value)) - fields.append(u'\n'.join(field_lines)) - extensions_tpl.append(u'{type_}\n{fields}'.format(type_=type_, + field_lines.append(A.color('\t - ', A.BOLD, value)) + fields.append('\n'.join(field_lines)) + extensions_tpl.append('{type_}\n{fields}'.format(type_=type_, fields='\n\n'.join(fields))) items_table = common.Table(self.host, items, - headers=(_(u'entity'), - _(u'node'), - _(u'name')), + headers=(_('entity'), + _('node'), + _('name')), use_buffer=True) template = [] if features: - template.append(A.color(C.A_HEADER, _(u"Features")) + u"\n\n{features}") + template.append(A.color(C.A_HEADER, _("Features")) + "\n\n{features}") if identities: - template.append(A.color(C.A_HEADER, _(u"Identities")) + u"\n\n{identities}") + template.append(A.color(C.A_HEADER, _("Identities")) + "\n\n{identities}") if extensions: - template.append(A.color(C.A_HEADER, _(u"Extensions")) + u"\n\n{extensions}") + template.append(A.color(C.A_HEADER, _("Extensions")) + "\n\n{extensions}") if items: - template.append(A.color(C.A_HEADER, _(u"Items")) + u"\n\n{items}") + template.append(A.color(C.A_HEADER, _("Items")) + "\n\n{items}") - print u"\n\n".join(template).format(features = u'\n'.join(features), + print("\n\n".join(template).format(features = '\n'.join(features), identities = identities_table.display().string, - extensions = u'\n'.join(extensions_tpl), + extensions = '\n'.join(extensions_tpl), items = items_table.display().string, - ) + )) class Version(base.CommandBase): @@ -157,7 +157,7 @@ self.host.bridge.getSoftwareVersion(jid, self.host.profile, callback=self.gotVersion, errback=self.error) def error(self, failure): - print (_("Error while trying to get version [%s]") % failure) + print((_("Error while trying to get version [%s]") % failure)) self.host.quit(1) def gotVersion(self, data): @@ -170,7 +170,7 @@ if os: infos.append(_("Operating System: %s") % os) - print "\n".join(infos) + print("\n".join(infos)) self.host.quit() @@ -183,7 +183,7 @@ def default_output(self, data): started = data['started'] - data['started'] = u'{short} (UTC, {relative})'.format( + data['started'] = '{short} (UTC, {relative})'.format( short=date_utils.date_fmt(started), relative=date_utils.date_fmt(started, 'relative')) self.host.output(C.OUTPUT_DICT, 'simple', {}, data) @@ -199,7 +199,7 @@ self.host.quit() def _sessionInfoGetEb(self, error_data): - self.disp(_(u'Error getting session infos: {}').format(error_data), error=True) + self.disp(_('Error getting session infos: {}').format(error_data), error=True) self.host.quit(C.EXIT_BRIDGE_ERRBACK)
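In cmd_info.py, 2to3 wraps extensions.keys() and data.keys() in list() before calling .sort(), because Python 3 key views cannot be sorted in place; sorted() expresses the same thing in one step. Sketch with invented extension names:

    extensions = {"urn:xmpp:dataforms:softwareinfo": [], "muc#roominfo": []}   # invented

    extensions_types = list(extensions.keys())   # form produced by 2to3
    extensions_types.sort()

    assert extensions_types == sorted(extensions)   # equivalent, shorter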
--- a/sat_frontends/jp/cmd_input.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_input.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat.core import exceptions from sat_frontends.jp.constants import Const as C @@ -56,7 +56,7 @@ def add_parser_options(self): self.parser.add_argument( - "--encoding", default="utf-8", help=_(u"encoding of the input data") + "--encoding", default="utf-8", help=_("encoding of the input data") ) self.parser.add_argument( "-i", @@ -64,7 +64,7 @@ action="append_const", const=(OPT_STDIN, None), dest="arguments", - help=_(u"standard input"), + help=_("standard input"), ) self.parser.add_argument( "-s", @@ -72,7 +72,7 @@ type=self.opt(OPT_SHORT), action="append", dest="arguments", - help=_(u"short option"), + help=_("short option"), ) self.parser.add_argument( "-l", @@ -80,7 +80,7 @@ type=self.opt(OPT_LONG), action="append", dest="arguments", - help=_(u"long option"), + help=_("long option"), ) self.parser.add_argument( "-p", @@ -88,7 +88,7 @@ type=self.opt(OPT_POS), action="append", dest="arguments", - help=_(u"positional argument"), + help=_("positional argument"), ) self.parser.add_argument( "-x", @@ -96,19 +96,19 @@ action="append_const", const=(OPT_IGNORE, None), dest="arguments", - help=_(u"ignore value"), + help=_("ignore value"), ) self.parser.add_argument( "-D", "--debug", action="store_true", - help=_(u"don't actually run commands but echo what would be launched"), + help=_("don't actually run commands but echo what would be launched"), ) self.parser.add_argument( - "--log", type=argparse.FileType("wb"), help=_(u"log stdout to FILE") + "--log", type=argparse.FileType("wb"), help=_("log stdout to FILE") ) self.parser.add_argument( - "--log-err", type=argparse.FileType("wb"), help=_(u"log stderr to FILE") + "--log-err", type=argparse.FileType("wb"), help=_("log stderr to FILE") ) self.parser.add_argument("command", nargs=argparse.REMAINDER) @@ -123,7 +123,7 @@ arg_type, arg_name = arguments[self.args_idx] except IndexError: self.disp( - _(u"arguments in input data and in arguments sequence don't match"), + _("arguments in input data and in arguments sequence don't match"), error=True, ) self.host.quit(C.EXIT_DATA_ERROR) @@ -150,13 +150,13 @@ self.disp( A.color( C.A_SUBHEADER, - _(u"values: "), + _("values: "), A.RESET, - u", ".join(self._values_ori), + ", ".join(self._values_ori), ), 2, ) - self.disp(A.color(A.BOLD, _(u"**SKIPPING**\n"))) + self.disp(A.color(A.BOLD, _("**SKIPPING**\n"))) self.reset() self.idx += 1 raise exceptions.CancelError @@ -180,7 +180,7 @@ else: self.parser.error( _( - u"Invalid argument, an option type is expected, got {type_}:{name}" + "Invalid argument, an option type is expected, got {type_}:{name}" ).format(type_=arg_type, name=arg_name) ) @@ -188,39 +188,39 @@ """run requested command with parsed arguments""" if self.args_idx != len(self.args.arguments): self.disp( - _(u"arguments in input data and in arguments sequence don't match"), + _("arguments in input data and in arguments sequence don't match"), error=True, ) self.host.quit(C.EXIT_DATA_ERROR) self.disp( - A.color(C.A_HEADER, _(u"command {idx}").format(idx=self.idx)), + A.color(C.A_HEADER, _("command {idx}").format(idx=self.idx)), no_lf=not self.args.debug, ) stdin = "".join(self._stdin) if self.args.debug: self.disp( A.color( - C.A_SUBHEADER, _(u"values: "), A.RESET, u", ".join(self._values_ori) + C.A_SUBHEADER, 
_("values: "), A.RESET, ", ".join(self._values_ori) ), 2, ) if stdin: - self.disp(A.color(C.A_SUBHEADER, u"--- STDIN ---")) - self.disp(stdin.decode("utf-8")) - self.disp(A.color(C.A_SUBHEADER, u"-------------")) + self.disp(A.color(C.A_SUBHEADER, "--- STDIN ---")) + self.disp(stdin) + self.disp(A.color(C.A_SUBHEADER, "-------------")) self.disp( - u"{indent}{prog} {static} {options} {positionals}".format( - indent=4 * u" ", + "{indent}{prog} {static} {options} {positionals}".format( + indent=4 * " ", prog=sys.argv[0], - static=" ".join(self.args.command).decode("utf-8"), - options=u" ".join([o.decode("utf-8") for o in self._opts]), - positionals=u" ".join([p.decode("utf-8") for p in self._pos]), + static=" ".join(self.args.command), + options=" ".join([o for o in self._opts]), + positionals=" ".join([p for p in self._pos]), ) ) - self.disp(u"\n") + self.disp("\n") else: - self.disp(u" (" + u", ".join(self._values_ori) + u")", 2, no_lf=True) + self.disp(" (" + ", ".join(self._values_ori) + ")", 2, no_lf=True) args = [sys.argv[0]] + self.args.command + self._opts + self._pos p = subprocess.Popen( args, @@ -238,9 +238,9 @@ log_err.write(log_tpl.format(command=" ".join(args), buff=stderr)) ret = p.wait() if ret == 0: - self.disp(A.color(C.A_SUCCESS, _(u"OK"))) + self.disp(A.color(C.A_SUCCESS, _("OK"))) else: - self.disp(A.color(C.A_FAILURE, _(u"FAILED"))) + self.disp(A.color(C.A_FAILURE, _("FAILED"))) self.reset() self.idx += 1 @@ -260,7 +260,7 @@ class Csv(InputCommon): def __init__(self, host): - super(Csv, self).__init__(host, "csv", _(u"comma-separated values")) + super(Csv, self).__init__(host, "csv", _("comma-separated values")) def add_parser_options(self): InputCommon.add_parser_options(self) @@ -269,7 +269,7 @@ "--row", type=int, default=0, - help=_(u"starting row (previous ones will be ignored)"), + help=_("starting row (previous ones will be ignored)"), ) self.parser.add_argument( "-S", @@ -277,7 +277,7 @@ action="append_const", const=("split", None), dest="arguments", - help=_(u"split value in several options"), + help=_("split value in several options"), ) self.parser.add_argument( "-E", @@ -285,8 +285,8 @@ action="append", type=self.opt("empty"), dest="arguments", - help=_(u"action to do on empty value ({choices})").format( - choices=u", ".join(OPT_EMPTY_CHOICES) + help=_("action to do on empty value ({choices})").format( + choices=", ".join(OPT_EMPTY_CHOICES) ), ) @@ -300,8 +300,8 @@ return value if value else False else: self.parser.error( - _(u"--empty value must be one of {choices}").format( - choices=u", ".join(OPT_EMPTY_CHOICES) + _("--empty value must be one of {choices}").format( + choices=", ".join(OPT_EMPTY_CHOICES) ) ) @@ -331,5 +331,5 @@ host, "input", use_profile=False, - help=_(u"launch command with external input"), + help=_("launch command with external input"), )
--- a/sat_frontends/jp/cmd_invitation.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_invitation.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat_frontends.jp.constants import Const as C from sat.tools.common.ansi import ANSI as A @@ -36,7 +36,7 @@ "create", use_profile=False, use_output=C.OUTPUT_DICT, - help=_(u"create and send an invitation"), + help=_("create and send an invitation"), ) self.need_loop = True @@ -44,28 +44,24 @@ self.parser.add_argument( "-j", "--jid", - type=base.unicode_decoder, default="", help="jid of the invitee (default: generate one)", ) self.parser.add_argument( "-P", "--password", - type=base.unicode_decoder, default="", help="password of the invitee profile/XMPP account (default: generate one)", ) self.parser.add_argument( "-n", "--name", - type=base.unicode_decoder, default="", help="name of the invitee", ) self.parser.add_argument( "-N", "--host-name", - type=base.unicode_decoder, default="", help="name of the host", ) @@ -73,7 +69,6 @@ "-e", "--email", action="append", - type=base.unicode_decoder, default=[], help="email(s) to send the invitation to (if --no-email is set, email will just be saved)", ) @@ -83,28 +78,24 @@ self.parser.add_argument( "-l", "--lang", - type=base.unicode_decoder, default="", help="main language spoken by the invitee", ) self.parser.add_argument( "-u", "--url", - type=base.unicode_decoder, default="", help="template to construct the URL", ) self.parser.add_argument( "-s", "--subject", - type=base.unicode_decoder, default="", help="subject of the invitation email (default: generic subject)", ) self.parser.add_argument( "-b", "--body", - type=base.unicode_decoder, default="", help="body of the invitation email (default: generic body)", ) @@ -112,7 +103,6 @@ "-x", "--extra", metavar=("KEY", "VALUE"), - type=base.unicode_decoder, action="append", nargs=2, default=[], @@ -121,7 +111,6 @@ self.parser.add_argument( "-p", "--profile", - type=base.unicode_decoder, default="", help="profile doing the invitation (default: don't associate profile)", ) @@ -132,7 +121,7 @@ def invitationCreateEb(self, failure_): self.disp( - u"can't create invitation: {reason}".format(reason=failure_), error=True + "can't create invitation: {reason}".format(reason=failure_), error=True ) self.host.quit(C.EXIT_BRIDGE_ERRBACK) @@ -143,11 +132,11 @@ if self.args.no_email: if email: extra["email"] = email - data_format.iter2dict(u"emails_extra", emails_extra) + data_format.iter2dict("emails_extra", emails_extra) else: if not email: self.parser.error( - _(u"you need to specify an email address to send email invitation") + _("you need to specify an email address to send email invitation") ) self.host.bridge.invitationCreate( @@ -176,19 +165,19 @@ "get", use_profile=False, use_output=C.OUTPUT_DICT, - help=_(u"get invitation data"), + help=_("get invitation data"), ) self.need_loop = True def add_parser_options(self): self.parser.add_argument( - "id", type=base.unicode_decoder, help=_(u"invitation UUID") + "id", help=_("invitation UUID") ) self.parser.add_argument( "-j", "--with-jid", action="store_true", - help=_(u"start profile session and retrieve jid"), + help=_("start profile session and retrieve jid"), ) def output_data(self, data, jid_=None): @@ -199,28 +188,28 @@ def invitationGetCb(self, invitation_data): if self.args.with_jid: - profile = invitation_data[u"guest_profile"] + profile = 
invitation_data["guest_profile"] def session_started(__): self.host.bridge.asyncGetParamA( - u"JabberID", - u"Connection", + "JabberID", + "Connection", profile_key=profile, callback=lambda jid_: self.output_data(invitation_data, jid_), errback=partial( self.errback, - msg=_(u"can't retrieve jid: {}"), + msg=_("can't retrieve jid: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) self.host.bridge.profileStartSession( - invitation_data[u"password"], + invitation_data["password"], profile, callback=session_started, errback=partial( self.errback, - msg=_(u"can't start session: {}"), + msg=_("can't start session: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -233,7 +222,7 @@ callback=self.invitationGetCb, errback=partial( self.errback, - msg=_(u"can't get invitation data: {}"), + msg=_("can't get invitation data: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -242,7 +231,7 @@ class Modify(base.CommandBase): def __init__(self, host): base.CommandBase.__init__( - self, host, "modify", use_profile=False, help=_(u"modify existing invitation") + self, host, "modify", use_profile=False, help=_("modify existing invitation") ) self.need_loop = True @@ -253,21 +242,18 @@ self.parser.add_argument( "-n", "--name", - type=base.unicode_decoder, default="", help="name of the invitee", ) self.parser.add_argument( "-N", "--host-name", - type=base.unicode_decoder, default="", help="name of the host", ) self.parser.add_argument( "-e", "--email", - type=base.unicode_decoder, default="", help="email to send the invitation to (if --no-email is set, email will just be saved)", ) @@ -275,7 +261,6 @@ "-l", "--lang", dest="language", - type=base.unicode_decoder, default="", help="main language spoken by the invitee", ) @@ -283,7 +268,6 @@ "-x", "--extra", metavar=("KEY", "VALUE"), - type=base.unicode_decoder, action="append", nargs=2, default=[], @@ -292,21 +276,20 @@ self.parser.add_argument( "-p", "--profile", - type=base.unicode_decoder, default="", help="profile doing the invitation (default: don't associate profile", ) self.parser.add_argument( - "id", type=base.unicode_decoder, help=_(u"invitation UUID") + "id", help=_("invitation UUID") ) def invitationModifyCb(self): - self.disp(_(u"invitations have been modified correctly")) + self.disp(_("invitations have been modified correctly")) self.host.quit(C.EXIT_OK) def invitationModifyEb(self, failure_): self.disp( - u"can't create invitation: {reason}".format(reason=failure_), error=True + "can't create invitation: {reason}".format(reason=failure_), error=True ) self.host.quit(C.EXIT_BRIDGE_ERRBACK) @@ -319,7 +302,7 @@ if arg_name in extra: self.parser.error( _( - u"you can't set {arg_name} in both optional argument and extra" + "you can't set {arg_name} in both optional argument and extra" ).format(arg_name=arg_name) ) extra[arg_name] = value @@ -342,26 +325,26 @@ use_profile=False, use_output=C.OUTPUT_COMPLEX, extra_outputs=extra_outputs, - help=_(u"list invitations data"), + help=_("list invitations data"), ) self.need_loop = True def default_output(self, data): - for idx, datum in enumerate(data.iteritems()): + for idx, datum in enumerate(data.items()): if idx: - self.disp(u"\n") + self.disp("\n") key, invitation_data = datum self.disp(A.color(C.A_HEADER, key)) - indent = u" " - for k, v in invitation_data.iteritems(): - self.disp(indent + A.color(C.A_SUBHEADER, k + u":") + u" " + unicode(v)) + indent = " " + for k, v in invitation_data.items(): + self.disp(indent + A.color(C.A_SUBHEADER, k + ":") + " " + str(v)) def add_parser_options(self): 
self.parser.add_argument( "-p", "--profile", default=C.PROF_KEY_NONE, - help=_(u"return only invitations linked to this profile"), + help=_("return only invitations linked to this profile"), ) def invitationListCb(self, data): @@ -374,7 +357,7 @@ callback=self.invitationListCb, errback=partial( self.errback, - msg=_(u"can't list invitations: {}"), + msg=_("can't list invitations: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -388,5 +371,5 @@ host, "invitation", use_profile=False, - help=_(u"invitation of user(s) without XMPP account"), + help=_("invitation of user(s) without XMPP account"), )
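The dominant change in cmd_invitation.py is dropping type=base.unicode_decoder from every add_argument() call: Python 3's argparse already yields str values, so the Python 2 helper that decoded byte strings is no longer needed. A stripped-down sketch of the before/after (standalone parser, not the jp command classes):

    import argparse

    parser = argparse.ArgumentParser()
    # Python 2 needed something like: type=lambda s: s.decode("utf-8")
    # Python 3: argument values arrive as text already.
    parser.add_argument("-n", "--name", default="", help="name of the invitee")
    parser.add_argument("id", help="invitation UUID")

    ns = parser.parse_args(["--name", "Élo", "some-uuid"])
    assert isinstance(ns.name, str) and isinstance(ns.id, str)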
--- a/sat_frontends/jp/cmd_merge_request.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_merge_request.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat.tools.common import data_format from sat_frontends.jp.constants import Const as C @@ -37,8 +37,8 @@ host, "set", use_pubsub=True, - pubsub_defaults={u"service": _(u"auto"), u"node": _(u"auto")}, - help=_(u"publish or update a merge request"), + pubsub_defaults={"service": _("auto"), "node": _("auto")}, + help=_("publish or update a merge request"), ) self.need_loop = True @@ -46,59 +46,56 @@ self.parser.add_argument( "-i", "--item", - type=base.unicode_decoder, - default=u"", - help=_(u"id or URL of the request to update, or nothing for a new one"), + default="", + help=_("id or URL of the request to update, or nothing for a new one"), ) self.parser.add_argument( "-r", "--repository", metavar="PATH", - type=base.unicode_decoder, - default=u".", - help=_(u"path of the repository (DEFAULT: current directory)"), + default=".", + help=_("path of the repository (DEFAULT: current directory)"), ) self.parser.add_argument( "-f", "--force", action="store_true", - help=_(u"publish merge request without confirmation"), + help=_("publish merge request without confirmation"), ) self.parser.add_argument( "-l", "--label", dest="labels", - type=base.unicode_decoder, action="append", - help=_(u"labels to categorize your request"), + help=_("labels to categorize your request"), ) def mergeRequestSetCb(self, published_id): if published_id: - self.disp(u"Merge request published at {pub_id}".format(pub_id=published_id)) + self.disp("Merge request published at {pub_id}".format(pub_id=published_id)) else: - self.disp(u"Merge request published") + self.disp("Merge request published") self.host.quit(C.EXIT_OK) def sendRequest(self): extra = {"update": True} if self.args.item else {} values = {} if self.args.labels is not None: - values[u"labels"] = self.args.labels + values["labels"] = self.args.labels self.host.bridge.mergeRequestSet( self.args.service, self.args.node, self.repository, - u"auto", + "auto", values, - u"", + "", self.args.item, data_format.serialise(extra), self.profile, callback=self.mergeRequestSetCb, errback=partial( self.errback, - msg=_(u"can't create merge request: {}"), + msg=_("can't create merge request: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -106,9 +103,9 @@ def askConfirmation(self): if not self.args.force: message = _( - u"You are going to publish your changes to service [{service}], are you sure ?" + "You are going to publish your changes to service [{service}], are you sure ?" 
).format(service=self.args.service) - self.host.confirmOrQuit(message, _(u"merge request publication cancelled")) + self.host.confirmOrQuit(message, _("merge request publication cancelled")) self.sendRequest() def start(self): @@ -125,8 +122,8 @@ use_verbose=True, use_pubsub=True, pubsub_flags={C.MULTI_ITEMS}, - pubsub_defaults={u"service": _(u"auto"), u"node": _(u"auto")}, - help=_(u"get a merge request"), + pubsub_defaults={"service": _("auto"), "node": _("auto")}, + help=_("get a merge request"), ) self.need_loop = True @@ -141,7 +138,7 @@ for request_xmlui in requests_data[0]: xmlui = xmlui_manager.create(self.host, request_xmlui, whitelist=whitelist) xmlui.show(values_only=True) - self.disp(u"") + self.disp("") self.host.quit(C.EXIT_OK) def getRequests(self): @@ -151,13 +148,13 @@ self.args.node, self.args.max, self.args.items, - u"", + "", extra, self.profile, callback=self.mergeRequestGetCb, errback=partial( self.errback, - msg=_(u"can't get merge request: {}"), + msg=_("can't get merge request: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -176,8 +173,8 @@ "import", use_pubsub=True, pubsub_flags={C.SINGLE_ITEM, C.ITEM}, - pubsub_defaults={u"service": _(u"auto"), u"node": _(u"auto")}, - help=_(u"import a merge request"), + pubsub_defaults={"service": _("auto"), "node": _("auto")}, + help=_("import a merge request"), ) self.need_loop = True @@ -186,9 +183,8 @@ "-r", "--repository", metavar="PATH", - type=base.unicode_decoder, - default=u".", - help=_(u"path of the repository (DEFAULT: current directory)"), + default=".", + help=_("path of the repository (DEFAULT: current directory)"), ) def mergeRequestImportCb(self): @@ -206,7 +202,7 @@ callback=self.mergeRequestImportCb, errback=partial( self.errback, - msg=_(u"can't import merge request: {}"), + msg=_("can't import merge request: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), )
--- a/sat_frontends/jp/cmd_message.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_message.py Tue Aug 13 19:08:41 2019 +0200 @@ -37,15 +37,15 @@ def add_parser_options(self): self.parser.add_argument( - "-l", "--lang", type=str, default="", help=_(u"language of the message") + "-l", "--lang", type=str, default="", help=_("language of the message") ) self.parser.add_argument( "-s", "--separate", action="store_true", help=_( - u"separate xmpp messages: send one message per line instead of one " - u"message alone." + "separate xmpp messages: send one message per line instead of one " + "message alone." ), ) self.parser.add_argument( @@ -53,17 +53,16 @@ "--new-line", action="store_true", help=_( - u"add a new line at the beginning of the input (usefull for ascii art ;))" + "add a new line at the beginning of the input (usefull for ascii art ;))" ), ) self.parser.add_argument( "-S", "--subject", - type=base.unicode_decoder, - help=_(u"subject of the message"), + help=_("subject of the message"), ) self.parser.add_argument( - "-L", "--subject_lang", type=str, default="", help=_(u"language of subject") + "-L", "--subject_lang", type=str, default="", help=_("language of subject") ) self.parser.add_argument( "-t", @@ -73,16 +72,16 @@ help=_("type of the message"), ) self.parser.add_argument("-e", "--encrypt", metavar="ALGORITHM", - help=_(u"encrypt message using given algorithm")) + help=_("encrypt message using given algorithm")) self.parser.add_argument( "--encrypt-noreplace", action="store_true", - help=_(u"don't replace encryption algorithm if an other one is already used")) + help=_("don't replace encryption algorithm if an other one is already used")) syntax = self.parser.add_mutually_exclusive_group() - syntax.add_argument("-x", "--xhtml", action="store_true", help=_(u"XHTML body")) - syntax.add_argument("-r", "--rich", action="store_true", help=_(u"rich body")) + syntax.add_argument("-x", "--xhtml", action="store_true", help=_("XHTML body")) + syntax.add_argument("-r", "--rich", action="store_true", help=_("rich body")) self.parser.add_argument( - "jid", type=base.unicode_decoder, help=_(u"the destination jid") + "jid", help=_("the destination jid") ) def multi_send_cb(self): @@ -91,7 +90,7 @@ self.host.quit(self.errcode) def multi_send_eb(self, failure_, msg): - self.disp(_(u"Can't send message [{msg}]: {reason}").format( + self.disp(_("Can't send message [{msg}]: {reason}").format( msg=msg, reason=failure_)) self.errcode = C.EXIT_BRIDGE_ERRBACK self.multi_send_cb() @@ -112,10 +111,10 @@ subject = {self.args.subject_lang: self.args.subject} if self.args.xhtml or self.args.rich: - key = u"xhtml" if self.args.xhtml else u"rich" + key = "xhtml" if self.args.xhtml else "rich" if self.args.lang: - key = u"{}_{}".format(key, self.args.lang) - extra[key] = clean_ustr(u"".join(stdin_lines)) + key = "{}_{}".format(key, self.args.lang) + extra[key] = clean_ustr("".join(stdin_lines)) stdin_lines = [] if self.args.separate: # we send stdin in several messages @@ -150,7 +149,7 @@ else: msg = ( - {self.args.lang: header + clean_ustr(u"".join(stdin_lines))} + {self.args.lang: header + clean_ustr("".join(stdin_lines))} if not (self.args.xhtml or self.args.rich) else {} ) @@ -163,7 +162,7 @@ profile_key=self.host.profile, callback=self.host.quit, errback=partial(self.errback, - msg=_(u"Can't send message: {}"))) + msg=_("Can't send message: {}"))) def encryptionNamespaceGetCb(self, namespace, jid_): self.host.bridge.messageEncryptionStart( @@ -171,7 +170,7 @@ self.profile, callback=lambda: 
self.sendStdin(jid_), errback=partial(self.errback, - msg=_(u"Can't start encryption session: {}"), + msg=_("Can't start encryption session: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, )) @@ -179,7 +178,7 @@ def start(self): if self.args.xhtml and self.args.separate: self.disp( - u"argument -s/--separate is not compatible yet with argument -x/--xhtml", + "argument -s/--separate is not compatible yet with argument -x/--xhtml", error=True, ) self.host.quit(2) @@ -194,7 +193,7 @@ self.host.bridge.encryptionNamespaceGet(self.args.encrypt, callback=partial(self.encryptionNamespaceGetCb, jid_=jid_), errback=partial(self.errback, - msg=_(u"Can't get encryption namespace: {}"), + msg=_("Can't get encryption namespace: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, )) else: @@ -205,49 +204,49 @@ def __init__(self, host): super(MAM, self).__init__( - host, "mam", use_output=C.OUTPUT_MESS, use_verbose=True, help=_(u"query archives using MAM")) + host, "mam", use_output=C.OUTPUT_MESS, use_verbose=True, help=_("query archives using MAM")) self.need_loop=True def add_parser_options(self): self.parser.add_argument( - "-s", "--service", type=base.unicode_decoder, default=u"", - help=_(u"jid of the service (default: profile's server")) + "-s", "--service", default="", + help=_("jid of the service (default: profile's server")) self.parser.add_argument( "-S", "--start", dest="mam_start", type=base.date_decoder, help=_( - u"start fetching archive from this date (default: from the beginning)")) + "start fetching archive from this date (default: from the beginning)")) self.parser.add_argument( "-E", "--end", dest="mam_end", type=base.date_decoder, - help=_(u"end fetching archive after this date (default: no limit)")) + help=_("end fetching archive after this date (default: no limit)")) self.parser.add_argument( - "-W", "--with", dest="mam_with", type=base.unicode_decoder, - help=_(u"retrieve only archives with this jid")) + "-W", "--with", dest="mam_with", + help=_("retrieve only archives with this jid")) self.parser.add_argument( "-m", "--max", dest="rsm_max", type=int, default=20, - help=_(u"maximum number of items to retrieve, using RSM (default: 20))")) + help=_("maximum number of items to retrieve, using RSM (default: 20))")) rsm_page_group = self.parser.add_mutually_exclusive_group() rsm_page_group.add_argument( - "-a", "--after", dest="rsm_after", type=base.unicode_decoder, - help=_(u"find page after this item"), metavar='ITEM_ID') + "-a", "--after", dest="rsm_after", + help=_("find page after this item"), metavar='ITEM_ID') rsm_page_group.add_argument( - "-b", "--before", dest="rsm_before", type=base.unicode_decoder, - help=_(u"find page before this item"), metavar='ITEM_ID') + "-b", "--before", dest="rsm_before", + help=_("find page before this item"), metavar='ITEM_ID') rsm_page_group.add_argument( "--index", dest="rsm_index", type=int, - help=_(u"index of the page to retrieve")) + help=_("index of the page to retrieve")) def _sessionInfosGetCb(self, session_info, data, metadata): - self.host.own_jid = jid.JID(session_info[u"jid"]) + self.host.own_jid = jid.JID(session_info["jid"]) self.output(data) # FIXME: metadata are not displayed correctly and don't play nice with output # they should be added to output data somehow if self.verbosity: - for value in (u"rsm_first", u"rsm_last", u"rsm_index", u"rsm_count", - u"mam_complete", u"mam_stable"): + for value in ("rsm_first", "rsm_last", "rsm_index", "rsm_count", + "mam_complete", "mam_stable"): if value in metadata: - label = value.split(u"_")[1] + label = 
value.split("_")[1] self.disp(A.color( - C.A_HEADER, label, u': ' , A.RESET, metadata[value])) + C.A_HEADER, label, ': ' , A.RESET, metadata[value])) self.host.quit() @@ -260,16 +259,16 @@ def start(self): extra = {} if self.args.mam_start is not None: - extra[u"mam_start"] = float(self.args.mam_start) + extra["mam_start"] = float(self.args.mam_start) if self.args.mam_end is not None: - extra[u"mam_end"] = float(self.args.mam_end) + extra["mam_end"] = float(self.args.mam_end) if self.args.mam_with is not None: - extra[u"mam_with"] = self.args.mam_with + extra["mam_with"] = self.args.mam_with for suff in ('max', 'after', 'before', 'index'): - key = u'rsm_' + suff + key = 'rsm_' + suff value = getattr(self.args,key) if value is not None: - extra[key] = unicode(value) + extra[key] = str(value) self.host.bridge.MAMGet( self.args.service, data_format.serialise(extra), self.profile, callback=self._MAMGetCb, errback=self.errback)
--- a/sat_frontends/jp/cmd_param.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_param.py Tue Aug 13 19:08:41 2019 +0200 @@ -19,7 +19,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ __commands__ = ["Param"] @@ -29,31 +29,31 @@ super(Get, self).__init__(host, 'get', need_connect=False, help=_('Get a parameter value')) def add_parser_options(self): - self.parser.add_argument("category", nargs='?', type=base.unicode_decoder, help=_(u"Category of the parameter")) - self.parser.add_argument("name", nargs='?', type=base.unicode_decoder, help=_(u"Name of the parameter")) - self.parser.add_argument("-a", "--attribute", type=str, default="value", help=_(u"Name of the attribute to get")) - self.parser.add_argument("--security-limit", type=int, default=-1, help=_(u"Security limit")) + self.parser.add_argument("category", nargs='?', help=_("Category of the parameter")) + self.parser.add_argument("name", nargs='?', help=_("Name of the parameter")) + self.parser.add_argument("-a", "--attribute", type=str, default="value", help=_("Name of the attribute to get")) + self.parser.add_argument("--security-limit", type=int, default=-1, help=_("Security limit")) def start(self): if self.args.category is None: categories = self.host.bridge.getParamsCategories() - print u"\n".join(categories) + print("\n".join(categories)) elif self.args.name is None: try: values_dict = self.host.bridge.asyncGetParamsValuesFromCategory(self.args.category, self.args.security_limit, self.profile) except Exception as e: - print u"Can't find requested parameters: {}".format(e) + print("Can't find requested parameters: {}".format(e)) self.host.quit(1) - for name, value in values_dict.iteritems(): - print u"{}\t{}".format(name, value) + for name, value in values_dict.items(): + print("{}\t{}".format(name, value)) else: try: value = self.host.bridge.asyncGetParamA(self.args.name, self.args.category, self.args.attribute, self.args.security_limit, self.profile) except Exception as e: - print u"Can't find requested parameter: {}".format(e) + print("Can't find requested parameter: {}".format(e)) self.host.quit(1) - print value + print(value) class Set(base.CommandBase): @@ -61,16 +61,16 @@ super(Set, self).__init__(host, 'set', need_connect=False, help=_('Set a parameter value')) def add_parser_options(self): - self.parser.add_argument("category", type=base.unicode_decoder, help=_(u"Category of the parameter")) - self.parser.add_argument("name", type=base.unicode_decoder, help=_(u"Name of the parameter")) - self.parser.add_argument("value", type=base.unicode_decoder, help=_(u"Name of the parameter")) - self.parser.add_argument("--security-limit", type=int, default=-1, help=_(u"Security limit")) + self.parser.add_argument("category", help=_("Category of the parameter")) + self.parser.add_argument("name", help=_("Name of the parameter")) + self.parser.add_argument("value", help=_("Name of the parameter")) + self.parser.add_argument("--security-limit", type=int, default=-1, help=_("Security limit")) def start(self): try: self.host.bridge.setParam(self.args.name, self.args.value, self.args.category, self.args.security_limit, self.profile) except Exception as e: - print u"Can set requested parameter: {}".format(e) + print("Can set requested parameter: {}".format(e)) class SaveTemplate(base.CommandBase): @@ -83,9 +83,9 @@ def start(self): """Save parameters template to xml file""" if self.host.bridge.saveParamsTemplate(self.args.filename): - 
print _("Parameters saved to file %s") % self.args.filename + print(_("Parameters saved to file %s") % self.args.filename) else: - print _("Can't save parameters to file %s") % self.args.filename + print(_("Can't save parameters to file %s") % self.args.filename) class LoadTemplate(base.CommandBase): @@ -99,9 +99,9 @@ def start(self): """Load parameters template from xml file""" if self.host.bridge.loadParamsTemplate(self.args.filename): - print _("Parameters loaded from file %s") % self.args.filename + print(_("Parameters loaded from file %s") % self.args.filename) else: - print _("Can't load parameters from file %s") % self.args.filename + print(_("Can't load parameters from file %s") % self.args.filename) class Param(base.CommandBase):
--- a/sat_frontends/jp/cmd_ping.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_ping.py Tue Aug 13 19:08:41 2019 +0200 @@ -17,7 +17,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ __commands__ = ["Ping"] @@ -31,14 +31,14 @@ def add_parser_options(self): self.parser.add_argument( - "jid", type=base.unicode_decoder, help=_(u"jid to ping") + "jid", help=_("jid to ping") ) self.parser.add_argument( - "-d", "--delay-only", action="store_true", help=_(u"output delay only (in s)") + "-d", "--delay-only", action="store_true", help=_("output delay only (in s)") ) def _pingCb(self, pong_time): - fmt = u"{time}" if self.args.delay_only else u"PONG ({time} s)" + fmt = "{time}" if self.args.delay_only else "PONG ({time} s)" self.disp(fmt.format(time=pong_time)) self.host.quit()
--- a/sat_frontends/jp/cmd_pipe.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_pipe.py Tue Aug 13 19:08:41 2019 +0200 @@ -26,7 +26,7 @@ import xml.etree.ElementTree as ET # FIXME: used temporarily to manage XMLUI from functools import partial import socket -import SocketServer +import socketserver import errno __commands__ = ["Pipe"] @@ -41,7 +41,7 @@ def add_parser_options(self): self.parser.add_argument( - "jid", type=base.unicode_decoder, help=_("the destination jid") + "jid", help=_("the destination jid") ) def streamOutCb(self, port): @@ -69,13 +69,13 @@ callback=self.streamOutCb, errback=partial( self.errback, - msg=_(u"can't start stream: {}"), + msg=_("can't start stream: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) -class StreamServer(SocketServer.BaseRequestHandler): +class StreamServer(socketserver.BaseRequestHandler): def handle(self): while True: data = self.request.recv(4096) @@ -100,7 +100,6 @@ def add_parser_options(self): self.parser.add_argument( "jids", - type=base.unicode_decoder, nargs="*", help=_('Jids accepted (none means "accept everything")'), ) @@ -112,12 +111,12 @@ try: xml_ui = action_data["xmlui"] except KeyError: - self.disp(_(u"Action has no XMLUI"), 1) + self.disp(_("Action has no XMLUI"), 1) else: ui = ET.fromstring(xml_ui.encode("utf-8")) xmlui_id = ui.get("submit") if not xmlui_id: - self.disp(_(u"Invalid XMLUI received"), error=True) + self.disp(_("Invalid XMLUI received"), error=True) return xmlui_id def onStreamAction(self, action_data, action_id, security_limit, profile): @@ -127,14 +126,14 @@ try: from_jid = jid.JID(action_data["meta_from_jid"]) except KeyError: - self.disp(_(u"Ignoring action without from_jid data"), 1) + self.disp(_("Ignoring action without from_jid data"), 1) return if not self.bare_jids or from_jid.bare in self.bare_jids: host, port = "localhost", START_PORT while True: try: - server = SocketServer.TCPServer((host, port), StreamServer) + server = socketserver.TCPServer((host, port), StreamServer) except socket.error as e: if e.errno == errno.EADDRINUSE: port += 1 @@ -142,7 +141,7 @@ raise e else: break - xmlui_data = {"answer": C.BOOL_TRUE, "port": unicode(port)} + xmlui_data = {"answer": C.BOOL_TRUE, "port": str(port)} self.host.bridge.launchAction(xmlui_id, xmlui_data, profile_key=profile) server.serve_forever() self.host.quitFromSignal()
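cmd_pipe.py picks up one of the standard-library renames: SocketServer is spelled socketserver in Python 3, with the same API. A minimal sketch of the pattern used in the hunk, an echo handler plus the "try the next port on EADDRINUSE" loop (names and port are illustrative only):

    import errno
    import socket
    import socketserver  # Python 2 name: SocketServer

    class EchoHandler(socketserver.BaseRequestHandler):
        def handle(self):
            data = self.request.recv(4096)
            if data:
                self.request.sendall(data)

    def bind_first_free(host="localhost", start_port=8888):
        """Try ports upward until one is free, as the Stream command does."""
        port = start_port
        while True:
            try:
                return socketserver.TCPServer((host, port), EchoHandler), port
            except socket.error as e:  # alias of OSError in Python 3
                if e.errno == errno.EADDRINUSE:
                    port += 1
                else:
                    raise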
--- a/sat_frontends/jp/cmd_profile.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_profile.py Tue Aug 13 19:08:41 2019 +0200 @@ -38,7 +38,7 @@ def __init__(self, host): # it's weird to have a command named "connect" with need_connect=False, but it can be handy to be able # to launch just the session, so some paradoxes don't hurt - super(ProfileConnect, self).__init__(host, 'connect', need_connect=False, help=(u'connect a profile')) + super(ProfileConnect, self).__init__(host, 'connect', need_connect=False, help=('connect a profile')) def add_parser_options(self): pass @@ -47,7 +47,7 @@ class ProfileDisconnect(base.CommandBase): def __init__(self, host): - super(ProfileDisconnect, self).__init__(host, 'disconnect', need_connect=False, help=(u'disconnect a profile')) + super(ProfileDisconnect, self).__init__(host, 'disconnect', need_connect=False, help=('disconnect a profile')) self.need_loop = True def add_parser_options(self): @@ -59,32 +59,32 @@ class ProfileDefault(base.CommandBase): def __init__(self, host): - super(ProfileDefault, self).__init__(host, 'default', use_profile=False, help=(u'print default profile')) + super(ProfileDefault, self).__init__(host, 'default', use_profile=False, help=('print default profile')) def add_parser_options(self): pass def start(self): - print self.host.bridge.profileNameGet('@DEFAULT@') + print(self.host.bridge.profileNameGet('@DEFAULT@')) class ProfileDelete(base.CommandBase): def __init__(self, host): - super(ProfileDelete, self).__init__(host, 'delete', use_profile=False, help=(u'delete a profile')) + super(ProfileDelete, self).__init__(host, 'delete', use_profile=False, help=('delete a profile')) def add_parser_options(self): self.parser.add_argument('profile', type=str, help=PROFILE_HELP) - self.parser.add_argument('-f', '--force', action='store_true', help=_(u'delete profile without confirmation')) + self.parser.add_argument('-f', '--force', action='store_true', help=_('delete profile without confirmation')) def start(self): if self.args.profile not in self.host.bridge.profilesListGet(): log.error("Profile %s doesn't exist." % self.args.profile) self.host.quit(1) if not self.args.force: - message = u"Are you sure to delete profile [{}] ?".format(self.args.profile) - res = raw_input("{} (y/N)? ".format(message)) + message = "Are you sure to delete profile [{}] ?".format(self.args.profile) + res = input("{} (y/N)? 
".format(message)) if res not in ("y", "Y"): - self.disp(_(u"Profile deletion cancelled")) + self.disp(_("Profile deletion cancelled")) self.host.quit(2) self.host.bridge.asyncDeleteProfile(self.args.profile, callback=lambda __: None) @@ -92,19 +92,19 @@ class ProfileInfo(base.CommandBase): def __init__(self, host): - super(ProfileInfo, self).__init__(host, 'info', need_connect=False, help=_(u'get information about a profile')) + super(ProfileInfo, self).__init__(host, 'info', need_connect=False, help=_('get information about a profile')) self.need_loop = True - self.to_show = [(_(u"jid"), "Connection", "JabberID"),] + self.to_show = [(_("jid"), "Connection", "JabberID"),] self.largest = max([len(item[0]) for item in self.to_show]) def add_parser_options(self): - self.parser.add_argument('--show-password', action='store_true', help=_(u'show the XMPP password IN CLEAR TEXT')) + self.parser.add_argument('--show-password', action='store_true', help=_('show the XMPP password IN CLEAR TEXT')) def showNextValue(self, label=None, category=None, value=None): """Show next value from self.to_show and quit on last one""" if label is not None: - print((u"{label:<"+unicode(self.largest+2)+"}{value}").format(label=label+": ", value=value)) + print((("{label:<"+str(self.largest+2)+"}{value}").format(label=label+": ", value=value))) try: label, category, name = self.to_show.pop(0) except IndexError: @@ -115,18 +115,18 @@ def start(self): if self.args.show_password: - self.to_show.append((_(u"XMPP password"), "Connection", "Password")) + self.to_show.append((_("XMPP password"), "Connection", "Password")) self.showNextValue() class ProfileList(base.CommandBase): def __init__(self, host): - super(ProfileList, self).__init__(host, 'list', use_profile=False, use_output='list', help=(u'list profiles')) + super(ProfileList, self).__init__(host, 'list', use_profile=False, use_output='list', help=('list profiles')) def add_parser_options(self): group = self.parser.add_mutually_exclusive_group() - group.add_argument('-c', '--clients', action='store_true', help=_(u'get clients profiles only')) - group.add_argument('-C', '--components', action='store_true', help=(u'get components profiles only')) + group.add_argument('-c', '--clients', action='store_true', help=_('get clients profiles only')) + group.add_argument('-C', '--components', action='store_true', help=('get components profiles only')) def start(self): @@ -141,17 +141,17 @@ class ProfileCreate(base.CommandBase): def __init__(self, host): - super(ProfileCreate, self).__init__(host, 'create', use_profile=False, help=(u'create a new profile')) + super(ProfileCreate, self).__init__(host, 'create', use_profile=False, help=('create a new profile')) self.need_loop = True def add_parser_options(self): - self.parser.add_argument('profile', type=str, help=_(u'the name of the profile')) - self.parser.add_argument('-p', '--password', type=str, default='', help=_(u'the password of the profile')) - self.parser.add_argument('-j', '--jid', type=str, help=_(u'the jid of the profile')) - self.parser.add_argument('-x', '--xmpp-password', type=str, help=_(u'the password of the XMPP account (use profile password if not specified)'), + self.parser.add_argument('profile', type=str, help=_('the name of the profile')) + self.parser.add_argument('-p', '--password', type=str, default='', help=_('the password of the profile')) + self.parser.add_argument('-j', '--jid', type=str, help=_('the jid of the profile')) + self.parser.add_argument('-x', '--xmpp-password', type=str, help=_('the 
password of the XMPP account (use profile password if not specified)'), metavar='PASSWORD') - self.parser.add_argument('-C', '--component', type=base.unicode_decoder, default='', - help=_(u'set to component import name (entry point) if this is a component')) + self.parser.add_argument('-C', '--component', default='', + help=_('set to component import name (entry point) if this is a component')) def _session_started(self, __): if self.args.jid: @@ -172,22 +172,22 @@ self.host.bridge.profileCreate(self.args.profile, self.args.password, self.args.component, callback=self._profile_created, errback=partial(self.errback, - msg=_(u"can't create profile: {}"), + msg=_("can't create profile: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK)) class ProfileModify(base.CommandBase): def __init__(self, host): - super(ProfileModify, self).__init__(host, 'modify', need_connect=False, help=_(u'modify an existing profile')) + super(ProfileModify, self).__init__(host, 'modify', need_connect=False, help=_('modify an existing profile')) def add_parser_options(self): profile_pwd_group = self.parser.add_mutually_exclusive_group() - profile_pwd_group.add_argument('-w', '--password', type=base.unicode_decoder, help=_(u'change the password of the profile')) - profile_pwd_group.add_argument('--disable-password', action='store_true', help=_(u'disable profile password (dangerous!)')) - self.parser.add_argument('-j', '--jid', type=base.unicode_decoder, help=_(u'the jid of the profile')) - self.parser.add_argument('-x', '--xmpp-password', type=base.unicode_decoder, help=_(u'change the password of the XMPP account'), + profile_pwd_group.add_argument('-w', '--password', help=_('change the password of the profile')) + profile_pwd_group.add_argument('--disable-password', action='store_true', help=_('disable profile password (dangerous!)')) + self.parser.add_argument('-j', '--jid', help=_('the jid of the profile')) + self.parser.add_argument('-x', '--xmpp-password', help=_('change the password of the XMPP account'), metavar='PASSWORD') - self.parser.add_argument('-D', '--default', action='store_true', help=_(u'set as default profile')) + self.parser.add_argument('-D', '--default', action='store_true', help=_('set as default profile')) def start(self): if self.args.disable_password: @@ -206,4 +206,4 @@ subcommands = (ProfileConnect, ProfileDisconnect, ProfileCreate, ProfileDefault, ProfileDelete, ProfileInfo, ProfileList, ProfileModify) def __init__(self, host): - super(Profile, self).__init__(host, 'profile', use_profile=False, help=_(u'profile commands')) + super(Profile, self).__init__(host, 'profile', use_profile=False, help=_('profile commands'))
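cmd_profile.py covers the interactive-prompt rename: Python 2's raw_input() is simply input() in Python 3 (the old Python 2 input(), which evaluated the typed text, is gone). A sketch of the confirmation pattern used for profile deletion (generic message, not the jp helper itself):

    def confirm(message):
        # Python 2: res = raw_input("{} (y/N)? ".format(message))
        res = input("{} (y/N)? ".format(message))
        return res in ("y", "Y")

    if not confirm("Are you sure to delete profile [test]?"):
        print("Profile deletion cancelled")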
--- a/sat_frontends/jp/cmd_pubsub.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_pubsub.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat.core import exceptions from sat_frontends.jp.constants import Const as C @@ -37,7 +37,7 @@ __commands__ = ["Pubsub"] -PUBSUB_TMP_DIR = u"pubsub" +PUBSUB_TMP_DIR = "pubsub" PUBSUB_SCHEMA_TMP_DIR = PUBSUB_TMP_DIR + "_schema" ALLOWED_SUBSCRIPTIONS_OWNER = ("subscribed", "pending", "none") @@ -53,7 +53,7 @@ use_output=C.OUTPUT_DICT, use_pubsub=True, pubsub_flags={C.NODE}, - help=_(u"retrieve node configuration"), + help=_("retrieve node configuration"), ) self.need_loop = True @@ -61,29 +61,28 @@ self.parser.add_argument( "-k", "--key", - type=base.unicode_decoder, action="append", dest="keys", - help=_(u"data key to filter"), + help=_("data key to filter"), ) def removePrefix(self, key): - return key[7:] if key.startswith(u"pubsub#") else key + return key[7:] if key.startswith("pubsub#") else key def filterKey(self, key): - return any((key == k or key == u"pubsub#" + k) for k in self.args.keys) + return any((key == k or key == "pubsub#" + k) for k in self.args.keys) def psNodeConfigurationGetCb(self, config_dict): key_filter = (lambda k: True) if not self.args.keys else self.filterKey config_dict = { - self.removePrefix(k): v for k, v in config_dict.iteritems() if key_filter(k) + self.removePrefix(k): v for k, v in config_dict.items() if key_filter(k) } self.output(config_dict) self.host.quit() def psNodeConfigurationGetEb(self, failure_): self.disp( - u"can't get node configuration: {reason}".format(reason=failure_), error=True + "can't get node configuration: {reason}".format(reason=failure_), error=True ) self.host.quit(C.EXIT_BRIDGE_ERRBACK) @@ -107,7 +106,7 @@ use_pubsub=True, pubsub_flags={C.NODE}, use_verbose=True, - help=_(u"create a node"), + help=_("create a node"), ) self.need_loop = True @@ -115,36 +114,35 @@ self.parser.add_argument( "-f", "--field", - type=base.unicode_decoder, action="append", nargs=2, dest="fields", default=[], - metavar=(u"KEY", u"VALUE"), - help=_(u"configuration field to set"), + metavar=("KEY", "VALUE"), + help=_("configuration field to set"), ) self.parser.add_argument( "-F", "--full-prefix", action="store_true", - help=_(u'don\'t prepend "pubsub#" prefix to field names'), + help=_('don\'t prepend "pubsub#" prefix to field names'), ) def psNodeCreateCb(self, node_id): if self.host.verbosity: - announce = _(u"node created successfully: ") + announce = _("node created successfully: ") else: - announce = u"" + announce = "" self.disp(announce + node_id) self.host.quit() def psNodeCreateEb(self, failure_): - self.disp(u"can't create: {reason}".format(reason=failure_), error=True) + self.disp("can't create: {reason}".format(reason=failure_), error=True) self.host.quit(C.EXIT_BRIDGE_ERRBACK) def start(self): if not self.args.full_prefix: - options = {u"pubsub#" + k: v for k, v in self.args.fields} + options = {"pubsub#" + k: v for k, v in self.args.fields} else: options = dict(self.args.fields) self.host.bridge.psNodeCreate( @@ -155,7 +153,7 @@ callback=self.psNodeCreateCb, errback=partial( self.errback, - msg=_(u"can't create node: {}"), + msg=_("can't create node: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -169,7 +167,7 @@ "purge", use_pubsub=True, pubsub_flags={C.NODE}, - help=_(u"purge a node (i.e. remove all items from it)"), + help=_("purge a node (i.e. 
remove all items from it)"), ) self.need_loop = True @@ -178,26 +176,26 @@ "-f", "--force", action="store_true", - help=_(u"purge node without confirmation"), + help=_("purge node without confirmation"), ) def psNodePurgeCb(self): - self.disp(_(u"node [{node}] purged successfully").format(node=self.args.node)) + self.disp(_("node [{node}] purged successfully").format(node=self.args.node)) self.host.quit() def start(self): if not self.args.force: if not self.args.service: - message = _(u"Are you sure to purge PEP node [{node_id}]? " - u"This will delete ALL items from it!").format( + message = _("Are you sure to purge PEP node [{node_id}]? " + "This will delete ALL items from it!").format( node_id=self.args.node ) else: message = _( - u"Are you sure to delete node [{node_id}] on service [{service}]? " - u"This will delete ALL items from it!" + "Are you sure to delete node [{node_id}] on service [{service}]? " + "This will delete ALL items from it!" ).format(node_id=self.args.node, service=self.args.service) - self.host.confirmOrQuit(message, _(u"node purge cancelled")) + self.host.confirmOrQuit(message, _("node purge cancelled")) self.host.bridge.psNodePurge( self.args.service, @@ -206,7 +204,7 @@ callback=self.psNodePurgeCb, errback=partial( self.errback, - msg=_(u"can't purge node: {}"), + msg=_("can't purge node: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -220,7 +218,7 @@ "delete", use_pubsub=True, pubsub_flags={C.NODE}, - help=_(u"delete a node"), + help=_("delete a node"), ) self.need_loop = True @@ -229,24 +227,24 @@ "-f", "--force", action="store_true", - help=_(u"delete node without confirmation"), + help=_("delete node without confirmation"), ) def psNodeDeleteCb(self): - self.disp(_(u"node [{node}] deleted successfully").format(node=self.args.node)) + self.disp(_("node [{node}] deleted successfully").format(node=self.args.node)) self.host.quit() def start(self): if not self.args.force: if not self.args.service: - message = _(u"Are you sure to delete PEP node [{node_id}] ?").format( + message = _("Are you sure to delete PEP node [{node_id}] ?").format( node_id=self.args.node ) else: message = _( - u"Are you sure to delete node [{node_id}] on service [{service}] ?" + "Are you sure to delete node [{node_id}] on service [{service}] ?" 
).format(node_id=self.args.node, service=self.args.service) - self.host.confirmOrQuit(message, _(u"node deletion cancelled")) + self.host.confirmOrQuit(message, _("node deletion cancelled")) self.host.bridge.psNodeDelete( self.args.service, @@ -255,7 +253,7 @@ callback=self.psNodeDeleteCb, errback=partial( self.errback, - msg=_(u"can't delete node: {}"), + msg=_("can't delete node: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -271,7 +269,7 @@ use_pubsub=True, pubsub_flags={C.NODE}, use_verbose=True, - help=_(u"set node configuration"), + help=_("set node configuration"), ) self.need_loop = True @@ -279,28 +277,27 @@ self.parser.add_argument( "-f", "--field", - type=base.unicode_decoder, action="append", nargs=2, dest="fields", required=True, - metavar=(u"KEY", u"VALUE"), - help=_(u"configuration field to set (required)"), + metavar=("KEY", "VALUE"), + help=_("configuration field to set (required)"), ) def psNodeConfigurationSetCb(self): - self.disp(_(u"node configuration successful"), 1) + self.disp(_("node configuration successful"), 1) self.host.quit() def psNodeConfigurationSetEb(self, failure_): self.disp( - u"can't set node configuration: {reason}".format(reason=failure_), error=True + "can't set node configuration: {reason}".format(reason=failure_), error=True ) self.host.quit(C.EXIT_BRIDGE_ERRBACK) def getKeyName(self, k): - if not k.startswith(u"pubsub#"): - return u"pubsub#" + k + if not k.startswith("pubsub#"): + return "pubsub#" + k else: return k @@ -323,7 +320,7 @@ "import", use_pubsub=True, pubsub_flags={C.NODE}, - help=_(u"import raw XML to a node"), + help=_("import raw XML to a node"), ) self.need_loop = True @@ -331,18 +328,18 @@ self.parser.add_argument( "--admin", action="store_true", - help=_(u"do a pubsub admin request, needed to change publisher"), + help=_("do a pubsub admin request, needed to change publisher"), ) self.parser.add_argument( "import_file", - type=file, - help=_(u"path to the XML file with data to import. The file must contain " - u"whole XML of each item to import."), + type=argparse.FileType(), + help=_("path to the XML file with data to import. 
The file must contain " + "whole XML of each item to import."), ) def psItemsSendCb(self, item_ids): - self.disp(_(u'items published with id(s) {item_ids}').format( - item_ids=u', '.join(item_ids))) + self.disp(_('items published with id(s) {item_ids}').format( + item_ids=', '.join(item_ids))) self.host.quit() def start(self): @@ -364,7 +361,7 @@ if not all([i.tag == '{http://jabber.org/protocol/pubsub}item' for i in element]): self.disp( - _(u"You are not using list of pubsub items, we can't import this file"), + _("You are not using list of pubsub items, we can't import this file"), error=True) self.host.quit(C.EXIT_DATA_ERROR) @@ -374,28 +371,28 @@ self.args.service, self.args.node, items, - u"", + "", self.profile, callback=partial(self.psItemsSendCb), errback=partial( self.errback, - msg=_(u"can't send item: {}"), + msg=_("can't send item: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) else: - self.disp(_(u"Items are imported without using admin mode, publisher can't " - u"be changed")) + self.disp(_("Items are imported without using admin mode, publisher can't " + "be changed")) self.host.bridge.psItemsSend( self.args.service, self.args.node, items, - u"", + "", self.profile, callback=partial(self.psItemsSendCb), errback=partial( self.errback, - msg=_(u"can't send item: {}"), + msg=_("can't send item: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -410,7 +407,7 @@ use_output=C.OUTPUT_DICT, use_pubsub=True, pubsub_flags={C.NODE}, - help=_(u"retrieve node affiliations (for node owner)"), + help=_("retrieve node affiliations (for node owner)"), ) self.need_loop = True @@ -423,7 +420,7 @@ def psNodeAffiliationsGetEb(self, failure_): self.disp( - u"can't get node affiliations: {reason}".format(reason=failure_), error=True + "can't get node affiliations: {reason}".format(reason=failure_), error=True ) self.host.quit(C.EXIT_BRIDGE_ERRBACK) @@ -446,7 +443,7 @@ use_pubsub=True, pubsub_flags={C.NODE}, use_verbose=True, - help=_(u"set affiliations (for node owner)"), + help=_("set affiliations (for node owner)"), ) self.need_loop = True @@ -459,19 +456,18 @@ dest="affiliations", metavar=("JID", "AFFILIATION"), required=True, - type=base.unicode_decoder, action="append", nargs=2, - help=_(u"entity/affiliation couple(s)"), + help=_("entity/affiliation couple(s)"), ) def psNodeAffiliationsSetCb(self): - self.disp(_(u"affiliations have been set"), 1) + self.disp(_("affiliations have been set"), 1) self.host.quit() def psNodeAffiliationsSetEb(self, failure_): self.disp( - u"can't set node affiliations: {reason}".format(reason=failure_), error=True + "can't set node affiliations: {reason}".format(reason=failure_), error=True ) self.host.quit(C.EXIT_BRIDGE_ERRBACK) @@ -495,7 +491,7 @@ host, "affiliations", use_profile=False, - help=_(u"set or retrieve node affiliations"), + help=_("set or retrieve node affiliations"), ) @@ -508,7 +504,7 @@ use_output=C.OUTPUT_DICT, use_pubsub=True, pubsub_flags={C.NODE}, - help=_(u"retrieve node subscriptions (for node owner)"), + help=_("retrieve node subscriptions (for node owner)"), ) self.need_loop = True @@ -521,7 +517,7 @@ def psNodeSubscriptionsGetEb(self, failure_): self.disp( - u"can't get node subscriptions: {reason}".format(reason=failure_), error=True + "can't get node subscriptions: {reason}".format(reason=failure_), error=True ) self.host.quit(C.EXIT_BRIDGE_ERRBACK) @@ -552,8 +548,8 @@ subscription = "subscribed" if subscription not in ALLOWED_SUBSCRIPTIONS_OWNER: parser.error( - _(u"subscription must be one of {}").format( - u", 
".join(ALLOWED_SUBSCRIPTIONS_OWNER) + _("subscription must be one of {}").format( + ", ".join(ALLOWED_SUBSCRIPTIONS_OWNER) ) ) dest_dict[jid_s] = subscription @@ -568,7 +564,7 @@ use_pubsub=True, pubsub_flags={C.NODE}, use_verbose=True, - help=_(u"set/modify subscriptions (for node owner)"), + help=_("set/modify subscriptions (for node owner)"), ) self.need_loop = True @@ -583,18 +579,17 @@ nargs="+", metavar=("JID [SUSBSCRIPTION]"), required=True, - type=base.unicode_decoder, action=StoreSubscriptionAction, - help=_(u"entity/subscription couple(s)"), + help=_("entity/subscription couple(s)"), ) def psNodeSubscriptionsSetCb(self): - self.disp(_(u"subscriptions have been set"), 1) + self.disp(_("subscriptions have been set"), 1) self.host.quit() def psNodeSubscriptionsSetEb(self, failure_): self.disp( - u"can't set node subscriptions: {reason}".format(reason=failure_), error=True + "can't set node subscriptions: {reason}".format(reason=failure_), error=True ) self.host.quit(C.EXIT_BRIDGE_ERRBACK) @@ -617,7 +612,7 @@ host, "subscriptions", use_profile=False, - help=_(u"get or modify node subscriptions"), + help=_("get or modify node subscriptions"), ) @@ -630,15 +625,15 @@ use_pubsub=True, pubsub_flags={C.NODE}, use_verbose=True, - help=_(u"set/replace a schema"), + help=_("set/replace a schema"), ) self.need_loop = True def add_parser_options(self): - self.parser.add_argument("schema", help=_(u"schema to set (must be XML)")) + self.parser.add_argument("schema", help=_("schema to set (must be XML)")) def psSchemaSetCb(self): - self.disp(_(u"schema has been set"), 1) + self.disp(_("schema has been set"), 1) self.host.quit() def start(self): @@ -650,7 +645,7 @@ callback=self.psSchemaSetCb, errback=partial( self.errback, - msg=_(u"can't set schema: {}"), + msg=_("can't set schema: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -668,7 +663,7 @@ pubsub_flags={C.NODE}, use_draft=True, use_verbose=True, - help=_(u"edit a schema"), + help=_("edit a schema"), ) common.BaseEdit.__init__(self, self.host, PUBSUB_SCHEMA_TMP_DIR) self.need_loop = True @@ -677,7 +672,7 @@ pass def psSchemaSetCb(self): - self.disp(_(u"schema has been set"), 1) + self.disp(_("schema has been set"), 1) self.host.quit() def publish(self, schema): @@ -689,7 +684,7 @@ callback=self.psSchemaSetCb, errback=partial( self.errback, - msg=_(u"can't set schema: {}"), + msg=_("can't set schema: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -698,8 +693,8 @@ try: from lxml import etree except ImportError: - self.disp(u'lxml module must be installed to use edit, please install it ' - u'with "pip install lxml"', + self.disp('lxml module must be installed to use edit, please install it ' + 'with "pip install lxml"', error=True, ) self.host.quit(1) @@ -722,7 +717,7 @@ callback=self.psSchemaGetCb, errback=partial( self.errback, - msg=_(u"can't edit schema: {}"), + msg=_("can't edit schema: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -738,7 +733,7 @@ use_pubsub=True, pubsub_flags={C.NODE}, use_verbose=True, - help=_(u"get schema"), + help=_("get schema"), ) self.need_loop = True @@ -747,7 +742,7 @@ def psSchemaGetCb(self, schema): if not schema: - self.disp(_(u"no schema found"), 1) + self.disp(_("no schema found"), 1) self.host.quit(1) self.output(schema) self.host.quit() @@ -760,7 +755,7 @@ callback=self.psSchemaGetCb, errback=partial( self.errback, - msg=_(u"can't get schema: {}"), + msg=_("can't get schema: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -771,7 +766,7 @@ def __init__(self, host): super(NodeSchema, self).__init__( - host, 
"schema", use_profile=False, help=_(u"data schema manipulation") + host, "schema", use_profile=False, help=_("data schema manipulation") ) @@ -802,24 +797,23 @@ "set", use_pubsub=True, pubsub_flags={C.NODE}, - help=_(u"publish a new item or update an existing one"), + help=_("publish a new item or update an existing one"), ) self.need_loop = True def add_parser_options(self): self.parser.add_argument( "item", - type=base.unicode_decoder, nargs="?", - default=u"", - help=_(u"id, URL of the item to update, keyword, or nothing for new item"), + default="", + help=_("id, URL of the item to update, keyword, or nothing for new item"), ) def psItemsSendCb(self, published_id): if published_id: - self.disp(u"Item published at {pub_id}".format(pub_id=published_id)) + self.disp("Item published at {pub_id}".format(pub_id=published_id)) else: - self.disp(u"Item published") + self.disp("Item published") self.host.quit(C.EXIT_OK) def start(self): @@ -837,7 +831,7 @@ callback=self.psItemsSendCb, errback=partial( self.errback, - msg=_(u"can't send item: {}"), + msg=_("can't send item: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -852,7 +846,7 @@ use_output=C.OUTPUT_LIST_XML, use_pubsub=True, pubsub_flags={C.NODE, C.MULTI_ITEMS}, - help=_(u"get pubsub item(s)"), + help=_("get pubsub item(s)"), ) self.need_loop = True @@ -860,9 +854,8 @@ self.parser.add_argument( "-S", "--sub-id", - type=base.unicode_decoder, - default=u"", - help=_(u"subscription id"), + default="", + help=_("subscription id"), ) # TODO: a key(s) argument to select keys to display # TODO: add MAM filters @@ -872,7 +865,7 @@ self.host.quit(C.EXIT_OK) def psItemsGetEb(self, failure_): - self.disp(u"can't get pubsub items: {reason}".format(reason=failure_), error=True) + self.disp("can't get pubsub items: {reason}".format(reason=failure_), error=True) self.host.quit(C.EXIT_BRIDGE_ERRBACK) def start(self): @@ -897,30 +890,30 @@ "delete", use_pubsub=True, pubsub_flags={C.NODE, C.ITEM, C.SINGLE_ITEM}, - help=_(u"delete an item"), + help=_("delete an item"), ) self.need_loop = True def add_parser_options(self): self.parser.add_argument( - "-f", "--force", action="store_true", help=_(u"delete without confirmation") + "-f", "--force", action="store_true", help=_("delete without confirmation") ) self.parser.add_argument( - "-N", "--notify", action="store_true", help=_(u"notify deletion") + "-N", "--notify", action="store_true", help=_("notify deletion") ) def psItemsDeleteCb(self): - self.disp(_(u"item {item_id} has been deleted").format(item_id=self.args.item)) + self.disp(_("item {item_id} has been deleted").format(item_id=self.args.item)) self.host.quit(C.EXIT_OK) def start(self): if not self.args.item: - self.parser.error(_(u"You need to specify an item to delete")) + self.parser.error(_("You need to specify an item to delete")) if not self.args.force: - message = _(u"Are you sure to delete item {item_id} ?").format( + message = _("Are you sure to delete item {item_id} ?").format( item_id=self.args.item ) - self.host.confirmOrQuit(message, _(u"item deletion cancelled")) + self.host.confirmOrQuit(message, _("item deletion cancelled")) self.host.bridge.psRetractItem( self.args.service, self.args.node, @@ -930,7 +923,7 @@ callback=self.psItemsDeleteCb, errback=partial( self.errback, - msg=_(u"can't delete item: {}"), + msg=_("can't delete item: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -946,7 +939,7 @@ use_pubsub=True, pubsub_flags={C.NODE, C.SINGLE_ITEM}, use_draft=True, - help=_(u"edit an existing or new pubsub item"), + help=_("edit an 
existing or new pubsub item"), ) common.BaseEdit.__init__(self, self.host, PUBSUB_TMP_DIR) @@ -967,16 +960,16 @@ self.profile, ) if published_id: - self.disp(u"Item published at {pub_id}".format(pub_id=published_id)) + self.disp("Item published at {pub_id}".format(pub_id=published_id)) else: - self.disp(u"Item published") + self.disp("Item published") def getItemData(self, service, node, item): try: from lxml import etree except ImportError: - self.disp(u'lxml module must be installed to use edit, please install it ' - u'with "pip install lxml"', + self.disp('lxml module must be installed to use edit, please install it ' + 'with "pip install lxml"', error=True, ) self.host.quit(1) @@ -990,8 +983,8 @@ try: payload = item_elt[0] except IndexError: - self.disp(_(u"Item has not payload"), 1) - return u"" + self.disp(_("Item has not payload"), 1) + return "" return etree.tostring(payload, encoding="unicode", pretty_print=True), item_id def start(self): @@ -1010,7 +1003,7 @@ use_pubsub=True, pubsub_flags={C.NODE}, use_verbose=True, - help=_(u"subscribe to a node"), + help=_("subscribe to a node"), ) self.need_loop = True @@ -1018,9 +1011,9 @@ pass def psSubscribeCb(self, sub_id): - self.disp(_(u"subscription done"), 1) + self.disp(_("subscription done"), 1) if sub_id: - self.disp(_(u"subscription id: {sub_id}").format(sub_id=sub_id)) + self.disp(_("subscription id: {sub_id}").format(sub_id=sub_id)) self.host.quit() def start(self): @@ -1032,7 +1025,7 @@ callback=self.psSubscribeCb, errback=partial( self.errback, - msg=_(u"can't subscribe to node: {}"), + msg=_("can't subscribe to node: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -1049,7 +1042,7 @@ use_pubsub=True, pubsub_flags={C.NODE}, use_verbose=True, - help=_(u"unsubscribe from a node"), + help=_("unsubscribe from a node"), ) self.need_loop = True @@ -1057,7 +1050,7 @@ pass def psUnsubscribeCb(self): - self.disp(_(u"subscription removed"), 1) + self.disp(_("subscription removed"), 1) self.host.quit() def start(self): @@ -1068,7 +1061,7 @@ callback=self.psUnsubscribeCb, errback=partial( self.errback, - msg=_(u"can't unsubscribe from node: {}"), + msg=_("can't unsubscribe from node: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -1082,7 +1075,7 @@ "subscriptions", use_output=C.OUTPUT_LIST_DICT, use_pubsub=True, - help=_(u"retrieve all subscriptions on a service"), + help=_("retrieve all subscriptions on a service"), ) self.need_loop = True @@ -1101,7 +1094,7 @@ callback=self.psSubscriptionsGetCb, errback=partial( self.errback, - msg=_(u"can't retrieve subscriptions: {}"), + msg=_("can't retrieve subscriptions: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -1115,7 +1108,7 @@ "affiliations", use_output=C.OUTPUT_DICT, use_pubsub=True, - help=_(u"retrieve all affiliations on a service"), + help=_("retrieve all affiliations on a service"), ) self.need_loop = True @@ -1128,7 +1121,7 @@ def psAffiliationsGetEb(self, failure_): self.disp( - u"can't get node affiliations: {reason}".format(reason=failure_), error=True + "can't get node affiliations: {reason}".format(reason=failure_), error=True ) self.host.quit(C.EXIT_BRIDGE_ERRBACK) @@ -1150,7 +1143,7 @@ """ RE_FLAGS = re.MULTILINE | re.UNICODE - EXEC_ACTIONS = (u"exec", u"external") + EXEC_ACTIONS = ("exec", "external") def __init__(self, host): # FIXME: C.NO_MAX is not needed here, and this can be globally removed from consts @@ -1164,7 +1157,7 @@ use_pubsub=True, pubsub_flags={C.MULTI_ITEMS, C.NO_MAX}, use_verbose=True, - help=_(u"search items corresponding to filters"), + help=_("search items 
corresponding to filters"), ) self.need_loop = True @@ -1178,7 +1171,6 @@ return self._etree def filter_opt(self, value, type_): - value = base.unicode_decoder(value) return (type_, value) def filter_flag(self, value, type_): @@ -1191,16 +1183,16 @@ "--max-depth", type=int, default=0, - help=_(u"maximum depth of recursion (will search linked nodes if > 0, " - u"DEFAULT: 0)"), + help=_("maximum depth of recursion (will search linked nodes if > 0, " + "DEFAULT: 0)"), ) self.parser.add_argument( "-M", "--node-max", type=int, default=30, - help=_(u"maximum number of items to get per node ({} to get all items, " - u"DEFAULT: 30)".format( C.NO_LIMIT)), + help=_("maximum number of items to get per node ({} to get all items, " + "DEFAULT: 30)".format( C.NO_LIMIT)), ) self.parser.add_argument( "-N", @@ -1209,17 +1201,17 @@ nargs=2, default=[], metavar="NAME NAMESPACE", - help=_(u"namespace to use for xpath"), + help=_("namespace to use for xpath"), ) # filters - filter_text = partial(self.filter_opt, type_=u"text") - filter_re = partial(self.filter_opt, type_=u"regex") - filter_xpath = partial(self.filter_opt, type_=u"xpath") - filter_python = partial(self.filter_opt, type_=u"python") + filter_text = partial(self.filter_opt, type_="text") + filter_re = partial(self.filter_opt, type_="regex") + filter_xpath = partial(self.filter_opt, type_="xpath") + filter_python = partial(self.filter_opt, type_="python") filters = self.parser.add_argument_group( - _(u"filters"), - _(u"only items corresponding to following filters will be kept"), + _("filters"), + _("only items corresponding to following filters will be kept"), ) filters.add_argument( "-t", @@ -1228,7 +1220,7 @@ dest="filters", type=filter_text, metavar="TEXT", - help=_(u"full text filter, item must contain this string (XML included)"), + help=_("full text filter, item must contain this string (XML included)"), ) filters.add_argument( "-r", @@ -1237,7 +1229,7 @@ dest="filters", type=filter_re, metavar="EXPRESSION", - help=_(u"like --text but using a regular expression"), + help=_("like --text but using a regular expression"), ) filters.add_argument( "-x", @@ -1246,7 +1238,7 @@ dest="filters", type=filter_xpath, metavar="XPATH", - help=_(u"filter items which has elements matching this xpath"), + help=_("filter items which has elements matching this xpath"), ) filters.add_argument( "-P", @@ -1255,20 +1247,20 @@ dest="filters", type=filter_python, metavar="PYTHON_CODE", - help=_(u'Python expression which much return a bool (True to keep item, ' - u'False to reject it). "item" is raw text item, "item_xml" is ' - u'lxml\'s etree.Element' + help=_('Python expression which much return a bool (True to keep item, ' + 'False to reject it). 
"item" is raw text item, "item_xml" is ' + 'lxml\'s etree.Element' ), ) # filters flags - flag_case = partial(self.filter_flag, type_=u"ignore-case") - flag_invert = partial(self.filter_flag, type_=u"invert") - flag_dotall = partial(self.filter_flag, type_=u"dotall") - flag_matching = partial(self.filter_flag, type_=u"only-matching") + flag_case = partial(self.filter_flag, type_="ignore-case") + flag_invert = partial(self.filter_flag, type_="invert") + flag_dotall = partial(self.filter_flag, type_="dotall") + flag_matching = partial(self.filter_flag, type_="only-matching") flags = self.parser.add_argument_group( - _(u"filters flags"), - _(u"filters modifiers (change behaviour of following filters)"), + _("filters flags"), + _("filters modifiers (change behaviour of following filters)"), ) flags.add_argument( "-C", @@ -1279,7 +1271,7 @@ const=("ignore-case", True), nargs="?", metavar="BOOLEAN", - help=_(u"(don't) ignore case in following filters (DEFAULT: case sensitive)"), + help=_("(don't) ignore case in following filters (DEFAULT: case sensitive)"), ) flags.add_argument( "-I", @@ -1290,7 +1282,7 @@ const=("invert", True), nargs="?", metavar="BOOLEAN", - help=_(u"(don't) invert effect of following filters (DEFAULT: don't invert)"), + help=_("(don't) invert effect of following filters (DEFAULT: don't invert)"), ) flags.add_argument( "-A", @@ -1301,7 +1293,7 @@ const=("dotall", True), nargs="?", metavar="BOOLEAN", - help=_(u"(don't) use DOTALL option for regex (DEFAULT: don't use)"), + help=_("(don't) use DOTALL option for regex (DEFAULT: don't use)"), ) flags.add_argument( "-k", @@ -1312,7 +1304,7 @@ const=("only-matching", True), nargs="?", metavar="BOOLEAN", - help=_(u"keep only the matching part of the item"), + help=_("keep only the matching part of the item"), ) # action @@ -1321,13 +1313,13 @@ default="print", nargs="?", choices=("print", "exec", "external"), - help=_(u"action to do on found items (DEFAULT: print)"), + help=_("action to do on found items (DEFAULT: print)"), ) self.parser.add_argument("command", nargs=argparse.REMAINDER) def psItemsGetEb(self, failure_, service, node): self.disp( - u"can't get pubsub items at {service} (node: {node}): {reason}".format( + "can't get pubsub items at {service} (node: {node}): {reason}".format( service=service, node=node, reason=failure_ ), error=True, @@ -1357,15 +1349,15 @@ this list will be filled while xmpp: URIs are discovered """ url = match.group(0) - if url.startswith(u"xmpp"): + if url.startswith("xmpp"): try: url_data = uri.parseXMPPUri(url) except ValueError: return - if url_data[u"type"] == u"pubsub": - found_node = {u"service": url_data[u"path"], u"node": url_data[u"node"]} - if u"item" in url_data: - found_node[u"item"] = url_data[u"item"] + if url_data["type"] == "pubsub": + found_node = {"service": url_data["path"], "node": url_data["node"]} + if "item" in url_data: + found_node["item"] = url_data["item"] found_nodes.append(found_node) def getSubNodes(self, item, depth): @@ -1376,9 +1368,9 @@ for data in found_nodes: self.getItems( depth + 1, - data[u"service"], - data[u"node"], - [data[u"item"]] if u"item" in data else [], + data["service"], + data["node"], + [data["item"]] if "item" in data else [], ) def parseXml(self, item): @@ -1386,8 +1378,8 @@ return self.etree.fromstring(item) except self.etree.XMLSyntaxError: self.disp( - _(u"item doesn't looks like XML, you have probably used --only-matching " - u"somewhere before and we have no more XML"), + _("item doesn't looks like XML, you have probably used --only-matching 
" + "somewhere before and we have no more XML"), error=True, ) self.host.quit(C.EXIT_BAD_ARG) @@ -1410,7 +1402,7 @@ ## filters - if type_ == u"text": + if type_ == "text": if ignore_case: if value.lower() not in item.lower(): keep = False @@ -1422,12 +1414,12 @@ # so we raise an error self.host.disp( _( - u"--only-matching used with fixed --text string, are you sure?" + "--only-matching used with fixed --text string, are you sure?" ), error=True, ) self.host.quit(C.EXIT_BAD_ARG) - elif type_ == u"regex": + elif type_ == "regex": flags = self.RE_FLAGS if ignore_case: flags |= re.IGNORECASE @@ -1438,14 +1430,14 @@ if keep and only_matching: item = match.group() item_xml = None - elif type_ == u"xpath": + elif type_ == "xpath": if item_xml is None: item_xml = self.parseXml(item) try: elts = item_xml.xpath(value, namespaces=self.args.namespace) except self.etree.XPathEvalError as e: self.disp( - _(u"can't use xpath: {reason}").format(reason=e), error=True + _("can't use xpath: {reason}").format(reason=e), error=True ) self.host.quit(C.EXIT_BAD_ARG) keep = bool(elts) @@ -1455,33 +1447,33 @@ item = self.etree.tostring(item_xml, encoding="unicode") except TypeError: # we have a string only, not an element - item = unicode(item_xml) + item = str(item_xml) item_xml = None - elif type_ == u"python": + elif type_ == "python": if item_xml is None: item_xml = self.parseXml(item) - cmd_ns = {u"item": item, u"item_xml": item_xml} + cmd_ns = {"item": item, "item_xml": item_xml} try: keep = eval(value, cmd_ns) except SyntaxError as e: - self.disp(unicode(e), error=True) + self.disp(str(e), error=True) self.host.quit(C.EXIT_BAD_ARG) ## flags - elif type_ == u"ignore-case": + elif type_ == "ignore-case": ignore_case = value - elif type_ == u"invert": + elif type_ == "invert": invert = value # we need to continue, else loop would end here continue - elif type_ == u"dotall": + elif type_ == "dotall": dotall = value - elif type_ == u"only-matching": + elif type_ == "only-matching": only_matching = value else: raise exceptions.InternalError( - _(u"unknown filter type {type}").format(type=type_) + _("unknown filter type {type}").format(type=type_) ) if invert: @@ -1497,7 +1489,7 @@ @param item(unicode): accepted item """ action = self.args.action - if action == u"print" or self.host.verbosity > 0: + if action == "print" or self.host.verbosity > 0: try: self.output(item) except self.etree.XMLSyntaxError: @@ -1506,10 +1498,10 @@ self.disp(item) if action in self.EXEC_ACTIONS: item_elt = self.parseXml(item) - if action == u"exec": + if action == "exec": use = { - "service": metadata[u"service"], - "node": metadata[u"node"], + "service": metadata["service"], + "node": metadata["node"], "item": item_elt.get("id"), "profile": self.profile, } @@ -1523,12 +1515,12 @@ cmd_args = self.args.command self.disp( - u"COMMAND: {command}".format( - command=u" ".join([arg_tools.escape(a) for a in cmd_args]) + "COMMAND: {command}".format( + command=" ".join([arg_tools.escape(a) for a in cmd_args]) ), 2, ) - if action == u"exec": + if action == "exec": ret = subprocess.call(cmd_args) else: p = subprocess.Popen(cmd_args, stdin=subprocess.PIPE) @@ -1538,7 +1530,7 @@ self.disp( A.color( C.A_FAILURE, - _(u"executed command failed with exit code {code}").format( + _("executed command failed with exit code {code}").format( code=ret ), ) @@ -1573,16 +1565,16 @@ if self.args.command: if self.args.action not in self.EXEC_ACTIONS: self.parser.error( - _(u"Command can only be used with {actions} actions").format( - actions=u", 
".join(self.EXEC_ACTIONS) + _("Command can only be used with {actions} actions").format( + actions=", ".join(self.EXEC_ACTIONS) ) ) else: if self.args.action in self.EXEC_ACTIONS: - self.parser.error(_(u"you need to specify a command to execute")) + self.parser.error(_("you need to specify a command to execute")) if not self.args.node: # TODO: handle get service affiliations when node is not set - self.parser.error(_(u"empty node is not handled yet")) + self.parser.error(_("empty node is not handled yet")) # to_get is increased on each get and decreased on each answer # when it reach 0 again, the command is finished self.to_get = 0 @@ -1603,7 +1595,7 @@ "transform", use_pubsub=True, pubsub_flags={C.NODE, C.MULTI_ITEMS}, - help=_(u"modify items of a node using an external command/script"), + help=_("modify items of a node using an external command/script"), ) self.need_loop = True @@ -1611,39 +1603,39 @@ self.parser.add_argument( "--apply", action="store_true", - help=_(u"apply transformation (DEFAULT: do a dry run)"), + help=_("apply transformation (DEFAULT: do a dry run)"), ) self.parser.add_argument( "--admin", action="store_true", - help=_(u"do a pubsub admin request, needed to change publisher"), + help=_("do a pubsub admin request, needed to change publisher"), ) self.parser.add_argument( "-I", "--ignore_errors", action="store_true", help=_( - u"if command return a non zero exit code, ignore the item and continue"), + "if command return a non zero exit code, ignore the item and continue"), ) self.parser.add_argument( "-A", "--all", action="store_true", - help=_(u"get all items by looping over all pages using RSM") + help=_("get all items by looping over all pages using RSM") ) self.parser.add_argument( "command_path", - help=_(u"path to the command to use. Will be called repetitivly with an " - u"item as input. Output (full item XML) will be used as new one. " - u'Return "DELETE" string to delete the item, and "SKIP" to ignore it'), + help=_("path to the command to use. Will be called repetitivly with an " + "item as input. Output (full item XML) will be used as new one. 
" + 'Return "DELETE" string to delete the item, and "SKIP" to ignore it'), ) def psItemsSendCb(self, item_ids, metadata): if item_ids: - self.disp(_(u'items published with ids {item_ids}').format( - item_ids=u', '.join(item_ids))) + self.disp(_('items published with ids {item_ids}').format( + item_ids=', '.join(item_ids))) else: - self.disp(_(u'items published')) + self.disp(_('items published')) if self.args.all: return self.handleNextPage(metadata) else: @@ -1656,30 +1648,30 @@ @param metadata(dict): metadata as returned by psItemsGet """ try: - last = metadata[u'rsm_last'] - index = int(metadata[u'rsm_index']) - count = int(metadata[u'rsm_count']) + last = metadata['rsm_last'] + index = int(metadata['rsm_index']) + count = int(metadata['rsm_count']) except KeyError: - self.disp(_(u"Can't retrieve all items, RSM metadata not available"), + self.disp(_("Can't retrieve all items, RSM metadata not available"), error=True) self.host.quit(C.EXIT_MISSING_FEATURE) except ValueError as e: - self.disp(_(u"Can't retrieve all items, bad RSM metadata: {msg}") + self.disp(_("Can't retrieve all items, bad RSM metadata: {msg}") .format(msg=e), error=True) self.host.quit(C.EXIT_ERROR) if index + self.args.rsm_max >= count: - self.disp(_(u'All items transformed')) + self.disp(_('All items transformed')) self.host.quit(0) - self.disp(_(u'Retrieving next page ({page_idx}/{page_total})').format( + self.disp(_('Retrieving next page ({page_idx}/{page_total})').format( page_idx = int(index/self.args.rsm_max) + 1, page_total = int(count/self.args.rsm_max), ) ) extra = self.getPubsubExtra() - extra[u'rsm_after'] = last + extra['rsm_after'] = last self.host.bridge.psItemsGet( self.args.service, self.args.node, @@ -1691,7 +1683,7 @@ callback=self.psItemsGetCb, errback=partial( self.errback, - msg=_(u"can't retrieve items: {}"), + msg=_("can't retrieve items: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -1708,8 +1700,8 @@ item_id = item_elt.get('id') if item_id in self.items_ids: self.disp(_( - u"Duplicate found on item {item_id}, we have probably handled " - u"all items.").format(item_id=item_id)) + "Duplicate found on item {item_id}, we have probably handled " + "all items.").format(item_id=item_id)) self.host.quit() self.items_ids.append(item_id) @@ -1720,13 +1712,13 @@ except OSError as e: exit_code = C.EXIT_CMD_NOT_FOUND if e.errno == 2 else C.EXIT_ERROR e = str(e).decode('utf-8', errors="ignore") - self.disp(u"Can't execute the command: {msg}".format(msg=e), error=True) + self.disp("Can't execute the command: {msg}".format(msg=e), error=True) self.host.quit(exit_code) cmd_std_out, cmd_std_err = p.communicate(item.encode("utf-8")) ret = p.wait() if ret != 0: - self.disp(u"The command returned a non zero status while parsing the " - u"following item:\n\n{item}".format(item=item), error=True) + self.disp("The command returned a non zero status while parsing the " + "following item:\n\n{item}".format(item=item), error=True) if self.args.ignore_errors: continue else: @@ -1738,7 +1730,7 @@ if cmd_std_out == "DELETE": item_elt, __ = xml_tools.etreeParse(self, item) item_id = item_elt.get('id') - self.disp(_(u"Deleting item {item_id}").format(item_id=item_id)) + self.disp(_("Deleting item {item_id}").format(item_id=item_id)) if self.args.apply: # FIXME: we don't wait for item to be retracted which can cause # trouble in case of error just before the end of the command @@ -1753,7 +1745,7 @@ self.profile, errback=partial( self.errback, - msg=_(u"can't delete item [%s]: {}" % item_id), + msg=_("can't delete item [%s]: 
{}" % item_id), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -1761,19 +1753,19 @@ elif cmd_std_out == "SKIP": item_elt, __ = xml_tools.etreeParse(self, item) item_id = item_elt.get('id') - self.disp(_(u"Skipping item {item_id}").format(item_id=item_id)) + self.disp(_("Skipping item {item_id}").format(item_id=item_id)) continue element, etree = xml_tools.etreeParse(self, cmd_std_out) # at this point command has been run and we have a etree.Element object if element.tag not in ("item", "{http://jabber.org/protocol/pubsub}item"): - self.disp(u"your script must return a whole item, this is not:\n{xml}" + self.disp("your script must return a whole item, this is not:\n{xml}" .format(xml=etree.tostring(element, encoding="unicode")), error=True) self.host.quit(C.EXIT_DATA_ERROR) if not self.args.apply: # we have a dry run, we just display filtered items - serialised = etree.tostring(element, encoding=u'unicode', + serialised = etree.tostring(element, encoding='unicode', pretty_print=True) self.disp(serialised) else: @@ -1790,12 +1782,12 @@ self.args.service, self.args.node, new_items, - u"", + "", self.profile, callback=partial(self.psItemsSendCb, metadata=metadata), errback=partial( self.errback, - msg=_(u"can't send item: {}"), + msg=_("can't send item: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -1804,12 +1796,12 @@ self.args.service, self.args.node, new_items, - u"", + "", self.profile, callback=partial(self.psItemsSendCb, metadata=metadata), errback=partial( self.errback, - msg=_(u"can't send item: {}"), + msg=_("can't send item: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -1820,11 +1812,11 @@ self.items_ids = [] self.disp(A.color( A.FG_RED, A.BOLD, - u'/!\\ "--all" should be used with "--order-by creation" /!\\\n', + '/!\\ "--all" should be used with "--order-by creation" /!\\\n', A.RESET, - u"We'll update items, so order may change during transformation,\n" - u"we'll try to mitigate that by stopping on first duplicate,\n" - u"but this method is not safe, and some items may be missed.\n---\n")) + "We'll update items, so order may change during transformation,\n" + "we'll try to mitigate that by stopping on first duplicate,\n" + "but this method is not safe, and some items may be missed.\n---\n")) else: self.check_duplicates = False self.host.bridge.psItemsGet( @@ -1838,7 +1830,7 @@ callback=self.psItemsGetCb, errback=partial( self.errback, - msg=_(u"can't retrieve items: {}"), + msg=_("can't retrieve items: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -1853,7 +1845,7 @@ use_profile=False, use_pubsub=True, pubsub_flags={C.NODE, C.SINGLE_ITEM}, - help=_(u"build URI"), + help=_("build URI"), ) self.need_loop = True @@ -1861,9 +1853,8 @@ self.parser.add_argument( "-p", "--profile", - type=base.unicode_decoder, default=C.PROF_KEY_DEFAULT, - help=_(u"profile (used when no server is specified)"), + help=_("profile (used when no server is specified)"), ) def display_uri(self, jid_): @@ -1877,19 +1868,19 @@ key = "path" if value: uri_args[key] = value - self.disp(uri.buildXMPPUri(u"pubsub", **uri_args)) + self.disp(uri.buildXMPPUri("pubsub", **uri_args)) self.host.quit() def start(self): if not self.args.service: self.host.bridge.asyncGetParamA( - u"JabberID", - u"Connection", + "JabberID", + "Connection", profile_key=self.args.profile, callback=self.display_uri, errback=partial( self.errback, - msg=_(u"can't retrieve jid: {}"), + msg=_("can't retrieve jid: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -1905,7 +1896,7 @@ "create", use_pubsub=True, pubsub_flags={C.NODE}, - help=_(u"create a 
Pubsub hook"), + help=_("create a Pubsub hook"), ) self.need_loop = True @@ -1913,29 +1904,28 @@ self.parser.add_argument( "-t", "--type", - default=u"python", + default="python", choices=("python", "python_file", "python_code"), - help=_(u"hook type"), + help=_("hook type"), ) self.parser.add_argument( "-P", "--persistent", action="store_true", - help=_(u"make hook persistent across restarts"), + help=_("make hook persistent across restarts"), ) self.parser.add_argument( "hook_arg", - type=base.unicode_decoder, - help=_(u"argument of the hook (depend of the type)"), + help=_("argument of the hook (depend of the type)"), ) @staticmethod def checkArgs(self): - if self.args.type == u"python_file": + if self.args.type == "python_file": self.args.hook_arg = os.path.abspath(self.args.hook_arg) if not os.path.isfile(self.args.hook_arg): self.parser.error( - _(u"{path} is not a file").format(path=self.args.hook_arg) + _("{path} is not a file").format(path=self.args.hook_arg) ) def start(self): @@ -1950,7 +1940,7 @@ callback=self.host.quit, errback=partial( self.errback, - msg=_(u"can't create hook: {}"), + msg=_("can't create hook: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -1964,7 +1954,7 @@ "delete", use_pubsub=True, pubsub_flags={C.NODE}, - help=_(u"delete a Pubsub hook"), + help=_("delete a Pubsub hook"), ) self.need_loop = True @@ -1972,24 +1962,23 @@ self.parser.add_argument( "-t", "--type", - default=u"", + default="", choices=("", "python", "python_file", "python_code"), - help=_(u"hook type to remove, empty to remove all (DEFAULT: remove all)"), + help=_("hook type to remove, empty to remove all (DEFAULT: remove all)"), ) self.parser.add_argument( "-a", "--arg", dest="hook_arg", - type=base.unicode_decoder, - default=u"", + default="", help=_( - u"argument of the hook to remove, empty to remove all (DEFAULT: remove all)" + "argument of the hook to remove, empty to remove all (DEFAULT: remove all)" ), ) def psHookRemoveCb(self, nb_deleted): self.disp( - _(u"{nb_deleted} hook(s) have been deleted").format(nb_deleted=nb_deleted) + _("{nb_deleted} hook(s) have been deleted").format(nb_deleted=nb_deleted) ) self.host.quit() @@ -2004,7 +1993,7 @@ callback=self.psHookRemoveCb, errback=partial( self.errback, - msg=_(u"can't delete hook: {}"), + msg=_("can't delete hook: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -2017,7 +2006,7 @@ host, "list", use_output=C.OUTPUT_LIST_DICT, - help=_(u"list hooks of a profile"), + help=_("list hooks of a profile"), ) self.need_loop = True @@ -2026,7 +2015,7 @@ def psHookListCb(self, data): if not data: - self.disp(_(u"No hook found.")) + self.disp(_("No hook found.")) self.output(data) self.host.quit() @@ -2036,7 +2025,7 @@ callback=self.psHookListCb, errback=partial( self.errback, - msg=_(u"can't list hooks: {}"), + msg=_("can't list hooks: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), )
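A pattern that repeats throughout the cmd_pubsub.py hunks above is the removal of type=base.unicode_decoder from add_argument() calls, together with the dropped u"" prefixes. The reason is that argparse already hands back str (text) in Python 3, so no manual decoding step is needed. A minimal sketch, with a hypothetical parser rather than jp's real command classes:

# Minimal sketch, hypothetical parser: argparse yields str in Python 3, so
# decoder hooks such as base.unicode_decoder can simply be dropped.
import argparse

parser = argparse.ArgumentParser(prog="example")
# Python 2 needed: parser.add_argument("item", type=base.unicode_decoder)
parser.add_argument("item", nargs="?", default="",
                    help="id or URL of the item to update")
args = parser.parse_args(["some-item-id"])
assert isinstance(args.item, str)   # already unicode text, no manual decode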
--- a/sat_frontends/jp/cmd_roster.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_roster.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from collections import OrderedDict from functools import partial from sat.core.i18n import _ @@ -43,7 +43,7 @@ self.host.bridge.getContacts(profile_key=self.host.profile, callback=self.gotContacts, errback=self.error) def error(self, failure): - print (_("Error while retrieving the contacts [%s]") % failure) + print((_("Error while retrieving the contacts [%s]") % failure)) self.host.quit(1) def ask_confirmation(self, no_sub, no_from, no_to): @@ -55,17 +55,17 @@ @return bool """ if no_sub: - print "There's no subscription between profile [%s] and the following contacts:" % self.host.profile - print " " + "\n ".join(no_sub) + print("There's no subscription between profile [%s] and the following contacts:" % self.host.profile) + print(" " + "\n ".join(no_sub)) if no_from: - print "There's no 'from' subscription between profile [%s] and the following contacts:" % self.host.profile - print " " + "\n ".join(no_from) + print("There's no 'from' subscription between profile [%s] and the following contacts:" % self.host.profile) + print(" " + "\n ".join(no_from)) if no_to: - print "There's no 'to' subscription between profile [%s] and the following contacts:" % self.host.profile - print " " + "\n ".join(no_to) + print("There's no 'to' subscription between profile [%s] and the following contacts:" % self.host.profile) + print(" " + "\n ".join(no_to)) message = "REMOVE them from profile [%s]'s roster" % self.host.profile while True: - res = raw_input("%s (y/N)? " % message) + res = input("%s (y/N)? 
" % message) if not res or res.lower() == 'n': return False if res.lower() == 'y': @@ -87,7 +87,7 @@ elif not to and self.args.no_to: no_to.append(contact) if not no_sub and not no_from and not no_to: - print "Nothing to do - there's a from and/or to subscription(s) between profile [%s] and each of its contacts" % self.host.profile + print("Nothing to do - there's a from and/or to subscription(s) between profile [%s] and each of its contacts" % self.host.profile) elif self.ask_confirmation(no_sub, no_from, no_to): for contact in no_sub + no_from + no_to: self.host.bridge.delContact(contact, profile_key=self.host.profile, callback=lambda __: None, errback=lambda failure: None) @@ -107,7 +107,7 @@ self.host.bridge.getContacts(profile_key=self.host.profile, callback=self.gotContacts, errback=self.error) def error(self, failure): - print (_("Error while retrieving the contacts [%s]") % failure) + print((_("Error while retrieving the contacts [%s]") % failure)) self.host.quit(1) def gotContacts(self, contacts): @@ -135,31 +135,31 @@ total_group_subscription += len(groups) if not groups: no_group += 1 - hosts = OrderedDict(sorted(hosts.items(), key=lambda item:-item[1])) + hosts = OrderedDict(sorted(list(hosts.items()), key=lambda item:-item[1])) - print - print "Total number of contacts: %d" % len(contacts) - print "Number of different hosts: %d" % len(hosts) - print - for host, count in hosts.iteritems(): - print "Contacts on {host}: {count} ({rate:.1f}%)".format(host=host, count=count, rate=100 * float(count) / len(contacts)) - print - print "Contacts with no 'from' subscription: %d" % no_from - print "Contacts with no 'to' subscription: %d" % no_to - print "Contacts with no subscription at all: %d" % no_sub - print - print "Total number of groups: %d" % len(unique_groups) + print() + print("Total number of contacts: %d" % len(contacts)) + print("Number of different hosts: %d" % len(hosts)) + print() + for host, count in hosts.items(): + print("Contacts on {host}: {count} ({rate:.1f}%)".format(host=host, count=count, rate=100 * float(count) / len(contacts))) + print() + print("Contacts with no 'from' subscription: %d" % no_from) + print("Contacts with no 'to' subscription: %d" % no_to) + print("Contacts with no subscription at all: %d" % no_sub) + print() + print("Total number of groups: %d" % len(unique_groups)) try: contacts_per_group = float(total_group_subscription) / len(unique_groups) except ZeroDivisionError: contacts_per_group = 0 - print "Average contacts per group: {:.1f}".format(contacts_per_group) + print("Average contacts per group: {:.1f}".format(contacts_per_group)) try: groups_per_contact = float(total_group_subscription) / len(contacts) except ZeroDivisionError: groups_per_contact = 0 - print "Average groups' subscriptions per contact: {:.1f}".format(groups_per_contact) - print "Contacts not assigned to any group: %d" % no_group + print("Average groups' subscriptions per contact: {:.1f}".format(groups_per_contact)) + print("Contacts not assigned to any group: %d" % no_group) self.host.quit() @@ -178,7 +178,7 @@ self.host.bridge.getContacts(profile_key=self.host.profile, callback=self.gotContacts, errback=self.error) def error(self, failure): - print (_("Error while retrieving the contacts [%s]") % failure) + print((_("Error while retrieving the contacts [%s]") % failure)) self.host.quit(1) def gotContacts(self, contacts): @@ -200,10 +200,10 @@ args.append("from" if C.bool(attrs["from"]) else "") args.append("to" if C.bool(attrs["to"]) else "") if self.args.name: - 
args.append(unicode(attrs.get("name", ""))) + args.append(str(attrs.get("name", ""))) if self.args.groups: - args.append(u"\t".join(groups) if groups else "") - print u";".join(["{}"] * field_count).format(*args).encode("utf-8") + args.append("\t".join(groups) if groups else "") + print(";".join(["{}"] * field_count).format(*args).encode("utf-8")) self.host.quit() @@ -211,14 +211,14 @@ def __init__(self, host): super(Resync, self).__init__( - host, 'resync', help=_(u'do a full resynchronisation of roster with server')) + host, 'resync', help=_('do a full resynchronisation of roster with server')) self.need_loop = True def add_parser_options(self): pass def rosterResyncCb(self): - self.disp(_(u"Roster resynchronized")) + self.disp(_("Roster resynchronized")) self.host.quit(C.EXIT_OK) def start(self): @@ -226,7 +226,7 @@ callback=self.rosterResyncCb, errback=partial( self.errback, - msg=_(u"can't resynchronise roster: {}"), + msg=_("can't resynchronise roster: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ))
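The cmd_roster.py hunks above gather several of the mechanical 2to3 rewrites in one place: the print statement becomes the print() function, raw_input() becomes input(), dict.iteritems() becomes dict.items(), and unicode() becomes str(). A small standalone sketch of the yes/no prompt pattern; the function name and messages are illustrative, not jp's API:

# Standalone sketch of the y/N prompt used above; names and messages are
# illustrative.  raw_input() and the print statement become input()/print().
def confirm(message):
    """Ask a yes/no question until an answer is given; default is no."""
    while True:
        res = input("%s (y/N)? " % message)     # raw_input() in Python 2
        if not res or res.lower() == "n":
            return False
        if res.lower() == "y":
            return True

if __name__ == "__main__":
    print("removing" if confirm("REMOVE these contacts") else "cancelled")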
--- a/sat_frontends/jp/cmd_shell.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_shell.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base import cmd import sys from sat.core.i18n import _ @@ -31,7 +31,7 @@ __commands__ = ["Shell"] INTRO = _( - u"""Welcome to {app_name} shell, the Salut à Toi shell ! + """Welcome to {app_name} shell, the Salut à Toi shell ! This enrironment helps you using several {app_name} commands with similar parameters. @@ -44,7 +44,7 @@ class Shell(base.CommandBase, cmd.Cmd): def __init__(self, host): base.CommandBase.__init__( - self, host, "shell", help=_(u"launch jp in shell (REPL) mode") + self, host, "shell", help=_("launch jp in shell (REPL) mode") ) cmd.Cmd.__init__(self) @@ -54,22 +54,22 @@ def update_path(self): self._cur_parser = self.host.parser - self.help = u"" + self.help = "" for idx, path_elt in enumerate(self.path): try: self._cur_parser = arg_tools.get_cmd_choices(path_elt, self._cur_parser) except exceptions.NotFound: - self.disp(_(u"bad command path"), error=True) + self.disp(_("bad command path"), error=True) self.path = self.path[:idx] break else: self.help = self._cur_parser - self.prompt = A.color(C.A_PROMPT_PATH, u"/".join(self.path)) + A.color( - C.A_PROMPT_SUF, u"> " + self.prompt = A.color(C.A_PROMPT_PATH, "/".join(self.path)) + A.color( + C.A_PROMPT_SUF, "> " ) try: - self.actions = arg_tools.get_cmd_choices(parser=self._cur_parser).keys() + self.actions = list(arg_tools.get_cmd_choices(parser=self._cur_parser).keys()) except exceptions.NotFound: self.actions = [] @@ -100,9 +100,9 @@ # Situation should be better when we'll move away from python-dbus if self.verbose: self.disp( - _(u"COMMAND {external}=> {args}").format( - external=_(u"(external) ") if external else u"", - args=u" ".join(self.format_args(args)), + _("COMMAND {external}=> {args}").format( + external=_("(external) ") if external else "", + args=" ".join(self.format_args(args)), ) ) if not external: @@ -124,7 +124,7 @@ self.disp( A.color( C.A_FAILURE, - u"command failed with an error code of {err_no}".format( + "command failed with an error code of {err_no}".format( err_no=ret_code ), ), @@ -145,12 +145,12 @@ def do_help(self, args): """show help message""" if not args: - self.disp(A.color(C.A_HEADER, _(u"Shell commands:")), no_lf=True) + self.disp(A.color(C.A_HEADER, _("Shell commands:")), no_lf=True) super(Shell, self).do_help(args) if not args: - self.disp(A.color(C.A_HEADER, _(u"Action commands:"))) + self.disp(A.color(C.A_HEADER, _("Action commands:"))) help_list = self._cur_parser.format_help().split("\n\n") - print("\n\n".join(help_list[1 if self.path else 2 :])) + print(("\n\n".join(help_list[1 if self.path else 2 :]))) def do_debug(self, args): """launch internal debugger""" @@ -166,8 +166,8 @@ if args: self.verbose = C.bool(args[0]) self.disp( - _(u"verbose mode is {status}").format( - status=_(u"ENABLED") if self.verbose else _(u"DISABLED") + _("verbose mode is {status}").format( + status=_("ENABLED") if self.verbose else _("DISABLED") ) ) @@ -211,7 +211,7 @@ # so we need to add it in arguments to use current user profile if self.verbose: self.disp( - _(u"arg profile={profile} (logged profile)").format( + _("arg profile={profile} (logged profile)").format( profile=self.profile ) ) @@ -233,25 +233,25 @@ args = self.parse_args(args) if not args: if not self.use: - self.disp(_(u"no argument in USE")) + self.disp(_("no argument in USE")) else: - 
self.disp(_(u"arguments in USE:")) - for arg, value in self.use.iteritems(): + self.disp(_("arguments in USE:")) + for arg, value in self.use.items(): self.disp( _( A.color( C.A_SUBHEADER, arg, A.RESET, - u" = ", + " = ", arg_tools.escape(value), ) ) ) elif len(args) != 2: - self.disp(u"bad syntax, please use:\nuse [arg] [value]", error=True) + self.disp("bad syntax, please use:\nuse [arg] [value]", error=True) else: - self.use[args[0]] = u" ".join(args[1:]) + self.use[args[0]] = " ".join(args[1:]) if self.verbose: self.disp( "set {name} = {value}".format( @@ -271,25 +271,25 @@ except KeyError: self.disp( A.color( - C.A_FAILURE, _(u"argument {name} not found").format(name=arg) + C.A_FAILURE, _("argument {name} not found").format(name=arg) ), error=True, ) else: if self.verbose: - self.disp(_(u"argument {name} removed").format(name=arg)) + self.disp(_("argument {name} removed").format(name=arg)) def do_whoami(self, args): - u"""print profile currently used""" + """print profile currently used""" self.disp(self.profile) def do_quit(self, args): - u"""quit the shell""" - self.disp(_(u"good bye!")) + """quit the shell""" + self.disp(_("good bye!")) self.host.quit() def do_exit(self, args): - u"""alias for quit""" + """alias for quit""" self.do_quit(args) def start(self):
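Beyond the u"" prefixes, the cmd_shell.py hunks show the dictionary-view change: .keys() and .items() are lazy views in Python 3, so the code wraps them in list() wherever a real list is expected and switches iteritems() to items(). A minimal sketch with a throwaway dict, not jp's actual parser choices:

# Minimal sketch, throwaway dict: keys()/items() return views in Python 3,
# so list() is needed where the old code relied on getting a real list.
choices = {"get": "fetch items", "set": "publish items", "delete": "retract items"}

actions = list(choices.keys())               # Python 2: choices.keys() was a list
for name, description in choices.items():    # Python 2: choices.iteritems()
    print(name, "-", description)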
--- a/sat_frontends/jp/cmd_ticket.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_ticket.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat_frontends.jp import common from sat_frontends.jp.constants import Const as C @@ -28,7 +28,7 @@ __commands__ = ["Ticket"] -FIELDS_MAP = u"mapping" +FIELDS_MAP = "mapping" class Get(base.CommandBase): @@ -40,9 +40,9 @@ use_verbose=True, use_pubsub=True, pubsub_flags={C.MULTI_ITEMS}, - pubsub_defaults={u"service": _(u"auto"), u"node": _(u"auto")}, + pubsub_defaults={"service": _("auto"), "node": _("auto")}, use_output=C.OUTPUT_LIST_XMLUI, - help=_(u"get tickets"), + help=_("get tickets"), ) self.need_loop = True @@ -59,13 +59,13 @@ self.args.node, self.args.max, self.args.items, - u"", + "", self.getPubsubExtra(), self.profile, callback=self.ticketsGetCb, errback=partial( self.errback, - msg=_(u"can't get tickets: {}"), + msg=_("can't get tickets: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -82,16 +82,15 @@ host, "import", use_progress=True, - help=_(u"import tickets from external software/dataset"), + help=_("import tickets from external software/dataset"), ) self.need_loop = True def add_parser_options(self): self.parser.add_argument( "importer", - type=base.unicode_decoder, nargs="?", - help=_(u"importer name, nothing to display importers list"), + help=_("importer name, nothing to display importers list"), ) self.parser.add_argument( "-o", @@ -99,8 +98,8 @@ action="append", nargs=2, default=[], - metavar=(u"NAME", u"VALUE"), - help=_(u"importer specific options (see importer description)"), + metavar=("NAME", "VALUE"), + help=_("importer specific options (see importer description)"), ) self.parser.add_argument( "-m", @@ -108,46 +107,43 @@ action="append", nargs=2, default=[], - metavar=(u"IMPORTED_FIELD", u"DEST_FIELD"), + metavar=("IMPORTED_FIELD", "DEST_FIELD"), help=_( - u"specified field in import data will be put in dest field (default: use same field name, or ignore if it doesn't exist)" + "specified field in import data will be put in dest field (default: use same field name, or ignore if it doesn't exist)" ), ) self.parser.add_argument( "-s", "--service", - type=base.unicode_decoder, - default=u"", - metavar=u"PUBSUB_SERVICE", - help=_(u"PubSub service where the items must be uploaded (default: server)"), + default="", + metavar="PUBSUB_SERVICE", + help=_("PubSub service where the items must be uploaded (default: server)"), ) self.parser.add_argument( "-n", "--node", - type=base.unicode_decoder, - default=u"", - metavar=u"PUBSUB_NODE", + default="", + metavar="PUBSUB_NODE", help=_( - u"PubSub node where the items must be uploaded (default: tickets' defaults)" + "PubSub node where the items must be uploaded (default: tickets' defaults)" ), ) self.parser.add_argument( "location", - type=base.unicode_decoder, nargs="?", help=_( - u"importer data location (see importer description), nothing to show importer description" + "importer data location (see importer description), nothing to show importer description" ), ) def onProgressStarted(self, metadata): - self.disp(_(u"Tickets upload started"), 2) + self.disp(_("Tickets upload started"), 2) def onProgressFinished(self, metadata): - self.disp(_(u"Tickets uploaded successfully"), 2) + self.disp(_("Tickets uploaded successfully"), 2) def onProgressError(self, error_msg): - self.disp(_(u"Error while uploading tickets: {}").format(error_msg), 
error=True) + self.disp(_("Error while uploading tickets: {}").format(error_msg), error=True) def error(self, failure): self.disp( @@ -162,14 +158,14 @@ if getattr(self.args, name): self.parser.error( _( - u"{name} argument can't be used without location argument" + "{name} argument can't be used without location argument" ).format(name=name) ) if self.args.importer is None: self.disp( - u"\n".join( + "\n".join( [ - u"{}: {}".format(name, desc) + "{}: {}".format(name, desc) for name, desc in self.host.bridge.ticketsImportList() ] ) @@ -180,14 +176,14 @@ self.args.importer ) except Exception as e: - msg = [l for l in unicode(e).split("\n") if l][ + msg = [l for l in str(e).split("\n") if l][ -1 ] # we only keep the last line self.disp(msg) self.host.quit(1) else: self.disp( - u"{name}: {short_desc}\n\n{long_desc}".format( + "{name}: {short_desc}\n\n{long_desc}".format( name=self.args.importer, short_desc=short_desc, long_desc=long_desc, @@ -202,7 +198,7 @@ if FIELDS_MAP in options: self.parser.error( _( - u"fields_map must be specified either preencoded in --option or using --map, but not both at the same time" + "fields_map must be specified either preencoded in --option or using --map, but not both at the same time" ) ) options[FIELDS_MAP] = json.dumps(fields_map)
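In the cmd_ticket.py import command, unicode(e) becomes str(e) while the logic that keeps only the last non-empty line of an importer error is unchanged. A standalone sketch of that trimming; the exception text is invented for the example:

# Standalone sketch of the error trimming above; the message is invented.
try:
    raise ValueError("importer not found\ncheck the importer name and retry")
except Exception as e:
    msg = [line for line in str(e).split("\n") if line][-1]   # was unicode(e)
    print(msg)   # -> "check the importer name and retry"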
--- a/sat_frontends/jp/cmd_uri.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/cmd_uri.py Tue Aug 13 19:08:41 2019 +0200 @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. -import base +from . import base from sat.core.i18n import _ from sat_frontends.jp.constants import Const as C from sat.tools.common import uri @@ -34,12 +34,12 @@ "parse", use_profile=False, use_output=C.OUTPUT_DICT, - help=_(u"parse URI"), + help=_("parse URI"), ) def add_parser_options(self): self.parser.add_argument( - "uri", type=base.unicode_decoder, help=_(u"XMPP URI to parse") + "uri", help=_("XMPP URI to parse") ) def start(self): @@ -49,21 +49,20 @@ class Build(base.CommandBase): def __init__(self, host): base.CommandBase.__init__( - self, host, "build", use_profile=False, help=_(u"build URI") + self, host, "build", use_profile=False, help=_("build URI") ) def add_parser_options(self): - self.parser.add_argument("type", type=base.unicode_decoder, help=_(u"URI type")) - self.parser.add_argument("path", type=base.unicode_decoder, help=_(u"URI path")) + self.parser.add_argument("type", help=_("URI type")) + self.parser.add_argument("path", help=_("URI path")) self.parser.add_argument( "-f", "--field", - type=base.unicode_decoder, action="append", nargs=2, dest="fields", - metavar=(u"KEY", u"VALUE"), - help=_(u"URI fields"), + metavar=("KEY", "VALUE"), + help=_("URI fields"), ) def start(self):
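Every jp command module now opens with from . import base instead of import base, because Python 3 removed implicit relative imports. The sketch below only demonstrates the failure mode of the old spelling when run outside the sat_frontends.jp package; apart from the module name base, everything is illustrative:

# Illustrative only: Python 3 treats "import base" as an absolute import, so
# outside the package it no longer finds the sibling module; the explicit
# "from . import base" form is required instead.
try:
    import base                     # no top-level "base" module is expected here
except ImportError as exc:
    print("absolute import failed as expected:", exc)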
--- a/sat_frontends/jp/common.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/common.py Tue Aug 13 19:08:41 2019 +0200 @@ -24,7 +24,7 @@ from sat.tools.common.ansi import ANSI as A from sat.tools.common import uri as xmpp_uri from sat.tools import config -from ConfigParser import NoSectionError, NoOptionError +from configparser import NoSectionError, NoOptionError from collections import namedtuple from functools import partial import json @@ -55,7 +55,7 @@ def ansi_ljust(s, width): """ljust method handling ANSI escape codes""" cleaned = regex.ansiRemove(s) - return s + u" " * (width - len(cleaned)) + return s + " " * (width - len(cleaned)) def ansi_center(s, width): @@ -63,13 +63,13 @@ cleaned = regex.ansiRemove(s) diff = width - len(cleaned) half = diff / 2 - return half * u" " + s + (half + diff % 2) * u" " + return half * " " + s + (half + diff % 2) * " " def ansi_rjust(s, width): """ljust method handling ANSI escape codes""" cleaned = regex.ansiRemove(s) - return u" " * (width - len(cleaned)) + s + return " " * (width - len(cleaned)) + s def getTmpDir(sat_conf, cat_dir, sub_dir=None): @@ -103,13 +103,13 @@ return [a.format(**format_kw) for a in shlex.split(cmd_line)] except ValueError as e: host.disp( - u"Couldn't parse editor cmd [{cmd}]: {reason}".format(cmd=cmd_line, reason=e) + "Couldn't parse editor cmd [{cmd}]: {reason}".format(cmd=cmd_line, reason=e) ) return [] class BaseEdit(object): - u"""base class for editing commands + """base class for editing commands This class allows to edit file for PubSub or something else. It works with temporary files in SàT local_dir, in a "cat_dir" subdir @@ -138,11 +138,11 @@ @param path(str): file to unlink """ if not os.path.isfile(path): - raise OSError(u"path must link to a regular file") + raise OSError("path must link to a regular file") if not path.startswith(getTmpDir(self.sat_conf, self.cat_dir_str)): self.disp( - u"File {} is not in SàT temporary hierarchy, we do not remove it".format( - path.decode("utf-8") + "File {} is not in SàT temporary hierarchy, we do not remove it".format( + path ), 2, ) @@ -156,8 +156,8 @@ backup_path = os.path.join(backup_dir, filename) # we move file to backup dir self.host.disp( - u"Backuping file {src} to {dst}".format( - src=path.decode("utf-8"), dst=backup_path.decode("utf-8") + "Backuping file {src} to {dst}".format( + src=path, dst=backup_path ), 1, ) @@ -167,7 +167,7 @@ if len(backup_files) > unlink_max: backup_files.sort(key=lambda path: os.stat(path).st_mtime) for path in backup_files[: len(backup_files) - unlink_max]: - self.host.disp(u"Purging backup file {}".format(path.decode("utf-8")), 2) + self.host.disp("Purging backup file {}".format(path), 2) os.unlink(path) def runEditor( @@ -228,7 +228,7 @@ # edition will now be checked, and data will be sent if it was a success if editor_exit != 0: self.disp( - u"Editor exited with an error code, so temporary file has not be deleted, and item is not published.\nYou can find temporary file at {path}".format( + "Editor exited with an error code, so temporary file has not be deleted, and item is not published.\nYou can find temporary file at {path}".format( path=content_file_path ), error=True, @@ -240,7 +240,7 @@ content = f.read() except (OSError, IOError): self.disp( - u"Can read file at {content_path}, have it been deleted?\nCancelling edition".format( + "Can read file at {content_path}, have it been deleted?\nCancelling edition".format( content_path=content_file_path ), error=True, @@ -254,7 +254,7 @@ metadata = json.load(f) except (OSError, 
IOError): self.disp( - u"Can read file at {meta_file_path}, have it been deleted?\nCancelling edition".format( + "Can read file at {meta_file_path}, have it been deleted?\nCancelling edition".format( content_path=content_file_path, meta_path=meta_file_path ), error=True, @@ -262,7 +262,7 @@ self.host.quit(C.EXIT_NOT_FOUND) except ValueError: self.disp( - u"Can't parse metadata, please check it is correct JSON format. Cancelling edition.\n" + "Can't parse metadata, please check it is correct JSON format. Cancelling edition.\n" + "You can find tmp file at {content_path} and temporary meta file at {meta_path}.".format( content_path=content_file_path, meta_path=meta_file_path ), @@ -272,7 +272,7 @@ if self.use_metadata and not metadata.get("publish", True): self.disp( - u'Publication blocked by "publish" key in metadata, cancelling edition.\n\n' + 'Publication blocked by "publish" key in metadata, cancelling edition.\n\n' + "temporary file path:\t{content_path}\nmetadata file path:\t{meta_path}".format( content_path=content_file_path, meta_path=meta_file_path ), @@ -281,19 +281,19 @@ self.host.quit() if len(content) == 0: - self.disp(u"Content is empty, cancelling the edition") + self.disp("Content is empty, cancelling the edition") if not content_file_path.startswith( getTmpDir(self.sat_conf, self.cat_dir_str) ): self.disp( - u"File are not in SàT temporary hierarchy, we do not remove them", + "File are not in SàT temporary hierarchy, we do not remove them", 2, ) self.host.quit() - self.disp(u"Deletion of {}".format(content_file_path.decode("utf-8")), 2) + self.disp("Deletion of {}".format(content_file_path), 2) os.unlink(content_file_path) if self.use_metadata: - self.disp(u"Deletion of {}".format(meta_file_path.decode("utf-8")), 2) + self.disp("Deletion of {}".format(meta_file_path), 2) os.unlink(meta_file_path) self.host.quit() @@ -301,7 +301,7 @@ elif tmp_ori_hash == hashlib.sha1(content).digest() and ( not self.use_metadata or meta_ori == metadata ): - self.disp(u"The content has not been modified, cancelling the edition") + self.disp("The content has not been modified, cancelling the edition") self.host.quit() else: @@ -315,7 +315,7 @@ except Exception as e: if self.use_metadata: self.disp( - u"Error while sending your item, the temporary files have been kept at {content_path} and {meta_path}: {reason}".format( + "Error while sending your item, the temporary files have been kept at {content_path} and {meta_path}: {reason}".format( content_path=content_file_path, meta_path=meta_file_path, reason=e, @@ -324,7 +324,7 @@ ) else: self.disp( - u"Error while sending your item, the temporary file has been kept at {content_path}: {reason}".format( + "Error while sending your item, the temporary file has been kept at {content_path}: {reason}".format( content_path=content_file_path, reason=e ), error=True, @@ -353,7 +353,7 @@ os.makedirs(tmp_dir) except OSError as e: self.disp( - u"Can't create {path} directory: {reason}".format( + "Can't create {path} directory: {reason}".format( path=tmp_dir, reason=e ), error=True, @@ -369,7 +369,7 @@ return os.fdopen(fd, "w+b"), path except OSError as e: self.disp( - u"Can't create temporary file: {reason}".format(reason=e), error=True + "Can't create temporary file: {reason}".format(reason=e), error=True ) self.host.quit(1) @@ -391,7 +391,7 @@ ] if not available: self.disp( - u"Could not find any content draft in {path}".format(path=tmp_dir), + "Could not find any content draft in {path}".format(path=tmp_dir), error=True, ) self.host.quit(1) @@ -403,7 +403,7 
@@ def getTmpSuff(self): """return suffix used for content file""" - return u"xml" + return "xml" def getItemPath(self): """retrieve item path (i.e. service and node) from item argument @@ -418,7 +418,7 @@ if self.args.current: # user wants to continue current draft content_file_path = self.getCurrentFile(self.profile) - self.disp(u"Continuing edition of current draft", 2) + self.disp("Continuing edition of current draft", 2) content_file_obj = open(content_file_path, "r+b") # we seek at the end of file in case of an item already exist # this will write content of the existing item at the end of the draft. @@ -435,7 +435,7 @@ content_file_obj, content_file_path = self.getTmpFile() if item or last_item: - self.disp(u"Editing requested published item", 2) + self.disp("Editing requested published item", 2) try: if self.use_metadata: content, metadata, item = self.getItemData(service, node, item) @@ -443,21 +443,21 @@ content, item = self.getItemData(service, node, item) except Exception as e: # FIXME: ugly but we have not good may to check errors in bridge - if u"item-not-found" in unicode(e): + if "item-not-found" in str(e): # item doesn't exist, we create a new one with requested id metadata = None if last_item: - self.disp(_(u"no item found at all, we create a new one"), 2) + self.disp(_("no item found at all, we create a new one"), 2) else: self.disp( _( - u'item "{item_id}" not found, we create a new item with this id' + 'item "{item_id}" not found, we create a new item with this id' ).format(item_id=item), 2, ) content_file_obj.seek(0) else: - self.disp(u"Error while retrieving item: {}".format(e)) + self.disp("Error while retrieving item: {}".format(e)) self.host.quit(C.EXIT_ERROR) else: # item exists, we write content @@ -468,10 +468,10 @@ content_file_obj.write(content.encode("utf-8")) content_file_obj.seek(0) self.disp( - _(u'item "{item_id}" found, we edit it').format(item_id=item), 2 + _('item "{item_id}" found, we edit it').format(item_id=item), 2 ) else: - self.disp(u"Editing a new item", 2) + self.disp("Editing a new item", 2) if self.use_metadata: metadata = None @@ -520,7 +520,7 @@ for idx, value in enumerate(row_data_list): if filters is not None and filters[idx] is not None: filter_ = filters[idx] - if isinstance(filter_, basestring): + if isinstance(filter_, str): col_value = filter_.format(value) else: try: @@ -531,7 +531,7 @@ # when it's mostly style/color changes. 
col_size = len(regex.ansiRemove(col_value)) else: - col_value = unicode(value) + col_value = str(value) col_size = len(col_value) new_row.append(col_value) if size is None: @@ -541,10 +541,10 @@ if size is None: size = len(new_row) if headers is not None and len(headers) != size: - raise exceptions.DataError(u"headers size is not coherent with rows") + raise exceptions.DataError("headers size is not coherent with rows") else: if len(new_row) != size: - raise exceptions.DataError(u"rows size is not coherent") + raise exceptions.DataError("rows size is not coherent") self.rows.append(new_row) if not data and headers is not None: @@ -554,8 +554,8 @@ @property def string(self): if self._buffer is None: - raise exceptions.InternalError(u"buffer must be used to get a string") - return u"\n".join(self._buffer) + raise exceptions.InternalError("buffer must be used to get a string") + return "\n".join(self._buffer) @staticmethod def readDictValues(data, keys, defaults=None): @@ -589,9 +589,9 @@ """ if keys is None and headers is not None: # FIXME: keys are not needed with OrderedDict, - raise exceptions.DataError(u"You must specify keys order to used headers") + raise exceptions.DataError("You must specify keys order to used headers") if keys is None: - keys = data[0].keys() + keys = list(data[0].keys()) if headers is None: headers = keys filters = [filters.get(k) for k in keys] @@ -599,7 +599,7 @@ host, (cls.readDictValues(d, keys, defaults) for d in data), headers, filters ) - def _headers(self, head_sep, headers, sizes, alignment=u"left", style=None): + def _headers(self, head_sep, headers, sizes, alignment="left", style=None): """Render headers @param head_sep(unicode): sequence to use as separator @@ -609,18 +609,18 @@ @param sizes(list[int]): sizes of columns """ rendered_headers = [] - if isinstance(style, basestring): + if isinstance(style, str): style = [style] for idx, header in enumerate(headers): size = sizes[idx] - if alignment == u"left": + if alignment == "left": rendered = header[:size].ljust(size) - elif alignment == u"center": + elif alignment == "center": rendered = header[:size].center(size) - elif alignment == u"right": + elif alignment == "right": rendered = header[:size].rjust(size) else: - raise exceptions.InternalError(u"bad alignment argument") + raise exceptions.InternalError("bad alignment argument") if style: args = style + [rendered] rendered = A.color(*args) @@ -636,28 +636,28 @@ def display( self, - head_alignment=u"left", - columns_alignment=u"left", + head_alignment="left", + columns_alignment="left", head_style=None, show_header=True, show_borders=True, hide_cols=None, - col_sep=u" │ ", - top_left=u"┌", - top=u"─", - top_sep=u"─┬─", - top_right=u"┐", - left=u"│", + col_sep=" │ ", + top_left="┌", + top="─", + top_sep="─┬─", + top_right="┐", + left="│", right=None, head_sep=None, - head_line=u"┄", - head_line_left=u"├", - head_line_sep=u"┄┼┄", - head_line_right=u"┤", - bottom_left=u"└", + head_line="┄", + head_line_left="├", + head_line_sep="┄┼┄", + head_line_right="┤", + bottom_left="└", bottom=None, - bottom_sep=u"─┴─", - bottom_right=u"┘", + bottom_sep="─┴─", + bottom_right="┘", ): """Print the table @@ -700,7 +700,7 @@ if bottom_sep is None: bottom_sep = col_sep_size * bottom if not show_borders: - left = right = head_line_left = head_line_right = u"" + left = right = head_line_left = head_line_right = "" # top border if show_borders: self._disp( @@ -722,14 +722,14 @@ ) # content - if columns_alignment == u"left": + if columns_alignment == "left": alignment = 
lambda idx, s: ansi_ljust(s, sizes[idx]) - elif columns_alignment == u"center": + elif columns_alignment == "center": alignment = lambda idx, s: ansi_center(s, sizes[idx]) - elif columns_alignment == u"right": + elif columns_alignment == "right": alignment = lambda idx, s: ansi_rjust(s, sizes[idx]) else: - raise exceptions.InternalError(u"bad columns alignment argument") + raise exceptions.InternalError("bad columns alignment argument") for row in self.rows: if hide_cols: @@ -752,7 +752,7 @@ def display_blank(self, **kwargs): """Display table without visible borders""" - kwargs_ = {"col_sep": u" ", "head_line_sep": u" ", "show_borders": False} + kwargs_ = {"col_sep": " ", "head_line_sep": " ", "show_borders": False} kwargs_.update(kwargs) return self.display(**kwargs_) @@ -786,7 +786,7 @@ callback=self.URIFindCb, errback=partial( command.errback, - msg=_(u"can't find " + key + u" URI: {}"), + msg=_("can't find " + key + " URI: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), ) @@ -812,7 +812,7 @@ values = getattr(self.args, key) except AttributeError: raise exceptions.InternalError( - u'there is no "{key}" arguments'.format(key=key) + 'there is no "{key}" arguments'.format(key=key) ) else: if values is None: @@ -826,20 +826,20 @@ except KeyError: self.host.disp( _( - u"No {key} URI specified for this project, please specify service and node" + "No {key} URI specified for this project, please specify service and node" ).format(key=self.key), error=True, ) self.host.quit(C.EXIT_NOT_FOUND) else: - uri = uri_data[u"uri"] + uri = uri_data["uri"] - self.setMetadataList(uri_data, u"labels") + self.setMetadataList(uri_data, "labels") parsed_uri = xmpp_uri.parseXMPPUri(uri) try: - self.args.service = parsed_uri[u"path"] - self.args.node = parsed_uri[u"node"] + self.args.service = parsed_uri["path"] + self.args.node = parsed_uri["node"] except KeyError: - self.host.disp(_(u"Invalid URI found: {uri}").format(uri=uri), error=True) + self.host.disp(_("Invalid URI found: {uri}").format(uri=uri), error=True) self.host.quit(C.EXIT_DATA_ERROR) self.callback()
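The hunks above mostly swap Python 2's basestring/unicode for str, and wrap dict.keys() in list() where an indexable sequence is needed. A minimal sketch of the same pattern, with made-up names and data:

def normalise_cell(value, filter_=None):
    """Render one table cell as text, Python 3 style."""
    if filter_ is not None and isinstance(filter_, str):   # basestring is gone in Python 3
        return filter_.format(value)
    return str(value)                                      # str() replaces unicode()

data = [{"name": "example", "count": 3}]
keys = list(data[0].keys())        # .keys() is a view; list() gives an indexable sequence
print(keys)
print([normalise_cell(v) for v in data[0].values()])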
--- a/sat_frontends/jp/constants.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/constants.py Tue Aug 13 19:08:41 2019 +0200 @@ -23,20 +23,20 @@ class Const(constants.Const): - APP_NAME = u"jp" - PLUGIN_CMD = u"commands" - PLUGIN_OUTPUT = u"outputs" - OUTPUT_TEXT = u"text" # blob of unicode text - OUTPUT_DICT = u"dict" # simple key/value dictionary - OUTPUT_LIST = u"list" - OUTPUT_LIST_DICT = u"list_dict" # list of dictionaries - OUTPUT_DICT_DICT = u"dict_dict" # dict of nested dictionaries - OUTPUT_MESS = u"mess" # messages (chat) - OUTPUT_COMPLEX = u"complex" # complex data (e.g. multi-level dictionary) - OUTPUT_XML = u"xml" # XML node (as unicode string) - OUTPUT_LIST_XML = u"list_xml" # list of XML nodes (as unicode strings) - OUTPUT_XMLUI = u"xmlui" # XMLUI as unicode string - OUTPUT_LIST_XMLUI = u"list_xmlui" # list of XMLUI (as unicode strings) + APP_NAME = "jp" + PLUGIN_CMD = "commands" + PLUGIN_OUTPUT = "outputs" + OUTPUT_TEXT = "text" # blob of unicode text + OUTPUT_DICT = "dict" # simple key/value dictionary + OUTPUT_LIST = "list" + OUTPUT_LIST_DICT = "list_dict" # list of dictionaries + OUTPUT_DICT_DICT = "dict_dict" # dict of nested dictionaries + OUTPUT_MESS = "mess" # messages (chat) + OUTPUT_COMPLEX = "complex" # complex data (e.g. multi-level dictionary) + OUTPUT_XML = "xml" # XML node (as unicode string) + OUTPUT_LIST_XML = "list_xml" # list of XML nodes (as unicode strings) + OUTPUT_XMLUI = "xmlui" # XMLUI as unicode string + OUTPUT_LIST_XMLUI = "list_xmlui" # list of XMLUI (as unicode strings) OUTPUT_TYPES = ( OUTPUT_TEXT, OUTPUT_DICT, @@ -52,12 +52,12 @@ ) # Pubsub options flags - SERVICE = u"service" # service required - NODE = u"node" # node required - ITEM = u"item" # item required - SINGLE_ITEM = u"single_item" # only one item is allowed - MULTI_ITEMS = u"multi_items" # multiple items are allowed - NO_MAX = u"no_max" # don't add --max option for multi items + SERVICE = "service" # service required + NODE = "node" # node required + ITEM = "item" # item required + SINGLE_ITEM = "single_item" # only one item is allowed + MULTI_ITEMS = "multi_items" # multiple items are allowed + NO_MAX = "no_max" # don't add --max option for multi items # ANSI A_HEADER = A.BOLD + A.FG_YELLOW
--- a/sat_frontends/jp/jp Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/jp Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # jp: a SAT command line tool
--- a/sat_frontends/jp/output_std.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/output_std.py Tue Aug 13 19:08:41 2019 +0200 @@ -26,9 +26,9 @@ import json __outputs__ = ["Simple", "Json"] -SIMPLE = u"simple" -JSON = u"json" -JSON_RAW = u"json_raw" +SIMPLE = "simple" +JSON = "json" +JSON_RAW = "json_raw" class Simple(object): @@ -45,25 +45,25 @@ host.register_output(C.OUTPUT_COMPLEX, SIMPLE, self.simple_print) def simple_print(self, data): - self.host.disp(unicode(data)) + self.host.disp(str(data)) def list(self, data): - self.host.disp(u"\n".join(data)) + self.host.disp("\n".join(data)) def dict(self, data, indent=0, header_color=C.A_HEADER): options = self.host.parse_output_options() - self.host.check_output_options({u"no-header"}, options) - show_header = not u"no-header" in options - for k, v in data.iteritems(): + self.host.check_output_options({"no-header"}, options) + show_header = not "no-header" in options + for k, v in data.items(): if show_header: - header = A.color(header_color, k) + u": " + header = A.color(header_color, k) + ": " else: - header = u"" + header = "" self.host.disp( ( - u"{indent}{header}{value}".format( - indent=indent * u" ", header=header, value=v + "{indent}{header}{value}".format( + indent=indent * " ", header=header, value=v ) ) ) @@ -71,11 +71,11 @@ def list_dict(self, data): for idx, datum in enumerate(data): if idx: - self.host.disp(u"\n") + self.host.disp("\n") self.dict(datum) def dict_dict(self, data): - for key, sub_dict in data.iteritems(): + for key, sub_dict in data.items(): self.host.disp(A.color(C.A_HEADER, key)) self.dict(sub_dict, indent=4, header_color=C.A_SUBHEADER) @@ -84,7 +84,7 @@ for mess_data in data: (uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra) = mess_data - time_str = date_utils.date_fmt(timestamp, u"auto_day", + time_str = date_utils.date_fmt(timestamp, "auto_day", tz_info=date_utils.TZ_LOCAL) from_jid = jid.JID(from_jid) if mess_type == C.MESS_TYPE_GROUPCHAT: @@ -96,11 +96,11 @@ nick_color = A.BOLD + A.FG_BLUE else: nick_color = A.BOLD + A.FG_YELLOW - message = message.values()[0] if message else u"" + message = list(message.values())[0] if message else "" self.host.disp(A.color( - A.FG_CYAN, u'['+time_str+u'] ', - nick_color, nick, A.RESET, A.BOLD, u'> ', + A.FG_CYAN, '['+time_str+'] ', + nick_color, nick, A.RESET, A.BOLD, '> ', A.RESET, message))
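Python 3 dicts drop iteritems()/itervalues() in favour of view objects, and views cannot be indexed, which is why message.values() is wrapped in list() above. A small illustration with made-up data:

message = {"": "hello", "fr": "salut"}

for lang, body in message.items():                 # replaces Python 2's iteritems()
    print(lang or "default", body)

# dict views cannot be indexed, hence the explicit list() in the hunk above
first_body = list(message.values())[0] if message else ""
print(first_body)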
--- a/sat_frontends/jp/output_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/output_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -30,8 +30,8 @@ import os.path __outputs__ = ["Template"] -TEMPLATE = u"template" -OPTIONS = {u"template", u"browser", u"inline-css"} +TEMPLATE = "template" +OPTIONS = {"template", "browser", "inline-css"} class Template(object): @@ -43,14 +43,14 @@ def _front_url_tmp_dir(self, ctx, relative_url, tmp_dir): """Get front URL for temporary directory""" - template_data = ctx[u'template_data'] - return u"file://" + os.path.join(tmp_dir, template_data.theme, relative_url) + template_data = ctx['template_data'] + return "file://" + os.path.join(tmp_dir, template_data.theme, relative_url) def _do_render(self, template_path, css_inline, **kwargs): try: return self.renderer.render(template_path, css_inline=css_inline, **kwargs) except template.TemplateNotFound: - self.host.disp(_(u"Can't find requested template: {template_path}") + self.host.disp(_("Can't find requested template: {template_path}") .format(template_path=template_path), error=True) self.host.quit(C.EXIT_NOT_FOUND) @@ -72,8 +72,8 @@ except AttributeError: if not "template" in cmd.args.output_opts: self.host.disp(_( - u"no default template set for this command, you need to specify a " - u"template using --oo template=[path/to/template.html]"), + "no default template set for this command, you need to specify a " + "template using --oo template=[path/to/template.html]"), error=True, ) self.host.quit(C.EXIT_BAD_ARG) @@ -86,7 +86,7 @@ # template is not specified, we use default one pass if template_path is None: - self.host.disp(_(u"Can't parse template, please check its syntax"), + self.host.disp(_("Can't parse template, please check its syntax"), error=True) self.host.quit(C.EXIT_BAD_ARG) @@ -97,7 +97,7 @@ else: kwargs = mapping_cb(data) - css_inline = u"inline-css" in options + css_inline = "inline-css" in options if "browser" in options: template_name = os.path.basename(template_path) @@ -107,9 +107,9 @@ self.host, front_url_filter=front_url_filter, trusted=True) rendered = self._do_render(template_path, css_inline=css_inline, **kwargs) self.host.disp(_( - u"Browser opening requested.\n" - u"Temporary files are put in the following directory, you'll have to " - u"delete it yourself once finished viewing: {}").format(tmp_dir)) + "Browser opening requested.\n" + "Temporary files are put in the following directory, you'll have to " + "delete it yourself once finished viewing: {}").format(tmp_dir)) tmp_file = os.path.join(tmp_dir, template_name) with open(tmp_file, "w") as f: f.write(rendered.encode("utf-8")) @@ -129,7 +129,7 @@ else: # FIXME: Q&D way to disable template logging # logs are overcomplicated, and need to be reworked - template_logger = log.getLogger(u"sat.tools.common.template") + template_logger = log.getLogger("sat.tools.common.template") template_logger.log = lambda *args: None logging.disable(logging.WARNING)
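A Python 2 leftover remains in the unchanged lines of the browser branch above: the temporary file is opened with mode "w" (text) while f.write(rendered.encode("utf-8")) passes bytes, which raises TypeError under Python 3. A sketch of the two safe variants, with an illustrative file name:

rendered = "<html>example</html>"          # the template renderer returns str in Python 3

# either write text and let Python encode it...
with open("page.html", "w", encoding="utf-8") as f:
    f.write(rendered)

# ...or open the file in binary mode and encode explicitly
with open("page.html", "wb") as f:
    f.write(rendered.encode("utf-8"))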
--- a/sat_frontends/jp/output_xml.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/output_xml.py Tue Aug 13 19:08:41 2019 +0200 @@ -36,8 +36,8 @@ __outputs__ = ["XML"] -RAW = u"xml_raw" -PRETTY = u"xml_pretty" +RAW = "xml_raw" +PRETTY = "xml_pretty" class XML(object): @@ -54,7 +54,7 @@ if pygments is None: self.host.disp( _( - u"Pygments is not available, syntax highlighting is not possible. Please install if from http://pygments.org or with pip install pygments" + "Pygments is not available, syntax highlighting is not possible. Please install it from http://pygments.org or with pip install pygments" ), error=True, ) @@ -62,7 +62,7 @@ if not sys.stdout.isatty(): return xml lexer = XmlLexer(encoding="utf-8") - formatter = TerminalFormatter(bg=u"dark") + formatter = TerminalFormatter(bg="dark") return pygments.highlight(xml, lexer, formatter) def format(self, data, pretty=True): @@ -77,13 +77,13 @@ def pretty(self, data): self.host.disp(self.format(data)) - def pretty_list(self, data, separator=u"\n"): - list_pretty = map(self.format, data) + def pretty_list(self, data, separator="\n"): + list_pretty = list(map(self.format, data)) self.host.disp(separator.join(list_pretty)) def raw(self, data): self.host.disp(self.format_no_pretty(data)) - def list_raw(self, data, separator=u"\n"): - list_no_pretty = map(self.format_no_pretty, data) + def list_raw(self, data, separator="\n"): + list_no_pretty = list(map(self.format_no_pretty, data)) self.host.disp(separator.join(list_no_pretty))
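map() returns a lazy iterator in Python 3, which the ported code materialises with list() before joining. Strictly speaking str.join() accepts any iterable, so the conversion is optional here; a brief comparison:

def pretty(xml):
    return xml.strip()                              # stand-in for the real pretty-printer

fragments = ["<a/> ", " <b/>"]
print("\n".join(list(map(pretty, fragments))))      # what the ported code does
print("\n".join(map(pretty, fragments)))            # equivalent: join() consumes the iterator
print("\n".join(pretty(f) for f in fragments))      # idiomatic generator expression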
--- a/sat_frontends/jp/output_xmlui.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/output_xmlui.py Tue Aug 13 19:08:41 2019 +0200 @@ -34,15 +34,15 @@ def __init__(self, host): self.host = host - host.register_output(C.OUTPUT_XMLUI, u"simple", self.xmlui, default=True) + host.register_output(C.OUTPUT_XMLUI, "simple", self.xmlui, default=True) host.register_output( - C.OUTPUT_LIST_XMLUI, u"simple", self.xmlui_list, default=True + C.OUTPUT_LIST_XMLUI, "simple", self.xmlui_list, default=True ) def xmlui(self, data): xmlui = xmlui_manager.create(self.host, data) xmlui.show(values_only=True, read_only=True) - self.host.disp(u"") + self.host.disp("") def xmlui_list(self, data): for d in data:
--- a/sat_frontends/jp/xml_tools.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/xml_tools.py Tue Aug 13 19:08:41 2019 +0200 @@ -33,12 +33,12 @@ from lxml import etree except ImportError: cmd.disp( - u'lxml module must be installed, please install it with "pip install lxml"', + 'lxml module must be installed, please install it with "pip install lxml"', error=True, ) cmd.host.quit(C.EXIT_ERROR) try: - if isinstance(raw_xml, basestring): + if isinstance(raw_xml, str): parser = etree.XMLParser(remove_blank_text=True) element = etree.fromstring(raw_xml, parser) else: @@ -47,7 +47,7 @@ if reraise: raise e cmd.parser.error( - _(u"Can't parse the payload XML in input: {msg}").format(msg=e) + _("Can't parse the payload XML in input: {msg}").format(msg=e) ) return element, etree @@ -59,7 +59,7 @@ """ if element.tag in ("item", "{http://jabber.org/protocol/pubsub}item"): if len(element) > 1: - cmd.disp(_(u"<item> can only have one child element (the payload)"), + cmd.disp(_("<item> can only have one child element (the payload)"), error=True) cmd.host.quit(C.EXIT_DATA_ERROR) element = element[0]
--- a/sat_frontends/jp/xmlui_manager.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/jp/xmlui_manager.py Tue Aug 13 19:08:41 2019 +0200 @@ -60,7 +60,7 @@ class Widget(Base): - category = u"widget" + category = "widget" enabled = True @property @@ -89,8 +89,8 @@ if self.host.verbosity: to_disp = [ A.FG_MAGENTA, - u" " if elems else u"", - u"({})".format(value), + " " if elems else "", + "({})".format(value), A.RESET, ] if elems is None: @@ -166,11 +166,11 @@ @property def inline(self): - return u"inline" in self.style + return "inline" in self.style @property def no_select(self): - return u"noselect" in self.style + return "noselect" in self.style class EmptyWidget(xmlui_base.EmptyWidget, Widget): @@ -179,18 +179,18 @@ Widget.__init__(self, xmlui_parent) def show(self): - self.host.disp(u'') + self.host.disp('') class TextWidget(xmlui_base.TextWidget, ValueWidget): - type = u"text" + type = "text" def show(self): self.host.disp(self.value) class LabelWidget(xmlui_base.LabelWidget, ValueWidget): - type = u"label" + type = "label" @property def for_name(self): @@ -199,7 +199,7 @@ except AttributeError: return None - def show(self, no_lf=False, ansi=u""): + def show(self, no_lf=False, ansi=""): """show label @param no_lf(bool): same as for [JP.disp] @@ -209,10 +209,10 @@ class JidWidget(xmlui_base.JidWidget, TextWidget): - type = u"jid" + type = "jid" class StringWidget(xmlui_base.StringWidget, InputWidget): - type = u"string" + type = "string" def show(self): if self.read_only or self.root.read_only: @@ -221,9 +221,9 @@ elems = [] self.verboseName(elems) if self.value: - elems.append(_(u"(enter: {default})").format(default=self.value)) - elems.extend([C.A_HEADER, u"> "]) - value = raw_input(A.color(*elems).encode('utf-8')) + elems.append(_("(enter: {default})").format(default=self.value)) + elems.extend([C.A_HEADER, "> "]) + value = input(A.color(*elems).encode('utf-8')) if value: # TODO: empty value should be possible # an escape key should be used for default instead of enter with empty value @@ -231,11 +231,11 @@ class JidInputWidget(xmlui_base.JidInputWidget, StringWidget): - type = u"jid_input" + type = "jid_input" class TextBoxWidget(xmlui_base.TextWidget, StringWidget): - type = u"textbox" + type = "textbox" # TODO: use a more advanced input method def show(self): @@ -244,25 +244,25 @@ self.disp(self.value) else: if self.value: - self.disp(A.color(C.A_HEADER, u"↓ current value ↓\n", A.FG_CYAN, self.value, + self.disp(A.color(C.A_HEADER, "↓ current value ↓\n", A.FG_CYAN, self.value, "")) values = [] while True: try: if not values: - line = raw_input(A.color(C.A_HEADER, u"[Ctrl-D to finish]> ")) + line = input(A.color(C.A_HEADER, "[Ctrl-D to finish]> ")) else: - line = raw_input() + line = input() values.append(line) except EOFError: break - self.value = u'\n'.join(values).rstrip() + self.value = '\n'.join(values).rstrip() class XHTMLBoxWidget(xmlui_base.XHTMLBoxWidget, StringWidget): - type = u"xhtmlbox" + type = "xhtmlbox" def show(self): # FIXME: we use bridge in a blocking way as permitted by python-dbus @@ -274,7 +274,7 @@ class ListWidget(xmlui_base.ListWidget, OptionsWidget): - type = u"list" + type = "list" # TODO: handle flags, notably multi def show(self): @@ -291,7 +291,7 @@ for idx, (value, label) in enumerate(self.options): elems = [] if not self.root.read_only: - elems.extend([C.A_SUBHEADER, unicode(idx), A.RESET, u": "]) + elems.extend([C.A_SUBHEADER, str(idx), A.RESET, ": "]) elems.append(label) self.verboseName(elems, value) self.disp(A.color(*elems)) @@ -308,8 
+308,8 @@ choice = None limit_max = len(self.options) - 1 while choice is None or choice < 0 or choice > limit_max: - choice = raw_input( - A.color(C.A_HEADER, _(u"your choice (0-{max}): ").format(max=limit_max)) + choice = input( + A.color(C.A_HEADER, _("your choice (0-{max}): ").format(max=limit_max)) ) try: choice = int(choice) @@ -320,25 +320,25 @@ class BoolWidget(xmlui_base.BoolWidget, InputWidget): - type = u"bool" + type = "bool" def show(self): - disp_true = A.color(A.FG_GREEN, u"TRUE") - disp_false = A.color(A.FG_RED, u"FALSE") + disp_true = A.color(A.FG_GREEN, "TRUE") + disp_false = A.color(A.FG_RED, "FALSE") if self.read_only or self.root.read_only: self.disp(disp_true if self.value else disp_false) else: - self.disp(A.color(C.A_HEADER, u"0: ", + self.disp(A.color(C.A_HEADER, "0: ", disp_false, A.RESET, - u" *" if not self.value else u"")) - self.disp(A.color(C.A_HEADER, u"1: ", + " *" if not self.value else "")) + self.disp(A.color(C.A_HEADER, "1: ", disp_true, A.RESET, - u" *" if self.value else u"")) + " *" if self.value else "")) choice = None while choice not in ("0", "1"): - elems = [C.A_HEADER, _(u"your choice (0,1): ")] + elems = [C.A_HEADER, _("your choice (0,1): ")] self.verboseName(elems) - choice = raw_input(A.color(*elems)) + choice = input(A.color(*elems)) self.value = bool(int(choice)) self.disp("") @@ -350,7 +350,7 @@ class Container(Base): - category = u"container" + category = "container" def __init__(self, xmlui_parent): super(Container, self).__init__(xmlui_parent) @@ -371,22 +371,22 @@ class VerticalContainer(xmlui_base.VerticalContainer, Container): - type = u"vertical" + type = "vertical" class PairsContainer(xmlui_base.PairsContainer, Container): - type = u"pairs" + type = "pairs" class LabelContainer(xmlui_base.PairsContainer, Container): - type = u"label" + type = "label" def show(self): for child in self.children: no_lf = False # we check linked widget type # to see if we want the label on the same line or not - if child.type == u"label": + if child.type == "label": for_name = child.for_name if for_name: for_widget = self.root.widgets[for_name] @@ -535,7 +535,7 @@ def _launchActionCb(self, data): XMLUIPanel._actions -= 1 assert XMLUIPanel._actions >= 0 - if u"xmlui" in data: + if "xmlui" in data: xmlui_raw = data["xmlui"] xmlui = create(self.host, xmlui_raw) xmlui.show() @@ -557,7 +557,7 @@ callback=self._launchActionCb, errback=partial( self.command.errback, - msg=_(u"can't launch XMLUI action: {}"), + msg=_("can't launch XMLUI action: {}"), exit_code=C.EXIT_BRIDGE_ERRBACK, ), )
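raw_input() no longer exists; input() reads a line and returns str. Note that the ported StringWidget.show() still calls .encode('utf-8') on its prompt, and input() in Python 3 would then display the bytes repr (b'...') instead of the coloured text, so passing the str directly is the safer form. A minimal sketch with an illustrative prompt:

prompt = "\x1b[1m> \x1b[0m"          # ANSI-coloured prompt, already a str in Python 3

value = input(prompt)                # replaces raw_input(); no .encode() needed, returns str
print("you typed:", value)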
--- a/sat_frontends/primitivus/chat.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/primitivus/chat.py Tue Aug 13 19:08:41 2019 +0200 @@ -35,7 +35,7 @@ import bisect -OCCUPANTS_FOOTER = _(u"{} occupants") +OCCUPANTS_FOOTER = _("{} occupants") class MessageWidget(urwid.WidgetWrap, quick_chat.MessageWidget): @@ -113,16 +113,16 @@ # message status if d.status is None: - markup.append(u" ") + markup.append(" ") elif d.status == "delivered": - markup.append(("msg_status_received", u"✔")) + markup.append(("msg_status_received", "✔")) else: - log.warning(u"Unknown status: {}".format(d.status)) + log.warning("Unknown status: {}".format(d.status)) # timestamp if self.parent.show_timestamp: attr = "msg_mention" if mention else "date" - markup.append((attr, u"[{}]".format(d.time_text))) + markup.append((attr, "[{}]".format(d.time_text))) else: if mention: markup.append(("msg_mention", "[*]")) @@ -134,13 +134,13 @@ ) else: markup.append( - ("my_nick" if d.own_mess else "other_nick", u"[{}] ".format(d.nick or "")) + ("my_nick" if d.own_mess else "other_nick", "[{}] ".format(d.nick or "")) ) msg = self.message # needed to generate self.selected_lang if d.selected_lang: - markup.append(("msg_lang", u"[{}] ".format(d.selected_lang))) + markup.append(("msg_lang", "[{}] ".format(d.selected_lang))) # message body markup.append(msg) @@ -171,6 +171,9 @@ ) super(OccupantWidget, self).__init__(text) + def __hash__(self): + return id(self) + def __eq__(self, other): if other is None: return False @@ -218,11 +221,11 @@ o = self.occupant_data markup = [] markup.append( - ("info_msg", u"{}{} ".format(o.role[0].upper(), o.affiliation[0].upper())) + ("info_msg", "{}{} ".format(o.role[0].upper(), o.affiliation[0].upper())) ) markup.append(o.nick) if o.state is not None: - markup.append(u" {}".format(C.CHAT_STATE_ICON[o.state])) + markup.append(" {}".format(C.CHAT_STATE_ICON[o.state])) return markup # events @@ -240,7 +243,7 @@ urwid.ListBox(self.occupants_walker), footer=self.occupants_footer ) super(OccupantsWidget, self).__init__(occupants_widget) - occupants_list = sorted(self.parent.occupants.keys(), key=lambda o: o.lower()) + occupants_list = sorted(list(self.parent.occupants.keys()), key=lambda o: o.lower()) for occupant in occupants_list: occupant_data = self.parent.occupants[occupant] self.occupants_walker.append(OccupantWidget(occupant_data)) @@ -253,7 +256,7 @@ txt = OCCUPANTS_FOOTER.format(len(self.parent.occupants)) self.occupants_footer.set_text(txt) - def getNicks(self, start=u""): + def getNicks(self, start=""): """Return nicks of all occupants @param start(unicode): only return nicknames which start with this text @@ -281,16 +284,16 @@ class Chat(PrimitivusWidget, quick_chat.QuickChat): def __init__(self, host, target, type_=C.CHAT_ONE2ONE, nick=None, occupants=None, subject=None, profiles=None): - quick_chat.QuickChat.__init__( - self, host, target, type_, nick, occupants, subject, profiles=profiles - ) self.filters = [] # list of filter callbacks to apply self.mess_walker = urwid.SimpleListWalker([]) self.mess_widgets = urwid.ListBox(self.mess_walker) self.chat_widget = urwid.Frame(self.mess_widgets) self.chat_colums = urwid.Columns([("weight", 8, self.chat_widget)]) self.pile = urwid.Pile([self.chat_colums]) - PrimitivusWidget.__init__(self, self.pile, self.target) + PrimitivusWidget.__init__(self, self.pile, target) + quick_chat.QuickChat.__init__( + self, host, target, type_, nick, occupants, subject, profiles=profiles + ) # we must adapt the behaviour with the type if type_ == C.CHAT_GROUP: 
@@ -365,7 +368,7 @@ if word_idx == len(words): word_idx = 0 word = completion_data["last_word"] = words[word_idx] - return u"{}{}{}".format(text[: space + 1], word, ": " if space < 0 else "") + return "{}{}{}".format(text[: space + 1], word, ": " if space < 0 else "") def getMenu(self): """Return Menu bar""" @@ -479,7 +482,7 @@ if wid.mess_data.mention: from_jid = wid.mess_data.from_jid msg = _( - u"You have been mentioned by {nick} in {room}".format( + "You have been mentioned by {nick} in {room}".format( nick=wid.mess_data.nick, room=self.target ) ) @@ -490,7 +493,7 @@ return elif self.type == C.CHAT_ONE2ONE: from_jid = wid.mess_data.from_jid - msg = _(u"{entity} is talking to you".format(entity=from_jid)) + msg = _("{entity} is talking to you".format(entity=from_jid)) self.host.notify( C.NOTIFY_MESSAGE, from_jid, msg, widget=self, profile=self.profile ) @@ -557,7 +560,7 @@ """Set title for a group chat""" quick_chat.QuickChat.setSubject(self, subject) self.subj_wid = urwid.Text( - unicode(subject.replace("\n", "|") if wrap == "clip" else subject), + str(subject.replace("\n", "|") if wrap == "clip" else subject), align="left" if wrap == "clip" else "center", wrap=wrap, ) @@ -573,7 +576,7 @@ """ if clear: del self.mess_walker[:] - for message in self.messages.itervalues(): + for message in self.messages.values(): self.appendMessage(message, minor_notifs=False) def redraw(self): @@ -589,13 +592,13 @@ if filters and "search" in filters: self.mess_walker.append( urwid.Text( - _(u"Results for searching the globbing pattern: {}").format( + _("Results for searching the globbing pattern: {}").format( filters["search"] ) ) ) self.mess_walker.append( - urwid.Text(_(u"Type ':history <lines>' to reset the chat history")) + urwid.Text(_("Type ':history <lines>' to reset the chat history")) ) super(Chat, self).updateHistory(size, filters, profile) @@ -675,8 +678,8 @@ def onSubjectDialog(self, new_subject=None): dialog = sat_widgets.InputDialog( - _(u"Change title"), - _(u"Enter the new title"), + _("Change title"), + _("Enter the new title"), default_txt=new_subject if new_subject is not None else self.subject, ) dialog.setCallback("ok", self._onSubjectDialogCb, dialog)
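The new OccupantWidget.__hash__ above is needed because Python 3 sets __hash__ to None on any class that defines __eq__ without also defining __hash__, which would make its instances unhashable. A minimal illustration of the rule:

class Occupant:
    def __init__(self, nick):
        self.nick = nick

    def __eq__(self, other):
        return other is not None and self.nick == other.nick

    # without this, Python 3 sets __hash__ = None and instances can't go in sets/dicts
    def __hash__(self):
        return id(self)

occupants = {Occupant("alice"), Occupant("bob")}
print(len(occupants))       # 2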
--- a/sat_frontends/primitivus/config.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/primitivus/config.py Tue Aug 13 19:08:41 2019 +0200 @@ -21,7 +21,7 @@ from sat_frontends.primitivus.constants import Const as C from sat_frontends.primitivus.keys import action_key_map -import ConfigParser +import configparser def applyConfig(host): @@ -29,11 +29,11 @@ raise: can raise various Exceptions if configuration is not good """ - config = ConfigParser.SafeConfigParser() + config = configparser.ConfigParser() config.read(C.CONFIG_FILES) try: options = config.items(C.CONFIG_SECTION) - except ConfigParser.NoSectionError: + except configparser.NoSectionError: options = [] shortcuts = {} for name, value in options:
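On the Python 3 side, SafeConfigParser survives only as a deprecated alias of ConfigParser (and disappears in later releases), so the plain class is the safer choice. A minimal sketch of the same lookup, with an illustrative file and section name:

import configparser

config = configparser.ConfigParser()        # SafeConfigParser is only a deprecated alias
config.read(["sat.conf"])                   # illustrative file name
try:
    options = config.items("primitivus")    # illustrative section name
except configparser.NoSectionError:
    options = []
print(options)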
--- a/sat_frontends/primitivus/constants.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/primitivus/constants.py Tue Aug 13 19:08:41 2019 +0200 @@ -83,12 +83,12 @@ ("table_selected", "default, bold", "default"), ] PRESENCE = { - "unavailable": (u"⨯", "show_disconnected"), - "": (u"✔", "show_normal"), - "chat": (u"✆", "show_chat"), - "away": (u"✈", "show_away"), - "dnd": (u"✖", "show_dnd"), - "xa": (u"☄", "show_xa"), + "unavailable": ("⨯", "show_disconnected"), + "": ("✔", "show_normal"), + "chat": ("✆", "show_chat"), + "away": ("✈", "show_away"), + "dnd": ("✖", "show_dnd"), + "xa": ("☄", "show_xa"), } LOG_OPT_SECTION = APP_NAME.lower() LOG_OPT_OUTPUT = (
--- a/sat_frontends/primitivus/contact_list.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/primitivus/contact_list.py Tue Aug 13 19:08:41 2019 +0200 @@ -47,7 +47,7 @@ # we now build the widget self.status_bar = StatusBar(host) self.frame = sat_widgets.FocusFrame(self._buildList(), None, self.status_bar) - PrimitivusWidget.__init__(self, self.frame, _(u"Contacts")) + PrimitivusWidget.__init__(self, self.frame, _("Contacts")) if on_click: urwid.connect_signal(self, "click", on_click, user_data) if on_change: @@ -134,7 +134,7 @@ pass idx += 1 - log.debug(u"Not element found for {} in setFocus".format(text)) + log.debug("Not element found for {} in setFocus".format(text)) # events @@ -159,7 +159,7 @@ def onNotification(self, entity, notif, profile): notifs = list(self.host.getNotifs(C.ENTITY_ALL, profile=self.profile)) if notifs: - self.title_dynamic = u"({})".format(len(notifs)) + self.title_dynamic = "({})".format(len(notifs)) else: self.title_dynamic = None self.host.redraw() # FIXME: should not be necessary @@ -217,7 +217,7 @@ if show is None: show = C.PRESENCE_UNAVAILABLE show_icon, entity_attr = C.PRESENCE.get(show, ("", "default")) - markup.insert(0, u"{} ".format(show_icon)) + markup.insert(0, "{} ".format(show_icon)) else: entity_attr = "default" @@ -229,9 +229,9 @@ ) if notifs or mentions: attr = 'cl_mention' if mentions else 'cl_notifs' - header = [(attr, u"({})".format(len(notifs) + len(mentions))), u" "] + header = [(attr, "({})".format(len(notifs) + len(mentions))), " "] else: - header = u"" + header = "" markup.append((entity_attr, entity_txt)) if markup_prepend: @@ -333,7 +333,7 @@ content.append(urwid.Divider("=")) groups = list(self.contact_list._groups) - groups.sort(key=lambda x: x.lower() if x else x) + groups.sort(key=lambda x: x.lower() if x else '') for group in groups: data = self.contact_list.getGroupData(group) folded = data.get(C.GROUP_DATA_FOLDED, False)
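The sort key above changes from "x.lower() if x else x" to "x.lower() if x else ''" because Python 3 no longer orders unrelated types: comparing None with str during sort() raises TypeError. A short illustration:

groups = ["Friends", None, "work"]

# Python 2 ordered None before strings; Python 3 raises TypeError on str/None comparison,
# so the "no group" entry is mapped to an empty string for sorting purposes
groups.sort(key=lambda x: x.lower() if x else "")
print(groups)    # [None, 'Friends', 'work']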
--- a/sat_frontends/primitivus/game_tarot.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/primitivus/game_tarot.py Tue Aug 13 19:08:41 2019 +0200 @@ -154,20 +154,20 @@ suit = "A" color = "neutral" elif self.suit == "pique": - suit = u"♠" + suit = "♠" color = "black" elif self.suit == "trefle": - suit = u"♣" + suit = "♣" color = "black" elif self.suit == "coeur": - suit = u"♥" + suit = "♥" color = "red" elif self.suit == "carreau": - suit = u"♦" + suit = "♦" color = "red" if self.bout: color = "special" - return ("card_%s" % color, u"%s%s" % (value, suit)) + return ("card_%s" % color, "%s%s" % (value, suit)) def getWidget(self): """Return a widget representing the card""" @@ -256,12 +256,12 @@ def loadCards(self): """Load all the cards in memory""" QuickTarotGame.loadCards(self) - for value in map(str, range(1, 22)) + ["excuse"]: + for value in list(map(str, list(range(1, 22)))) + ["excuse"]: card = Card("atout", value) self.cards[card.suit, card.value] = card self.deck.append(card) for suit in ["pique", "coeur", "carreau", "trefle"]: - for value in map(str, range(1, 11)) + ["valet", "cavalier", "dame", "roi"]: + for value in list(map(str, list(range(1, 11)))) + ["valet", "cavalier", "dame", "roi"]: card = Card(suit, value) self.cards[card.suit, card.value] = card self.deck.append(card)
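In Python 2, map() and range() returned lists, so they could be concatenated with +; in Python 3 both are lazy, hence the list(...) wrapping above (the inner list(range(...)) added by 2to3 is redundant, since map() accepts any iterable). An equivalent, more compact form:

# the trump values "1".."21" plus the excuse, as loaded in the hunk above
atout_values = [str(n) for n in range(1, 22)] + ["excuse"]
print(atout_values[0], atout_values[-2], atout_values[-1])   # 1 21 excuse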
--- a/sat_frontends/primitivus/primitivus Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/primitivus/primitivus Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Primitivus: a SAT frontend
--- a/sat_frontends/primitivus/widget.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/primitivus/widget.py Tue Aug 13 19:08:41 2019 +0200 @@ -59,8 +59,8 @@ if self._title_dynamic: title_elts.append(self._title_dynamic) if len(all_profiles) > 1 and profiles: - title_elts.append(u"[{}]".format(u", ".join(profiles))) - return sat_widgets.SurroundedText(u" ".join(title_elts)) + title_elts.append("[{}]".format(", ".join(profiles))) + return sat_widgets.SurroundedText(" ".join(title_elts)) @title.setter def title(self, value):
--- a/sat_frontends/primitivus/xmlui.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/primitivus/xmlui.py Tue Aug 13 19:08:41 2019 +0200 @@ -65,18 +65,18 @@ class PrimitivusDividerWidget(xmlui.DividerWidget, urwid.Divider): def __init__(self, _xmlui_parent, style="line"): if style == "line": - div_char = u"─" + div_char = "─" elif style == "dot": - div_char = u"·" + div_char = "·" elif style == "dash": - div_char = u"-" + div_char = "-" elif style == "plain": - div_char = u"█" + div_char = "█" elif style == "blank": div_char = " " else: log.warning(_("Unknown div_char")) - div_char = u"─" + div_char = "─" urwid.Divider.__init__(self, div_char)
--- a/sat_frontends/quick_frontend/constants.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/constants.py Tue Aug 13 19:08:41 2019 +0200 @@ -66,12 +66,12 @@ # Chats CHAT_STATE_ICON = { - "": u" ", - "active": u"✔", - "inactive": u"☄", - "gone": u"✈", - "composing": u"✎", - "paused": u"…", + "": " ", + "active": "✔", + "inactive": "☄", + "gone": "✈", + "composing": "✎", + "paused": "…", } # Blogs
--- a/sat_frontends/quick_frontend/quick_app.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_app.py Tue Aug 13 19:08:41 2019 +0200 @@ -40,12 +40,12 @@ try: # FIXME: to be removed when an acceptable solution is here - unicode("") # XXX: unicode doesn't exist in pyjamas + str("") # XXX: unicode doesn't exist in pyjamas except ( TypeError, AttributeError, ): # Error raised is not the same depending on pyjsbuild options - unicode = str + str = str class ProfileManager(object): @@ -139,7 +139,7 @@ ) def _plug_profile_getFeaturesEb(self, failure): - log.error(u"Couldn't get features: {}".format(failure)) + log.error("Couldn't get features: {}".format(failure)) self._plug_profile_getFeaturesCb({}) def _plug_profile_getFeaturesCb(self, features): @@ -154,14 +154,14 @@ self._plug_profile_gotCachedValues({}) def _plug_profile_failedCachedValues(self, failure): - log.error(u"Couldn't get cached values: {}".format(failure)) + log.error("Couldn't get cached values: {}".format(failure)) self._plug_profile_gotCachedValues({}) def _plug_profile_gotCachedValues(self, cached_values): contact_list = self.host.contact_lists[self.profile] # add the contact list and its listener - for entity_s, data in cached_values.iteritems(): - for key, value in data.iteritems(): + for entity_s, data in cached_values.items(): + for key, value in data.items(): self.host.entityDataUpdatedHandler(entity_s, key, value, self.profile) if not self.connected: @@ -203,7 +203,7 @@ for contact in presences: for res in presences[contact]: - jabber_id = (u"%s/%s" % (jid.JID(contact).bare, res)) if res else contact + jabber_id = ("%s/%s" % (jid.JID(contact).bare, res)) if res else contact show = presences[contact][res][0] priority = presences[contact][res][1] statuses = presences[contact][res][2] @@ -216,7 +216,7 @@ self.profile, callback=lambda data, contact=contact: gotEntityData(data, contact), errback=lambda failure, contact=contact: log.debug( - u"No cache data for {}".format(contact) + "No cache data for {}".format(contact) ), ) @@ -238,7 +238,7 @@ return profile in self._profiles def __iter__(self): - return self._profiles.iterkeys() + return iter(self._profiles.keys()) def __getitem__(self, profile): return self._profiles[profile] @@ -246,11 +246,11 @@ def __len__(self): return len(self._profiles) - def iteritems(self): - return self._profiles.iteritems() + def items(self): + return self._profiles.items() - def itervalues(self): - return self._profiles.itervalues() + def values(self): + return self._profiles.values() def plug(self, profile): if profile in self._profiles: @@ -271,7 +271,7 @@ del self._profiles[profile] def chooseOneProfile(self): - return self._profiles.keys()[0] + return list(self._profiles.keys())[0] class QuickApp(object): @@ -350,13 +350,13 @@ self.ns_map = ns_map def _namespacesGetEb(self, failure_): - log.error(_(u"Can't get namespaces map: {msg}").format(msg=failure_)) + log.error(_("Can't get namespaces map: {msg}").format(msg=failure_)) def _encryptionPluginsGetCb(self, plugins): self.encryption_plugins = plugins def _encryptionPluginsGetEb(self, failure_): - log.warning(_(u"Can't retrieve encryption plugins: {msg}").format(msg=failure_)) + log.warning(_("Can't retrieve encryption plugins: {msg}").format(msg=failure_)) def onBridgeConnected(self): self.bridge.namespacesGet( @@ -402,13 +402,13 @@ def _bridgeEb(self, failure): if isinstance(failure, exceptions.BridgeExceptionNoService): - print(_(u"Can't connect to SàT backend, are you sure it's launched ?")) + print((_("Can't 
connect to SàT backend, are you sure it's launched ?"))) sys.exit(1) elif isinstance(failure, exceptions.BridgeInitError): - print(_(u"Can't init bridge")) + print((_("Can't init bridge"))) sys.exit(1) else: - print(_(u"Error while initialising bridge: {}".format(failure))) + print((_("Error while initialising bridge: {}".format(failure)))) @property def current_profile(self): @@ -459,10 +459,10 @@ or if connection has been lost and a reconnection is needed """ if state: - log.debug(u"we are synchronised with server") + log.debug("we are synchronised with server") if self.AUTO_RESYNC: # we are resynchronising all widgets - log.debug(u"doing a full widgets resynchronisation") + log.debug("doing a full widgets resynchronisation") for w in self.widgets: try: resync = w.resync @@ -474,7 +474,7 @@ self._sync = state else: - log.debug(u"we have lost synchronisation with server") + log.debug("we have lost synchronisation with server") self._sync = state # we've lost synchronisation, all widgets must be notified # note: this is always called independently of AUTO_RESYNC @@ -496,7 +496,7 @@ @param with_profile (boolean): True if the signal concerns a specific profile, in that case the profile name has to be passed by the caller """ - log.debug(u"registering signal {name}".format(name=function_name)) + log.debug("registering signal {name}".format(name=function_name)) if handler is None: handler = getattr(self, "{}{}".format(function_name, "Handler")) if not with_profile: @@ -582,7 +582,7 @@ pass else: profile = kwargs.get("profile") - for listener, profiles_filter in listeners.iteritems(): + for listener, profiles_filter in listeners.items(): if profile is None or not profiles_filter or profile in profiles_filter: listener(*args, **kwargs) @@ -617,7 +617,7 @@ cached_signals = self.signals_cache.pop(profile, []) for function_name, handler, args, kwargs in cached_signals: log.debug( - u"Calling cached signal [%s] with args %s and kwargs %s" + "Calling cached signal [%s] with args %s and kwargs %s" % (function_name, args, kwargs) ) handler(*args, **kwargs) @@ -632,7 +632,7 @@ if not errback: def errback(failure): - log.error(_(u"Can't connect profile [%s]") % failure) + log.error(_("Can't connect profile [%s]") % failure) try: module = failure.module except AttributeError: @@ -790,8 +790,8 @@ widget.onMessageState(uid, status, profile) def messageSend(self, to_jid, message, subject=None, mess_type="auto", extra=None, callback=None, errback=None, profile_key=C.PROF_KEY_NONE): - if not subject and not extra and (not message or message == {u'': u''}): - log.debug(u"Not sending empty message") + if not subject and not extra and (not message or message == {'': ''}): + log.debug("Not sending empty message") return if subject is None: @@ -812,7 +812,7 @@ return self.bridge.messageSend( - unicode(to_jid), + str(to_jid), message, subject, mess_type, @@ -857,8 +857,8 @@ def mucRoomJoinedHandler(self, room_jid_s, occupants, user_nick, subject, profile): """Called when a MUC room is joined""" log.debug( - u"Room [{room_jid}] joined by {profile}, users presents:{users}".format( - room_jid=room_jid_s, profile=profile, users=occupants.keys() + "Room [{room_jid}] joined by {profile}, users presents:{users}".format( + room_jid=room_jid_s, profile=profile, users=list(occupants.keys()) ) ) room_jid = jid.JID(room_jid_s) @@ -876,7 +876,7 @@ def mucRoomLeftHandler(self, room_jid_s, profile): """Called when a MUC room is left""" log.debug( - u"Room [%(room_jid)s] left by %(profile)s" + "Room [%(room_jid)s] left by 
%(profile)s" % {"room_jid": room_jid_s, "profile": profile} ) room_jid = jid.JID(room_jid_s) @@ -893,7 +893,7 @@ ) chat_widget.changeUserNick(old_nick, new_nick) log.debug( - u"user [%(old_nick)s] is now known as [%(new_nick)s] in room [%(room_jid)s]" + "user [%(old_nick)s] is now known as [%(new_nick)s] in room [%(room_jid)s]" % {"old_nick": old_nick, "new_nick": new_nick, "room_jid": room_jid} ) @@ -905,7 +905,7 @@ ) chat_widget.setSubject(subject) log.debug( - u"new subject for room [%(room_jid)s]: %(subject)s" + "new subject for room [%(room_jid)s]: %(subject)s" % {"room_jid": room_jid, "subject": subject} ) @@ -974,7 +974,7 @@ main_notif_dict = self.profiles[profile].notifications if entity is C.ENTITY_ALL: - selected_notifs = main_notif_dict.itervalues() + selected_notifs = iter(main_notif_dict.values()) exact_jid = False else: if entity is None: @@ -989,7 +989,7 @@ for notifs_from_select in selected_notifs: if type_ is None: - type_notifs = notifs_from_select.itervalues() + type_notifs = iter(notifs_from_select.values()) else: type_notifs = (notifs_from_select.get(type_, []),) @@ -1080,10 +1080,10 @@ callbacks.append((callback, errback)) def progressStartedHandler(self, pid, metadata, profile): - log.info(u"Progress {} started".format(pid)) + log.info("Progress {} started".format(pid)) def progressFinishedHandler(self, pid, metadata, profile): - log.info(u"Progress {} finished".format(pid)) + log.info("Progress {} finished".format(pid)) try: callbacks = self._progress_ids.pop(pid) except KeyError: @@ -1095,7 +1095,7 @@ self.callListeners("progressFinished", pid, metadata, profile=profile) def progressErrorHandler(self, pid, err_msg, profile): - log.warning(u"Progress {pid} error: {err_msg}".format(pid=pid, err_msg=err_msg)) + log.warning("Progress {pid} error: {err_msg}".format(pid=pid, err_msg=err_msg)) try: callbacks = self._progress_ids.pop(pid) except KeyError: @@ -1109,7 +1109,7 @@ def _subscribe_cb(self, answer, data): entity, profile = data type_ = "subscribed" if answer else "unsubscribed" - self.bridge.subscription(type_, unicode(entity.bare), profile_key=profile) + self.bridge.subscription(type_, str(entity.bare), profile_key=profile) def subscribeHandler(self, type, raw_jid, profile): """Called when a subsciption management signal is received""" @@ -1118,18 +1118,18 @@ # this is a subscription confirmation, we just have to inform user # TODO: call self.getEntityMBlog to add the new contact blogs self.showDialog( - _(u"The contact {contact} has accepted your subscription").format( + _("The contact {contact} has accepted your subscription").format( contact=entity.bare ), - _(u"Subscription confirmation"), + _("Subscription confirmation"), ) elif type == "unsubscribed": # this is a subscription refusal, we just have to inform user self.showDialog( - _(u"The contact {contact} has refused your subscription").format( + _("The contact {contact} has refused your subscription").format( contact=entity.bare ), - _(u"Subscription refusal"), + _("Subscription refusal"), "error", ) elif type == "subscribe": @@ -1137,8 +1137,8 @@ # TODO: use sat.stdui.ui_contact_list to display the groups selector self.showDialog( _( - u"The contact {contact} wants to subscribe to your presence" - u".\nDo you accept ?" + "The contact {contact} wants to subscribe to your presence" + ".\nDo you accept ?" 
).format(contact=entity.bare), _("Subscription confirmation"), "yes/no", @@ -1147,11 +1147,11 @@ ) def _debugHandler(self, action, parameters, profile): - if action == u"widgets_dump": + if action == "widgets_dump": from pprint import pformat - log.info(u"Widgets dump:\n{data}".format(data=pformat(self.widgets._widgets))) + log.info("Widgets dump:\n{data}".format(data=pformat(self.widgets._widgets))) else: - log.warning(u"Unknown debug action: {action}".format(action=action)) + log.warning("Unknown debug action: {action}".format(action=action)) def showDialog(self, message, title, type="info", answer_cb=None, answer_data=None): @@ -1178,11 +1178,11 @@ pass # FIXME def dialogFailure(self, failure): - log.warning(u"Failure: {}".format(failure)) + log.warning("Failure: {}".format(failure)) def progressIdHandler(self, progress_id, profile): """Callback used when an action result in a progress id""" - log.info(u"Progress ID received: {}".format(progress_id)) + log.info("Progress ID received: {}".format(progress_id)) def isHidden(self): """Tells if the frontend window is hidden. @@ -1193,11 +1193,11 @@ def paramUpdateHandler(self, name, value, namespace, profile): log.debug( - _(u"param update: [%(namespace)s] %(name)s = %(value)s") + _("param update: [%(namespace)s] %(name)s = %(value)s") % {"namespace": namespace, "name": name, "value": value} ) if (namespace, name) == ("Connection", "JabberID"): - log.debug(_(u"Changing JID to %s") % value) + log.debug(_("Changing JID to %s") % value) self.profiles[profile].whoami = jid.JID(value) elif (namespace, name) == ("General", C.SHOW_OFFLINE_CONTACTS): self.contact_lists[profile].showOfflineContacts(C.bool(value)) @@ -1264,13 +1264,13 @@ # we ignore metadata action_data = { - k: v for k, v in action_data.iteritems() if not k.startswith("meta_") + k: v for k, v in action_data.items() if not k.startswith("meta_") } if action_data: raise exceptions.DataError( - u"Not all keys in action_data are managed ({keys})".format( - keys=", ".join(action_data.keys()) + "Not all keys in action_data are managed ({keys})".format( + keys=", ".join(list(action_data.keys())) ) ) @@ -1348,10 +1348,10 @@ def _avatarGetEb(self, failure_, entity, contact_list): # FIXME: bridge needs a proper error handling - if "NotFound" in unicode(failure_): - log.info(u"No avatar found for {entity}".format(entity=entity)) + if "NotFound" in str(failure_): + log.info("No avatar found for {entity}".format(entity=entity)) else: - log.warning(u"Can't get avatar: {}".format(failure_)) + log.warning("Can't get avatar: {}".format(failure_)) contact_list.setCache(entity, "avatar", self.getDefaultAvatar(entity)) def getAvatar( @@ -1383,7 +1383,7 @@ avatar = None if avatar is None: self.bridge.avatarGet( - unicode(entity), + str(entity), cache_only, hash_only, profile=profile, @@ -1413,7 +1413,7 @@ def onExit(self): """Must be called when the frontend is terminating""" to_unplug = [] - for profile, profile_manager in self.profiles.iteritems(): + for profile, profile_manager in self.profiles.items(): if profile_manager.connected and profile_manager.autodisconnect: # The user wants autodisconnection self.disconnect(profile)
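The profile container above replaces its iteritems/itervalues wrappers with plain items()/values() and builds its iterator and "choose one" helper from dict views, since the iter* methods and list-returning keys() are gone in Python 3. A minimal sketch of such a mapping-like wrapper (class and method names are illustrative):

class Profiles:
    """Thin mapping-like wrapper over a dict of profiles (illustrative)."""

    def __init__(self):
        self._profiles = {}

    def plug(self, profile):
        self._profiles[profile] = object()      # placeholder for a ProfileManager

    def __iter__(self):
        return iter(self._profiles)             # iterates over keys, like iterkeys() did

    def items(self):
        return self._profiles.items()           # a view, cheap and directly iterable

    def values(self):
        return self._profiles.values()

    def choose_one(self):
        return next(iter(self._profiles))       # dict.keys() is not indexable in Python 3

profiles = Profiles()
profiles.plug("default")
print(list(profiles), profiles.choose_one())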
--- a/sat_frontends/quick_frontend/quick_blog.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_blog.py Tue Aug 13 19:08:41 2019 +0200 @@ -30,12 +30,12 @@ try: # FIXME: to be removed when an acceptable solution is here - unicode("") # XXX: unicode doesn't exist in pyjamas + str("") # XXX: unicode doesn't exist in pyjamas except ( TypeError, AttributeError, ): # Error raised is not the same depending on pyjsbuild options - unicode = str + str = str ENTRY_CLS = None COMMENTS_CLS = None @@ -266,7 +266,7 @@ for key in keys_to_keep: value = getattr(self.item, key) if value is not None: - mb_data[key] = unicode(value) + mb_data[key] = str(value) for prefix in ("content", "title"): for suffix in ("", "_rich", "_xhtml"): @@ -287,7 +287,7 @@ mb_data['groups'] = list(self.blog.targets) self.blog.host.bridge.mbSend( - unicode(self.service or ""), + str(self.service or ""), self.node or "", data_format.serialise(mb_data), profile=self.blog.profile, @@ -300,14 +300,14 @@ all children entries will be recursively removed too """ # XXX: named delete and not remove to avoid conflict with pyjamas - log.debug(u"deleting entry {}".format("EDIT ENTRY" if self.new else self.item.id)) + log.debug("deleting entry {}".format("EDIT ENTRY" if self.new else self.item.id)) for child in self.entries: child.delete() try: self.manager.entries.remove(self) except ValueError: if self != self.manager.edit_entry: - log.error(u"Internal Error: entry not found in manager") + log.error("Internal Error: entry not found in manager") else: self.manager.edit_entry = None if not self.new: @@ -328,12 +328,12 @@ # TODO: manage several comments nodes case. if self.item.comments: self.blog.host.bridge.psNodeDelete( - unicode(self.item.comments_service) or "", + str(self.item.comments_service) or "", self.item.comments_node, profile=self.blog.profile, ) self.blog.host.bridge.mbRetract( - unicode(self.service or ""), + str(self.service or ""), self.node or "", self.item.id, profile=self.blog.profile, @@ -358,10 +358,10 @@ quick_widgets.QuickWidget.__init__(self, host, targets, C.PROF_KEY_NONE) self._targets_type = C.ALL else: - assert isinstance(targets[0], basestring) + assert isinstance(targets[0], str) quick_widgets.QuickWidget.__init__(self, host, targets[0], C.PROF_KEY_NONE) for target in targets[1:]: - assert isinstance(target, basestring) + assert isinstance(target, str) self.addTarget(target) self._targets_type = C.GROUP @@ -375,7 +375,7 @@ raise ValueError("Unkown targets type") def __str__(self): - return u"Blog Widget [target: {}, profile: {}]".format( + return "Blog Widget [target: {}, profile: {}]".format( ", ".join(self.targets), self.profile ) @@ -436,7 +436,7 @@ own_pep = self.host.whoami.bare self.host.bridge.mbGetFromManyWithComments( C.JID, - (unicode(own_pep),), + (str(own_pep),), 10, 10, {}, @@ -446,7 +446,7 @@ ) else: raise NotImplementedError( - u"{} target type is not managed".format(self._targets_type) + "{} target type is not managed".format(self._targets_type) ) def isJidAccepted(self, jid_):
--- a/sat_frontends/quick_frontend/quick_chat.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_chat.py Tue Aug 13 19:08:41 2019 +0200 @@ -43,12 +43,12 @@ try: # FIXME: to be removed when an acceptable solution is here - unicode("") # XXX: unicode doesn't exist in pyjamas + str("") # XXX: unicode doesn't exist in pyjamas except ( TypeError, AttributeError, ): # Error raised is not the same depending on pyjsbuild options - unicode = str + str = str # FIXME: day_format need to be settable (i18n) @@ -78,23 +78,20 @@ ) # is user mentioned here ? if self.parent.type == C.CHAT_GROUP and not self.own_mess: - for m in msg.itervalues(): + for m in msg.values(): if self.parent.nick.lower() in m.lower(): self._mention = True break self.handleMe() self.widgets = set() # widgets linked to this message - def __unicode__(self): - return u"Message<{mess_type}> [{time}]{nick}> {message}".format( + def __str__(self): + return "Message<{mess_type}> [{time}]{nick}> {message}".format( mess_type=self.type, time=self.time_text, nick=self.nick, message=self.main_message) - def __str__(self): - return self.__unicode__().encode('utf-8', 'ignore') - @property def host(self): return self.parent.host @@ -126,28 +123,28 @@ return self.message[""] except KeyError: try: - lang, mess = self.message.iteritems().next() + lang, mess = next(iter(self.message.items())) self.selected_lang = lang return mess except StopIteration: - log.error(u"Can't find message for uid {}".format(self.uid)) + log.error("Can't find message for uid {}".format(self.uid)) return "" @property def main_message_xhtml(self): """rich message""" - xhtml = {k: v for k, v in self.extra.iteritems() if "html" in k} + xhtml = {k: v for k, v in self.extra.items() if "html" in k} if xhtml: # FIXME: we only return first found value for now - return next(xhtml.itervalues()) + return next(iter(xhtml.values())) @property def time_text(self): """Return timestamp in a nicely formatted way""" # if the message was sent before today, we print the full date timestamp = time.localtime(self.timestamp) - time_format = u"%c" if timestamp < self.parent.day_change else u"%H:%M" - return time.strftime(time_format, timestamp).decode(getlocale()[1] or "utf-8") + time_format = "%c" if timestamp < self.parent.day_change else "%H:%M" + return time.strftime(time_format, timestamp) @property def avatar(self): @@ -162,7 +159,7 @@ try: return self.extra["user_nick"] except KeyError: - log.error(u"extra data is missing user nick for uid {}".format(self.uid)) + log.error("extra data is missing user nick for uid {}".format(self.uid)) return "" # FIXME: converted getSpecials to list for pyjamas if self.parent.type == C.CHAT_GROUP or entity in list( @@ -197,8 +194,8 @@ # TODO: XHTML-IM /me are not handled me = False # we need to check /me for every message - for m in self.message.itervalues(): - if m.startswith(u"/me "): + for m in self.message.values(): + if m.startswith("/me "): me = True else: me = False @@ -207,8 +204,8 @@ self.type = C.MESS_TYPE_INFO self.extra["info_type"] = "me" nick = self.nick - for lang, mess in self.message.iteritems(): - self.message[lang] = u"* " + nick + mess[3:] + for lang, mess in self.message.items(): + self.message[lang] = "* " + nick + mess[3:] class MessageWidget(object): @@ -245,7 +242,7 @@ @property def jid(self): """jid in the room""" - return jid.JID(u"{}/{}".format(self.parent.target.bare, self.nick)) + return jid.JID("{}/{}".format(self.parent.target.bare, self.nick)) @property def real_jid(self): @@ -295,10 +292,10 @@ if 
type_ == C.CHAT_GROUP: if target.resource: raise exceptions.InternalError( - u"a group chat entity can't have a resource" + "a group chat entity can't have a resource" ) if nick is None: - raise exceptions.InternalError(u"nick must not be None for group chat") + raise exceptions.InternalError("nick must not be None for group chat") self.nick = nick self.occupants = {} @@ -306,7 +303,7 @@ else: if occupants is not None or nick is not None: raise exceptions.InternalError( - u"only group chat can have occupants or nick" + "only group chat can have occupants or nick" ) self.messages = OrderedDict() # key: uid, value: Message instance self.games = {} # key=game name (unicode), value=instance of quick_games.RoomGame @@ -334,25 +331,25 @@ # FIXME: we don't use getter/setter here because of pyjamas # TODO: use proper getter/setter once we get rid of pyjamas if self._locked: - log.warning(u"{wid} is already locked!".format(wid=self)) + log.warning("{wid} is already locked!".format(wid=self)) return self._locked = True # messageNew signals are cached when locked self._cache = OrderedDict() - log.debug(u"{wid} is now locked".format(wid=self)) + log.debug("{wid} is now locked".format(wid=self)) def setUnlocked(self): if not self._locked: - log.debug(u"{wid} was already unlocked".format(wid=self)) + log.debug("{wid} was already unlocked".format(wid=self)) return self._locked = False - for uid, data in self._cache.iteritems(): + for uid, data in self._cache.items(): if uid not in self.messages: self.messageNew(*data) else: - log.debug(u"discarding message already in history: {data}, ".format(data=data)) + log.debug("discarding message already in history: {data}, ".format(data=data)) del self._cache - log.debug(u"{wid} is now unlocked".format(wid=self)) + log.debug("{wid} is now unlocked".format(wid=self)) def postInit(self): """Method to be called by frontend after widget is initialised @@ -404,8 +401,8 @@ if self._resync_lock: return self._resync_lock = True - log.debug(u"resynchronising {self}".format(self=self)) - for mess in reversed(self.messages.values()): + log.debug("resynchronising {self}".format(self=self)) + for mess in reversed(list(self.messages.values())): if mess.type == C.MESS_TYPE_INFO: continue last_message = mess @@ -417,7 +414,7 @@ if self.type == C.CHAT_GROUP: self.occupantsClear() self.host.bridge.mucOccupantsGet( - unicode(self.target), self.profile, callback=self.updateOccupants, + str(self.target), self.profile, callback=self.updateOccupants, errback=log.error) self.historyPrint( size=C.HISTORY_LIMIT_NONE, @@ -427,15 +424,15 @@ ## Widget management ## - def __unicode__(self): - return u"Chat Widget [target: {}, type: {}, profile: {}]".format( + def __str__(self): + return "Chat Widget [target: {}, type: {}, profile: {}]".format( self.target, self.type, self.profile ) @staticmethod def getWidgetHash(target, profiles): profile = list(profiles)[0] - return profile + "\n" + unicode(target.bare) + return profile + "\n" + str(target.bare) @staticmethod def getPrivateHash(target, profile): @@ -443,7 +440,7 @@ This method should be used with force_hash to get unique widget for private MUC conversations """ - return (unicode(profile), target) + return (str(profile), target) def addTarget(self, target): super(QuickChat, self).addTarget(target) @@ -456,7 +453,7 @@ """copy important attribute for a new widget""" kwargs["type_"] = self.type if self.type == C.CHAT_GROUP: - kwargs["occupants"] = {o.nick: o.data for o in self.occupants.itervalues()} + kwargs["occupants"] = {o.nick: o.data for o 
in self.occupants.values()} kwargs["subject"] = self.subject try: kwargs["nick"] = self.nick @@ -492,7 +489,7 @@ def setOccupants(self, occupants): """Set the whole list of occupants""" assert len(self.occupants) == 0 - for nick, data in occupants.iteritems(): + for nick, data in occupants.items(): # XXX: this log is disabled because it's really too verbose # but kept commented as it may be useful for debugging # log.debug(u"adding occupant {nick} to {room}".format( @@ -510,12 +507,12 @@ updated_occupants = set(occupants) left_occupants = local_occupants - updated_occupants joined_occupants = updated_occupants - local_occupants - log.debug(u"updating occupants for {room}:\n" - u"left: {left_occupants}\n" - u"joined: {joined_occupants}" + log.debug("updating occupants for {room}:\n" + "left: {left_occupants}\n" + "joined: {joined_occupants}" .format(room=self.target, - left_occupants=u", ".join(left_occupants), - joined_occupants=u", ".join(joined_occupants))) + left_occupants=", ".join(left_occupants), + joined_occupants=", ".join(joined_occupants))) for nick in left_occupants: self.removeUser(occupants[nick]) for nick in joined_occupants: @@ -533,7 +530,7 @@ try: occupant = self.occupants.pop(nick) except KeyError: - log.warning(u"Trying to remove an unknown occupant: {}".format(nick)) + log.warning("Trying to remove an unknown occupant: {}".format(nick)) else: return occupant @@ -602,12 +599,12 @@ if filters is None: filters = {} if size == 0: - log.debug(u"Empty history requested, skipping") + log.debug("Empty history requested, skipping") self._onHistoryPrinted() return - log_msg = _(u"now we print the history") + log_msg = _("now we print the history") if size != C.HISTORY_LIMIT_DEFAULT: - log_msg += _(u" ({} messages)".format(size)) + log_msg += _(" ({} messages)".format(size)) log.debug(log_msg) if self.type == C.CHAT_ONE2ONE: @@ -661,17 +658,17 @@ callback() def _historyGetEb(err): - log.error(_(u"Can't get history: {}").format(err)) + log.error(_("Can't get history: {}").format(err)) self._onHistoryPrinted() if callback is not None: callback() self.host.bridge.historyGet( - unicode(self.host.profiles[profile].whoami.bare), - unicode(target), + str(self.host.profiles[profile].whoami.bare), + str(target), size, True, - {k: unicode(v) for k,v in filters.iteritems()}, + {k: str(v) for k,v in filters.items()}, profile, callback=_historyGetCb, errback=_historyGetEb, @@ -683,7 +680,7 @@ self.messageEncryptionStarted(session_data) def messageEncryptionGetEb(self, failure_): - log.error(_(u"Can't get encryption state: {reason}").format(reason=failure_)) + log.error(_("Can't get encryption state: {reason}").format(reason=failure_)) def getEncryptionState(self): """Retrieve encryption state with current target. 
@@ -693,7 +690,7 @@ """ if self.type == C.CHAT_GROUP: return - self.host.bridge.messageEncryptionGet(unicode(self.target.bare), self.profile, + self.host.bridge.messageEncryptionGet(str(self.target.bare), self.profile, callback=self.messageEncryptionGetCb, errback=self.messageEncryptionGetEb) @@ -715,7 +712,7 @@ return if not msg and not subject and type_ != C.MESS_TYPE_INFO: - log.warning(u"Received an empty message for uid {}".format(uid)) + log.warning("Received an empty message for uid {}".format(uid)) return if self.type == C.CHAT_GROUP: @@ -734,7 +731,7 @@ pass else: user_data = { - k[5:]: v for k, v in extra.iteritems() if k.startswith("user_") + k[5:]: v for k, v in extra.items() if k.startswith("user_") } if info_type == ROOM_USER_JOINED: self.addUser(user_data) @@ -747,18 +744,18 @@ self.messages[uid] = message if "received_timestamp" in extra: - log.warning(u"Delayed message received after history, this should not happen") + log.warning("Delayed message received after history, this should not happen") self.createMessage(message) def messageEncryptionStarted(self, session_data): self.encrypted = True - log.debug(_(u"message encryption started with {target} using {encryption}").format( - target=self.target, encryption=session_data[u'name'])) + log.debug(_("message encryption started with {target} using {encryption}").format( + target=self.target, encryption=session_data['name'])) def messageEncryptionStopped(self, session_data): self.encrypted = False - log.debug(_(u"message encryption stopped with {target} (was using {encryption})") - .format(target=self.target, encryption=session_data[u'name'])) + log.debug(_("message encryption stopped with {target} (was using {encryption})") + .format(target=self.target, encryption=session_data['name'])) def createMessage(self, message, append=False): """Must be implemented by frontend to create and show a new message widget @@ -810,12 +807,12 @@ count = wid.reentered_count = 1 nick = wid.mess_data.nick if message.info_type == ROOM_USER_LEFT: - wid.message = _(u"<= {nick} has left the room ({count})").format( + wid.message = _("<= {nick} has left the room ({count})").format( nick=nick, count=count ) else: wid.message = _( - u"<=> {nick} re-entered the room ({count})" + "<=> {nick} re-entered the room ({count})" ).format(nick=nick, count=count) wid.reentered_count += 1 return True @@ -845,7 +842,7 @@ This change the subject on the room itself (i.e. via XMPP), while setSubject change the subject of this widget """ - self.host.bridge.mucSubject(unicode(self.target), new_subject, self.profile) + self.host.bridge.mucSubject(str(self.target), new_subject, self.profile) def addGamePanel(self, widget): """Insert a game panel to this Chat dialog. 
@@ -879,7 +876,7 @@ self.occupants[nick].state = state except KeyError: log.warning( - u"{nick} not found in {room}, ignoring new chat state".format( + "{nick} not found in {room}, ignoring new chat state".format( nick=nick, room=self.target.bare ) ) @@ -902,7 +899,7 @@ # entity is not here anymore pass - for m in self.messages.values(): + for m in list(self.messages.values()): if m.nick == entity.resource: for w in m.widgets: w.update({"avatar": filename}) @@ -911,8 +908,8 @@ entity.bare == self.target.bare or entity.bare == self.host.profiles[profile].whoami.bare ): - log.info(u"avatar updated for {}".format(entity)) - for m in self.messages.values(): + log.info("avatar updated for {}".format(entity)) + for m in list(self.messages.values()): if m.from_jid.bare == entity.bare: for w in m.widgets: w.update({"avatar": filename})
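Note on the quick_chat.py hunks above: besides dropping the u"" prefixes and renaming __unicode__ to __str__, the port switches iteritems()/itervalues() to items()/values() and materialises dict views with list() before reversing or mutating them. A minimal standalone sketch of why the list() wrapping is needed (hypothetical data, not taken from the widget):

    # Why the port wraps dict views in list() before reversing (hypothetical data):
    messages = {"uid1": "hello", "uid2": "world"}

    # Python 2: messages.values() returned a list, so reversed() worked directly.
    # Python 3: .values() is a lazy view; reversed() on a plain-dict view only
    # works from Python 3.8 on, and mutating the dict while iterating a view
    # raises RuntimeError, so the view is materialised first.
    for mess in reversed(list(messages.values())):
        print(mess)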
--- a/sat_frontends/quick_frontend/quick_contact_list.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_contact_list.py Tue Aug 13 19:08:41 2019 +0200 @@ -32,7 +32,7 @@ try: # FIXME: to be removed when an acceptable solution is here - unicode("") # XXX: unicode doesn't exist in pyjamas + str("") # XXX: unicode doesn't exist in pyjamas except (TypeError, AttributeError): # Error raised is not the same depending on # pyjsbuild options # XXX: pyjamas' max doesn't support key argument, so we implement it ourself @@ -46,7 +46,7 @@ # next doesn't exist in pyjamas def next(iterable, *args): try: - return iterable.next() + return iterable.__next__() except StopIteration as e: if args: return args[0] @@ -184,7 +184,7 @@ @return (dict[jid.JID, set(unicode)]) """ result = {} - for group, data in self._groups.iteritems(): + for group, data in self._groups.items(): for entity in data["jids"]: result.setdefault(entity, set()).add(group) return result @@ -203,7 +203,7 @@ entities are not sorted """ - return self._cache.iteritems() + return iter(self._cache.items()) @property def items(self): @@ -214,7 +214,7 @@ """ return { jid_: cache - for jid_, cache in self._cache.iteritems() + for jid_, cache in self._cache.items() if self.entityVisible(jid_) } @@ -340,9 +340,9 @@ @raise ValueError: the entity is not bare """ if entity.resource: - raise ValueError(u"getFullJid must be used with a bare jid") + raise ValueError("getFullJid must be used with a bare jid") main_resource = self.getCache(entity, C.CONTACT_MAIN_RESOURCE) - return jid.JID(u"{}/{}".format(entity, main_resource)) + return jid.JID("{}/{}".format(entity, main_resource)) def setGroupData(self, group, name, value): """Register a data for a group @@ -508,7 +508,7 @@ if entity.resource else cache ) - for attribute, value in attributes.iteritems(): + for attribute, value in attributes.items(): if value is None: # XXX: pyjamas hack: we need to use pop instead of del try: @@ -596,7 +596,7 @@ try: groups = self._cache[entity_bare].get(C.CONTACT_GROUPS, set()) except KeyError: - log.error(_(u"Trying to delete an unknow entity [{}]").format(entity)) + log.error(_("Trying to delete an unknow entity [{}]").format(entity)) try: self._roster.remove(entity_bare) except KeyError: @@ -639,8 +639,8 @@ del cache[C.CONTACT_RESOURCES][entity.resource] except KeyError: log.error( - u"Presence unavailable received " - u"for an unknown resource [{}]".format(entity) + "Presence unavailable received " + "for an unknown resource [{}]".format(entity) ) if not cache[C.CONTACT_RESOURCES]: cache[C.CONTACT_MAIN_RESOURCE] = None @@ -648,8 +648,8 @@ if not entity.resource: log.warning( _( - u"received presence from entity " - u"without resource: {}".format(entity) + "received presence from entity " + "without resource: {}".format(entity) ) ) resources_data = cache[C.CONTACT_RESOURCES] @@ -703,12 +703,12 @@ try: cache = self._cache[entity.bare] except: - log.error(u"Try to unselect an entity not in cache") + log.error("Try to unselect an entity not in cache") else: try: cache[C.CONTACT_SELECTED].remove(entity.resource) except KeyError: - log.error(u"Try to unselect a not selected entity") + log.error("Try to unselect a not selected entity") else: self._selected.remove(entity) self.update([entity], C.UPDATE_SELECTION) @@ -721,15 +721,15 @@ """ if entity is None: self._selected.clear() - for cache in self._cache.itervalues(): + for cache in self._cache.values(): cache[C.CONTACT_SELECTED].clear() self.update(type_=C.UPDATE_SELECTION, profile=self.profile) 
else: - log.debug(u"select %s" % entity) + log.debug("select %s" % entity) try: cache = self._cache[entity.bare] except: - log.error(u"Try to select an entity not in cache") + log.error("Try to select an entity not in cache") else: cache[C.CONTACT_SELECTED].add(entity.resource) self._selected.add(entity) @@ -777,7 +777,7 @@ global handler if handler is not None: raise exceptions.InternalError( - u"QuickContactListHandler must be instanciated only once" + "QuickContactListHandler must be instanciated only once" ) handler = self self._clist = {} # key: profile, value: ProfileContactList @@ -794,7 +794,7 @@ @param entity (jid.JID): jid of the entity (resource is not ignored, use bare jid if needed) """ - for contact_list in self._clist.itervalues(): + for contact_list in self._clist.values(): if entity in contact_list: return True return False @@ -806,7 +806,7 @@ @return (set[jid.JID]) """ entities = set() - for contact_list in self._clist.itervalues(): + for contact_list in self._clist.values(): entities.update(contact_list.roster) return entities @@ -817,7 +817,7 @@ @return (set[jid.JID]) """ entities = set() - for contact_list in self._clist.itervalues(): + for contact_list in self._clist.values(): entities.update(contact_list.roster_connected) return entities @@ -829,7 +829,7 @@ @return (dict[unicode,set(jid.JID)]) """ groups = {} - for contact_list in self._clist.itervalues(): + for contact_list in self._clist.values(): groups.update(contact_list.roster_entities_by_group) return groups @@ -841,7 +841,7 @@ @return (dict[jid.JID, set(unicode)]) """ entities = {} - for contact_list in self._clist.itervalues(): + for contact_list in self._clist.values(): entities.update(contact_list.roster_groups_by_entities) return entities @@ -852,7 +852,7 @@ @return (set): set of selected entities """ entities = set() - for contact_list in self._clist.itervalues(): + for contact_list in self._clist.values(): entities.update(contact_list.selected) return entities @@ -862,7 +862,7 @@ items are unordered """ - for profile, contact_list in self._clist.iteritems(): + for profile, contact_list in self._clist.items(): for bare_jid, cache in contact_list.all_iter: data = cache.copy() data[C.CONTACT_PROFILE] = profile @@ -876,8 +876,8 @@ key: bare jid, value: data """ items = {} - for profile, contact_list in self._clist.iteritems(): - for bare_jid, cache in contact_list.items.iteritems(): + for profile, contact_list in self._clist.items(): + for bare_jid, cache in contact_list.items.items(): data = cache.copy() items[bare_jid] = data data[C.CONTACT_PROFILE] = profile @@ -953,7 +953,7 @@ @return (set[jid.JID]) """ entities = set() - for contact_list in self._clist.itervalues(): + for contact_list in self._clist.values(): entities.update(contact_list.getSpecialExtras(special_type)) return entities @@ -984,13 +984,13 @@ assert profile in self._clist to_fill.add(profile) else: - to_fill.update(self._clist.keys()) + to_fill.update(list(self._clist.keys())) remaining = to_fill.difference(filled) if remaining != to_fill: log.debug( - u"Not re-filling already filled contact list(s) for {}".format( - u", ".join(to_fill.intersection(filled)) + "Not re-filling already filled contact list(s) for {}".format( + ", ".join(to_fill.intersection(filled)) ) ) for profile in remaining: @@ -1001,17 +1001,17 @@ @param keep_cache: if True, don't reset the cache """ - for contact_list in self._clist.itervalues(): + for contact_list in self._clist.values(): contact_list.clearContacts(keep_cache) # we need a full update self.update() def 
select(self, entity): - for contact_list in self._clist.itervalues(): + for contact_list in self._clist.values(): contact_list.select(entity) def unselect(self, entity): - for contact_list in self._clist.itervalues(): + for contact_list in self._clist.values(): contact_list.select(entity) def lockUpdate(self, locked=True, do_update=True): @@ -1025,8 +1025,8 @@ what youa re doing! """ log.debug( - u"Contact lists updates are now {}".format( - u"LOCKED" if locked else u"UNLOCKED" + "Contact lists updates are now {}".format( + "LOCKED" if locked else "UNLOCKED" ) ) self._update_locked = locked
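Note on the quick_contact_list.py hunks above: the pyjamas next() shim now delegates to __next__(), matching the Python 3 iterator protocol where the next() method was renamed. A small self-contained illustration with a made-up iterator class (not from the codebase):

    # Python 3 renamed the iterator method next() to __next__():
    class Countdown:
        def __init__(self, start):
            self.current = start

        def __iter__(self):
            return self

        def __next__(self):            # was "def next(self)" in Python 2
            if self.current <= 0:
                raise StopIteration
            self.current -= 1
            return self.current + 1

    print(list(Countdown(3)))          # [3, 2, 1]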
--- a/sat_frontends/quick_frontend/quick_contact_management.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_contact_management.py Tue Aug 13 19:08:41 2019 +0200 @@ -41,7 +41,7 @@ def add(self, entity): """Add contact to the list, update resources""" - if not self.__contactlist.has_key(entity.bare): + if entity.bare not in self.__contactlist: self.__contactlist[entity.bare] = {"resources": []} if not entity.resource: return @@ -53,7 +53,7 @@ """Return all contacts which are in given group""" result = [] for contact in self.__contactlist: - if self.__contactlist[contact].has_key("groups"): + if "groups" in self.__contactlist[contact]: if group in self.__contactlist[contact]["groups"]: result.append(JID(contact)) return result @@ -63,11 +63,11 @@ @param entity: jid of the contact @param name: name of the attribute @return: asked attribute""" - if self.__contactlist.has_key(entity.bare): + if entity.bare in self.__contactlist: if name == "status": # FIXME: for the moment, we only use the first status if self.__contactlist[entity.bare]["statuses"]: - return self.__contactlist[entity.bare]["statuses"].values()[0] - if self.__contactlist[entity.bare].has_key(name): + return list(self.__contactlist[entity.bare]["statuses"].values())[0] + if name in self.__contactlist[entity.bare]: return self.__contactlist[entity.bare][name] else: log.debug(_("Trying to get attribute for an unknown contact")) @@ -75,7 +75,7 @@ def isConnected(self, entity): """Tell if the contact is online""" - return self.__contactlist.has_key(entity.bare) + return entity.bare in self.__contactlist def remove(self, entity): """remove resource. If no more resource is online or is no resource is specified, contact is deleted""" @@ -95,7 +95,7 @@ @param key: name of the attribute @param value: value of the attribute """ - if self.__contactlist.has_key(entity.bare): + if entity.bare in self.__contactlist: self.__contactlist[entity.bare][key] = value else: log.debug(_("Trying to update an unknown contact: %s") % entity.bare)
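Note on the quick_contact_management.py hunk above: dict.has_key() no longer exists in Python 3 and is replaced by the in operator, and .values() returns a non-indexable view, hence the list(...) wrapping before [0]. Standalone sketch with hypothetical data:

    # dict.has_key() is gone and dict views are not indexable:
    statuses = {"resource1": "available", "resource2": "away"}

    assert "resource1" in statuses               # replaces statuses.has_key(...)

    first_status = list(statuses.values())[0]    # replaces statuses.values()[0]
    # next(iter(statuses.values())) would avoid building the intermediate list
    print(first_status)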
--- a/sat_frontends/quick_frontend/quick_game_tarot.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_game_tarot.py Tue Aug 13 19:08:41 2019 +0200 @@ -33,15 +33,15 @@ for player in players: self.played[player] = None self.player_nick = parent.nick - self.bottom_nick = unicode(self.player_nick) + self.bottom_nick = str(self.player_nick) idx = self.players.index(self.player_nick) idx = (idx + 1) % len(self.players) - self.right_nick = unicode(self.players[idx]) + self.right_nick = str(self.players[idx]) idx = (idx + 1) % len(self.players) - self.top_nick = unicode(self.players[idx]) + self.top_nick = str(self.players[idx]) idx = (idx + 1) % len(self.players) - self.left_nick = unicode(self.players[idx]) - self.bottom_nick = unicode(self.player_nick) + self.left_nick = str(self.players[idx]) + self.bottom_nick = str(self.player_nick) self.selected = [] # Card choosed by the player (e.g. during ecart) self.hand_size = 13 # number of cards in a hand self.hand = []
--- a/sat_frontends/quick_frontend/quick_games.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_games.py Tue Aug 13 19:08:41 2019 +0200 @@ -27,7 +27,7 @@ from sat_frontends.tools import games from sat_frontends.quick_frontend.constants import Const as C -import quick_chat +from . import quick_chat class RoomGame(object): @@ -82,9 +82,9 @@ if real_class == cls: host.showDialog( _( - u"A {game} activity between {players} has been started, but you couldn't take part because your client doesn't support it." + "A {game} activity between {players} has been started, but you couldn't take part because your client doesn't support it." ).format(game=cls._game_name, players=", ".join(players)), - _(u"{game} Game").format(game=cls._game_name), + _("{game} Game").format(game=cls._game_name), ) return panel = real_class(chat_widget, referee, players, *args)
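Note on the quick_games.py hunk above: implicit relative imports are gone in Python 3, so the bare "import quick_chat" becomes an explicit relative import. For reference, the two equivalent spellings inside the sat_frontends.quick_frontend package are:

    # Python 3 treats a bare "import quick_chat" inside a package as an absolute
    # import, which fails; the module has to be named explicitly:
    from . import quick_chat                                # explicit relative import
    # from sat_frontends.quick_frontend import quick_chat   # equivalent absolute form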
--- a/sat_frontends/quick_frontend/quick_menus.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_menus.py Tue Aug 13 19:08:41 2019 +0200 @@ -19,12 +19,12 @@ try: # FIXME: to be removed when an acceptable solution is here - unicode("") # XXX: unicode doesn't exist in pyjamas + str("") # XXX: unicode doesn't exist in pyjamas except ( TypeError, AttributeError, ): # Error raised is not the same depending on pyjsbuild options - unicode = str + str = str from sat.core.log import getLogger from sat.core.i18n import _, languageSwitch @@ -100,11 +100,11 @@ else: if caller is None: - log.error(u"Caller can't be None with a dictionary as data_collector") + log.error("Caller can't be None with a dictionary as data_collector") return {} data = {} - for data_key, caller_attr in data_collector.iteritems(): - data[data_key] = unicode(getattr(caller, caller_attr)) + for data_key, caller_attr in data_collector.items(): + data[data_key] = str(getattr(caller, caller_attr)) return data def call(self, caller, profile=C.PROF_KEY_NONE): @@ -189,7 +189,7 @@ def __init__(self): MenuSeparator.SEP_IDX += 1 - name = u"___separator_{}".format(MenuSeparator.SEP_IDX) + name = "___separator_{}".format(MenuSeparator.SEP_IDX) MenuItem.__init__(self, name, name) @@ -208,7 +208,7 @@ return item.canonical in self._items def __iter__(self): - return self._items.itervalues() + return iter(self._items.values()) def __getitem__(self, item): try: @@ -218,8 +218,8 @@ def getOrCreate(self, item): log.debug( - u"MenuContainer getOrCreate: item=%s name=%s\nlist=%s" - % (item, item.canonical, self._items.keys()) + "MenuContainer getOrCreate: item=%s name=%s\nlist=%s" + % (item, item.canonical, list(self._items.keys())) ) try: return self[item] @@ -229,7 +229,7 @@ def getActiveMenus(self): """Return an iterator on active children""" - for child in self._items.itervalues(): + for child in self._items.values(): if child.ACTIVE: yield child @@ -347,7 +347,7 @@ try: return QuickMenusManager._data_collectors[type_] except KeyError: - log.error(u"No data collector registered for {}".format(type_)) + log.error("No data collector registered for {}".format(type_)) return None def addMenuItem(self, type_, path, item, path_i18n=None, top_extra=None): @@ -375,14 +375,14 @@ elif isinstance(container_item, MenuHook): # MenuHook must not be replaced log.debug( - u"ignoring menu at path [{}] because a hook is already in place".format( + "ignoring menu at path [{}] because a hook is already in place".format( path ) ) else: - log.error(u"Conflicting menus at path [{}]".format(path)) + log.error("Conflicting menus at path [{}]".format(path)) else: - log.debug(u"Adding menu [{type_}] {path}".format(type_=type_, path=path)) + log.debug("Adding menu [{type_}] {path}".format(type_=type_, path=path)) menu_container.append(item) self.host.callListeners("menu", type_, path, path_i18n, item) @@ -461,7 +461,7 @@ type_, path[-1], path_i18n[-1], callback=callback, extra=extra ) self.addMenuItem(type_, path[:-1], menu_item, path_i18n[:-1], top_extra) - log.info(u"Menu hook set on {path} ({type_})".format(path=path, type_=type_)) + log.info("Menu hook set on {path} ({type_})".format(path=path, type_=type_)) def addCategory(self, type_, path, path_i18n=None, extra=None, top_extra=None): """Create a category with all parents, and set extra on the last one
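Note on the quick_menus.py hunks above: the pyjamas guard is kept but becomes a no-op after 2to3, since str("") always succeeds on Python 3 and "str = str" rebinds nothing; it presumably stays only until pyjamas support is removed (see the FIXME). A Python 3 safe spelling of such a compatibility guard could look like this (sketch only, not the form used in the diff):

    try:
        unicode                 # only defined on Python 2
    except NameError:
        unicode = str           # Python 3: treat unicode checks as str checks

    print(unicode("test"))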
--- a/sat_frontends/quick_frontend/quick_profile_manager.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_profile_manager.py Tue Aug 13 19:08:41 2019 +0200 @@ -122,7 +122,7 @@ ) def getProfileNameEb(failure): - log.error(u"Can't retrieve profile name: {}".format(failure)) + log.error("Can't retrieve profile name: {}".format(failure)) for profile_key in profile_keys: self.host.bridge.profileNameGet( @@ -130,7 +130,7 @@ ) def getParamError(self, __): - self.host.showDialog(_(u"Error"), _("Can't get profile parameter"), "error") + self.host.showDialog(_("Error"), _("Can't get profile parameter"), "error") ## Helping methods ## @@ -215,14 +215,14 @@ self.host.bridge.setParam( "JabberID", login, "Connection", profile_key=self.current.profile ) - log.info(u"login updated for profile [{}]".format(self.current.profile)) + log.info("login updated for profile [{}]".format(self.current.profile)) if password != self.current.password and self.current.password is not None: self.current.password = password self.host.bridge.setParam( "Password", password, "Connection", profile_key=self.current.profile ) log.info( - u"password updated for profile [{}]".format(self.current.profile) + "password updated for profile [{}]".format(self.current.profile) ) ## graphic updates (should probably be overriden in frontends) ##
--- a/sat_frontends/quick_frontend/quick_utils.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_utils.py Tue Aug 13 19:08:41 2019 +0200 @@ -50,5 +50,5 @@ (options, args) = parser.parse_args() if options.profile: - options.profile = options.profile.decode("utf-8") + options.profile = options.profile return options
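Note on the quick_utils.py hunk above: command-line arguments are already text (str) in Python 3, so the explicit decode("utf-8") is dropped; the remaining self-assignment is a harmless leftover of that removal. Minimal standalone check:

    # sys.argv is already made of str in Python 3, no manual decoding required:
    import sys

    for arg in sys.argv:
        assert isinstance(arg, str)    # under Python 2 these were byte strings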
--- a/sat_frontends/quick_frontend/quick_widgets.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/quick_frontend/quick_widgets.py Tue Aug 13 19:08:41 2019 +0200 @@ -29,12 +29,12 @@ try: # FIXME: to be removed when an acceptable solution is here - unicode("") # XXX: unicode doesn't exist in pyjamas + str("") # XXX: unicode doesn't exist in pyjamas except ( TypeError, AttributeError, ): # Error raised is not the same depending on pyjsbuild options - unicode = str + str = str def register(base_cls, child_cls=None): @@ -63,8 +63,8 @@ def __iter__(self): """Iterate throught all widgets""" - for widget_map in self._widgets.itervalues(): - for widget_instances in widget_map.itervalues(): + for widget_map in self._widgets.values(): + for widget_instances in widget_map.values(): for widget in widget_instances: yield widget @@ -114,14 +114,14 @@ return else: if target is not None: - filter_hash = unicode(class_.getWidgetHash(target, profiles)) + filter_hash = str(class_.getWidgetHash(target, profiles)) else: filter_hash = None if filter_hash is not None: for widget in widgets_map.get(filter_hash, []): yield widget else: - for widget_instances in widgets_map.itervalues(): + for widget_instances in widgets_map.values(): for widget in widget_instances: yield widget @@ -136,7 +136,7 @@ @return: a class_ instance or None if the widget doesn't exist """ assert (target is not None) or (profiles is not None) - if profiles is not None and isinstance(profiles, unicode): + if profiles is not None and isinstance(profiles, str): profiles = [profiles] class_ = self.getRealClass(class_) hash_ = class_.getWidgetHash(target, profiles) @@ -223,7 +223,7 @@ if widget is None: # we need to create a new widget - log.debug(u"Creating new widget for target {} {}".format(target, cls)) + log.debug("Creating new widget for target {} {}".format(target, cls)) widget = cls(*_args, **_kwargs) widgets_map[hash_] = [widget] @@ -248,13 +248,13 @@ recreateArgs(_args, _kwargs) widget = cls(*_args, **_kwargs) widgets_map[hash_].append(widget) - log.debug(u"widget <{wid}> already exists, a new one has been recreated" + log.debug("widget <{wid}> already exists, a new one has been recreated" .format(wid=widget)) elif callable(on_existing_widget): widget = on_existing_widget(widget) if widget is None: raise exceptions.InternalError( - u"on_existing_widget method must return the widget to use") + "on_existing_widget method must return the widget to use") else: raise exceptions.InternalError( "Unexpected on_existing_widget value ({})".format(on_existing_widget)) @@ -295,9 +295,9 @@ try: widgets_map = self._widgets[class_.__name__] except KeyError: - log.error(u"no widgets_map found for class {cls}".format(cls=class_)) + log.error("no widgets_map found for class {cls}".format(cls=class_)) return - widget_hash = unicode(class_.getWidgetHash(widget_to_delete.target, + widget_hash = str(class_.getWidgetHash(widget_to_delete.target, widget_to_delete.profiles)) widget_instances = widgets_map[widget_hash] if all_instances: @@ -306,16 +306,16 @@ try: widget_instances.remove(widget_to_delete) except ValueError: - log.error(u"widget_to_delete not found in widget instances") + log.error("widget_to_delete not found in widget instances") return - log.debug(u"widget {} deleted".format(widget_to_delete)) + log.debug("widget {} deleted".format(widget_to_delete)) if not widget_instances: # all instances with this hash have been deleted # we remove the hash itself del widgets_map[widget_hash] - log.debug(u"All instances of {cls} with hash {widget_hash} 
have been deleted" + log.debug("All instances of {cls} with hash {widget_hash} have been deleted" .format(cls=class_, widget_hash=widget_hash)) @@ -345,7 +345,7 @@ self.addTarget(target) self.profiles = set() self._sync = True - if isinstance(profiles, basestring): + if isinstance(profiles, str): self.addProfile(profiles) elif profiles is None: if not self.PROFILES_ALLOW_NONE: @@ -430,7 +430,7 @@ @param profiles: profile(s) associated to target, see __init__ docstring @return: a hash (can correspond to one or many targets or profiles, depending of widget class) """ - return unicode(target) # by defaut, there is one hash for one target + return str(target) # by defaut, there is one hash for one target # widget life events
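Note on the quick_widgets.py hunks above: basestring does not exist in Python 3, so the profile argument is now checked against str only, and widget hashes are built with str() instead of unicode(). A hypothetical helper showing the same isinstance pattern (not part of the codebase):

    # basestring is gone in Python 3; checking against str is enough:
    def as_profile_list(profiles):
        if isinstance(profiles, str):  # Python 2 code used basestring here
            return [profiles]
        return list(profiles)

    print(as_profile_list("default"))      # ['default']
    print(as_profile_list(["a", "b"]))     # ['a', 'b']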
--- a/sat_frontends/tools/css_color.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/tools/css_color.py Tue Aug 13 19:08:41 2019 +0200 @@ -23,154 +23,154 @@ CSS_COLORS = { - u"black": u"000000", - u"silver": u"c0c0c0", - u"gray": u"808080", - u"white": u"ffffff", - u"maroon": u"800000", - u"red": u"ff0000", - u"purple": u"800080", - u"fuchsia": u"ff00ff", - u"green": u"008000", - u"lime": u"00ff00", - u"olive": u"808000", - u"yellow": u"ffff00", - u"navy": u"000080", - u"blue": u"0000ff", - u"teal": u"008080", - u"aqua": u"00ffff", - u"orange": u"ffa500", - u"aliceblue": u"f0f8ff", - u"antiquewhite": u"faebd7", - u"aquamarine": u"7fffd4", - u"azure": u"f0ffff", - u"beige": u"f5f5dc", - u"bisque": u"ffe4c4", - u"blanchedalmond": u"ffebcd", - u"blueviolet": u"8a2be2", - u"brown": u"a52a2a", - u"burlywood": u"deb887", - u"cadetblue": u"5f9ea0", - u"chartreuse": u"7fff00", - u"chocolate": u"d2691e", - u"coral": u"ff7f50", - u"cornflowerblue": u"6495ed", - u"cornsilk": u"fff8dc", - u"crimson": u"dc143c", - u"darkblue": u"00008b", - u"darkcyan": u"008b8b", - u"darkgoldenrod": u"b8860b", - u"darkgray": u"a9a9a9", - u"darkgreen": u"006400", - u"darkgrey": u"a9a9a9", - u"darkkhaki": u"bdb76b", - u"darkmagenta": u"8b008b", - u"darkolivegreen": u"556b2f", - u"darkorange": u"ff8c00", - u"darkorchid": u"9932cc", - u"darkred": u"8b0000", - u"darksalmon": u"e9967a", - u"darkseagreen": u"8fbc8f", - u"darkslateblue": u"483d8b", - u"darkslategray": u"2f4f4f", - u"darkslategrey": u"2f4f4f", - u"darkturquoise": u"00ced1", - u"darkviolet": u"9400d3", - u"deeppink": u"ff1493", - u"deepskyblue": u"00bfff", - u"dimgray": u"696969", - u"dimgrey": u"696969", - u"dodgerblue": u"1e90ff", - u"firebrick": u"b22222", - u"floralwhite": u"fffaf0", - u"forestgreen": u"228b22", - u"gainsboro": u"dcdcdc", - u"ghostwhite": u"f8f8ff", - u"gold": u"ffd700", - u"goldenrod": u"daa520", - u"greenyellow": u"adff2f", - u"grey": u"808080", - u"honeydew": u"f0fff0", - u"hotpink": u"ff69b4", - u"indianred": u"cd5c5c", - u"indigo": u"4b0082", - u"ivory": u"fffff0", - u"khaki": u"f0e68c", - u"lavender": u"e6e6fa", - u"lavenderblush": u"fff0f5", - u"lawngreen": u"7cfc00", - u"lemonchiffon": u"fffacd", - u"lightblue": u"add8e6", - u"lightcoral": u"f08080", - u"lightcyan": u"e0ffff", - u"lightgoldenrodyellow": u"fafad2", - u"lightgray": u"d3d3d3", - u"lightgreen": u"90ee90", - u"lightgrey": u"d3d3d3", - u"lightpink": u"ffb6c1", - u"lightsalmon": u"ffa07a", - u"lightseagreen": u"20b2aa", - u"lightskyblue": u"87cefa", - u"lightslategray": u"778899", - u"lightslategrey": u"778899", - u"lightsteelblue": u"b0c4de", - u"lightyellow": u"ffffe0", - u"limegreen": u"32cd32", - u"linen": u"faf0e6", - u"mediumaquamarine": u"66cdaa", - u"mediumblue": u"0000cd", - u"mediumorchid": u"ba55d3", - u"mediumpurple": u"9370db", - u"mediumseagreen": u"3cb371", - u"mediumslateblue": u"7b68ee", - u"mediumspringgreen": u"00fa9a", - u"mediumturquoise": u"48d1cc", - u"mediumvioletred": u"c71585", - u"midnightblue": u"191970", - u"mintcream": u"f5fffa", - u"mistyrose": u"ffe4e1", - u"moccasin": u"ffe4b5", - u"navajowhite": u"ffdead", - u"oldlace": u"fdf5e6", - u"olivedrab": u"6b8e23", - u"orangered": u"ff4500", - u"orchid": u"da70d6", - u"palegoldenrod": u"eee8aa", - u"palegreen": u"98fb98", - u"paleturquoise": u"afeeee", - u"palevioletred": u"db7093", - u"papayawhip": u"ffefd5", - u"peachpuff": u"ffdab9", - u"peru": u"cd853f", - u"pink": u"ffc0cb", - u"plum": u"dda0dd", - u"powderblue": u"b0e0e6", - u"rosybrown": u"bc8f8f", - u"royalblue": u"4169e1", - 
u"saddlebrown": u"8b4513", - u"salmon": u"fa8072", - u"sandybrown": u"f4a460", - u"seagreen": u"2e8b57", - u"seashell": u"fff5ee", - u"sienna": u"a0522d", - u"skyblue": u"87ceeb", - u"slateblue": u"6a5acd", - u"slategray": u"708090", - u"slategrey": u"708090", - u"snow": u"fffafa", - u"springgreen": u"00ff7f", - u"steelblue": u"4682b4", - u"tan": u"d2b48c", - u"thistle": u"d8bfd8", - u"tomato": u"ff6347", - u"turquoise": u"40e0d0", - u"violet": u"ee82ee", - u"wheat": u"f5deb3", - u"whitesmoke": u"f5f5f5", - u"yellowgreen": u"9acd32", - u"rebeccapurple": u"663399", + "black": "000000", + "silver": "c0c0c0", + "gray": "808080", + "white": "ffffff", + "maroon": "800000", + "red": "ff0000", + "purple": "800080", + "fuchsia": "ff00ff", + "green": "008000", + "lime": "00ff00", + "olive": "808000", + "yellow": "ffff00", + "navy": "000080", + "blue": "0000ff", + "teal": "008080", + "aqua": "00ffff", + "orange": "ffa500", + "aliceblue": "f0f8ff", + "antiquewhite": "faebd7", + "aquamarine": "7fffd4", + "azure": "f0ffff", + "beige": "f5f5dc", + "bisque": "ffe4c4", + "blanchedalmond": "ffebcd", + "blueviolet": "8a2be2", + "brown": "a52a2a", + "burlywood": "deb887", + "cadetblue": "5f9ea0", + "chartreuse": "7fff00", + "chocolate": "d2691e", + "coral": "ff7f50", + "cornflowerblue": "6495ed", + "cornsilk": "fff8dc", + "crimson": "dc143c", + "darkblue": "00008b", + "darkcyan": "008b8b", + "darkgoldenrod": "b8860b", + "darkgray": "a9a9a9", + "darkgreen": "006400", + "darkgrey": "a9a9a9", + "darkkhaki": "bdb76b", + "darkmagenta": "8b008b", + "darkolivegreen": "556b2f", + "darkorange": "ff8c00", + "darkorchid": "9932cc", + "darkred": "8b0000", + "darksalmon": "e9967a", + "darkseagreen": "8fbc8f", + "darkslateblue": "483d8b", + "darkslategray": "2f4f4f", + "darkslategrey": "2f4f4f", + "darkturquoise": "00ced1", + "darkviolet": "9400d3", + "deeppink": "ff1493", + "deepskyblue": "00bfff", + "dimgray": "696969", + "dimgrey": "696969", + "dodgerblue": "1e90ff", + "firebrick": "b22222", + "floralwhite": "fffaf0", + "forestgreen": "228b22", + "gainsboro": "dcdcdc", + "ghostwhite": "f8f8ff", + "gold": "ffd700", + "goldenrod": "daa520", + "greenyellow": "adff2f", + "grey": "808080", + "honeydew": "f0fff0", + "hotpink": "ff69b4", + "indianred": "cd5c5c", + "indigo": "4b0082", + "ivory": "fffff0", + "khaki": "f0e68c", + "lavender": "e6e6fa", + "lavenderblush": "fff0f5", + "lawngreen": "7cfc00", + "lemonchiffon": "fffacd", + "lightblue": "add8e6", + "lightcoral": "f08080", + "lightcyan": "e0ffff", + "lightgoldenrodyellow": "fafad2", + "lightgray": "d3d3d3", + "lightgreen": "90ee90", + "lightgrey": "d3d3d3", + "lightpink": "ffb6c1", + "lightsalmon": "ffa07a", + "lightseagreen": "20b2aa", + "lightskyblue": "87cefa", + "lightslategray": "778899", + "lightslategrey": "778899", + "lightsteelblue": "b0c4de", + "lightyellow": "ffffe0", + "limegreen": "32cd32", + "linen": "faf0e6", + "mediumaquamarine": "66cdaa", + "mediumblue": "0000cd", + "mediumorchid": "ba55d3", + "mediumpurple": "9370db", + "mediumseagreen": "3cb371", + "mediumslateblue": "7b68ee", + "mediumspringgreen": "00fa9a", + "mediumturquoise": "48d1cc", + "mediumvioletred": "c71585", + "midnightblue": "191970", + "mintcream": "f5fffa", + "mistyrose": "ffe4e1", + "moccasin": "ffe4b5", + "navajowhite": "ffdead", + "oldlace": "fdf5e6", + "olivedrab": "6b8e23", + "orangered": "ff4500", + "orchid": "da70d6", + "palegoldenrod": "eee8aa", + "palegreen": "98fb98", + "paleturquoise": "afeeee", + "palevioletred": "db7093", + "papayawhip": "ffefd5", + "peachpuff": "ffdab9", + 
"peru": "cd853f", + "pink": "ffc0cb", + "plum": "dda0dd", + "powderblue": "b0e0e6", + "rosybrown": "bc8f8f", + "royalblue": "4169e1", + "saddlebrown": "8b4513", + "salmon": "fa8072", + "sandybrown": "f4a460", + "seagreen": "2e8b57", + "seashell": "fff5ee", + "sienna": "a0522d", + "skyblue": "87ceeb", + "slateblue": "6a5acd", + "slategray": "708090", + "slategrey": "708090", + "snow": "fffafa", + "springgreen": "00ff7f", + "steelblue": "4682b4", + "tan": "d2b48c", + "thistle": "d8bfd8", + "tomato": "ff6347", + "turquoise": "40e0d0", + "violet": "ee82ee", + "wheat": "f5deb3", + "whitesmoke": "f5f5f5", + "yellowgreen": "9acd32", + "rebeccapurple": "663399", } -DEFAULT = u"000000" +DEFAULT = "000000" def parse(raw_value, as_string=True): @@ -186,52 +186,52 @@ If value can't be parsed, a warning message is logged, and DEFAULT is returned """ raw_value = raw_value.strip().lower() - if raw_value.startswith(u"#"): + if raw_value.startswith("#"): # we have a hexadecimal value str_value = raw_value[1:] if len(raw_value) in (3, 4): - str_value = u"".join([2 * v for v in str_value]) - elif raw_value.startswith(u"rgb"): - left_p = raw_value.find(u"(") - right_p = raw_value.find(u")") + str_value = "".join([2 * v for v in str_value]) + elif raw_value.startswith("rgb"): + left_p = raw_value.find("(") + right_p = raw_value.find(")") rgb_values = [v.strip() for v in raw_value[left_p + 1 : right_p].split(",")] - expected_len = 4 if raw_value.startswith(u"rgba") else 3 + expected_len = 4 if raw_value.startswith("rgba") else 3 if len(rgb_values) != expected_len: - log.warning(u"incorrect value: {}".format(raw_value)) + log.warning("incorrect value: {}".format(raw_value)) str_value = DEFAULT else: int_values = [] for rgb_v in rgb_values: - p_idx = rgb_v.find(u"%") + p_idx = rgb_v.find("%") if p_idx == -1: # base 10 value try: int_v = int(rgb_v) if int_v > 255: - raise ValueError(u"value exceed 255") + raise ValueError("value exceed 255") int_values.append(int_v) except ValueError: - log.warning(u"invalid int: {}".format(rgb_v)) + log.warning("invalid int: {}".format(rgb_v)) int_values.append(0) else: # percentage try: int_v = int(int(rgb_v[:p_idx]) / 100.0 * 255) if int_v > 255: - raise ValueError(u"value exceed 255") + raise ValueError("value exceed 255") int_values.append(int_v) except ValueError: - log.warning(u"invalid percent value: {}".format(rgb_v)) + log.warning("invalid percent value: {}".format(rgb_v)) int_values.append(0) - str_value = u"".join([u"{:02x}".format(v) for v in int_values]) - elif raw_value.startswith(u"hsl"): - log.warning(u"hue-saturation-lightness not handled yet") # TODO + str_value = "".join(["{:02x}".format(v) for v in int_values]) + elif raw_value.startswith("hsl"): + log.warning("hue-saturation-lightness not handled yet") # TODO str_value = DEFAULT else: try: str_value = CSS_COLORS[raw_value] except KeyError: - log.warning(u"unrecognised format: {}".format(raw_value)) + log.warning("unrecognised format: {}".format(raw_value)) str_value = DEFAULT if as_string: @@ -240,6 +240,6 @@ return tuple( [ int(str_value[i] + str_value[i + 1], 16) - for i in xrange(0, len(str_value), 2) + for i in range(0, len(str_value), 2) ] )
--- a/sat_frontends/tools/games.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/tools/games.py Tue Aug 13 19:08:41 2019 +0200 @@ -26,7 +26,7 @@ "carreau", "atout", ] # I have switched the usual order 'trefle' and 'carreau' because card are more easy to see if suit colour change (black, red, black, red) -VALUES_ORDER = [str(i) for i in xrange(1, 11)] + ["valet", "cavalier", "dame", "roi"] +VALUES_ORDER = [str(i) for i in range(1, 11)] + ["valet", "cavalier", "dame", "roi"] class TarotCard(object): @@ -82,4 +82,4 @@ # These symbols are diplayed by Libervia next to the player's nicknames -SYMBOLS = {"Radiocol": [u"♬"], "Tarot": [u"♠", u"♣", u"♥", u"♦"]} +SYMBOLS = {"Radiocol": ["♬"], "Tarot": ["♠", "♣", "♥", "♦"]}
--- a/sat_frontends/tools/jid.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/tools/jid.py Tue Aug 13 19:08:41 2019 +0200 @@ -20,12 +20,12 @@ # hack to use this module with pyjamas try: - unicode("") # XXX: unicode doesn't exist in pyjamas + str("") # XXX: unicode doesn't exist in pyjamas # normal version - class BaseJID(unicode): + class BaseJID(str): def __new__(cls, jid_str): - self = unicode.__new__(cls, cls._normalize(jid_str)) + self = str.__new__(cls, cls._normalize(jid_str)) return self def __init__(self, jid_str): @@ -114,7 +114,7 @@ def bare(self): if not self.node: return JID(self.domain) - return JID(u"{}@{}".format(self.node, self.domain)) + return JID("{}@{}".format(self.node, self.domain)) def is_valid(self): """ @@ -131,4 +131,4 @@ @param resource (unicode): new resource @return: a new JID instance """ - return JID(u"%s/%s" % (entity.bare, resource)) + return JID("%s/%s" % (entity.bare, resource))
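Note on the jid.py hunk above: BaseJID now subclasses str instead of unicode, and __new__ still normalises the raw value before the immutable string is built. Simplified sketch (the real class also parses node, domain and resource):

    # Subclassing str replaces subclassing unicode:
    class LowerStr(str):
        def __new__(cls, raw):
            return str.__new__(cls, raw.strip().lower())

    print(LowerStr("  Louise@Example.ORG  "))   # louise@example.org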
--- a/sat_frontends/tools/xmlui.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat_frontends/tools/xmlui.py Tue Aug 13 19:08:41 2019 +0200 @@ -51,7 +51,7 @@ for child in node.childNodes: if child.nodeType == child.TEXT_NODE: data.append(child.wholeText) - return u"".join(data) + return "".join(data) class Widget(object): @@ -295,7 +295,7 @@ top = parsed_dom.documentElement self.session_id = top.getAttribute("session_id") or None self.submit_id = top.getAttribute("submit") or None - self.xmlui_title = title or top.getAttribute("title") or u"" + self.xmlui_title = title or top.getAttribute("title") or "" self.hidden = {} if flags is None: flags = [] @@ -342,8 +342,8 @@ raise ValueError("Can't submit is self.submit_id is not set") if "session_id" in data: raise ValueError( - u"session_id must no be used in data, it is automaticaly filled with " - u"self.session_id if present" + "session_id must no be used in data, it is automaticaly filled with " + "self.session_id if present" ) if self.session_id is not None: data["session_id"] = self.session_id @@ -376,7 +376,7 @@ return self.__getitem__(name) def keys(self): - return self.widgets.keys() + return list(self.widgets.keys()) class XMLUIPanel(XMLUIBase): @@ -420,7 +420,7 @@ @staticmethod def escape(name): """Return escaped name for forms""" - return u"%s%s" % (C.SAT_FORM_PREFIX, name) + return "%s%s" % (C.SAT_FORM_PREFIX, name) @property def main_cont(self): @@ -556,7 +556,7 @@ value = getText(value_elt) else: value = ( - node.getAttribute("value") if node.hasAttribute("value") else u"" + node.getAttribute("value") if node.hasAttribute("value") else "" ) if type_ == "empty": ctrl = self.widget_factory.createEmptyWidget(_xmlui_parent) @@ -569,8 +569,8 @@ data[CURRENT_LABEL] = ctrl elif type_ == "hidden": if name in self.hidden: - raise exceptions.ConflictError(u"Conflict on hidden value with " - u"name {name}".format(name=name)) + raise exceptions.ConflictError("Conflict on hidden value with " + "name {name}".format(name=name)) self.hidden[name] = value continue elif type_ == "jid": @@ -617,7 +617,7 @@ self.ctrl_list[name] = {"type": type_, "control": ctrl} elif type_ == "list": style = [] if node.getAttribute("multi") == "yes" else ["single"] - for attr in (u"noselect", u"extensible", u"reducible", u"inline"): + for attr in ("noselect", "extensible", "reducible", "inline"): if node.getAttribute(attr) == "yes": style.append(attr) _options = [ @@ -778,7 +778,7 @@ escaped = self.escape(field) ctrl = self.ctrl_list[field] if isinstance(ctrl["control"], ListWidget): - data[escaped] = u"\t".join(ctrl["control"]._xmluiGetSelectedValues()) + data[escaped] = "\t".join(ctrl["control"]._xmluiGetSelectedValues()) else: data[escaped] = ctrl["control"]._xmluiGetValue() self._xmluiLaunchAction(callback_id, data) @@ -810,7 +810,7 @@ target._xmluiAddValues(values, select=True) else: if isinstance(source, ListWidget): - value = u", ".join(source._xmluiGetSelectedValues()) + value = ", ".join(source._xmluiGetSelectedValues()) else: value = source._xmluiGetValue() if action == "move": @@ -877,12 +877,12 @@ ctrl = self.ctrl_list[ctrl_name] if isinstance(ctrl["control"], ListWidget): selected_values.append( - (escaped, u"\t".join(ctrl["control"]._xmluiGetSelectedValues())) + (escaped, "\t".join(ctrl["control"]._xmluiGetSelectedValues())) ) else: selected_values.append((escaped, ctrl["control"]._xmluiGetValue())) data = dict(selected_values) - for key, value in self.hidden.iteritems(): + for key, value in self.hidden.items(): data[self.escape(key)] = value if self.submit_id 
is not None: @@ -913,7 +913,7 @@ assert self.type == "param" for ctrl in self.param_changed: if isinstance(ctrl, ListWidget): - value = u"\t".join(ctrl._xmluiGetSelectedValues()) + value = "\t".join(ctrl._xmluiGetSelectedValues()) else: value = ctrl._xmluiGetValue() param_name = ctrl._xmlui_name.split(C.SAT_PARAM_SEPARATOR)[1] @@ -1012,10 +1012,10 @@ # TODO: remove this method, as there are seme use cases where different XMLUI # classes can be used in the same frontend, so a global value is not good assert type_ in (CLASS_PANEL, CLASS_DIALOG) - log.warning(u"registerClass for XMLUI is deprecated, please use partial with " - u"xmlui.create and class_map instead") + log.warning("registerClass for XMLUI is deprecated, please use partial with " + "xmlui.create and class_map instead") if type_ in _class_map: - log.debug(_(u"XMLUI class already registered for {type_}, ignoring").format( + log.debug(_("XMLUI class already registered for {type_}, ignoring").format( type_=type_)) return
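Note on the xmlui.py hunks above: dict.keys() returns a live view in Python 3, so the public keys() helper wraps it in list() to keep returning a snapshot. Illustration with hypothetical data:

    widgets = {"title": object(), "body": object()}

    snapshot = list(widgets.keys())   # fixed copy, safe if widgets changes later
    view = widgets.keys()             # reflects later insertions and deletions

    widgets["footer"] = object()
    print(snapshot)                   # ['title', 'body']
    print(sorted(view))               # ['body', 'footer', 'title']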
--- a/setup.py Wed Jul 31 11:31:22 2019 +0200 +++ b/setup.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: an XMPP client @@ -76,11 +76,11 @@ setup(name=NAME, version=VERSION, - description=u'Salut à Toi multipurpose and multi frontend XMPP client', - long_description=u'Salut à Toi (SàT) is a XMPP client based on a daemon/frontend ' - u'architecture. Its multi frontend (desktop, web, console ' - u'interface, CLI, etc) and multipurpose (instant messaging, ' - u'microblogging, games, file sharing, etc).', + description='Salut à Toi multipurpose and multi frontend XMPP client', + long_description='Salut à Toi (SàT) is a XMPP client based on a daemon/frontend ' + 'architecture. Its multi frontend (desktop, web, console ' + 'interface, CLI, etc) and multipurpose (instant messaging, ' + 'microblogging, games, file sharing, etc).', author='Association « Salut à Toi »', author_email='contact@goffi.org', url='https://salut-a-toi.org',
--- a/twisted/plugins/sat_plugin.py Wed Jul 31 11:31:22 2019 +0200 +++ b/twisted/plugins/sat_plugin.py Tue Aug 13 19:08:41 2019 +0200 @@ -28,7 +28,7 @@ except ImportError: pass -from zope.interface import implements +from zope.interface import implementer from twisted.python import usage from twisted.plugin import IPlugin from twisted.application.service import IServiceMaker @@ -49,11 +49,11 @@ optParameters = [] -class SatMaker(object): - implements(IServiceMaker, IPlugin) +@implementer(IPlugin, IServiceMaker) +class SatMaker: tapname = C.APP_NAME_FILE - description = _(u"%s XMPP client backend") % C.APP_NAME_FULL + description = _("%s XMPP client backend") % C.APP_NAME_FULL options = Options def makeService(self, options):
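Note on the sat_plugin.py hunk above: zope.interface's implements() relied on Python 2 class advice and fails on Python 3, so SatMaker is now declared with the @implementer class decorator. Self-contained sketch with a made-up interface (IGreeter is not part of this changeset):

    from zope.interface import Interface, implementer

    class IGreeter(Interface):
        def greet(name):
            """Return a greeting for name."""

    @implementer(IGreeter)
    class Greeter:
        def greet(self, name):
            return "hello {}".format(name)

    print(Greeter().greet("world"))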