Mercurial > libervia-backend
changeset 3028:ab2696e34d29
Python 3 port:
/!\ this is a huge commit
/!\ starting from this commit, SàT needs Python 3.6+
/!\ SàT may be unstable or some features may not work anymore; this will improve with time
This patch ports the backend, bridge and frontends to Python 3.
Roughly this has been done this way:
- the 2to3 tool has been applied (with python 3.7)
- all references to python2 have been replaced with python3 (notably shebangs)
- fixed files not handled by 2to3 (notably the shell script)
- several manual fixes
- fixed issues reported by Python 3 that were not handled in Python 2
- replaced "async" with "async_" when needed (it's a reserved word from Python 3.7)
- replaced zope's "implements" with @implementer decorator
- temporary hack to handle data pickled in database, as str or bytes may be returned,
to be checked later
- fixed hash comparison for password
- removed some code which is not needed anymore with Python 3
- deactivated some code which needs to be checked (notably certificate validation)
- tested with jp, fixed reported issues until some basic commands worked
- ported Primitivus (after porting dependencies like urwid satext)
- more manual fixes
line wrap: on
line diff
--- a/CHANGELOG Wed Jul 31 11:31:22 2019 +0200 +++ b/CHANGELOG Tue Aug 13 19:08:41 2019 +0200 @@ -1,5 +1,8 @@ All theses changelogs are not exhaustive, please check the Mercurial repository for more details. +v 0.8.0 « La Cecília » (NOT RELEASED YET): + - Python 3 port + v 0.7.0 « La Commune » (24/07/19): This version is a huge gap with previous one, changelog only show a part of novelties. This is also the first "general audience" version.
--- a/bin/sat Wed Jul 31 11:31:22 2019 +0200 +++ b/bin/sat Tue Aug 13 19:08:41 2019 +0200 @@ -2,7 +2,7 @@ DEBUG="" DAEMON="" -PYTHON="python2" +PYTHON="python3" TWISTD="$(which twistd)" kill_process() { @@ -29,17 +29,13 @@ eval `"$PYTHON" << PYTHONEND from sat.core.constants import Const as C from sat.memory.memory import fixLocalDir -from ConfigParser import SafeConfigParser +from configparser import ConfigParser from os.path import expanduser, join import sys -import codecs -import locale - -sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout) fixLocalDir() # XXX: tmp update code, will be removed in the future -config = SafeConfigParser(defaults=C.DEFAULT_CONFIG) +config = ConfigParser(defaults=C.DEFAULT_CONFIG) try: config.read(C.CONFIG_FILES) except: @@ -52,7 +48,7 @@ env.append("LOG_DIR='%s'" % join(expanduser(config.get('DEFAULT', 'log_dir')),'')) env.append("APP_NAME='%s'" % C.APP_NAME) env.append("APP_NAME_FILE='%s'" % C.APP_NAME_FILE) -print ";".join(env) +print (";".join(env)) PYTHONEND ` APP_NAME="$APP_NAME"
--- a/sat/__init__.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/__init__.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client
--- a/sat/bridge/bridge_constructor/base_constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/base_constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -20,7 +20,7 @@ """base constructor class""" from sat.bridge.bridge_constructor.constants import Const as C -from ConfigParser import NoOptionError +from configparser import NoOptionError import sys import os import os.path @@ -191,7 +191,7 @@ for arg in self.argumentsParser(signature): attr_string.append( ( - "unicode(%(name)s)%(default)s" + "str(%(name)s)%(default)s" if (unicode_protect and arg == "s") else "%(name)s%(default)s" ) @@ -240,7 +240,7 @@ method = self.generateCoreSide elif side == "frontend": if not self.FRONTEND_ACTIVATE: - print(u"This constructor only handle core, please use core side") + print("This constructor only handle core, please use core side") sys.exit(1) method = self.generateFrontendSide except AttributeError: @@ -271,7 +271,7 @@ sections.sort() for section in sections: function = self.getValues(section) - print("Adding %s %s" % (section, function["type"])) + print(("Adding %s %s" % (section, function["type"]))) default = self.getDefault(section) arg_doc = self.getArgumentsDoc(section) async_ = "async" in self.getFlags(section) @@ -291,7 +291,7 @@ ) extend_method(completion, function, default, arg_doc, async_) - for part, fmt in FORMATS.iteritems(): + for part, fmt in FORMATS.items(): if part.startswith(function["type"]): parts[part.upper()].append(fmt.format(**completion)) @@ -300,7 +300,7 @@ bridge = [] const_override = { env[len(C.ENV_OVERRIDE) :]: v - for env, v in os.environ.iteritems() + for env, v in os.environ.items() if env.startswith(C.ENV_OVERRIDE) } template_path = self.getTemplatePath(TEMPLATE) @@ -308,7 +308,7 @@ with open(template_path) as template: for line in template: - for part, extend_list in parts.iteritems(): + for part, extend_list in 
parts.items(): if line.startswith("##{}_PART##".format(part)): bridge.extend(extend_list) break @@ -317,7 +317,7 @@ if line.startswith("const_"): const_name = line[len("const_") : line.find(" = ")].strip() if const_name in const_override: - print("const {} overriden".format(const_name)) + print(("const {} overriden".format(const_name))) bridge.append( "const_{} = {}".format( const_name, const_override[const_name] @@ -326,7 +326,7 @@ continue bridge.append(line.replace("\n", "")) except IOError: - print("can't open template file [{}]".format(template_path)) + print(("can't open template file [{}]".format(template_path))) sys.exit(1) # now we write to final file @@ -348,15 +348,15 @@ os.mkdir(self.args.dest_dir) full_path = os.path.join(self.args.dest_dir, filename) if os.path.exists(full_path) and not self.args.force: - print( + print(( "The destination file [%s] already exists ! Use --force to overwrite it" % full_path - ) + )) try: with open(full_path, "w") as dest_file: dest_file.write("\n".join(file_buf)) except IOError: - print("Can't open destination file [%s]" % full_path) + print(("Can't open destination file [%s]" % full_path)) except OSError: print("It's not possible to generate the file, check your permissions") exit(1)
--- a/sat/bridge/bridge_constructor/bridge_constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/bridge_constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -22,7 +22,7 @@ from sat.bridge.bridge_constructor.constants import Const as C from sat.bridge.bridge_constructor import constructors, base_constructor import argparse -from ConfigParser import SafeConfigParser as Parser +from configparser import ConfigParser as Parser from importlib import import_module import os import os.path @@ -87,7 +87,7 @@ parser.add_argument( "-t", "--template", - type=file, + type=argparse.FileType(), default=default_template, help="use TEMPLATE to generate bridge (default: %(default)s)", )
--- a/sat/bridge/bridge_constructor/bridge_template.ini Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/bridge_template.ini Tue Aug 13 19:08:41 2019 +0200 @@ -507,7 +507,7 @@ sig_out=s doc=Get XMLUI to manage trust for given encryption algorithm doc_param_0=to_jid: bare JID of entity to manage -doc_param_0=namespace: namespace of the algorithm to manage +doc_param_1=namespace: namespace of the algorithm to manage doc_param_2=%(doc_profile_key)s doc_return=(XMLUI) UI of the trust management @@ -811,7 +811,7 @@ sig_out=(asa(sss)a{sa(a{ss}as)}) param_1_default=u'' param_2_default=True -param_3_default=u"@DEFAULT@" +param_3_default="@DEFAULT@" doc=Discover infos on an entity doc_param_0=entity_jid: JID to discover doc_param_1=node: node to use @@ -837,7 +837,7 @@ sig_out=a(sss) param_1_default=u'' param_2_default=True -param_3_default=u"@DEFAULT@" +param_3_default="@DEFAULT@" doc=Discover items of an entity doc_param_0=entity_jid: JID to discover doc_param_1=node: node to use @@ -856,7 +856,7 @@ param_4_default=True param_5_default=True param_6_default=False -param_7_default=u"@DEFAULT@" +param_7_default="@DEFAULT@" doc=Discover items of an entity doc_param_0=namespaces: namespaces of the features to check doc_param_1=identities: identities to filter
--- a/sat/bridge/bridge_constructor/constants.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constants.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -22,9 +22,9 @@ class Const(constants.Const): - NAME = u"bridge_constructor" + NAME = "bridge_constructor" DEST_DIR_DEFAULT = "generated" - DESCRIPTION = u"""{name} Copyright (C) 2009-2019 Jérôme Poisson (aka Goffi) + DESCRIPTION = """{name} Copyright (C) 2009-2019 Jérôme Poisson (aka Goffi) This script construct a SàT bridge using the given protocol
--- a/sat/bridge/bridge_constructor/constructors/dbus-xml/constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus-xml/constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -53,7 +53,7 @@ sections.sort() for section in sections: function = self.getValues(section) - print("Adding %s %s" % (section, function["type"])) + print(("Adding %s %s" % (section, function["type"]))) new_elt = doc.createElement( "method" if function["type"] == "method" else "signal" )
--- a/sat/bridge/bridge_constructor/constructors/dbus/constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus/constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -114,5 +114,5 @@ % completion ) completion["result"] = ( - "unicode(%s)" if self.args.unicode and function["sig_out"] == "s" else "%s" + "str(%s)" if self.args.unicode and function["sig_out"] == "s" else "%s" ) % result
--- a/sat/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -101,11 +101,11 @@ raise InternalError callback = kwargs.pop("callback") errback = kwargs.pop("errback") - async = True + async_ = True else: - async = False + async_ = False result = self.cb[name](*args, **kwargs) - if async: + if async_: if not isinstance(result, Deferred): log.error("Asynchronous method [%s] does not return a Deferred." % name) raise AsyncNotDeferred @@ -170,9 +170,9 @@ i += 1 return attr - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False): """Dynamically add a method to Dbus Bridge""" - inspect_args = inspect.getargspec(method) + inspect_args = inspect.getfullargspec(method) _arguments = inspect_args.args _defaults = list(inspect_args.defaults or []) @@ -186,12 +186,12 @@ [repr(name)] + ( (_arguments + ["callback=callback", "errback=errback"]) - if async + if async_ else _arguments ) ) - if async: + if async_: _arguments.extend(["callback", "errback"]) _defaults.extend([None, None]) @@ -213,7 +213,7 @@ ) exec(code) # FIXME: to the same thing in a cleaner way, without compile/exec method = locals()[name] - async_callbacks = ("callback", "errback") if async else None + async_callbacks = ("callback", "errback") if async_ else None setattr( DbusObject, name, @@ -265,7 +265,7 @@ if e._dbus_error_name == "org.freedesktop.DBus.Error.NotSupported": log.error( _( - u"D-Bus is not launched, please see README to see instructions on how to launch it" + "D-Bus is not launched, please see README to see instructions on how to launch it" ) ) raise BridgeInitError @@ -277,11 +277,11 @@ log.debug("registering DBus bridge method 
[%s]" % name) self.dbus_bridge.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to Dbus Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [%s] to DBus bridge" % name) - self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async) + self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async_) self.register_method(name, method) def addSignal(self, name, int_suffix, signature, doc={}):
--- a/sat/bridge/bridge_constructor/constructors/dbus/dbus_frontend_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus/dbus_frontend_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 #-*- coding: utf-8 -*- # SAT communication bridge @@ -70,12 +70,12 @@ dbus_interface=const_INT_PREFIX + const_CORE_SUFFIX) self.db_plugin_iface = dbus.Interface(self.db_object, dbus_interface=const_INT_PREFIX + const_PLUGIN_SUFFIX) - except dbus.exceptions.DBusException, e: + except dbus.exceptions.DBusException as e: if e._dbus_error_name in ('org.freedesktop.DBus.Error.ServiceUnknown', 'org.freedesktop.DBus.Error.Spawn.ExecFailed'): errback(BridgeExceptionNoService()) elif e._dbus_error_name == 'org.freedesktop.DBus.Error.NotSupported': - log.error(_(u"D-Bus is not launched, please see README to see instructions on how to launch it")) + log.error(_("D-Bus is not launched, please see README to see instructions on how to launch it")) errback(BridgeInitError) else: errback(e) @@ -102,14 +102,14 @@ # - if we have the 'callback' and 'errback' keyword arguments # - or if the last two arguments are callable - async = False + async_ = False args = list(args) if kwargs: if 'callback' in kwargs: - async = True + async_ = True _callback = kwargs.pop('callback') - _errback = kwargs.pop('errback', lambda failure: log.error(unicode(failure))) + _errback = kwargs.pop('errback', lambda failure: log.error(str(failure))) try: args.append(kwargs.pop('profile')) except KeyError: @@ -119,15 +119,15 @@ pass # at this point, kwargs should be empty if kwargs: - log.warnings(u"unexpected keyword arguments, they will be ignored: {}".format(kwargs)) + log.warnings("unexpected keyword arguments, they will be ignored: {}".format(kwargs)) elif len(args) >= 2 and callable(args[-1]) and callable(args[-2]): - async = True + async_ = True _errback = args.pop() _callback = args.pop() method = 
getattr(self.db_plugin_iface, name) - if async: + if async_: kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = _callback kwargs['error_handler'] = lambda err: _errback(dbus_to_bridge_exception(err))
--- a/sat/bridge/bridge_constructor/constructors/embedded/constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/embedded/constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client
--- a/sat/bridge/bridge_constructor/constructors/embedded/embedded_frontend_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/embedded/embedded_frontend_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client
--- a/sat/bridge/bridge_constructor/constructors/embedded/embedded_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/embedded/embedded_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -25,7 +25,7 @@ class _Bridge(object): def __init__(self): - log.debug(u"Init embedded bridge...") + log.debug("Init embedded bridge...") self._methods_cbs = {} self._signals_cbs = {"core": {}, "plugin": {}} @@ -33,16 +33,16 @@ callback() def register_method(self, name, callback): - log.debug(u"registering embedded bridge method [{}]".format(name)) + log.debug("registering embedded bridge method [{}]".format(name)) if name in self._methods_cbs: - raise exceptions.ConflictError(u"method {} is already regitered".format(name)) + raise exceptions.ConflictError("method {} is already regitered".format(name)) self._methods_cbs[name] = callback def register_signal(self, functionName, handler, iface="core"): iface_dict = self._signals_cbs[iface] if functionName in iface_dict: raise exceptions.ConflictError( - u"signal {name} is already regitered for interface {iface}".format( + "signal {name} is already regitered for interface {iface}".format( name=functionName, iface=iface ) ) @@ -81,11 +81,11 @@ try: cb = self._signals_cbs["plugin"][name] except KeyError: - log.debug(u"ignoring signal {}: no callback registered".format(name)) + log.debug("ignoring signal {}: no callback registered".format(name)) else: cb(*args, **kwargs) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [{}] to embedded bridge".format(name)) self.register_method(name, method)
--- a/sat/bridge/bridge_constructor/constructors/mediawiki/constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/mediawiki/constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -85,7 +85,7 @@ sections.sort() for section in sections: function = self.getValues(section) - print("Adding %s %s" % (section, function["type"])) + print(("Adding %s %s" % (section, function["type"]))) async_msg = """<br />'''This method is asynchronous'''""" deprecated_msg = """<br />'''<font color="#FF0000">/!\ WARNING /!\ : This method is deprecated, please don't use it !</font>'''""" signature_signal = ( @@ -161,7 +161,7 @@ else: core_bridge.append(line.replace("\n", "")) except IOError: - print("Can't open template file [%s]" % template_path) + print(("Can't open template file [%s]" % template_path)) sys.exit(1) # now we write to final file
--- a/sat/bridge/bridge_constructor/constructors/pb/constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/pb/constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client
--- a/sat/bridge/bridge_constructor/constructors/pb/pb_core_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/pb/pb_core_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -46,11 +46,11 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def sendSignalEb(self, failure, signal_name): log.error( - u"Error while sending signal {name}: {msg}".format( + "Error while sending signal {name}: {msg}".format( name=signal_name, msg=failure ) ) @@ -66,27 +66,27 @@ d.addErrback(self.sendSignalEb, name) if to_remove: for handler in to_remove: - log.debug(u"Removing signal handler for dead frontend") + log.debug("Removing signal handler for dead frontend") self.signals_handlers.remove(handler) def _bridgeDeactivateSignals(self): if hasattr(self, "signals_paused"): - log.warning(u"bridge signals already deactivated") + log.warning("bridge signals already deactivated") if self.signals_handler: self.signals_paused.extend(self.signals_handler) else: self.signals_paused = self.signals_handlers self.signals_handlers = [] - log.debug(u"bridge signals have been deactivated") + log.debug("bridge signals have been deactivated") def _bridgeReactivateSignals(self): try: self.signals_handlers = self.signals_paused except AttributeError: - log.debug(u"signals were already activated") + log.debug("signals were already activated") else: del self.signals_paused - log.debug(u"bridge signals have been reactivated") + log.debug("bridge signals have been reactivated") ##METHODS_PART## @@ -102,14 +102,14 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def register_method(self, name, callback): 
log.debug("registering PB bridge method [%s]" % name) setattr(self.root, "remote_" + name, callback) # self.root.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to PB Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method {name} to PB bridge".format(name=name))
--- a/sat/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT communication bridge @@ -28,7 +28,7 @@ class SignalsHandler(pb.Referenceable): def __getattr__(self, name): if name.startswith("remote_"): - log.debug(u"calling an unregistered signal: {name}".format(name=name[7:])) + log.debug("calling an unregistered signal: {name}".format(name=name[7:])) return lambda *args, **kwargs: None else: @@ -43,7 +43,7 @@ pass else: raise exceptions.InternalError( - u"{name} signal handler has been registered twice".format( + "{name} signal handler has been registered twice".format( name=method_name ) ) @@ -99,7 +99,7 @@ d.addErrback(errback) def _initBridgeEb(self, failure): - log.error(u"Can't init bridge: {msg}".format(msg=failure)) + log.error("Can't init bridge: {msg}".format(msg=failure)) def _set_root(self, root): """set remote root object @@ -112,7 +112,7 @@ return d def _generic_errback(self, failure): - log.error(u"bridge failure: {}".format(failure)) + log.error("bridge failure: {}".format(failure)) def bridgeConnect(self, callback, errback): factory = pb.PBClientFactory()
--- a/sat/bridge/dbus_bridge.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/dbus_bridge.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -101,11 +101,11 @@ raise InternalError callback = kwargs.pop("callback") errback = kwargs.pop("errback") - async = True + async_ = True else: - async = False + async_ = False result = self.cb[name](*args, **kwargs) - if async: + if async_: if not isinstance(result, Deferred): log.error("Asynchronous method [%s] does not return a Deferred." % name) raise AsyncNotDeferred @@ -214,73 +214,73 @@ in_signature='s', out_signature='a(a{ss}si)', async_callbacks=None) def actionsGet(self, profile_key="@DEFAULT@"): - return self._callback("actionsGet", unicode(profile_key)) + return self._callback("actionsGet", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='', async_callbacks=None) def addContact(self, entity_jid, profile_key="@DEFAULT@"): - return self._callback("addContact", unicode(entity_jid), unicode(profile_key)) + return self._callback("addContact", str(entity_jid), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=('callback', 'errback')) def asyncDeleteProfile(self, profile, callback=None, errback=None): - return self._callback("asyncDeleteProfile", unicode(profile), callback=callback, errback=errback) + return self._callback("asyncDeleteProfile", str(profile), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sssis', out_signature='s', async_callbacks=('callback', 'errback')) def asyncGetParamA(self, name, category, attribute="value", security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("asyncGetParamA", unicode(name), unicode(category), unicode(attribute), security_limit, unicode(profile_key), 
callback=callback, errback=errback) + return self._callback("asyncGetParamA", str(name), str(category), str(attribute), security_limit, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sis', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def asyncGetParamsValuesFromCategory(self, category, security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("asyncGetParamsValuesFromCategory", unicode(category), security_limit, unicode(profile_key), callback=callback, errback=errback) + return self._callback("asyncGetParamsValuesFromCategory", str(category), security_limit, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssa{ss}', out_signature='b', async_callbacks=('callback', 'errback')) def connect(self, profile_key="@DEFAULT@", password='', options={}, callback=None, errback=None): - return self._callback("connect", unicode(profile_key), unicode(password), options, callback=callback, errback=errback) + return self._callback("connect", str(profile_key), str(password), options, callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='', async_callbacks=('callback', 'errback')) def delContact(self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("delContact", unicode(entity_jid), unicode(profile_key), callback=callback, errback=errback) + return self._callback("delContact", str(entity_jid), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='asa(ss)bbbbbs', out_signature='(a{sa(sss)}a{sa(sss)}a{sa(sss)})', async_callbacks=('callback', 'errback')) - def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, 
profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoFindByFeatures", namespaces, identities, bare_jid, service, roster, own_jid, local_device, unicode(profile_key), callback=callback, errback=errback) + def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key="@DEFAULT@", callback=None, errback=None): + return self._callback("discoFindByFeatures", namespaces, identities, bare_jid, service, roster, own_jid, local_device, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssbs', out_signature='(asa(sss)a{sa(a{ss}as)})', async_callbacks=('callback', 'errback')) - def discoInfos(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoInfos", unicode(entity_jid), unicode(node), use_cache, unicode(profile_key), callback=callback, errback=errback) + def discoInfos(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None): + return self._callback("discoInfos", str(entity_jid), str(node), use_cache, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssbs', out_signature='a(sss)', async_callbacks=('callback', 'errback')) - def discoItems(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoItems", unicode(entity_jid), unicode(node), use_cache, unicode(profile_key), callback=callback, errback=errback) + def discoItems(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None): + return self._callback("discoItems", str(entity_jid), str(node), use_cache, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', 
out_signature='', async_callbacks=('callback', 'errback')) def disconnect(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("disconnect", unicode(profile_key), callback=callback, errback=errback) + return self._callback("disconnect", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='s', async_callbacks=None) def encryptionNamespaceGet(self, arg_0): - return self._callback("encryptionNamespaceGet", unicode(arg_0)) + return self._callback("encryptionNamespaceGet", str(arg_0)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='aa{ss}', @@ -291,56 +291,56 @@ @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sss', out_signature='s', async_callbacks=('callback', 'errback')) - def encryptionTrustUIGet(self, namespace, arg_1, profile_key, callback=None, errback=None): - return self._callback("encryptionTrustUIGet", unicode(namespace), unicode(arg_1), unicode(profile_key), callback=callback, errback=errback) + def encryptionTrustUIGet(self, to_jid, namespace, profile_key, callback=None, errback=None): + return self._callback("encryptionTrustUIGet", str(to_jid), str(namespace), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def getConfig(self, section, name): - return self._callback("getConfig", unicode(section), unicode(name)) + return self._callback("getConfig", str(section), str(name)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a(sa{ss}as)', async_callbacks=('callback', 'errback')) def getContacts(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("getContacts", unicode(profile_key), callback=callback, errback=errback) + return self._callback("getContacts", str(profile_key), 
callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='as', async_callbacks=None) def getContactsFromGroup(self, group, profile_key="@DEFAULT@"): - return self._callback("getContactsFromGroup", unicode(group), unicode(profile_key)) + return self._callback("getContactsFromGroup", str(group), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='asass', out_signature='a{sa{ss}}', async_callbacks=None) def getEntitiesData(self, jids, keys, profile): - return self._callback("getEntitiesData", jids, keys, unicode(profile)) + return self._callback("getEntitiesData", jids, keys, str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sass', out_signature='a{ss}', async_callbacks=None) def getEntityData(self, jid, keys, profile): - return self._callback("getEntityData", unicode(jid), keys, unicode(profile)) + return self._callback("getEntityData", str(jid), keys, str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{ss}}', async_callbacks=('callback', 'errback')) def getFeatures(self, profile_key, callback=None, errback=None): - return self._callback("getFeatures", unicode(profile_key), callback=callback, errback=errback) + return self._callback("getFeatures", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def getMainResource(self, contact_jid, profile_key="@DEFAULT@"): - return self._callback("getMainResource", unicode(contact_jid), unicode(profile_key)) + return self._callback("getMainResource", str(contact_jid), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssss', out_signature='s', async_callbacks=None) def getParamA(self, name, category, attribute="value", profile_key="@DEFAULT@"): - return 
self._callback("getParamA", unicode(name), unicode(category), unicode(attribute), unicode(profile_key)) + return self._callback("getParamA", str(name), str(category), str(attribute), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='as', @@ -352,13 +352,13 @@ in_signature='iss', out_signature='s', async_callbacks=('callback', 'errback')) def getParamsUI(self, security_limit=-1, app='', profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("getParamsUI", security_limit, unicode(app), unicode(profile_key), callback=callback, errback=errback) + return self._callback("getParamsUI", security_limit, str(app), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{s(sia{ss})}}', async_callbacks=None) def getPresenceStatuses(self, profile_key="@DEFAULT@"): - return self._callback("getPresenceStatuses", unicode(profile_key)) + return self._callback("getPresenceStatuses", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='', @@ -376,73 +376,73 @@ in_signature='s', out_signature='a{ss}', async_callbacks=None) def getWaitingSub(self, profile_key="@DEFAULT@"): - return self._callback("getWaitingSub", unicode(profile_key)) + return self._callback("getWaitingSub", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssiba{ss}s', out_signature='a(sdssa{ss}a{ss}sa{ss})', async_callbacks=('callback', 'errback')) def historyGet(self, from_jid, to_jid, limit, between=True, filters='', profile="@NONE@", callback=None, errback=None): - return self._callback("historyGet", unicode(from_jid), unicode(to_jid), limit, between, filters, unicode(profile), callback=callback, errback=errback) + return self._callback("historyGet", str(from_jid), str(to_jid), limit, between, filters, str(profile), callback=callback, errback=errback) 
@dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def isConnected(self, profile_key="@DEFAULT@"): - return self._callback("isConnected", unicode(profile_key)) + return self._callback("isConnected", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sa{ss}s', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def launchAction(self, callback_id, data, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("launchAction", unicode(callback_id), data, unicode(profile_key), callback=callback, errback=errback) + return self._callback("launchAction", str(callback_id), data, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def loadParamsTemplate(self, filename): - return self._callback("loadParamsTemplate", unicode(filename)) + return self._callback("loadParamsTemplate", str(filename)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def menuHelpGet(self, menu_id, language): - return self._callback("menuHelpGet", unicode(menu_id), unicode(language)) + return self._callback("menuHelpGet", str(menu_id), str(language)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sasa{ss}is', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def menuLaunch(self, menu_type, path, data, security_limit, profile_key, callback=None, errback=None): - return self._callback("menuLaunch", unicode(menu_type), path, data, security_limit, unicode(profile_key), callback=callback, errback=errback) + return self._callback("menuLaunch", str(menu_type), path, data, security_limit, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='si', out_signature='a(ssasasa{ss})', 
async_callbacks=None) def menusGet(self, language, security_limit): - return self._callback("menusGet", unicode(language), security_limit) + return self._callback("menusGet", str(language), security_limit) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def messageEncryptionGet(self, to_jid, profile_key): - return self._callback("messageEncryptionGet", unicode(to_jid), unicode(profile_key)) + return self._callback("messageEncryptionGet", str(to_jid), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssbs', out_signature='', async_callbacks=('callback', 'errback')) def messageEncryptionStart(self, to_jid, namespace='', replace=False, profile_key="@NONE@", callback=None, errback=None): - return self._callback("messageEncryptionStart", unicode(to_jid), unicode(namespace), replace, unicode(profile_key), callback=callback, errback=errback) + return self._callback("messageEncryptionStart", str(to_jid), str(namespace), replace, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='', async_callbacks=('callback', 'errback')) def messageEncryptionStop(self, to_jid, profile_key, callback=None, errback=None): - return self._callback("messageEncryptionStop", unicode(to_jid), unicode(profile_key), callback=callback, errback=errback) + return self._callback("messageEncryptionStop", str(to_jid), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sa{ss}a{ss}sa{ss}s', out_signature='', async_callbacks=('callback', 'errback')) def messageSend(self, to_jid, message, subject={}, mess_type="auto", extra={}, profile_key="@NONE@", callback=None, errback=None): - return self._callback("messageSend", unicode(to_jid), message, subject, unicode(mess_type), extra, unicode(profile_key), callback=callback, errback=errback) + 
return self._callback("messageSend", str(to_jid), message, subject, str(mess_type), extra, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='a{ss}', @@ -454,37 +454,37 @@ in_signature='sis', out_signature='', async_callbacks=None) def paramsRegisterApp(self, xml, security_limit=-1, app=''): - return self._callback("paramsRegisterApp", unicode(xml), security_limit, unicode(app)) + return self._callback("paramsRegisterApp", str(xml), security_limit, str(app)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sss', out_signature='', async_callbacks=('callback', 'errback')) def profileCreate(self, profile, password='', component='', callback=None, errback=None): - return self._callback("profileCreate", unicode(profile), unicode(password), unicode(component), callback=callback, errback=errback) + return self._callback("profileCreate", str(profile), str(password), str(component), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def profileIsSessionStarted(self, profile_key="@DEFAULT@"): - return self._callback("profileIsSessionStarted", unicode(profile_key)) + return self._callback("profileIsSessionStarted", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='s', async_callbacks=None) def profileNameGet(self, profile_key="@DEFAULT@"): - return self._callback("profileNameGet", unicode(profile_key)) + return self._callback("profileNameGet", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=None) def profileSetDefault(self, profile): - return self._callback("profileSetDefault", unicode(profile)) + return self._callback("profileSetDefault", str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', 
out_signature='b', async_callbacks=('callback', 'errback')) def profileStartSession(self, password='', profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("profileStartSession", unicode(password), unicode(profile_key), callback=callback, errback=errback) + return self._callback("profileStartSession", str(password), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='bb', out_signature='as', @@ -496,61 +496,61 @@ in_signature='ss', out_signature='a{ss}', async_callbacks=None) def progressGet(self, id, profile): - return self._callback("progressGet", unicode(id), unicode(profile)) + return self._callback("progressGet", str(id), str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{sa{ss}}}', async_callbacks=None) def progressGetAll(self, profile): - return self._callback("progressGetAll", unicode(profile)) + return self._callback("progressGetAll", str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{sa{ss}}}', async_callbacks=None) def progressGetAllMetadata(self, profile): - return self._callback("progressGetAllMetadata", unicode(profile)) + return self._callback("progressGetAllMetadata", str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=('callback', 'errback')) def rosterResync(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("rosterResync", unicode(profile_key), callback=callback, errback=errback) + return self._callback("rosterResync", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def saveParamsTemplate(self, filename): - return self._callback("saveParamsTemplate", unicode(filename)) + return 
self._callback("saveParamsTemplate", str(filename)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def sessionInfosGet(self, profile_key, callback=None, errback=None): - return self._callback("sessionInfosGet", unicode(profile_key), callback=callback, errback=errback) + return self._callback("sessionInfosGet", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sssis', out_signature='', async_callbacks=None) def setParam(self, name, value, category, security_limit=-1, profile_key="@DEFAULT@"): - return self._callback("setParam", unicode(name), unicode(value), unicode(category), security_limit, unicode(profile_key)) + return self._callback("setParam", str(name), str(value), str(category), security_limit, str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssa{ss}s', out_signature='', async_callbacks=None) def setPresence(self, to_jid='', show='', statuses={}, profile_key="@DEFAULT@"): - return self._callback("setPresence", unicode(to_jid), unicode(show), statuses, unicode(profile_key)) + return self._callback("setPresence", str(to_jid), str(show), statuses, str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sss', out_signature='', async_callbacks=None) def subscription(self, sub_type, entity, profile_key="@DEFAULT@"): - return self._callback("subscription", unicode(sub_type), unicode(entity), unicode(profile_key)) + return self._callback("subscription", str(sub_type), str(entity), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssass', out_signature='', async_callbacks=('callback', 'errback')) def updateContact(self, entity_jid, name, groups, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("updateContact", unicode(entity_jid), unicode(name), groups, 
unicode(profile_key), callback=callback, errback=errback) + return self._callback("updateContact", str(entity_jid), str(name), groups, str(profile_key), callback=callback, errback=errback) def __attributes(self, in_sign): """Return arguments to user given a in_sign @@ -590,9 +590,9 @@ i += 1 return attr - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False): """Dynamically add a method to Dbus Bridge""" - inspect_args = inspect.getargspec(method) + inspect_args = inspect.getfullargspec(method) _arguments = inspect_args.args _defaults = list(inspect_args.defaults or []) @@ -606,12 +606,12 @@ [repr(name)] + ( (_arguments + ["callback=callback", "errback=errback"]) - if async + if async_ else _arguments ) ) - if async: + if async_: _arguments.extend(["callback", "errback"]) _defaults.extend([None, None]) @@ -633,7 +633,7 @@ ) exec(code) # FIXME: to the same thing in a cleaner way, without compile/exec method = locals()[name] - async_callbacks = ("callback", "errback") if async else None + async_callbacks = ("callback", "errback") if async_ else None setattr( DbusObject, name, @@ -685,7 +685,7 @@ if e._dbus_error_name == "org.freedesktop.DBus.Error.NotSupported": log.error( _( - u"D-Bus is not launched, please see README to see instructions on how to launch it" + "D-Bus is not launched, please see README to see instructions on how to launch it" ) ) raise BridgeInitError @@ -744,11 +744,11 @@ log.debug("registering DBus bridge method [%s]" % name) self.dbus_bridge.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to Dbus Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [%s] to DBus bridge" % name) - 
self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async) + self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async_) self.register_method(name, method) def addSignal(self, name, int_suffix, signature, doc={}):
--- a/sat/bridge/pb.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/pb.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -46,11 +46,11 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def sendSignalEb(self, failure, signal_name): log.error( - u"Error while sending signal {name}: {msg}".format( + "Error while sending signal {name}: {msg}".format( name=signal_name, msg=failure ) ) @@ -66,27 +66,27 @@ d.addErrback(self.sendSignalEb, name) if to_remove: for handler in to_remove: - log.debug(u"Removing signal handler for dead frontend") + log.debug("Removing signal handler for dead frontend") self.signals_handlers.remove(handler) def _bridgeDeactivateSignals(self): if hasattr(self, "signals_paused"): - log.warning(u"bridge signals already deactivated") + log.warning("bridge signals already deactivated") if self.signals_handler: self.signals_paused.extend(self.signals_handler) else: self.signals_paused = self.signals_handlers self.signals_handlers = [] - log.debug(u"bridge signals have been deactivated") + log.debug("bridge signals have been deactivated") def _bridgeReactivateSignals(self): try: self.signals_handlers = self.signals_paused except AttributeError: - log.debug(u"signals were already activated") + log.debug("signals were already activated") else: del self.signals_paused - log.debug(u"bridge signals have been reactivated") + log.debug("bridge signals have been reactivated") ##METHODS_PART## @@ -102,14 +102,14 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def register_method(self, name, callback): log.debug("registering PB bridge method [%s]" % name) setattr(self.root, "remote_" + name, callback) # 
self.root.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to PB Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method {name} to PB bridge".format(name=name))
--- a/sat/core/constants.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/constants.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -28,21 +28,21 @@ class Const(object): ## Application ## - APP_NAME = u"Salut à Toi" - APP_NAME_SHORT = u"SàT" - APP_NAME_FILE = u"sat" - APP_NAME_FULL = u"{name_short} ({name})".format( + APP_NAME = "Salut à Toi" + APP_NAME_SHORT = "SàT" + APP_NAME_FILE = "sat" + APP_NAME_FULL = "{name_short} ({name})".format( name_short=APP_NAME_SHORT, name=APP_NAME ) APP_VERSION = ( sat.__version__ ) # Please add 'D' at the end of version in sat/VERSION for dev versions - APP_RELEASE_NAME = u"La Cecília" - APP_URL = u"https://salut-a-toi.org" + APP_RELEASE_NAME = "La Cecília" + APP_URL = "https://salut-a-toi.org" ## Runtime ## PLUGIN_EXT = "py" - HISTORY_SKIP = u"skip" + HISTORY_SKIP = "skip" ## Main config ## DEFAULT_BRIDGE = "dbus" @@ -122,15 +122,15 @@ ) MESS_TYPE_ALL = MESS_TYPE_STANDARD + (MESS_TYPE_INFO, MESS_TYPE_AUTO) - MESS_EXTRA_INFO = u"info_type" - EXTRA_INFO_DECR_ERR = u"DECRYPTION_ERROR" - EXTRA_INFO_ENCR_ERR = u"ENCRYPTION_ERROR" + MESS_EXTRA_INFO = "info_type" + EXTRA_INFO_DECR_ERR = "DECRYPTION_ERROR" + EXTRA_INFO_ENCR_ERR = "ENCRYPTION_ERROR" # encryption is a key for plugins - MESS_KEY_ENCRYPTION = u"ENCRYPTION" + MESS_KEY_ENCRYPTION = "ENCRYPTION" # encrypted is a key for frontends - MESS_KEY_ENCRYPTED = u"encrypted" - MESS_KEY_TRUSTED = u"trusted" + MESS_KEY_ENCRYPTED = "encrypted" + MESS_KEY_TRUSTED = "trusted" ## Chat ## CHAT_ONE2ONE = "one2one" @@ -162,110 +162,44 @@ ## Directories ## # directory for components specific data - COMPONENTS_DIR = u"components" - CACHE_DIR = u"cache" + COMPONENTS_DIR = "components" + CACHE_DIR = "cache" # files in file dir are stored for long term # files dir is global, i.e. 
for all profiles - FILES_DIR = u"files" + FILES_DIR = "files" # FILES_LINKS_DIR is a directory where files owned by a specific profile # are linked to the global files directory. This way the directory can be # shared per profiles while keeping global directory where identical files # shared between different profiles are not duplicated. - FILES_LINKS_DIR = u"files_links" + FILES_LINKS_DIR = "files_links" # FILES_TMP_DIR is where profile's partially transfered files are put. # Once transfer is completed, they are moved to FILES_DIR - FILES_TMP_DIR = u"files_tmp" - - ## Configuration ## - if ( - BaseDirectory - ): # skipped when xdg module is not available (should not happen in backend) - if "org.salutatoi.cagou" in BaseDirectory.__file__: - # FIXME: hack to make config read from the right location on Android - # TODO: fix it in a more proper way - - # we need to use Android API to get downloads directory - import os.path - from jnius import autoclass - - Environment = autoclass("android.os.Environment") - - BaseDirectory = None - DEFAULT_CONFIG = { - "local_dir": "/data/data/org.salutatoi.cagou/app", - "media_dir": "/data/data/org.salutatoi.cagou/files/app/media", - # FIXME: temporary location for downloads, need to call API properly - "downloads_dir": os.path.join( - Environment.getExternalStoragePublicDirectory( - Environment.DIRECTORY_DOWNLOADS - ).getAbsolutePath(), - APP_NAME_FILE, - ), - "pid_dir": "%(local_dir)s", - "log_dir": "%(local_dir)s", - } - CONFIG_FILES = [ - "/data/data/org.salutatoi.cagou/files/app/android/" - + APP_NAME_FILE - + ".conf" - ] - else: - import os - CONFIG_PATHS = ( - ["/etc/", "~/", "~/.", "", "."] - + [ - "%s/" % path - for path in list(BaseDirectory.load_config_paths(APP_NAME_FILE)) - ] - ) - - # on recent versions of Flatpak, FLATPAK_ID is set at run time - # it seems that this is not the case on older versions, - # but FLATPAK_SANDBOX_DIR seems set then - if os.getenv('FLATPAK_ID') or os.getenv('FLATPAK_SANDBOX_DIR'): - # for 
Flatpak, the conf can't be set in /etc or $HOME, so we have - # to add /app - CONFIG_PATHS.append('/app/') - - ## Configuration ## - DEFAULT_CONFIG = { - "media_dir": "/usr/share/" + APP_NAME_FILE + "/media", - "local_dir": BaseDirectory.save_data_path(APP_NAME_FILE), - "downloads_dir": "~/Downloads/" + APP_NAME_FILE, - "pid_dir": "%(local_dir)s", - "log_dir": "%(local_dir)s", - } - - # List of the configuration filenames sorted by ascending priority - CONFIG_FILES = [ - realpath(expanduser(path) + APP_NAME_FILE + ".conf") - for path in CONFIG_PATHS - ] + FILES_TMP_DIR = "files_tmp" ## Templates ## - TEMPLATE_TPL_DIR = u"templates" - TEMPLATE_THEME_DEFAULT = u"default" - TEMPLATE_STATIC_DIR = u"static" - KEY_LANG = u"lang" # templates i18n + TEMPLATE_TPL_DIR = "templates" + TEMPLATE_THEME_DEFAULT = "default" + TEMPLATE_STATIC_DIR = "static" + KEY_LANG = "lang" # templates i18n ## Plugins ## # PLUGIN_INFO keys # XXX: we use PI instead of PLUG_INFO which would normally be used # to make the header more readable - PI_NAME = u"name" - PI_IMPORT_NAME = u"import_name" - PI_MAIN = u"main" - PI_HANDLER = u"handler" + PI_NAME = "name" + PI_IMPORT_NAME = "import_name" + PI_MAIN = "main" + PI_HANDLER = "handler" PI_TYPE = ( - u"type" + "type" ) # FIXME: should be types, and should handle single unicode type or tuple of types (e.g. 
"blog" and "import") - PI_MODES = u"modes" - PI_PROTOCOLS = u"protocols" - PI_DEPENDENCIES = u"dependencies" - PI_RECOMMENDATIONS = u"recommendations" - PI_DESCRIPTION = u"description" - PI_USAGE = u"usage" + PI_MODES = "modes" + PI_PROTOCOLS = "protocols" + PI_DEPENDENCIES = "dependencies" + PI_RECOMMENDATIONS = "recommendations" + PI_DESCRIPTION = "description" + PI_USAGE = "usage" # Types PLUG_TYPE_XEP = "XEP" @@ -387,8 +321,8 @@ META_TYPE_OVERWRITE = "overwrite" ## HARD-CODED ACTIONS IDS (generated with uuid.uuid4) ## - AUTHENTICATE_PROFILE_ID = u"b03bbfa8-a4ae-4734-a248-06ce6c7cf562" - CHANGE_XMPP_PASSWD_ID = u"878b9387-de2b-413b-950f-e424a147bcd0" + AUTHENTICATE_PROFILE_ID = "b03bbfa8-a4ae-4734-a248-06ce6c7cf562" + CHANGE_XMPP_PASSWD_ID = "878b9387-de2b-413b-950f-e424a147bcd0" ## Text values ## BOOL_TRUE = "true" @@ -399,32 +333,32 @@ HISTORY_LIMIT_NONE = -2 ## Progress error special values ## - PROGRESS_ERROR_DECLINED = u"declined" # session has been declined by peer user + PROGRESS_ERROR_DECLINED = "declined" # session has been declined by peer user ## Files ## FILE_TYPE_DIRECTORY = "directory" FILE_TYPE_FILE = "file" ## Permissions management ## - ACCESS_PERM_READ = u"read" - ACCESS_PERM_WRITE = u"write" + ACCESS_PERM_READ = "read" + ACCESS_PERM_WRITE = "write" ACCESS_PERMS = {ACCESS_PERM_READ, ACCESS_PERM_WRITE} - ACCESS_TYPE_PUBLIC = u"public" - ACCESS_TYPE_WHITELIST = u"whitelist" + ACCESS_TYPE_PUBLIC = "public" + ACCESS_TYPE_WHITELIST = "whitelist" ACCESS_TYPES = (ACCESS_TYPE_PUBLIC, ACCESS_TYPE_WHITELIST) ## Common data keys ## - KEY_THUMBNAILS = u"thumbnails" - KEY_PROGRESS_ID = u"progress_id" + KEY_THUMBNAILS = "thumbnails" + KEY_PROGRESS_ID = "progress_id" ## Common extra keys/values ## - KEY_ORDER_BY = u"order_by" + KEY_ORDER_BY = "order_by" - ORDER_BY_CREATION = u'creation' - ORDER_BY_MODIFICATION = u'modification' + ORDER_BY_CREATION = 'creation' + ORDER_BY_MODIFICATION = 'modification' # internationalisation - DEFAULT_LOCALE = u"en_GB" + 
DEFAULT_LOCALE = "en_GB" ## Misc ## SAVEFILE_DATABASE = APP_NAME_FILE + ".db" @@ -434,11 +368,11 @@ NO_LIMIT = -1 # used in bridge when a integer value is expected DEFAULT_MAX_AGE = 1209600 # default max age of cached files, in seconds HASH_SHA1_EMPTY = "da39a3ee5e6b4b0d3255bfef95601890afd80709" - STANZA_NAMES = (u"iq", u"message", u"presence") + STANZA_NAMES = ("iq", "message", "presence") # Stream Hooks - STREAM_HOOK_SEND = u"send" - STREAM_HOOK_RECEIVE = u"receive" + STREAM_HOOK_SEND = "send" + STREAM_HOOK_RECEIVE = "receive" @classmethod def LOG_OPTIONS(cls): @@ -456,7 +390,7 @@ @classmethod def bool(cls, value): """@return (bool): bool value for associated constant""" - assert isinstance(value, basestring) + assert isinstance(value, str) return value.lower() in (cls.BOOL_TRUE, "1", "yes", "on") @classmethod @@ -464,3 +398,72 @@ """@return (str): constant associated to bool value""" assert isinstance(value, bool) return cls.BOOL_TRUE if value else cls.BOOL_FALSE + + + +## Configuration ## +if ( + BaseDirectory +): # skipped when xdg module is not available (should not happen in backend) + if "org.salutatoi.cagou" in BaseDirectory.__file__: + # FIXME: hack to make config read from the right location on Android + # TODO: fix it in a more proper way + + # we need to use Android API to get downloads directory + import os.path + from jnius import autoclass + + Environment = autoclass("android.os.Environment") + + BaseDirectory = None + Const.DEFAULT_CONFIG = { + "local_dir": "/data/data/org.salutatoi.cagou/app", + "media_dir": "/data/data/org.salutatoi.cagou/files/app/media", + # FIXME: temporary location for downloads, need to call API properly + "downloads_dir": os.path.join( + Environment.getExternalStoragePublicDirectory( + Environment.DIRECTORY_DOWNLOADS + ).getAbsolutePath(), + Const.APP_NAME_FILE, + ), + "pid_dir": "%(local_dir)s", + "log_dir": "%(local_dir)s", + } + Const.CONFIG_FILES = [ + "/data/data/org.salutatoi.cagou/files/app/android/" + + 
Const.APP_NAME_FILE + + ".conf" + ] + else: + import os + Const.CONFIG_PATHS = ( + ["/etc/", "~/", "~/.", "", "."] + + [ + "%s/" % path + for path in list(BaseDirectory.load_config_paths(Const.APP_NAME_FILE)) + ] + ) + + # on recent versions of Flatpak, FLATPAK_ID is set at run time + # it seems that this is not the case on older versions, + # but FLATPAK_SANDBOX_DIR seems set then + if os.getenv('FLATPAK_ID') or os.getenv('FLATPAK_SANDBOX_DIR'): + # for Flatpak, the conf can't be set in /etc or $HOME, so we have + # to add /app + Const.CONFIG_PATHS.append('/app/') + + ## Configuration ## + Const.DEFAULT_CONFIG = { + "media_dir": "/usr/share/" + Const.APP_NAME_FILE + "/media", + "local_dir": BaseDirectory.save_data_path(Const.APP_NAME_FILE), + "downloads_dir": "~/Downloads/" + Const.APP_NAME_FILE, + "pid_dir": "%(local_dir)s", + "log_dir": "%(local_dir)s", + } + + # List of the configuration filenames sorted by ascending priority + Const.CONFIG_FILES = [ + realpath(expanduser(path) + Const.APP_NAME_FILE + ".conf") + for path in Const.CONFIG_PATHS + ] +
--- a/sat/core/exceptions.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/exceptions.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT Exceptions
--- a/sat/core/i18n.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/i18n.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -26,7 +26,7 @@ import gettext - _ = gettext.translation("sat", "i18n", fallback=True).ugettext + _ = gettext.translation("sat", "i18n", fallback=True).gettext _translators = {None: gettext.NullTranslations()} def languageSwitch(lang=None): @@ -34,7 +34,7 @@ _translators[lang] = gettext.translation( "sat", languages=[lang], fallback=True ) - _translators[lang].install(unicode=True) + _translators[lang].install() except ImportError:
--- a/sat/core/log.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/log.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -67,7 +67,7 @@ """ if kwargs.get('exc_info', False): message = self.addTraceback(message) - print message + print(message) def log(self, level, message, **kwargs): """Print message @@ -197,7 +197,7 @@ """update existing logger to the class needed for this backend""" if self.LOGGER_CLASS is None: return - for name, logger in _loggers.items(): + for name, logger in list(_loggers.items()): _loggers[name] = self.LOGGER_CLASS(logger) def preTreatment(self): @@ -235,7 +235,7 @@ def configureColors(self, colors, force_colors, levels_taints_dict): if colors: # if color are used, we need to handle levels_taints_dict - for level in levels_taints_dict.keys(): + for level in list(levels_taints_dict.keys()): # we wants levels in uppercase to correspond to contstants levels_taints_dict[level.upper()] = levels_taints_dict[level] taints = self.__class__.taints = {} @@ -283,7 +283,7 @@ options = None if output not in (C.LOG_OPT_OUTPUT_DEFAULT, C.LOG_OPT_OUTPUT_FILE, C.LOG_OPT_OUTPUT_MEMORY): - raise ValueError(u"Invalid output [%s]" % output) + raise ValueError("Invalid output [%s]" % output) if output == C.LOG_OPT_OUTPUT_DEFAULT: # no option for defaut handler @@ -303,7 +303,7 @@ handlers[output] = limit if options: # we should not have unparsed options - raise ValueError(u"options [{options}] are not supported for {handler} output".format(options=options, handler=output)) + raise ValueError("options [{options}] are not supported for {handler} output".format(options=options, handler=output)) @staticmethod def memoryGet(size=None):
--- a/sat/core/log_config.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/log_config.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -125,7 +125,7 @@ ) or self.LOGGER_CLASS.force_colors: message = event.get("message", tuple()) if message: - event["message"] = ("".join(message),) # must be a tuple + event["message"] = (b"".join(message),) # must be a tuple observer(event) # we can now call the original observer return observer_hook @@ -158,7 +158,7 @@ observer = self.changeObserver(observer, can_colors=True) else: # we use print because log system is not fully initialized - print("Unmanaged observer [%s]" % observer) + print(("Unmanaged observer [%s]" % observer)) return observer self.observers[ori] = observer return observer @@ -202,10 +202,10 @@ import types # see https://stackoverflow.com/a/4267590 (thx Chris Morgan/aaronasterling) twisted_log.addObserver = types.MethodType( - addObserverObserver, self.log_publisher, twisted_log.LogPublisher + addObserverObserver, self.log_publisher ) twisted_log.removeObserver = types.MethodType( - removeObserverObserver, self.log_publisher, twisted_log.LogPublisher + removeObserverObserver, self.log_publisher ) # we now change existing observers @@ -282,7 +282,7 @@ if event.get("isError", False) else twisted_logger.info ) - log_method(text.decode("utf-8")) + log_method(text) self.log_publisher._originalAddObserver(twistedObserver) @@ -336,7 +336,7 @@ import sys class SatFormatter(logging.Formatter): - u"""Formatter which manage SàT specificities""" + """Formatter which manage SàT specificities""" _format = fmt _with_profile = "%(profile)s" in fmt @@ -395,7 +395,7 @@ root_logger = logging.getLogger() if len(root_logger.handlers) == 0: - for handler, options in log.handlers.items(): + for handler, options in list(log.handlers.items()): if handler == C.LOG_OPT_OUTPUT_DEFAULT: hdlr = logging.StreamHandler() try: @@ -426,7 +426,7 @@ 
else: raise ValueError("Unknown handler type") else: - root_logger.warning(u"Handlers already set on root logger") + root_logger.warning("Handlers already set on root logger") @staticmethod def memoryGet(size=None):
--- a/sat/core/patches.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/patches.py Tue Aug 13 19:08:41 2019 +0200 @@ -84,7 +84,7 @@ def addHook(self, hook_type, callback): """Add a send or receive hook""" - conflict_msg = (u"Hook conflict: can't add {hook_type} hook {callback}" + conflict_msg = ("Hook conflict: can't add {hook_type} hook {callback}" .format(hook_type=hook_type, callback=callback)) if hook_type == C.STREAM_HOOK_RECEIVE: if callback not in self._onElementHooks: @@ -97,7 +97,7 @@ else: log.warning(conflict_msg) else: - raise ValueError(u"Invalid hook type: {hook_type}" + raise ValueError("Invalid hook type: {hook_type}" .format(hook_type=hook_type)) def onElement(self, element): @@ -161,9 +161,10 @@ def apply(): - # certificate validation - xmlstream.TLSInitiatingInitializer = TLSInitiatingInitializer - client.XMPPClient = XMPPClient + # FIXME: certificate validation is now implemented in Twisted trunk, to be removed + # # certificate validation + # xmlstream.TLSInitiatingInitializer = TLSInitiatingInitializer + # client.XMPPClient = XMPPClient # XmlStream triggers xmlstream.XmlStreamFactory.protocol = XmlStream # jid fix
--- a/sat/core/sat_main.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/sat_main.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -66,8 +66,8 @@ self.plugins = {} # map for short name to whole namespace, self.ns_map = { - u"x-data": xmpp.NS_X_DATA, - u"disco#info": xmpp.NS_DISCO_INFO, + "x-data": xmpp.NS_X_DATA, + "disco#info": xmpp.NS_DISCO_INFO, } # extended by plugins with registerNamespace self.memory = memory.Memory(self) @@ -79,13 +79,13 @@ bridge_module = dynamic_import.bridge(bridge_name) if bridge_module is None: - log.error(u"Can't find bridge module of name {}".format(bridge_name)) + log.error("Can't find bridge module of name {}".format(bridge_name)) sys.exit(1) - log.info(u"using {} bridge".format(bridge_name)) + log.info("using {} bridge".format(bridge_name)) try: self.bridge = bridge_module.Bridge() except exceptions.BridgeInitError: - log.error(u"Bridge can't be initialised, can't start SàT core") + log.error("Bridge can't be initialised, can't start SàT core") sys.exit(1) self.bridge.register_method("getReady", lambda: self.initialised) self.bridge.register_method("getVersion", lambda: self.full_version) @@ -181,7 +181,7 @@ try: return self._version_cache except AttributeError: - self._version_cache = u"{} « {} » ({})".format( + self._version_cache = "{} « {} » ({})".format( version, C.APP_RELEASE_NAME, utils.getRepositoryData(sat) ) return self._version_cache @@ -202,14 +202,14 @@ ui_profile_manager.ProfileManager(self) except Exception as e: log.error( - _(u"Could not initialize backend: {reason}").format( + _("Could not initialize backend: {reason}").format( reason=str(e).decode("utf-8", "ignore") ) ) sys.exit(1) self._addBaseMenus() self.initialised.callback(None) - log.info(_(u"Backend is ready")) + log.info(_("Backend is ready")) def _addBaseMenus(self): """Add base menus""" @@ -246,15 +246,15 @@ except exceptions.MissingModule as e: 
self._unimport_plugin(plugin_path) log.warning( - u"Can't import plugin [{path}] because of an unavailale third party " - u"module:\n{msg}".format( + "Can't import plugin [{path}] because of an unavailale third party " + "module:\n{msg}".format( path=plugin_path, msg=e ) ) continue except exceptions.CancelError as e: log.info( - u"Plugin [{path}] cancelled its own import: {msg}".format( + "Plugin [{path}] cancelled its own import: {msg}".format( path=plugin_path, msg=e ) ) @@ -264,7 +264,7 @@ import traceback log.error( - _(u"Can't import plugin [{path}]:\n{error}").format( + _("Can't import plugin [{path}]:\n{error}").format( path=plugin_path, error=traceback.format_exc() ) ) @@ -274,17 +274,17 @@ plugin_info = mod.PLUGIN_INFO import_name = plugin_info["import_name"] - plugin_modes = plugin_info[u"modes"] = set( - plugin_info.setdefault(u"modes", C.PLUG_MODE_DEFAULT) + plugin_modes = plugin_info["modes"] = set( + plugin_info.setdefault("modes", C.PLUG_MODE_DEFAULT) ) # if the plugin is an entry point, it must work in component mode - if plugin_info[u"type"] == C.PLUG_TYPE_ENTRY_POINT: + if plugin_info["type"] == C.PLUG_TYPE_ENTRY_POINT: # if plugin is an entrypoint, we cache it if C.PLUG_MODE_COMPONENT not in plugin_modes: log.error( _( - u"{type} type must be used with {mode} mode, ignoring plugin" + "{type} type must be used with {mode} mode, ignoring plugin" ).format(type=C.PLUG_TYPE_ENTRY_POINT, mode=C.PLUG_MODE_COMPONENT) ) self._unimport_plugin(plugin_path) @@ -293,8 +293,8 @@ if import_name in plugins_to_import: log.error( _( - u"Name conflict for import name [{import_name}], can't import " - u"plugin [{name}]" + "Name conflict for import name [{import_name}], can't import " + "plugin [{name}]" ).format(**plugin_info) ) continue @@ -320,7 +320,7 @@ is raised """ if import_name in self.plugins: - log.debug(u"Plugin {} already imported, passing".format(import_name)) + log.debug("Plugin {} already imported, passing".format(import_name)) return if not 
import_name: import_name, (plugin_path, mod, plugin_info) = plugins_to_import.popitem() @@ -328,10 +328,10 @@ if not import_name in plugins_to_import: if optional: log.warning( - _(u"Recommended plugin not found: {}").format(import_name) + _("Recommended plugin not found: {}").format(import_name) ) return - msg = u"Dependency not found: {}".format(import_name) + msg = "Dependency not found: {}".format(import_name) log.error(msg) raise ImportError(msg) plugin_path, mod, plugin_info = plugins_to_import.pop(import_name) @@ -340,7 +340,7 @@ for to_import in dependencies + recommendations: if to_import not in self.plugins: log.debug( - u"Recursively import dependency of [%s]: [%s]" + "Recursively import dependency of [%s]: [%s]" % (import_name, to_import) ) try: @@ -349,7 +349,7 @@ ) except ImportError as e: log.warning( - _(u"Can't import plugin {name}: {error}").format( + _("Can't import plugin {name}: {error}").format( name=plugin_info["name"], error=e ) ) @@ -362,13 +362,13 @@ self.plugins[import_name] = getattr(mod, plugin_info["main"])(self) except Exception as e: log.warning( - u'Error while loading plugin "{name}", ignoring it: {error}'.format( + 'Error while loading plugin "{name}", ignoring it: {error}'.format( name=plugin_info["name"], error=e ) ) if optional: return - raise ImportError(u"Error during initiation") + raise ImportError("Error during initiation") if C.bool(plugin_info.get(C.PI_HANDLER, C.BOOL_FALSE)): self.plugins[import_name].is_handler = True else: @@ -386,7 +386,7 @@ # pluging depending on the unloaded one should be unloaded too # for now, just a basic call on plugin.unload is done defers_list = [] - for plugin in self.plugins.itervalues(): + for plugin in self.plugins.values(): try: unload = plugin.unload except AttributeError: @@ -419,7 +419,7 @@ def connectProfile(__=None): if self.isConnected(profile): - log.info(_(u"already connected !")) + log.info(_("already connected !")) return True if self.memory.isComponent(profile): @@ -439,7 
+439,7 @@ if not self.isConnected(profile_key): # isConnected is checked here and not on client # because client is deleted when session is ended - log.info(_(u"not connected !")) + log.info(_("not connected !")) return defer.succeed(None) client = self.getClient(profile_key) return client.entityDisconnect() @@ -468,7 +468,7 @@ pass features = [] - for import_name, plugin in self.plugins.iteritems(): + for import_name, plugin in self.plugins.items(): try: features_d = defer.maybeDeferred(plugin.getFeatures, profile_key) except AttributeError: @@ -485,14 +485,14 @@ ret[name] = data else: log.warning( - u"Error while getting features for {name}: {failure}".format( + "Error while getting features for {name}: {failure}".format( name=name, failure=data ) ) ret[name] = {} return ret - d_list.addCallback(buildFeatures, self.plugins.keys()) + d_list.addCallback(buildFeatures, list(self.plugins.keys())) return d_list def getContacts(self, profile_key): @@ -527,10 +527,10 @@ self.memory.purgeProfileSession(profile) def startService(self): - log.info(u"Salut à toi ô mon frère !") + log.info("Salut à toi ô mon frère !") def stopService(self): - log.info(u"Salut aussi à Rantanplan") + log.info("Salut aussi à Rantanplan") return self.pluginsUnload() def run(self): @@ -576,13 +576,13 @@ @return: list of clients """ if not profile_key: - raise exceptions.DataError(_(u"profile_key must not be empty")) + raise exceptions.DataError(_("profile_key must not be empty")) try: profile = self.memory.getProfileName(profile_key, True) except exceptions.ProfileUnknownError: return [] if profile == C.PROF_KEY_ALL: - return self.profiles.values() + return list(self.profiles.values()) elif profile[0] == "@": # only profile keys can start with "@" raise exceptions.ProfileKeyUnknown return [self.profiles[profile]] @@ -594,9 +594,9 @@ @param name: name of the option @return: unicode representation of the option """ - return unicode(self.memory.getConfig(section, name, "")) + return 
str(self.memory.getConfig(section, name, "")) - def logErrback(self, failure_, msg=_(u"Unexpected error: {failure_}")): + def logErrback(self, failure_, msg=_("Unexpected error: {failure_}")): """Generic errback logging @param msg(unicode): error message ("failure_" key will be use for format) @@ -610,7 +610,7 @@ def registerNamespace(self, short_name, namespace): """associate a namespace to a short name""" if short_name in self.ns_map: - raise exceptions.ConflictError(u"this short name is already used") + raise exceptions.ConflictError("this short name is already used") self.ns_map[short_name] = namespace def getNamespaces(self): @@ -620,7 +620,7 @@ try: return self.ns_map[short_name] except KeyError: - raise exceptions.NotFound(u"namespace {short_name} is not registered" + raise exceptions.NotFound("namespace {short_name} is not registered" .format(short_name=short_name)) def getSessionInfos(self, profile_key): @@ -628,7 +628,7 @@ client = self.getClient(profile_key) data = { "jid": client.jid.full(), - "started": unicode(int(client.started)) + "started": str(int(client.started)) } return defer.succeed(data) @@ -714,9 +714,9 @@ ret = [] for p in plugins: ret.append({ - u"name": p.name, - u"namespace": p.namespace, - u"priority": unicode(p.priority), + "name": p.name, + "namespace": p.namespace, + "priority": str(p.priority), }) return ret @@ -740,7 +740,7 @@ message, subject, mess_type, - {unicode(key): unicode(value) for key, value in extra.items()}, + {str(key): str(value) for key, value in list(extra.items())}, ) def _setPresence(self, to="", show="", statuses=None, profile_key=C.PROF_KEY_NONE): @@ -774,7 +774,7 @@ assert profile to_jid = jid.JID(raw_jid) log.debug( - _(u"subsciption request [%(subs_type)s] for %(jid)s") + _("subsciption request [%(subs_type)s] for %(jid)s") % {"subs_type": subs_type, "jid": to_jid.full()} ) if subs_type == "subscribe": @@ -901,15 +901,15 @@ service_jid = services_jids[idx] if not success: log.warning( - _(u"Can't find 
features for service {service_jid}, ignoring") + _("Can't find features for service {service_jid}, ignoring") .format(service_jid=service_jid.full())) continue if (identities is not None and not set(infos.identities.keys()).issuperset(identities)): continue found_identities = [ - (cat, type_, name or u"") - for (cat, type_), name in infos.identities.iteritems() + (cat, type_, name or "") + for (cat, type_), name in infos.identities.items() ] found_service[service_jid.full()] = found_identities @@ -960,7 +960,7 @@ full_jid = full_jids[idx] if not success: log.warning( - _(u"Can't retrieve {full_jid} infos, ignoring") + _("Can't retrieve {full_jid} infos, ignoring") .format(full_jid=full_jid.full())) continue if infos.features.issuperset(namespaces): @@ -969,8 +969,8 @@ ).issuperset(identities): continue found_identities = [ - (cat, type_, name or u"") - for (cat, type_), name in infos.identities.iteritems() + (cat, type_, name or "") + for (cat, type_), name in infos.identities.items() ] found[full_jid.full()] = found_identities @@ -979,7 +979,7 @@ ## Generic HMI ## def _killAction(self, keep_id, client): - log.debug(u"Killing action {} for timeout".format(keep_id)) + log.debug("Killing action {} for timeout".format(keep_id)) client.actions[keep_id] def actionNew( @@ -998,7 +998,7 @@ Action will be deleted after 30 min. 
@param profile: %(doc_profile)s """ - id_ = unicode(uuid.uuid4()) + id_ = str(uuid.uuid4()) if keep_id is not None: client = self.getClient(profile) action_timer = reactor.callLater(60 * 30, self._killAction, keep_id, client) @@ -1012,7 +1012,7 @@ @param profile: %(doc_profile)s """ client = self.getClient(profile) - return [action_tuple[:-1] for action_tuple in client.actions.itervalues()] + return [action_tuple[:-1] for action_tuple in client.actions.values()] def registerProgressCb( self, progress_id, callback, metadata=None, profile=C.PROF_KEY_NONE @@ -1022,7 +1022,7 @@ metadata = {} client = self.getClient(profile) if progress_id in client._progress_cb: - raise exceptions.ConflictError(u"Progress ID is not unique !") + raise exceptions.ConflictError("Progress ID is not unique !") client._progress_cb[progress_id] = (callback, metadata) def removeProgressCb(self, progress_id, profile): @@ -1031,11 +1031,11 @@ try: del client._progress_cb[progress_id] except KeyError: - log.error(_(u"Trying to remove an unknow progress callback")) + log.error(_("Trying to remove an unknow progress callback")) def _progressGet(self, progress_id, profile): data = self.progressGet(progress_id, profile) - return {k: unicode(v) for k, v in data.iteritems()} + return {k: str(v) for k, v in data.items()} def progressGet(self, progress_id, profile): """Return a dict with progress information @@ -1057,10 +1057,10 @@ def _progressGetAll(self, profile_key): progress_all = self.progressGetAll(profile_key) - for profile, progress_dict in progress_all.iteritems(): - for progress_id, data in progress_dict.iteritems(): - for key, value in data.iteritems(): - data[key] = unicode(value) + for profile, progress_dict in progress_all.items(): + for progress_id, data in progress_dict.items(): + for key, value in data.items(): + data[key] = str(value) return progress_all def progressGetAllMetadata(self, profile_key): @@ -1082,7 +1082,7 @@ for ( progress_id, (__, progress_metadata), - ) in 
client._progress_cb.iteritems(): + ) in client._progress_cb.items(): progress_dict[progress_id] = progress_metadata return progress_all @@ -1101,7 +1101,7 @@ profile = client.profile progress_dict = {} progress_all[profile] = progress_dict - for progress_id, (progress_cb, __) in client._progress_cb.iteritems(): + for progress_id, (progress_cb, __) in client._progress_cb.items(): progress_dict[progress_id] = progress_cb(progress_id, profile) return progress_all @@ -1121,7 +1121,7 @@ callback_id = str(uuid.uuid4()) else: if callback_id in self._cb_map: - raise exceptions.ConflictError(_(u"id already registered")) + raise exceptions.ConflictError(_("id already registered")) self._cb_map[callback_id] = (callback, args, kwargs) if "one_shot" in kwargs: # One Shot callback are removed after 30 min @@ -1163,7 +1163,7 @@ profile = self.memory.getProfileName(profile_key) if not profile: raise exceptions.ProfileUnknownError( - _(u"trying to launch action with a non-existant profile") + _("trying to launch action with a non-existant profile") ) else: profile = client.profile @@ -1179,7 +1179,7 @@ try: callback, args, kwargs = self._cb_map[callback_id] except KeyError: - raise exceptions.DataError(u"Unknown callback id {}".format(callback_id)) + raise exceptions.DataError("Unknown callback id {}".format(callback_id)) if kwargs.get("with_data", False): if data is None: @@ -1210,7 +1210,7 @@ def importMenu(self, path, callback, security_limit=C.NO_SECURITY_LIMIT, help_string="", type_=C.MENU_GLOBAL): - """register a new menu for frontends + r"""register a new menu for frontends @param path(iterable[unicode]): path to go to the menu (category/subcategory/.../item) (e.g.: ("File", "Open")) @@ -1245,7 +1245,7 @@ if callable(callback): callback_id = self.registerCallback(callback, with_data=True) - elif isinstance(callback, basestring): + elif isinstance(callback, str): # The callback is already registered callback_id = callback try: @@ -1256,7 +1256,7 @@ else: raise 
exceptions.DataError("Unknown callback type") - for menu_data in self._menus.itervalues(): + for menu_data in self._menus.values(): if menu_data["path"] == path and menu_data["type"] == type_: raise exceptions.ConflictError( _("A menu with the same path and type already exists") @@ -1267,7 +1267,7 @@ if menu_key in self._menus_paths: raise exceptions.ConflictError( - u"this menu path is already used: {path} ({menu_key})".format( + "this menu path is already used: {path} ({menu_key})".format( path=path_canonical, menu_key=menu_key ) ) @@ -1300,7 +1300,7 @@ - help_url: link to a page with more complete documentation (TODO) """ ret = [] - for menu_id, menu_data in self._menus.iteritems(): + for menu_id, menu_data in self._menus.items(): type_ = menu_data["type"] path = menu_data["path"] menu_security_limit = menu_data["security_limit"] @@ -1339,7 +1339,7 @@ callback_id = self._menus_paths[menu_key] except KeyError: raise exceptions.NotFound( - u"Can't find menu {path} ({menu_type})".format( + "Can't find menu {path} ({menu_type})".format( path=canonical_path, menu_type=menu_type ) )
--- a/sat/core/xmpp.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/xmpp.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -40,18 +40,18 @@ from sat.memory import encryption from sat.memory import persistent from sat.tools import xml_tools -from zope.interface import implements +from zope.interface import implementer log = getLogger(__name__) -NS_X_DATA = u"jabber:x:data" -NS_DISCO_INFO = u"http://jabber.org/protocol/disco#info" -NS_XML_ELEMENT = u"urn:xmpp:xml-element" -NS_ROSTER_VER = u"urn:xmpp:features:rosterver" +NS_X_DATA = "jabber:x:data" +NS_DISCO_INFO = "http://jabber.org/protocol/disco#info" +NS_XML_ELEMENT = "urn:xmpp:xml-element" +NS_ROSTER_VER = "urn:xmpp:features:rosterver" # we use 2 "@" which is illegal in a jid, to be sure we are not mixing keys # with roster jids -ROSTER_VER_KEY = u"@version@" +ROSTER_VER_KEY = "@version@" class SatXMPPEntity(object): @@ -65,9 +65,9 @@ clientConnectionFailed_ori = factory.clientConnectionFailed clientConnectionLost_ori = factory.clientConnectionLost factory.clientConnectionFailed = partial( - self.connectionTerminated, term_type=u"failed", cb=clientConnectionFailed_ori) + self.connectionTerminated, term_type="failed", cb=clientConnectionFailed_ori) factory.clientConnectionLost = partial( - self.connectionTerminated, term_type=u"lost", cb=clientConnectionLost_ori) + self.connectionTerminated, term_type="lost", cb=clientConnectionLost_ori) factory.maxRetries = max_retries factory.maxDelay = 30 @@ -87,7 +87,7 @@ self.encryption = encryption.EncryptionHandler(self) def __unicode__(self): - return u"Client instance for profile {profile}".format(profile=self.profile) + return "Client instance for profile {profile}".format(profile=self.profile) def __str__(self): return self.__unicode__.encode('utf-8') @@ -206,11 +206,11 @@ def logPluginResults(results): all_succeed = all([success for success, result in results]) if not 
all_succeed: - log.error(_(u"Plugins initialisation error")) + log.error(_("Plugins initialisation error")) for idx, (success, result) in enumerate(results): if not success: log.error( - u"error (plugin %(name)s): %(failure)s" + "error (plugin %(name)s): %(failure)s" % { "name": plugin_conn_cb[idx][0]._info["import_name"], "failure": result, @@ -226,11 +226,11 @@ self._connected_d = None def _disconnectionEb(self, failure_): - log.error(_(u"Error while disconnecting: {}".format(failure_))) + log.error(_("Error while disconnecting: {}".format(failure_))) def _authd(self, xmlstream): super(SatXMPPEntity, self)._authd(xmlstream) - log.debug(_(u"{profile} identified").format(profile=self.profile)) + log.debug(_("{profile} identified").format(profile=self.profile)) self.streamInitialized() def _finish_connection(self, __): @@ -238,7 +238,7 @@ def streamInitialized(self): """Called after _authd""" - log.debug(_(u"XML stream is initialized")) + log.debug(_("XML stream is initialized")) if not self.host_app.trigger.point("xml_init", self): return self.postStreamInit() @@ -246,7 +246,7 @@ def postStreamInit(self): """Workflow after stream initalisation.""" log.info( - _(u"********** [{profile}] CONNECTED **********").format(profile=self.profile) + _("********** [{profile}] CONNECTED **********").format(profile=self.profile) ) # the following Deferred is used to know when we are connected @@ -273,7 +273,7 @@ def initializationFailed(self, reason): log.error( _( - u"ERROR: XMPP connection failed for profile '%(profile)s': %(reason)s" + "ERROR: XMPP connection failed for profile '%(profile)s': %(reason)s" % {"profile": self.profile, "reason": reason} ) ) @@ -306,17 +306,17 @@ if reason is not None and not isinstance(reason.value, internet_error.ConnectionDone): try: - reason_str = unicode(reason.value) + reason_str = str(reason.value) except Exception: # FIXME: workaround for Android were p4a strips docstrings # while Twisted use docstring in __str__ # TODO: create a ticket 
upstream, Twisted should work when optimization # is used - reason_str = unicode(reason.value.__class__) - log.warning(u"Connection {term_type}: {reason}".format( + reason_str = str(reason.value.__class__) + log.warning("Connection {term_type}: {reason}".format( term_type = term_type, reason=reason_str)) - if not self.host_app.trigger.point(u"connection_" + term_type, connector, reason): + if not self.host_app.trigger.point("connection_" + term_type, connector, reason): return return cb(connector, reason) @@ -327,7 +327,7 @@ Retrying is disabled too, as it makes no sense to try without network, and it may use resources (notably battery on mobiles). """ - log.info(_(u"stopping connection because of network disabled")) + log.info(_("stopping connection because of network disabled")) self.factory.continueTrying = 0 self._network_disabled = True if self.xmlstream is not None: @@ -344,13 +344,13 @@ except AttributeError: # connection has not been stopped by networkDisabled # we don't have to restart it - log.debug(u"no connection to restart") + log.debug("no connection to restart") return else: del self._network_disabled if not network_disabled: - raise exceptions.InternalError(u"network_disabled should be True") - log.info(_(u"network is available, trying to connect")) + raise exceptions.InternalError("network_disabled should be True") + log.info(_("network is available, trying to connect")) # we want to be sure to start fresh self.factory.resetDelay() # we have a saved connector, meaning the connection has been stopped previously @@ -378,23 +378,23 @@ self.profile ) # and we remove references to this client log.info( - _(u"********** [{profile}] DISCONNECTED **********").format( + _("********** [{profile}] DISCONNECTED **********").format( profile=self.profile ) ) if not self.conn_deferred.called: if reason is None: - err = error.StreamError(u"Server unexpectedly closed the connection") + err = error.StreamError("Server unexpectedly closed the connection") else: err = 
reason try: if err.value.args[0][0][2] == "certificate verify failed": err = exceptions.InvalidCertificate( - _(u"Your server certificate is not valid " - u"(its identity can't be checked).\n\n" - u"This should never happen and may indicate that " - u"somebody is trying to spy on you.\n" - u"Please contact your server administrator.")) + _("Your server certificate is not valid " + "(its identity can't be checked).\n\n" + "This should never happen and may indicate that " + "somebody is trying to spy on you.\n" + "Please contact your server administrator.")) self.factory.stopTrying() try: # with invalid certificate, we should not retry to connect @@ -434,7 +434,7 @@ def entityDisconnect(self): if not self.host_app.trigger.point("disconnecting", self): return - log.info(_(u"Disconnecting...")) + log.info(_("Disconnecting...")) self.stopService() if self._connected_d is not None: return self._connected_d @@ -443,7 +443,7 @@ ## sending ## - def IQ(self, type_=u"set", timeout=60): + def IQ(self, type_="set", timeout=60): """shortcut to create an IQ element managing deferred @param type_(unicode): IQ type ('set' or 'get') @@ -486,11 +486,11 @@ if data["uid"]: # key must be present but can be set to '' # by a plugin to avoid id on purpose message_elt["id"] = data["uid"] - for lang, subject in data["subject"].iteritems(): + for lang, subject in data["subject"].items(): subject_elt = message_elt.addElement("subject", content=subject) if lang: subject_elt[(C.NS_XML, "lang")] = lang - for lang, message in data["message"].iteritems(): + for lang, message in data["message"].items(): body_elt = message_elt.addElement("body", content=message) if lang: body_elt[(C.NS_XML, "lang")] = lang @@ -499,7 +499,7 @@ except KeyError: if "thread_parent" in data["extra"]: raise exceptions.InternalError( - u"thread_parent found while there is not associated thread" + "thread_parent found while there is not associated thread" ) else: thread_elt = message_elt.addElement("thread", content=thread) 
@@ -546,7 +546,7 @@ data = { # dict is similar to the one used in client.onMessage "from": self.jid, "to": to_jid, - "uid": uid or unicode(uuid.uuid4()), + "uid": uid or str(uuid.uuid4()), "message": message, "subject": subject, "type": mess_type, @@ -599,15 +599,15 @@ ): return defer.succeed(None) - log.debug(_(u"Sending message (type {type}, to {to})") + log.debug(_("Sending message (type {type}, to {to})") .format(type=data["type"], to=to_jid.full())) pre_xml_treatments.addCallback(lambda __: self.generateMessageXML(data)) pre_xml_treatments.chainDeferred(post_xml_treatments) post_xml_treatments.addCallback(self.sendMessageData) if send_only: - log.debug(_(u"Triggers, storage and echo have been inhibited by the " - u"'send_only' parameter")) + log.debug(_("Triggers, storage and echo have been inhibited by the " + "'send_only' parameter")) else: self.addPostXmlCallbacks(post_xml_treatments) post_xml_treatments.addErrback(self._cancelErrorTrap) @@ -625,22 +625,22 @@ @param data: message data dictionnary @param client: profile's client """ - if data[u"type"] != C.MESS_TYPE_GROUPCHAT: + if data["type"] != C.MESS_TYPE_GROUPCHAT: # we don't add groupchat message to history, as we get them back # and they will be added then - if data[u"message"] or data[u"subject"]: # we need a message to store + if data["message"] or data["subject"]: # we need a message to store self.host_app.memory.addToHistory(self, data) else: log.warning( - u"No message found" + "No message found" ) # empty body should be managed by plugins before this point return data def messageGetBridgeArgs(self, data): """Generate args to use with bridge from data dict""" - return (data[u"uid"], data[u"timestamp"], data[u"from"].full(), - data[u"to"].full(), data[u"message"], data[u"subject"], - data[u"type"], data[u"extra"]) + return (data["uid"], data["timestamp"], data["from"].full(), + data["to"].full(), data["message"], data["subject"], + data["type"], data["extra"]) def messageSendToBridge(self, data): 
@@ -649,10 +649,10 @@ @param data: message data dictionnary @param client: profile's client """ - if data[u"type"] != C.MESS_TYPE_GROUPCHAT: + if data["type"] != C.MESS_TYPE_GROUPCHAT: # we don't send groupchat message to bridge, as we get them back # and they will be added the - if (data[u"message"] or data[u"subject"]): # we need a message to send + if (data["message"] or data["subject"]): # we need a message to send # something # We send back the message, so all frontends are aware of it @@ -661,12 +661,12 @@ profile=self.profile ) else: - log.warning(_(u"No message found")) + log.warning(_("No message found")) return data +@implementer(iwokkel.IDisco) class SatXMPPClient(SatXMPPEntity, wokkel_client.XMPPClient): - implements(iwokkel.IDisco) trigger_suffix = "" is_component = False @@ -681,34 +681,34 @@ # with a web frontend, # etc., we should implement a way to dynamically update identities through the # bridge - self.identities = [disco.DiscoIdentity(u"client", u"pc", C.APP_NAME)] + self.identities = [disco.DiscoIdentity("client", "pc", C.APP_NAME)] if sys.platform == "android": # FIXME: temporary hack as SRV is not working on android # TODO: remove this hack and fix SRV - log.info(u"FIXME: Android hack, ignoring SRV") + log.info("FIXME: Android hack, ignoring SRV") if host is None: host = user_jid.host # for now we consider Android devices to be always phones - self.identities = [disco.DiscoIdentity(u"client", u"phone", C.APP_NAME)] + self.identities = [disco.DiscoIdentity("client", "phone", C.APP_NAME)] hosts_map = host_app.memory.getConfig(None, "hosts_dict", {}) if host is None and user_jid.host in hosts_map: host_data = hosts_map[user_jid.host] - if isinstance(host_data, basestring): + if isinstance(host_data, str): host = host_data elif isinstance(host_data, dict): - if u"host" in host_data: - host = host_data[u"host"] - if u"port" in host_data: - port = host_data[u"port"] + if "host" in host_data: + host = host_data["host"] + if "port" in host_data: + 
port = host_data["port"] else: log.warning( - _(u"invalid data used for host: {data}").format(data=host_data) + _("invalid data used for host: {data}").format(data=host_data) ) host_data = None if host_data is not None: log.info( - u"using {host}:{port} for host {host_ori} as requested in config" + "using {host}:{port} for host {host_ori} as requested in config" .format(host_ori=user_jid.host, host=host, port=port) ) @@ -717,22 +717,22 @@ wokkel_client.XMPPClient.__init__( self, user_jid, password, host or None, port or C.XMPP_C2S_PORT, - check_certificate = self.check_certificate + # check_certificate = self.check_certificate # FIXME: currently disabled with Python 3 port ) SatXMPPEntity.__init__(self, host_app, profile, max_retries) if not self.check_certificate: - msg = (_(u"Certificate validation is deactivated, this is unsecure and " - u"somebody may be spying on you. If you have no good reason to disable " - u"certificate validation, please activate \"Check certificate\" in your " - u"settings in \"Connection\" tab.")) - xml_tools.quickNote(host_app, self, msg, _(u"Security notice"), + msg = (_("Certificate validation is deactivated, this is unsecure and " + "somebody may be spying on you. 
If you have no good reason to disable " + "certificate validation, please activate \"Check certificate\" in your " + "settings in \"Connection\" tab.")) + xml_tools.quickNote(host_app, self, msg, _("Security notice"), level = C.XMLUI_DATA_LVL_WARNING) def _getPluginsList(self): - for p in self.host_app.plugins.itervalues(): - if C.PLUG_MODE_CLIENT in p._info[u"modes"]: + for p in self.host_app.plugins.values(): + if C.PLUG_MODE_CLIENT in p._info["modes"]: yield p def _createSubProtocols(self): @@ -795,7 +795,7 @@ # This trigger point can't cancel the method yield self.host_app.trigger.asyncPoint("sendMessageData", self, mess_data, triggers_no_cancel=True) - self.send(mess_data[u"xml"]) + self.send(mess_data["xml"]) defer.returnValue(mess_data) def feedback(self, to_jid, message, extra=None): @@ -811,11 +811,11 @@ if extra is None: extra = {} self.host_app.bridge.messageNew( - uid=unicode(uuid.uuid4()), + uid=str(uuid.uuid4()), timestamp=time.time(), from_jid=self.jid.full(), to_jid=to_jid.full(), - message={u"": message}, + message={"": message}, subject={}, mess_type=C.MESS_TYPE_INFO, extra=extra, @@ -827,6 +827,7 @@ d.addCallback(lambda __: super(SatXMPPClient, self)._finish_connection(__)) +@implementer(iwokkel.IDisco) class SatXMPPComponent(SatXMPPEntity, component.Component): """XMPP component @@ -835,7 +836,6 @@ Component need to instantiate MessageProtocol itself """ - implements(iwokkel.IDisco) trigger_suffix = ( "Component" ) # used for to distinguish some trigger points set in SatXMPPEntity @@ -857,19 +857,19 @@ self.entry_plugin = host_app.plugins[entry_point] except KeyError: raise exceptions.NotFound( - _(u"The requested entry point ({entry_point}) is not available").format( + _("The requested entry point ({entry_point}) is not available").format( entry_point=entry_point ) ) - self.identities = [disco.DiscoIdentity(u"component", u"generic", C.APP_NAME)] + self.identities = [disco.DiscoIdentity("component", "generic", C.APP_NAME)] # jid is set 
automatically on bind by Twisted for Client, but not for Component self.jid = component_jid if host is None: try: - host = component_jid.host.split(u".", 1)[1] + host = component_jid.host.split(".", 1)[1] except IndexError: - raise ValueError(u"Can't guess host from jid, please specify a host") + raise ValueError("Can't guess host from jid, please specify a host") # XXX: component.Component expect unicode jid, while Client expect jid.JID. # this is not consistent, so we use jid.JID for SatXMPP* component.Component.__init__(self, host, port, component_jid.full(), password) @@ -890,20 +890,20 @@ @raise KeyError: one plugin should be present in self.host_app.plugins but it is not """ - if C.PLUG_MODE_COMPONENT not in current._info[u"modes"]: + if C.PLUG_MODE_COMPONENT not in current._info["modes"]: if not required: return else: log.error( _( - u"Plugin {current_name} is needed for {entry_name}, " - u"but it doesn't handle component mode" + "Plugin {current_name} is needed for {entry_name}, " + "but it doesn't handle component mode" ).format( - current_name=current._info[u"import_name"], - entry_name=self.entry_plugin._info[u"import_name"], + current_name=current._info["import_name"], + entry_name=self.entry_plugin._info["import_name"], ) ) - raise exceptions.InternalError(_(u"invalid plugin mode")) + raise exceptions.InternalError(_("invalid plugin mode")) for import_name in current._info.get(C.PI_DEPENDENCIES, []): # plugins are already loaded as dependencies @@ -960,9 +960,9 @@ if None, mapping will not be done @return(dict): message data """ - if message_elt.name != u"message": + if message_elt.name != "message": log.warning(_( - u"parseMessage used with a non <message/> stanza, ignoring: {xml}" + "parseMessage used with a non <message/> stanza, ignoring: {xml}" .format(xml=message_elt.toXml()))) return {} @@ -974,31 +974,31 @@ c.uri = C.NS_CLIENT elif message_elt.uri != C.NS_CLIENT: log.warning(_( - u"received <message> with a wrong namespace: {xml}" + "received 
<message> with a wrong namespace: {xml}" .format(xml=message_elt.toXml()))) client = self.parent - if not message_elt.hasAttribute(u'to'): + if not message_elt.hasAttribute('to'): message_elt['to'] = client.jid.full() message = {} subject = {} extra = {} data = { - u"from": jid.JID(message_elt["from"]), - u"to": jid.JID(message_elt["to"]), - u"uid": message_elt.getAttribute( - u"uid", unicode(uuid.uuid4()) + "from": jid.JID(message_elt["from"]), + "to": jid.JID(message_elt["to"]), + "uid": message_elt.getAttribute( + "uid", str(uuid.uuid4()) ), # XXX: uid is not a standard attribute but may be added by plugins - u"message": message, - u"subject": subject, - u"type": message_elt.getAttribute(u"type", u"normal"), - u"extra": extra, + "message": message, + "subject": subject, + "type": message_elt.getAttribute("type", "normal"), + "extra": extra, } try: - message_id = data[u"extra"][u"message_id"] = message_elt[u"id"] + message_id = data["extra"]["message_id"] = message_elt["id"] except KeyError: pass else: @@ -1006,11 +1006,11 @@ # message for e in message_elt.elements(C.NS_CLIENT, "body"): - message[e.getAttribute((C.NS_XML, "lang"), "")] = unicode(e) + message[e.getAttribute((C.NS_XML, "lang"), "")] = str(e) # subject for e in message_elt.elements(C.NS_CLIENT, "subject"): - subject[e.getAttribute((C.NS_XML, "lang"), "")] = unicode(e) + subject[e.getAttribute((C.NS_XML, "lang"), "")] = str(e) # delay and timestamp try: @@ -1018,12 +1018,12 @@ except AttributeError: # message_elt._received_timestamp should have been set in onMessage # but if parseMessage is called directly, it can be missing - log.debug(u"missing received timestamp for {message_elt}".format( + log.debug("missing received timestamp for {message_elt}".format( message_elt=message_elt)) received_timestamp = time.time() try: - delay_elt = message_elt.elements(delay.NS_DELAY, "delay").next() + delay_elt = next(message_elt.elements(delay.NS_DELAY, "delay")) except StopIteration: data["timestamp"] = 
received_timestamp else: @@ -1060,7 +1060,7 @@ client = self.parent if not "from" in message_elt.attributes: message_elt["from"] = client.jid.host - log.debug(_(u"got message from: {from_}").format(from_=message_elt["from"])) + log.debug(_("got message from: {from_}").format(from_=message_elt["from"])) # plugin can add their treatments to this deferred post_treat = defer.Deferred() @@ -1077,24 +1077,24 @@ return data def addToHistory(self, data): - if data.pop(u"history", None) == C.HISTORY_SKIP: - log.info(u"history is skipped as requested") - data[u"extra"][u"history"] = C.HISTORY_SKIP + if data.pop("history", None) == C.HISTORY_SKIP: + log.info("history is skipped as requested") + data["extra"]["history"] = C.HISTORY_SKIP else: - if data[u"message"] or data[u"subject"]: # we need a message to store + if data["message"] or data["subject"]: # we need a message to store return self.host.memory.addToHistory(self.parent, data) else: - log.debug(u"not storing empty message to history: {data}" + log.debug("not storing empty message to history: {data}" .format(data=data)) def bridgeSignal(self, __, data): try: - data["extra"]["received_timestamp"] = unicode(data["received_timestamp"]) + data["extra"]["received_timestamp"] = str(data["received_timestamp"]) data["extra"]["delay_sender"] = data["delay_sender"] except KeyError: pass if C.MESS_KEY_ENCRYPTION in data: - data[u"extra"][u"encrypted"] = C.BOOL_TRUE + data["extra"]["encrypted"] = C.BOOL_TRUE if data is not None: if data["message"] or data["subject"] or data["type"] == C.MESS_TYPE_INFO: self.host.bridge.messageNew( @@ -1109,7 +1109,7 @@ profile=self.parent.profile, ) else: - log.debug(u"Discarding bridge signal for empty message: {data}".format( + log.debug("Discarding bridge signal for empty message: {data}".format( data=data)) return data @@ -1131,7 +1131,7 @@ @property def versioning(self): """True if server support roster versioning""" - return (NS_ROSTER_VER, u'ver') in self.parent.xmlstream.features + return 
(NS_ROSTER_VER, 'ver') in self.parent.xmlstream.features @property def roster_cache(self): @@ -1148,23 +1148,23 @@ item must be already registered in self._jids before this method is called @param item (RosterIem): item added """ - log.debug(u"registering item: {}".format(item.entity.full())) + log.debug("registering item: {}".format(item.entity.full())) if item.entity.resource: log.warning( - u"Received a roster item with a resource, this is not common but not " - u"restricted by RFC 6121, this case may be not well tested." + "Received a roster item with a resource, this is not common but not " + "restricted by RFC 6121, this case may be not well tested." ) if not item.subscriptionTo: if not item.subscriptionFrom: log.info( - _(u"There's no subscription between you and [{}]!").format( + _("There's no subscription between you and [{}]!").format( item.entity.full() ) ) else: - log.info(_(u"You are not subscribed to [{}]!").format(item.entity.full())) + log.info(_("You are not subscribed to [{}]!").format(item.entity.full())) if not item.subscriptionFrom: - log.info(_(u"[{}] is not subscribed to you!").format(item.entity.full())) + log.info(_("[{}] is not subscribed to you!").format(item.entity.full())) for group in item.groups: self._groups.setdefault(group, set()).add(item.entity) @@ -1178,7 +1178,7 @@ roster_cache = self.roster_cache yield roster_cache.clear() roster_cache[ROSTER_VER_KEY] = version - for roster_jid, roster_item in self._jids.iteritems(): + for roster_jid, roster_item in self._jids.items(): roster_jid_s = roster_jid.full() roster_item_elt = roster_item.toElement().toXml() roster_cache[roster_jid_s] = roster_item_elt @@ -1200,19 +1200,19 @@ def requestRoster(self): """Ask the server for Roster list """ if self.versioning: - log.info(_(u"our server support roster versioning, we use it")) + log.info(_("our server support roster versioning, we use it")) roster_cache = self.roster_cache yield roster_cache.load() try: version = 
roster_cache[ROSTER_VER_KEY] except KeyError: - log.info(_(u"no roster in cache, we start fresh")) + log.info(_("no roster in cache, we start fresh")) # u"" means we use versioning without valid roster in cache - version = u"" + version = "" else: - log.info(_(u"We have roster v{version} in cache").format(version=version)) + log.info(_("We have roster v{version} in cache").format(version=version)) # we deserialise cached roster to our local cache - for roster_jid_s, roster_item_elt_s in roster_cache.iteritems(): + for roster_jid_s, roster_item_elt_s in roster_cache.items(): if roster_jid_s == ROSTER_VER_KEY: continue roster_jid = jid.JID(roster_jid_s) @@ -1221,26 +1221,26 @@ self._jids[roster_jid] = roster_item self._registerItem(roster_item) else: - log.warning(_(u"our server doesn't support roster versioning")) + log.warning(_("our server doesn't support roster versioning")) version = None log.debug("requesting roster") roster = yield self.getRoster(version=version) if roster is None: - log.debug(u"empty roster result received, we'll get roster item with roster " - u"pushes") + log.debug("empty roster result received, we'll get roster item with roster " + "pushes") else: # a full roster is received self._groups.clear() self._jids = roster - for item in roster.itervalues(): + for item in roster.values(): if not item.subscriptionTo and not item.subscriptionFrom and not item.ask: # XXX: current behaviour: we don't want contact in our roster list # if there is no presence subscription # may change in the future log.info( - u"Removing contact {} from roster because there is no presence " - u"subscription".format( + "Removing contact {} from roster because there is no presence " + "subscription".format( item.jid ) ) @@ -1267,9 +1267,9 @@ @return: dictionary of attributes """ item_attr = { - "to": unicode(item.subscriptionTo), - "from": unicode(item.subscriptionFrom), - "ask": unicode(item.ask), + "to": str(item.subscriptionTo), + "from": str(item.subscriptionFrom), + 
"ask": str(item.ask), } if item.name: item_attr["name"] = item.name @@ -1278,7 +1278,7 @@ def setReceived(self, request): item = request.item entity = item.entity - log.info(_(u"adding {entity} to roster").format(entity=entity.full())) + log.info(_("adding {entity} to roster").format(entity=entity.full())) if request.version is not None: # we update the cache in storage roster_cache = self.roster_cache @@ -1302,7 +1302,7 @@ def removeReceived(self, request): entity = request.item.entity - log.info(_(u"removing {entity} from roster").format(entity=entity.full())) + log.info(_("removing {entity} from roster").format(entity=entity.full())) if request.version is not None: # we update the cache in storage roster_cache = self.roster_cache @@ -1319,7 +1319,7 @@ item = self._jids.pop(entity) except KeyError: log.error( - u"Received a roster remove event for an item not in cache ({})".format( + "Received a roster remove event for an item not in cache ({})".format( entity ) ) @@ -1332,8 +1332,8 @@ del self._groups[group] except KeyError: log.warning( - u"there is no cache for the group [{group}] of the removed roster " - u"item [{jid_}]".format(group=group, jid=entity) + "there is no cache for the group [{group}] of the removed roster " + "item [{jid_}]".format(group=group, jid=entity) ) # then we send the bridge signal @@ -1341,7 +1341,7 @@ def getGroups(self): """Return a list of groups""" - return self._groups.keys() + return list(self._groups.keys()) def getItem(self, entity_jid): """Return RosterItem for a given jid @@ -1354,7 +1354,7 @@ def getJids(self): """Return all jids of the roster""" - return self._jids.keys() + return list(self._jids.keys()) def isJidInRoster(self, entity_jid): """Return True if jid is in roster""" @@ -1370,7 +1370,7 @@ def getItems(self): """Return all items of the roster""" - return self._jids.values() + return list(self._jids.values()) def getJidsFromGroup(self, group): try: @@ -1398,7 +1398,7 @@ jids.update(self.getJidsFromGroup(group)) 
return jids else: - raise ValueError(u"Unexpected type_ {}".format(type_)) + raise ValueError("Unexpected type_ {}".format(type_)) def getNick(self, entity_jid): """Return a nick name for an entity @@ -1447,7 +1447,7 @@ def unavailableReceived(self, entity, statuses=None): log.debug( - _(u"presence update for [%(entity)s] (unavailable, statuses=%(statuses)s)") + _("presence update for [%(entity)s] (unavailable, statuses=%(statuses)s)") % {"entity": entity, C.PRESENCE_STATUSES: statuses} ) @@ -1539,16 +1539,16 @@ self.host.memory.delWaitingSub(entity.userhost(), self.parent.profile) def subscribedReceived(self, entity): - log.debug(_(u"subscription approved for [%s]") % entity.userhost()) + log.debug(_("subscription approved for [%s]") % entity.userhost()) self.host.bridge.subscribe("subscribed", entity.userhost(), self.parent.profile) def unsubscribedReceived(self, entity): - log.debug(_(u"unsubscription confirmed for [%s]") % entity.userhost()) + log.debug(_("unsubscription confirmed for [%s]") % entity.userhost()) self.host.bridge.subscribe("unsubscribed", entity.userhost(), self.parent.profile) @defer.inlineCallbacks def subscribeReceived(self, entity): - log.debug(_(u"subscription request from [%s]") % entity.userhost()) + log.debug(_("subscription request from [%s]") % entity.userhost()) yield self.parent.roster.got_roster item = self.parent.roster.getItem(entity) if item and item.subscriptionTo: @@ -1566,7 +1566,7 @@ @defer.inlineCallbacks def unsubscribeReceived(self, entity): - log.debug(_(u"unsubscription asked for [%s]") % entity.userhost()) + log.debug(_("unsubscription asked for [%s]") % entity.userhost()) yield self.parent.roster.got_roster item = self.parent.roster.getItem(entity) if item and item.subscriptionFrom: # we automatically remove contact @@ -1575,8 +1575,8 @@ self.host.bridge.subscribe("unsubscribe", entity.userhost(), self.parent.profile) +@implementer(iwokkel.IDisco) class SatDiscoProtocol(disco.DiscoClientProtocol): - 
implements(iwokkel.IDisco) def __init__(self, host): disco.DiscoClientProtocol.__init__(self) @@ -1599,7 +1599,7 @@ def iqFallback(self, iq): if iq.handled is True: return - log.debug(u"iqFallback: xml = [%s]" % (iq.toXml())) + log.debug("iqFallback: xml = [%s]" % (iq.toXml())) generic.FallbackHandler.iqFallback(self, iq) @@ -1615,9 +1615,9 @@ return generic.VersionHandler.getDiscoInfo(self, requestor, target, None) +@implementer(iwokkel.IDisco) class SatIdentityHandler(XMPPHandler): """Manage disco Identity of SàT.""" - implements(iwokkel.IDisco) # TODO: dynamic identity update (see docstring). Note that a XMPP entity can have # several identities
--- a/sat/memory/cache.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/cache.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -23,7 +23,7 @@ from sat.tools.common import regex from sat.core import exceptions from sat.core.constants import Const as C -import cPickle as pickle +import pickle as pickle import mimetypes import os.path import time @@ -42,9 +42,9 @@ self.profile = profile path_elts = [host.memory.getConfig("", "local_dir"), C.CACHE_DIR] if profile: - path_elts.extend([u"profiles", regex.pathEscape(profile)]) + path_elts.extend(["profiles", regex.pathEscape(profile)]) else: - path_elts.append(u"common") + path_elts.append("common") self.cache_dir = os.path.join(*path_elts) if not os.path.exists(self.cache_dir): @@ -55,11 +55,11 @@ @param filename(unicode): cached file name (cache data or actual file) """ - if not filename or u"/" in filename: + if not filename or "/" in filename: log.error( - u"invalid char found in file name, hack attempt? name:{}".format(filename) + "invalid char found in file name, hack attempt? 
name:{}".format(filename) ) - raise exceptions.DataError(u"Invalid char found") + raise exceptions.DataError("Invalid char found") return os.path.join(self.cache_dir, filename) def getMetadata(self, uid): @@ -73,7 +73,7 @@ uid = uid.strip() if not uid: - raise exceptions.InternalError(u"uid must not be empty") + raise exceptions.InternalError("uid must not be empty") cache_url = self.getPath(uid) if not os.path.exists(cache_url): return None @@ -82,20 +82,20 @@ with open(cache_url, "rb") as f: cache_data = pickle.load(f) except IOError: - log.warning(u"can't read cache at {}".format(cache_url)) + log.warning("can't read cache at {}".format(cache_url)) return None except pickle.UnpicklingError: - log.warning(u"invalid cache found at {}".format(cache_url)) + log.warning("invalid cache found at {}".format(cache_url)) return None try: eol = cache_data["eol"] except KeyError: - log.warning(u"no End Of Life found for cached file {}".format(uid)) + log.warning("no End Of Life found for cached file {}".format(uid)) eol = 0 if eol < time.time(): log.debug( - u"removing expired cache (expired for {}s)".format(time.time() - eol) + "removing expired cache (expired for {}s)".format(time.time() - eol) ) return None @@ -135,11 +135,11 @@ ext = mimetypes.guess_extension(mime_type, strict=False) if ext is None: log.warning( - u"can't find extension for MIME type {}".format(mime_type) + "can't find extension for MIME type {}".format(mime_type) ) ext = DEFAULT_EXT - elif ext == u".jpe": - ext = u".jpg" + elif ext == ".jpe": + ext = ".jpg" else: ext = DEFAULT_EXT mime_type = None @@ -147,10 +147,10 @@ if max_age is None: max_age = C.DEFAULT_MAX_AGE cache_data = { - u"source": source, - u"filename": filename, - u"eol": int(time.time()) + max_age, - u"mime_type": mime_type, + "source": source, + "filename": filename, + "eol": int(time.time()) + max_age, + "mime_type": mime_type, } file_path = self.getPath(filename)
--- a/sat/memory/crypto.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/crypto.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -92,7 +92,7 @@ # a decrypted empty value and a decryption failure... both return # the empty value. Fortunately, we detect empty passwords beforehand # thanks to the "leave_empty" parameter which is used by default. - d.addCallback(lambda text: text.decode("utf-8") if text else None) + d.addCallback(lambda text: text if text else None) return d @classmethod @@ -114,11 +114,12 @@ def pad(self, s): """Method from http://stackoverflow.com/a/12525165""" bs = BlockCipher.BLOCK_SIZE - return s + (bs - len(s) % bs) * chr(bs - len(s) % bs) + return s + (bs - len(s) % bs) * (chr(bs - len(s) % bs)).encode('utf-8') @classmethod def unpad(self, s): """Method from http://stackoverflow.com/a/12525165""" + s = s.decode('utf-8') return s[0 : -ord(s[-1])] @@ -136,7 +137,7 @@ @return: Deferred: base-64 encoded str """ if leave_empty and password == "": - return succeed(password) + return succeed(b"") salt = ( b64decode(salt)[: PasswordHasher.SALT_LEN] if salt @@ -147,6 +148,11 @@ return d @classmethod + def compare_hash(cls, hashed_attempt, hashed): + assert isinstance(hashed, bytes) + return hashed_attempt == hashed + + @classmethod def verify(cls, attempt, hashed): """Verify a password attempt. @@ -156,5 +162,5 @@ """ leave_empty = hashed == "" d = PasswordHasher.hash(attempt, hashed, leave_empty) - d.addCallback(lambda hashed_attempt: hashed_attempt == hashed) + d.addCallback(cls.compare_hash, hashed=hashed.encode('utf-8')) return d
--- a/sat/memory/disco.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/disco.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -50,11 +50,11 @@ assert isinstance(identity, disco.DiscoIdentity) self.category = identity.category.encode("utf-8") self.idType = identity.type.encode("utf-8") - self.name = identity.name.encode("utf-8") if identity.name else "" - self.lang = lang.encode("utf-8") if lang is not None else "" + self.name = identity.name.encode("utf-8") if identity.name else b"" + self.lang = lang.encode("utf-8") if lang is not None else b"" - def __str__(self): - return "%s/%s/%s/%s" % (self.category, self.idType, self.lang, self.name) + def __bytes__(self): + return b"%s/%s/%s/%s" % (self.category, self.idType, self.lang, self.name) class HashManager(object): @@ -74,7 +74,7 @@ def __setitem__(self, hash_, disco_info): if hash_ in self.hashes: - log.debug(u"ignoring hash set: it is already known") + log.debug("ignoring hash set: it is already known") return self.hashes[hash_] = disco_info self.persistent[hash_] = disco_info.toElement().toXml() @@ -84,19 +84,19 @@ def load(self): def fillHashes(hashes): - for hash_, xml in hashes.iteritems(): + for hash_, xml in hashes.items(): element = xml_tools.ElementParser()(xml) disco_info = disco.DiscoInfo.fromElement(element) if not disco_info.features and not disco_info.identities: log.warning( _( - u"no feature/identity found in disco element (hash: {cap_hash}), ignoring: {xml}" + "no feature/identity found in disco element (hash: {cap_hash}), ignoring: {xml}" ).format(cap_hash=hash_, xml=xml) ) else: self.hashes[hash_] = disco_info - log.info(u"Disco hashes loaded") + log.info("Disco hashes loaded") d = self.persistent.load() d.addCallback(fillHashes) @@ -116,7 +116,7 @@ return self.hashes.load() @defer.inlineCallbacks - def hasFeature(self, client, feature, jid_=None, node=u""): + def hasFeature(self, client, 
feature, jid_=None, node=""): """Tell if an entity has the required feature @param feature: feature namespace @@ -128,7 +128,7 @@ defer.returnValue(feature in disco_infos.features) @defer.inlineCallbacks - def checkFeature(self, client, feature, jid_=None, node=u""): + def checkFeature(self, client, feature, jid_=None, node=""): """Like hasFeature, but raise an exception is feature is not Found @param feature: feature namespace @@ -142,7 +142,7 @@ raise failure.Failure(exceptions.FeatureNotFound) @defer.inlineCallbacks - def checkFeatures(self, client, features, jid_=None, identity=None, node=u""): + def checkFeatures(self, client, features, jid_=None, identity=None, node=""): """Like checkFeature, but check several features at once, and check also identity @param features(iterable[unicode]): features to check @@ -159,7 +159,7 @@ if identity is not None and identity not in disco_infos.identities: raise failure.Failure(exceptions.FeatureNotFound()) - def getInfos(self, client, jid_=None, node=u"", use_cache=True): + def getInfos(self, client, jid_=None, node="", use_cache=True): """get disco infos from jid_, filling capability hash if needed @param jid_: jid of the target, or None for profile's server @@ -188,16 +188,16 @@ def infosEb(fail): if fail.check(defer.CancelledError): - reason = u"request time-out" + reason = "request time-out" fail = failure.Failure(exceptions.TimeOutError(fail.message)) else: try: - reason = unicode(fail.value) + reason = str(fail.value) except AttributeError: - reason = unicode(fail) + reason = str(fail) log.warning( - u"Error while requesting disco infos from {jid}: {reason}".format( + "Error while requesting disco infos from {jid}: {reason}".format( jid=jid_.full(), reason=reason ) ) @@ -218,7 +218,7 @@ return defer.succeed(disco_infos) @defer.inlineCallbacks - def getItems(self, client, jid_=None, node=u"", use_cache=True): + def getItems(self, client, jid_=None, node="", use_cache=True): """get disco items from jid_, cache them for 
our own server @param jid_(jid.JID): jid of the target, or None for profile's server @@ -236,12 +236,12 @@ items = self.host.memory.getEntityData( jid_, ["DISCO_ITEMS"], client.profile )["DISCO_ITEMS"] - log.debug(u"[%s] disco items are in cache" % jid_.full()) + log.debug("[%s] disco items are in cache" % jid_.full()) if not use_cache: # we ignore cache, so we pretend we haven't found it raise KeyError except (KeyError, exceptions.UnknownEntityError): - log.debug(u"Caching [%s] disco items" % jid_.full()) + log.debug("Caching [%s] disco items" % jid_.full()) items = yield client.disco.requestItems(jid_, nodeIdentifier=node) self.host.memory.updateEntityData( jid_, "DISCO_ITEMS", items, profile_key=client.profile @@ -251,7 +251,7 @@ items = yield client.disco.requestItems(jid_, nodeIdentifier=node) except StanzaError as e: log.warning( - u"Error while requesting items for {jid}: {reason}".format( + "Error while requesting items for {jid}: {reason}".format( jid=jid_.full(), reason=e.condition ) ) @@ -262,7 +262,7 @@ def _infosEb(self, failure_, entity_jid): failure_.trap(StanzaError) log.warning( - _(u"Error while requesting [%(jid)s]: %(error)s") + _("Error while requesting [%(jid)s]: %(error)s") % {"jid": entity_jid.full(), "error": failure_.getErrorMessage()} ) @@ -326,7 +326,7 @@ def infosCb(infos, entity): if entity is None: - log.warning(_(u"received an item without jid")) + log.warning(_("received an item without jid")) return if identity is not None and identity not in infos.identities: return @@ -367,8 +367,8 @@ byte_identities.sort(key=lambda i: i.idType) byte_identities.sort(key=lambda i: i.category) for identity in byte_identities: - s.append(str(identity)) - s.append("<") + s.append(bytes(identity)) + s.append(b"<") # features byte_features = [ service.encode("utf-8") @@ -378,32 +378,32 @@ byte_features.sort() # XXX: the default sort has the same behaviour as the requested RFC 4790 i;octet sort for feature in byte_features: s.append(feature) - 
s.append("<") + s.append(b"<") # extensions - ext = services.extensions.values() + ext = list(services.extensions.values()) ext.sort(key=lambda f: f.formNamespace.encode('utf-8')) for extension in ext: s.append(extension.formNamespace.encode('utf-8')) - s.append("<") + s.append(b"<") fields = extension.fieldList fields.sort(key=lambda f: f.var.encode('utf-8')) for field in fields: s.append(field.var.encode('utf-8')) - s.append("<") + s.append(b"<") values = [v.encode('utf-8') for v in field.values] values.sort() for value in values: s.append(value) - s.append("<") + s.append(b"<") - cap_hash = b64encode(sha1("".join(s)).digest()) - log.debug(_(u"Capability hash generated: [{cap_hash}]").format(cap_hash=cap_hash)) + cap_hash = b64encode(sha1(b"".join(s)).digest()).decode('utf-8') + log.debug(_("Capability hash generated: [{cap_hash}]").format(cap_hash=cap_hash)) return cap_hash @defer.inlineCallbacks def _discoInfos( - self, entity_jid_s, node=u"", use_cache=True, profile_key=C.PROF_KEY_NONE + self, entity_jid_s, node="", use_cache=True, profile_key=C.PROF_KEY_NONE ): """ Discovery method for the bridge @param entity_jid_s: entity we want to discover @@ -417,7 +417,7 @@ disco_infos = yield self.getInfos(client, entity, node, use_cache) extensions = {} # FIXME: should extensions be serialised using tools.common.data_format? 
- for form_type, form in disco_infos.extensions.items(): + for form_type, form in list(disco_infos.extensions.items()): fields = [] for field in form.fieldList: data = {"type": field.fieldType} @@ -427,7 +427,7 @@ data[attr] = value values = [field.value] if field.value is not None else field.values - if field.fieldType == u"boolean": + if field.fieldType == "boolean": values = [C.boolConst(v) for v in values] fields.append((data, values)) @@ -436,7 +436,7 @@ defer.returnValue(( disco_infos.features, [(cat, type_, name or "") - for (cat, type_), name in disco_infos.identities.items()], + for (cat, type_), name in list(disco_infos.identities.items())], extensions)) def items2tuples(self, disco_items): @@ -447,13 +447,13 @@ """ for item in disco_items: if not item.entity: - log.warning(_(u"invalid item (no jid)")) + log.warning(_("invalid item (no jid)")) continue yield (item.entity.full(), item.nodeIdentifier or "", item.name or "") @defer.inlineCallbacks def _discoItems( - self, entity_jid_s, node=u"", use_cache=True, profile_key=C.PROF_KEY_NONE + self, entity_jid_s, node="", use_cache=True, profile_key=C.PROF_KEY_NONE ): """ Discovery method for the bridge
--- a/sat/memory/encryption.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/encryption.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -91,7 +91,7 @@ directed=directed) cls.plugins.append(plugin) cls.plugins.sort(key=lambda p: p.priority) - log.info(_(u"Encryption plugin registered: {name}").format(name=name)) + log.info(_("Encryption plugin registered: {name}").format(name=name)) @classmethod def getPlugins(cls): @@ -103,7 +103,7 @@ return next(p for p in cls.plugins if p.namespace == namespace) except StopIteration: raise exceptions.NotFound(_( - u"Can't find requested encryption plugin: {namespace}").format( + "Can't find requested encryption plugin: {namespace}").format( namespace=namespace)) @classmethod @@ -123,7 +123,7 @@ if p.name.lower() == name.lower(): return p.namespace raise exceptions.NotFound(_( - u"Can't find a plugin with the name \"{name}\".".format( + "Can't find a plugin with the name \"{name}\".".format( name=name))) def getBridgeData(self, session): @@ -133,12 +133,12 @@ @return (unicode): serialized data for bridge """ if session is None: - return u'' - plugin = session[u'plugin'] + return '' + plugin = session['plugin'] bridge_data = {'name': plugin.name, 'namespace': plugin.namespace} - if u'directed_devices' in session: - bridge_data[u'directed_devices'] = session[u'directed_devices'] + if 'directed_devices' in session: + bridge_data['directed_devices'] = session['directed_devices'] return data_format.serialise(bridge_data) @@ -151,7 +151,7 @@ try: start_encryption = plugin.instance.startEncryption except AttributeError: - log.debug(u"No startEncryption method found for {plugin}".format( + log.debug("No startEncryption method found for {plugin}".format( plugin = plugin.namespace)) return defer.succeed(None) else: @@ -167,7 +167,7 @@ try: stop_encryption = plugin.instance.stopEncryption except AttributeError: - log.debug(u"No 
stopEncryption method found for {plugin}".format( + log.debug("No stopEncryption method found for {plugin}".format( plugin = plugin.namespace)) return defer.succeed(None) else: @@ -187,8 +187,8 @@ it will be replaced by the new one """ if not self.plugins: - raise exceptions.NotFound(_(u"No encryption plugin is registered, " - u"an encryption session can't be started")) + raise exceptions.NotFound(_("No encryption plugin is registered, " + "an encryption session can't be started")) if namespace is None: plugin = self.plugins[0] @@ -198,10 +198,10 @@ bare_jid = entity.userhostJID() if bare_jid in self._sessions: # we have already an encryption session with this contact - former_plugin = self._sessions[bare_jid][u"plugin"] + former_plugin = self._sessions[bare_jid]["plugin"] if former_plugin.namespace == namespace: - log.info(_(u"Session with {bare_jid} is already encrypted with {name}. " - u"Nothing to do.").format( + log.info(_("Session with {bare_jid} is already encrypted with {name}. " + "Nothing to do.").format( bare_jid=bare_jid, name=former_plugin.name)) return @@ -211,8 +211,8 @@ del self._sessions[bare_jid] yield self._stopEncryption(former_plugin, entity) else: - msg = (_(u"Session with {bare_jid} is already encrypted with {name}. " - u"Please stop encryption session before changing algorithm.") + msg = (_("Session with {bare_jid} is already encrypted with {name}. 
" + "Please stop encryption session before changing algorithm.") .format(bare_jid=bare_jid, name=plugin.name)) log.warning(msg) raise exceptions.ConflictError(msg) @@ -223,34 +223,34 @@ entity.resource = self.host.memory.getMainResource(self.client, entity) if not entity.resource: raise exceptions.NotFound( - _(u"No resource found for {destinee}, can't encrypt with {name}") + _("No resource found for {destinee}, can't encrypt with {name}") .format(destinee=entity.full(), name=plugin.name)) - log.info(_(u"No resource specified to encrypt with {name}, using " - u"{destinee}.").format(destinee=entity.full(), + log.info(_("No resource specified to encrypt with {name}, using " + "{destinee}.").format(destinee=entity.full(), name=plugin.name)) # indicate that we encrypt only for some devices - directed_devices = data[u'directed_devices'] = [entity.resource] + directed_devices = data['directed_devices'] = [entity.resource] elif entity.resource: - raise ValueError(_(u"{name} encryption must be used with bare jids.")) + raise ValueError(_("{name} encryption must be used with bare jids.")) yield self._startEncryption(plugin, entity) self._sessions[entity.userhostJID()] = data - log.info(_(u"Encryption session has been set for {entity_jid} with " - u"{encryption_name}").format( + log.info(_("Encryption session has been set for {entity_jid} with " + "{encryption_name}").format( entity_jid=entity.full(), encryption_name=plugin.name)) self.host.bridge.messageEncryptionStarted( entity.full(), self.getBridgeData(data), self.client.profile) - msg = D_(u"Encryption session started: your messages with {destinee} are " - u"now end to end encrypted using {name} algorithm.").format( + msg = D_("Encryption session started: your messages with {destinee} are " + "now end to end encrypted using {name} algorithm.").format( destinee=entity.full(), name=plugin.name) - directed_devices = data.get(u'directed_devices') + directed_devices = data.get('directed_devices') if directed_devices: - msg 
+= u"\n" + D_(u"Message are encrypted only for {nb_devices} device(s): " - u"{devices_list}.").format( + msg += "\n" + D_("Message are encrypted only for {nb_devices} device(s): " + "{devices_list}.").format( nb_devices=len(directed_devices), - devices_list = u', '.join(directed_devices)) + devices_list = ', '.join(directed_devices)) self.client.feedback(bare_jid, msg) @@ -266,29 +266,29 @@ session = self.getSession(entity.userhostJID()) if not session: raise failure.Failure( - exceptions.NotFound(_(u"There is no encryption session with this " - u"entity."))) + exceptions.NotFound(_("There is no encryption session with this " + "entity."))) plugin = session['plugin'] if namespace is not None and plugin.namespace != namespace: raise exceptions.InternalError(_( - u"The encryption session is not run with the expected plugin: encrypted " - u"with {current_name} and was expecting {expected_name}").format( - current_name=session[u'plugin'].namespace, + "The encryption session is not run with the expected plugin: encrypted " + "with {current_name} and was expecting {expected_name}").format( + current_name=session['plugin'].namespace, expected_name=namespace)) if entity.resource: try: - directed_devices = session[u'directed_devices'] + directed_devices = session['directed_devices'] except KeyError: raise exceptions.NotFound(_( - u"There is a session for the whole entity (i.e. all devices of the " - u"entity), not a directed one. Please use bare jid if you want to " - u"stop the whole encryption with this entity.")) + "There is a session for the whole entity (i.e. all devices of the " + "entity), not a directed one. 
Please use bare jid if you want to " + "stop the whole encryption with this entity.")) try: directed_devices.remove(entity.resource) except ValueError: - raise exceptions.NotFound(_(u"There is no directed session with this " - u"entity.")) + raise exceptions.NotFound(_("There is no directed session with this " + "entity.")) else: if not directed_devices: # if we have no more directed device sessions, @@ -302,7 +302,7 @@ del self._sessions[entity.userhostJID()] yield self._stopEncryption(plugin, entity) - log.info(_(u"encryption session stopped with entity {entity}").format( + log.info(_("encryption session stopped with entity {entity}").format( entity=entity.full())) self.host.bridge.messageEncryptionStopped( entity.full(), @@ -310,9 +310,9 @@ 'namespace': plugin.namespace, }, self.client.profile) - msg = D_(u"Encryption session finished: your messages with {destinee} are " - u"NOT end to end encrypted anymore.\nYour server administrators or " - u"{destinee} server administrators will be able to read them.").format( + msg = D_("Encryption session finished: your messages with {destinee} are " + "NOT end to end encrypted anymore.\nYour server administrators or " + "{destinee} server administrators will be able to read them.").format( destinee=entity.full()) self.client.feedback(entity, msg) @@ -326,7 +326,7 @@ None if there is not encryption for this session with this jid """ if entity.resource: - raise ValueError(u"Full jid given when expecting bare jid") + raise ValueError("Full jid given when expecting bare jid") return self._sessions.get(entity) def getTrustUI(self, entity_jid, namespace=None): @@ -346,7 +346,7 @@ session = self.getSession(entity_jid) if not session: raise exceptions.NotFound( - u"No encryption session currently active for {entity_jid}" + "No encryption session currently active for {entity_jid}" .format(entity_jid=entity_jid.full())) plugin = session['plugin'] else: @@ -355,7 +355,7 @@ get_trust_ui = plugin.instance.getTrustUI except 
AttributeError: raise NotImplementedError( - u"Encryption plugin doesn't handle trust management UI") + "Encryption plugin doesn't handle trust management UI") else: return defer.maybeDeferred(get_trust_ui, self.client, entity_jid) @@ -364,32 +364,32 @@ @classmethod def _importMenus(cls, host): host.importMenu( - (D_(u"Encryption"), D_(u"unencrypted (plain text)")), + (D_("Encryption"), D_("unencrypted (plain text)")), partial(cls._onMenuUnencrypted, host=host), security_limit=0, - help_string=D_(u"End encrypted session"), + help_string=D_("End encrypted session"), type_=C.MENU_SINGLE, ) for plg in cls.getPlugins(): host.importMenu( - (D_(u"Encryption"), plg.name), + (D_("Encryption"), plg.name), partial(cls._onMenuName, host=host, plg=plg), security_limit=0, - help_string=D_(u"Start {name} session").format(name=plg.name), + help_string=D_("Start {name} session").format(name=plg.name), type_=C.MENU_SINGLE, ) host.importMenu( - (D_(u"Encryption"), D_(u"⛨ {name} trust").format(name=plg.name)), + (D_("Encryption"), D_("⛨ {name} trust").format(name=plg.name)), partial(cls._onMenuTrust, host=host, plg=plg), security_limit=0, - help_string=D_(u"Manage {name} trust").format(name=plg.name), + help_string=D_("Manage {name} trust").format(name=plg.name), type_=C.MENU_SINGLE, ) @classmethod def _onMenuUnencrypted(cls, data, host, profile): client = host.getClient(profile) - peer_jid = jid.JID(data[u'jid']).userhostJID() + peer_jid = jid.JID(data['jid']).userhostJID() d = client.encryption.stop(peer_jid) d.addCallback(lambda __: {}) return d @@ -397,7 +397,7 @@ @classmethod def _onMenuName(cls, data, host, plg, profile): client = host.getClient(profile) - peer_jid = jid.JID(data[u'jid']) + peer_jid = jid.JID(data['jid']) if not plg.directed: peer_jid = peer_jid.userhostJID() d = client.encryption.start(peer_jid, plg.namespace, replace=True) @@ -408,9 +408,9 @@ @defer.inlineCallbacks def _onMenuTrust(cls, data, host, plg, profile): client = host.getClient(profile) - peer_jid = 
jid.JID(data[u'jid']).userhostJID() + peer_jid = jid.JID(data['jid']).userhostJID() ui = yield client.encryption.getTrustUI(peer_jid, plg.namespace) - defer.returnValue({u'xmlui': ui.toXml()}) + defer.returnValue({'xmlui': ui.toXml()}) ## Triggers ##
--- a/sat/memory/memory.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/memory.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -26,7 +26,7 @@ import os.path import copy from collections import namedtuple -from ConfigParser import SafeConfigParser, NoOptionError, NoSectionError +from configparser import SafeConfigParser, NoOptionError, NoSectionError from uuid import uuid4 from twisted.python import failure from twisted.internet import defer, reactor, error @@ -76,7 +76,7 @@ session_id = str(uuid4()) elif session_id in self._sessions: raise exceptions.ConflictError( - u"Session id {} is already used".format(session_id) + "Session id {} is already used".format(session_id) ) timer = reactor.callLater(self.timeout, self._purgeSession, session_id) if session_data is None: @@ -99,9 +99,9 @@ pass del self._sessions[session_id] log.debug( - u"Session {} purged{}".format( + "Session {} purged{}".format( session_id, - u" (profile {})".format(profile) if profile is not None else u"", + " (profile {})".format(profile) if profile is not None else "", ) ) @@ -147,10 +147,10 @@ self._purgeSession(session_id) def keys(self): - return self._sessions.keys() + return list(self._sessions.keys()) def iterkeys(self): - return self._sessions.iterkeys() + return iter(self._sessions.keys()) class ProfileSessions(Sessions): @@ -165,7 +165,7 @@ @return: a list containing the sessions ids """ ret = [] - for session_id in self._sessions.iterkeys(): + for session_id in self._sessions.keys(): try: timer, session_data, profile_set = self._sessions[session_id] except ValueError: @@ -245,7 +245,7 @@ if not silent: log.warning( _( - u"A database has been found in the default local_dir for previous versions (< 0.5)" + "A database has been found in the default local_dir for previous versions (< 0.5)" ) ) tools_config.fixConfigOption("", "local_dir", old_default, silent) @@ -306,10 +306,10 @@ if 
os.path.exists(filename): try: self.params.load_xml(filename) - log.debug(_(u"Parameters loaded from file: %s") % filename) + log.debug(_("Parameters loaded from file: %s") % filename) return True except Exception as e: - log.error(_(u"Can't load parameters from file: %s") % e) + log.error(_("Can't load parameters from file: %s") % e) return False def save_xml(self, filename): @@ -324,10 +324,10 @@ filename = os.path.expanduser(filename) try: self.params.save_xml(filename) - log.debug(_(u"Parameters saved to file: %s") % filename) + log.debug(_("Parameters saved to file: %s") % filename) return True except Exception as e: - log.error(_(u"Can't save parameters to file: %s") % e) + log.error(_("Can't save parameters to file: %s") % e) return False def load(self): @@ -356,7 +356,7 @@ def createSession(__): """Called once params are loaded.""" self._entities_cache[profile] = {} - log.info(u"[{}] Profile session started".format(profile)) + log.info("[{}] Profile session started".format(profile)) return False def backendInitialised(__): @@ -392,13 +392,13 @@ @param profile: %(doc_profile)s """ if self.host.isConnected(profile): - log.debug(u"Disconnecting profile because of session stop") + log.debug("Disconnecting profile because of session stop") self.host.disconnect(profile) self.auth_sessions.profileDelUnique(profile) try: self._entities_cache[profile] except KeyError: - log.warning(u"Profile was not in cache") + log.warning("Profile was not in cache") def _isSessionStarted(self, profile_key): return self.isSessionStarted(self.getProfileName(profile_key)) @@ -428,10 +428,10 @@ def check_result(result): if not result: - log.warning(u"Authentication failure of profile {}".format(profile)) + log.warning("Authentication failure of profile {}".format(profile)) raise failure.Failure( exceptions.PasswordError( - u"The provided profile password doesn't match." + "The provided profile password doesn't match." 
) ) if ( @@ -460,7 +460,7 @@ self.auth_sessions.newSession( {C.MEMORY_CRYPTO_KEY: personal_key}, profile=profile ) - log.debug(u"auth session created for profile %s" % profile) + log.debug("auth session created for profile %s" % profile) d = PersistentDict(C.MEMORY_CRYPTO_NAMESPACE, profile).load() d.addCallback(lambda data: BlockCipher.decrypt(key, data[C.MEMORY_CRYPTO_KEY])) @@ -476,7 +476,7 @@ except KeyError: log.error( _( - u"Trying to purge roster status cache for a profile not in memory: [%s]" + "Trying to purge roster status cache for a profile not in memory: [%s]" ) % profile ) @@ -489,7 +489,7 @@ @return (list[unicode]): selected profiles """ if not clients and not components: - log.warning(_(u"requesting no profiles at all")) + log.warning(_("requesting no profiles at all")) return [] profiles = self.storage.getProfilesList() if clients and components: @@ -533,20 +533,20 @@ @raise exceptions.NotFound: component is not a known plugin import name """ if not name: - raise ValueError(u"Empty profile name") + raise ValueError("Empty profile name") if name[0] == "@": - raise ValueError(u"A profile name can't start with a '@'") + raise ValueError("A profile name can't start with a '@'") if "\n" in name: - raise ValueError(u"A profile name can't contain line feed ('\\n')") + raise ValueError("A profile name can't contain line feed ('\\n')") if name in self._entities_cache: - raise exceptions.ConflictError(u"A session for this profile exists") + raise exceptions.ConflictError("A session for this profile exists") if component: if not component in self.host.plugins: raise exceptions.NotFound( _( - u"Can't find component {component} entry point".format( + "Can't find component {component} entry point".format( component=component ) ) @@ -664,7 +664,7 @@ def _getPresenceStatuses(self, profile_key): ret = self.getPresenceStatuses(profile_key) - return {entity.full(): data for entity, data in ret.iteritems()} + return {entity.full(): data for entity, data in 
ret.items()} def getPresenceStatuses(self, profile_key): """Get all the presence statuses of a profile @@ -676,8 +676,8 @@ profile_cache = self._getProfileCache(client) entities_presence = {} - for entity_jid, entity_data in profile_cache.iteritems(): - for resource, resource_data in entity_data.iteritems(): + for entity_jid, entity_data in profile_cache.items(): + for resource, resource_data in entity_data.items(): full_jid = copy.copy(entity_jid) full_jid.resource = resource try: @@ -736,7 +736,7 @@ entity_data = profile_cache[entity_jid.userhostJID()] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(entity_jid) + "Entity {} not in cache".format(entity_jid) ) resources = set(entity_data.keys()) resources.discard(None) @@ -758,7 +758,7 @@ try: presence_data = self.getEntityDatum(full_jid, "presence", client.profile) except KeyError: - log.debug(u"Can't get presence data for {}".format(full_jid)) + log.debug("Can't get presence data for {}".format(full_jid)) else: if presence_data.show != C.PRESENCE_UNAVAILABLE: available.append(resource) @@ -787,7 +787,7 @@ try: resources = self.getAllResources(client, entity_jid) except exceptions.UnknownEntityError: - log.warning(u"Entity is not in cache, we can't find any resource") + log.warning("Entity is not in cache, we can't find any resource") return None priority_resources = [] for resource in resources: @@ -796,13 +796,13 @@ try: presence_data = self.getEntityDatum(full_jid, "presence", client.profile) except KeyError: - log.debug(u"No presence information for {}".format(full_jid)) + log.debug("No presence information for {}".format(full_jid)) continue priority_resources.append((resource, presence_data.priority)) try: return max(priority_resources, key=lambda res_tuple: res_tuple[1])[0] except ValueError: - log.warning(u"No resource found at all for {}".format(entity_jid)) + log.warning("No resource found at all for {}".format(entity_jid)) return None ## Entities data ## @@ -835,8 
+835,8 @@ """ profile_cache = self._getProfileCache(client) # we construct a list of all known full jids (bare jid of entities x resources) - for bare_jid, entity_data in profile_cache.iteritems(): - for resource in entity_data.iterkeys(): + for bare_jid, entity_data in profile_cache.items(): + for resource in entity_data.keys(): if resource is None: continue full_jid = copy.copy(bare_jid) @@ -871,9 +871,9 @@ entity_data[key] = value if key in self._key_signals and not silent: - if not isinstance(value, basestring): + if not isinstance(value, str): log.error( - u"Setting a non string value ({}) for a key ({}) which has a signal flag".format( + "Setting a non string value ({}) for a key ({}) which has a signal flag".format( value, key ) ) @@ -905,7 +905,7 @@ entity_data = profile_cache[jid_.userhostJID()][jid_.resource] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(jid_) + "Entity {} not in cache".format(jid_) ) try: del entity_data[key] @@ -919,7 +919,7 @@ ret = self.getEntitiesData( [jid.JID(jid_) for jid_ in entities_jids], keys_list, profile_key ) - return {jid_.full(): data for jid_, data in ret.iteritems()} + return {jid_.full(): data for jid_, data in ret.items()} def getEntitiesData(self, entities_jids, keys_list=None, profile_key=C.PROF_KEY_NONE): """Get a list of cached values for several entities at once @@ -961,8 +961,8 @@ continue ret_data[entity.full()] = fillEntityData(entity_cache_data, keys_list) else: - for bare_jid, data in profile_cache.iteritems(): - for resource, entity_cache_data in data.iteritems(): + for bare_jid, data in profile_cache.items(): + for resource, entity_cache_data in data.items(): full_jid = copy.copy(bare_jid) full_jid.resource = resource ret_data[full_jid] = fillEntityData(entity_cache_data) @@ -987,7 +987,7 @@ entity_data = profile_cache[entity_jid.userhostJID()][entity_jid.resource] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache (was requesting 
{})".format( + "Entity {} not in cache (was requesting {})".format( entity_jid, keys_list ) ) @@ -1030,14 +1030,14 @@ del profile_cache[entity_jid] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(entity_jid) + "Entity {} not in cache".format(entity_jid) ) else: try: del profile_cache[entity_jid.userhostJID()][entity_jid.resource] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(entity_jid) + "Entity {} not in cache".format(entity_jid) ) ## Encryption ## @@ -1103,7 +1103,7 @@ def done(__): log.debug( - _(u"Personal data (%(ns)s, %(key)s) has been successfuly encrypted") + _("Personal data (%(ns)s, %(key)s) has been successfuly encrypted") % {"ns": C.MEMORY_CRYPTO_NAMESPACE, "key": data_key} ) @@ -1225,21 +1225,21 @@ # the owner has all rights return if not C.ACCESS_PERMS.issuperset(perms_to_check): - raise exceptions.InternalError(_(u"invalid permission")) + raise exceptions.InternalError(_("invalid permission")) for perm in perms_to_check: # we check each perm and raise PermissionError as soon as one condition is not valid # we must never return here, we only return after the loop if nothing was blocking the access try: - perm_data = file_data[u"access"][perm] - perm_type = perm_data[u"type"] + perm_data = file_data["access"][perm] + perm_type = perm_data["type"] except KeyError: raise exceptions.PermissionError() if perm_type == C.ACCESS_TYPE_PUBLIC: continue elif perm_type == C.ACCESS_TYPE_WHITELIST: try: - jids = perm_data[u"jids"] + jids = perm_data["jids"] except KeyError: raise exceptions.PermissionError() if peer_jid.full() in jids: @@ -1248,7 +1248,7 @@ raise exceptions.PermissionError() else: raise exceptions.InternalError( - _(u"unknown access type: {type}").format(type=perm_type) + _("unknown access type: {type}").format(type=perm_type) ) @defer.inlineCallbacks @@ -1257,7 +1257,7 @@ current = file_data while True: self.checkFilePermission(current, peer_jid, perms_to_check) - 
parent = current[u"parent"] + parent = current["parent"] if not parent: break files_data = yield self.getFile( @@ -1266,7 +1266,7 @@ try: current = files_data[0] except IndexError: - raise exceptions.DataError(u"Missing parent") + raise exceptions.DataError("Missing parent") @defer.inlineCallbacks def _getParentDir( @@ -1283,15 +1283,15 @@ # if path is set, we have to retrieve parent directory of the file(s) from it if parent is not None: raise exceptions.ConflictError( - _(u"You can't use path and parent at the same time") + _("You can't use path and parent at the same time") ) - path_elts = filter(None, path.split(u"/")) - if {u"..", u"."}.intersection(path_elts): - raise ValueError(_(u'".." or "." can\'t be used in path')) + path_elts = [_f for _f in path.split("/") if _f] + if {"..", "."}.intersection(path_elts): + raise ValueError(_('".." or "." can\'t be used in path')) # we retrieve all directories from path until we get the parent container # non existing directories will be created - parent = u"" + parent = "" for idx, path_elt in enumerate(path_elts): directories = yield self.storage.getFiles( client, @@ -1306,12 +1306,12 @@ # from this point, directories don't exist anymore, we have to create them elif len(directories) > 1: raise exceptions.InternalError( - _(u"Several directories found, this should not happen") + _("Several directories found, this should not happen") ) else: directory = directories[0] self.checkFilePermission(directory, peer_jid, perms_to_check) - parent = directory[u"id"] + parent = directory["id"] defer.returnValue((parent, [])) @defer.inlineCallbacks @@ -1357,8 +1357,8 @@ """ if peer_jid is None and perms_to_check or perms_to_check is None and peer_jid: raise exceptions.InternalError( - u"if you want to disable permission check, both peer_jid and " - u"perms_to_check must be None" + "if you want to disable permission check, both peer_jid and " + "perms_to_check must be None" ) if owner is not None: owner = owner.userhostJID() @@ 
-1378,7 +1378,7 @@ try: parent_data = parent_data[0] except IndexError: - raise exceptions.DataError(u"mising parent") + raise exceptions.DataError("mising parent") yield self.checkPermissionToRoot( client, parent_data, peer_jid, perms_to_check ) @@ -1414,7 +1414,7 @@ @defer.inlineCallbacks def setFile( - self, client, name, file_id=None, version=u"", parent=None, path=None, + self, client, name, file_id=None, version="", parent=None, path=None, type_=C.FILE_TYPE_FILE, file_hash=None, hash_algo=None, size=None, namespace=None, mime_type=None, created=None, modified=None, owner=None, access=None, extra=None, peer_jid=None, perms_to_check=(C.ACCESS_PERM_WRITE,) @@ -1481,7 +1481,7 @@ if type_ == C.FILE_TYPE_DIRECTORY: if any(version, file_hash, size, mime_type): raise ValueError( - u"version, file_hash, size and mime_type can't be set for a directory" + "version, file_hash, size and mime_type can't be set for a directory" ) if owner is not None: owner = owner.userhostJID() @@ -1498,7 +1498,7 @@ client, name=new_dir, file_id=new_dir_id, - version=u"", + version="", parent=parent, type_=C.FILE_TYPE_DIRECTORY, namespace=namespace, @@ -1509,7 +1509,7 @@ ) parent = new_dir_id elif parent is None: - parent = u"" + parent = "" yield self.storage.setFile( client, @@ -1552,35 +1552,35 @@ @param files_path(unicode): path of the directory containing the actual files @param file_data(dict): data of the file to delete """ - if file_data[u'owner'] != peer_jid: + if file_data['owner'] != peer_jid: raise exceptions.PermissionError( - u"file {file_name} can't be deleted, {peer_jid} is not the owner" - .format(file_name=file_data[u'name'], peer_jid=peer_jid.full())) - if file_data[u'type'] == C.FILE_TYPE_DIRECTORY: - sub_files = yield self.getFiles(client, peer_jid, parent=file_data[u'id']) + "file {file_name} can't be deleted, {peer_jid} is not the owner" + .format(file_name=file_data['name'], peer_jid=peer_jid.full())) + if file_data['type'] == C.FILE_TYPE_DIRECTORY: + sub_files = 
yield self.getFiles(client, peer_jid, parent=file_data['id']) if sub_files and not recursive: - raise exceptions.DataError(_(u"Can't delete directory, it is not empty")) + raise exceptions.DataError(_("Can't delete directory, it is not empty")) # we first delete the sub-files for sub_file_data in sub_files: yield self._deleteFile(client, peer_jid, recursive, sub_file_data) # then the directory itself - yield self.storage.fileDelete(file_data[u'id']) - elif file_data[u'type'] == C.FILE_TYPE_FILE: - log.info(_(u"deleting file {name} with hash {file_hash}").format( - name=file_data[u'name'], file_hash=file_data[u'file_hash'])) - yield self.storage.fileDelete(file_data[u'id']) + yield self.storage.fileDelete(file_data['id']) + elif file_data['type'] == C.FILE_TYPE_FILE: + log.info(_("deleting file {name} with hash {file_hash}").format( + name=file_data['name'], file_hash=file_data['file_hash'])) + yield self.storage.fileDelete(file_data['id']) references = yield self.getFiles( - client, peer_jid, file_hash=file_data[u'file_hash']) + client, peer_jid, file_hash=file_data['file_hash']) if references: - log.debug(u"there are still references to the file, we keep it") + log.debug("there are still references to the file, we keep it") else: - file_path = os.path.join(files_path, file_data[u'file_hash']) - log.info(_(u"no reference left to {file_path}, deleting").format( + file_path = os.path.join(files_path, file_data['file_hash']) + log.info(_("no reference left to {file_path}, deleting").format( file_path=file_path)) os.unlink(file_path) else: - raise exceptions.InternalError(u'Unexpected file type: {file_type}' - .format(file_type=file_data[u'type'])) + raise exceptions.InternalError('Unexpected file type: {file_type}' + .format(file_type=file_data['type'])) @defer.inlineCallbacks def fileDelete(self, client, peer_jid, file_id, recursive=False): @@ -1595,11 +1595,11 @@ # should be checked too files_data = yield self.getFiles(client, peer_jid, file_id) if not files_data: - 
raise exceptions.NotFound(u"Can't find the file with id {file_id}".format( + raise exceptions.NotFound("Can't find the file with id {file_id}".format( file_id=file_id)) file_data = files_data[0] - if file_data[u"type"] != C.FILE_TYPE_DIRECTORY and recursive: - raise ValueError(u"recursive can only be set for directories") + if file_data["type"] != C.FILE_TYPE_DIRECTORY and recursive: + raise ValueError("recursive can only be set for directories") files_path = self.host.getLocalPath(None, C.FILES_DIR, profile=False) yield self._deleteFile(client, peer_jid, recursive, files_path, file_data) @@ -1618,6 +1618,6 @@ try: presence_data = self.getEntityDatum(entity_jid, "presence", client.profile) except KeyError: - log.debug(u"No presence information for {}".format(entity_jid)) + log.debug("No presence information for {}".format(entity_jid)) return False return presence_data.show != C.PRESENCE_UNAVAILABLE
--- a/sat/memory/params.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/params.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -56,7 +56,7 @@ # TODO: when priority is changed, a new presence stanza must be emitted # TODO: int type (Priority should be int instead of string) - default_xml = u""" + default_xml = """ <params> <general> </general> @@ -80,20 +80,20 @@ </individual> </params> """ % { - u"category_general": D_(u"General"), - u"category_connection": D_(u"Connection"), - u"history_param": C.HISTORY_LIMIT, - u"history_label": D_(u"Chat history limit"), - u"show_offline_contacts": C.SHOW_OFFLINE_CONTACTS, - u"show_offline_contacts_label": D_(u"Show offline contacts"), - u"show_empty_groups": C.SHOW_EMPTY_GROUPS, - u"show_empty_groups_label": D_(u"Show empty groups"), - u"force_server_param": C.FORCE_SERVER_PARAM, - u"force_port_param": C.FORCE_PORT_PARAM, - u"new_account_label": D_(u"Register new account"), - u"autoconnect_label": D_(u"Connect on frontend startup"), - u"autodisconnect_label": D_(u"Disconnect on frontend closure"), - u"check_certificate_label": D_(u"Check certificate (don't uncheck if unsure)"), + "category_general": D_("General"), + "category_connection": D_("Connection"), + "history_param": C.HISTORY_LIMIT, + "history_label": D_("Chat history limit"), + "show_offline_contacts": C.SHOW_OFFLINE_CONTACTS, + "show_offline_contacts_label": D_("Show offline contacts"), + "show_empty_groups": C.SHOW_EMPTY_GROUPS, + "show_empty_groups_label": D_("Show empty groups"), + "force_server_param": C.FORCE_SERVER_PARAM, + "force_port_param": C.FORCE_PORT_PARAM, + "new_account_label": D_("Register new account"), + "autoconnect_label": D_("Connect on frontend startup"), + "autodisconnect_label": D_("Disconnect on frontend closure"), + "check_certificate_label": D_("Check certificate (don't uncheck if unsure)"), } def load_default_params(self): @@ -158,7 
+158,7 @@ del self.params[profile] except KeyError: log.error( - _(u"Trying to purge cache of a profile not in memory: [%s]") % profile + _("Trying to purge cache of a profile not in memory: [%s]") % profile ) def save_xml(self, filename): @@ -238,7 +238,7 @@ elif return_profile_keys and profile_key in [C.PROF_KEY_ALL]: return profile_key # this value must be managed by the caller if not self.storage.hasProfile(profile_key): - log.error(_(u"Trying to access an unknown profile (%s)") % profile_key) + log.error(_("Trying to access an unknown profile (%s)") % profile_key) raise exceptions.ProfileUnknownError(profile_key) return profile_key @@ -294,7 +294,7 @@ if ( len(cat_node.childNodes) == to_remove_count ): # remove empty category - for __ in xrange(0, to_remove_count): + for __ in range(0, to_remove_count): to_remove.pop() to_remove.append(cat_node) for node in to_remove: @@ -333,7 +333,7 @@ if not app: log.warning( _( - u"Trying to register frontends parameters with no specified app: aborted" + "Trying to register frontends parameters with no specified app: aborted" ) ) return @@ -342,14 +342,14 @@ if app in self.frontends_cache: log.debug( _( - u"Trying to register twice frontends parameters for %(app)s: aborted" + "Trying to register twice frontends parameters for %(app)s: aborted" % {"app": app} ) ) return self.frontends_cache.append(app) self.updateParams(xml, security_limit, app) - log.debug(u"Frontends parameters registered for %(app)s" % {"app": app}) + log.debug("Frontends parameters registered for %(app)s" % {"app": app}) def __default_ok(self, value, name, category): # FIXME: will not work with individual parameters @@ -357,7 +357,7 @@ def __default_ko(self, failure, name, category): log.error( - _(u"Can't determine default value for [%(category)s/%(name)s]: %(reason)s") + _("Can't determine default value for [%(category)s/%(name)s]: %(reason)s") % {"category": category, "name": name, "reason": str(failure.value)} ) @@ -380,7 +380,7 @@ if not node: 
log.error( _( - u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + "Requested param [%(name)s] in category [%(category)s] doesn't exist !" ) % {"name": name, "category": category} ) @@ -443,14 +443,14 @@ if len(selected) == 0: log.error( _( - u"Parameter (%(cat)s, %(param)s) of type list has no default option!" + "Parameter (%(cat)s, %(param)s) of type list has no default option!" ) % {"cat": cat, "param": param} ) else: log.error( _( - u"Parameter (%(cat)s, %(param)s) of type list has more than one default option!" + "Parameter (%(cat)s, %(param)s) of type list has more than one default option!" ) % {"cat": cat, "param": param} ) @@ -468,7 +468,7 @@ jids[idx] = jid.JID(value) except (RuntimeError, jid.InvalidFormat, AttributeError): log.warning( - u"Incorrect jid value found in jids list: [{}]".format(value) + "Incorrect jid value found in jids list: [{}]".format(value) ) to_delete.append(value) for value in to_delete: @@ -564,7 +564,7 @@ if not node: log.error( _( - u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + "Requested param [%(name)s] in category [%(category)s] doesn't exist !" ) % {"name": name, "category": category} ) @@ -630,7 +630,7 @@ if not node: log.error( _( - u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + "Requested param [%(name)s] in category [%(category)s] doesn't exist !" ) % {"name": name, "category": category} ) @@ -639,7 +639,7 @@ if not self.checkSecurityLimit(node[1], security_limit): log.warning( _( - u"Trying to get parameter '%(param)s' in category '%(cat)s' without authorization!!!" + "Trying to get parameter '%(param)s' in category '%(cat)s' without authorization!!!" 
% {"param": name, "cat": category} ) ) @@ -697,7 +697,7 @@ name = param_node.getAttribute("name") if not name: log.warning( - u"ignoring attribute without name: {}".format( + "ignoring attribute without name: {}".format( param_node.toxml() ) ) @@ -850,7 +850,7 @@ AttributeError, ): log.warning( - u"Incorrect jid value found in jids list: [{}]".format( + "Incorrect jid value found in jids list: [{}]".format( jid_ ) ) @@ -982,13 +982,13 @@ if profile_key != C.PROF_KEY_NONE: profile = self.getProfileName(profile_key) if not profile: - log.error(_(u"Trying to set parameter for an unknown profile")) + log.error(_("Trying to set parameter for an unknown profile")) raise exceptions.ProfileUnknownError(profile_key) node = self._getParamNode(name, category, "@ALL@") if not node: log.error( - _(u"Requesting an unknown parameter (%(category)s/%(name)s)") + _("Requesting an unknown parameter (%(category)s/%(name)s)") % {"category": category, "name": name} ) return defer.succeed(None) @@ -996,7 +996,7 @@ if not self.checkSecurityLimit(node[1], security_limit): log.warning( _( - u"Trying to set parameter '%(param)s' in category '%(cat)s' without authorization!!!" + "Trying to set parameter '%(param)s' in category '%(cat)s' without authorization!!!" % {"param": name, "cat": category} ) ) @@ -1012,7 +1012,7 @@ except ValueError: log.debug( _( - u"Trying to set parameter '%(param)s' in category '%(cat)s' with an non-integer value" + "Trying to set parameter '%(param)s' in category '%(cat)s' with an non-integer value" % {"param": name, "cat": category} ) ) @@ -1051,7 +1051,7 @@ assert profile_key != C.PROF_KEY_NONE if type_ == "button": - log.debug(u"Clicked param button %s" % node.toxml()) + log.debug("Clicked param button %s" % node.toxml()) return defer.succeed(None) elif type_ == "password": try:
--- a/sat/memory/persistent.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/persistent.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -63,10 +63,10 @@ return d def iteritems(self): - return self._cache.iteritems() + return iter(self._cache.items()) def items(self): - return self._cache.items() + return list(self._cache.items()) def __repr__(self): return self._cache.__repr__() @@ -98,8 +98,8 @@ def __hash__(self): return self._cache.__hash__() - def __nonzero__(self): - return self._cache.__len__() + def __bool__(self): + return self._cache.__len__() != 0 def __contains__(self, key): return self._cache.__contains__(key) @@ -149,7 +149,7 @@ class LazyPersistentBinaryDict(PersistentBinaryDict): - ur"""PersistentBinaryDict which get key/value when needed + r"""PersistentBinaryDict which get key/value when needed This Persistent need more database access, it is suitable for largest data, to save memory. @@ -160,7 +160,7 @@ def load(self): # we show a warning as calling load on LazyPersistentBinaryDict sounds like a code mistake - log.warning(_(u"Calling load on LazyPersistentBinaryDict while it's not needed")) + log.warning(_("Calling load on LazyPersistentBinaryDict while it's not needed")) def iteritems(self): raise NotImplementedError @@ -196,9 +196,9 @@ raise NotImplementedError def __hash__(self): - return hash(unicode(self.__class__) + self.namespace + (self.profile or u'')) + return hash(str(self.__class__) + self.namespace + (self.profile or '')) - def __nonzero__(self): + def __bool__(self): raise NotImplementedError def __contains__(self, key):
--- a/sat/memory/sqlite.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/sqlite.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -31,7 +31,7 @@ import sys import re import os.path -import cPickle as pickle +import pickle as pickle import hashlib import sqlite3 import json @@ -152,12 +152,12 @@ # Sqlite integration, probably with high level library retry -= 1 if retry == 0: - log.error(_(u'too many db tries, we abandon! Error message: {msg}\n' - u'query was {query}' - .format(msg=e, query=u' '.join([unicode(a) for a in args])))) + log.error(_('too many db tries, we abandon! Error message: {msg}\n' + 'query was {query}' + .format(msg=e, query=' '.join([str(a) for a in args])))) raise e log.warning( - _(u'exception while running query, retrying ({try_}): {msg}').format( + _('exception while running query, retrying ({try_}): {msg}').format( try_ = 6 - retry, msg = e)) kw['query_retry'] = retry @@ -175,14 +175,14 @@ retry -= 1 if retry == 0: log.error( - _(u'too many interaction tries, we abandon! Error message: {msg}\n' - u'interaction method was: {interaction}\n' - u'interaction arguments were: {args}' + _('too many interaction tries, we abandon! 
Error message: {msg}\n' + 'interaction method was: {interaction}\n' + 'interaction arguments were: {args}' .format(msg=e, interaction=interaction, - args=u', '.join([unicode(a) for a in args])))) + args=', '.join([str(a) for a in args])))) raise e log.warning( - _(u'exception while running interaction, retrying ({try_}): {msg}') + _('exception while running interaction, retrying ({try_}): {msg}') .format(try_ = 4 - retry, msg = e)) kw['interaction_retry'] = retry return self._runInteraction(interaction, *args, **kw) @@ -204,7 +204,7 @@ if new_base: # the dir may not exist if it's not the XDG recommended one dir_ = os.path.dirname(db_filename) if not os.path.exists(dir_): - os.makedirs(dir_, 0700) + os.makedirs(dir_, 0o700) def foreignKeysOn(sqlite): sqlite.execute('PRAGMA foreign_keys = ON') @@ -240,7 +240,7 @@ if statements is None: return defer.succeed(None) - log.debug(u"\n===== COMMITTING STATEMENTS =====\n%s\n============\n\n" % '\n'.join(statements)) + log.debug("\n===== COMMITTING STATEMENTS =====\n%s\n============\n\n" % '\n'.join(statements)) d = self.dbpool.runInteraction(self._updateDb, tuple(statements)) return d @@ -270,7 +270,7 @@ def getProfilesList(self): """"Return list of all registered profiles""" - return self.profiles.keys() + return list(self.profiles.keys()) def hasProfile(self, profile_name): """return True if profile_name exists @@ -283,13 +283,13 @@ try: return self.profiles[profile_name] in self.components except KeyError: - raise exceptions.NotFound(u"the requested profile doesn't exists") + raise exceptions.NotFound("the requested profile doesn't exists") def getEntryPoint(self, profile_name): try: return self.components[self.profiles[profile_name]] except KeyError: - raise exceptions.NotFound(u"the requested profile doesn't exists or is not a component") + raise exceptions.NotFound("the requested profile doesn't exists or is not a component") def createProfile(self, name, component=None): """Create a new profile @@ -326,7 +326,7 @@ 
@return: deferred triggered once profile is actually deleted """ def deletionError(failure_): - log.error(_(u"Can't delete profile [%s]") % name) + log.error(_("Can't delete profile [%s]") % name) return failure_ def delete(txn): @@ -359,7 +359,7 @@ for param in result: category, name, value = param params_gen[(category, name)] = value - log.debug(_(u"loading general parameters from database")) + log.debug(_("loading general parameters from database")) return self.dbpool.runQuery("SELECT category,name,value FROM param_gen").addCallback(fillParams) def loadIndParams(self, params_ind, profile): @@ -374,7 +374,7 @@ for param in result: category, name, value = param params_ind[(category, name)] = value - log.debug(_(u"loading individual parameters from database")) + log.debug(_("loading individual parameters from database")) d = self.dbpool.runQuery("SELECT category,name,value FROM param_ind WHERE profile_id=?", (self.profiles[profile], )) d.addCallback(fillParams) return d @@ -399,7 +399,7 @@ @param value: value to set @return: deferred""" d = self.dbpool.runQuery("REPLACE INTO param_gen(category,name,value) VALUES (?,?,?)", (category, name, value)) - d.addErrback(lambda ignore: log.error(_(u"Can't set general parameter (%(category)s/%(name)s) in database" % {"category": category, "name": name}))) + d.addErrback(lambda ignore: log.error(_("Can't set general parameter (%(category)s/%(name)s) in database" % {"category": category, "name": name}))) return d def setIndParam(self, category, name, value, profile): @@ -412,7 +412,7 @@ @return: deferred """ d = self.dbpool.runQuery("REPLACE INTO param_ind(category,name,profile_id,value) VALUES (?,?,?,?)", (category, name, self.profiles[profile], value)) - d.addErrback(lambda ignore: log.error(_(u"Can't set individual parameter (%(category)s/%(name)s) for [%(profile)s] in database" % {"category": category, "name": name, "profile": profile}))) + d.addErrback(lambda ignore: log.error(_("Can't set individual parameter 
(%(category)s/%(name)s) for [%(profile)s] in database" % {"category": category, "name": name, "profile": profile}))) return d ## History @@ -423,14 +423,14 @@ uid = data['uid'] d_list = [] for key in ('message', 'subject'): - for lang, value in data[key].iteritems(): + for lang, value in data[key].items(): d = self.dbpool.runQuery( "INSERT INTO {key}(history_uid, {key}, language) VALUES (?,?,?)" .format(key=key), (uid, value, lang or None)) d.addErrback(lambda __: log.error( - _(u"Can't save following {key} in history (uid: {uid}, lang:{lang}):" - u" {value}").format( + _("Can't save following {key} in history (uid: {uid}, lang:{lang}):" + " {value}").format( key=key, uid=uid, lang=lang, value=value))) d_list.append(d) try: @@ -443,8 +443,8 @@ "INSERT INTO thread(history_uid, thread_id, parent_id) VALUES (?,?,?)", (uid, thread, thread_parent)) d.addErrback(lambda __: log.error( - _(u"Can't save following thread in history (uid: {uid}): thread: " - u"{thread}), parent:{parent}").format( + _("Can't save following thread in history (uid: {uid}): thread: " + "{thread}), parent:{parent}").format( uid=uid, thread=thread, parent=thread_parent))) d_list.append(d) return defer.DeferredList(d_list) @@ -453,24 +453,24 @@ failure_.trap(sqlite3.IntegrityError) sqlite_msg = failure_.value.args[0] if "UNIQUE constraint failed" in sqlite_msg: - log.debug(u"message {} is already in history, not storing it again" + log.debug("message {} is already in history, not storing it again" .format(data['uid'])) if 'received_timestamp' not in data: log.warning( - u"duplicate message is not delayed, this is maybe a bug: data={}" + "duplicate message is not delayed, this is maybe a bug: data={}" .format(data)) # we cancel message to avoid sending duplicate message to frontends raise failure.Failure(exceptions.CancelError("Cancelled duplicated message")) else: - log.error(u"Can't store message in history: {}".format(failure_)) + log.error("Can't store message in history: {}".format(failure_)) 
def _logHistoryError(self, failure_, from_jid, to_jid, data): if failure_.check(exceptions.CancelError): # we propagate CancelError to avoid sending message to frontends raise failure_ log.error(_( - u"Can't save following message in history: from [{from_jid}] to [{to_jid}] " - u"(uid: {uid})") + "Can't save following message in history: from [{from_jid}] to [{to_jid}] " + "(uid: {uid})") .format(from_jid=from_jid.full(), to_jid=to_jid.full(), uid=data['uid'])) def addToHistory(self, data, profile): @@ -478,14 +478,14 @@ @param data(dict): message data as build by SatMessageProtocol.onMessage """ - extra = pickle.dumps({k: v for k, v in data['extra'].iteritems() + extra = pickle.dumps({k: v for k, v in data['extra'].items() if k not in NOT_IN_EXTRA}, 0) from_jid = data['from'] to_jid = data['to'] d = self.dbpool.runQuery( - u"INSERT INTO history(uid, stanza_id, update_uid, profile_id, source, dest, " - u"source_res, dest_res, timestamp, received_timestamp, type, extra) VALUES " - u"(?,?,?,?,?,?,?,?,?,?,?,?)", + "INSERT INTO history(uid, stanza_id, update_uid, profile_id, source, dest, " + "source_res, dest_res, timestamp, received_timestamp, type, extra) VALUES " + "(?,?,?,?,?,?,?,?,?,?,?,?)", (data['uid'], data['extra'].get('stanza_id'), data['extra'].get('update_uid'), self.profiles[profile], data['from'].userhost(), to_jid.userhost(), from_jid.resource, to_jid.resource, data['timestamp'], @@ -508,7 +508,7 @@ if uid != current['uid']: # new message try: - extra = pickle.loads(str(extra or "")) + extra = pickle.loads(extra or b"") except EOFError: extra = {} current = { @@ -543,8 +543,8 @@ else: if thread_parent is not None: log.error( - u"Database inconsistency: thread parent without thread (uid: " - u"{uid}, thread_parent: {parent})" + "Database inconsistency: thread parent without thread (uid: " + "{uid}, thread_parent: {parent})" .format(uid=uid, parent=thread_parent)) return result @@ -575,7 +575,7 @@ if limit == 0: return defer.succeed([]) - query_parts = 
[u"SELECT uid, stanza_id, update_uid, source, dest, source_res, dest_res, timestamp, received_timestamp,\ + query_parts = ["SELECT uid, stanza_id, update_uid, source, dest, source_res, dest_res, timestamp, received_timestamp,\ type, extra, message, message.language, subject, subject.language, thread_id, thread.parent_id\ FROM history LEFT JOIN message ON history.uid = message.history_uid\ LEFT JOIN subject ON history.uid=subject.history_uid\ @@ -587,8 +587,8 @@ values.append(jid_.userhost()) if jid_.resource: values.append(jid_.resource) - return u'({type_}=? AND {type_}_res=?)'.format(type_=type_) - return u'{type_}=?'.format(type_=type_) + return '({type_}=? AND {type_}_res=?)'.format(type_=type_) + return '{type_}=?'.format(type_=type_) if not from_jid and not to_jid: # not jid specified, we want all one2one communications @@ -598,15 +598,15 @@ # we only have one jid specified, we check all messages # from or to this jid jid_ = from_jid or to_jid - query_parts.append(u"AND ({source} OR {dest})".format( - source=test_jid(u'source', jid_), - dest=test_jid(u'dest' , jid_))) + query_parts.append("AND ({source} OR {dest})".format( + source=test_jid('source', jid_), + dest=test_jid('dest' , jid_))) else: # we have 2 jids specified, we check all communications between # those 2 jids query_parts.append( - u"AND (({source_from} AND {dest_to}) " - u"OR ({source_to} AND {dest_from}))".format( + "AND (({source_from} AND {dest_to}) " + "OR ({source_to} AND {dest_from}))".format( source_from=test_jid('source', from_jid), dest_to=test_jid('dest', to_jid), source_to=test_jid('source', to_jid), @@ -619,47 +619,47 @@ q.append(test_jid('source', from_jid)) if to_jid is not None: q.append(test_jid('dest', to_jid)) - query_parts.append(u"AND " + u" AND ".join(q)) + query_parts.append("AND " + " AND ".join(q)) if filters: - if u'timestamp_start' in filters: - query_parts.append(u"AND timestamp>= ?") - values.append(float(filters[u'timestamp_start'])) - if u'body' in filters: + if 
'timestamp_start' in filters: + query_parts.append("AND timestamp>= ?") + values.append(float(filters['timestamp_start'])) + if 'body' in filters: # TODO: use REGEXP (function to be defined) instead of GLOB: https://www.sqlite.org/lang_expr.html - query_parts.append(u"AND message LIKE ?") - values.append(u"%{}%".format(filters['body'])) - if u'search' in filters: - query_parts.append(u"AND (message LIKE ? OR source_res LIKE ?)") - values.extend([u"%{}%".format(filters['search'])] * 2) - if u'types' in filters: + query_parts.append("AND message LIKE ?") + values.append("%{}%".format(filters['body'])) + if 'search' in filters: + query_parts.append("AND (message LIKE ? OR source_res LIKE ?)") + values.extend(["%{}%".format(filters['search'])] * 2) + if 'types' in filters: types = filters['types'].split() - query_parts.append(u"AND type IN ({})".format(u','.join("?"*len(types)))) + query_parts.append("AND type IN ({})".format(','.join("?"*len(types)))) values.extend(types) - if u'not_types' in filters: + if 'not_types' in filters: types = filters['not_types'].split() - query_parts.append(u"AND type NOT IN ({})".format(u','.join("?"*len(types)))) + query_parts.append("AND type NOT IN ({})".format(','.join("?"*len(types)))) values.extend(types) - if u'last_stanza_id' in filters: + if 'last_stanza_id' in filters: # this request get the last message with a "stanza_id" that we # have in history. This is mainly used to retrieve messages sent # while we were offline, using MAM (XEP-0313). 
- if (filters[u'last_stanza_id'] is not True + if (filters['last_stanza_id'] is not True or limit != 1): - raise ValueError(u"Unexpected values for last_stanza_id filter") - query_parts.append(u"AND stanza_id IS NOT NULL") + raise ValueError("Unexpected values for last_stanza_id filter") + query_parts.append("AND stanza_id IS NOT NULL") # timestamp may be identical for 2 close messages (specially when delay is # used) that's why we order ties by received_timestamp # We'll reverse the order in sqliteHistoryToList # we use DESC here so LIMIT keep the last messages - query_parts.append(u"ORDER BY timestamp DESC, history.received_timestamp DESC") + query_parts.append("ORDER BY timestamp DESC, history.received_timestamp DESC") if limit is not None: - query_parts.append(u"LIMIT ?") + query_parts.append("LIMIT ?") values.append(limit) - d = self.dbpool.runQuery(u" ".join(query_parts), values) + d = self.dbpool.runQuery(" ".join(query_parts), values) d.addCallback(self.sqliteHistoryToList) d.addCallback(self.listDict2listTuple) return d @@ -668,32 +668,41 @@ def _privateDataEb(self, failure_, operation, namespace, key=None, profile=None): """generic errback for data queries""" - log.error(_(u"Can't {operation} data in database for namespace {namespace}{and_key}{for_profile}: {msg}").format( + log.error(_("Can't {operation} data in database for namespace {namespace}{and_key}{for_profile}: {msg}").format( operation = operation, namespace = namespace, - and_key = (u" and key " + key) if key is not None else u"", - for_profile = (u' [' + profile + u']') if profile is not None else u'', + and_key = (" and key " + key) if key is not None else "", + for_profile = (' [' + profile + ']') if profile is not None else '', msg = failure_)) + def _load_pickle(self, v): + # FIXME: workaround for Python 3 port, some pickled data are byte while other are strings + try: + return pickle.loads(v) + except TypeError: + data = pickle.loads(v.encode('utf-8')) + log.warning(f"encoding issue in 
pickled data: {data}") + return data + def _generateDataDict(self, query_result, binary): if binary: - return {k: pickle.loads(str(v)) for k,v in query_result} + return {k: self._load_pickle(v) for k,v in query_result} else: return dict(query_result) def _getPrivateTable(self, binary, profile): """Get table to use for private values""" - table = [u'private'] + table = ['private'] if profile is None: - table.append(u'gen') + table.append('gen') else: - table.append(u'ind') + table.append('ind') if binary: - table.append(u'bin') + table.append('bin') - return u'_'.join(table) + return '_'.join(table) def getPrivates(self, namespace, keys=None, binary=False, profile=None): """Get private value(s) from databases @@ -706,27 +715,27 @@ None to use general values @return (dict[unicode, object]): gotten keys/values """ - log.debug(_(u"getting {type}{binary} private values from database for namespace {namespace}{keys}".format( - type = u"general" if profile is None else "individual", - binary = u" binary" if binary else u"", + log.debug(_("getting {type}{binary} private values from database for namespace {namespace}{keys}".format( + type = "general" if profile is None else "individual", + binary = " binary" if binary else "", namespace = namespace, - keys = u" with keys {}".format(u", ".join(keys)) if keys is not None else u""))) + keys = " with keys {}".format(", ".join(keys)) if keys is not None else ""))) table = self._getPrivateTable(binary, profile) - query_parts = [u"SELECT key,value FROM", table, "WHERE namespace=?"] + query_parts = ["SELECT key,value FROM", table, "WHERE namespace=?"] args = [namespace] if keys is not None: - placeholders = u','.join(len(keys) * u'?') - query_parts.append(u'AND key IN (' + placeholders + u')') + placeholders = ','.join(len(keys) * '?') + query_parts.append('AND key IN (' + placeholders + ')') args.extend(keys) if profile is not None: - query_parts.append(u'AND profile_id=?') + query_parts.append('AND profile_id=?') 
args.append(self.profiles[profile]) - d = self.dbpool.runQuery(u" ".join(query_parts), args) + d = self.dbpool.runQuery(" ".join(query_parts), args) d.addCallback(self._generateDataDict, binary) - d.addErrback(self._privateDataEb, u"get", namespace, profile=profile) + d.addErrback(self._privateDataEb, "get", namespace, profile=profile) return d def setPrivateValue(self, namespace, key, value, binary=False, profile=None): @@ -741,7 +750,7 @@ if None, it's a general value """ table = self._getPrivateTable(binary, profile) - query_values_names = [u'namespace', u'key', u'value'] + query_values_names = ['namespace', 'key', 'value'] query_values = [namespace, key] if binary: @@ -750,14 +759,14 @@ query_values.append(value) if profile is not None: - query_values_names.append(u'profile_id') + query_values_names.append('profile_id') query_values.append(self.profiles[profile]) - query_parts = [u"REPLACE INTO", table, u'(', u','.join(query_values_names), u')', - u"VALUES (", u",".join(u'?'*len(query_values_names)), u')'] + query_parts = ["REPLACE INTO", table, '(', ','.join(query_values_names), ')', + "VALUES (", ",".join('?'*len(query_values_names)), ')'] - d = self.dbpool.runQuery(u" ".join(query_parts), query_values) - d.addErrback(self._privateDataEb, u"set", namespace, key, profile=profile) + d = self.dbpool.runQuery(" ".join(query_parts), query_values) + d.addErrback(self._privateDataEb, "set", namespace, key, profile=profile) return d def delPrivateValue(self, namespace, key, binary=False, profile=None): @@ -770,13 +779,13 @@ if None, it's a general value """ table = self._getPrivateTable(binary, profile) - query_parts = [u"DELETE FROM", table, u"WHERE namespace=? AND key=?"] + query_parts = ["DELETE FROM", table, "WHERE namespace=? 
AND key=?"] args = [namespace, key] if profile is not None: - query_parts.append(u"AND profile_id=?") + query_parts.append("AND profile_id=?") args.append(self.profiles[profile]) - d = self.dbpool.runQuery(u" ".join(query_parts), args) - d.addErrback(self._privateDataEb, u"delete", namespace, key, profile=profile) + d = self.dbpool.runQuery(" ".join(query_parts), args) + d.addErrback(self._privateDataEb, "delete", namespace, key, profile=profile) return d def delPrivateNamespace(self, namespace, binary=False, profile=None): @@ -787,19 +796,19 @@ Params are the same as for delPrivateValue """ table = self._getPrivateTable(binary, profile) - query_parts = [u"DELETE FROM", table, u"WHERE namespace=?"] + query_parts = ["DELETE FROM", table, "WHERE namespace=?"] args = [namespace] if profile is not None: - query_parts.append(u"AND profile_id=?") + query_parts.append("AND profile_id=?") args.append(self.profiles[profile]) - d = self.dbpool.runQuery(u" ".join(query_parts), args) - d.addErrback(self._privateDataEb, u"delete namespace", namespace, profile=profile) + d = self.dbpool.runQuery(" ".join(query_parts), args) + d.addErrback(self._privateDataEb, "delete namespace", namespace, profile=profile) return d ## Files @defer.inlineCallbacks - def getFiles(self, client, file_id=None, version=u'', parent=None, type_=None, + def getFiles(self, client, file_id=None, version='', parent=None, type_=None, file_hash=None, hash_algo=None, name=None, namespace=None, mime_type=None, owner=None, access=None, projection=None, unique=False): """retrieve files with with given filters @@ -831,45 +840,45 @@ args = [self.profiles[client.profile]] if file_id is not None: - filters.append(u'id=?') + filters.append('id=?') args.append(file_id) if version is not None: - filters.append(u'version=?') + filters.append('version=?') args.append(version) if parent is not None: - filters.append(u'parent=?') + filters.append('parent=?') args.append(parent) if type_ is not None: - 
filters.append(u'type=?') + filters.append('type=?') args.append(type_) if file_hash is not None: - filters.append(u'file_hash=?') + filters.append('file_hash=?') args.append(file_hash) if hash_algo is not None: - filters.append(u'hash_algo=?') + filters.append('hash_algo=?') args.append(hash_algo) if name is not None: - filters.append(u'name=?') + filters.append('name=?') args.append(name) if namespace is not None: - filters.append(u'namespace=?') + filters.append('namespace=?') args.append(namespace) if mime_type is not None: - filters.append(u'mime_type=?') + filters.append('mime_type=?') args.append(mime_type) if owner is not None: - filters.append(u'owner=?') + filters.append('owner=?') args.append(owner.full()) if access is not None: raise NotImplementedError('Access check is not implemented yet') # a JSON comparison is needed here - filters = u' AND '.join(filters) + filters = ' AND '.join(filters) query_parts.append(filters) - query = u' '.join(query_parts) + query = ' '.join(query_parts) result = yield self.dbpool.runQuery(query, args) - files_data = [dict(zip(projection, row)) for row in result] + files_data = [dict(list(zip(projection, row))) for row in result] to_parse = {'access', 'extra'}.intersection(projection) to_filter = {'owner'}.intersection(projection) if to_parse or to_filter: @@ -882,7 +891,7 @@ file_data['owner'] = jid.JID(owner) defer.returnValue(files_data) - def setFile(self, client, name, file_id, version=u'', parent=None, type_=C.FILE_TYPE_FILE, + def setFile(self, client, name, file_id, version='', parent=None, type_=C.FILE_TYPE_FILE, file_hash=None, hash_algo=None, size=None, namespace=None, mime_type=None, created=None, modified=None, owner=None, access=None, extra=None): """set a file metadata @@ -921,12 +930,12 @@ json.dumps(access) if access else None, json.dumps(extra) if extra else None, self.profiles[client.profile])) - d.addErrback(lambda failure: log.error(_(u"Can't save file metadata for [{profile}]: 
{reason}".format(profile=client.profile, reason=failure)))) + d.addErrback(lambda failure: log.error(_("Can't save file metadata for [{profile}]: {reason}".format(profile=client.profile, reason=failure)))) return d def _fileUpdate(self, cursor, file_id, column, update_cb): query = 'SELECT {column} FROM files where id=?'.format(column=column) - for i in xrange(5): + for i in range(5): cursor.execute(query, [file_id]) try: older_value_raw = cursor.fetchone()[0] @@ -951,9 +960,9 @@ else: if cursor.rowcount == 1: break; - log.warning(_(u"table not updated, probably due to race condition, trying again ({tries})").format(tries=i+1)) + log.warning(_("table not updated, probably due to race condition, trying again ({tries})").format(tries=i+1)) else: - log.error(_(u"Can't update file table")) + log.error(_("Can't update file table")) def fileUpdate(self, file_id, column, update_cb): """Update a column value using a method to avoid race conditions @@ -1072,17 +1081,17 @@ update_raw = yield self.update2raw(update_data, True) defer.returnValue(update_raw) else: - log.error(_(u"schema version is up-to-date, but local schema differ from expected current schema")) + log.error(_("schema version is up-to-date, but local schema differ from expected current schema")) update_data = self.generateUpdateData(local_sch, current_sch, True) update_raw = yield self.update2raw(update_data) - log.warning(_(u"Here are the commands that should fix the situation, use at your own risk (do a backup before modifying database), you can go to SàT's MUC room at sat@chat.jabberfr.org for help\n### SQL###\n%s\n### END SQL ###\n") % u'\n'.join("%s;" % statement for statement in update_raw)) + log.warning(_("Here are the commands that should fix the situation, use at your own risk (do a backup before modifying database), you can go to SàT's MUC room at sat@chat.jabberfr.org for help\n### SQL###\n%s\n### END SQL ###\n") % '\n'.join("%s;" % statement for statement in update_raw)) raise 
exceptions.DatabaseError("Database mismatch") else: if local_version > CURRENT_DB_VERSION: log.error(_( - u"You database version is higher than the one used in this SàT " - u"version, are you using several version at the same time? We " - u"can't run SàT with this database.")) + "You database version is higher than the one used in this SàT " + "version, are you using several version at the same time? We " + "can't run SàT with this database.")) sys.exit(1) # Database is not up-to-date, we'll do the update @@ -1091,7 +1100,7 @@ else: log.info(_("Database schema has changed, local database will be updated")) update_raw = [] - for version in xrange(local_version + 1, CURRENT_DB_VERSION + 1): + for version in range(local_version + 1, CURRENT_DB_VERSION + 1): try: update_data = DATABASE_SCHEMAS[version] except KeyError: @@ -1150,17 +1159,17 @@ ret = [] assert isinstance(data, tuple) for table, col_data in data: - assert isinstance(table, basestring) + assert isinstance(table, str) assert isinstance(col_data, tuple) for cols in col_data: if isinstance(cols, tuple): - assert all([isinstance(c, basestring) for c in cols]) - indexed_cols = u','.join(cols) - elif isinstance(cols, basestring): + assert all([isinstance(c, str) for c in cols]) + indexed_cols = ','.join(cols) + elif isinstance(cols, str): indexed_cols = cols else: - raise exceptions.InternalError(u"unexpected index columns value") - index_name = table + u'__' + indexed_cols.replace(u',', u'_') + raise exceptions.InternalError("unexpected index columns value") + index_name = table + '__' + indexed_cols.replace(',', '_') ret.append(Updater.INDEX_SQL % (index_name, table, indexed_cols)) return ret @@ -1173,7 +1182,7 @@ @return: hash as string """ hash_ = hashlib.sha1() - tables = data.keys() + tables = list(data.keys()) tables.sort() def stmnts2str(stmts): @@ -1181,7 +1190,9 @@ for table in tables: col_defs, col_constr = data[table] - hash_.update("%s:%s:%s" % (table, stmnts2str(col_defs), stmnts2str(col_constr))) 
+ hash_.update( + ("%s:%s:%s" % (table, stmnts2str(col_defs), stmnts2str(col_constr))) + .encode('utf-8')) return hash_.digest() def rawStatements2data(self, raw_statements): @@ -1324,7 +1335,7 @@ def update_v8(self): """Update database from v7 to v8 (primary keys order changes + indexes)""" - log.info(u"Database update to v8") + log.info("Database update to v8") statements = ["PRAGMA foreign_keys = OFF"] # here is a copy of create and index data, we can't use "current" table @@ -1357,11 +1368,11 @@ schema = {table: create[table]} cols = [d.split()[0] for d in schema[table][0]] statements.extend(Updater.createData2Raw(schema)) - statements.append(u"INSERT INTO {table}({cols}) " - u"SELECT {cols} FROM {table}_old".format( + statements.append("INSERT INTO {table}({cols}) " + "SELECT {cols} FROM {table}_old".format( table=table, - cols=u','.join(cols))) - statements.append(u"DROP TABLE {}_old".format(table)) + cols=','.join(cols))) + statements.append("DROP TABLE {}_old".format(table)) statements.extend(Updater.indexData2Raw(index)) statements.append("PRAGMA foreign_keys = ON") @@ -1370,48 +1381,48 @@ @defer.inlineCallbacks def update_v7(self): """Update database from v6 to v7 (history unique constraint change)""" - log.info(u"Database update to v7, this may be long depending on your history " - u"size, please be patient.") + log.info("Database update to v7, this may be long depending on your history " + "size, please be patient.") - log.info(u"Some cleaning first") + log.info("Some cleaning first") # we need to fix duplicate stanza_id, as it can result in conflicts with the new schema # normally database should not contain any, but better safe than sorry. 
rows = yield self.dbpool.runQuery( - u"SELECT stanza_id, COUNT(*) as c FROM history WHERE stanza_id is not NULL " - u"GROUP BY stanza_id HAVING c>1") + "SELECT stanza_id, COUNT(*) as c FROM history WHERE stanza_id is not NULL " + "GROUP BY stanza_id HAVING c>1") if rows: count = sum([r[1] for r in rows]) - len(rows) - log.info(u"{count} duplicate stanzas found, cleaning".format(count=count)) + log.info("{count} duplicate stanzas found, cleaning".format(count=count)) for stanza_id, count in rows: - log.info(u"cleaning duplicate stanza {stanza_id}".format(stanza_id=stanza_id)) + log.info("cleaning duplicate stanza {stanza_id}".format(stanza_id=stanza_id)) row_uids = yield self.dbpool.runQuery( "SELECT uid FROM history WHERE stanza_id = ? LIMIT ?", (stanza_id, count-1)) uids = [r[0] for r in row_uids] yield self.dbpool.runQuery( - "DELETE FROM history WHERE uid IN ({})".format(u",".join(u"?"*len(uids))), + "DELETE FROM history WHERE uid IN ({})".format(",".join("?"*len(uids))), uids) def deleteInfo(txn): # with foreign_keys on, the delete takes ages, so we deactivate it here # the time to delete info messages from history. 
txn.execute("PRAGMA foreign_keys = OFF") - txn.execute(u"DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM subject WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM thread WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM history WHERE type='info'") + txn.execute("DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM subject WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM thread WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM history WHERE type='info'") # not sure that is is necessary to reactivate here, but in doubt… txn.execute("PRAGMA foreign_keys = ON") - log.info(u'Deleting "info" messages (this can take a while)') + log.info('Deleting "info" messages (this can take a while)') yield self.dbpool.runInteraction(deleteInfo) - log.info(u"Cleaning done") + log.info("Cleaning done") # we have to rename table we will replace # tables referencing history need to be replaced to, else reference would @@ -1423,68 +1434,68 @@ yield self.dbpool.runQuery("ALTER TABLE thread RENAME TO thread_old") # history - query = (u"CREATE TABLE history (uid TEXT PRIMARY KEY, stanza_id TEXT, " - u"update_uid TEXT, profile_id INTEGER, source TEXT, dest TEXT, " - u"source_res TEXT, dest_res TEXT, timestamp DATETIME NOT NULL, " - u"received_timestamp DATETIME, type TEXT, extra BLOB, " - u"FOREIGN KEY(profile_id) REFERENCES profiles(id) ON DELETE CASCADE, " - u"FOREIGN KEY(type) REFERENCES message_types(type), " - u"UNIQUE (profile_id, stanza_id, 
source, dest))") + query = ("CREATE TABLE history (uid TEXT PRIMARY KEY, stanza_id TEXT, " + "update_uid TEXT, profile_id INTEGER, source TEXT, dest TEXT, " + "source_res TEXT, dest_res TEXT, timestamp DATETIME NOT NULL, " + "received_timestamp DATETIME, type TEXT, extra BLOB, " + "FOREIGN KEY(profile_id) REFERENCES profiles(id) ON DELETE CASCADE, " + "FOREIGN KEY(type) REFERENCES message_types(type), " + "UNIQUE (profile_id, stanza_id, source, dest))") yield self.dbpool.runQuery(query) # message - query = (u"CREATE TABLE message (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" - u", message TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " - u"history(uid) ON DELETE CASCADE)") + query = ("CREATE TABLE message (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" + ", message TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " + "history(uid) ON DELETE CASCADE)") yield self.dbpool.runQuery(query) # subject - query = (u"CREATE TABLE subject (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" - u", subject TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " - u"history(uid) ON DELETE CASCADE)") + query = ("CREATE TABLE subject (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" + ", subject TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " + "history(uid) ON DELETE CASCADE)") yield self.dbpool.runQuery(query) # thread - query = (u"CREATE TABLE thread (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" - u", thread_id TEXT, parent_id TEXT, FOREIGN KEY(history_uid) REFERENCES " - u"history(uid) ON DELETE CASCADE)") + query = ("CREATE TABLE thread (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" + ", thread_id TEXT, parent_id TEXT, FOREIGN KEY(history_uid) REFERENCES " + "history(uid) ON DELETE CASCADE)") yield self.dbpool.runQuery(query) - log.info(u"Now transfering old data to new tables, please be patient.") + log.info("Now transfering old data to new tables, please be patient.") - log.info(u"\nTransfering table history") - query = (u"INSERT 
INTO history (uid, stanza_id, update_uid, profile_id, source, " - u"dest, source_res, dest_res, timestamp, received_timestamp, type, extra" - u") SELECT uid, stanza_id, update_uid, profile_id, source, dest, " - u"source_res, dest_res, timestamp, received_timestamp, type, extra " - u"FROM history_old") + log.info("\nTransfering table history") + query = ("INSERT INTO history (uid, stanza_id, update_uid, profile_id, source, " + "dest, source_res, dest_res, timestamp, received_timestamp, type, extra" + ") SELECT uid, stanza_id, update_uid, profile_id, source, dest, " + "source_res, dest_res, timestamp, received_timestamp, type, extra " + "FROM history_old") yield self.dbpool.runQuery(query) - log.info(u"\nTransfering table message") - query = (u"INSERT INTO message (id, history_uid, message, language) SELECT id, " - u"history_uid, message, language FROM message_old") + log.info("\nTransfering table message") + query = ("INSERT INTO message (id, history_uid, message, language) SELECT id, " + "history_uid, message, language FROM message_old") yield self.dbpool.runQuery(query) - log.info(u"\nTransfering table subject") - query = (u"INSERT INTO subject (id, history_uid, subject, language) SELECT id, " - u"history_uid, subject, language FROM subject_old") + log.info("\nTransfering table subject") + query = ("INSERT INTO subject (id, history_uid, subject, language) SELECT id, " + "history_uid, subject, language FROM subject_old") yield self.dbpool.runQuery(query) - log.info(u"\nTransfering table thread") - query = (u"INSERT INTO thread (id, history_uid, thread_id, parent_id) SELECT id" - u", history_uid, thread_id, parent_id FROM thread_old") + log.info("\nTransfering table thread") + query = ("INSERT INTO thread (id, history_uid, thread_id, parent_id) SELECT id" + ", history_uid, thread_id, parent_id FROM thread_old") yield self.dbpool.runQuery(query) - log.info(u"\nRemoving old tables") + log.info("\nRemoving old tables") # because of foreign keys, tables referencing 
history_old # must be deleted first yield self.dbpool.runQuery("DROP TABLE thread_old") yield self.dbpool.runQuery("DROP TABLE subject_old") yield self.dbpool.runQuery("DROP TABLE message_old") yield self.dbpool.runQuery("DROP TABLE history_old") - log.info(u"\nReducing database size (this can take a while)") + log.info("\nReducing database size (this can take a while)") yield self.dbpool.runQuery("VACUUM") - log.info(u"Database update done :)") + log.info("Database update done :)") @defer.inlineCallbacks def update_v3(self): @@ -1494,7 +1505,7 @@ # big database for tests. If issues are happening, we can cut it # in smaller transactions using LIMIT and by deleting already updated # messages - log.info(u"Database update to v3, this may take a while") + log.info("Database update to v3, this may take a while") # we need to fix duplicate timestamp, as it can result in conflicts with the new schema rows = yield self.dbpool.runQuery("SELECT timestamp, COUNT(*) as c FROM history GROUP BY timestamp HAVING c>1") @@ -1506,10 +1517,10 @@ for idx, (id_,) in enumerate(ids_rows): fixed.append(id_) yield self.dbpool.runQuery("UPDATE history SET timestamp=? 
WHERE id=?", (float(timestamp) + idx * 0.001, id_)) - log.info(u"fixed messages with ids {}".format(u', '.join([unicode(id_) for id_ in fixed]))) + log.info("fixed messages with ids {}".format(', '.join([str(id_) for id_ in fixed]))) def historySchema(txn): - log.info(u"History schema update") + log.info("History schema update") txn.execute("ALTER TABLE history RENAME TO tmp_sat_update") txn.execute("CREATE TABLE history (uid TEXT PRIMARY KEY, update_uid TEXT, profile_id INTEGER, source TEXT, dest TEXT, source_res TEXT, dest_res TEXT, timestamp DATETIME NOT NULL, received_timestamp DATETIME, type TEXT, extra BLOB, FOREIGN KEY(profile_id) REFERENCES profiles(id) ON DELETE CASCADE, FOREIGN KEY(type) REFERENCES message_types(type), UNIQUE (profile_id, timestamp, source, dest, source_res, dest_res))") txn.execute("INSERT INTO history (uid, profile_id, source, dest, source_res, dest_res, timestamp, type, extra) SELECT id, profile_id, source, dest, source_res, dest_res, timestamp, type, extra FROM tmp_sat_update") @@ -1517,17 +1528,17 @@ yield self.dbpool.runInteraction(historySchema) def newTables(txn): - log.info(u"Creating new tables") + log.info("Creating new tables") txn.execute("CREATE TABLE message (id INTEGER PRIMARY KEY ASC, history_uid INTEGER, message TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES history(uid) ON DELETE CASCADE)") txn.execute("CREATE TABLE thread (id INTEGER PRIMARY KEY ASC, history_uid INTEGER, thread_id TEXT, parent_id TEXT, FOREIGN KEY(history_uid) REFERENCES history(uid) ON DELETE CASCADE)") txn.execute("CREATE TABLE subject (id INTEGER PRIMARY KEY ASC, history_uid INTEGER, subject TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES history(uid) ON DELETE CASCADE)") yield self.dbpool.runInteraction(newTables) - log.info(u"inserting new message type") + log.info("inserting new message type") yield self.dbpool.runQuery("INSERT INTO message_types VALUES (?)", ('info',)) - log.info(u"messages update") + log.info("messages 
update") rows = yield self.dbpool.runQuery("SELECT id, timestamp, message, extra FROM tmp_sat_update") total = len(rows) @@ -1545,7 +1556,7 @@ except EOFError: extra = {} except Exception: - log.warning(u"Can't handle extra data for message id {}, ignoring it".format(id_)) + log.warning("Can't handle extra data for message id {}, ignoring it".format(id_)) extra = {} queries.append(("INSERT INTO message(history_uid, message) VALUES (?,?)", (id_, message))) @@ -1556,9 +1567,9 @@ pass else: try: - subject = subject.decode('utf-8') + subject = subject except UnicodeEncodeError: - log.warning(u"Error while decoding subject, ignoring it") + log.warning("Error while decoding subject, ignoring it") del extra['subject'] else: queries.append(("INSERT INTO subject(history_uid, subject) VALUES (?,?)", (id_, subject))) @@ -1597,7 +1608,7 @@ try: id_ = result[0][0] except IndexError: - log.error(u"Profile of id %d is referenced in 'param_ind' but it doesn't exist!" % profile_id) + log.error("Profile of id %d is referenced in 'param_ind' but it doesn't exist!" % profile_id) return defer.succeed(None) sat_password = xmpp_password
--- a/sat/plugins/plugin_adhoc_dbus.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_adhoc_dbus.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for adding D-Bus to Ad-Hoc Commands @@ -30,8 +30,8 @@ from lxml import etree except ImportError: etree = None - log.warning(u"Missing module lxml, please download/install it from http://lxml.de/ ." - u"Auto D-Bus discovery will be disabled") + log.warning("Missing module lxml, please download/install it from http://lxml.de/ ." + "Auto D-Bus discovery will be disabled") from collections import OrderedDict import os.path import uuid @@ -40,8 +40,8 @@ from dbus.mainloop.glib import DBusGMainLoop except ImportError: dbus = None - log.warning(u"Missing module dbus, please download/install it" - u"auto D-Bus discovery will be disabled") + log.warning("Missing module dbus, please download/install it" + "auto D-Bus discovery will be disabled") else: DBusGMainLoop(set_as_default=True) @@ -50,18 +50,18 @@ FD_NAME = "org.freedesktop.DBus" FD_PATH = "/org/freedekstop/DBus" INTROSPECT_IFACE = "org.freedesktop.DBus.Introspectable" -MPRIS_PREFIX = u"org.mpris.MediaPlayer2" -CMD_GO_BACK = u"GoBack" -CMD_GO_FWD = u"GoFW" +MPRIS_PREFIX = "org.mpris.MediaPlayer2" +CMD_GO_BACK = "GoBack" +CMD_GO_FWD = "GoFW" SEEK_OFFSET = 5 * 1000 * 1000 -MPRIS_COMMANDS = [u"org.mpris.MediaPlayer2.Player." + cmd for cmd in ( - u"Previous", CMD_GO_BACK, u"PlayPause", CMD_GO_FWD, u"Next")] -MPRIS_PATH = u"/org/mpris/MediaPlayer2" +MPRIS_COMMANDS = ["org.mpris.MediaPlayer2.Player." 
+ cmd for cmd in ( + "Previous", CMD_GO_BACK, "PlayPause", CMD_GO_FWD, "Next")] +MPRIS_PATH = "/org/mpris/MediaPlayer2" MPRIS_PROPERTIES = OrderedDict(( - (u"org.mpris.MediaPlayer2", ( + ("org.mpris.MediaPlayer2", ( "Identity", )), - (u"org.mpris.MediaPlayer2.Player", ( + ("org.mpris.MediaPlayer2.Player", ( "Metadata", "PlaybackStatus", "Volume", @@ -69,7 +69,7 @@ )) MPRIS_METADATA_KEY = "Metadata" MPRIS_METADATA_MAP = OrderedDict(( - ("xesam:title", u"Title"), + ("xesam:title", "Title"), )) INTROSPECT_METHOD = "Introspect" @@ -88,7 +88,7 @@ C.PI_DEPENDENCIES: ["XEP-0050"], C.PI_MAIN: "AdHocDBus", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Add D-Bus management to Ad-Hoc commands"""), + C.PI_DESCRIPTION: _("""Add D-Bus management to Ad-Hoc commands"""), } @@ -104,7 +104,7 @@ in_sign="sasasasasasass", out_sign="(sa(sss))", method=self._adHocDBusAddAuto, - async=True, + async_=True, ) host.bridge.addMethod( "adHocRemotesGet", @@ -112,10 +112,10 @@ in_sign="s", out_sign="a(sss)", method=self._adHocRemotesGet, - async=True, + async_=True, ) self._c = host.plugins["XEP-0050"] - host.registerNamespace(u"mediaplayer", NS_MEDIA_PLAYER) + host.registerNamespace("mediaplayer", NS_MEDIA_PLAYER) if dbus is not None: self.session_bus = dbus.SessionBus() self.fd_object = self.session_bus.get_object( @@ -124,7 +124,7 @@ def profileConnected(self, client): if dbus is not None: self._c.addAdHocCommand( - client, self.localMediaCb, D_(u"Media Players"), + client, self.localMediaCb, D_("Media Players"), node=NS_MEDIA_PLAYER, timeout=60*60*6 # 6 hours timeout, to avoid breaking remote # in the middle of a movie @@ -151,7 +151,7 @@ def _DBusGetProperty(self, proxy, interface, name): return self._DBusAsyncCall( - proxy, u"Get", interface, name, interface=u"org.freedesktop.DBus.Properties") + proxy, "Get", interface, name, interface="org.freedesktop.DBus.Properties") def _DBusListNames(self): @@ -271,7 +271,7 @@ elif len(actions) == 2: # we should have the answer here try: - x_elt = 
command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) answer_form = data_form.Form.fromElement(x_elt) command = answer_form["command"] except (KeyError, StopIteration): @@ -295,11 +295,11 @@ return DBusCallback( client, None, session_data, self._c.ACTION.EXECUTE, node ) - form = data_form.Form("form", title=_(u"Updated")) - form.addField(data_form.Field("fixed", u"Command sent")) + form = data_form.Form("form", title=_("Updated")) + form.addField(data_form.Field("fixed", "Command sent")) status = self._c.STATUS.COMPLETED payload = None - note = (self._c.NOTE.INFO, _(u"Command sent")) + note = (self._c.NOTE.INFO, _("Command sent")) else: raise self._c.AdHocError(self._c.ERROR.INTERNAL) @@ -363,18 +363,18 @@ opt.label or opt.value)) except Exception as e: log.warning(_( - u"Can't retrieve remote controllers on {device_jid}: " - u"{reason}".format(device_jid=device_jid, reason=e))) + "Can't retrieve remote controllers on {device_jid}: " + "{reason}".format(device_jid=device_jid, reason=e))) break defer.returnValue(remotes) def doMPRISCommand(self, proxy, command): - iface, command = command.rsplit(u".", 1) + iface, command = command.rsplit(".", 1) if command == CMD_GO_BACK: - command = u'Seek' + command = 'Seek' args = [-SEEK_OFFSET] elif command == CMD_GO_FWD: - command = u'Seek' + command = 'Seek' args = [SEEK_OFFSET] else: args = [] @@ -382,17 +382,17 @@ def addMPRISMetadata(self, form, metadata): """Serialise MRPIS Metadata according to MPRIS_METADATA_MAP""" - for mpris_key, name in MPRIS_METADATA_MAP.iteritems(): + for mpris_key, name in MPRIS_METADATA_MAP.items(): if mpris_key in metadata: - value = unicode(metadata[mpris_key]) - form.addField(data_form.Field(fieldType=u"fixed", + value = str(metadata[mpris_key]) + form.addField(data_form.Field(fieldType="fixed", var=name, value=value)) @defer.inlineCallbacks def localMediaCb(self, client, command_elt, session_data, action, node): try: - x_elt = 
command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None @@ -402,16 +402,16 @@ bus_names = yield self._DBusListNames() bus_names = [b for b in bus_names if b.startswith(MPRIS_PREFIX)] if len(bus_names) == 0: - note = (self._c.NOTE.INFO, D_(u"No media player found.")) + note = (self._c.NOTE.INFO, D_("No media player found.")) defer.returnValue((None, self._c.STATUS.COMPLETED, None, note)) options = [] status = self._c.STATUS.EXECUTING - form = data_form.Form("form", title=D_(u"Media Player Selection"), + form = data_form.Form("form", title=D_("Media Player Selection"), formNamespace=NS_MEDIA_PLAYER) for bus in bus_names: player_name = bus[len(MPRIS_PREFIX)+1:] if not player_name: - log.warning(_(u"Ignoring MPRIS bus without suffix")) + log.warning(_("Ignoring MPRIS bus without suffix")) continue options.append(data_form.Option(bus, player_name)) field = data_form.Field( @@ -423,53 +423,53 @@ else: # player request try: - bus_name = command_form[u"media_player"] + bus_name = command_form["media_player"] except KeyError: - raise ValueError(_(u"missing media_player value")) + raise ValueError(_("missing media_player value")) if not bus_name.startswith(MPRIS_PREFIX): - log.warning(_(u"Media player ad-hoc command trying to use non MPRIS bus. " - u"Hack attempt? Refused bus: {bus_name}").format( + log.warning(_("Media player ad-hoc command trying to use non MPRIS bus. " + "Hack attempt? 
Refused bus: {bus_name}").format( bus_name=bus_name)) - note = (self._c.NOTE.ERROR, D_(u"Invalid player name.")) + note = (self._c.NOTE.ERROR, D_("Invalid player name.")) defer.returnValue((None, self._c.STATUS.COMPLETED, None, note)) try: proxy = self.session_bus.get_object(bus_name, MPRIS_PATH) except dbus.exceptions.DBusException as e: - log.warning(_(u"Can't get D-Bus proxy: {reason}").format(reason=e)) - note = (self._c.NOTE.ERROR, D_(u"Media player is not available anymore")) + log.warning(_("Can't get D-Bus proxy: {reason}").format(reason=e)) + note = (self._c.NOTE.ERROR, D_("Media player is not available anymore")) defer.returnValue((None, self._c.STATUS.COMPLETED, None, note)) try: - command = command_form[u"command"] + command = command_form["command"] except KeyError: pass else: yield self.doMPRISCommand(proxy, command) # we construct the remote control form - form = data_form.Form("form", title=D_(u"Media Player Selection")) - form.addField(data_form.Field(fieldType=u"hidden", - var=u"media_player", + form = data_form.Form("form", title=D_("Media Player Selection")) + form.addField(data_form.Field(fieldType="hidden", + var="media_player", value=bus_name)) - for iface, properties_names in MPRIS_PROPERTIES.iteritems(): + for iface, properties_names in MPRIS_PROPERTIES.items(): for name in properties_names: try: value = yield self._DBusGetProperty(proxy, iface, name) except Exception as e: - log.warning(_(u"Can't retrieve attribute {name}: {reason}") + log.warning(_("Can't retrieve attribute {name}: {reason}") .format(name=name, reason=e)) continue if name == MPRIS_METADATA_KEY: self.addMPRISMetadata(form, value) else: - form.addField(data_form.Field(fieldType=u"fixed", + form.addField(data_form.Field(fieldType="fixed", var=name, - value=unicode(value))) + value=str(value))) - commands = [data_form.Option(c, c.rsplit(u".", 1)[1]) for c in MPRIS_COMMANDS] - form.addField(data_form.Field(fieldType=u"list-single", - var=u"command", + commands = 
[data_form.Option(c, c.rsplit(".", 1)[1]) for c in MPRIS_COMMANDS] + form.addField(data_form.Field(fieldType="list-single", + var="command", options=commands, required=True))
--- a/sat/plugins/plugin_blog_import.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_blog_import.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for import external blogs @@ -31,7 +31,7 @@ import os import os.path import tempfile -import urlparse +import urllib.parse import shortuuid @@ -43,7 +43,7 @@ C.PI_MAIN: "BlogImportPlugin", C.PI_HANDLER: "no", C.PI_DESCRIPTION: _( - u"""Blog import management: + """Blog import management: This plugin manage the different blog importers which can register to it, and handle generic importing tasks.""" ), } @@ -67,7 +67,7 @@ self._p = host.plugins["XEP-0060"] self._m = host.plugins["XEP-0277"] self._s = self.host.plugins["TEXT_SYNTAXES"] - host.plugins["IMPORT"].initialize(self, u"blog") + host.plugins["IMPORT"].initialize(self, "blog") def importItem( self, client, item_import_data, session, options, return_data, service, node @@ -107,7 +107,7 @@ try: item_id = mb_data["id"] except KeyError: - item_id = mb_data["id"] = unicode(shortuuid.uuid()) + item_id = mb_data["id"] = str(shortuuid.uuid()) try: # we keep the link between old url and new blog item @@ -121,7 +121,7 @@ node or self._m.namespace, item_id, ) - log.info(u"url link from {old} to {new}".format(old=old_uri, new=new_uri)) + log.info("url link from {old} to {new}".format(old=old_uri, new=new_uri)) return mb_data @@ -129,7 +129,7 @@ def importSubItems(self, client, item_import_data, mb_data, session, options): # comments data if len(item_import_data["comments"]) != 1: - raise NotImplementedError(u"can't manage multiple comment links") + raise NotImplementedError("can't manage multiple comment links") allow_comments = C.bool(mb_data.get("allow_comments", C.BOOL_FALSE)) if allow_comments: comments_service = yield self._m.getCommentsService(client) @@ -145,13 +145,13 @@ else: if item_import_data["comments"][0]: raise exceptions.DataError( - u"allow_comments set to False, 
but comments are there" + "allow_comments set to False, but comments are there" ) defer.returnValue(None) def publishItem(self, client, mb_data, service, node, session): log.debug( - u"uploading item [{id}]: {title}".format( + "uploading item [{id}]: {title}".format( id=mb_data["id"], title=mb_data.get("title", "") ) ) @@ -182,7 +182,7 @@ else: if "{}_xhtml".format(prefix) in mb_data: raise exceptions.DataError( - u"importer gave {prefix}_rich and {prefix}_xhtml at the same time, this is not allowed".format( + "importer gave {prefix}_rich and {prefix}_xhtml at the same time, this is not allowed".format( prefix=prefix ) ) @@ -200,14 +200,14 @@ else: if "{}_xhtml".format(prefix) in mb_data: log.warning( - u"{prefix}_text will be replaced by converted {prefix}_xhtml, so filters can be handled".format( + "{prefix}_text will be replaced by converted {prefix}_xhtml, so filters can be handled".format( prefix=prefix ) ) del mb_data["{}_text".format(prefix)] else: log.warning( - u"importer gave a text {prefix}, blog filters don't work on text {prefix}".format( + "importer gave a text {prefix}, blog filters don't work on text {prefix}".format( prefix=prefix ) ) @@ -225,8 +225,8 @@ opt_host = options.get(OPT_HOST) if opt_host: # we normalise the domain - parsed_host = urlparse.urlsplit(opt_host) - opt_host = urlparse.urlunsplit( + parsed_host = urllib.parse.urlsplit(opt_host) + opt_host = urllib.parse.urlunsplit( ( parsed_host.scheme or "http", parsed_host.netloc or parsed_host.path, @@ -239,7 +239,7 @@ tmp_dir = tempfile.mkdtemp() try: # TODO: would be nice to also update the hyperlinks to these images, e.g. 
when you have <a href="{url}"><img src="{url}"></a> - for img_elt in xml_tools.findAll(top_elt, names=[u"img"]): + for img_elt in xml_tools.findAll(top_elt, names=["img"]): yield self.imgFilters(client, img_elt, options, opt_host, tmp_dir) finally: os.rmdir(tmp_dir) # XXX: tmp_dir should be empty, or something went wrong @@ -260,21 +260,21 @@ """ try: url = img_elt["src"] - if url[0] == u"/": + if url[0] == "/": if not opt_host: log.warning( - u"host was not specified, we can't deal with src without host ({url}) and have to ignore the following <img/>:\n{xml}".format( + "host was not specified, we can't deal with src without host ({url}) and have to ignore the following <img/>:\n{xml}".format( url=url, xml=img_elt.toXml() ) ) return else: - url = urlparse.urljoin(opt_host, url) + url = urllib.parse.urljoin(opt_host, url) filename = url.rsplit("/", 1)[-1].strip() if not filename: raise KeyError except (KeyError, IndexError): - log.warning(u"ignoring invalid img element: {}".format(img_elt.toXml())) + log.warning("ignoring invalid img element: {}".format(img_elt.toXml())) return # we change the url for the normalized one @@ -288,10 +288,10 @@ pass else: # host is the ignored one, we skip - parsed_url = urlparse.urlsplit(url) + parsed_url = urllib.parse.urlsplit(url) if ignore_host in parsed_url.hostname: log.info( - u"Don't upload image at {url} because of {opt} option".format( + "Don't upload image at {url} because of {opt} option".format( url=url, opt=OPT_UPLOAD_IGNORE_HOST ) ) @@ -304,7 +304,7 @@ try: yield web_client.downloadPage(url.encode("utf-8"), tmp_file) filename = filename.replace( - u"%", u"_" + "%", "_" ) # FIXME: tmp workaround for a bug in prosody http upload __, download_d = yield self._u.upload( client, tmp_file, filename, options=upload_options @@ -312,7 +312,7 @@ download_url = yield download_d except Exception as e: log.warning( - u"can't download image at {url}: {reason}".format(url=url, reason=e) + "can't download image at {url}: 
{reason}".format(url=url, reason=e) ) else: img_elt["src"] = download_url
--- a/sat/plugins/plugin_blog_import_dokuwiki.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_blog_import_dokuwiki.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin to import dokuwiki blogs @@ -28,8 +28,8 @@ from twisted.internet import threads from collections import OrderedDict import calendar -import urllib -import urlparse +import urllib.request, urllib.parse, urllib.error +import urllib.parse import tempfile import re import time @@ -39,13 +39,13 @@ from dokuwiki import DokuWiki, DokuWikiError # this is a new dependency except ImportError: raise exceptions.MissingModule( - u'Missing module dokuwiki, please install it with "pip install dokuwiki"' + 'Missing module dokuwiki, please install it with "pip install dokuwiki"' ) try: from PIL import Image # this is already needed by plugin XEP-0054 except: raise exceptions.MissingModule( - u"Missing module pillow, please download/install it from https://python-pillow.github.io" + "Missing module pillow, please download/install it from https://python-pillow.github.io" ) PLUGIN_INFO = { @@ -58,10 +58,10 @@ C.PI_DESCRIPTION: _("""Blog importer for Dokuwiki blog engine."""), } -SHORT_DESC = D_(u"import posts from Dokuwiki blog engine") +SHORT_DESC = D_("import posts from Dokuwiki blog engine") LONG_DESC = D_( - u"""This importer handle Dokuwiki blog engine. + """This importer handle Dokuwiki blog engine. To use it, you need an admin access to a running Dokuwiki website (local or on the Internet). The importer retrieves the data using @@ -129,7 +129,7 @@ @param post(dict): parsed post data @return (unicode): post unique item id """ - return unicode(post["id"]) + return str(post["id"]) def getPostUpdated(self, post): """Return the update date. 
@@ -137,7 +137,7 @@ @param post(dict): parsed post data @return (unicode): update date """ - return unicode(post["mtime"]) + return str(post["mtime"]) def getPostPublished(self, post): """Try to parse the date from the message ID, else use "mtime". @@ -148,7 +148,7 @@ @param post (dict): parsed post data @return (unicode): publication date """ - id_, default = unicode(post["id"]), unicode(post["mtime"]) + id_, default = str(post["id"]), str(post["mtime"]) try: date = id_.split(":")[-1].split("_")[0] except KeyError: @@ -160,7 +160,7 @@ time_struct = time.strptime(date, "%Y%m%d") except ValueError: return default - return unicode(calendar.timegm(time_struct)) + return str(calendar.timegm(time_struct)) def processPost(self, post, profile_jid): """Process a single page. @@ -235,7 +235,7 @@ if count >= self.limit: break - return (self.posts_data.itervalues(), len(self.posts_data)) + return (iter(self.posts_data.values()), len(self.posts_data)) def processContent(self, text, backlinks, profile_jid): """Do text substitutions and file copy. 
@@ -243,7 +243,7 @@ @param text (unicode): message content @param backlinks (list[unicode]): list of backlinks """ - text = text.strip(u"\ufeff") # this is at the beginning of the file (BOM) + text = text.strip("\ufeff") # this is at the beginning of the file (BOM) for backlink in backlinks: src = '/doku.php?id=%s"' % backlink @@ -261,9 +261,9 @@ if self.media_repo: self.moveMedia(link, subs) elif link not in subs: - subs[link] = urlparse.urljoin(self.url, link) + subs[link] = urllib.parse.urljoin(self.url, link) - for url, new_url in subs.iteritems(): + for url, new_url in subs.items(): text = text.replace(url, new_url) return text @@ -274,12 +274,12 @@ @param link (unicode): media link @param subs (dict): substitutions data """ - url = urlparse.urljoin(self.url, link) + url = urllib.parse.urljoin(self.url, link) user_media = re.match(r"(/lib/exe/\w+.php\?)(.*)", link) thumb_width = None if user_media: # media that has been added by the user - params = urlparse.parse_qs(urlparse.urlparse(url).query) + params = urllib.parse.parse_qs(urllib.parse.urlparse(url).query) try: media = params["media"][0] except KeyError: @@ -295,7 +295,7 @@ filename = media.replace(":", "/") # XXX: avoid "precondition failed" error (only keep the media parameter) - url = urlparse.urljoin(self.url, "/lib/exe/fetch.php?media=%s" % media) + url = urllib.parse.urljoin(self.url, "/lib/exe/fetch.php?media=%s" % media) elif link.startswith("/lib/plugins/"): # other link added by a plugin or something else @@ -324,7 +324,7 @@ if not os.path.exists(dest): if not os.path.exists(dirname): os.makedirs(dirname) - urllib.urlretrieve(source, dest) + urllib.request.urlretrieve(source, dest) log.debug("DokuWiki media file copied to %s" % dest) def createThumbnail(self, source, dest, width):
--- a/sat/plugins/plugin_blog_import_dotclear.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_blog_import_dotclear.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for import external blogs @@ -42,10 +42,10 @@ C.PI_DESCRIPTION: _("""Blog importer for Dotclear blog engine."""), } -SHORT_DESC = D_(u"import posts from Dotclear blog engine") +SHORT_DESC = D_("import posts from Dotclear blog engine") LONG_DESC = D_( - u"""This importer handle Dotclear blog engine. + """This importer handle Dotclear blog engine. To use it, you'll need to export your blog to a flat file. You must go in your admin interface and select Plugins/Maintenance then Backup. @@ -55,7 +55,7 @@ location: you must use the absolute path to your backup for the location parameter """ ) -POST_ID_PREFIX = u"sat_dc_" +POST_ID_PREFIX = "sat_dc_" KNOWN_DATA_TYPES = ( "link", "setting", @@ -66,7 +66,7 @@ "comment", "captcha", ) -ESCAPE_MAP = {"r": u"\r", "n": u"\n", '"': u'"', "\\": u"\\"} +ESCAPE_MAP = {"r": "\r", "n": "\n", '"': '"', "\\": "\\"} class DotclearParser(object): @@ -83,7 +83,7 @@ @param post(dict): parsed post data @return (unicode): post unique item id """ - return u"{}_{}_{}_{}:{}".format( + return "{}_{}_{}_{}:{}".format( POST_ID_PREFIX, post["blog_id"], post["user_id"], @@ -99,7 +99,7 @@ """ post_id = comment["post_id"] parent_item_id = self.posts_data[post_id]["blog"]["id"] - return u"{}_comment_{}".format(parent_item_id, comment["comment_id"]) + return "{}_comment_{}".format(parent_item_id, comment["comment_id"]) def getTime(self, data, key): """Parse time as given by dotclear, with timezone handling @@ -125,18 +125,18 @@ if char == '"': # we have reached the end of this field, # we try to parse a new one - yield u"".join(buf) + yield "".join(buf) buf = [] idx += 1 try: separator = fields_data[idx] except IndexError: return - if separator != u",": + if separator != ",": raise 
exceptions.ParsingError("Field separator was expeceted") idx += 1 break # we have a new field - elif char == u"\\": + elif char == "\\": idx += 1 try: char = ESCAPE_MAP[fields_data[idx]] @@ -144,22 +144,22 @@ raise exceptions.ParsingError("Escaped char was expected") except KeyError: char = fields_data[idx] - log.warning(u"Unknown key to escape: {}".format(char)) + log.warning("Unknown key to escape: {}".format(char)) buf.append(char) def parseFields(self, headers, data): - return dict(itertools.izip(headers, self.readFields(data))) + return dict(zip(headers, self.readFields(data))) def postHandler(self, headers, data, index): post = self.parseFields(headers, data) - log.debug(u"({}) post found: {}".format(index, post["post_title"])) + log.debug("({}) post found: {}".format(index, post["post_title"])) mb_data = { "id": self.getPostId(post), "published": self.getTime(post, "post_creadt"), "updated": self.getTime(post, "post_upddt"), "author": post["user_id"], # there use info are not in the archive # TODO: option to specify user info - "content_xhtml": u"{}{}".format( + "content_xhtml": "{}{}".format( post["post_content_xhtml"], post["post_excerpt_xhtml"] ), "title": post["post_title"], @@ -168,7 +168,7 @@ self.posts_data[post["post_id"]] = { "blog": mb_data, "comments": [[]], - "url": u"/post/{}".format(post["post_url"]), + "url": "/post/{}".format(post["post_url"]), } def metaHandler(self, headers, data, index): @@ -178,7 +178,7 @@ tags.add(meta["meta_id"]) def metaFinishedHandler(self): - for post_id, tags in self.tags.iteritems(): + for post_id, tags in self.tags.items(): data_format.iter2dict("tag", tags, self.posts_data[post_id]["blog"]) del self.tags @@ -186,9 +186,9 @@ comment = self.parseFields(headers, data) if comment["comment_site"]: # we don't use atom:uri because it's used for jid in XMPP - content = u'{}\n<hr>\n<a href="{}">author website</a>'.format( + content = '{}\n<hr>\n<a href="{}">author website</a>'.format( comment["comment_content"], - 
cgi.escape(comment["comment_site"]).replace('"', u"%22"), + cgi.escape(comment["comment_site"]).replace('"', "%22"), ) else: content = comment["comment_content"] @@ -208,24 +208,24 @@ def parse(self, db_path): with open(db_path) as f: - signature = f.readline().decode("utf-8") + signature = f.readline() try: version = signature.split("|")[1] except IndexError: version = None - log.debug(u"Dotclear version: {}".format(version)) + log.debug("Dotclear version: {}".format(version)) data_type = None data_headers = None index = None while True: - buf = f.readline().decode("utf-8") + buf = f.readline() if not buf: break if buf.startswith("["): header = buf.split(" ", 1) data_type = header[0][1:] if data_type not in KNOWN_DATA_TYPES: - log.warning(u"unkown data type: {}".format(data_type)) + log.warning("unkown data type: {}".format(data_type)) index = 0 try: data_headers = header[1].split(",") @@ -233,7 +233,7 @@ last_header = data_headers[-1] data_headers[-1] = last_header[: last_header.rfind("]")] except IndexError: - log.warning(u"Can't read data)") + log.warning("Can't read data)") else: if data_type is None: continue @@ -247,7 +247,7 @@ pass else: finished_handler() - log.debug(u"{} data finished".format(data_type)) + log.debug("{} data finished".format(data_type)) data_type = None continue assert data_type @@ -258,7 +258,7 @@ else: fields_handler(data_headers, buf, index) index += 1 - return (self.posts_data.itervalues(), len(self.posts_data)) + return (iter(self.posts_data.values()), len(self.posts_data)) class DotclearImport(object): @@ -272,7 +272,7 @@ def DcImport(self, client, location, options=None): if not os.path.isabs(location): raise exceptions.DataError( - u"An absolute path to backup data need to be given as location" + "An absolute path to backup data need to be given as location" ) dc_parser = DotclearParser() d = threads.deferToThread(dc_parser.parse, location)
--- a/sat/plugins/plugin_comp_file_sharing.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_comp_file_sharing.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for parrot mode (experimental) @@ -55,17 +55,17 @@ C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "FileSharing", C.PI_HANDLER: C.BOOL_TRUE, - C.PI_DESCRIPTION: _(u"""Component hosting and sharing files"""), + C.PI_DESCRIPTION: _("""Component hosting and sharing files"""), } -HASH_ALGO = u"sha-256" +HASH_ALGO = "sha-256" NS_COMMENTS = "org.salut-a-toi.comments" COMMENT_NODE_PREFIX = "org.salut-a-toi.file_comments/" class FileSharing(object): def __init__(self, host): - log.info(_(u"File Sharing initialization")) + log.info(_("File Sharing initialization")) self.host = host self._f = host.plugins["FILE"] self._jf = host.plugins["XEP-0234"] @@ -99,12 +99,12 @@ on file is received, this method create hash/thumbnails if necessary move the file to the right location, and create metadata entry in database """ - name = file_data[u"name"] + name = file_data["name"] extra = {} - if file_data[u"hash_algo"] == HASH_ALGO: - log.debug(_(u"Reusing already generated hash")) - file_hash = file_data[u"hash_hasher"].hexdigest() + if file_data["hash_algo"] == HASH_ALGO: + log.debug(_("Reusing already generated hash")) + file_hash = file_data["hash_hasher"].hexdigest() else: hasher = self._h.getHasher(HASH_ALGO) with open("file_path") as f: @@ -113,7 +113,7 @@ if os.path.isfile(final_path): log.debug( - u"file [{file_hash}] already exists, we can remove temporary one".format( + "file [{file_hash}] already exists, we can remove temporary one".format( file_hash=file_hash ) ) @@ -121,16 +121,16 @@ else: os.rename(file_path, final_path) log.debug( - u"file [{file_hash}] moved to {files_path}".format( + "file [{file_hash}] moved to {files_path}".format( file_hash=file_hash, files_path=self.files_path ) ) - mime_type = file_data.get(u"mime_type") - if 
not mime_type or mime_type == u"application/octet-stream": + mime_type = file_data.get("mime_type") + if not mime_type or mime_type == "application/octet-stream": mime_type = mimetypes.guess_type(name)[0] - if mime_type is not None and mime_type.startswith(u"image"): + if mime_type is not None and mime_type.startswith("image"): thumbnails = extra.setdefault(C.KEY_THUMBNAILS, []) for max_thumb_size in (self._t.SIZE_SMALL, self._t.SIZE_MEDIUM): try: @@ -141,19 +141,19 @@ 60 * 60 * 24 * 31 * 6, ) except Exception as e: - log.warning(_(u"Can't create thumbnail: {reason}").format(reason=e)) + log.warning(_("Can't create thumbnail: {reason}").format(reason=e)) break - thumbnails.append({u"id": thumb_id, u"size": thumb_size}) + thumbnails.append({"id": thumb_id, "size": thumb_size}) self.host.memory.setFile( client, name=name, - version=u"", + version="", file_hash=file_hash, hash_algo=HASH_ALGO, - size=file_data[u"size"], - path=file_data.get(u"path"), - namespace=file_data.get(u"namespace"), + size=file_data["size"], + path=file_data.get("path"), + namespace=file_data.get("namespace"), mime_type=mime_type, owner=peer_jid, extra=extra, @@ -191,49 +191,49 @@ self, client, session, content_data, content_name, file_data, file_elt ): """This method retrieve a file on request, and send if after checking permissions""" - peer_jid = session[u"peer_jid"] + peer_jid = session["peer_jid"] try: found_files = yield self.host.memory.getFiles( client, peer_jid=peer_jid, - name=file_data.get(u"name"), - file_hash=file_data.get(u"file_hash"), - hash_algo=file_data.get(u"hash_algo"), - path=file_data.get(u"path"), - namespace=file_data.get(u"namespace"), + name=file_data.get("name"), + file_hash=file_data.get("file_hash"), + hash_algo=file_data.get("hash_algo"), + path=file_data.get("path"), + namespace=file_data.get("namespace"), ) except exceptions.NotFound: found_files = None except exceptions.PermissionError: log.warning( - _(u"{peer_jid} is trying to access an unauthorized file: 
{name}").format( - peer_jid=peer_jid, name=file_data.get(u"name") + _("{peer_jid} is trying to access an unauthorized file: {name}").format( + peer_jid=peer_jid, name=file_data.get("name") ) ) defer.returnValue(False) if not found_files: log.warning( - _(u"no matching file found ({file_data})").format(file_data=file_data) + _("no matching file found ({file_data})").format(file_data=file_data) ) defer.returnValue(False) # we only use the first found file found_file = found_files[0] - if found_file[u'type'] != C.FILE_TYPE_FILE: - raise TypeError(u"a file was expected, type is {type_}".format( - type_=found_file[u'type'])) - file_hash = found_file[u"file_hash"] + if found_file['type'] != C.FILE_TYPE_FILE: + raise TypeError("a file was expected, type is {type_}".format( + type_=found_file['type'])) + file_hash = found_file["file_hash"] file_path = os.path.join(self.files_path, file_hash) - file_data[u"hash_hasher"] = hasher = self._h.getHasher(found_file[u"hash_algo"]) - size = file_data[u"size"] = found_file[u"size"] - file_data[u"file_hash"] = file_hash - file_data[u"hash_algo"] = found_file[u"hash_algo"] + file_data["hash_hasher"] = hasher = self._h.getHasher(found_file["hash_algo"]) + size = file_data["size"] = found_file["size"] + file_data["file_hash"] = file_hash + file_data["hash_algo"] = found_file["hash_algo"] # we complete file_elt so peer can have some details on the file - if u"name" not in file_data: - file_elt.addElement(u"name", content=found_file[u"name"]) - file_elt.addElement(u"size", content=unicode(size)) + if "name" not in file_data: + file_elt.addElement("name", content=found_file["name"]) + file_elt.addElement("size", content=str(size)) content_data["stream_object"] = stream.FileStreamObject( self.host, client, @@ -268,11 +268,11 @@ comment_elt = file_elt.addElement((NS_COMMENTS, "comments"), content=comments_url) try: - count = len(extra_args[u"extra"][u"comments"]) + count = len(extra_args["extra"]["comments"]) except KeyError: count = 0 - 
comment_elt["count"] = unicode(count) + comment_elt["count"] = str(count) return True def _getFileComments(self, file_elt, file_data): @@ -280,7 +280,7 @@ comments_elt = next(file_elt.elements(NS_COMMENTS, "comments")) except StopIteration: return - file_data["comments_url"] = unicode(comments_elt) + file_data["comments_url"] = str(comments_elt) file_data["comments_count"] = comments_elt["count"] return True
--- a/sat/plugins/plugin_comp_file_sharing_management.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_comp_file_sharing_management.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -31,25 +31,25 @@ PLUGIN_INFO = { - C.PI_NAME: u"File Sharing Management", - C.PI_IMPORT_NAME: u"FILE_SHARING_MANAGEMENT", + C.PI_NAME: "File Sharing Management", + C.PI_IMPORT_NAME: "FILE_SHARING_MANAGEMENT", C.PI_MODES: [C.PLUG_MODE_COMPONENT], - C.PI_TYPE: u"EXP", + C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0050", u"XEP-0264"], + C.PI_DEPENDENCIES: ["XEP-0050", "XEP-0264"], C.PI_RECOMMENDATIONS: [], - C.PI_MAIN: u"FileSharingManagement", - C.PI_HANDLER: u"no", + C.PI_MAIN: "FileSharingManagement", + C.PI_HANDLER: "no", C.PI_DESCRIPTION: _( - u"Experimental handling of file management for file sharing. This plugins allows " - u"to change permissions of stored files/directories or remove them." + "Experimental handling of file management for file sharing. This plugins allows " + "to change permissions of stored files/directories or remove them." ), } -NS_FILE_MANAGEMENT = u"https://salut-a-toi.org/protocol/file-management:0" -NS_FILE_MANAGEMENT_PERM = u"https://salut-a-toi.org/protocol/file-management:0#perm" -NS_FILE_MANAGEMENT_DELETE = u"https://salut-a-toi.org/protocol/file-management:0#delete" -NS_FILE_MANAGEMENT_THUMB = u"https://salut-a-toi.org/protocol/file-management:0#thumb" +NS_FILE_MANAGEMENT = "https://salut-a-toi.org/protocol/file-management:0" +NS_FILE_MANAGEMENT_PERM = "https://salut-a-toi.org/protocol/file-management:0#perm" +NS_FILE_MANAGEMENT_DELETE = "https://salut-a-toi.org/protocol/file-management:0#delete" +NS_FILE_MANAGEMENT_THUMB = "https://salut-a-toi.org/protocol/file-management:0#thumb" class WorkflowError(Exception): @@ -68,7 +68,7 @@ # syntax?) should be elaborated and proposed as a standard. 
def __init__(self, host): - log.info(_(u"File Sharing Management plugin initialization")) + log.info(_("File Sharing Management plugin initialization")) self.host = host self._c = host.plugins["XEP-0050"] self._t = host.plugins["XEP-0264"] @@ -76,17 +76,17 @@ def profileConnected(self, client): self._c.addAdHocCommand( - client, self._onChangeFile, u"Change Permissions of File(s)", + client, self._onChangeFile, "Change Permissions of File(s)", node=NS_FILE_MANAGEMENT_PERM, allowed_magics=C.ENTITY_ALL, ) self._c.addAdHocCommand( - client, self._onDeleteFile, u"Delete File(s)", + client, self._onDeleteFile, "Delete File(s)", node=NS_FILE_MANAGEMENT_DELETE, allowed_magics=C.ENTITY_ALL, ) self._c.addAdHocCommand( - client, self._onGenThumbnails, u"Generate Thumbnails", + client, self._onGenThumbnails, "Generate Thumbnails", node=NS_FILE_MANAGEMENT_THUMB, allowed_magics=C.ENTITY_ALL, ) @@ -109,7 +109,7 @@ @return (tuple): arguments to use in defer.returnValue """ status = self._c.STATUS.EXECUTING - form = data_form.Form("form", title=u"File Management", + form = data_form.Form("form", title="File Management", formNamespace=NS_FILE_MANAGEMENT) field = data_form.Field( @@ -136,17 +136,17 @@ """ fields = command_form.fields try: - path = fields[u'path'].value.strip() - namespace = fields[u'namespace'].value or None + path = fields['path'].value.strip() + namespace = fields['namespace'].value or None except KeyError: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) if not path: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] requestor_bare = requestor.userhostJID() - path = path.rstrip(u'/') + path = path.rstrip('/') parent_path, basename = os.path.split(path) # TODO: if parent_path and basename are empty, we ask for root directory @@ -158,31 +158,31 @@ namespace=namespace) found_file = found_files[0] except (exceptions.NotFound, IndexError): - raise WorkflowError(self._err(_(u"file not found"))) + 
raise WorkflowError(self._err(_("file not found"))) except exceptions.PermissionError: - raise WorkflowError(self._err(_(u"forbidden"))) + raise WorkflowError(self._err(_("forbidden"))) if found_file['owner'] != requestor_bare: # only owner can manage files - log.warning(_(u"Only owner can manage files")) - raise WorkflowError(self._err(_(u"forbidden"))) + log.warning(_("Only owner can manage files")) + raise WorkflowError(self._err(_("forbidden"))) - session_data[u'found_file'] = found_file - session_data[u'namespace'] = namespace + session_data['found_file'] = found_file + session_data['namespace'] = namespace defer.returnValue(found_file) def _updateReadPermission(self, access, allowed_jids): if not allowed_jids: if C.ACCESS_PERM_READ in access: del access[C.ACCESS_PERM_READ] - elif allowed_jids == u'PUBLIC': + elif allowed_jids == 'PUBLIC': access[C.ACCESS_PERM_READ] = { - u"type": C.ACCESS_TYPE_PUBLIC + "type": C.ACCESS_TYPE_PUBLIC } else: access[C.ACCESS_PERM_READ] = { - u"type": C.ACCESS_TYPE_WHITELIST, - u"jids": [j.full() for j in allowed_jids] + "type": C.ACCESS_TYPE_WHITELIST, + "jids": [j.full() for j in allowed_jids] } @defer.inlineCallbacks @@ -192,30 +192,30 @@ @param file_data(dict): metadata of the file @param allowed_jids(list[jid.JID]): list of entities allowed to read the file """ - assert file_data[u'type'] == C.FILE_TYPE_DIRECTORY + assert file_data['type'] == C.FILE_TYPE_DIRECTORY files_data = yield self.host.memory.getFiles( - client, requestor, parent=file_data[u'id'], namespace=namespace) + client, requestor, parent=file_data['id'], namespace=namespace) for file_data in files_data: - if not file_data[u'access'].get(C.ACCESS_PERM_READ, {}): - log.debug(u"setting {perm} read permission for {name}".format( - perm=allowed_jids, name=file_data[u'name'])) + if not file_data['access'].get(C.ACCESS_PERM_READ, {}): + log.debug("setting {perm} read permission for {name}".format( + perm=allowed_jids, name=file_data['name'])) yield 
self.host.memory.fileUpdate( - file_data[u'id'], u'access', + file_data['id'], 'access', partial(self._updateReadPermission, allowed_jids=allowed_jids)) - if file_data[u'type'] == C.FILE_TYPE_DIRECTORY: - yield self._updateDir(client, requestor, namespace, file_data, u'PUBLIC') + if file_data['type'] == C.FILE_TYPE_DIRECTORY: + yield self._updateDir(client, requestor, namespace, file_data, 'PUBLIC') @defer.inlineCallbacks def _onChangeFile(self, client, command_elt, session_data, action, node): try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None found_file = session_data.get('found_file') - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] requestor_bare = requestor.userhostJID() if command_form is None or len(command_form.fields) == 0: @@ -230,31 +230,31 @@ defer.returnValue(e.err_args) # management request - if found_file[u'type'] == C.FILE_TYPE_DIRECTORY: - instructions = D_(u"Please select permissions for this directory") + if found_file['type'] == C.FILE_TYPE_DIRECTORY: + instructions = D_("Please select permissions for this directory") else: - instructions = D_(u"Please select permissions for this file") + instructions = D_("Please select permissions for this file") - form = data_form.Form("form", title=u"File Management", + form = data_form.Form("form", title="File Management", instructions=[instructions], formNamespace=NS_FILE_MANAGEMENT) field = data_form.Field( "text-multi", "read_allowed", required=False, - desc=u'list of jids allowed to read this file (beside yourself), or ' - u'"PUBLIC" to let a public access' + desc='list of jids allowed to read this file (beside yourself), or ' + '"PUBLIC" to let a public access' ) - read_access = found_file[u"access"].get(C.ACCESS_PERM_READ, {}) - access_type = read_access.get(u'type', C.ACCESS_TYPE_WHITELIST) + 
read_access = found_file["access"].get(C.ACCESS_PERM_READ, {}) + access_type = read_access.get('type', C.ACCESS_TYPE_WHITELIST) if access_type == C.ACCESS_TYPE_PUBLIC: - field.values = [u'PUBLIC'] + field.values = ['PUBLIC'] else: field.values = read_access.get('jids', []) form.addField(field) - if found_file[u'type'] == C.FILE_TYPE_DIRECTORY: + if found_file['type'] == C.FILE_TYPE_DIRECTORY: field = data_form.Field( "boolean", "recursive", value=False, required=False, - desc=u"Files under it will be made public to follow this dir " - u"permission (only if they don't have already a permission set)." + desc="Files under it will be made public to follow this dir " + "permission (only if they don't have already a permission set)." ) form.addField(field) @@ -269,22 +269,22 @@ except KeyError: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) - if read_allowed.value == u'PUBLIC': - allowed_jids = u'PUBLIC' - elif read_allowed.value.strip() == u'': + if read_allowed.value == 'PUBLIC': + allowed_jids = 'PUBLIC' + elif read_allowed.value.strip() == '': allowed_jids = None else: try: allowed_jids = [jid.JID(v.strip()) for v in read_allowed.values if v.strip()] except RuntimeError as e: - log.warning(_(u"Can't use read_allowed values: {reason}").format( + log.warning(_("Can't use read_allowed values: {reason}").format( reason=e)) self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) - if found_file[u'type'] == C.FILE_TYPE_FILE: + if found_file['type'] == C.FILE_TYPE_FILE: yield self.host.memory.fileUpdate( - found_file[u'id'], u'access', + found_file['id'], 'access', partial(self._updateReadPermission, allowed_jids=allowed_jids)) else: try: @@ -292,32 +292,32 @@ except KeyError: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) yield self.host.memory.fileUpdate( - found_file[u'id'], u'access', + found_file['id'], 'access', partial(self._updateReadPermission, allowed_jids=allowed_jids)) if recursive: # we set all file under the directory as public (if they haven't # already a permission 
set), so allowed entities of root directory # can read them. - namespace = session_data[u'namespace'] + namespace = session_data['namespace'] yield self._updateDir( - client, requestor_bare, namespace, found_file, u'PUBLIC') + client, requestor_bare, namespace, found_file, 'PUBLIC') # job done, we can end the session status = self._c.STATUS.COMPLETED payload = None - note = (self._c.NOTE.INFO, _(u"management session done")) + note = (self._c.NOTE.INFO, _("management session done")) defer.returnValue((payload, status, None, note)) @defer.inlineCallbacks def _onDeleteFile(self, client, command_elt, session_data, action, node): try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None found_file = session_data.get('found_file') - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] requestor_bare = requestor.userhostJID() if command_form is None or len(command_form.fields) == 0: @@ -330,18 +330,18 @@ found_file = yield self._getFileData(client, session_data, command_form) except WorkflowError as e: defer.returnValue(e.err_args) - if found_file[u'type'] == C.FILE_TYPE_DIRECTORY: - msg = D_(u"Are you sure to delete directory {name} and all files and " - u"directories under it?").format(name=found_file[u'name']) + if found_file['type'] == C.FILE_TYPE_DIRECTORY: + msg = D_("Are you sure to delete directory {name} and all files and " + "directories under it?").format(name=found_file['name']) else: - msg = D_(u"Are you sure to delete file {name}?" - .format(name=found_file[u'name'])) - form = data_form.Form("form", title=u"File Management", + msg = D_("Are you sure to delete file {name}?" 
+ .format(name=found_file['name'])) + form = data_form.Form("form", title="File Management", instructions = [msg], formNamespace=NS_FILE_MANAGEMENT) field = data_form.Field( "boolean", "confirm", value=False, required=True, - desc=u"check this box to confirm" + desc="check this box to confirm" ) form.addField(field) status = self._c.STATUS.EXECUTING @@ -357,10 +357,10 @@ if not confirmed: note = None else: - recursive = found_file[u'type'] == C.FILE_TYPE_DIRECTORY + recursive = found_file['type'] == C.FILE_TYPE_DIRECTORY yield self.host.memory.fileDelete( - client, requestor_bare, found_file[u'id'], recursive) - note = (self._c.NOTE.INFO, _(u"file deleted")) + client, requestor_bare, found_file['id'], recursive) + note = (self._c.NOTE.INFO, _("file deleted")) status = self._c.STATUS.COMPLETED payload = None defer.returnValue((payload, status, None, note)) @@ -374,16 +374,16 @@ @param file_data(dict): metadata of the file """ - if file_data[u'type'] == C.FILE_TYPE_DIRECTORY: + if file_data['type'] == C.FILE_TYPE_DIRECTORY: sub_files_data = yield self.host.memory.getFiles( - client, requestor, parent=file_data[u'id'], namespace=namespace) + client, requestor, parent=file_data['id'], namespace=namespace) for sub_file_data in sub_files_data: yield self._genThumbs(client, requestor, namespace, sub_file_data) - elif file_data[u'type'] == C.FILE_TYPE_FILE: - mime_type = file_data[u'mime_type'] - file_path = os.path.join(self.files_path, file_data[u'file_hash']) - if mime_type is not None and mime_type.startswith(u"image"): + elif file_data['type'] == C.FILE_TYPE_FILE: + mime_type = file_data['mime_type'] + file_path = os.path.join(self.files_path, file_data['file_hash']) + if mime_type is not None and mime_type.startswith("image"): thumbnails = [] for max_thumb_size in (self._t.SIZE_SMALL, self._t.SIZE_MEDIUM): @@ -395,31 +395,31 @@ 60 * 60 * 24 * 31 * 6, ) except Exception as e: - log.warning(_(u"Can't create thumbnail: {reason}") + log.warning(_("Can't create thumbnail: 
{reason}") .format(reason=e)) break - thumbnails.append({u"id": thumb_id, u"size": thumb_size}) + thumbnails.append({"id": thumb_id, "size": thumb_size}) yield self.host.memory.fileUpdate( - file_data[u'id'], u'extra', + file_data['id'], 'extra', partial(self._updateThumbs, thumbnails=thumbnails)) - log.info(u"thumbnails for [{file_name}] generated" - .format(file_name=file_data[u'name'])) + log.info("thumbnails for [{file_name}] generated" + .format(file_name=file_data['name'])) else: - log.warning(u"unmanaged file type: {type_}".format(type_=file_data[u'type'])) + log.warning("unmanaged file type: {type_}".format(type_=file_data['type'])) @defer.inlineCallbacks def _onGenThumbnails(self, client, command_elt, session_data, action, node): try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None found_file = session_data.get('found_file') - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] if command_form is None or len(command_form.fields) == 0: # root request @@ -432,11 +432,11 @@ except WorkflowError as e: defer.returnValue(e.err_args) - log.info(u"Generating thumbnails as requested") - yield self._genThumbs(client, requestor, found_file[u'namespace'], found_file) + log.info("Generating thumbnails as requested") + yield self._genThumbs(client, requestor, found_file['namespace'], found_file) # job done, we can end the session status = self._c.STATUS.COMPLETED payload = None - note = (self._c.NOTE.INFO, _(u"thumbnails generated")) + note = (self._c.NOTE.INFO, _("thumbnails generated")) defer.returnValue((payload, status, None, note))
--- a/sat/plugins/plugin_dbg_manhole.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_dbg_manhole.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for debugging, using a manhole @@ -29,14 +29,14 @@ log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Manhole debug plugin", - C.PI_IMPORT_NAME: u"manhole", - C.PI_TYPE: u"DEBUG", + C.PI_NAME: "Manhole debug plugin", + C.PI_IMPORT_NAME: "manhole", + C.PI_TYPE: "DEBUG", C.PI_PROTOCOLS: [], C.PI_DEPENDENCIES: [], - C.PI_MAIN: u"Manhole", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: _(u"""Debug plugin to have a telnet server"""), + C.PI_MAIN: "Manhole", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: _("""Debug plugin to have a telnet server"""), } @@ -50,14 +50,14 @@ self.startManhole(port) def startManhole(self, port): - log.warning(_(u"/!\\ Manhole debug server activated, be sure to not use it in " - u"production, this is dangerous /!\\")) - log.info(_(u"You can connect to manhole server using telnet on port {port}") + log.warning(_("/!\\ Manhole debug server activated, be sure to not use it in " + "production, this is dangerous /!\\")) + log.info(_("You can connect to manhole server using telnet on port {port}") .format(port=port)) f = protocol.ServerFactory() namespace = { - u"host": self.host, - u"jid": jid, + "host": self.host, + "jid": jid, } f.protocol = lambda: TelnetTransport(TelnetBootstrapProtocol, insults.ServerProtocol,
--- a/sat/plugins/plugin_exp_command_export.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_command_export.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to export commands (experimental) @@ -52,7 +52,7 @@ def _clean(self, data): if not data: log.error("data should not be empty !") - return u"" + return "" decoded = data.decode("utf-8", "ignore")[: -1 if data[-1] == "\n" else None] return clean_ustr(decoded) @@ -66,7 +66,7 @@ self.client.sendMessage(self.target, {"": self._clean(data)}, no_trigger=True) def processEnded(self, reason): - log.info(u"process finished: %d" % (reason.value.exitCode,)) + log.info("process finished: %d" % (reason.value.exitCode,)) self.parent.removeProcess(self.target, self) def write(self, message): @@ -120,12 +120,12 @@ if spawned_key in self.spawned: try: - body = message_elt.elements(C.NS_CLIENT, "body").next() + body = next(message_elt.elements(C.NS_CLIENT, "body")) except StopIteration: # do not block message without body (chat state notification...) return True - mess_data = unicode(body) + "\n" + mess_data = str(body) + "\n" processes_set = self.spawned[spawned_key] _continue = False exclusive = False @@ -158,7 +158,7 @@ raise jid.InvalidFormat _jid = _jid.userhostJID() except (RuntimeError, jid.InvalidFormat, AttributeError): - log.info(u"invalid target ignored: %s" % (target,)) + log.info("invalid target ignored: %s" % (target,)) continue process_prot = ExportCommandProtocol(self, client, _jid, options) self.spawned.setdefault((_jid, client.profile), set()).add(process_prot)
--- a/sat/plugins/plugin_exp_events.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_events.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -29,7 +29,7 @@ from twisted.words.protocols.jabber import jid, error from twisted.words.xish import domish from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber.xmlstream import XMPPHandler from wokkel import pubsub @@ -41,11 +41,11 @@ C.PI_IMPORT_NAME: "EVENTS", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"INVITATION", u"LIST_INTEREST"], + C.PI_DEPENDENCIES: ["XEP-0060", "INVITATION", "LIST_INTEREST"], C.PI_RECOMMENDATIONS: ["XEP-0277", "EMAIL_INVITATION"], C.PI_MAIN: "Events", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"""Experimental implementation of XMPP events management"""), + C.PI_DESCRIPTION: _("""Experimental implementation of XMPP events management"""), } NS_EVENT = "org.salut-a-toi.event:0" @@ -55,13 +55,13 @@ """Q&D module to handle event attendance answer, experimentation only""" def __init__(self, host): - log.info(_(u"Event plugin initialization")) + log.info(_("Event plugin initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] self._i = self.host.plugins.get("EMAIL_INVITATION") self._b = self.host.plugins.get("XEP-0277") - self.host.registerNamespace(u"event", NS_EVENT) - self.host.plugins[u"INVITATION"].registerNamespace(NS_EVENT, + self.host.registerNamespace("event", NS_EVENT) + self.host.plugins["INVITATION"].registerNamespace(NS_EVENT, self.register) host.bridge.addMethod( "eventGet", @@ -69,7 +69,7 @@ in_sign="ssss", out_sign="(ia{ss})", method=self._eventGet, - async=True, + async_=True, ) host.bridge.addMethod( "eventCreate", @@ -77,7 +77,7 @@ in_sign="ia{ss}ssss", out_sign="s", method=self._eventCreate, - async=True, + 
async_=True, ) host.bridge.addMethod( "eventModify", @@ -85,7 +85,7 @@ in_sign="sssia{ss}s", out_sign="", method=self._eventModify, - async=True, + async_=True, ) host.bridge.addMethod( "eventsList", @@ -93,7 +93,7 @@ in_sign="sss", out_sign="aa{ss}", method=self._eventsList, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteeGet", @@ -101,7 +101,7 @@ in_sign="sss", out_sign="a{ss}", method=self._eventInviteeGet, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteeSet", @@ -109,7 +109,7 @@ in_sign="ssa{ss}s", out_sign="", method=self._eventInviteeSet, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteesList", @@ -117,7 +117,7 @@ in_sign="sss", out_sign="a{sa{ss}}", method=self._eventInviteesList, - async=True, + async_=True, ), host.bridge.addMethod( "eventInvite", @@ -125,7 +125,7 @@ in_sign="sssss", out_sign="", method=self._invite, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteByEmail", @@ -133,7 +133,7 @@ in_sign="ssssassssssss", out_sign="", method=self._inviteByEmail, - async=True, + async_=True, ) def getHandler(self, client): @@ -152,56 +152,56 @@ data = {} - for key in (u"name",): + for key in ("name",): try: data[key] = event_elt[key] except KeyError: continue - for elt_name in (u"description",): + for elt_name in ("description",): try: elt = next(event_elt.elements(NS_EVENT, elt_name)) except StopIteration: continue else: - data[elt_name] = unicode(elt) + data[elt_name] = str(elt) - for elt_name in (u"image", "background-image"): + for elt_name in ("image", "background-image"): try: image_elt = next(event_elt.elements(NS_EVENT, elt_name)) data[elt_name] = image_elt["src"] except StopIteration: continue except KeyError: - log.warning(_(u"no src found for image")) + log.warning(_("no src found for image")) - for uri_type in (u"invitees", u"blog"): + for uri_type in ("invitees", "blog"): try: elt = next(event_elt.elements(NS_EVENT, uri_type)) - uri = data[uri_type + u"_uri"] = elt["uri"] + 
uri = data[uri_type + "_uri"] = elt["uri"] uri_data = xmpp_uri.parseXMPPUri(uri) - if uri_data[u"type"] != u"pubsub": + if uri_data["type"] != "pubsub": raise ValueError except StopIteration: - log.warning(_(u"no {uri_type} element found!").format(uri_type=uri_type)) + log.warning(_("no {uri_type} element found!").format(uri_type=uri_type)) except KeyError: - log.warning(_(u"incomplete {uri_type} element").format(uri_type=uri_type)) + log.warning(_("incomplete {uri_type} element").format(uri_type=uri_type)) except ValueError: - log.warning(_(u"bad {uri_type} element").format(uri_type=uri_type)) + log.warning(_("bad {uri_type} element").format(uri_type=uri_type)) else: - data[uri_type + u"_service"] = uri_data[u"path"] - data[uri_type + u"_node"] = uri_data[u"node"] + data[uri_type + "_service"] = uri_data["path"] + data[uri_type + "_node"] = uri_data["node"] for meta_elt in event_elt.elements(NS_EVENT, "meta"): - key = meta_elt[u"name"] + key = meta_elt["name"] if key in data: log.warning( - u"Ignoring conflicting meta element: {xml}".format( + "Ignoring conflicting meta element: {xml}".format( xml=meta_elt.toXml() ) ) continue - data[key] = unicode(meta_elt) + data[key] = str(meta_elt) if event_elt.link: link_elt = event_elt.link data["service"] = link_elt["service"] @@ -225,11 +225,11 @@ id_ = NS_EVENT items, metadata = yield self._p.getItems(client, service, node, item_ids=[id_]) try: - event_elt = next(items[0].elements(NS_EVENT, u"event")) + event_elt = next(items[0].elements(NS_EVENT, "event")) except StopIteration: - raise exceptions.NotFound(_(u"No event element has been found")) + raise exceptions.NotFound(_("No event element has been found")) except IndexError: - raise exceptions.NotFound(_(u"No event with this id has been found")) + raise exceptions.NotFound(_("No event with this id has been found")) defer.returnValue(event_elt) def register(self, client, name, extra, service, node, event_id, item_elt, @@ -249,16 +249,16 @@ link_elt["node"] = node 
link_elt["item"] = event_id __, event_data = self._parseEventElt(event_elt) - name = event_data.get(u'name') - if u'image' in event_data: - extra = {u'thumb_url': event_data[u'image']} + name = event_data.get('name') + if 'image' in event_data: + extra = {'thumb_url': event_data['image']} else: extra = None - return self.host.plugins[u'LIST_INTEREST'].registerPubsub( + return self.host.plugins['LIST_INTEREST'].registerPubsub( client, NS_EVENT, service, node, event_id, creator, name=name, element=event_elt, extra=extra) - def _eventGet(self, service, node, id_=u"", profile_key=C.PROF_KEY_NONE): + def _eventGet(self, service, node, id_="", profile_key=C.PROF_KEY_NONE): service = jid.JID(service) if service else None node = node if node else NS_EVENT client = self.host.getClient(profile_key) @@ -283,12 +283,12 @@ defer.returnValue(self._parseEventElt(event_elt)) def _eventCreate( - self, timestamp, data, service, node, id_=u"", profile_key=C.PROF_KEY_NONE + self, timestamp, data, service, node, id_="", profile_key=C.PROF_KEY_NONE ): service = jid.JID(service) if service else None node = node or None client = self.host.getClient(profile_key) - data[u"register"] = C.bool(data.get(u"register", C.BOOL_FALSE)) + data["register"] = C.bool(data.get("register", C.BOOL_FALSE)) return self.eventCreate(client, timestamp, data, service, node, id_ or NS_EVENT) @defer.inlineCallbacks @@ -311,32 +311,32 @@ @return (unicode): created node """ if not event_id: - raise ValueError(_(u"event_id must be set")) + raise ValueError(_("event_id must be set")) if not service: service = client.jid.userhostJID() if not node: - node = NS_EVENT + u"__" + shortuuid.uuid() + node = NS_EVENT + "__" + shortuuid.uuid() event_elt = domish.Element((NS_EVENT, "event")) if timestamp is not None and timestamp != -1: formatted_date = utils.xmpp_date(timestamp) event_elt.addElement((NS_EVENT, "date"), content=formatted_date) register = data.pop("register", False) - for key in (u"name",): + for key in 
("name",): if key in data: event_elt[key] = data.pop(key) - for key in (u"description",): + for key in ("description",): if key in data: event_elt.addElement((NS_EVENT, key), content=data.pop(key)) - for key in (u"image", u"background-image"): + for key in ("image", "background-image"): if key in data: elt = event_elt.addElement((NS_EVENT, key)) elt["src"] = data.pop(key) # we first create the invitees and blog nodes (if not specified in data) - for uri_type in (u"invitees", u"blog"): - key = uri_type + u"_uri" - for to_delete in (u"service", u"node"): - k = uri_type + u"_" + to_delete + for uri_type in ("invitees", "blog"): + key = uri_type + "_uri" + for to_delete in ("service", "node"): + k = uri_type + "_" + to_delete if k in data: del data[k] if key not in data: @@ -352,12 +352,12 @@ else: uri = data.pop(key) uri_data = xmpp_uri.parseXMPPUri(uri) - if uri_data[u"type"] != u"pubsub": + if uri_data["type"] != "pubsub": raise ValueError( - _(u"The given URI is not valid: {uri}").format(uri=uri) + _("The given URI is not valid: {uri}").format(uri=uri) ) - uri_service = jid.JID(uri_data[u"path"]) - uri_node = uri_data[u"node"] + uri_service = jid.JID(uri_data["path"]) + uri_node = uri_data["node"] elt = event_elt.addElement((NS_EVENT, uri_type)) elt["uri"] = xmpp_uri.buildXMPPUri( @@ -365,7 +365,7 @@ ) # remaining data are put in <meta> elements - for key in data.keys(): + for key in list(data.keys()): elt = event_elt.addElement((NS_EVENT, "meta"), content=data.pop(key)) elt["name"] = key @@ -374,8 +374,8 @@ # TODO: check auto-create, no need to create node first if available node = yield self._p.createNode(client, service, nodeIdentifier=node) except error.StanzaError as e: - if e.condition == u"conflict": - log.debug(_(u"requested node already exists")) + if e.condition == "conflict": + log.debug(_("requested node already exists")) yield self._p.publish(client, service, node, items=[item_elt]) @@ -388,7 +388,7 @@ profile_key=C.PROF_KEY_NONE): service = 
jid.JID(service) if service else None if not node: - raise ValueError(_(u"missing node")) + raise ValueError(_("missing node")) client = self.host.getClient(profile_key) return self.eventModify( client, service, node, id_ or NS_EVENT, timestamp_update or None, data_update @@ -407,13 +407,13 @@ new_timestamp = event_timestamp if timestamp_update is None else timestamp_update new_data = event_metadata if data_update: - for k, v in data_update.iteritems(): + for k, v in data_update.items(): new_data[k] = v yield self.eventCreate(client, new_timestamp, new_data, service, node, id_) def _eventsListSerialise(self, events): for timestamp, data in events: - data["date"] = unicode(timestamp) + data["date"] = str(timestamp) data["creator"] = C.boolConst(data.get("creator", False)) return [e[1] for e in events] @@ -431,15 +431,15 @@ @return list(tuple(int, dict)): list of events (timestamp + metadata) """ - items, metadata = yield self.host.plugins[u'LIST_INTEREST'].listInterests( + items, metadata = yield self.host.plugins['LIST_INTEREST'].listInterests( client, service, node, namespace=NS_EVENT) events = [] for item in items: try: - event_elt = next(item.interest.pubsub.elements(NS_EVENT, u"event")) + event_elt = next(item.interest.pubsub.elements(NS_EVENT, "event")) except StopIteration: log.warning( - _(u"No event found in item {item_id}, ignoring").format( + _("No event found in item {item_id}, ignoring").format( item_id=item["id"]) ) else: @@ -466,12 +466,12 @@ items, metadata = yield self._p.getItems( client, service, node, item_ids=[client.jid.userhost()] ) - event_elt = next(items[0].elements(NS_EVENT, u"invitee")) + event_elt = next(items[0].elements(NS_EVENT, "invitee")) except (exceptions.NotFound, IndexError): # no item found, event data are not set yet defer.returnValue({}) data = {} - for key in (u"attend", u"guests"): + for key in ("attend", "guests"): try: data[key] = event_elt[key] except KeyError: @@ -495,7 +495,7 @@ guests: an int """ event_elt = 
domish.Element((NS_EVENT, "invitee")) - for key in (u"attend", u"guests"): + for key in ("attend", "guests"): try: event_elt[key] = data.pop(key) except KeyError: @@ -522,15 +522,15 @@ invitees = {} for item in items: try: - event_elt = next(item.elements(NS_EVENT, u"invitee")) + event_elt = next(item.elements(NS_EVENT, "invitee")) except StopIteration: # no item found, event data are not set yet log.warning(_( - u"no data found for {item_id} (service: {service}, node: {node})" + "no data found for {item_id} (service: {service}, node: {node})" .format(item_id=item["id"], service=service, node=node))) else: data = {} - for key in (u"attend", u"guests"): + for key in ("attend", "guests"): try: data[key] = event_elt[key] except KeyError: @@ -558,30 +558,30 @@ @param item_id(unicode): event id """ # FIXME: handle name and extra - name = u'' + name = '' extra = {} if self._b is None: raise exceptions.FeatureNotFound( - _(u'"XEP-0277" (blog) plugin is needed for this feature') + _('"XEP-0277" (blog) plugin is needed for this feature') ) if item_id is None: item_id = NS_EVENT # first we authorize our invitee to see the nodes of interest - yield self._p.setNodeAffiliations(client, service, node, {invitee_jid: u"member"}) - log.debug(_(u"affiliation set on event node")) + yield self._p.setNodeAffiliations(client, service, node, {invitee_jid: "member"}) + log.debug(_("affiliation set on event node")) __, event_data = yield self.eventGet(client, service, node, item_id) - log.debug(_(u"got event data")) + log.debug(_("got event data")) invitees_service = jid.JID(event_data["invitees_service"]) invitees_node = event_data["invitees_node"] blog_service = jid.JID(event_data["blog_service"]) blog_node = event_data["blog_node"] yield self._p.setNodeAffiliations( - client, invitees_service, invitees_node, {invitee_jid: u"publisher"} + client, invitees_service, invitees_node, {invitee_jid: "publisher"} ) - log.debug(_(u"affiliation set on invitee node")) + log.debug(_("affiliation set 
on invitee node")) yield self._p.setNodeAffiliations( - client, blog_service, blog_node, {invitee_jid: u"member"} + client, blog_service, blog_node, {invitee_jid: "member"} ) blog_items, __ = yield self._b.mbGet(client, blog_service, blog_node, None) @@ -591,29 +591,29 @@ comments_node = item["comments_node"] except KeyError: log.debug( - u"no comment service set for item {item_id}".format( + "no comment service set for item {item_id}".format( item_id=item["id"] ) ) else: yield self._p.setNodeAffiliations( - client, comments_service, comments_node, {invitee_jid: u"publisher"} + client, comments_service, comments_node, {invitee_jid: "publisher"} ) - log.debug(_(u"affiliation set on blog and comments nodes")) + log.debug(_("affiliation set on blog and comments nodes")) # now we send the invitation - pubsub_invitation = self.host.plugins[u'INVITATION'] + pubsub_invitation = self.host.plugins['INVITATION'] pubsub_invitation.sendPubsubInvitation(client, invitee_jid, service, node, item_id, name, extra) - def _inviteByEmail(self, service, node, id_=NS_EVENT, email=u"", emails_extra=None, - name=u"", host_name=u"", language=u"", url_template=u"", - message_subject=u"", message_body=u"", + def _inviteByEmail(self, service, node, id_=NS_EVENT, email="", emails_extra=None, + name="", host_name="", language="", url_template="", + message_subject="", message_body="", profile_key=C.PROF_KEY_NONE): client = self.host.getClient(profile_key) kwargs = { - u"profile": client.profile, - u"emails_extra": [unicode(e) for e in emails_extra], + "profile": client.profile, + "emails_extra": [str(e) for e in emails_extra], } for key in ( "email", @@ -625,7 +625,7 @@ "message_body", ): value = locals()[key] - kwargs[key] = unicode(value) + kwargs[key] = str(value) return self.inviteByEmail( client, jid.JID(service) if service else None, node, id_ or NS_EVENT, **kwargs ) @@ -640,26 +640,26 @@ """ if self._i is None: raise exceptions.FeatureNotFound( - _(u'"Invitations" plugin is needed for 
this feature') + _('"Invitations" plugin is needed for this feature') ) if self._b is None: raise exceptions.FeatureNotFound( - _(u'"XEP-0277" (blog) plugin is needed for this feature') + _('"XEP-0277" (blog) plugin is needed for this feature') ) service = service or client.jid.userhostJID() event_uri = xmpp_uri.buildXMPPUri( "pubsub", path=service.full(), node=node, item=id_ ) - kwargs["extra"] = {u"event_uri": event_uri} + kwargs["extra"] = {"event_uri": event_uri} invitation_data = yield self._i.create(**kwargs) - invitee_jid = invitation_data[u"jid"] - log.debug(_(u"invitation created")) + invitee_jid = invitation_data["jid"] + log.debug(_("invitation created")) # now that we have a jid, we can send normal invitation yield self.invite(client, invitee_jid, service, node, id_) +@implementer(iwokkel.IDisco) class EventsHandler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_exp_invitation.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_invitation.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -24,7 +24,7 @@ from twisted.internet import defer from twisted.words.protocols.jabber import jid from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber.xmlstream import XMPPHandler log = getLogger(__name__) @@ -35,24 +35,24 @@ C.PI_IMPORT_NAME: "INVITATION", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"XEP-0329"], + C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0329"], C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "Invitation", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"Experimental handling of invitations"), + C.PI_DESCRIPTION: _("Experimental handling of invitations"), } -NS_INVITATION = u"https://salut-a-toi/protocol/invitation:0" +NS_INVITATION = "https://salut-a-toi/protocol/invitation:0" INVITATION = '/message/invitation[@xmlns="{ns_invit}"]'.format( ns_invit=NS_INVITATION ) -NS_INVITATION_LIST = NS_INVITATION + u"#list" +NS_INVITATION_LIST = NS_INVITATION + "#list" class Invitation(object): def __init__(self, host): - log.info(_(u"Invitation plugin initialization")) + log.info(_("Invitation plugin initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] # map from namespace of the invitation to callback handling it @@ -88,7 +88,7 @@ """ if namespace in self._ns_cb: raise exceptions.ConflictError( - u"invitation namespace {namespace} is already register with {callback}" + "invitation namespace {namespace} is already register with {callback}" .format(namespace=namespace, callback=self._ns_cb[namespace])) self._ns_cb[namespace] = callback @@ -113,15 +113,15 @@ client.generateMessageXML(mess_data) invitation_elt = mess_data["xml"].addElement("invitation", 
NS_INVITATION) if name is not None: - invitation_elt[u"name"] = name - thumb_url = extra.get(u'thumb_url') + invitation_elt["name"] = name + thumb_url = extra.get('thumb_url') if thumb_url: - if not thumb_url.startswith(u'http'): + if not thumb_url.startswith('http'): log.warning( - u"only http URLs are allowed for thumbnails, got {url}, ignoring" + "only http URLs are allowed for thumbnails, got {url}, ignoring" .format(url=thumb_url)) else: - invitation_elt[u'thumb_url'] = thumb_url + invitation_elt['thumb_url'] = thumb_url return mess_data, invitation_elt def sendPubsubInvitation(self, client, invitee_jid, service, node, @@ -139,11 +139,11 @@ extra = {} mess_data, invitation_elt = self._generateBaseInvitation( client, invitee_jid, name, extra) - pubsub_elt = invitation_elt.addElement(u"pubsub") - pubsub_elt[u"service"] = service.full() - pubsub_elt[u"node"] = node - pubsub_elt[u"item"] = item_id - return client.send(mess_data[u"xml"]) + pubsub_elt = invitation_elt.addElement("pubsub") + pubsub_elt["service"] = service.full() + pubsub_elt["node"] = node + pubsub_elt["item"] = item_id + return client.send(mess_data["xml"]) def sendFileSharingInvitation(self, client, invitee_jid, service, repos_type=None, namespace=None, path=None, name=None, extra=None): @@ -163,20 +163,20 @@ extra = {} mess_data, invitation_elt = self._generateBaseInvitation( client, invitee_jid, name, extra) - file_sharing_elt = invitation_elt.addElement(u"file_sharing") - file_sharing_elt[u"service"] = service.full() + file_sharing_elt = invitation_elt.addElement("file_sharing") + file_sharing_elt["service"] = service.full() if repos_type is not None: - if repos_type not in (u"files", "photos"): - msg = u"unknown repository type: {repos_type}".format( + if repos_type not in ("files", "photos"): + msg = "unknown repository type: {repos_type}".format( repos_type=repos_type) log.warning(msg) raise exceptions.DateError(msg) - file_sharing_elt[u"type"] = repos_type + file_sharing_elt["type"] = 
repos_type if namespace is not None: - file_sharing_elt[u"namespace"] = namespace + file_sharing_elt["namespace"] = namespace if path is not None: - file_sharing_elt[u"path"] = path - return client.send(mess_data[u"xml"]) + file_sharing_elt["path"] = path + return client.send(mess_data["xml"]) @defer.inlineCallbacks def _parsePubsubElt(self, client, pubsub_elt): @@ -185,25 +185,25 @@ node = pubsub_elt["node"] item_id = pubsub_elt.getAttribute("item") except (RuntimeError, KeyError): - log.warning(_(u"Bad invitation, ignoring")) + log.warning(_("Bad invitation, ignoring")) raise exceptions.DataError try: items, metadata = yield self._p.getItems(client, service, node, item_ids=[item_id]) except Exception as e: - log.warning(_(u"Can't get item linked with invitation: {reason}").format( + log.warning(_("Can't get item linked with invitation: {reason}").format( reason=e)) try: item_elt = items[0] except IndexError: - log.warning(_(u"Invitation was linking to a non existing item")) + log.warning(_("Invitation was linking to a non existing item")) raise exceptions.DataError try: namespace = item_elt.firstChildElement().uri except Exception as e: - log.warning(_(u"Can't retrieve namespace of invitation: {reason}").format( + log.warning(_("Can't retrieve namespace of invitation: {reason}").format( reason = e)) raise exceptions.DataError @@ -214,41 +214,41 @@ try: service = jid.JID(file_sharing_elt["service"]) except (RuntimeError, KeyError): - log.warning(_(u"Bad invitation, ignoring")) + log.warning(_("Bad invitation, ignoring")) raise exceptions.DataError - repos_type = file_sharing_elt.getAttribute(u"type", u"files") - namespace = file_sharing_elt.getAttribute(u"namespace") - path = file_sharing_elt.getAttribute(u"path") + repos_type = file_sharing_elt.getAttribute("type", "files") + namespace = file_sharing_elt.getAttribute("namespace") + path = file_sharing_elt.getAttribute("path") args = [service, repos_type, namespace, path] - ns_fis = self.host.getNamespace(u"fis") 
+ ns_fis = self.host.getNamespace("fis") return ns_fis, args @defer.inlineCallbacks def onInvitation(self, message_elt, client): - log.debug(u"invitation received [{profile}]".format(profile=client.profile)) + log.debug("invitation received [{profile}]".format(profile=client.profile)) invitation_elt = message_elt.invitation - name = invitation_elt.getAttribute(u"name") + name = invitation_elt.getAttribute("name") extra = {} - if invitation_elt.hasAttribute(u"thumb_url"): - extra[u'thumb_url'] = invitation_elt[u'thumb_url'] + if invitation_elt.hasAttribute("thumb_url"): + extra['thumb_url'] = invitation_elt['thumb_url'] for elt in invitation_elt.elements(): if elt.uri != NS_INVITATION: - log.warning(u"unexpected element: {xml}".format(xml=elt.toXml())) + log.warning("unexpected element: {xml}".format(xml=elt.toXml())) continue - if elt.name == u"pubsub": + if elt.name == "pubsub": method = self._parsePubsubElt - elif elt.name == u"file_sharing": + elif elt.name == "file_sharing": method = self._parseFileSharingElt else: - log.warning(u"not implemented invitation element: {xml}".format( + log.warning("not implemented invitation element: {xml}".format( xml = elt.toXml())) continue try: namespace, args = yield method(client, elt) except exceptions.DataError: - log.warning(u"Can't parse invitation element: {xml}".format( + log.warning("Can't parse invitation element: {xml}".format( xml = elt.toXml())) continue @@ -256,14 +256,14 @@ cb = self._ns_cb[namespace] except KeyError: log.warning(_( - u'No handler for namespace "{namespace}", invitation ignored') + 'No handler for namespace "{namespace}", invitation ignored') .format(namespace=namespace)) else: cb(client, name, extra, *args) +@implementer(iwokkel.IDisco) class PubsubInvitationHandler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_exp_invitation_file.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_invitation_file.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -31,21 +31,21 @@ C.PI_IMPORT_NAME: "FILE_SHARING_INVITATION", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: ["XEP-0329", u"INVITATION"], + C.PI_DEPENDENCIES: ["XEP-0329", "INVITATION"], C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "FileSharingInvitation", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"Experimental handling of invitations for file sharing"), + C.PI_DESCRIPTION: _("Experimental handling of invitations for file sharing"), } class FileSharingInvitation(object): def __init__(self, host): - log.info(_(u"File Sharing Invitation plugin initialization")) + log.info(_("File Sharing Invitation plugin initialization")) self.host = host - ns_fis = host.getNamespace(u"fis") - host.plugins[u"INVITATION"].registerNamespace(ns_fis, self.onInvitation) + ns_fis = host.getNamespace("fis") + host.plugins["INVITATION"].registerNamespace(ns_fis, self.onInvitation) host.bridge.addMethod( "FISInvite", ".plugin", @@ -56,30 +56,30 @@ def _sendFileSharingInvitation( self, invitee_jid_s, service_s, repos_type=None, namespace=None, path=None, - name=None, extra_s=u'', profile_key=C.PROF_KEY_NONE): + name=None, extra_s='', profile_key=C.PROF_KEY_NONE): client = self.host.getClient(profile_key) invitee_jid = jid.JID(invitee_jid_s) service = jid.JID(service_s) extra = data_format.deserialise(extra_s) - return self.host.plugins[u"INVITATION"].sendFileSharingInvitation( + return self.host.plugins["INVITATION"].sendFileSharingInvitation( client, invitee_jid, service, repos_type=repos_type or None, namespace=namespace or None, path=path or None, name=name or None, extra=extra) def onInvitation(self, client, name, extra, service, repos_type, namespace, path): - if repos_type == u"files": - 
type_human = _(u"file sharing") - elif repos_type == u"photos": - type_human = _(u"photos album") + if repos_type == "files": + type_human = _("file sharing") + elif repos_type == "photos": + type_human = _("photos album") else: - log.warning(u"Unknown repository type: {repos_type}".format( + log.warning("Unknown repository type: {repos_type}".format( repos_type=repos_type)) - repos_type = u"file" - type_human = _(u"file sharing") + repos_type = "file" + type_human = _("file sharing") log.info(_( - u'{profile} has received an invitation for a files repository ({type_human}) ' - u'with namespace "{namespace}" at path [{path}]').format( + '{profile} has received an invitation for a files repository ({type_human}) ' + 'with namespace "{namespace}" at path [{path}]').format( profile=client.profile, type_human=type_human, namespace=namespace, path=path) ) - return self.host.plugins[u'LIST_INTEREST'].registerFileSharing( + return self.host.plugins['LIST_INTEREST'].registerFileSharing( client, service, repos_type, namespace, path, name, extra)
--- a/sat/plugins/plugin_exp_jingle_stream.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_jingle_stream.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing pipes (experimental) @@ -51,8 +51,8 @@ C.PI_DESCRIPTION: _("""Jingle Stream plugin"""), } -CONFIRM = D_(u"{peer} wants to send you a stream, do you accept ?") -CONFIRM_TITLE = D_(u"Stream Request") +CONFIRM = D_("{peer} wants to send you a stream, do you accept ?") +CONFIRM_TITLE = D_("Stream Request") class StreamProtocol(protocol.Protocol): @@ -119,7 +119,7 @@ def startStream(self, consumer): if self.consumer is not None: raise exceptions.InternalError( - _(u"stream can't be used with multiple consumers") + _("stream can't be used with multiple consumers") ) assert self.deferred is None self.consumer = consumer @@ -166,7 +166,7 @@ try: self.client_conn.sendData(data) except AttributeError: - log.warning(_(u"No client connected, can't send data")) + log.warning(_("No client connected, can't send data")) def writeToConsumer(self, data): self.consumer.write(data) @@ -186,7 +186,7 @@ in_sign="ss", out_sign="s", method=self._streamOut, - async=True, + async_=True, ) # jingle callbacks @@ -227,7 +227,7 @@ } ], ) - defer.returnValue(unicode(port)) + defer.returnValue(str(port)) def jingleSessionInit(self, client, session, content_name, stream_object): content_data = session["contents"][content_name] @@ -245,7 +245,7 @@ self._j.ROLE_INITIATOR, self._j.ROLE_RESPONDER, ): - log.warning(u"Bad sender, assuming initiator") + log.warning("Bad sender, assuming initiator") content_data["senders"] = self._j.ROLE_INITIATOR confirm_data = yield xml_tools.deferDialog( @@ -266,7 +266,7 @@ try: port = int(confirm_data["port"]) except (ValueError, KeyError): - raise exceptions.DataError(_(u"given port is invalid")) + raise exceptions.DataError(_("given port is invalid")) endpoint = endpoints.TCP4ClientEndpoint(reactor, 
"localhost", port) factory = StreamFactory() yield endpoint.connect(factory) @@ -288,16 +288,16 @@ args = [client, session, content_name, content_data] finished_d.addCallbacks(self._finishedCb, self._finishedEb, args, None, args) else: - log.warning(u"FIXME: unmanaged action {}".format(action)) + log.warning("FIXME: unmanaged action {}".format(action)) return desc_elt def _finishedCb(self, __, client, session, content_name, content_data): - log.info(u"Pipe transfer completed") + log.info("Pipe transfer completed") self._j.contentTerminate(client, session, content_name) content_data["stream_object"].stopStream() def _finishedEb(self, failure, client, session, content_name, content_data): - log.warning(u"Error while streaming pipe: {}".format(failure)) + log.warning("Error while streaming pipe: {}".format(failure)) self._j.contentTerminate( client, session, content_name, reason=self._j.REASON_FAILED_TRANSPORT )
--- a/sat/plugins/plugin_exp_lang_detect.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_lang_detect.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -28,7 +28,7 @@ from langid.langid import LanguageIdentifier, model except ImportError: raise exceptions.MissingModule( - u'Missing module langid, please download/install it with "pip install langid")' + 'Missing module langid, please download/install it with "pip install langid"' ) identifier = LanguageIdentifier.from_modelstring(model, norm_probs=False) @@ -45,9 +45,9 @@ C.PI_DESCRIPTION: _("""Detect and set message language when unknown"""), } -CATEGORY = D_(u"Misc") -NAME = u"lang_detect" -LABEL = D_(u"language detection") +CATEGORY = D_("Misc") +NAME = "lang_detect" +LABEL = D_("language detection") PARAMS = """ <params> <individual> @@ -63,7 +63,7 @@ class LangDetect(object): def __init__(self, host): - log.info(_(u"Language detection plugin initialization")) + log.info(_("Language detection plugin initialization")) self.host = host host.memory.updateParams(PARAMS) host.trigger.add("MessageReceived", self.MessageReceivedTrigger) @@ -71,8 +71,8 @@ def addLanguage(self, mess_data): message = mess_data["message"] - if len(message) == 1 and message.keys()[0] == "": - msg = message.values()[0] + if len(message) == 1 and list(message.keys())[0] == "": + msg = list(message.values())[0] lang = identifier.classify(msg)[0] mess_data["message"] = {lang: msg} return mess_data
--- a/sat/plugins/plugin_exp_list_of_interest.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_list_of_interest.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -21,7 +21,7 @@ from sat.core.constants import Const as C from sat.core.log import getLogger from wokkel import disco, iwokkel, pubsub -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.words.protocols.jabber import error as jabber_error, jid from twisted.words.protocols.jabber.xmlstream import XMPPHandler @@ -35,11 +35,11 @@ C.PI_IMPORT_NAME: "LIST_INTEREST", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"XEP-0329"], + C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0329"], C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "ListInterest", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"Experimental handling of interesting XMPP locations"), + C.PI_DESCRIPTION: _("Experimental handling of interesting XMPP locations"), } NS_LIST_INTEREST = "https://salut-a-toi/protocol/list-interest:0" @@ -49,7 +49,7 @@ namespace = NS_LIST_INTEREST def __init__(self, host): - log.info(_(u"List of Interest plugin initialization")) + log.info(_("List of Interest plugin initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] host.bridge.addMethod( @@ -58,7 +58,7 @@ in_sign="ssss", out_sign="aa{ss}", method=self._listInterests, - async=True, + async_=True, ) def getHandler(self, client): @@ -76,8 +76,8 @@ options=options, ) except jabber_error.StanzaError as e: - if e.condition == u"conflict": - log.debug(_(u"requested node already exists")) + if e.condition == "conflict": + log.debug(_("requested node already exists")) @defer.inlineCallbacks def registerPubsub(self, client, namespace, service, node, item_id=None, @@ -101,20 +101,20 @@ if extra is None: extra = {} yield self.createNode(client) - 
interest_elt = domish.Element((NS_LIST_INTEREST, u"interest")) - interest_elt[u"namespace"] = namespace + interest_elt = domish.Element((NS_LIST_INTEREST, "interest")) + interest_elt["namespace"] = namespace if name is not None: - interest_elt[u'name'] = name - thumb_url = extra.get(u'thumb_url') + interest_elt['name'] = name + thumb_url = extra.get('thumb_url') if thumb_url: - interest_elt[u'thumb_url'] = thumb_url - pubsub_elt = interest_elt.addElement(u"pubsub") - pubsub_elt[u"service"] = service.full() - pubsub_elt[u"node"] = node + interest_elt['thumb_url'] = thumb_url + pubsub_elt = interest_elt.addElement("pubsub") + pubsub_elt["service"] = service.full() + pubsub_elt["node"] = node if item_id is not None: - pubsub_elt[u"item"] = item_id + pubsub_elt["item"] = item_id if creator: - pubsub_elt[u"creator"] = C.BOOL_TRUE + pubsub_elt["creator"] = C.BOOL_TRUE if element is not None: pubsub_elt.addChild(element) item_elt = pubsub.Item(payload=interest_elt) @@ -138,21 +138,21 @@ if extra is None: extra = {} yield self.createNode(client) - interest_elt = domish.Element((NS_LIST_INTEREST, u"interest")) - interest_elt[u"namespace"] = self.host.getNamespace(u"fis") + interest_elt = domish.Element((NS_LIST_INTEREST, "interest")) + interest_elt["namespace"] = self.host.getNamespace("fis") if name is not None: - interest_elt[u'name'] = name - thumb_url = extra.get(u'thumb_url') + interest_elt['name'] = name + thumb_url = extra.get('thumb_url') if thumb_url: - interest_elt[u'thumb_url'] = thumb_url - file_sharing_elt = interest_elt.addElement(u"file_sharing") - file_sharing_elt[u"service"] = service.full() + interest_elt['thumb_url'] = thumb_url + file_sharing_elt = interest_elt.addElement("file_sharing") + file_sharing_elt["service"] = service.full() if repos_type is not None: - file_sharing_elt[u"type"] = repos_type + file_sharing_elt["type"] = repos_type if namespace is not None: - file_sharing_elt[u"namespace"] = namespace + file_sharing_elt["namespace"] = namespace 
if path is not None: - file_sharing_elt[u"path"] = path + file_sharing_elt["path"] = path item_elt = pubsub.Item(payload=interest_elt) yield self._p.publish( client, client.jid.userhostJID(), NS_LIST_INTEREST, items=[item_elt] @@ -163,38 +163,38 @@ for item_elt in interests_data[0]: interest_data = {} interest_elt = item_elt.interest - if interest_elt.hasAttribute(u'namespace'): - interest_data[u'namespace'] = interest_elt.getAttribute(u'namespace') - if interest_elt.hasAttribute(u'name'): - interest_data[u'name'] = interest_elt.getAttribute(u'name') - if interest_elt.hasAttribute(u'thumb_url'): - interest_data[u'thumb_url'] = interest_elt.getAttribute(u'thumb_url') + if interest_elt.hasAttribute('namespace'): + interest_data['namespace'] = interest_elt.getAttribute('namespace') + if interest_elt.hasAttribute('name'): + interest_data['name'] = interest_elt.getAttribute('name') + if interest_elt.hasAttribute('thumb_url'): + interest_data['thumb_url'] = interest_elt.getAttribute('thumb_url') elt = interest_elt.firstChildElement() if elt.uri != NS_LIST_INTEREST: - log.warning(u"unexpected child element, ignoring: {xml}".format( + log.warning("unexpected child element, ignoring: {xml}".format( xml = elt.toXml())) continue - if elt.name == u'pubsub': + if elt.name == 'pubsub': interest_data.update({ - u"type": u"pubsub", - u"service": elt[u'service'], - u"node": elt[u'node'], + "type": "pubsub", + "service": elt['service'], + "node": elt['node'], }) - for attr in (u'item', u'creator'): + for attr in ('item', 'creator'): if elt.hasAttribute(attr): interest_data[attr] = elt[attr] - elif elt.name == u'file_sharing': + elif elt.name == 'file_sharing': interest_data.update({ - u"type": u"file_sharing", - u"service": elt[u'service'], + "type": "file_sharing", + "service": elt['service'], }) - if elt.hasAttribute(u'type'): - interest_data[u'subtype'] = elt[u'type'] - for attr in (u'namespace', u'path'): + if elt.hasAttribute('type'): + interest_data['subtype'] = elt['type'] + 
for attr in ('namespace', 'path'): if elt.hasAttribute(attr): interest_data[attr] = elt[attr] else: - log.warning(u"unknown element, ignoring: {xml}".format(xml=elt.toXml())) + log.warning("unknown element, ignoring: {xml}".format(xml=elt.toXml())) continue interests.append(interest_data) @@ -229,20 +229,20 @@ filtered_items = [] for item in items: try: - interest_elt = next(item.elements(NS_LIST_INTEREST, u"interest")) + interest_elt = next(item.elements(NS_LIST_INTEREST, "interest")) except StopIteration: - log.warning(_(u"Missing interest element: {xml}").format( + log.warning(_("Missing interest element: {xml}").format( xml=interest_elt.toXml())) continue - if interest_elt.getAttribute(u"namespace") == namespace: + if interest_elt.getAttribute("namespace") == namespace: filtered_items.append(item) items = filtered_items defer.returnValue((items, metadata)) +@implementer(iwokkel.IDisco) class ListInterestHandler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent
--- a/sat/plugins/plugin_exp_parrot.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_parrot.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for parrot mode (experimental) @@ -38,7 +38,7 @@ C.PI_MAIN: "Exp_Parrot", C.PI_HANDLER: "no", C.PI_DESCRIPTION: _( - u"""Implementation of parrot mode (repeat messages between 2 entities)""" + """Implementation of parrot mode (repeat messages between 2 entities)""" ), } @@ -60,7 +60,7 @@ try: self.host.plugins[C.TEXT_CMDS].registerTextCommands(self) except KeyError: - log.info(_(u"Text commands not available")) + log.info(_("Text commands not available")) # def sendMessageTrigger(self, client, mess_data, treatments): # """ Deactivate other triggers if recipient is in parrot links """ @@ -90,7 +90,7 @@ message = {} for e in message_elt.elements(C.NS_CLIENT, "body"): - body = unicode(e) + body = str(e) lang = e.getAttribute("lang") or "" try: @@ -107,12 +107,12 @@ return True else: src_txt = from_jid.user - message[lang] = u"[{}] {}".format(src_txt, body) + message[lang] = "[{}] {}".format(src_txt, body) linked = _links[from_jid.userhostJID()] client.sendMessage( - jid.JID(unicode(linked)), message, None, "auto", no_trigger=True + jid.JID(str(linked)), message, None, "auto", no_trigger=True ) return True @@ -130,8 +130,8 @@ _links[source_jid.userhostJID()] = dest_jid log.info( - u"Parrot mode: %s will be repeated to %s" - % (source_jid.userhost(), unicode(dest_jid)) + "Parrot mode: %s will be repeated to %s" + % (source_jid.userhost(), str(dest_jid)) ) def removeParrot(self, client, source_jid): @@ -166,7 +166,7 @@ txt_cmd.feedBack( client, - "Parrot mode activated for {}".format(unicode(link_left_jid)), + "Parrot mode activated for {}".format(str(link_left_jid)), mess_data, ) @@ -183,7 +183,7 @@ raise jid.InvalidFormat except jid.InvalidFormat: txt_cmd.feedBack( - client, u"Can't deactivate Parrot mode for invalid jid", mess_data 
+ client, "Can't deactivate Parrot mode for invalid jid", mess_data ) return False @@ -194,8 +194,8 @@ txt_cmd.feedBack( client, - u"Parrot mode deactivated for {} and {}".format( - unicode(link_left_jid), unicode(link_right_jid) + "Parrot mode deactivated for {} and {}".format( + str(link_left_jid), str(link_right_jid) ), mess_data, )
--- a/sat/plugins/plugin_exp_pubsub_admin.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_pubsub_admin.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to send pubsub requests with administrator privilege @@ -29,20 +29,20 @@ log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Pubsub Administrator", - C.PI_IMPORT_NAME: u"PUBSUB_ADMIN", + C.PI_NAME: "Pubsub Administrator", + C.PI_IMPORT_NAME: "PUBSUB_ADMIN", C.PI_TYPE: C.PLUG_TYPE_EXP, C.PI_PROTOCOLS: [], C.PI_DEPENDENCIES: [], C.PI_RECOMMENDATIONS: [], - C.PI_MAIN: u"PubsubAdmin", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: _(u"""\Implementation of Pubsub Administrator + C.PI_MAIN: "PubsubAdmin", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: _("""Implementation of Pubsub Administrator This allows a pubsub administrator to overwrite completly items, including publisher. Specially useful when importing a node."""), } -NS_PUBSUB_ADMIN = u"https://salut-a-toi.org/spec/pubsub_admin:0" +NS_PUBSUB_ADMIN = "https://salut-a-toi.org/spec/pubsub_admin:0" class PubsubAdmin(object): @@ -55,7 +55,7 @@ in_sign="ssasss", out_sign="as", method=self._publish, - async=True, + async_=True, ) def _publish(self, service, nodeIdentifier, items, extra=None, @@ -71,22 +71,22 @@ def _sendCb(self, iq_result): publish_elt = iq_result.admin.pubsub.publish ids = [] - for item_elt in publish_elt.elements(pubsub.NS_PUBSUB, u'item'): - ids.append(item_elt[u'id']) + for item_elt in publish_elt.elements(pubsub.NS_PUBSUB, 'item'): + ids.append(item_elt['id']) return ids def publish(self, client, service, nodeIdentifier, items, extra=None): for item in items: - if item.name != u'item' or item.uri != pubsub.NS_PUBSUB: + if item.name != 'item' or item.uri != pubsub.NS_PUBSUB: raise exceptions.DataError( - u'Invalid element, a pubsub item is expected: {xml}'.format( + 'Invalid element, a pubsub item is expected: {xml}'.format( xml=item.toXml())) iq_elt = 
client.IQ() iq_elt['to'] = service.full() if service else client.jid.userhost() - admin_elt = iq_elt.addElement((NS_PUBSUB_ADMIN, u'admin')) - pubsub_elt = admin_elt.addElement((pubsub.NS_PUBSUB, u'pubsub')) + admin_elt = iq_elt.addElement((NS_PUBSUB_ADMIN, 'admin')) + pubsub_elt = admin_elt.addElement((pubsub.NS_PUBSUB, 'pubsub')) publish_elt = pubsub_elt.addElement('publish') - publish_elt[u'node'] = nodeIdentifier + publish_elt['node'] = nodeIdentifier for item in items: publish_elt.addChild(item) d = iq_elt.send()
--- a/sat/plugins/plugin_exp_pubsub_hook.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_pubsub_hook.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Pubsub Hooks @@ -43,17 +43,17 @@ } # python module -HOOK_TYPE_PYTHON = u"python" +HOOK_TYPE_PYTHON = "python" # python file path -HOOK_TYPE_PYTHON_FILE = u"python_file" +HOOK_TYPE_PYTHON_FILE = "python_file" # python code directly -HOOK_TYPE_PYTHON_CODE = u"python_code" +HOOK_TYPE_PYTHON_CODE = "python_code" HOOK_TYPES = (HOOK_TYPE_PYTHON, HOOK_TYPE_PYTHON_FILE, HOOK_TYPE_PYTHON_CODE) class PubsubHook(object): def __init__(self, host): - log.info(_(u"PubSub Hook initialization")) + log.info(_("PubSub Hook initialization")) self.host = host self.node_hooks = {} # keep track of the number of hooks per node (for all profiles) host.bridge.addMethod( @@ -90,7 +90,7 @@ def _installNodeManager(self, client, node): if node in self.node_hooks: - log.debug(_(u"node manager already set for {node}").format(node=node)) + log.debug(_("node manager already set for {node}").format(node=node)) self.node_hooks[node] += 1 else: # first hook on this node @@ -98,29 +98,29 @@ node, items_cb=self._itemsReceived ) self.node_hooks[node] = 0 - log.info(_(u"node manager installed on {node}").format(node=node)) + log.info(_("node manager installed on {node}").format(node=node)) def _removeNodeManager(self, client, node): try: self.node_hooks[node] -= 1 except KeyError: - log.error(_(u"trying to remove a {node} without hook").format(node=node)) + log.error(_("trying to remove a {node} without hook").format(node=node)) else: if self.node_hooks[node] == 0: del self.node_hooks[node] self.host.plugins["XEP-0060"].removeManagedNode(node, self._itemsReceived) - log.debug(_(u"hook removed")) + log.debug(_("hook removed")) else: - log.debug(_(u"node still needed for an other hook")) + log.debug(_("node still needed for an other hook")) def 
installHook(self, client, service, node, hook_type, hook_arg, persistent): if hook_type not in HOOK_TYPES: raise exceptions.DataError( - _(u"{hook_type} is not handled").format(hook_type=hook_type) + _("{hook_type} is not handled").format(hook_type=hook_type) ) if hook_type != HOOK_TYPE_PYTHON_FILE: raise NotImplementedError( - _(u"{hook_type} hook type not implemented yet").format( + _("{hook_type} hook type not implemented yet").format( hook_type=hook_type ) ) @@ -136,8 +136,8 @@ hooks_list.append(hook_data) log.info( - _(u"{persistent} hook installed on {node} for {profile}").format( - persistent=_(u"persistent") if persistent else _(u"temporary"), + _("{persistent} hook installed on {node} for {profile}").format( + persistent=_("persistent") if persistent else _("temporary"), node=node, profile=client.profile, ) @@ -160,18 +160,18 @@ try: if hook_type == HOOK_TYPE_PYTHON_FILE: hook_globals = {} - execfile(hook_data["arg"], hook_globals) + exec(compile(open(hook_data["arg"], "rb").read(), hook_data["arg"], 'exec'), hook_globals) callback = hook_globals["hook"] else: raise NotImplementedError( - _(u"{hook_type} hook type not implemented yet").format( + _("{hook_type} hook type not implemented yet").format( hook_type=hook_type ) ) except Exception as e: log.warning( _( - u"Can't load Pubsub hook at node {node}, it will be removed: {reason}" + "Can't load Pubsub hook at node {node}, it will be removed: {reason}" ).format(node=node, reason=e) ) hooks_list.remove(hook_data) @@ -183,7 +183,7 @@ except Exception as e: log.warning( _( - u"Error while running Pubsub hook for node {node}: {msg}" + "Error while running Pubsub hook for node {node}: {msg}" ).format(node=node, msg=e) ) @@ -193,9 +193,9 @@ return self.addHook( client, service, - unicode(node), - unicode(hook_type), - unicode(hook_arg), + str(node), + str(hook_type), + str(hook_arg), persistent, ) @@ -242,11 +242,11 @@ if node in hooks: for hook_data in hooks[node]: if ( - service != hook_data[u"service"] + 
service != hook_data["service"] or hook_type is not None - and hook_type != hook_data[u"type"] + and hook_type != hook_data["type"] or hook_arg is not None - and hook_arg != hook_data[u"arg"] + and hook_arg != hook_data["arg"] ): continue hooks[node].remove(hook_data) @@ -263,8 +263,8 @@ def _listHooks(self, profile): hooks_list = self.listHooks(self.host.getClient(profile)) for hook in hooks_list: - hook[u"service"] = hook[u"service"].full() - hook[u"persistent"] = C.boolConst(hook[u"persistent"]) + hook["service"] = hook["service"].full() + hook["persistent"] = C.boolConst(hook["persistent"]) return hooks_list def listHooks(self, client): @@ -272,15 +272,15 @@ hooks_list = [] for hooks in (client._hooks, client._hooks_temporary): persistent = hooks is client._hooks - for node, hooks_data in hooks.iteritems(): + for node, hooks_data in hooks.items(): for hook_data in hooks_data: hooks_list.append( { - u"service": hook_data[u"service"], - u"node": node, - u"type": hook_data[u"type"], - u"arg": hook_data[u"arg"], - u"persistent": persistent, + "service": hook_data["service"], + "node": node, + "type": hook_data["type"], + "arg": hook_data["arg"], + "persistent": persistent, } ) return hooks_list
--- a/sat/plugins/plugin_exp_pubsub_schema.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_pubsub_schema.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Pubsub Schemas @@ -20,7 +20,7 @@ from collections import Iterable import copy import itertools -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber import jid from twisted.words.protocols.jabber.xmlstream import XMPPHandler from twisted.internet import defer @@ -38,23 +38,23 @@ log = getLogger(__name__) -NS_SCHEMA = u"https://salut-a-toi/protocol/schema:0" +NS_SCHEMA = "https://salut-a-toi/protocol/schema:0" PLUGIN_INFO = { - C.PI_NAME: u"PubSub Schema", - C.PI_IMPORT_NAME: u"PUBSUB_SCHEMA", - C.PI_TYPE: u"EXP", + C.PI_NAME: "PubSub Schema", + C.PI_IMPORT_NAME: "PUBSUB_SCHEMA", + C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"IDENTITY"], - C.PI_MAIN: u"PubsubSchema", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: _(u"""Handle Pubsub data schemas"""), + C.PI_DEPENDENCIES: ["XEP-0060", "IDENTITY"], + C.PI_MAIN: "PubsubSchema", + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: _("""Handle Pubsub data schemas"""), } class PubsubSchema(object): def __init__(self, host): - log.info(_(u"PubSub Schema initialization")) + log.info(_("PubSub Schema initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] self._i = self.host.plugins["IDENTITY"] @@ -64,7 +64,7 @@ in_sign="sss", out_sign="s", method=self._getSchema, - async=True, + async_=True, ) host.bridge.addMethod( "psSchemaSet", @@ -72,15 +72,16 @@ in_sign="ssss", out_sign="", method=self._setSchema, - async=True, + async_=True, ) host.bridge.addMethod( "psSchemaUIGet", ".plugin", in_sign="sss", out_sign="s", - method=utils.partial(self._getUISchema, default_node=None), - async=True, + method=lambda service, nodeIdentifier, profile_key: self._getUISchema( + service, 
nodeIdentifier, default_node=None, profile_key=profile_key), + async_=True, ) host.bridge.addMethod( "psItemsFormGet", @@ -88,7 +89,7 @@ in_sign="ssssiassa{ss}s", out_sign="(asa{ss})", method=self._getDataFormItems, - async=True, + async_=True, ) host.bridge.addMethod( "psItemFormSend", @@ -96,7 +97,7 @@ in_sign="ssa{sas}ssa{ss}s", out_sign="s", method=self._sendDataFormItem, - async=True, + async_=True, ) def getHandler(self, client): @@ -104,7 +105,7 @@ def _getSchemaBridgeCb(self, schema_elt): if schema_elt is None: - return u"" + return "" return schema_elt.toXml() def _getSchema(self, service, nodeIdentifier, profile_key=C.PROF_KEY_NONE): @@ -135,7 +136,7 @@ @return (domish.Element, None): schema (<x> element) None if not schema has been set on this node """ - iq_elt = client.IQ(u"get") + iq_elt = client.IQ("get") if service is not None: iq_elt["to"] = service.full() pubsub_elt = iq_elt.addElement((NS_SCHEMA, "pubsub")) @@ -163,12 +164,12 @@ the form should not be modified if copy_form is not set """ if schema is None: - log.debug(_(u"unspecified schema, we need to request it")) + log.debug(_("unspecified schema, we need to request it")) schema = yield self.getSchema(client, service, nodeIdentifier) if schema is None: raise exceptions.DataError( _( - u"no schema specified, and this node has no schema either, we can't construct the data form" + "no schema specified, and this node has no schema either, we can't construct the data form" ) ) elif isinstance(schema, data_form.Form): @@ -179,7 +180,7 @@ try: form = data_form.Form.fromElement(schema) except data_form.Error as e: - raise exceptions.DataError(_(u"Invalid Schema: {msg}").format(msg=e)) + raise exceptions.DataError(_("Invalid Schema: {msg}").format(msg=e)) form.formType = form_type defer.returnValue(form) @@ -192,7 +193,7 @@ profile_key=C.PROF_KEY_NONE): if not nodeIdentifier: if not default_node: - raise ValueError(_(u"nodeIndentifier needs to be set")) + raise ValueError(_("nodeIndentifier needs to be 
set")) nodeIdentifier = default_node client = self.host.getClient(profile_key) service = None if not service else jid.JID(service) @@ -233,7 +234,7 @@ client = self.host.getClient(profile_key) service = jid.JID(service) if service else None if not node: - raise exceptions.DataError(_(u"empty node is not allowed")) + raise exceptions.DataError(_("empty node is not allowed")) if schema: schema = generic.parseXml(schema.encode("utf-8")) else: @@ -276,7 +277,7 @@ if not nodeIdentifier: if not default_node: raise ValueError( - _(u"default_node must be set if nodeIdentifier is not set") + _("default_node must be set if nodeIdentifier is not set") ) nodeIdentifier = default_node # we need the initial form to get options of fields when suitable @@ -296,7 +297,7 @@ items, metadata = items_data items_xmlui = [] for item_elt in items: - for x_elt in item_elt.elements((data_form.NS_X_DATA, u"x")): + for x_elt in item_elt.elements((data_form.NS_X_DATA, "x")): form = data_form.Form.fromElement(x_elt) if form_ns and form.formNamespace != form_ns: continue @@ -307,9 +308,9 @@ # are not checked prepend=( ("label", "id"), - ("text", item_elt["id"], u"id"), + ("text", item_elt["id"], "id"), ("label", "publisher"), - ("text", item_elt.getAttribute("publisher", ""), u"publisher"), + ("text", item_elt.getAttribute("publisher", ""), "publisher"), ), filters=filters, read_only=False, @@ -336,7 +337,7 @@ extra, deserialise=True, ) - d.addCallback(lambda ret: ret or u"") + d.addCallback(lambda ret: ret or "") return d @defer.inlineCallbacks @@ -361,22 +362,22 @@ client, service, nodeIdentifier, schema, form_type="submit"