# HG changeset patch # User Goffi # Date 1565716121 -7200 # Node ID ab2696e34d295c5177744df88ed20776302cbaf8 # Parent ff5bcb12ae60b3af5be656129e76da66ec70d660 Python 3 port: /!\ this is a huge commit /!\ starting from this commit, SàT needs Python 3.6+ /!\ SàT may be unstable or some features may not work anymore, this will improve with time This patch ports backend, bridge and frontends to Python 3. Roughly this has been done this way: - 2to3 tools has been applied (with python 3.7) - all references to python2 have been replaced with python3 (notably shebangs) - fixed files not handled by 2to3 (notably the shell script) - several manual fixes - fixed issues reported by Python 3 that were not handled in Python 2 - replaced "async" with "async_" when needed (it's a reserved word from Python 3.7) - replaced zope's "implements" with @implementer decorator - temporary hack to handle data pickled in database, as str or bytes may be returned, to be checked later - fixed hash comparison for password - removed some code which is not needed anymore with Python 3 - deactivated some code which needs to be checked (notably certificate validation) - tested with jp, fixed reported issues until some basic commands worked - ported Primitivus (after porting dependencies like urwid satext) - more manual fixes diff -r ff5bcb12ae60 -r ab2696e34d29 CHANGELOG --- a/CHANGELOG Wed Jul 31 11:31:22 2019 +0200 +++ b/CHANGELOG Tue Aug 13 19:08:41 2019 +0200 @@ -1,5 +1,8 @@ All theses changelogs are not exhaustive, please check the Mercurial repository for more details. +v 0.8.0 « La Cecília » (NOT RELEASED YET): + - Python 3 port + v 0.7.0 « La Commune » (24/07/19): This version is a huge gap with previous one, changelog only show a part of novelties. This is also the first "general audience" version. 
diff -r ff5bcb12ae60 -r ab2696e34d29 bin/sat --- a/bin/sat Wed Jul 31 11:31:22 2019 +0200 +++ b/bin/sat Tue Aug 13 19:08:41 2019 +0200 @@ -2,7 +2,7 @@ DEBUG="" DAEMON="" -PYTHON="python2" +PYTHON="python3" TWISTD="$(which twistd)" kill_process() { @@ -29,17 +29,13 @@ eval `"$PYTHON" << PYTHONEND from sat.core.constants import Const as C from sat.memory.memory import fixLocalDir -from ConfigParser import SafeConfigParser +from configparser import ConfigParser from os.path import expanduser, join import sys -import codecs -import locale - -sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout) fixLocalDir() # XXX: tmp update code, will be removed in the future -config = SafeConfigParser(defaults=C.DEFAULT_CONFIG) +config = ConfigParser(defaults=C.DEFAULT_CONFIG) try: config.read(C.CONFIG_FILES) except: @@ -52,7 +48,7 @@ env.append("LOG_DIR='%s'" % join(expanduser(config.get('DEFAULT', 'log_dir')),'')) env.append("APP_NAME='%s'" % C.APP_NAME) env.append("APP_NAME_FILE='%s'" % C.APP_NAME_FILE) -print ";".join(env) +print (";".join(env)) PYTHONEND ` APP_NAME="$APP_NAME" diff -r ff5bcb12ae60 -r ab2696e34d29 sat/__init__.py --- a/sat/__init__.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/__init__.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/base_constructor.py --- a/sat/bridge/bridge_constructor/base_constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/base_constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -20,7 +20,7 @@ """base constructor class""" from sat.bridge.bridge_constructor.constants import Const as C -from ConfigParser import NoOptionError +from configparser import NoOptionError import sys import os import os.path @@ -191,7 +191,7 @@ for arg in 
self.argumentsParser(signature): attr_string.append( ( - "unicode(%(name)s)%(default)s" + "str(%(name)s)%(default)s" if (unicode_protect and arg == "s") else "%(name)s%(default)s" ) @@ -240,7 +240,7 @@ method = self.generateCoreSide elif side == "frontend": if not self.FRONTEND_ACTIVATE: - print(u"This constructor only handle core, please use core side") + print("This constructor only handle core, please use core side") sys.exit(1) method = self.generateFrontendSide except AttributeError: @@ -271,7 +271,7 @@ sections.sort() for section in sections: function = self.getValues(section) - print("Adding %s %s" % (section, function["type"])) + print(("Adding %s %s" % (section, function["type"]))) default = self.getDefault(section) arg_doc = self.getArgumentsDoc(section) async_ = "async" in self.getFlags(section) @@ -291,7 +291,7 @@ ) extend_method(completion, function, default, arg_doc, async_) - for part, fmt in FORMATS.iteritems(): + for part, fmt in FORMATS.items(): if part.startswith(function["type"]): parts[part.upper()].append(fmt.format(**completion)) @@ -300,7 +300,7 @@ bridge = [] const_override = { env[len(C.ENV_OVERRIDE) :]: v - for env, v in os.environ.iteritems() + for env, v in os.environ.items() if env.startswith(C.ENV_OVERRIDE) } template_path = self.getTemplatePath(TEMPLATE) @@ -308,7 +308,7 @@ with open(template_path) as template: for line in template: - for part, extend_list in parts.iteritems(): + for part, extend_list in parts.items(): if line.startswith("##{}_PART##".format(part)): bridge.extend(extend_list) break @@ -317,7 +317,7 @@ if line.startswith("const_"): const_name = line[len("const_") : line.find(" = ")].strip() if const_name in const_override: - print("const {} overriden".format(const_name)) + print(("const {} overriden".format(const_name))) bridge.append( "const_{} = {}".format( const_name, const_override[const_name] @@ -326,7 +326,7 @@ continue bridge.append(line.replace("\n", "")) except IOError: - print("can't open template file 
[{}]".format(template_path)) + print(("can't open template file [{}]".format(template_path))) sys.exit(1) # now we write to final file @@ -348,15 +348,15 @@ os.mkdir(self.args.dest_dir) full_path = os.path.join(self.args.dest_dir, filename) if os.path.exists(full_path) and not self.args.force: - print( + print(( "The destination file [%s] already exists ! Use --force to overwrite it" % full_path - ) + )) try: with open(full_path, "w") as dest_file: dest_file.write("\n".join(file_buf)) except IOError: - print("Can't open destination file [%s]" % full_path) + print(("Can't open destination file [%s]" % full_path)) except OSError: print("It's not possible to generate the file, check your permissions") exit(1) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/bridge_constructor.py --- a/sat/bridge/bridge_constructor/bridge_constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/bridge_constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -22,7 +22,7 @@ from sat.bridge.bridge_constructor.constants import Const as C from sat.bridge.bridge_constructor import constructors, base_constructor import argparse -from ConfigParser import SafeConfigParser as Parser +from configparser import ConfigParser as Parser from importlib import import_module import os import os.path @@ -87,7 +87,7 @@ parser.add_argument( "-t", "--template", - type=file, + type=argparse.FileType(), default=default_template, help="use TEMPLATE to generate bridge (default: %(default)s)", ) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/bridge_template.ini --- a/sat/bridge/bridge_constructor/bridge_template.ini Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/bridge_template.ini Tue Aug 13 19:08:41 2019 +0200 @@ -507,7 +507,7 @@ sig_out=s doc=Get XMLUI to manage trust for given encryption algorithm doc_param_0=to_jid: bare JID of 
entity to manage -doc_param_0=namespace: namespace of the algorithm to manage +doc_param_1=namespace: namespace of the algorithm to manage doc_param_2=%(doc_profile_key)s doc_return=(XMLUI) UI of the trust management @@ -811,7 +811,7 @@ sig_out=(asa(sss)a{sa(a{ss}as)}) param_1_default=u'' param_2_default=True -param_3_default=u"@DEFAULT@" +param_3_default="@DEFAULT@" doc=Discover infos on an entity doc_param_0=entity_jid: JID to discover doc_param_1=node: node to use @@ -837,7 +837,7 @@ sig_out=a(sss) param_1_default=u'' param_2_default=True -param_3_default=u"@DEFAULT@" +param_3_default="@DEFAULT@" doc=Discover items of an entity doc_param_0=entity_jid: JID to discover doc_param_1=node: node to use @@ -856,7 +856,7 @@ param_4_default=True param_5_default=True param_6_default=False -param_7_default=u"@DEFAULT@" +param_7_default="@DEFAULT@" doc=Discover items of an entity doc_param_0=namespaces: namespaces of the features to check doc_param_1=identities: identities to filter diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constants.py --- a/sat/bridge/bridge_constructor/constants.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constants.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -22,9 +22,9 @@ class Const(constants.Const): - NAME = u"bridge_constructor" + NAME = "bridge_constructor" DEST_DIR_DEFAULT = "generated" - DESCRIPTION = u"""{name} Copyright (C) 2009-2019 Jérôme Poisson (aka Goffi) + DESCRIPTION = """{name} Copyright (C) 2009-2019 Jérôme Poisson (aka Goffi) This script construct a SàT bridge using the given protocol diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/dbus-xml/constructor.py --- a/sat/bridge/bridge_constructor/constructors/dbus-xml/constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus-xml/constructor.py Tue Aug 13 19:08:41 2019 
+0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -53,7 +53,7 @@ sections.sort() for section in sections: function = self.getValues(section) - print("Adding %s %s" % (section, function["type"])) + print(("Adding %s %s" % (section, function["type"]))) new_elt = doc.createElement( "method" if function["type"] == "method" else "signal" ) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/dbus/constructor.py --- a/sat/bridge/bridge_constructor/constructors/dbus/constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus/constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -114,5 +114,5 @@ % completion ) completion["result"] = ( - "unicode(%s)" if self.args.unicode and function["sig_out"] == "s" else "%s" + "str(%s)" if self.args.unicode and function["sig_out"] == "s" else "%s" ) % result diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py --- a/sat/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -101,11 +101,11 @@ raise InternalError callback = kwargs.pop("callback") errback = kwargs.pop("errback") - async = True + async_ = True else: - async = False + async_ = False result = self.cb[name](*args, **kwargs) - if async: + if async_: if not isinstance(result, Deferred): log.error("Asynchronous method [%s] does not return a Deferred." 
% name) raise AsyncNotDeferred @@ -170,9 +170,9 @@ i += 1 return attr - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False): """Dynamically add a method to Dbus Bridge""" - inspect_args = inspect.getargspec(method) + inspect_args = inspect.getfullargspec(method) _arguments = inspect_args.args _defaults = list(inspect_args.defaults or []) @@ -186,12 +186,12 @@ [repr(name)] + ( (_arguments + ["callback=callback", "errback=errback"]) - if async + if async_ else _arguments ) ) - if async: + if async_: _arguments.extend(["callback", "errback"]) _defaults.extend([None, None]) @@ -213,7 +213,7 @@ ) exec(code) # FIXME: to the same thing in a cleaner way, without compile/exec method = locals()[name] - async_callbacks = ("callback", "errback") if async else None + async_callbacks = ("callback", "errback") if async_ else None setattr( DbusObject, name, @@ -265,7 +265,7 @@ if e._dbus_error_name == "org.freedesktop.DBus.Error.NotSupported": log.error( _( - u"D-Bus is not launched, please see README to see instructions on how to launch it" + "D-Bus is not launched, please see README to see instructions on how to launch it" ) ) raise BridgeInitError @@ -277,11 +277,11 @@ log.debug("registering DBus bridge method [%s]" % name) self.dbus_bridge.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to Dbus Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [%s] to DBus bridge" % name) - self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async) + self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async_) self.register_method(name, method) def addSignal(self, name, int_suffix, signature, 
doc={}): diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/dbus/dbus_frontend_template.py --- a/sat/bridge/bridge_constructor/constructors/dbus/dbus_frontend_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus/dbus_frontend_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 #-*- coding: utf-8 -*- # SAT communication bridge @@ -70,12 +70,12 @@ dbus_interface=const_INT_PREFIX + const_CORE_SUFFIX) self.db_plugin_iface = dbus.Interface(self.db_object, dbus_interface=const_INT_PREFIX + const_PLUGIN_SUFFIX) - except dbus.exceptions.DBusException, e: + except dbus.exceptions.DBusException as e: if e._dbus_error_name in ('org.freedesktop.DBus.Error.ServiceUnknown', 'org.freedesktop.DBus.Error.Spawn.ExecFailed'): errback(BridgeExceptionNoService()) elif e._dbus_error_name == 'org.freedesktop.DBus.Error.NotSupported': - log.error(_(u"D-Bus is not launched, please see README to see instructions on how to launch it")) + log.error(_("D-Bus is not launched, please see README to see instructions on how to launch it")) errback(BridgeInitError) else: errback(e) @@ -102,14 +102,14 @@ # - if we have the 'callback' and 'errback' keyword arguments # - or if the last two arguments are callable - async = False + async_ = False args = list(args) if kwargs: if 'callback' in kwargs: - async = True + async_ = True _callback = kwargs.pop('callback') - _errback = kwargs.pop('errback', lambda failure: log.error(unicode(failure))) + _errback = kwargs.pop('errback', lambda failure: log.error(str(failure))) try: args.append(kwargs.pop('profile')) except KeyError: @@ -119,15 +119,15 @@ pass # at this point, kwargs should be empty if kwargs: - log.warnings(u"unexpected keyword arguments, they will be ignored: {}".format(kwargs)) + log.warnings("unexpected keyword arguments, they will be ignored: {}".format(kwargs)) elif len(args) >= 2 and callable(args[-1]) and 
callable(args[-2]): - async = True + async_ = True _errback = args.pop() _callback = args.pop() method = getattr(self.db_plugin_iface, name) - if async: + if async_: kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = _callback kwargs['error_handler'] = lambda err: _errback(dbus_to_bridge_exception(err)) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/embedded/constructor.py --- a/sat/bridge/bridge_constructor/constructors/embedded/constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/embedded/constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/embedded/embedded_frontend_template.py --- a/sat/bridge/bridge_constructor/constructors/embedded/embedded_frontend_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/embedded/embedded_frontend_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/embedded/embedded_template.py --- a/sat/bridge/bridge_constructor/constructors/embedded/embedded_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/embedded/embedded_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -25,7 +25,7 @@ class _Bridge(object): def __init__(self): - log.debug(u"Init embedded bridge...") + log.debug("Init embedded bridge...") self._methods_cbs = {} self._signals_cbs = {"core": {}, "plugin": {}} @@ -33,16 +33,16 @@ callback() def register_method(self, name, callback): - log.debug(u"registering embedded bridge method [{}]".format(name)) + 
log.debug("registering embedded bridge method [{}]".format(name)) if name in self._methods_cbs: - raise exceptions.ConflictError(u"method {} is already regitered".format(name)) + raise exceptions.ConflictError("method {} is already regitered".format(name)) self._methods_cbs[name] = callback def register_signal(self, functionName, handler, iface="core"): iface_dict = self._signals_cbs[iface] if functionName in iface_dict: raise exceptions.ConflictError( - u"signal {name} is already regitered for interface {iface}".format( + "signal {name} is already regitered for interface {iface}".format( name=functionName, iface=iface ) ) @@ -81,11 +81,11 @@ try: cb = self._signals_cbs["plugin"][name] except KeyError: - log.debug(u"ignoring signal {}: no callback registered".format(name)) + log.debug("ignoring signal {}: no callback registered".format(name)) else: cb(*args, **kwargs) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [{}] to embedded bridge".format(name)) self.register_method(name, method) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/mediawiki/constructor.py --- a/sat/bridge/bridge_constructor/constructors/mediawiki/constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/mediawiki/constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -85,7 +85,7 @@ sections.sort() for section in sections: function = self.getValues(section) - print("Adding %s %s" % (section, function["type"])) + print(("Adding %s %s" % (section, function["type"]))) async_msg = """
'''This method is asynchronous'''""" deprecated_msg = """
'''/!\ WARNING /!\ : This method is deprecated, please don't use it !'''""" signature_signal = ( @@ -161,7 +161,7 @@ else: core_bridge.append(line.replace("\n", "")) except IOError: - print("Can't open template file [%s]" % template_path) + print(("Can't open template file [%s]" % template_path)) sys.exit(1) # now we write to final file diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/pb/constructor.py --- a/sat/bridge/bridge_constructor/constructors/pb/constructor.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/pb/constructor.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/pb/pb_core_template.py --- a/sat/bridge/bridge_constructor/constructors/pb/pb_core_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/pb/pb_core_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -46,11 +46,11 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def sendSignalEb(self, failure, signal_name): log.error( - u"Error while sending signal {name}: {msg}".format( + "Error while sending signal {name}: {msg}".format( name=signal_name, msg=failure ) ) @@ -66,27 +66,27 @@ d.addErrback(self.sendSignalEb, name) if to_remove: for handler in to_remove: - log.debug(u"Removing signal handler for dead frontend") + log.debug("Removing signal handler for dead frontend") self.signals_handlers.remove(handler) def _bridgeDeactivateSignals(self): if hasattr(self, "signals_paused"): - log.warning(u"bridge signals already deactivated") + log.warning("bridge signals already deactivated") if 
self.signals_handler: self.signals_paused.extend(self.signals_handler) else: self.signals_paused = self.signals_handlers self.signals_handlers = [] - log.debug(u"bridge signals have been deactivated") + log.debug("bridge signals have been deactivated") def _bridgeReactivateSignals(self): try: self.signals_handlers = self.signals_paused except AttributeError: - log.debug(u"signals were already activated") + log.debug("signals were already activated") else: del self.signals_paused - log.debug(u"bridge signals have been reactivated") + log.debug("bridge signals have been reactivated") ##METHODS_PART## @@ -102,14 +102,14 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def register_method(self, name, callback): log.debug("registering PB bridge method [%s]" % name) setattr(self.root, "remote_" + name, callback) #  self.root.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to PB Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method {name} to PB bridge".format(name=name)) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py --- a/sat/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT communication bridge @@ -28,7 +28,7 @@ class SignalsHandler(pb.Referenceable): def __getattr__(self, name): if name.startswith("remote_"): - log.debug(u"calling an unregistered signal: {name}".format(name=name[7:])) + 
log.debug("calling an unregistered signal: {name}".format(name=name[7:])) return lambda *args, **kwargs: None else: @@ -43,7 +43,7 @@ pass else: raise exceptions.InternalError( - u"{name} signal handler has been registered twice".format( + "{name} signal handler has been registered twice".format( name=method_name ) ) @@ -99,7 +99,7 @@ d.addErrback(errback) def _initBridgeEb(self, failure): - log.error(u"Can't init bridge: {msg}".format(msg=failure)) + log.error("Can't init bridge: {msg}".format(msg=failure)) def _set_root(self, root): """set remote root object @@ -112,7 +112,7 @@ return d def _generic_errback(self, failure): - log.error(u"bridge failure: {}".format(failure)) + log.error("bridge failure: {}".format(failure)) def bridgeConnect(self, callback, errback): factory = pb.PBClientFactory() diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/dbus_bridge.py --- a/sat/bridge/dbus_bridge.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/dbus_bridge.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -101,11 +101,11 @@ raise InternalError callback = kwargs.pop("callback") errback = kwargs.pop("errback") - async = True + async_ = True else: - async = False + async_ = False result = self.cb[name](*args, **kwargs) - if async: + if async_: if not isinstance(result, Deferred): log.error("Asynchronous method [%s] does not return a Deferred." 
% name) raise AsyncNotDeferred @@ -214,73 +214,73 @@ in_signature='s', out_signature='a(a{ss}si)', async_callbacks=None) def actionsGet(self, profile_key="@DEFAULT@"): - return self._callback("actionsGet", unicode(profile_key)) + return self._callback("actionsGet", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='', async_callbacks=None) def addContact(self, entity_jid, profile_key="@DEFAULT@"): - return self._callback("addContact", unicode(entity_jid), unicode(profile_key)) + return self._callback("addContact", str(entity_jid), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=('callback', 'errback')) def asyncDeleteProfile(self, profile, callback=None, errback=None): - return self._callback("asyncDeleteProfile", unicode(profile), callback=callback, errback=errback) + return self._callback("asyncDeleteProfile", str(profile), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sssis', out_signature='s', async_callbacks=('callback', 'errback')) def asyncGetParamA(self, name, category, attribute="value", security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("asyncGetParamA", unicode(name), unicode(category), unicode(attribute), security_limit, unicode(profile_key), callback=callback, errback=errback) + return self._callback("asyncGetParamA", str(name), str(category), str(attribute), security_limit, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sis', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def asyncGetParamsValuesFromCategory(self, category, security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("asyncGetParamsValuesFromCategory", unicode(category), security_limit, unicode(profile_key), 
callback=callback, errback=errback) + return self._callback("asyncGetParamsValuesFromCategory", str(category), security_limit, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssa{ss}', out_signature='b', async_callbacks=('callback', 'errback')) def connect(self, profile_key="@DEFAULT@", password='', options={}, callback=None, errback=None): - return self._callback("connect", unicode(profile_key), unicode(password), options, callback=callback, errback=errback) + return self._callback("connect", str(profile_key), str(password), options, callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='', async_callbacks=('callback', 'errback')) def delContact(self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("delContact", unicode(entity_jid), unicode(profile_key), callback=callback, errback=errback) + return self._callback("delContact", str(entity_jid), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='asa(ss)bbbbbs', out_signature='(a{sa(sss)}a{sa(sss)}a{sa(sss)})', async_callbacks=('callback', 'errback')) - def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoFindByFeatures", namespaces, identities, bare_jid, service, roster, own_jid, local_device, unicode(profile_key), callback=callback, errback=errback) + def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key="@DEFAULT@", callback=None, errback=None): + return self._callback("discoFindByFeatures", namespaces, identities, bare_jid, service, roster, own_jid, local_device, str(profile_key), callback=callback, 
errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssbs', out_signature='(asa(sss)a{sa(a{ss}as)})', async_callbacks=('callback', 'errback')) - def discoInfos(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoInfos", unicode(entity_jid), unicode(node), use_cache, unicode(profile_key), callback=callback, errback=errback) + def discoInfos(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None): + return self._callback("discoInfos", str(entity_jid), str(node), use_cache, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssbs', out_signature='a(sss)', async_callbacks=('callback', 'errback')) - def discoItems(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoItems", unicode(entity_jid), unicode(node), use_cache, unicode(profile_key), callback=callback, errback=errback) + def discoItems(self, entity_jid, node=u'', use_cache=True, profile_key="@DEFAULT@", callback=None, errback=None): + return self._callback("discoItems", str(entity_jid), str(node), use_cache, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=('callback', 'errback')) def disconnect(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("disconnect", unicode(profile_key), callback=callback, errback=errback) + return self._callback("disconnect", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='s', async_callbacks=None) def encryptionNamespaceGet(self, arg_0): - return self._callback("encryptionNamespaceGet", unicode(arg_0)) + return 
self._callback("encryptionNamespaceGet", str(arg_0)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='aa{ss}', @@ -291,56 +291,56 @@ @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sss', out_signature='s', async_callbacks=('callback', 'errback')) - def encryptionTrustUIGet(self, namespace, arg_1, profile_key, callback=None, errback=None): - return self._callback("encryptionTrustUIGet", unicode(namespace), unicode(arg_1), unicode(profile_key), callback=callback, errback=errback) + def encryptionTrustUIGet(self, to_jid, namespace, profile_key, callback=None, errback=None): + return self._callback("encryptionTrustUIGet", str(to_jid), str(namespace), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def getConfig(self, section, name): - return self._callback("getConfig", unicode(section), unicode(name)) + return self._callback("getConfig", str(section), str(name)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a(sa{ss}as)', async_callbacks=('callback', 'errback')) def getContacts(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("getContacts", unicode(profile_key), callback=callback, errback=errback) + return self._callback("getContacts", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='as', async_callbacks=None) def getContactsFromGroup(self, group, profile_key="@DEFAULT@"): - return self._callback("getContactsFromGroup", unicode(group), unicode(profile_key)) + return self._callback("getContactsFromGroup", str(group), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='asass', out_signature='a{sa{ss}}', async_callbacks=None) def getEntitiesData(self, jids, keys, profile): - return 
self._callback("getEntitiesData", jids, keys, unicode(profile)) + return self._callback("getEntitiesData", jids, keys, str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sass', out_signature='a{ss}', async_callbacks=None) def getEntityData(self, jid, keys, profile): - return self._callback("getEntityData", unicode(jid), keys, unicode(profile)) + return self._callback("getEntityData", str(jid), keys, str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{ss}}', async_callbacks=('callback', 'errback')) def getFeatures(self, profile_key, callback=None, errback=None): - return self._callback("getFeatures", unicode(profile_key), callback=callback, errback=errback) + return self._callback("getFeatures", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def getMainResource(self, contact_jid, profile_key="@DEFAULT@"): - return self._callback("getMainResource", unicode(contact_jid), unicode(profile_key)) + return self._callback("getMainResource", str(contact_jid), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssss', out_signature='s', async_callbacks=None) def getParamA(self, name, category, attribute="value", profile_key="@DEFAULT@"): - return self._callback("getParamA", unicode(name), unicode(category), unicode(attribute), unicode(profile_key)) + return self._callback("getParamA", str(name), str(category), str(attribute), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='as', @@ -352,13 +352,13 @@ in_signature='iss', out_signature='s', async_callbacks=('callback', 'errback')) def getParamsUI(self, security_limit=-1, app='', profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("getParamsUI", security_limit, unicode(app), unicode(profile_key), 
callback=callback, errback=errback) + return self._callback("getParamsUI", security_limit, str(app), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{s(sia{ss})}}', async_callbacks=None) def getPresenceStatuses(self, profile_key="@DEFAULT@"): - return self._callback("getPresenceStatuses", unicode(profile_key)) + return self._callback("getPresenceStatuses", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='', @@ -376,73 +376,73 @@ in_signature='s', out_signature='a{ss}', async_callbacks=None) def getWaitingSub(self, profile_key="@DEFAULT@"): - return self._callback("getWaitingSub", unicode(profile_key)) + return self._callback("getWaitingSub", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssiba{ss}s', out_signature='a(sdssa{ss}a{ss}sa{ss})', async_callbacks=('callback', 'errback')) def historyGet(self, from_jid, to_jid, limit, between=True, filters='', profile="@NONE@", callback=None, errback=None): - return self._callback("historyGet", unicode(from_jid), unicode(to_jid), limit, between, filters, unicode(profile), callback=callback, errback=errback) + return self._callback("historyGet", str(from_jid), str(to_jid), limit, between, filters, str(profile), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def isConnected(self, profile_key="@DEFAULT@"): - return self._callback("isConnected", unicode(profile_key)) + return self._callback("isConnected", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sa{ss}s', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def launchAction(self, callback_id, data, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("launchAction", unicode(callback_id), data, 
unicode(profile_key), callback=callback, errback=errback) + return self._callback("launchAction", str(callback_id), data, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def loadParamsTemplate(self, filename): - return self._callback("loadParamsTemplate", unicode(filename)) + return self._callback("loadParamsTemplate", str(filename)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def menuHelpGet(self, menu_id, language): - return self._callback("menuHelpGet", unicode(menu_id), unicode(language)) + return self._callback("menuHelpGet", str(menu_id), str(language)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sasa{ss}is', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def menuLaunch(self, menu_type, path, data, security_limit, profile_key, callback=None, errback=None): - return self._callback("menuLaunch", unicode(menu_type), path, data, security_limit, unicode(profile_key), callback=callback, errback=errback) + return self._callback("menuLaunch", str(menu_type), path, data, security_limit, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='si', out_signature='a(ssasasa{ss})', async_callbacks=None) def menusGet(self, language, security_limit): - return self._callback("menusGet", unicode(language), security_limit) + return self._callback("menusGet", str(language), security_limit) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='s', async_callbacks=None) def messageEncryptionGet(self, to_jid, profile_key): - return self._callback("messageEncryptionGet", unicode(to_jid), unicode(profile_key)) + return self._callback("messageEncryptionGet", str(to_jid), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, 
in_signature='ssbs', out_signature='', async_callbacks=('callback', 'errback')) def messageEncryptionStart(self, to_jid, namespace='', replace=False, profile_key="@NONE@", callback=None, errback=None): - return self._callback("messageEncryptionStart", unicode(to_jid), unicode(namespace), replace, unicode(profile_key), callback=callback, errback=errback) + return self._callback("messageEncryptionStart", str(to_jid), str(namespace), replace, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='', async_callbacks=('callback', 'errback')) def messageEncryptionStop(self, to_jid, profile_key, callback=None, errback=None): - return self._callback("messageEncryptionStop", unicode(to_jid), unicode(profile_key), callback=callback, errback=errback) + return self._callback("messageEncryptionStop", str(to_jid), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sa{ss}a{ss}sa{ss}s', out_signature='', async_callbacks=('callback', 'errback')) def messageSend(self, to_jid, message, subject={}, mess_type="auto", extra={}, profile_key="@NONE@", callback=None, errback=None): - return self._callback("messageSend", unicode(to_jid), message, subject, unicode(mess_type), extra, unicode(profile_key), callback=callback, errback=errback) + return self._callback("messageSend", str(to_jid), message, subject, str(mess_type), extra, str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='', out_signature='a{ss}', @@ -454,37 +454,37 @@ in_signature='sis', out_signature='', async_callbacks=None) def paramsRegisterApp(self, xml, security_limit=-1, app=''): - return self._callback("paramsRegisterApp", unicode(xml), security_limit, unicode(app)) + return self._callback("paramsRegisterApp", str(xml), security_limit, str(app)) 
@dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sss', out_signature='', async_callbacks=('callback', 'errback')) def profileCreate(self, profile, password='', component='', callback=None, errback=None): - return self._callback("profileCreate", unicode(profile), unicode(password), unicode(component), callback=callback, errback=errback) + return self._callback("profileCreate", str(profile), str(password), str(component), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def profileIsSessionStarted(self, profile_key="@DEFAULT@"): - return self._callback("profileIsSessionStarted", unicode(profile_key)) + return self._callback("profileIsSessionStarted", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='s', async_callbacks=None) def profileNameGet(self, profile_key="@DEFAULT@"): - return self._callback("profileNameGet", unicode(profile_key)) + return self._callback("profileNameGet", str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=None) def profileSetDefault(self, profile): - return self._callback("profileSetDefault", unicode(profile)) + return self._callback("profileSetDefault", str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ss', out_signature='b', async_callbacks=('callback', 'errback')) def profileStartSession(self, password='', profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("profileStartSession", unicode(password), unicode(profile_key), callback=callback, errback=errback) + return self._callback("profileStartSession", str(password), str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='bb', out_signature='as', @@ -496,61 +496,61 @@ in_signature='ss', out_signature='a{ss}', 
async_callbacks=None) def progressGet(self, id, profile): - return self._callback("progressGet", unicode(id), unicode(profile)) + return self._callback("progressGet", str(id), str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{sa{ss}}}', async_callbacks=None) def progressGetAll(self, profile): - return self._callback("progressGetAll", unicode(profile)) + return self._callback("progressGetAll", str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{sa{sa{ss}}}', async_callbacks=None) def progressGetAllMetadata(self, profile): - return self._callback("progressGetAllMetadata", unicode(profile)) + return self._callback("progressGetAllMetadata", str(profile)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='', async_callbacks=('callback', 'errback')) def rosterResync(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("rosterResync", unicode(profile_key), callback=callback, errback=errback) + return self._callback("rosterResync", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='b', async_callbacks=None) def saveParamsTemplate(self, filename): - return self._callback("saveParamsTemplate", unicode(filename)) + return self._callback("saveParamsTemplate", str(filename)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='s', out_signature='a{ss}', async_callbacks=('callback', 'errback')) def sessionInfosGet(self, profile_key, callback=None, errback=None): - return self._callback("sessionInfosGet", unicode(profile_key), callback=callback, errback=errback) + return self._callback("sessionInfosGet", str(profile_key), callback=callback, errback=errback) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sssis', out_signature='', async_callbacks=None) def 
setParam(self, name, value, category, security_limit=-1, profile_key="@DEFAULT@"): - return self._callback("setParam", unicode(name), unicode(value), unicode(category), security_limit, unicode(profile_key)) + return self._callback("setParam", str(name), str(value), str(category), security_limit, str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssa{ss}s', out_signature='', async_callbacks=None) def setPresence(self, to_jid='', show='', statuses={}, profile_key="@DEFAULT@"): - return self._callback("setPresence", unicode(to_jid), unicode(show), statuses, unicode(profile_key)) + return self._callback("setPresence", str(to_jid), str(show), statuses, str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='sss', out_signature='', async_callbacks=None) def subscription(self, sub_type, entity, profile_key="@DEFAULT@"): - return self._callback("subscription", unicode(sub_type), unicode(entity), unicode(profile_key)) + return self._callback("subscription", str(sub_type), str(entity), str(profile_key)) @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, in_signature='ssass', out_signature='', async_callbacks=('callback', 'errback')) def updateContact(self, entity_jid, name, groups, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("updateContact", unicode(entity_jid), unicode(name), groups, unicode(profile_key), callback=callback, errback=errback) + return self._callback("updateContact", str(entity_jid), str(name), groups, str(profile_key), callback=callback, errback=errback) def __attributes(self, in_sign): """Return arguments to user given a in_sign @@ -590,9 +590,9 @@ i += 1 return attr - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False): """Dynamically add a method to Dbus Bridge""" - inspect_args = inspect.getargspec(method) + inspect_args = 
inspect.getfullargspec(method) _arguments = inspect_args.args _defaults = list(inspect_args.defaults or []) @@ -606,12 +606,12 @@ [repr(name)] + ( (_arguments + ["callback=callback", "errback=errback"]) - if async + if async_ else _arguments ) ) - if async: + if async_: _arguments.extend(["callback", "errback"]) _defaults.extend([None, None]) @@ -633,7 +633,7 @@ ) exec(code) # FIXME: to the same thing in a cleaner way, without compile/exec method = locals()[name] - async_callbacks = ("callback", "errback") if async else None + async_callbacks = ("callback", "errback") if async_ else None setattr( DbusObject, name, @@ -685,7 +685,7 @@ if e._dbus_error_name == "org.freedesktop.DBus.Error.NotSupported": log.error( _( - u"D-Bus is not launched, please see README to see instructions on how to launch it" + "D-Bus is not launched, please see README to see instructions on how to launch it" ) ) raise BridgeInitError @@ -744,11 +744,11 @@ log.debug("registering DBus bridge method [%s]" % name) self.dbus_bridge.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to Dbus Bridge""" # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [%s] to DBus bridge" % name) - self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async) + self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async_) self.register_method(name, method) def addSignal(self, name, int_suffix, signature, doc={}): diff -r ff5bcb12ae60 -r ab2696e34d29 sat/bridge/pb.py --- a/sat/bridge/pb.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/bridge/pb.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -46,11 +46,11 @@ def remote_initBridge(self, 
signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def sendSignalEb(self, failure, signal_name): log.error( - u"Error while sending signal {name}: {msg}".format( + "Error while sending signal {name}: {msg}".format( name=signal_name, msg=failure ) ) @@ -66,27 +66,27 @@ d.addErrback(self.sendSignalEb, name) if to_remove: for handler in to_remove: - log.debug(u"Removing signal handler for dead frontend") + log.debug("Removing signal handler for dead frontend") self.signals_handlers.remove(handler) def _bridgeDeactivateSignals(self): if hasattr(self, "signals_paused"): - log.warning(u"bridge signals already deactivated") + log.warning("bridge signals already deactivated") if self.signals_handler: self.signals_paused.extend(self.signals_handler) else: self.signals_paused = self.signals_handlers self.signals_handlers = [] - log.debug(u"bridge signals have been deactivated") + log.debug("bridge signals have been deactivated") def _bridgeReactivateSignals(self): try: self.signals_handlers = self.signals_paused except AttributeError: - log.debug(u"signals were already activated") + log.debug("signals were already activated") else: del self.signals_paused - log.debug(u"bridge signals have been reactivated") + log.debug("bridge signals have been reactivated") ##METHODS_PART## @@ -102,14 +102,14 @@ def remote_initBridge(self, signals_handler): self.signals_handlers.append(signals_handler) - log.info(u"registered signal handler") + log.info("registered signal handler") def register_method(self, name, callback): log.debug("registering PB bridge method [%s]" % name) setattr(self.root, "remote_" + name, callback) #  self.root.register_method(name, callback) - def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): + def addMethod(self, name, int_suffix, in_sign, out_sign, method, async_=False, doc={}): """Dynamically add a method to PB Bridge""" # FIXME: doc 
parameter is kept only temporary, the time to remove it from calls log.debug("Adding method {name} to PB bridge".format(name=name)) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/core/constants.py --- a/sat/core/constants.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/constants.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -28,21 +28,21 @@ class Const(object): ## Application ## - APP_NAME = u"Salut à Toi" - APP_NAME_SHORT = u"SàT" - APP_NAME_FILE = u"sat" - APP_NAME_FULL = u"{name_short} ({name})".format( + APP_NAME = "Salut à Toi" + APP_NAME_SHORT = "SàT" + APP_NAME_FILE = "sat" + APP_NAME_FULL = "{name_short} ({name})".format( name_short=APP_NAME_SHORT, name=APP_NAME ) APP_VERSION = ( sat.__version__ ) # Please add 'D' at the end of version in sat/VERSION for dev versions - APP_RELEASE_NAME = u"La Cecília" - APP_URL = u"https://salut-a-toi.org" + APP_RELEASE_NAME = "La Cecília" + APP_URL = "https://salut-a-toi.org" ## Runtime ## PLUGIN_EXT = "py" - HISTORY_SKIP = u"skip" + HISTORY_SKIP = "skip" ## Main config ## DEFAULT_BRIDGE = "dbus" @@ -122,15 +122,15 @@ ) MESS_TYPE_ALL = MESS_TYPE_STANDARD + (MESS_TYPE_INFO, MESS_TYPE_AUTO) - MESS_EXTRA_INFO = u"info_type" - EXTRA_INFO_DECR_ERR = u"DECRYPTION_ERROR" - EXTRA_INFO_ENCR_ERR = u"ENCRYPTION_ERROR" + MESS_EXTRA_INFO = "info_type" + EXTRA_INFO_DECR_ERR = "DECRYPTION_ERROR" + EXTRA_INFO_ENCR_ERR = "ENCRYPTION_ERROR" # encryption is a key for plugins - MESS_KEY_ENCRYPTION = u"ENCRYPTION" + MESS_KEY_ENCRYPTION = "ENCRYPTION" # encrypted is a key for frontends - MESS_KEY_ENCRYPTED = u"encrypted" - MESS_KEY_TRUSTED = u"trusted" + MESS_KEY_ENCRYPTED = "encrypted" + MESS_KEY_TRUSTED = "trusted" ## Chat ## CHAT_ONE2ONE = "one2one" @@ -162,110 +162,44 @@ ## Directories ## # directory for components specific data - COMPONENTS_DIR = u"components" - CACHE_DIR = u"cache" + COMPONENTS_DIR = "components" + CACHE_DIR = "cache" # files in 
file dir are stored for long term # files dir is global, i.e. for all profiles - FILES_DIR = u"files" + FILES_DIR = "files" # FILES_LINKS_DIR is a directory where files owned by a specific profile # are linked to the global files directory. This way the directory can be #  shared per profiles while keeping global directory where identical files # shared between different profiles are not duplicated. - FILES_LINKS_DIR = u"files_links" + FILES_LINKS_DIR = "files_links" # FILES_TMP_DIR is where profile's partially transfered files are put. # Once transfer is completed, they are moved to FILES_DIR - FILES_TMP_DIR = u"files_tmp" - - ## Configuration ## - if ( - BaseDirectory - ): # skipped when xdg module is not available (should not happen in backend) - if "org.salutatoi.cagou" in BaseDirectory.__file__: - # FIXME: hack to make config read from the right location on Android - # TODO: fix it in a more proper way - - # we need to use Android API to get downloads directory - import os.path - from jnius import autoclass - - Environment = autoclass("android.os.Environment") - - BaseDirectory = None - DEFAULT_CONFIG = { - "local_dir": "/data/data/org.salutatoi.cagou/app", - "media_dir": "/data/data/org.salutatoi.cagou/files/app/media", - # FIXME: temporary location for downloads, need to call API properly - "downloads_dir": os.path.join( - Environment.getExternalStoragePublicDirectory( - Environment.DIRECTORY_DOWNLOADS - ).getAbsolutePath(), - APP_NAME_FILE, - ), - "pid_dir": "%(local_dir)s", - "log_dir": "%(local_dir)s", - } - CONFIG_FILES = [ - "/data/data/org.salutatoi.cagou/files/app/android/" - + APP_NAME_FILE - + ".conf" - ] - else: - import os - CONFIG_PATHS = ( - ["/etc/", "~/", "~/.", "", "."] - + [ - "%s/" % path - for path in list(BaseDirectory.load_config_paths(APP_NAME_FILE)) - ] - ) - - # on recent versions of Flatpak, FLATPAK_ID is set at run time - # it seems that this is not the case on older versions, - # but FLATPAK_SANDBOX_DIR seems set then - if 
os.getenv('FLATPAK_ID') or os.getenv('FLATPAK_SANDBOX_DIR'): - # for Flatpak, the conf can't be set in /etc or $HOME, so we have - # to add /app - CONFIG_PATHS.append('/app/') - - ## Configuration ## - DEFAULT_CONFIG = { - "media_dir": "/usr/share/" + APP_NAME_FILE + "/media", - "local_dir": BaseDirectory.save_data_path(APP_NAME_FILE), - "downloads_dir": "~/Downloads/" + APP_NAME_FILE, - "pid_dir": "%(local_dir)s", - "log_dir": "%(local_dir)s", - } - - # List of the configuration filenames sorted by ascending priority - CONFIG_FILES = [ - realpath(expanduser(path) + APP_NAME_FILE + ".conf") - for path in CONFIG_PATHS - ] + FILES_TMP_DIR = "files_tmp" ## Templates ## - TEMPLATE_TPL_DIR = u"templates" - TEMPLATE_THEME_DEFAULT = u"default" - TEMPLATE_STATIC_DIR = u"static" - KEY_LANG = u"lang" # templates i18n + TEMPLATE_TPL_DIR = "templates" + TEMPLATE_THEME_DEFAULT = "default" + TEMPLATE_STATIC_DIR = "static" + KEY_LANG = "lang" # templates i18n ## Plugins ## # PLUGIN_INFO keys # XXX: we use PI instead of PLUG_INFO which would normally be used # to make the header more readable - PI_NAME = u"name" - PI_IMPORT_NAME = u"import_name" - PI_MAIN = u"main" - PI_HANDLER = u"handler" + PI_NAME = "name" + PI_IMPORT_NAME = "import_name" + PI_MAIN = "main" + PI_HANDLER = "handler" PI_TYPE = ( - u"type" + "type" ) #  FIXME: should be types, and should handle single unicode type or tuple of types (e.g. 
"blog" and "import") - PI_MODES = u"modes" - PI_PROTOCOLS = u"protocols" - PI_DEPENDENCIES = u"dependencies" - PI_RECOMMENDATIONS = u"recommendations" - PI_DESCRIPTION = u"description" - PI_USAGE = u"usage" + PI_MODES = "modes" + PI_PROTOCOLS = "protocols" + PI_DEPENDENCIES = "dependencies" + PI_RECOMMENDATIONS = "recommendations" + PI_DESCRIPTION = "description" + PI_USAGE = "usage" # Types PLUG_TYPE_XEP = "XEP" @@ -387,8 +321,8 @@ META_TYPE_OVERWRITE = "overwrite" ## HARD-CODED ACTIONS IDS (generated with uuid.uuid4) ## - AUTHENTICATE_PROFILE_ID = u"b03bbfa8-a4ae-4734-a248-06ce6c7cf562" - CHANGE_XMPP_PASSWD_ID = u"878b9387-de2b-413b-950f-e424a147bcd0" + AUTHENTICATE_PROFILE_ID = "b03bbfa8-a4ae-4734-a248-06ce6c7cf562" + CHANGE_XMPP_PASSWD_ID = "878b9387-de2b-413b-950f-e424a147bcd0" ## Text values ## BOOL_TRUE = "true" @@ -399,32 +333,32 @@ HISTORY_LIMIT_NONE = -2 ## Progress error special values ## - PROGRESS_ERROR_DECLINED = u"declined" #  session has been declined by peer user + PROGRESS_ERROR_DECLINED = "declined" #  session has been declined by peer user ## Files ## FILE_TYPE_DIRECTORY = "directory" FILE_TYPE_FILE = "file" ## Permissions management ## - ACCESS_PERM_READ = u"read" - ACCESS_PERM_WRITE = u"write" + ACCESS_PERM_READ = "read" + ACCESS_PERM_WRITE = "write" ACCESS_PERMS = {ACCESS_PERM_READ, ACCESS_PERM_WRITE} - ACCESS_TYPE_PUBLIC = u"public" - ACCESS_TYPE_WHITELIST = u"whitelist" + ACCESS_TYPE_PUBLIC = "public" + ACCESS_TYPE_WHITELIST = "whitelist" ACCESS_TYPES = (ACCESS_TYPE_PUBLIC, ACCESS_TYPE_WHITELIST) ## Common data keys ## - KEY_THUMBNAILS = u"thumbnails" - KEY_PROGRESS_ID = u"progress_id" + KEY_THUMBNAILS = "thumbnails" + KEY_PROGRESS_ID = "progress_id" ## Common extra keys/values ## - KEY_ORDER_BY = u"order_by" + KEY_ORDER_BY = "order_by" - ORDER_BY_CREATION = u'creation' - ORDER_BY_MODIFICATION = u'modification' + ORDER_BY_CREATION = 'creation' + ORDER_BY_MODIFICATION = 'modification' # internationalisation - DEFAULT_LOCALE = u"en_GB" + 
DEFAULT_LOCALE = "en_GB" ## Misc ## SAVEFILE_DATABASE = APP_NAME_FILE + ".db" @@ -434,11 +368,11 @@ NO_LIMIT = -1 # used in bridge when a integer value is expected DEFAULT_MAX_AGE = 1209600 # default max age of cached files, in seconds HASH_SHA1_EMPTY = "da39a3ee5e6b4b0d3255bfef95601890afd80709" - STANZA_NAMES = (u"iq", u"message", u"presence") + STANZA_NAMES = ("iq", "message", "presence") # Stream Hooks - STREAM_HOOK_SEND = u"send" - STREAM_HOOK_RECEIVE = u"receive" + STREAM_HOOK_SEND = "send" + STREAM_HOOK_RECEIVE = "receive" @classmethod def LOG_OPTIONS(cls): @@ -456,7 +390,7 @@ @classmethod def bool(cls, value): """@return (bool): bool value for associated constant""" - assert isinstance(value, basestring) + assert isinstance(value, str) return value.lower() in (cls.BOOL_TRUE, "1", "yes", "on") @classmethod @@ -464,3 +398,72 @@ """@return (str): constant associated to bool value""" assert isinstance(value, bool) return cls.BOOL_TRUE if value else cls.BOOL_FALSE + + + +## Configuration ## +if ( + BaseDirectory +): # skipped when xdg module is not available (should not happen in backend) + if "org.salutatoi.cagou" in BaseDirectory.__file__: + # FIXME: hack to make config read from the right location on Android + # TODO: fix it in a more proper way + + # we need to use Android API to get downloads directory + import os.path + from jnius import autoclass + + Environment = autoclass("android.os.Environment") + + BaseDirectory = None + Const.DEFAULT_CONFIG = { + "local_dir": "/data/data/org.salutatoi.cagou/app", + "media_dir": "/data/data/org.salutatoi.cagou/files/app/media", + # FIXME: temporary location for downloads, need to call API properly + "downloads_dir": os.path.join( + Environment.getExternalStoragePublicDirectory( + Environment.DIRECTORY_DOWNLOADS + ).getAbsolutePath(), + Const.APP_NAME_FILE, + ), + "pid_dir": "%(local_dir)s", + "log_dir": "%(local_dir)s", + } + Const.CONFIG_FILES = [ + "/data/data/org.salutatoi.cagou/files/app/android/" + + 
Const.APP_NAME_FILE + + ".conf" + ] + else: + import os + Const.CONFIG_PATHS = ( + ["/etc/", "~/", "~/.", "", "."] + + [ + "%s/" % path + for path in list(BaseDirectory.load_config_paths(Const.APP_NAME_FILE)) + ] + ) + + # on recent versions of Flatpak, FLATPAK_ID is set at run time + # it seems that this is not the case on older versions, + # but FLATPAK_SANDBOX_DIR seems set then + if os.getenv('FLATPAK_ID') or os.getenv('FLATPAK_SANDBOX_DIR'): + # for Flatpak, the conf can't be set in /etc or $HOME, so we have + # to add /app + Const.CONFIG_PATHS.append('/app/') + + ## Configuration ## + Const.DEFAULT_CONFIG = { + "media_dir": "/usr/share/" + Const.APP_NAME_FILE + "/media", + "local_dir": BaseDirectory.save_data_path(Const.APP_NAME_FILE), + "downloads_dir": "~/Downloads/" + Const.APP_NAME_FILE, + "pid_dir": "%(local_dir)s", + "log_dir": "%(local_dir)s", + } + + # List of the configuration filenames sorted by ascending priority + Const.CONFIG_FILES = [ + realpath(expanduser(path) + Const.APP_NAME_FILE + ".conf") + for path in Const.CONFIG_PATHS + ] + diff -r ff5bcb12ae60 -r ab2696e34d29 sat/core/exceptions.py --- a/sat/core/exceptions.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/exceptions.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT Exceptions diff -r ff5bcb12ae60 -r ab2696e34d29 sat/core/i18n.py --- a/sat/core/i18n.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/i18n.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -26,7 +26,7 @@ import gettext - _ = gettext.translation("sat", "i18n", fallback=True).ugettext + _ = gettext.translation("sat", "i18n", fallback=True).gettext _translators = {None: gettext.NullTranslations()} def languageSwitch(lang=None): @@ -34,7 +34,7 @@ _translators[lang] = gettext.translation( "sat", languages=[lang], fallback=True ) - 
_translators[lang].install(unicode=True) + _translators[lang].install() except ImportError: diff -r ff5bcb12ae60 -r ab2696e34d29 sat/core/log.py --- a/sat/core/log.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/log.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -67,7 +67,7 @@ """ if kwargs.get('exc_info', False): message = self.addTraceback(message) - print message + print(message) def log(self, level, message, **kwargs): """Print message @@ -197,7 +197,7 @@ """update existing logger to the class needed for this backend""" if self.LOGGER_CLASS is None: return - for name, logger in _loggers.items(): + for name, logger in list(_loggers.items()): _loggers[name] = self.LOGGER_CLASS(logger) def preTreatment(self): @@ -235,7 +235,7 @@ def configureColors(self, colors, force_colors, levels_taints_dict): if colors: # if color are used, we need to handle levels_taints_dict - for level in levels_taints_dict.keys(): + for level in list(levels_taints_dict.keys()): # we wants levels in uppercase to correspond to contstants levels_taints_dict[level.upper()] = levels_taints_dict[level] taints = self.__class__.taints = {} @@ -283,7 +283,7 @@ options = None if output not in (C.LOG_OPT_OUTPUT_DEFAULT, C.LOG_OPT_OUTPUT_FILE, C.LOG_OPT_OUTPUT_MEMORY): - raise ValueError(u"Invalid output [%s]" % output) + raise ValueError("Invalid output [%s]" % output) if output == C.LOG_OPT_OUTPUT_DEFAULT: # no option for defaut handler @@ -303,7 +303,7 @@ handlers[output] = limit if options: # we should not have unparsed options - raise ValueError(u"options [{options}] are not supported for {handler} output".format(options=options, handler=output)) + raise ValueError("options [{options}] are not supported for {handler} output".format(options=options, handler=output)) @staticmethod def memoryGet(size=None): diff -r ff5bcb12ae60 -r ab2696e34d29 sat/core/log_config.py --- a/sat/core/log_config.py Wed 
Jul 31 11:31:22 2019 +0200 +++ b/sat/core/log_config.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT: a XMPP client @@ -125,7 +125,7 @@ ) or self.LOGGER_CLASS.force_colors: message = event.get("message", tuple()) if message: - event["message"] = ("".join(message),) # must be a tuple + event["message"] = (b"".join(message),) # must be a tuple observer(event) # we can now call the original observer return observer_hook @@ -158,7 +158,7 @@ observer = self.changeObserver(observer, can_colors=True) else: # we use print because log system is not fully initialized - print("Unmanaged observer [%s]" % observer) + print(("Unmanaged observer [%s]" % observer)) return observer self.observers[ori] = observer return observer @@ -202,10 +202,10 @@ import types # see https://stackoverflow.com/a/4267590 (thx Chris Morgan/aaronasterling) twisted_log.addObserver = types.MethodType( - addObserverObserver, self.log_publisher, twisted_log.LogPublisher + addObserverObserver, self.log_publisher ) twisted_log.removeObserver = types.MethodType( - removeObserverObserver, self.log_publisher, twisted_log.LogPublisher + removeObserverObserver, self.log_publisher ) # we now change existing observers @@ -282,7 +282,7 @@ if event.get("isError", False) else twisted_logger.info ) - log_method(text.decode("utf-8")) + log_method(text) self.log_publisher._originalAddObserver(twistedObserver) @@ -336,7 +336,7 @@ import sys class SatFormatter(logging.Formatter): - u"""Formatter which manage SàT specificities""" + """Formatter which manage SàT specificities""" _format = fmt _with_profile = "%(profile)s" in fmt @@ -395,7 +395,7 @@ root_logger = logging.getLogger() if len(root_logger.handlers) == 0: - for handler, options in log.handlers.items(): + for handler, options in list(log.handlers.items()): if handler == C.LOG_OPT_OUTPUT_DEFAULT: hdlr = logging.StreamHandler() try: @@ -426,7 +426,7 @@ else: raise ValueError("Unknown 
handler type") else: - root_logger.warning(u"Handlers already set on root logger") + root_logger.warning("Handlers already set on root logger") @staticmethod def memoryGet(size=None): diff -r ff5bcb12ae60 -r ab2696e34d29 sat/core/patches.py --- a/sat/core/patches.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/patches.py Tue Aug 13 19:08:41 2019 +0200 @@ -84,7 +84,7 @@ def addHook(self, hook_type, callback): """Add a send or receive hook""" - conflict_msg = (u"Hook conflict: can't add {hook_type} hook {callback}" + conflict_msg = ("Hook conflict: can't add {hook_type} hook {callback}" .format(hook_type=hook_type, callback=callback)) if hook_type == C.STREAM_HOOK_RECEIVE: if callback not in self._onElementHooks: @@ -97,7 +97,7 @@ else: log.warning(conflict_msg) else: - raise ValueError(u"Invalid hook type: {hook_type}" + raise ValueError("Invalid hook type: {hook_type}" .format(hook_type=hook_type)) def onElement(self, element): @@ -161,9 +161,10 @@ def apply(): - # certificate validation - xmlstream.TLSInitiatingInitializer = TLSInitiatingInitializer - client.XMPPClient = XMPPClient + # FIXME: certificate validation is now implemented in Twisted trunk, to be removed + # # certificate validation + # xmlstream.TLSInitiatingInitializer = TLSInitiatingInitializer + # client.XMPPClient = XMPPClient # XmlStream triggers xmlstream.XmlStreamFactory.protocol = XmlStream # jid fix diff -r ff5bcb12ae60 -r ab2696e34d29 sat/core/sat_main.py --- a/sat/core/sat_main.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/sat_main.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -66,8 +66,8 @@ self.plugins = {} # map for short name to whole namespace, self.ns_map = { - u"x-data": xmpp.NS_X_DATA, - u"disco#info": xmpp.NS_DISCO_INFO, + "x-data": xmpp.NS_X_DATA, + "disco#info": xmpp.NS_DISCO_INFO, } # extended by plugins with registerNamespace self.memory = memory.Memory(self) @@ -79,13 
+79,13 @@ bridge_module = dynamic_import.bridge(bridge_name) if bridge_module is None: - log.error(u"Can't find bridge module of name {}".format(bridge_name)) + log.error("Can't find bridge module of name {}".format(bridge_name)) sys.exit(1) - log.info(u"using {} bridge".format(bridge_name)) + log.info("using {} bridge".format(bridge_name)) try: self.bridge = bridge_module.Bridge() except exceptions.BridgeInitError: - log.error(u"Bridge can't be initialised, can't start SàT core") + log.error("Bridge can't be initialised, can't start SàT core") sys.exit(1) self.bridge.register_method("getReady", lambda: self.initialised) self.bridge.register_method("getVersion", lambda: self.full_version) @@ -181,7 +181,7 @@ try: return self._version_cache except AttributeError: - self._version_cache = u"{} « {} » ({})".format( + self._version_cache = "{} « {} » ({})".format( version, C.APP_RELEASE_NAME, utils.getRepositoryData(sat) ) return self._version_cache @@ -202,14 +202,14 @@ ui_profile_manager.ProfileManager(self) except Exception as e: log.error( - _(u"Could not initialize backend: {reason}").format( + _("Could not initialize backend: {reason}").format( reason=str(e).decode("utf-8", "ignore") ) ) sys.exit(1) self._addBaseMenus() self.initialised.callback(None) - log.info(_(u"Backend is ready")) + log.info(_("Backend is ready")) def _addBaseMenus(self): """Add base menus""" @@ -246,15 +246,15 @@ except exceptions.MissingModule as e: self._unimport_plugin(plugin_path) log.warning( - u"Can't import plugin [{path}] because of an unavailale third party " - u"module:\n{msg}".format( + "Can't import plugin [{path}] because of an unavailale third party " + "module:\n{msg}".format( path=plugin_path, msg=e ) ) continue except exceptions.CancelError as e: log.info( - u"Plugin [{path}] cancelled its own import: {msg}".format( + "Plugin [{path}] cancelled its own import: {msg}".format( path=plugin_path, msg=e ) ) @@ -264,7 +264,7 @@ import traceback log.error( - _(u"Can't import plugin 
[{path}]:\n{error}").format( + _("Can't import plugin [{path}]:\n{error}").format( path=plugin_path, error=traceback.format_exc() ) ) @@ -274,17 +274,17 @@ plugin_info = mod.PLUGIN_INFO import_name = plugin_info["import_name"] - plugin_modes = plugin_info[u"modes"] = set( - plugin_info.setdefault(u"modes", C.PLUG_MODE_DEFAULT) + plugin_modes = plugin_info["modes"] = set( + plugin_info.setdefault("modes", C.PLUG_MODE_DEFAULT) ) # if the plugin is an entry point, it must work in component mode - if plugin_info[u"type"] == C.PLUG_TYPE_ENTRY_POINT: + if plugin_info["type"] == C.PLUG_TYPE_ENTRY_POINT: # if plugin is an entrypoint, we cache it if C.PLUG_MODE_COMPONENT not in plugin_modes: log.error( _( - u"{type} type must be used with {mode} mode, ignoring plugin" + "{type} type must be used with {mode} mode, ignoring plugin" ).format(type=C.PLUG_TYPE_ENTRY_POINT, mode=C.PLUG_MODE_COMPONENT) ) self._unimport_plugin(plugin_path) @@ -293,8 +293,8 @@ if import_name in plugins_to_import: log.error( _( - u"Name conflict for import name [{import_name}], can't import " - u"plugin [{name}]" + "Name conflict for import name [{import_name}], can't import " + "plugin [{name}]" ).format(**plugin_info) ) continue @@ -320,7 +320,7 @@ is raised """ if import_name in self.plugins: - log.debug(u"Plugin {} already imported, passing".format(import_name)) + log.debug("Plugin {} already imported, passing".format(import_name)) return if not import_name: import_name, (plugin_path, mod, plugin_info) = plugins_to_import.popitem() @@ -328,10 +328,10 @@ if not import_name in plugins_to_import: if optional: log.warning( - _(u"Recommended plugin not found: {}").format(import_name) + _("Recommended plugin not found: {}").format(import_name) ) return - msg = u"Dependency not found: {}".format(import_name) + msg = "Dependency not found: {}".format(import_name) log.error(msg) raise ImportError(msg) plugin_path, mod, plugin_info = plugins_to_import.pop(import_name) @@ -340,7 +340,7 @@ for to_import in 
dependencies + recommendations: if to_import not in self.plugins: log.debug( - u"Recursively import dependency of [%s]: [%s]" + "Recursively import dependency of [%s]: [%s]" % (import_name, to_import) ) try: @@ -349,7 +349,7 @@ ) except ImportError as e: log.warning( - _(u"Can't import plugin {name}: {error}").format( + _("Can't import plugin {name}: {error}").format( name=plugin_info["name"], error=e ) ) @@ -362,13 +362,13 @@ self.plugins[import_name] = getattr(mod, plugin_info["main"])(self) except Exception as e: log.warning( - u'Error while loading plugin "{name}", ignoring it: {error}'.format( + 'Error while loading plugin "{name}", ignoring it: {error}'.format( name=plugin_info["name"], error=e ) ) if optional: return - raise ImportError(u"Error during initiation") + raise ImportError("Error during initiation") if C.bool(plugin_info.get(C.PI_HANDLER, C.BOOL_FALSE)): self.plugins[import_name].is_handler = True else: @@ -386,7 +386,7 @@ # pluging depending on the unloaded one should be unloaded too # for now, just a basic call on plugin.unload is done defers_list = [] - for plugin in self.plugins.itervalues(): + for plugin in self.plugins.values(): try: unload = plugin.unload except AttributeError: @@ -419,7 +419,7 @@ def connectProfile(__=None): if self.isConnected(profile): - log.info(_(u"already connected !")) + log.info(_("already connected !")) return True if self.memory.isComponent(profile): @@ -439,7 +439,7 @@ if not self.isConnected(profile_key): # isConnected is checked here and not on client # because client is deleted when session is ended - log.info(_(u"not connected !")) + log.info(_("not connected !")) return defer.succeed(None) client = self.getClient(profile_key) return client.entityDisconnect() @@ -468,7 +468,7 @@ pass features = [] - for import_name, plugin in self.plugins.iteritems(): + for import_name, plugin in self.plugins.items(): try: features_d = defer.maybeDeferred(plugin.getFeatures, profile_key) except AttributeError: @@ -485,14 
+485,14 @@ ret[name] = data else: log.warning( - u"Error while getting features for {name}: {failure}".format( + "Error while getting features for {name}: {failure}".format( name=name, failure=data ) ) ret[name] = {} return ret - d_list.addCallback(buildFeatures, self.plugins.keys()) + d_list.addCallback(buildFeatures, list(self.plugins.keys())) return d_list def getContacts(self, profile_key): @@ -527,10 +527,10 @@ self.memory.purgeProfileSession(profile) def startService(self): - log.info(u"Salut à toi ô mon frère !") + log.info("Salut à toi ô mon frère !") def stopService(self): - log.info(u"Salut aussi à Rantanplan") + log.info("Salut aussi à Rantanplan") return self.pluginsUnload() def run(self): @@ -576,13 +576,13 @@ @return: list of clients """ if not profile_key: - raise exceptions.DataError(_(u"profile_key must not be empty")) + raise exceptions.DataError(_("profile_key must not be empty")) try: profile = self.memory.getProfileName(profile_key, True) except exceptions.ProfileUnknownError: return [] if profile == C.PROF_KEY_ALL: - return self.profiles.values() + return list(self.profiles.values()) elif profile[0] == "@": #  only profile keys can start with "@" raise exceptions.ProfileKeyUnknown return [self.profiles[profile]] @@ -594,9 +594,9 @@ @param name: name of the option @return: unicode representation of the option """ - return unicode(self.memory.getConfig(section, name, "")) + return str(self.memory.getConfig(section, name, "")) - def logErrback(self, failure_, msg=_(u"Unexpected error: {failure_}")): + def logErrback(self, failure_, msg=_("Unexpected error: {failure_}")): """Generic errback logging @param msg(unicode): error message ("failure_" key will be use for format) @@ -610,7 +610,7 @@ def registerNamespace(self, short_name, namespace): """associate a namespace to a short name""" if short_name in self.ns_map: - raise exceptions.ConflictError(u"this short name is already used") + raise exceptions.ConflictError("this short name is already 
used") self.ns_map[short_name] = namespace def getNamespaces(self): @@ -620,7 +620,7 @@ try: return self.ns_map[short_name] except KeyError: - raise exceptions.NotFound(u"namespace {short_name} is not registered" + raise exceptions.NotFound("namespace {short_name} is not registered" .format(short_name=short_name)) def getSessionInfos(self, profile_key): @@ -628,7 +628,7 @@ client = self.getClient(profile_key) data = { "jid": client.jid.full(), - "started": unicode(int(client.started)) + "started": str(int(client.started)) } return defer.succeed(data) @@ -714,9 +714,9 @@ ret = [] for p in plugins: ret.append({ - u"name": p.name, - u"namespace": p.namespace, - u"priority": unicode(p.priority), + "name": p.name, + "namespace": p.namespace, + "priority": str(p.priority), }) return ret @@ -740,7 +740,7 @@ message, subject, mess_type, - {unicode(key): unicode(value) for key, value in extra.items()}, + {str(key): str(value) for key, value in list(extra.items())}, ) def _setPresence(self, to="", show="", statuses=None, profile_key=C.PROF_KEY_NONE): @@ -774,7 +774,7 @@ assert profile to_jid = jid.JID(raw_jid) log.debug( - _(u"subsciption request [%(subs_type)s] for %(jid)s") + _("subsciption request [%(subs_type)s] for %(jid)s") % {"subs_type": subs_type, "jid": to_jid.full()} ) if subs_type == "subscribe": @@ -901,15 +901,15 @@ service_jid = services_jids[idx] if not success: log.warning( - _(u"Can't find features for service {service_jid}, ignoring") + _("Can't find features for service {service_jid}, ignoring") .format(service_jid=service_jid.full())) continue if (identities is not None and not set(infos.identities.keys()).issuperset(identities)): continue found_identities = [ - (cat, type_, name or u"") - for (cat, type_), name in infos.identities.iteritems() + (cat, type_, name or "") + for (cat, type_), name in infos.identities.items() ] found_service[service_jid.full()] = found_identities @@ -960,7 +960,7 @@ full_jid = full_jids[idx] if not success: log.warning( - 
_(u"Can't retrieve {full_jid} infos, ignoring") + _("Can't retrieve {full_jid} infos, ignoring") .format(full_jid=full_jid.full())) continue if infos.features.issuperset(namespaces): @@ -969,8 +969,8 @@ ).issuperset(identities): continue found_identities = [ - (cat, type_, name or u"") - for (cat, type_), name in infos.identities.iteritems() + (cat, type_, name or "") + for (cat, type_), name in infos.identities.items() ] found[full_jid.full()] = found_identities @@ -979,7 +979,7 @@ ## Generic HMI ## def _killAction(self, keep_id, client): - log.debug(u"Killing action {} for timeout".format(keep_id)) + log.debug("Killing action {} for timeout".format(keep_id)) client.actions[keep_id] def actionNew( @@ -998,7 +998,7 @@ Action will be deleted after 30 min. @param profile: %(doc_profile)s """ - id_ = unicode(uuid.uuid4()) + id_ = str(uuid.uuid4()) if keep_id is not None: client = self.getClient(profile) action_timer = reactor.callLater(60 * 30, self._killAction, keep_id, client) @@ -1012,7 +1012,7 @@ @param profile: %(doc_profile)s """ client = self.getClient(profile) - return [action_tuple[:-1] for action_tuple in client.actions.itervalues()] + return [action_tuple[:-1] for action_tuple in client.actions.values()] def registerProgressCb( self, progress_id, callback, metadata=None, profile=C.PROF_KEY_NONE @@ -1022,7 +1022,7 @@ metadata = {} client = self.getClient(profile) if progress_id in client._progress_cb: - raise exceptions.ConflictError(u"Progress ID is not unique !") + raise exceptions.ConflictError("Progress ID is not unique !") client._progress_cb[progress_id] = (callback, metadata) def removeProgressCb(self, progress_id, profile): @@ -1031,11 +1031,11 @@ try: del client._progress_cb[progress_id] except KeyError: - log.error(_(u"Trying to remove an unknow progress callback")) + log.error(_("Trying to remove an unknow progress callback")) def _progressGet(self, progress_id, profile): data = self.progressGet(progress_id, profile) - return {k: unicode(v) for k, 
v in data.iteritems()} + return {k: str(v) for k, v in data.items()} def progressGet(self, progress_id, profile): """Return a dict with progress information @@ -1057,10 +1057,10 @@ def _progressGetAll(self, profile_key): progress_all = self.progressGetAll(profile_key) - for profile, progress_dict in progress_all.iteritems(): - for progress_id, data in progress_dict.iteritems(): - for key, value in data.iteritems(): - data[key] = unicode(value) + for profile, progress_dict in progress_all.items(): + for progress_id, data in progress_dict.items(): + for key, value in data.items(): + data[key] = str(value) return progress_all def progressGetAllMetadata(self, profile_key): @@ -1082,7 +1082,7 @@ for ( progress_id, (__, progress_metadata), - ) in client._progress_cb.iteritems(): + ) in client._progress_cb.items(): progress_dict[progress_id] = progress_metadata return progress_all @@ -1101,7 +1101,7 @@ profile = client.profile progress_dict = {} progress_all[profile] = progress_dict - for progress_id, (progress_cb, __) in client._progress_cb.iteritems(): + for progress_id, (progress_cb, __) in client._progress_cb.items(): progress_dict[progress_id] = progress_cb(progress_id, profile) return progress_all @@ -1121,7 +1121,7 @@ callback_id = str(uuid.uuid4()) else: if callback_id in self._cb_map: - raise exceptions.ConflictError(_(u"id already registered")) + raise exceptions.ConflictError(_("id already registered")) self._cb_map[callback_id] = (callback, args, kwargs) if "one_shot" in kwargs: # One Shot callback are removed after 30 min @@ -1163,7 +1163,7 @@ profile = self.memory.getProfileName(profile_key) if not profile: raise exceptions.ProfileUnknownError( - _(u"trying to launch action with a non-existant profile") + _("trying to launch action with a non-existant profile") ) else: profile = client.profile @@ -1179,7 +1179,7 @@ try: callback, args, kwargs = self._cb_map[callback_id] except KeyError: - raise exceptions.DataError(u"Unknown callback id 
{}".format(callback_id)) + raise exceptions.DataError("Unknown callback id {}".format(callback_id)) if kwargs.get("with_data", False): if data is None: @@ -1210,7 +1210,7 @@ def importMenu(self, path, callback, security_limit=C.NO_SECURITY_LIMIT, help_string="", type_=C.MENU_GLOBAL): - """register a new menu for frontends + r"""register a new menu for frontends @param path(iterable[unicode]): path to go to the menu (category/subcategory/.../item) (e.g.: ("File", "Open")) @@ -1245,7 +1245,7 @@ if callable(callback): callback_id = self.registerCallback(callback, with_data=True) - elif isinstance(callback, basestring): + elif isinstance(callback, str): # The callback is already registered callback_id = callback try: @@ -1256,7 +1256,7 @@ else: raise exceptions.DataError("Unknown callback type") - for menu_data in self._menus.itervalues(): + for menu_data in self._menus.values(): if menu_data["path"] == path and menu_data["type"] == type_: raise exceptions.ConflictError( _("A menu with the same path and type already exists") @@ -1267,7 +1267,7 @@ if menu_key in self._menus_paths: raise exceptions.ConflictError( - u"this menu path is already used: {path} ({menu_key})".format( + "this menu path is already used: {path} ({menu_key})".format( path=path_canonical, menu_key=menu_key ) ) @@ -1300,7 +1300,7 @@ - help_url: link to a page with more complete documentation (TODO) """ ret = [] - for menu_id, menu_data in self._menus.iteritems(): + for menu_id, menu_data in self._menus.items(): type_ = menu_data["type"] path = menu_data["path"] menu_security_limit = menu_data["security_limit"] @@ -1339,7 +1339,7 @@ callback_id = self._menus_paths[menu_key] except KeyError: raise exceptions.NotFound( - u"Can't find menu {path} ({menu_type})".format( + "Can't find menu {path} ({menu_type})".format( path=canonical_path, menu_type=menu_type ) ) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/core/xmpp.py --- a/sat/core/xmpp.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/core/xmpp.py Tue Aug 13 
19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -40,18 +40,18 @@ from sat.memory import encryption from sat.memory import persistent from sat.tools import xml_tools -from zope.interface import implements +from zope.interface import implementer log = getLogger(__name__) -NS_X_DATA = u"jabber:x:data" -NS_DISCO_INFO = u"http://jabber.org/protocol/disco#info" -NS_XML_ELEMENT = u"urn:xmpp:xml-element" -NS_ROSTER_VER = u"urn:xmpp:features:rosterver" +NS_X_DATA = "jabber:x:data" +NS_DISCO_INFO = "http://jabber.org/protocol/disco#info" +NS_XML_ELEMENT = "urn:xmpp:xml-element" +NS_ROSTER_VER = "urn:xmpp:features:rosterver" # we use 2 "@" which is illegal in a jid, to be sure we are not mixing keys # with roster jids -ROSTER_VER_KEY = u"@version@" +ROSTER_VER_KEY = "@version@" class SatXMPPEntity(object): @@ -65,9 +65,9 @@ clientConnectionFailed_ori = factory.clientConnectionFailed clientConnectionLost_ori = factory.clientConnectionLost factory.clientConnectionFailed = partial( - self.connectionTerminated, term_type=u"failed", cb=clientConnectionFailed_ori) + self.connectionTerminated, term_type="failed", cb=clientConnectionFailed_ori) factory.clientConnectionLost = partial( - self.connectionTerminated, term_type=u"lost", cb=clientConnectionLost_ori) + self.connectionTerminated, term_type="lost", cb=clientConnectionLost_ori) factory.maxRetries = max_retries factory.maxDelay = 30 @@ -87,7 +87,7 @@ self.encryption = encryption.EncryptionHandler(self) def __unicode__(self): - return u"Client instance for profile {profile}".format(profile=self.profile) + return "Client instance for profile {profile}".format(profile=self.profile) def __str__(self): return self.__unicode__.encode('utf-8') @@ -206,11 +206,11 @@ def logPluginResults(results): all_succeed = all([success for success, result in results]) if not all_succeed: - log.error(_(u"Plugins initialisation error")) + log.error(_("Plugins 
initialisation error")) for idx, (success, result) in enumerate(results): if not success: log.error( - u"error (plugin %(name)s): %(failure)s" + "error (plugin %(name)s): %(failure)s" % { "name": plugin_conn_cb[idx][0]._info["import_name"], "failure": result, @@ -226,11 +226,11 @@ self._connected_d = None def _disconnectionEb(self, failure_): - log.error(_(u"Error while disconnecting: {}".format(failure_))) + log.error(_("Error while disconnecting: {}".format(failure_))) def _authd(self, xmlstream): super(SatXMPPEntity, self)._authd(xmlstream) - log.debug(_(u"{profile} identified").format(profile=self.profile)) + log.debug(_("{profile} identified").format(profile=self.profile)) self.streamInitialized() def _finish_connection(self, __): @@ -238,7 +238,7 @@ def streamInitialized(self): """Called after _authd""" - log.debug(_(u"XML stream is initialized")) + log.debug(_("XML stream is initialized")) if not self.host_app.trigger.point("xml_init", self): return self.postStreamInit() @@ -246,7 +246,7 @@ def postStreamInit(self): """Workflow after stream initalisation.""" log.info( - _(u"********** [{profile}] CONNECTED **********").format(profile=self.profile) + _("********** [{profile}] CONNECTED **********").format(profile=self.profile) ) # the following Deferred is used to know when we are connected @@ -273,7 +273,7 @@ def initializationFailed(self, reason): log.error( _( - u"ERROR: XMPP connection failed for profile '%(profile)s': %(reason)s" + "ERROR: XMPP connection failed for profile '%(profile)s': %(reason)s" % {"profile": self.profile, "reason": reason} ) ) @@ -306,17 +306,17 @@ if reason is not None and not isinstance(reason.value, internet_error.ConnectionDone): try: - reason_str = unicode(reason.value) + reason_str = str(reason.value) except Exception: # FIXME: workaround for Android were p4a strips docstrings # while Twisted use docstring in __str__ # TODO: create a ticket upstream, Twisted should work when optimization # is used - reason_str = 
unicode(reason.value.__class__) - log.warning(u"Connection {term_type}: {reason}".format( + reason_str = str(reason.value.__class__) + log.warning("Connection {term_type}: {reason}".format( term_type = term_type, reason=reason_str)) - if not self.host_app.trigger.point(u"connection_" + term_type, connector, reason): + if not self.host_app.trigger.point("connection_" + term_type, connector, reason): return return cb(connector, reason) @@ -327,7 +327,7 @@ Retrying is disabled too, as it makes no sense to try without network, and it may use resources (notably battery on mobiles). """ - log.info(_(u"stopping connection because of network disabled")) + log.info(_("stopping connection because of network disabled")) self.factory.continueTrying = 0 self._network_disabled = True if self.xmlstream is not None: @@ -344,13 +344,13 @@ except AttributeError: # connection has not been stopped by networkDisabled # we don't have to restart it - log.debug(u"no connection to restart") + log.debug("no connection to restart") return else: del self._network_disabled if not network_disabled: - raise exceptions.InternalError(u"network_disabled should be True") - log.info(_(u"network is available, trying to connect")) + raise exceptions.InternalError("network_disabled should be True") + log.info(_("network is available, trying to connect")) # we want to be sure to start fresh self.factory.resetDelay() # we have a saved connector, meaning the connection has been stopped previously @@ -378,23 +378,23 @@ self.profile ) # and we remove references to this client log.info( - _(u"********** [{profile}] DISCONNECTED **********").format( + _("********** [{profile}] DISCONNECTED **********").format( profile=self.profile ) ) if not self.conn_deferred.called: if reason is None: - err = error.StreamError(u"Server unexpectedly closed the connection") + err = error.StreamError("Server unexpectedly closed the connection") else: err = reason try: if err.value.args[0][0][2] == "certificate verify failed": 
err = exceptions.InvalidCertificate( - _(u"Your server certificate is not valid " - u"(its identity can't be checked).\n\n" - u"This should never happen and may indicate that " - u"somebody is trying to spy on you.\n" - u"Please contact your server administrator.")) + _("Your server certificate is not valid " + "(its identity can't be checked).\n\n" + "This should never happen and may indicate that " + "somebody is trying to spy on you.\n" + "Please contact your server administrator.")) self.factory.stopTrying() try: # with invalid certificate, we should not retry to connect @@ -434,7 +434,7 @@ def entityDisconnect(self): if not self.host_app.trigger.point("disconnecting", self): return - log.info(_(u"Disconnecting...")) + log.info(_("Disconnecting...")) self.stopService() if self._connected_d is not None: return self._connected_d @@ -443,7 +443,7 @@ ## sending ## - def IQ(self, type_=u"set", timeout=60): + def IQ(self, type_="set", timeout=60): """shortcut to create an IQ element managing deferred @param type_(unicode): IQ type ('set' or 'get') @@ -486,11 +486,11 @@ if data["uid"]: # key must be present but can be set to '' # by a plugin to avoid id on purpose message_elt["id"] = data["uid"] - for lang, subject in data["subject"].iteritems(): + for lang, subject in data["subject"].items(): subject_elt = message_elt.addElement("subject", content=subject) if lang: subject_elt[(C.NS_XML, "lang")] = lang - for lang, message in data["message"].iteritems(): + for lang, message in data["message"].items(): body_elt = message_elt.addElement("body", content=message) if lang: body_elt[(C.NS_XML, "lang")] = lang @@ -499,7 +499,7 @@ except KeyError: if "thread_parent" in data["extra"]: raise exceptions.InternalError( - u"thread_parent found while there is not associated thread" + "thread_parent found while there is not associated thread" ) else: thread_elt = message_elt.addElement("thread", content=thread) @@ -546,7 +546,7 @@ data = { # dict is similar to the one used in 
client.onMessage "from": self.jid, "to": to_jid, - "uid": uid or unicode(uuid.uuid4()), + "uid": uid or str(uuid.uuid4()), "message": message, "subject": subject, "type": mess_type, @@ -599,15 +599,15 @@ ): return defer.succeed(None) - log.debug(_(u"Sending message (type {type}, to {to})") + log.debug(_("Sending message (type {type}, to {to})") .format(type=data["type"], to=to_jid.full())) pre_xml_treatments.addCallback(lambda __: self.generateMessageXML(data)) pre_xml_treatments.chainDeferred(post_xml_treatments) post_xml_treatments.addCallback(self.sendMessageData) if send_only: - log.debug(_(u"Triggers, storage and echo have been inhibited by the " - u"'send_only' parameter")) + log.debug(_("Triggers, storage and echo have been inhibited by the " + "'send_only' parameter")) else: self.addPostXmlCallbacks(post_xml_treatments) post_xml_treatments.addErrback(self._cancelErrorTrap) @@ -625,22 +625,22 @@ @param data: message data dictionnary @param client: profile's client """ - if data[u"type"] != C.MESS_TYPE_GROUPCHAT: + if data["type"] != C.MESS_TYPE_GROUPCHAT: # we don't add groupchat message to history, as we get them back # and they will be added then - if data[u"message"] or data[u"subject"]: # we need a message to store + if data["message"] or data["subject"]: # we need a message to store self.host_app.memory.addToHistory(self, data) else: log.warning( - u"No message found" + "No message found" ) # empty body should be managed by plugins before this point return data def messageGetBridgeArgs(self, data): """Generate args to use with bridge from data dict""" - return (data[u"uid"], data[u"timestamp"], data[u"from"].full(), - data[u"to"].full(), data[u"message"], data[u"subject"], - data[u"type"], data[u"extra"]) + return (data["uid"], data["timestamp"], data["from"].full(), + data["to"].full(), data["message"], data["subject"], + data["type"], data["extra"]) def messageSendToBridge(self, data): @@ -649,10 +649,10 @@ @param data: message data dictionnary @param 
client: profile's client """ - if data[u"type"] != C.MESS_TYPE_GROUPCHAT: + if data["type"] != C.MESS_TYPE_GROUPCHAT: # we don't send groupchat message to bridge, as we get them back # and they will be added the - if (data[u"message"] or data[u"subject"]): # we need a message to send + if (data["message"] or data["subject"]): # we need a message to send # something # We send back the message, so all frontends are aware of it @@ -661,12 +661,12 @@ profile=self.profile ) else: - log.warning(_(u"No message found")) + log.warning(_("No message found")) return data +@implementer(iwokkel.IDisco) class SatXMPPClient(SatXMPPEntity, wokkel_client.XMPPClient): - implements(iwokkel.IDisco) trigger_suffix = "" is_component = False @@ -681,34 +681,34 @@ # with a web frontend, # etc., we should implement a way to dynamically update identities through the # bridge - self.identities = [disco.DiscoIdentity(u"client", u"pc", C.APP_NAME)] + self.identities = [disco.DiscoIdentity("client", "pc", C.APP_NAME)] if sys.platform == "android": # FIXME: temporary hack as SRV is not working on android # TODO: remove this hack and fix SRV - log.info(u"FIXME: Android hack, ignoring SRV") + log.info("FIXME: Android hack, ignoring SRV") if host is None: host = user_jid.host # for now we consider Android devices to be always phones - self.identities = [disco.DiscoIdentity(u"client", u"phone", C.APP_NAME)] + self.identities = [disco.DiscoIdentity("client", "phone", C.APP_NAME)] hosts_map = host_app.memory.getConfig(None, "hosts_dict", {}) if host is None and user_jid.host in hosts_map: host_data = hosts_map[user_jid.host] - if isinstance(host_data, basestring): + if isinstance(host_data, str): host = host_data elif isinstance(host_data, dict): - if u"host" in host_data: - host = host_data[u"host"] - if u"port" in host_data: - port = host_data[u"port"] + if "host" in host_data: + host = host_data["host"] + if "port" in host_data: + port = host_data["port"] else: log.warning( - _(u"invalid data used 
for host: {data}").format(data=host_data) + _("invalid data used for host: {data}").format(data=host_data) ) host_data = None if host_data is not None: log.info( - u"using {host}:{port} for host {host_ori} as requested in config" + "using {host}:{port} for host {host_ori} as requested in config" .format(host_ori=user_jid.host, host=host, port=port) ) @@ -717,22 +717,22 @@ wokkel_client.XMPPClient.__init__( self, user_jid, password, host or None, port or C.XMPP_C2S_PORT, - check_certificate = self.check_certificate + # check_certificate = self.check_certificate # FIXME: currently disabled with Python 3 port ) SatXMPPEntity.__init__(self, host_app, profile, max_retries) if not self.check_certificate: - msg = (_(u"Certificate validation is deactivated, this is unsecure and " - u"somebody may be spying on you. If you have no good reason to disable " - u"certificate validation, please activate \"Check certificate\" in your " - u"settings in \"Connection\" tab.")) - xml_tools.quickNote(host_app, self, msg, _(u"Security notice"), + msg = (_("Certificate validation is deactivated, this is unsecure and " + "somebody may be spying on you. 
If you have no good reason to disable " + "certificate validation, please activate \"Check certificate\" in your " + "settings in \"Connection\" tab.")) + xml_tools.quickNote(host_app, self, msg, _("Security notice"), level = C.XMLUI_DATA_LVL_WARNING) def _getPluginsList(self): - for p in self.host_app.plugins.itervalues(): - if C.PLUG_MODE_CLIENT in p._info[u"modes"]: + for p in self.host_app.plugins.values(): + if C.PLUG_MODE_CLIENT in p._info["modes"]: yield p def _createSubProtocols(self): @@ -795,7 +795,7 @@ # This trigger point can't cancel the method yield self.host_app.trigger.asyncPoint("sendMessageData", self, mess_data, triggers_no_cancel=True) - self.send(mess_data[u"xml"]) + self.send(mess_data["xml"]) defer.returnValue(mess_data) def feedback(self, to_jid, message, extra=None): @@ -811,11 +811,11 @@ if extra is None: extra = {} self.host_app.bridge.messageNew( - uid=unicode(uuid.uuid4()), + uid=str(uuid.uuid4()), timestamp=time.time(), from_jid=self.jid.full(), to_jid=to_jid.full(), - message={u"": message}, + message={"": message}, subject={}, mess_type=C.MESS_TYPE_INFO, extra=extra, @@ -827,6 +827,7 @@ d.addCallback(lambda __: super(SatXMPPClient, self)._finish_connection(__)) +@implementer(iwokkel.IDisco) class SatXMPPComponent(SatXMPPEntity, component.Component): """XMPP component @@ -835,7 +836,6 @@ Component need to instantiate MessageProtocol itself """ - implements(iwokkel.IDisco) trigger_suffix = ( "Component" ) # used for to distinguish some trigger points set in SatXMPPEntity @@ -857,19 +857,19 @@ self.entry_plugin = host_app.plugins[entry_point] except KeyError: raise exceptions.NotFound( - _(u"The requested entry point ({entry_point}) is not available").format( + _("The requested entry point ({entry_point}) is not available").format( entry_point=entry_point ) ) - self.identities = [disco.DiscoIdentity(u"component", u"generic", C.APP_NAME)] + self.identities = [disco.DiscoIdentity("component", "generic", C.APP_NAME)] # jid is set 
automatically on bind by Twisted for Client, but not for Component self.jid = component_jid if host is None: try: - host = component_jid.host.split(u".", 1)[1] + host = component_jid.host.split(".", 1)[1] except IndexError: - raise ValueError(u"Can't guess host from jid, please specify a host") + raise ValueError("Can't guess host from jid, please specify a host") # XXX: component.Component expect unicode jid, while Client expect jid.JID. # this is not consistent, so we use jid.JID for SatXMPP* component.Component.__init__(self, host, port, component_jid.full(), password) @@ -890,20 +890,20 @@ @raise KeyError: one plugin should be present in self.host_app.plugins but it is not """ - if C.PLUG_MODE_COMPONENT not in current._info[u"modes"]: + if C.PLUG_MODE_COMPONENT not in current._info["modes"]: if not required: return else: log.error( _( - u"Plugin {current_name} is needed for {entry_name}, " - u"but it doesn't handle component mode" + "Plugin {current_name} is needed for {entry_name}, " + "but it doesn't handle component mode" ).format( - current_name=current._info[u"import_name"], - entry_name=self.entry_plugin._info[u"import_name"], + current_name=current._info["import_name"], + entry_name=self.entry_plugin._info["import_name"], ) ) - raise exceptions.InternalError(_(u"invalid plugin mode")) + raise exceptions.InternalError(_("invalid plugin mode")) for import_name in current._info.get(C.PI_DEPENDENCIES, []): # plugins are already loaded as dependencies @@ -960,9 +960,9 @@ if None, mapping will not be done @return(dict): message data """ - if message_elt.name != u"message": + if message_elt.name != "message": log.warning(_( - u"parseMessage used with a non stanza, ignoring: {xml}" + "parseMessage used with a non stanza, ignoring: {xml}" .format(xml=message_elt.toXml()))) return {} @@ -974,31 +974,31 @@ c.uri = C.NS_CLIENT elif message_elt.uri != C.NS_CLIENT: log.warning(_( - u"received with a wrong namespace: {xml}" + "received with a wrong namespace: {xml}" 
.format(xml=message_elt.toXml()))) client = self.parent - if not message_elt.hasAttribute(u'to'): + if not message_elt.hasAttribute('to'): message_elt['to'] = client.jid.full() message = {} subject = {} extra = {} data = { - u"from": jid.JID(message_elt["from"]), - u"to": jid.JID(message_elt["to"]), - u"uid": message_elt.getAttribute( - u"uid", unicode(uuid.uuid4()) + "from": jid.JID(message_elt["from"]), + "to": jid.JID(message_elt["to"]), + "uid": message_elt.getAttribute( + "uid", str(uuid.uuid4()) ), # XXX: uid is not a standard attribute but may be added by plugins - u"message": message, - u"subject": subject, - u"type": message_elt.getAttribute(u"type", u"normal"), - u"extra": extra, + "message": message, + "subject": subject, + "type": message_elt.getAttribute("type", "normal"), + "extra": extra, } try: - message_id = data[u"extra"][u"message_id"] = message_elt[u"id"] + message_id = data["extra"]["message_id"] = message_elt["id"] except KeyError: pass else: @@ -1006,11 +1006,11 @@ # message for e in message_elt.elements(C.NS_CLIENT, "body"): - message[e.getAttribute((C.NS_XML, "lang"), "")] = unicode(e) + message[e.getAttribute((C.NS_XML, "lang"), "")] = str(e) # subject for e in message_elt.elements(C.NS_CLIENT, "subject"): - subject[e.getAttribute((C.NS_XML, "lang"), "")] = unicode(e) + subject[e.getAttribute((C.NS_XML, "lang"), "")] = str(e) # delay and timestamp try: @@ -1018,12 +1018,12 @@ except AttributeError: # message_elt._received_timestamp should have been set in onMessage # but if parseMessage is called directly, it can be missing - log.debug(u"missing received timestamp for {message_elt}".format( + log.debug("missing received timestamp for {message_elt}".format( message_elt=message_elt)) received_timestamp = time.time() try: - delay_elt = message_elt.elements(delay.NS_DELAY, "delay").next() + delay_elt = next(message_elt.elements(delay.NS_DELAY, "delay")) except StopIteration: data["timestamp"] = received_timestamp else: @@ -1060,7 +1060,7 @@ 
client = self.parent if not "from" in message_elt.attributes: message_elt["from"] = client.jid.host - log.debug(_(u"got message from: {from_}").format(from_=message_elt["from"])) + log.debug(_("got message from: {from_}").format(from_=message_elt["from"])) # plugin can add their treatments to this deferred post_treat = defer.Deferred() @@ -1077,24 +1077,24 @@ return data def addToHistory(self, data): - if data.pop(u"history", None) == C.HISTORY_SKIP: - log.info(u"history is skipped as requested") - data[u"extra"][u"history"] = C.HISTORY_SKIP + if data.pop("history", None) == C.HISTORY_SKIP: + log.info("history is skipped as requested") + data["extra"]["history"] = C.HISTORY_SKIP else: - if data[u"message"] or data[u"subject"]: # we need a message to store + if data["message"] or data["subject"]: # we need a message to store return self.host.memory.addToHistory(self.parent, data) else: - log.debug(u"not storing empty message to history: {data}" + log.debug("not storing empty message to history: {data}" .format(data=data)) def bridgeSignal(self, __, data): try: - data["extra"]["received_timestamp"] = unicode(data["received_timestamp"]) + data["extra"]["received_timestamp"] = str(data["received_timestamp"]) data["extra"]["delay_sender"] = data["delay_sender"] except KeyError: pass if C.MESS_KEY_ENCRYPTION in data: - data[u"extra"][u"encrypted"] = C.BOOL_TRUE + data["extra"]["encrypted"] = C.BOOL_TRUE if data is not None: if data["message"] or data["subject"] or data["type"] == C.MESS_TYPE_INFO: self.host.bridge.messageNew( @@ -1109,7 +1109,7 @@ profile=self.parent.profile, ) else: - log.debug(u"Discarding bridge signal for empty message: {data}".format( + log.debug("Discarding bridge signal for empty message: {data}".format( data=data)) return data @@ -1131,7 +1131,7 @@ @property def versioning(self): """True if server support roster versioning""" - return (NS_ROSTER_VER, u'ver') in self.parent.xmlstream.features + return (NS_ROSTER_VER, 'ver') in 
self.parent.xmlstream.features @property def roster_cache(self): @@ -1148,23 +1148,23 @@ item must be already registered in self._jids before this method is called @param item (RosterIem): item added """ - log.debug(u"registering item: {}".format(item.entity.full())) + log.debug("registering item: {}".format(item.entity.full())) if item.entity.resource: log.warning( - u"Received a roster item with a resource, this is not common but not " - u"restricted by RFC 6121, this case may be not well tested." + "Received a roster item with a resource, this is not common but not " + "restricted by RFC 6121, this case may be not well tested." ) if not item.subscriptionTo: if not item.subscriptionFrom: log.info( - _(u"There's no subscription between you and [{}]!").format( + _("There's no subscription between you and [{}]!").format( item.entity.full() ) ) else: - log.info(_(u"You are not subscribed to [{}]!").format(item.entity.full())) + log.info(_("You are not subscribed to [{}]!").format(item.entity.full())) if not item.subscriptionFrom: - log.info(_(u"[{}] is not subscribed to you!").format(item.entity.full())) + log.info(_("[{}] is not subscribed to you!").format(item.entity.full())) for group in item.groups: self._groups.setdefault(group, set()).add(item.entity) @@ -1178,7 +1178,7 @@ roster_cache = self.roster_cache yield roster_cache.clear() roster_cache[ROSTER_VER_KEY] = version - for roster_jid, roster_item in self._jids.iteritems(): + for roster_jid, roster_item in self._jids.items(): roster_jid_s = roster_jid.full() roster_item_elt = roster_item.toElement().toXml() roster_cache[roster_jid_s] = roster_item_elt @@ -1200,19 +1200,19 @@ def requestRoster(self): """Ask the server for Roster list """ if self.versioning: - log.info(_(u"our server support roster versioning, we use it")) + log.info(_("our server support roster versioning, we use it")) roster_cache = self.roster_cache yield roster_cache.load() try: version = roster_cache[ROSTER_VER_KEY] except KeyError: - 
log.info(_(u"no roster in cache, we start fresh")) + log.info(_("no roster in cache, we start fresh")) # u"" means we use versioning without valid roster in cache - version = u"" + version = "" else: - log.info(_(u"We have roster v{version} in cache").format(version=version)) + log.info(_("We have roster v{version} in cache").format(version=version)) # we deserialise cached roster to our local cache - for roster_jid_s, roster_item_elt_s in roster_cache.iteritems(): + for roster_jid_s, roster_item_elt_s in roster_cache.items(): if roster_jid_s == ROSTER_VER_KEY: continue roster_jid = jid.JID(roster_jid_s) @@ -1221,26 +1221,26 @@ self._jids[roster_jid] = roster_item self._registerItem(roster_item) else: - log.warning(_(u"our server doesn't support roster versioning")) + log.warning(_("our server doesn't support roster versioning")) version = None log.debug("requesting roster") roster = yield self.getRoster(version=version) if roster is None: - log.debug(u"empty roster result received, we'll get roster item with roster " - u"pushes") + log.debug("empty roster result received, we'll get roster item with roster " + "pushes") else: # a full roster is received self._groups.clear() self._jids = roster - for item in roster.itervalues(): + for item in roster.values(): if not item.subscriptionTo and not item.subscriptionFrom and not item.ask: # XXX: current behaviour: we don't want contact in our roster list # if there is no presence subscription # may change in the future log.info( - u"Removing contact {} from roster because there is no presence " - u"subscription".format( + "Removing contact {} from roster because there is no presence " + "subscription".format( item.jid ) ) @@ -1267,9 +1267,9 @@ @return: dictionary of attributes """ item_attr = { - "to": unicode(item.subscriptionTo), - "from": unicode(item.subscriptionFrom), - "ask": unicode(item.ask), + "to": str(item.subscriptionTo), + "from": str(item.subscriptionFrom), + "ask": str(item.ask), } if item.name: 
item_attr["name"] = item.name @@ -1278,7 +1278,7 @@ def setReceived(self, request): item = request.item entity = item.entity - log.info(_(u"adding {entity} to roster").format(entity=entity.full())) + log.info(_("adding {entity} to roster").format(entity=entity.full())) if request.version is not None: # we update the cache in storage roster_cache = self.roster_cache @@ -1302,7 +1302,7 @@ def removeReceived(self, request): entity = request.item.entity - log.info(_(u"removing {entity} from roster").format(entity=entity.full())) + log.info(_("removing {entity} from roster").format(entity=entity.full())) if request.version is not None: # we update the cache in storage roster_cache = self.roster_cache @@ -1319,7 +1319,7 @@ item = self._jids.pop(entity) except KeyError: log.error( - u"Received a roster remove event for an item not in cache ({})".format( + "Received a roster remove event for an item not in cache ({})".format( entity ) ) @@ -1332,8 +1332,8 @@ del self._groups[group] except KeyError: log.warning( - u"there is no cache for the group [{group}] of the removed roster " - u"item [{jid_}]".format(group=group, jid=entity) + "there is no cache for the group [{group}] of the removed roster " + "item [{jid_}]".format(group=group, jid=entity) ) # then we send the bridge signal @@ -1341,7 +1341,7 @@ def getGroups(self): """Return a list of groups""" - return self._groups.keys() + return list(self._groups.keys()) def getItem(self, entity_jid): """Return RosterItem for a given jid @@ -1354,7 +1354,7 @@ def getJids(self): """Return all jids of the roster""" - return self._jids.keys() + return list(self._jids.keys()) def isJidInRoster(self, entity_jid): """Return True if jid is in roster""" @@ -1370,7 +1370,7 @@ def getItems(self): """Return all items of the roster""" - return self._jids.values() + return list(self._jids.values()) def getJidsFromGroup(self, group): try: @@ -1398,7 +1398,7 @@ jids.update(self.getJidsFromGroup(group)) return jids else: - raise 
ValueError(u"Unexpected type_ {}".format(type_)) + raise ValueError("Unexpected type_ {}".format(type_)) def getNick(self, entity_jid): """Return a nick name for an entity @@ -1447,7 +1447,7 @@ def unavailableReceived(self, entity, statuses=None): log.debug( - _(u"presence update for [%(entity)s] (unavailable, statuses=%(statuses)s)") + _("presence update for [%(entity)s] (unavailable, statuses=%(statuses)s)") % {"entity": entity, C.PRESENCE_STATUSES: statuses} ) @@ -1539,16 +1539,16 @@ self.host.memory.delWaitingSub(entity.userhost(), self.parent.profile) def subscribedReceived(self, entity): - log.debug(_(u"subscription approved for [%s]") % entity.userhost()) + log.debug(_("subscription approved for [%s]") % entity.userhost()) self.host.bridge.subscribe("subscribed", entity.userhost(), self.parent.profile) def unsubscribedReceived(self, entity): - log.debug(_(u"unsubscription confirmed for [%s]") % entity.userhost()) + log.debug(_("unsubscription confirmed for [%s]") % entity.userhost()) self.host.bridge.subscribe("unsubscribed", entity.userhost(), self.parent.profile) @defer.inlineCallbacks def subscribeReceived(self, entity): - log.debug(_(u"subscription request from [%s]") % entity.userhost()) + log.debug(_("subscription request from [%s]") % entity.userhost()) yield self.parent.roster.got_roster item = self.parent.roster.getItem(entity) if item and item.subscriptionTo: @@ -1566,7 +1566,7 @@ @defer.inlineCallbacks def unsubscribeReceived(self, entity): - log.debug(_(u"unsubscription asked for [%s]") % entity.userhost()) + log.debug(_("unsubscription asked for [%s]") % entity.userhost()) yield self.parent.roster.got_roster item = self.parent.roster.getItem(entity) if item and item.subscriptionFrom: # we automatically remove contact @@ -1575,8 +1575,8 @@ self.host.bridge.subscribe("unsubscribe", entity.userhost(), self.parent.profile) +@implementer(iwokkel.IDisco) class SatDiscoProtocol(disco.DiscoClientProtocol): - implements(iwokkel.IDisco) def __init__(self, 
host): disco.DiscoClientProtocol.__init__(self) @@ -1599,7 +1599,7 @@ def iqFallback(self, iq): if iq.handled is True: return - log.debug(u"iqFallback: xml = [%s]" % (iq.toXml())) + log.debug("iqFallback: xml = [%s]" % (iq.toXml())) generic.FallbackHandler.iqFallback(self, iq) @@ -1615,9 +1615,9 @@ return generic.VersionHandler.getDiscoInfo(self, requestor, target, None) +@implementer(iwokkel.IDisco) class SatIdentityHandler(XMPPHandler): """Manage disco Identity of SàT.""" - implements(iwokkel.IDisco) # TODO: dynamic identity update (see docstring). Note that a XMPP entity can have # several identities diff -r ff5bcb12ae60 -r ab2696e34d29 sat/memory/cache.py --- a/sat/memory/cache.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/cache.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -23,7 +23,7 @@ from sat.tools.common import regex from sat.core import exceptions from sat.core.constants import Const as C -import cPickle as pickle +import pickle as pickle import mimetypes import os.path import time @@ -42,9 +42,9 @@ self.profile = profile path_elts = [host.memory.getConfig("", "local_dir"), C.CACHE_DIR] if profile: - path_elts.extend([u"profiles", regex.pathEscape(profile)]) + path_elts.extend(["profiles", regex.pathEscape(profile)]) else: - path_elts.append(u"common") + path_elts.append("common") self.cache_dir = os.path.join(*path_elts) if not os.path.exists(self.cache_dir): @@ -55,11 +55,11 @@ @param filename(unicode): cached file name (cache data or actual file) """ - if not filename or u"/" in filename: + if not filename or "/" in filename: log.error( - u"invalid char found in file name, hack attempt? name:{}".format(filename) + "invalid char found in file name, hack attempt? 
name:{}".format(filename) ) - raise exceptions.DataError(u"Invalid char found") + raise exceptions.DataError("Invalid char found") return os.path.join(self.cache_dir, filename) def getMetadata(self, uid): @@ -73,7 +73,7 @@ uid = uid.strip() if not uid: - raise exceptions.InternalError(u"uid must not be empty") + raise exceptions.InternalError("uid must not be empty") cache_url = self.getPath(uid) if not os.path.exists(cache_url): return None @@ -82,20 +82,20 @@ with open(cache_url, "rb") as f: cache_data = pickle.load(f) except IOError: - log.warning(u"can't read cache at {}".format(cache_url)) + log.warning("can't read cache at {}".format(cache_url)) return None except pickle.UnpicklingError: - log.warning(u"invalid cache found at {}".format(cache_url)) + log.warning("invalid cache found at {}".format(cache_url)) return None try: eol = cache_data["eol"] except KeyError: - log.warning(u"no End Of Life found for cached file {}".format(uid)) + log.warning("no End Of Life found for cached file {}".format(uid)) eol = 0 if eol < time.time(): log.debug( - u"removing expired cache (expired for {}s)".format(time.time() - eol) + "removing expired cache (expired for {}s)".format(time.time() - eol) ) return None @@ -135,11 +135,11 @@ ext = mimetypes.guess_extension(mime_type, strict=False) if ext is None: log.warning( - u"can't find extension for MIME type {}".format(mime_type) + "can't find extension for MIME type {}".format(mime_type) ) ext = DEFAULT_EXT - elif ext == u".jpe": - ext = u".jpg" + elif ext == ".jpe": + ext = ".jpg" else: ext = DEFAULT_EXT mime_type = None @@ -147,10 +147,10 @@ if max_age is None: max_age = C.DEFAULT_MAX_AGE cache_data = { - u"source": source, - u"filename": filename, - u"eol": int(time.time()) + max_age, - u"mime_type": mime_type, + "source": source, + "filename": filename, + "eol": int(time.time()) + max_age, + "mime_type": mime_type, } file_path = self.getPath(filename) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/memory/crypto.py --- 
a/sat/memory/crypto.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/crypto.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -92,7 +92,7 @@ # a decrypted empty value and a decryption failure... both return # the empty value. Fortunately, we detect empty passwords beforehand # thanks to the "leave_empty" parameter which is used by default. - d.addCallback(lambda text: text.decode("utf-8") if text else None) + d.addCallback(lambda text: text if text else None) return d @classmethod @@ -114,11 +114,12 @@ def pad(self, s): """Method from http://stackoverflow.com/a/12525165""" bs = BlockCipher.BLOCK_SIZE - return s + (bs - len(s) % bs) * chr(bs - len(s) % bs) + return s + (bs - len(s) % bs) * (chr(bs - len(s) % bs)).encode('utf-8') @classmethod def unpad(self, s): """Method from http://stackoverflow.com/a/12525165""" + s = s.decode('utf-8') return s[0 : -ord(s[-1])] @@ -136,7 +137,7 @@ @return: Deferred: base-64 encoded str """ if leave_empty and password == "": - return succeed(password) + return succeed(b"") salt = ( b64decode(salt)[: PasswordHasher.SALT_LEN] if salt @@ -147,6 +148,11 @@ return d @classmethod + def compare_hash(cls, hashed_attempt, hashed): + assert isinstance(hashed, bytes) + return hashed_attempt == hashed + + @classmethod def verify(cls, attempt, hashed): """Verify a password attempt. 
@@ -156,5 +162,5 @@ """ leave_empty = hashed == "" d = PasswordHasher.hash(attempt, hashed, leave_empty) - d.addCallback(lambda hashed_attempt: hashed_attempt == hashed) + d.addCallback(cls.compare_hash, hashed=hashed.encode('utf-8')) return d diff -r ff5bcb12ae60 -r ab2696e34d29 sat/memory/disco.py --- a/sat/memory/disco.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/disco.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -50,11 +50,11 @@ assert isinstance(identity, disco.DiscoIdentity) self.category = identity.category.encode("utf-8") self.idType = identity.type.encode("utf-8") - self.name = identity.name.encode("utf-8") if identity.name else "" - self.lang = lang.encode("utf-8") if lang is not None else "" + self.name = identity.name.encode("utf-8") if identity.name else b"" + self.lang = lang.encode("utf-8") if lang is not None else b"" - def __str__(self): - return "%s/%s/%s/%s" % (self.category, self.idType, self.lang, self.name) + def __bytes__(self): + return b"%s/%s/%s/%s" % (self.category, self.idType, self.lang, self.name) class HashManager(object): @@ -74,7 +74,7 @@ def __setitem__(self, hash_, disco_info): if hash_ in self.hashes: - log.debug(u"ignoring hash set: it is already known") + log.debug("ignoring hash set: it is already known") return self.hashes[hash_] = disco_info self.persistent[hash_] = disco_info.toElement().toXml() @@ -84,19 +84,19 @@ def load(self): def fillHashes(hashes): - for hash_, xml in hashes.iteritems(): + for hash_, xml in hashes.items(): element = xml_tools.ElementParser()(xml) disco_info = disco.DiscoInfo.fromElement(element) if not disco_info.features and not disco_info.identities: log.warning( _( - u"no feature/identity found in disco element (hash: {cap_hash}), ignoring: {xml}" + "no feature/identity found in disco element (hash: {cap_hash}), ignoring: {xml}" ).format(cap_hash=hash_, xml=xml) ) else: self.hashes[hash_] = 
disco_info - log.info(u"Disco hashes loaded") + log.info("Disco hashes loaded") d = self.persistent.load() d.addCallback(fillHashes) @@ -116,7 +116,7 @@ return self.hashes.load() @defer.inlineCallbacks - def hasFeature(self, client, feature, jid_=None, node=u""): + def hasFeature(self, client, feature, jid_=None, node=""): """Tell if an entity has the required feature @param feature: feature namespace @@ -128,7 +128,7 @@ defer.returnValue(feature in disco_infos.features) @defer.inlineCallbacks - def checkFeature(self, client, feature, jid_=None, node=u""): + def checkFeature(self, client, feature, jid_=None, node=""): """Like hasFeature, but raise an exception is feature is not Found @param feature: feature namespace @@ -142,7 +142,7 @@ raise failure.Failure(exceptions.FeatureNotFound) @defer.inlineCallbacks - def checkFeatures(self, client, features, jid_=None, identity=None, node=u""): + def checkFeatures(self, client, features, jid_=None, identity=None, node=""): """Like checkFeature, but check several features at once, and check also identity @param features(iterable[unicode]): features to check @@ -159,7 +159,7 @@ if identity is not None and identity not in disco_infos.identities: raise failure.Failure(exceptions.FeatureNotFound()) - def getInfos(self, client, jid_=None, node=u"", use_cache=True): + def getInfos(self, client, jid_=None, node="", use_cache=True): """get disco infos from jid_, filling capability hash if needed @param jid_: jid of the target, or None for profile's server @@ -188,16 +188,16 @@ def infosEb(fail): if fail.check(defer.CancelledError): - reason = u"request time-out" + reason = "request time-out" fail = failure.Failure(exceptions.TimeOutError(fail.message)) else: try: - reason = unicode(fail.value) + reason = str(fail.value) except AttributeError: - reason = unicode(fail) + reason = str(fail) log.warning( - u"Error while requesting disco infos from {jid}: {reason}".format( + "Error while requesting disco infos from {jid}: 
{reason}".format( jid=jid_.full(), reason=reason ) ) @@ -218,7 +218,7 @@ return defer.succeed(disco_infos) @defer.inlineCallbacks - def getItems(self, client, jid_=None, node=u"", use_cache=True): + def getItems(self, client, jid_=None, node="", use_cache=True): """get disco items from jid_, cache them for our own server @param jid_(jid.JID): jid of the target, or None for profile's server @@ -236,12 +236,12 @@ items = self.host.memory.getEntityData( jid_, ["DISCO_ITEMS"], client.profile )["DISCO_ITEMS"] - log.debug(u"[%s] disco items are in cache" % jid_.full()) + log.debug("[%s] disco items are in cache" % jid_.full()) if not use_cache: # we ignore cache, so we pretend we haven't found it raise KeyError except (KeyError, exceptions.UnknownEntityError): - log.debug(u"Caching [%s] disco items" % jid_.full()) + log.debug("Caching [%s] disco items" % jid_.full()) items = yield client.disco.requestItems(jid_, nodeIdentifier=node) self.host.memory.updateEntityData( jid_, "DISCO_ITEMS", items, profile_key=client.profile @@ -251,7 +251,7 @@ items = yield client.disco.requestItems(jid_, nodeIdentifier=node) except StanzaError as e: log.warning( - u"Error while requesting items for {jid}: {reason}".format( + "Error while requesting items for {jid}: {reason}".format( jid=jid_.full(), reason=e.condition ) ) @@ -262,7 +262,7 @@ def _infosEb(self, failure_, entity_jid): failure_.trap(StanzaError) log.warning( - _(u"Error while requesting [%(jid)s]: %(error)s") + _("Error while requesting [%(jid)s]: %(error)s") % {"jid": entity_jid.full(), "error": failure_.getErrorMessage()} ) @@ -326,7 +326,7 @@ def infosCb(infos, entity): if entity is None: - log.warning(_(u"received an item without jid")) + log.warning(_("received an item without jid")) return if identity is not None and identity not in infos.identities: return @@ -367,8 +367,8 @@ byte_identities.sort(key=lambda i: i.idType) byte_identities.sort(key=lambda i: i.category) for identity in byte_identities: - 
s.append(str(identity)) - s.append("<") + s.append(bytes(identity)) + s.append(b"<") # features byte_features = [ service.encode("utf-8") @@ -378,32 +378,32 @@ byte_features.sort() # XXX: the default sort has the same behaviour as the requested RFC 4790 i;octet sort for feature in byte_features: s.append(feature) - s.append("<") + s.append(b"<") # extensions - ext = services.extensions.values() + ext = list(services.extensions.values()) ext.sort(key=lambda f: f.formNamespace.encode('utf-8')) for extension in ext: s.append(extension.formNamespace.encode('utf-8')) - s.append("<") + s.append(b"<") fields = extension.fieldList fields.sort(key=lambda f: f.var.encode('utf-8')) for field in fields: s.append(field.var.encode('utf-8')) - s.append("<") + s.append(b"<") values = [v.encode('utf-8') for v in field.values] values.sort() for value in values: s.append(value) - s.append("<") + s.append(b"<") - cap_hash = b64encode(sha1("".join(s)).digest()) - log.debug(_(u"Capability hash generated: [{cap_hash}]").format(cap_hash=cap_hash)) + cap_hash = b64encode(sha1(b"".join(s)).digest()).decode('utf-8') + log.debug(_("Capability hash generated: [{cap_hash}]").format(cap_hash=cap_hash)) return cap_hash @defer.inlineCallbacks def _discoInfos( - self, entity_jid_s, node=u"", use_cache=True, profile_key=C.PROF_KEY_NONE + self, entity_jid_s, node="", use_cache=True, profile_key=C.PROF_KEY_NONE ): """ Discovery method for the bridge @param entity_jid_s: entity we want to discover @@ -417,7 +417,7 @@ disco_infos = yield self.getInfos(client, entity, node, use_cache) extensions = {} # FIXME: should extensions be serialised using tools.common.data_format? 
- for form_type, form in disco_infos.extensions.items(): + for form_type, form in list(disco_infos.extensions.items()): fields = [] for field in form.fieldList: data = {"type": field.fieldType} @@ -427,7 +427,7 @@ data[attr] = value values = [field.value] if field.value is not None else field.values - if field.fieldType == u"boolean": + if field.fieldType == "boolean": values = [C.boolConst(v) for v in values] fields.append((data, values)) @@ -436,7 +436,7 @@ defer.returnValue(( disco_infos.features, [(cat, type_, name or "") - for (cat, type_), name in disco_infos.identities.items()], + for (cat, type_), name in list(disco_infos.identities.items())], extensions)) def items2tuples(self, disco_items): @@ -447,13 +447,13 @@ """ for item in disco_items: if not item.entity: - log.warning(_(u"invalid item (no jid)")) + log.warning(_("invalid item (no jid)")) continue yield (item.entity.full(), item.nodeIdentifier or "", item.name or "") @defer.inlineCallbacks def _discoItems( - self, entity_jid_s, node=u"", use_cache=True, profile_key=C.PROF_KEY_NONE + self, entity_jid_s, node="", use_cache=True, profile_key=C.PROF_KEY_NONE ): """ Discovery method for the bridge diff -r ff5bcb12ae60 -r ab2696e34d29 sat/memory/encryption.py --- a/sat/memory/encryption.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/encryption.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -91,7 +91,7 @@ directed=directed) cls.plugins.append(plugin) cls.plugins.sort(key=lambda p: p.priority) - log.info(_(u"Encryption plugin registered: {name}").format(name=name)) + log.info(_("Encryption plugin registered: {name}").format(name=name)) @classmethod def getPlugins(cls): @@ -103,7 +103,7 @@ return next(p for p in cls.plugins if p.namespace == namespace) except StopIteration: raise exceptions.NotFound(_( - u"Can't find requested encryption plugin: {namespace}").format( + "Can't find requested encryption 
plugin: {namespace}").format( namespace=namespace)) @classmethod @@ -123,7 +123,7 @@ if p.name.lower() == name.lower(): return p.namespace raise exceptions.NotFound(_( - u"Can't find a plugin with the name \"{name}\".".format( + "Can't find a plugin with the name \"{name}\".".format( name=name))) def getBridgeData(self, session): @@ -133,12 +133,12 @@ @return (unicode): serialized data for bridge """ if session is None: - return u'' - plugin = session[u'plugin'] + return '' + plugin = session['plugin'] bridge_data = {'name': plugin.name, 'namespace': plugin.namespace} - if u'directed_devices' in session: - bridge_data[u'directed_devices'] = session[u'directed_devices'] + if 'directed_devices' in session: + bridge_data['directed_devices'] = session['directed_devices'] return data_format.serialise(bridge_data) @@ -151,7 +151,7 @@ try: start_encryption = plugin.instance.startEncryption except AttributeError: - log.debug(u"No startEncryption method found for {plugin}".format( + log.debug("No startEncryption method found for {plugin}".format( plugin = plugin.namespace)) return defer.succeed(None) else: @@ -167,7 +167,7 @@ try: stop_encryption = plugin.instance.stopEncryption except AttributeError: - log.debug(u"No stopEncryption method found for {plugin}".format( + log.debug("No stopEncryption method found for {plugin}".format( plugin = plugin.namespace)) return defer.succeed(None) else: @@ -187,8 +187,8 @@ it will be replaced by the new one """ if not self.plugins: - raise exceptions.NotFound(_(u"No encryption plugin is registered, " - u"an encryption session can't be started")) + raise exceptions.NotFound(_("No encryption plugin is registered, " + "an encryption session can't be started")) if namespace is None: plugin = self.plugins[0] @@ -198,10 +198,10 @@ bare_jid = entity.userhostJID() if bare_jid in self._sessions: # we have already an encryption session with this contact - former_plugin = self._sessions[bare_jid][u"plugin"] + former_plugin = 
self._sessions[bare_jid]["plugin"] if former_plugin.namespace == namespace: - log.info(_(u"Session with {bare_jid} is already encrypted with {name}. " - u"Nothing to do.").format( + log.info(_("Session with {bare_jid} is already encrypted with {name}. " + "Nothing to do.").format( bare_jid=bare_jid, name=former_plugin.name)) return @@ -211,8 +211,8 @@ del self._sessions[bare_jid] yield self._stopEncryption(former_plugin, entity) else: - msg = (_(u"Session with {bare_jid} is already encrypted with {name}. " - u"Please stop encryption session before changing algorithm.") + msg = (_("Session with {bare_jid} is already encrypted with {name}. " + "Please stop encryption session before changing algorithm.") .format(bare_jid=bare_jid, name=plugin.name)) log.warning(msg) raise exceptions.ConflictError(msg) @@ -223,34 +223,34 @@ entity.resource = self.host.memory.getMainResource(self.client, entity) if not entity.resource: raise exceptions.NotFound( - _(u"No resource found for {destinee}, can't encrypt with {name}") + _("No resource found for {destinee}, can't encrypt with {name}") .format(destinee=entity.full(), name=plugin.name)) - log.info(_(u"No resource specified to encrypt with {name}, using " - u"{destinee}.").format(destinee=entity.full(), + log.info(_("No resource specified to encrypt with {name}, using " + "{destinee}.").format(destinee=entity.full(), name=plugin.name)) # indicate that we encrypt only for some devices - directed_devices = data[u'directed_devices'] = [entity.resource] + directed_devices = data['directed_devices'] = [entity.resource] elif entity.resource: - raise ValueError(_(u"{name} encryption must be used with bare jids.")) + raise ValueError(_("{name} encryption must be used with bare jids.")) yield self._startEncryption(plugin, entity) self._sessions[entity.userhostJID()] = data - log.info(_(u"Encryption session has been set for {entity_jid} with " - u"{encryption_name}").format( + log.info(_("Encryption session has been set for {entity_jid} 
with " + "{encryption_name}").format( entity_jid=entity.full(), encryption_name=plugin.name)) self.host.bridge.messageEncryptionStarted( entity.full(), self.getBridgeData(data), self.client.profile) - msg = D_(u"Encryption session started: your messages with {destinee} are " - u"now end to end encrypted using {name} algorithm.").format( + msg = D_("Encryption session started: your messages with {destinee} are " + "now end to end encrypted using {name} algorithm.").format( destinee=entity.full(), name=plugin.name) - directed_devices = data.get(u'directed_devices') + directed_devices = data.get('directed_devices') if directed_devices: - msg += u"\n" + D_(u"Message are encrypted only for {nb_devices} device(s): " - u"{devices_list}.").format( + msg += "\n" + D_("Message are encrypted only for {nb_devices} device(s): " + "{devices_list}.").format( nb_devices=len(directed_devices), - devices_list = u', '.join(directed_devices)) + devices_list = ', '.join(directed_devices)) self.client.feedback(bare_jid, msg) @@ -266,29 +266,29 @@ session = self.getSession(entity.userhostJID()) if not session: raise failure.Failure( - exceptions.NotFound(_(u"There is no encryption session with this " - u"entity."))) + exceptions.NotFound(_("There is no encryption session with this " + "entity."))) plugin = session['plugin'] if namespace is not None and plugin.namespace != namespace: raise exceptions.InternalError(_( - u"The encryption session is not run with the expected plugin: encrypted " - u"with {current_name} and was expecting {expected_name}").format( - current_name=session[u'plugin'].namespace, + "The encryption session is not run with the expected plugin: encrypted " + "with {current_name} and was expecting {expected_name}").format( + current_name=session['plugin'].namespace, expected_name=namespace)) if entity.resource: try: - directed_devices = session[u'directed_devices'] + directed_devices = session['directed_devices'] except KeyError: raise exceptions.NotFound(_( - u"There 
is a session for the whole entity (i.e. all devices of the " - u"entity), not a directed one. Please use bare jid if you want to " - u"stop the whole encryption with this entity.")) + "There is a session for the whole entity (i.e. all devices of the " + "entity), not a directed one. Please use bare jid if you want to " + "stop the whole encryption with this entity.")) try: directed_devices.remove(entity.resource) except ValueError: - raise exceptions.NotFound(_(u"There is no directed session with this " - u"entity.")) + raise exceptions.NotFound(_("There is no directed session with this " + "entity.")) else: if not directed_devices: # if we have no more directed device sessions, @@ -302,7 +302,7 @@ del self._sessions[entity.userhostJID()] yield self._stopEncryption(plugin, entity) - log.info(_(u"encryption session stopped with entity {entity}").format( + log.info(_("encryption session stopped with entity {entity}").format( entity=entity.full())) self.host.bridge.messageEncryptionStopped( entity.full(), @@ -310,9 +310,9 @@ 'namespace': plugin.namespace, }, self.client.profile) - msg = D_(u"Encryption session finished: your messages with {destinee} are " - u"NOT end to end encrypted anymore.\nYour server administrators or " - u"{destinee} server administrators will be able to read them.").format( + msg = D_("Encryption session finished: your messages with {destinee} are " + "NOT end to end encrypted anymore.\nYour server administrators or " + "{destinee} server administrators will be able to read them.").format( destinee=entity.full()) self.client.feedback(entity, msg) @@ -326,7 +326,7 @@ None if there is not encryption for this session with this jid """ if entity.resource: - raise ValueError(u"Full jid given when expecting bare jid") + raise ValueError("Full jid given when expecting bare jid") return self._sessions.get(entity) def getTrustUI(self, entity_jid, namespace=None): @@ -346,7 +346,7 @@ session = self.getSession(entity_jid) if not session: raise 
exceptions.NotFound( - u"No encryption session currently active for {entity_jid}" + "No encryption session currently active for {entity_jid}" .format(entity_jid=entity_jid.full())) plugin = session['plugin'] else: @@ -355,7 +355,7 @@ get_trust_ui = plugin.instance.getTrustUI except AttributeError: raise NotImplementedError( - u"Encryption plugin doesn't handle trust management UI") + "Encryption plugin doesn't handle trust management UI") else: return defer.maybeDeferred(get_trust_ui, self.client, entity_jid) @@ -364,32 +364,32 @@ @classmethod def _importMenus(cls, host): host.importMenu( - (D_(u"Encryption"), D_(u"unencrypted (plain text)")), + (D_("Encryption"), D_("unencrypted (plain text)")), partial(cls._onMenuUnencrypted, host=host), security_limit=0, - help_string=D_(u"End encrypted session"), + help_string=D_("End encrypted session"), type_=C.MENU_SINGLE, ) for plg in cls.getPlugins(): host.importMenu( - (D_(u"Encryption"), plg.name), + (D_("Encryption"), plg.name), partial(cls._onMenuName, host=host, plg=plg), security_limit=0, - help_string=D_(u"Start {name} session").format(name=plg.name), + help_string=D_("Start {name} session").format(name=plg.name), type_=C.MENU_SINGLE, ) host.importMenu( - (D_(u"Encryption"), D_(u"⛨ {name} trust").format(name=plg.name)), + (D_("Encryption"), D_("⛨ {name} trust").format(name=plg.name)), partial(cls._onMenuTrust, host=host, plg=plg), security_limit=0, - help_string=D_(u"Manage {name} trust").format(name=plg.name), + help_string=D_("Manage {name} trust").format(name=plg.name), type_=C.MENU_SINGLE, ) @classmethod def _onMenuUnencrypted(cls, data, host, profile): client = host.getClient(profile) - peer_jid = jid.JID(data[u'jid']).userhostJID() + peer_jid = jid.JID(data['jid']).userhostJID() d = client.encryption.stop(peer_jid) d.addCallback(lambda __: {}) return d @@ -397,7 +397,7 @@ @classmethod def _onMenuName(cls, data, host, plg, profile): client = host.getClient(profile) - peer_jid = jid.JID(data[u'jid']) + peer_jid 
= jid.JID(data['jid']) if not plg.directed: peer_jid = peer_jid.userhostJID() d = client.encryption.start(peer_jid, plg.namespace, replace=True) @@ -408,9 +408,9 @@ @defer.inlineCallbacks def _onMenuTrust(cls, data, host, plg, profile): client = host.getClient(profile) - peer_jid = jid.JID(data[u'jid']).userhostJID() + peer_jid = jid.JID(data['jid']).userhostJID() ui = yield client.encryption.getTrustUI(peer_jid, plg.namespace) - defer.returnValue({u'xmlui': ui.toXml()}) + defer.returnValue({'xmlui': ui.toXml()}) ## Triggers ## diff -r ff5bcb12ae60 -r ab2696e34d29 sat/memory/memory.py --- a/sat/memory/memory.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/memory.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -26,7 +26,7 @@ import os.path import copy from collections import namedtuple -from ConfigParser import SafeConfigParser, NoOptionError, NoSectionError +from configparser import SafeConfigParser, NoOptionError, NoSectionError from uuid import uuid4 from twisted.python import failure from twisted.internet import defer, reactor, error @@ -76,7 +76,7 @@ session_id = str(uuid4()) elif session_id in self._sessions: raise exceptions.ConflictError( - u"Session id {} is already used".format(session_id) + "Session id {} is already used".format(session_id) ) timer = reactor.callLater(self.timeout, self._purgeSession, session_id) if session_data is None: @@ -99,9 +99,9 @@ pass del self._sessions[session_id] log.debug( - u"Session {} purged{}".format( + "Session {} purged{}".format( session_id, - u" (profile {})".format(profile) if profile is not None else u"", + " (profile {})".format(profile) if profile is not None else "", ) ) @@ -147,10 +147,10 @@ self._purgeSession(session_id) def keys(self): - return self._sessions.keys() + return list(self._sessions.keys()) def iterkeys(self): - return self._sessions.iterkeys() + return iter(self._sessions.keys()) class 
ProfileSessions(Sessions): @@ -165,7 +165,7 @@ @return: a list containing the sessions ids """ ret = [] - for session_id in self._sessions.iterkeys(): + for session_id in self._sessions.keys(): try: timer, session_data, profile_set = self._sessions[session_id] except ValueError: @@ -245,7 +245,7 @@ if not silent: log.warning( _( - u"A database has been found in the default local_dir for previous versions (< 0.5)" + "A database has been found in the default local_dir for previous versions (< 0.5)" ) ) tools_config.fixConfigOption("", "local_dir", old_default, silent) @@ -306,10 +306,10 @@ if os.path.exists(filename): try: self.params.load_xml(filename) - log.debug(_(u"Parameters loaded from file: %s") % filename) + log.debug(_("Parameters loaded from file: %s") % filename) return True except Exception as e: - log.error(_(u"Can't load parameters from file: %s") % e) + log.error(_("Can't load parameters from file: %s") % e) return False def save_xml(self, filename): @@ -324,10 +324,10 @@ filename = os.path.expanduser(filename) try: self.params.save_xml(filename) - log.debug(_(u"Parameters saved to file: %s") % filename) + log.debug(_("Parameters saved to file: %s") % filename) return True except Exception as e: - log.error(_(u"Can't save parameters to file: %s") % e) + log.error(_("Can't save parameters to file: %s") % e) return False def load(self): @@ -356,7 +356,7 @@ def createSession(__): """Called once params are loaded.""" self._entities_cache[profile] = {} - log.info(u"[{}] Profile session started".format(profile)) + log.info("[{}] Profile session started".format(profile)) return False def backendInitialised(__): @@ -392,13 +392,13 @@ @param profile: %(doc_profile)s """ if self.host.isConnected(profile): - log.debug(u"Disconnecting profile because of session stop") + log.debug("Disconnecting profile because of session stop") self.host.disconnect(profile) self.auth_sessions.profileDelUnique(profile) try: self._entities_cache[profile] except KeyError: - 
log.warning(u"Profile was not in cache") + log.warning("Profile was not in cache") def _isSessionStarted(self, profile_key): return self.isSessionStarted(self.getProfileName(profile_key)) @@ -428,10 +428,10 @@ def check_result(result): if not result: - log.warning(u"Authentication failure of profile {}".format(profile)) + log.warning("Authentication failure of profile {}".format(profile)) raise failure.Failure( exceptions.PasswordError( - u"The provided profile password doesn't match." + "The provided profile password doesn't match." ) ) if ( @@ -460,7 +460,7 @@ self.auth_sessions.newSession( {C.MEMORY_CRYPTO_KEY: personal_key}, profile=profile ) - log.debug(u"auth session created for profile %s" % profile) + log.debug("auth session created for profile %s" % profile) d = PersistentDict(C.MEMORY_CRYPTO_NAMESPACE, profile).load() d.addCallback(lambda data: BlockCipher.decrypt(key, data[C.MEMORY_CRYPTO_KEY])) @@ -476,7 +476,7 @@ except KeyError: log.error( _( - u"Trying to purge roster status cache for a profile not in memory: [%s]" + "Trying to purge roster status cache for a profile not in memory: [%s]" ) % profile ) @@ -489,7 +489,7 @@ @return (list[unicode]): selected profiles """ if not clients and not components: - log.warning(_(u"requesting no profiles at all")) + log.warning(_("requesting no profiles at all")) return [] profiles = self.storage.getProfilesList() if clients and components: @@ -533,20 +533,20 @@ @raise exceptions.NotFound: component is not a known plugin import name """ if not name: - raise ValueError(u"Empty profile name") + raise ValueError("Empty profile name") if name[0] == "@": - raise ValueError(u"A profile name can't start with a '@'") + raise ValueError("A profile name can't start with a '@'") if "\n" in name: - raise ValueError(u"A profile name can't contain line feed ('\\n')") + raise ValueError("A profile name can't contain line feed ('\\n')") if name in self._entities_cache: - raise exceptions.ConflictError(u"A session for this 
profile exists") + raise exceptions.ConflictError("A session for this profile exists") if component: if not component in self.host.plugins: raise exceptions.NotFound( _( - u"Can't find component {component} entry point".format( + "Can't find component {component} entry point".format( component=component ) ) @@ -664,7 +664,7 @@ def _getPresenceStatuses(self, profile_key): ret = self.getPresenceStatuses(profile_key) - return {entity.full(): data for entity, data in ret.iteritems()} + return {entity.full(): data for entity, data in ret.items()} def getPresenceStatuses(self, profile_key): """Get all the presence statuses of a profile @@ -676,8 +676,8 @@ profile_cache = self._getProfileCache(client) entities_presence = {} - for entity_jid, entity_data in profile_cache.iteritems(): - for resource, resource_data in entity_data.iteritems(): + for entity_jid, entity_data in profile_cache.items(): + for resource, resource_data in entity_data.items(): full_jid = copy.copy(entity_jid) full_jid.resource = resource try: @@ -736,7 +736,7 @@ entity_data = profile_cache[entity_jid.userhostJID()] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(entity_jid) + "Entity {} not in cache".format(entity_jid) ) resources = set(entity_data.keys()) resources.discard(None) @@ -758,7 +758,7 @@ try: presence_data = self.getEntityDatum(full_jid, "presence", client.profile) except KeyError: - log.debug(u"Can't get presence data for {}".format(full_jid)) + log.debug("Can't get presence data for {}".format(full_jid)) else: if presence_data.show != C.PRESENCE_UNAVAILABLE: available.append(resource) @@ -787,7 +787,7 @@ try: resources = self.getAllResources(client, entity_jid) except exceptions.UnknownEntityError: - log.warning(u"Entity is not in cache, we can't find any resource") + log.warning("Entity is not in cache, we can't find any resource") return None priority_resources = [] for resource in resources: @@ -796,13 +796,13 @@ try: presence_data = 
self.getEntityDatum(full_jid, "presence", client.profile) except KeyError: - log.debug(u"No presence information for {}".format(full_jid)) + log.debug("No presence information for {}".format(full_jid)) continue priority_resources.append((resource, presence_data.priority)) try: return max(priority_resources, key=lambda res_tuple: res_tuple[1])[0] except ValueError: - log.warning(u"No resource found at all for {}".format(entity_jid)) + log.warning("No resource found at all for {}".format(entity_jid)) return None ## Entities data ## @@ -835,8 +835,8 @@ """ profile_cache = self._getProfileCache(client) # we construct a list of all known full jids (bare jid of entities x resources) - for bare_jid, entity_data in profile_cache.iteritems(): - for resource in entity_data.iterkeys(): + for bare_jid, entity_data in profile_cache.items(): + for resource in entity_data.keys(): if resource is None: continue full_jid = copy.copy(bare_jid) @@ -871,9 +871,9 @@ entity_data[key] = value if key in self._key_signals and not silent: - if not isinstance(value, basestring): + if not isinstance(value, str): log.error( - u"Setting a non string value ({}) for a key ({}) which has a signal flag".format( + "Setting a non string value ({}) for a key ({}) which has a signal flag".format( value, key ) ) @@ -905,7 +905,7 @@ entity_data = profile_cache[jid_.userhostJID()][jid_.resource] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(jid_) + "Entity {} not in cache".format(jid_) ) try: del entity_data[key] @@ -919,7 +919,7 @@ ret = self.getEntitiesData( [jid.JID(jid_) for jid_ in entities_jids], keys_list, profile_key ) - return {jid_.full(): data for jid_, data in ret.iteritems()} + return {jid_.full(): data for jid_, data in ret.items()} def getEntitiesData(self, entities_jids, keys_list=None, profile_key=C.PROF_KEY_NONE): """Get a list of cached values for several entities at once @@ -961,8 +961,8 @@ continue ret_data[entity.full()] = 
fillEntityData(entity_cache_data, keys_list) else: - for bare_jid, data in profile_cache.iteritems(): - for resource, entity_cache_data in data.iteritems(): + for bare_jid, data in profile_cache.items(): + for resource, entity_cache_data in data.items(): full_jid = copy.copy(bare_jid) full_jid.resource = resource ret_data[full_jid] = fillEntityData(entity_cache_data) @@ -987,7 +987,7 @@ entity_data = profile_cache[entity_jid.userhostJID()][entity_jid.resource] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache (was requesting {})".format( + "Entity {} not in cache (was requesting {})".format( entity_jid, keys_list ) ) @@ -1030,14 +1030,14 @@ del profile_cache[entity_jid] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(entity_jid) + "Entity {} not in cache".format(entity_jid) ) else: try: del profile_cache[entity_jid.userhostJID()][entity_jid.resource] except KeyError: raise exceptions.UnknownEntityError( - u"Entity {} not in cache".format(entity_jid) + "Entity {} not in cache".format(entity_jid) ) ## Encryption ## @@ -1103,7 +1103,7 @@ def done(__): log.debug( - _(u"Personal data (%(ns)s, %(key)s) has been successfuly encrypted") + _("Personal data (%(ns)s, %(key)s) has been successfuly encrypted") % {"ns": C.MEMORY_CRYPTO_NAMESPACE, "key": data_key} ) @@ -1225,21 +1225,21 @@ # the owner has all rights return if not C.ACCESS_PERMS.issuperset(perms_to_check): - raise exceptions.InternalError(_(u"invalid permission")) + raise exceptions.InternalError(_("invalid permission")) for perm in perms_to_check: # we check each perm and raise PermissionError as soon as one condition is not valid # we must never return here, we only return after the loop if nothing was blocking the access try: - perm_data = file_data[u"access"][perm] - perm_type = perm_data[u"type"] + perm_data = file_data["access"][perm] + perm_type = perm_data["type"] except KeyError: raise exceptions.PermissionError() if perm_type == 
C.ACCESS_TYPE_PUBLIC: continue elif perm_type == C.ACCESS_TYPE_WHITELIST: try: - jids = perm_data[u"jids"] + jids = perm_data["jids"] except KeyError: raise exceptions.PermissionError() if peer_jid.full() in jids: @@ -1248,7 +1248,7 @@ raise exceptions.PermissionError() else: raise exceptions.InternalError( - _(u"unknown access type: {type}").format(type=perm_type) + _("unknown access type: {type}").format(type=perm_type) ) @defer.inlineCallbacks @@ -1257,7 +1257,7 @@ current = file_data while True: self.checkFilePermission(current, peer_jid, perms_to_check) - parent = current[u"parent"] + parent = current["parent"] if not parent: break files_data = yield self.getFile( @@ -1266,7 +1266,7 @@ try: current = files_data[0] except IndexError: - raise exceptions.DataError(u"Missing parent") + raise exceptions.DataError("Missing parent") @defer.inlineCallbacks def _getParentDir( @@ -1283,15 +1283,15 @@ # if path is set, we have to retrieve parent directory of the file(s) from it if parent is not None: raise exceptions.ConflictError( - _(u"You can't use path and parent at the same time") + _("You can't use path and parent at the same time") ) - path_elts = filter(None, path.split(u"/")) - if {u"..", u"."}.intersection(path_elts): - raise ValueError(_(u'".." or "." can\'t be used in path')) + path_elts = [_f for _f in path.split("/") if _f] + if {"..", "."}.intersection(path_elts): + raise ValueError(_('".." or "." 
can\'t be used in path')) # we retrieve all directories from path until we get the parent container # non existing directories will be created - parent = u"" + parent = "" for idx, path_elt in enumerate(path_elts): directories = yield self.storage.getFiles( client, @@ -1306,12 +1306,12 @@ # from this point, directories don't exist anymore, we have to create them elif len(directories) > 1: raise exceptions.InternalError( - _(u"Several directories found, this should not happen") + _("Several directories found, this should not happen") ) else: directory = directories[0] self.checkFilePermission(directory, peer_jid, perms_to_check) - parent = directory[u"id"] + parent = directory["id"] defer.returnValue((parent, [])) @defer.inlineCallbacks @@ -1357,8 +1357,8 @@ """ if peer_jid is None and perms_to_check or perms_to_check is None and peer_jid: raise exceptions.InternalError( - u"if you want to disable permission check, both peer_jid and " - u"perms_to_check must be None" + "if you want to disable permission check, both peer_jid and " + "perms_to_check must be None" ) if owner is not None: owner = owner.userhostJID() @@ -1378,7 +1378,7 @@ try: parent_data = parent_data[0] except IndexError: - raise exceptions.DataError(u"mising parent") + raise exceptions.DataError("mising parent") yield self.checkPermissionToRoot( client, parent_data, peer_jid, perms_to_check ) @@ -1414,7 +1414,7 @@ @defer.inlineCallbacks def setFile( - self, client, name, file_id=None, version=u"", parent=None, path=None, + self, client, name, file_id=None, version="", parent=None, path=None, type_=C.FILE_TYPE_FILE, file_hash=None, hash_algo=None, size=None, namespace=None, mime_type=None, created=None, modified=None, owner=None, access=None, extra=None, peer_jid=None, perms_to_check=(C.ACCESS_PERM_WRITE,) @@ -1481,7 +1481,7 @@ if type_ == C.FILE_TYPE_DIRECTORY: if any(version, file_hash, size, mime_type): raise ValueError( - u"version, file_hash, size and mime_type can't be set for a directory" + 
"version, file_hash, size and mime_type can't be set for a directory" ) if owner is not None: owner = owner.userhostJID() @@ -1498,7 +1498,7 @@ client, name=new_dir, file_id=new_dir_id, - version=u"", + version="", parent=parent, type_=C.FILE_TYPE_DIRECTORY, namespace=namespace, @@ -1509,7 +1509,7 @@ ) parent = new_dir_id elif parent is None: - parent = u"" + parent = "" yield self.storage.setFile( client, @@ -1552,35 +1552,35 @@ @param files_path(unicode): path of the directory containing the actual files @param file_data(dict): data of the file to delete """ - if file_data[u'owner'] != peer_jid: + if file_data['owner'] != peer_jid: raise exceptions.PermissionError( - u"file {file_name} can't be deleted, {peer_jid} is not the owner" - .format(file_name=file_data[u'name'], peer_jid=peer_jid.full())) - if file_data[u'type'] == C.FILE_TYPE_DIRECTORY: - sub_files = yield self.getFiles(client, peer_jid, parent=file_data[u'id']) + "file {file_name} can't be deleted, {peer_jid} is not the owner" + .format(file_name=file_data['name'], peer_jid=peer_jid.full())) + if file_data['type'] == C.FILE_TYPE_DIRECTORY: + sub_files = yield self.getFiles(client, peer_jid, parent=file_data['id']) if sub_files and not recursive: - raise exceptions.DataError(_(u"Can't delete directory, it is not empty")) + raise exceptions.DataError(_("Can't delete directory, it is not empty")) # we first delete the sub-files for sub_file_data in sub_files: yield self._deleteFile(client, peer_jid, recursive, sub_file_data) # then the directory itself - yield self.storage.fileDelete(file_data[u'id']) - elif file_data[u'type'] == C.FILE_TYPE_FILE: - log.info(_(u"deleting file {name} with hash {file_hash}").format( - name=file_data[u'name'], file_hash=file_data[u'file_hash'])) - yield self.storage.fileDelete(file_data[u'id']) + yield self.storage.fileDelete(file_data['id']) + elif file_data['type'] == C.FILE_TYPE_FILE: + log.info(_("deleting file {name} with hash {file_hash}").format( + 
name=file_data['name'], file_hash=file_data['file_hash'])) + yield self.storage.fileDelete(file_data['id']) references = yield self.getFiles( - client, peer_jid, file_hash=file_data[u'file_hash']) + client, peer_jid, file_hash=file_data['file_hash']) if references: - log.debug(u"there are still references to the file, we keep it") + log.debug("there are still references to the file, we keep it") else: - file_path = os.path.join(files_path, file_data[u'file_hash']) - log.info(_(u"no reference left to {file_path}, deleting").format( + file_path = os.path.join(files_path, file_data['file_hash']) + log.info(_("no reference left to {file_path}, deleting").format( file_path=file_path)) os.unlink(file_path) else: - raise exceptions.InternalError(u'Unexpected file type: {file_type}' - .format(file_type=file_data[u'type'])) + raise exceptions.InternalError('Unexpected file type: {file_type}' + .format(file_type=file_data['type'])) @defer.inlineCallbacks def fileDelete(self, client, peer_jid, file_id, recursive=False): @@ -1595,11 +1595,11 @@ # should be checked too files_data = yield self.getFiles(client, peer_jid, file_id) if not files_data: - raise exceptions.NotFound(u"Can't find the file with id {file_id}".format( + raise exceptions.NotFound("Can't find the file with id {file_id}".format( file_id=file_id)) file_data = files_data[0] - if file_data[u"type"] != C.FILE_TYPE_DIRECTORY and recursive: - raise ValueError(u"recursive can only be set for directories") + if file_data["type"] != C.FILE_TYPE_DIRECTORY and recursive: + raise ValueError("recursive can only be set for directories") files_path = self.host.getLocalPath(None, C.FILES_DIR, profile=False) yield self._deleteFile(client, peer_jid, recursive, files_path, file_data) @@ -1618,6 +1618,6 @@ try: presence_data = self.getEntityDatum(entity_jid, "presence", client.profile) except KeyError: - log.debug(u"No presence information for {}".format(entity_jid)) + log.debug("No presence information for 
{}".format(entity_jid)) return False return presence_data.show != C.PRESENCE_UNAVAILABLE diff -r ff5bcb12ae60 -r ab2696e34d29 sat/memory/params.py --- a/sat/memory/params.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/params.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -56,7 +56,7 @@ # TODO: when priority is changed, a new presence stanza must be emitted # TODO: int type (Priority should be int instead of string) - default_xml = u""" + default_xml = """ @@ -80,20 +80,20 @@ """ % { - u"category_general": D_(u"General"), - u"category_connection": D_(u"Connection"), - u"history_param": C.HISTORY_LIMIT, - u"history_label": D_(u"Chat history limit"), - u"show_offline_contacts": C.SHOW_OFFLINE_CONTACTS, - u"show_offline_contacts_label": D_(u"Show offline contacts"), - u"show_empty_groups": C.SHOW_EMPTY_GROUPS, - u"show_empty_groups_label": D_(u"Show empty groups"), - u"force_server_param": C.FORCE_SERVER_PARAM, - u"force_port_param": C.FORCE_PORT_PARAM, - u"new_account_label": D_(u"Register new account"), - u"autoconnect_label": D_(u"Connect on frontend startup"), - u"autodisconnect_label": D_(u"Disconnect on frontend closure"), - u"check_certificate_label": D_(u"Check certificate (don't uncheck if unsure)"), + "category_general": D_("General"), + "category_connection": D_("Connection"), + "history_param": C.HISTORY_LIMIT, + "history_label": D_("Chat history limit"), + "show_offline_contacts": C.SHOW_OFFLINE_CONTACTS, + "show_offline_contacts_label": D_("Show offline contacts"), + "show_empty_groups": C.SHOW_EMPTY_GROUPS, + "show_empty_groups_label": D_("Show empty groups"), + "force_server_param": C.FORCE_SERVER_PARAM, + "force_port_param": C.FORCE_PORT_PARAM, + "new_account_label": D_("Register new account"), + "autoconnect_label": D_("Connect on frontend startup"), + "autodisconnect_label": D_("Disconnect on frontend closure"), + "check_certificate_label": 
D_("Check certificate (don't uncheck if unsure)"), } def load_default_params(self): @@ -158,7 +158,7 @@ del self.params[profile] except KeyError: log.error( - _(u"Trying to purge cache of a profile not in memory: [%s]") % profile + _("Trying to purge cache of a profile not in memory: [%s]") % profile ) def save_xml(self, filename): @@ -238,7 +238,7 @@ elif return_profile_keys and profile_key in [C.PROF_KEY_ALL]: return profile_key # this value must be managed by the caller if not self.storage.hasProfile(profile_key): - log.error(_(u"Trying to access an unknown profile (%s)") % profile_key) + log.error(_("Trying to access an unknown profile (%s)") % profile_key) raise exceptions.ProfileUnknownError(profile_key) return profile_key @@ -294,7 +294,7 @@ if ( len(cat_node.childNodes) == to_remove_count ): # remove empty category - for __ in xrange(0, to_remove_count): + for __ in range(0, to_remove_count): to_remove.pop() to_remove.append(cat_node) for node in to_remove: @@ -333,7 +333,7 @@ if not app: log.warning( _( - u"Trying to register frontends parameters with no specified app: aborted" + "Trying to register frontends parameters with no specified app: aborted" ) ) return @@ -342,14 +342,14 @@ if app in self.frontends_cache: log.debug( _( - u"Trying to register twice frontends parameters for %(app)s: aborted" + "Trying to register twice frontends parameters for %(app)s: aborted" % {"app": app} ) ) return self.frontends_cache.append(app) self.updateParams(xml, security_limit, app) - log.debug(u"Frontends parameters registered for %(app)s" % {"app": app}) + log.debug("Frontends parameters registered for %(app)s" % {"app": app}) def __default_ok(self, value, name, category): # FIXME: will not work with individual parameters @@ -357,7 +357,7 @@ def __default_ko(self, failure, name, category): log.error( - _(u"Can't determine default value for [%(category)s/%(name)s]: %(reason)s") + _("Can't determine default value for [%(category)s/%(name)s]: %(reason)s") % {"category": 
category, "name": name, "reason": str(failure.value)} ) @@ -380,7 +380,7 @@ if not node: log.error( _( - u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + "Requested param [%(name)s] in category [%(category)s] doesn't exist !" ) % {"name": name, "category": category} ) @@ -443,14 +443,14 @@ if len(selected) == 0: log.error( _( - u"Parameter (%(cat)s, %(param)s) of type list has no default option!" + "Parameter (%(cat)s, %(param)s) of type list has no default option!" ) % {"cat": cat, "param": param} ) else: log.error( _( - u"Parameter (%(cat)s, %(param)s) of type list has more than one default option!" + "Parameter (%(cat)s, %(param)s) of type list has more than one default option!" ) % {"cat": cat, "param": param} ) @@ -468,7 +468,7 @@ jids[idx] = jid.JID(value) except (RuntimeError, jid.InvalidFormat, AttributeError): log.warning( - u"Incorrect jid value found in jids list: [{}]".format(value) + "Incorrect jid value found in jids list: [{}]".format(value) ) to_delete.append(value) for value in to_delete: @@ -564,7 +564,7 @@ if not node: log.error( _( - u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + "Requested param [%(name)s] in category [%(category)s] doesn't exist !" ) % {"name": name, "category": category} ) @@ -630,7 +630,7 @@ if not node: log.error( _( - u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + "Requested param [%(name)s] in category [%(category)s] doesn't exist !" ) % {"name": name, "category": category} ) @@ -639,7 +639,7 @@ if not self.checkSecurityLimit(node[1], security_limit): log.warning( _( - u"Trying to get parameter '%(param)s' in category '%(cat)s' without authorization!!!" + "Trying to get parameter '%(param)s' in category '%(cat)s' without authorization!!!" 
% {"param": name, "cat": category} ) ) @@ -697,7 +697,7 @@ name = param_node.getAttribute("name") if not name: log.warning( - u"ignoring attribute without name: {}".format( + "ignoring attribute without name: {}".format( param_node.toxml() ) ) @@ -850,7 +850,7 @@ AttributeError, ): log.warning( - u"Incorrect jid value found in jids list: [{}]".format( + "Incorrect jid value found in jids list: [{}]".format( jid_ ) ) @@ -982,13 +982,13 @@ if profile_key != C.PROF_KEY_NONE: profile = self.getProfileName(profile_key) if not profile: - log.error(_(u"Trying to set parameter for an unknown profile")) + log.error(_("Trying to set parameter for an unknown profile")) raise exceptions.ProfileUnknownError(profile_key) node = self._getParamNode(name, category, "@ALL@") if not node: log.error( - _(u"Requesting an unknown parameter (%(category)s/%(name)s)") + _("Requesting an unknown parameter (%(category)s/%(name)s)") % {"category": category, "name": name} ) return defer.succeed(None) @@ -996,7 +996,7 @@ if not self.checkSecurityLimit(node[1], security_limit): log.warning( _( - u"Trying to set parameter '%(param)s' in category '%(cat)s' without authorization!!!" + "Trying to set parameter '%(param)s' in category '%(cat)s' without authorization!!!" 
% {"param": name, "cat": category} ) ) @@ -1012,7 +1012,7 @@ except ValueError: log.debug( _( - u"Trying to set parameter '%(param)s' in category '%(cat)s' with an non-integer value" + "Trying to set parameter '%(param)s' in category '%(cat)s' with an non-integer value" % {"param": name, "cat": category} ) ) @@ -1051,7 +1051,7 @@ assert profile_key != C.PROF_KEY_NONE if type_ == "button": - log.debug(u"Clicked param button %s" % node.toxml()) + log.debug("Clicked param button %s" % node.toxml()) return defer.succeed(None) elif type_ == "password": try: diff -r ff5bcb12ae60 -r ab2696e34d29 sat/memory/persistent.py --- a/sat/memory/persistent.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/persistent.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -63,10 +63,10 @@ return d def iteritems(self): - return self._cache.iteritems() + return iter(self._cache.items()) def items(self): - return self._cache.items() + return list(self._cache.items()) def __repr__(self): return self._cache.__repr__() @@ -98,8 +98,8 @@ def __hash__(self): return self._cache.__hash__() - def __nonzero__(self): - return self._cache.__len__() + def __bool__(self): + return self._cache.__len__() != 0 def __contains__(self, key): return self._cache.__contains__(key) @@ -149,7 +149,7 @@ class LazyPersistentBinaryDict(PersistentBinaryDict): - ur"""PersistentBinaryDict which get key/value when needed + r"""PersistentBinaryDict which get key/value when needed This Persistent need more database access, it is suitable for largest data, to save memory. 
@@ -160,7 +160,7 @@ def load(self): # we show a warning as calling load on LazyPersistentBinaryDict sounds like a code mistake - log.warning(_(u"Calling load on LazyPersistentBinaryDict while it's not needed")) + log.warning(_("Calling load on LazyPersistentBinaryDict while it's not needed")) def iteritems(self): raise NotImplementedError @@ -196,9 +196,9 @@ raise NotImplementedError def __hash__(self): - return hash(unicode(self.__class__) + self.namespace + (self.profile or u'')) + return hash(str(self.__class__) + self.namespace + (self.profile or '')) - def __nonzero__(self): + def __bool__(self): raise NotImplementedError def __contains__(self, key): diff -r ff5bcb12ae60 -r ab2696e34d29 sat/memory/sqlite.py --- a/sat/memory/sqlite.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/memory/sqlite.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -31,7 +31,7 @@ import sys import re import os.path -import cPickle as pickle +import pickle as pickle import hashlib import sqlite3 import json @@ -152,12 +152,12 @@ # Sqlite integration, probably with high level library retry -= 1 if retry == 0: - log.error(_(u'too many db tries, we abandon! Error message: {msg}\n' - u'query was {query}' - .format(msg=e, query=u' '.join([unicode(a) for a in args])))) + log.error(_('too many db tries, we abandon! Error message: {msg}\n' + 'query was {query}' + .format(msg=e, query=' '.join([str(a) for a in args])))) raise e log.warning( - _(u'exception while running query, retrying ({try_}): {msg}').format( + _('exception while running query, retrying ({try_}): {msg}').format( try_ = 6 - retry, msg = e)) kw['query_retry'] = retry @@ -175,14 +175,14 @@ retry -= 1 if retry == 0: log.error( - _(u'too many interaction tries, we abandon! Error message: {msg}\n' - u'interaction method was: {interaction}\n' - u'interaction arguments were: {args}' + _('too many interaction tries, we abandon! 
Error message: {msg}\n' + 'interaction method was: {interaction}\n' + 'interaction arguments were: {args}' .format(msg=e, interaction=interaction, - args=u', '.join([unicode(a) for a in args])))) + args=', '.join([str(a) for a in args])))) raise e log.warning( - _(u'exception while running interaction, retrying ({try_}): {msg}') + _('exception while running interaction, retrying ({try_}): {msg}') .format(try_ = 4 - retry, msg = e)) kw['interaction_retry'] = retry return self._runInteraction(interaction, *args, **kw) @@ -204,7 +204,7 @@ if new_base: # the dir may not exist if it's not the XDG recommended one dir_ = os.path.dirname(db_filename) if not os.path.exists(dir_): - os.makedirs(dir_, 0700) + os.makedirs(dir_, 0o700) def foreignKeysOn(sqlite): sqlite.execute('PRAGMA foreign_keys = ON') @@ -240,7 +240,7 @@ if statements is None: return defer.succeed(None) - log.debug(u"\n===== COMMITTING STATEMENTS =====\n%s\n============\n\n" % '\n'.join(statements)) + log.debug("\n===== COMMITTING STATEMENTS =====\n%s\n============\n\n" % '\n'.join(statements)) d = self.dbpool.runInteraction(self._updateDb, tuple(statements)) return d @@ -270,7 +270,7 @@ def getProfilesList(self): """"Return list of all registered profiles""" - return self.profiles.keys() + return list(self.profiles.keys()) def hasProfile(self, profile_name): """return True if profile_name exists @@ -283,13 +283,13 @@ try: return self.profiles[profile_name] in self.components except KeyError: - raise exceptions.NotFound(u"the requested profile doesn't exists") + raise exceptions.NotFound("the requested profile doesn't exists") def getEntryPoint(self, profile_name): try: return self.components[self.profiles[profile_name]] except KeyError: - raise exceptions.NotFound(u"the requested profile doesn't exists or is not a component") + raise exceptions.NotFound("the requested profile doesn't exists or is not a component") def createProfile(self, name, component=None): """Create a new profile @@ -326,7 +326,7 @@ 
@return: deferred triggered once profile is actually deleted """ def deletionError(failure_): - log.error(_(u"Can't delete profile [%s]") % name) + log.error(_("Can't delete profile [%s]") % name) return failure_ def delete(txn): @@ -359,7 +359,7 @@ for param in result: category, name, value = param params_gen[(category, name)] = value - log.debug(_(u"loading general parameters from database")) + log.debug(_("loading general parameters from database")) return self.dbpool.runQuery("SELECT category,name,value FROM param_gen").addCallback(fillParams) def loadIndParams(self, params_ind, profile): @@ -374,7 +374,7 @@ for param in result: category, name, value = param params_ind[(category, name)] = value - log.debug(_(u"loading individual parameters from database")) + log.debug(_("loading individual parameters from database")) d = self.dbpool.runQuery("SELECT category,name,value FROM param_ind WHERE profile_id=?", (self.profiles[profile], )) d.addCallback(fillParams) return d @@ -399,7 +399,7 @@ @param value: value to set @return: deferred""" d = self.dbpool.runQuery("REPLACE INTO param_gen(category,name,value) VALUES (?,?,?)", (category, name, value)) - d.addErrback(lambda ignore: log.error(_(u"Can't set general parameter (%(category)s/%(name)s) in database" % {"category": category, "name": name}))) + d.addErrback(lambda ignore: log.error(_("Can't set general parameter (%(category)s/%(name)s) in database" % {"category": category, "name": name}))) return d def setIndParam(self, category, name, value, profile): @@ -412,7 +412,7 @@ @return: deferred """ d = self.dbpool.runQuery("REPLACE INTO param_ind(category,name,profile_id,value) VALUES (?,?,?,?)", (category, name, self.profiles[profile], value)) - d.addErrback(lambda ignore: log.error(_(u"Can't set individual parameter (%(category)s/%(name)s) for [%(profile)s] in database" % {"category": category, "name": name, "profile": profile}))) + d.addErrback(lambda ignore: log.error(_("Can't set individual parameter 
(%(category)s/%(name)s) for [%(profile)s] in database" % {"category": category, "name": name, "profile": profile}))) return d ## History @@ -423,14 +423,14 @@ uid = data['uid'] d_list = [] for key in ('message', 'subject'): - for lang, value in data[key].iteritems(): + for lang, value in data[key].items(): d = self.dbpool.runQuery( "INSERT INTO {key}(history_uid, {key}, language) VALUES (?,?,?)" .format(key=key), (uid, value, lang or None)) d.addErrback(lambda __: log.error( - _(u"Can't save following {key} in history (uid: {uid}, lang:{lang}):" - u" {value}").format( + _("Can't save following {key} in history (uid: {uid}, lang:{lang}):" + " {value}").format( key=key, uid=uid, lang=lang, value=value))) d_list.append(d) try: @@ -443,8 +443,8 @@ "INSERT INTO thread(history_uid, thread_id, parent_id) VALUES (?,?,?)", (uid, thread, thread_parent)) d.addErrback(lambda __: log.error( - _(u"Can't save following thread in history (uid: {uid}): thread: " - u"{thread}), parent:{parent}").format( + _("Can't save following thread in history (uid: {uid}): thread: " + "{thread}), parent:{parent}").format( uid=uid, thread=thread, parent=thread_parent))) d_list.append(d) return defer.DeferredList(d_list) @@ -453,24 +453,24 @@ failure_.trap(sqlite3.IntegrityError) sqlite_msg = failure_.value.args[0] if "UNIQUE constraint failed" in sqlite_msg: - log.debug(u"message {} is already in history, not storing it again" + log.debug("message {} is already in history, not storing it again" .format(data['uid'])) if 'received_timestamp' not in data: log.warning( - u"duplicate message is not delayed, this is maybe a bug: data={}" + "duplicate message is not delayed, this is maybe a bug: data={}" .format(data)) # we cancel message to avoid sending duplicate message to frontends raise failure.Failure(exceptions.CancelError("Cancelled duplicated message")) else: - log.error(u"Can't store message in history: {}".format(failure_)) + log.error("Can't store message in history: {}".format(failure_)) 
def _logHistoryError(self, failure_, from_jid, to_jid, data): if failure_.check(exceptions.CancelError): # we propagate CancelError to avoid sending message to frontends raise failure_ log.error(_( - u"Can't save following message in history: from [{from_jid}] to [{to_jid}] " - u"(uid: {uid})") + "Can't save following message in history: from [{from_jid}] to [{to_jid}] " + "(uid: {uid})") .format(from_jid=from_jid.full(), to_jid=to_jid.full(), uid=data['uid'])) def addToHistory(self, data, profile): @@ -478,14 +478,14 @@ @param data(dict): message data as build by SatMessageProtocol.onMessage """ - extra = pickle.dumps({k: v for k, v in data['extra'].iteritems() + extra = pickle.dumps({k: v for k, v in data['extra'].items() if k not in NOT_IN_EXTRA}, 0) from_jid = data['from'] to_jid = data['to'] d = self.dbpool.runQuery( - u"INSERT INTO history(uid, stanza_id, update_uid, profile_id, source, dest, " - u"source_res, dest_res, timestamp, received_timestamp, type, extra) VALUES " - u"(?,?,?,?,?,?,?,?,?,?,?,?)", + "INSERT INTO history(uid, stanza_id, update_uid, profile_id, source, dest, " + "source_res, dest_res, timestamp, received_timestamp, type, extra) VALUES " + "(?,?,?,?,?,?,?,?,?,?,?,?)", (data['uid'], data['extra'].get('stanza_id'), data['extra'].get('update_uid'), self.profiles[profile], data['from'].userhost(), to_jid.userhost(), from_jid.resource, to_jid.resource, data['timestamp'], @@ -508,7 +508,7 @@ if uid != current['uid']: # new message try: - extra = pickle.loads(str(extra or "")) + extra = pickle.loads(extra or b"") except EOFError: extra = {} current = { @@ -543,8 +543,8 @@ else: if thread_parent is not None: log.error( - u"Database inconsistency: thread parent without thread (uid: " - u"{uid}, thread_parent: {parent})" + "Database inconsistency: thread parent without thread (uid: " + "{uid}, thread_parent: {parent})" .format(uid=uid, parent=thread_parent)) return result @@ -575,7 +575,7 @@ if limit == 0: return defer.succeed([]) - query_parts = 
[u"SELECT uid, stanza_id, update_uid, source, dest, source_res, dest_res, timestamp, received_timestamp,\ + query_parts = ["SELECT uid, stanza_id, update_uid, source, dest, source_res, dest_res, timestamp, received_timestamp,\ type, extra, message, message.language, subject, subject.language, thread_id, thread.parent_id\ FROM history LEFT JOIN message ON history.uid = message.history_uid\ LEFT JOIN subject ON history.uid=subject.history_uid\ @@ -587,8 +587,8 @@ values.append(jid_.userhost()) if jid_.resource: values.append(jid_.resource) - return u'({type_}=? AND {type_}_res=?)'.format(type_=type_) - return u'{type_}=?'.format(type_=type_) + return '({type_}=? AND {type_}_res=?)'.format(type_=type_) + return '{type_}=?'.format(type_=type_) if not from_jid and not to_jid: # not jid specified, we want all one2one communications @@ -598,15 +598,15 @@ # we only have one jid specified, we check all messages # from or to this jid jid_ = from_jid or to_jid - query_parts.append(u"AND ({source} OR {dest})".format( - source=test_jid(u'source', jid_), - dest=test_jid(u'dest' , jid_))) + query_parts.append("AND ({source} OR {dest})".format( + source=test_jid('source', jid_), + dest=test_jid('dest' , jid_))) else: # we have 2 jids specified, we check all communications between # those 2 jids query_parts.append( - u"AND (({source_from} AND {dest_to}) " - u"OR ({source_to} AND {dest_from}))".format( + "AND (({source_from} AND {dest_to}) " + "OR ({source_to} AND {dest_from}))".format( source_from=test_jid('source', from_jid), dest_to=test_jid('dest', to_jid), source_to=test_jid('source', to_jid), @@ -619,47 +619,47 @@ q.append(test_jid('source', from_jid)) if to_jid is not None: q.append(test_jid('dest', to_jid)) - query_parts.append(u"AND " + u" AND ".join(q)) + query_parts.append("AND " + " AND ".join(q)) if filters: - if u'timestamp_start' in filters: - query_parts.append(u"AND timestamp>= ?") - values.append(float(filters[u'timestamp_start'])) - if u'body' in filters: + if 
'timestamp_start' in filters: + query_parts.append("AND timestamp>= ?") + values.append(float(filters['timestamp_start'])) + if 'body' in filters: # TODO: use REGEXP (function to be defined) instead of GLOB: https://www.sqlite.org/lang_expr.html - query_parts.append(u"AND message LIKE ?") - values.append(u"%{}%".format(filters['body'])) - if u'search' in filters: - query_parts.append(u"AND (message LIKE ? OR source_res LIKE ?)") - values.extend([u"%{}%".format(filters['search'])] * 2) - if u'types' in filters: + query_parts.append("AND message LIKE ?") + values.append("%{}%".format(filters['body'])) + if 'search' in filters: + query_parts.append("AND (message LIKE ? OR source_res LIKE ?)") + values.extend(["%{}%".format(filters['search'])] * 2) + if 'types' in filters: types = filters['types'].split() - query_parts.append(u"AND type IN ({})".format(u','.join("?"*len(types)))) + query_parts.append("AND type IN ({})".format(','.join("?"*len(types)))) values.extend(types) - if u'not_types' in filters: + if 'not_types' in filters: types = filters['not_types'].split() - query_parts.append(u"AND type NOT IN ({})".format(u','.join("?"*len(types)))) + query_parts.append("AND type NOT IN ({})".format(','.join("?"*len(types)))) values.extend(types) - if u'last_stanza_id' in filters: + if 'last_stanza_id' in filters: # this request get the last message with a "stanza_id" that we # have in history. This is mainly used to retrieve messages sent # while we were offline, using MAM (XEP-0313). 
- if (filters[u'last_stanza_id'] is not True + if (filters['last_stanza_id'] is not True or limit != 1): - raise ValueError(u"Unexpected values for last_stanza_id filter") - query_parts.append(u"AND stanza_id IS NOT NULL") + raise ValueError("Unexpected values for last_stanza_id filter") + query_parts.append("AND stanza_id IS NOT NULL") # timestamp may be identical for 2 close messages (specially when delay is # used) that's why we order ties by received_timestamp # We'll reverse the order in sqliteHistoryToList # we use DESC here so LIMIT keep the last messages - query_parts.append(u"ORDER BY timestamp DESC, history.received_timestamp DESC") + query_parts.append("ORDER BY timestamp DESC, history.received_timestamp DESC") if limit is not None: - query_parts.append(u"LIMIT ?") + query_parts.append("LIMIT ?") values.append(limit) - d = self.dbpool.runQuery(u" ".join(query_parts), values) + d = self.dbpool.runQuery(" ".join(query_parts), values) d.addCallback(self.sqliteHistoryToList) d.addCallback(self.listDict2listTuple) return d @@ -668,32 +668,41 @@ def _privateDataEb(self, failure_, operation, namespace, key=None, profile=None): """generic errback for data queries""" - log.error(_(u"Can't {operation} data in database for namespace {namespace}{and_key}{for_profile}: {msg}").format( + log.error(_("Can't {operation} data in database for namespace {namespace}{and_key}{for_profile}: {msg}").format( operation = operation, namespace = namespace, - and_key = (u" and key " + key) if key is not None else u"", - for_profile = (u' [' + profile + u']') if profile is not None else u'', + and_key = (" and key " + key) if key is not None else "", + for_profile = (' [' + profile + ']') if profile is not None else '', msg = failure_)) + def _load_pickle(self, v): + # FIXME: workaround for Python 3 port, some pickled data are byte while other are strings + try: + return pickle.loads(v) + except TypeError: + data = pickle.loads(v.encode('utf-8')) + log.warning(f"encoding issue in 
pickled data: {data}") + return data + def _generateDataDict(self, query_result, binary): if binary: - return {k: pickle.loads(str(v)) for k,v in query_result} + return {k: self._load_pickle(v) for k,v in query_result} else: return dict(query_result) def _getPrivateTable(self, binary, profile): """Get table to use for private values""" - table = [u'private'] + table = ['private'] if profile is None: - table.append(u'gen') + table.append('gen') else: - table.append(u'ind') + table.append('ind') if binary: - table.append(u'bin') + table.append('bin') - return u'_'.join(table) + return '_'.join(table) def getPrivates(self, namespace, keys=None, binary=False, profile=None): """Get private value(s) from databases @@ -706,27 +715,27 @@ None to use general values @return (dict[unicode, object]): gotten keys/values """ - log.debug(_(u"getting {type}{binary} private values from database for namespace {namespace}{keys}".format( - type = u"general" if profile is None else "individual", - binary = u" binary" if binary else u"", + log.debug(_("getting {type}{binary} private values from database for namespace {namespace}{keys}".format( + type = "general" if profile is None else "individual", + binary = " binary" if binary else "", namespace = namespace, - keys = u" with keys {}".format(u", ".join(keys)) if keys is not None else u""))) + keys = " with keys {}".format(", ".join(keys)) if keys is not None else ""))) table = self._getPrivateTable(binary, profile) - query_parts = [u"SELECT key,value FROM", table, "WHERE namespace=?"] + query_parts = ["SELECT key,value FROM", table, "WHERE namespace=?"] args = [namespace] if keys is not None: - placeholders = u','.join(len(keys) * u'?') - query_parts.append(u'AND key IN (' + placeholders + u')') + placeholders = ','.join(len(keys) * '?') + query_parts.append('AND key IN (' + placeholders + ')') args.extend(keys) if profile is not None: - query_parts.append(u'AND profile_id=?') + query_parts.append('AND profile_id=?') 
args.append(self.profiles[profile]) - d = self.dbpool.runQuery(u" ".join(query_parts), args) + d = self.dbpool.runQuery(" ".join(query_parts), args) d.addCallback(self._generateDataDict, binary) - d.addErrback(self._privateDataEb, u"get", namespace, profile=profile) + d.addErrback(self._privateDataEb, "get", namespace, profile=profile) return d def setPrivateValue(self, namespace, key, value, binary=False, profile=None): @@ -741,7 +750,7 @@ if None, it's a general value """ table = self._getPrivateTable(binary, profile) - query_values_names = [u'namespace', u'key', u'value'] + query_values_names = ['namespace', 'key', 'value'] query_values = [namespace, key] if binary: @@ -750,14 +759,14 @@ query_values.append(value) if profile is not None: - query_values_names.append(u'profile_id') + query_values_names.append('profile_id') query_values.append(self.profiles[profile]) - query_parts = [u"REPLACE INTO", table, u'(', u','.join(query_values_names), u')', - u"VALUES (", u",".join(u'?'*len(query_values_names)), u')'] + query_parts = ["REPLACE INTO", table, '(', ','.join(query_values_names), ')', + "VALUES (", ",".join('?'*len(query_values_names)), ')'] - d = self.dbpool.runQuery(u" ".join(query_parts), query_values) - d.addErrback(self._privateDataEb, u"set", namespace, key, profile=profile) + d = self.dbpool.runQuery(" ".join(query_parts), query_values) + d.addErrback(self._privateDataEb, "set", namespace, key, profile=profile) return d def delPrivateValue(self, namespace, key, binary=False, profile=None): @@ -770,13 +779,13 @@ if None, it's a general value """ table = self._getPrivateTable(binary, profile) - query_parts = [u"DELETE FROM", table, u"WHERE namespace=? AND key=?"] + query_parts = ["DELETE FROM", table, "WHERE namespace=? 
AND key=?"] args = [namespace, key] if profile is not None: - query_parts.append(u"AND profile_id=?") + query_parts.append("AND profile_id=?") args.append(self.profiles[profile]) - d = self.dbpool.runQuery(u" ".join(query_parts), args) - d.addErrback(self._privateDataEb, u"delete", namespace, key, profile=profile) + d = self.dbpool.runQuery(" ".join(query_parts), args) + d.addErrback(self._privateDataEb, "delete", namespace, key, profile=profile) return d def delPrivateNamespace(self, namespace, binary=False, profile=None): @@ -787,19 +796,19 @@ Params are the same as for delPrivateValue """ table = self._getPrivateTable(binary, profile) - query_parts = [u"DELETE FROM", table, u"WHERE namespace=?"] + query_parts = ["DELETE FROM", table, "WHERE namespace=?"] args = [namespace] if profile is not None: - query_parts.append(u"AND profile_id=?") + query_parts.append("AND profile_id=?") args.append(self.profiles[profile]) - d = self.dbpool.runQuery(u" ".join(query_parts), args) - d.addErrback(self._privateDataEb, u"delete namespace", namespace, profile=profile) + d = self.dbpool.runQuery(" ".join(query_parts), args) + d.addErrback(self._privateDataEb, "delete namespace", namespace, profile=profile) return d ## Files @defer.inlineCallbacks - def getFiles(self, client, file_id=None, version=u'', parent=None, type_=None, + def getFiles(self, client, file_id=None, version='', parent=None, type_=None, file_hash=None, hash_algo=None, name=None, namespace=None, mime_type=None, owner=None, access=None, projection=None, unique=False): """retrieve files with with given filters @@ -831,45 +840,45 @@ args = [self.profiles[client.profile]] if file_id is not None: - filters.append(u'id=?') + filters.append('id=?') args.append(file_id) if version is not None: - filters.append(u'version=?') + filters.append('version=?') args.append(version) if parent is not None: - filters.append(u'parent=?') + filters.append('parent=?') args.append(parent) if type_ is not None: - 
filters.append(u'type=?') + filters.append('type=?') args.append(type_) if file_hash is not None: - filters.append(u'file_hash=?') + filters.append('file_hash=?') args.append(file_hash) if hash_algo is not None: - filters.append(u'hash_algo=?') + filters.append('hash_algo=?') args.append(hash_algo) if name is not None: - filters.append(u'name=?') + filters.append('name=?') args.append(name) if namespace is not None: - filters.append(u'namespace=?') + filters.append('namespace=?') args.append(namespace) if mime_type is not None: - filters.append(u'mime_type=?') + filters.append('mime_type=?') args.append(mime_type) if owner is not None: - filters.append(u'owner=?') + filters.append('owner=?') args.append(owner.full()) if access is not None: raise NotImplementedError('Access check is not implemented yet') # a JSON comparison is needed here - filters = u' AND '.join(filters) + filters = ' AND '.join(filters) query_parts.append(filters) - query = u' '.join(query_parts) + query = ' '.join(query_parts) result = yield self.dbpool.runQuery(query, args) - files_data = [dict(zip(projection, row)) for row in result] + files_data = [dict(list(zip(projection, row))) for row in result] to_parse = {'access', 'extra'}.intersection(projection) to_filter = {'owner'}.intersection(projection) if to_parse or to_filter: @@ -882,7 +891,7 @@ file_data['owner'] = jid.JID(owner) defer.returnValue(files_data) - def setFile(self, client, name, file_id, version=u'', parent=None, type_=C.FILE_TYPE_FILE, + def setFile(self, client, name, file_id, version='', parent=None, type_=C.FILE_TYPE_FILE, file_hash=None, hash_algo=None, size=None, namespace=None, mime_type=None, created=None, modified=None, owner=None, access=None, extra=None): """set a file metadata @@ -921,12 +930,12 @@ json.dumps(access) if access else None, json.dumps(extra) if extra else None, self.profiles[client.profile])) - d.addErrback(lambda failure: log.error(_(u"Can't save file metadata for [{profile}]: 
{reason}".format(profile=client.profile, reason=failure)))) + d.addErrback(lambda failure: log.error(_("Can't save file metadata for [{profile}]: {reason}".format(profile=client.profile, reason=failure)))) return d def _fileUpdate(self, cursor, file_id, column, update_cb): query = 'SELECT {column} FROM files where id=?'.format(column=column) - for i in xrange(5): + for i in range(5): cursor.execute(query, [file_id]) try: older_value_raw = cursor.fetchone()[0] @@ -951,9 +960,9 @@ else: if cursor.rowcount == 1: break; - log.warning(_(u"table not updated, probably due to race condition, trying again ({tries})").format(tries=i+1)) + log.warning(_("table not updated, probably due to race condition, trying again ({tries})").format(tries=i+1)) else: - log.error(_(u"Can't update file table")) + log.error(_("Can't update file table")) def fileUpdate(self, file_id, column, update_cb): """Update a column value using a method to avoid race conditions @@ -1072,17 +1081,17 @@ update_raw = yield self.update2raw(update_data, True) defer.returnValue(update_raw) else: - log.error(_(u"schema version is up-to-date, but local schema differ from expected current schema")) + log.error(_("schema version is up-to-date, but local schema differ from expected current schema")) update_data = self.generateUpdateData(local_sch, current_sch, True) update_raw = yield self.update2raw(update_data) - log.warning(_(u"Here are the commands that should fix the situation, use at your own risk (do a backup before modifying database), you can go to SàT's MUC room at sat@chat.jabberfr.org for help\n### SQL###\n%s\n### END SQL ###\n") % u'\n'.join("%s;" % statement for statement in update_raw)) + log.warning(_("Here are the commands that should fix the situation, use at your own risk (do a backup before modifying database), you can go to SàT's MUC room at sat@chat.jabberfr.org for help\n### SQL###\n%s\n### END SQL ###\n") % '\n'.join("%s;" % statement for statement in update_raw)) raise 
exceptions.DatabaseError("Database mismatch") else: if local_version > CURRENT_DB_VERSION: log.error(_( - u"You database version is higher than the one used in this SàT " - u"version, are you using several version at the same time? We " - u"can't run SàT with this database.")) + "You database version is higher than the one used in this SàT " + "version, are you using several version at the same time? We " + "can't run SàT with this database.")) sys.exit(1) # Database is not up-to-date, we'll do the update @@ -1091,7 +1100,7 @@ else: log.info(_("Database schema has changed, local database will be updated")) update_raw = [] - for version in xrange(local_version + 1, CURRENT_DB_VERSION + 1): + for version in range(local_version + 1, CURRENT_DB_VERSION + 1): try: update_data = DATABASE_SCHEMAS[version] except KeyError: @@ -1150,17 +1159,17 @@ ret = [] assert isinstance(data, tuple) for table, col_data in data: - assert isinstance(table, basestring) + assert isinstance(table, str) assert isinstance(col_data, tuple) for cols in col_data: if isinstance(cols, tuple): - assert all([isinstance(c, basestring) for c in cols]) - indexed_cols = u','.join(cols) - elif isinstance(cols, basestring): + assert all([isinstance(c, str) for c in cols]) + indexed_cols = ','.join(cols) + elif isinstance(cols, str): indexed_cols = cols else: - raise exceptions.InternalError(u"unexpected index columns value") - index_name = table + u'__' + indexed_cols.replace(u',', u'_') + raise exceptions.InternalError("unexpected index columns value") + index_name = table + '__' + indexed_cols.replace(',', '_') ret.append(Updater.INDEX_SQL % (index_name, table, indexed_cols)) return ret @@ -1173,7 +1182,7 @@ @return: hash as string """ hash_ = hashlib.sha1() - tables = data.keys() + tables = list(data.keys()) tables.sort() def stmnts2str(stmts): @@ -1181,7 +1190,9 @@ for table in tables: col_defs, col_constr = data[table] - hash_.update("%s:%s:%s" % (table, stmnts2str(col_defs), stmnts2str(col_constr))) 
+ hash_.update( + ("%s:%s:%s" % (table, stmnts2str(col_defs), stmnts2str(col_constr))) + .encode('utf-8')) return hash_.digest() def rawStatements2data(self, raw_statements): @@ -1324,7 +1335,7 @@ def update_v8(self): """Update database from v7 to v8 (primary keys order changes + indexes)""" - log.info(u"Database update to v8") + log.info("Database update to v8") statements = ["PRAGMA foreign_keys = OFF"] # here is a copy of create and index data, we can't use "current" table @@ -1357,11 +1368,11 @@ schema = {table: create[table]} cols = [d.split()[0] for d in schema[table][0]] statements.extend(Updater.createData2Raw(schema)) - statements.append(u"INSERT INTO {table}({cols}) " - u"SELECT {cols} FROM {table}_old".format( + statements.append("INSERT INTO {table}({cols}) " + "SELECT {cols} FROM {table}_old".format( table=table, - cols=u','.join(cols))) - statements.append(u"DROP TABLE {}_old".format(table)) + cols=','.join(cols))) + statements.append("DROP TABLE {}_old".format(table)) statements.extend(Updater.indexData2Raw(index)) statements.append("PRAGMA foreign_keys = ON") @@ -1370,48 +1381,48 @@ @defer.inlineCallbacks def update_v7(self): """Update database from v6 to v7 (history unique constraint change)""" - log.info(u"Database update to v7, this may be long depending on your history " - u"size, please be patient.") + log.info("Database update to v7, this may be long depending on your history " + "size, please be patient.") - log.info(u"Some cleaning first") + log.info("Some cleaning first") # we need to fix duplicate stanza_id, as it can result in conflicts with the new schema # normally database should not contain any, but better safe than sorry. 
rows = yield self.dbpool.runQuery( - u"SELECT stanza_id, COUNT(*) as c FROM history WHERE stanza_id is not NULL " - u"GROUP BY stanza_id HAVING c>1") + "SELECT stanza_id, COUNT(*) as c FROM history WHERE stanza_id is not NULL " + "GROUP BY stanza_id HAVING c>1") if rows: count = sum([r[1] for r in rows]) - len(rows) - log.info(u"{count} duplicate stanzas found, cleaning".format(count=count)) + log.info("{count} duplicate stanzas found, cleaning".format(count=count)) for stanza_id, count in rows: - log.info(u"cleaning duplicate stanza {stanza_id}".format(stanza_id=stanza_id)) + log.info("cleaning duplicate stanza {stanza_id}".format(stanza_id=stanza_id)) row_uids = yield self.dbpool.runQuery( "SELECT uid FROM history WHERE stanza_id = ? LIMIT ?", (stanza_id, count-1)) uids = [r[0] for r in row_uids] yield self.dbpool.runQuery( - "DELETE FROM history WHERE uid IN ({})".format(u",".join(u"?"*len(uids))), + "DELETE FROM history WHERE uid IN ({})".format(",".join("?"*len(uids))), uids) def deleteInfo(txn): # with foreign_keys on, the delete takes ages, so we deactivate it here # the time to delete info messages from history. 
txn.execute("PRAGMA foreign_keys = OFF") - txn.execute(u"DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM subject WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM thread WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " - u"type='info')") - txn.execute(u"DELETE FROM history WHERE type='info'") + txn.execute("DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM subject WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM thread WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM message WHERE history_uid IN (SELECT uid FROM history WHERE " + "type='info')") + txn.execute("DELETE FROM history WHERE type='info'") # not sure that is is necessary to reactivate here, but in doubt… txn.execute("PRAGMA foreign_keys = ON") - log.info(u'Deleting "info" messages (this can take a while)') + log.info('Deleting "info" messages (this can take a while)') yield self.dbpool.runInteraction(deleteInfo) - log.info(u"Cleaning done") + log.info("Cleaning done") # we have to rename table we will replace # tables referencing history need to be replaced to, else reference would @@ -1423,68 +1434,68 @@ yield self.dbpool.runQuery("ALTER TABLE thread RENAME TO thread_old") # history - query = (u"CREATE TABLE history (uid TEXT PRIMARY KEY, stanza_id TEXT, " - u"update_uid TEXT, profile_id INTEGER, source TEXT, dest TEXT, " - u"source_res TEXT, dest_res TEXT, timestamp DATETIME NOT NULL, " - u"received_timestamp DATETIME, type TEXT, extra BLOB, " - u"FOREIGN KEY(profile_id) REFERENCES profiles(id) ON DELETE CASCADE, " - u"FOREIGN KEY(type) REFERENCES message_types(type), " - u"UNIQUE (profile_id, stanza_id, 
source, dest))") + query = ("CREATE TABLE history (uid TEXT PRIMARY KEY, stanza_id TEXT, " + "update_uid TEXT, profile_id INTEGER, source TEXT, dest TEXT, " + "source_res TEXT, dest_res TEXT, timestamp DATETIME NOT NULL, " + "received_timestamp DATETIME, type TEXT, extra BLOB, " + "FOREIGN KEY(profile_id) REFERENCES profiles(id) ON DELETE CASCADE, " + "FOREIGN KEY(type) REFERENCES message_types(type), " + "UNIQUE (profile_id, stanza_id, source, dest))") yield self.dbpool.runQuery(query) # message - query = (u"CREATE TABLE message (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" - u", message TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " - u"history(uid) ON DELETE CASCADE)") + query = ("CREATE TABLE message (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" + ", message TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " + "history(uid) ON DELETE CASCADE)") yield self.dbpool.runQuery(query) # subject - query = (u"CREATE TABLE subject (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" - u", subject TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " - u"history(uid) ON DELETE CASCADE)") + query = ("CREATE TABLE subject (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" + ", subject TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES " + "history(uid) ON DELETE CASCADE)") yield self.dbpool.runQuery(query) # thread - query = (u"CREATE TABLE thread (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" - u", thread_id TEXT, parent_id TEXT, FOREIGN KEY(history_uid) REFERENCES " - u"history(uid) ON DELETE CASCADE)") + query = ("CREATE TABLE thread (id INTEGER PRIMARY KEY ASC, history_uid INTEGER" + ", thread_id TEXT, parent_id TEXT, FOREIGN KEY(history_uid) REFERENCES " + "history(uid) ON DELETE CASCADE)") yield self.dbpool.runQuery(query) - log.info(u"Now transfering old data to new tables, please be patient.") + log.info("Now transfering old data to new tables, please be patient.") - log.info(u"\nTransfering table history") - query = (u"INSERT 
INTO history (uid, stanza_id, update_uid, profile_id, source, " - u"dest, source_res, dest_res, timestamp, received_timestamp, type, extra" - u") SELECT uid, stanza_id, update_uid, profile_id, source, dest, " - u"source_res, dest_res, timestamp, received_timestamp, type, extra " - u"FROM history_old") + log.info("\nTransfering table history") + query = ("INSERT INTO history (uid, stanza_id, update_uid, profile_id, source, " + "dest, source_res, dest_res, timestamp, received_timestamp, type, extra" + ") SELECT uid, stanza_id, update_uid, profile_id, source, dest, " + "source_res, dest_res, timestamp, received_timestamp, type, extra " + "FROM history_old") yield self.dbpool.runQuery(query) - log.info(u"\nTransfering table message") - query = (u"INSERT INTO message (id, history_uid, message, language) SELECT id, " - u"history_uid, message, language FROM message_old") + log.info("\nTransfering table message") + query = ("INSERT INTO message (id, history_uid, message, language) SELECT id, " + "history_uid, message, language FROM message_old") yield self.dbpool.runQuery(query) - log.info(u"\nTransfering table subject") - query = (u"INSERT INTO subject (id, history_uid, subject, language) SELECT id, " - u"history_uid, subject, language FROM subject_old") + log.info("\nTransfering table subject") + query = ("INSERT INTO subject (id, history_uid, subject, language) SELECT id, " + "history_uid, subject, language FROM subject_old") yield self.dbpool.runQuery(query) - log.info(u"\nTransfering table thread") - query = (u"INSERT INTO thread (id, history_uid, thread_id, parent_id) SELECT id" - u", history_uid, thread_id, parent_id FROM thread_old") + log.info("\nTransfering table thread") + query = ("INSERT INTO thread (id, history_uid, thread_id, parent_id) SELECT id" + ", history_uid, thread_id, parent_id FROM thread_old") yield self.dbpool.runQuery(query) - log.info(u"\nRemoving old tables") + log.info("\nRemoving old tables") # because of foreign keys, tables referencing 
history_old # must be deleted first yield self.dbpool.runQuery("DROP TABLE thread_old") yield self.dbpool.runQuery("DROP TABLE subject_old") yield self.dbpool.runQuery("DROP TABLE message_old") yield self.dbpool.runQuery("DROP TABLE history_old") - log.info(u"\nReducing database size (this can take a while)") + log.info("\nReducing database size (this can take a while)") yield self.dbpool.runQuery("VACUUM") - log.info(u"Database update done :)") + log.info("Database update done :)") @defer.inlineCallbacks def update_v3(self): @@ -1494,7 +1505,7 @@ # big database for tests. If issues are happening, we can cut it # in smaller transactions using LIMIT and by deleting already updated # messages - log.info(u"Database update to v3, this may take a while") + log.info("Database update to v3, this may take a while") # we need to fix duplicate timestamp, as it can result in conflicts with the new schema rows = yield self.dbpool.runQuery("SELECT timestamp, COUNT(*) as c FROM history GROUP BY timestamp HAVING c>1") @@ -1506,10 +1517,10 @@ for idx, (id_,) in enumerate(ids_rows): fixed.append(id_) yield self.dbpool.runQuery("UPDATE history SET timestamp=? 
WHERE id=?", (float(timestamp) + idx * 0.001, id_)) - log.info(u"fixed messages with ids {}".format(u', '.join([unicode(id_) for id_ in fixed]))) + log.info("fixed messages with ids {}".format(', '.join([str(id_) for id_ in fixed]))) def historySchema(txn): - log.info(u"History schema update") + log.info("History schema update") txn.execute("ALTER TABLE history RENAME TO tmp_sat_update") txn.execute("CREATE TABLE history (uid TEXT PRIMARY KEY, update_uid TEXT, profile_id INTEGER, source TEXT, dest TEXT, source_res TEXT, dest_res TEXT, timestamp DATETIME NOT NULL, received_timestamp DATETIME, type TEXT, extra BLOB, FOREIGN KEY(profile_id) REFERENCES profiles(id) ON DELETE CASCADE, FOREIGN KEY(type) REFERENCES message_types(type), UNIQUE (profile_id, timestamp, source, dest, source_res, dest_res))") txn.execute("INSERT INTO history (uid, profile_id, source, dest, source_res, dest_res, timestamp, type, extra) SELECT id, profile_id, source, dest, source_res, dest_res, timestamp, type, extra FROM tmp_sat_update") @@ -1517,17 +1528,17 @@ yield self.dbpool.runInteraction(historySchema) def newTables(txn): - log.info(u"Creating new tables") + log.info("Creating new tables") txn.execute("CREATE TABLE message (id INTEGER PRIMARY KEY ASC, history_uid INTEGER, message TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES history(uid) ON DELETE CASCADE)") txn.execute("CREATE TABLE thread (id INTEGER PRIMARY KEY ASC, history_uid INTEGER, thread_id TEXT, parent_id TEXT, FOREIGN KEY(history_uid) REFERENCES history(uid) ON DELETE CASCADE)") txn.execute("CREATE TABLE subject (id INTEGER PRIMARY KEY ASC, history_uid INTEGER, subject TEXT, language TEXT, FOREIGN KEY(history_uid) REFERENCES history(uid) ON DELETE CASCADE)") yield self.dbpool.runInteraction(newTables) - log.info(u"inserting new message type") + log.info("inserting new message type") yield self.dbpool.runQuery("INSERT INTO message_types VALUES (?)", ('info',)) - log.info(u"messages update") + log.info("messages 
update") rows = yield self.dbpool.runQuery("SELECT id, timestamp, message, extra FROM tmp_sat_update") total = len(rows) @@ -1545,7 +1556,7 @@ except EOFError: extra = {} except Exception: - log.warning(u"Can't handle extra data for message id {}, ignoring it".format(id_)) + log.warning("Can't handle extra data for message id {}, ignoring it".format(id_)) extra = {} queries.append(("INSERT INTO message(history_uid, message) VALUES (?,?)", (id_, message))) @@ -1556,9 +1567,9 @@ pass else: try: - subject = subject.decode('utf-8') + subject = subject except UnicodeEncodeError: - log.warning(u"Error while decoding subject, ignoring it") + log.warning("Error while decoding subject, ignoring it") del extra['subject'] else: queries.append(("INSERT INTO subject(history_uid, subject) VALUES (?,?)", (id_, subject))) @@ -1597,7 +1608,7 @@ try: id_ = result[0][0] except IndexError: - log.error(u"Profile of id %d is referenced in 'param_ind' but it doesn't exist!" % profile_id) + log.error("Profile of id %d is referenced in 'param_ind' but it doesn't exist!" % profile_id) return defer.succeed(None) sat_password = xmpp_password diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_adhoc_dbus.py --- a/sat/plugins/plugin_adhoc_dbus.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_adhoc_dbus.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for adding D-Bus to Ad-Hoc Commands @@ -30,8 +30,8 @@ from lxml import etree except ImportError: etree = None - log.warning(u"Missing module lxml, please download/install it from http://lxml.de/ ." - u"Auto D-Bus discovery will be disabled") + log.warning("Missing module lxml, please download/install it from http://lxml.de/ ." 
+ "Auto D-Bus discovery will be disabled") from collections import OrderedDict import os.path import uuid @@ -40,8 +40,8 @@ from dbus.mainloop.glib import DBusGMainLoop except ImportError: dbus = None - log.warning(u"Missing module dbus, please download/install it" - u"auto D-Bus discovery will be disabled") + log.warning("Missing module dbus, please download/install it" + "auto D-Bus discovery will be disabled") else: DBusGMainLoop(set_as_default=True) @@ -50,18 +50,18 @@ FD_NAME = "org.freedesktop.DBus" FD_PATH = "/org/freedekstop/DBus" INTROSPECT_IFACE = "org.freedesktop.DBus.Introspectable" -MPRIS_PREFIX = u"org.mpris.MediaPlayer2" -CMD_GO_BACK = u"GoBack" -CMD_GO_FWD = u"GoFW" +MPRIS_PREFIX = "org.mpris.MediaPlayer2" +CMD_GO_BACK = "GoBack" +CMD_GO_FWD = "GoFW" SEEK_OFFSET = 5 * 1000 * 1000 -MPRIS_COMMANDS = [u"org.mpris.MediaPlayer2.Player." + cmd for cmd in ( - u"Previous", CMD_GO_BACK, u"PlayPause", CMD_GO_FWD, u"Next")] -MPRIS_PATH = u"/org/mpris/MediaPlayer2" +MPRIS_COMMANDS = ["org.mpris.MediaPlayer2.Player." 
+ cmd for cmd in ( + "Previous", CMD_GO_BACK, "PlayPause", CMD_GO_FWD, "Next")] +MPRIS_PATH = "/org/mpris/MediaPlayer2" MPRIS_PROPERTIES = OrderedDict(( - (u"org.mpris.MediaPlayer2", ( + ("org.mpris.MediaPlayer2", ( "Identity", )), - (u"org.mpris.MediaPlayer2.Player", ( + ("org.mpris.MediaPlayer2.Player", ( "Metadata", "PlaybackStatus", "Volume", @@ -69,7 +69,7 @@ )) MPRIS_METADATA_KEY = "Metadata" MPRIS_METADATA_MAP = OrderedDict(( - ("xesam:title", u"Title"), + ("xesam:title", "Title"), )) INTROSPECT_METHOD = "Introspect" @@ -88,7 +88,7 @@ C.PI_DEPENDENCIES: ["XEP-0050"], C.PI_MAIN: "AdHocDBus", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Add D-Bus management to Ad-Hoc commands"""), + C.PI_DESCRIPTION: _("""Add D-Bus management to Ad-Hoc commands"""), } @@ -104,7 +104,7 @@ in_sign="sasasasasasass", out_sign="(sa(sss))", method=self._adHocDBusAddAuto, - async=True, + async_=True, ) host.bridge.addMethod( "adHocRemotesGet", @@ -112,10 +112,10 @@ in_sign="s", out_sign="a(sss)", method=self._adHocRemotesGet, - async=True, + async_=True, ) self._c = host.plugins["XEP-0050"] - host.registerNamespace(u"mediaplayer", NS_MEDIA_PLAYER) + host.registerNamespace("mediaplayer", NS_MEDIA_PLAYER) if dbus is not None: self.session_bus = dbus.SessionBus() self.fd_object = self.session_bus.get_object( @@ -124,7 +124,7 @@ def profileConnected(self, client): if dbus is not None: self._c.addAdHocCommand( - client, self.localMediaCb, D_(u"Media Players"), + client, self.localMediaCb, D_("Media Players"), node=NS_MEDIA_PLAYER, timeout=60*60*6 # 6 hours timeout, to avoid breaking remote # in the middle of a movie @@ -151,7 +151,7 @@ def _DBusGetProperty(self, proxy, interface, name): return self._DBusAsyncCall( - proxy, u"Get", interface, name, interface=u"org.freedesktop.DBus.Properties") + proxy, "Get", interface, name, interface="org.freedesktop.DBus.Properties") def _DBusListNames(self): @@ -271,7 +271,7 @@ elif len(actions) == 2: # we should have the answer here try: - x_elt = 
command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) answer_form = data_form.Form.fromElement(x_elt) command = answer_form["command"] except (KeyError, StopIteration): @@ -295,11 +295,11 @@ return DBusCallback( client, None, session_data, self._c.ACTION.EXECUTE, node ) - form = data_form.Form("form", title=_(u"Updated")) - form.addField(data_form.Field("fixed", u"Command sent")) + form = data_form.Form("form", title=_("Updated")) + form.addField(data_form.Field("fixed", "Command sent")) status = self._c.STATUS.COMPLETED payload = None - note = (self._c.NOTE.INFO, _(u"Command sent")) + note = (self._c.NOTE.INFO, _("Command sent")) else: raise self._c.AdHocError(self._c.ERROR.INTERNAL) @@ -363,18 +363,18 @@ opt.label or opt.value)) except Exception as e: log.warning(_( - u"Can't retrieve remote controllers on {device_jid}: " - u"{reason}".format(device_jid=device_jid, reason=e))) + "Can't retrieve remote controllers on {device_jid}: " + "{reason}".format(device_jid=device_jid, reason=e))) break defer.returnValue(remotes) def doMPRISCommand(self, proxy, command): - iface, command = command.rsplit(u".", 1) + iface, command = command.rsplit(".", 1) if command == CMD_GO_BACK: - command = u'Seek' + command = 'Seek' args = [-SEEK_OFFSET] elif command == CMD_GO_FWD: - command = u'Seek' + command = 'Seek' args = [SEEK_OFFSET] else: args = [] @@ -382,17 +382,17 @@ def addMPRISMetadata(self, form, metadata): """Serialise MRPIS Metadata according to MPRIS_METADATA_MAP""" - for mpris_key, name in MPRIS_METADATA_MAP.iteritems(): + for mpris_key, name in MPRIS_METADATA_MAP.items(): if mpris_key in metadata: - value = unicode(metadata[mpris_key]) - form.addField(data_form.Field(fieldType=u"fixed", + value = str(metadata[mpris_key]) + form.addField(data_form.Field(fieldType="fixed", var=name, value=value)) @defer.inlineCallbacks def localMediaCb(self, client, command_elt, session_data, action, node): try: - x_elt = 
command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None @@ -402,16 +402,16 @@ bus_names = yield self._DBusListNames() bus_names = [b for b in bus_names if b.startswith(MPRIS_PREFIX)] if len(bus_names) == 0: - note = (self._c.NOTE.INFO, D_(u"No media player found.")) + note = (self._c.NOTE.INFO, D_("No media player found.")) defer.returnValue((None, self._c.STATUS.COMPLETED, None, note)) options = [] status = self._c.STATUS.EXECUTING - form = data_form.Form("form", title=D_(u"Media Player Selection"), + form = data_form.Form("form", title=D_("Media Player Selection"), formNamespace=NS_MEDIA_PLAYER) for bus in bus_names: player_name = bus[len(MPRIS_PREFIX)+1:] if not player_name: - log.warning(_(u"Ignoring MPRIS bus without suffix")) + log.warning(_("Ignoring MPRIS bus without suffix")) continue options.append(data_form.Option(bus, player_name)) field = data_form.Field( @@ -423,53 +423,53 @@ else: # player request try: - bus_name = command_form[u"media_player"] + bus_name = command_form["media_player"] except KeyError: - raise ValueError(_(u"missing media_player value")) + raise ValueError(_("missing media_player value")) if not bus_name.startswith(MPRIS_PREFIX): - log.warning(_(u"Media player ad-hoc command trying to use non MPRIS bus. " - u"Hack attempt? Refused bus: {bus_name}").format( + log.warning(_("Media player ad-hoc command trying to use non MPRIS bus. " + "Hack attempt? 
Refused bus: {bus_name}").format( bus_name=bus_name)) - note = (self._c.NOTE.ERROR, D_(u"Invalid player name.")) + note = (self._c.NOTE.ERROR, D_("Invalid player name.")) defer.returnValue((None, self._c.STATUS.COMPLETED, None, note)) try: proxy = self.session_bus.get_object(bus_name, MPRIS_PATH) except dbus.exceptions.DBusException as e: - log.warning(_(u"Can't get D-Bus proxy: {reason}").format(reason=e)) - note = (self._c.NOTE.ERROR, D_(u"Media player is not available anymore")) + log.warning(_("Can't get D-Bus proxy: {reason}").format(reason=e)) + note = (self._c.NOTE.ERROR, D_("Media player is not available anymore")) defer.returnValue((None, self._c.STATUS.COMPLETED, None, note)) try: - command = command_form[u"command"] + command = command_form["command"] except KeyError: pass else: yield self.doMPRISCommand(proxy, command) # we construct the remote control form - form = data_form.Form("form", title=D_(u"Media Player Selection")) - form.addField(data_form.Field(fieldType=u"hidden", - var=u"media_player", + form = data_form.Form("form", title=D_("Media Player Selection")) + form.addField(data_form.Field(fieldType="hidden", + var="media_player", value=bus_name)) - for iface, properties_names in MPRIS_PROPERTIES.iteritems(): + for iface, properties_names in MPRIS_PROPERTIES.items(): for name in properties_names: try: value = yield self._DBusGetProperty(proxy, iface, name) except Exception as e: - log.warning(_(u"Can't retrieve attribute {name}: {reason}") + log.warning(_("Can't retrieve attribute {name}: {reason}") .format(name=name, reason=e)) continue if name == MPRIS_METADATA_KEY: self.addMPRISMetadata(form, value) else: - form.addField(data_form.Field(fieldType=u"fixed", + form.addField(data_form.Field(fieldType="fixed", var=name, - value=unicode(value))) + value=str(value))) - commands = [data_form.Option(c, c.rsplit(u".", 1)[1]) for c in MPRIS_COMMANDS] - form.addField(data_form.Field(fieldType=u"list-single", - var=u"command", + commands = 
[data_form.Option(c, c.rsplit(".", 1)[1]) for c in MPRIS_COMMANDS] + form.addField(data_form.Field(fieldType="list-single", + var="command", options=commands, required=True)) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_blog_import.py --- a/sat/plugins/plugin_blog_import.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_blog_import.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for import external blogs @@ -31,7 +31,7 @@ import os import os.path import tempfile -import urlparse +import urllib.parse import shortuuid @@ -43,7 +43,7 @@ C.PI_MAIN: "BlogImportPlugin", C.PI_HANDLER: "no", C.PI_DESCRIPTION: _( - u"""Blog import management: + """Blog import management: This plugin manage the different blog importers which can register to it, and handle generic importing tasks.""" ), } @@ -67,7 +67,7 @@ self._p = host.plugins["XEP-0060"] self._m = host.plugins["XEP-0277"] self._s = self.host.plugins["TEXT_SYNTAXES"] - host.plugins["IMPORT"].initialize(self, u"blog") + host.plugins["IMPORT"].initialize(self, "blog") def importItem( self, client, item_import_data, session, options, return_data, service, node @@ -107,7 +107,7 @@ try: item_id = mb_data["id"] except KeyError: - item_id = mb_data["id"] = unicode(shortuuid.uuid()) + item_id = mb_data["id"] = str(shortuuid.uuid()) try: # we keep the link between old url and new blog item @@ -121,7 +121,7 @@ node or self._m.namespace, item_id, ) - log.info(u"url link from {old} to {new}".format(old=old_uri, new=new_uri)) + log.info("url link from {old} to {new}".format(old=old_uri, new=new_uri)) return mb_data @@ -129,7 +129,7 @@ def importSubItems(self, client, item_import_data, mb_data, session, options): # comments data if len(item_import_data["comments"]) != 1: - raise NotImplementedError(u"can't manage multiple comment links") + raise NotImplementedError("can't manage multiple comment links") allow_comments = 
C.bool(mb_data.get("allow_comments", C.BOOL_FALSE)) if allow_comments: comments_service = yield self._m.getCommentsService(client) @@ -145,13 +145,13 @@ else: if item_import_data["comments"][0]: raise exceptions.DataError( - u"allow_comments set to False, but comments are there" + "allow_comments set to False, but comments are there" ) defer.returnValue(None) def publishItem(self, client, mb_data, service, node, session): log.debug( - u"uploading item [{id}]: {title}".format( + "uploading item [{id}]: {title}".format( id=mb_data["id"], title=mb_data.get("title", "") ) ) @@ -182,7 +182,7 @@ else: if "{}_xhtml".format(prefix) in mb_data: raise exceptions.DataError( - u"importer gave {prefix}_rich and {prefix}_xhtml at the same time, this is not allowed".format( + "importer gave {prefix}_rich and {prefix}_xhtml at the same time, this is not allowed".format( prefix=prefix ) ) @@ -200,14 +200,14 @@ else: if "{}_xhtml".format(prefix) in mb_data: log.warning( - u"{prefix}_text will be replaced by converted {prefix}_xhtml, so filters can be handled".format( + "{prefix}_text will be replaced by converted {prefix}_xhtml, so filters can be handled".format( prefix=prefix ) ) del mb_data["{}_text".format(prefix)] else: log.warning( - u"importer gave a text {prefix}, blog filters don't work on text {prefix}".format( + "importer gave a text {prefix}, blog filters don't work on text {prefix}".format( prefix=prefix ) ) @@ -225,8 +225,8 @@ opt_host = options.get(OPT_HOST) if opt_host: # we normalise the domain - parsed_host = urlparse.urlsplit(opt_host) - opt_host = urlparse.urlunsplit( + parsed_host = urllib.parse.urlsplit(opt_host) + opt_host = urllib.parse.urlunsplit( ( parsed_host.scheme or "http", parsed_host.netloc or parsed_host.path, @@ -239,7 +239,7 @@ tmp_dir = tempfile.mkdtemp() try: # TODO: would be nice to also update the hyperlinks to these images, e.g. 
when you have - for img_elt in xml_tools.findAll(top_elt, names=[u"img"]): + for img_elt in xml_tools.findAll(top_elt, names=["img"]): yield self.imgFilters(client, img_elt, options, opt_host, tmp_dir) finally: os.rmdir(tmp_dir) # XXX: tmp_dir should be empty, or something went wrong @@ -260,21 +260,21 @@ """ try: url = img_elt["src"] - if url[0] == u"/": + if url[0] == "/": if not opt_host: log.warning( - u"host was not specified, we can't deal with src without host ({url}) and have to ignore the following :\n{xml}".format( + "host was not specified, we can't deal with src without host ({url}) and have to ignore the following :\n{xml}".format( url=url, xml=img_elt.toXml() ) ) return else: - url = urlparse.urljoin(opt_host, url) + url = urllib.parse.urljoin(opt_host, url) filename = url.rsplit("/", 1)[-1].strip() if not filename: raise KeyError except (KeyError, IndexError): - log.warning(u"ignoring invalid img element: {}".format(img_elt.toXml())) + log.warning("ignoring invalid img element: {}".format(img_elt.toXml())) return # we change the url for the normalized one @@ -288,10 +288,10 @@ pass else: # host is the ignored one, we skip - parsed_url = urlparse.urlsplit(url) + parsed_url = urllib.parse.urlsplit(url) if ignore_host in parsed_url.hostname: log.info( - u"Don't upload image at {url} because of {opt} option".format( + "Don't upload image at {url} because of {opt} option".format( url=url, opt=OPT_UPLOAD_IGNORE_HOST ) ) @@ -304,7 +304,7 @@ try: yield web_client.downloadPage(url.encode("utf-8"), tmp_file) filename = filename.replace( - u"%", u"_" + "%", "_" ) # FIXME: tmp workaround for a bug in prosody http upload __, download_d = yield self._u.upload( client, tmp_file, filename, options=upload_options @@ -312,7 +312,7 @@ download_url = yield download_d except Exception as e: log.warning( - u"can't download image at {url}: {reason}".format(url=url, reason=e) + "can't download image at {url}: {reason}".format(url=url, reason=e) ) else: img_elt["src"] = 
download_url diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_blog_import_dokuwiki.py --- a/sat/plugins/plugin_blog_import_dokuwiki.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_blog_import_dokuwiki.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin to import dokuwiki blogs @@ -28,8 +28,8 @@ from twisted.internet import threads from collections import OrderedDict import calendar -import urllib -import urlparse +import urllib.request, urllib.parse, urllib.error +import urllib.parse import tempfile import re import time @@ -39,13 +39,13 @@ from dokuwiki import DokuWiki, DokuWikiError # this is a new dependency except ImportError: raise exceptions.MissingModule( - u'Missing module dokuwiki, please install it with "pip install dokuwiki"' + 'Missing module dokuwiki, please install it with "pip install dokuwiki"' ) try: from PIL import Image # this is already needed by plugin XEP-0054 except: raise exceptions.MissingModule( - u"Missing module pillow, please download/install it from https://python-pillow.github.io" + "Missing module pillow, please download/install it from https://python-pillow.github.io" ) PLUGIN_INFO = { @@ -58,10 +58,10 @@ C.PI_DESCRIPTION: _("""Blog importer for Dokuwiki blog engine."""), } -SHORT_DESC = D_(u"import posts from Dokuwiki blog engine") +SHORT_DESC = D_("import posts from Dokuwiki blog engine") LONG_DESC = D_( - u"""This importer handle Dokuwiki blog engine. + """This importer handle Dokuwiki blog engine. To use it, you need an admin access to a running Dokuwiki website (local or on the Internet). The importer retrieves the data using @@ -129,7 +129,7 @@ @param post(dict): parsed post data @return (unicode): post unique item id """ - return unicode(post["id"]) + return str(post["id"]) def getPostUpdated(self, post): """Return the update date. 
@@ -137,7 +137,7 @@ @param post(dict): parsed post data @return (unicode): update date """ - return unicode(post["mtime"]) + return str(post["mtime"]) def getPostPublished(self, post): """Try to parse the date from the message ID, else use "mtime". @@ -148,7 +148,7 @@ @param post (dict): parsed post data @return (unicode): publication date """ - id_, default = unicode(post["id"]), unicode(post["mtime"]) + id_, default = str(post["id"]), str(post["mtime"]) try: date = id_.split(":")[-1].split("_")[0] except KeyError: @@ -160,7 +160,7 @@ time_struct = time.strptime(date, "%Y%m%d") except ValueError: return default - return unicode(calendar.timegm(time_struct)) + return str(calendar.timegm(time_struct)) def processPost(self, post, profile_jid): """Process a single page. @@ -235,7 +235,7 @@ if count >= self.limit: break - return (self.posts_data.itervalues(), len(self.posts_data)) + return (iter(self.posts_data.values()), len(self.posts_data)) def processContent(self, text, backlinks, profile_jid): """Do text substitutions and file copy. 
@@ -243,7 +243,7 @@ @param text (unicode): message content @param backlinks (list[unicode]): list of backlinks """ - text = text.strip(u"\ufeff") # this is at the beginning of the file (BOM) + text = text.strip("\ufeff") # this is at the beginning of the file (BOM) for backlink in backlinks: src = '/doku.php?id=%s"' % backlink @@ -261,9 +261,9 @@ if self.media_repo: self.moveMedia(link, subs) elif link not in subs: - subs[link] = urlparse.urljoin(self.url, link) + subs[link] = urllib.parse.urljoin(self.url, link) - for url, new_url in subs.iteritems(): + for url, new_url in subs.items(): text = text.replace(url, new_url) return text @@ -274,12 +274,12 @@ @param link (unicode): media link @param subs (dict): substitutions data """ - url = urlparse.urljoin(self.url, link) + url = urllib.parse.urljoin(self.url, link) user_media = re.match(r"(/lib/exe/\w+.php\?)(.*)", link) thumb_width = None if user_media: # media that has been added by the user - params = urlparse.parse_qs(urlparse.urlparse(url).query) + params = urllib.parse.parse_qs(urllib.parse.urlparse(url).query) try: media = params["media"][0] except KeyError: @@ -295,7 +295,7 @@ filename = media.replace(":", "/") # XXX: avoid "precondition failed" error (only keep the media parameter) - url = urlparse.urljoin(self.url, "/lib/exe/fetch.php?media=%s" % media) + url = urllib.parse.urljoin(self.url, "/lib/exe/fetch.php?media=%s" % media) elif link.startswith("/lib/plugins/"): # other link added by a plugin or something else @@ -324,7 +324,7 @@ if not os.path.exists(dest): if not os.path.exists(dirname): os.makedirs(dirname) - urllib.urlretrieve(source, dest) + urllib.request.urlretrieve(source, dest) log.debug("DokuWiki media file copied to %s" % dest) def createThumbnail(self, source, dest, width): diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_blog_import_dotclear.py --- a/sat/plugins/plugin_blog_import_dotclear.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_blog_import_dotclear.py Tue Aug 
13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for import external blogs @@ -42,10 +42,10 @@ C.PI_DESCRIPTION: _("""Blog importer for Dotclear blog engine."""), } -SHORT_DESC = D_(u"import posts from Dotclear blog engine") +SHORT_DESC = D_("import posts from Dotclear blog engine") LONG_DESC = D_( - u"""This importer handle Dotclear blog engine. + """This importer handle Dotclear blog engine. To use it, you'll need to export your blog to a flat file. You must go in your admin interface and select Plugins/Maintenance then Backup. @@ -55,7 +55,7 @@ location: you must use the absolute path to your backup for the location parameter """ ) -POST_ID_PREFIX = u"sat_dc_" +POST_ID_PREFIX = "sat_dc_" KNOWN_DATA_TYPES = ( "link", "setting", @@ -66,7 +66,7 @@ "comment", "captcha", ) -ESCAPE_MAP = {"r": u"\r", "n": u"\n", '"': u'"', "\\": u"\\"} +ESCAPE_MAP = {"r": "\r", "n": "\n", '"': '"', "\\": "\\"} class DotclearParser(object): @@ -83,7 +83,7 @@ @param post(dict): parsed post data @return (unicode): post unique item id """ - return u"{}_{}_{}_{}:{}".format( + return "{}_{}_{}_{}:{}".format( POST_ID_PREFIX, post["blog_id"], post["user_id"], @@ -99,7 +99,7 @@ """ post_id = comment["post_id"] parent_item_id = self.posts_data[post_id]["blog"]["id"] - return u"{}_comment_{}".format(parent_item_id, comment["comment_id"]) + return "{}_comment_{}".format(parent_item_id, comment["comment_id"]) def getTime(self, data, key): """Parse time as given by dotclear, with timezone handling @@ -125,18 +125,18 @@ if char == '"': # we have reached the end of this field, # we try to parse a new one - yield u"".join(buf) + yield "".join(buf) buf = [] idx += 1 try: separator = fields_data[idx] except IndexError: return - if separator != u",": + if separator != ",": raise exceptions.ParsingError("Field separator was expeceted") idx += 1 break # we have a new field - elif char == u"\\": + elif char == "\\": idx += 1 try: 
char = ESCAPE_MAP[fields_data[idx]] @@ -144,22 +144,22 @@ raise exceptions.ParsingError("Escaped char was expected") except KeyError: char = fields_data[idx] - log.warning(u"Unknown key to escape: {}".format(char)) + log.warning("Unknown key to escape: {}".format(char)) buf.append(char) def parseFields(self, headers, data): - return dict(itertools.izip(headers, self.readFields(data))) + return dict(zip(headers, self.readFields(data))) def postHandler(self, headers, data, index): post = self.parseFields(headers, data) - log.debug(u"({}) post found: {}".format(index, post["post_title"])) + log.debug("({}) post found: {}".format(index, post["post_title"])) mb_data = { "id": self.getPostId(post), "published": self.getTime(post, "post_creadt"), "updated": self.getTime(post, "post_upddt"), "author": post["user_id"], # there use info are not in the archive # TODO: option to specify user info - "content_xhtml": u"{}{}".format( + "content_xhtml": "{}{}".format( post["post_content_xhtml"], post["post_excerpt_xhtml"] ), "title": post["post_title"], @@ -168,7 +168,7 @@ self.posts_data[post["post_id"]] = { "blog": mb_data, "comments": [[]], - "url": u"/post/{}".format(post["post_url"]), + "url": "/post/{}".format(post["post_url"]), } def metaHandler(self, headers, data, index): @@ -178,7 +178,7 @@ tags.add(meta["meta_id"]) def metaFinishedHandler(self): - for post_id, tags in self.tags.iteritems(): + for post_id, tags in self.tags.items(): data_format.iter2dict("tag", tags, self.posts_data[post_id]["blog"]) del self.tags @@ -186,9 +186,9 @@ comment = self.parseFields(headers, data) if comment["comment_site"]: # we don't use atom:uri because it's used for jid in XMPP - content = u'{}\n
\nauthor website'.format( + content = '{}\n
\nauthor website'.format( comment["comment_content"], - cgi.escape(comment["comment_site"]).replace('"', u"%22"), + cgi.escape(comment["comment_site"]).replace('"', "%22"), ) else: content = comment["comment_content"] @@ -208,24 +208,24 @@ def parse(self, db_path): with open(db_path) as f: - signature = f.readline().decode("utf-8") + signature = f.readline() try: version = signature.split("|")[1] except IndexError: version = None - log.debug(u"Dotclear version: {}".format(version)) + log.debug("Dotclear version: {}".format(version)) data_type = None data_headers = None index = None while True: - buf = f.readline().decode("utf-8") + buf = f.readline() if not buf: break if buf.startswith("["): header = buf.split(" ", 1) data_type = header[0][1:] if data_type not in KNOWN_DATA_TYPES: - log.warning(u"unkown data type: {}".format(data_type)) + log.warning("unkown data type: {}".format(data_type)) index = 0 try: data_headers = header[1].split(",") @@ -233,7 +233,7 @@ last_header = data_headers[-1] data_headers[-1] = last_header[: last_header.rfind("]")] except IndexError: - log.warning(u"Can't read data)") + log.warning("Can't read data)") else: if data_type is None: continue @@ -247,7 +247,7 @@ pass else: finished_handler() - log.debug(u"{} data finished".format(data_type)) + log.debug("{} data finished".format(data_type)) data_type = None continue assert data_type @@ -258,7 +258,7 @@ else: fields_handler(data_headers, buf, index) index += 1 - return (self.posts_data.itervalues(), len(self.posts_data)) + return (iter(self.posts_data.values()), len(self.posts_data)) class DotclearImport(object): @@ -272,7 +272,7 @@ def DcImport(self, client, location, options=None): if not os.path.isabs(location): raise exceptions.DataError( - u"An absolute path to backup data need to be given as location" + "An absolute path to backup data need to be given as location" ) dc_parser = DotclearParser() d = threads.deferToThread(dc_parser.parse, location) diff -r ff5bcb12ae60 -r 
ab2696e34d29 sat/plugins/plugin_comp_file_sharing.py --- a/sat/plugins/plugin_comp_file_sharing.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_comp_file_sharing.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for parrot mode (experimental) @@ -55,17 +55,17 @@ C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "FileSharing", C.PI_HANDLER: C.BOOL_TRUE, - C.PI_DESCRIPTION: _(u"""Component hosting and sharing files"""), + C.PI_DESCRIPTION: _("""Component hosting and sharing files"""), } -HASH_ALGO = u"sha-256" +HASH_ALGO = "sha-256" NS_COMMENTS = "org.salut-a-toi.comments" COMMENT_NODE_PREFIX = "org.salut-a-toi.file_comments/" class FileSharing(object): def __init__(self, host): - log.info(_(u"File Sharing initialization")) + log.info(_("File Sharing initialization")) self.host = host self._f = host.plugins["FILE"] self._jf = host.plugins["XEP-0234"] @@ -99,12 +99,12 @@ on file is received, this method create hash/thumbnails if necessary move the file to the right location, and create metadata entry in database """ - name = file_data[u"name"] + name = file_data["name"] extra = {} - if file_data[u"hash_algo"] == HASH_ALGO: - log.debug(_(u"Reusing already generated hash")) - file_hash = file_data[u"hash_hasher"].hexdigest() + if file_data["hash_algo"] == HASH_ALGO: + log.debug(_("Reusing already generated hash")) + file_hash = file_data["hash_hasher"].hexdigest() else: hasher = self._h.getHasher(HASH_ALGO) with open("file_path") as f: @@ -113,7 +113,7 @@ if os.path.isfile(final_path): log.debug( - u"file [{file_hash}] already exists, we can remove temporary one".format( + "file [{file_hash}] already exists, we can remove temporary one".format( file_hash=file_hash ) ) @@ -121,16 +121,16 @@ else: os.rename(file_path, final_path) log.debug( - u"file [{file_hash}] moved to {files_path}".format( + "file [{file_hash}] moved to {files_path}".format( file_hash=file_hash, 
files_path=self.files_path ) ) - mime_type = file_data.get(u"mime_type") - if not mime_type or mime_type == u"application/octet-stream": + mime_type = file_data.get("mime_type") + if not mime_type or mime_type == "application/octet-stream": mime_type = mimetypes.guess_type(name)[0] - if mime_type is not None and mime_type.startswith(u"image"): + if mime_type is not None and mime_type.startswith("image"): thumbnails = extra.setdefault(C.KEY_THUMBNAILS, []) for max_thumb_size in (self._t.SIZE_SMALL, self._t.SIZE_MEDIUM): try: @@ -141,19 +141,19 @@ 60 * 60 * 24 * 31 * 6, ) except Exception as e: - log.warning(_(u"Can't create thumbnail: {reason}").format(reason=e)) + log.warning(_("Can't create thumbnail: {reason}").format(reason=e)) break - thumbnails.append({u"id": thumb_id, u"size": thumb_size}) + thumbnails.append({"id": thumb_id, "size": thumb_size}) self.host.memory.setFile( client, name=name, - version=u"", + version="", file_hash=file_hash, hash_algo=HASH_ALGO, - size=file_data[u"size"], - path=file_data.get(u"path"), - namespace=file_data.get(u"namespace"), + size=file_data["size"], + path=file_data.get("path"), + namespace=file_data.get("namespace"), mime_type=mime_type, owner=peer_jid, extra=extra, @@ -191,49 +191,49 @@ self, client, session, content_data, content_name, file_data, file_elt ): """This method retrieve a file on request, and send if after checking permissions""" - peer_jid = session[u"peer_jid"] + peer_jid = session["peer_jid"] try: found_files = yield self.host.memory.getFiles( client, peer_jid=peer_jid, - name=file_data.get(u"name"), - file_hash=file_data.get(u"file_hash"), - hash_algo=file_data.get(u"hash_algo"), - path=file_data.get(u"path"), - namespace=file_data.get(u"namespace"), + name=file_data.get("name"), + file_hash=file_data.get("file_hash"), + hash_algo=file_data.get("hash_algo"), + path=file_data.get("path"), + namespace=file_data.get("namespace"), ) except exceptions.NotFound: found_files = None except 
exceptions.PermissionError: log.warning( - _(u"{peer_jid} is trying to access an unauthorized file: {name}").format( - peer_jid=peer_jid, name=file_data.get(u"name") + _("{peer_jid} is trying to access an unauthorized file: {name}").format( + peer_jid=peer_jid, name=file_data.get("name") ) ) defer.returnValue(False) if not found_files: log.warning( - _(u"no matching file found ({file_data})").format(file_data=file_data) + _("no matching file found ({file_data})").format(file_data=file_data) ) defer.returnValue(False) # we only use the first found file found_file = found_files[0] - if found_file[u'type'] != C.FILE_TYPE_FILE: - raise TypeError(u"a file was expected, type is {type_}".format( - type_=found_file[u'type'])) - file_hash = found_file[u"file_hash"] + if found_file['type'] != C.FILE_TYPE_FILE: + raise TypeError("a file was expected, type is {type_}".format( + type_=found_file['type'])) + file_hash = found_file["file_hash"] file_path = os.path.join(self.files_path, file_hash) - file_data[u"hash_hasher"] = hasher = self._h.getHasher(found_file[u"hash_algo"]) - size = file_data[u"size"] = found_file[u"size"] - file_data[u"file_hash"] = file_hash - file_data[u"hash_algo"] = found_file[u"hash_algo"] + file_data["hash_hasher"] = hasher = self._h.getHasher(found_file["hash_algo"]) + size = file_data["size"] = found_file["size"] + file_data["file_hash"] = file_hash + file_data["hash_algo"] = found_file["hash_algo"] # we complete file_elt so peer can have some details on the file - if u"name" not in file_data: - file_elt.addElement(u"name", content=found_file[u"name"]) - file_elt.addElement(u"size", content=unicode(size)) + if "name" not in file_data: + file_elt.addElement("name", content=found_file["name"]) + file_elt.addElement("size", content=str(size)) content_data["stream_object"] = stream.FileStreamObject( self.host, client, @@ -268,11 +268,11 @@ comment_elt = file_elt.addElement((NS_COMMENTS, "comments"), content=comments_url) try: - count = 
len(extra_args[u"extra"][u"comments"]) + count = len(extra_args["extra"]["comments"]) except KeyError: count = 0 - comment_elt["count"] = unicode(count) + comment_elt["count"] = str(count) return True def _getFileComments(self, file_elt, file_data): @@ -280,7 +280,7 @@ comments_elt = next(file_elt.elements(NS_COMMENTS, "comments")) except StopIteration: return - file_data["comments_url"] = unicode(comments_elt) + file_data["comments_url"] = str(comments_elt) file_data["comments_count"] = comments_elt["count"] return True diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_comp_file_sharing_management.py --- a/sat/plugins/plugin_comp_file_sharing_management.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_comp_file_sharing_management.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -31,25 +31,25 @@ PLUGIN_INFO = { - C.PI_NAME: u"File Sharing Management", - C.PI_IMPORT_NAME: u"FILE_SHARING_MANAGEMENT", + C.PI_NAME: "File Sharing Management", + C.PI_IMPORT_NAME: "FILE_SHARING_MANAGEMENT", C.PI_MODES: [C.PLUG_MODE_COMPONENT], - C.PI_TYPE: u"EXP", + C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0050", u"XEP-0264"], + C.PI_DEPENDENCIES: ["XEP-0050", "XEP-0264"], C.PI_RECOMMENDATIONS: [], - C.PI_MAIN: u"FileSharingManagement", - C.PI_HANDLER: u"no", + C.PI_MAIN: "FileSharingManagement", + C.PI_HANDLER: "no", C.PI_DESCRIPTION: _( - u"Experimental handling of file management for file sharing. This plugins allows " - u"to change permissions of stored files/directories or remove them." + "Experimental handling of file management for file sharing. This plugins allows " + "to change permissions of stored files/directories or remove them." 
), } -NS_FILE_MANAGEMENT = u"https://salut-a-toi.org/protocol/file-management:0" -NS_FILE_MANAGEMENT_PERM = u"https://salut-a-toi.org/protocol/file-management:0#perm" -NS_FILE_MANAGEMENT_DELETE = u"https://salut-a-toi.org/protocol/file-management:0#delete" -NS_FILE_MANAGEMENT_THUMB = u"https://salut-a-toi.org/protocol/file-management:0#thumb" +NS_FILE_MANAGEMENT = "https://salut-a-toi.org/protocol/file-management:0" +NS_FILE_MANAGEMENT_PERM = "https://salut-a-toi.org/protocol/file-management:0#perm" +NS_FILE_MANAGEMENT_DELETE = "https://salut-a-toi.org/protocol/file-management:0#delete" +NS_FILE_MANAGEMENT_THUMB = "https://salut-a-toi.org/protocol/file-management:0#thumb" class WorkflowError(Exception): @@ -68,7 +68,7 @@ # syntax?) should be elaborated and proposed as a standard. def __init__(self, host): - log.info(_(u"File Sharing Management plugin initialization")) + log.info(_("File Sharing Management plugin initialization")) self.host = host self._c = host.plugins["XEP-0050"] self._t = host.plugins["XEP-0264"] @@ -76,17 +76,17 @@ def profileConnected(self, client): self._c.addAdHocCommand( - client, self._onChangeFile, u"Change Permissions of File(s)", + client, self._onChangeFile, "Change Permissions of File(s)", node=NS_FILE_MANAGEMENT_PERM, allowed_magics=C.ENTITY_ALL, ) self._c.addAdHocCommand( - client, self._onDeleteFile, u"Delete File(s)", + client, self._onDeleteFile, "Delete File(s)", node=NS_FILE_MANAGEMENT_DELETE, allowed_magics=C.ENTITY_ALL, ) self._c.addAdHocCommand( - client, self._onGenThumbnails, u"Generate Thumbnails", + client, self._onGenThumbnails, "Generate Thumbnails", node=NS_FILE_MANAGEMENT_THUMB, allowed_magics=C.ENTITY_ALL, ) @@ -109,7 +109,7 @@ @return (tuple): arguments to use in defer.returnValue """ status = self._c.STATUS.EXECUTING - form = data_form.Form("form", title=u"File Management", + form = data_form.Form("form", title="File Management", formNamespace=NS_FILE_MANAGEMENT) field = data_form.Field( @@ -136,17 +136,17 @@ """ 
fields = command_form.fields try: - path = fields[u'path'].value.strip() - namespace = fields[u'namespace'].value or None + path = fields['path'].value.strip() + namespace = fields['namespace'].value or None except KeyError: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) if not path: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] requestor_bare = requestor.userhostJID() - path = path.rstrip(u'/') + path = path.rstrip('/') parent_path, basename = os.path.split(path) # TODO: if parent_path and basename are empty, we ask for root directory @@ -158,31 +158,31 @@ namespace=namespace) found_file = found_files[0] except (exceptions.NotFound, IndexError): - raise WorkflowError(self._err(_(u"file not found"))) + raise WorkflowError(self._err(_("file not found"))) except exceptions.PermissionError: - raise WorkflowError(self._err(_(u"forbidden"))) + raise WorkflowError(self._err(_("forbidden"))) if found_file['owner'] != requestor_bare: # only owner can manage files - log.warning(_(u"Only owner can manage files")) - raise WorkflowError(self._err(_(u"forbidden"))) + log.warning(_("Only owner can manage files")) + raise WorkflowError(self._err(_("forbidden"))) - session_data[u'found_file'] = found_file - session_data[u'namespace'] = namespace + session_data['found_file'] = found_file + session_data['namespace'] = namespace defer.returnValue(found_file) def _updateReadPermission(self, access, allowed_jids): if not allowed_jids: if C.ACCESS_PERM_READ in access: del access[C.ACCESS_PERM_READ] - elif allowed_jids == u'PUBLIC': + elif allowed_jids == 'PUBLIC': access[C.ACCESS_PERM_READ] = { - u"type": C.ACCESS_TYPE_PUBLIC + "type": C.ACCESS_TYPE_PUBLIC } else: access[C.ACCESS_PERM_READ] = { - u"type": C.ACCESS_TYPE_WHITELIST, - u"jids": [j.full() for j in allowed_jids] + "type": C.ACCESS_TYPE_WHITELIST, + "jids": [j.full() for j in allowed_jids] } @defer.inlineCallbacks @@ -192,30 +192,30 @@ @param 
file_data(dict): metadata of the file @param allowed_jids(list[jid.JID]): list of entities allowed to read the file """ - assert file_data[u'type'] == C.FILE_TYPE_DIRECTORY + assert file_data['type'] == C.FILE_TYPE_DIRECTORY files_data = yield self.host.memory.getFiles( - client, requestor, parent=file_data[u'id'], namespace=namespace) + client, requestor, parent=file_data['id'], namespace=namespace) for file_data in files_data: - if not file_data[u'access'].get(C.ACCESS_PERM_READ, {}): - log.debug(u"setting {perm} read permission for {name}".format( - perm=allowed_jids, name=file_data[u'name'])) + if not file_data['access'].get(C.ACCESS_PERM_READ, {}): + log.debug("setting {perm} read permission for {name}".format( + perm=allowed_jids, name=file_data['name'])) yield self.host.memory.fileUpdate( - file_data[u'id'], u'access', + file_data['id'], 'access', partial(self._updateReadPermission, allowed_jids=allowed_jids)) - if file_data[u'type'] == C.FILE_TYPE_DIRECTORY: - yield self._updateDir(client, requestor, namespace, file_data, u'PUBLIC') + if file_data['type'] == C.FILE_TYPE_DIRECTORY: + yield self._updateDir(client, requestor, namespace, file_data, 'PUBLIC') @defer.inlineCallbacks def _onChangeFile(self, client, command_elt, session_data, action, node): try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None found_file = session_data.get('found_file') - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] requestor_bare = requestor.userhostJID() if command_form is None or len(command_form.fields) == 0: @@ -230,31 +230,31 @@ defer.returnValue(e.err_args) # management request - if found_file[u'type'] == C.FILE_TYPE_DIRECTORY: - instructions = D_(u"Please select permissions for this directory") + if found_file['type'] == C.FILE_TYPE_DIRECTORY: + instructions = D_("Please 
select permissions for this directory") else: - instructions = D_(u"Please select permissions for this file") + instructions = D_("Please select permissions for this file") - form = data_form.Form("form", title=u"File Management", + form = data_form.Form("form", title="File Management", instructions=[instructions], formNamespace=NS_FILE_MANAGEMENT) field = data_form.Field( "text-multi", "read_allowed", required=False, - desc=u'list of jids allowed to read this file (beside yourself), or ' - u'"PUBLIC" to let a public access' + desc='list of jids allowed to read this file (beside yourself), or ' + '"PUBLIC" to let a public access' ) - read_access = found_file[u"access"].get(C.ACCESS_PERM_READ, {}) - access_type = read_access.get(u'type', C.ACCESS_TYPE_WHITELIST) + read_access = found_file["access"].get(C.ACCESS_PERM_READ, {}) + access_type = read_access.get('type', C.ACCESS_TYPE_WHITELIST) if access_type == C.ACCESS_TYPE_PUBLIC: - field.values = [u'PUBLIC'] + field.values = ['PUBLIC'] else: field.values = read_access.get('jids', []) form.addField(field) - if found_file[u'type'] == C.FILE_TYPE_DIRECTORY: + if found_file['type'] == C.FILE_TYPE_DIRECTORY: field = data_form.Field( "boolean", "recursive", value=False, required=False, - desc=u"Files under it will be made public to follow this dir " - u"permission (only if they don't have already a permission set)." + desc="Files under it will be made public to follow this dir " + "permission (only if they don't have already a permission set)." 
) form.addField(field) @@ -269,22 +269,22 @@ except KeyError: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) - if read_allowed.value == u'PUBLIC': - allowed_jids = u'PUBLIC' - elif read_allowed.value.strip() == u'': + if read_allowed.value == 'PUBLIC': + allowed_jids = 'PUBLIC' + elif read_allowed.value.strip() == '': allowed_jids = None else: try: allowed_jids = [jid.JID(v.strip()) for v in read_allowed.values if v.strip()] except RuntimeError as e: - log.warning(_(u"Can't use read_allowed values: {reason}").format( + log.warning(_("Can't use read_allowed values: {reason}").format( reason=e)) self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) - if found_file[u'type'] == C.FILE_TYPE_FILE: + if found_file['type'] == C.FILE_TYPE_FILE: yield self.host.memory.fileUpdate( - found_file[u'id'], u'access', + found_file['id'], 'access', partial(self._updateReadPermission, allowed_jids=allowed_jids)) else: try: @@ -292,32 +292,32 @@ except KeyError: self._c.adHocError(self._c.ERROR.BAD_PAYLOAD) yield self.host.memory.fileUpdate( - found_file[u'id'], u'access', + found_file['id'], 'access', partial(self._updateReadPermission, allowed_jids=allowed_jids)) if recursive: # we set all file under the directory as public (if they haven't # already a permission set), so allowed entities of root directory # can read them. 
- namespace = session_data[u'namespace'] + namespace = session_data['namespace'] yield self._updateDir( - client, requestor_bare, namespace, found_file, u'PUBLIC') + client, requestor_bare, namespace, found_file, 'PUBLIC') # job done, we can end the session status = self._c.STATUS.COMPLETED payload = None - note = (self._c.NOTE.INFO, _(u"management session done")) + note = (self._c.NOTE.INFO, _("management session done")) defer.returnValue((payload, status, None, note)) @defer.inlineCallbacks def _onDeleteFile(self, client, command_elt, session_data, action, node): try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None found_file = session_data.get('found_file') - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] requestor_bare = requestor.userhostJID() if command_form is None or len(command_form.fields) == 0: @@ -330,18 +330,18 @@ found_file = yield self._getFileData(client, session_data, command_form) except WorkflowError as e: defer.returnValue(e.err_args) - if found_file[u'type'] == C.FILE_TYPE_DIRECTORY: - msg = D_(u"Are you sure to delete directory {name} and all files and " - u"directories under it?").format(name=found_file[u'name']) + if found_file['type'] == C.FILE_TYPE_DIRECTORY: + msg = D_("Are you sure to delete directory {name} and all files and " + "directories under it?").format(name=found_file['name']) else: - msg = D_(u"Are you sure to delete file {name}?" - .format(name=found_file[u'name'])) - form = data_form.Form("form", title=u"File Management", + msg = D_("Are you sure to delete file {name}?" 
+ .format(name=found_file['name'])) + form = data_form.Form("form", title="File Management", instructions = [msg], formNamespace=NS_FILE_MANAGEMENT) field = data_form.Field( "boolean", "confirm", value=False, required=True, - desc=u"check this box to confirm" + desc="check this box to confirm" ) form.addField(field) status = self._c.STATUS.EXECUTING @@ -357,10 +357,10 @@ if not confirmed: note = None else: - recursive = found_file[u'type'] == C.FILE_TYPE_DIRECTORY + recursive = found_file['type'] == C.FILE_TYPE_DIRECTORY yield self.host.memory.fileDelete( - client, requestor_bare, found_file[u'id'], recursive) - note = (self._c.NOTE.INFO, _(u"file deleted")) + client, requestor_bare, found_file['id'], recursive) + note = (self._c.NOTE.INFO, _("file deleted")) status = self._c.STATUS.COMPLETED payload = None defer.returnValue((payload, status, None, note)) @@ -374,16 +374,16 @@ @param file_data(dict): metadata of the file """ - if file_data[u'type'] == C.FILE_TYPE_DIRECTORY: + if file_data['type'] == C.FILE_TYPE_DIRECTORY: sub_files_data = yield self.host.memory.getFiles( - client, requestor, parent=file_data[u'id'], namespace=namespace) + client, requestor, parent=file_data['id'], namespace=namespace) for sub_file_data in sub_files_data: yield self._genThumbs(client, requestor, namespace, sub_file_data) - elif file_data[u'type'] == C.FILE_TYPE_FILE: - mime_type = file_data[u'mime_type'] - file_path = os.path.join(self.files_path, file_data[u'file_hash']) - if mime_type is not None and mime_type.startswith(u"image"): + elif file_data['type'] == C.FILE_TYPE_FILE: + mime_type = file_data['mime_type'] + file_path = os.path.join(self.files_path, file_data['file_hash']) + if mime_type is not None and mime_type.startswith("image"): thumbnails = [] for max_thumb_size in (self._t.SIZE_SMALL, self._t.SIZE_MEDIUM): @@ -395,31 +395,31 @@ 60 * 60 * 24 * 31 * 6, ) except Exception as e: - log.warning(_(u"Can't create thumbnail: {reason}") + log.warning(_("Can't create thumbnail: 
{reason}") .format(reason=e)) break - thumbnails.append({u"id": thumb_id, u"size": thumb_size}) + thumbnails.append({"id": thumb_id, "size": thumb_size}) yield self.host.memory.fileUpdate( - file_data[u'id'], u'extra', + file_data['id'], 'extra', partial(self._updateThumbs, thumbnails=thumbnails)) - log.info(u"thumbnails for [{file_name}] generated" - .format(file_name=file_data[u'name'])) + log.info("thumbnails for [{file_name}] generated" + .format(file_name=file_data['name'])) else: - log.warning(u"unmanaged file type: {type_}".format(type_=file_data[u'type'])) + log.warning("unmanaged file type: {type_}".format(type_=file_data['type'])) @defer.inlineCallbacks def _onGenThumbnails(self, client, command_elt, session_data, action, node): try: - x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next() + x_elt = next(command_elt.elements(data_form.NS_X_DATA, "x")) command_form = data_form.Form.fromElement(x_elt) except StopIteration: command_form = None found_file = session_data.get('found_file') - requestor = session_data[u'requestor'] + requestor = session_data['requestor'] if command_form is None or len(command_form.fields) == 0: # root request @@ -432,11 +432,11 @@ except WorkflowError as e: defer.returnValue(e.err_args) - log.info(u"Generating thumbnails as requested") - yield self._genThumbs(client, requestor, found_file[u'namespace'], found_file) + log.info("Generating thumbnails as requested") + yield self._genThumbs(client, requestor, found_file['namespace'], found_file) # job done, we can end the session status = self._c.STATUS.COMPLETED payload = None - note = (self._c.NOTE.INFO, _(u"thumbnails generated")) + note = (self._c.NOTE.INFO, _("thumbnails generated")) defer.returnValue((payload, status, None, note)) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_dbg_manhole.py --- a/sat/plugins/plugin_dbg_manhole.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_dbg_manhole.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env 
python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for debugging, using a manhole @@ -29,14 +29,14 @@ log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Manhole debug plugin", - C.PI_IMPORT_NAME: u"manhole", - C.PI_TYPE: u"DEBUG", + C.PI_NAME: "Manhole debug plugin", + C.PI_IMPORT_NAME: "manhole", + C.PI_TYPE: "DEBUG", C.PI_PROTOCOLS: [], C.PI_DEPENDENCIES: [], - C.PI_MAIN: u"Manhole", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: _(u"""Debug plugin to have a telnet server"""), + C.PI_MAIN: "Manhole", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: _("""Debug plugin to have a telnet server"""), } @@ -50,14 +50,14 @@ self.startManhole(port) def startManhole(self, port): - log.warning(_(u"/!\\ Manhole debug server activated, be sure to not use it in " - u"production, this is dangerous /!\\")) - log.info(_(u"You can connect to manhole server using telnet on port {port}") + log.warning(_("/!\\ Manhole debug server activated, be sure to not use it in " + "production, this is dangerous /!\\")) + log.info(_("You can connect to manhole server using telnet on port {port}") .format(port=port)) f = protocol.ServerFactory() namespace = { - u"host": self.host, - u"jid": jid, + "host": self.host, + "jid": jid, } f.protocol = lambda: TelnetTransport(TelnetBootstrapProtocol, insults.ServerProtocol, diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_command_export.py --- a/sat/plugins/plugin_exp_command_export.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_command_export.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to export commands (experimental) @@ -52,7 +52,7 @@ def _clean(self, data): if not data: log.error("data should not be empty !") - return u"" + return "" decoded = data.decode("utf-8", "ignore")[: -1 if data[-1] == "\n" else None] return clean_ustr(decoded) @@ -66,7 +66,7 @@ self.client.sendMessage(self.target, {"": self._clean(data)}, 
no_trigger=True) def processEnded(self, reason): - log.info(u"process finished: %d" % (reason.value.exitCode,)) + log.info("process finished: %d" % (reason.value.exitCode,)) self.parent.removeProcess(self.target, self) def write(self, message): @@ -120,12 +120,12 @@ if spawned_key in self.spawned: try: - body = message_elt.elements(C.NS_CLIENT, "body").next() + body = next(message_elt.elements(C.NS_CLIENT, "body")) except StopIteration: # do not block message without body (chat state notification...) return True - mess_data = unicode(body) + "\n" + mess_data = str(body) + "\n" processes_set = self.spawned[spawned_key] _continue = False exclusive = False @@ -158,7 +158,7 @@ raise jid.InvalidFormat _jid = _jid.userhostJID() except (RuntimeError, jid.InvalidFormat, AttributeError): - log.info(u"invalid target ignored: %s" % (target,)) + log.info("invalid target ignored: %s" % (target,)) continue process_prot = ExportCommandProtocol(self, client, _jid, options) self.spawned.setdefault((_jid, client.profile), set()).add(process_prot) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_events.py --- a/sat/plugins/plugin_exp_events.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_events.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -29,7 +29,7 @@ from twisted.words.protocols.jabber import jid, error from twisted.words.xish import domish from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber.xmlstream import XMPPHandler from wokkel import pubsub @@ -41,11 +41,11 @@ C.PI_IMPORT_NAME: "EVENTS", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"INVITATION", u"LIST_INTEREST"], + C.PI_DEPENDENCIES: ["XEP-0060", "INVITATION", "LIST_INTEREST"], C.PI_RECOMMENDATIONS: ["XEP-0277", "EMAIL_INVITATION"], C.PI_MAIN: 
"Events", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"""Experimental implementation of XMPP events management"""), + C.PI_DESCRIPTION: _("""Experimental implementation of XMPP events management"""), } NS_EVENT = "org.salut-a-toi.event:0" @@ -55,13 +55,13 @@ """Q&D module to handle event attendance answer, experimentation only""" def __init__(self, host): - log.info(_(u"Event plugin initialization")) + log.info(_("Event plugin initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] self._i = self.host.plugins.get("EMAIL_INVITATION") self._b = self.host.plugins.get("XEP-0277") - self.host.registerNamespace(u"event", NS_EVENT) - self.host.plugins[u"INVITATION"].registerNamespace(NS_EVENT, + self.host.registerNamespace("event", NS_EVENT) + self.host.plugins["INVITATION"].registerNamespace(NS_EVENT, self.register) host.bridge.addMethod( "eventGet", @@ -69,7 +69,7 @@ in_sign="ssss", out_sign="(ia{ss})", method=self._eventGet, - async=True, + async_=True, ) host.bridge.addMethod( "eventCreate", @@ -77,7 +77,7 @@ in_sign="ia{ss}ssss", out_sign="s", method=self._eventCreate, - async=True, + async_=True, ) host.bridge.addMethod( "eventModify", @@ -85,7 +85,7 @@ in_sign="sssia{ss}s", out_sign="", method=self._eventModify, - async=True, + async_=True, ) host.bridge.addMethod( "eventsList", @@ -93,7 +93,7 @@ in_sign="sss", out_sign="aa{ss}", method=self._eventsList, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteeGet", @@ -101,7 +101,7 @@ in_sign="sss", out_sign="a{ss}", method=self._eventInviteeGet, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteeSet", @@ -109,7 +109,7 @@ in_sign="ssa{ss}s", out_sign="", method=self._eventInviteeSet, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteesList", @@ -117,7 +117,7 @@ in_sign="sss", out_sign="a{sa{ss}}", method=self._eventInviteesList, - async=True, + async_=True, ), host.bridge.addMethod( "eventInvite", @@ -125,7 +125,7 @@ in_sign="sssss", out_sign="", 
method=self._invite, - async=True, + async_=True, ) host.bridge.addMethod( "eventInviteByEmail", @@ -133,7 +133,7 @@ in_sign="ssssassssssss", out_sign="", method=self._inviteByEmail, - async=True, + async_=True, ) def getHandler(self, client): @@ -152,56 +152,56 @@ data = {} - for key in (u"name",): + for key in ("name",): try: data[key] = event_elt[key] except KeyError: continue - for elt_name in (u"description",): + for elt_name in ("description",): try: elt = next(event_elt.elements(NS_EVENT, elt_name)) except StopIteration: continue else: - data[elt_name] = unicode(elt) + data[elt_name] = str(elt) - for elt_name in (u"image", "background-image"): + for elt_name in ("image", "background-image"): try: image_elt = next(event_elt.elements(NS_EVENT, elt_name)) data[elt_name] = image_elt["src"] except StopIteration: continue except KeyError: - log.warning(_(u"no src found for image")) + log.warning(_("no src found for image")) - for uri_type in (u"invitees", u"blog"): + for uri_type in ("invitees", "blog"): try: elt = next(event_elt.elements(NS_EVENT, uri_type)) - uri = data[uri_type + u"_uri"] = elt["uri"] + uri = data[uri_type + "_uri"] = elt["uri"] uri_data = xmpp_uri.parseXMPPUri(uri) - if uri_data[u"type"] != u"pubsub": + if uri_data["type"] != "pubsub": raise ValueError except StopIteration: - log.warning(_(u"no {uri_type} element found!").format(uri_type=uri_type)) + log.warning(_("no {uri_type} element found!").format(uri_type=uri_type)) except KeyError: - log.warning(_(u"incomplete {uri_type} element").format(uri_type=uri_type)) + log.warning(_("incomplete {uri_type} element").format(uri_type=uri_type)) except ValueError: - log.warning(_(u"bad {uri_type} element").format(uri_type=uri_type)) + log.warning(_("bad {uri_type} element").format(uri_type=uri_type)) else: - data[uri_type + u"_service"] = uri_data[u"path"] - data[uri_type + u"_node"] = uri_data[u"node"] + data[uri_type + "_service"] = uri_data["path"] + data[uri_type + "_node"] = uri_data["node"] for 
meta_elt in event_elt.elements(NS_EVENT, "meta"): - key = meta_elt[u"name"] + key = meta_elt["name"] if key in data: log.warning( - u"Ignoring conflicting meta element: {xml}".format( + "Ignoring conflicting meta element: {xml}".format( xml=meta_elt.toXml() ) ) continue - data[key] = unicode(meta_elt) + data[key] = str(meta_elt) if event_elt.link: link_elt = event_elt.link data["service"] = link_elt["service"] @@ -225,11 +225,11 @@ id_ = NS_EVENT items, metadata = yield self._p.getItems(client, service, node, item_ids=[id_]) try: - event_elt = next(items[0].elements(NS_EVENT, u"event")) + event_elt = next(items[0].elements(NS_EVENT, "event")) except StopIteration: - raise exceptions.NotFound(_(u"No event element has been found")) + raise exceptions.NotFound(_("No event element has been found")) except IndexError: - raise exceptions.NotFound(_(u"No event with this id has been found")) + raise exceptions.NotFound(_("No event with this id has been found")) defer.returnValue(event_elt) def register(self, client, name, extra, service, node, event_id, item_elt, @@ -249,16 +249,16 @@ link_elt["node"] = node link_elt["item"] = event_id __, event_data = self._parseEventElt(event_elt) - name = event_data.get(u'name') - if u'image' in event_data: - extra = {u'thumb_url': event_data[u'image']} + name = event_data.get('name') + if 'image' in event_data: + extra = {'thumb_url': event_data['image']} else: extra = None - return self.host.plugins[u'LIST_INTEREST'].registerPubsub( + return self.host.plugins['LIST_INTEREST'].registerPubsub( client, NS_EVENT, service, node, event_id, creator, name=name, element=event_elt, extra=extra) - def _eventGet(self, service, node, id_=u"", profile_key=C.PROF_KEY_NONE): + def _eventGet(self, service, node, id_="", profile_key=C.PROF_KEY_NONE): service = jid.JID(service) if service else None node = node if node else NS_EVENT client = self.host.getClient(profile_key) @@ -283,12 +283,12 @@ defer.returnValue(self._parseEventElt(event_elt)) def 
_eventCreate( - self, timestamp, data, service, node, id_=u"", profile_key=C.PROF_KEY_NONE + self, timestamp, data, service, node, id_="", profile_key=C.PROF_KEY_NONE ): service = jid.JID(service) if service else None node = node or None client = self.host.getClient(profile_key) - data[u"register"] = C.bool(data.get(u"register", C.BOOL_FALSE)) + data["register"] = C.bool(data.get("register", C.BOOL_FALSE)) return self.eventCreate(client, timestamp, data, service, node, id_ or NS_EVENT) @defer.inlineCallbacks @@ -311,32 +311,32 @@ @return (unicode): created node """ if not event_id: - raise ValueError(_(u"event_id must be set")) + raise ValueError(_("event_id must be set")) if not service: service = client.jid.userhostJID() if not node: - node = NS_EVENT + u"__" + shortuuid.uuid() + node = NS_EVENT + "__" + shortuuid.uuid() event_elt = domish.Element((NS_EVENT, "event")) if timestamp is not None and timestamp != -1: formatted_date = utils.xmpp_date(timestamp) event_elt.addElement((NS_EVENT, "date"), content=formatted_date) register = data.pop("register", False) - for key in (u"name",): + for key in ("name",): if key in data: event_elt[key] = data.pop(key) - for key in (u"description",): + for key in ("description",): if key in data: event_elt.addElement((NS_EVENT, key), content=data.pop(key)) - for key in (u"image", u"background-image"): + for key in ("image", "background-image"): if key in data: elt = event_elt.addElement((NS_EVENT, key)) elt["src"] = data.pop(key) # we first create the invitees and blog nodes (if not specified in data) - for uri_type in (u"invitees", u"blog"): - key = uri_type + u"_uri" - for to_delete in (u"service", u"node"): - k = uri_type + u"_" + to_delete + for uri_type in ("invitees", "blog"): + key = uri_type + "_uri" + for to_delete in ("service", "node"): + k = uri_type + "_" + to_delete if k in data: del data[k] if key not in data: @@ -352,12 +352,12 @@ else: uri = data.pop(key) uri_data = xmpp_uri.parseXMPPUri(uri) - if 
uri_data[u"type"] != u"pubsub": + if uri_data["type"] != "pubsub": raise ValueError( - _(u"The given URI is not valid: {uri}").format(uri=uri) + _("The given URI is not valid: {uri}").format(uri=uri) ) - uri_service = jid.JID(uri_data[u"path"]) - uri_node = uri_data[u"node"] + uri_service = jid.JID(uri_data["path"]) + uri_node = uri_data["node"] elt = event_elt.addElement((NS_EVENT, uri_type)) elt["uri"] = xmpp_uri.buildXMPPUri( @@ -365,7 +365,7 @@ ) # remaining data are put in elements - for key in data.keys(): + for key in list(data.keys()): elt = event_elt.addElement((NS_EVENT, "meta"), content=data.pop(key)) elt["name"] = key @@ -374,8 +374,8 @@ # TODO: check auto-create, no need to create node first if available node = yield self._p.createNode(client, service, nodeIdentifier=node) except error.StanzaError as e: - if e.condition == u"conflict": - log.debug(_(u"requested node already exists")) + if e.condition == "conflict": + log.debug(_("requested node already exists")) yield self._p.publish(client, service, node, items=[item_elt]) @@ -388,7 +388,7 @@ profile_key=C.PROF_KEY_NONE): service = jid.JID(service) if service else None if not node: - raise ValueError(_(u"missing node")) + raise ValueError(_("missing node")) client = self.host.getClient(profile_key) return self.eventModify( client, service, node, id_ or NS_EVENT, timestamp_update or None, data_update @@ -407,13 +407,13 @@ new_timestamp = event_timestamp if timestamp_update is None else timestamp_update new_data = event_metadata if data_update: - for k, v in data_update.iteritems(): + for k, v in data_update.items(): new_data[k] = v yield self.eventCreate(client, new_timestamp, new_data, service, node, id_) def _eventsListSerialise(self, events): for timestamp, data in events: - data["date"] = unicode(timestamp) + data["date"] = str(timestamp) data["creator"] = C.boolConst(data.get("creator", False)) return [e[1] for e in events] @@ -431,15 +431,15 @@ @return list(tuple(int, dict)): list of events 
(timestamp + metadata) """ - items, metadata = yield self.host.plugins[u'LIST_INTEREST'].listInterests( + items, metadata = yield self.host.plugins['LIST_INTEREST'].listInterests( client, service, node, namespace=NS_EVENT) events = [] for item in items: try: - event_elt = next(item.interest.pubsub.elements(NS_EVENT, u"event")) + event_elt = next(item.interest.pubsub.elements(NS_EVENT, "event")) except StopIteration: log.warning( - _(u"No event found in item {item_id}, ignoring").format( + _("No event found in item {item_id}, ignoring").format( item_id=item["id"]) ) else: @@ -466,12 +466,12 @@ items, metadata = yield self._p.getItems( client, service, node, item_ids=[client.jid.userhost()] ) - event_elt = next(items[0].elements(NS_EVENT, u"invitee")) + event_elt = next(items[0].elements(NS_EVENT, "invitee")) except (exceptions.NotFound, IndexError): # no item found, event data are not set yet defer.returnValue({}) data = {} - for key in (u"attend", u"guests"): + for key in ("attend", "guests"): try: data[key] = event_elt[key] except KeyError: @@ -495,7 +495,7 @@ guests: an int """ event_elt = domish.Element((NS_EVENT, "invitee")) - for key in (u"attend", u"guests"): + for key in ("attend", "guests"): try: event_elt[key] = data.pop(key) except KeyError: @@ -522,15 +522,15 @@ invitees = {} for item in items: try: - event_elt = next(item.elements(NS_EVENT, u"invitee")) + event_elt = next(item.elements(NS_EVENT, "invitee")) except StopIteration: # no item found, event data are not set yet log.warning(_( - u"no data found for {item_id} (service: {service}, node: {node})" + "no data found for {item_id} (service: {service}, node: {node})" .format(item_id=item["id"], service=service, node=node))) else: data = {} - for key in (u"attend", u"guests"): + for key in ("attend", "guests"): try: data[key] = event_elt[key] except KeyError: @@ -558,30 +558,30 @@ @param item_id(unicode): event id """ # FIXME: handle name and extra - name = u'' + name = '' extra = {} if self._b is 
None: raise exceptions.FeatureNotFound( - _(u'"XEP-0277" (blog) plugin is needed for this feature') + _('"XEP-0277" (blog) plugin is needed for this feature') ) if item_id is None: item_id = NS_EVENT # first we authorize our invitee to see the nodes of interest - yield self._p.setNodeAffiliations(client, service, node, {invitee_jid: u"member"}) - log.debug(_(u"affiliation set on event node")) + yield self._p.setNodeAffiliations(client, service, node, {invitee_jid: "member"}) + log.debug(_("affiliation set on event node")) __, event_data = yield self.eventGet(client, service, node, item_id) - log.debug(_(u"got event data")) + log.debug(_("got event data")) invitees_service = jid.JID(event_data["invitees_service"]) invitees_node = event_data["invitees_node"] blog_service = jid.JID(event_data["blog_service"]) blog_node = event_data["blog_node"] yield self._p.setNodeAffiliations( - client, invitees_service, invitees_node, {invitee_jid: u"publisher"} + client, invitees_service, invitees_node, {invitee_jid: "publisher"} ) - log.debug(_(u"affiliation set on invitee node")) + log.debug(_("affiliation set on invitee node")) yield self._p.setNodeAffiliations( - client, blog_service, blog_node, {invitee_jid: u"member"} + client, blog_service, blog_node, {invitee_jid: "member"} ) blog_items, __ = yield self._b.mbGet(client, blog_service, blog_node, None) @@ -591,29 +591,29 @@ comments_node = item["comments_node"] except KeyError: log.debug( - u"no comment service set for item {item_id}".format( + "no comment service set for item {item_id}".format( item_id=item["id"] ) ) else: yield self._p.setNodeAffiliations( - client, comments_service, comments_node, {invitee_jid: u"publisher"} + client, comments_service, comments_node, {invitee_jid: "publisher"} ) - log.debug(_(u"affiliation set on blog and comments nodes")) + log.debug(_("affiliation set on blog and comments nodes")) # now we send the invitation - pubsub_invitation = self.host.plugins[u'INVITATION'] + pubsub_invitation = 
self.host.plugins['INVITATION'] pubsub_invitation.sendPubsubInvitation(client, invitee_jid, service, node, item_id, name, extra) - def _inviteByEmail(self, service, node, id_=NS_EVENT, email=u"", emails_extra=None, - name=u"", host_name=u"", language=u"", url_template=u"", - message_subject=u"", message_body=u"", + def _inviteByEmail(self, service, node, id_=NS_EVENT, email="", emails_extra=None, + name="", host_name="", language="", url_template="", + message_subject="", message_body="", profile_key=C.PROF_KEY_NONE): client = self.host.getClient(profile_key) kwargs = { - u"profile": client.profile, - u"emails_extra": [unicode(e) for e in emails_extra], + "profile": client.profile, + "emails_extra": [str(e) for e in emails_extra], } for key in ( "email", @@ -625,7 +625,7 @@ "message_body", ): value = locals()[key] - kwargs[key] = unicode(value) + kwargs[key] = str(value) return self.inviteByEmail( client, jid.JID(service) if service else None, node, id_ or NS_EVENT, **kwargs ) @@ -640,26 +640,26 @@ """ if self._i is None: raise exceptions.FeatureNotFound( - _(u'"Invitations" plugin is needed for this feature') + _('"Invitations" plugin is needed for this feature') ) if self._b is None: raise exceptions.FeatureNotFound( - _(u'"XEP-0277" (blog) plugin is needed for this feature') + _('"XEP-0277" (blog) plugin is needed for this feature') ) service = service or client.jid.userhostJID() event_uri = xmpp_uri.buildXMPPUri( "pubsub", path=service.full(), node=node, item=id_ ) - kwargs["extra"] = {u"event_uri": event_uri} + kwargs["extra"] = {"event_uri": event_uri} invitation_data = yield self._i.create(**kwargs) - invitee_jid = invitation_data[u"jid"] - log.debug(_(u"invitation created")) + invitee_jid = invitation_data["jid"] + log.debug(_("invitation created")) # now that we have a jid, we can send normal invitation yield self.invite(client, invitee_jid, service, node, id_) +@implementer(iwokkel.IDisco) class EventsHandler(XMPPHandler): - implements(iwokkel.IDisco) def 
__init__(self, plugin_parent): self.plugin_parent = plugin_parent diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_invitation.py --- a/sat/plugins/plugin_exp_invitation.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_invitation.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -24,7 +24,7 @@ from twisted.internet import defer from twisted.words.protocols.jabber import jid from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber.xmlstream import XMPPHandler log = getLogger(__name__) @@ -35,24 +35,24 @@ C.PI_IMPORT_NAME: "INVITATION", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"XEP-0329"], + C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0329"], C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "Invitation", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"Experimental handling of invitations"), + C.PI_DESCRIPTION: _("Experimental handling of invitations"), } -NS_INVITATION = u"https://salut-a-toi/protocol/invitation:0" +NS_INVITATION = "https://salut-a-toi/protocol/invitation:0" INVITATION = '/message/invitation[@xmlns="{ns_invit}"]'.format( ns_invit=NS_INVITATION ) -NS_INVITATION_LIST = NS_INVITATION + u"#list" +NS_INVITATION_LIST = NS_INVITATION + "#list" class Invitation(object): def __init__(self, host): - log.info(_(u"Invitation plugin initialization")) + log.info(_("Invitation plugin initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] # map from namespace of the invitation to callback handling it @@ -88,7 +88,7 @@ """ if namespace in self._ns_cb: raise exceptions.ConflictError( - u"invitation namespace {namespace} is already register with {callback}" + "invitation namespace {namespace} is already register with {callback}" .format(namespace=namespace, callback=self._ns_cb[namespace])) 
self._ns_cb[namespace] = callback @@ -113,15 +113,15 @@ client.generateMessageXML(mess_data) invitation_elt = mess_data["xml"].addElement("invitation", NS_INVITATION) if name is not None: - invitation_elt[u"name"] = name - thumb_url = extra.get(u'thumb_url') + invitation_elt["name"] = name + thumb_url = extra.get('thumb_url') if thumb_url: - if not thumb_url.startswith(u'http'): + if not thumb_url.startswith('http'): log.warning( - u"only http URLs are allowed for thumbnails, got {url}, ignoring" + "only http URLs are allowed for thumbnails, got {url}, ignoring" .format(url=thumb_url)) else: - invitation_elt[u'thumb_url'] = thumb_url + invitation_elt['thumb_url'] = thumb_url return mess_data, invitation_elt def sendPubsubInvitation(self, client, invitee_jid, service, node, @@ -139,11 +139,11 @@ extra = {} mess_data, invitation_elt = self._generateBaseInvitation( client, invitee_jid, name, extra) - pubsub_elt = invitation_elt.addElement(u"pubsub") - pubsub_elt[u"service"] = service.full() - pubsub_elt[u"node"] = node - pubsub_elt[u"item"] = item_id - return client.send(mess_data[u"xml"]) + pubsub_elt = invitation_elt.addElement("pubsub") + pubsub_elt["service"] = service.full() + pubsub_elt["node"] = node + pubsub_elt["item"] = item_id + return client.send(mess_data["xml"]) def sendFileSharingInvitation(self, client, invitee_jid, service, repos_type=None, namespace=None, path=None, name=None, extra=None): @@ -163,20 +163,20 @@ extra = {} mess_data, invitation_elt = self._generateBaseInvitation( client, invitee_jid, name, extra) - file_sharing_elt = invitation_elt.addElement(u"file_sharing") - file_sharing_elt[u"service"] = service.full() + file_sharing_elt = invitation_elt.addElement("file_sharing") + file_sharing_elt["service"] = service.full() if repos_type is not None: - if repos_type not in (u"files", "photos"): - msg = u"unknown repository type: {repos_type}".format( + if repos_type not in ("files", "photos"): + msg = "unknown repository type: 
{repos_type}".format( repos_type=repos_type) log.warning(msg) raise exceptions.DateError(msg) - file_sharing_elt[u"type"] = repos_type + file_sharing_elt["type"] = repos_type if namespace is not None: - file_sharing_elt[u"namespace"] = namespace + file_sharing_elt["namespace"] = namespace if path is not None: - file_sharing_elt[u"path"] = path - return client.send(mess_data[u"xml"]) + file_sharing_elt["path"] = path + return client.send(mess_data["xml"]) @defer.inlineCallbacks def _parsePubsubElt(self, client, pubsub_elt): @@ -185,25 +185,25 @@ node = pubsub_elt["node"] item_id = pubsub_elt.getAttribute("item") except (RuntimeError, KeyError): - log.warning(_(u"Bad invitation, ignoring")) + log.warning(_("Bad invitation, ignoring")) raise exceptions.DataError try: items, metadata = yield self._p.getItems(client, service, node, item_ids=[item_id]) except Exception as e: - log.warning(_(u"Can't get item linked with invitation: {reason}").format( + log.warning(_("Can't get item linked with invitation: {reason}").format( reason=e)) try: item_elt = items[0] except IndexError: - log.warning(_(u"Invitation was linking to a non existing item")) + log.warning(_("Invitation was linking to a non existing item")) raise exceptions.DataError try: namespace = item_elt.firstChildElement().uri except Exception as e: - log.warning(_(u"Can't retrieve namespace of invitation: {reason}").format( + log.warning(_("Can't retrieve namespace of invitation: {reason}").format( reason = e)) raise exceptions.DataError @@ -214,41 +214,41 @@ try: service = jid.JID(file_sharing_elt["service"]) except (RuntimeError, KeyError): - log.warning(_(u"Bad invitation, ignoring")) + log.warning(_("Bad invitation, ignoring")) raise exceptions.DataError - repos_type = file_sharing_elt.getAttribute(u"type", u"files") - namespace = file_sharing_elt.getAttribute(u"namespace") - path = file_sharing_elt.getAttribute(u"path") + repos_type = file_sharing_elt.getAttribute("type", "files") + namespace = 
file_sharing_elt.getAttribute("namespace") + path = file_sharing_elt.getAttribute("path") args = [service, repos_type, namespace, path] - ns_fis = self.host.getNamespace(u"fis") + ns_fis = self.host.getNamespace("fis") return ns_fis, args @defer.inlineCallbacks def onInvitation(self, message_elt, client): - log.debug(u"invitation received [{profile}]".format(profile=client.profile)) + log.debug("invitation received [{profile}]".format(profile=client.profile)) invitation_elt = message_elt.invitation - name = invitation_elt.getAttribute(u"name") + name = invitation_elt.getAttribute("name") extra = {} - if invitation_elt.hasAttribute(u"thumb_url"): - extra[u'thumb_url'] = invitation_elt[u'thumb_url'] + if invitation_elt.hasAttribute("thumb_url"): + extra['thumb_url'] = invitation_elt['thumb_url'] for elt in invitation_elt.elements(): if elt.uri != NS_INVITATION: - log.warning(u"unexpected element: {xml}".format(xml=elt.toXml())) + log.warning("unexpected element: {xml}".format(xml=elt.toXml())) continue - if elt.name == u"pubsub": + if elt.name == "pubsub": method = self._parsePubsubElt - elif elt.name == u"file_sharing": + elif elt.name == "file_sharing": method = self._parseFileSharingElt else: - log.warning(u"not implemented invitation element: {xml}".format( + log.warning("not implemented invitation element: {xml}".format( xml = elt.toXml())) continue try: namespace, args = yield method(client, elt) except exceptions.DataError: - log.warning(u"Can't parse invitation element: {xml}".format( + log.warning("Can't parse invitation element: {xml}".format( xml = elt.toXml())) continue @@ -256,14 +256,14 @@ cb = self._ns_cb[namespace] except KeyError: log.warning(_( - u'No handler for namespace "{namespace}", invitation ignored') + 'No handler for namespace "{namespace}", invitation ignored') .format(namespace=namespace)) else: cb(client, name, extra, *args) +@implementer(iwokkel.IDisco) class PubsubInvitationHandler(XMPPHandler): - implements(iwokkel.IDisco) def 
__init__(self, plugin_parent): self.plugin_parent = plugin_parent diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_invitation_file.py --- a/sat/plugins/plugin_exp_invitation_file.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_invitation_file.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -31,21 +31,21 @@ C.PI_IMPORT_NAME: "FILE_SHARING_INVITATION", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: ["XEP-0329", u"INVITATION"], + C.PI_DEPENDENCIES: ["XEP-0329", "INVITATION"], C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "FileSharingInvitation", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"Experimental handling of invitations for file sharing"), + C.PI_DESCRIPTION: _("Experimental handling of invitations for file sharing"), } class FileSharingInvitation(object): def __init__(self, host): - log.info(_(u"File Sharing Invitation plugin initialization")) + log.info(_("File Sharing Invitation plugin initialization")) self.host = host - ns_fis = host.getNamespace(u"fis") - host.plugins[u"INVITATION"].registerNamespace(ns_fis, self.onInvitation) + ns_fis = host.getNamespace("fis") + host.plugins["INVITATION"].registerNamespace(ns_fis, self.onInvitation) host.bridge.addMethod( "FISInvite", ".plugin", @@ -56,30 +56,30 @@ def _sendFileSharingInvitation( self, invitee_jid_s, service_s, repos_type=None, namespace=None, path=None, - name=None, extra_s=u'', profile_key=C.PROF_KEY_NONE): + name=None, extra_s='', profile_key=C.PROF_KEY_NONE): client = self.host.getClient(profile_key) invitee_jid = jid.JID(invitee_jid_s) service = jid.JID(service_s) extra = data_format.deserialise(extra_s) - return self.host.plugins[u"INVITATION"].sendFileSharingInvitation( + return self.host.plugins["INVITATION"].sendFileSharingInvitation( client, invitee_jid, service, repos_type=repos_type or None, namespace=namespace or None, path=path or None, 
name=name or None, extra=extra) def onInvitation(self, client, name, extra, service, repos_type, namespace, path): - if repos_type == u"files": - type_human = _(u"file sharing") - elif repos_type == u"photos": - type_human = _(u"photos album") + if repos_type == "files": + type_human = _("file sharing") + elif repos_type == "photos": + type_human = _("photos album") else: - log.warning(u"Unknown repository type: {repos_type}".format( + log.warning("Unknown repository type: {repos_type}".format( repos_type=repos_type)) - repos_type = u"file" - type_human = _(u"file sharing") + repos_type = "file" + type_human = _("file sharing") log.info(_( - u'{profile} has received an invitation for a files repository ({type_human}) ' - u'with namespace "{namespace}" at path [{path}]').format( + '{profile} has received an invitation for a files repository ({type_human}) ' + 'with namespace "{namespace}" at path [{path}]').format( profile=client.profile, type_human=type_human, namespace=namespace, path=path) ) - return self.host.plugins[u'LIST_INTEREST'].registerFileSharing( + return self.host.plugins['LIST_INTEREST'].registerFileSharing( client, service, repos_type, namespace, path, name, extra) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_jingle_stream.py --- a/sat/plugins/plugin_exp_jingle_stream.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_jingle_stream.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing pipes (experimental) @@ -51,8 +51,8 @@ C.PI_DESCRIPTION: _("""Jingle Stream plugin"""), } -CONFIRM = D_(u"{peer} wants to send you a stream, do you accept ?") -CONFIRM_TITLE = D_(u"Stream Request") +CONFIRM = D_("{peer} wants to send you a stream, do you accept ?") +CONFIRM_TITLE = D_("Stream Request") class StreamProtocol(protocol.Protocol): @@ -119,7 +119,7 @@ def startStream(self, consumer): if self.consumer is not None: raise 
exceptions.InternalError( - _(u"stream can't be used with multiple consumers") + _("stream can't be used with multiple consumers") ) assert self.deferred is None self.consumer = consumer @@ -166,7 +166,7 @@ try: self.client_conn.sendData(data) except AttributeError: - log.warning(_(u"No client connected, can't send data")) + log.warning(_("No client connected, can't send data")) def writeToConsumer(self, data): self.consumer.write(data) @@ -186,7 +186,7 @@ in_sign="ss", out_sign="s", method=self._streamOut, - async=True, + async_=True, ) # jingle callbacks @@ -227,7 +227,7 @@ } ], ) - defer.returnValue(unicode(port)) + defer.returnValue(str(port)) def jingleSessionInit(self, client, session, content_name, stream_object): content_data = session["contents"][content_name] @@ -245,7 +245,7 @@ self._j.ROLE_INITIATOR, self._j.ROLE_RESPONDER, ): - log.warning(u"Bad sender, assuming initiator") + log.warning("Bad sender, assuming initiator") content_data["senders"] = self._j.ROLE_INITIATOR confirm_data = yield xml_tools.deferDialog( @@ -266,7 +266,7 @@ try: port = int(confirm_data["port"]) except (ValueError, KeyError): - raise exceptions.DataError(_(u"given port is invalid")) + raise exceptions.DataError(_("given port is invalid")) endpoint = endpoints.TCP4ClientEndpoint(reactor, "localhost", port) factory = StreamFactory() yield endpoint.connect(factory) @@ -288,16 +288,16 @@ args = [client, session, content_name, content_data] finished_d.addCallbacks(self._finishedCb, self._finishedEb, args, None, args) else: - log.warning(u"FIXME: unmanaged action {}".format(action)) + log.warning("FIXME: unmanaged action {}".format(action)) return desc_elt def _finishedCb(self, __, client, session, content_name, content_data): - log.info(u"Pipe transfer completed") + log.info("Pipe transfer completed") self._j.contentTerminate(client, session, content_name) content_data["stream_object"].stopStream() def _finishedEb(self, failure, client, session, content_name, content_data): - 
log.warning(u"Error while streaming pipe: {}".format(failure)) + log.warning("Error while streaming pipe: {}".format(failure)) self._j.contentTerminate( client, session, content_name, reason=self._j.REASON_FAILED_TRANSPORT ) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_lang_detect.py --- a/sat/plugins/plugin_exp_lang_detect.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_lang_detect.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -28,7 +28,7 @@ from langid.langid import LanguageIdentifier, model except ImportError: raise exceptions.MissingModule( - u'Missing module langid, please download/install it with "pip install langid")' + 'Missing module langid, please download/install it with "pip install langid")' ) identifier = LanguageIdentifier.from_modelstring(model, norm_probs=False) @@ -45,9 +45,9 @@ C.PI_DESCRIPTION: _("""Detect and set message language when unknown"""), } -CATEGORY = D_(u"Misc") -NAME = u"lang_detect" -LABEL = D_(u"language detection") +CATEGORY = D_("Misc") +NAME = "lang_detect" +LABEL = D_("language detection") PARAMS = """ @@ -63,7 +63,7 @@ class LangDetect(object): def __init__(self, host): - log.info(_(u"Language detection plugin initialization")) + log.info(_("Language detection plugin initialization")) self.host = host host.memory.updateParams(PARAMS) host.trigger.add("MessageReceived", self.MessageReceivedTrigger) @@ -71,8 +71,8 @@ def addLanguage(self, mess_data): message = mess_data["message"] - if len(message) == 1 and message.keys()[0] == "": - msg = message.values()[0] + if len(message) == 1 and list(message.keys())[0] == "": + msg = list(message.values())[0] lang = identifier.classify(msg)[0] mess_data["message"] = {lang: msg} return mess_data diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_list_of_interest.py --- a/sat/plugins/plugin_exp_list_of_interest.py Wed Jul 31 
11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_list_of_interest.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to detect language (experimental) @@ -21,7 +21,7 @@ from sat.core.constants import Const as C from sat.core.log import getLogger from wokkel import disco, iwokkel, pubsub -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.words.protocols.jabber import error as jabber_error, jid from twisted.words.protocols.jabber.xmlstream import XMPPHandler @@ -35,11 +35,11 @@ C.PI_IMPORT_NAME: "LIST_INTEREST", C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"XEP-0329"], + C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0329"], C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "ListInterest", C.PI_HANDLER: "yes", - C.PI_DESCRIPTION: _(u"Experimental handling of interesting XMPP locations"), + C.PI_DESCRIPTION: _("Experimental handling of interesting XMPP locations"), } NS_LIST_INTEREST = "https://salut-a-toi/protocol/list-interest:0" @@ -49,7 +49,7 @@ namespace = NS_LIST_INTEREST def __init__(self, host): - log.info(_(u"List of Interest plugin initialization")) + log.info(_("List of Interest plugin initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] host.bridge.addMethod( @@ -58,7 +58,7 @@ in_sign="ssss", out_sign="aa{ss}", method=self._listInterests, - async=True, + async_=True, ) def getHandler(self, client): @@ -76,8 +76,8 @@ options=options, ) except jabber_error.StanzaError as e: - if e.condition == u"conflict": - log.debug(_(u"requested node already exists")) + if e.condition == "conflict": + log.debug(_("requested node already exists")) @defer.inlineCallbacks def registerPubsub(self, client, namespace, service, node, item_id=None, @@ -101,20 +101,20 @@ if extra is None: extra = {} yield self.createNode(client) - interest_elt = domish.Element((NS_LIST_INTEREST, u"interest")) - 
interest_elt[u"namespace"] = namespace + interest_elt = domish.Element((NS_LIST_INTEREST, "interest")) + interest_elt["namespace"] = namespace if name is not None: - interest_elt[u'name'] = name - thumb_url = extra.get(u'thumb_url') + interest_elt['name'] = name + thumb_url = extra.get('thumb_url') if thumb_url: - interest_elt[u'thumb_url'] = thumb_url - pubsub_elt = interest_elt.addElement(u"pubsub") - pubsub_elt[u"service"] = service.full() - pubsub_elt[u"node"] = node + interest_elt['thumb_url'] = thumb_url + pubsub_elt = interest_elt.addElement("pubsub") + pubsub_elt["service"] = service.full() + pubsub_elt["node"] = node if item_id is not None: - pubsub_elt[u"item"] = item_id + pubsub_elt["item"] = item_id if creator: - pubsub_elt[u"creator"] = C.BOOL_TRUE + pubsub_elt["creator"] = C.BOOL_TRUE if element is not None: pubsub_elt.addChild(element) item_elt = pubsub.Item(payload=interest_elt) @@ -138,21 +138,21 @@ if extra is None: extra = {} yield self.createNode(client) - interest_elt = domish.Element((NS_LIST_INTEREST, u"interest")) - interest_elt[u"namespace"] = self.host.getNamespace(u"fis") + interest_elt = domish.Element((NS_LIST_INTEREST, "interest")) + interest_elt["namespace"] = self.host.getNamespace("fis") if name is not None: - interest_elt[u'name'] = name - thumb_url = extra.get(u'thumb_url') + interest_elt['name'] = name + thumb_url = extra.get('thumb_url') if thumb_url: - interest_elt[u'thumb_url'] = thumb_url - file_sharing_elt = interest_elt.addElement(u"file_sharing") - file_sharing_elt[u"service"] = service.full() + interest_elt['thumb_url'] = thumb_url + file_sharing_elt = interest_elt.addElement("file_sharing") + file_sharing_elt["service"] = service.full() if repos_type is not None: - file_sharing_elt[u"type"] = repos_type + file_sharing_elt["type"] = repos_type if namespace is not None: - file_sharing_elt[u"namespace"] = namespace + file_sharing_elt["namespace"] = namespace if path is not None: - file_sharing_elt[u"path"] = path + 
file_sharing_elt["path"] = path item_elt = pubsub.Item(payload=interest_elt) yield self._p.publish( client, client.jid.userhostJID(), NS_LIST_INTEREST, items=[item_elt] @@ -163,38 +163,38 @@ for item_elt in interests_data[0]: interest_data = {} interest_elt = item_elt.interest - if interest_elt.hasAttribute(u'namespace'): - interest_data[u'namespace'] = interest_elt.getAttribute(u'namespace') - if interest_elt.hasAttribute(u'name'): - interest_data[u'name'] = interest_elt.getAttribute(u'name') - if interest_elt.hasAttribute(u'thumb_url'): - interest_data[u'thumb_url'] = interest_elt.getAttribute(u'thumb_url') + if interest_elt.hasAttribute('namespace'): + interest_data['namespace'] = interest_elt.getAttribute('namespace') + if interest_elt.hasAttribute('name'): + interest_data['name'] = interest_elt.getAttribute('name') + if interest_elt.hasAttribute('thumb_url'): + interest_data['thumb_url'] = interest_elt.getAttribute('thumb_url') elt = interest_elt.firstChildElement() if elt.uri != NS_LIST_INTEREST: - log.warning(u"unexpected child element, ignoring: {xml}".format( + log.warning("unexpected child element, ignoring: {xml}".format( xml = elt.toXml())) continue - if elt.name == u'pubsub': + if elt.name == 'pubsub': interest_data.update({ - u"type": u"pubsub", - u"service": elt[u'service'], - u"node": elt[u'node'], + "type": "pubsub", + "service": elt['service'], + "node": elt['node'], }) - for attr in (u'item', u'creator'): + for attr in ('item', 'creator'): if elt.hasAttribute(attr): interest_data[attr] = elt[attr] - elif elt.name == u'file_sharing': + elif elt.name == 'file_sharing': interest_data.update({ - u"type": u"file_sharing", - u"service": elt[u'service'], + "type": "file_sharing", + "service": elt['service'], }) - if elt.hasAttribute(u'type'): - interest_data[u'subtype'] = elt[u'type'] - for attr in (u'namespace', u'path'): + if elt.hasAttribute('type'): + interest_data['subtype'] = elt['type'] + for attr in ('namespace', 'path'): if 
elt.hasAttribute(attr): interest_data[attr] = elt[attr] else: - log.warning(u"unknown element, ignoring: {xml}".format(xml=elt.toXml())) + log.warning("unknown element, ignoring: {xml}".format(xml=elt.toXml())) continue interests.append(interest_data) @@ -229,20 +229,20 @@ filtered_items = [] for item in items: try: - interest_elt = next(item.elements(NS_LIST_INTEREST, u"interest")) + interest_elt = next(item.elements(NS_LIST_INTEREST, "interest")) except StopIteration: - log.warning(_(u"Missing interest element: {xml}").format( + log.warning(_("Missing interest element: {xml}").format( xml=interest_elt.toXml())) continue - if interest_elt.getAttribute(u"namespace") == namespace: + if interest_elt.getAttribute("namespace") == namespace: filtered_items.append(item) items = filtered_items defer.returnValue((items, metadata)) +@implementer(iwokkel.IDisco) class ListInterestHandler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_parrot.py --- a/sat/plugins/plugin_exp_parrot.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_parrot.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for parrot mode (experimental) @@ -38,7 +38,7 @@ C.PI_MAIN: "Exp_Parrot", C.PI_HANDLER: "no", C.PI_DESCRIPTION: _( - u"""Implementation of parrot mode (repeat messages between 2 entities)""" + """Implementation of parrot mode (repeat messages between 2 entities)""" ), } @@ -60,7 +60,7 @@ try: self.host.plugins[C.TEXT_CMDS].registerTextCommands(self) except KeyError: - log.info(_(u"Text commands not available")) + log.info(_("Text commands not available")) # def sendMessageTrigger(self, client, mess_data, treatments): # """ Deactivate other triggers if recipient is in parrot links """ @@ -90,7 +90,7 @@ message = {} for e in message_elt.elements(C.NS_CLIENT, "body"): - body = 
unicode(e) + body = str(e) lang = e.getAttribute("lang") or "" try: @@ -107,12 +107,12 @@ return True else: src_txt = from_jid.user - message[lang] = u"[{}] {}".format(src_txt, body) + message[lang] = "[{}] {}".format(src_txt, body) linked = _links[from_jid.userhostJID()] client.sendMessage( - jid.JID(unicode(linked)), message, None, "auto", no_trigger=True + jid.JID(str(linked)), message, None, "auto", no_trigger=True ) return True @@ -130,8 +130,8 @@ _links[source_jid.userhostJID()] = dest_jid log.info( - u"Parrot mode: %s will be repeated to %s" - % (source_jid.userhost(), unicode(dest_jid)) + "Parrot mode: %s will be repeated to %s" + % (source_jid.userhost(), str(dest_jid)) ) def removeParrot(self, client, source_jid): @@ -166,7 +166,7 @@ txt_cmd.feedBack( client, - "Parrot mode activated for {}".format(unicode(link_left_jid)), + "Parrot mode activated for {}".format(str(link_left_jid)), mess_data, ) @@ -183,7 +183,7 @@ raise jid.InvalidFormat except jid.InvalidFormat: txt_cmd.feedBack( - client, u"Can't deactivate Parrot mode for invalid jid", mess_data + client, "Can't deactivate Parrot mode for invalid jid", mess_data ) return False @@ -194,8 +194,8 @@ txt_cmd.feedBack( client, - u"Parrot mode deactivated for {} and {}".format( - unicode(link_left_jid), unicode(link_right_jid) + "Parrot mode deactivated for {} and {}".format( + str(link_left_jid), str(link_right_jid) ), mess_data, ) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_pubsub_admin.py --- a/sat/plugins/plugin_exp_pubsub_admin.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_pubsub_admin.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin to send pubsub requests with administrator privilege @@ -29,20 +29,20 @@ log = getLogger(__name__) PLUGIN_INFO = { - C.PI_NAME: u"Pubsub Administrator", - C.PI_IMPORT_NAME: u"PUBSUB_ADMIN", + C.PI_NAME: "Pubsub Administrator", + C.PI_IMPORT_NAME: 
"PUBSUB_ADMIN", C.PI_TYPE: C.PLUG_TYPE_EXP, C.PI_PROTOCOLS: [], C.PI_DEPENDENCIES: [], C.PI_RECOMMENDATIONS: [], - C.PI_MAIN: u"PubsubAdmin", - C.PI_HANDLER: u"no", - C.PI_DESCRIPTION: _(u"""\Implementation of Pubsub Administrator + C.PI_MAIN: "PubsubAdmin", + C.PI_HANDLER: "no", + C.PI_DESCRIPTION: _("""\Implementation of Pubsub Administrator This allows a pubsub administrator to overwrite completly items, including publisher. Specially useful when importing a node."""), } -NS_PUBSUB_ADMIN = u"https://salut-a-toi.org/spec/pubsub_admin:0" +NS_PUBSUB_ADMIN = "https://salut-a-toi.org/spec/pubsub_admin:0" class PubsubAdmin(object): @@ -55,7 +55,7 @@ in_sign="ssasss", out_sign="as", method=self._publish, - async=True, + async_=True, ) def _publish(self, service, nodeIdentifier, items, extra=None, @@ -71,22 +71,22 @@ def _sendCb(self, iq_result): publish_elt = iq_result.admin.pubsub.publish ids = [] - for item_elt in publish_elt.elements(pubsub.NS_PUBSUB, u'item'): - ids.append(item_elt[u'id']) + for item_elt in publish_elt.elements(pubsub.NS_PUBSUB, 'item'): + ids.append(item_elt['id']) return ids def publish(self, client, service, nodeIdentifier, items, extra=None): for item in items: - if item.name != u'item' or item.uri != pubsub.NS_PUBSUB: + if item.name != 'item' or item.uri != pubsub.NS_PUBSUB: raise exceptions.DataError( - u'Invalid element, a pubsub item is expected: {xml}'.format( + 'Invalid element, a pubsub item is expected: {xml}'.format( xml=item.toXml())) iq_elt = client.IQ() iq_elt['to'] = service.full() if service else client.jid.userhost() - admin_elt = iq_elt.addElement((NS_PUBSUB_ADMIN, u'admin')) - pubsub_elt = admin_elt.addElement((pubsub.NS_PUBSUB, u'pubsub')) + admin_elt = iq_elt.addElement((NS_PUBSUB_ADMIN, 'admin')) + pubsub_elt = admin_elt.addElement((pubsub.NS_PUBSUB, 'pubsub')) publish_elt = pubsub_elt.addElement('publish') - publish_elt[u'node'] = nodeIdentifier + publish_elt['node'] = nodeIdentifier for item in items: 
publish_elt.addChild(item) d = iq_elt.send() diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_pubsub_hook.py --- a/sat/plugins/plugin_exp_pubsub_hook.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_pubsub_hook.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Pubsub Hooks @@ -43,17 +43,17 @@ } #  python module -HOOK_TYPE_PYTHON = u"python" +HOOK_TYPE_PYTHON = "python" # python file path -HOOK_TYPE_PYTHON_FILE = u"python_file" +HOOK_TYPE_PYTHON_FILE = "python_file" # python code directly -HOOK_TYPE_PYTHON_CODE = u"python_code" +HOOK_TYPE_PYTHON_CODE = "python_code" HOOK_TYPES = (HOOK_TYPE_PYTHON, HOOK_TYPE_PYTHON_FILE, HOOK_TYPE_PYTHON_CODE) class PubsubHook(object): def __init__(self, host): - log.info(_(u"PubSub Hook initialization")) + log.info(_("PubSub Hook initialization")) self.host = host self.node_hooks = {} # keep track of the number of hooks per node (for all profiles) host.bridge.addMethod( @@ -90,7 +90,7 @@ def _installNodeManager(self, client, node): if node in self.node_hooks: - log.debug(_(u"node manager already set for {node}").format(node=node)) + log.debug(_("node manager already set for {node}").format(node=node)) self.node_hooks[node] += 1 else: # first hook on this node @@ -98,29 +98,29 @@ node, items_cb=self._itemsReceived ) self.node_hooks[node] = 0 - log.info(_(u"node manager installed on {node}").format(node=node)) + log.info(_("node manager installed on {node}").format(node=node)) def _removeNodeManager(self, client, node): try: self.node_hooks[node] -= 1 except KeyError: - log.error(_(u"trying to remove a {node} without hook").format(node=node)) + log.error(_("trying to remove a {node} without hook").format(node=node)) else: if self.node_hooks[node] == 0: del self.node_hooks[node] self.host.plugins["XEP-0060"].removeManagedNode(node, self._itemsReceived) - log.debug(_(u"hook removed")) + log.debug(_("hook removed")) else: - 
log.debug(_(u"node still needed for an other hook")) + log.debug(_("node still needed for an other hook")) def installHook(self, client, service, node, hook_type, hook_arg, persistent): if hook_type not in HOOK_TYPES: raise exceptions.DataError( - _(u"{hook_type} is not handled").format(hook_type=hook_type) + _("{hook_type} is not handled").format(hook_type=hook_type) ) if hook_type != HOOK_TYPE_PYTHON_FILE: raise NotImplementedError( - _(u"{hook_type} hook type not implemented yet").format( + _("{hook_type} hook type not implemented yet").format( hook_type=hook_type ) ) @@ -136,8 +136,8 @@ hooks_list.append(hook_data) log.info( - _(u"{persistent} hook installed on {node} for {profile}").format( - persistent=_(u"persistent") if persistent else _(u"temporary"), + _("{persistent} hook installed on {node} for {profile}").format( + persistent=_("persistent") if persistent else _("temporary"), node=node, profile=client.profile, ) @@ -160,18 +160,18 @@ try: if hook_type == HOOK_TYPE_PYTHON_FILE: hook_globals = {} - execfile(hook_data["arg"], hook_globals) + exec(compile(open(hook_data["arg"], "rb").read(), hook_data["arg"], 'exec'), hook_globals) callback = hook_globals["hook"] else: raise NotImplementedError( - _(u"{hook_type} hook type not implemented yet").format( + _("{hook_type} hook type not implemented yet").format( hook_type=hook_type ) ) except Exception as e: log.warning( _( - u"Can't load Pubsub hook at node {node}, it will be removed: {reason}" + "Can't load Pubsub hook at node {node}, it will be removed: {reason}" ).format(node=node, reason=e) ) hooks_list.remove(hook_data) @@ -183,7 +183,7 @@ except Exception as e: log.warning( _( - u"Error while running Pubsub hook for node {node}: {msg}" + "Error while running Pubsub hook for node {node}: {msg}" ).format(node=node, msg=e) ) @@ -193,9 +193,9 @@ return self.addHook( client, service, - unicode(node), - unicode(hook_type), - unicode(hook_arg), + str(node), + str(hook_type), + str(hook_arg), persistent, ) @@ 
-242,11 +242,11 @@ if node in hooks: for hook_data in hooks[node]: if ( - service != hook_data[u"service"] + service != hook_data["service"] or hook_type is not None - and hook_type != hook_data[u"type"] + and hook_type != hook_data["type"] or hook_arg is not None - and hook_arg != hook_data[u"arg"] + and hook_arg != hook_data["arg"] ): continue hooks[node].remove(hook_data) @@ -263,8 +263,8 @@ def _listHooks(self, profile): hooks_list = self.listHooks(self.host.getClient(profile)) for hook in hooks_list: - hook[u"service"] = hook[u"service"].full() - hook[u"persistent"] = C.boolConst(hook[u"persistent"]) + hook["service"] = hook["service"].full() + hook["persistent"] = C.boolConst(hook["persistent"]) return hooks_list def listHooks(self, client): @@ -272,15 +272,15 @@ hooks_list = [] for hooks in (client._hooks, client._hooks_temporary): persistent = hooks is client._hooks - for node, hooks_data in hooks.iteritems(): + for node, hooks_data in hooks.items(): for hook_data in hooks_data: hooks_list.append( { - u"service": hook_data[u"service"], - u"node": node, - u"type": hook_data[u"type"], - u"arg": hook_data[u"arg"], - u"persistent": persistent, + "service": hook_data["service"], + "node": node, + "type": hook_data["type"], + "arg": hook_data["arg"], + "persistent": persistent, } ) return hooks_list diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_exp_pubsub_schema.py --- a/sat/plugins/plugin_exp_pubsub_schema.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_exp_pubsub_schema.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Pubsub Schemas @@ -20,7 +20,7 @@ from collections import Iterable import copy import itertools -from zope.interface import implements +from zope.interface import implementer from twisted.words.protocols.jabber import jid from twisted.words.protocols.jabber.xmlstream import XMPPHandler from twisted.internet import defer @@ -38,23 +38,23 @@ 
log = getLogger(__name__) -NS_SCHEMA = u"https://salut-a-toi/protocol/schema:0" +NS_SCHEMA = "https://salut-a-toi/protocol/schema:0" PLUGIN_INFO = { - C.PI_NAME: u"PubSub Schema", - C.PI_IMPORT_NAME: u"PUBSUB_SCHEMA", - C.PI_TYPE: u"EXP", + C.PI_NAME: "PubSub Schema", + C.PI_IMPORT_NAME: "PUBSUB_SCHEMA", + C.PI_TYPE: "EXP", C.PI_PROTOCOLS: [], - C.PI_DEPENDENCIES: [u"XEP-0060", u"IDENTITY"], - C.PI_MAIN: u"PubsubSchema", - C.PI_HANDLER: u"yes", - C.PI_DESCRIPTION: _(u"""Handle Pubsub data schemas"""), + C.PI_DEPENDENCIES: ["XEP-0060", "IDENTITY"], + C.PI_MAIN: "PubsubSchema", + C.PI_HANDLER: "yes", + C.PI_DESCRIPTION: _("""Handle Pubsub data schemas"""), } class PubsubSchema(object): def __init__(self, host): - log.info(_(u"PubSub Schema initialization")) + log.info(_("PubSub Schema initialization")) self.host = host self._p = self.host.plugins["XEP-0060"] self._i = self.host.plugins["IDENTITY"] @@ -64,7 +64,7 @@ in_sign="sss", out_sign="s", method=self._getSchema, - async=True, + async_=True, ) host.bridge.addMethod( "psSchemaSet", @@ -72,15 +72,16 @@ in_sign="ssss", out_sign="", method=self._setSchema, - async=True, + async_=True, ) host.bridge.addMethod( "psSchemaUIGet", ".plugin", in_sign="sss", out_sign="s", - method=utils.partial(self._getUISchema, default_node=None), - async=True, + method=lambda service, nodeIdentifier, profile_key: self._getUISchema( + service, nodeIdentifier, default_node=None, profile_key=profile_key), + async_=True, ) host.bridge.addMethod( "psItemsFormGet", @@ -88,7 +89,7 @@ in_sign="ssssiassa{ss}s", out_sign="(asa{ss})", method=self._getDataFormItems, - async=True, + async_=True, ) host.bridge.addMethod( "psItemFormSend", @@ -96,7 +97,7 @@ in_sign="ssa{sas}ssa{ss}s", out_sign="s", method=self._sendDataFormItem, - async=True, + async_=True, ) def getHandler(self, client): @@ -104,7 +105,7 @@ def _getSchemaBridgeCb(self, schema_elt): if schema_elt is None: - return u"" + return "" return schema_elt.toXml() def _getSchema(self, service, 
nodeIdentifier, profile_key=C.PROF_KEY_NONE): @@ -135,7 +136,7 @@ @return (domish.Element, None): schema ( element) None if not schema has been set on this node """ - iq_elt = client.IQ(u"get") + iq_elt = client.IQ("get") if service is not None: iq_elt["to"] = service.full() pubsub_elt = iq_elt.addElement((NS_SCHEMA, "pubsub")) @@ -163,12 +164,12 @@ the form should not be modified if copy_form is not set """ if schema is None: - log.debug(_(u"unspecified schema, we need to request it")) + log.debug(_("unspecified schema, we need to request it")) schema = yield self.getSchema(client, service, nodeIdentifier) if schema is None: raise exceptions.DataError( _( - u"no schema specified, and this node has no schema either, we can't construct the data form" + "no schema specified, and this node has no schema either, we can't construct the data form" ) ) elif isinstance(schema, data_form.Form): @@ -179,7 +180,7 @@ try: form = data_form.Form.fromElement(schema) except data_form.Error as e: - raise exceptions.DataError(_(u"Invalid Schema: {msg}").format(msg=e)) + raise exceptions.DataError(_("Invalid Schema: {msg}").format(msg=e)) form.formType = form_type defer.returnValue(form) @@ -192,7 +193,7 @@ profile_key=C.PROF_KEY_NONE): if not nodeIdentifier: if not default_node: - raise ValueError(_(u"nodeIndentifier needs to be set")) + raise ValueError(_("nodeIndentifier needs to be set")) nodeIdentifier = default_node client = self.host.getClient(profile_key) service = None if not service else jid.JID(service) @@ -233,7 +234,7 @@ client = self.host.getClient(profile_key) service = jid.JID(service) if service else None if not node: - raise exceptions.DataError(_(u"empty node is not allowed")) + raise exceptions.DataError(_("empty node is not allowed")) if schema: schema = generic.parseXml(schema.encode("utf-8")) else: @@ -276,7 +277,7 @@ if not nodeIdentifier: if not default_node: raise ValueError( - _(u"default_node must be set if nodeIdentifier is not set") + _("default_node 
must be set if nodeIdentifier is not set") ) nodeIdentifier = default_node # we need the initial form to get options of fields when suitable @@ -296,7 +297,7 @@ items, metadata = items_data items_xmlui = [] for item_elt in items: - for x_elt in item_elt.elements((data_form.NS_X_DATA, u"x")): + for x_elt in item_elt.elements((data_form.NS_X_DATA, "x")): form = data_form.Form.fromElement(x_elt) if form_ns and form.formNamespace != form_ns: continue @@ -307,9 +308,9 @@ # are not checked prepend=( ("label", "id"), - ("text", item_elt["id"], u"id"), + ("text", item_elt["id"], "id"), ("label", "publisher"), - ("text", item_elt.getAttribute("publisher", ""), u"publisher"), + ("text", item_elt.getAttribute("publisher", ""), "publisher"), ), filters=filters, read_only=False, @@ -336,7 +337,7 @@ extra, deserialise=True, ) - d.addCallback(lambda ret: ret or u"") + d.addCallback(lambda ret: ret or "") return d @defer.inlineCallbacks @@ -361,22 +362,22 @@ client, service, nodeIdentifier, schema, form_type="submit" ) - for name, values_list in values.iteritems(): + for name, values_list in values.items(): try: field = form.fields[name] except KeyError: log.warning( - _(u"field {name} doesn't exist, ignoring it").format(name=name) + _("field {name} doesn't exist, ignoring it").format(name=name) ) continue - if isinstance(values_list, basestring) or not isinstance( + if isinstance(values_list, str) or not isinstance( values_list, Iterable ): values_list = [values_list] if deserialise: - if field.fieldType == u"boolean": + if field.fieldType == "boolean": values_list = [C.bool(v) for v in values_list] - elif field.fieldType == u"text-multi": + elif field.fieldType == "text-multi": # for text-multi, lines must be put on separate values values_list = list( itertools.chain(*[v.splitlines() for v in values_list]) @@ -384,9 +385,9 @@ elif xml_tools.isXHTMLField(field): values_list = [generic.parseXml(v.encode("utf-8")) for v in values_list] - elif u"jid" in (field.fieldType or u""): + 
elif "jid" in (field.fieldType or ""): values_list = [jid.JID(v) for v in values_list] - if u"list" in (field.fieldType or u""): + if "list" in (field.fieldType or ""): # for lists, we check that given values are allowed in form allowed_values = [o.value for o in field.options] values_list = [v for v in values_list if v in allowed_values] @@ -419,26 +420,26 @@ main use case is using a textbox for labels """ - if widget_type != u"textbox": + if widget_type != "textbox": return widget_type, args, kwargs - widget_type = u"list" - options = [o for o in args.pop(0).split(u"\n") if o] + widget_type = "list" + options = [o for o in args.pop(0).split("\n") if o] kwargs = { "options": options, "name": kwargs.get("name"), - "styles": (u"noselect", u"extensible", u"reducible"), + "styles": ("noselect", "extensible", "reducible"), } return widget_type, args, kwargs def dateFilter(self, form_xmlui, widget_type, args, kwargs): """Convert a string with a date to a unix timestamp""" - if widget_type != u"string" or not args[0]: + if widget_type != "string" or not args[0]: return widget_type, args, kwargs # we convert XMPP date to timestamp try: - args[0] = unicode(date_utils.date_parse(args[0])) + args[0] = str(date_utils.date_parse(args[0])) except Exception as e: - log.warning(_(u"Can't parse date field: {msg}").format(msg=e)) + log.warning(_("Can't parse date field: {msg}").format(msg=e)) return widget_type, args, kwargs ## Helper methods ## @@ -479,7 +480,7 @@ # have to modify them if C.bool(extra.get("labels_as_list", C.BOOL_FALSE)): filters = filters.copy() - filters[u"labels"] = self.textbox2ListFilter + filters["labels"] = self.textbox2ListFilter client, service, node, max_items, extra, sub_id = self.prepareBridgeGet( service, node, max_items, sub_id, extra, profile_key ) @@ -536,7 +537,7 @@ item_elt = items_data[0][0] except Exception as e: log.warning( - _(u"Can't get previous item, update ignored: {reason}").format( + _("Can't get previous item, update ignored: 
{reason}").format( reason=e ) ) @@ -546,13 +547,13 @@ if form is None: log.warning( _( - u"Can't parse previous item, update ignored: data form not found" + "Can't parse previous item, update ignored: data form not found" ).format(reason=e) ) else: - for name, field in form.fields.iteritems(): + for name, field in form.fields.items(): if name not in values: - values[name] = u"\n".join(unicode(v) for v in field.values) + values[name] = "\n".join(str(v) for v in field.values) def _set(self, service, node, values, schema=None, item_id=None, extra=None, default_node=None, form_ns=None, fill_author=True, @@ -578,7 +579,7 @@ default_node=default_node, fill_author=fill_author, ) - d.addCallback(lambda ret: ret or u"") + d.addCallback(lambda ret: ret or "") return d @defer.inlineCallbacks @@ -605,15 +606,15 @@ extra = {} if not node: if default_node is None: - raise ValueError(_(u"default_node must be set if node is not set")) + raise ValueError(_("default_node must be set if node is not set")) node = default_node now = utils.xmpp_date() if not item_id: values["created"] = now - elif extra.get(u"update", False): + elif extra.get("update", False): if item_id is None: raise exceptions.DataError( - _(u'if extra["update"] is set, item_id must be set too') + _('if extra["update"] is set, item_id must be set too') ) yield self.copyMissingValues(client, service, node, item_id, form_ns, values) @@ -630,8 +631,8 @@ defer.returnValue(item_id) +@implementer(iwokkel.IDisco) class SchemaHandler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, service, nodeIdentifier=""): return [disco.DiscoFeature(NS_SCHEMA)] diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_import.py --- a/sat/plugins/plugin_import.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_import.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for generic data import handling @@ -38,7 +38,7 @@ 
C.PI_DEPENDENCIES: [], C.PI_MAIN: "ImportPlugin", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Generic import plugin, base for specialized importers"""), + C.PI_DESCRIPTION: _("""Generic import plugin, base for specialized importers"""), } Importer = collections.namedtuple("Importer", ("callback", "short_desc", "long_desc")) @@ -64,7 +64,7 @@ @param name(unicode): import handler name """ assert name == name.lower().strip() - log.info(_(u"initializing {name} import handler").format(name=name)) + log.info(_("initializing {name} import handler").format(name=name)) import_handler.name = name import_handler.register = partial(self.register, import_handler) import_handler.unregister = partial(self.unregister, import_handler) @@ -93,7 +93,7 @@ in_sign="ssa{ss}sss", out_sign="s", method=_import, - async=True, + async_=True, ) self.host.bridge.addMethod( name + "ImportList", @@ -115,7 +115,7 @@ return client._import[import_handler.name][progress_id] def listImporters(self, import_handler): - importers = import_handler.importers.keys() + importers = list(import_handler.importers.keys()) importers.sort() return [ (name, import_handler.importers[name].short_desc) @@ -132,7 +132,7 @@ importer = import_handler.importers[name] except KeyError: raise exceptions.NotFound( - u"{handler_name} importer not found [{name}]".format( + "{handler_name} importer not found [{name}]".format( handler_name=import_handler.name, name=name ) ) @@ -150,7 +150,7 @@ profile=C.PROF_KEY_NONE, ): client = self.host.getClient(profile) - options = {key: unicode(value) for key, value in options.iteritems()} + options = {key: str(value) for key, value in options.items()} for option in import_handler.BOOL_OPTIONS: try: options[option] = C.bool(options[option]) @@ -161,14 +161,14 @@ options[option] = json.loads(options[option]) except ValueError: raise exceptions.DataError( - _(u"invalid json option: {name}").format(name=option) + _("invalid json option: {name}").format(name=option) ) pubsub_service = 
jid.JID(pubsub_service) if pubsub_service else None return self.doImport( client, import_handler, - unicode(name), - unicode(location), + str(name), + str(location), options, pubsub_service, pubsub_node or None, @@ -202,7 +202,7 @@ if options is None: options = {} else: - for opt_name, opt_default in import_handler.OPT_DEFAULTS.iteritems(): + for opt_name, opt_default in import_handler.OPT_DEFAULTS.items(): # we want a filled options dict, with all empty or False values removed try: value = options[opt_name] @@ -216,21 +216,21 @@ try: importer = import_handler.importers[name] except KeyError: - raise exceptions.NotFound(u"Importer [{}] not found".format(name)) + raise exceptions.NotFound("Importer [{}] not found".format(name)) items_import_data, items_count = yield importer.callback( client, location, options ) - progress_id = unicode(uuid.uuid4()) + progress_id = str(uuid.uuid4()) try: _import = client._import except AttributeError: _import = client._import = {} progress_data = _import.setdefault(import_handler.name, {}) - progress_data[progress_id] = {u"position": "0"} + progress_data[progress_id] = {"position": "0"} if items_count is not None: - progress_data[progress_id]["size"] = unicode(items_count) + progress_data[progress_id]["size"] = str(items_count) metadata = { - "name": u"{}: {}".format(name, location), + "name": "{}: {}".format(name, location), "direction": "out", "type": import_handler.name.upper() + "_IMPORT", } @@ -242,8 +242,8 @@ ) self.host.bridge.progressStarted(progress_id, metadata, client.profile) session = { #  session data, can be used by importers - u"root_service": pubsub_service, - u"root_node": pubsub_node, + "root_service": pubsub_service, + "root_node": pubsub_node, } self.recursiveImport( client, @@ -306,11 +306,11 @@ recurse_kwargs.setdefault("options", options) recurse_kwargs["return_data"] = return_data recurse_kwargs["depth"] = depth + 1 - log.debug(_(u"uploading subitems")) + log.debug(_("uploading subitems")) yield 
self.recursiveImport(**recurse_kwargs) if depth == 0: - client._import[import_handler.name][progress_id]["position"] = unicode( + client._import[import_handler.name][progress_id]["position"] = str( idx + 1 ) @@ -338,7 +338,7 @@ if name in import_handler.importers: raise exceptions.ConflictError( _( - u"An {handler_name} importer with the name {name} already exist" + "An {handler_name} importer with the name {name} already exist" ).format(handler_name=import_handler.name, name=name) ) import_handler.importers[name] = Importer(callback, short_desc, long_desc) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_merge_req_mercurial.py --- a/sat/plugins/plugin_merge_req_mercurial.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_merge_req_mercurial.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for import external blogs @@ -35,16 +35,16 @@ C.PI_DEPENDENCIES: ["MERGE_REQUESTS"], C.PI_MAIN: "MercurialHandler", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Merge request handler for Mercurial""") + C.PI_DESCRIPTION: _("""Merge request handler for Mercurial""") } -SHORT_DESC = D_(u"handle Mercurial repository") -CLEAN_RE = re.compile(ur'[^\w -._]', flags=re.UNICODE) +SHORT_DESC = D_("handle Mercurial repository") +CLEAN_RE = re.compile(r'[^\w -._]', flags=re.UNICODE) class MercurialProtocol(async_process.CommandProtocol): """handle hg commands""" - name = u"Mercurial" + name = "Mercurial" command = None @classmethod @@ -54,7 +54,7 @@ @param path(unicode): path to the repository @param command(unicode): hg command to run """ - assert u"path" not in kwargs + assert "path" not in kwargs kwargs["path"] = path # FIXME: we have to use this workaround because Twisted's protocol.ProcessProtocol # is not using new style classes. 
This can be removed once moved to @@ -65,15 +65,15 @@ class MercurialHandler(object): - data_types = (u'mercurial_changeset',) + data_types = ('mercurial_changeset',) def __init__(self, host): - log.info(_(u"Mercurial merge request handler initialization")) + log.info(_("Mercurial merge request handler initialization")) try: MercurialProtocol.command = which('hg')[0] except IndexError: - raise exceptions.NotFound(_(u"Mercurial executable (hg) not found, " - u"can't use Mercurial handler")) + raise exceptions.NotFound(_("Mercurial executable (hg) not found, " + "can't use Mercurial handler")) self.host = host self._m = host.plugins['MERGE_REQUESTS'] self._m.register('mercurial', self, self.data_types, SHORT_DESC) @@ -92,11 +92,11 @@ def import_(self, repository, data, data_type, item_id, service, node, extra): parsed_data = self.parse(data) try: - parsed_name = parsed_data[0][u'commit_msg'].split(u'\n')[0] - parsed_name = CLEAN_RE.sub(u'', parsed_name)[:40] + parsed_name = parsed_data[0]['commit_msg'].split('\n')[0] + parsed_name = CLEAN_RE.sub('', parsed_name)[:40] except Exception: - parsed_name = u'' - name = u'mr_{item_id}_{parsed_name}'.format(item_id=CLEAN_RE.sub(u'', item_id), + parsed_name = '' + name = 'mr_{item_id}_{parsed_name}'.format(item_id=CLEAN_RE.sub('', item_id), parsed_name=parsed_name) return MercurialProtocol.run(repository, 'qimport', '-g', '--name', name, '--encoding=utf-8', '-', stdin=data) @@ -111,51 +111,51 @@ diff = [] state = 'init' if lines[0] != '# HG changeset patch': - raise exceptions.DataError(_(u'invalid changeset signature')) + raise exceptions.DataError(_('invalid changeset signature')) # line index of this patch in the whole data patch_idx = total_lines - len(lines) del lines[0] for idx, line in enumerate(lines): if state == 'init': - if line.startswith(u'# '): - if line.startswith(u'# User '): + if line.startswith('# '): + if line.startswith('# User '): elems = line[7:].split() if not elems: continue last = elems[-1] - if 
(last.startswith(u'<') and last.endswith(u'>') - and u'@' in last): + if (last.startswith('<') and last.endswith('>') + and '@' in last): patch[self._m.META_EMAIL] = elems.pop()[1:-1] - patch[self._m.META_AUTHOR] = u' '.join(elems) - elif line.startswith(u'# Date '): + patch[self._m.META_AUTHOR] = ' '.join(elems) + elif line.startswith('# Date '): time_data = line[7:].split() if len(time_data) != 2: - log.warning(_(u'unexpected time data: {data}') + log.warning(_('unexpected time data: {data}') .format(data=line[7:])) continue patch[self._m.META_TIMESTAMP] = (int(time_data[0]) + int(time_data[1])) - elif line.startswith(u'# Node ID '): + elif line.startswith('# Node ID '): patch[self._m.META_HASH] = line[10:] - elif line.startswith(u'# Parent '): + elif line.startswith('# Parent '): patch[self._m.META_PARENT_HASH] = line[10:] else: state = 'commit_msg' if state == 'commit_msg': - if line.startswith(u'diff --git a/'): + if line.startswith('diff --git a/'): state = 'diff' patch[self._m.META_DIFF_IDX] = patch_idx + idx + 1 else: commit_msg.append(line) if state == 'diff': - if line.startswith(u'# ') or idx == len(lines)-1: + if line.startswith('# ') or idx == len(lines)-1: # a new patch is starting or we have reached end of patches if idx == len(lines)-1: # end of patches, we need to keep the line diff.append(line) - patch[self._m.META_COMMIT_MSG] = u'\n'.join(commit_msg) - patch[self._m.META_DIFF] = u'\n'.join(diff) + patch[self._m.META_COMMIT_MSG] = '\n'.join(commit_msg) + patch[self._m.META_DIFF] = '\n'.join(diff) patches.append(patch) if idx == len(lines)-1: del lines[:] diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_account.py --- a/sat/plugins/plugin_misc_account.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_account.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for account creation (experimental) @@ -26,7 +26,7 @@ from sat.memory.memory import 
Sessions from sat.memory.crypto import PasswordHasher from sat.core.constants import Const as C -import ConfigParser +import configparser from twisted.internet import defer from twisted.python.failure import Failure from twisted.words.protocols.jabber import jid @@ -45,7 +45,7 @@ C.PI_RECOMMENDATIONS: ["GROUPBLOG"], C.PI_MAIN: "MiscAccount", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""SàT account creation"""), + C.PI_DESCRIPTION: _("""SàT account creation"""), } CONFIG_SECTION = "plugin account" @@ -71,7 +71,7 @@ } WELCOME_MSG = D_( - u"""Welcome to Libervia, the web interface of Salut à Toi. + """Welcome to Libervia, the web interface of Salut à Toi. Your account on {domain} has been successfully created. This is a demonstration version to show you the current status of the project. @@ -94,7 +94,7 @@ """ ) -DEFAULT_DOMAIN = u"example.net" +DEFAULT_DOMAIN = "example.net" class MiscAccount(object): @@ -104,7 +104,7 @@ # TODO: cleaning, separate email handling, more configuration/tests, fixes def __init__(self, host): - log.info(_(u"Plugin Account initialization")) + log.info(_("Plugin Account initialization")) self.host = host host.bridge.addMethod( "registerSatAccount", @@ -112,7 +112,7 @@ in_sign="sss", out_sign="", method=self._registerAccount, - async=True, + async_=True, ) host.bridge.addMethod( "getNewAccountDomain", @@ -120,7 +120,7 @@ in_sign="", out_sign="s", method=self.getNewAccountDomain, - async=False, + async_=False, ) host.bridge.addMethod( "getAccountDialogUI", @@ -128,7 +128,7 @@ in_sign="s", out_sign="s", method=self._getAccountDialogUI, - async=False, + async_=False, ) host.bridge.addMethod( "asyncConnectWithXMPPCredentials", @@ -136,7 +136,7 @@ in_sign="ss", out_sign="b", method=self.asyncConnectWithXMPPCredentials, - async=True, + async_=True, ) self.fixEmailAdmins() @@ -175,13 +175,13 @@ if not admin_email: return log.warning( - u"admin_email parameter is deprecated, please use email_admins_list instead" + "admin_email parameter is 
deprecated, please use email_admins_list instead" ) param_name = "email_admins_list" try: section = "" value = self.host.memory.getConfig(section, param_name, Exception) - except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): + except (configparser.NoOptionError, configparser.NoSectionError): section = CONFIG_SECTION value = self.host.memory.getConfig( section, param_name, default_conf[param_name] @@ -198,7 +198,7 @@ # they can now be in [DEFAULT] section try: value = self.host.memory.getConfig(None, name, Exception) - except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): + except (configparser.NoOptionError, configparser.NoSectionError): pass else: return value @@ -263,7 +263,7 @@ d = defer.succeed(None) jid_ = jid.JID(jid_s) else: - jid_s = profile + u"@" + self.getNewAccountDomain() + jid_s = profile + "@" + self.getNewAccountDomain() jid_ = jid.JID(jid_s) d = self.host.plugins["XEP-0077"].registerNewAccount(jid_, password) @@ -289,7 +289,7 @@ def _sendEmailEb(self, failure_, email): # TODO: return error code to user log.error( - _(u"Failed to send account creation confirmation to {email}: {msg}").format( + _("Failed to send account creation confirmation to {email}: {msg}").format( email=email, msg=failure_ ) ) @@ -303,27 +303,27 @@ admins_emails = self.getConfig("email_admins_list") if not admins_emails: log.warning( - u"No known admin email, we can't send email to administrator(s).\nPlease fill email_admins_list parameter" + "No known admin email, we can't send email to administrator(s).\nPlease fill email_admins_list parameter" ) d_admin = defer.fail(exceptions.DataError("no admin email")) else: - subject = _(u"New Libervia account created") - body = u"""New account created: {profile} [{email}]""".format( + subject = _("New Libervia account created") + body = """New account created: {profile} [{email}]""".format( profile=profile, # there is no email when an existing XMPP account is used - email=email or u"", + email=email or "", ) 
d_admin = sat_email.sendEmail(self.host, admins_emails, subject, body) - admins_emails_txt = u", ".join([u"<" + addr + u">" for addr in admins_emails]) + admins_emails_txt = ", ".join(["<" + addr + ">" for addr in admins_emails]) d_admin.addCallbacks( lambda __: log.debug( - u"Account creation notification sent to admin(s) {}".format( + "Account creation notification sent to admin(s) {}".format( admins_emails_txt ) ), lambda __: log.error( - u"Failed to send account creation notification to admin {}".format( + "Failed to send account creation notification to admin {}".format( admins_emails_txt ) ), @@ -333,9 +333,9 @@ return d_admin jid_s = self.host.memory.getParamA( - u"JabberID", u"Connection", profile_key=profile + "JabberID", "Connection", profile_key=profile ) - subject = _(u"Your Libervia account has been created") + subject = _("Your Libervia account has been created") body = _(WELCOME_MSG).format(profile=profile, jid=jid_s, domain=domain) # XXX: this will not fail when the email address doesn't exist @@ -344,7 +344,7 @@ d_user = sat_email.sendEmail(self.host, [email], subject, body) d_user.addCallbacks( lambda __: log.debug( - u"Account creation confirmation sent to <{}>".format(email) + "Account creation confirmation sent to <{}>".format(email) ), self._sendEmailEb, ) @@ -359,7 +359,7 @@ if not domain: log.warning( _( - u'xmpp_domain needs to be set in sat.conf. Using "{default}" meanwhile' + 'xmpp_domain needs to be set in sat.conf. 
Using "{default}" meanwhile' ).format(default=DEFAULT_DOMAIN) ) return DEFAULT_DOMAIN diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_android.py --- a/sat/plugins/plugin_misc_android.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_android.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for file tansfer @@ -36,7 +36,7 @@ C.PI_NAME: "Android ", C.PI_IMPORT_NAME: "android", C.PI_TYPE: C.PLUG_TYPE_MISC, - C.PI_RECOMMENDATIONS: [u"XEP-0352"], + C.PI_RECOMMENDATIONS: ["XEP-0352"], C.PI_MAIN: "AndroidPlugin", C.PI_HANDLER: "no", C.PI_DESCRIPTION: D_( @@ -45,7 +45,7 @@ } if sys.platform != "android": - raise exceptions.CancelError(u"this module is not needed on this platform") + raise exceptions.CancelError("this module is not needed on this platform") from plyer import notification, vibrator @@ -60,7 +60,7 @@ PARAM_VIBRATE_CATEGORY = "Notifications" PARAM_VIBRATE_NAME = "vibrate" -PARAM_VIBRATE_LABEL = D_(u"Vibrate on notifications") +PARAM_VIBRATE_LABEL = D_("Vibrate on notifications") SOCKET_DIR = "/data/data/org.salutatoi.cagou/" SOCKET_FILE = ".socket" STATE_RUNNING = "running" @@ -101,9 +101,9 @@ f.seek(0, os.SEEK_END) size = f.tell() if size == 10: - log.info(u"seek() bug not present anymore, workaround code can be removed") + log.info("seek() bug not present anymore, workaround code can be removed") else: - log.warning(u"seek() bug detected, applying a workaround") + log.warning("seek() bug detected, applying a workaround") web_client.FileBodyProducer._determineLength = determineLength_workaround patch_seek_bug() @@ -118,7 +118,7 @@ if data in STATES: self.android_plugin.state = data else: - log.warning(u"Unexpected data: {data}".format(data=data)) + log.warning("Unexpected data: {data}".format(data=data)) class FrontendStateFactory(protocol.Factory): @@ -149,13 +149,13 @@ ) def __init__(self, host): - log.info(_(u"plugin Android initialization")) + 
log.info(_("plugin Android initialization")) self.host = host - self._csi = host.plugins.get(u'XEP-0352') + self._csi = host.plugins.get('XEP-0352') self._csi_timer = None host.memory.updateParams(self.params) try: - os.mkdir(SOCKET_DIR, 0700) + os.mkdir(SOCKET_DIR, 0o700) except OSError as e: if e.errno == 17: # dir already exists @@ -188,7 +188,7 @@ self.br = BroadcastReceiver( callback=lambda *args, **kwargs: reactor.callLater(0, self.onConnectivityChange), - actions=[u"android.net.conn.CONNECTIVITY_CHANGE"]) + actions=["android.net.conn.CONNECTIVITY_CHANGE"]) self.br.start() @@ -198,7 +198,7 @@ @state.setter def state(self, new_state): - log.debug(u"frontend state has changed: {state}".format(state=new_state)) + log.debug("frontend state has changed: {state}".format(state=new_state)) previous_state = self._state self._state = new_state if new_state == STATE_RUNNING: @@ -233,11 +233,11 @@ """ if (mess_data["message"] and mess_data["type"] != C.MESS_TYPE_GROUPCHAT and not mess_data["from"].userhostJID() == client.jid.userhostJID()): - message = mess_data["message"].itervalues().next() + message = next(iter(mess_data["message"].values())) try: - subject = mess_data["subject"].itervalues().next() + subject = next(iter(mess_data["subject"].values())) except StopIteration: - subject = u"Cagou new message" + subject = "Cagou new message" notification.notify(title=subject, message=message) if self.host.memory.getParamA( @@ -248,7 +248,7 @@ except Exception as e: # FIXME: vibrator is currently not working, # cf. 
https://github.com/kivy/plyer/issues/509 - log.warning(u"Can't use vibrator: {e}".format(e=e)) + log.warning("Can't use vibrator: {e}".format(e=e)) return mess_data def messageReceivedTrigger(self, client, message_elt, post_treat): @@ -306,24 +306,24 @@ else: net_type = NET_TYPE_OTHER if net_type != self._net_type: - log.info(u"connectivity has changed") + log.info("connectivity has changed") previous = self._net_type self._net_type = net_type if net_type == NET_TYPE_NONE: - log.info(u"no network active") + log.info("no network active") elif net_type == NET_TYPE_WIFI: - log.info(u"WIFI activated") + log.info("WIFI activated") elif net_type == NET_TYPE_MOBILE: - log.info(u"mobile data activated") + log.info("mobile data activated") else: - log.info(u"network activated (type={net_type_android})" + log.info("network activated (type={net_type_android})" .format(net_type_android=net_type_android)) self._handleNetworkChange(previous, net_type) else: - log.debug(u"_checkConnectivity called without network change ({net_type})" + log.debug("_checkConnectivity called without network change ({net_type})" .format(net_type = net_type)) def onConnectivityChange(self): - log.debug(u"onConnectivityChange called") + log.debug("onConnectivityChange called") self._checkConnectivity() diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_debug.py --- a/sat/plugins/plugin_misc_debug.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_debug.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing raw XML log diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_email_invitation.py --- a/sat/plugins/plugin_misc_email_invitation.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_email_invitation.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for file tansfer @@ -41,23 +41,23 
@@ C.PI_RECOMMENDATIONS: ["IDENTITY"], C.PI_MAIN: "InvitationsPlugin", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""invitation of people without XMPP account""") + C.PI_DESCRIPTION: _("""invitation of people without XMPP account""") } SUFFIX_MAX = 5 -INVITEE_PROFILE_TPL = u"guest@@{uuid}" -KEY_ID = u'id' -KEY_JID = u'jid' -KEY_CREATED = u'created' -KEY_LAST_CONNECTION = u'last_connection' -KEY_GUEST_PROFILE = u'guest_profile' -KEY_PASSWORD = u'password' -KEY_EMAILS_EXTRA = u'emails_extra' -EXTRA_RESERVED = {KEY_ID, KEY_JID, KEY_CREATED, u'jid_', u'jid', KEY_LAST_CONNECTION, +INVITEE_PROFILE_TPL = "guest@@{uuid}" +KEY_ID = 'id' +KEY_JID = 'jid' +KEY_CREATED = 'created' +KEY_LAST_CONNECTION = 'last_connection' +KEY_GUEST_PROFILE = 'guest_profile' +KEY_PASSWORD = 'password' +KEY_EMAILS_EXTRA = 'emails_extra' +EXTRA_RESERVED = {KEY_ID, KEY_JID, KEY_CREATED, 'jid_', 'jid', KEY_LAST_CONNECTION, KEY_GUEST_PROFILE, KEY_PASSWORD, KEY_EMAILS_EXTRA} -DEFAULT_SUBJECT = D_(u"You have been invited by {host_name} to {app_name}") -DEFAULT_BODY = D_(u"""Hello {name}! +DEFAULT_SUBJECT = D_("You have been invited by {host_name} to {app_name}") +DEFAULT_BODY = D_("""Hello {name}! You have received an invitation from {host_name} to participate to "{app_name}". 
To join, you just have to click on the following URL: @@ -73,34 +73,34 @@ class InvitationsPlugin(object): def __init__(self, host): - log.info(_(u"plugin Invitations initialization")) + log.info(_("plugin Invitations initialization")) self.host = host - self.invitations = persistent.LazyPersistentBinaryDict(u'invitations') + self.invitations = persistent.LazyPersistentBinaryDict('invitations') host.bridge.addMethod("invitationCreate", ".plugin", in_sign='sasssssssssa{ss}s', out_sign='a{ss}', method=self._create, - async=True) + async_=True) host.bridge.addMethod("invitationGet", ".plugin", in_sign='s', out_sign='a{ss}', method=self.get, - async=True) + async_=True) host.bridge.addMethod("invitationModify", ".plugin", in_sign='sa{ss}b', out_sign='', method=self._modify, - async=True) + async_=True) host.bridge.addMethod("invitationList", ".plugin", in_sign='s', out_sign='a{sa{ss}}', method=self._list, - async=True) + async_=True) def checkExtra(self, extra): if EXTRA_RESERVED.intersection(extra): raise ValueError( - _(u"You can't use following key(s) in extra, they are reserved: {}") - .format(u', '.join(EXTRA_RESERVED.intersection(extra)))) + _("You can't use following key(s) in extra, they are reserved: {}") + .format(', '.join(EXTRA_RESERVED.intersection(extra)))) - def _create(self, email=u'', emails_extra=None, jid_=u'', password=u'', name=u'', - host_name=u'', language=u'', url_template=u'', message_subject=u'', - message_body=u'', extra=None, profile=u''): + def _create(self, email='', emails_extra=None, jid_='', password='', name='', + host_name='', language='', url_template='', message_subject='', + message_body='', extra=None, profile=''): # XXX: we don't use **kwargs here to keep arguments name for introspection with # D-Bus bridge if emails_extra is None: @@ -109,10 +109,10 @@ if extra is None: extra = {} else: - extra = {unicode(k): unicode(v) for k,v in extra.iteritems()} + extra = {str(k): str(v) for k,v in extra.items()} kwargs = {"extra": extra, - 
KEY_EMAILS_EXTRA: [unicode(e) for e in emails_extra] + KEY_EMAILS_EXTRA: [str(e) for e in emails_extra] } # we need to be sure that values are unicode, else they won't be pickled correctly @@ -121,7 +121,7 @@ "url_template", "message_subject", "message_body", "profile"): value = locals()[key] if value: - kwargs[key] = unicode(value) + kwargs[key] = str(value) d = self.create(**kwargs) def serialize(data): data[KEY_JID] = data[KEY_JID].full() @@ -131,7 +131,7 @@ @defer.inlineCallbacks def create(self, **kwargs): - ur"""Create an invitation + r"""Create an invitation This will create an XMPP account and a profile, and use a UUID to retrieve them. The profile is automatically generated in the form guest@@[UUID], this way they @@ -194,29 +194,29 @@ extra = kwargs.pop('extra', {}) if set(kwargs).intersection(extra): raise ValueError( - _(u"You can't use following key(s) in both args and extra: {}").format( - u', '.join(set(kwargs).intersection(extra)))) + _("You can't use following key(s) in both args and extra: {}").format( + ', '.join(set(kwargs).intersection(extra)))) self.checkExtra(extra) - email = kwargs.pop(u'email', None) - emails_extra = kwargs.pop(u'emails_extra', []) + email = kwargs.pop('email', None) + emails_extra = kwargs.pop('emails_extra', []) if not email and emails_extra: raise ValueError( - _(u'You need to provide a main email address before using emails_extra')) + _('You need to provide a main email address before using emails_extra')) if (email is not None and not 'url_template' in kwargs and not 'message_body' in kwargs): raise ValueError( - _(u"You need to provide url_template if you use default message body")) + _("You need to provide url_template if you use default message body")) ## uuid - log.info(_(u"creating an invitation")) - id_ = unicode(shortuuid.uuid()) + log.info(_("creating an invitation")) + id_ = str(shortuuid.uuid()) ## XMPP account creation - password = kwargs.pop(u'password', None) + password = kwargs.pop('password', None) if 
password is None: password = utils.generatePassword() assert password @@ -228,13 +228,13 @@ # saved and could be used to encrypt profile password. extra[KEY_PASSWORD] = password - jid_ = kwargs.pop(u'jid_', None) + jid_ = kwargs.pop('jid_', None) if not jid_: domain = self.host.memory.getConfig(None, 'xmpp_domain') if not domain: # TODO: fallback to profile's domain - raise ValueError(_(u"You need to specify xmpp_domain in sat.conf")) - jid_ = u"invitation-{uuid}@{domain}".format(uuid=shortuuid.uuid(), + raise ValueError(_("You need to specify xmpp_domain in sat.conf")) + jid_ = "invitation-{uuid}@{domain}".format(uuid=shortuuid.uuid(), domain=domain) jid_ = jid.JID(jid_) if jid_.user: @@ -245,11 +245,11 @@ except error.StanzaError as e: prefix = jid_.user idx = 0 - while e.condition == u'conflict': + while e.condition == 'conflict': if idx >= SUFFIX_MAX: - raise exceptions.ConflictError(_(u"Can't create XMPP account")) - jid_.user = prefix + '_' + unicode(idx) - log.info(_(u"requested jid already exists, trying with {}".format( + raise exceptions.ConflictError(_("Can't create XMPP account")) + jid_.user = prefix + '_' + str(idx) + log.info(_("requested jid already exists, trying with {}".format( jid_.full()))) try: yield self.host.plugins['XEP-0077'].registerNewAccount(jid_, @@ -258,10 +258,10 @@ idx += 1 else: break - if e.condition != u'conflict': + if e.condition != 'conflict': raise e - log.info(_(u"account {jid_} created").format(jid_=jid_.full())) + log.info(_("account {jid_} created").format(jid_=jid_.full())) ## profile creation @@ -273,66 +273,66 @@ profile_key=guest_profile) yield self.host.memory.setParam("Password", password, "Connection", profile_key=guest_profile) - name = kwargs.pop(u'name', None) + name = kwargs.pop('name', None) if name is not None: - extra[u'name'] = name + extra['name'] = name try: - id_plugin = self.host.plugins[u'IDENTITY'] + id_plugin = self.host.plugins['IDENTITY'] except KeyError: pass else: yield 
self.host.connect(guest_profile, password) guest_client = self.host.getClient(guest_profile) - yield id_plugin.setIdentity(guest_client, {u'nick': name}) + yield id_plugin.setIdentity(guest_client, {'nick': name}) yield self.host.disconnect(guest_profile) ## email - language = kwargs.pop(u'language', None) + language = kwargs.pop('language', None) if language is not None: - extra[u'language'] = language.strip() + extra['language'] = language.strip() if email is not None: - extra[u'email'] = email + extra['email'] = email data_format.iter2dict(KEY_EMAILS_EXTRA, extra) - url_template = kwargs.pop(u'url_template', '') + url_template = kwargs.pop('url_template', '') format_args = { - u'uuid': id_, - u'app_name': C.APP_NAME, - u'app_url': C.APP_URL} + 'uuid': id_, + 'app_name': C.APP_NAME, + 'app_url': C.APP_URL} if name is None: - format_args[u'name'] = email + format_args['name'] = email else: - format_args[u'name'] = name + format_args['name'] = name - profile = kwargs.pop(u'profile', None) + profile = kwargs.pop('profile', None) if profile is None: - format_args[u'profile'] = u'' + format_args['profile'] = '' else: - format_args[u'profile'] = extra[u'profile'] = profile + format_args['profile'] = extra['profile'] = profile - host_name = kwargs.pop(u'host_name', None) + host_name = kwargs.pop('host_name', None) if host_name is None: - format_args[u'host_name'] = profile or _(u"somebody") + format_args['host_name'] = profile or _("somebody") else: - format_args[u'host_name'] = extra[u'host_name'] = host_name + format_args['host_name'] = extra['host_name'] = host_name invite_url = url_template.format(**format_args) - format_args[u'url'] = invite_url + format_args['url'] = invite_url yield sat_email.sendEmail( self.host, [email] + emails_extra, - (kwargs.pop(u'message_subject', None) or DEFAULT_SUBJECT).format( + (kwargs.pop('message_subject', None) or DEFAULT_SUBJECT).format( **format_args), - (kwargs.pop(u'message_body', None) or DEFAULT_BODY).format(**format_args), + 
(kwargs.pop('message_body', None) or DEFAULT_BODY).format(**format_args), ) ## extra data saving self.invitations[id_] = extra if kwargs: - log.warning(_(u"Not all arguments have been consumed: {}").format(kwargs)) + log.warning(_("Not all arguments have been consumed: {}").format(kwargs)) extra[KEY_ID] = id_ extra[KEY_JID] = jid_ @@ -348,7 +348,7 @@ return self.invitations[id_] def _modify(self, id_, new_extra, replace): - return self.modify(id_, {unicode(k): unicode(v) for k,v in new_extra.iteritems()}, + return self.modify(id_, {str(k): str(v) for k,v in new_extra.items()}, replace) def modify(self, id_, new_extra, replace=False): @@ -372,9 +372,9 @@ continue else: new_data = current_data - for k,v in new_extra.iteritems(): + for k,v in new_extra.items(): if k in EXTRA_RESERVED: - log.warning(_(u"Skipping reserved key {key}".format(k))) + log.warning(_("Skipping reserved key {key}".format(k))) continue if v: new_data[k] = v @@ -401,9 +401,9 @@ C.PROF_KEY_NONE: don't filter invitations @return list(unicode): invitations uids """ - invitations = yield self.invitations.items() + invitations = yield list(self.invitations.items()) if profile != C.PROF_KEY_NONE: - invitations = {id_:data for id_, data in invitations.iteritems() - if data.get(u'profile') == profile} + invitations = {id_:data for id_, data in invitations.items() + if data.get('profile') == profile} defer.returnValue(invitations) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_extra_pep.py --- a/sat/plugins/plugin_misc_extra_pep.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_extra_pep.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for displaying messages from extra PEP services @@ -35,13 +35,13 @@ C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "ExtraPEP", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Display messages from extra PEP services"""), + C.PI_DESCRIPTION: _("""Display messages from extra 
PEP services"""), } -PARAM_KEY = u"Misc" -PARAM_NAME = u"blogs" -PARAM_LABEL = u"Blog authors following list" +PARAM_KEY = "Misc" +PARAM_NAME = "blogs" +PARAM_LABEL = "Blog authors following list" PARAM_DEFAULT = (jid.JID("salut-a-toi@libervia.org"),) @@ -62,11 +62,11 @@ "category_label": D_(PARAM_KEY), "param_name": PARAM_NAME, "param_label": D_(PARAM_LABEL), - "jids": u"\n".join({elt.toXml() for elt in params.createJidElts(PARAM_DEFAULT)}), + "jids": "\n".join({elt.toXml() for elt in params.createJidElts(PARAM_DEFAULT)}), } def __init__(self, host): - log.info(_(u"Plugin Extra PEP initialization")) + log.info(_("Plugin Extra PEP initialization")) self.host = host host.memory.updateParams(self.params) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_file.py --- a/sat/plugins/plugin_misc_file.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_file.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for file tansfer @@ -45,14 +45,14 @@ } -SENDING = D_(u"Please select a file to send to {peer}") -SENDING_TITLE = D_(u"File sending") +SENDING = D_("Please select a file to send to {peer}") +SENDING_TITLE = D_("File sending") CONFIRM = D_( - u'{peer} wants to send the file "{name}" to you:\n{desc}\n\nThe file has a size of {size_human}\n\nDo you accept ?' + '{peer} wants to send the file "{name}" to you:\n{desc}\n\nThe file has a size of {size_human}\n\nDo you accept ?' 
) -CONFIRM_TITLE = D_(u"Confirm file transfer") -CONFIRM_OVERWRITE = D_(u"File {} already exists, are you sure you want to overwrite ?") -CONFIRM_OVERWRITE_TITLE = D_(u"File exists") +CONFIRM_TITLE = D_("Confirm file transfer") +CONFIRM_OVERWRITE = D_("File {} already exists, are you sure you want to overwrite ?") +CONFIRM_OVERWRITE_TITLE = D_("File exists") SECURITY_LIMIT = 30 PROGRESS_ID_KEY = "progress_id" @@ -70,7 +70,7 @@ in_sign="ssssa{ss}s", out_sign="a{ss}", method=self._fileSend, - async=True, + async_=True, ) self._file_callbacks = [] host.importMenu( @@ -109,14 +109,14 @@ @return (dict): action dictionary, with progress id in case of success, else xmlui message """ if not os.path.isfile(filepath): - raise exceptions.DataError(u"The given path doesn't link to a file") + raise exceptions.DataError("The given path doesn't link to a file") if not filename: filename = os.path.basename(filepath) or "_" for namespace, callback, priority, method_name in self._file_callbacks: has_feature = yield self.host.hasFeature(client, namespace, peer_jid) if has_feature: log.info( - u"{name} method will be used to send the file".format( + "{name} method will be used to send the file".format( name=method_name ) ) @@ -124,12 +124,12 @@ client, peer_jid, filepath, filename, file_desc, extra ) defer.returnValue({"progress": progress_id}) - msg = u"Can't find any method to send file to {jid}".format(jid=peer_jid.full()) + msg = "Can't find any method to send file to {jid}".format(jid=peer_jid.full()) log.warning(msg) defer.returnValue( { "xmlui": xml_tools.note( - u"Can't transfer file", msg, C.XMLUI_DATA_LVL_WARNING + "Can't transfer file", msg, C.XMLUI_DATA_LVL_WARNING ).toXml() } ) @@ -181,7 +181,7 @@ for data in self._file_callbacks: if namespace == data[0]: raise exceptions.ConflictError( - u"A method with this namespace is already registered" + "A method with this namespace is already registered" ) self._file_callbacks.append( (namespace, callback, priority, method_name or 
namespace) @@ -193,7 +193,7 @@ if data[0] == namespace: del [idx] return - raise exceptions.NotFound(u"The namespace to unregister doesn't exist") + raise exceptions.NotFound("The namespace to unregister doesn't exist") # Dialogs with user # the overwrite check is done here @@ -241,7 +241,7 @@ return False path = data["path"] file_data["file_path"] = file_path = os.path.join(path, file_data["name"]) - log.debug(u"destination file path set to {}".format(file_path)) + log.debug("destination file path set to {}".format(file_path)) # we manage case where file already exists if os.path.exists(file_path): @@ -309,7 +309,7 @@ assert filename and not "/" in filename assert PROGRESS_ID_KEY in file_data # human readable size - file_data["size_human"] = u"{:.6n} Mio".format( + file_data["size_human"] = "{:.6n} Mio".format( float(file_data["size"]) / (1024 ** 2) ) d = xml_tools.deferDialog( diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_forums.py --- a/sat/plugins/plugin_misc_forums.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_forums.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for pubsub forums @@ -29,8 +29,8 @@ import json log = getLogger(__name__) -NS_FORUMS = u'org.salut-a-toi.forums:0' -NS_FORUMS_TOPICS = NS_FORUMS + u'#topics' +NS_FORUMS = 'org.salut-a-toi.forums:0' +NS_FORUMS_TOPICS = NS_FORUMS + '#topics' PLUGIN_INFO = { C.PI_NAME: _("forums management"), @@ -42,16 +42,16 @@ C.PI_HANDLER: "no", C.PI_DESCRIPTION: _("""forums management plugin""") } -FORUM_ATTR = {u'title', u'name', u'main-language', u'uri'} -FORUM_SUB_ELTS = (u'short-desc', u'desc') -FORUM_TOPICS_NODE_TPL = u'{node}#topics_{uuid}' -FORUM_TOPIC_NODE_TPL = u'{node}_{uuid}' +FORUM_ATTR = {'title', 'name', 'main-language', 'uri'} +FORUM_SUB_ELTS = ('short-desc', 'desc') +FORUM_TOPICS_NODE_TPL = '{node}#topics_{uuid}' +FORUM_TOPIC_NODE_TPL = '{node}_{uuid}' class forums(object): def 
__init__(self, host): - log.info(_(u"forums plugin initialization")) + log.info(_("forums plugin initialization")) self.host = host self._m = self.host.plugins['XEP-0277'] self._p = self.host.plugins['XEP-0060'] @@ -67,19 +67,19 @@ host.bridge.addMethod("forumsGet", ".plugin", in_sign='ssss', out_sign='s', method=self._get, - async=True) + async_=True) host.bridge.addMethod("forumsSet", ".plugin", in_sign='sssss', out_sign='', method=self._set, - async=True) + async_=True) host.bridge.addMethod("forumTopicsGet", ".plugin", in_sign='ssa{ss}s', out_sign='(aa{ss}a{ss})', method=self._getTopics, - async=True) + async_=True) host.bridge.addMethod("forumTopicCreate", ".plugin", in_sign='ssa{ss}s', out_sign='', method=self._createTopic, - async=True) + async_=True) @defer.inlineCallbacks def _createForums(self, client, forums, service, node, forums_elt=None, names=None): @@ -94,48 +94,48 @@ @return (domish.Element): created forums """ if not isinstance(forums, list): - raise ValueError(_(u"forums arguments must be a list of forums")) + raise ValueError(_("forums arguments must be a list of forums")) if forums_elt is None: - forums_elt = domish.Element((NS_FORUMS, u'forums')) + forums_elt = domish.Element((NS_FORUMS, 'forums')) assert names is None names = set() else: - if names is None or forums_elt.name != u'forums': - raise exceptions.InternalError(u'invalid forums or names') + if names is None or forums_elt.name != 'forums': + raise exceptions.InternalError('invalid forums or names') assert names is not None for forum in forums: if not isinstance(forum, dict): - raise ValueError(_(u"A forum item must be a dictionary")) + raise ValueError(_("A forum item must be a dictionary")) forum_elt = forums_elt.addElement('forum') - for key, value in forum.iteritems(): - if key == u'name' and key in names: - raise exceptions.ConflictError(_(u"following forum name is not unique: {name}").format(name=key)) - if key == u'uri' and not value.strip(): - log.info(_(u"creating missing 
forum node")) + for key, value in forum.items(): + if key == 'name' and key in names: + raise exceptions.ConflictError(_("following forum name is not unique: {name}").format(name=key)) + if key == 'uri' and not value.strip(): + log.info(_("creating missing forum node")) forum_node = FORUM_TOPICS_NODE_TPL.format(node=node, uuid=shortuuid.uuid()) yield self._p.createNode(client, service, forum_node, self._node_options) - value = uri.buildXMPPUri(u'pubsub', + value = uri.buildXMPPUri('pubsub', path=service.full(), node=forum_node) if key in FORUM_ATTR: forum_elt[key] = value.strip() elif key in FORUM_SUB_ELTS: forum_elt.addElement(key, content=value) - elif key == u'sub-forums': - sub_forums_elt = forum_elt.addElement(u'forums') + elif key == 'sub-forums': + sub_forums_elt = forum_elt.addElement('forums') yield self._createForums(client, value, service, node, sub_forums_elt, names=names) else: - log.warning(_(u"Unknown forum attribute: {key}").format(key=key)) - if not forum_elt.getAttribute(u'title'): - name = forum_elt.getAttribute(u'name') + log.warning(_("Unknown forum attribute: {key}").format(key=key)) + if not forum_elt.getAttribute('title'): + name = forum_elt.getAttribute('name') if name: - forum_elt[u'title'] = name + forum_elt['title'] = name else: - raise ValueError(_(u"forum need a title or a name")) - if not forum_elt.getAttribute(u'uri') and not forum_elt.children: - raise ValueError(_(u"forum need uri or sub-forums")) + raise ValueError(_("forum need a title or a name")) + if not forum_elt.getAttribute('uri') and not forum_elt.children: + raise ValueError(_("forum need uri or sub-forums")) defer.returnValue(forums_elt) def _parseForums(self, parent_elt=None, forums=None): @@ -146,18 +146,18 @@ @return (list): parsed data @raise ValueError: item is invalid """ - if parent_elt.name == u'item': + if parent_elt.name == 'item': forums = [] try: - forums_elt = next(parent_elt.elements(NS_FORUMS, u'forums')) + forums_elt = next(parent_elt.elements(NS_FORUMS, 
'forums')) except StopIteration: - raise ValueError(_(u"missing element")) + raise ValueError(_("missing element")) else: forums_elt = parent_elt if forums is None: - raise exceptions.InternalError(u'expected forums') + raise exceptions.InternalError('expected forums') if forums_elt.name != 'forums': - raise ValueError(_(u'Unexpected element: {xml}').format(xml=forums_elt.toXml())) + raise ValueError(_('Unexpected element: {xml}').format(xml=forums_elt.toXml())) for forum_elt in forums_elt.elements(): if forum_elt.name == 'forum': data = {} @@ -165,19 +165,19 @@ data[attrib] = forum_elt[attrib] unknown = set(forum_elt.attributes).difference(FORUM_ATTR) if unknown: - log.warning(_(u"Following attributes are unknown: {unknown}").format(unknown=unknown)) + log.warning(_("Following attributes are unknown: {unknown}").format(unknown=unknown)) for elt in forum_elt.elements(): if elt.name in FORUM_SUB_ELTS: - data[elt.name] = unicode(elt) - elif elt.name == u'forums': - sub_forums = data[u'sub-forums'] = [] + data[elt.name] = str(elt) + elif elt.name == 'forums': + sub_forums = data['sub-forums'] = [] self._parseForums(elt, sub_forums) - if not u'title' in data or not {u'uri', u'sub-forums'}.intersection(data): - log.warning(_(u"invalid forum, ignoring: {xml}").format(xml=forum_elt.toXml())) + if not 'title' in data or not {'uri', 'sub-forums'}.intersection(data): + log.warning(_("invalid forum, ignoring: {xml}").format(xml=forum_elt.toXml())) else: forums.append(data) else: - log.warning(_(u"unkown forums sub element: {xml}").format(xml=forum_elt)) + log.warning(_("unkown forums sub element: {xml}").format(xml=forum_elt)) return forums @@ -200,7 +200,7 @@ if node is None: node = NS_FORUMS if forums_key is None: - forums_key = u'default' + forums_key = 'default' items_data = yield self._p.getItems(client, service, node, item_ids=[forums_key]) item = items_data[0][0] # we have the item and need to convert it to json @@ -241,7 +241,7 @@ if node is None: node = NS_FORUMS if 
forums_key is None: - forums_key = u'default' + forums_key = 'default' forums_elt = yield self._createForums(client, forums, service, node) yield self._p.sendItem(client, service, node, forums_elt, item_id=forums_key) @@ -249,7 +249,7 @@ client = self.host.getClient(profile_key) extra = self._p.parseExtra(extra) d = self.getTopics(client, jid.JID(service), node, rsm_request=extra.rsm_request, extra=extra.extra) - d.addCallback(lambda(topics, metadata): (topics, {k: unicode(v) for k,v in metadata.iteritems()})) + d.addCallback(lambda topics_metadata: (topics_metadata[0], {k: str(v) for k,v in topics_metadata[1].items()})) return d @defer.inlineCallbacks @@ -262,11 +262,11 @@ topics = [] item_elts, metadata = topics_data for item_elt in item_elts: - topic_elt = next(item_elt.elements(NS_FORUMS, u'topic')) - title_elt = next(topic_elt.elements(NS_FORUMS, u'title')) - topic = {u'uri': topic_elt[u'uri'], - u'author': topic_elt[u'author'], - u'title': unicode(title_elt)} + topic_elt = next(item_elt.elements(NS_FORUMS, 'topic')) + title_elt = next(topic_elt.elements(NS_FORUMS, 'title')) + topic = {'uri': topic_elt['uri'], + 'author': topic_elt['author'], + 'title': str(title_elt)} topics.append(topic) defer.returnValue((topics, metadata)) @@ -277,21 +277,21 @@ @defer.inlineCallbacks def createTopic(self, client, service, node, mb_data): try: - title = mb_data[u'title'] - if not u'content' in mb_data: - raise KeyError(u'content') + title = mb_data['title'] + if not 'content' in mb_data: + raise KeyError('content') except KeyError as e: - raise exceptions.DataError(u"missing mandatory data: {key}".format(key=e.args[0])) + raise exceptions.DataError("missing mandatory data: {key}".format(key=e.args[0])) topic_node = FORUM_TOPIC_NODE_TPL.format(node=node, uuid=shortuuid.uuid()) yield self._p.createNode(client, service, topic_node, self._node_options) self._m.send(client, mb_data, service, topic_node) - topic_uri = uri.buildXMPPUri(u'pubsub', - subtype=u'microblog', + 
topic_uri = uri.buildXMPPUri('pubsub', + subtype='microblog', path=service.full(), node=topic_node) topic_elt = domish.Element((NS_FORUMS, 'topic')) - topic_elt[u'uri'] = topic_uri - topic_elt[u'author'] = client.jid.userhost() - topic_elt.addElement(u'title', content = title) + topic_elt['uri'] = topic_uri + topic_elt['author'] = client.jid.userhost() + topic_elt.addElement('title', content = title) yield self._p.sendItem(client, service, node, topic_elt) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_groupblog.py --- a/sat/plugins/plugin_misc_groupblog.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_groupblog.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for microbloging with roster access @@ -25,7 +25,7 @@ from twisted.internet import defer from sat.core import exceptions from wokkel import disco, data_form, iwokkel -from zope.interface import implements +from zope.interface import implementer try: from twisted.words.protocols.xmlstream import XMPPHandler @@ -78,12 +78,12 @@ client.server_groupblog_available = False log.warning( _( - u"Server is not able to manage item-access pubsub, we can't use group blog" + "Server is not able to manage item-access pubsub, we can't use group blog" ) ) else: client.server_groupblog_available = True - log.info(_(u"Server can manage group blogs")) + log.info(_("Server can manage group blogs")) def getFeatures(self, profile): try: @@ -119,8 +119,8 @@ if not groups: return if not client.server_groupblog_available: - raise exceptions.CancelError(u"GroupBlog is not available") - log.debug(u"This entry use group blog") + raise exceptions.CancelError("GroupBlog is not available") + log.debug("This entry use group blog") form = data_form.Form("submit", formNamespace=NS_PUBSUB_ITEM_CONFIG) access = data_form.Field( None, self._p.OPT_ACCESS_MODEL, value=self._p.ACCESS_PUBLISHER_ROSTER @@ -139,8 +139,8 @@ 
options[self._p.OPT_ACCESS_MODEL] = self._p.ACCESS_PUBLISHER_ROSTER options[self._p.OPT_ROSTER_GROUPS_ALLOWED] = mb_data['groups'] +@implementer(iwokkel.IDisco) class GroupBlog_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_GROUPBLOG)] diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_identity.py --- a/sat/plugins/plugin_misc_identity.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_identity.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing xep-0054 @@ -44,24 +44,24 @@ class Identity(object): def __init__(self, host): - log.info(_(u"Plugin Identity initialization")) + log.info(_("Plugin Identity initialization")) self.host = host - self._v = host.plugins[u"XEP-0054"] + self._v = host.plugins["XEP-0054"] host.bridge.addMethod( - u"identityGet", - u".plugin", - in_sign=u"ss", - out_sign=u"a{ss}", + "identityGet", + ".plugin", + in_sign="ss", + out_sign="a{ss}", method=self._getIdentity, - async=True, + async_=True, ) host.bridge.addMethod( - u"identitySet", - u".plugin", - in_sign=u"a{ss}s", - out_sign=u"", + "identitySet", + ".plugin", + in_sign="a{ss}s", + out_sign="", method=self._setIdentity, - async=True, + async_=True, ) def _getIdentity(self, jid_str, profile): @@ -84,30 +84,30 @@ # we first check roster roster_item = yield client.roster.getItem(jid_.userhostJID()) if roster_item is not None and roster_item.name: - id_data[u"nick"] = roster_item.name + id_data["nick"] = roster_item.name elif jid_.resource and self._v.isRoom(client, jid_): - id_data[u"nick"] = jid_.resource + id_data["nick"] = jid_.resource else: #  and finally then vcard nick = yield self._v.getNick(client, jid_) if nick: - id_data[u"nick"] = nick + id_data["nick"] = nick elif jid_.user: - id_data[u"nick"] = jid_.user.capitalize() + id_data["nick"] = jid_.user.capitalize() 
else: - id_data[u"nick"] = jid_.userhost() + id_data["nick"] = jid_.userhost() try: - avatar_path = id_data[u"avatar"] = yield self._v.getAvatar( + avatar_path = id_data["avatar"] = yield self._v.getAvatar( client, jid_, cache_only=False ) except exceptions.NotFound: pass else: if avatar_path: - id_data[u"avatar_basename"] = os.path.basename(avatar_path) + id_data["avatar_basename"] = os.path.basename(avatar_path) else: - del id_data[u"avatar"] + del id_data["avatar"] defer.returnValue(id_data) @@ -122,7 +122,7 @@ - nick: nickname the vCard will be updated """ - if id_data.keys() != [u"nick"]: - raise NotImplementedError(u"Only nick can be updated for now") - if u"nick" in id_data: - return self._v.setNick(client, id_data[u"nick"]) + if list(id_data.keys()) != ["nick"]: + raise NotImplementedError("Only nick can be updated for now") + if "nick" in id_data: + return self._v.setNick(client, id_data["nick"]) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_imap.py --- a/sat/plugins/plugin_misc_imap.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_imap.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing imap server @@ -29,10 +29,10 @@ from twisted.python import failure from email.parser import Parser import os -from cStringIO import StringIO +from io import StringIO from twisted.internet import reactor -from zope.interface import implements +from zope.interface import implementer PLUGIN_INFO = { C.PI_NAME: "IMAP server Plugin", @@ -75,8 +75,8 @@ reactor.listenTCP(port, self.server_factory) +@implementer(imap4.IMessage) class Message(object): - implements(imap4.IMessage) def __init__(self, uid, flags, mess_fp): log.debug("Message Init") @@ -112,10 +112,10 @@ should be omitted from the return value, rather than included. 
@return: A mapping of header field names to header field values """ - log.debug(u"getHeaders %s - %s" % (negate, names)) + log.debug("getHeaders %s - %s" % (negate, names)) final_dict = {} to_check = [name.lower() for name in names] - for header in self.message.keys(): + for header in list(self.message.keys()): if (negate and not header.lower() in to_check) or ( not negate and header.lower() in to_check ): @@ -150,13 +150,13 @@ return TypeError +@implementer(imap4.IMailbox) class SatMailbox(object): - implements(imap4.IMailbox) def __init__(self, host, name, profile): self.host = host self.listeners = set() - log.debug(u"Mailbox init (%s)" % name) + log.debug("Mailbox init (%s)" % name) if name != "INBOX": raise imap4.MailboxException("Only INBOX is managed for the moment") self.mailbox = self.host.plugins["Maildir"].accessMessageBox( @@ -187,7 +187,7 @@ @param message: The message sequence number @return: The UID of the message. """ - log.debug(u"getUID (%i)" % message) + log.debug("getUID (%i)" % message) # return self.mailbox.getUid(message-1) #XXX: it seems that this method get uid and not message sequence number return message @@ -243,7 +243,7 @@ @param listener: An object to add to the set of those which will be notified when the contents of this mailbox change. 
""" - log.debug(u"addListener %s" % listener) + log.debug("addListener %s" % listener) self.listeners.add(listener) def removeListener(self, listener): @@ -288,7 +288,7 @@ about @param uid: If true, the IDs specified in the query are UIDs; """ - log.debug(u"fetch (%s, %s)" % (messages, uid)) + log.debug("fetch (%s, %s)" % (messages, uid)) if uid: messages.last = self.mailbox.getMaxUid() messages.getnext = self.mailbox.getNextExistingUid @@ -412,20 +412,21 @@ return SatMailbox(self.host, name, self.profile) +@implementer(portal.IRealm) class ImapRealm(object): - implements(portal.IRealm) def __init__(self, host): self.host = host def requestAvatar(self, avatarID, mind, *interfaces): log.debug("requestAvatar") - profile = avatarID.decode("utf-8") + profile = avatarID if imap4.IAccount not in interfaces: raise NotImplementedError return imap4.IAccount, ImapSatAccount(self.host, profile), lambda: None +@implementer(checkers.ICredentialsChecker) class SatProfileCredentialChecker(object): """ This credential checker check against SàT's profile and associated jabber's password @@ -433,7 +434,6 @@ Return the profile as avatarId """ - implements(checkers.ICredentialsChecker) credentialInterfaces = ( credentials.IUsernamePassword, credentials.IUsernameHashedPassword, @@ -470,7 +470,7 @@ log.debug(_("IMAP server connection started")) def clientConnectionLost(self, connector, reason): - log.debug(_(u"IMAP server connection lost (reason: %s)"), reason) + log.debug(_("IMAP server connection lost (reason: %s)"), reason) def buildProtocol(self, addr): log.debug("Building protocol") diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_ip.py --- a/sat/plugins/plugin_misc_ip.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_ip.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for IP address discovery @@ -17,12 +17,12 @@ # You should have received a copy of the GNU Affero General 
Public License # along with this program. If not, see . +import urllib.parse from sat.core.i18n import _, D_ from sat.core.constants import Const as C from sat.core.log import getLogger - -log = getLogger(__name__) from sat.tools import xml_tools +from wokkel import disco, iwokkel from twisted.web import client as webclient from twisted.web import error as web_error from twisted.internet import defer @@ -30,17 +30,17 @@ from twisted.internet import protocol from twisted.internet import endpoints from twisted.internet import error as internet_error -from zope.interface import implements -from wokkel import disco, iwokkel +from zope.interface import implementer from twisted.words.protocols.jabber.xmlstream import XMPPHandler from twisted.words.protocols.jabber.error import StanzaError -import urlparse + +log = getLogger(__name__) try: import netifaces except ImportError: log.warning( - u"netifaces is not available, it help discovering IPs, you can install it on https://pypi.python.org/pypi/netifaces" + "netifaces is not available, it help discovering IPs, you can install it on https://pypi.python.org/pypi/netifaces" ) netifaces = None @@ -61,12 +61,12 @@ GET_IP_PAGE = ( "http://salut-a-toi.org/whereami/" ) # This page must only return external IP of the requester -GET_IP_LABEL = D_(u"Allow external get IP") +GET_IP_LABEL = D_("Allow external get IP") GET_IP_CATEGORY = "General" GET_IP_NAME = "allow_get_ip" -GET_IP_CONFIRM_TITLE = D_(u"Confirm external site request") +GET_IP_CONFIRM_TITLE = D_("Confirm external site request") GET_IP_CONFIRM = D_( - u"""To facilitate data transfer, we need to contact a website. + """To facilitate data transfer, we need to contact a website. A request will be done on {page} That means that administrators of {domain} can know that you use "{app_name}" and your IP Address. @@ -75,7 +75,7 @@ Do you agree to do this request ? 
""" ).format( - page=GET_IP_PAGE, domain=urlparse.urlparse(GET_IP_PAGE).netloc, app_name=C.APP_NAME + page=GET_IP_PAGE, domain=urllib.parse.urlparse(GET_IP_PAGE).netloc, app_name=C.APP_NAME ) NS_IP_CHECK = "urn:xmpp:sic:1" @@ -105,7 +105,7 @@ try: self._nat = host.plugins["NAT-PORT"] except KeyError: - log.debug(u"NAT port plugin not available") + log.debug("NAT port plugin not available") self._nat = None # XXX: cache is kept until SàT is restarted @@ -180,7 +180,7 @@ @param ext_utl(str): url to connect to @return (D(str)): return local IP """ - url = urlparse.urlparse(ext_url) + url = urllib.parse.urlparse(ext_url) port = url.port if port is None: if url.scheme == "http": @@ -188,10 +188,10 @@ elif url.scheme == "https": port = 443 else: - log.error(u"Unknown url scheme: {}".format(url.scheme)) + log.error("Unknown url scheme: {}".format(url.scheme)) defer.returnValue(None) if url.hostname is None: - log.error(u"Can't find url hostname for {}".format(GET_IP_PAGE)) + log.error("Can't find url hostname for {}".format(GET_IP_PAGE)) point = endpoints.TCP4ClientEndpoint(reactor, url.hostname, port) @@ -257,7 +257,7 @@ try: ip_tuple = yield self._getIPFromExternal(GET_IP_PAGE) except (internet_error.DNSLookupError, internet_error.TimeoutError): - log.warning(u"Can't access Domain Name System") + log.warning("Can't access Domain Name System") defer.returnValue(addresses or localhost) self._insertFirst(addresses, ip_tuple.local) defer.returnValue(addresses) @@ -274,24 +274,25 @@ # we first try with XEP-0279 ip_check = yield self.host.hasFeature(client, NS_IP_CHECK) if ip_check: - log.debug(u"Server IP Check available, we use it to retrieve our IP") + log.debug("Server IP Check available, we use it to retrieve our IP") iq_elt = client.IQ("get") + iq_elt['to'] = client.host iq_elt.addElement((NS_IP_CHECK, "address")) try: result_elt = yield iq_elt.send() - address_elt = result_elt.elements(NS_IP_CHECK, "address").next() - ip_elt = address_elt.elements(NS_IP_CHECK, 
"ip").next() + address_elt = next(result_elt.elements(NS_IP_CHECK, "address")) + ip_elt = next(address_elt.elements(NS_IP_CHECK, "ip")) except StopIteration: log.warning( - u"Server returned invalid result on XEP-0279 request, we ignore it" + "Server returned invalid result on XEP-0279 request, we ignore it" ) except StanzaError as e: - log.warning(u"error while requesting ip to server: {}".format(e)) + log.warning("error while requesting ip to server: {}".format(e)) else: # FIXME: server IP may not be the same as external IP (server can be on local machine or network) # IP should be checked to see if we have a local one, and rejected in this case external_ip = str(ip_elt) - log.debug(u"External IP found: {}".format(external_ip)) + log.debug("External IP found: {}".format(external_ip)) self._external_ip_cache = external_ip defer.returnValue(self._external_ip_cache) @@ -305,13 +306,14 @@ # and finally by requesting external website allow_get_ip = yield self._externalAllowed(client) try: - ip = (yield webclient.getPage(GET_IP_PAGE)) if allow_get_ip else None + ip = ((yield webclient.getPage(GET_IP_PAGE.encode('utf-8'))) + if allow_get_ip else None) except (internet_error.DNSLookupError, internet_error.TimeoutError): - log.warning(u"Can't access Domain Name System") + log.warning("Can't access Domain Name System") ip = None except web_error.Error as e: log.warning( - u"Error while retrieving IP on {url}: {message}".format( + "Error while retrieving IP on {url}: {message}".format( url=GET_IP_PAGE, message=e ) ) @@ -321,8 +323,8 @@ defer.returnValue(ip) +@implementer(iwokkel.IDisco) class IPPlugin_handler(XMPPHandler): - implements(iwokkel.IDisco) def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return [disco.DiscoFeature(NS_IP_CHECK)] diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_maildir.py --- a/sat/plugins/plugin_misc_maildir.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_maildir.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 
+1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing Maildir type mail boxes @@ -139,7 +139,7 @@ @param mailboxUser: MailboxUser instance""" if boxname not in self.__mailboxes: err_msg = _("Trying to remove an mailboxUser not referenced") - log.error(_(u"INTERNAL ERROR: ") + err_msg) + log.error(_("INTERNAL ERROR: ") + err_msg) raise MaildirError(err_msg) assert self.__mailboxes[profile][boxname] == mailboxUser del self.__mailboxes[profile][boxname] @@ -158,7 +158,7 @@ return self.data[profile][boxname] # the boxname MUST exist in the data except KeyError: err_msg = _("Boxname doesn't exist in internal data") - log.error(_(u"INTERNAL ERROR: ") + err_msg) + log.error(_("INTERNAL ERROR: ") + err_msg) raise MaildirError(err_msg) def getUid(self, boxname, message_id, profile): @@ -211,7 +211,7 @@ @param message_uid: unique integer identifier @return: unique id of the message as given by MaildirMailbox or None if not found""" box_data = self.__getBoxData(boxname, profile) - for message_id in box_data.keys(): # TODO: this is highly inefficient on big mailbox, must be replaced in the future + for message_id in list(box_data.keys()): # TODO: this is highly inefficient on big mailbox, must be replaced in the future if message_id == 'cur_idx': continue if box_data[message_id][0] == message_uid: @@ -248,7 +248,7 @@ @param flag: flag to check @return: list of id (as given by MaildirMailbox)""" box_data = self.__getBoxData(boxname, profile) - assert(isinstance(flag, basestring)) + assert(isinstance(flag, str)) flag = flag.upper() result = [] for key in box_data: @@ -296,22 +296,22 @@ @param boxname: name of the box which was observed @param signal: which signal was observed by the caller""" if (profile, boxname) not in self.__observed: - err_msg = _(u"Trying to remove an observer for an inexistant mailbox") - log.error(_(u"INTERNAL ERROR: ") + err_msg) + err_msg = _("Trying to remove an observer for an inexistant mailbox") + 
log.error(_("INTERNAL ERROR: ") + err_msg) raise MaildirError(err_msg) if signal not in self.__observed[(profile, boxname)]: - err_msg = _(u"Trying to remove an inexistant observer, no observer for this signal") - log.error(_(u"INTERNAL ERROR: ") + err_msg) + err_msg = _("Trying to remove an inexistant observer, no observer for this signal") + log.error(_("INTERNAL ERROR: ") + err_msg) raise MaildirError(err_msg) if not callback in self.__observed[(profile, boxname)][signal]: - err_msg = _(u"Trying to remove an inexistant observer") - log.error(_(u"INTERNAL ERROR: ") + err_msg) + err_msg = _("Trying to remove an inexistant observer") + log.error(_("INTERNAL ERROR: ") + err_msg) raise MaildirError(err_msg) self.__observed[(profile, boxname)][signal].remove(callback) def emitSignal(self, profile, boxname, signal_name): """Emit the signal to observer""" - log.debug(u'emitSignal %s %s %s' % (profile, boxname, signal_name)) + log.debug('emitSignal %s %s %s' % (profile, boxname, signal_name)) try: for observer_cb in self.__observed[(profile, boxname)][signal_name]: observer_cb() @@ -347,7 +347,7 @@ @param profile: real profile (ie not a profile_key) THIS OBJECT MUST NOT BE USED DIRECTLY: use MaildirBox.accessMessageBox instead""" if _maildir._checkBoxReference(name, profile): - log.error(u"INTERNAL ERROR: MailboxUser MUST NOT be instancied directly") + log.error("INTERNAL ERROR: MailboxUser MUST NOT be instancied directly") raise MaildirError('double MailboxUser instanciation') if name != "INBOX": raise NotImplementedError @@ -357,7 +357,7 @@ profile_path = self.maildir._getProfilePath(profile) full_profile_path = os.path.join(self.maildir.host.memory.getConfig('', 'local_dir'), 'maildir', profile_path) if not os.path.exists(full_profile_path): - os.makedirs(full_profile_path, 0700) + os.makedirs(full_profile_path, 0o700) mailbox_path = os.path.join(full_profile_path, MAILDIR_PATH) self.mailbox_path = mailbox_path self.mailbox = maildir.MaildirMailbox(mailbox_path) @@ 
-365,7 +365,7 @@ self.__uid_table_update() if observer: - log.debug(u"adding observer for %s (%s)" % (name, profile)) + log.debug("adding observer for %s (%s)" % (name, profile)) self.maildir.addObserver(observer, profile, name, "NEW_MESSAGE") def __uid_table_update(self): @@ -378,7 +378,7 @@ def __del__(self): if self.observer: - log.debug(u"removing observer for %s" % self.name) + log.debug("removing observer for %s" % self.name) self._maildir.removeObserver(self.observer, self.name, "NEW_MESSAGE") self.maildir._removeBoxAccess(self.name, self, profile=self.profile) @@ -490,7 +490,7 @@ Also purge the internal data of these messages """ for mess_id in self.getMessageIdsWithFlag("\\Deleted"): - print ("Deleting %s" % mess_id) + print(("Deleting %s" % mess_id)) self.mailbox.deleteMessage(self.getIdxFromId(mess_id)) self.mailbox = maildir.MaildirMailbox(self.mailbox_path) # We need to reparse the dir to have coherent indexing self.maildir.purgeDeleted(self.name, profile=self.profile) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_merge_requests.py --- a/sat/plugins/plugin_misc_merge_requests.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_merge_requests.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Pubsub Schemas @@ -23,8 +23,8 @@ from twisted.internet import defer from twisted.words.protocols.jabber import jid from collections import namedtuple -from sat.tools import utils from sat.core.log import getLogger + log = getLogger(__name__) NS_MERGE_REQUESTS = 'org.salut-a-toi.merge_requests:0' @@ -40,8 +40,8 @@ C.PI_DESCRIPTION: _("""Merge requests management plugin""") } -FIELD_DATA_TYPE = u'type' -FIELD_DATA = u'request_data' +FIELD_DATA_TYPE = 'type' +FIELD_DATA = 'request_data' MergeRequestHandler = namedtuple("MergeRequestHandler", ['name', @@ -52,49 +52,52 @@ class MergeRequests(object): - META_AUTHOR = u'author' - META_EMAIL = u'email' - 
META_TIMESTAMP = u'timestamp' - META_HASH = u'hash' - META_PARENT_HASH = u'parent_hash' - META_COMMIT_MSG = u'commit_msg' - META_DIFF = u'diff' + META_AUTHOR = 'author' + META_EMAIL = 'email' + META_TIMESTAMP = 'timestamp' + META_HASH = 'hash' + META_PARENT_HASH = 'parent_hash' + META_COMMIT_MSG = 'commit_msg' + META_DIFF = 'diff' # index of the diff in the whole data # needed to retrieve comments location - META_DIFF_IDX = u'diff_idx' + META_DIFF_IDX = 'diff_idx' def __init__(self, host): - log.info(_(u"Merge requests plugin initialization")) + log.info(_("Merge requests plugin initialization")) self.host = host host.registerNamespace('merge_requests', NS_MERGE_REQUESTS) - self._p = self.host.plugins[u"XEP-0060"] - self._s = self.host.plugins[u"PUBSUB_SCHEMA"] - self._t = self.host.plugins[u"TICKETS"] + self._p = self.host.plugins["XEP-0060"] + self._s = self.host.plugins["PUBSUB_SCHEMA"] + self._t = self.host.plugins["TICKETS"] self._handlers = {} self._handlers_list = [] # handlers sorted by priority self._type_handlers = {} # data type => handler map host.bridge.addMethod("mergeRequestsGet", ".plugin", in_sign='ssiassa{ss}s', out_sign='(asa{ss}aaa{ss})', method=self._get, - async=True + async_=True ) host.bridge.addMethod("mergeRequestSet", ".plugin", in_sign='ssssa{sas}ssss', out_sign='s', method=self._set, - async=True) + async_=True) host.bridge.addMethod("mergeRequestsSchemaGet", ".plugin", in_sign='sss', out_sign='s', - method=utils.partial(self._s._getUISchema, - default_node=NS_MERGE_REQUESTS), - async=True) + method=lambda service, nodeIdentifier, profile_key: + self._s._getUISchema(service, + nodeIdentifier, + default_node=NS_MERGE_REQUESTS, + profile_key=profile_key), + async_=True) host.bridge.addMethod("mergeRequestParseData", ".plugin", in_sign='ss', out_sign='aa{ss}', method=self._parseData, - async=True) + async_=True) host.bridge.addMethod("mergeRequestsImport", ".plugin", in_sign='ssssa{ss}s', out_sign='', method=self._import, - async=True + 
async_=True ) def register(self, name, handler, data_types, short_desc, priority=0): @@ -112,8 +115,8 @@ @aram data_types(list[unicode]): data types that his handler can generate or parse """ if name in self._handlers: - raise exceptions.ConflictError(_(u"a handler with name {name} already " - u"exists!").format(name = name)) + raise exceptions.ConflictError(_("a handler with name {name} already " + "exists!").format(name = name)) self._handlers[name] = MergeRequestHandler(name, handler, data_types, @@ -121,12 +124,12 @@ priority) self._handlers_list.append(name) self._handlers_list.sort(key=lambda name: self._handlers[name].priority) - if isinstance(data_types, basestring): + if isinstance(data_types, str): data_types = [data_types] for data_type in data_types: if data_type in self._type_handlers: - log.warning(_(u'merge requests of type {type} are already handled by ' - u'{old_handler}, ignoring {new_handler}').format( + log.warning(_('merge requests of type {type} are already handled by ' + '{old_handler}, ignoring {new_handler}').format( type = data_type, old_handler = self._type_handlers[data_type].name, new_handler = name)) @@ -141,10 +144,10 @@ service, node, max_items, sub_id, extra_dict, profile_key) d = self.get(client, service, node or None, max_items, item_ids, sub_id or None, extra.rsm_request, extra.extra) - d.addCallback(lambda (tickets, metadata, parsed_patches): ( - self._p.transItemsData((tickets, metadata)) + - ([[{key: unicode(value) for key, value in p.iteritems()} - for p in patches] for patches in parsed_patches],))) + d.addCallback(lambda tickets_metadata_parsed_patches: ( + self._p.transItemsData((tickets_metadata_parsed_patches[0], tickets_metadata_parsed_patches[1])) + + ([[{key: str(value) for key, value in p.items()} + for p in patches] for patches in tickets_metadata_parsed_patches[2]],))) return d @defer.inlineCallbacks @@ -167,7 +170,7 @@ # XXX: Q&D way to get list for labels when displaying them, but text when we # have to modify 
them if C.bool(extra.get('labels_as_list', C.BOOL_FALSE)): - filters = {u'labels': self._s.textbox2ListFilter} + filters = {'labels': self._s.textbox2ListFilter} else: filters = {} tickets_xmlui, metadata = yield self._s.getDataFormItems( @@ -191,16 +194,16 @@ defer.returnValue((tickets_xmlui, metadata, parsed_patches)) def _set(self, service, node, repository, method, values, schema=None, item_id=None, - extra=u"", profile_key=C.PROF_KEY_NONE): + extra="", profile_key=C.PROF_KEY_NONE): client, service, node, schema, item_id, extra = self._s.prepareBridgeSet( service, node, schema, item_id, extra, profile_key) d = self.set(client, service, node, repository, method, values, schema, item_id or None, extra, deserialise=True) - d.addCallback(lambda ret: ret or u'') + d.addCallback(lambda ret: ret or '') return d @defer.inlineCallbacks - def set(self, client, service, node, repository, method=u'auto', values=None, + def set(self, client, service, node, repository, method='auto', values=None, schema=None, item_id=None, extra=None, deserialise=False): """Publish a tickets @@ -221,51 +224,51 @@ if not repository and not update: # in case of update, we may re-user former patches data # so repository is not mandatory - raise exceptions.DataError(_(u"repository must be specified")) + raise exceptions.DataError(_("repository must be specified")) if FIELD_DATA in values: - raise exceptions.DataError(_(u"{field} is set by backend, you must not set " - u"it in frontend").format(field = FIELD_DATA)) + raise exceptions.DataError(_("{field} is set by backend, you must not set " + "it in frontend").format(field = FIELD_DATA)) if repository: - if method == u'auto': + if method == 'auto': for name in self._handlers_list: handler = self._handlers[name].handler can_handle = yield handler.check(repository) if can_handle: - log.info(_(u"{name} handler will be used").format(name=name)) + log.info(_("{name} handler will be used").format(name=name)) break else: - log.warning(_(u"repository 
{path} can't be handled by any installed " - u"handler").format( + log.warning(_("repository {path} can't be handled by any installed " + "handler").format( path = repository)) - raise exceptions.NotFound(_(u"no handler for this repository has " - u"been found")) + raise exceptions.NotFound(_("no handler for this repository has " + "been found")) else: try: handler = self._handlers[name].handler except KeyError: - raise exceptions.NotFound(_(u"No handler of this name found")) + raise exceptions.NotFound(_("No handler of this name found")) data = yield handler.export(repository) if not data.strip(): - raise exceptions.DataError(_(u'export data is empty, do you have any ' - u'change to send?')) + raise exceptions.DataError(_('export data is empty, do you have any ' + 'change to send?')) - if not values.get(u'title') or not values.get(u'body'): + if not values.get('title') or not values.get('body'): patches = yield handler.parse(data, values.get(FIELD_DATA_TYPE)) commits_msg = patches[-1][self.META_COMMIT_MSG] msg_lines = commits_msg.splitlines() - if not values.get(u'title'): - values[u'title'] = msg_lines[0] - if not values.get(u'body'): + if not values.get('title'): + values['title'] = msg_lines[0] + if not values.get('body'): ts = self.host.plugins['TEXT_SYNTAXES'] xhtml = yield ts.convert( - u'\n'.join(msg_lines[1:]), + '\n'.join(msg_lines[1:]), syntax_from = ts.SYNTAX_TEXT, syntax_to = ts.SYNTAX_XHTML, profile = client.profile) - values[u'body'] = '
{xhtml}
'.format( + values['body'] = '
{xhtml}
'.format( ns=C.NS_XHTML, xhtml=xhtml) values[FIELD_DATA] = data @@ -277,7 +280,7 @@ def _parseData(self, data_type, data): d = self.parseData(data_type, data) d.addCallback(lambda parsed_patches: - {key: unicode(value) for key, value in parsed_patches.iteritems()}) + {key: str(value) for key, value in parsed_patches.items()}) return d def parseData(self, data_type, data): @@ -292,7 +295,7 @@ try: handler = self._type_handlers[data_type] except KeyError: - raise exceptions.NotFound(_(u'No handler can handle data type "{type}"') + raise exceptions.NotFound(_('No handler can handle data type "{type}"') .format(type=data_type)) return defer.maybeDeferred(handler.handler.parse, data, data_type) @@ -326,9 +329,9 @@ try: handler = self._type_handlers[data_type] except KeyError: - raise exceptions.NotFound(_(u'No handler found to import {data_type}') + raise exceptions.NotFound(_('No handler found to import {data_type}') .format(data_type=data_type)) - log.info(_(u"Importing patch [{item_id}] using {name} handler").format( + log.info(_("Importing patch [{item_id}] using {name} handler").format( item_id = item, name = handler.name)) yield handler.handler.import_(repository, data, data_type, item, service, node, diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_nat-port.py --- a/sat/plugins/plugin_misc_nat-port.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_nat-port.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for NAT port mapping @@ -32,7 +32,7 @@ import miniupnpc except ImportError: raise exceptions.MissingModule( - u"Missing module MiniUPnPc, please download/install it (and its Python binding) at http://miniupnp.free.fr/ (or use pip install miniupnpc)" + "Missing module MiniUPnPc, please download/install it (and its Python binding) at http://miniupnp.free.fr/ (or use pip install miniupnpc)" ) @@ -47,7 +47,7 @@ STARTING_PORT = 6000 # starting point to 
automatically find a port DEFAULT_DESC = ( - u"SaT port mapping" + "SaT port mapping" ) # we don't use "à" here as some bugged NAT don't manage charset correctly @@ -74,23 +74,23 @@ def unload(self): if self._to_unmap: - log.info(u"Cleaning mapped ports") + log.info("Cleaning mapped ports") return threads.deferToThread(self._unmapPortsBlocking) def _init_failed(self, failure_): e = failure_.trap(exceptions.NotFound, exceptions.FeatureNotFound) if e == exceptions.FeatureNotFound: - log.info(u"UPnP-IGD seems to be not activated on the device") + log.info("UPnP-IGD seems to be not activated on the device") else: - log.info(u"UPnP-IGD not available") + log.info("UPnP-IGD not available") self._upnp = None def _discover(self): devices = self._upnp.discover() if devices: - log.info(u"{nb} UPnP-IGD device(s) found".format(nb=devices)) + log.info("{nb} UPnP-IGD device(s) found".format(nb=devices)) else: - log.info(u"Can't find UPnP-IGD device on the local network") + log.info("Can't find UPnP-IGD device on the local network") raise failure.Failure(exceptions.NotFound()) self._upnp.selectigd() try: @@ -119,7 +119,7 @@ self._mutex.acquire() try: for port, protocol in self._to_unmap: - log.info(u"Unmapping port {}".format(port)) + log.info("Unmapping port {}".format(port)) unmapping = self._upnp.deleteportmapping( # the last parameter is remoteHost, we don't use it port, @@ -129,7 +129,7 @@ if not unmapping: log.error( - u"Can't unmap port {port} ({protocol})".format( + "Can't unmap port {port} ({protocol})".format( port=port, protocol=protocol ) ) @@ -174,7 +174,7 @@ "", ) except Exception as e: - log.error(_(u"addportmapping error: {msg}").format(msg=e)) + log.error(_("addportmapping error: {msg}").format(msg=e)) raise failure.Failure(MappingError()) if not mapping: @@ -201,7 +201,7 @@ def mappingCb(ext_port): log.info( - u"{protocol} mapping from {int_port} to {ext_port} successful".format( + "{protocol} mapping from {int_port} to {ext_port} successful".format( 
protocol=protocol, int_port=int_port, ext_port=ext_port ) ) @@ -209,10 +209,10 @@ def mappingEb(failure_): failure_.trap(MappingError) - log.warning(u"Can't map internal {int_port}".format(int_port=int_port)) + log.warning("Can't map internal {int_port}".format(int_port=int_port)) def mappingUnknownEb(failure_): - log.error(_(u"error while trying to map ports: {msg}").format(msg=failure_)) + log.error(_("error while trying to map ports: {msg}").format(msg=failure_)) d = threads.deferToThread( self._mapPortBlocking, int_port, ext_port, protocol, desc diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_quiz.py --- a/sat/plugins/plugin_misc_quiz.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_quiz.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing Quiz game @@ -190,7 +190,7 @@ """Convert a domish element with game_data to a dict""" game_data = {} for data_elt in game_data_elt.elements(): - game_data[data_elt.name] = unicode(data_elt) + game_data[data_elt.name] = str(data_elt) return game_data def __answer_result_to_signal_args(self, answer_result_elt): @@ -255,7 +255,7 @@ """Called when a player give an answer""" client = self.host.getClient(profile_key) log.debug( - u"new player answer (%(profile)s): %(answer)s" + "new player answer (%(profile)s): %(answer)s" % {"profile": client.profile, "answer": answer} ) mess = self.createGameElt(jid.JID(referee)) @@ -309,7 +309,7 @@ mess = self.createGameElt(room_jid) mess.firstChildElement().addChild( self.__ask_question( - game_data["question_id"], u"Quel est l'âge du capitaine ?", timer + game_data["question_id"], "Quel est l'âge du capitaine ?", timer ) ) client.send(mess) @@ -343,7 +343,7 @@ common_data = {"game_score": 0} new_game_data = { "instructions": _( - u"""Bienvenue dans cette partie rapide de quizz, le premier à atteindre le score de 9 remporte le jeu + """Bienvenue dans cette partie rapide de 
quizz, le premier à atteindre le score de 9 remporte le jeu Attention, tu es prêt ?""" ) @@ -366,7 +366,7 @@ if elt.name == "started": # new game created players = [] for player in elt.elements(): - players.append(unicode(player)) + players.append(str(player)) self.host.bridge.quizGameStarted( room_jid.userhost(), from_jid.full(), players, profile ) @@ -377,11 +377,11 @@ nb_players = len(self.games[room_jid]["players"]) status[player] = "ready" log.debug( - _(u"Player %(player)s is ready to start [status: %(status)s]") + _("Player %(player)s is ready to start [status: %(status)s]") % {"player": player, "status": status} ) if ( - status.values().count("ready") == nb_players + list(status.values()).count("ready") == nb_players ): # everybody is ready, we can start the game self.newGame(room_jid, profile) @@ -394,7 +394,7 @@ self.host.bridge.quizGameQuestion( room_jid.userhost(), elt["id"], - unicode(elt), + str(elt), int(elt["timer"]), profile, ) @@ -414,7 +414,7 @@ self.pauseTimer(room_jid) # and we send the player answer mess = self.createGameElt(room_jid) - _answer = unicode(elt) + _answer = str(elt) say_elt = mess.firstChildElement().addElement("player_says") say_elt["player"] = player say_elt.addContent(_answer) @@ -433,7 +433,7 @@ self.host.bridge.quizGamePlayerSays( room_jid.userhost(), elt["player"], - unicode(elt), + str(elt), int(elt["delay"]), profile, ) @@ -453,4 +453,4 @@ ) else: - log.error(_(u"Unmanaged game element: %s") % elt.name) + log.error(_("Unmanaged game element: %s") % elt.name) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_radiocol.py --- a/sat/plugins/plugin_misc_radiocol.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_radiocol.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing Radiocol @@ -39,7 +39,7 @@ from mutagen.id3 import ID3NoHeaderError except ImportError: raise exceptions.MissingModule( - u"Missing module 
Mutagen, please download/install from https://bitbucket.org/lazka/mutagen" + "Missing module Mutagen, please download/install from https://bitbucket.org/lazka/mutagen" ) @@ -95,7 +95,7 @@ in_sign="asss", out_sign="", method=self._prepareRoom, - async=True, + async_=True, ) host.bridge.addMethod( "radiocolCreate", @@ -110,7 +110,7 @@ in_sign="sss", out_sign="", method=self._radiocolSongAdded, - async=True, + async_=True, ) host.bridge.addSignal( "radiocolPlayers", ".plugin", signature="ssass" @@ -207,7 +207,7 @@ # ==> unlink done the Q&D way with the same host trick (see above) radio_data = self.games[room_jid] if len(radio_data["players"]) == 0: - log.debug(_(u"No more participants in the radiocol: cleaning data")) + log.debug(_("No more participants in the radiocol: cleaning data")) radio_data["queue"] = [] for filename in radio_data["to_delete"]: self.deleteFile(filename, radio_data) @@ -246,7 +246,7 @@ file_to_delete = radio_data["to_delete"][filename] except KeyError: log.error( - _(u"INTERNAL ERROR: can't find full path of the song to delete") + _("INTERNAL ERROR: can't find full path of the song to delete") ) return False else: @@ -255,7 +255,7 @@ unlink(file_to_delete) except OSError: log.error( - _(u"INTERNAL ERROR: can't find %s on the file system" % file_to_delete) + _("INTERNAL ERROR: can't find %s on the file system" % file_to_delete) ) return False return True @@ -285,7 +285,7 @@ ): # new game created and/or players list updated players = [] for player in elt.elements(): - players.append(unicode(player)) + players.append(str(player)) signal = ( self.host.bridge.radiocolStarted if elt.name == "started" @@ -350,7 +350,7 @@ # songs in queue. 
We can now start the party :) self.playNext(room_jid, profile) else: - log.error(_(u"Unmanaged game element: %s") % elt.name) + log.error(_("Unmanaged game element: %s") % elt.name) def getSyncDataForPlayer(self, room_jid, nick): game_data = self.games[room_jid] diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_register_account.py --- a/sat/plugins/plugin_misc_register_account.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_register_account.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for registering a new XMPP account @@ -38,7 +38,7 @@ C.PI_RECOMMENDATIONS: [], C.PI_MAIN: "RegisterAccount", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _(u"""Register XMPP account"""), + C.PI_DESCRIPTION: _("""Register XMPP account"""), } @@ -46,7 +46,7 @@ # FIXME: this plugin is messy and difficult to read, it needs to be cleaned up and documented def __init__(self, host): - log.info(_(u"Plugin Register Account initialization")) + log.info(_("Plugin Register Account initialization")) self.host = host self._sessions = Sessions() host.registerCallback( @@ -62,22 +62,22 @@ # FIXME: following loop is overcomplicated, hard to read # FIXME: while used with parameters, hashed password is used and overwrite clear one - for param in (u"JabberID", u"Password", C.FORCE_PORT_PARAM, C.FORCE_SERVER_PARAM): + for param in ("JabberID", "Password", C.FORCE_PORT_PARAM, C.FORCE_SERVER_PARAM): try: session_data[param] = data[ - SAT_FORM_PREFIX + u"Connection" + SAT_PARAM_SEPARATOR + param + SAT_FORM_PREFIX + "Connection" + SAT_PARAM_SEPARATOR + param ] except KeyError: if param in (C.FORCE_PORT_PARAM, C.FORCE_SERVER_PARAM): session_data[param] = "" - for param in (u"JabberID", u"Password"): + for param in ("JabberID", "Password"): if not session_data[param]: - form_ui = xml_tools.XMLUI(u"popup", title=D_(u"Missing values")) + form_ui = xml_tools.XMLUI("popup", title=D_("Missing values")) 
form_ui.addText( - D_(u"No user JID or password given: can't register new account.") + D_("No user JID or password given: can't register new account.") ) - return {u"xmlui": form_ui.toXml()} + return {"xmlui": form_ui.toXml()} session_data["user"], host, resource = jid.parse(session_data["JabberID"]) session_data["server"] = session_data[C.FORCE_SERVER_PARAM] or host @@ -89,7 +89,7 @@ session_id=session_id, ) form_ui.addText( - D_(u"Do you want to register a new XMPP account {jid}?").format( + D_("Do you want to register a new XMPP account {jid}?").format( jid=session_data["JabberID"] ) ) @@ -130,7 +130,7 @@ def _registerNewAccount(self, client, jid_, password, email, server): #  FIXME: port is not set here def registeredCb(__): - xmlui = xml_tools.XMLUI(u"popup", title=D_(u"Confirmation")) + xmlui = xml_tools.XMLUI("popup", title=D_("Confirmation")) xmlui.addText(D_("Registration successful.")) return {"xmlui": xmlui.toXml()} diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_room_game.py --- a/sat/plugins/plugin_misc_room_game.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_room_game.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT: a jabber client @@ -27,7 +27,7 @@ from twisted.internet import defer from time import time from wokkel import disco, iwokkel -from zope.interface import implements +from zope.interface import implementer import copy try: @@ -64,13 +64,13 @@ """ # Values for self.invite_mode (who can invite after the game creation) - FROM_ALL, FROM_NONE, FROM_REFEREE, FROM_PLAYERS = xrange(0, 4) + FROM_ALL, FROM_NONE, FROM_REFEREE, FROM_PLAYERS = range(0, 4) # Values for self.wait_mode (for who we should wait before creating the game) - FOR_ALL, FOR_NONE = xrange(0, 2) + FOR_ALL, FOR_NONE = range(0, 2) # Values for self.join_mode (who can join the game - NONE means solo game) - ALL, INVITED, NONE = xrange(0, 3) + ALL, INVITED, NONE = range(0, 3) # 
Values for ready_mode (how to turn a MUC user into a player) - ASK, FORCE = xrange(0, 2) + ASK, FORCE = range(0, 2) MESSAGE = "/message" REQUEST = '%s/%s[@xmlns="%s"]' @@ -218,7 +218,7 @@ if not auth and (verbose or _DEBUG): log.debug( - _(u"%(user)s not allowed to join the game %(game)s in %(room)s") + _("%(user)s not allowed to join the game %(game)s in %(room)s") % { "user": user_jid.userhost() or nick, "game": self.name, @@ -377,7 +377,7 @@ auth = self.isPlayer(room_jid, nick) if not auth and (verbose or _DEBUG): log.debug( - _(u"%(user)s not allowed to invite for the game %(game)s in %(room)s") + _("%(user)s not allowed to invite for the game %(game)s in %(room)s") % {"user": nick, "game": self.name, "room": room_jid.userhost()} ) return auth @@ -431,7 +431,7 @@ if not result[0] and (verbose or _DEBUG): log.debug( _( - u"Still waiting for %(users)s before starting the game %(game)s in %(room)s" + "Still waiting for %(users)s before starting the game %(game)s in %(room)s" ) % { "users": result[2], @@ -469,7 +469,7 @@ """ # FIXME: need to be refactored client = self.host.getClient(profile_key) - log.debug(_(u"Preparing room for %s game") % self.name) + log.debug(_("Preparing room for %s game") % self.name) profile = self.host.memory.getProfileName(profile_key) if not profile: log.error(_("Unknown profile")) @@ -515,7 +515,7 @@ batch = len(self.invitations[room_jid]) - 1 if batch < 0: log.error( - u"Invitations from %s to play %s in %s have been lost!" + "Invitations from %s to play %s in %s have been lost!" 
% (profile_nick, self.name, room_jid.userhost()) ) return True @@ -572,7 +572,7 @@ user_nick = self.host.plugins["XEP-0045"].getRoomNick(room_jid, profile) if not user_nick: log.error( - u"Internal error: profile %s has not joined the room %s" + "Internal error: profile %s has not joined the room %s" % (profile, room_jid.userhost()) ) return False, False @@ -580,13 +580,13 @@ is_referee = self.isReferee(room_jid, user_nick) if self._gameExists(room_jid, started=True): log.info( - _(u"%(game)s game already created in room %(room)s") + _("%(game)s game already created in room %(room)s") % {"game": self.name, "room": room_jid.userhost()} ) return False, is_referee elif not is_referee: log.info( - _(u"%(game)s game in room %(room)s can only be created by %(user)s") + _("%(game)s game in room %(room)s can only be created by %(user)s") % {"game": self.name, "room": room_jid.userhost(), "user": user_nick} ) return False, False @@ -607,12 +607,12 @@ @param profile_key (unicode): %(doc_profile_key)s """ log.debug( - _(u"Creating %(game)s game in room %(room)s") + _("Creating %(game)s game in room %(room)s") % {"game": self.name, "room": room_jid} ) profile = self.host.memory.getProfileName(profile_key) if not profile: - log.error(_(u"profile %s is unknown") % profile_key) + log.error(_("profile %s is unknown") % profile_key) return (create, sync) = self._checkCreateGameAndInit(room_jid, profile) if nicks is None: @@ -643,9 +643,9 @@ """ profile = self.host.memory.getProfileName(profile_key) if not profile: - log.error(_(u"profile %s is unknown") % profile_key) + log.error(_("profile %s is unknown") % profile_key) return - log.debug(u"new player ready: %s" % profile) + log.debug("new player ready: %s" % profile) # TODO: we probably need to add the game and room names in the sent message self.send(referee_jid, "player_ready", {"player": player_nick}, profile=profile) @@ -658,7 +658,7 @@ - msg_elts: dict to map each user to his specific initialization message @param profile 
""" - log.debug(_(u"new round for %s game") % self.name) + log.debug(_("new round for %s game") % self.name) game_data = self.games[room_jid] players = game_data["players"] players_data = game_data["players_data"] @@ -763,8 +763,8 @@ return RoomGameHandler(self) +@implementer(iwokkel.IDisco) class RoomGameHandler(XMPPHandler): - implements(iwokkel.IDisco) def __init__(self, plugin_parent): self.plugin_parent = plugin_parent diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_smtp.py --- a/sat/plugins/plugin_misc_smtp.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_smtp.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing smtp server @@ -33,7 +33,7 @@ from twisted.internet import reactor import sys -from zope.interface import implements +from zope.interface import implementer PLUGIN_INFO = { C.PI_NAME: "SMTP server Plugin", @@ -75,8 +75,8 @@ reactor.listenTCP(port, self.server_factory) +@implementer(smtp.IMessage) class SatSmtpMessage(object): - implements(smtp.IMessage) def __init__(self, host, profile): self.host = host @@ -103,7 +103,7 @@ except: exc_type, exc_value, exc_traceback = sys.exc_info() log.error( - _(u"Can't send message: %s") % exc_value + _("Can't send message: %s") % exc_value ) # The email is invalid or incorreclty parsed return defer.fail() self.message = None @@ -114,8 +114,8 @@ raise smtp.SMTPError +@implementer(smtp.IMessageDelivery) class SatSmtpDelivery(object): - implements(smtp.IMessageDelivery) def __init__(self, host, profile): self.host = host @@ -156,20 +156,21 @@ return origin +@implementer(portal.IRealm) class SmtpRealm(object): - implements(portal.IRealm) def __init__(self, host): self.host = host def requestAvatar(self, avatarID, mind, *interfaces): log.debug("requestAvatar") - profile = avatarID.decode("utf-8") + profile = avatarID if smtp.IMessageDelivery not in interfaces: raise NotImplementedError return 
smtp.IMessageDelivery, SatSmtpDelivery(self.host, profile), lambda: None +@implementer(checkers.ICredentialsChecker) class SatProfileCredentialChecker(object): """ This credential checker check against SàT's profile and associated jabber's password @@ -177,7 +178,6 @@ Return the profile as avatarId """ - implements(checkers.ICredentialsChecker) credentialInterfaces = ( credentials.IUsernamePassword, credentials.IUsernameHashedPassword, @@ -217,7 +217,7 @@ smtp.SMTPFactory.startedConnecting(self, connector) def clientConnectionLost(self, connector, reason): - log.debug(_(u"SMTP server connection lost (reason: %s)"), reason) + log.debug(_("SMTP server connection lost (reason: %s)"), reason) smtp.SMTPFactory.clientConnectionLost(self, connector, reason) def buildProtocol(self, addr): diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_static_blog.py --- a/sat/plugins/plugin_misc_static_blog.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_static_blog.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for static blogs @@ -47,7 +47,7 @@ class StaticBlog(object): - params = u""" + params = """ @@ -66,8 +66,8 @@ title_label=D_("Page title"), banner_name=C.STATIC_BLOG_PARAM_BANNER, banner_label=D_("Banner URL"), - background_name=u"Background", - background_label=D_(u"Background image URL"), + background_name="Background", + background_label=D_("Background image URL"), keywords_name=C.STATIC_BLOG_PARAM_KEYWORDS, keywords_label=D_("Keywords"), description_name=C.STATIC_BLOG_PARAM_DESCRIPTION, diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_tarot.py --- a/sat/plugins/plugin_misc_tarot.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_tarot.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing French Tarot game @@ -87,7 +87,7 @@ in_sign="asss", 
out_sign="", method=self._prepareRoom, - async=True, + async_=True, ) # args: players, room_jid, profile host.bridge.addMethod( "tarotGameCreate", @@ -138,10 +138,10 @@ "tarotGameInvalidCards", ".plugin", signature="ssa(ss)a(ss)s" ) # args: room_jid, game phase, played_cards, invalid_cards, profile self.deck_ordered = [] - for value in ["excuse"] + map(str, range(1, 22)): + for value in ["excuse"] + list(map(str, list(range(1, 22)))): self.deck_ordered.append(TarotCard(("atout", value))) for suit in ["pique", "coeur", "carreau", "trefle"]: - for value in map(str, range(1, 11)) + ["valet", "cavalier", "dame", "roi"]: + for value in list(map(str, list(range(1, 11)))) + ["valet", "cavalier", "dame", "roi"]: self.deck_ordered.append(TarotCard((suit, value))) self.__choose_contrat_id = host.registerCallback( self._contratChoosed, with_data=True @@ -172,7 +172,7 @@ field = data_form.Field( "list-single", "contrat", - options=map(data_form.Option, self.contrats), + options=list(map(data_form.Option, self.contrats)), required=True, ) form.addField(field) @@ -271,7 +271,7 @@ players_data[pl_waiting]["levees"].append(card) log.debug( _( - u"Player %(excuse_owner)s give %(card_waited)s to %(player_waiting)s for Excuse compensation" + "Player %(excuse_owner)s give %(card_waited)s to %(player_waiting)s for Excuse compensation" ) % { "excuse_owner": player, @@ -311,7 +311,7 @@ players_data[winner]["levees"].append(low_card) log.debug( _( - u"Player %(excuse_owner)s give %(card_waited)s to %(player_waiting)s for Excuse compensation" + "Player %(excuse_owner)s give %(card_waited)s to %(player_waiting)s for Excuse compensation" ) % { "excuse_owner": excuse_player, @@ -325,7 +325,7 @@ players_data[excuse_player]["wait_for_low"] = winner log.debug( _( - u"%(excuse_owner)s keep the Excuse but has not card to give, %(winner)s is waiting for one" + "%(excuse_owner)s keep the Excuse but has not card to give, %(winner)s is waiting for one" ) % {"excuse_owner": excuse_player, "winner": 
winner} ) @@ -339,7 +339,7 @@ scores_str += "\n" for player in game_data["players"]: scores_str += _( - u"\n--\n%(player)s:\nscore for this game ==> %(score_game)i\ntotal score ==> %(total_score)i" + "\n--\n%(player)s:\nscore for this game ==> %(score_game)i\ntotal score ==> %(total_score)i" ) % { "player": player, "score_game": 0, @@ -420,7 +420,7 @@ loosers.append(player) scores_str = _( - u"The attacker (%(attaquant)s) makes %(points)i and needs to make %(point_limit)i (%(nb_bouts)s oulder%(plural)s%(separator)s%(bouts)s): (s)he %(victory)s" + "The attacker (%(attaquant)s) makes %(points)i and needs to make %(point_limit)i (%(nb_bouts)s oulder%(plural)s%(separator)s%(bouts)s): (s)he %(victory)s" ) % { "attaquant": game_data["attaquant"], "points": score, @@ -434,7 +434,7 @@ scores_str += "\n" for player in game_data["players"]: scores_str += _( - u"\n--\n%(player)s:\nscore for this game ==> %(score_game)i\ntotal score ==> %(total_score)i" + "\n--\n%(player)s:\nscore for this game ==> %(score_game)i\ntotal score ==> %(total_score)i" ) % { "player": player, "score_game": player_score[player], @@ -537,7 +537,7 @@ data = xml_tools.XMLUIResult2DataFormResult(raw_data) contrat = data["contrat"] log.debug( - _(u"contrat [%(contrat)s] choosed by %(profile)s") + _("contrat [%(contrat)s] choosed by %(profile)s") % {"contrat": contrat, "profile": profile} ) d = self.send( @@ -578,10 +578,10 @@ """ profile = self.host.memory.getProfileName(profile_key) if not profile: - log.error(_(u"profile %s is unknown") % profile_key) + log.error(_("profile %s is unknown") % profile_key) return log.debug( - _(u"Cards played by %(profile)s: [%(cards)s]") + _("Cards played by %(profile)s: [%(cards)s]") % {"profile": profile, "cards": cards} ) elem = self.__card_list_to_xml(TarotCard.from_tuples(cards), "cards_played") @@ -647,7 +647,7 @@ ): # new game created and/or players list updated players = [] for player in elt.elements(): - players.append(unicode(player)) + 
players.append(str(player)) signal = ( self.host.bridge.tarotGameStarted if elt.name == "started" @@ -661,11 +661,11 @@ nb_players = len(self.games[room_jid]["players"]) status[player] = "ready" log.debug( - _(u"Player %(player)s is ready to start [status: %(status)s]") + _("Player %(player)s is ready to start [status: %(status)s]") % {"player": player, "status": status} ) if ( - status.values().count("ready") == nb_players + list(status.values()).count("ready") == nb_players ): # everybody is ready, we can start the game self.newRound(room_jid, profile) @@ -689,7 +689,7 @@ # TODO: check we receive the contrat from the right person # TODO: use proper XEP-0004 way for answering form player = elt["player"] - players_data[player]["contrat"] = unicode(elt) + players_data[player]["contrat"] = str(elt) contrats = [players_data[p]["contrat"] for p in game_data["players"]] if contrats.count(None): # not everybody has choosed his contrat, it's next one turn @@ -720,7 +720,7 @@ game_data["status"][player] = "init" return log.debug( - _(u"%(player)s win the bid with %(contrat)s") + _("%(player)s win the bid with %(contrat)s") % {"player": best_contrat[0], "contrat": best_contrat[1]} ) game_data["contrat"] = best_contrat[1] @@ -825,7 +825,7 @@ if all(played): # everybody has played winner = self.__winner(game_data) - log.debug(_(u"The winner of this trick is %s") % winner) + log.debug(_("The winner of this trick is %s") % winner) # the winner win the trick self.__excuse_hack(game_data, played, winner) players_data[elt["player"]]["levees"].extend(played) @@ -861,13 +861,13 @@ self.host.bridge.tarotGameYourTurn(room_jid.userhost(), profile) elif elt.name == "score": - form_elt = elt.elements(name="x", uri="jabber:x:data").next() + form_elt = next(elt.elements(name="x", uri="jabber:x:data")) winners = [] loosers = [] for winner in elt.elements(name="winner", uri=NS_CG): - winners.append(unicode(winner)) + winners.append(str(winner)) for looser in elt.elements(name="looser", 
uri=NS_CG): - loosers.append(unicode(looser)) + loosers.append(str(looser)) form = data_form.Form.fromElement(form_elt) session_id, session_data = self._sessions.newSession(profile=profile) session_data["room_jid"] = room_jid @@ -880,10 +880,10 @@ elif elt.name == "error": if elt["type"] == "invalid_cards": played_cards = self.__xml_to_list( - elt.elements(name="played", uri=NS_CG).next() + next(elt.elements(name="played", uri=NS_CG)) ) invalid_cards = self.__xml_to_list( - elt.elements(name="invalid", uri=NS_CG).next() + next(elt.elements(name="invalid", uri=NS_CG)) ) self.host.bridge.tarotGameInvalidCards( room_jid.userhost(), @@ -893,9 +893,9 @@ profile, ) else: - log.error(_(u"Unmanaged error type: %s") % elt["type"]) + log.error(_("Unmanaged error type: %s") % elt["type"]) else: - log.error(_(u"Unmanaged card game element: %s") % elt.name) + log.error(_("Unmanaged card game element: %s") % elt.name) def getSyncDataForPlayer(self, room_jid, nick): return [] diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_text_commands.py --- a/sat/plugins/plugin_misc_text_commands.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_text_commands.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SàT plugin for managing text commands @@ -57,8 +57,8 @@ # should be downloadable independently) HELP_SUGGESTION = _( - u"Type '/help' to get a list of the available commands. If you didn't want to " - u"use a command, please start your message with '//' to escape the slash." + "Type '/help' to get a list of the available commands. If you didn't want to " + "use a command, please start your message with '//' to escape the slash." 
) def __init__(self, host): @@ -87,7 +87,7 @@ data = OrderedDict([("doc_short_help", ""), ("type", "all"), ("args", "")]) docstring = cmd.__doc__ if docstring is None: - log.warning(u"No docstring found for command {}".format(cmd_name)) + log.warning("No docstring found for command {}".format(cmd_name)) docstring = "" doc_data = docstring.split("\n") @@ -137,7 +137,7 @@ data["args"] = stripped[colon_idx + 1 :].strip() except InvalidCommandSyntax as e: log.warning( - u"Invalid command syntax for command {command}: {message}".format( + "Invalid command syntax for command {command}: {message}".format( command=cmd_name, message=e.message ) ) @@ -153,7 +153,7 @@ if attr.startswith("cmd_"): cmd = getattr(instance, attr) if not callable(cmd): - log.warning(_(u"Skipping not callable [%s] attribute") % attr) + log.warning(_("Skipping not callable [%s] attribute") % attr) continue cmd_name = attr[4:] if not cmd_name: @@ -165,7 +165,7 @@ new_name = cmd_name + str(suff) log.warning( _( - u"Conflict for command [{old_name}], renaming it to [{new_name}]" + "Conflict for command [{old_name}], renaming it to [{new_name}]" ).format(old_name=cmd_name, new_name=new_name) ) cmd_name = new_name @@ -216,9 +216,9 @@ except KeyError: try: # we have not default message, we try to take the first found - msg_lang, msg = mess_data["message"].iteritems().next() + msg_lang, msg = next(iter(mess_data["message"].items())) except StopIteration: - log.debug(u"No message found, skipping text commands") + log.debug("No message found, skipping text commands") return mess_data try: @@ -244,15 +244,15 @@ if ret: return mess_data else: - log.debug(u"text command detected ({})".format(command)) + log.debug("text command detected ({})".format(command)) raise failure.Failure(exceptions.CancelError()) def genericErrback(failure): try: - msg = u"with condition {}".format(failure.value.condition) + msg = "with condition {}".format(failure.value.condition) except AttributeError: - msg = u"with error 
{}".format(failure.value) - self.feedBack(client, u"Command failed {}".format(msg), mess_data) + msg = "with error {}".format(failure.value) + self.feedBack(client, "Command failed {}".format(msg), mess_data) return False mess_data["unparsed"] = msg[ @@ -280,7 +280,7 @@ command=command, context=context_txt ) self.feedBack( - client, u"{} {}".format(feedback, self.HELP_SUGGESTION), mess_data + client, "{} {}".format(feedback, self.HELP_SUGGESTION), mess_data ) log.debug("text command invalid message") raise failure.Failure(exceptions.CancelError()) @@ -318,7 +318,7 @@ if arg[-1] != "@": return jid.JID(arg) return jid.JID(arg + service_jid) - return jid.JID(u"%s@%s" % (arg, service_jid)) + return jid.JID("%s@%s" % (arg, service_jid)) def feedBack(self, client, message, mess_data, info_type=FEEDBACK_INFO_TYPE): """Give a message back to the user""" @@ -350,7 +350,7 @@ room = mess_data["to"].userhostJID() try: if self.host.plugins["XEP-0045"].isNickInRoom(client, room, entity): - entity = u"%s/%s" % (room, entity) + entity = "%s/%s" % (room, entity) except KeyError: log.warning("plugin XEP-0045 is not present") @@ -368,7 +368,7 @@ if not target_jid.resource: target_jid.resource = self.host.memory.getMainResource(client, target_jid) - whois_msg = [_(u"whois for %(jid)s") % {"jid": target_jid}] + whois_msg = [_("whois for %(jid)s") % {"jid": target_jid}] d = defer.succeed(None) for ignore, callback in self._whois: @@ -377,7 +377,7 @@ ) def feedBack(ignore): - self.feedBack(client, u"\n".join(whois_msg), mess_data) + self.feedBack(client, "\n".join(whois_msg), mess_data) return False d.addCallback(feedBack) @@ -390,11 +390,11 @@ @return (list[unicode]): help strings """ strings = [] - for doc_name, doc_help in cmd_data.iteritems(): + for doc_name, doc_help in cmd_data.items(): if doc_name.startswith("doc_arg_"): arg_name = doc_name[8:] strings.append( - u"- {name}: {doc_help}".format(name=arg_name, doc_help=_(doc_help)) + "- {name}: {doc_help}".format(name=arg_name, 
doc_help=_(doc_help)) ) return strings @@ -424,7 +424,7 @@ cmd_name = cmd_name[1:] if cmd_name and cmd_name not in self._commands: self.feedBack( - client, _(u"Invalid command name [{}]\n".format(cmd_name)), mess_data + client, _("Invalid command name [{}]\n".format(cmd_name)), mess_data ) cmd_name = "" if not cmd_name: @@ -445,17 +445,17 @@ ) ) - help_mess = _(u"Text commands available:\n%s") % (u"\n".join(help_cmds),) + help_mess = _("Text commands available:\n%s") % ("\n".join(help_cmds),) else: # we show detailled help for a command cmd_data = self._commands[cmd_name] syntax = cmd_data["args"] - help_mess = _(u"/{name}: {short_help}\n{syntax}{args_help}").format( + help_mess = _("/{name}: {short_help}\n{syntax}{args_help}").format( name=cmd_name, short_help=cmd_data["doc_short_help"], syntax=_(" " * 4 + "syntax: {}\n").format(syntax) if syntax else "", - args_help=u"\n".join( - [u" " * 8 + "{}".format(line) for line in self._getArgsHelp(cmd_data)] + args_help="\n".join( + [" " * 8 + "{}".format(line) for line in self._getArgsHelp(cmd_data)] ), ) diff -r ff5bcb12ae60 -r ab2696e34d29 sat/plugins/plugin_misc_text_syntaxes.py --- a/sat/plugins/plugin_misc_text_syntaxes.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_text_syntaxes.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for managing various text syntaxes @@ -35,7 +35,7 @@ from lxml import etree except ImportError: raise exceptions.MissingModule( - u"Missing module lxml, please download/install it from http://lxml.de/" + "Missing module lxml, please download/install it from http://lxml.de/" ) log = getLogger(__name__) @@ -218,38 +218,38 @@ partial(markdown.markdown, extensions=[ EscapeHTML(), - u'nl2br', - u'codehilite', - u'fenced_code', - u'sane_lists', - u'tables', + 'nl2br', + 'codehilite', + 'fenced_code', + 'sane_lists', + 'tables', ], extension_configs = { - u"codehilite": { - u"css_class": 
"highlight", + "codehilite": { + "css_class": "highlight", } }), _html2text, [TextSyntaxes.OPT_DEFAULT], ) except ImportError: - log.warning(u"markdown or html2text not found, can't use Markdown syntax") + log.warning("markdown or html2text not found, can't use Markdown syntax") log.info( - u"You can download/install them from https://pythonhosted.org/Markdown/ and https://github.com/Alir3z4/html2text/" + "You can download/install them from https://pythonhosted.org/Markdown/ and https://github.com/Alir3z4/html2text/" ) host.bridge.addMethod( "syntaxConvert", ".plugin", in_sign="sssbs", out_sign="s", - async=True, + async_=True, method=self.convert, ) host.bridge.addMethod( "syntaxGet", ".plugin", in_sign="s", out_sign="s", method=self.getSyntax ) if xml_tools.cleanXHTML is None: - log.debug(u"Installing cleaning method") + log.debug("Installing cleaning method") xml_tools.cleanXHTML = self.cleanXHTML def _updateParamOptions(self): @@ -257,7 +257,7 @@ default_synt = TextSyntaxes.default_syntax syntaxes = [] - for syntax in data_synt.keys(): + for syntax in list(data_synt.keys()): flags = data_synt[syntax]["flags"] if TextSyntaxes.OPT_HIDDEN not in flags: syntaxes.append(syntax) @@ -267,9 +267,9 @@ for syntax in syntaxes: selected = 'selected="true"' if syntax == default_synt else "" - options.append(u'