Mercurial > libervia-backend
changeset 2624:56f94936df1e
code style reformatting using black
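The reformatting below follows black's fixed conventions: string literals are normalised to double quotes, comments get a space after the "#", the space before a call's opening parenthesis is dropped (print (...) becomes print(...)), and calls longer than the line-length limit are wrapped onto indented continuation lines. A minimal before/after sketch of the pattern, using an invented path purely for illustration:

import os

base_dir = os.path.dirname(__file__)  # illustrative only, not taken from the codebase

# before reformatting
template = os.path.join(base_dir, 'bridge_template.ini')
print ("can't open template file [{}]".format(template))

# after black: double quotes, no space before the call parentheses
template = os.path.join(base_dir, "bridge_template.ini")
print("can't open template file [{}]".format(template))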
--- a/sat/__init__.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/__init__.py Wed Jun 27 20:14:46 2018 +0200 @@ -18,10 +18,10 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. import os.path -version_file = os.path.join(os.path.dirname(__file__), 'VERSION') +version_file = os.path.join(os.path.dirname(__file__), "VERSION") try: with open(version_file) as f: __version__ = f.read().strip() except NotImplementedError: # pyjamas workaround - __version__ = '0.7D' + __version__ = "0.7D"
--- a/sat/bridge/bridge_constructor/base_constructor.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/base_constructor.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Sà T: a XMPP client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -29,7 +29,7 @@ class ParseError(Exception): - #Used when the signature parsing is going wrong (invalid signature ?) + # Used when the signature parsing is going wrong (invalid signature ?) pass @@ -41,7 +41,7 @@ # replace ##SIGNALS_PART## in template), while the value is the format # keys starting with "signal" will be used for signals, while ones starting with # "method" will be used for methods - # check D-Bus constructor for an example + #  check D-Bus constructor for an example CORE_FORMATS = None CORE_TEMPLATE = None CORE_DEST = None @@ -66,7 +66,7 @@ @param name: Name of the function to get @return: dict, each key has the config value or None if the value is not set""" function = {} - for option in ['type', 'category', 'sig_in', 'sig_out', 'doc']: + for option in ["type", "category", "sig_in", "sig_out", "doc"]: try: value = self.bridge_template.get(name, option) except NoOptionError: @@ -87,7 +87,9 @@ try: idx = int(match.group(1)) except ValueError: - raise ParseError("Invalid value [%s] for parameter number" % match.group(1)) + raise ParseError( + "Invalid value [%s] for parameter number" % match.group(1) + ) default_dict[idx] = self.bridge_template.get(name, option) return default_dict @@ -112,15 +114,17 @@ option_re = re.compile(r"doc_param_(\d+)") value_re = re.compile(r"^(\w+): (.*)$", re.MULTILINE | re.DOTALL) for option in self.bridge_template.options(name): - if option == 'doc_return': - doc_dict['return'] = self.bridge_template.get(name, option) + if option == "doc_return": + doc_dict["return"] = self.bridge_template.get(name, option) continue match = option_re.match(option) if match: try: idx = int(match.group(1)) except ValueError: - raise ParseError("Invalid value [%s] for parameter number" % match.group(1)) + raise ParseError( + "Invalid value [%s] for parameter number" % match.group(1) + ) value_match = value_re.match(self.bridge_template.get(name, option)) if not value_match: raise ParseError("Invalid value for parameter doc [%i]" % idx) @@ -141,21 +145,35 @@ i = 0 while i < len(signature): - if signature[i] not in ['b', 'y', 'n', 'i', 'x', 'q', 'u', 't', 'd', 's', 'a']: + if signature[i] not in [ + "b", + "y", + "n", + "i", + "x", + "q", + "u", + "t", + "d", + "s", + "a", + ]: raise ParseError("Unmanaged attribute type [%c]" % signature[i]) - if signature[i] == 'a': + if signature[i] == "a": i += 1 - if signature[i] != '{' and signature[i] != '(': # FIXME: must manage tuples out of arrays + if ( + signature[i] != "{" and signature[i] != "(" + ): # FIXME: must manage tuples out of arrays i += 1 yield signature[start:i] start = i continue # we have a simple type for the array opening_car = signature[i] - assert(opening_car in ['{', '(']) - closing_car = '}' if opening_car == '{' else ')' + assert opening_car in ["{", "("] + closing_car = "}" if opening_car == "{" else ")" opening_count = 1 - while (True): # we have a dict or a list of tuples + while True: # we have a dict or a list of tuples i += 1 if i >= len(signature): raise ParseError("missing }") @@ -182,11 +200,19 @@ attr_string = [] for arg in self.argumentsParser(signature): - attr_string.append(("unicode(%(name)s)%(default)s" if (unicode_protect and arg == 's') else 
"%(name)s%(default)s") % { - 'name': name[idx][0] if (name and idx in name) else "arg_%i" % idx, - 'default': "=" + default[idx] if (default and idx in default) else ''}) - # give arg_1, arg2, etc or name1, name2=default, etc. - #give unicode(arg_1), unicode(arg_2), etc. if unicode_protect is set and arg is a string + attr_string.append( + ( + "unicode(%(name)s)%(default)s" + if (unicode_protect and arg == "s") + else "%(name)s%(default)s" + ) + % { + "name": name[idx][0] if (name and idx in name) else "arg_%i" % idx, + "default": "=" + default[idx] if (default and idx in default) else "", + } + ) + # give arg_1, arg2, etc or name1, name2=default, etc. + # give unicode(arg_1), unicode(arg_2), etc. if unicode_protect is set and arg is a string idx += 1 return ", ".join(attr_string) @@ -214,7 +240,6 @@ """override this method to extend completion""" pass - def generate(self, side): """generate bridge @@ -242,7 +267,7 @@ @param side(str): core or frontend """ side_vars = [] - for var in ('FORMATS', 'TEMPLATE', 'DEST'): + for var in ("FORMATS", "TEMPLATE", "DEST"): attr = "{}_{}".format(side.upper(), var) value = getattr(self, attr) if value is None: @@ -252,59 +277,70 @@ FORMATS, TEMPLATE, DEST = side_vars del side_vars - parts = {part.upper():[] for part in FORMATS} + parts = {part.upper(): [] for part in FORMATS} sections = self.bridge_template.sections() sections.sort() for section in sections: function = self.getValues(section) - print ("Adding %s %s" % (section, function["type"])) + print("Adding %s %s" % (section, function["type"])) default = self.getDefault(section) arg_doc = self.getArgumentsDoc(section) async_ = "async" in self.getFlags(section) completion = { - 'sig_in': function['sig_in'] or '', - 'sig_out': function['sig_out'] or '', - 'category': 'plugin' if function['category'] == 'plugin' else 'core', - 'name': section, + "sig_in": function["sig_in"] or "", + "sig_out": function["sig_out"] or "", + "category": "plugin" if function["category"] == "plugin" else "core", + "name": section, # arguments with default values - 'args': self.getArguments(function['sig_in'], name=arg_doc, default=default), - } + "args": self.getArguments( + function["sig_in"], name=arg_doc, default=default + ), + } - extend_method = getattr(self, "{}_completion_{}".format(side, function["type"])) + extend_method = getattr( + self, "{}_completion_{}".format(side, function["type"]) + ) extend_method(completion, function, default, arg_doc, async_) for part, fmt in FORMATS.iteritems(): if part.startswith(function["type"]): parts[part.upper()].append(fmt.format(**completion)) - - #at this point, signals_part, methods_part and direct_calls should be filled, - #we just have to place them in the right part of the template + # at this point, signals_part, methods_part and direct_calls should be filled, + # we just have to place them in the right part of the template bridge = [] - const_override = {env[len(C.ENV_OVERRIDE):]:v for env,v in os.environ.iteritems() if env.startswith(C.ENV_OVERRIDE)} + const_override = { + env[len(C.ENV_OVERRIDE) :]: v + for env, v in os.environ.iteritems() + if env.startswith(C.ENV_OVERRIDE) + } template_path = self.getTemplatePath(TEMPLATE) try: with open(template_path) as template: for line in template: for part, extend_list in parts.iteritems(): - if line.startswith('##{}_PART##'.format(part)): + if line.startswith("##{}_PART##".format(part)): bridge.extend(extend_list) break else: # the line is not a magic part replacement - if line.startswith('const_'): - const_name = 
line[len('const_'):line.find(' = ')].strip() + if line.startswith("const_"): + const_name = line[len("const_") : line.find(" = ")].strip() if const_name in const_override: print("const {} overriden".format(const_name)) - bridge.append('const_{} = {}'.format(const_name, const_override[const_name])) + bridge.append( + "const_{} = {}".format( + const_name, const_override[const_name] + ) + ) continue - bridge.append(line.replace('\n', '')) + bridge.append(line.replace("\n", "")) except IOError: - print ("can't open template file [{}]".format(template_path)) + print("can't open template file [{}]".format(template_path)) sys.exit(1) - #now we write to final file + # now we write to final file self.finalWrite(DEST, bridge) def finalWrite(self, filename, file_buf): @@ -314,19 +350,24 @@ @param file_buf: list of lines (stings) of the file """ if os.path.exists(self.args.dest_dir) and not os.path.isdir(self.args.dest_dir): - print ("The destination dir [%s] can't be created: a file with this name already exists !") + print( + "The destination dir [%s] can't be created: a file with this name already exists !" + ) sys.exit(1) try: if not os.path.exists(self.args.dest_dir): os.mkdir(self.args.dest_dir) full_path = os.path.join(self.args.dest_dir, filename) if os.path.exists(full_path) and not self.args.force: - print ("The destination file [%s] already exists ! Use --force to overwrite it" % full_path) + print( + "The destination file [%s] already exists ! Use --force to overwrite it" + % full_path + ) try: - with open(full_path, 'w') as dest_file: - dest_file.write('\n'.join(file_buf)) + with open(full_path, "w") as dest_file: + dest_file.write("\n".join(file_buf)) except IOError: - print ("Can't open destination file [%s]" % full_path) + print("Can't open destination file [%s]" % full_path) except OSError: print("It's not possible to generate the file, check your permissions") exit(1)
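The argumentsParser generator reformatted above splits a D-Bus-style signature string into one element per argument: simple type codes are yielded as-is, and an "a" (array) greedily consumes a following "{...}" or "(...)" container by counting matching delimiters. A simplified standalone sketch of that splitting logic (not the project's exact code path, and without its error handling for truncated signatures):

def split_signature(signature):
    """Yield one D-Bus type per argument, e.g. "sa{ss}i" -> "s", "a{ss}", "i"."""
    simple_types = "bynixqutds"
    i = 0
    while i < len(signature):
        start = i
        while signature[i] == "a":  # array prefix, possibly repeated
            i += 1
        if signature[i] in "{(":
            # consume the whole dict/struct, counting nested delimiters
            opening = signature[i]
            closing = "}" if opening == "{" else ")"
            depth = 1
            while depth:
                i += 1
                depth += signature[i] == opening
                depth -= signature[i] == closing
            i += 1
        elif signature[i] in simple_types:
            i += 1
        else:
            raise ValueError("unmanaged type code %r" % signature[i])
        yield signature[start:i]


print(list(split_signature("sa{ss}i")))  # ['s', 'a{ss}', 'i']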
--- a/sat/bridge/bridge_constructor/bridge_constructor.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/bridge_constructor.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Sà T: a XMPP client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -23,23 +23,24 @@ from sat.bridge.bridge_constructor import constructors, base_constructor import argparse from ConfigParser import SafeConfigParser as Parser -from importlib import import_module +from importlib import import_module import os import os.path -#consts +# consts __version__ = C.APP_VERSION class BridgeConstructor(object): - def importConstructors(self): constructors_dir = os.path.dirname(constructors.__file__) self.protocoles = {} for dir_ in os.listdir(constructors_dir): - init_path = os.path.join(constructors_dir, dir_, '__init__.py') - constructor_path = os.path.join(constructors_dir, dir_, 'constructor.py') - module_path = "sat.bridge.bridge_constructor.constructors.{}.constructor".format(dir_) + init_path = os.path.join(constructors_dir, dir_, "__init__.py") + constructor_path = os.path.join(constructors_dir, dir_, "constructor.py") + module_path = "sat.bridge.bridge_constructor.constructors.{}.constructor".format( + dir_ + ) if os.path.isfile(init_path) and os.path.isfile(constructor_path): mod = import_module(module_path) for attr in dir(mod): @@ -55,27 +56,66 @@ def parse_args(self): """Check command line options""" - parser = argparse.ArgumentParser(description=C.DESCRIPTION, formatter_class=argparse.RawDescriptionHelpFormatter) + parser = argparse.ArgumentParser( + description=C.DESCRIPTION, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) - parser.add_argument("--version", action="version", version= __version__) - default_protocole = C.DEFAULT_PROTOCOLE if C.DEFAULT_PROTOCOLE in self.protocoles else self.protocoles[0] - parser.add_argument("-p", "--protocole", choices=sorted(self.protocoles), default=default_protocole, - help="generate bridge using PROTOCOLE (default: %(default)s)") # (default: %s, possible values: [%s])" % (DEFAULT_PROTOCOLE, ", ".join(MANAGED_PROTOCOLES))) - parser.add_argument("-s", "--side", choices=("core", "frontend"), default="core", - help="which side of the bridge do you want to make ?") # (default: %default, possible values: [core, frontend])") - default_template = os.path.join(os.path.dirname(bridge_constructor.__file__), 'bridge_template.ini') - parser.add_argument("-t", "--template", type=file, default=default_template, - help="use TEMPLATE to generate bridge (default: %(default)s)") - parser.add_argument("-f", "--force", action="store_true", - help=("force overwritting of existing files")) - parser.add_argument("-d", "--debug", action="store_true", - help=("add debug information printing")) - parser.add_argument("--no-unicode", action="store_false", dest="unicode", - help=("remove unicode type protection from string results")) - parser.add_argument("--flags", nargs='+', default=[], - help=("constructors' specific flags")) - parser.add_argument("--dest-dir", default=C.DEST_DIR_DEFAULT, - help=("directory when the generated files will be written (default: %(default)s)")) + parser.add_argument("--version", action="version", version=__version__) + default_protocole = ( + C.DEFAULT_PROTOCOLE + if C.DEFAULT_PROTOCOLE in self.protocoles + else self.protocoles[0] + ) + parser.add_argument( + "-p", + "--protocole", + choices=sorted(self.protocoles), + default=default_protocole, + 
help="generate bridge using PROTOCOLE (default: %(default)s)", + ) # (default: %s, possible values: [%s])" % (DEFAULT_PROTOCOLE, ", ".join(MANAGED_PROTOCOLES))) + parser.add_argument( + "-s", + "--side", + choices=("core", "frontend"), + default="core", + help="which side of the bridge do you want to make ?", + ) # (default: %default, possible values: [core, frontend])") + default_template = os.path.join( + os.path.dirname(bridge_constructor.__file__), "bridge_template.ini" + ) + parser.add_argument( + "-t", + "--template", + type=file, + default=default_template, + help="use TEMPLATE to generate bridge (default: %(default)s)", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + help=("force overwritting of existing files"), + ) + parser.add_argument( + "-d", "--debug", action="store_true", help=("add debug information printing") + ) + parser.add_argument( + "--no-unicode", + action="store_false", + dest="unicode", + help=("remove unicode type protection from string results"), + ) + parser.add_argument( + "--flags", nargs="+", default=[], help=("constructors' specific flags") + ) + parser.add_argument( + "--dest-dir", + default=C.DEST_DIR_DEFAULT, + help=( + "directory when the generated files will be written (default: %(default)s)" + ), + ) return parser.parse_args() @@ -86,7 +126,7 @@ try: template_parser.readfp(args.template) except IOError: - print ("The template file doesn't exist or is not accessible") + print("The template file doesn't exist or is not accessible") exit(1) constructor = self.protocoles[args.protocole](template_parser, args) constructor.generate(args.side)
--- a/sat/bridge/bridge_constructor/constants.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constants.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Sà T: a XMPP client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -31,11 +31,13 @@ This program comes with ABSOLUTELY NO WARRANTY; This is free software, and you are welcome to redistribute it under certain conditions. - """.format(name=NAME, version=constants.Const.APP_VERSION) -# TODO: move protocoles in separate files (plugins?) - DEFAULT_PROTOCOLE = 'dbus' + """.format( + name=NAME, version=constants.Const.APP_VERSION + ) + #  TODO: move protocoles in separate files (plugins?) + DEFAULT_PROTOCOLE = "dbus" -# flags used method/signal declaration (not to be confused with constructor flags) - DECLARATION_FLAGS = ['deprecated', 'async'] + # flags used method/signal declaration (not to be confused with constructor flags) + DECLARATION_FLAGS = ["deprecated", "async"] ENV_OVERRIDE = "SAT_BRIDGE_CONST_" # Prefix used to override a constant
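ENV_OVERRIDE above is the hook used by base_constructor.generate: every environment variable whose name starts with SAT_BRIDGE_CONST_ overrides the matching const_ line in the generated bridge file. A short sketch of the mechanism, assuming the user exports an override for const_TIMEOUT (a constant referenced by the D-Bus frontend template):

import os

ENV_OVERRIDE = "SAT_BRIDGE_CONST_"

# the user exports SAT_BRIDGE_CONST_TIMEOUT=120 before running the constructor
os.environ[ENV_OVERRIDE + "TIMEOUT"] = "120"

const_override = {
    env[len(ENV_OVERRIDE):]: value
    for env, value in os.environ.items()
    if env.startswith(ENV_OVERRIDE)
}

# any template line starting with "const_TIMEOUT = " is then replaced by:
for const_name, value in const_override.items():
    print("const_{} = {}".format(const_name, value))  # const_TIMEOUT = 120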
--- a/sat/bridge/bridge_constructor/constructors/dbus-xml/constructor.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus-xml/constructor.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Sà T: a XMPP client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -30,62 +30,73 @@ self.template = "dbus_xml_template.xml" self.core_dest = "org.goffi.sat.xml" - self.default_annotation = {'a{ss}': 'StringDict', - 'a(sa{ss}as)': 'QList<Contact>', - 'a{i(ss)}': 'HistoryT', - 'a(sss)': 'QList<MenuT>', - 'a{sa{s(sia{ss})}}': 'PresenceStatusT', - } + self.default_annotation = { + "a{ss}": "StringDict", + "a(sa{ss}as)": "QList<Contact>", + "a{i(ss)}": "HistoryT", + "a(sss)": "QList<MenuT>", + "a{sa{s(sia{ss})}}": "PresenceStatusT", + } def generateCoreSide(self): try: doc = minidom.parse(self.getTemplatePath(self.template)) - interface_elt = doc.getElementsByTagName('interface')[0] + interface_elt = doc.getElementsByTagName("interface")[0] except IOError: - print ("Can't access template") + print("Can't access template") sys.exit(1) except IndexError: - print ("Template error") + print("Template error") sys.exit(1) sections = self.bridge_template.sections() sections.sort() for section in sections: function = self.getValues(section) - print ("Adding %s %s" % (section, function["type"])) - new_elt = doc.createElement('method' if function["type"] == 'method' else 'signal') - new_elt.setAttribute('name', section) + print("Adding %s %s" % (section, function["type"])) + new_elt = doc.createElement( + "method" if function["type"] == "method" else "signal" + ) + new_elt.setAttribute("name", section) idx = 0 args_doc = self.getArgumentsDoc(section) - for arg in self.argumentsParser(function['sig_in'] or ''): - arg_elt = doc.createElement('arg') - arg_elt.setAttribute('name', args_doc[idx][0] if idx in args_doc else "arg_%i" % idx) - arg_elt.setAttribute('type', arg) - _direction = 'in' if function["type"] == 'method' else 'out' - arg_elt.setAttribute('direction', _direction) + for arg in self.argumentsParser(function["sig_in"] or ""): + arg_elt = doc.createElement("arg") + arg_elt.setAttribute( + "name", args_doc[idx][0] if idx in args_doc else "arg_%i" % idx + ) + arg_elt.setAttribute("type", arg) + _direction = "in" if function["type"] == "method" else "out" + arg_elt.setAttribute("direction", _direction) new_elt.appendChild(arg_elt) if "annotation" in self.args.flags: if arg in self.default_annotation: annot_elt = doc.createElement("annotation") - annot_elt.setAttribute('name', "com.trolltech.QtDBus.QtTypeName.In%d" % idx) - annot_elt.setAttribute('value', self.default_annotation[arg]) + annot_elt.setAttribute( + "name", "com.trolltech.QtDBus.QtTypeName.In%d" % idx + ) + annot_elt.setAttribute("value", self.default_annotation[arg]) new_elt.appendChild(annot_elt) idx += 1 - if function['sig_out']: - arg_elt = doc.createElement('arg') - arg_elt.setAttribute('type', function['sig_out']) - arg_elt.setAttribute('direction', 'out') + if function["sig_out"]: + arg_elt = doc.createElement("arg") + arg_elt.setAttribute("type", function["sig_out"]) + arg_elt.setAttribute("direction", "out") new_elt.appendChild(arg_elt) if "annotation" in self.args.flags: - if function['sig_out'] in self.default_annotation: + if function["sig_out"] in self.default_annotation: annot_elt = doc.createElement("annotation") - annot_elt.setAttribute('name', "com.trolltech.QtDBus.QtTypeName.Out0") - 
annot_elt.setAttribute('value', self.default_annotation[function['sig_out']]) + annot_elt.setAttribute( + "name", "com.trolltech.QtDBus.QtTypeName.Out0" + ) + annot_elt.setAttribute( + "value", self.default_annotation[function["sig_out"]] + ) new_elt.appendChild(annot_elt) interface_elt.appendChild(new_elt) - #now we write to final file + # now we write to final file self.finalWrite(self.core_dest, [doc.toprettyxml()])
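To make the generated introspection XML concrete: for each argument of a method, the loop above appends an <arg> element carrying a name, a D-Bus type and a direction, optionally followed by a QtDBus annotation. A minimal sketch with xml.dom.minidom (the method name is invented for the example):

from xml.dom import minidom

doc = minidom.Document()
method_elt = doc.createElement("method")
method_elt.setAttribute("name", "getVersion")  # hypothetical method name

arg_elt = doc.createElement("arg")
arg_elt.setAttribute("name", "profile_key")
arg_elt.setAttribute("type", "s")
arg_elt.setAttribute("direction", "in")
method_elt.appendChild(arg_elt)

print(method_elt.toprettyxml())
# roughly: <method name="getVersion">
#              <arg direction="in" name="profile_key" type="s"/>
#          </method>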
--- a/sat/bridge/bridge_constructor/constructors/dbus/constructor.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus/constructor.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Sà T: a XMPP client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -25,73 +25,94 @@ CORE_TEMPLATE = "dbus_core_template.py" CORE_DEST = "dbus_bridge.py" CORE_FORMATS = { - 'signals': """\ + "signals": """\ @dbus.service.signal(const_INT_PREFIX+const_{category}_SUFFIX, signature='{sig_in}') def {name}(self, {args}): {body}\n""", - - 'methods': """\ + "methods": """\ @dbus.service.method(const_INT_PREFIX+const_{category}_SUFFIX, in_signature='{sig_in}', out_signature='{sig_out}', async_callbacks={async_callbacks}) def {name}(self, {args}{async_comma}{async_args_def}): {debug}return self._callback("{name}", {args_result}{async_comma}{async_args_call})\n""", - - 'signal_direct_calls': """\ + "signal_direct_calls": """\ def {name}(self, {args}): self.dbus_bridge.{name}({args})\n""", - } + } FRONTEND_TEMPLATE = "dbus_frontend_template.py" FRONTEND_DEST = CORE_DEST FRONTEND_FORMATS = { - 'methods': """\ + "methods": """\ def {name}(self, {args}{async_comma}{async_args}): - {error_handler}{blocking_call}{debug}return {result}\n""", - } + {error_handler}{blocking_call}{debug}return {result}\n""" + } def core_completion_signal(self, completion, function, default, arg_doc, async_): - completion['category'] = completion['category'].upper() - completion['body'] = "pass" if not self.args.debug else 'log.debug ("{}")'.format(completion['name']) + completion["category"] = completion["category"].upper() + completion["body"] = ( + "pass" + if not self.args.debug + else 'log.debug ("{}")'.format(completion["name"]) + ) def core_completion_method(self, completion, function, default, arg_doc, async_): - completion.update({ - 'debug': "" if not self.args.debug else 'log.debug ("%s")\n%s' % (completion['name'], 8 * ' '), - 'args_result': self.getArguments(function['sig_in'], name=arg_doc, unicode_protect=self.args.unicode), - 'async_comma': ', ' if async_ and function['sig_in'] else '', - 'async_args_def': 'callback=None, errback=None' if async_ else '', - 'async_args_call': 'callback=callback, errback=errback' if async_ else '', - 'async_callbacks': "('callback', 'errback')" if async_ else "None", - 'category': completion['category'].upper(), - }) + completion.update( + { + "debug": "" + if not self.args.debug + else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " "), + "args_result": self.getArguments( + function["sig_in"], name=arg_doc, unicode_protect=self.args.unicode + ), + "async_comma": ", " if async_ and function["sig_in"] else "", + "async_args_def": "callback=None, errback=None" if async_ else "", + "async_args_call": "callback=callback, errback=errback" if async_ else "", + "async_callbacks": "('callback', 'errback')" if async_ else "None", + "category": completion["category"].upper(), + } + ) def frontend_completion_method(self, completion, function, default, arg_doc, async_): - completion.update({ - # XXX: we can manage blocking call in the same way as async one: if callback is None the call will be blocking - 'debug': "" if not self.args.debug else 'log.debug ("%s")\n%s' % (completion['name'], 8 * ' '), - 'args_result': self.getArguments(function['sig_in'], name=arg_doc), - 'async_args': 'callback=None, errback=None', - 'async_comma': ', ' if function['sig_in'] else '', - 'error_handler': """if 
callback is None: + completion.update( + { + # XXX: we can manage blocking call in the same way as async one: if callback is None the call will be blocking + "debug": "" + if not self.args.debug + else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " "), + "args_result": self.getArguments(function["sig_in"], name=arg_doc), + "async_args": "callback=None, errback=None", + "async_comma": ", " if function["sig_in"] else "", + "error_handler": """if callback is None: error_handler = None else: if errback is None: errback = log.error error_handler = lambda err:errback(dbus_to_bridge_exception(err)) """, - }) + } + ) if async_: - completion['blocking_call'] = '' - completion['async_args_result'] = 'timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler' + completion["blocking_call"] = "" + completion[ + "async_args_result" + ] = "timeout=const_TIMEOUT, reply_handler=callback, error_handler=error_handler" else: # XXX: To have a blocking call, we must have not reply_handler, so we test if callback exists, and add reply_handler only in this case - completion['blocking_call'] = """kwargs={} + completion[ + "blocking_call" + ] = """kwargs={} if callback is not None: kwargs['timeout'] = const_TIMEOUT kwargs['reply_handler'] = callback kwargs['error_handler'] = error_handler """ - completion['async_args_result'] = '**kwargs' - result = "self.db_%(category)s_iface.%(name)s(%(args_result)s%(async_comma)s%(async_args_result)s)" % completion - completion['result'] = ("unicode(%s)" if self.args.unicode and function['sig_out'] == 's' else "%s") % result + completion["async_args_result"] = "**kwargs" + result = ( + "self.db_%(category)s_iface.%(name)s(%(args_result)s%(async_comma)s%(async_args_result)s)" + % completion + ) + completion["result"] = ( + "unicode(%s)" if self.args.unicode and function["sig_out"] == "s" else "%s" + ) % result
--- a/sat/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constructors/dbus/dbus_core_template.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # SAT: a jabber client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -23,13 +23,14 @@ import dbus.mainloop.glib import inspect from sat.core.log import getLogger + log = getLogger(__name__) from twisted.internet.defer import Deferred from sat.core.exceptions import BridgeInitError const_INT_PREFIX = "org.goffi.SAT" # Interface prefix const_ERROR_PREFIX = const_INT_PREFIX + ".error" -const_OBJ_PATH = '/org/goffi/SAT/bridge' +const_OBJ_PATH = "/org/goffi/SAT/bridge" const_CORE_SUFFIX = ".core" const_PLUGIN_SUFFIX = ".plugin" @@ -73,11 +74,12 @@ self.args = (message, twisted_error.value.condition) except AttributeError: self.args = (message,) - self._dbus_error_name = '.'.join([const_ERROR_PREFIX, class_.__module__, class_.__name__]) + self._dbus_error_name = ".".join( + [const_ERROR_PREFIX, class_.__module__, class_.__name__] + ) class DbusObject(dbus.service.Object): - def __init__(self, bus, path): dbus.service.Object.__init__(self, bus, path) log.debug("Init DbusObject...") @@ -93,7 +95,7 @@ raise MethodNotRegistered if "callback" in kwargs: - #we must have errback too + # we must have errback too if not "errback" in kwargs: log.error("errback is missing in method call [%s]" % name) raise InternalError @@ -107,27 +109,29 @@ if not isinstance(result, Deferred): log.error("Asynchronous method [%s] does not return a Deferred." % name) raise AsyncNotDeferred - result.addCallback(lambda result: callback() if result is None else callback(result)) + result.addCallback( + lambda result: callback() if result is None else callback(result) + ) result.addErrback(lambda err: errback(GenericException(err))) else: if isinstance(result, Deferred): log.error("Synchronous method [%s] return a Deferred." 
% name) raise DeferredNotAsync return result + ### signals ### - @dbus.service.signal(const_INT_PREFIX + const_PLUGIN_SUFFIX, - signature='') + @dbus.service.signal(const_INT_PREFIX + const_PLUGIN_SUFFIX, signature="") def dummySignal(self): - #FIXME: workaround for addSignal (doesn't work if one method doensn't + # FIXME: workaround for addSignal (doesn't work if one method doensn't # already exist for plugins), probably missing some initialisation, need # further investigations pass -##SIGNALS_PART## + ##SIGNALS_PART## ### methods ### -##METHODS_PART## + ##METHODS_PART## def __attributes(self, in_sign): """Return arguments to user given a in_sign @param in_sign: in_sign in the short form (using s,a,i,b etc) @@ -136,22 +140,24 @@ idx = 0 attr = [] while i < len(in_sign): - if in_sign[i] not in ['b', 'y', 'n', 'i', 'x', 'q', 'u', 't', 'd', 's', 'a']: + if in_sign[i] not in ["b", "y", "n", "i", "x", "q", "u", "t", "d", "s", "a"]: raise ParseError("Unmanaged attribute type [%c]" % in_sign[i]) attr.append("arg_%i" % idx) idx += 1 - if in_sign[i] == 'a': + if in_sign[i] == "a": i += 1 - if in_sign[i] != '{' and in_sign[i] != '(': # FIXME: must manage tuples out of arrays + if ( + in_sign[i] != "{" and in_sign[i] != "(" + ): # FIXME: must manage tuples out of arrays i += 1 continue # we have a simple type for the array opening_car = in_sign[i] - assert(opening_car in ['{', '(']) - closing_car = '}' if opening_car == '{' else ')' + assert opening_car in ["{", "("] + closing_car = "}" if opening_car == "{" else ")" opening_count = 1 - while (True): # we have a dict or a list of tuples + while True: # we have a dict or a list of tuples i += 1 if i >= len(in_sign): raise ParseError("missing }") @@ -172,47 +178,80 @@ _defaults = list(inspect_args.defaults or []) if inspect.ismethod(method): - #if we have a method, we don't want the first argument (usually 'self') - del(_arguments[0]) + # if we have a method, we don't want the first argument (usually 'self') + del (_arguments[0]) - #first arguments are for the _callback method - arguments_callback = ', '.join([repr(name)] + ((_arguments + ['callback=callback', 'errback=errback']) if async else _arguments)) + # first arguments are for the _callback method + arguments_callback = ", ".join( + [repr(name)] + + ( + (_arguments + ["callback=callback", "errback=errback"]) + if async + else _arguments + ) + ) if async: - _arguments.extend(['callback', 'errback']) + _arguments.extend(["callback", "errback"]) _defaults.extend([None, None]) - #now we create a second list with default values + # now we create a second list with default values for i in range(1, len(_defaults) + 1): _arguments[-i] = "%s = %s" % (_arguments[-i], repr(_defaults[-i])) - arguments_defaults = ', '.join(_arguments) + arguments_defaults = ", ".join(_arguments) - code = compile('def %(name)s (self,%(arguments_defaults)s): return self._callback(%(arguments_callback)s)' % - {'name': name, 'arguments_defaults': arguments_defaults, 'arguments_callback': arguments_callback}, '<DBus bridge>', 'exec') - exec (code) # FIXME: to the same thing in a cleaner way, without compile/exec + code = compile( + "def %(name)s (self,%(arguments_defaults)s): return self._callback(%(arguments_callback)s)" + % { + "name": name, + "arguments_defaults": arguments_defaults, + "arguments_callback": arguments_callback, + }, + "<DBus bridge>", + "exec", + ) + exec(code) # FIXME: to the same thing in a cleaner way, without compile/exec method = locals()[name] - async_callbacks = ('callback', 'errback') if async else 
None - setattr(DbusObject, name, dbus.service.method( - const_INT_PREFIX + int_suffix, in_signature=in_sign, out_signature=out_sign, - async_callbacks=async_callbacks)(method)) + async_callbacks = ("callback", "errback") if async else None + setattr( + DbusObject, + name, + dbus.service.method( + const_INT_PREFIX + int_suffix, + in_signature=in_sign, + out_signature=out_sign, + async_callbacks=async_callbacks, + )(method), + ) function = getattr(self, name) - func_table = self._dbus_class_table[self.__class__.__module__ + '.' + self.__class__.__name__][function._dbus_interface] + func_table = self._dbus_class_table[ + self.__class__.__module__ + "." + self.__class__.__name__ + ][function._dbus_interface] func_table[function.__name__] = function # Needed for introspection def addSignal(self, name, int_suffix, signature, doc={}): """Dynamically add a signal to Dbus Bridge""" - attributes = ', '.join(self.__attributes(signature)) - #TODO: use doc parameter to name attributes + attributes = ", ".join(self.__attributes(signature)) + # TODO: use doc parameter to name attributes - #code = compile ('def '+name+' (self,'+attributes+'): log.debug ("'+name+' signal")', '<DBus bridge>','exec') #XXX: the log.debug is too annoying with xmllog - code = compile('def ' + name + ' (self,' + attributes + '): pass', '<DBus bridge>', 'exec') - exec (code) + # code = compile ('def '+name+' (self,'+attributes+'): log.debug ("'+name+' signal")', '<DBus bridge>','exec') #XXX: the log.debug is too annoying with xmllog + code = compile( + "def " + name + " (self," + attributes + "): pass", "<DBus bridge>", "exec" + ) + exec(code) signal = locals()[name] - setattr(DbusObject, name, dbus.service.signal( - const_INT_PREFIX + int_suffix, signature=signature)(signal)) + setattr( + DbusObject, + name, + dbus.service.signal(const_INT_PREFIX + int_suffix, signature=signature)( + signal + ), + ) function = getattr(self, name) - func_table = self._dbus_class_table[self.__class__.__module__ + '.' + self.__class__.__name__][function._dbus_interface] + func_table = self._dbus_class_table[ + self.__class__.__module__ + "." + self.__class__.__name__ + ][function._dbus_interface] func_table[function.__name__] = function # Needed for introspection @@ -223,20 +262,24 @@ try: self.session_bus = dbus.SessionBus() except dbus.DBusException as e: - if e._dbus_error_name == 'org.freedesktop.DBus.Error.NotSupported': - log.error(_(u"D-Bus is not launched, please see README to see instructions on how to launch it")) + if e._dbus_error_name == "org.freedesktop.DBus.Error.NotSupported": + log.error( + _( + u"D-Bus is not launched, please see README to see instructions on how to launch it" + ) + ) raise BridgeInitError self.dbus_name = dbus.service.BusName(const_INT_PREFIX, self.session_bus) self.dbus_bridge = DbusObject(self.session_bus, const_OBJ_PATH) -##SIGNAL_DIRECT_CALLS_PART## + ##SIGNAL_DIRECT_CALLS_PART## def register_method(self, name, callback): log.debug("registering DBus bridge method [%s]" % name) self.dbus_bridge.register_method(name, callback) def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): """Dynamically add a method to Dbus Bridge""" - #FIXME: doc parameter is kept only temporary, the time to remove it from calls + # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [%s] to DBus bridge" % name) self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async) self.register_method(name, method)
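addMethod above relies on a compile/exec idiom: the forwarding method is generated as a source string, executed to obtain a plain function, then wrapped by the dbus.service.method decorator and attached to DbusObject with setattr. Stripped of the D-Bus specifics, the pattern looks roughly like this (the class, method and argument names are only illustrative):

class Bridge(object):
    def _callback(self, name, *args):
        # stand-in for the real dispatcher registered by the backend
        return (name, args)


def add_method(cls, name, arg_names):
    """Attach a generated forwarding method to cls (sketch of the compile/exec idiom)."""
    args = ", ".join(arg_names)
    source = "def {name}(self, {args}): return self._callback({name!r}, {args})".format(
        name=name, args=args
    )
    namespace = {}
    exec(compile(source, "<generated>", "exec"), namespace)
    setattr(cls, name, namespace[name])


add_method(Bridge, "getContacts", ["profile_key"])
print(Bridge().getContacts("@DEFAULT@"))  # ('getContacts', ('@DEFAULT@',))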
--- a/sat/bridge/bridge_constructor/constructors/embedded/constructor.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constructors/embedded/constructor.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Sà T: a XMPP client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -18,7 +18,8 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. from sat.bridge.bridge_constructor import base_constructor -# from textwraps import dedent + +#  from textwraps import dedent class EmbeddedConstructor(base_constructor.Constructor): @@ -26,11 +27,11 @@ CORE_TEMPLATE = "embedded_template.py" CORE_DEST = "embedded.py" CORE_FORMATS = { - 'methods': """\ + "methods": """\ def {name}(self, {args}{args_comma}callback=None, errback=None): {ret_routine} """, - 'signals': """\ + "signals": """\ def {name}(self, {args}): try: cb = self._signals_cbs["{category}"]["{name}"] @@ -38,22 +39,30 @@ log.warning(u"ignoring signal {name}: no callback registered") else: cb({args_result}) -""" - } +""", + } FRONTEND_TEMPLATE = "embedded_frontend_template.py" FRONTEND_DEST = CORE_DEST FRONTEND_FORMATS = {} def core_completion_method(self, completion, function, default, arg_doc, async_): - completion.update({ - 'debug': "" if not self.args.debug else 'log.debug ("%s")\n%s' % (completion['name'], 8 * ' '), - 'args_result': self.getArguments(function['sig_in'], name=arg_doc), - 'args_comma': ', ' if function['sig_in'] else '', - }) + completion.update( + { + "debug": "" + if not self.args.debug + else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " "), + "args_result": self.getArguments(function["sig_in"], name=arg_doc), + "args_comma": ", " if function["sig_in"] else "", + } + ) if async_: - completion["cb_or_lambda"] = "callback" if function['sig_out'] else "lambda dummy: callback()" - completion["ret_routine"] = """\ + completion["cb_or_lambda"] = ( + "callback" if function["sig_out"] else "lambda dummy: callback()" + ) + completion[ + "ret_routine" + ] = """\ d = self._methods_cbs["{name}"]({args_result}) if callback is not None: d.addCallback({cb_or_lambda}) @@ -62,10 +71,14 @@ else: d.addErrback(errback) return d - """.format(**completion) + """.format( + **completion + ) else: - completion['ret_or_nothing'] = 'ret' if function['sig_out'] else '' - completion["ret_routine"] = """\ + completion["ret_or_nothing"] = "ret" if function["sig_out"] else "" + completion[ + "ret_routine" + ] = """\ try: ret = self._methods_cbs["{name}"]({args_result}) except Exception as e: @@ -77,9 +90,11 @@ if callback is None: return ret else: - callback({ret_or_nothing})""".format(**completion) + callback({ret_or_nothing})""".format( + **completion + ) def core_completion_signal(self, completion, function, default, arg_doc, async_): - completion.update({ - 'args_result': self.getArguments(function['sig_in'], name=arg_doc), - }) + completion.update( + {"args_result": self.getArguments(function["sig_in"], name=arg_doc)} + )
--- a/sat/bridge/bridge_constructor/constructors/embedded/embedded_frontend_template.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constructors/embedded/embedded_frontend_template.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Sà T: a XMPP client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org)
--- a/sat/bridge/bridge_constructor/constructors/embedded/embedded_template.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constructors/embedded/embedded_template.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Sà T: a XMPP client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -18,6 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. from sat.core.log import getLogger + log = getLogger(__name__) from sat.core import exceptions @@ -26,10 +27,7 @@ def __init__(self): log.debug(u"Init embedded bridge...") self._methods_cbs = {} - self._signals_cbs = { - "core": {}, - "plugin": {} - } + self._signals_cbs = {"core": {}, "plugin": {}} def bridgeConnect(self, callback, errback): callback() @@ -43,7 +41,11 @@ def register_signal(self, functionName, handler, iface="core"): iface_dict = self._signals_cbs[iface] if functionName in iface_dict: - raise exceptions.ConflictError(u"signal {name} is already regitered for interface {iface}".format(name=functionName, iface=iface)) + raise exceptions.ConflictError( + u"signal {name} is already regitered for interface {iface}".format( + name=functionName, iface=iface + ) + ) iface_dict[functionName] = handler def call_method(self, name, out_sign, async_, args, kwargs): @@ -84,23 +86,36 @@ cb(*args, **kwargs) def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): - #FIXME: doc parameter is kept only temporary, the time to remove it from calls + # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [{}] to embedded bridge".format(name)) self.register_method(name, method) - setattr(self.__class__, name, lambda self_, *args, **kwargs: self.call_method(name, out_sign, async, args, kwargs)) + setattr( + self.__class__, + name, + lambda self_, *args, **kwargs: self.call_method( + name, out_sign, async, args, kwargs + ), + ) def addSignal(self, name, int_suffix, signature, doc={}): - setattr(self.__class__, name, lambda self_, *args, **kwargs: self.send_signal(name, args, kwargs)) + setattr( + self.__class__, + name, + lambda self_, *args, **kwargs: self.send_signal(name, args, kwargs), + ) ## signals ## + ##SIGNALS_PART## - ## methods ## +## methods ## ##METHODS_PART## # we want the same instance for both core and frontend bridge = None + + def Bridge(): global bridge if bridge is None:
--- a/sat/bridge/bridge_constructor/constructors/mediawiki/constructor.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constructors/mediawiki/constructor.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Sà T: a XMPP client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -24,7 +24,6 @@ class MediawikiConstructor(base_constructor.Constructor): - def __init__(self, bridge_template, options): base_constructor.Constructor.__init__(self, bridge_template, options) self.core_template = "mediawiki_template.tpl" @@ -35,10 +34,10 @@ def anchor_link(match): link = match.group(1) - #we add anchor_link for [method_name] syntax: + # we add anchor_link for [method_name] syntax: if link in self.bridge_template.sections(): return "[[#%s|%s]]" % (link, link) - print ("WARNING: found an anchor link to an unknown method") + print("WARNING: found an anchor link to an unknown method") return link return re.sub(r"\[(\w+)\]", anchor_link, text) @@ -51,12 +50,12 @@ arg_doc = self.getArgumentsDoc(name) arg_default = self.getDefault(name) args_str = self.getArguments(sig_in) - args = args_str.split(', ') if args_str else [] # ugly but it works :) + args = args_str.split(", ") if args_str else [] # ugly but it works :) wiki = [] for i in range(len(args)): if i in arg_doc: name, doc = arg_doc[i] - doc = '\n:'.join(doc.rstrip('\n').split('\n')) + doc = "\n:".join(doc.rstrip("\n").split("\n")) wiki.append("; %s: %s" % (name, self._addTextDecorations(doc))) else: wiki.append("; arg_%d: " % i) @@ -70,9 +69,13 @@ """ arg_doc = self.getArgumentsDoc(name) wiki = [] - if 'return' in arg_doc: - wiki.append('\n|-\n! scope=row | return value\n|') - wiki.append('<br />\n'.join(self._addTextDecorations(arg_doc['return']).rstrip('\n').split('\n'))) + if "return" in arg_doc: + wiki.append("\n|-\n! scope=row | return value\n|") + wiki.append( + "<br />\n".join( + self._addTextDecorations(arg_doc["return"]).rstrip("\n").split("\n") + ) + ) return "\n".join(wiki) def generateCoreSide(self): @@ -82,37 +85,49 @@ sections.sort() for section in sections: function = self.getValues(section) - print ("Adding %s %s" % (section, function["type"])) + print("Adding %s %s" % (section, function["type"])) async_msg = """<br />'''This method is asynchronous'''""" deprecated_msg = """<br />'''<font color="#FF0000">/!\ WARNING /!\ : This method is deprecated, please don't use it !</font>'''""" - signature_signal = \ - """\ + signature_signal = ( + """\ ! scope=row | signature | %s |-\ -""" % function['sig_in'] - signature_method = \ - """\ +""" + % function["sig_in"] + ) + signature_method = """\ ! scope=row | signature in | %s |- ! 
scope=row | signature out | %s |-\ -""" % (function['sig_in'], function['sig_out']) +""" % ( + function["sig_in"], + function["sig_out"], + ) completion = { - 'signature': signature_signal if function['type'] == "signal" else signature_method, - 'sig_out': function['sig_out'] or '', - 'category': function['category'], - 'name': section, - 'doc': self.getDoc(section) or "FIXME: No description available", - 'async': async_msg if "async" in self.getFlags(section) else "", - 'deprecated': deprecated_msg if "deprecated" in self.getFlags(section) else "", - 'parameters': self._wikiParameter(section, function['sig_in']), - 'return': self._wikiReturn(section) if function['type'] == 'method' else ''} + "signature": signature_signal + if function["type"] == "signal" + else signature_method, + "sig_out": function["sig_out"] or "", + "category": function["category"], + "name": section, + "doc": self.getDoc(section) or "FIXME: No description available", + "async": async_msg if "async" in self.getFlags(section) else "", + "deprecated": deprecated_msg + if "deprecated" in self.getFlags(section) + else "", + "parameters": self._wikiParameter(section, function["sig_in"]), + "return": self._wikiReturn(section) + if function["type"] == "method" + else "", + } - dest = signals_part if function['type'] == "signal" else methods_part - dest.append("""\ + dest = signals_part if function["type"] == "signal" else methods_part + dest.append( + """\ == %(name)s == ''%(doc)s'' %(deprecated)s @@ -126,28 +141,28 @@ | %(parameters)s%(return)s |} -""" % completion) +""" + % completion + ) - #at this point, signals_part, and methods_part should be filled, - #we just have to place them in the right part of the template + # at this point, signals_part, and methods_part should be filled, + # we just have to place them in the right part of the template core_bridge = [] template_path = self.getTemplatePath(self.core_template) try: with open(template_path) as core_template: for line in core_template: - if line.startswith('##SIGNALS_PART##'): + if line.startswith("##SIGNALS_PART##"): core_bridge.extend(signals_part) - elif line.startswith('##METHODS_PART##'): + elif line.startswith("##METHODS_PART##"): core_bridge.extend(methods_part) - elif line.startswith('##TIMESTAMP##'): - core_bridge.append('Generated on %s' % datetime.now()) + elif line.startswith("##TIMESTAMP##"): + core_bridge.append("Generated on %s" % datetime.now()) else: - core_bridge.append(line.replace('\n', '')) + core_bridge.append(line.replace("\n", "")) except IOError: - print ("Can't open template file [%s]" % template_path) + print("Can't open template file [%s]" % template_path) sys.exit(1) - #now we write to final file + # now we write to final file self.finalWrite(self.core_dest, core_bridge) - -
--- a/sat/bridge/bridge_constructor/constructors/pb/constructor.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constructors/pb/constructor.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Sà T: a XMPP client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -25,32 +25,42 @@ CORE_TEMPLATE = "pb_core_template.py" CORE_DEST = "pb.py" CORE_FORMATS = { - 'signals': """\ + "signals": """\ def {name}(self, {args}): - {debug}self.sendSignal("{name}", {args_no_def})\n""", - } + {debug}self.sendSignal("{name}", {args_no_def})\n""" + } FRONTEND_TEMPLATE = "pb_frontend_template.py" FRONTEND_DEST = CORE_DEST FRONTEND_FORMATS = { - 'methods': """\ + "methods": """\ def {name}(self{args_comma}{args}, callback=None, errback=None): {debug}d = self.root.callRemote("{name}"{args_comma}{args_no_def}) if callback is not None: d.addCallback({callback}) if errback is None: errback = self._generic_errback - d.addErrback(errback)\n""", - } + d.addErrback(errback)\n""" + } def core_completion_signal(self, completion, function, default, arg_doc, async_): - completion['args_no_def'] = self.getArguments(function['sig_in'], name=arg_doc) - completion['debug'] = "" if not self.args.debug else 'log.debug ("%s")\n%s' % (completion['name'], 8 * ' ') + completion["args_no_def"] = self.getArguments(function["sig_in"], name=arg_doc) + completion["debug"] = ( + "" + if not self.args.debug + else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " ") + ) def frontend_completion_method(self, completion, function, default, arg_doc, async_): - completion.update({ - 'args_comma': ', ' if function['sig_in'] else '', - 'args_no_def': self.getArguments(function['sig_in'], name=arg_doc), - 'callback': 'callback' if function['sig_out'] else 'lambda dummy: callback()', - 'debug': "" if not self.args.debug else 'log.debug ("%s")\n%s' % (completion['name'], 8 * ' '), - }) + completion.update( + { + "args_comma": ", " if function["sig_in"] else "", + "args_no_def": self.getArguments(function["sig_in"], name=arg_doc), + "callback": "callback" + if function["sig_out"] + else "lambda dummy: callback()", + "debug": "" + if not self.args.debug + else 'log.debug ("%s")\n%s' % (completion["name"], 8 * " "), + } + )
--- a/sat/bridge/bridge_constructor/constructors/pb/pb_core_template.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constructors/pb/pb_core_template.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # SAT: a jabber client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -19,6 +19,7 @@ from sat.core.log import getLogger + log = getLogger(__name__) from twisted.spread import jelly, pb from twisted.internet import reactor @@ -28,17 +29,18 @@ # we monkey patch jelly to handle namedtuple ori_jelly = jelly._Jellier.jelly + def fixed_jelly(self, obj): """this method fix handling of namedtuple""" if isinstance(obj, tuple) and not obj is tuple: obj = tuple(obj) return ori_jelly(self, obj) + jelly._Jellier.jelly = fixed_jelly class PBRoot(pb.Root): - def __init__(self): self.signals_handlers = [] @@ -47,10 +49,11 @@ log.info(u"registered signal handler") def sendSignalEb(self, failure, signal_name): - log.error(u"Error while sending signal {name}: {msg}".format( - name = signal_name, - msg = failure, - )) + log.error( + u"Error while sending signal {name}: {msg}".format( + name=signal_name, msg=failure + ) + ) def sendSignal(self, name, args, kwargs): to_remove = [] @@ -66,11 +69,11 @@ log.debug(u"Removing signal handler for dead frontend") self.signals_handlers.remove(handler) + ##METHODS_PART## class Bridge(object): - def __init__(self): log.info("Init Perspective Broker...") self.root = PBRoot() @@ -85,17 +88,20 @@ def register_method(self, name, callback): log.debug("registering PB bridge method [%s]" % name) - setattr(self.root, "remote_"+name, callback) - # self.root.register_method(name, callback) + setattr(self.root, "remote_" + name, callback) + #  self.root.register_method(name, callback) def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): """Dynamically add a method to PB Bridge""" - #FIXME: doc parameter is kept only temporary, the time to remove it from calls + # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method {name} to PB bridge".format(name=name)) self.register_method(name, method) def addSignal(self, name, int_suffix, signature, doc={}): log.debug("Adding signal {name} to PB bridge".format(name=name)) - setattr(self, name, lambda *args, **kwargs: self.sendSignal(name, *args, **kwargs)) + setattr( + self, name, lambda *args, **kwargs: self.sendSignal(name, *args, **kwargs) + ) + ##SIGNALS_PART##
--- a/sat/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/bridge_constructor/constructors/pb/pb_frontend_template.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # SAT communication bridge # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -18,6 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. from sat.core.log import getLogger + log = getLogger(__name__) from sat.core import exceptions from twisted.spread import pb @@ -25,11 +26,9 @@ class SignalsHandler(pb.Referenceable): - def __getattr__(self, name): if name.startswith("remote_"): - log.debug(u"calling an unregistered signal: {name}".format( - name = name[7:])) + log.debug(u"calling an unregistered signal: {name}".format(name=name[7:])) return lambda *args, **kwargs: None else: @@ -43,13 +42,15 @@ except AttributeError: pass else: - raise exceptions.InternalError(u"{name} signal handler has been registered twice".format( - name = method_name)) + raise exceptions.InternalError( + u"{name} signal handler has been registered twice".format( + name=method_name + ) + ) setattr(self, method_name, handler) class Bridge(object): - def __init__(self): self.signals_handler = SignalsHandler() @@ -81,11 +82,11 @@ callback = errback = None if kwargs: try: - callback = kwargs.pop('callback') + callback = kwargs.pop("callback") except KeyError: pass try: - errback = kwargs.pop('errback') + errback = kwargs.pop("errback") except KeyError: pass elif len(args) >= 2 and callable(args[-1]) and callable(args[-2]): @@ -124,4 +125,5 @@ def register_signal(self, functionName, handler, iface="core"): self.signals_handler.register_signal(functionName, handler, iface) + ##METHODS_PART##
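The Perspective Broker frontend accepts callback and errback either as keyword arguments or, as the truncated hunk above suggests, as the last two positional arguments when both are callable. A hedged sketch of that argument-splitting convention (assuming callback is passed before errback, as in the generated method signatures):

def split_callbacks(args, kwargs):
    """Return (args, callback, errback) following the bridge calling convention (sketch)."""
    callback = errback = None
    args = list(args)
    if kwargs:
        callback = kwargs.pop("callback", None)
        errback = kwargs.pop("errback", None)
    elif len(args) >= 2 and callable(args[-1]) and callable(args[-2]):
        errback = args.pop()
        callback = args.pop()
    return args, callback, errback


on_success = lambda result: result  # illustrative callbacks
on_error = lambda failure: failure

# keyword style
print(split_callbacks(("jid@example.net",), {"callback": on_success, "errback": on_error}))
# positional style: the two trailing callables are peeled off
print(split_callbacks(("jid@example.net", on_success, on_error), {}))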
--- a/sat/bridge/dbus_bridge.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/dbus_bridge.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # SAT: a jabber client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -23,13 +23,14 @@ import dbus.mainloop.glib import inspect from sat.core.log import getLogger + log = getLogger(__name__) from twisted.internet.defer import Deferred from sat.core.exceptions import BridgeInitError const_INT_PREFIX = "org.goffi.SAT" # Interface prefix const_ERROR_PREFIX = const_INT_PREFIX + ".error" -const_OBJ_PATH = '/org/goffi/SAT/bridge' +const_OBJ_PATH = "/org/goffi/SAT/bridge" const_CORE_SUFFIX = ".core" const_PLUGIN_SUFFIX = ".plugin" @@ -73,11 +74,12 @@ self.args = (message, twisted_error.value.condition) except AttributeError: self.args = (message,) - self._dbus_error_name = '.'.join([const_ERROR_PREFIX, class_.__module__, class_.__name__]) + self._dbus_error_name = ".".join( + [const_ERROR_PREFIX, class_.__module__, class_.__name__] + ) class DbusObject(dbus.service.Object): - def __init__(self, bus, path): dbus.service.Object.__init__(self, bus, path) log.debug("Init DbusObject...") @@ -93,7 +95,7 @@ raise MethodNotRegistered if "callback" in kwargs: - #we must have errback too + # we must have errback too if not "errback" in kwargs: log.error("errback is missing in method call [%s]" % name) raise InternalError @@ -107,389 +109,798 @@ if not isinstance(result, Deferred): log.error("Asynchronous method [%s] does not return a Deferred." % name) raise AsyncNotDeferred - result.addCallback(lambda result: callback() if result is None else callback(result)) + result.addCallback( + lambda result: callback() if result is None else callback(result) + ) result.addErrback(lambda err: errback(GenericException(err))) else: if isinstance(result, Deferred): log.error("Synchronous method [%s] return a Deferred." 
% name) raise DeferredNotAsync return result + ### signals ### - @dbus.service.signal(const_INT_PREFIX + const_PLUGIN_SUFFIX, - signature='') + @dbus.service.signal(const_INT_PREFIX + const_PLUGIN_SUFFIX, signature="") def dummySignal(self): - #FIXME: workaround for addSignal (doesn't work if one method doensn't + # FIXME: workaround for addSignal (doesn't work if one method doensn't # already exist for plugins), probably missing some initialisation, need # further investigations pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='a{ss}sis') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="a{ss}sis") def actionNew(self, action_data, id, security_limit, profile): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='ss') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="ss") def connected(self, profile, jid_s): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='ss') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="ss") def contactDeleted(self, entity_jid, profile): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='s') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="s") def disconnected(self, profile): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='ssss') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="ssss") def entityDataUpdated(self, jid, name, value, profile): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='sdssa{ss}a{ss}sa{ss}s') - def messageNew(self, uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra, profile): + @dbus.service.signal( + const_INT_PREFIX + const_CORE_SUFFIX, signature="sdssa{ss}a{ss}sa{ss}s" + ) + def messageNew( + self, + uid, + timestamp, + from_jid, + to_jid, + message, + subject, + mess_type, + extra, + profile, + ): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='sa{ss}ass') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="sa{ss}ass") def newContact(self, contact_jid, attributes, groups, profile): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='ssss') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="ssss") def paramUpdate(self, name, value, category, profile): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='ssia{ss}s') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="ssia{ss}s") def presenceUpdate(self, entity_jid, show, priority, statuses, profile): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='sss') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="sss") def progressError(self, id, error, profile): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='sa{ss}s') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="sa{ss}s") def progressFinished(self, id, metadata, profile): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='sa{ss}s') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="sa{ss}s") def progressStarted(self, id, metadata, profile): pass - @dbus.service.signal(const_INT_PREFIX+const_CORE_SUFFIX, - signature='sss') + @dbus.service.signal(const_INT_PREFIX + const_CORE_SUFFIX, signature="sss") def subscribe(self, sub_type, entity_jid, profile): pass ### methods ### - 
@dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='a(a{ss}si)', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="a(a{ss}si)", + async_callbacks=None, + ) def actionsGet(self, profile_key="@DEFAULT@"): return self._callback("actionsGet", unicode(profile_key)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ss', out_signature='', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ss", + out_signature="", + async_callbacks=None, + ) def addContact(self, entity_jid, profile_key="@DEFAULT@"): return self._callback("addContact", unicode(entity_jid), unicode(profile_key)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='', - async_callbacks=('callback', 'errback')) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="", + async_callbacks=("callback", "errback"), + ) def asyncDeleteProfile(self, profile, callback=None, errback=None): - return self._callback("asyncDeleteProfile", unicode(profile), callback=callback, errback=errback) + return self._callback( + "asyncDeleteProfile", unicode(profile), callback=callback, errback=errback + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='sssis', out_signature='s', - async_callbacks=('callback', 'errback')) - def asyncGetParamA(self, name, category, attribute="value", security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("asyncGetParamA", unicode(name), unicode(category), unicode(attribute), security_limit, unicode(profile_key), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="sssis", + out_signature="s", + async_callbacks=("callback", "errback"), + ) + def asyncGetParamA( + self, + name, + category, + attribute="value", + security_limit=-1, + profile_key="@DEFAULT@", + callback=None, + errback=None, + ): + return self._callback( + "asyncGetParamA", + unicode(name), + unicode(category), + unicode(attribute), + security_limit, + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='sis', out_signature='a{ss}', - async_callbacks=('callback', 'errback')) - def asyncGetParamsValuesFromCategory(self, category, security_limit=-1, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("asyncGetParamsValuesFromCategory", unicode(category), security_limit, unicode(profile_key), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="sis", + out_signature="a{ss}", + async_callbacks=("callback", "errback"), + ) + def asyncGetParamsValuesFromCategory( + self, + category, + security_limit=-1, + profile_key="@DEFAULT@", + callback=None, + errback=None, + ): + return self._callback( + "asyncGetParamsValuesFromCategory", + unicode(category), + security_limit, + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ssa{ss}', out_signature='b', - async_callbacks=('callback', 'errback')) - def connect(self, profile_key="@DEFAULT@", password='', options={}, callback=None, errback=None): - return self._callback("connect", unicode(profile_key), unicode(password), options, callback=callback, 
errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ssa{ss}", + out_signature="b", + async_callbacks=("callback", "errback"), + ) + def connect( + self, + profile_key="@DEFAULT@", + password="", + options={}, + callback=None, + errback=None, + ): + return self._callback( + "connect", + unicode(profile_key), + unicode(password), + options, + callback=callback, + errback=errback, + ) + + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ss", + out_signature="", + async_callbacks=("callback", "errback"), + ) + def delContact( + self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None + ): + return self._callback( + "delContact", + unicode(entity_jid), + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ss', out_signature='', - async_callbacks=('callback', 'errback')) - def delContact(self, entity_jid, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("delContact", unicode(entity_jid), unicode(profile_key), callback=callback, errback=errback) - - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='asa(ss)bbbbbs', out_signature='(a{sa(sss)}a{sa(sss)}a{sa(sss)})', - async_callbacks=('callback', 'errback')) - def discoFindByFeatures(self, namespaces, identities, bare_jid=False, service=True, roster=True, own_jid=True, local_device=False, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoFindByFeatures", namespaces, identities, bare_jid, service, roster, own_jid, local_device, unicode(profile_key), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="asa(ss)bbbbbs", + out_signature="(a{sa(sss)}a{sa(sss)}a{sa(sss)})", + async_callbacks=("callback", "errback"), + ) + def discoFindByFeatures( + self, + namespaces, + identities, + bare_jid=False, + service=True, + roster=True, + own_jid=True, + local_device=False, + profile_key=u"@DEFAULT@", + callback=None, + errback=None, + ): + return self._callback( + "discoFindByFeatures", + namespaces, + identities, + bare_jid, + service, + roster, + own_jid, + local_device, + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ssbs', out_signature='(asa(sss)a{sa(a{ss}as)})', - async_callbacks=('callback', 'errback')) - def discoInfos(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoInfos", unicode(entity_jid), unicode(node), use_cache, unicode(profile_key), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ssbs", + out_signature="(asa(sss)a{sa(a{ss}as)})", + async_callbacks=("callback", "errback"), + ) + def discoInfos( + self, + entity_jid, + node=u"", + use_cache=True, + profile_key=u"@DEFAULT@", + callback=None, + errback=None, + ): + return self._callback( + "discoInfos", + unicode(entity_jid), + unicode(node), + use_cache, + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ssbs', out_signature='a(sss)', - async_callbacks=('callback', 'errback')) - def discoItems(self, entity_jid, node=u'', use_cache=True, profile_key=u"@DEFAULT@", callback=None, errback=None): - return self._callback("discoItems", 
unicode(entity_jid), unicode(node), use_cache, unicode(profile_key), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ssbs", + out_signature="a(sss)", + async_callbacks=("callback", "errback"), + ) + def discoItems( + self, + entity_jid, + node=u"", + use_cache=True, + profile_key=u"@DEFAULT@", + callback=None, + errback=None, + ): + return self._callback( + "discoItems", + unicode(entity_jid), + unicode(node), + use_cache, + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='', - async_callbacks=('callback', 'errback')) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="", + async_callbacks=("callback", "errback"), + ) def disconnect(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("disconnect", unicode(profile_key), callback=callback, errback=errback) + return self._callback( + "disconnect", unicode(profile_key), callback=callback, errback=errback + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ss', out_signature='s', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ss", + out_signature="s", + async_callbacks=None, + ) def getConfig(self, section, name): return self._callback("getConfig", unicode(section), unicode(name)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='a(sa{ss}as)', - async_callbacks=('callback', 'errback')) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="a(sa{ss}as)", + async_callbacks=("callback", "errback"), + ) def getContacts(self, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("getContacts", unicode(profile_key), callback=callback, errback=errback) + return self._callback( + "getContacts", unicode(profile_key), callback=callback, errback=errback + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ss', out_signature='as', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ss", + out_signature="as", + async_callbacks=None, + ) def getContactsFromGroup(self, group, profile_key="@DEFAULT@"): - return self._callback("getContactsFromGroup", unicode(group), unicode(profile_key)) + return self._callback( + "getContactsFromGroup", unicode(group), unicode(profile_key) + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='asass', out_signature='a{sa{ss}}', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="asass", + out_signature="a{sa{ss}}", + async_callbacks=None, + ) def getEntitiesData(self, jids, keys, profile): return self._callback("getEntitiesData", jids, keys, unicode(profile)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='sass', out_signature='a{ss}', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="sass", + out_signature="a{ss}", + async_callbacks=None, + ) def getEntityData(self, jid, keys, profile): return self._callback("getEntityData", unicode(jid), keys, unicode(profile)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='a{sa{ss}}', - async_callbacks=('callback', 'errback')) + @dbus.service.method( + 
const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="a{sa{ss}}", + async_callbacks=("callback", "errback"), + ) def getFeatures(self, profile_key, callback=None, errback=None): - return self._callback("getFeatures", unicode(profile_key), callback=callback, errback=errback) + return self._callback( + "getFeatures", unicode(profile_key), callback=callback, errback=errback + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ss', out_signature='s', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ss", + out_signature="s", + async_callbacks=None, + ) def getMainResource(self, contact_jid, profile_key="@DEFAULT@"): - return self._callback("getMainResource", unicode(contact_jid), unicode(profile_key)) + return self._callback( + "getMainResource", unicode(contact_jid), unicode(profile_key) + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ssss', out_signature='s', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ssss", + out_signature="s", + async_callbacks=None, + ) def getParamA(self, name, category, attribute="value", profile_key="@DEFAULT@"): - return self._callback("getParamA", unicode(name), unicode(category), unicode(attribute), unicode(profile_key)) + return self._callback( + "getParamA", + unicode(name), + unicode(category), + unicode(attribute), + unicode(profile_key), + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='', out_signature='as', - async_callbacks=None) - def getParamsCategories(self, ): - return self._callback("getParamsCategories", ) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="", + out_signature="as", + async_callbacks=None, + ) + def getParamsCategories(self,): + return self._callback("getParamsCategories") - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='iss', out_signature='s', - async_callbacks=('callback', 'errback')) - def getParamsUI(self, security_limit=-1, app='', profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("getParamsUI", security_limit, unicode(app), unicode(profile_key), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="iss", + out_signature="s", + async_callbacks=("callback", "errback"), + ) + def getParamsUI( + self, + security_limit=-1, + app="", + profile_key="@DEFAULT@", + callback=None, + errback=None, + ): + return self._callback( + "getParamsUI", + security_limit, + unicode(app), + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='a{sa{s(sia{ss})}}', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="a{sa{s(sia{ss})}}", + async_callbacks=None, + ) def getPresenceStatuses(self, profile_key="@DEFAULT@"): return self._callback("getPresenceStatuses", unicode(profile_key)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='', out_signature='', - async_callbacks=('callback', 'errback')) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="", + out_signature="", + async_callbacks=("callback", "errback"), + ) def getReady(self, callback=None, errback=None): return self._callback("getReady", callback=callback, errback=errback) - 
@dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='', out_signature='s', - async_callbacks=None) - def getVersion(self, ): - return self._callback("getVersion", ) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="", + out_signature="s", + async_callbacks=None, + ) + def getVersion(self,): + return self._callback("getVersion") - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='a{ss}', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="a{ss}", + async_callbacks=None, + ) def getWaitingSub(self, profile_key="@DEFAULT@"): return self._callback("getWaitingSub", unicode(profile_key)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ssiba{ss}s', out_signature='a(sdssa{ss}a{ss}sa{ss})', - async_callbacks=('callback', 'errback')) - def historyGet(self, from_jid, to_jid, limit, between=True, filters='', profile="@NONE@", callback=None, errback=None): - return self._callback("historyGet", unicode(from_jid), unicode(to_jid), limit, between, filters, unicode(profile), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ssiba{ss}s", + out_signature="a(sdssa{ss}a{ss}sa{ss})", + async_callbacks=("callback", "errback"), + ) + def historyGet( + self, + from_jid, + to_jid, + limit, + between=True, + filters="", + profile="@NONE@", + callback=None, + errback=None, + ): + return self._callback( + "historyGet", + unicode(from_jid), + unicode(to_jid), + limit, + between, + filters, + unicode(profile), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='b', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="b", + async_callbacks=None, + ) def isConnected(self, profile_key="@DEFAULT@"): return self._callback("isConnected", unicode(profile_key)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='sa{ss}s', out_signature='a{ss}', - async_callbacks=('callback', 'errback')) - def launchAction(self, callback_id, data, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("launchAction", unicode(callback_id), data, unicode(profile_key), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="sa{ss}s", + out_signature="a{ss}", + async_callbacks=("callback", "errback"), + ) + def launchAction( + self, callback_id, data, profile_key="@DEFAULT@", callback=None, errback=None + ): + return self._callback( + "launchAction", + unicode(callback_id), + data, + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='b', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="b", + async_callbacks=None, + ) def loadParamsTemplate(self, filename): return self._callback("loadParamsTemplate", unicode(filename)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ss', out_signature='s', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ss", + out_signature="s", + async_callbacks=None, + ) def menuHelpGet(self, menu_id, language): return self._callback("menuHelpGet", 
unicode(menu_id), unicode(language)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='sasa{ss}is', out_signature='a{ss}', - async_callbacks=('callback', 'errback')) - def menuLaunch(self, menu_type, path, data, security_limit, profile_key, callback=None, errback=None): - return self._callback("menuLaunch", unicode(menu_type), path, data, security_limit, unicode(profile_key), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="sasa{ss}is", + out_signature="a{ss}", + async_callbacks=("callback", "errback"), + ) + def menuLaunch( + self, + menu_type, + path, + data, + security_limit, + profile_key, + callback=None, + errback=None, + ): + return self._callback( + "menuLaunch", + unicode(menu_type), + path, + data, + security_limit, + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='si', out_signature='a(ssasasa{ss})', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="si", + out_signature="a(ssasasa{ss})", + async_callbacks=None, + ) def menusGet(self, language, security_limit): return self._callback("menusGet", unicode(language), security_limit) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='sa{ss}a{ss}sa{ss}s', out_signature='', - async_callbacks=('callback', 'errback')) - def messageSend(self, to_jid, message, subject={}, mess_type="auto", extra={}, profile_key="@NONE@", callback=None, errback=None): - return self._callback("messageSend", unicode(to_jid), message, subject, unicode(mess_type), extra, unicode(profile_key), callback=callback, errback=errback) - - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='', out_signature='a{ss}', - async_callbacks=None) - def namespacesGet(self, ): - return self._callback("namespacesGet", ) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="sa{ss}a{ss}sa{ss}s", + out_signature="", + async_callbacks=("callback", "errback"), + ) + def messageSend( + self, + to_jid, + message, + subject={}, + mess_type="auto", + extra={}, + profile_key="@NONE@", + callback=None, + errback=None, + ): + return self._callback( + "messageSend", + unicode(to_jid), + message, + subject, + unicode(mess_type), + extra, + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='sis', out_signature='', - async_callbacks=None) - def paramsRegisterApp(self, xml, security_limit=-1, app=''): - return self._callback("paramsRegisterApp", unicode(xml), security_limit, unicode(app)) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="", + out_signature="a{ss}", + async_callbacks=None, + ) + def namespacesGet(self,): + return self._callback("namespacesGet") + + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="sis", + out_signature="", + async_callbacks=None, + ) + def paramsRegisterApp(self, xml, security_limit=-1, app=""): + return self._callback( + "paramsRegisterApp", unicode(xml), security_limit, unicode(app) + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='sss', out_signature='', - async_callbacks=('callback', 'errback')) - def profileCreate(self, profile, password='', component='', callback=None, errback=None): - return self._callback("profileCreate", unicode(profile), unicode(password), unicode(component), 
callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="sss", + out_signature="", + async_callbacks=("callback", "errback"), + ) + def profileCreate( + self, profile, password="", component="", callback=None, errback=None + ): + return self._callback( + "profileCreate", + unicode(profile), + unicode(password), + unicode(component), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='b', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="b", + async_callbacks=None, + ) def profileIsSessionStarted(self, profile_key="@DEFAULT@"): return self._callback("profileIsSessionStarted", unicode(profile_key)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='s', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="s", + async_callbacks=None, + ) def profileNameGet(self, profile_key="@DEFAULT@"): return self._callback("profileNameGet", unicode(profile_key)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="", + async_callbacks=None, + ) def profileSetDefault(self, profile): return self._callback("profileSetDefault", unicode(profile)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ss', out_signature='b', - async_callbacks=('callback', 'errback')) - def profileStartSession(self, password='', profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("profileStartSession", unicode(password), unicode(profile_key), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ss", + out_signature="b", + async_callbacks=("callback", "errback"), + ) + def profileStartSession( + self, password="", profile_key="@DEFAULT@", callback=None, errback=None + ): + return self._callback( + "profileStartSession", + unicode(password), + unicode(profile_key), + callback=callback, + errback=errback, + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='bb', out_signature='as', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="bb", + out_signature="as", + async_callbacks=None, + ) def profilesListGet(self, clients=True, components=False): return self._callback("profilesListGet", clients, components) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ss', out_signature='a{ss}', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ss", + out_signature="a{ss}", + async_callbacks=None, + ) def progressGet(self, id, profile): return self._callback("progressGet", unicode(id), unicode(profile)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='a{sa{sa{ss}}}', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="a{sa{sa{ss}}}", + async_callbacks=None, + ) def progressGetAll(self, profile): return self._callback("progressGetAll", unicode(profile)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='a{sa{sa{ss}}}', - 
async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="a{sa{sa{ss}}}", + async_callbacks=None, + ) def progressGetAllMetadata(self, profile): return self._callback("progressGetAllMetadata", unicode(profile)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='b', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="b", + async_callbacks=None, + ) def saveParamsTemplate(self, filename): return self._callback("saveParamsTemplate", unicode(filename)) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='s', out_signature='a{ss}', - async_callbacks=('callback', 'errback')) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="s", + out_signature="a{ss}", + async_callbacks=("callback", "errback"), + ) def sessionInfosGet(self, profile_key, callback=None, errback=None): - return self._callback("sessionInfosGet", unicode(profile_key), callback=callback, errback=errback) + return self._callback( + "sessionInfosGet", unicode(profile_key), callback=callback, errback=errback + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='sssis', out_signature='', - async_callbacks=None) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="sssis", + out_signature="", + async_callbacks=None, + ) def setParam(self, name, value, category, security_limit=-1, profile_key="@DEFAULT@"): - return self._callback("setParam", unicode(name), unicode(value), unicode(category), security_limit, unicode(profile_key)) + return self._callback( + "setParam", + unicode(name), + unicode(value), + unicode(category), + security_limit, + unicode(profile_key), + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ssa{ss}s', out_signature='', - async_callbacks=None) - def setPresence(self, to_jid='', show='', statuses={}, profile_key="@DEFAULT@"): - return self._callback("setPresence", unicode(to_jid), unicode(show), statuses, unicode(profile_key)) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ssa{ss}s", + out_signature="", + async_callbacks=None, + ) + def setPresence(self, to_jid="", show="", statuses={}, profile_key="@DEFAULT@"): + return self._callback( + "setPresence", unicode(to_jid), unicode(show), statuses, unicode(profile_key) + ) + + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="sss", + out_signature="", + async_callbacks=None, + ) + def subscription(self, sub_type, entity, profile_key="@DEFAULT@"): + return self._callback( + "subscription", unicode(sub_type), unicode(entity), unicode(profile_key) + ) - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='sss', out_signature='', - async_callbacks=None) - def subscription(self, sub_type, entity, profile_key="@DEFAULT@"): - return self._callback("subscription", unicode(sub_type), unicode(entity), unicode(profile_key)) - - @dbus.service.method(const_INT_PREFIX+const_CORE_SUFFIX, - in_signature='ssass', out_signature='', - async_callbacks=('callback', 'errback')) - def updateContact(self, entity_jid, name, groups, profile_key="@DEFAULT@", callback=None, errback=None): - return self._callback("updateContact", unicode(entity_jid), unicode(name), groups, unicode(profile_key), callback=callback, errback=errback) + @dbus.service.method( + const_INT_PREFIX + const_CORE_SUFFIX, + in_signature="ssass", + 
out_signature="", + async_callbacks=("callback", "errback"), + ) + def updateContact( + self, + entity_jid, + name, + groups, + profile_key="@DEFAULT@", + callback=None, + errback=None, + ): + return self._callback( + "updateContact", + unicode(entity_jid), + unicode(name), + groups, + unicode(profile_key), + callback=callback, + errback=errback, + ) def __attributes(self, in_sign): """Return arguments to user given a in_sign @@ -499,22 +910,24 @@ idx = 0 attr = [] while i < len(in_sign): - if in_sign[i] not in ['b', 'y', 'n', 'i', 'x', 'q', 'u', 't', 'd', 's', 'a']: + if in_sign[i] not in ["b", "y", "n", "i", "x", "q", "u", "t", "d", "s", "a"]: raise ParseError("Unmanaged attribute type [%c]" % in_sign[i]) attr.append("arg_%i" % idx) idx += 1 - if in_sign[i] == 'a': + if in_sign[i] == "a": i += 1 - if in_sign[i] != '{' and in_sign[i] != '(': # FIXME: must manage tuples out of arrays + if ( + in_sign[i] != "{" and in_sign[i] != "(" + ): # FIXME: must manage tuples out of arrays i += 1 continue # we have a simple type for the array opening_car = in_sign[i] - assert(opening_car in ['{', '(']) - closing_car = '}' if opening_car == '{' else ')' + assert opening_car in ["{", "("] + closing_car = "}" if opening_car == "{" else ")" opening_count = 1 - while (True): # we have a dict or a list of tuples + while True: # we have a dict or a list of tuples i += 1 if i >= len(in_sign): raise ParseError("missing }") @@ -535,47 +948,80 @@ _defaults = list(inspect_args.defaults or []) if inspect.ismethod(method): - #if we have a method, we don't want the first argument (usually 'self') - del(_arguments[0]) + # if we have a method, we don't want the first argument (usually 'self') + del (_arguments[0]) - #first arguments are for the _callback method - arguments_callback = ', '.join([repr(name)] + ((_arguments + ['callback=callback', 'errback=errback']) if async else _arguments)) + # first arguments are for the _callback method + arguments_callback = ", ".join( + [repr(name)] + + ( + (_arguments + ["callback=callback", "errback=errback"]) + if async + else _arguments + ) + ) if async: - _arguments.extend(['callback', 'errback']) + _arguments.extend(["callback", "errback"]) _defaults.extend([None, None]) - #now we create a second list with default values + # now we create a second list with default values for i in range(1, len(_defaults) + 1): _arguments[-i] = "%s = %s" % (_arguments[-i], repr(_defaults[-i])) - arguments_defaults = ', '.join(_arguments) + arguments_defaults = ", ".join(_arguments) - code = compile('def %(name)s (self,%(arguments_defaults)s): return self._callback(%(arguments_callback)s)' % - {'name': name, 'arguments_defaults': arguments_defaults, 'arguments_callback': arguments_callback}, '<DBus bridge>', 'exec') - exec (code) # FIXME: to the same thing in a cleaner way, without compile/exec + code = compile( + "def %(name)s (self,%(arguments_defaults)s): return self._callback(%(arguments_callback)s)" + % { + "name": name, + "arguments_defaults": arguments_defaults, + "arguments_callback": arguments_callback, + }, + "<DBus bridge>", + "exec", + ) + exec(code) # FIXME: to the same thing in a cleaner way, without compile/exec method = locals()[name] - async_callbacks = ('callback', 'errback') if async else None - setattr(DbusObject, name, dbus.service.method( - const_INT_PREFIX + int_suffix, in_signature=in_sign, out_signature=out_sign, - async_callbacks=async_callbacks)(method)) + async_callbacks = ("callback", "errback") if async else None + setattr( + DbusObject, + name, + 
dbus.service.method( + const_INT_PREFIX + int_suffix, + in_signature=in_sign, + out_signature=out_sign, + async_callbacks=async_callbacks, + )(method), + ) function = getattr(self, name) - func_table = self._dbus_class_table[self.__class__.__module__ + '.' + self.__class__.__name__][function._dbus_interface] + func_table = self._dbus_class_table[ + self.__class__.__module__ + "." + self.__class__.__name__ + ][function._dbus_interface] func_table[function.__name__] = function # Needed for introspection def addSignal(self, name, int_suffix, signature, doc={}): """Dynamically add a signal to Dbus Bridge""" - attributes = ', '.join(self.__attributes(signature)) - #TODO: use doc parameter to name attributes + attributes = ", ".join(self.__attributes(signature)) + # TODO: use doc parameter to name attributes - #code = compile ('def '+name+' (self,'+attributes+'): log.debug ("'+name+' signal")', '<DBus bridge>','exec') #XXX: the log.debug is too annoying with xmllog - code = compile('def ' + name + ' (self,' + attributes + '): pass', '<DBus bridge>', 'exec') - exec (code) + # code = compile ('def '+name+' (self,'+attributes+'): log.debug ("'+name+' signal")', '<DBus bridge>','exec') #XXX: the log.debug is too annoying with xmllog + code = compile( + "def " + name + " (self," + attributes + "): pass", "<DBus bridge>", "exec" + ) + exec(code) signal = locals()[name] - setattr(DbusObject, name, dbus.service.signal( - const_INT_PREFIX + int_suffix, signature=signature)(signal)) + setattr( + DbusObject, + name, + dbus.service.signal(const_INT_PREFIX + int_suffix, signature=signature)( + signal + ), + ) function = getattr(self, name) - func_table = self._dbus_class_table[self.__class__.__module__ + '.' + self.__class__.__name__][function._dbus_interface] + func_table = self._dbus_class_table[ + self.__class__.__module__ + "." 
+ self.__class__.__name__ + ][function._dbus_interface] func_table[function.__name__] = function # Needed for introspection @@ -586,8 +1032,12 @@ try: self.session_bus = dbus.SessionBus() except dbus.DBusException as e: - if e._dbus_error_name == 'org.freedesktop.DBus.Error.NotSupported': - log.error(_(u"D-Bus is not launched, please see README to see instructions on how to launch it")) + if e._dbus_error_name == "org.freedesktop.DBus.Error.NotSupported": + log.error( + _( + u"D-Bus is not launched, please see README to see instructions on how to launch it" + ) + ) raise BridgeInitError self.dbus_name = dbus.service.BusName(const_INT_PREFIX, self.session_bus) self.dbus_bridge = DbusObject(self.session_bus, const_OBJ_PATH) @@ -607,8 +1057,21 @@ def entityDataUpdated(self, jid, name, value, profile): self.dbus_bridge.entityDataUpdated(jid, name, value, profile) - def messageNew(self, uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra, profile): - self.dbus_bridge.messageNew(uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra, profile) + def messageNew( + self, + uid, + timestamp, + from_jid, + to_jid, + message, + subject, + mess_type, + extra, + profile, + ): + self.dbus_bridge.messageNew( + uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra, profile + ) def newContact(self, contact_jid, attributes, groups, profile): self.dbus_bridge.newContact(contact_jid, attributes, groups, profile) @@ -637,11 +1100,11 @@ def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): """Dynamically add a method to Dbus Bridge""" - #FIXME: doc parameter is kept only temporary, the time to remove it from calls + # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method [%s] to DBus bridge" % name) self.dbus_bridge.addMethod(name, int_suffix, in_sign, out_sign, method, async) self.register_method(name, method) def addSignal(self, name, int_suffix, signature, doc={}): self.dbus_bridge.addSignal(name, int_suffix, signature, doc) - setattr(Bridge, name, getattr(self.dbus_bridge, name)) \ No newline at end of file + setattr(Bridge, name, getattr(self.dbus_bridge, name))
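The addMethod/addSignal code reformatted above builds the bridge API at runtime: it compiles a small function source with compile()/exec and attaches the result to DbusObject with setattr before decorating it as a D-Bus method. The standalone sketch below illustrates only that compile/exec/setattr pattern; the Bridge class, the ping method and its payload argument are hypothetical, and the dbus.service.method decoration is left out so the snippet runs without a session bus.

# Simplified illustration of the dynamic method construction used by
# DbusObject.addMethod; no D-Bus registration is performed here.
class Bridge(object):
    def _callback(self, name, *args, **kwargs):
        # stand-in for the real dispatcher: just echo what was called
        return (name, args, kwargs)

def add_method(cls, name, arguments):
    # build the function source, compile it, then attach it to the class,
    # mirroring the compile/exec/setattr sequence shown in the diff
    src = "def %s(self, %s): return self._callback(%r, %s)" % (
        name, arguments, name, arguments)
    namespace = {}
    exec(compile(src, "<dynamic bridge>", "exec"), namespace)
    setattr(cls, name, namespace[name])

add_method(Bridge, "ping", "payload")
print(Bridge().ping(u"hello"))  # -> ('ping', (u'hello',), {}) under Python 2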
--- a/sat/bridge/pb.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/bridge/pb.py Wed Jun 27 20:14:46 2018 +0200 @@ -1,5 +1,5 @@ #!/usr/bin/env python2 -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # SAT: a jabber client # Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org) @@ -19,6 +19,7 @@ from sat.core.log import getLogger + log = getLogger(__name__) from twisted.spread import jelly, pb from twisted.internet import reactor @@ -28,17 +29,18 @@ # we monkey patch jelly to handle namedtuple ori_jelly = jelly._Jellier.jelly + def fixed_jelly(self, obj): """this method fix handling of namedtuple""" if isinstance(obj, tuple) and not obj is tuple: obj = tuple(obj) return ori_jelly(self, obj) + jelly._Jellier.jelly = fixed_jelly class PBRoot(pb.Root): - def __init__(self): self.signals_handlers = [] @@ -47,10 +49,11 @@ log.info(u"registered signal handler") def sendSignalEb(self, failure, signal_name): - log.error(u"Error while sending signal {name}: {msg}".format( - name = signal_name, - msg = failure, - )) + log.error( + u"Error while sending signal {name}: {msg}".format( + name=signal_name, msg=failure + ) + ) def sendSignal(self, name, args, kwargs): to_remove = [] @@ -66,11 +69,11 @@ log.debug(u"Removing signal handler for dead frontend") self.signals_handlers.remove(handler) + ##METHODS_PART## class Bridge(object): - def __init__(self): log.info("Init Perspective Broker...") self.root = PBRoot() @@ -85,18 +88,20 @@ def register_method(self, name, callback): log.debug("registering PB bridge method [%s]" % name) - setattr(self.root, "remote_"+name, callback) - # self.root.register_method(name, callback) + setattr(self.root, "remote_" + name, callback) + #  self.root.register_method(name, callback) def addMethod(self, name, int_suffix, in_sign, out_sign, method, async=False, doc={}): """Dynamically add a method to PB Bridge""" - #FIXME: doc parameter is kept only temporary, the time to remove it from calls + # FIXME: doc parameter is kept only temporary, the time to remove it from calls log.debug("Adding method {name} to PB bridge".format(name=name)) self.register_method(name, method) def addSignal(self, name, int_suffix, signature, doc={}): log.debug("Adding signal {name} to PB bridge".format(name=name)) - setattr(self, name, lambda *args, **kwargs: self.sendSignal(name, *args, **kwargs)) + setattr( + self, name, lambda *args, **kwargs: self.sendSignal(name, *args, **kwargs) + ) def actionNew(self, action_data, id, security_limit, profile): self.sendSignal("actionNew", action_data, id, security_limit, profile) @@ -113,8 +118,30 @@ def entityDataUpdated(self, jid, name, value, profile): self.sendSignal("entityDataUpdated", jid, name, value, profile) - def messageNew(self, uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra, profile): - self.sendSignal("messageNew", uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra, profile) + def messageNew( + self, + uid, + timestamp, + from_jid, + to_jid, + message, + subject, + mess_type, + extra, + profile, + ): + self.sendSignal( + "messageNew", + uid, + timestamp, + from_jid, + to_jid, + message, + subject, + mess_type, + extra, + profile, + ) def newContact(self, contact_jid, attributes, groups, profile): self.sendSignal("newContact", contact_jid, attributes, groups, profile)
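The fixed_jelly monkey patch above exists because twisted.spread.jelly does not handle namedtuple instances; downgrading them to plain tuples before jellying avoids the problem. Assuming Twisted is installed, a quick standalone check of the patched behaviour could look like the sketch below (the Point namedtuple is only an example):

from collections import namedtuple
from twisted.spread import jelly

# same patch as in sat/bridge/pb.py: coerce namedtuples to plain tuples
ori_jelly = jelly._Jellier.jelly

def fixed_jelly(self, obj):
    """this method fix handling of namedtuple"""
    if isinstance(obj, tuple) and not obj is tuple:
        obj = tuple(obj)
    return ori_jelly(self, obj)

jelly._Jellier.jelly = fixed_jelly

Point = namedtuple("Point", "x y")
print(jelly.jelly(Point(x=1, y=2)))  # the namedtuple is jellied as a plain tuple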
--- a/sat/core/constants.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/core/constants.py Wed Jun 27 20:14:46 2018 +0200 @@ -28,23 +28,24 @@ class Const(object): ## Application ## - APP_NAME = u'Salut à Toi' - APP_NAME_SHORT = u'Sà T' - APP_NAME_FILE = u'sat' - APP_NAME_FULL = u'{name_short} ({name})'.format(name_short=APP_NAME_SHORT, - name=APP_NAME) - APP_VERSION = sat.__version__ # Please add 'D' at the end of version in sat/VERSION for dev versions - APP_RELEASE_NAME = u'La Commune' - APP_URL = u'https://salut-a-toi.org' - + APP_NAME = u"Salut à Toi" + APP_NAME_SHORT = u"Sà T" + APP_NAME_FILE = u"sat" + APP_NAME_FULL = u"{name_short} ({name})".format( + name_short=APP_NAME_SHORT, name=APP_NAME + ) + APP_VERSION = ( + sat.__version__ + ) # Please add 'D' at the end of version in sat/VERSION for dev versions + APP_RELEASE_NAME = u"La Commune" + APP_URL = u"https://salut-a-toi.org" ## Runtime ## PLUGIN_EXT = "py" - HISTORY_SKIP = u'skip' + HISTORY_SKIP = u"skip" ## Main config ## - DEFAULT_BRIDGE = 'dbus' - + DEFAULT_BRIDGE = "dbus" ## Protocol ## XMPP_C2S_PORT = 5222 @@ -53,9 +54,8 @@ # default port used on Prosody, may differ on other servers XMPP_COMPONENT_PORT = 5347 - ## Parameters ## - NO_SECURITY_LIMIT = -1 # FIXME: to rename + NO_SECURITY_LIMIT = -1 #  FIXME: to rename SECURITY_LIMIT_MAX = 0 INDIVIDUAL = "individual" GENERAL = "general" @@ -67,9 +67,9 @@ FORCE_SERVER_PARAM = "Force server" FORCE_PORT_PARAM = "Force port" # Parameters related to encryption - PROFILE_PASS_PATH = ('General', 'Password') - MEMORY_CRYPTO_NAMESPACE = 'crypto' # for the private persistent binary dict - MEMORY_CRYPTO_KEY = 'personal_key' + PROFILE_PASS_PATH = ("General", "Password") + MEMORY_CRYPTO_NAMESPACE = "crypto" # for the private persistent binary dict + MEMORY_CRYPTO_KEY = "personal_key" # Parameters for static blog pages # FIXME: blog constants should not be in core constants STATIC_BLOG_KEY = "Blog page" @@ -78,7 +78,6 @@ STATIC_BLOG_PARAM_KEYWORDS = "Keywords" STATIC_BLOG_PARAM_DESCRIPTION = "Description" - ## Menus ## MENU_GLOBAL = "GLOBAL" MENU_ROOM = "ROOM" @@ -88,62 +87,64 @@ MENU_ROSTER_GROUP_CONTEXT = "MENU_ROSTER_GROUP_CONTEXT" MENU_ROOM_OCCUPANT_CONTEXT = "MENU_ROOM_OCCUPANT_CONTEXT" - ## Profile and entities ## - PROF_KEY_NONE = '@NONE@' - PROF_KEY_DEFAULT = '@DEFAULT@' - PROF_KEY_ALL = '@ALL@' - ENTITY_ALL = '@ALL@' - ENTITY_ALL_RESOURCES = '@ALL_RESOURCES@' - ENTITY_MAIN_RESOURCE = '@MAIN_RESOURCE@' - ENTITY_CAP_HASH = 'CAP_HASH' - ENTITY_TYPE = 'TYPE' - + PROF_KEY_NONE = "@NONE@" + PROF_KEY_DEFAULT = "@DEFAULT@" + PROF_KEY_ALL = "@ALL@" + ENTITY_ALL = "@ALL@" + ENTITY_ALL_RESOURCES = "@ALL_RESOURCES@" + ENTITY_MAIN_RESOURCE = "@MAIN_RESOURCE@" + ENTITY_CAP_HASH = "CAP_HASH" + ENTITY_TYPE = "TYPE" ## Roster jids selection ## - PUBLIC = 'PUBLIC' - ALL = 'ALL' # ALL means all known contacts, while PUBLIC means everybody, known or not - GROUP = 'GROUP' - JID = 'JID' - + PUBLIC = "PUBLIC" + ALL = ( + "ALL" + ) # ALL means all known contacts, while PUBLIC means everybody, known or not + GROUP = "GROUP" + JID = "JID" ## Messages ## - MESS_TYPE_INFO = 'info' - MESS_TYPE_CHAT = 'chat' - MESS_TYPE_ERROR = 'error' - MESS_TYPE_GROUPCHAT = 'groupchat' - MESS_TYPE_HEADLINE = 'headline' - MESS_TYPE_NORMAL = 'normal' - MESS_TYPE_AUTO = 'auto' # magic value to let the backend guess the type - MESS_TYPE_STANDARD = (MESS_TYPE_CHAT, MESS_TYPE_ERROR, MESS_TYPE_GROUPCHAT, MESS_TYPE_HEADLINE, MESS_TYPE_NORMAL) + MESS_TYPE_INFO = "info" + MESS_TYPE_CHAT = "chat" + MESS_TYPE_ERROR = "error" + 
MESS_TYPE_GROUPCHAT = "groupchat" + MESS_TYPE_HEADLINE = "headline" + MESS_TYPE_NORMAL = "normal" + MESS_TYPE_AUTO = "auto" # magic value to let the backend guess the type + MESS_TYPE_STANDARD = ( + MESS_TYPE_CHAT, + MESS_TYPE_ERROR, + MESS_TYPE_GROUPCHAT, + MESS_TYPE_HEADLINE, + MESS_TYPE_NORMAL, + ) MESS_TYPE_ALL = MESS_TYPE_STANDARD + (MESS_TYPE_INFO, MESS_TYPE_AUTO) MESS_EXTRA_INFO = "info_type" - ## Chat ## - CHAT_ONE2ONE = 'one2one' - CHAT_GROUP = 'group' - + CHAT_ONE2ONE = "one2one" + CHAT_GROUP = "group" ## Presence ## - PRESENCE_UNAVAILABLE = 'unavailable' - PRESENCE_SHOW_AWAY = 'away' - PRESENCE_SHOW_CHAT = 'chat' - PRESENCE_SHOW_DND = 'dnd' - PRESENCE_SHOW_XA = 'xa' - PRESENCE_SHOW = 'show' - PRESENCE_STATUSES = 'statuses' - PRESENCE_STATUSES_DEFAULT = 'default' - PRESENCE_PRIORITY = 'priority' - + PRESENCE_UNAVAILABLE = "unavailable" + PRESENCE_SHOW_AWAY = "away" + PRESENCE_SHOW_CHAT = "chat" + PRESENCE_SHOW_DND = "dnd" + PRESENCE_SHOW_XA = "xa" + PRESENCE_SHOW = "show" + PRESENCE_STATUSES = "statuses" + PRESENCE_STATUSES_DEFAULT = "default" + PRESENCE_PRIORITY = "priority" ## Common namespaces ## - NS_XML = 'http://www.w3.org/XML/1998/namespace' - NS_CLIENT = 'jabber:client' - NS_FORWARD = 'urn:xmpp:forward:0' - NS_DELAY = 'urn:xmpp:delay' - NS_XHTML = 'http://www.w3.org/1999/xhtml' + NS_XML = "http://www.w3.org/XML/1998/namespace" + NS_CLIENT = "jabber:client" + NS_FORWARD = "urn:xmpp:forward:0" + NS_DELAY = "urn:xmpp:delay" + NS_XHTML = "http://www.w3.org/1999/xhtml" ## Common XPath ## @@ -153,23 +154,24 @@ ## Directories ## # directory for components specific data - COMPONENTS_DIR = u'components' - CACHE_DIR = u'cache' + COMPONENTS_DIR = u"components" + CACHE_DIR = u"cache" # files in file dir are stored for long term # files dir is global, i.e. for all profiles - FILES_DIR = u'files' + FILES_DIR = u"files" # FILES_LINKS_DIR is a directory where files owned by a specific profile # are linked to the global files directory. This way the directory can be - # shared per profiles while keeping global directory where identical files + #  shared per profiles while keeping global directory where identical files # shared between different profiles are not duplicated. - FILES_LINKS_DIR = u'files_links' + FILES_LINKS_DIR = u"files_links" # FILES_TMP_DIR is where profile's partially transfered files are put. 
# Once transfer is completed, they are moved to FILES_DIR - FILES_TMP_DIR = u'files_tmp' - + FILES_TMP_DIR = u"files_tmp" ## Configuration ## - if BaseDirectory: # skipped when xdg module is not available (should not happen in backend) + if ( + BaseDirectory + ): # skipped when xdg module is not available (should not happen in backend) if "org.goffi.cagou.cagou" in BaseDirectory.__file__: # FIXME: hack to make config read from the right location on Android # TODO: fix it in a more proper way @@ -177,56 +179,71 @@ # we need to use Android API to get downloads directory import os.path from jnius import autoclass + Environment = autoclass("android.os.Environment") BaseDirectory = None DEFAULT_CONFIG = { - 'local_dir': '/data/data/org.goffi.cagou.cagou/app', - 'media_dir': '/data/data/org.goffi.cagou.cagou/files/app/media', + "local_dir": "/data/data/org.goffi.cagou.cagou/app", + "media_dir": "/data/data/org.goffi.cagou.cagou/files/app/media", # FIXME: temporary location for downloads, need to call API properly - 'downloads_dir': os.path.join(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS).getAbsolutePath(), APP_NAME_FILE), - 'pid_dir': '%(local_dir)s', - 'log_dir': '%(local_dir)s', + "downloads_dir": os.path.join( + Environment.getExternalStoragePublicDirectory( + Environment.DIRECTORY_DOWNLOADS + ).getAbsolutePath(), + APP_NAME_FILE, + ), + "pid_dir": "%(local_dir)s", + "log_dir": "%(local_dir)s", } - CONFIG_FILES = ['/data/data/org.goffi.cagou.cagou/files/app/android/' + APP_NAME_FILE + '.conf'] + CONFIG_FILES = [ + "/data/data/org.goffi.cagou.cagou/files/app/android/" + + APP_NAME_FILE + + ".conf" + ] else: ## Configuration ## DEFAULT_CONFIG = { - 'media_dir': '/usr/share/' + APP_NAME_FILE + '/media', - 'local_dir': BaseDirectory.save_data_path(APP_NAME_FILE), - 'downloads_dir': '~/Downloads/' + APP_NAME_FILE, - 'pid_dir': '%(local_dir)s', - 'log_dir': '%(local_dir)s', + "media_dir": "/usr/share/" + APP_NAME_FILE + "/media", + "local_dir": BaseDirectory.save_data_path(APP_NAME_FILE), + "downloads_dir": "~/Downloads/" + APP_NAME_FILE, + "pid_dir": "%(local_dir)s", + "log_dir": "%(local_dir)s", } # List of the configuration filenames sorted by ascending priority - CONFIG_FILES = [realpath(expanduser(path) + APP_NAME_FILE + '.conf') for path in - ['/etc/', '~/', '~/.', '', '.'] + - ['%s/' % path for path in list(BaseDirectory.load_config_paths(APP_NAME_FILE))] - ] + CONFIG_FILES = [ + realpath(expanduser(path) + APP_NAME_FILE + ".conf") + for path in ["/etc/", "~/", "~/.", "", "."] + + [ + "%s/" % path + for path in list(BaseDirectory.load_config_paths(APP_NAME_FILE)) + ] + ] ## Templates ## - TEMPLATE_THEME_DEFAULT = u'default' - TEMPLATE_STATIC_DIR = u'static' - + TEMPLATE_THEME_DEFAULT = u"default" + TEMPLATE_STATIC_DIR = u"static" ## Plugins ## # PLUGIN_INFO keys # XXX: we use PI instead of PLUG_INFO which would normally be used # to make the header more readable - PI_NAME = u'name' - PI_IMPORT_NAME = u'import_name' - PI_MAIN = u'main' - PI_HANDLER = u'handler' - PI_TYPE = u'type' # FIXME: should be types, and should handle single unicode type or tuple of types (e.g. 
"blog" and "import") - PI_MODES = u'modes' - PI_PROTOCOLS = u'protocols' - PI_DEPENDENCIES = u'dependencies' - PI_RECOMMENDATIONS = u'recommendations' - PI_DESCRIPTION = u'description' - PI_USAGE = u'usage' + PI_NAME = u"name" + PI_IMPORT_NAME = u"import_name" + PI_MAIN = u"main" + PI_HANDLER = u"handler" + PI_TYPE = ( + u"type" + ) #  FIXME: should be types, and should handle single unicode type or tuple of types (e.g. "blog" and "import") + PI_MODES = u"modes" + PI_PROTOCOLS = u"protocols" + PI_DEPENDENCIES = u"dependencies" + PI_RECOMMENDATIONS = u"recommendations" + PI_DESCRIPTION = u"description" + PI_USAGE = u"usage" # Types PLUG_TYPE_XEP = "XEP" @@ -245,7 +262,7 @@ PLUG_MODE_BOTH = (PLUG_MODE_CLIENT, PLUG_MODE_COMPONENT) # names of widely used plugins - TEXT_CMDS = 'TEXT-COMMANDS' + TEXT_CMDS = "TEXT-COMMANDS" # PubSub event categories PS_PEP = "PEP" @@ -253,19 +270,18 @@ # PubSub PS_PUBLISH = "publish" - PS_RETRACT = "retract" # used for items - PS_DELETE = "delete" # used for nodes + PS_RETRACT = "retract" # used for items + PS_DELETE = "delete" # used for nodes PS_ITEM = "item" - PS_ITEMS = "items" # Can contain publish and retract items + PS_ITEMS = "items" # Can contain publish and retract items PS_EVENTS = (PS_ITEMS, PS_DELETE) - ## XMLUI ## - XMLUI_WINDOW = 'window' - XMLUI_POPUP = 'popup' - XMLUI_FORM = 'form' - XMLUI_PARAM = 'param' - XMLUI_DIALOG = 'dialog' + XMLUI_WINDOW = "window" + XMLUI_POPUP = "popup" + XMLUI_FORM = "form" + XMLUI_PARAM = "param" + XMLUI_DIALOG = "dialog" XMLUI_DIALOG_CONFIRM = "confirm" XMLUI_DIALOG_MESSAGE = "message" XMLUI_DIALOG_NOTE = "note" @@ -284,108 +300,119 @@ XMLUI_DATA_BTNS_SET_OKCANCEL = "ok/cancel" XMLUI_DATA_BTNS_SET_YESNO = "yes/no" XMLUI_DATA_BTNS_SET_DEFAULT = XMLUI_DATA_BTNS_SET_OKCANCEL - XMLUI_DATA_FILETYPE = 'filetype' + XMLUI_DATA_FILETYPE = "filetype" XMLUI_DATA_FILETYPE_FILE = "file" XMLUI_DATA_FILETYPE_DIR = "dir" XMLUI_DATA_FILETYPE_DEFAULT = XMLUI_DATA_FILETYPE_FILE - ## Logging ## - LOG_LVL_DEBUG = 'DEBUG' - LOG_LVL_INFO = 'INFO' - LOG_LVL_WARNING = 'WARNING' - LOG_LVL_ERROR = 'ERROR' - LOG_LVL_CRITICAL = 'CRITICAL' - LOG_LEVELS = (LOG_LVL_DEBUG, LOG_LVL_INFO, LOG_LVL_WARNING, LOG_LVL_ERROR, LOG_LVL_CRITICAL) - LOG_BACKEND_STANDARD = 'standard' - LOG_BACKEND_TWISTED = 'twisted' - LOG_BACKEND_BASIC = 'basic' - LOG_BACKEND_CUSTOM = 'custom' - LOG_BASE_LOGGER = 'root' - LOG_TWISTED_LOGGER = 'twisted' - LOG_OPT_SECTION = 'DEFAULT' # section of sat.conf where log options should be - LOG_OPT_PREFIX = 'log_' + LOG_LVL_DEBUG = "DEBUG" + LOG_LVL_INFO = "INFO" + LOG_LVL_WARNING = "WARNING" + LOG_LVL_ERROR = "ERROR" + LOG_LVL_CRITICAL = "CRITICAL" + LOG_LEVELS = ( + LOG_LVL_DEBUG, + LOG_LVL_INFO, + LOG_LVL_WARNING, + LOG_LVL_ERROR, + LOG_LVL_CRITICAL, + ) + LOG_BACKEND_STANDARD = "standard" + LOG_BACKEND_TWISTED = "twisted" + LOG_BACKEND_BASIC = "basic" + LOG_BACKEND_CUSTOM = "custom" + LOG_BASE_LOGGER = "root" + LOG_TWISTED_LOGGER = "twisted" + LOG_OPT_SECTION = "DEFAULT" # section of sat.conf where log options should be + LOG_OPT_PREFIX = "log_" # (option_name, default_value) tuples - LOG_OPT_COLORS = ('colors', 'true') # true for auto colors, force to have colors even if stdout is not a tty, false for no color - LOG_OPT_TAINTS_DICT = ('levels_taints_dict', { - LOG_LVL_DEBUG: ('cyan',), - LOG_LVL_INFO: (), - LOG_LVL_WARNING: ('yellow',), - LOG_LVL_ERROR: ('red', 'blink', r'/!\ ', 'blink_off'), - LOG_LVL_CRITICAL: ('bold', 'red', 'Guru Meditation ', 'normal_weight') - }) - LOG_OPT_LEVEL = ('level', 'info') - LOG_OPT_FORMAT = ('fmt', 
'%(message)s') # similar to logging format. - LOG_OPT_LOGGER = ('logger', '') # regex to filter logger name - LOG_OPT_OUTPUT_SEP = '//' - LOG_OPT_OUTPUT_DEFAULT = 'default' - LOG_OPT_OUTPUT_MEMORY = 'memory' + LOG_OPT_COLORS = ( + "colors", + "true", + ) # true for auto colors, force to have colors even if stdout is not a tty, false for no color + LOG_OPT_TAINTS_DICT = ( + "levels_taints_dict", + { + LOG_LVL_DEBUG: ("cyan",), + LOG_LVL_INFO: (), + LOG_LVL_WARNING: ("yellow",), + LOG_LVL_ERROR: ("red", "blink", r"/!\ ", "blink_off"), + LOG_LVL_CRITICAL: ("bold", "red", "Guru Meditation ", "normal_weight"), + }, + ) + LOG_OPT_LEVEL = ("level", "info") + LOG_OPT_FORMAT = ("fmt", "%(message)s") # similar to logging format. + LOG_OPT_LOGGER = ("logger", "") # regex to filter logger name + LOG_OPT_OUTPUT_SEP = "//" + LOG_OPT_OUTPUT_DEFAULT = "default" + LOG_OPT_OUTPUT_MEMORY = "memory" LOG_OPT_OUTPUT_MEMORY_LIMIT = 50 - LOG_OPT_OUTPUT_FILE = 'file' # file is implicit if only output - LOG_OPT_OUTPUT = ('output', LOG_OPT_OUTPUT_SEP + LOG_OPT_OUTPUT_DEFAULT) # //default = normal output (stderr or a file with twistd), path/to/file for a file (must be the first if used), //memory for memory (options can be put in parenthesis, e.g.: //memory(500) for a 500 lines memory) - + LOG_OPT_OUTPUT_FILE = "file" # file is implicit if only output + LOG_OPT_OUTPUT = ( + "output", + LOG_OPT_OUTPUT_SEP + LOG_OPT_OUTPUT_DEFAULT, + ) # //default = normal output (stderr or a file with twistd), path/to/file for a file (must be the first if used), //memory for memory (options can be put in parenthesis, e.g.: //memory(500) for a 500 lines memory) ## action constants ## META_TYPE_FILE = "file" META_TYPE_OVERWRITE = "overwrite" - ## HARD-CODED ACTIONS IDS (generated with uuid.uuid4) ## - AUTHENTICATE_PROFILE_ID = u'b03bbfa8-a4ae-4734-a248-06ce6c7cf562' - CHANGE_XMPP_PASSWD_ID = u'878b9387-de2b-413b-950f-e424a147bcd0' - + AUTHENTICATE_PROFILE_ID = u"b03bbfa8-a4ae-4734-a248-06ce6c7cf562" + CHANGE_XMPP_PASSWD_ID = u"878b9387-de2b-413b-950f-e424a147bcd0" ## Text values ## BOOL_TRUE = "true" BOOL_FALSE = "false" - ## Special values used in bridge methods calls ## HISTORY_LIMIT_DEFAULT = -1 HISTORY_LIMIT_NONE = -2 - ## Progress error special values ## - PROGRESS_ERROR_DECLINED = u'declined' # session has been declined by peer user - + PROGRESS_ERROR_DECLINED = u"declined" #  session has been declined by peer user ## Files ## - FILE_TYPE_DIRECTORY = 'directory' - FILE_TYPE_FILE = 'file' - + FILE_TYPE_DIRECTORY = "directory" + FILE_TYPE_FILE = "file" ## Permissions management ## - ACCESS_PERM_READ = u'read' - ACCESS_PERM_WRITE = u'write' + ACCESS_PERM_READ = u"read" + ACCESS_PERM_WRITE = u"write" ACCESS_PERMS = {ACCESS_PERM_READ, ACCESS_PERM_WRITE} - ACCESS_TYPE_PUBLIC = u'public' - ACCESS_TYPE_WHITELIST = u'whitelist' + ACCESS_TYPE_PUBLIC = u"public" + ACCESS_TYPE_WHITELIST = u"whitelist" ACCESS_TYPES = (ACCESS_TYPE_PUBLIC, ACCESS_TYPE_WHITELIST) - ## Common data keys ## - KEY_THUMBNAILS = u'thumbnails' - KEY_PROGRESS_ID = u'progress_id' + KEY_THUMBNAILS = u"thumbnails" + KEY_PROGRESS_ID = u"progress_id" - - #internationalisation - DEFAULT_LOCALE = u'en_GB' - + # internationalisation + DEFAULT_LOCALE = u"en_GB" ## Misc ## SAVEFILE_DATABASE = APP_NAME_FILE + ".db" IQ_SET = '/iq[@type="set"]' - ENV_PREFIX = 'SAT_' # Prefix used for environment variables - IGNORE = 'ignore' - NO_LIMIT = -1 # used in bridge when a integer value is expected - DEFAULT_MAX_AGE = 1209600 # default max age of cached files, in seconds - HASH_SHA1_EMPTY = 
'da39a3ee5e6b4b0d3255bfef95601890afd80709' + ENV_PREFIX = "SAT_" # Prefix used for environment variables + IGNORE = "ignore" + NO_LIMIT = -1 # used in bridge when a integer value is expected + DEFAULT_MAX_AGE = 1209600 # default max age of cached files, in seconds + HASH_SHA1_EMPTY = "da39a3ee5e6b4b0d3255bfef95601890afd80709" @classmethod def LOG_OPTIONS(cls): """Return options checked for logs""" # XXX: we use a classmethod so we can use Const inheritance to change default options - return(cls.LOG_OPT_COLORS, cls.LOG_OPT_TAINTS_DICT, cls.LOG_OPT_LEVEL, cls.LOG_OPT_FORMAT, cls.LOG_OPT_LOGGER, cls.LOG_OPT_OUTPUT) + return ( + cls.LOG_OPT_COLORS, + cls.LOG_OPT_TAINTS_DICT, + cls.LOG_OPT_LEVEL, + cls.LOG_OPT_FORMAT, + cls.LOG_OPT_LOGGER, + cls.LOG_OPT_OUTPUT, + ) @classmethod def bool(cls, value):
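As the XXX comment in LOG_OPTIONS notes, the log options are exposed through a classmethod so that a subclass of Const can override individual defaults and still have them reported correctly. A minimal sketch of that inheritance pattern, reduced to two of the option tuples (the FrontendConst subclass is hypothetical):

class Const(object):
    # (option_name, default_value) tuples, as in sat.core.constants
    LOG_OPT_COLORS = ("colors", "true")
    LOG_OPT_LEVEL = ("level", "info")

    @classmethod
    def LOG_OPTIONS(cls):
        # classmethod so that subclasses report their own overridden values
        return (cls.LOG_OPT_COLORS, cls.LOG_OPT_LEVEL)

class FrontendConst(Const):
    LOG_OPT_LEVEL = ("level", "debug")

print(Const.LOG_OPTIONS())          # (('colors', 'true'), ('level', 'info'))
print(FrontendConst.LOG_OPTIONS())  # (('colors', 'true'), ('level', 'debug'))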
--- a/sat/core/exceptions.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/core/exceptions.py Wed Jun 27 20:14:46 2018 +0200 @@ -84,7 +84,9 @@ pass -class FeatureNotFound(Exception): # a disco feature/identity which is needed is not present +class FeatureNotFound( + Exception +): # a disco feature/identity which is needed is not present pass
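black splits the FeatureNotFound statement only because the trailing comment pushes the line past its default 88-character limit. If the wrapped form reads poorly, one alternative (a style suggestion, not part of this changeset) is to move the comment above the class, which black then leaves on a single line:

# a disco feature/identity which is needed is not present
class FeatureNotFound(Exception):
    pass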
--- a/sat/core/i18n.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/core/i18n.py Wed Jun 27 20:14:46 2018 +0200 @@ -19,27 +19,31 @@ from sat.core.log import getLogger + log = getLogger(__name__) try: import gettext - _ = gettext.translation('sat', 'i18n', fallback=True).ugettext + _ = gettext.translation("sat", "i18n", fallback=True).ugettext _translators = {None: gettext.NullTranslations()} def languageSwitch(lang=None): if not lang in _translators: - _translators[lang] = gettext.translation('sat', languages=[lang], fallback=True) + _translators[lang] = gettext.translation( + "sat", languages=[lang], fallback=True + ) _translators[lang].install(unicode=True) + except ImportError: log.warning("gettext support disabled") - _ = lambda msg: msg # Libervia doesn't support gettext + _ = lambda msg: msg # Libervia doesn't support gettext + def languageSwitch(lang=None): pass -D_ = lambda msg: msg # used for deferred translations - +D_ = lambda msg: msg # used for deferred translations
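The D_ helper kept at the end of i18n.py is the usual gettext idiom for deferred translations: strings are marked at definition time so they can be extracted into .po files, but only translated when displayed. A self-contained sketch of the idiom follows; ERROR_MSG and show_error are hypothetical, and plain lambdas stand in for the real translators:

D_ = lambda msg: msg   # no-op marker, as in sat.core.i18n
_ = lambda msg: msg    # stand-in for the ugettext callable

ERROR_MSG = D_(u"profile is not connected")  # marked for extraction, not yet translated

def show_error():
    # translation happens at display time, with whatever language is active
    print(_(ERROR_MSG))

show_error()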
--- a/sat/core/log_config.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/core/log_config.py Wed Jun 27 20:14:46 2018 +0200 @@ -31,6 +31,7 @@ def __init__(self, *args, **kwargs): super(TwistedLogger, self).__init__(*args, **kwargs) from twisted.python import log as twisted_log + self.twisted_log = twisted_log def out(self, message, level=None): @@ -38,22 +39,30 @@ @param message: formatted message """ - self.twisted_log.msg(message.encode('utf-8', 'ignore'), sat_logged=True, level=level) + self.twisted_log.msg( + message.encode("utf-8", "ignore"), sat_logged=True, level=level + ) class ConfigureBasic(log.ConfigureBase): - def configureColors(self, colors, force_colors, levels_taints_dict): - super(ConfigureBasic, self).configureColors(colors, force_colors, levels_taints_dict) + super(ConfigureBasic, self).configureColors( + colors, force_colors, levels_taints_dict + ) if colors: import sys + try: isatty = sys.stdout.isatty() except AttributeError: isatty = False - if force_colors or isatty: # FIXME: isatty should be tested on each handler, not globaly + if ( + force_colors or isatty + ): # FIXME: isatty should be tested on each handler, not globaly # we need colors - log.Logger.post_treat = lambda logger, level, message: self.ansiColors(level, message) + log.Logger.post_treat = lambda logger, level, message: self.ansiColors( + level, message + ) elif force_colors: raise ValueError("force_colors can't be used if colors is False") @@ -61,30 +70,36 @@ def getProfile(): """Try to find profile value using introspection""" import inspect + stack = inspect.stack() current_path = stack[0][1] for frame_data in stack[:-1]: if frame_data[1] != current_path: - if log.backend == C.LOG_BACKEND_STANDARD and "/logging/__init__.py" in frame_data[1]: + if ( + log.backend == C.LOG_BACKEND_STANDARD + and "/logging/__init__.py" in frame_data[1] + ): continue break frame = frame_data[0] args = inspect.getargvalues(frame) try: - profile = args.locals.get('profile') or args.locals['profile_key'] + profile = args.locals.get("profile") or args.locals["profile_key"] except (TypeError, KeyError): try: try: - profile = args.locals['self'].profile + profile = args.locals["self"].profile except AttributeError: try: - profile = args.locals['self'].parent.profile + profile = args.locals["self"].parent.profile except AttributeError: - profile = args.locals['self'].host.profile # used in quick_frontend for single profile configuration + profile = args.locals[ + "self" + ].host.profile # used in quick_frontend for single profile configuration except Exception: # we can't find profile, we return an empty value - profile = '' + profile = "" return profile @@ -97,16 +112,23 @@ @param observer: original observer to hook @param can_colors: True if observer can display ansi colors """ + def observer_hook(event): """redirect non SÃ T log to twisted_logger, and add colors when possible""" - if 'sat_logged' in event: # we only want our own logs, other are managed by twistedObserver + if ( + "sat_logged" in event + ): # we only want our own logs, other are managed by twistedObserver # we add colors if possible - if (can_colors and self.LOGGER_CLASS.colors) or self.LOGGER_CLASS.force_colors: - message = event.get('message', tuple()) - level = event.get('level', C.LOG_LVL_INFO) + if ( + can_colors and self.LOGGER_CLASS.colors + ) or self.LOGGER_CLASS.force_colors: + message = event.get("message", tuple()) + level = event.get("level", C.LOG_LVL_INFO) if message: - event['message'] = (self.ansiColors(level, ''.join(message)),) # must be a 
tuple - observer(event) # we can now call the original observer + event["message"] = ( + self.ansiColors(level, "".join(message)), + ) # must be a tuple + observer(event) # we can now call the original observer return observer_hook @@ -130,7 +152,7 @@ @param observer: observer to hook @return: hooked observer or original one """ - if hasattr(observer, '__self__'): + if hasattr(observer, "__self__"): ori = observer if isinstance(observer.__self__, self.twisted_log.FileLogObserver): observer = self.changeFileLogObserver(observer) @@ -147,12 +169,15 @@ """initialise needed attributes, and install observers hooks""" self.observers = {} from twisted.python import log as twisted_log + self.twisted_log = twisted_log self.log_publisher = twisted_log.msg.__self__ + def addObserverObserver(self_logpub, other): """Install hook so we know when a new observer is added""" other = self.installObserverHook(other) return self_logpub._originalAddObserver(other) + def removeObserverObserver(self_logpub, ori): """removeObserver hook fix @@ -170,11 +195,20 @@ raise ValueError("Unknown observer") # we replace addObserver/removeObserver by our own - twisted_log.LogPublisher._originalAddObserver = twisted_log.LogPublisher.addObserver - twisted_log.LogPublisher._originalRemoveObserver = twisted_log.LogPublisher.removeObserver - import types # see https://stackoverflow.com/a/4267590 (thx Chris Morgan/aaronasterling) - twisted_log.addObserver = types.MethodType(addObserverObserver, self.log_publisher, twisted_log.LogPublisher) - twisted_log.removeObserver = types.MethodType(removeObserverObserver, self.log_publisher, twisted_log.LogPublisher) + twisted_log.LogPublisher._originalAddObserver = ( + twisted_log.LogPublisher.addObserver + ) + twisted_log.LogPublisher._originalRemoveObserver = ( + twisted_log.LogPublisher.removeObserver + ) + import types # see https://stackoverflow.com/a/4267590 (thx Chris Morgan/aaronasterling) + + twisted_log.addObserver = types.MethodType( + addObserverObserver, self.log_publisher, twisted_log.LogPublisher + ) + twisted_log.removeObserver = types.MethodType( + removeObserverObserver, self.log_publisher, twisted_log.LogPublisher + ) # we now change existing observers for idx, observer in enumerate(self.log_publisher.observers): @@ -186,6 +220,7 @@ def configureOutput(self, output): import sys + if output is None: output = C.LOG_OPT_OUTPUT_SEP + C.LOG_OPT_OUTPUT_DEFAULT self.manageOutputs(output) @@ -194,9 +229,11 @@ if C.LOG_OPT_OUTPUT_DEFAULT in log.handlers: # default output is already managed, we just add output to stdout if we are in debug or nodaemon mode if self.backend_data is None: - raise ValueError("You must pass options as backend_data with Twisted backend") + raise ValueError( + "You must pass options as backend_data with Twisted backend" + ) options = self.backend_data - if options.get('nodaemon', False) or options.get('debug', False): + if options.get("nodaemon", False) or options.get("debug", False): addObserver(self.twisted_log.FileLogObserver(sys.stdout).emit) else: # \\default is not in the output, so we remove current observers @@ -208,49 +245,81 @@ if C.LOG_OPT_OUTPUT_FILE in log.handlers: from twisted.python import logfile + for path in log.handlers[C.LOG_OPT_OUTPUT_FILE]: - log_file = sys.stdout if path == '-' else logfile.LogFile.fromFullPath(path) + log_file = ( + sys.stdout if path == "-" else logfile.LogFile.fromFullPath(path) + ) addObserver(self.twisted_log.FileLogObserver(log_file).emit) if C.LOG_OPT_OUTPUT_MEMORY in log.handlers: - raise 
NotImplementedError("Memory observer is not implemented in Twisted backend") + raise NotImplementedError( + "Memory observer is not implemented in Twisted backend" + ) def configureColors(self, colors, force_colors, levels_taints_dict): - super(ConfigureTwisted, self).configureColors(colors, force_colors, levels_taints_dict) + super(ConfigureTwisted, self).configureColors( + colors, force_colors, levels_taints_dict + ) self.LOGGER_CLASS.colors = colors self.LOGGER_CLASS.force_colors = force_colors if force_colors and not colors: - raise ValueError('colors must be True if force_colors is True') + raise ValueError("colors must be True if force_colors is True") def postTreatment(self): """Install twistedObserver which manage non SÃ T logs""" + def twistedObserver(event): """Observer which redirect log message not produced by SÃ T to SÃ T logging system""" - if not 'sat_logged' in event: + if not "sat_logged" in event: # this log was not produced by SÃ T from twisted.python import log as twisted_log + text = twisted_log.textFromEventDict(event) if text is None: return twisted_logger = log.getLogger(C.LOG_TWISTED_LOGGER) - log_method = twisted_logger.error if event.get('isError', False) else twisted_logger.info - log_method(text.decode('utf-8')) + log_method = ( + twisted_logger.error + if event.get("isError", False) + else twisted_logger.info + ) + log_method(text.decode("utf-8")) self.log_publisher._originalAddObserver(twistedObserver) class ConfigureStandard(ConfigureBasic): - - def __init__(self, level=None, fmt=None, output=None, logger=None, colors=False, levels_taints_dict=None, force_colors=False, backend_data=None): + def __init__( + self, + level=None, + fmt=None, + output=None, + logger=None, + colors=False, + levels_taints_dict=None, + force_colors=False, + backend_data=None, + ): if fmt is None: fmt = C.LOG_OPT_FORMAT[1] if output is None: output = C.LOG_OPT_OUTPUT[1] - super(ConfigureStandard, self).__init__(level, fmt, output, logger, colors, levels_taints_dict, force_colors, backend_data) + super(ConfigureStandard, self).__init__( + level, + fmt, + output, + logger, + colors, + levels_taints_dict, + force_colors, + backend_data, + ) def preTreatment(self): """We use logging methods directly, instead of using Logger""" import logging + log.getLogger = logging.getLogger log.debug = logging.debug log.info = logging.info @@ -271,7 +340,7 @@ class SatFormatter(logging.Formatter): u"""Formatter which manage SÃ T specificities""" _format = fmt - _with_profile = '%(profile)s' in fmt + _with_profile = "%(profile)s" in fmt def __init__(self, can_colors=False): super(SatFormatter, self).__init__(self._format) @@ -288,14 +357,14 @@ record.color_start = log.COLOR_START record.color_end = log.COLOR_END else: - record.color_start = record.color_end = '' + record.color_start = record.color_end = "" s = super(SatFormatter, self).format(record) if do_color: s = ConfigureStandard.ansiColors(record.levelname, s) if sys.platform == "android": # FIXME: dirty hack to workaround android encoding issue on log # need to be fixed properly - return s.encode('ascii', 'ignore') + return s.encode("ascii", "ignore") else: return s @@ -308,7 +377,9 @@ self.name_filter = log.FilterName(logger) if logger else None def configureColors(self, colors, force_colors, levels_taints_dict): - super(ConfigureStandard, self).configureColors(colors, force_colors, levels_taints_dict) + super(ConfigureStandard, self).configureColors( + colors, force_colors, levels_taints_dict + ) self.formatterClass.with_colors = colors 
self.formatterClass.force_colors = force_colors if not colors and force_colors: @@ -323,6 +394,7 @@ def postTreatment(self): import logging + root_logger = logging.getLogger() if len(root_logger.handlers) == 0: for handler, options in log.handlers.items(): @@ -335,14 +407,21 @@ self._addHandler(root_logger, hdlr, can_colors=can_colors) elif handler == C.LOG_OPT_OUTPUT_MEMORY: from logging.handlers import BufferingHandler + class SatMemoryHandler(BufferingHandler): def emit(self, record): super(SatMemoryHandler, self).emit(self.format(record)) + hdlr = SatMemoryHandler(options) - log.handlers[handler] = hdlr # we keep a reference to the handler to read the buffer later + log.handlers[ + handler + ] = ( + hdlr + ) # we keep a reference to the handler to read the buffer later self._addHandler(root_logger, hdlr, can_colors=False) elif handler == C.LOG_OPT_OUTPUT_FILE: import os.path + for path in options: hdlr = logging.FileHandler(os.path.expanduser(path)) self._addHandler(root_logger, hdlr, can_colors=False) @@ -358,13 +437,16 @@ @param size: number of logs to return """ mem_handler = log.handlers[C.LOG_OPT_OUTPUT_MEMORY] - return (log_msg for log_msg in mem_handler.buffer[size if size is None else -size:]) + return ( + log_msg for log_msg in mem_handler.buffer[size if size is None else -size :] + ) log.configure_cls[C.LOG_BACKEND_BASIC] = ConfigureBasic log.configure_cls[C.LOG_BACKEND_TWISTED] = ConfigureTwisted log.configure_cls[C.LOG_BACKEND_STANDARD] = ConfigureStandard + def configure(backend, **options): """Configure logging behaviour @param backend: can be: @@ -375,6 +457,7 @@ """ return log.configure(backend, **options) + def _parseOptions(options): """Parse string options as given in conf or environment variable, and return expected python value @@ -384,11 +467,11 @@ LEVEL = C.LOG_OPT_LEVEL[0] if COLORS in options: - if options[COLORS].lower() in ('1', 'true'): + if options[COLORS].lower() in ("1", "true"): options[COLORS] = True - elif options[COLORS] == 'force': + elif options[COLORS] == "force": options[COLORS] = True - options['force_colors'] = True + options["force_colors"] = True else: options[COLORS] = False if LEVEL in options: @@ -397,6 +480,7 @@ level = C.LOG_LVL_INFO options[LEVEL] = level + def satConfigure(backend=C.LOG_BACKEND_STANDARD, const=None, backend_data=None): """Configure logging system for SÃ T, can be used by frontends @@ -413,13 +497,18 @@ log.C = const from sat.tools import config import os + log_conf = {} sat_conf = config.parseMainConf() for opt_name, opt_default in C.LOG_OPTIONS(): try: - log_conf[opt_name] = os.environ[''.join((C.ENV_PREFIX, C.LOG_OPT_PREFIX.upper(), opt_name.upper()))] + log_conf[opt_name] = os.environ[ + "".join((C.ENV_PREFIX, C.LOG_OPT_PREFIX.upper(), opt_name.upper())) + ] except KeyError: - log_conf[opt_name] = config.getConfig(sat_conf, C.LOG_OPT_SECTION, C.LOG_OPT_PREFIX + opt_name, opt_default) + log_conf[opt_name] = config.getConfig( + sat_conf, C.LOG_OPT_SECTION, C.LOG_OPT_PREFIX + opt_name, opt_default + ) _parseOptions(log_conf) configure(backend, backend_data=backend_data, **log_conf)
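Note on the log_config.py hunks above: the //memory output of ConfigureStandard stores already formatted log lines in a BufferingHandler subclass, so memoryGet() can hand the last entries back to frontends. A standalone sketch of that kind of handler (hypothetical names, simplified; with the stock BufferingHandler the buffer is simply cleared once capacity is reached):

    import logging
    from logging.handlers import BufferingHandler


    class MemoryLogHandler(BufferingHandler):
        """Keep formatted log lines in memory so they can be read back later."""

        def emit(self, record):
            # store the formatted text instead of the raw LogRecord, since the
            # buffer is meant to be returned to frontends as plain strings
            BufferingHandler.emit(self, self.format(record))


    handler = MemoryLogHandler(50)  # capacity, cf. LOG_OPT_OUTPUT_MEMORY_LIMIT
    handler.setFormatter(logging.Formatter("%(levelname)s %(message)s"))
    logging.getLogger().addHandler(handler)
    logging.getLogger().warning("something happened")
    print(handler.buffer[-50:])  # ['WARNING something happened']
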
--- a/sat/core/sat_main.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/core/sat_main.py Wed Jun 27 20:14:46 2018 +0200 @@ -27,6 +27,7 @@ from sat.core import xmpp from sat.core import exceptions from sat.core.log import getLogger + log = getLogger(__name__) from sat.core.constants import Const as C from sat.memory import memory @@ -43,26 +44,31 @@ import uuid try: - from collections import OrderedDict # only available from python 2.7 + from collections import OrderedDict # only available from python 2.7 except ImportError: from ordereddict import OrderedDict class SAT(service.Service): - def __init__(self): self._cb_map = {} # map from callback_id to callbacks - self._menus = OrderedDict() # dynamic menus. key: callback_id, value: menu data (dictionnary) + self._menus = ( + OrderedDict() + ) # dynamic menus. key: callback_id, value: menu data (dictionnary) self._menus_paths = {} # path to id. key: (menu_type, lower case tuple of path), value: menu id self.initialised = defer.Deferred() self.profiles = {} self.plugins = {} - self.ns_map = {u'x-data': u'jabber:x:data'} # map for short name to whole namespace, - # extended by plugins with registerNamespace + self.ns_map = { + u"x-data": u"jabber:x:data" + } #  map for short name to whole namespace, + #  extended by plugins with registerNamespace self.memory = memory.Memory(self) - self.trigger = trigger.TriggerManager() # trigger are used to change Sà T behaviour + self.trigger = ( + trigger.TriggerManager() + ) # trigger are used to change Sà T behaviour - bridge_name = self.memory.getConfig('', 'bridge', 'dbus') + bridge_name = self.memory.getConfig("", "bridge", "dbus") bridge_module = dynamic_import.bridge(bridge_name) if bridge_module is None: @@ -79,28 +85,42 @@ self.bridge.register_method("getFeatures", self.getFeatures) self.bridge.register_method("profileNameGet", self.memory.getProfileName) self.bridge.register_method("profilesListGet", self.memory.getProfilesList) - self.bridge.register_method("getEntityData", lambda jid_, keys, profile: self.memory.getEntityData(jid.JID(jid_), keys, profile)) + self.bridge.register_method( + "getEntityData", + lambda jid_, keys, profile: self.memory.getEntityData( + jid.JID(jid_), keys, profile + ), + ) self.bridge.register_method("getEntitiesData", self.memory._getEntitiesData) self.bridge.register_method("profileCreate", self.memory.createProfile) self.bridge.register_method("asyncDeleteProfile", self.memory.asyncDeleteProfile) self.bridge.register_method("profileStartSession", self.memory.startSession) - self.bridge.register_method("profileIsSessionStarted", self.memory._isSessionStarted) + self.bridge.register_method( + "profileIsSessionStarted", self.memory._isSessionStarted + ) self.bridge.register_method("profileSetDefault", self.memory.profileSetDefault) self.bridge.register_method("connect", self._connect) self.bridge.register_method("disconnect", self.disconnect) self.bridge.register_method("getContacts", self.getContacts) self.bridge.register_method("getContactsFromGroup", self.getContactsFromGroup) self.bridge.register_method("getMainResource", self.memory._getMainResource) - self.bridge.register_method("getPresenceStatuses", self.memory._getPresenceStatuses) + self.bridge.register_method( + "getPresenceStatuses", self.memory._getPresenceStatuses + ) self.bridge.register_method("getWaitingSub", self.memory.getWaitingSub) self.bridge.register_method("messageSend", self._messageSend) self.bridge.register_method("getConfig", self._getConfig) self.bridge.register_method("setParam", 
self.setParam) self.bridge.register_method("getParamA", self.memory.getStringParamA) self.bridge.register_method("asyncGetParamA", self.memory.asyncGetStringParamA) - self.bridge.register_method("asyncGetParamsValuesFromCategory", self.memory.asyncGetParamsValuesFromCategory) + self.bridge.register_method( + "asyncGetParamsValuesFromCategory", + self.memory.asyncGetParamsValuesFromCategory, + ) self.bridge.register_method("getParamsUI", self.memory.getParamsUI) - self.bridge.register_method("getParamsCategories", self.memory.getParamsCategories) + self.bridge.register_method( + "getParamsCategories", self.memory.getParamsCategories + ) self.bridge.register_method("paramsRegisterApp", self.memory.paramsRegisterApp) self.bridge.register_method("historyGet", self.memory._historyGet) self.bridge.register_method("setPresence", self._setPresence) @@ -135,12 +155,14 @@ def full_version(self): """Return the full version of Sà T (with release name and extra data when in development mode)""" version = self.version - if version[-1] == 'D': + if version[-1] == "D": # we are in debug version, we add extra data try: return self._version_cache except AttributeError: - self._version_cache = u"{} « {} » ({})".format(version, C.APP_RELEASE_NAME, utils.getRepositoryData(sat)) + self._version_cache = u"{} « {} » ({})".format( + version, C.APP_RELEASE_NAME, utils.getRepositoryData(sat) + ) return self._version_cache else: return version @@ -158,8 +180,11 @@ ui_contact_list.ContactList(self) ui_profile_manager.ProfileManager(self) except Exception as e: - log.error(_(u"Could not initialize backend: {reason}").format( - reason = str(e).decode('utf-8', 'ignore'))) + log.error( + _(u"Could not initialize backend: {reason}").format( + reason=str(e).decode("utf-8", "ignore") + ) + ) sys.exit(1) self.initialised.callback(None) log.info(_(u"Backend is ready")) @@ -180,43 +205,69 @@ # just use a client, and plugin blacklisting should be possible in sat.conf plugins_path = os.path.dirname(sat.plugins.__file__) plugin_glob = "plugin*." + C.PLUGIN_EXT - plug_lst = [os.path.splitext(plugin)[0] for plugin in map(os.path.basename, glob(os.path.join(plugins_path, plugin_glob)))] + plug_lst = [ + os.path.splitext(plugin)[0] + for plugin in map( + os.path.basename, glob(os.path.join(plugins_path, plugin_glob)) + ) + ] plugins_to_import = {} # plugins we still have to import for plug in plug_lst: - plugin_path = 'sat.plugins.' + plug + plugin_path = "sat.plugins." 
+ plug try: __import__(plugin_path) except exceptions.MissingModule as e: self._unimport_plugin(plugin_path) - log.warning(u"Can't import plugin [{path}] because of an unavailale third party module:\n{msg}".format( - path=plugin_path, msg=e)) + log.warning( + u"Can't import plugin [{path}] because of an unavailale third party module:\n{msg}".format( + path=plugin_path, msg=e + ) + ) continue except exceptions.CancelError as e: - log.info(u"Plugin [{path}] cancelled its own import: {msg}".format(path=plugin_path, msg=e)) + log.info( + u"Plugin [{path}] cancelled its own import: {msg}".format( + path=plugin_path, msg=e + ) + ) self._unimport_plugin(plugin_path) continue except Exception as e: import traceback - log.error(_(u"Can't import plugin [{path}]:\n{error}").format(path=plugin_path, error=traceback.format_exc())) + + log.error( + _(u"Can't import plugin [{path}]:\n{error}").format( + path=plugin_path, error=traceback.format_exc() + ) + ) self._unimport_plugin(plugin_path) continue mod = sys.modules[plugin_path] plugin_info = mod.PLUGIN_INFO - import_name = plugin_info['import_name'] + import_name = plugin_info["import_name"] - plugin_modes = plugin_info[u'modes'] = set(plugin_info.setdefault(u"modes", C.PLUG_MODE_DEFAULT)) + plugin_modes = plugin_info[u"modes"] = set( + plugin_info.setdefault(u"modes", C.PLUG_MODE_DEFAULT) + ) # if the plugin is an entry point, it must work in component mode - if plugin_info[u'type'] == C.PLUG_TYPE_ENTRY_POINT: + if plugin_info[u"type"] == C.PLUG_TYPE_ENTRY_POINT: # if plugin is an entrypoint, we cache it if C.PLUG_MODE_COMPONENT not in plugin_modes: - log.error(_(u"{type} type must be used with {mode} mode, ignoring plugin").format( - type = C.PLUG_TYPE_ENTRY_POINT, mode = C.PLUG_MODE_COMPONENT)) + log.error( + _( + u"{type} type must be used with {mode} mode, ignoring plugin" + ).format(type=C.PLUG_TYPE_ENTRY_POINT, mode=C.PLUG_MODE_COMPONENT) + ) self._unimport_plugin(plugin_path) continue if import_name in plugins_to_import: - log.error(_(u"Name conflict for import name [{import_name}], can't import plugin [{name}]").format(**plugin_info)) + log.error( + _( + u"Name conflict for import name [{import_name}], can't import plugin [{name}]" + ).format(**plugin_info) + ) continue plugins_to_import[import_name] = (plugin_path, mod, plugin_info) while True: @@ -227,7 +278,9 @@ if not plugins_to_import: break - def _import_plugins_from_dict(self, plugins_to_import, import_name=None, optional=False): + def _import_plugins_from_dict( + self, plugins_to_import, import_name=None, optional=False + ): """Recursively import and their dependencies in the right order @param plugins_to_import(dict): key=import_name and values=(plugin_path, module, plugin_info) @@ -235,14 +288,16 @@ @param optional(bool): if False and plugin is not found, an ImportError exception is raised """ if import_name in self.plugins: - log.debug(u'Plugin {} already imported, passing'.format(import_name)) + log.debug(u"Plugin {} already imported, passing".format(import_name)) return if not import_name: import_name, (plugin_path, mod, plugin_info) = plugins_to_import.popitem() else: if not import_name in plugins_to_import: if optional: - log.warning(_(u"Recommended plugin not found: {}").format(import_name)) + log.warning( + _(u"Recommended plugin not found: {}").format(import_name) + ) return msg = u"Dependency not found: {}".format(import_name) log.error(msg) @@ -252,21 +307,33 @@ recommendations = plugin_info.setdefault("recommendations", []) for to_import in dependencies + recommendations: 
if to_import not in self.plugins: - log.debug(u'Recursively import dependency of [%s]: [%s]' % (import_name, to_import)) + log.debug( + u"Recursively import dependency of [%s]: [%s]" + % (import_name, to_import) + ) try: - self._import_plugins_from_dict(plugins_to_import, to_import, to_import not in dependencies) + self._import_plugins_from_dict( + plugins_to_import, to_import, to_import not in dependencies + ) except ImportError as e: - log.warning(_(u"Can't import plugin {name}: {error}").format(name=plugin_info['name'], error=e)) + log.warning( + _(u"Can't import plugin {name}: {error}").format( + name=plugin_info["name"], error=e + ) + ) if optional: return raise e - log.info("importing plugin: {}".format(plugin_info['name'])) + log.info("importing plugin: {}".format(plugin_info["name"])) # we instanciate the plugin here try: - self.plugins[import_name] = getattr(mod, plugin_info['main'])(self) + self.plugins[import_name] = getattr(mod, plugin_info["main"])(self) except Exception as e: - log.warning(u'Error while loading plugin "{name}", ignoring it: {error}' - .format(name=plugin_info['name'], error=e)) + log.warning( + u'Error while loading plugin "{name}", ignoring it: {error}'.format( + name=plugin_info["name"], error=e + ) + ) if optional: return raise ImportError(u"Error during initiation") @@ -276,7 +343,7 @@ self.plugins[import_name].is_handler = False # we keep metadata as a Class attribute self.plugins[import_name]._info = plugin_info - #TODO: test xmppclient presence and register handler parent + # TODO: test xmppclient presence and register handler parent def pluginsUnload(self): """Call unload method on every loaded plugin, if exists @@ -296,11 +363,11 @@ defers_list.append(defer.maybeDeferred(unload)) return defers_list - def _connect(self, profile_key, password='', options=None): + def _connect(self, profile_key, password="", options=None): profile = self.memory.getProfileName(profile_key) return self.connect(profile, password, options) - def connect(self, profile, password='', options=None, max_retries=C.XMPP_MAX_RETRIES): + def connect(self, profile, password="", options=None, max_retries=C.XMPP_MAX_RETRIES): """Connect a profile (i.e. 
connect client.component to XMPP server) Retrieve the individual parameters, authenticate the profile @@ -316,7 +383,8 @@ @raise exceptions.PasswordError: Profile password is wrong """ if options is None: - options={} + options = {} + def connectProfile(dummy=None): if self.isConnected(profile): log.info(_("already connected !")) @@ -375,15 +443,19 @@ features.append(features_d) d_list = defer.DeferredList(features) + def buildFeatures(result, import_names): assert len(result) == len(import_names) ret = {} - for name, (success, data) in zip (import_names, result): + for name, (success, data) in zip(import_names, result): if success: ret[name] = data else: - log.warning(u"Error while getting features for {name}: {failure}".format( - name=name, failure=data)) + log.warning( + u"Error while getting features for {name}: {failure}".format( + name=name, failure=data + ) + ) ret[name] = {} return ret @@ -392,6 +464,7 @@ def getContacts(self, profile_key): client = self.getClient(profile_key) + def got_roster(dummy): ret = [] for item in client.roster.getItems(): # we get all items for client's roster @@ -467,14 +540,14 @@ @return: list of clients """ if not profile_key: - raise exceptions.DataError(_(u'profile_key must not be empty')) + raise exceptions.DataError(_(u"profile_key must not be empty")) try: profile = self.memory.getProfileName(profile_key, True) except exceptions.ProfileUnknownError: return [] if profile == C.PROF_KEY_ALL: return self.profiles.values() - elif profile[0] == '@': # only profile keys can start with "@" + elif profile[0] == "@": #  only profile keys can start with "@" raise exceptions.ProfileKeyUnknown return [self.profiles[profile]] @@ -485,7 +558,7 @@ @param name: name of the option @return: unicode representation of the option """ - return unicode(self.memory.getConfig(section, name, '')) + return unicode(self.memory.getConfig(section, name, "")) def logErrback(self, failure_): """generic errback logging @@ -495,12 +568,12 @@ log.error(_(u"Unexpected error: {}".format(failure_))) return failure_ - # namespaces + #  namespaces def registerNamespace(self, short_name, namespace): """associate a namespace to a short name""" if short_name in self.ns_map: - raise exceptions.ConflictError(u'this short name is already used') + raise exceptions.ConflictError(u"this short name is already used") self.ns_map[short_name] = namespace def getNamespaces(self): @@ -509,10 +582,7 @@ def getSessionInfos(self, profile_key): """compile interesting data on current profile session""" client = self.getClient(profile_key) - data = { - "jid": client.jid.full(), - "started": unicode(int(client.started)), - } + data = {"jid": client.jid.full(), "started": unicode(int(client.started))} return defer.succeed(data) # local dirs @@ -531,11 +601,11 @@ """ # FIXME: component and profile are parsed with **kwargs because of python 2 limitations # once moved to python 3, this can be fixed - component = kwargs.pop('component', False) - profile = kwargs.pop('profile', True) + component = kwargs.pop("component", False) + profile = kwargs.pop("profile", True) assert not kwargs - path_elts = [self.memory.getConfig('', 'local_dir')] + path_elts = [self.memory.getConfig("", "local_dir")] if component: path_elts.append(C.COMPONENTS_DIR) path_elts.append(regex.pathEscape(dir_name)) @@ -561,7 +631,7 @@ """ profile = self.memory.getProfileName(profile_key) if not profile: - log.error(_('asking connection status for a non-existant profile')) + log.error(_("asking connection status for a non-existant profile")) raise 
exceptions.ProfileUnknownError(profile_key) if profile not in self.profiles: return False @@ -569,28 +639,47 @@ ## XMPP methods ## - def _messageSend(self, to_jid_s, message, subject=None, mess_type='auto', extra=None, profile_key=C.PROF_KEY_NONE): + def _messageSend( + self, + to_jid_s, + message, + subject=None, + mess_type="auto", + extra=None, + profile_key=C.PROF_KEY_NONE, + ): client = self.getClient(profile_key) to_jid = jid.JID(to_jid_s) - #XXX: we need to use the dictionary comprehension because D-Bus return its own types, and pickle can't manage them. TODO: Need to find a better way - return client.sendMessage(to_jid, message, subject, mess_type, {unicode(key): unicode(value) for key, value in extra.items()}) + # XXX: we need to use the dictionary comprehension because D-Bus return its own types, and pickle can't manage them. TODO: Need to find a better way + return client.sendMessage( + to_jid, + message, + subject, + mess_type, + {unicode(key): unicode(value) for key, value in extra.items()}, + ) def _setPresence(self, to="", show="", statuses=None, profile_key=C.PROF_KEY_NONE): return self.setPresence(jid.JID(to) if to else None, show, statuses, profile_key) - def setPresence(self, to_jid=None, show="", statuses=None, profile_key=C.PROF_KEY_NONE): + def setPresence( + self, to_jid=None, show="", statuses=None, profile_key=C.PROF_KEY_NONE + ): """Send our presence information""" if statuses is None: statuses = {} profile = self.memory.getProfileName(profile_key) assert profile - priority = int(self.memory.getParamA("Priority", "Connection", profile_key=profile)) + priority = int( + self.memory.getParamA("Priority", "Connection", profile_key=profile) + ) self.profiles[profile].presence.available(to_jid, show, statuses, priority) - #XXX: FIXME: temporary fix to work around openfire 3.7.0 bug (presence is not broadcasted to generating resource) - if '' in statuses: - statuses[C.PRESENCE_STATUSES_DEFAULT] = statuses.pop('') - self.bridge.presenceUpdate(self.profiles[profile].jid.full(), show, - int(priority), statuses, profile) + # XXX: FIXME: temporary fix to work around openfire 3.7.0 bug (presence is not broadcasted to generating resource) + if "" in statuses: + statuses[C.PRESENCE_STATUSES_DEFAULT] = statuses.pop("") + self.bridge.presenceUpdate( + self.profiles[profile].jid.full(), show, int(priority), statuses, profile + ) def subscription(self, subs_type, raw_jid, profile_key): """Called to manage subscription @@ -600,7 +689,10 @@ profile = self.memory.getProfileName(profile_key) assert profile to_jid = jid.JID(raw_jid) - log.debug(_(u'subsciption request [%(subs_type)s] for %(jid)s') % {'subs_type': subs_type, 'jid': to_jid.full()}) + log.debug( + _(u"subsciption request [%(subs_type)s] for %(jid)s") + % {"subs_type": subs_type, "jid": to_jid.full()} + ) if subs_type == "subscribe": self.profiles[profile].presence.subscribe(to_jid) elif subs_type == "subscribed": @@ -671,12 +763,41 @@ def findFeaturesSet(self, *args, **kwargs): return self.memory.disco.findFeaturesSet(*args, **kwargs) - def _findByFeatures(self, namespaces, identities, bare_jids, service, roster, own_jid, local_device, profile_key): + def _findByFeatures( + self, + namespaces, + identities, + bare_jids, + service, + roster, + own_jid, + local_device, + profile_key, + ): client = self.getClient(profile_key) - return self.findByFeatures(client, namespaces, identities, bare_jids, service, roster, own_jid, local_device) + return self.findByFeatures( + client, + namespaces, + identities, + bare_jids, + service, 
+ roster, + own_jid, + local_device, + ) @defer.inlineCallbacks - def findByFeatures(self, client, namespaces, identities=None, bare_jids=False, service=True, roster=True, own_jid=True, local_device=False): + def findByFeatures( + self, + client, + namespaces, + identities=None, + bare_jids=False, + service=True, + roster=True, + own_jid=True, + local_device=False, + ): """retrieve all services or contacts managing a set a features @param namespaces(list[unicode]): features which must be handled @@ -697,7 +818,9 @@ if not identities: identities = None if not namespaces and not identities: - raise exceptions.DataError("at least one namespace or one identity must be set") + raise exceptions.DataError( + "at least one namespace or one identity must be set" + ) found_service = {} found_own = {} found_roster = {} @@ -705,9 +828,14 @@ services_jids = yield self.findFeaturesSet(client, namespaces) for service_jid in services_jids: infos = yield self.getDiscoInfos(client, service_jid) - if identities is not None and not set(infos.identities.keys()).issuperset(identities): + if identities is not None and not set(infos.identities.keys()).issuperset( + identities + ): continue - found_identities = [(cat, type_, name or u'') for (cat, type_), name in infos.identities.iteritems()] + found_identities = [ + (cat, type_, name or u"") + for (cat, type_), name in infos.identities.iteritems() + ] found_service[service_jid.full()] = found_identities jids = [] @@ -716,8 +844,10 @@ if own_jid: jids.append(client.jid.userhostJID()) - for found, jids in ((found_own, [client.jid.userhostJID()]), - (found_roster, client.roster.getJids())): + for found, jids in ( + (found_own, [client.jid.userhostJID()]), + (found_roster, client.roster.getJids()), + ): for jid_ in jids: if jid_.resource: if bare_jids: @@ -737,9 +867,14 @@ continue infos = yield self.getDiscoInfos(client, full_jid) if infos.features.issuperset(namespaces): - if identities is not None and not set(infos.identities.keys()).issuperset(identities): + if identities is not None and not set( + infos.identities.keys() + ).issuperset(identities): continue - found_identities = [(cat, type_, name or u'') for (cat, type_), name in infos.identities.iteritems()] + found_identities = [ + (cat, type_, name or u"") + for (cat, type_), name in infos.identities.iteritems() + ] found[full_jid.full()] = found_identities defer.returnValue((found_service, found_own, found_roster)) @@ -750,7 +885,13 @@ log.debug(u"Killing action {} for timeout".format(keep_id)) client.actions[keep_id] - def actionNew(self, action_data, security_limit=C.NO_SECURITY_LIMIT, keep_id=None, profile=C.PROF_KEY_NONE): + def actionNew( + self, + action_data, + security_limit=C.NO_SECURITY_LIMIT, + keep_id=None, + profile=C.PROF_KEY_NONE, + ): """Shortcut to bridge.actionNew which generate and id and keep for retrieval @param action_data(dict): action data (see bridge documentation) @@ -763,7 +904,7 @@ id_ = unicode(uuid.uuid4()) if keep_id is not None: client = self.getClient(profile) - action_timer = reactor.callLater(60*30, self._killAction, keep_id, client) + action_timer = reactor.callLater(60 * 30, self._killAction, keep_id, client) client.actions[keep_id] = (action_data, id_, security_limit, action_timer) self.bridge.actionNew(action_data, id_, security_limit, profile) @@ -776,7 +917,9 @@ client = self.getClient(profile) return [action_tuple[:-1] for action_tuple in client.actions.itervalues()] - def registerProgressCb(self, progress_id, callback, metadata=None, profile=C.PROF_KEY_NONE): + def 
registerProgressCb( + self, progress_id, callback, metadata=None, profile=C.PROF_KEY_NONE + ): """Register a callback called when progress is requested for id""" if metadata is None: metadata = {} @@ -795,7 +938,7 @@ def _progressGet(self, progress_id, profile): data = self.progressGet(progress_id, profile) - return {k: unicode(v) for k,v in data.iteritems()} + return {k: unicode(v) for k, v in data.iteritems()} def progressGet(self, progress_id, profile): """Return a dict with progress information @@ -837,7 +980,10 @@ profile = client.profile progress_dict = {} progress_all[profile] = progress_dict - for progress_id, (dummy, progress_metadata) in client._progress_cb.iteritems(): + for ( + progress_id, + (dummy, progress_metadata), + ) in client._progress_cb.iteritems(): progress_dict[progress_id] = progress_metadata return progress_all @@ -870,7 +1016,7 @@ one_shot(bool): True to delete callback once it have been called @return: id of the registered callback """ - callback_id = kwargs.pop('force_id', None) + callback_id = kwargs.pop("force_id", None) if callback_id is None: callback_id = str(uuid.uuid4()) else: @@ -878,12 +1024,14 @@ raise exceptions.ConflictError(_(u"id already registered")) self._cb_map[callback_id] = (callback, args, kwargs) - if "one_shot" in kwargs: # One Shot callback are removed after 30 min + if "one_shot" in kwargs: # One Shot callback are removed after 30 min + def purgeCallback(): try: self.removeCallback(callback_id) except KeyError: pass + reactor.callLater(1800, purgeCallback) return callback_id @@ -906,14 +1054,16 @@ - C.BOOL_TRUE - C.BOOL_FALSE """ - # FIXME: security limit need to be checked here + #  FIXME: security limit need to be checked here try: client = self.getClient(profile_key) except exceptions.NotFound: # client is not available yet profile = self.memory.getProfileName(profile_key) if not profile: - raise exceptions.ProfileUnknownError(_(u'trying to launch action with a non-existant profile')) + raise exceptions.ProfileUnknownError( + _(u"trying to launch action with a non-existant profile") + ) else: profile = client.profile # we check if the action is kept, and remove it @@ -922,7 +1072,7 @@ except KeyError: pass else: - action_tuple[-1].cancel() # the last item is the action timer + action_tuple[-1].cancel() # the last item is the action timer del client.actions[callback_id] try: @@ -933,17 +1083,20 @@ if kwargs.get("with_data", False): if data is None: raise exceptions.DataError("Required data for this callback is missing") - args,kwargs=list(args)[:],kwargs.copy() # we don't want to modify the original (kw)args + args, kwargs = ( + list(args)[:], + kwargs.copy(), + ) # we don't want to modify the original (kw)args args.insert(0, data) kwargs["profile"] = profile del kwargs["with_data"] - if kwargs.pop('one_shot', False): + if kwargs.pop("one_shot", False): self.removeCallback(callback_id) return defer.maybeDeferred(callback, *args, **kwargs) - #Menus management + # Menus management def _getMenuCanonicalPath(self, path): """give canonical form of path @@ -954,7 +1107,14 @@ """ return tuple((p.lower().strip() for p in path)) - def importMenu(self, path, callback, security_limit=C.NO_SECURITY_LIMIT, help_string="", type_=C.MENU_GLOBAL): + def importMenu( + self, + path, + callback, + security_limit=C.NO_SECURITY_LIMIT, + help_string="", + type_=C.MENU_GLOBAL, + ): """register a new menu for frontends @param path(iterable[unicode]): path to go to the menu (category/subcategory/.../item) (e.g.: ("File", "Open")) @@ -989,34 +1149,40 @@ 
callback, args, kwargs = self._cb_map[callback_id] except KeyError: raise exceptions.DataError("Unknown callback id") - kwargs["with_data"] = True # we have to be sure that we use extra data + kwargs["with_data"] = True # we have to be sure that we use extra data else: raise exceptions.DataError("Unknown callback type") for menu_data in self._menus.itervalues(): - if menu_data['path'] == path and menu_data['type'] == type_: - raise exceptions.ConflictError(_("A menu with the same path and type already exists")) + if menu_data["path"] == path and menu_data["type"] == type_: + raise exceptions.ConflictError( + _("A menu with the same path and type already exists") + ) path_canonical = self._getMenuCanonicalPath(path) menu_key = (type_, path_canonical) if menu_key in self._menus_paths: - raise exceptions.ConflictError(u"this menu path is already used: {path} ({menu_key})".format( - path=path_canonical, menu_key=menu_key)) + raise exceptions.ConflictError( + u"this menu path is already used: {path} ({menu_key})".format( + path=path_canonical, menu_key=menu_key + ) + ) - menu_data = {'path': tuple(path), - 'path_canonical': path_canonical, - 'security_limit': security_limit, - 'help_string': help_string, - 'type': type_ - } + menu_data = { + "path": tuple(path), + "path_canonical": path_canonical, + "security_limit": security_limit, + "help_string": help_string, + "type": type_, + } self._menus[callback_id] = menu_data self._menus_paths[menu_key] = callback_id return callback_id - def getMenus(self, language='', security_limit=C.NO_SECURITY_LIMIT): + def getMenus(self, language="", security_limit=C.NO_SECURITY_LIMIT): """Return all menus registered @param language: language used for translation, or empty string for default @@ -1032,24 +1198,36 @@ """ ret = [] for menu_id, menu_data in self._menus.iteritems(): - type_ = menu_data['type'] - path = menu_data['path'] - menu_security_limit = menu_data['security_limit'] - if security_limit!=C.NO_SECURITY_LIMIT and (menu_security_limit==C.NO_SECURITY_LIMIT or menu_security_limit>security_limit): + type_ = menu_data["type"] + path = menu_data["path"] + menu_security_limit = menu_data["security_limit"] + if security_limit != C.NO_SECURITY_LIMIT and ( + menu_security_limit == C.NO_SECURITY_LIMIT + or menu_security_limit > security_limit + ): continue languageSwitch(language) path_i18n = [_(elt) for elt in path] languageSwitch() - extra = {} # TODO: manage extra data like icon + extra = {} # TODO: manage extra data like icon ret.append((menu_id, type_, path, path_i18n, extra)) return ret - def _launchMenu(self, menu_type, path, data=None, security_limit=C.NO_SECURITY_LIMIT, profile_key=C.PROF_KEY_NONE): + def _launchMenu( + self, + menu_type, + path, + data=None, + security_limit=C.NO_SECURITY_LIMIT, + profile_key=C.PROF_KEY_NONE, + ): client = self.getClient(profile_key) return self.launchMenu(client, menu_type, path, data, security_limit) - def launchMenu(self, client, menu_type, path, data=None, security_limit=C.NO_SECURITY_LIMIT): + def launchMenu( + self, client, menu_type, path, data=None, security_limit=C.NO_SECURITY_LIMIT + ): """launch action a menu action @param menu_type(unicode): type of menu to launch @@ -1064,11 +1242,14 @@ try: callback_id = self._menus_paths[menu_key] except KeyError: - raise exceptions.NotFound(u"Can't find menu {path} ({menu_type})".format( - path=canonical_path, menu_type=menu_type)) + raise exceptions.NotFound( + u"Can't find menu {path} ({menu_type})".format( + path=canonical_path, menu_type=menu_type + ) + ) return 
self.launchCallback(callback_id, data, client.profile) - def getMenuHelp(self, menu_id, language=''): + def getMenuHelp(self, menu_id, language=""): """return the help string of the menu @param menu_id: id of the menu (same as callback_id) @@ -1081,6 +1262,6 @@ except KeyError: raise exceptions.DataError("Trying to access an unknown menu") languageSwitch(language) - help_string = _(menu_data['help_string']) + help_string = _(menu_data["help_string"]) languageSwitch() return help_string
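Note on the sat_main.py hunks above: menus are registered under a canonical path (each element lower-cased and stripped) combined with the menu type, and a duplicate registration raises a conflict error; launchMenu() then resolves the same canonical key back to a callback id. A simplified standalone sketch of that lookup logic (hypothetical class, ValueError standing in for the project's ConflictError/NotFound exceptions):

    def canonical_path(path):
        return tuple(p.lower().strip() for p in path)


    class MenuRegistry(object):
        def __init__(self):
            self._menus_paths = {}  # (type_, canonical path) -> callback id

        def register(self, path, callback_id, type_="GLOBAL"):
            key = (type_, canonical_path(path))
            if key in self._menus_paths:
                raise ValueError("this menu path is already used: %r" % (key,))
            self._menus_paths[key] = callback_id

        def lookup(self, path, type_="GLOBAL"):
            return self._menus_paths[(type_, canonical_path(path))]


    registry = MenuRegistry()
    registry.register(("File", "Open"), "cb-1")
    assert registry.lookup((" file ", "OPEN")) == "cb-1"
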
--- a/sat/core/xmpp.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/core/xmpp.py Wed Jun 27 20:14:46 2018 +0200 @@ -31,6 +31,7 @@ from wokkel import component from wokkel import delay from sat.core.log import getLogger + log = getLogger(__name__) from sat.core import exceptions from zope.interface import implements @@ -53,10 +54,10 @@ self.profile = profile self.host_app = host_app self.cache = cache.Cache(host_app, profile) - self._mess_id_uid = {} # map from message id to uid used in history. Key: (full_jid,message_id) Value: uid + self._mess_id_uid = {} # map from message id to uid used in history. Key: (full_jid,message_id) Value: uid self.conn_deferred = defer.Deferred() self._progress_cb = {} # callback called when a progress is requested (key = progress id) - self.actions = {} # used to keep track of actions for retrieval (key = action_id) + self.actions = {} # used to keep track of actions for retrieval (key = action_id) ## initialisation ## @@ -117,24 +118,37 @@ # but client should not be deleted except if session is finished (independently of connection/deconnection # try: - port = int(host.memory.getParamA(C.FORCE_PORT_PARAM, "Connection", profile_key=profile)) + port = int( + host.memory.getParamA( + C.FORCE_PORT_PARAM, "Connection", profile_key=profile + ) + ) except ValueError: log.debug(_("Can't parse port value, using default value")) - port = None # will use default value 5222 or be retrieved from a DNS SRV record + port = ( + None + ) # will use default value 5222 or be retrieved from a DNS SRV record - password = yield host.memory.asyncGetParamA("Password", "Connection", profile_key=profile) - entity = host.profiles[profile] = cls(host, profile, + password = yield host.memory.asyncGetParamA( + "Password", "Connection", profile_key=profile + ) + entity = host.profiles[profile] = cls( + host, + profile, jid.JID(host.memory.getParamA("JabberID", "Connection", profile_key=profile)), - password, host.memory.getParamA(C.FORCE_SERVER_PARAM, "Connection", profile_key=profile) or None, - port, max_retries) + password, + host.memory.getParamA(C.FORCE_SERVER_PARAM, "Connection", profile_key=profile) + or None, + port, + max_retries, + ) entity._createSubProtocols() entity.fallBack = SatFallbackHandler(host) entity.fallBack.setHandlerParent(entity) - entity.versionHandler = SatVersionHandler(C.APP_NAME_FULL, - host.full_version) + entity.versionHandler = SatVersionHandler(C.APP_NAME_FULL, host.full_version) entity.versionHandler.setHandlerParent(entity) entity.identityHandler = SatIdentityHandler() @@ -162,10 +176,17 @@ log.error(_(u"Plugins initialisation error")) for idx, (success, result) in enumerate(results): if not success: - log.error(u"error (plugin %(name)s): %(failure)s" % - {'name': plugin_conn_cb[idx][0]._info['import_name'], 'failure': result}) + log.error( + u"error (plugin %(name)s): %(failure)s" + % { + "name": plugin_conn_cb[idx][0]._info["import_name"], + "failure": result, + } + ) - yield list_d.addCallback(logPluginResults) # FIXME: we should have a timeout here, and a way to know if a plugin freeze + yield list_d.addCallback( + logPluginResults + ) # FIXME: we should have a timeout here, and a way to know if a plugin freeze # TODO: mesure launch time of each plugin def getConnectionDeferred(self): @@ -190,9 +211,13 @@ self._connected.addCallback(self._disconnectionCb) self._connected.addErrback(self._disconnectionEb) - log.info(_(u"********** [{profile}] CONNECTED **********").format(profile=self.profile)) + log.info( + _(u"********** [{profile}] CONNECTED 
**********").format(profile=self.profile) + ) self.streamInitialized() - self.host_app.bridge.connected(self.profile, unicode(self.jid)) # we send the signal to the clients + self.host_app.bridge.connected( + self.profile, unicode(self.jid) + ) # we send the signal to the clients def _finish_connection(self, dummy): self.conn_deferred.callback(None) @@ -200,7 +225,9 @@ def streamInitialized(self): """Called after _authd""" log.debug(_(u"XML stream is initialized")) - self.keep_alife = task.LoopingCall(self.xmlstream.send, " ") # Needed to avoid disconnection (specially with openfire) + self.keep_alife = task.LoopingCall( + self.xmlstream.send, " " + ) # Needed to avoid disconnection (specially with openfire) self.keep_alife.start(C.XMPP_KEEP_ALIFE) self.disco = SatDiscoProtocol(self) @@ -215,7 +242,12 @@ disco_d.addCallback(self._finish_connection) def initializationFailed(self, reason): - log.error(_(u"ERROR: XMPP connection failed for profile '%(profile)s': %(reason)s" % {'profile': self.profile, 'reason': reason})) + log.error( + _( + u"ERROR: XMPP connection failed for profile '%(profile)s': %(reason)s" + % {"profile": self.profile, "reason": reason} + ) + ) self.conn_deferred.errback(reason.value) try: super(SatXMPPEntity, self).initializationFailed(reason) @@ -231,14 +263,24 @@ except AttributeError: log.debug(_("No keep_alife")) if self._connected is not None: - self.host_app.bridge.disconnected(self.profile) # we send the signal to the clients + self.host_app.bridge.disconnected( + self.profile + ) # we send the signal to the clients self._connected.callback(None) - self.host_app.purgeEntity(self.profile) # and we remove references to this client - log.info(_(u"********** [{profile}] DISCONNECTED **********").format(profile=self.profile)) + self.host_app.purgeEntity( + self.profile + ) # and we remove references to this client + log.info( + _(u"********** [{profile}] DISCONNECTED **********").format( + profile=self.profile + ) + ) if not self.conn_deferred.called: # FIXME: real error is not gotten here (e.g. 
if jid is not know by Prosody, # we should have the real error) - self.conn_deferred.errback(error.StreamError(u"Server unexpectedly closed the connection")) + self.conn_deferred.errback( + error.StreamError(u"Server unexpectedly closed the connection") + ) @defer.inlineCallbacks def _cleanConnection(self, dummy): @@ -265,7 +307,7 @@ ## sending ## - def IQ(self, type_=u'set', timeout=60): + def IQ(self, type_=u"set", timeout=60): """shortcut to create an IQ element managing deferred @param type_(unicode): IQ type ('set' or 'get') @@ -301,26 +343,28 @@ - extra @return (dict) message data """ - data['xml'] = message_elt = domish.Element((None, 'message')) + data["xml"] = message_elt = domish.Element((None, "message")) message_elt["to"] = data["to"].full() - message_elt["from"] = data['from'].full() + message_elt["from"] = data["from"].full() message_elt["type"] = data["type"] - if data['uid']: # key must be present but can be set to '' - # by a plugin to avoid id on purpose - message_elt['id'] = data['uid'] + if data["uid"]: # key must be present but can be set to '' + # by a plugin to avoid id on purpose + message_elt["id"] = data["uid"] for lang, subject in data["subject"].iteritems(): subject_elt = message_elt.addElement("subject", content=subject) if lang: - subject_elt[(C.NS_XML, 'lang')] = lang + subject_elt[(C.NS_XML, "lang")] = lang for lang, message in data["message"].iteritems(): body_elt = message_elt.addElement("body", content=message) if lang: - body_elt[(C.NS_XML, 'lang')] = lang + body_elt[(C.NS_XML, "lang")] = lang try: - thread = data['extra']['thread'] + thread = data["extra"]["thread"] except KeyError: - if 'thread_parent' in data['extra']: - raise exceptions.InternalError(u"thread_parent found while there is not associated thread") + if "thread_parent" in data["extra"]: + raise exceptions.InternalError( + u"thread_parent found while there is not associated thread" + ) else: thread_elt = message_elt.addElement("thread", content=thread) try: @@ -336,7 +380,16 @@ """ raise NotImplementedError - def sendMessage(self, to_jid, message, subject=None, mess_type='auto', extra=None, uid=None, no_trigger=False): + def sendMessage( + self, + to_jid, + message, + subject=None, + mess_type="auto", + extra=None, + uid=None, + no_trigger=False, + ): """Send a message to an entity @param to_jid(jid.JID): destinee of the message @@ -371,18 +424,26 @@ "extra": extra, "timestamp": time.time(), } - pre_xml_treatments = defer.Deferred() # XXX: plugin can add their pre XML treatments to this deferred - post_xml_treatments = defer.Deferred() # XXX: plugin can add their post XML treatments to this deferred + pre_xml_treatments = ( + defer.Deferred() + ) # XXX: plugin can add their pre XML treatments to this deferred + post_xml_treatments = ( + defer.Deferred() + ) # XXX: plugin can add their post XML treatments to this deferred if data["type"] == C.MESS_TYPE_AUTO: # we try to guess the type if data["subject"]: data["type"] = C.MESS_TYPE_NORMAL - elif not data["to"].resource: # if to JID has a resource, the type is not 'groupchat' + elif not data[ + "to" + ].resource: # if to JID has a resource, the type is not 'groupchat' # we may have a groupchat message, we check if the we know this jid try: - entity_type = self.host_app.memory.getEntityData(data["to"], ['type'], self.profile)["type"] - #FIXME: should entity_type manage resources ? + entity_type = self.host_app.memory.getEntityData( + data["to"], ["type"], self.profile + )["type"] + # FIXME: should entity_type manage resources ? 
except (exceptions.UnknownEntityError, KeyError): entity_type = "contact" @@ -397,19 +458,33 @@ # FIXME: send_only is used by libervia's OTR plugin to avoid # the triggers from frontend, and no_trigger do the same # thing internally, this could be unified - send_only = data['extra'].get('send_only', False) + send_only = data["extra"].get("send_only", False) if not no_trigger and not send_only: - if not self.host_app.trigger.point("sendMessage" + self.trigger_suffix, self, data, pre_xml_treatments, post_xml_treatments): + if not self.host_app.trigger.point( + "sendMessage" + self.trigger_suffix, + self, + data, + pre_xml_treatments, + post_xml_treatments, + ): return defer.succeed(None) - log.debug(_(u"Sending message (type {type}, to {to})").format(type=data["type"], to=to_jid.full())) + log.debug( + _(u"Sending message (type {type}, to {to})").format( + type=data["type"], to=to_jid.full() + ) + ) pre_xml_treatments.addCallback(lambda dummy: self.generateMessageXML(data)) pre_xml_treatments.chainDeferred(post_xml_treatments) post_xml_treatments.addCallback(self.sendMessageData) if send_only: - log.debug(_("Triggers, storage and echo have been inhibited by the 'send_only' parameter")) + log.debug( + _( + "Triggers, storage and echo have been inhibited by the 'send_only' parameter" + ) + ) else: self.addPostXmlCallbacks(post_xml_treatments) post_xml_treatments.addErrback(self._cancelErrorTrap) @@ -430,10 +505,12 @@ if data[u"type"] != C.MESS_TYPE_GROUPCHAT: # we don't add groupchat message to history, as we get them back # and they will be added then - if data[u'message'] or data[u'subject']: # we need a message to store + if data[u"message"] or data[u"subject"]: # we need a message to store self.host_app.memory.addToHistory(self, data) else: - log.warning(u"No message found") # empty body should be managed by plugins before this point + log.warning( + u"No message found" + ) # empty body should be managed by plugins before this point return data def messageSendToBridge(self, data): @@ -445,11 +522,23 @@ if data[u"type"] != C.MESS_TYPE_GROUPCHAT: # we don't send groupchat message to bridge, as we get them back # and they will be added the - if data[u'message'] or data[u'subject']: # we need a message to send something + if ( + data[u"message"] or data[u"subject"] + ): # we need a message to send something # We send back the message, so all frontends are aware of it - self.host_app.bridge.messageNew(data[u'uid'], data[u'timestamp'], data[u'from'].full(), data[u'to'].full(), data[u'message'], data[u'subject'], data[u'type'], data[u'extra'], profile=self.profile) + self.host_app.bridge.messageNew( + data[u"uid"], + data[u"timestamp"], + data[u"from"].full(), + data[u"to"].full(), + data[u"message"], + data[u"subject"], + data[u"type"], + data[u"extra"], + profile=self.profile, + ) else: - log.warning(_(u"No message found")) + log.warning(_(u"No message found")) return data @@ -458,7 +547,16 @@ trigger_suffix = "" is_component = False - def __init__(self, host_app, profile, user_jid, password, host=None, port=C.XMPP_C2S_PORT, max_retries=C.XMPP_MAX_RETRIES): + def __init__( + self, + host_app, + profile, + user_jid, + password, + host=None, + port=C.XMPP_C2S_PORT, + max_retries=C.XMPP_MAX_RETRIES, + ): # XXX: DNS SRV records are checked when the host is not specified. # If no SRV record is found, the host is directly extracted from the JID. 
self.started = time.time() @@ -482,25 +580,30 @@ if isinstance(host_data, basestring): host = host_data elif isinstance(host_data, dict): - if u'host' in host_data: - host = host_data[u'host'] - if u'port' in host_data: - port = host_data[u'port'] + if u"host" in host_data: + host = host_data[u"host"] + if u"port" in host_data: + port = host_data[u"port"] else: - log.warning(_(u"invalid data used for host: {data}").format(data=host_data)) + log.warning( + _(u"invalid data used for host: {data}").format(data=host_data) + ) host_data = None if host_data is not None: - log.info(u"using {host}:{port} for host {host_ori} as requested in config".format( - host_ori = user_jid.host, - host = host, - port = port)) + log.info( + u"using {host}:{port} for host {host_ori} as requested in config".format( + host_ori=user_jid.host, host=host, port=port + ) + ) - wokkel_client.XMPPClient.__init__(self, user_jid, password, host or None, port or C.XMPP_C2S_PORT) + wokkel_client.XMPPClient.__init__( + self, user_jid, password, host or None, port or C.XMPP_C2S_PORT + ) SatXMPPEntity.__init__(self, host_app, profile, max_retries) def _getPluginsList(self): for p in self.host_app.plugins.itervalues(): - if C.PLUG_MODE_CLIENT in p._info[u'modes']: + if C.PLUG_MODE_CLIENT in p._info[u"modes"]: yield p def _createSubProtocols(self): @@ -531,9 +634,9 @@ # (out of band transmission for instance). # e2e should have a priority of 0 here, and out of band transmission # a lower priority - # FIXME: trigger not used yet, can be uncommented when e2e full stanza encryption is implemented - # if not self.host_app.trigger.point("send", self, obj): - # return + #  FIXME: trigger not used yet, can be uncommented when e2e full stanza encryption is implemented + #  if not self.host_app.trigger.point("send", self, obj): + #  return super(SatXMPPClient, self).send(obj) def sendMessageData(self, mess_data): @@ -549,7 +652,7 @@ # This is intented for e2e encryption which doesn't do full stanza encryption (e.g. OTR) # This trigger point can't cancel the method self.host_app.trigger.point("sendMessageData", self, mess_data) - self.send(mess_data[u'xml']) + self.send(mess_data[u"xml"]) return mess_data def feedback(self, to_jid, message): @@ -560,15 +663,17 @@ @param to_jid(jid.JID): destinee jid @param message(unicode): message to send to frontends """ - self.host_app.bridge.messageNew(uid=unicode(uuid.uuid4()), - timestamp=time.time(), - from_jid=self.jid.full(), - to_jid=to_jid.full(), - message={u'': message}, - subject={}, - mess_type=C.MESS_TYPE_INFO, - extra={}, - profile=self.profile) + self.host_app.bridge.messageNew( + uid=unicode(uuid.uuid4()), + timestamp=time.time(), + from_jid=self.jid.full(), + to_jid=to_jid.full(), + message={u"": message}, + subject={}, + mess_type=C.MESS_TYPE_INFO, + extra={}, + profile=self.profile, + ) def _finish_connection(self, dummy): self.roster.requestRoster() @@ -583,12 +688,26 @@ An entry point plugin is launched after component is connected. 
Component need to instantiate MessageProtocol itself """ + implements(iwokkel.IDisco) - trigger_suffix = "Component" # used for to distinguish some trigger points set in SatXMPPEntity + trigger_suffix = ( + "Component" + ) # used for to distinguish some trigger points set in SatXMPPEntity is_component = True - sendHistory = False # XXX: set to True from entry plugin to keep messages in history for received messages + sendHistory = ( + False + ) # XXX: set to True from entry plugin to keep messages in history for received messages - def __init__(self, host_app, profile, component_jid, password, host=None, port=None, max_retries=C.XMPP_MAX_RETRIES): + def __init__( + self, + host_app, + profile, + component_jid, + password, + host=None, + port=None, + max_retries=C.XMPP_MAX_RETRIES, + ): self.started = time.time() if port is None: port = C.XMPP_COMPONENT_PORT @@ -598,15 +717,18 @@ try: self.entry_plugin = host_app.plugins[entry_point] except KeyError: - raise exceptions.NotFound(_(u"The requested entry point ({entry_point}) is not available").format( - entry_point = entry_point)) + raise exceptions.NotFound( + _(u"The requested entry point ({entry_point}) is not available").format( + entry_point=entry_point + ) + ) self.identities = [disco.DiscoIdentity(u"component", u"generic", C.APP_NAME)] # jid is set automatically on bind by Twisted for Client, but not for Component self.jid = component_jid if host is None: try: - host = component_jid.host.split(u'.', 1)[1] + host = component_jid.host.split(u".", 1)[1] except IndexError: raise ValueError(u"Can't guess host from jid, please specify a host") # XXX: component.Component expect unicode jid, while Client expect jid.JID. @@ -628,14 +750,18 @@ @raise InternalError: one of the plugin is not handling components @raise KeyError: one plugin should be present in self.host_app.plugins but it is not """ - if C.PLUG_MODE_COMPONENT not in current._info[u'modes']: + if C.PLUG_MODE_COMPONENT not in current._info[u"modes"]: if not required: return else: - log.error(_(u"Plugin {current_name} is needed for {entry_name}, but it doesn't handle component mode").format( - current_name = current._info[u'import_name'], - entry_name = self.entry_plugin._info[u'import_name'] - )) + log.error( + _( + u"Plugin {current_name} is needed for {entry_name}, but it doesn't handle component mode" + ).format( + current_name=current._info[u"import_name"], + entry_name=self.entry_plugin._info[u"import_name"], + ) + ) raise exceptions.InternalError(_(u"invalid plugin mode")) for import_name in current._info.get(C.PI_DEPENDENCIES, []): @@ -651,7 +777,7 @@ dep = self.host_app.plugins[import_name] except KeyError: continue - self._buildDependencies(dep, plugins, required = False) + self._buildDependencies(dep, plugins, required=False) if current not in plugins: # current can be required for several plugins and so @@ -680,7 +806,6 @@ class SatMessageProtocol(xmppim.MessageProtocol): - def __init__(self, host): xmppim.MessageProtocol.__init__(self) self.host = host @@ -697,52 +822,60 @@ message = {} subject = {} extra = {} - data = {"from": jid.JID(message_elt['from']), - "to": jid.JID(message_elt['to']), - "uid": message_elt.getAttribute('uid', unicode(uuid.uuid4())), # XXX: uid is not a standard attribute but may be added by plugins - "message": message, - "subject": subject, - "type": message_elt.getAttribute('type', 'normal'), - "extra": extra} + data = { + "from": jid.JID(message_elt["from"]), + "to": jid.JID(message_elt["to"]), + "uid": message_elt.getAttribute( + "uid", 
unicode(uuid.uuid4()) + ), # XXX: uid is not a standard attribute but may be added by plugins + "message": message, + "subject": subject, + "type": message_elt.getAttribute("type", "normal"), + "extra": extra, + } if client is not None: try: - data['stanza_id'] = message_elt['id'] + data["stanza_id"] = message_elt["id"] except KeyError: pass else: - client._mess_id_uid[(data['from'], data['stanza_id'])] = data['uid'] + client._mess_id_uid[(data["from"], data["stanza_id"])] = data["uid"] # message - for e in message_elt.elements(C.NS_CLIENT, 'body'): - message[e.getAttribute((C.NS_XML,'lang'),'')] = unicode(e) + for e in message_elt.elements(C.NS_CLIENT, "body"): + message[e.getAttribute((C.NS_XML, "lang"), "")] = unicode(e) # subject - for e in message_elt.elements(C.NS_CLIENT, 'subject'): - subject[e.getAttribute((C.NS_XML, 'lang'),'')] = unicode(e) + for e in message_elt.elements(C.NS_CLIENT, "subject"): + subject[e.getAttribute((C.NS_XML, "lang"), "")] = unicode(e) # delay and timestamp try: - delay_elt = message_elt.elements(delay.NS_DELAY, 'delay').next() + delay_elt = message_elt.elements(delay.NS_DELAY, "delay").next() except StopIteration: - data['timestamp'] = time.time() + data["timestamp"] = time.time() else: parsed_delay = delay.Delay.fromElement(delay_elt) - data['timestamp'] = calendar.timegm(parsed_delay.stamp.utctimetuple()) - data['received_timestamp'] = unicode(time.time()) + data["timestamp"] = calendar.timegm(parsed_delay.stamp.utctimetuple()) + data["received_timestamp"] = unicode(time.time()) if parsed_delay.sender: - data['delay_sender'] = parsed_delay.sender.full() + data["delay_sender"] = parsed_delay.sender.full() return data def onMessage(self, message_elt): # TODO: handle threads client = self.parent - if not 'from' in message_elt.attributes: - message_elt['from'] = client.jid.host - log.debug(_(u"got message from: {from_}").format(from_=message_elt['from'])) - post_treat = defer.Deferred() # XXX: plugin can add their treatments to this deferred + if not "from" in message_elt.attributes: + message_elt["from"] = client.jid.host + log.debug(_(u"got message from: {from_}").format(from_=message_elt["from"])) + post_treat = ( + defer.Deferred() + ) # XXX: plugin can add their treatments to this deferred - if not self.host.trigger.point("MessageReceived", client, message_elt, post_treat): + if not self.host.trigger.point( + "MessageReceived", client, message_elt, post_treat + ): return data = self.parseMessage(message_elt, client) @@ -754,25 +887,35 @@ post_treat.callback(data) def skipEmptyMessage(self, data): - if not data['message'] and not data['extra'] and not data['subject']: + if not data["message"] and not data["extra"] and not data["subject"]: raise failure.Failure(exceptions.CancelError("Cancelled empty message")) return data def addToHistory(self, data, client): - if data.pop(u'history', None) == C.HISTORY_SKIP: - log.info(u'history is skipped as requested') - data[u'extra'][u'history'] = C.HISTORY_SKIP + if data.pop(u"history", None) == C.HISTORY_SKIP: + log.info(u"history is skipped as requested") + data[u"extra"][u"history"] = C.HISTORY_SKIP else: return self.host.memory.addToHistory(client, data) def bridgeSignal(self, dummy, client, data): try: - data['extra']['received_timestamp'] = data['received_timestamp'] - data['extra']['delay_sender'] = data['delay_sender'] + data["extra"]["received_timestamp"] = data["received_timestamp"] + data["extra"]["delay_sender"] = data["delay_sender"] except KeyError: pass if data is not None: - 
self.host.bridge.messageNew(data['uid'], data['timestamp'], data['from'].full(), data['to'].full(), data['message'], data['subject'], data['type'], data['extra'], profile=client.profile) + self.host.bridge.messageNew( + data["uid"], + data["timestamp"], + data["from"].full(), + data["to"].full(), + data["message"], + data["subject"], + data["type"], + data["extra"], + profile=client.profile, + ) return data def cancelErrorTrap(self, failure_): @@ -781,26 +924,29 @@ class SatRosterProtocol(xmppim.RosterClientProtocol): - def __init__(self, host): xmppim.RosterClientProtocol.__init__(self) self.host = host - self.got_roster = defer.Deferred() # called when roster is received and ready - #XXX: the two following dicts keep a local copy of the roster + self.got_roster = defer.Deferred() # called when roster is received and ready + # XXX: the two following dicts keep a local copy of the roster self._groups = {} # map from groups to jids: key=group value=set of jids self._jids = None # map from jids to RosterItem: key=jid value=RosterItem def rosterCb(self, roster): - assert roster is not None # FIXME: must be managed with roster versioning + assert roster is not None # FIXME: must be managed with roster versioning self._groups.clear() self._jids = roster for item in roster.itervalues(): if not item.subscriptionTo and not item.subscriptionFrom and not item.ask: - #XXX: current behaviour: we don't want contact in our roster list + # XXX: current behaviour: we don't want contact in our roster list # if there is no presence subscription # may change in the future - log.info(u"Removing contact {} from roster because there is no presence subscription".format(item.jid)) - self.removeItem(item.entity) # FIXME: to be checked + log.info( + u"Removing contact {} from roster because there is no presence subscription".format( + item.jid + ) + ) + self.removeItem(item.entity) # FIXME: to be checked else: self._registerItem(item) @@ -812,10 +958,16 @@ """ log.debug(u"registering item: {}".format(item.entity.full())) if item.entity.resource: - log.warning(u"Received a roster item with a resource, this is not common but not restricted by RFC 6121, this case may be not well tested.") + log.warning( + u"Received a roster item with a resource, this is not common but not restricted by RFC 6121, this case may be not well tested." 
+ ) if not item.subscriptionTo: if not item.subscriptionFrom: - log.info(_(u"There's no subscription between you and [{}]!").format(item.entity.full())) + log.info( + _(u"There's no subscription between you and [{}]!").format( + item.entity.full() + ) + ) else: log.info(_(u"You are not subscribed to [{}]!").format(item.entity.full())) if not item.subscriptionFrom: @@ -843,16 +995,17 @@ @param item: RosterItem @return: dictionary of attributes """ - item_attr = {'to': unicode(item.subscriptionTo), - 'from': unicode(item.subscriptionFrom), - 'ask': unicode(item.ask) - } + item_attr = { + "to": unicode(item.subscriptionTo), + "from": unicode(item.subscriptionFrom), + "ask": unicode(item.ask), + } if item.name: - item_attr['name'] = item.name + item_attr["name"] = item.name return item_attr def setReceived(self, request): - #TODO: implement roster versioning (cf RFC 6121 §2.6) + # TODO: implement roster versioning (cf RFC 6121 §2.6) item = request.item try: # update the cache for the groups the contact has been removed from left_groups = set(self._jids[item.entity].groups).difference(item.groups) @@ -865,7 +1018,9 @@ pass # no previous item registration (or it's been cleared) self._jids[item.entity] = item self._registerItem(item) - self.host.bridge.newContact(item.entity.full(), self.getAttributes(item), item.groups, self.parent.profile) + self.host.bridge.newContact( + item.entity.full(), self.getAttributes(item), item.groups, self.parent.profile + ) def removeReceived(self, request): entity = request.item.entity @@ -875,7 +1030,11 @@ try: item = self._jids.pop(entity) except KeyError: - log.error(u"Received a roster remove event for an item not in cache ({})".format(entity)) + log.error( + u"Received a roster remove event for an item not in cache ({})".format( + entity + ) + ) return for group in item.groups: try: @@ -884,8 +1043,10 @@ if not jids_set: del self._groups[group] except KeyError: - log.warning(u"there is no cache for the group [%(group)s] of the removed roster item [%(jid)s]" % - {"group": group, "jid": entity}) + log.warning( + u"there is no cache for the group [%(group)s] of the removed roster item [%(jid)s]" + % {"group": group, "jid": entity} + ) # then we send the bridge signal self.host.bridge.contactDeleted(entity.full(), self.parent.profile) @@ -939,7 +1100,7 @@ @return (set(jid.JID)): set of selected jids """ if type_ == C.ALL and groups is not None: - raise ValueError('groups must not be set for {} type'.format(C.ALL)) + raise ValueError("groups must not be set for {} type".format(C.ALL)) if type_ == C.ALL: return set(self.getJids()) @@ -949,7 +1110,7 @@ jids.update(self.getJidsFromGroup(group)) return jids else: - raise ValueError(u'Unexpected type_ {}'.format(type_)) + raise ValueError(u"Unexpected type_ {}".format(type_)) def getNick(self, entity_jid): """Return a nick name for an entity @@ -965,7 +1126,6 @@ class SatPresenceProtocol(xmppim.PresenceClientProtocol): - def __init__(self, host): xmppim.PresenceClientProtocol.__init__(self) self.host = host @@ -977,7 +1137,17 @@ presence_d.addCallback(lambda __: super(SatPresenceProtocol, self).send(obj)) def availableReceived(self, entity, show=None, statuses=None, priority=0): - log.debug(_(u"presence update for [%(entity)s] (available, show=%(show)s statuses=%(statuses)s priority=%(priority)d)") % {'entity': entity, C.PRESENCE_SHOW: show, C.PRESENCE_STATUSES: statuses, C.PRESENCE_PRIORITY: priority}) + log.debug( + _( + u"presence update for [%(entity)s] (available, show=%(show)s statuses=%(statuses)s 
priority=%(priority)d)" + ) + % { + "entity": entity, + C.PRESENCE_SHOW: show, + C.PRESENCE_STATUSES: statuses, + C.PRESENCE_PRIORITY: priority, + } + ) if not statuses: statuses = {} @@ -985,20 +1155,25 @@ if None in statuses: # we only want string keys statuses[C.PRESENCE_STATUSES_DEFAULT] = statuses.pop(None) - if not self.host.trigger.point("presenceReceived", entity, show, priority, statuses, self.parent.profile): + if not self.host.trigger.point( + "presenceReceived", entity, show, priority, statuses, self.parent.profile + ): return - self.host.memory.setPresenceStatus(entity, show or "", - int(priority), statuses, - self.parent.profile) + self.host.memory.setPresenceStatus( + entity, show or "", int(priority), statuses, self.parent.profile + ) # now it's time to notify frontends - self.host.bridge.presenceUpdate(entity.full(), show or "", - int(priority), statuses, - self.parent.profile) + self.host.bridge.presenceUpdate( + entity.full(), show or "", int(priority), statuses, self.parent.profile + ) def unavailableReceived(self, entity, statuses=None): - log.debug(_(u"presence update for [%(entity)s] (unavailable, statuses=%(statuses)s)") % {'entity': entity, C.PRESENCE_STATUSES: statuses}) + log.debug( + _(u"presence update for [%(entity)s] (unavailable, statuses=%(statuses)s)") + % {"entity": entity, C.PRESENCE_STATUSES: statuses} + ) if not statuses: statuses = {} @@ -1006,21 +1181,33 @@ if None in statuses: # we only want string keys statuses[C.PRESENCE_STATUSES_DEFAULT] = statuses.pop(None) - if not self.host.trigger.point("presenceReceived", entity, "unavailable", 0, statuses, self.parent.profile): + if not self.host.trigger.point( + "presenceReceived", entity, "unavailable", 0, statuses, self.parent.profile + ): return # now it's time to notify frontends # if the entity is not known yet in this session or is already unavailable, there is no need to send an unavailable signal try: - presence = self.host.memory.getEntityDatum(entity, "presence", self.parent.profile) + presence = self.host.memory.getEntityDatum( + entity, "presence", self.parent.profile + ) except (KeyError, exceptions.UnknownEntityError): # the entity has not been seen yet in this session pass else: if presence.show != C.PRESENCE_UNAVAILABLE: - self.host.bridge.presenceUpdate(entity.full(), C.PRESENCE_UNAVAILABLE, 0, statuses, self.parent.profile) + self.host.bridge.presenceUpdate( + entity.full(), + C.PRESENCE_UNAVAILABLE, + 0, + statuses, + self.parent.profile, + ) - self.host.memory.setPresenceStatus(entity, C.PRESENCE_UNAVAILABLE, 0, statuses, self.parent.profile) + self.host.memory.setPresenceStatus( + entity, C.PRESENCE_UNAVAILABLE, 0, statuses, self.parent.profile + ) def available(self, entity=None, show=None, statuses=None, priority=None): """Set a presence and statuses. @@ -1032,7 +1219,11 @@ """ if priority is None: try: - priority = int(self.host.memory.getParamA("Priority", "Connection", profile_key=self.parent.profile)) + priority = int( + self.host.memory.getParamA( + "Priority", "Connection", profile_key=self.parent.profile + ) + ) except ValueError: priority = 0 @@ -1048,7 +1239,7 @@ # ... 
before switching back if None in statuses: - statuses['default'] = statuses.pop(None) + statuses["default"] = statuses.pop(None) if not self.host.trigger.point("presence_available", presence_elt, self.parent): return @@ -1060,7 +1251,9 @@ xmppim.PresenceClientProtocol.subscribed(self, entity) self.host.memory.delWaitingSub(entity.userhost(), self.parent.profile) item = self.parent.roster.getItem(entity) - if not item or not item.subscriptionTo: # we automatically subscribe to 'to' presence + if ( + not item or not item.subscriptionTo + ): # we automatically subscribe to 'to' presence log.debug(_('sending automatic "from" subscription request')) self.subscribe(entity) @@ -1070,11 +1263,11 @@ def subscribedReceived(self, entity): log.debug(_(u"subscription approved for [%s]") % entity.userhost()) - self.host.bridge.subscribe('subscribed', entity.userhost(), self.parent.profile) + self.host.bridge.subscribe("subscribed", entity.userhost(), self.parent.profile) def unsubscribedReceived(self, entity): log.debug(_(u"unsubscription confirmed for [%s]") % entity.userhost()) - self.host.bridge.subscribe('unsubscribed', entity.userhost(), self.parent.profile) + self.host.bridge.subscribe("unsubscribed", entity.userhost(), self.parent.profile) @defer.inlineCallbacks def subscribeReceived(self, entity): @@ -1083,11 +1276,15 @@ item = self.parent.roster.getItem(entity) if item and item.subscriptionTo: # We automatically accept subscription if we are already subscribed to contact presence - log.debug(_('sending automatic subscription acceptance')) + log.debug(_("sending automatic subscription acceptance")) self.subscribed(entity) else: - self.host.memory.addWaitingSub('subscribe', entity.userhost(), self.parent.profile) - self.host.bridge.subscribe('subscribe', entity.userhost(), self.parent.profile) + self.host.memory.addWaitingSub( + "subscribe", entity.userhost(), self.parent.profile + ) + self.host.bridge.subscribe( + "subscribe", entity.userhost(), self.parent.profile + ) @defer.inlineCallbacks def unsubscribeReceived(self, entity): @@ -1095,9 +1292,9 @@ yield self.parent.roster.got_roster item = self.parent.roster.getItem(entity) if item and item.subscriptionFrom: # we automatically remove contact - log.debug(_('automatic contact deletion')) + log.debug(_("automatic contact deletion")) self.host.delContact(entity, self.parent.profile) - self.host.bridge.subscribe('unsubscribe', entity.userhost(), self.parent.profile) + self.host.bridge.subscribe("unsubscribe", entity.userhost(), self.parent.profile) class SatDiscoProtocol(disco.DiscoClientProtocol): @@ -1117,9 +1314,8 @@ class SatVersionHandler(generic.VersionHandler): - def getDiscoInfo(self, requestor, target, node): - #XXX: We need to work around wokkel's behaviour (namespace not added if there is a + # XXX: We need to work around wokkel's behaviour (namespace not added if there is a # node) as it cause issues with XEP-0115 & PEP (XEP-0163): there is a node when server # ask for disco info, and not when we generate the key, so the hash is used with different # disco features, and when the server (seen on ejabberd) generate its own hash for security check @@ -1131,11 +1327,12 @@ """ Manage disco Identity of Sà T. """ - #TODO: dynamic identity update (see docstring). Note that a XMPP entity can have several identities + + # TODO: dynamic identity update (see docstring). 
Note that a XMPP entity can have several identities implements(iwokkel.IDisco) - def getDiscoInfo(self, requestor, target, nodeIdentifier=''): + def getDiscoInfo(self, requestor, target, nodeIdentifier=""): return self.parent.identities - def getDiscoItems(self, requestor, target, nodeIdentifier=''): + def getDiscoItems(self, requestor, target, nodeIdentifier=""): return []
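The message handling reformatted above reduces every incoming <message/> stanza to a plain dict (uid, per-language message/subject maps, type, extra, timestamps) before triggers, history storage and the messageNew bridge signal see it. The standard-library sketch below only illustrates that dict shape; it is not the wokkel-based parseMessage() itself, and the stanza string and helper name are invented for the example.

# Illustration only: the real parseMessage() above works on twisted/wokkel
# domish elements and also handles <delay/>, stanza ids and plugin triggers.
# This sketch just shows the shape of the dict handed to history, triggers
# and the messageNew bridge signal.
import time
import uuid
import xml.etree.ElementTree as ET

NS_CLIENT = "jabber:client"
NS_XML = "http://www.w3.org/XML/1998/namespace"

def parse_message_sketch(raw_stanza):
    message_elt = ET.fromstring(raw_stanza)
    message, subject, extra = {}, {}, {}
    data = {
        "from": message_elt.get("from"),
        "to": message_elt.get("to"),
        "uid": message_elt.get("uid", str(uuid.uuid4())),
        "message": message,        # per xml:lang body texts
        "subject": subject,        # per xml:lang subject texts
        "type": message_elt.get("type", "normal"),
        "extra": extra,
        "timestamp": time.time(),  # no <delay/> handling in this sketch
    }
    for e in message_elt.findall("{%s}body" % NS_CLIENT):
        message[e.get("{%s}lang" % NS_XML, "")] = e.text or ""
    for e in message_elt.findall("{%s}subject" % NS_CLIENT):
        subject[e.get("{%s}lang" % NS_XML, "")] = e.text or ""
    return data

stanza = ('<message xmlns="jabber:client" from="juliet@example.org/balcony"'
          ' to="romeo@example.net" type="chat">'
          '<body>wherefore art thou?</body></message>')
print(parse_message_sketch(stanza)["message"])  # {'': 'wherefore art thou?'}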
--- a/sat/memory/cache.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/memory/cache.py Wed Jun 27 20:14:46 2018 +0200 @@ -18,6 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. from sat.core.log import getLogger + log = getLogger(__name__) from sat.tools.common import regex from sat.core import exceptions @@ -27,7 +28,7 @@ import os.path import time -DEFAULT_EXT = '.raw' +DEFAULT_EXT = ".raw" class Cache(object): @@ -39,11 +40,11 @@ if None, the cache will be common for all profiles """ self.profile = profile - path_elts = [host.memory.getConfig('', 'local_dir'), C.CACHE_DIR] + path_elts = [host.memory.getConfig("", "local_dir"), C.CACHE_DIR] if profile: - path_elts.extend([u'profiles',regex.pathEscape(profile)]) + path_elts.extend([u"profiles", regex.pathEscape(profile)]) else: - path_elts.append(u'common') + path_elts.append(u"common") self.cache_dir = os.path.join(*path_elts) if not os.path.exists(self.cache_dir): @@ -54,8 +55,10 @@ @param filename(unicode): cached file name (cache data or actual file) """ - if not filename or u'/' in filename: - log.error(u"invalid char found in file name, hack attempt? name:{}".format(filename)) + if not filename or u"/" in filename: + log.error( + u"invalid char found in file name, hack attempt? name:{}".format(filename) + ) raise exceptions.DataError(u"Invalid char found") return os.path.join(self.cache_dir, filename) @@ -76,26 +79,27 @@ return None try: - with open(cache_url, 'rb') as f: + with open(cache_url, "rb") as f: cache_data = pickle.load(f) except IOError: log.warning(u"can't read cache at {}".format(cache_url)) return None except pickle.UnpicklingError: - log.warning(u'invalid cache found at {}'.format(cache_url)) + log.warning(u"invalid cache found at {}".format(cache_url)) return None try: - eol = cache_data['eol'] + eol = cache_data["eol"] except KeyError: - log.warning(u'no End Of Life found for cached file {}'.format(uid)) + log.warning(u"no End Of Life found for cached file {}".format(uid)) eol = 0 if eol < time.time(): - log.debug(u"removing expired cache (expired for {}s)".format( - time.time() - eol)) + log.debug( + u"removing expired cache (expired for {}s)".format(time.time() - eol) + ) return None - cache_data['path'] = self.getPath(cache_data['filename']) + cache_data["path"] = self.getPath(cache_data["filename"]) return cache_data def getFilePath(self, uid): @@ -107,7 +111,7 @@ """ metadata = self.getMetadata(uid) if metadata is not None: - return metadata['path'] + return metadata["path"] def cacheData(self, source, uid, mime_type=None, max_age=None, filename=None): """create cache metadata and file object to use for actual data @@ -130,24 +134,27 @@ if mime_type: ext = mimetypes.guess_extension(mime_type, strict=False) if ext is None: - log.warning(u"can't find extension for MIME type {}".format(mime_type)) + log.warning( + u"can't find extension for MIME type {}".format(mime_type) + ) ext = DEFAULT_EXT - elif ext == u'.jpe': - ext = u'.jpg' + elif ext == u".jpe": + ext = u".jpg" else: ext = DEFAULT_EXT mime_type = None filename = uid + ext if max_age is None: max_age = C.DEFAULT_MAX_AGE - cache_data = {u'source': source, - u'filename': filename, - u'eol': int(time.time()) + max_age, - u'mime_type': mime_type, - } + cache_data = { + u"source": source, + u"filename": filename, + u"eol": int(time.time()) + max_age, + u"mime_type": mime_type, + } file_path = self.getPath(filename) - with open(cache_url, 'wb') as f: + with open(cache_url, "wb") as f: pickle.dump(cache_data, f, protocol=2) - return 
open(file_path, 'wb') + return open(file_path, "wb")
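For reference, cacheData() above pairs every cached payload with a small pickled metadata dict ({source, filename, eol, mime_type}), and getMetadata() discards entries whose eol has passed. A minimal standalone sketch of that record follows, with made-up directory, uid and values; the real class also validates file names and guesses extensions from the MIME type.

# Standalone sketch of the metadata record pickled next to each cached file.
import os
import pickle
import tempfile
import time

cache_dir = tempfile.mkdtemp()
uid = "123e4567-cache-uid"
cache_data = {
    u"source": u"some_plugin",
    u"filename": uid + u".raw",
    u"eol": int(time.time()) + 3600,  # end of life: keep for one hour
    u"mime_type": None,
}
# cacheData() stores the metadata under the uid, the payload under "filename"
with open(os.path.join(cache_dir, uid), "wb") as f:
    pickle.dump(cache_data, f, protocol=2)

# getMetadata() reads it back and ignores entries whose "eol" has passed
with open(os.path.join(cache_dir, uid), "rb") as f:
    metadata = pickle.load(f)
if metadata[u"eol"] < time.time():
    print("cache expired, ignoring it")
else:
    print("cached file is %s" % metadata[u"filename"])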
--- a/sat/memory/crypto.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/memory/crypto.py Wed Jun 27 20:14:46 2018 +0200 @@ -47,13 +47,17 @@ @param leave_empty (bool): if True, empty text will be returned "as is" @return: Deferred: base-64 encoded str """ - if leave_empty and text == '': + if leave_empty and text == "": return succeed(text) iv = BlockCipher.getRandomKey() - key = key.encode('utf-8') - key = key[:BlockCipher.MAX_KEY_SIZE] if len(key) >= BlockCipher.MAX_KEY_SIZE else BlockCipher.pad(key) + key = key.encode("utf-8") + key = ( + key[: BlockCipher.MAX_KEY_SIZE] + if len(key) >= BlockCipher.MAX_KEY_SIZE + else BlockCipher.pad(key) + ) cipher = AES.new(key, AES.MODE_CFB, iv) - d = deferToThread(cipher.encrypt, BlockCipher.pad(text.encode('utf-8'))) + d = deferToThread(cipher.encrypt, BlockCipher.pad(text.encode("utf-8"))) d.addCallback(lambda ciphertext: b64encode(iv + ciphertext)) return d @@ -68,12 +72,19 @@ @param leave_empty (bool): if True, empty ciphertext will be returned "as is" @return: Deferred: str or None if the password could not be decrypted """ - if leave_empty and ciphertext == '': - return succeed('') + if leave_empty and ciphertext == "": + return succeed("") ciphertext = b64decode(ciphertext) - iv, ciphertext = ciphertext[:BlockCipher.IV_SIZE], ciphertext[BlockCipher.IV_SIZE:] - key = key.encode('utf-8') - key = key[:BlockCipher.MAX_KEY_SIZE] if len(key) >= BlockCipher.MAX_KEY_SIZE else BlockCipher.pad(key) + iv, ciphertext = ( + ciphertext[: BlockCipher.IV_SIZE], + ciphertext[BlockCipher.IV_SIZE :], + ) + key = key.encode("utf-8") + key = ( + key[: BlockCipher.MAX_KEY_SIZE] + if len(key) >= BlockCipher.MAX_KEY_SIZE + else BlockCipher.pad(key) + ) cipher = AES.new(key, AES.MODE_CFB, iv) d = deferToThread(cipher.decrypt, ciphertext) d.addCallback(lambda text: BlockCipher.unpad(text)) @@ -81,7 +92,7 @@ # a decrypted empty value and a decryption failure... both return # the empty value. Fortunately, we detect empty passwords beforehand # thanks to the "leave_empty" parameter which is used by default. - d.addCallback(lambda text: text.decode('utf-8') if text else None) + d.addCallback(lambda text: text.decode("utf-8") if text else None) return d @classmethod @@ -108,7 +119,7 @@ @classmethod def unpad(self, s): """Method from http://stackoverflow.com/a/12525165""" - return s[0:-ord(s[-1])] + return s[0 : -ord(s[-1])] class PasswordHasher(object): @@ -124,9 +135,13 @@ @param leave_empty (bool): if True, empty password will be returned "as is" @return: Deferred: base-64 encoded str """ - if leave_empty and password == '': + if leave_empty and password == "": return succeed(password) - salt = b64decode(salt)[:PasswordHasher.SALT_LEN] if salt else urandom(PasswordHasher.SALT_LEN) + salt = ( + b64decode(salt)[: PasswordHasher.SALT_LEN] + if salt + else urandom(PasswordHasher.SALT_LEN) + ) d = deferToThread(PBKDF2, password, salt) d.addCallback(lambda hashed: b64encode(salt + hashed)) return d @@ -139,7 +154,7 @@ @param hashed (str): the hash of the password @return: Deferred: boolean """ - leave_empty = hashed == '' + leave_empty = hashed == "" d = PasswordHasher.hash(attempt, hashed, leave_empty) d.addCallback(lambda hashed_attempt: hashed_attempt == hashed) return d
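BlockCipher above encrypts with AES in CFB mode, prepends the random IV to the ciphertext before base-64 encoding, and truncates or pads the key to MAX_KEY_SIZE; unpad() strips as many trailing bytes as the value of the last byte. The matching pad() is not shown in this diff, so the standalone pair below is only an assumption consistent with that unpad() (PKCS#7 style, with AES's 16-byte block size), kept standard-library-only.

# Assumed padding scheme matching the unpad() shown above; not the actual
# BlockCipher.pad(), which is outside this diff.
BLOCK_SIZE = 16

def pad(s):
    # always add 1..BLOCK_SIZE padding bytes, each carrying the pad length
    pad_len = BLOCK_SIZE - len(s) % BLOCK_SIZE
    return s + bytes(bytearray([pad_len])) * pad_len

def unpad(s):
    # mirror of BlockCipher.unpad: drop as many bytes as the last byte says
    return s[0:-ord(s[-1:])]

assert unpad(pad(b"attack at dawn")) == b"attack at dawn"
assert len(pad(b"16 bytes exactly")) == 32  # a full block of padding is added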
--- a/sat/memory/disco.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/memory/disco.py Wed Jun 27 20:14:46 2018 +0200 @@ -20,6 +20,7 @@ from sat.core.i18n import _ from sat.core import exceptions from sat.core.log import getLogger + log = getLogger(__name__) from twisted.words.protocols.jabber import jid from twisted.words.protocols.jabber.error import StanzaError @@ -35,7 +36,8 @@ TIMEOUT = 15 -CAP_HASH_ERROR = 'ERROR' +CAP_HASH_ERROR = "ERROR" + class HashGenerationError(Exception): pass @@ -46,10 +48,10 @@ def __init__(self, identity, lang=None): assert isinstance(identity, disco.DiscoIdentity) - self.category = identity.category.encode('utf-8') - self.idType = identity.type.encode('utf-8') - self.name = identity.name.encode('utf-8') if identity.name else '' - self.lang = lang.encode('utf-8') if lang is not None else '' + self.category = identity.category.encode("utf-8") + self.idType = identity.type.encode("utf-8") + self.name = identity.name.encode("utf-8") if identity.name else "" + self.lang = lang.encode("utf-8") if lang is not None else "" def __str__(self): return "%s/%s/%s/%s" % (self.category, self.idType, self.lang, self.name) @@ -63,8 +65,8 @@ def __init__(self, persistent): self.hashes = { - CAP_HASH_ERROR: disco.DiscoInfo(), # used when we can't get disco infos - } + CAP_HASH_ERROR: disco.DiscoInfo() # used when we can't get disco infos + } self.persistent = persistent def __getitem__(self, key): @@ -86,12 +88,16 @@ element = xml_tools.ElementParser()(xml) disco_info = disco.DiscoInfo.fromElement(element) if not disco_info.features and not disco_info.identities: - log.warning(_(u"no feature/identity found in disco element (hash: {cap_hash}), ignoring: {xml}").format( - cap_hash=hash_, xml=xml)) + log.warning( + _( + u"no feature/identity found in disco element (hash: {cap_hash}), ignoring: {xml}" + ).format(cap_hash=hash_, xml=xml) + ) else: self.hashes[hash_] = disco_info log.info(u"Disco hashes loaded") + d = self.persistent.load() d.addCallback(fillHashes) return d @@ -110,7 +116,7 @@ return self.hashes.load() @defer.inlineCallbacks - def hasFeature(self, client, feature, jid_=None, node=u''): + def hasFeature(self, client, feature, jid_=None, node=u""): """Tell if an entity has the required feature @param feature: feature namespace @@ -122,7 +128,7 @@ defer.returnValue(feature in disco_infos.features) @defer.inlineCallbacks - def checkFeature(self, client, feature, jid_=None, node=u''): + def checkFeature(self, client, feature, jid_=None, node=u""): """Like hasFeature, but raise an exception is feature is not Found @param feature: feature namespace @@ -136,7 +142,7 @@ raise failure.Failure(exceptions.FeatureNotFound) @defer.inlineCallbacks - def checkFeatures(self, client, features, jid_=None, identity=None, node=u''): + def checkFeatures(self, client, features, jid_=None, identity=None, node=u""): """Like checkFeature, but check several features at once, and check also identity @param features(iterable[unicode]): features to check @@ -153,7 +159,7 @@ if identity is not None and identity not in disco_infos.identities: raise failure.Failure(exceptions.FeatureNotFound()) - def getInfos(self, client, jid_=None, node=u'', use_cache=True): + def getInfos(self, client, jid_=None, node=u"", use_cache=True): """get disco infos from jid_, filling capability hash if needed @param jid_: jid of the target, or None for profile's server @@ -167,14 +173,19 @@ if not use_cache: # we ignore cache, so we pretend we haven't found it raise KeyError - cap_hash = 
self.host.memory.getEntityData(jid_, [C.ENTITY_CAP_HASH], client.profile)[C.ENTITY_CAP_HASH] + cap_hash = self.host.memory.getEntityData( + jid_, [C.ENTITY_CAP_HASH], client.profile + )[C.ENTITY_CAP_HASH] except (KeyError, exceptions.UnknownEntityError): # capability hash is not available, we'll compute one def infosCb(disco_infos): cap_hash = self.generateHash(disco_infos) self.hashes[cap_hash] = disco_infos - self.host.memory.updateEntityData(jid_, C.ENTITY_CAP_HASH, cap_hash, profile_key=client.profile) + self.host.memory.updateEntityData( + jid_, C.ENTITY_CAP_HASH, cap_hash, profile_key=client.profile + ) return disco_infos + def infosEb(fail): if fail.check(defer.CancelledError): reason = u"request time-out" @@ -183,10 +194,17 @@ reason = unicode(fail.value) except AttributeError: reason = unicode(fail) - log.warning(u"Error while requesting disco infos from {jid}: {reason}".format(jid=jid_.full(), reason=reason)) - self.host.memory.updateEntityData(jid_, C.ENTITY_CAP_HASH, CAP_HASH_ERROR, profile_key=client.profile) + log.warning( + u"Error while requesting disco infos from {jid}: {reason}".format( + jid=jid_.full(), reason=reason + ) + ) + self.host.memory.updateEntityData( + jid_, C.ENTITY_CAP_HASH, CAP_HASH_ERROR, profile_key=client.profile + ) disco_infos = self.hashes[CAP_HASH_ERROR] return disco_infos + d = client.disco.requestInfo(jid_, nodeIdentifier=node) d.addCallback(infosCb) d.addErrback(infosEb) @@ -196,7 +214,7 @@ return defer.succeed(disco_infos) @defer.inlineCallbacks - def getItems(self, client, jid_=None, node=u'', use_cache=True): + def getItems(self, client, jid_=None, node=u"", use_cache=True): """get disco items from jid_, cache them for our own server @param jid_(jid.JID): jid of the target, or None for profile's server @@ -211,7 +229,9 @@ if jid_ == server_jid and not node: # we cache items only for our own server and if node is not set try: - items = self.host.memory.getEntityData(jid_, ["DISCO_ITEMS"], client.profile)["DISCO_ITEMS"] + items = self.host.memory.getEntityData( + jid_, ["DISCO_ITEMS"], client.profile + )["DISCO_ITEMS"] log.debug(u"[%s] disco items are in cache" % jid_.full()) if not use_cache: # we ignore cache, so we pretend we haven't found it @@ -219,22 +239,28 @@ except (KeyError, exceptions.UnknownEntityError): log.debug(u"Caching [%s] disco items" % jid_.full()) items = yield client.disco.requestItems(jid_, nodeIdentifier=node) - self.host.memory.updateEntityData(jid_, "DISCO_ITEMS", items, profile_key=client.profile) + self.host.memory.updateEntityData( + jid_, "DISCO_ITEMS", items, profile_key=client.profile + ) else: try: items = yield client.disco.requestItems(jid_, nodeIdentifier=node) except StanzaError as e: - log.warning(u"Error while requesting items for {jid}: {reason}" - .format(jid=jid_.full(), reason=e.condition)) + log.warning( + u"Error while requesting items for {jid}: {reason}".format( + jid=jid_.full(), reason=e.condition + ) + ) items = disco.DiscoItems() defer.returnValue(items) - def _infosEb(self, failure_, entity_jid): failure_.trap(StanzaError) - log.warning(_(u"Error while requesting [%(jid)s]: %(error)s") % {'jid': entity_jid.full(), - 'error': failure_.getErrorMessage()}) + log.warning( + _(u"Error while requesting [%(jid)s]: %(error)s") + % {"jid": entity_jid.full(), "error": failure_.getErrorMessage()} + ) def findServiceEntity(self, client, category, type_, jid_=None): """Helper method to find first available entity from findServiceEntities @@ -265,14 +291,18 @@ defers_list = [] for item in items: info_d = 
self.getInfos(client, item.entity) - info_d.addCallbacks(infosCb, self._infosEb, [item.entity], None, [item.entity]) + info_d.addCallbacks( + infosCb, self._infosEb, [item.entity], None, [item.entity] + ) defers_list.append(info_d) return defer.DeferredList(defers_list) d = self.getItems(client, jid_) d.addCallback(gotItems) d.addCallback(lambda dummy: found_entities) - reactor.callLater(TIMEOUT, d.cancel) # FIXME: one bad service make a general timeout + reactor.callLater( + TIMEOUT, d.cancel + ) # FIXME: one bad service make a general timeout return d def findFeaturesSet(self, client, features, identity=None, jid_=None): @@ -291,7 +321,7 @@ def infosCb(infos, entity): if entity is None: - log.warning(_(u'received an item without jid')) + log.warning(_(u"received an item without jid")) return if identity is not None and identity not in infos.identities: return @@ -309,7 +339,9 @@ d = self.getItems(client, jid_) d.addCallback(gotItems) d.addCallback(lambda dummy: found_entities) - reactor.callLater(TIMEOUT, d.cancel) # FIXME: one bad service make a general timeout + reactor.callLater( + TIMEOUT, d.cancel + ) # FIXME: one bad service make a general timeout return d def generateHash(self, services): @@ -320,25 +352,35 @@ """ s = [] - byte_identities = [ByteIdentity(service) for service in services if isinstance(service, disco.DiscoIdentity)] # FIXME: lang must be managed here + byte_identities = [ + ByteIdentity(service) + for service in services + if isinstance(service, disco.DiscoIdentity) + ] # FIXME: lang must be managed here byte_identities.sort(key=lambda i: i.lang) byte_identities.sort(key=lambda i: i.idType) byte_identities.sort(key=lambda i: i.category) for identity in byte_identities: s.append(str(identity)) - s.append('<') - byte_features = [service.encode('utf-8') for service in services if isinstance(service, disco.DiscoFeature)] + s.append("<") + byte_features = [ + service.encode("utf-8") + for service in services + if isinstance(service, disco.DiscoFeature) + ] byte_features.sort() # XXX: the default sort has the same behaviour as the requested RFC 4790 i;octet sort for feature in byte_features: s.append(feature) - s.append('<') - #TODO: manage XEP-0128 data form here - cap_hash = b64encode(sha1(''.join(s)).digest()) - log.debug(_(u'Capability hash generated: [%s]') % cap_hash) + s.append("<") + # TODO: manage XEP-0128 data form here + cap_hash = b64encode(sha1("".join(s)).digest()) + log.debug(_(u"Capability hash generated: [%s]") % cap_hash) return cap_hash @defer.inlineCallbacks - def _discoInfos(self, entity_jid_s, node=u'', use_cache=True, profile_key=C.PROF_KEY_NONE): + def _discoInfos( + self, entity_jid_s, node=u"", use_cache=True, profile_key=C.PROF_KEY_NONE + ): """ Discovery method for the bridge @param entity_jid_s: entity we want to discover @param use_cache(bool): if True, use cached data if available @@ -353,8 +395,8 @@ for form_type, form in disco_infos.extensions.items(): fields = [] for field in form.fieldList: - data = {'type': field.fieldType} - for attr in ('var', 'label', 'desc'): + data = {"type": field.fieldType} + for attr in ("var", "label", "desc"): value = getattr(field, attr) if value is not None: data[attr] = value @@ -364,9 +406,16 @@ extensions[form_type or ""] = fields - defer.returnValue((disco_infos.features, - [(cat, type_, name or '') for (cat, type_), name in disco_infos.identities.items()], - extensions)) + defer.returnValue( + ( + disco_infos.features, + [ + (cat, type_, name or "") + for (cat, type_), name in 
disco_infos.identities.items() + ], + extensions, + ) + ) def items2tuples(self, disco_items): """convert disco items to tuple of strings @@ -378,10 +427,12 @@ if not item.entity: log.warning(_(u"invalid item (no jid)")) continue - yield (item.entity.full(), item.nodeIdentifier or '', item.name or '') + yield (item.entity.full(), item.nodeIdentifier or "", item.name or "") @defer.inlineCallbacks - def _discoItems(self, entity_jid_s, node=u'', use_cache=True, profile_key=C.PROF_KEY_NONE): + def _discoItems( + self, entity_jid_s, node=u"", use_cache=True, profile_key=C.PROF_KEY_NONE + ): """ Discovery method for the bridge @param entity_jid_s: entity we want to discover
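generateHash() above implements the XEP-0115 entity-capabilities hash: identities are sorted and rendered as category/type/lang/name followed by "<", the sorted feature namespaces are each followed by "<", and the concatenation is SHA-1 hashed and base-64 encoded (the XEP-0128 data-form part is still a TODO there). A standalone sketch with sample identities and features:

# Sketch of the XEP-0115 verification string construction used by
# generateHash() above; identity and feature values below are samples.
from base64 import b64encode
from hashlib import sha1

identities = [(u"client", u"pc", u"", u"SaT")]  # (category, type, lang, name)
features = [u"http://jabber.org/protocol/disco#items",
            u"http://jabber.org/protocol/disco#info"]

s = []
for category, type_, lang, name in sorted(identities):
    s.append(u"%s/%s/%s/%s" % (category, type_, lang, name))
    s.append(u"<")
for feature in sorted(features):
    s.append(feature)
    s.append(u"<")
cap_hash = b64encode(sha1(u"".join(s).encode("utf-8")).digest())
print(cap_hash)  # the value advertised in the caps "ver" attribute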
--- a/sat/memory/memory.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/memory/memory.py Wed Jun 27 20:14:46 2018 +0200 @@ -20,6 +20,7 @@ from sat.core.i18n import _ from sat.core.log import getLogger + log = getLogger(__name__) import os.path @@ -44,11 +45,13 @@ import time -PresenceTuple = namedtuple("PresenceTuple", ('show', 'priority', 'statuses')) +PresenceTuple = namedtuple("PresenceTuple", ("show", "priority", "statuses")) MSG_NO_SESSION = "Session id doesn't exist or is finished" + class Sessions(object): """Sessions are data associated to key used for a temporary moment, with optional profile checking.""" + DEFAULT_TIMEOUT = 600 def __init__(self, timeout=None, resettable_timeout=True): @@ -72,11 +75,15 @@ if session_id is None: session_id = str(uuid4()) elif session_id in self._sessions: - raise exceptions.ConflictError(u"Session id {} is already used".format(session_id)) + raise exceptions.ConflictError( + u"Session id {} is already used".format(session_id) + ) timer = reactor.callLater(self.timeout, self._purgeSession, session_id) if session_data is None: session_data = {} - self._sessions[session_id] = (timer, session_data) if profile is None else (timer, session_data, profile) + self._sessions[session_id] = ( + (timer, session_data) if profile is None else (timer, session_data, profile) + ) return session_id, session_data def _purgeSession(self, session_id): @@ -91,7 +98,12 @@ # if the session is time-outed, the timer has been called pass del self._sessions[session_id] - log.debug(u"Session {} purged{}".format(session_id, u' (profile {})'.format(profile) if profile is not None else u'')) + log.debug( + u"Session {} purged{}".format( + session_id, + u" (profile {})".format(profile) if profile is not None else u"", + ) + ) def __len__(self): return len(self._sessions) @@ -103,7 +115,9 @@ try: timer, session_data, profile_set = self._sessions[session_id] except ValueError: - raise exceptions.InternalError("You need to use __getitem__ when profile is not set") + raise exceptions.InternalError( + "You need to use __getitem__ when profile is not set" + ) except KeyError: raise failure.Failure(KeyError(MSG_NO_SESSION)) if profile_set != profile: @@ -116,7 +130,9 @@ try: timer, session_data = self._sessions[session_id] except ValueError: - raise exceptions.InternalError("You need to use profileGet instead of __getitem__ when profile is set") + raise exceptions.InternalError( + "You need to use profileGet instead of __getitem__ when profile is set" + ) except KeyError: raise failure.Failure(KeyError(MSG_NO_SESSION)) if self.resettable_timeout: @@ -169,8 +185,12 @@ """ ids = self._profileGetAllIds(profile) if len(ids) > 1: - raise exceptions.InternalError('profileGetUnique has been used but more than one session has been found!') - return self.profileGet(ids[0], profile) if len(ids) == 1 else None # XXX: timeout might be reset + raise exceptions.InternalError( + "profileGetUnique has been used but more than one session has been found!" + ) + return ( + self.profileGet(ids[0], profile) if len(ids) == 1 else None + ) # XXX: timeout might be reset def profileDelUnique(self, profile): """Delete the unique session that is associated to the given profile. @@ -180,7 +200,9 @@ """ ids = self._profileGetAllIds(profile) if len(ids) > 1: - raise exceptions.InternalError('profileDelUnique has been used but more than one session has been found!') + raise exceptions.InternalError( + "profileDelUnique has been used but more than one session has been found!" 
+ ) if len(ids) == 1: del self._sessions[ids[0]] @@ -194,7 +216,9 @@ ProfileSessions.__init__(self, timeout, resettable_timeout=False) def _purgeSession(self, session_id): - log.debug("FIXME: PasswordSessions should ask for the profile password after the session expired") + log.debug( + "FIXME: PasswordSessions should ask for the profile password after the session expired" + ) # XXX: tmp update code, will be removed in the future @@ -211,16 +235,20 @@ except: pass # file is readable but its structure if wrong try: - current_value = user_config.get('DEFAULT', 'local_dir') + current_value = user_config.get("DEFAULT", "local_dir") except (NoOptionError, NoSectionError): - current_value = '' + current_value = "" if current_value: return # nothing to do - old_default = '~/.sat' - if os.path.isfile(os.path.expanduser(old_default) + '/' + C.SAVEFILE_DATABASE): + old_default = "~/.sat" + if os.path.isfile(os.path.expanduser(old_default) + "/" + C.SAVEFILE_DATABASE): if not silent: - log.warning(_(u"A database has been found in the default local_dir for previous versions (< 0.5)")) - tools_config.fixConfigOption('', 'local_dir', old_default, silent) + log.warning( + _( + u"A database has been found in the default local_dir for previous versions (< 0.5)" + ) + ) + tools_config.fixConfigOption("", "local_dir", old_default, silent) class Memory(object): @@ -230,17 +258,19 @@ log.info(_("Memory manager init")) self.initialized = defer.Deferred() self.host = host - self._entities_cache = {} # XXX: keep presence/last resource/other data in cache - # /!\ an entity is not necessarily in roster - # main key is bare jid, value is a dict - # where main key is resource, or None for bare jid - self._key_signals = set() # key which need a signal to frontends when updated + self._entities_cache = {} # XXX: keep presence/last resource/other data in cache + # /!\ an entity is not necessarily in roster + # main key is bare jid, value is a dict + # where main key is resource, or None for bare jid + self._key_signals = set() # key which need a signal to frontends when updated self.subscriptions = {} self.auth_sessions = PasswordSessions() # remember the authenticated profiles self.disco = Discovery(host) fixLocalDir(False) # XXX: tmp update code, will be removed in the future self.config = tools_config.parseMainConf() - database_file = os.path.expanduser(os.path.join(self.getConfig('', 'local_dir'), C.SAVEFILE_DATABASE)) + database_file = os.path.expanduser( + os.path.join(self.getConfig("", "local_dir"), C.SAVEFILE_DATABASE) + ) self.storage = SqliteStorage(database_file, host.version) PersistentDict.storage = self.storage self.params = Params(host, self.storage) @@ -290,7 +320,7 @@ """ if not filename: return False - #TODO: need to encrypt files (at least passwords !) and set permissions + # TODO: need to encrypt files (at least passwords !) 
and set permissions filename = os.path.expanduser(filename) try: self.params.save_xml(filename) @@ -302,7 +332,7 @@ def load(self): """Load parameters and all memory things from db""" - #parameters data + # parameters data return self.params.loadGenParams() def loadIndividualParams(self, profile): @@ -340,7 +370,9 @@ session_d = self._entities_cache[profile] except KeyError: # else we do request the params - session_d = self._entities_cache[profile] = self.loadIndividualParams(profile) + session_d = self._entities_cache[profile] = self.loadIndividualParams( + profile + ) session_d.addCallback(createSession) finally: return session_d @@ -396,12 +428,20 @@ def check_result(result): if not result: - log.warning(u'Authentication failure of profile {}'.format(profile)) - raise failure.Failure(exceptions.PasswordError(u"The provided profile password doesn't match.")) - if not session_data: # avoid to create two profile sessions when password if specified + log.warning(u"Authentication failure of profile {}".format(profile)) + raise failure.Failure( + exceptions.PasswordError( + u"The provided profile password doesn't match." + ) + ) + if ( + not session_data + ): # avoid to create two profile sessions when password if specified return self.newAuthSession(password, profile) - d = self.asyncGetParamA(C.PROFILE_PASS_PATH[1], C.PROFILE_PASS_PATH[0], profile_key=profile) + d = self.asyncGetParamA( + C.PROFILE_PASS_PATH[1], C.PROFILE_PASS_PATH[0], profile_key=profile + ) d.addCallback(lambda sat_cipher: PasswordHasher.verify(password, sat_cipher)) return d.addCallback(check_result) @@ -414,10 +454,13 @@ @param profile: %(doc_profile)s @return: a deferred None value """ + def gotPersonalKey(personal_key): """Create the session for this profile and store the personal key""" - self.auth_sessions.newSession({C.MEMORY_CRYPTO_KEY: personal_key}, profile=profile) - log.debug(u'auth session created for profile %s' % profile) + self.auth_sessions.newSession( + {C.MEMORY_CRYPTO_KEY: personal_key}, profile=profile + ) + log.debug(u"auth session created for profile %s" % profile) d = PersistentDict(C.MEMORY_CRYPTO_NAMESPACE, profile).load() d.addCallback(lambda data: BlockCipher.decrypt(key, data[C.MEMORY_CRYPTO_KEY])) @@ -431,7 +474,12 @@ try: del self._entities_cache[profile] except KeyError: - log.error(_(u"Trying to purge roster status cache for a profile not in memory: [%s]") % profile) + log.error( + _( + u"Trying to purge roster status cache for a profile not in memory: [%s]" + ) + % profile + ) def getProfilesList(self, clients=True, components=False): """retrieve profiles list @@ -472,7 +520,7 @@ # we want to be sure that the profile exists profile = self.getProfileName(profile) - self.memory_data['Profile_default'] = profile + self.memory_data["Profile_default"] = profile def createProfile(self, name, password, component=None): """Create a new profile @@ -486,9 +534,9 @@ """ if not name: raise ValueError(u"Empty profile name") - if name[0] == '@': + if name[0] == "@": raise ValueError(u"A profile name can't start with a '@'") - if '\n' in name: + if "\n" in name: raise ValueError(u"A profile name can't contain line feed ('\\n')") if name in self._entities_cache: @@ -496,19 +544,28 @@ if component: if not component in self.host.plugins: - raise exceptions.NotFound(_(u"Can't find component {component} entry point".format( - component = component))) + raise exceptions.NotFound( + _( + u"Can't find component {component} entry point".format( + component=component + ) + ) + ) # FIXME: PLUGIN_INFO is not 
currently accessible after import, but type shoul be tested here - #Â if self.host.plugins[component].PLUGIN_INFO[u"type"] != C.PLUG_TYPE_ENTRY_POINT: - #Â raise ValueError(_(u"Plugin {component} is not an entry point !".format( - #Â component = component))) + # Â if self.host.plugins[component].PLUGIN_INFO[u"type"] != C.PLUG_TYPE_ENTRY_POINT: + # Â raise ValueError(_(u"Plugin {component} is not an entry point !".format( + # Â component = component))) d = self.params.createProfile(name, component) def initPersonalKey(dummy): # be sure to call this after checking that the profile doesn't exist yet - personal_key = BlockCipher.getRandomKey(base64=True) # generated once for all and saved in a PersistentDict - self.auth_sessions.newSession({C.MEMORY_CRYPTO_KEY: personal_key}, profile=name) # will be encrypted by setParam + personal_key = BlockCipher.getRandomKey( + base64=True + ) # generated once for all and saved in a PersistentDict + self.auth_sessions.newSession( + {C.MEMORY_CRYPTO_KEY: personal_key}, profile=name + ) # will be encrypted by setParam def startFakeSession(dummy): # avoid ProfileNotConnected exception in setParam @@ -521,7 +578,11 @@ d.addCallback(initPersonalKey) d.addCallback(startFakeSession) - d.addCallback(lambda dummy: self.setParam(C.PROFILE_PASS_PATH[1], password, C.PROFILE_PASS_PATH[0], profile_key=name)) + d.addCallback( + lambda dummy: self.setParam( + C.PROFILE_PASS_PATH[1], password, C.PROFILE_PASS_PATH[0], profile_key=name + ) + ) d.addCallback(stopFakeSession) d.addCallback(lambda dummy: self.auth_sessions.profileDelUnique(name)) return d @@ -534,12 +595,14 @@ To be used for direct calls only (not through the bridge). @return: a Deferred instance """ + def cleanMemory(dummy): self.auth_sessions.profileDelUnique(name) try: del self._entities_cache[name] except KeyError: pass + d = self.params.asyncDeleteProfile(name, force) d.addCallback(cleanMemory) return d @@ -567,10 +630,28 @@ def addToHistory(self, client, data): return self.storage.addToHistory(data, client.profile) - def _historyGet(self, from_jid_s, to_jid_s, limit=C.HISTORY_LIMIT_NONE, between=True, filters=None, profile=C.PROF_KEY_NONE): - return self.historyGet(jid.JID(from_jid_s), jid.JID(to_jid_s), limit, between, filters, profile) + def _historyGet( + self, + from_jid_s, + to_jid_s, + limit=C.HISTORY_LIMIT_NONE, + between=True, + filters=None, + profile=C.PROF_KEY_NONE, + ): + return self.historyGet( + jid.JID(from_jid_s), jid.JID(to_jid_s), limit, between, filters, profile + ) - def historyGet(self, from_jid, to_jid, limit=C.HISTORY_LIMIT_NONE, between=True, filters=None, profile=C.PROF_KEY_NONE): + def historyGet( + self, + from_jid, + to_jid, + limit=C.HISTORY_LIMIT_NONE, + between=True, + filters=None, + profile=C.PROF_KEY_NONE, + ): """Retrieve messages in history @param from_jid (JID): source JID (full, or bare for catchall) @@ -586,7 +667,7 @@ """ assert profile != C.PROF_KEY_NONE if limit == C.HISTORY_LIMIT_DEFAULT: - limit = int(self.getParamA(C.HISTORY_LIMIT, 'General', profile_key=profile)) + limit = int(self.getParamA(C.HISTORY_LIMIT, "General", profile_key=profile)) elif limit == C.HISTORY_LIMIT_NONE: limit = None if limit == 0: @@ -597,7 +678,7 @@ def _getPresenceStatuses(self, profile_key): ret = self.getPresenceStatuses(profile_key) - return {entity.full():data for entity, data in ret.iteritems()} + return {entity.full(): data for entity, data in ret.iteritems()} def getPresenceStatuses(self, profile_key): """Get all the presence statuses of a profile @@ -617,7 +698,9 @@ 
presence_data = self.getEntityDatum(full_jid, "presence", profile_key) except KeyError: continue - entities_presence.setdefault(entity_jid, {})[resource or ''] = presence_data + entities_presence.setdefault(entity_jid, {})[ + resource or "" + ] = presence_data return entities_presence @@ -631,7 +714,9 @@ @param profile_key: %(doc_profile_key)s """ presence_data = PresenceTuple(show, priority, statuses) - self.updateEntityData(entity_jid, "presence", presence_data, profile_key=profile_key) + self.updateEntityData( + entity_jid, "presence", presence_data, profile_key=profile_key + ) if entity_jid.resource and show != C.PRESENCE_UNAVAILABLE: # If a resource is available, bare jid should not have presence information try: @@ -657,13 +742,17 @@ """ # FIXME: is there a need to keep cache data for resources which are not connected anymore? if entity_jid.resource: - raise ValueError("getAllResources must be used with a bare jid (got {})".format(entity_jid)) + raise ValueError( + "getAllResources must be used with a bare jid (got {})".format(entity_jid) + ) profile_cache = self._getProfileCache(client) try: entity_data = profile_cache[entity_jid.userhostJID()] except KeyError: - raise exceptions.UnknownEntityError(u"Entity {} not in cache".format(entity_jid)) - resources= set(entity_data.keys()) + raise exceptions.UnknownEntityError( + u"Entity {} not in cache".format(entity_jid) + ) + resources = set(entity_data.keys()) resources.discard(None) return resources @@ -701,7 +790,9 @@ @return (unicode): main resource or None """ if entity_jid.resource: - raise ValueError("getMainResource must be used with a bare jid (got {})".format(entity_jid)) + raise ValueError( + "getMainResource must be used with a bare jid (got {})".format(entity_jid) + ) try: if self.host.plugins["XEP-0045"].isJoinedRoom(client, entity_jid): return None # MUC rooms have no main resource @@ -766,7 +857,9 @@ full_jid.resource = resource yield full_jid - def updateEntityData(self, entity_jid, key, value, silent=False, profile_key=C.PROF_KEY_NONE): + def updateEntityData( + self, entity_jid, key, value, silent=False, profile_key=C.PROF_KEY_NONE + ): """Set a misc data for an entity If key was registered with setSignalOnUpdate, a signal will be sent to frontends @@ -780,19 +873,27 @@ client = self.host.getClient(profile_key) profile_cache = self._getProfileCache(client) if entity_jid in (C.ENTITY_ALL_RESOURCES, C.ENTITY_ALL): - entities = self.getAllEntitiesIter(client, entity_jid==C.ENTITY_ALL) + entities = self.getAllEntitiesIter(client, entity_jid == C.ENTITY_ALL) else: entities = (entity_jid,) for jid_ in entities: - entity_data = profile_cache.setdefault(jid_.userhostJID(),{}).setdefault(jid_.resource, {}) + entity_data = profile_cache.setdefault(jid_.userhostJID(), {}).setdefault( + jid_.resource, {} + ) entity_data[key] = value if key in self._key_signals and not silent: if not isinstance(value, basestring): - log.error(u"Setting a non string value ({}) for a key ({}) which has a signal flag".format(value, key)) + log.error( + u"Setting a non string value ({}) for a key ({}) which has a signal flag".format( + value, key + ) + ) else: - self.host.bridge.entityDataUpdated(jid_.full(), key, value, self.getProfileName(profile_key)) + self.host.bridge.entityDataUpdated( + jid_.full(), key, value, self.getProfileName(profile_key) + ) def delEntityDatum(self, entity_jid, key, profile_key): """Delete a data for an entity @@ -808,7 +909,7 @@ client = self.host.getClient(profile_key) profile_cache = self._getProfileCache(client) if 
entity_jid in (C.ENTITY_ALL_RESOURCES, C.ENTITY_ALL): - entities = self.getAllEntitiesIter(client, entity_jid==C.ENTITY_ALL) + entities = self.getAllEntitiesIter(client, entity_jid == C.ENTITY_ALL) else: entities = (entity_jid,) @@ -816,17 +917,21 @@ try: entity_data = profile_cache[jid_.userhostJID()][jid_.resource] except KeyError: - raise exceptions.UnknownEntityError(u"Entity {} not in cache".format(jid_)) + raise exceptions.UnknownEntityError( + u"Entity {} not in cache".format(jid_) + ) try: del entity_data[key] except KeyError as e: if entity_jid in (C.ENTITY_ALL_RESOURCES, C.ENTITY_ALL): - continue # we ignore KeyError when deleting keys from several entities + continue # we ignore KeyError when deleting keys from several entities else: raise e def _getEntitiesData(self, entities_jids, keys_list, profile_key): - ret = self.getEntitiesData([jid.JID(jid_) for jid_ in entities_jids], keys_list, profile_key) + ret = self.getEntitiesData( + [jid.JID(jid_) for jid_ in entities_jids], keys_list, profile_key + ) return {jid_.full(): data for jid_, data in ret.iteritems()} def getEntitiesData(self, entities_jids, keys_list=None, profile_key=C.PROF_KEY_NONE): @@ -843,6 +948,7 @@ @raise exceptions.UnknownEntityError: if entity is not in cache """ + def fillEntityData(entity_cache_data): entity_data = {} if keys_list is None: @@ -861,7 +967,9 @@ if entities_jids: for entity in entities_jids: try: - entity_cache_data = profile_cache[entity.userhostJID()][entity.resource] + entity_cache_data = profile_cache[entity.userhostJID()][ + entity.resource + ] except KeyError: continue ret_data[entity.full()] = fillEntityData(entity_cache_data, keys_list) @@ -891,7 +999,11 @@ try: entity_data = profile_cache[entity_jid.userhostJID()][entity_jid.resource] except KeyError: - raise exceptions.UnknownEntityError(u"Entity {} not in cache (was requesting {})".format(entity_jid, keys_list)) + raise exceptions.UnknownEntityError( + u"Entity {} not in cache (was requesting {})".format( + entity_jid, keys_list + ) + ) if keys_list is None: return entity_data @@ -910,7 +1022,9 @@ """ return self.getEntityData(entity_jid, (key,), profile_key)[key] - def delEntityCache(self, entity_jid, delete_all_resources=True, profile_key=C.PROF_KEY_NONE): + def delEntityCache( + self, entity_jid, delete_all_resources=True, profile_key=C.PROF_KEY_NONE + ): """Remove all cached data for entity @param entity_jid: JID of the entity to delete @@ -928,12 +1042,16 @@ try: del profile_cache[entity_jid] except KeyError: - raise exceptions.UnknownEntityError(u"Entity {} not in cache".format(entity_jid)) + raise exceptions.UnknownEntityError( + u"Entity {} not in cache".format(entity_jid) + ) else: try: del profile_cache[entity_jid.userhostJID()][entity_jid.resource] except KeyError: - raise exceptions.UnknownEntityError(u"Entity {} not in cache".format(entity_jid)) + raise exceptions.UnknownEntityError( + u"Entity {} not in cache".format(entity_jid) + ) ## Encryption ## @@ -947,9 +1065,14 @@ @return: the deferred encrypted value """ try: - personal_key = self.auth_sessions.profileGetUnique(profile)[C.MEMORY_CRYPTO_KEY] + personal_key = self.auth_sessions.profileGetUnique(profile)[ + C.MEMORY_CRYPTO_KEY + ] except TypeError: - raise exceptions.InternalError(_('Trying to encrypt a value for %s while the personal key is undefined!') % profile) + raise exceptions.InternalError( + _("Trying to encrypt a value for %s while the personal key is undefined!") + % profile + ) return BlockCipher.encrypt(personal_key, value) def decryptValue(self, 
value, profile): @@ -962,9 +1085,14 @@ @return: the deferred decrypted value """ try: - personal_key = self.auth_sessions.profileGetUnique(profile)[C.MEMORY_CRYPTO_KEY] + personal_key = self.auth_sessions.profileGetUnique(profile)[ + C.MEMORY_CRYPTO_KEY + ] except TypeError: - raise exceptions.InternalError(_('Trying to decrypt a value for %s while the personal key is undefined!') % profile) + raise exceptions.InternalError( + _("Trying to decrypt a value for %s while the personal key is undefined!") + % profile + ) return BlockCipher.decrypt(personal_key, value) def encryptPersonalData(self, data_key, data_value, crypto_key, profile): @@ -987,8 +1115,10 @@ return d.addCallback(cb) def done(dummy): - log.debug(_(u'Personal data (%(ns)s, %(key)s) has been successfuly encrypted') % - {'ns': C.MEMORY_CRYPTO_NAMESPACE, 'key': data_key}) + log.debug( + _(u"Personal data (%(ns)s, %(key)s) has been successfuly encrypted") + % {"ns": C.MEMORY_CRYPTO_NAMESPACE, "key": data_key} + ) d = PersistentDict(C.MEMORY_CRYPTO_NAMESPACE, profile).load() return d.addCallback(gotIndMemory).addCallback(done) @@ -1014,7 +1144,7 @@ """Called to get a list of currently waiting subscription requests""" profile = self.getProfileName(profile_key) if not profile: - log.error(_('Asking waiting subscriptions for a non-existant profile')) + log.error(_("Asking waiting subscriptions for a non-existant profile")) return {} if profile not in self.subscriptions: return {} @@ -1029,28 +1159,59 @@ def getParamA(self, name, category, attr="value", profile_key=C.PROF_KEY_NONE): return self.params.getParamA(name, category, attr, profile_key=profile_key) - def asyncGetParamA(self, name, category, attr="value", security_limit=C.NO_SECURITY_LIMIT, profile_key=C.PROF_KEY_NONE): - return self.params.asyncGetParamA(name, category, attr, security_limit, profile_key) + def asyncGetParamA( + self, + name, + category, + attr="value", + security_limit=C.NO_SECURITY_LIMIT, + profile_key=C.PROF_KEY_NONE, + ): + return self.params.asyncGetParamA( + name, category, attr, security_limit, profile_key + ) - def asyncGetParamsValuesFromCategory(self, category, security_limit=C.NO_SECURITY_LIMIT, profile_key=C.PROF_KEY_NONE): - return self.params.asyncGetParamsValuesFromCategory(category, security_limit, profile_key) + def asyncGetParamsValuesFromCategory( + self, category, security_limit=C.NO_SECURITY_LIMIT, profile_key=C.PROF_KEY_NONE + ): + return self.params.asyncGetParamsValuesFromCategory( + category, security_limit, profile_key + ) - def asyncGetStringParamA(self, name, category, attr="value", security_limit=C.NO_SECURITY_LIMIT, profile_key=C.PROF_KEY_NONE): - return self.params.asyncGetStringParamA(name, category, attr, security_limit, profile_key) + def asyncGetStringParamA( + self, + name, + category, + attr="value", + security_limit=C.NO_SECURITY_LIMIT, + profile_key=C.PROF_KEY_NONE, + ): + return self.params.asyncGetStringParamA( + name, category, attr, security_limit, profile_key + ) - def getParamsUI(self, security_limit=C.NO_SECURITY_LIMIT, app='', profile_key=C.PROF_KEY_NONE): + def getParamsUI( + self, security_limit=C.NO_SECURITY_LIMIT, app="", profile_key=C.PROF_KEY_NONE + ): return self.params.getParamsUI(security_limit, app, profile_key) def getParamsCategories(self): return self.params.getParamsCategories() - def setParam(self, name, value, category, security_limit=C.NO_SECURITY_LIMIT, profile_key=C.PROF_KEY_NONE): + def setParam( + self, + name, + value, + category, + security_limit=C.NO_SECURITY_LIMIT, + 
profile_key=C.PROF_KEY_NONE, + ): return self.params.setParam(name, value, category, security_limit, profile_key) def updateParams(self, xml): return self.params.updateParams(xml) - def paramsRegisterApp(self, xml, security_limit=C.NO_SECURITY_LIMIT, app=''): + def paramsRegisterApp(self, xml, security_limit=C.NO_SECURITY_LIMIT, app=""): return self.params.paramsRegisterApp(xml, security_limit, app) def setDefault(self, name, category, callback, errback=None): @@ -1073,25 +1234,25 @@ if peer_jid is None and perms_to_check is None: return peer_jid = peer_jid.userhostJID() - if peer_jid == file_data['owner']: + if peer_jid == file_data["owner"]: # the owner has all rights return if not C.ACCESS_PERMS.issuperset(perms_to_check): - raise exceptions.InternalError(_(u'invalid permission')) + raise exceptions.InternalError(_(u"invalid permission")) for perm in perms_to_check: # we check each perm and raise PermissionError as soon as one condition is not valid # we must never return here, we only return after the loop if nothing was blocking the access try: - perm_data = file_data[u'access'][perm] - perm_type = perm_data[u'type'] + perm_data = file_data[u"access"][perm] + perm_type = perm_data[u"type"] except KeyError: raise failure.Failure(exceptions.PermissionError()) if perm_type == C.ACCESS_TYPE_PUBLIC: continue elif perm_type == C.ACCESS_TYPE_WHITELIST: try: - jids = perm_data[u'jids'] + jids = perm_data[u"jids"] except KeyError: raise failure.Failure(exceptions.PermissionError()) if peer_jid.full() in jids: @@ -1099,7 +1260,9 @@ else: raise failure.Failure(exceptions.PermissionError()) else: - raise exceptions.InternalError(_(u'unknown access type: {type}').format(type=perm_type)) + raise exceptions.InternalError( + _(u"unknown access type: {type}").format(type=perm_type) + ) @defer.inlineCallbacks def checkPermissionToRoot(self, client, file_data, peer_jid, perms_to_check): @@ -1107,17 +1270,21 @@ current = file_data while True: self.checkFilePermission(current, peer_jid, perms_to_check) - parent = current[u'parent'] + parent = current[u"parent"] if not parent: break - files_data = yield self.getFile(self, client, peer_jid=None, file_id=parent, perms_to_check=None) + files_data = yield self.getFile( + self, client, peer_jid=None, file_id=parent, perms_to_check=None + ) try: current = files_data[0] except IndexError: - raise exceptions.DataError(u'Missing parent') + raise exceptions.DataError(u"Missing parent") @defer.inlineCallbacks - def _getParentDir(self, client, path, parent, namespace, owner, peer_jid, perms_to_check): + def _getParentDir( + self, client, path, parent, namespace, owner, peer_jid, perms_to_check + ): """Retrieve parent node from a path, or last existing directory each directory of the path will be retrieved, until the last existing one @@ -1128,32 +1295,59 @@ """ # if path is set, we have to retrieve parent directory of the file(s) from it if parent is not None: - raise exceptions.ConflictError(_(u"You can't use path and parent at the same time")) - path_elts = filter(None, path.split(u'/')) - if {u'..', u'.'}.intersection(path_elts): + raise exceptions.ConflictError( + _(u"You can't use path and parent at the same time") + ) + path_elts = filter(None, path.split(u"/")) + if {u"..", u"."}.intersection(path_elts): raise ValueError(_(u'".." or "." 
can\'t be used in path')) # we retrieve all directories from path until we get the parent container # non existing directories will be created - parent = u'' + parent = u"" for idx, path_elt in enumerate(path_elts): - directories = yield self.storage.getFiles(client, parent=parent, type_=C.FILE_TYPE_DIRECTORY, - name=path_elt, namespace=namespace, owner=owner) + directories = yield self.storage.getFiles( + client, + parent=parent, + type_=C.FILE_TYPE_DIRECTORY, + name=path_elt, + namespace=namespace, + owner=owner, + ) if not directories: defer.returnValue((parent, path_elts[idx:])) # from this point, directories don't exist anymore, we have to create them elif len(directories) > 1: - raise exceptions.InternalError(_(u"Several directories found, this should not happen")) + raise exceptions.InternalError( + _(u"Several directories found, this should not happen") + ) else: directory = directories[0] self.checkFilePermission(directory, peer_jid, perms_to_check) - parent = directory[u'id'] + parent = directory[u"id"] defer.returnValue((parent, [])) @defer.inlineCallbacks - def getFiles(self, client, peer_jid, file_id=None, version=None, parent=None, path=None, type_=None, - file_hash=None, hash_algo=None, name=None, namespace=None, mime_type=None, - owner=None, access=None, projection=None, unique=False, perms_to_check=(C.ACCESS_PERM_READ,)): + def getFiles( + self, + client, + peer_jid, + file_id=None, + version=None, + parent=None, + path=None, + type_=None, + file_hash=None, + hash_algo=None, + name=None, + namespace=None, + mime_type=None, + owner=None, + access=None, + projection=None, + unique=False, + perms_to_check=(C.ACCESS_PERM_READ,), + ): """retrieve files with with given filters @param peer_jid(jid.JID, None): jid trying to access the file @@ -1180,12 +1374,16 @@ on the path """ if peer_jid is None and perms_to_check or perms_to_check is None and peer_jid: - raise exceptions.InternalError('if you want to disable permission check, both peer_jid and perms_to_check must be None') + raise exceptions.InternalError( + "if you want to disable permission check, both peer_jid and perms_to_check must be None" + ) if owner is not None: owner = owner.userhostJID() if path is not None: # permission are checked by _getParentDir - parent, remaining_path_elts = yield self._getParentDir(client, path, parent, namespace, owner, peer_jid, perms_to_check) + parent, remaining_path_elts = yield self._getParentDir( + client, path, parent, namespace, owner, peer_jid, perms_to_check + ) if remaining_path_elts: # if we have remaining path elements, # the parent directory is not found @@ -1197,16 +1395,30 @@ try: parent_data = parent_data[0] except IndexError: - raise exceptions.DataError(u'mising parent') - yield self.checkPermissionToRoot(client, parent_data, peer_jid, perms_to_check) + raise exceptions.DataError(u"mising parent") + yield self.checkPermissionToRoot( + client, parent_data, peer_jid, perms_to_check + ) - files = yield self.storage.getFiles(client, file_id=file_id, version=version, parent=parent, type_=type_, - file_hash=file_hash, hash_algo=hash_algo, name=name, namespace=namespace, - mime_type=mime_type, owner=owner, access=access, - projection=projection, unique=unique) + files = yield self.storage.getFiles( + client, + file_id=file_id, + version=version, + parent=parent, + type_=type_, + file_hash=file_hash, + hash_algo=hash_algo, + name=name, + namespace=namespace, + mime_type=mime_type, + owner=owner, + access=access, + projection=projection, + unique=unique, + ) if peer_jid: - #Â if 
permission are checked, we must remove all file tha use can't access + # Â if permission are checked, we must remove all file tha use can't access to_remove = [] for file_data in files: try: @@ -1218,10 +1430,28 @@ defer.returnValue(files) @defer.inlineCallbacks - def setFile(self, client, name, file_id=None, version=u'', parent=None, path=None, - type_=C.FILE_TYPE_FILE, file_hash=None, hash_algo=None, size=None, namespace=None, - mime_type=None, created=None, modified=None, owner=None, access=None, extra=None, - peer_jid = None, perms_to_check=(C.ACCESS_PERM_WRITE,)): + def setFile( + self, + client, + name, + file_id=None, + version=u"", + parent=None, + path=None, + type_=C.FILE_TYPE_FILE, + file_hash=None, + hash_algo=None, + size=None, + namespace=None, + mime_type=None, + created=None, + modified=None, + owner=None, + access=None, + extra=None, + peer_jid=None, + perms_to_check=(C.ACCESS_PERM_WRITE,), + ): """set a file metadata @param name(unicode): basename of the file @@ -1258,12 +1488,17 @@ if None, permission will no be checked (peer_jid must be None too in this case) @param profile(unicode): profile owning the file """ - if '/' in name: + if "/" in name: raise ValueError('name must not contain a slash ("/")') if file_id is None: file_id = shortuuid.uuid() - if file_hash is not None and hash_algo is None or hash_algo is not None and file_hash is None: - raise ValueError('file_hash and hash_algo must be set at the same time') + if ( + file_hash is not None + and hash_algo is None + or hash_algo is not None + and file_hash is None + ): + raise ValueError("file_hash and hash_algo must be set at the same time") if mime_type is None: mime_type, file_encoding = mimetypes.guess_type(name) if created is None: @@ -1271,31 +1506,56 @@ if namespace is not None: namespace = namespace.strip() or None if type_ == C.FILE_TYPE_DIRECTORY: - if any(version, file_hash, size, mime_type): - raise ValueError(u"version, file_hash, size and mime_type can't be set for a directory") + if any(version, file_hash, size, mime_type): + raise ValueError( + u"version, file_hash, size and mime_type can't be set for a directory" + ) if owner is not None: owner = owner.userhostJID() if path is not None: # _getParentDir will check permissions if peer_jid is set, so we use owner - parent, remaining_path_elts = yield self._getParentDir(client, path, parent, namespace, owner, owner, perms_to_check) + parent, remaining_path_elts = yield self._getParentDir( + client, path, parent, namespace, owner, owner, perms_to_check + ) # if remaining directories don't exist, we have to create them for new_dir in remaining_path_elts: new_dir_id = shortuuid.uuid() - yield self.storage.setFile(client, name=new_dir, file_id=new_dir_id, version=u'', parent=parent, - type_=C.FILE_TYPE_DIRECTORY, namespace=namespace, - created=time.time(), - owner=owner, - access=access, extra={}) + yield self.storage.setFile( + client, + name=new_dir, + file_id=new_dir_id, + version=u"", + parent=parent, + type_=C.FILE_TYPE_DIRECTORY, + namespace=namespace, + created=time.time(), + owner=owner, + access=access, + extra={}, + ) parent = new_dir_id elif parent is None: - parent = u'' + parent = u"" - yield self.storage.setFile(client, file_id=file_id, version=version, parent=parent, type_=type_, - file_hash=file_hash, hash_algo=hash_algo, name=name, size=size, - namespace=namespace, mime_type=mime_type, created=created, modified=modified, - owner=owner, - access=access, extra=extra) + yield self.storage.setFile( + client, + file_id=file_id, + 
version=version, + parent=parent, + type_=type_, + file_hash=file_hash, + hash_algo=hash_algo, + name=name, + size=size, + namespace=namespace, + mime_type=mime_type, + created=created, + modified=modified, + owner=owner, + access=access, + extra=extra, + ) def fileUpdate(self, file_id, column, update_cb): """update a file column taking care of race condition @@ -1318,7 +1578,9 @@ @return (bool): True if entity is available """ if not entity_jid.resource: - return bool(self.getAvailableResources(client, entity_jid)) # is any resource is available, entity is available + return bool( + self.getAvailableResources(client, entity_jid) + ) # is any resource is available, entity is available try: presence_data = self.getEntityDatum(entity_jid, "presence", client.profile) except KeyError:
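For readers skimming the memory.py hunks above, almost every change is the same mechanical rewrite: a call or signature that overflows black's default 88-column limit is split across lines, with no change in behaviour. A before/after pair lifted directly from the updateEntityData hunk shows the pattern; when the arguments still fit on one inner line they stay together, otherwise each argument gets its own line with a trailing comma, as in the getFiles/setFile hunks:

# before (single long line)
self.updateEntityData(entity_jid, "presence", presence_data, profile_key=profile_key)

# after (black's wrapped form)
self.updateEntityData(
    entity_jid, "presence", presence_data, profile_key=profile_key
)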
--- a/sat/memory/params.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/memory/params.py Wed Jun 27 20:14:46 2018 +0200 @@ -24,6 +24,7 @@ from sat.memory.crypto import BlockCipher, PasswordHasher from xml.dom import minidom, NotFoundErr from sat.core.log import getLogger + log = getLogger(__name__) from twisted.internet import defer from twisted.python.failure import Failure @@ -43,17 +44,18 @@ @return (generator[domish.Element]): <jid/> elements """ for jid_ in jids: - jid_elt = domish.Element((None, 'jid')) + jid_elt = domish.Element((None, "jid")) jid_elt.addContent(jid_.full()) yield jid_elt class Params(object): """This class manage parameters with xml""" + ### TODO: add desciption in params - #TODO: when priority is changed, a new presence stanza must be emitted - #TODO: int type (Priority should be int instead of string) + # TODO: when priority is changed, a new presence stanza must be emitted + # TODO: int type (Priority should be int instead of string) default_xml = u""" <params> <general> @@ -77,23 +79,23 @@ </individual> </params> """ % { - 'category_general': D_("General"), - 'category_connection': D_("Connection"), - 'history_param': C.HISTORY_LIMIT, - 'history_label': D_('Chat history limit'), - 'show_offline_contacts': C.SHOW_OFFLINE_CONTACTS, - 'show_offline_contacts_label': D_('Show offline contacts'), - 'show_empty_groups': C.SHOW_EMPTY_GROUPS, - 'show_empty_groups_label': D_('Show empty groups'), - 'force_server_param': C.FORCE_SERVER_PARAM, - 'force_port_param': C.FORCE_PORT_PARAM, - 'new_account_label': D_("Register new account"), - 'autoconnect_label': D_('Connect on frontend startup'), - 'autodisconnect_label': D_('Disconnect on frontend closure'), + "category_general": D_("General"), + "category_connection": D_("Connection"), + "history_param": C.HISTORY_LIMIT, + "history_label": D_("Chat history limit"), + "show_offline_contacts": C.SHOW_OFFLINE_CONTACTS, + "show_offline_contacts_label": D_("Show offline contacts"), + "show_empty_groups": C.SHOW_EMPTY_GROUPS, + "show_empty_groups_label": D_("Show empty groups"), + "force_server_param": C.FORCE_SERVER_PARAM, + "force_port_param": C.FORCE_PORT_PARAM, + "new_account_label": D_("Register new account"), + "autoconnect_label": D_("Connect on frontend startup"), + "autodisconnect_label": D_("Disconnect on frontend closure"), } def load_default_params(self): - self.dom = minidom.parseString(Params.default_xml.encode('utf-8')) + self.dom = minidom.parseString(Params.default_xml.encode("utf-8")) def _mergeParams(self, source_node, dest_node): """Look for every node in source_node and recursively copy them to dest if they don't exists""" @@ -102,8 +104,9 @@ ret = {} for child in children: if child.nodeType == child.ELEMENT_NODE: - ret[(child.tagName, child.getAttribute('name'))] = child + ret[(child.tagName, child.getAttribute("name"))] = child return ret + source_map = getNodesMap(source_node.childNodes) dest_map = getNodesMap(dest_node.childNodes) source_set = set(source_map.keys()) @@ -120,7 +123,7 @@ def load_xml(self, xml_file): """Load parameters template from xml file""" self.dom = minidom.parse(xml_file) - default_dom = minidom.parseString(Params.default_xml.encode('utf-8')) + default_dom = minidom.parseString(Params.default_xml.encode("utf-8")) self._mergeParams(default_dom.documentElement, self.dom.documentElement) def loadGenParams(self): @@ -140,7 +143,9 @@ """ if cache is None: self.params[profile] = {} - return self.storage.loadIndParams(self.params[profile] if cache is None else cache, profile) + return 
self.storage.loadIndParams( + self.params[profile] if cache is None else cache, profile + ) def purgeProfile(self, profile): """Remove cache data of a profile @@ -150,12 +155,14 @@ try: del self.params[profile] except KeyError: - log.error(_(u"Trying to purge cache of a profile not in memory: [%s]") % profile) + log.error( + _(u"Trying to purge cache of a profile not in memory: [%s]") % profile + ) def save_xml(self, filename): """Save parameters template to xml file""" - with open(filename, 'wb') as xml_file: - xml_file.write(self.dom.toxml('utf-8')) + with open(filename, "wb") as xml_file: + xml_file.write(self.dom.toxml("utf-8")) def __init__(self, host, storage): log.debug("Parameters init") @@ -174,7 +181,7 @@ @return: a Deferred instance """ if self.storage.hasProfile(profile): - log.info(_('The profile name already exists')) + log.info(_("The profile name already exists")) return defer.fail(Failure(exceptions.ConflictError)) if not self.host.trigger.point("ProfileCreation", profile): return defer.fail(Failure(exceptions.CancelError)) @@ -189,7 +196,7 @@ @return: a Deferred instance """ if not self.storage.hasProfile(profile): - log.info(_('Trying to delete an unknown profile')) + log.info(_("Trying to delete an unknown profile")) return defer.fail(Failure(exceptions.ProfileUnknownError(profile))) if self.host.isConnected(profile): if force: @@ -210,22 +217,26 @@ @raise exceptions.ProfileUnknownError: profile doesn't exists @raise exceptions.ProfileNotSetError: if C.PROF_KEY_NONE is used """ - if profile_key == '@DEFAULT@': - default = self.host.memory.memory_data.get('Profile_default') + if profile_key == "@DEFAULT@": + default = self.host.memory.memory_data.get("Profile_default") if not default: - log.info(_('No default profile, returning first one')) + log.info(_("No default profile, returning first one")) try: - default = self.host.memory.memory_data['Profile_default'] = self.storage.getProfilesList()[0] + default = self.host.memory.memory_data[ + "Profile_default" + ] = self.storage.getProfilesList()[0] except IndexError: - log.info(_('No profile exist yet')) + log.info(_("No profile exist yet")) raise exceptions.ProfileUnknownError(profile_key) - return default # FIXME: temporary, must use real default value, and fallback to first one if it doesn't exists + return ( + default + ) # FIXME: temporary, must use real default value, and fallback to first one if it doesn't exists elif profile_key == C.PROF_KEY_NONE: raise exceptions.ProfileNotSetError elif return_profile_keys and profile_key in [C.PROF_KEY_ALL]: - return profile_key # this value must be managed by the caller + return profile_key # this value must be managed by the caller if not self.storage.hasProfile(profile_key): - log.error(_(u'Trying to access an unknown profile (%s)') % profile_key) + log.error(_(u"Trying to access an unknown profile (%s)") % profile_key) raise exceptions.ProfileUnknownError(profile_key) return profile_key @@ -239,12 +250,12 @@ """ for node in parent.childNodes: if node.nodeName == tag and node.getAttribute("name") == name: - #the node already exists + # the node already exists return node - #the node is new + # the node is new return None - def updateParams(self, xml, security_limit=C.NO_SECURITY_LIMIT, app=''): + def updateParams(self, xml, security_limit=C.NO_SECURITY_LIMIT, app=""): """import xml in parameters, update if the param already exists If security_limit is specified and greater than -1, the parameters @@ -254,7 +265,7 @@ @param app: name of the frontend registering the parameters 
or empty value """ # TODO: should word with domish.Element - src_parent = minidom.parseString(xml.encode('utf-8')).documentElement + src_parent = minidom.parseString(xml.encode("utf-8")).documentElement def pre_process_app_node(src_parent, security_limit, app): """Parameters that are registered from a frontend must be checked""" @@ -264,17 +275,23 @@ to_remove.append(type_node) # accept individual parameters only continue for cat_node in type_node.childNodes: - if cat_node.nodeName != 'category': + if cat_node.nodeName != "category": to_remove.append(cat_node) continue - to_remove_count = 0 # count the params to be removed from current category + to_remove_count = ( + 0 + ) # count the params to be removed from current category for node in cat_node.childNodes: - if node.nodeName != "param" or not self.checkSecurityLimit(node, security_limit): + if node.nodeName != "param" or not self.checkSecurityLimit( + node, security_limit + ): to_remove.append(node) to_remove_count += 1 continue - node.setAttribute('app', app) - if len(cat_node.childNodes) == to_remove_count: # remove empty category + node.setAttribute("app", app) + if ( + len(cat_node.childNodes) == to_remove_count + ): # remove empty category for dummy in xrange(0, to_remove_count): to_remove.pop() to_remove.append(cat_node) @@ -283,9 +300,11 @@ def import_node(tgt_parent, src_parent): for child in src_parent.childNodes: - if child.nodeName == '#text': + if child.nodeName == "#text": continue - node = self.__get_unique_node(tgt_parent, child.nodeName, child.getAttribute("name")) + node = self.__get_unique_node( + tgt_parent, child.nodeName, child.getAttribute("name") + ) if not node: # The node is new tgt_parent.appendChild(child.cloneNode(True)) else: @@ -310,23 +329,35 @@ @param app: name of the frontend registering the parameters """ if not app: - log.warning(_(u"Trying to register frontends parameters with no specified app: aborted")) + log.warning( + _( + u"Trying to register frontends parameters with no specified app: aborted" + ) + ) return if not hasattr(self, "frontends_cache"): self.frontends_cache = [] if app in self.frontends_cache: - log.debug(_(u"Trying to register twice frontends parameters for %(app)s: aborted" % {"app": app})) + log.debug( + _( + u"Trying to register twice frontends parameters for %(app)s: aborted" + % {"app": app} + ) + ) return self.frontends_cache.append(app) self.updateParams(xml, security_limit, app) - log.debug(u"Frontends parameters registered for %(app)s" % {'app': app}) + log.debug(u"Frontends parameters registered for %(app)s" % {"app": app}) def __default_ok(self, value, name, category): - #FIXME: will not work with individual parameters + # FIXME: will not work with individual parameters self.setParam(name, value, category) def __default_ko(self, failure, name, category): - log.error(_(u"Can't determine default value for [%(category)s/%(name)s]: %(reason)s") % {'category': category, 'name': name, 'reason': str(failure.value)}) + log.error( + _(u"Can't determine default value for [%(category)s/%(name)s]: %(reason)s") + % {"category": category, "name": name, "reason": str(failure.value)} + ) def setDefault(self, name, category, callback, errback=None): """Set default value of parameter @@ -337,19 +368,27 @@ @param callback: must return a string with the value (use deferred if needed) @param errback: must manage the error with args failure, name, category """ - #TODO: send signal param update if value changed - #TODO: manage individual paramaters - log.debug ("setDefault called for 
%(category)s/%(name)s" % {"category": category, "name": name}) - node = self._getParamNode(name, category, '@ALL@') + # TODO: send signal param update if value changed + # TODO: manage individual paramaters + log.debug( + "setDefault called for %(category)s/%(name)s" + % {"category": category, "name": name} + ) + node = self._getParamNode(name, category, "@ALL@") if not node: - log.error(_(u"Requested param [%(name)s] in category [%(category)s] doesn't exist !") % {'name': name, 'category': category}) + log.error( + _( + u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + ) + % {"name": name, "category": category} + ) return - if node[1].getAttribute('default_cb') == 'yes': + if node[1].getAttribute("default_cb") == "yes": # del node[1].attributes['default_cb'] # default_cb is not used anymore as a flag to know if we have to set the default value, - # and we can still use it later e.g. to call a generic setDefault method + # and we can still use it later e.g. to call a generic setDefault method value = self._getParam(category, name, C.GENERAL) - if value is None: # no value set by the user: we have the default value - log.debug ("Default value to set, using callback") + if value is None: # no value set by the user: we have the default value + log.debug("Default value to set, using callback") d = defer.maybeDeferred(callback) d.addCallback(self.__default_ok, name, category) d.addErrback(errback or self.__default_ko, name, category) @@ -364,30 +403,61 @@ @param value: user defined value @return: value (can be str, bool, int, list, None) """ - if attr == 'value': - value_to_use = value if value is not None else node.getAttribute(attr) # we use value (user defined) if it exist, else we use node's default value - if node.getAttribute('type') == 'bool': + if attr == "value": + value_to_use = ( + value if value is not None else node.getAttribute(attr) + ) # we use value (user defined) if it exist, else we use node's default value + if node.getAttribute("type") == "bool": return C.bool(value_to_use) - if node.getAttribute('type') == 'int': + if node.getAttribute("type") == "int": return int(value_to_use) - elif node.getAttribute('type') == 'list': - if not value_to_use: # no user defined value, take default value from the XML - options = [option for option in node.childNodes if option.nodeName == 'option'] - selected = [option for option in options if option.getAttribute('selected') == 'true'] - cat, param = node.parentNode.getAttribute('name'), node.getAttribute('name') + elif node.getAttribute("type") == "list": + if ( + not value_to_use + ): # no user defined value, take default value from the XML + options = [ + option + for option in node.childNodes + if option.nodeName == "option" + ] + selected = [ + option + for option in options + if option.getAttribute("selected") == "true" + ] + cat, param = ( + node.parentNode.getAttribute("name"), + node.getAttribute("name"), + ) if len(selected) == 1: - value_to_use = selected[0].getAttribute('value') - log.info(_("Unset parameter (%(cat)s, %(param)s) of type list will use the default option '%(value)s'") % - {'cat': cat, 'param': param, 'value': value_to_use}) + value_to_use = selected[0].getAttribute("value") + log.info( + _( + "Unset parameter (%(cat)s, %(param)s) of type list will use the default option '%(value)s'" + ) + % {"cat": cat, "param": param, "value": value_to_use} + ) return value_to_use if len(selected) == 0: - log.error(_(u'Parameter (%(cat)s, %(param)s) of type list has no default option!') % {'cat': cat, 
'param': param}) + log.error( + _( + u"Parameter (%(cat)s, %(param)s) of type list has no default option!" + ) + % {"cat": cat, "param": param} + ) else: - log.error(_(u'Parameter (%(cat)s, %(param)s) of type list has more than one default option!') % {'cat': cat, 'param': param}) + log.error( + _( + u"Parameter (%(cat)s, %(param)s) of type list has more than one default option!" + ) + % {"cat": cat, "param": param} + ) raise exceptions.DataError - elif node.getAttribute('type') == 'jids_list': + elif node.getAttribute("type") == "jids_list": if value_to_use: - jids = value_to_use.split('\t') # FIXME: it's not good to use tabs as separator ! + jids = value_to_use.split( + "\t" + ) # FIXME: it's not good to use tabs as separator ! else: # no user defined value, take default value from the XML jids = [getText(jid_) for jid_ in node.getElementsByTagName("jid")] to_delete = [] @@ -395,7 +465,9 @@ try: jids[idx] = jid.JID(value) except (RuntimeError, jid.InvalidFormat, AttributeError): - log.warning(u"Incorrect jid value found in jids list: [{}]".format(value)) + log.warning( + u"Incorrect jid value found in jids list: [{}]".format(value) + ) to_delete.append(value) for value in to_delete: jids.remove(value) @@ -412,8 +484,10 @@ @param value: user defined value @return (unicode, bool, int, list): value to retrieve """ - if attr == 'value' and node.getAttribute('type') == 'password': - raise exceptions.InternalError('To retrieve password values, use _asyncGetAttr instead of _getAttr') + if attr == "value" and node.getAttribute("type") == "password": + raise exceptions.InternalError( + "To retrieve password values, use _asyncGetAttr instead of _getAttr" + ) return self._getAttr_internal(node, attr, value) def _asyncGetAttr(self, node, attr, value, profile=None): @@ -428,19 +502,27 @@ @return (unicode, bool, int, list): Deferred value to retrieve """ value = self._getAttr_internal(node, attr, value) - if attr != 'value' or node.getAttribute('type') != 'password': + if attr != "value" or node.getAttribute("type") != "password": return defer.succeed(value) - param_cat = node.parentNode.getAttribute('name') - param_name = node.getAttribute('name') + param_cat = node.parentNode.getAttribute("name") + param_name = node.getAttribute("name") if ((param_cat, param_name) == C.PROFILE_PASS_PATH) or not value: - return defer.succeed(value) # profile password and empty passwords are returned "as is" + return defer.succeed( + value + ) # profile password and empty passwords are returned "as is" if not profile: - raise exceptions.ProfileNotSetError('The profile is needed to decrypt a password') + raise exceptions.ProfileNotSetError( + "The profile is needed to decrypt a password" + ) d = self.host.memory.decryptValue(value, profile) def gotPlainPassword(password): - if password is None: # empty value means empty password, None means decryption failure - raise exceptions.InternalError(_('The stored password could not be decrypted!')) + if ( + password is None + ): # empty value means empty password, None means decryption failure + raise exceptions.InternalError( + _("The stored password could not be decrypted!") + ) return password return d.addCallback(gotPlainPassword) @@ -455,9 +537,13 @@ def getStringParamA(self, name, category, attr="value", profile_key=C.PROF_KEY_NONE): """ Same as getParamA but for bridge: convert non string value to string """ - return self.__type_to_string(self.getParamA(name, category, attr, profile_key=profile_key)) + return self.__type_to_string( + self.getParamA(name, category, attr, 
profile_key=profile_key) + ) - def getParamA(self, name, category, attr="value", use_default=True, profile_key=C.PROF_KEY_NONE): + def getParamA( + self, name, category, attr="value", use_default=True, profile_key=C.PROF_KEY_NONE + ): """Helper method to get a specific attribute. /!\ This method would return encrypted password values, @@ -474,15 +560,22 @@ # FIXME: security_limit is not managed here ! node = self._getParamNode(name, category) if not node: - log.error(_(u"Requested param [%(name)s] in category [%(category)s] doesn't exist !") % {'name': name, 'category': category}) + log.error( + _( + u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + ) + % {"name": name, "category": category} + ) raise exceptions.NotFound - if attr == 'value' and node[1].getAttribute('type') == 'password': - raise exceptions.InternalError('To retrieve password values, use asyncGetParamA instead of getParamA') + if attr == "value" and node[1].getAttribute("type") == "password": + raise exceptions.InternalError( + "To retrieve password values, use asyncGetParamA instead of getParamA" + ) if node[0] == C.GENERAL: value = self._getParam(category, name, C.GENERAL) - if value is None and attr=='value' and not use_default: + if value is None and attr == "value" and not use_default: return value return self._getAttr(node[1], attr, value) @@ -490,25 +583,39 @@ profile = self.getProfileName(profile_key) if not profile: - log.error(_('Requesting a param for an non-existant profile')) + log.error(_("Requesting a param for an non-existant profile")) raise exceptions.ProfileUnknownError(profile_key) if profile not in self.params: - log.error(_('Requesting synchronous param for not connected profile')) + log.error(_("Requesting synchronous param for not connected profile")) raise exceptions.ProfileNotConnected(profile) if attr == "value": value = self._getParam(category, name, profile=profile) - if value is None and attr=='value' and not use_default: + if value is None and attr == "value" and not use_default: return value return self._getAttr(node[1], attr, value) - def asyncGetStringParamA(self, name, category, attr="value", security_limit=C.NO_SECURITY_LIMIT, profile_key=C.PROF_KEY_NONE): + def asyncGetStringParamA( + self, + name, + category, + attr="value", + security_limit=C.NO_SECURITY_LIMIT, + profile_key=C.PROF_KEY_NONE, + ): d = self.asyncGetParamA(name, category, attr, security_limit, profile_key) d.addCallback(self.__type_to_string) return d - def asyncGetParamA(self, name, category, attr="value", security_limit=C.NO_SECURITY_LIMIT, profile_key=C.PROF_KEY_NONE): + def asyncGetParamA( + self, + name, + category, + attr="value", + security_limit=C.NO_SECURITY_LIMIT, + profile_key=C.PROF_KEY_NONE, + ): """Helper method to get a specific attribute. @param name: name of the parameter @@ -519,12 +626,21 @@ """ node = self._getParamNode(name, category) if not node: - log.error(_(u"Requested param [%(name)s] in category [%(category)s] doesn't exist !") % {'name': name, 'category': category}) + log.error( + _( + u"Requested param [%(name)s] in category [%(category)s] doesn't exist !" + ) + % {"name": name, "category": category} + ) raise ValueError("Requested param doesn't exist") if not self.checkSecurityLimit(node[1], security_limit): - log.warning(_(u"Trying to get parameter '%(param)s' in category '%(cat)s' without authorization!!!" - % {'param': name, 'cat': category})) + log.warning( + _( + u"Trying to get parameter '%(param)s' in category '%(cat)s' without authorization!!!" 
+ % {"param": name, "cat": category} + ) + ) raise exceptions.PermissionError if node[0] == C.GENERAL: @@ -535,7 +651,9 @@ profile = self.getProfileName(profile_key) if not profile: - raise exceptions.InternalError(_('Requesting a param for a non-existant profile')) + raise exceptions.InternalError( + _("Requesting a param for a non-existant profile") + ) if attr != "value": return defer.succeed(node[1].getAttribute(attr)) @@ -543,9 +661,11 @@ value = self._getParam(category, name, profile=profile) return self._asyncGetAttr(node[1], attr, value, profile) except exceptions.ProfileNotInCacheError: - #We have to ask data to the storage manager + # We have to ask data to the storage manager d = self.storage.getIndParam(category, name, profile) - return d.addCallback(lambda value: self._asyncGetAttr(node[1], attr, value, profile)) + return d.addCallback( + lambda value: self._asyncGetAttr(node[1], attr, value, profile) + ) def asyncGetParamsValuesFromCategory(self, category, security_limit, profile_key): """Get all parameters "attribute" for a category @@ -557,7 +677,7 @@ @param profile_key: %(doc_profile_key)s @return (dict): key: param name, value: param value (converted to string if needed) """ - #TODO: manage category of general type (without existant profile) + # TODO: manage category of general type (without existant profile) profile = self.getProfileName(profile_key) if not profile: log.error(_("Asking params for inexistant profile")) @@ -572,11 +692,20 @@ for category_node in prof_xml.getElementsByTagName("category"): if category_node.getAttribute("name") == category: for param_node in category_node.getElementsByTagName("param"): - name = param_node.getAttribute('name') + name = param_node.getAttribute("name") if not name: - log.warning(u"ignoring attribute without name: {}".format(param_node.toxml())) + log.warning( + u"ignoring attribute without name: {}".format( + param_node.toxml() + ) + ) continue - d = self.asyncGetStringParamA(name, category, security_limit=security_limit, profile_key=profile) + d = self.asyncGetStringParamA( + name, + category, + security_limit=security_limit, + profile_key=profile, + ) d.addCallback(setValue, ret, name) names_d_list.append(d) break @@ -586,10 +715,12 @@ dlist.addCallback(lambda dummy: ret) return ret - d = self._constructProfileXml(security_limit, '', profile) + d = self._constructProfileXml(security_limit, "", profile) return d.addCallback(returnCategoryXml) - def _getParam(self, category, name, type_=C.INDIVIDUAL, cache=None, profile=C.PROF_KEY_NONE): + def _getParam( + self, category, name, type_=C.INDIVIDUAL, cache=None, profile=C.PROF_KEY_NONE + ): """Return the param, or None if it doesn't exist @param category: param category @@ -608,8 +739,10 @@ raise exceptions.ProfileNotSetError if profile in self.params: cache = self.params[profile] # if profile is in main cache, we use it, - # ignoring the temporary cache - elif cache is None: # else we use the temporary cache if it exists, or raise an exception + # ignoring the temporary cache + elif ( + cache is None + ): # else we use the temporary cache if it exists, or raise an exception raise exceptions.ProfileNotInCacheError if (category, name) not in cache: return None @@ -629,11 +762,13 @@ def checkNode(node): """Check the node against security_limit and app""" - return self.checkSecurityLimit(node, security_limit) and self.checkApp(node, app) + return self.checkSecurityLimit(node, security_limit) and self.checkApp( + node, app + ) def constructProfile(ignore, profile_cache): # init the 
result document - prof_xml = minidom.parseString('<params/>') + prof_xml = minidom.parseString("<params/>") cache = {} for type_node in self.dom.documentElement.childNodes: @@ -641,9 +776,9 @@ continue # we use all params, general and individual for cat_node in type_node.childNodes: - if cat_node.nodeName != 'category': + if cat_node.nodeName != "category": continue - category = cat_node.getAttribute('name') + category = cat_node.getAttribute("name") dest_params = {} # result (merged) params for category if category not in cache: # we make a copy for the new xml @@ -655,7 +790,7 @@ if not checkNode(node): to_remove.append(node) continue - dest_params[node.getAttribute('name')] = node + dest_params[node.getAttribute("name")] = node for node in to_remove: dest_cat.removeChild(node) new_node = True @@ -668,7 +803,7 @@ for param_node in params: # we have to merge new params (we are parsing individual parameters, we have to add them # to the previously parsed general ones) - name = param_node.getAttribute('name') + name = param_node.getAttribute("name") if not checkNode(param_node): continue if name not in dest_params: @@ -676,35 +811,53 @@ dest_params[name] = param_node.cloneNode(True) dest_cat.appendChild(dest_params[name]) - profile_value = self._getParam(category, - name, type_node.nodeName, - cache=profile_cache, profile=profile) + profile_value = self._getParam( + category, + name, + type_node.nodeName, + cache=profile_cache, + profile=profile, + ) if profile_value is not None: # there is a value for this profile, we must change the default - if dest_params[name].getAttribute('type') == 'list': - for option in dest_params[name].getElementsByTagName("option"): - if option.getAttribute('value') == profile_value: - option.setAttribute('selected', 'true') + if dest_params[name].getAttribute("type") == "list": + for option in dest_params[name].getElementsByTagName( + "option" + ): + if option.getAttribute("value") == profile_value: + option.setAttribute("selected", "true") else: try: - option.removeAttribute('selected') + option.removeAttribute("selected") except NotFoundErr: pass - elif dest_params[name].getAttribute('type') == 'jids_list': - jids = profile_value.split('\t') - for jid_elt in dest_params[name].getElementsByTagName("jid"): - dest_params[name].removeChild(jid_elt) # remove all default + elif dest_params[name].getAttribute("type") == "jids_list": + jids = profile_value.split("\t") + for jid_elt in dest_params[name].getElementsByTagName( + "jid" + ): + dest_params[name].removeChild( + jid_elt + ) # remove all default for jid_ in jids: # rebuilt the children with use values try: jid.JID(jid_) - except (RuntimeError, jid.InvalidFormat, AttributeError): - log.warning(u"Incorrect jid value found in jids list: [{}]".format(jid_)) + except ( + RuntimeError, + jid.InvalidFormat, + AttributeError, + ): + log.warning( + u"Incorrect jid value found in jids list: [{}]".format( + jid_ + ) + ) else: - jid_elt = prof_xml.createElement('jid') + jid_elt = prof_xml.createElement("jid") jid_elt.appendChild(prof_xml.createTextNode(jid_)) dest_params[name].appendChild(jid_elt) else: - dest_params[name].setAttribute('value', profile_value) + dest_params[name].setAttribute("value", profile_value) if new_node: prof_xml.documentElement.appendChild(dest_cat) @@ -721,7 +874,7 @@ d = defer.succeed(None) profile_cache = self.params[profile] else: - #profile is not in cache, we load values in a short time cache + # profile is not in cache, we load values in a short time cache profile_cache = {} d = 
self.loadIndParams(profile, profile_cache) @@ -761,9 +914,11 @@ def returnXML(prof_xml): return_xml = prof_xml.toxml() prof_xml.unlink() - return '\n'.join((line for line in return_xml.split('\n') if line)) + return "\n".join((line for line in return_xml.split("\n") if line)) - return self._constructProfileXml(security_limit, app, profile).addCallback(returnXML) + return self._constructProfileXml(security_limit, app, profile).addCallback( + returnXML + ) def _getParamNode(self, name, category, type_="@ALL@"): # FIXME: is type_ useful ? """Return a node from the param_xml @@ -776,9 +931,14 @@ @return: a tuple (node type, node) or None if not found""" for type_node in self.dom.documentElement.childNodes: - if (((type_ == "@ALL@" or type_ == "@GENERAL@") and type_node.nodeName == C.GENERAL) - or ((type_ == "@ALL@" or type_ == "@INDIVIDUAL@") and type_node.nodeName == C.INDIVIDUAL)): - for node in type_node.getElementsByTagName('category'): + if ( + (type_ == "@ALL@" or type_ == "@GENERAL@") + and type_node.nodeName == C.GENERAL + ) or ( + (type_ == "@ALL@" or type_ == "@INDIVIDUAL@") + and type_node.nodeName == C.INDIVIDUAL + ): + for node in type_node.getElementsByTagName("category"): if node.getAttribute("name") == category: params = node.getElementsByTagName("param") for param in params: @@ -795,7 +955,14 @@ categories.append(cat.getAttribute("name")) return categories - def setParam(self, name, value, category, security_limit=C.NO_SECURITY_LIMIT, profile_key=C.PROF_KEY_NONE): + def setParam( + self, + name, + value, + category, + security_limit=C.NO_SECURITY_LIMIT, + profile_key=C.PROF_KEY_NONE, + ): """Set a parameter, return None if the parameter is not in param xml. Parameter of type 'password' that are not the SÃ T profile password are @@ -813,42 +980,59 @@ if profile_key != C.PROF_KEY_NONE: profile = self.getProfileName(profile_key) if not profile: - log.error(_(u'Trying to set parameter for an unknown profile')) + log.error(_(u"Trying to set parameter for an unknown profile")) raise exceptions.ProfileUnknownError(profile_key) - node = self._getParamNode(name, category, '@ALL@') + node = self._getParamNode(name, category, "@ALL@") if not node: - log.error(_(u'Requesting an unknown parameter (%(category)s/%(name)s)') - % {'category': category, 'name': name}) + log.error( + _(u"Requesting an unknown parameter (%(category)s/%(name)s)") + % {"category": category, "name": name} + ) return defer.succeed(None) if not self.checkSecurityLimit(node[1], security_limit): - log.warning(_(u"Trying to set parameter '%(param)s' in category '%(cat)s' without authorization!!!" - % {'param': name, 'cat': category})) + log.warning( + _( + u"Trying to set parameter '%(param)s' in category '%(cat)s' without authorization!!!" 
+ % {"param": name, "cat": category} + ) + ) return defer.succeed(None) type_ = node[1].getAttribute("type") - if type_ == 'int': + if type_ == "int": if not value: # replace with the default value (which might also be '') value = node[1].getAttribute("value") else: try: int(value) except ValueError: - log.debug(_(u"Trying to set parameter '%(param)s' in category '%(cat)s' with an non-integer value" - % {'param': name, 'cat': category})) + log.debug( + _( + u"Trying to set parameter '%(param)s' in category '%(cat)s' with an non-integer value" + % {"param": name, "cat": category} + ) + ) return defer.succeed(None) if node[1].hasAttribute("constraint"): constraint = node[1].getAttribute("constraint") try: min_, max_ = [int(limit) for limit in constraint.split(";")] except ValueError: - raise exceptions.InternalError("Invalid integer parameter constraint: %s" % constraint) + raise exceptions.InternalError( + "Invalid integer parameter constraint: %s" % constraint + ) value = str(min(max(int(value), min_), max_)) - - log.info(_("Setting parameter (%(category)s, %(name)s) = %(value)s") % - {'category': category, 'name': name, 'value': value if type_ != 'password' else '********'}) + log.info( + _("Setting parameter (%(category)s, %(name)s) = %(value)s") + % { + "category": category, + "name": name, + "value": value if type_ != "password" else "********", + } + ) if node[0] == C.GENERAL: self.params_gen[(category, name)] = value @@ -856,7 +1040,9 @@ for profile in self.storage.getProfilesList(): if self.host.memory.isSessionStarted(profile): self.host.bridge.paramUpdate(name, value, category, profile) - self.host.trigger.point("paramUpdateTrigger", name, value, category, node[0], profile) + self.host.trigger.point( + "paramUpdateTrigger", name, value, category, node[0], profile + ) return defer.succeed(None) assert node[0] == C.INDIVIDUAL @@ -867,16 +1053,24 @@ return defer.succeed(None) elif type_ == "password": try: - personal_key = self.host.memory.auth_sessions.profileGetUnique(profile)[C.MEMORY_CRYPTO_KEY] + personal_key = self.host.memory.auth_sessions.profileGetUnique(profile)[ + C.MEMORY_CRYPTO_KEY + ] except TypeError: - raise exceptions.InternalError(_('Trying to encrypt a password while the personal key is undefined!')) + raise exceptions.InternalError( + _("Trying to encrypt a password while the personal key is undefined!") + ) if (category, name) == C.PROFILE_PASS_PATH: # using 'value' as the encryption key to encrypt another encryption key... could be confusing! 
- d = self.host.memory.encryptPersonalData(data_key=C.MEMORY_CRYPTO_KEY, - data_value=personal_key, - crypto_key=value, - profile=profile) - d.addCallback(lambda dummy: PasswordHasher.hash(value)) # profile password is hashed (empty value stays empty) + d = self.host.memory.encryptPersonalData( + data_key=C.MEMORY_CRYPTO_KEY, + data_value=personal_key, + crypto_key=value, + profile=profile, + ) + d.addCallback( + lambda dummy: PasswordHasher.hash(value) + ) # profile password is hashed (empty value stays empty) elif value: # other non empty passwords are encrypted with the personal key d = BlockCipher.encrypt(personal_key, value) else: @@ -888,7 +1082,9 @@ if self.host.memory.isSessionStarted(profile): self.params[profile][(category, name)] = value self.host.bridge.paramUpdate(name, value, category, profile) - self.host.trigger.point("paramUpdateTrigger", name, value, category, node[0], profile) + self.host.trigger.point( + "paramUpdateTrigger", name, value, category, node[0], profile + ) return self.storage.setIndParam(category, name, value, profile) else: raise exceptions.ProfileNotConnected @@ -912,10 +1108,15 @@ """ ret = {} for type_node in self.dom.documentElement.childNodes: - if (((node_type == "@ALL@" or node_type == "@GENERAL@") and type_node.nodeName == C.GENERAL) or - ((node_type == "@ALL@" or node_type == "@INDIVIDUAL@") and type_node.nodeName == C.INDIVIDUAL)): - for cat_node in type_node.getElementsByTagName('category'): - cat = cat_node.getAttribute('name') + if ( + (node_type == "@ALL@" or node_type == "@GENERAL@") + and type_node.nodeName == C.GENERAL + ) or ( + (node_type == "@ALL@" or node_type == "@INDIVIDUAL@") + and type_node.nodeName == C.INDIVIDUAL + ): + for cat_node in type_node.getElementsByTagName("category"): + cat = cat_node.getAttribute("name") params = cat_node.getElementsByTagName("param") for param in params: if param.getAttribute("type") == attr_type:
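The params.py hunks add two further purely cosmetic normalisations: single-quoted strings become double-quoted, and comments such as "#TODO" gain a space after the "#". A quick way to reproduce this kind of rewrite locally is black's Python API; the sketch below uses the format_str/Mode names from recent black releases (the 2018 release used for this changeset exposed a slightly different signature), and the dict literal is an invented example, not code from this repository:

import black

src = ("labels = {'history_label': D_('Chat history limit'), "
       "'autoconnect_label': D_('Connect on frontend startup')}\n")
print(black.format_str(src, mode=black.Mode(line_length=88)))
# quotes are normalised to double quotes, and because the source line exceeds
# 88 columns the dict is exploded to one key per line with a trailing comma,
# matching the style of the hunks above.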
--- a/sat/plugins/__init__.py	Wed Jun 27 07:51:29 2018 +0200
+++ b/sat/plugins/__init__.py	Wed Jun 27 20:14:46 2018 +0200
@@ -2,6 +2,7 @@
 # XXX: the Monkey Patch is here and not in src/__init__ to avoir issues with pyjamas compilation
 import wokkel
 from sat_tmp.wokkel import pubsub as tmp_pubsub, rsm as tmp_rsm, mam as tmp_mam
+
 wokkel.pubsub = tmp_pubsub
 wokkel.rsm = tmp_rsm
 wokkel.mam = tmp_mam
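Apart from the blank line added after the import block, this file's hunk is the wokkel monkey patch itself. The standalone sketch below uses hypothetical module objects (not the real wokkel/sat_tmp packages) to illustrate why rebinding the package attribute is enough for later attribute lookups through the package to reach the replacement:

import types

wokkel = types.ModuleType("wokkel")                    # stand-in for the real package
tmp_pubsub = types.ModuleType("sat_tmp.wokkel.pubsub") # stand-in for the replacement module
tmp_pubsub.PATCHED = True

wokkel.pubsub = tmp_pubsub   # the monkey patch: rebind the package attribute
assert wokkel.pubsub.PATCHED # subsequent lookups now resolve to the replacement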
--- a/sat/plugins/plugin_adhoc_dbus.py Wed Jun 27 07:51:29 2018 +0200 +++ b/sat/plugins/plugin_adhoc_dbus.py Wed Jun 27 20:14:46 2018 +0200 @@ -20,18 +20,23 @@ from sat.core.i18n import _ from sat.core.constants import Const as C from sat.core.log import getLogger + log = getLogger(__name__) from sat.core import exceptions from twisted.internet import defer from wokkel import data_form + try: from lxml import etree except ImportError: - raise exceptions.MissingModule(u"Missing module lxml, please download/install it from http://lxml.de/") + raise exceptions.MissingModule( + u"Missing module lxml, please download/install it from http://lxml.de/" + ) import os.path import uuid import dbus from dbus.mainloop.glib import DBusGMainLoop + DBusGMainLoop(set_as_default=True) FD_NAME = "org.freedesktop.DBus" @@ -39,8 +44,12 @@ INTROSPECT_IFACE = "org.freedesktop.DBus.Introspectable" INTROSPECT_METHOD = "Introspect" -IGNORED_IFACES_START = ('org.freedesktop', 'org.qtproject', 'org.kde.KMainWindow') # commands in interface starting with these values will be ignored -FLAG_LOOP = 'LOOP' +IGNORED_IFACES_START = ( + "org.freedesktop", + "org.qtproject", + "org.kde.KMainWindow", +) # commands in interface starting with these values will be ignored +FLAG_LOOP = "LOOP" PLUGIN_INFO = { C.PI_NAME: "Ad-Hoc Commands - D-Bus", @@ -50,21 +59,25 @@ C.PI_DEPENDENCIES: ["XEP-0050"], C.PI_MAIN: "AdHocDBus", C.PI_HANDLER: "no", - C.PI_DESCRIPTION: _("""Add D-Bus management to Ad-Hoc commands""") + C.PI_DESCRIPTION: _("""Add D-Bus management to Ad-Hoc commands"""), } class AdHocDBus(object): - def __init__(self, host): log.info(_("plugin Ad-Hoc D-Bus initialization")) self.host = host - host.bridge.addMethod("adHocDBusAddAuto", ".plugin", in_sign='sasasasasasass', out_sign='(sa(sss))', - method=self._adHocDBusAddAuto, - async=True) + host.bridge.addMethod( + "adHocDBusAddAuto", + ".plugin", + in_sign="sasasasasasass", + out_sign="(sa(sss))", + method=self._adHocDBusAddAuto, + async=True, + ) self.session_bus = dbus.SessionBus() self.fd_object = self.session_bus.get_object(FD_NAME, FD_PATH, introspect=False) - self.XEP_0050 = host.plugins['XEP-0050'] + self.XEP_0050 = host.plugins["XEP-0050"] def _DBusAsyncCall(self, proxy, method, *args, **kwargs): """ Call a DBus method asynchronously and return a deferred @@ -77,9 +90,9 @@ """ d = defer.Deferred() - interface = kwargs.pop('interface', None) - kwargs['reply_handler'] = lambda ret=None: d.callback(ret) - kwargs['error_handler'] = d.errback + interface = kwargs.pop("interface", None) + kwargs["reply_handler"] = lambda ret=None: d.callback(ret) + kwargs["error_handler"] = d.errback proxy.get_dbus_method(method, dbus_interface=interface)(*args, **kwargs) return d @@ -95,7 +108,9 @@ @return: True if the method is acceptable """ - if method.xpath("arg[@direction='in']"): # we don't accept method with argument for the moment + if method.xpath( + "arg[@direction='in']" + ): # we don't accept method with argument for the moment return False return True @@ -104,30 +119,61 @@ log.debug("introspecting path [%s]" % proxy.object_path) introspect_xml = yield self._DBusIntrospect(proxy) el = etree.fromstring(introspect_xml) - for node in el.iterchildren('node', 'interface'): - if node.tag == 'node': - new_path = os.path.join(proxy.object_path, node.get('name')) - new_proxy = self.session_bus.get_object(bus_name, new_path, introspect=False) + for node in el.iterchildren("node", "interface"): + if node.tag == "node": + new_path = os.path.join(proxy.object_path, node.get("name")) + 
+                new_proxy = self.session_bus.get_object(
+                    bus_name, new_path, introspect=False
+                )
                 yield self._introspect(methods, bus_name, new_proxy)
-            elif node.tag == 'interface':
-                name = node.get('name')
+            elif node.tag == "interface":
+                name = node.get("name")
                 if any(name.startswith(ignored) for ignored in IGNORED_IFACES_START):
-                    log.debug('interface [%s] is ignored' % name)
+                    log.debug("interface [%s] is ignored" % name)
                     continue
                 log.debug("introspecting interface [%s]" % name)
-                for method in node.iterchildren('method'):
+                for method in node.iterchildren("method"):
                     if self._acceptMethod(method):
-                        method_name = method.get('name')
+                        method_name = method.get("name")
                         log.debug("method accepted: [%s]" % method_name)
                         methods.add((proxy.object_path, name, method_name))
 
-    def _adHocDBusAddAuto(self, prog_name, allowed_jids, allowed_groups, allowed_magics, forbidden_jids, forbidden_groups, flags, profile_key):
-        return self.adHocDBusAddAuto(prog_name, allowed_jids, allowed_groups, allowed_magics, forbidden_jids, forbidden_groups, flags, profile_key)
+    def _adHocDBusAddAuto(
+        self,
+        prog_name,
+        allowed_jids,
+        allowed_groups,
+        allowed_magics,
+        forbidden_jids,
+        forbidden_groups,
+        flags,
+        profile_key,
+    ):
+        return self.adHocDBusAddAuto(
+            prog_name,
+            allowed_jids,
+            allowed_groups,
+            allowed_magics,
+            forbidden_jids,
+            forbidden_groups,
+            flags,
+            profile_key,
+        )
 
     @defer.inlineCallbacks
-    def adHocDBusAddAuto(self, prog_name, allowed_jids=None, allowed_groups=None, allowed_magics=None, forbidden_jids=None, forbidden_groups=None, flags=None, profile_key=C.PROF_KEY_NONE):
+    def adHocDBusAddAuto(
+        self,
+        prog_name,
+        allowed_jids=None,
+        allowed_groups=None,
+        allowed_magics=None,
+        forbidden_jids=None,
+        forbidden_groups=None,
+        flags=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         bus_names = yield self._DBusListNames()
-        bus_names = [bus_name for bus_name in bus_names if '.' + prog_name in bus_name]
+        bus_names = [bus_name for bus_name in bus_names if "." + prog_name in bus_name]
         if not bus_names:
             log.info("Can't find any bus for [%s]" % prog_name)
             defer.returnValue(("", []))
@@ -136,45 +182,62 @@
             if bus_name.endswith(prog_name):
                 break
         log.info("bus name found: [%s]" % bus_name)
-        proxy = self.session_bus.get_object(bus_name, '/', introspect=False)
+        proxy = self.session_bus.get_object(bus_name, "/", introspect=False)
         methods = set()
         yield self._introspect(methods, bus_name, proxy)
 
         if methods:
-            self._addCommand(prog_name, bus_name, methods,
-                             allowed_jids = allowed_jids,
-                             allowed_groups = allowed_groups,
-                             allowed_magics = allowed_magics,
-                             forbidden_jids = forbidden_jids,
-                             forbidden_groups = forbidden_groups,
-                             flags = flags,
-                             profile_key = profile_key)
+            self._addCommand(
+                prog_name,
+                bus_name,
+                methods,
+                allowed_jids=allowed_jids,
+                allowed_groups=allowed_groups,
+                allowed_magics=allowed_magics,
+                forbidden_jids=forbidden_jids,
+                forbidden_groups=forbidden_groups,
+                flags=flags,
+                profile_key=profile_key,
+            )
 
         defer.returnValue((bus_name, methods))
 
-
-    def _addCommand(self, adhoc_name, bus_name, methods, allowed_jids=None, allowed_groups=None, allowed_magics=None, forbidden_jids=None, forbidden_groups=None, flags=None, profile_key=C.PROF_KEY_NONE):
+    def _addCommand(
+        self,
+        adhoc_name,
+        bus_name,
+        methods,
+        allowed_jids=None,
+        allowed_groups=None,
+        allowed_magics=None,
+        forbidden_jids=None,
+        forbidden_groups=None,
+        flags=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         if flags is None:
             flags = set()
 
         def DBusCallback(command_elt, session_data, action, node, profile):
-            actions = session_data.setdefault('actions',[])
-            names_map = session_data.setdefault('names_map', {})
+            actions = session_data.setdefault("actions", [])
+            names_map = session_data.setdefault("names_map", {})
             actions.append(action)
 
             if len(actions) == 1:
                 # it's our first request, we ask the desired new status
                 status = self.XEP_0050.STATUS.EXECUTING
-                form = data_form.Form('form', title=_('Command selection'))
+                form = data_form.Form("form", title=_("Command selection"))
                 options = []
                 for path, iface, command in methods:
-                    label = command.rsplit('.',1)[-1]
+                    label = command.rsplit(".", 1)[-1]
                     name = str(uuid.uuid4())
                     names_map[name] = (path, iface, command)
                     options.append(data_form.Option(name, label))
 
-                field = data_form.Field('list-single', 'command', options=options, required=True)
+                field = data_form.Field(
+                    "list-single", "command", options=options, required=True
+                )
                 form.addField(field)
 
                 payload = form.toElement()
@@ -183,9 +246,9 @@
             elif len(actions) == 2:
                 # we should have the answer here
                 try:
-                    x_elt = command_elt.elements(data_form.NS_X_DATA,'x').next()
+                    x_elt = command_elt.elements(data_form.NS_X_DATA, "x").next()
                     answer_form = data_form.Form.fromElement(x_elt)
-                    command = answer_form['command']
+                    command = answer_form["command"]
                 except (KeyError, StopIteration):
                     raise self.XEP_0050.AdHocError(self.XEP_0050.ERROR.BAD_PAYLOAD)
@@ -202,9 +265,11 @@
                     # We have a loop, so we clear everything and we execute again the command as we had a first call (command_elt is not used, so None is OK)
                     del actions[:]
                     names_map.clear()
-                    return DBusCallback(None, session_data, self.XEP_0050.ACTION.EXECUTE, node, profile)
-                form = data_form.Form('form', title=_(u'Updated'))
-                form.addField(data_form.Field('fixed', u'Command sent'))
+                    return DBusCallback(
+                        None, session_data, self.XEP_0050.ACTION.EXECUTE, node, profile
+                    )
+                form = data_form.Form("form", title=_(u"Updated"))
+                form.addField(data_form.Field("fixed", u"Command sent"))
                 status = self.XEP_0050.STATUS.COMPLETED
                 payload = None
                 note = (self.XEP_0050.NOTE.INFO, _(u"Command sent"))
@@ -213,10 +278,13 @@
             return (payload, status, None, note)
 
-        self.XEP_0050.addAdHocCommand(DBusCallback, adhoc_name,
-                                      allowed_jids = allowed_jids,
-                                      allowed_groups = allowed_groups,
-                                      allowed_magics = allowed_magics,
-                                      forbidden_jids = forbidden_jids,
-                                      forbidden_groups = forbidden_groups,
-                                      profile_key = profile_key)
+        self.XEP_0050.addAdHocCommand(
+            DBusCallback,
+            adhoc_name,
+            allowed_jids=allowed_jids,
+            allowed_groups=allowed_groups,
+            allowed_magics=allowed_magics,
+            forbidden_jids=forbidden_jids,
+            forbidden_groups=forbidden_groups,
+            profile_key=profile_key,
+        )
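For orientation, not part of the changeset itself: the reformatted adHocDBusAddAuto above is the entry point that exposes a local D-Bus service as a XEP-0050 ad-hoc command, and it returns ("", []) when no matching bus name is found. A minimal usage sketch follows; the plugin lookup key, program name and profile key used below are illustrative assumptions, only the adHocDBusAddAuto signature comes from the code above.

# Hypothetical usage sketch (assumed names: "AD_HOC_DBUS" plugin key, "vlc"
# program name, "default" profile); grounded only in the signature shown above.
from twisted.internet import defer

@defer.inlineCallbacks
def expose_player(host):
    adhoc_dbus = host.plugins["AD_HOC_DBUS"]  # assumed import name of this plugin
    # look for a session bus name containing ".vlc" and publish its
    # introspected methods as a single ad-hoc command
    bus_name, methods = yield adhoc_dbus.adHocDBusAddAuto(
        "vlc",                      # prog_name, matched against D-Bus bus names
        allowed_groups=["family"],  # optional roster-group restriction
        profile_key="default",      # illustrative profile
    )
    if not bus_name:
        print("no matching D-Bus service found")
    else:
        print("exposed %d methods from %s" % (len(methods), bus_name))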
--- a/sat/plugins/plugin_blog_import.py Wed Jun 27 07:51:29 2018 +0200
+++ b/sat/plugins/plugin_blog_import.py Wed Jun 27 20:14:46 2018 +0200
@@ -21,6 +21,7 @@
 from sat.core.i18n import _
 from sat.core.constants import Const as C
 from sat.core.log import getLogger
+
 log = getLogger(__name__)
 from twisted.internet import defer
 from twisted.web import client as web_client
@@ -41,33 +42,36 @@
     C.PI_DEPENDENCIES: ["IMPORT", "XEP-0060", "XEP-0277", "TEXT-SYNTAXES", "UPLOAD"],
     C.PI_MAIN: "BlogImportPlugin",
     C.PI_HANDLER: "no",
-    C.PI_DESCRIPTION: _(u"""Blog import management:
-This plugin manage the different blog importers which can register to it, and handle generic importing tasks.""")
+    C.PI_DESCRIPTION: _(
+        u"""Blog import management:
+This plugin manage the different blog importers which can register to it, and handle generic importing tasks."""
+    ),
 }
 
-OPT_HOST = 'host'
-OPT_UPLOAD_IMAGES = 'upload_images'
-OPT_UPLOAD_IGNORE_HOST = 'upload_ignore_host'
-OPT_IGNORE_TLS = 'ignore_tls_errors'
-URL_REDIRECT_PREFIX = 'url_redirect_'
+OPT_HOST = "host"
+OPT_UPLOAD_IMAGES = "upload_images"
+OPT_UPLOAD_IGNORE_HOST = "upload_ignore_host"
+OPT_IGNORE_TLS = "ignore_tls_errors"
+URL_REDIRECT_PREFIX = "url_redirect_"
 
 
 class BlogImportPlugin(object):
     BOOL_OPTIONS = (OPT_UPLOAD_IMAGES, OPT_IGNORE_TLS)
     JSON_OPTIONS = ()
-    OPT_DEFAULTS = {OPT_UPLOAD_IMAGES: True,
-                    OPT_IGNORE_TLS: False}
+    OPT_DEFAULTS = {OPT_UPLOAD_IMAGES: True, OPT_IGNORE_TLS: False}
 
     def __init__(self, host):
         log.info(_("plugin Blog Import initialization"))
         self.host = host
-        self._u = host.plugins['UPLOAD']
-        self._p = host.plugins['XEP-0060']
-        self._m = host.plugins['XEP-0277']
-        self._s = self.host.plugins['TEXT-SYNTAXES']
-        host.plugins['IMPORT'].initialize(self, u'blog')
+        self._u = host.plugins["UPLOAD"]
+        self._p = host.plugins["XEP-0060"]
+        self._m = host.plugins["XEP-0277"]
+        self._s = self.host.plugins["TEXT-SYNTAXES"]
+        host.plugins["IMPORT"].initialize(self, u"blog")
 
-    def importItem(self, client, item_import_data, session, options, return_data, service, node):
+    def importItem(
+        self, client, item_import_data, session, options, return_data, service, node
+    ):
         """importItem specialized for blog import
 
         @param item_import_data(dict):
@@ -99,51 +103,58 @@
         @param return_data(dict): will contain link between former posts and new items
         """
-        mb_data = item_import_data['blog']
+        mb_data = item_import_data["blog"]
         try:
-            item_id = mb_data['id']
+            item_id = mb_data["id"]
         except KeyError:
-            item_id = mb_data['id'] = unicode(shortuuid.uuid())
+            item_id = mb_data["id"] = unicode(shortuuid.uuid())
 
         try:
             # we keep the link between old url and new blog item
             # so the user can redirect its former blog urls
-            old_uri = item_import_data['url']
+            old_uri = item_import_data["url"]
         except KeyError:
             pass
         else:
             new_uri = return_data[URL_REDIRECT_PREFIX + old_uri] = self._p.getNodeURI(
                 service if service is not None else client.jid.userhostJID(),
                 node or self._m.namespace,
-                item_id)
-            log.info(u"url link from {old} to {new}".format(
-                old=old_uri, new=new_uri))
+                item_id,
+            )
+            log.info(u"url link from {old} to {new}".format(old=old_uri, new=new_uri))
 
         return mb_data
 
     @defer.inlineCallbacks
     def importSubItems(self, client, item_import_data, mb_data, session, options):
         # comments data
-        if len(item_import_data['comments']) != 1:
+        if len(item_import_data["comments"]) != 1:
             raise NotImplementedError(u"can't manage multiple comment links")
-        allow_comments = C.bool(mb_data.get('allow_comments', C.BOOL_FALSE))
+        allow_comments = C.bool(mb_data.get("allow_comments", C.BOOL_FALSE))
         if allow_comments:
             comments_service = yield self._m.getCommentsService(client)
-            comments_node = self._m.getCommentsNode(mb_data['id'])
-            mb_data['comments_service'] = comments_service.full()
-            mb_data['comments_node'] = comments_node
+            comments_node = self._m.getCommentsNode(mb_data["id"])
+            mb_data["comments_service"] = comments_service.full()
+            mb_data["comments_node"] = comments_node
             recurse_kwargs = {
-                'items_import_data':item_import_data['comments'][0],
-                'service':comments_service,
-                'node':comments_node}
+                "items_import_data": item_import_data["comments"][0],
+                "service": comments_service,
+                "node": comments_node,
+            }
             defer.returnValue(recurse_kwargs)
         else:
-            if item_import_data['comments'][0]:
-                raise exceptions.DataError(u"allow_comments set to False, but comments are there")
+            if item_import_data["comments"][0]:
+                raise exceptions.DataError(
+                    u"allow_comments set to False, but comments are there"
                )
            defer.returnValue(None)
 
     def publishItem(self, client, mb_data, service, node, session):
-        log.debug(u"uploading item [{id}]: {title}".format(id=mb_data['id'], title=mb_data.get('title','')))
+        log.debug(
+            u"uploading item [{id}]: {title}".format(
+                id=mb_data["id"], title=mb_data.get("title", "")
+            )
+        )
         return self._m.send(client, mb_data, service, node)
 
     @defer.inlineCallbacks
@@ -161,54 +172,80 @@
             return
 
         # we want only XHTML content
-        for prefix in ('content',): # a tuple is use, if title need to be added in the future
+        for prefix in (
+            "content",
+        ):  # a tuple is use, if title need to be added in the future
             try:
-                rich = mb_data['{}_rich'.format(prefix)]
+                rich = mb_data["{}_rich".format(prefix)]
             except KeyError:
                 pass
             else:
-                if '{}_xhtml'.format(prefix) in mb_data:
-                    raise exceptions.DataError(u"importer gave {prefix}_rich and {prefix}_xhtml at the same time, this is not allowed".format(prefix=prefix))
+                if "{}_xhtml".format(prefix) in mb_data:
+                    raise exceptions.DataError(
+                        u"importer gave {prefix}_rich and {prefix}_xhtml at the same time, this is not allowed".format(
+                            prefix=prefix
+                        )
+                    )
                 # we convert rich syntax to XHTML here, so we can handle filters easily
-                converted = yield self._s.convert(rich, self._s.getCurrentSyntax(client.profile), safe=False)
-                mb_data['{}_xhtml'.format(prefix)] = converted
-                del mb_data['{}_rich'.format(prefix)]
+                converted = yield self._s.convert(
+                    rich, self._s.getCurrentSyntax(client.profile), safe=False
+                )
+                mb_data["{}_xhtml".format(prefix)] = converted
+                del mb_data["{}_rich".format(prefix)]
 
             try:
-                mb_data['txt']
+                mb_data["txt"]
            except KeyError:
                pass
            else:
-                if '{}_xhtml'.format(prefix) in mb_data:
-                    log.warning(u"{prefix}_text will be replaced by converted {prefix}_xhtml, so filters can be handled".format(prefix=prefix))
-                    del mb_data['{}_text'.format(prefix)]
+                if "{}_xhtml".format(prefix) in mb_data:
+                    log.warning(
+                        u"{prefix}_text will be replaced by converted {prefix}_xhtml, so filters can be handled".format(
+                            prefix=prefix
+                        )
+                    )
+                    del mb_data["{}_text".format(prefix)]
                 else:
-                    log.warning(u"importer gave a text {prefix}, blog filters don't work on text {prefix}".format(prefix=prefix))
+                    log.warning(
+                        u"importer gave a text {prefix}, blog filters don't work on text {prefix}".format(
+                            prefix=prefix
+                        )
+                    )
                     return
 
         # at this point, we have only XHTML version of content
         try:
-            top_elt = xml_tools.ElementParser()(mb_data['content_xhtml'], namespace=C.NS_XHTML)
+            top_elt = xml_tools.ElementParser()(
+                mb_data["content_xhtml"], namespace=C.NS_XHTML
+            )
         except domish.ParserError:
             # we clean the xml and try again our luck
-            cleaned = yield self._s.cleanXHTML(mb_data['content_xhtml'])
+            cleaned = yield self._s.cleanXHTML(mb_data["content_xhtml"])
             top_elt = xml_tools.ElementParser()(cleaned, namespace=C.NS_XHTML)
 
         opt_host = options.get(OPT_HOST)
         if opt_host:
             # we normalise the domain
             parsed_host = urlparse.urlsplit(opt_host)
-            opt_host = urlparse.urlunsplit((parsed_host.scheme or 'http', parsed_host.netloc or parsed_host.path, '', '', ''))
+            opt_host = urlparse.urlunsplit(
+                (
+                    parsed_host.scheme or "http",
+                    parsed_host.netloc or parsed_host.path,
+                    "",
+                    "",
+                    "",
+                )
+            )
 
         tmp_dir = tempfile.mkdtemp()
         try:
             # TODO: would be nice to also update the hyperlinks to these images, e.g. when you have <a href="{url}"><img src="{url}"></a>
-            for img_elt in xml_tools.findAll(top_elt, names=[u'img']):
+            for img_elt in xml_tools.findAll(top_elt, names=[u"img"]):
                 yield self.imgFilters(client, img_elt, options, opt_host, tmp_dir)
         finally:
-            os.rmdir(tmp_dir) # XXX: tmp_dir should be empty, or something went wrong
+            os.rmdir(tmp_dir)  # XXX: tmp_dir should be empty, or something went wrong
 
         # we now replace the content with filtered one
-        mb_data['content_xhtml'] = top_elt.toXml()
+        mb_data["content_xhtml"] = top_elt.toXml()
 
     @defer.inlineCallbacks
     def imgFilters(self, client, img_elt, options, opt_host, tmp_dir):
@@ -222,15 +259,18 @@
         @param tmp_dir(str): path to temp directory
         """
         try:
-            url = img_elt['src']
-            if url[0] == u'/':
+            url = img_elt["src"]
+            if url[0] == u"/":
                 if not opt_host:
-                    log.warning(u"host was not specified, we can't deal with src without host ({url}) and have to ignore the following <img/>:\n{xml}"
-                                .format(url=url, xml=img_elt.toXml()))
+                    log.warning(
+                        u"host was not specified, we can't deal with src without host ({url}) and have to ignore the following <img/>:\n{xml}".format(
+                            url=url, xml=img_elt.toXml()
+                        )
+                    )
                     return
                 else:
                     url = urlparse.urljoin(opt_host, url)
-            filename = url.rsplit('/',1)[-1].strip()
+            filename = url.rsplit("/", 1)[-1].strip()
             if not filename:
                 raise KeyError
         except (KeyError, IndexError):
@@ -238,7 +278,7 @@
             return
 
         # we change the url for the normalized one
-        img_elt['src'] = url
+        img_elt["src"] = url
 
         if options.get(OPT_UPLOAD_IMAGES, False):
             # upload is requested
@@ -250,23 +290,32 @@
                 # host is the ignored one, we skip
                 parsed_url = urlparse.urlsplit(url)
                 if ignore_host in parsed_url.hostname:
-                    log.info(u"Don't upload image at {url} because of {opt} option".format(
-                        url=url, opt=OPT_UPLOAD_IGNORE_HOST))
+                    log.info(
+                        u"Don't upload image at {url} because of {opt} option".format(
+                            url=url, opt=OPT_UPLOAD_IGNORE_HOST
+                        )
+                    )
                     return
 
             # we download images and re-upload them via XMPP
-            tmp_file = os.path.join(tmp_dir, filename).encode('utf-8')
-            upload_options = {'ignore_tls_errors': options.get(OPT_IGNORE_TLS, False)}
+            tmp_file = os.path.join(tmp_dir, filename).encode("utf-8")
+            upload_options = {"ignore_tls_errors": options.get(OPT_IGNORE_TLS, False)}
             try:
-                yield web_client.downloadPage(url.encode('utf-8'), tmp_file)
-                filename = filename.replace(u'%', u'_') # FIXME: tmp workaround for a bug in prosody http upload
-                dummy, download_d = yield self._u.upload(client, tmp_file, filename, options=upload_options)
+                yield web_client.downloadPage(url.encode("utf-8"), tmp_file)
+                filename = filename.replace(
+                    u"%", u"_"
+                )  # FIXME: tmp workaround for a bug in prosody http upload
+                dummy, download_d = yield self._u.upload(
+                    client, tmp_file, filename, options=upload_options
+                )
                 download_url = yield download_d
             except Exception as e:
-                log.warning(u"can't download image at {url}: {reason}".format(url=url, reason=e))
+                log.warning(
+                    u"can't download image