Mercurial > libervia-backend
diff sat/plugins/plugin_misc_merge_requests.py @ 3028:ab2696e34d29
Python 3 port:
/!\ this is a huge commit
/!\ starting from this commit, SàT needs Python 3.6+
/!\ SàT may be unstable or some features may not work anymore; this will improve with time
This patch ports the backend, bridge and frontends to Python 3.
Roughly this has been done this way:
- the 2to3 tool has been applied (with python 3.7)
- all references to python2 have been replaced with python3 (notably shebangs)
- fixed files not handled by 2to3 (notably the shell script)
- several manual fixes
- fixed issues reported by Python 3 that were not handled in Python 2
- replaced "async" with "async_" when needed (it's a reserved word from Python 3.7)
- replaced zope's "implements" with @implementer decorator
- temporary hack to handle data pickled in database, as str or bytes may be returned,
to be checked later
- fixed hash comparison for password
- removed some code which is not needed anymore with Python 3
- deactivated some code which needs to be checked (notably certificate validation)
- tested with jp, fixed reported issues until some basic commands worked
- ported Primitivus (after porting dependencies like urwid satext)
- more manual fixes
author | Goffi <goffi@goffi.org> |
---|---|
date | Tue, 13 Aug 2019 19:08:41 +0200 |
parents | 989b622faff6 |
children | fee60f17ebac |
line wrap: on
line diff
--- a/sat/plugins/plugin_misc_merge_requests.py Wed Jul 31 11:31:22 2019 +0200 +++ b/sat/plugins/plugin_misc_merge_requests.py Tue Aug 13 19:08:41 2019 +0200 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # SAT plugin for Pubsub Schemas @@ -23,8 +23,8 @@ from twisted.internet import defer from twisted.words.protocols.jabber import jid from collections import namedtuple -from sat.tools import utils from sat.core.log import getLogger + log = getLogger(__name__) NS_MERGE_REQUESTS = 'org.salut-a-toi.merge_requests:0' @@ -40,8 +40,8 @@ C.PI_DESCRIPTION: _("""Merge requests management plugin""") } -FIELD_DATA_TYPE = u'type' -FIELD_DATA = u'request_data' +FIELD_DATA_TYPE = 'type' +FIELD_DATA = 'request_data' MergeRequestHandler = namedtuple("MergeRequestHandler", ['name', @@ -52,49 +52,52 @@ class MergeRequests(object): - META_AUTHOR = u'author' - META_EMAIL = u'email' - META_TIMESTAMP = u'timestamp' - META_HASH = u'hash' - META_PARENT_HASH = u'parent_hash' - META_COMMIT_MSG = u'commit_msg' - META_DIFF = u'diff' + META_AUTHOR = 'author' + META_EMAIL = 'email' + META_TIMESTAMP = 'timestamp' + META_HASH = 'hash' + META_PARENT_HASH = 'parent_hash' + META_COMMIT_MSG = 'commit_msg' + META_DIFF = 'diff' # index of the diff in the whole data # needed to retrieve comments location - META_DIFF_IDX = u'diff_idx' + META_DIFF_IDX = 'diff_idx' def __init__(self, host): - log.info(_(u"Merge requests plugin initialization")) + log.info(_("Merge requests plugin initialization")) self.host = host host.registerNamespace('merge_requests', NS_MERGE_REQUESTS) - self._p = self.host.plugins[u"XEP-0060"] - self._s = self.host.plugins[u"PUBSUB_SCHEMA"] - self._t = self.host.plugins[u"TICKETS"] + self._p = self.host.plugins["XEP-0060"] + self._s = self.host.plugins["PUBSUB_SCHEMA"] + self._t = self.host.plugins["TICKETS"] self._handlers = {} self._handlers_list = [] # handlers sorted by priority self._type_handlers = {} # data type => handler map 
host.bridge.addMethod("mergeRequestsGet", ".plugin", in_sign='ssiassa{ss}s', out_sign='(asa{ss}aaa{ss})', method=self._get, - async=True + async_=True ) host.bridge.addMethod("mergeRequestSet", ".plugin", in_sign='ssssa{sas}ssss', out_sign='s', method=self._set, - async=True) + async_=True) host.bridge.addMethod("mergeRequestsSchemaGet", ".plugin", in_sign='sss', out_sign='s', - method=utils.partial(self._s._getUISchema, - default_node=NS_MERGE_REQUESTS), - async=True) + method=lambda service, nodeIdentifier, profile_key: + self._s._getUISchema(service, + nodeIdentifier, + default_node=NS_MERGE_REQUESTS, + profile_key=profile_key), + async_=True) host.bridge.addMethod("mergeRequestParseData", ".plugin", in_sign='ss', out_sign='aa{ss}', method=self._parseData, - async=True) + async_=True) host.bridge.addMethod("mergeRequestsImport", ".plugin", in_sign='ssssa{ss}s', out_sign='', method=self._import, - async=True + async_=True ) def register(self, name, handler, data_types, short_desc, priority=0): @@ -112,8 +115,8 @@ @aram data_types(list[unicode]): data types that his handler can generate or parse """ if name in self._handlers: - raise exceptions.ConflictError(_(u"a handler with name {name} already " - u"exists!").format(name = name)) + raise exceptions.ConflictError(_("a handler with name {name} already " + "exists!").format(name = name)) self._handlers[name] = MergeRequestHandler(name, handler, data_types, @@ -121,12 +124,12 @@ priority) self._handlers_list.append(name) self._handlers_list.sort(key=lambda name: self._handlers[name].priority) - if isinstance(data_types, basestring): + if isinstance(data_types, str): data_types = [data_types] for data_type in data_types: if data_type in self._type_handlers: - log.warning(_(u'merge requests of type {type} are already handled by ' - u'{old_handler}, ignoring {new_handler}').format( + log.warning(_('merge requests of type {type} are already handled by ' + '{old_handler}, ignoring {new_handler}').format( type = 
data_type, old_handler = self._type_handlers[data_type].name, new_handler = name)) @@ -141,10 +144,10 @@ service, node, max_items, sub_id, extra_dict, profile_key) d = self.get(client, service, node or None, max_items, item_ids, sub_id or None, extra.rsm_request, extra.extra) - d.addCallback(lambda (tickets, metadata, parsed_patches): ( - self._p.transItemsData((tickets, metadata)) + - ([[{key: unicode(value) for key, value in p.iteritems()} - for p in patches] for patches in parsed_patches],))) + d.addCallback(lambda tickets_metadata_parsed_patches: ( + self._p.transItemsData((tickets_metadata_parsed_patches[0], tickets_metadata_parsed_patches[1])) + + ([[{key: str(value) for key, value in p.items()} + for p in patches] for patches in tickets_metadata_parsed_patches[2]],))) return d @defer.inlineCallbacks @@ -167,7 +170,7 @@ # XXX: Q&D way to get list for labels when displaying them, but text when we # have to modify them if C.bool(extra.get('labels_as_list', C.BOOL_FALSE)): - filters = {u'labels': self._s.textbox2ListFilter} + filters = {'labels': self._s.textbox2ListFilter} else: filters = {} tickets_xmlui, metadata = yield self._s.getDataFormItems( @@ -191,16 +194,16 @@ defer.returnValue((tickets_xmlui, metadata, parsed_patches)) def _set(self, service, node, repository, method, values, schema=None, item_id=None, - extra=u"", profile_key=C.PROF_KEY_NONE): + extra="", profile_key=C.PROF_KEY_NONE): client, service, node, schema, item_id, extra = self._s.prepareBridgeSet( service, node, schema, item_id, extra, profile_key) d = self.set(client, service, node, repository, method, values, schema, item_id or None, extra, deserialise=True) - d.addCallback(lambda ret: ret or u'') + d.addCallback(lambda ret: ret or '') return d @defer.inlineCallbacks - def set(self, client, service, node, repository, method=u'auto', values=None, + def set(self, client, service, node, repository, method='auto', values=None, schema=None, item_id=None, extra=None, deserialise=False): 
"""Publish a tickets @@ -221,51 +224,51 @@ if not repository and not update: # in case of update, we may re-user former patches data # so repository is not mandatory - raise exceptions.DataError(_(u"repository must be specified")) + raise exceptions.DataError(_("repository must be specified")) if FIELD_DATA in values: - raise exceptions.DataError(_(u"{field} is set by backend, you must not set " - u"it in frontend").format(field = FIELD_DATA)) + raise exceptions.DataError(_("{field} is set by backend, you must not set " + "it in frontend").format(field = FIELD_DATA)) if repository: - if method == u'auto': + if method == 'auto': for name in self._handlers_list: handler = self._handlers[name].handler can_handle = yield handler.check(repository) if can_handle: - log.info(_(u"{name} handler will be used").format(name=name)) + log.info(_("{name} handler will be used").format(name=name)) break else: - log.warning(_(u"repository {path} can't be handled by any installed " - u"handler").format( + log.warning(_("repository {path} can't be handled by any installed " + "handler").format( path = repository)) - raise exceptions.NotFound(_(u"no handler for this repository has " - u"been found")) + raise exceptions.NotFound(_("no handler for this repository has " + "been found")) else: try: handler = self._handlers[name].handler except KeyError: - raise exceptions.NotFound(_(u"No handler of this name found")) + raise exceptions.NotFound(_("No handler of this name found")) data = yield handler.export(repository) if not data.strip(): - raise exceptions.DataError(_(u'export data is empty, do you have any ' - u'change to send?')) + raise exceptions.DataError(_('export data is empty, do you have any ' + 'change to send?')) - if not values.get(u'title') or not values.get(u'body'): + if not values.get('title') or not values.get('body'): patches = yield handler.parse(data, values.get(FIELD_DATA_TYPE)) commits_msg = patches[-1][self.META_COMMIT_MSG] msg_lines = commits_msg.splitlines() - 
if not values.get(u'title'): - values[u'title'] = msg_lines[0] - if not values.get(u'body'): + if not values.get('title'): + values['title'] = msg_lines[0] + if not values.get('body'): ts = self.host.plugins['TEXT_SYNTAXES'] xhtml = yield ts.convert( - u'\n'.join(msg_lines[1:]), + '\n'.join(msg_lines[1:]), syntax_from = ts.SYNTAX_TEXT, syntax_to = ts.SYNTAX_XHTML, profile = client.profile) - values[u'body'] = '<div xmlns="{ns}">{xhtml}</div>'.format( + values['body'] = '<div xmlns="{ns}">{xhtml}</div>'.format( ns=C.NS_XHTML, xhtml=xhtml) values[FIELD_DATA] = data @@ -277,7 +280,7 @@ def _parseData(self, data_type, data): d = self.parseData(data_type, data) d.addCallback(lambda parsed_patches: - {key: unicode(value) for key, value in parsed_patches.iteritems()}) + {key: str(value) for key, value in parsed_patches.items()}) return d def parseData(self, data_type, data): @@ -292,7 +295,7 @@ try: handler = self._type_handlers[data_type] except KeyError: - raise exceptions.NotFound(_(u'No handler can handle data type "{type}"') + raise exceptions.NotFound(_('No handler can handle data type "{type}"') .format(type=data_type)) return defer.maybeDeferred(handler.handler.parse, data, data_type) @@ -326,9 +329,9 @@ try: handler = self._type_handlers[data_type] except KeyError: - raise exceptions.NotFound(_(u'No handler found to import {data_type}') + raise exceptions.NotFound(_('No handler found to import {data_type}') .format(data_type=data_type)) - log.info(_(u"Importing patch [{item_id}] using {name} handler").format( + log.info(_("Importing patch [{item_id}] using {name} handler").format( item_id = item, name = handler.name)) yield handler.handler.import_(repository, data, data_type, item, service, node,