#!/usr/bin/env python2
# -*- coding: utf-8 -*-

# SAT plugin for Pubsub Schemas
# Copyright (C) 2009-2017 Jérôme Poisson (goffi@goffi.org)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from sat.core.i18n import _
from sat.core.constants import Const as C
from sat.core import exceptions
from twisted.words.protocols.jabber import jid
from twisted.internet import defer
from wokkel import generic
from collections import namedtuple
from sat.core.log import getLogger
log = getLogger(__name__)

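# namespace used both as the default pubsub node and as the data form namespace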
NS_MERGE_REQUESTS = 'org.salut-a-toi.merge_requests:0'

PLUGIN_INFO = {
    C.PI_NAME: _("Merge requests management"),
    C.PI_IMPORT_NAME: "MERGE_REQUESTS",
    C.PI_TYPE: "EXP",
    C.PI_PROTOCOLS: [],
    C.PI_DEPENDENCIES: ["XEP-0060", "PUBSUB_SCHEMA", "TICKETS"],
    C.PI_MAIN: "MergeRequests",
    C.PI_HANDLER: "no",
    C.PI_DESCRIPTION: _("""Merge requests management plugin""")
}

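# names of the ticket fields holding the handler data type and the raw request data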
FIELD_DATA_TYPE = u'type'
FIELD_DATA = u'request_data'


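# handler: instance implementing check/export/parse (see register() below)
# data_types: data types this handler can generate or parse
# priority: used to order handlers when autodetecting a repository type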
MergeRequestHandler = namedtuple("MergeRequestHandler", ['name',
                                                         'handler',
                                                         'data_types',
                                                         'short_desc',
                                                         'priority'])


class MergeRequests(object):
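    """Handle merge requests as tickets published over XMPP pubsub"""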
    META_AUTHOR = u'author'
    META_EMAIL = u'email'
    META_TIMESTAMP = u'timestamp'
    META_HASH = u'hash'
    META_PARENT_HASH = u'parent_hash'
    META_COMMIT_MSG = u'commit_msg'
    META_DIFF = u'diff'
    # index of the diff in the whole data
    # needed to retrieve comments location
    META_DIFF_IDX = u'diff_idx'

    def __init__(self, host):
        log.info(_(u"Merge requests plugin initialization"))
        self.host = host
        host.registerNamespace('merge_requests', NS_MERGE_REQUESTS)
        self._p = self.host.plugins["XEP-0060"]
        self._s = self.host.plugins["PUBSUB_SCHEMA"]
        self._t = self.host.plugins["TICKETS"]
        self._handlers = {}
        self._handlers_list = []  # handlers sorted by priority
        self._type_handlers = {}  # data type => handler map
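        # bridge methods below use D-Bus style signature strings
        # ('s': string, 'i': integer, 'a': array, 'a{ss}': dict of strings)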
        host.bridge.addMethod("mergeRequestsGet", ".plugin",
                              in_sign='ssiassa{ss}s', out_sign='(asa{ss}aaa{ss})',
                              method=self._get,
                              async=True
                              )
        host.bridge.addMethod("mergeRequestSet", ".plugin",
                              in_sign='ssssa{sas}ssa{ss}s', out_sign='s',
                              method=self._set,
                              async=True)
        host.bridge.addMethod("mergeRequestsSchemaGet", ".plugin",
                              in_sign='sss', out_sign='s',
                              method=self._getSchema,
                              async=True)
        host.bridge.addMethod("mergeRequestParseData", ".plugin",
                              in_sign='ss', out_sign='aa{ss}',
                              method=self._parseData,
                              async=True)

    def register(self, name, handler, data_types, short_desc, priority=0):
        """register an merge request handler

        @param name(unicode): name of the handler
        @param handler(object): instance of the handler.
            It must have the following methods, which may all return a Deferred:
                - check(repository): True if repository can be handled
                - export(repository): return export data, i.e. the patches
                - parse(export_data): parse export data and return a list of dicts (one per patch) with:
                    - title: title of the commit message (first line)
                    - body: body of the commit message
        @param data_types(list[unicode], unicode): data type(s) that this handler can generate or parse
        """
        if name in self._handlers:
            raise exceptions.ConflictError(_(u"a handler with name {name} already exists!").format(
                name=name))
        self._handlers[name] = MergeRequestHandler(name,
                                                   handler,
                                                   data_types,
                                                   short_desc,
                                                   priority)
        self._handlers_list.append(name)
        self._handlers_list.sort(key=lambda name: self._handlers[name].priority)
        if isinstance(data_types, basestring):
            data_types = [data_types]
        for data_type in data_types:
            if data_type in self._type_handlers:
                log.warning(_(u'merge requests of type {type} are already handled by {old_handler}, '
                              u'ignoring {new_handler}').format(
                                  type=data_type,
                                  old_handler=self._type_handlers[data_type].name,
                                  new_handler=name))
                continue
            self._type_handlers[data_type] = self._handlers[name]

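    # methods prefixed with "_" are the bridge wrappers: they (de)serialise
    # frontend arguments before calling the corresponding internal method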
    def _get(self, service='', node='', max_items=10, item_ids=None, sub_id=None, extra_dict=None, profile_key=C.PROF_KEY_NONE):
        client = self.host.getClient(profile_key)
        service = jid.JID(service) if service else None
        max_items = None if max_items == C.NO_LIMIT else max_items
        if extra_dict and 'parse' in extra_dict:
            extra_dict['parse'] = C.bool(extra_dict['parse'])
        extra = self._p.parseExtra(extra_dict)
        d = self.get(client, service, node or None, max_items, item_ids, sub_id or None, extra.rsm_request, extra.extra)
        d.addCallback(lambda (tickets, metadata, parsed_patches): (
            self._p.serItemsData((tickets, metadata)) +
            ([[{key: unicode(value) for key, value in p.iteritems()}
                for p in patches] for patches in parsed_patches],)))
        return d

    @defer.inlineCallbacks
    def get(self, client, service=None, node=None, max_items=None, item_ids=None, sub_id=None, rsm_request=None, extra=None):
        """Retrieve merge requests and convert them to XMLUI

        @param extra(XEP-0060.parse, None): can have following keys:
            - parse(bool): if True, will return a list of parsed request data
        other params are the same as for [TICKETS._get]
        @return (tuple[list[unicode], dict[unicode, unicode], list[list[dict[unicode, unicode]]]]): tuple with
            - XMLUI of the tickets, like [TICKETS._get]
            - metadata, like [TICKETS._get]
            - list of parsed request data (if extra['parse'] is set, else empty list)
        """
        if not node:
            node = NS_MERGE_REQUESTS
        tickets_xmlui, metadata = yield self._t.get(client, service, node, max_items, item_ids, sub_id, rsm_request, extra, form_ns=NS_MERGE_REQUESTS)
        parsed_patches = []
        if extra and extra.get('parse', False):
            for ticket in tickets_xmlui:
                request_type = ticket.named_widgets[FIELD_DATA_TYPE].value
                request_data = ticket.named_widgets[FIELD_DATA].value
                parsed_data = yield self.parseData(request_type, request_data)
                parsed_patches.append(parsed_data)
        defer.returnValue((tickets_xmlui, metadata, parsed_patches))

    def _set(self, service, node, repository, method, values, schema=None, item_id=None, extra=None, profile_key=C.PROF_KEY_NONE):
        client = self.host.getClient(profile_key)
        service = None if not service else jid.JID(service)
        if extra and 'update' in extra:
            extra['update'] = C.bool(extra['update'])
        if schema:
            schema = generic.parseXml(schema.encode('utf-8'))
        else:
            schema = None
        d = self.set(client, service, node or None, repository, method, values, schema, item_id or None, extra, deserialise=True)
        d.addCallback(lambda ret: ret or u'')
        return d

    @defer.inlineCallbacks
    def set(self, client, service, node, repository, method=u'auto', values=None, schema=None, item_id=None, extra=None, deserialise=False):
        """Publish a tickets

        @param service(None, jid.JID): Pubsub service to use
        @param node(unicode, None): Pubsub node to use
            None to use default tickets node
        @param repository(unicode): path to the repository where the code stands
        @param method(unicode): name of one of the registered handlers, or "auto" to try autodetection.
        other arguments are same as for [TICKETS.set]
        @return (unicode): id of the created item
        """
        if not node:
            node = NS_MERGE_REQUESTS

        if values is None:
            values = {}

        if FIELD_DATA in values:
            raise exceptions.DataError(_(u"{field} is set by backend, you must not set it in frontend").format(
                field = FIELD_DATA))

        if method == u'auto':
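            # ask each registered handler, in priority order, whether it can
            # handle this repository; the "else" clause of the for loop runs
            # only when no handler matched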
            for name in self._handlers_list:
                handler = self._handlers[name].handler
                can_handle = yield handler.check(repository)
                if can_handle:
                    log.info(_(u"{name} handler will be used").format(name=name))
                    break
            else:
                log.warning(_(u"repository {path} can't be handled by any installed handler").format(
                    path=repository))
                raise exceptions.NotFound(_(u"no handler for this repository has been found"))
        else:
            try:
                handler = self._handlers[method].handler
            except KeyError:
                raise exceptions.NotFound(_(u"No handler of this name found"))

        data = yield handler.export(repository)
        if not data.strip():
            raise exceptions.DataError(_(u'export data is empty, do you have any changes to send?'))

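        # when title or body is missing, derive them from the last commit
        # message: its first line becomes the title, the rest becomes the body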
        if not values.get(u'title') or not values.get(u'body'):
            patches = yield handler.parse(data, values.get(FIELD_DATA_TYPE))
            commits_msg = patches[-1][self.META_COMMIT_MSG]
            msg_lines = commits_msg.splitlines()
            if not values.get(u'title'):
                values[u'title'] = msg_lines[0]
            if not values.get(u'body'):
                values[u'body'] = u'\n'.join(msg_lines[1:])

        values[FIELD_DATA] = data

        item_id = yield self._t.set(client, service, node, values, schema, item_id, extra, deserialise, form_ns=NS_MERGE_REQUESTS)
        defer.returnValue(item_id)

    def _getSchema(self, service, node, profile_key=C.PROF_KEY_NONE):
        if not node:
            node = NS_MERGE_REQUESTS
        return self._s._getUISchema(service, node, profile_key)

    def _parseData(self, data_type, data):
        d = self.parseData(data_type, data)
        d.addCallback(lambda parsed_patches: [
            {key: unicode(value) for key, value in patch.iteritems()}
            for patch in parsed_patches])
        return d

    def parseData(self, data_type, data):
        """Parse a merge request data according to type

        @param data_type(unicode): type of the data to parse
        @param data(unicode): data to parse
        @return(list[dict[unicode, unicode]]): parsed data
            keys of the dictionaries are self.META_* or keys specific to the handler
        @raise NotFound: no handler can parse this data_type
        """
        try:
            handler = self._type_handlers[data_type]
        except KeyError:
            raise exceptions.NotFound(_(u'No handler can handle data type "{type}"').format(type=data_type))
        return defer.maybeDeferred(handler.handler.parse, data, data_type)