Mercurial > libervia-backend
comparison src/plugins/plugin_misc_merge_requests.py @ 2448:637ac234424f
plugin merge requests: first draft:
this plugin allows to handle merge requests (i.e. requests to include a contribution in a project) by extending tickets management.
This plugin is made to be generic, independent from end user tools and from the kind of project where the contribution is requested to be merged.
author | Goffi <goffi@goffi.org> |
---|---|
date | Thu, 30 Nov 2017 20:44:25 +0100 |
parents | |
children | 544c4d2fec45 |
comparison
equal
deleted
inserted
replaced
2447:9e692f09f367 | 2448:637ac234424f |
---|---|
1 #!/usr/bin/env python2 | |
2 # -*- coding: utf-8 -*- | |
3 | |
4 # SAT plugin for Pubsub Schemas | |
5 # Copyright (C) 2009-2017 Jérôme Poisson (goffi@goffi.org) | |
6 | |
7 # This program is free software: you can redistribute it and/or modify | |
8 # it under the terms of the GNU Affero General Public License as published by | |
9 # the Free Software Foundation, either version 3 of the License, or | |
10 # (at your option) any later version. | |
11 | |
12 # This program is distributed in the hope that it will be useful, | |
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 # GNU Affero General Public License for more details. | |
16 | |
17 # You should have received a copy of the GNU Affero General Public License | |
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
19 | |
20 from sat.core.i18n import _ | |
21 from sat.core.constants import Const as C | |
22 from sat.core import exceptions | |
23 from twisted.words.protocols.jabber import jid | |
24 from twisted.internet import defer | |
25 from wokkel import generic | |
26 from collections import namedtuple | |
27 from sat.core.log import getLogger | |
28 log = getLogger(__name__) | |
29 | |
# pubsub namespace (and default node name) used for merge request items
NS_MERGE_REQUESTS = 'org.salut-a-toi.merge_requests:0'

PLUGIN_INFO = {
    C.PI_NAME: _("Merge requests management"),
    C.PI_IMPORT_NAME: "MERGE_REQUESTS",
    C.PI_TYPE: "EXP",
    C.PI_PROTOCOLS: [],
    C.PI_DEPENDENCIES: ["XEP-0060", "PUBSUB_SCHEMA", "TICKETS"],
    C.PI_MAIN: "MergeRequests",
    C.PI_HANDLER: "no",
    C.PI_DESCRIPTION: _("""Merge requests management plugin""")
}

# names of the ticket form fields reserved for merge request payloads:
# FIELD_DATA_TYPE holds the handler data type, FIELD_DATA the exported patches
FIELD_DATA_TYPE = u'type'
FIELD_DATA = u'request_data'


# record describing one registered merge request handler
# (cf. MergeRequests.register for the expected handler interface)
MergeRequestHandler = namedtuple("MergeRequestHandler", ['name',
                                                         'handler',
                                                         'data_types',
                                                         'short_desc',
                                                         'priority'])
52 | |
53 | |
class MergeRequests(object):
    """Handle merge requests (code contribution requests) on top of the tickets plugin."""
    # keys that may appear in the per-patch dicts produced by handler.parse;
    # only META_COMMIT_MSG is read directly in this module (cf. set), the
    # others are presumably filled by concrete handlers — confirm with handlers
    META_AUTHOR = u'author'
    META_EMAIL = u'email'
    META_TIMESTAMP = u'timestamp'
    META_HASH = u'hash'
    META_PARENT_HASH = u'parent_hash'
    META_COMMIT_MSG = u'commit_msg'
    META_DIFF = u'diff'
    # index of the diff in the whole data
    # needed to retrieve comments location
    META_DIFF_IDX = u'diff_idx'
65 | |
    def __init__(self, host):
        """Initialise the plugin and expose its methods on the bridge.

        @param host: SàT main instance; provides access to loaded plugins
            and to the frontend bridge
        """
        log.info(_(u"Merge requests plugin initialization"))
        self.host = host
        host.registerNamespace('merge_requests', NS_MERGE_REQUESTS)
        # shortcuts to the plugins we build upon (cf. C.PI_DEPENDENCIES)
        self._p = self.host.plugins["XEP-0060"]
        self._s = self.host.plugins["PUBSUB_SCHEMA"]
        self._t = self.host.plugins["TICKETS"]
        self._handlers = {}  # handler name => MergeRequestHandler
        self._handlers_list = []  # handlers sorted by priority
        self._type_handlers = {}  # data type => handler map
        host.bridge.addMethod("mergeRequestsGet", ".plugin",
                              in_sign='ssiassa{ss}s', out_sign='(asa{ss}aaa{ss})',
                              method=self._get,
                              async=True
                              )
        host.bridge.addMethod("mergeRequestSet", ".plugin",
                              in_sign='ssssa{sas}ssa{ss}s', out_sign='s',
                              method=self._set,
                              async=True)
        host.bridge.addMethod("mergeRequestsSchemaGet", ".plugin",
                              in_sign='sss', out_sign='s',
                              method=self._getSchema,
                              async=True)
        host.bridge.addMethod("mergeRequestParseData", ".plugin",
                              in_sign='ss', out_sign='aa{ss}',
                              method=self._parseData,
                              async=True)
94 def register(self, name, handler, data_types, short_desc, priority=0): | |
95 """register an merge request handler | |
96 | |
97 @param name(unicode): name of the handler | |
98 @param handler(object): instance of the handler. | |
99 It must have the following methods, which may all return a Deferred: | |
100 - check(repository): True if repository can be handled | |
101 - export(repository): return export data, i.e. the patches | |
102 - parse(export_data): parse report data and return a list of dict (1 per patch) with: | |
103 - title: title of the commit message (first line) | |
104 - body: body of the commit message | |
105 @aram data_types(list[unicode]): data types that his handler can generate or parse | |
106 """ | |
107 if name in self._handlers: | |
108 raise exceptions.ConflictError(_(u"a handler with name {name} already exists!").format( | |
109 name = name)) | |
110 self._handlers[name] = MergeRequestHandler(name, | |
111 handler, | |
112 data_types, | |
113 short_desc, | |
114 priority) | |
115 self._handlers_list.append(name) | |
116 self._handlers_list.sort(key=lambda name: self._handlers[name].priority) | |
117 if isinstance(data_types, basestring): | |
118 data_types = [data_types] | |
119 for data_type in data_types: | |
120 if data_type in self._type_handlers: | |
121 log.warning(_(u'merge requests of type {type} are already handled by {old_handler}, ' | |
122 u'ignoring {new_handler}').format( | |
123 type = data_type, | |
124 old_handler = self._type_handlers[data_type].name, | |
125 new_handler = name)) | |
126 continue | |
127 self._type_handlers[data_type] = self._handlers[name] | |
128 | |
129 def _get(self, service='', node='', max_items=10, item_ids=None, sub_id=None, extra_dict=None, profile_key=C.PROF_KEY_NONE): | |
130 client = self.host.getClient(profile_key) | |
131 service = jid.JID(service) if service else None | |
132 max_items = None if max_items == C.NO_LIMIT else max_items | |
133 if extra_dict and 'parse' in extra_dict: | |
134 extra_dict['parse'] = C.bool(extra_dict['parse']) | |
135 extra = self._p.parseExtra(extra_dict) | |
136 d = self.get(client, service, node or None, max_items, item_ids, sub_id or None, extra.rsm_request, extra.extra) | |
137 d.addCallback(lambda (tickets, metadata, parsed_patches): ( | |
138 self._p.serItemsData((tickets, metadata)) + | |
139 ([[{key: unicode(value) for key, value in p.iteritems()} | |
140 for p in patches] for patches in parsed_patches],))) | |
141 return d | |
142 | |
143 @defer.inlineCallbacks | |
144 def get(self, client, service=None, node=None, max_items=None, item_ids=None, sub_id=None, rsm_request=None, extra=None): | |
145 """Retrieve merge requests and convert them to XMLUI | |
146 | |
147 @param extra(XEP-0060.parse, None): can have following keys: | |
148 - update(bool): if True, will return list of parsed request data | |
149 other params are the same as for [TICKETS._get] | |
150 @return (tuple[list[unicode], list[dict[unicode, unicode]])): tuple with | |
151 - XMLUI of the tickets, like [TICKETS._get] | |
152 - list of parsed request data (if extra['parse'] is set, else empty list) | |
153 """ | |
154 if not node: | |
155 node = NS_MERGE_REQUESTS | |
156 tickets_xmlui, metadata = yield self._t.get(client, service, node, max_items, item_ids, sub_id, rsm_request, extra, form_ns=NS_MERGE_REQUESTS) | |
157 parsed_patches = [] | |
158 if extra.get('parse', False): | |
159 for ticket in tickets_xmlui: | |
160 request_type = ticket.named_widgets[FIELD_DATA_TYPE].value | |
161 request_data = ticket.named_widgets[FIELD_DATA].value | |
162 parsed_data = yield self.parseData(request_type, request_data) | |
163 parsed_patches.append(parsed_data) | |
164 defer.returnValue((tickets_xmlui, metadata, parsed_patches)) | |
165 | |
166 def _set(self, service, node, repository, method, values, schema=None, item_id=None, extra=None, profile_key=C.PROF_KEY_NONE): | |
167 client = self.host.getClient(profile_key) | |
168 service = None if not service else jid.JID(service) | |
169 if extra and 'update' in extra: | |
170 extra['update'] = C.bool(extra['update']) | |
171 if schema: | |
172 schema = generic.parseXml(schema.encode('utf-8')) | |
173 else: | |
174 schema = None | |
175 d = self.set(client, service, node or None, repository, method, values, schema, item_id or None, extra, deserialise=True) | |
176 d.addCallback(lambda ret: ret or u'') | |
177 return d | |
178 | |
179 @defer.inlineCallbacks | |
180 def set(self, client, service, node, repository, method=u'auto', values=None, schema=None, item_id=None, extra=None, deserialise=False): | |
181 """Publish a tickets | |
182 | |
183 @param service(None, jid.JID): Pubsub service to use | |
184 @param node(unicode, None): Pubsub node to use | |
185 None to use default tickets node | |
186 @param repository(unicode): path to the repository where the code stands | |
187 @param method(unicode): name of one of the registered handler, or "auto" to try autodetection. | |
188 other arguments are same as for [TICKETS.set] | |
189 @return (unicode): id of the created item | |
190 """ | |
191 if not node: | |
192 node = NS_MERGE_REQUESTS | |
193 | |
194 if values is None: | |
195 values = {} | |
196 | |
197 if FIELD_DATA in values: | |
198 raise exceptions.DataError(_(u"{field} is set by backend, you must not set it in frontend").format( | |
199 field = FIELD_DATA)) | |
200 | |
201 if method == u'auto': | |
202 for name in self._handlers_list: | |
203 handler = self._handlers[name].handler | |
204 can_handle = yield handler.check(repository) | |
205 if can_handle: | |
206 log.info(_(u"{name} handler will be used").format(name=name)) | |
207 break | |
208 else: | |
209 log.warning(_(u"repository {path} can't be handled by any installed handler").format( | |
210 path = repository)) | |
211 raise exceptions.NotFound(_(u"no handler for this repository has been found")) | |
212 else: | |
213 try: | |
214 handler = self._handlers[name].handler | |
215 except KeyError: | |
216 raise exceptions.NotFound(_(u"No handler of this name found")) | |
217 | |
218 data = yield handler.export(repository) | |
219 if not data.strip(): | |
220 raise exceptions.DataError(_(u'export data is empty, do you have any change to send?')) | |
221 | |
222 if not values.get(u'title') or not values.get(u'body'): | |
223 patches = yield handler.parse(data, values.get(FIELD_DATA_TYPE)) | |
224 commits_msg = patches[-1][self.META_COMMIT_MSG] | |
225 msg_lines = commits_msg.splitlines() | |
226 if not values.get(u'title'): | |
227 values[u'title'] = msg_lines[0] | |
228 if not values.get(u'body'): | |
229 values[u'body'] = u'\n'.join(msg_lines[1:]) | |
230 | |
231 values[FIELD_DATA] = data | |
232 | |
233 item_id = yield self._t.set(client, service, node, values, schema, item_id, extra, deserialise, form_ns=NS_MERGE_REQUESTS) | |
234 defer.returnValue(item_id) | |
235 | |
236 def _getSchema(self, service, node, profile_key=C.PROF_KEY_NONE): | |
237 if not node: | |
238 node = NS_MERGE_REQUESTS | |
239 return self._s._getUISchema(service, node, profile_key) | |
240 | |
241 def _parseData(self, data_type, data): | |
242 d = self.parseData(data_type, data) | |
243 d.addCallback(lambda parsed_patches: | |
244 {key: unicode(value) for key, value in parsed_patches.iteritems()}) | |
245 return d | |
246 | |
247 def parseData(self, data_type, data): | |
248 """Parse a merge request data according to type | |
249 | |
250 @param data_type(unicode): type of the data to parse | |
251 @param data(unicode): data to parse | |
252 @return(list[dict[unicode, unicode]]): parsed data | |
253 key of dictionary are self.META_* or keys specifics to handler | |
254 @raise NotFound: no handler can parse this data_type | |
255 """ | |
256 try: | |
257 handler = self._type_handlers[data_type] | |
258 except KeyError: | |
259 raise exceptions.NotFound(_(u'No handler can handle data type "{type}"').format(type=data_type)) | |
260 return defer.maybeDeferred(handler.handler.parse, data, data_type) |