Mercurial > libervia-backend
annotate of src/plugins/plugin_misc_merge_requests.py @ revision 2472:3f0a3a0ed290
commit message: "plugins tickets, merge-requests: fixed call of prepareBridgeSet"
author: Goffi <goffi@goffi.org>
date: Sat, 13 Jan 2018 10:03:32 +0100
parent: 544c4d2fec45
child: 447c3de6b9e5
(line-source annotation follows; each line is tagged with the revision that introduced it)
2448 | 1 #!/usr/bin/env python2 |
2 # -*- coding: utf-8 -*- | |
3 | |
4 # SAT plugin for Pubsub Schemas | |
5 # Copyright (C) 2009-2017 Jérôme Poisson (goffi@goffi.org) | |
6 | |
7 # This program is free software: you can redistribute it and/or modify | |
8 # it under the terms of the GNU Affero General Public License as published by | |
9 # the Free Software Foundation, either version 3 of the License, or | |
10 # (at your option) any later version. | |
11 | |
12 # This program is distributed in the hope that it will be useful, | |
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 # GNU Affero General Public License for more details. | |
16 | |
17 # You should have received a copy of the GNU Affero General Public License | |
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
19 | |
20 from sat.core.i18n import _ | |
21 from sat.core.constants import Const as C | |
22 from sat.core import exceptions | |
23 from twisted.internet import defer | |
24 from collections import namedtuple | |
[annotation: the following line is from rev 2471 (changeset 544c4d2fec45) — "plugins schema, merge_requests, tickets*: factorisation", Goffi <goffi@goffi.org>, parent: 2448]
25 from sat.tools import utils |
2448 | 26 from sat.core.log import getLogger |
27 log = getLogger(__name__) | |
28 | |
29 NS_MERGE_REQUESTS = 'org.salut-a-toi.merge_requests:0' | |
30 | |
31 PLUGIN_INFO = { | |
32 C.PI_NAME: _("Merge requests management"), | |
33 C.PI_IMPORT_NAME: "MERGE_REQUESTS", | |
34 C.PI_TYPE: "EXP", | |
35 C.PI_PROTOCOLS: [], | |
2471
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
36 C.PI_DEPENDENCIES: ["XEP-0060", "PUBSUB_SCHEMA"], |
2448 | 37 C.PI_MAIN: "MergeRequests", |
38 C.PI_HANDLER: "no", | |
39 C.PI_DESCRIPTION: _("""Merge requests management plugin""") | |
40 } | |
41 | |
42 FIELD_DATA_TYPE = u'type' | |
43 FIELD_DATA = u'request_data' | |
44 | |
45 | |
46 MergeRequestHandler = namedtuple("MergeRequestHandler", ['name', | |
47 'handler', | |
48 'data_types', | |
49 'short_desc', | |
50 'priority']) | |
51 | |
52 | |
53 class MergeRequests(object): | |
54 META_AUTHOR = u'author' | |
55 META_EMAIL = u'email' | |
56 META_TIMESTAMP = u'timestamp' | |
57 META_HASH = u'hash' | |
58 META_PARENT_HASH = u'parent_hash' | |
59 META_COMMIT_MSG = u'commit_msg' | |
60 META_DIFF = u'diff' | |
61 # index of the diff in the whole data | |
62 # needed to retrieve comments location | |
63 META_DIFF_IDX = u'diff_idx' | |
64 | |
65 def __init__(self, host): | |
66 log.info(_(u"Merge requests plugin initialization")) | |
67 self.host = host | |
68 host.registerNamespace('merge_requests', NS_MERGE_REQUESTS) | |
69 self._p = self.host.plugins["XEP-0060"] | |
70 self._s = self.host.plugins["PUBSUB_SCHEMA"] | |
71 self._handlers = {} | |
72 self._handlers_list = [] # handlers sorted by priority | |
73 self._type_handlers = {} # data type => handler map | |
74 host.bridge.addMethod("mergeRequestsGet", ".plugin", | |
75 in_sign='ssiassa{ss}s', out_sign='(asa{ss}aaa{ss})', | |
76 method=self._get, | |
77 async=True | |
78 ) | |
79 host.bridge.addMethod("mergeRequestSet", ".plugin", | |
80 in_sign='ssssa{sas}ssa{ss}s', out_sign='s', | |
81 method=self._set, | |
82 async=True) | |
83 host.bridge.addMethod("mergeRequestsSchemaGet", ".plugin", | |
84 in_sign='sss', out_sign='s', | |
2471
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
85 method=utils.partial(self._s._getUISchema, default_node=NS_MERGE_REQUESTS), |
2448 | 86 async=True) |
87 host.bridge.addMethod("mergeRequestParseData", ".plugin", | |
88 in_sign='ss', out_sign='aa{ss}', | |
89 method=self._parseData, | |
90 async=True) | |
91 | |
92 def register(self, name, handler, data_types, short_desc, priority=0): | |
93 """register an merge request handler | |
94 | |
95 @param name(unicode): name of the handler | |
96 @param handler(object): instance of the handler. | |
97 It must have the following methods, which may all return a Deferred: | |
98 - check(repository): True if repository can be handled | |
99 - export(repository): return export data, i.e. the patches | |
100 - parse(export_data): parse report data and return a list of dict (1 per patch) with: | |
101 - title: title of the commit message (first line) | |
102 - body: body of the commit message | |
103 @aram data_types(list[unicode]): data types that his handler can generate or parse | |
104 """ | |
105 if name in self._handlers: | |
106 raise exceptions.ConflictError(_(u"a handler with name {name} already exists!").format( | |
107 name = name)) | |
108 self._handlers[name] = MergeRequestHandler(name, | |
109 handler, | |
110 data_types, | |
111 short_desc, | |
112 priority) | |
113 self._handlers_list.append(name) | |
114 self._handlers_list.sort(key=lambda name: self._handlers[name].priority) | |
115 if isinstance(data_types, basestring): | |
116 data_types = [data_types] | |
117 for data_type in data_types: | |
118 if data_type in self._type_handlers: | |
119 log.warning(_(u'merge requests of type {type} are already handled by {old_handler}, ' | |
120 u'ignoring {new_handler}').format( | |
121 type = data_type, | |
122 old_handler = self._type_handlers[data_type].name, | |
123 new_handler = name)) | |
124 continue | |
125 self._type_handlers[data_type] = self._handlers[name] | |
126 | |
127 def _get(self, service='', node='', max_items=10, item_ids=None, sub_id=None, extra_dict=None, profile_key=C.PROF_KEY_NONE): | |
128 if extra_dict and 'parse' in extra_dict: | |
129 extra_dict['parse'] = C.bool(extra_dict['parse']) | |
2471
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
130 client, service, node, max_items, extra, sub_id = self._s.prepareBridgeGet(service, node, max_items, sub_id, extra_dict, profile_key) |
2448 | 131 d = self.get(client, service, node or None, max_items, item_ids, sub_id or None, extra.rsm_request, extra.extra) |
132 d.addCallback(lambda (tickets, metadata, parsed_patches): ( | |
133 self._p.serItemsData((tickets, metadata)) + | |
134 ([[{key: unicode(value) for key, value in p.iteritems()} | |
135 for p in patches] for patches in parsed_patches],))) | |
136 return d | |
137 | |
138 @defer.inlineCallbacks | |
139 def get(self, client, service=None, node=None, max_items=None, item_ids=None, sub_id=None, rsm_request=None, extra=None): | |
140 """Retrieve merge requests and convert them to XMLUI | |
141 | |
142 @param extra(XEP-0060.parse, None): can have following keys: | |
143 - update(bool): if True, will return list of parsed request data | |
144 other params are the same as for [TICKETS._get] | |
145 @return (tuple[list[unicode], list[dict[unicode, unicode]])): tuple with | |
146 - XMLUI of the tickets, like [TICKETS._get] | |
147 - list of parsed request data (if extra['parse'] is set, else empty list) | |
148 """ | |
149 if not node: | |
150 node = NS_MERGE_REQUESTS | |
2471
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
151 tickets_xmlui, metadata = yield self._s.getDataFormItems( |
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
152 client, |
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
153 service, |
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
154 node, |
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
155 max_items=max_items, |
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
156 item_ids=item_ids, |
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
157 sub_id=sub_id, |
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
158 rsm_request=rsm_request, |
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
159 extra=extra, |
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
160 form_ns=NS_MERGE_REQUESTS) |
2448 | 161 parsed_patches = [] |
162 if extra.get('parse', False): | |
163 for ticket in tickets_xmlui: | |
164 request_type = ticket.named_widgets[FIELD_DATA_TYPE].value | |
165 request_data = ticket.named_widgets[FIELD_DATA].value | |
166 parsed_data = yield self.parseData(request_type, request_data) | |
167 parsed_patches.append(parsed_data) | |
168 defer.returnValue((tickets_xmlui, metadata, parsed_patches)) | |
169 | |
170 def _set(self, service, node, repository, method, values, schema=None, item_id=None, extra=None, profile_key=C.PROF_KEY_NONE): | |
2472
3f0a3a0ed290
plugins tickets, merge-requests: fixed call of prepareBridgeSet
Goffi <goffi@goffi.org>
parents:
2471
diff
changeset
|
171 client, service, node, schema, item_id, extra = self._s.prepareBridgeSet(service, node, schema, item_id, extra, profile_key) |
3f0a3a0ed290
plugins tickets, merge-requests: fixed call of prepareBridgeSet
Goffi <goffi@goffi.org>
parents:
2471
diff
changeset
|
172 d = self.set(client, service, node, repository, method, values, schema, item_id, extra, deserialise=True) |
2448 | 173 d.addCallback(lambda ret: ret or u'') |
174 return d | |
175 | |
176 @defer.inlineCallbacks | |
177 def set(self, client, service, node, repository, method=u'auto', values=None, schema=None, item_id=None, extra=None, deserialise=False): | |
178 """Publish a tickets | |
179 | |
180 @param service(None, jid.JID): Pubsub service to use | |
181 @param node(unicode, None): Pubsub node to use | |
182 None to use default tickets node | |
183 @param repository(unicode): path to the repository where the code stands | |
184 @param method(unicode): name of one of the registered handler, or "auto" to try autodetection. | |
185 other arguments are same as for [TICKETS.set] | |
186 @return (unicode): id of the created item | |
187 """ | |
188 if values is None: | |
189 values = {} | |
190 | |
191 if FIELD_DATA in values: | |
192 raise exceptions.DataError(_(u"{field} is set by backend, you must not set it in frontend").format( | |
193 field = FIELD_DATA)) | |
194 | |
195 if method == u'auto': | |
196 for name in self._handlers_list: | |
197 handler = self._handlers[name].handler | |
198 can_handle = yield handler.check(repository) | |
199 if can_handle: | |
200 log.info(_(u"{name} handler will be used").format(name=name)) | |
201 break | |
202 else: | |
203 log.warning(_(u"repository {path} can't be handled by any installed handler").format( | |
204 path = repository)) | |
205 raise exceptions.NotFound(_(u"no handler for this repository has been found")) | |
206 else: | |
207 try: | |
208 handler = self._handlers[name].handler | |
209 except KeyError: | |
210 raise exceptions.NotFound(_(u"No handler of this name found")) | |
211 | |
212 data = yield handler.export(repository) | |
213 if not data.strip(): | |
214 raise exceptions.DataError(_(u'export data is empty, do you have any change to send?')) | |
215 | |
216 if not values.get(u'title') or not values.get(u'body'): | |
217 patches = yield handler.parse(data, values.get(FIELD_DATA_TYPE)) | |
218 commits_msg = patches[-1][self.META_COMMIT_MSG] | |
219 msg_lines = commits_msg.splitlines() | |
220 if not values.get(u'title'): | |
221 values[u'title'] = msg_lines[0] | |
222 if not values.get(u'body'): | |
223 values[u'body'] = u'\n'.join(msg_lines[1:]) | |
224 | |
225 values[FIELD_DATA] = data | |
226 | |
2471
544c4d2fec45
plugins schema, merge_requests, tickets*: factorisation
Goffi <goffi@goffi.org>
parents:
2448
diff
changeset
|
227 item_id = yield self._t.set(client, service, node, values, schema, item_id, extra, deserialise, default_node=NS_MERGE_REQUESTS, form_ns=NS_MERGE_REQUESTS) |
2448 | 228 defer.returnValue(item_id) |
229 | |
230 def _parseData(self, data_type, data): | |
231 d = self.parseData(data_type, data) | |
232 d.addCallback(lambda parsed_patches: | |
233 {key: unicode(value) for key, value in parsed_patches.iteritems()}) | |
234 return d | |
235 | |
236 def parseData(self, data_type, data): | |
237 """Parse a merge request data according to type | |
238 | |
239 @param data_type(unicode): type of the data to parse | |
240 @param data(unicode): data to parse | |
241 @return(list[dict[unicode, unicode]]): parsed data | |
242 key of dictionary are self.META_* or keys specifics to handler | |
243 @raise NotFound: no handler can parse this data_type | |
244 """ | |
245 try: | |
246 handler = self._type_handlers[data_type] | |
247 except KeyError: | |
248 raise exceptions.NotFound(_(u'No handler can handle data type "{type}"').format(type=data_type)) | |
249 return defer.maybeDeferred(handler.handler.parse, data, data_type) |