libervia-backend: annotate src/plugins/plugin_misc_merge_requests.py @ 2483:0046283a285d
dates update

author    Goffi <goffi@goffi.org>
date      Fri, 26 Jan 2018 11:14:13 +0100
parents   447c3de6b9e5
children  65695b9343d3
#!/usr/bin/env python2
# -*- coding: utf-8 -*-

# SAT plugin for Pubsub Schemas
# Copyright (C) 2009-2018 Jérôme Poisson (goffi@goffi.org)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from sat.core.i18n import _
from sat.core.constants import Const as C
from sat.core import exceptions
from twisted.internet import defer
from collections import namedtuple
from sat.tools import utils
from sat.core.log import getLogger
log = getLogger(__name__)

NS_MERGE_REQUESTS = 'org.salut-a-toi.merge_requests:0'

PLUGIN_INFO = {
    C.PI_NAME: _("Merge requests management"),
    C.PI_IMPORT_NAME: "MERGE_REQUESTS",
    C.PI_TYPE: "EXP",
    C.PI_PROTOCOLS: [],
    C.PI_DEPENDENCIES: ["XEP-0060", "PUBSUB_SCHEMA", "TICKETS"],
    C.PI_MAIN: "MergeRequests",
    C.PI_HANDLER: "no",
    C.PI_DESCRIPTION: _("""Merge requests management plugin""")
}

FIELD_DATA_TYPE = u'type'
FIELD_DATA = u'request_data'


MergeRequestHandler = namedtuple("MergeRequestHandler", ['name',
                                                         'handler',
                                                         'data_types',
                                                         'short_desc',
                                                         'priority'])


class MergeRequests(object):
    META_AUTHOR = u'author'
    META_EMAIL = u'email'
    META_TIMESTAMP = u'timestamp'
    META_HASH = u'hash'
    META_PARENT_HASH = u'parent_hash'
    META_COMMIT_MSG = u'commit_msg'
    META_DIFF = u'diff'
    # index of the diff in the whole data
    # needed to retrieve comments location
    META_DIFF_IDX = u'diff_idx'

    def __init__(self, host):
        log.info(_(u"Merge requests plugin initialization"))
        self.host = host
        host.registerNamespace('merge_requests', NS_MERGE_REQUESTS)
        self._p = self.host.plugins["XEP-0060"]
        self._s = self.host.plugins["PUBSUB_SCHEMA"]
        self._t = self.host.plugins["TICKETS"]
        self._handlers = {}
        self._handlers_list = []  # handlers sorted by priority
        self._type_handlers = {}  # data type => handler map
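        # in_sign/out_sign below are D-Bus-like type signatures used by the bridge
        # (e.g. 's' = string, 'i' = integer, 'a{ss}' = dict mapping strings to strings)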
        host.bridge.addMethod("mergeRequestsGet", ".plugin",
                              in_sign='ssiassa{ss}s', out_sign='(asa{ss}aaa{ss})',
                              method=self._get,
                              async=True
                              )
        host.bridge.addMethod("mergeRequestSet", ".plugin",
                              in_sign='ssssa{sas}ssa{ss}s', out_sign='s',
                              method=self._set,
                              async=True)
        host.bridge.addMethod("mergeRequestsSchemaGet", ".plugin",
                              in_sign='sss', out_sign='s',
                              method=utils.partial(self._s._getUISchema, default_node=NS_MERGE_REQUESTS),
                              async=True)
        host.bridge.addMethod("mergeRequestParseData", ".plugin",
                              in_sign='ss', out_sign='aa{ss}',
                              method=self._parseData,
                              async=True)

    def register(self, name, handler, data_types, short_desc, priority=0):
        """register a merge request handler

        @param name(unicode): name of the handler
        @param handler(object): instance of the handler.
            It must have the following methods, which may all return a Deferred:
                - check(repository): True if repository can be handled
                - export(repository): return export data, i.e. the patches
                - parse(export_data, data_type=None): parse export data and return a list of dict (1 per patch) with:
                    - title: title of the commit message (first line)
                    - body: body of the commit message
        @param data_types(list[unicode]): data types that this handler can generate or parse
        """
        if name in self._handlers:
            raise exceptions.ConflictError(_(u"a handler with name {name} already exists!").format(
                name = name))
        self._handlers[name] = MergeRequestHandler(name,
                                                   handler,
                                                   data_types,
                                                   short_desc,
                                                   priority)
        self._handlers_list.append(name)
        self._handlers_list.sort(key=lambda name: self._handlers[name].priority)
        if isinstance(data_types, basestring):
            data_types = [data_types]
        for data_type in data_types:
            if data_type in self._type_handlers:
                log.warning(_(u'merge requests of type {type} are already handled by {old_handler}, '
                              u'ignoring {new_handler}').format(
                                  type = data_type,
                                  old_handler = self._type_handlers[data_type].name,
                                  new_handler = name))
                continue
            self._type_handlers[data_type] = self._handlers[name]

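    # Illustrative sketch (not part of the original code): a handler is typically
    # registered from another plugin, with an object exposing check/export/parse.
    # The names below (DummyHandler, u'dummy') are hypothetical.
    #
    #     class DummyHandler(object):
    #         def check(self, repository):
    #             return repository.endswith(u'.dummy')
    #
    #         def export(self, repository):
    #             return u'exported patches as text'
    #
    #         def parse(self, export_data, data_type=None):
    #             return [{u'title': u'first line of commit msg',
    #                      u'body': u'rest of the commit msg'}]
    #
    #     host.plugins["MERGE_REQUESTS"].register(u'dummy', DummyHandler(),
    #                                             u'dummy', u'dummy handler')
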
    def _get(self, service='', node='', max_items=10, item_ids=None, sub_id=None, extra_dict=None, profile_key=C.PROF_KEY_NONE):
        if extra_dict and 'parse' in extra_dict:
            extra_dict['parse'] = C.bool(extra_dict['parse'])
        client, service, node, max_items, extra, sub_id = self._s.prepareBridgeGet(service, node, max_items, sub_id, extra_dict, profile_key)
        d = self.get(client, service, node or None, max_items, item_ids, sub_id or None, extra.rsm_request, extra.extra)
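        # serialise for the bridge: pubsub items/metadata go through serItemsData,
        # and each value of the parsed patches is coerced to unicode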
        d.addCallback(lambda (tickets, metadata, parsed_patches): (
            self._p.serItemsData((tickets, metadata)) +
            ([[{key: unicode(value) for key, value in p.iteritems()}
               for p in patches] for patches in parsed_patches],)))
        return d

    @defer.inlineCallbacks
    def get(self, client, service=None, node=None, max_items=None, item_ids=None, sub_id=None, rsm_request=None, extra=None):
141 """Retrieve merge requests and convert them to XMLUI | |
142 | |
143 @param extra(XEP-0060.parse, None): can have following keys: | |
144 - update(bool): if True, will return list of parsed request data | |
145 other params are the same as for [TICKETS._get] | |
146 @return (tuple[list[unicode], list[dict[unicode, unicode]])): tuple with | |
147 - XMLUI of the tickets, like [TICKETS._get] | |
148 - list of parsed request data (if extra['parse'] is set, else empty list) | |
149 """ | |
        if not node:
            node = NS_MERGE_REQUESTS
        tickets_xmlui, metadata = yield self._s.getDataFormItems(
            client,
            service,
            node,
            max_items=max_items,
            item_ids=item_ids,
            sub_id=sub_id,
            rsm_request=rsm_request,
            extra=extra,
            form_ns=NS_MERGE_REQUESTS)
        parsed_patches = []
        if extra.get('parse', False):
            for ticket in tickets_xmlui:
                request_type = ticket.named_widgets[FIELD_DATA_TYPE].value
                request_data = ticket.named_widgets[FIELD_DATA].value
                parsed_data = yield self.parseData(request_type, request_data)
                parsed_patches.append(parsed_data)
        defer.returnValue((tickets_xmlui, metadata, parsed_patches))

    def _set(self, service, node, repository, method, values, schema=None, item_id=None, extra=None, profile_key=C.PROF_KEY_NONE):
        client, service, node, schema, item_id, extra = self._s.prepareBridgeSet(service, node, schema, item_id, extra, profile_key)
        d = self.set(client, service, node, repository, method, values, schema, item_id, extra, deserialise=True)
        d.addCallback(lambda ret: ret or u'')
        return d

    @defer.inlineCallbacks
    def set(self, client, service, node, repository, method=u'auto', values=None, schema=None, item_id=None, extra=None, deserialise=False):
        """Publish a merge request ticket

        @param service(None, jid.JID): Pubsub service to use
        @param node(unicode, None): Pubsub node to use
            None to use default tickets node
        @param repository(unicode): path to the repository where the code stands
        @param method(unicode): name of one of the registered handlers, or "auto" to try autodetection.
        other arguments are same as for [TICKETS.set]
        @return (unicode): id of the created item
        """
        if not node:
            node = NS_MERGE_REQUESTS
        if values is None:
            values = {}

        if FIELD_DATA in values:
            raise exceptions.DataError(_(u"{field} is set by backend, you must not set it in frontend").format(
                field = FIELD_DATA))

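        # with "auto", try each registered handler in priority order; the "else"
        # clause of the for loop only runs when no handler accepted the repository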
        if method == u'auto':
            for name in self._handlers_list:
                handler = self._handlers[name].handler
                can_handle = yield handler.check(repository)
                if can_handle:
                    log.info(_(u"{name} handler will be used").format(name=name))
                    break
            else:
                log.warning(_(u"repository {path} can't be handled by any installed handler").format(
                    path = repository))
                raise exceptions.NotFound(_(u"no handler for this repository has been found"))
        else:
            try:
                handler = self._handlers[method].handler
            except KeyError:
                raise exceptions.NotFound(_(u"No handler of this name found"))

        data = yield handler.export(repository)
        if not data.strip():
            raise exceptions.DataError(_(u'export data is empty, do you have any change to send?'))

        if not values.get(u'title') or not values.get(u'body'):
            patches = yield handler.parse(data, values.get(FIELD_DATA_TYPE))
            commits_msg = patches[-1][self.META_COMMIT_MSG]
            msg_lines = commits_msg.splitlines()
            if not values.get(u'title'):
                values[u'title'] = msg_lines[0]
            if not values.get(u'body'):
                values[u'body'] = u'\n'.join(msg_lines[1:])

        values[FIELD_DATA] = data

        item_id = yield self._t.set(client, service, node, values, schema, item_id, extra, deserialise, form_ns=NS_MERGE_REQUESTS)
        defer.returnValue(item_id)

    def _parseData(self, data_type, data):
        d = self.parseData(data_type, data)
        # parseData returns a list of dict (one per patch), so serialise each patch
        d.addCallback(lambda parsed_patches:
                      [{key: unicode(value) for key, value in p.iteritems()}
                       for p in parsed_patches])
        return d

    def parseData(self, data_type, data):
        """Parse merge request data according to its type

        @param data_type(unicode): type of the data to parse
        @param data(unicode): data to parse
        @return(list[dict[unicode, unicode]]): parsed data
            keys of the dictionaries are self.META_* or keys specific to the handler
        @raise NotFound: no handler can parse this data_type
        """
        try:
            handler = self._type_handlers[data_type]
        except KeyError:
            raise exceptions.NotFound(_(u'No handler can handle data type "{type}"').format(type=data_type))
        return defer.maybeDeferred(handler.handler.parse, data, data_type)
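
# Illustrative usage sketch (not part of the original module): assuming a handler
# was registered for a hypothetical u'mercurial' data type, another plugin could
# parse exported patches like this:
#
#     d = host.plugins["MERGE_REQUESTS"].parseData(u'mercurial', export_data)
#     d.addCallback(lambda patches: log.info(u"parsed {} patches".format(len(patches))))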