comparison libervia/backend/plugins/plugin_misc_merge_requests.py @ 4071:4b842c1fb686

refactoring: renamed `sat` package to `libervia.backend`
author Goffi <goffi@goffi.org>
date Fri, 02 Jun 2023 11:49:51 +0200
parents sat/plugins/plugin_misc_merge_requests.py@524856bd7b19
children 0d7bb4df2343
#!/usr/bin/env python3


# SAT plugin for merge requests management
# Copyright (C) 2009-2021 Jérôme Poisson (goffi@goffi.org)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from collections import namedtuple
from twisted.internet import defer
from twisted.words.protocols.jabber import jid
from libervia.backend.core.i18n import _
from libervia.backend.core.constants import Const as C
from libervia.backend.core import exceptions
from libervia.backend.tools.common import data_format
from libervia.backend.core.log import getLogger


log = getLogger(__name__)

APP_NS_MERGE_REQUESTS = 'org.salut-a-toi.merge_requests:0'

PLUGIN_INFO = {
    C.PI_NAME: _("Merge requests management"),
    C.PI_IMPORT_NAME: "MERGE_REQUESTS",
    C.PI_TYPE: "EXP",
    C.PI_PROTOCOLS: [],
    C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0346", "LISTS", "TEXT_SYNTAXES"],
    C.PI_MAIN: "MergeRequests",
    C.PI_HANDLER: "no",
    C.PI_DESCRIPTION: _("""Merge requests management plugin""")
}

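# Names of the schema fields carrying the merge request payload: FIELD_DATA_TYPE stores
# the handler type which produced the data, FIELD_DATA the raw exported patches
# (FIELD_DATA is always set by the backend, cf. set() below).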
FIELD_DATA_TYPE = 'type'
FIELD_DATA = 'request_data'


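# Record describing a registered handler: `handler` is the instance implementing
# check/export/parse (cf. register() below), `data_types` the type names it can produce
# or parse, and `priority` the order in which handlers are tried during autodetection.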
MergeRequestHandler = namedtuple("MergeRequestHandler", ['name',
                                                         'handler',
                                                         'data_types',
                                                         'short_desc',
                                                         'priority'])


class MergeRequests(object):
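    # keys of the per-patch dicts returned by handlers' parse() (handlers may add their
    # own specific keys in addition to these)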
    META_AUTHOR = 'author'
    META_EMAIL = 'email'
    META_TIMESTAMP = 'timestamp'
    META_HASH = 'hash'
    META_PARENT_HASH = 'parent_hash'
    META_COMMIT_MSG = 'commit_msg'
    META_DIFF = 'diff'
    # index of the diff in the whole data
    # needed to retrieve comments location
    META_DIFF_IDX = 'diff_idx'

    def __init__(self, host):
        log.info(_("Merge requests plugin initialization"))
        self.host = host
        self._s = self.host.plugins["XEP-0346"]
        self.namespace = self._s.get_submitted_ns(APP_NS_MERGE_REQUESTS)
        host.register_namespace('merge_requests', self.namespace)
        self._p = self.host.plugins["XEP-0060"]
        self._t = self.host.plugins["LISTS"]
        self._handlers = {}
        self._handlers_list = []  # handlers sorted by priority
        self._type_handlers = {}  # data type => handler map
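        # Bridge methods exposed to frontends; the argument order of each signature
        # mirrors the wrapped method below (e.g. "merge_requests_get" with 'ssiassss'
        # maps to service, node, max_items, item_ids, sub_id, extra, profile_key,
        # cf. _get).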
        host.bridge.add_method("merge_requests_get", ".plugin",
                               in_sign='ssiassss', out_sign='s',
                               method=self._get,
                               async_=True
                               )
        host.bridge.add_method("merge_request_set", ".plugin",
                               in_sign='ssssa{sas}ssss', out_sign='s',
                               method=self._set,
                               async_=True)
        host.bridge.add_method("merge_requests_schema_get", ".plugin",
                               in_sign='sss', out_sign='s',
                               method=lambda service, nodeIdentifier, profile_key:
                                   self._s._get_ui_schema(service,
                                                          nodeIdentifier,
                                                          default_node=self.namespace,
                                                          profile_key=profile_key),
                               async_=True)
        host.bridge.add_method("merge_request_parse_data", ".plugin",
                               in_sign='ss', out_sign='aa{ss}',
                               method=self._parse_data,
                               async_=True)
        host.bridge.add_method("merge_requests_import", ".plugin",
                               in_sign='ssssa{ss}s', out_sign='',
                               method=self._import,
                               async_=True
                               )

    def register(self, name, handler, data_types, short_desc, priority=0):
        """Register a merge request handler

        @param name(unicode): name of the handler
        @param handler(object): instance of the handler.
            It must have the following methods, which may all return a Deferred:
                - check(repository)->bool: True if repository can be handled
                - export(repository)->str: return export data, i.e. the patches
                - parse(export_data): parse export data and return a list of dict
                    (1 per patch) with:
                        - title: title of the commit message (first line)
                        - body: body of the commit message
        @param data_types(list[unicode]): data types that this handler can generate or parse
        """
        if name in self._handlers:
            raise exceptions.ConflictError(_("a handler with name {name} already "
                                             "exists!").format(name=name))
        self._handlers[name] = MergeRequestHandler(name,
                                                   handler,
                                                   data_types,
                                                   short_desc,
                                                   priority)
        self._handlers_list.append(name)
        self._handlers_list.sort(key=lambda name: self._handlers[name].priority)
        if isinstance(data_types, str):
            data_types = [data_types]
        for data_type in data_types:
            if data_type in self._type_handlers:
                log.warning(_('merge requests of type {type} are already handled by '
                              '{old_handler}, ignoring {new_handler}').format(
                                  type=data_type,
                                  old_handler=self._type_handlers[data_type].name,
                                  new_handler=name))
                continue
            self._type_handlers[data_type] = self._handlers[name]
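
    # A minimal handler sketch (hypothetical, for illustration only): a concrete handler
    # plugin (e.g. one wrapping a VCS) implements the methods described above plus
    # import_() (used by import_request below), then registers itself:
    #
    #     class DummyHandler:
    #         def check(self, repository):
    #             return True  # or a real detection of the repository type
    #
    #         def export(self, repository):
    #             return "<patches as text>"
    #
    #         def parse(self, export_data, data_type=None):
    #             return [{"author": "...", "commit_msg": "...", "diff": "..."}]
    #
    #         def import_(self, repository, data, data_type, item_id, service, node,
    #                     extra):
    #             ...
    #
    #     host.plugins["MERGE_REQUESTS"].register(
    #         "dummy", DummyHandler(), "dummy_patches", "dummy handler", priority=50)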

    def serialise(self, get_data):
        tickets_xmlui, metadata, items_patches = get_data
        tickets_xmlui_s, metadata = self._p.trans_items_data((tickets_xmlui, metadata))
        return data_format.serialise({
            "items": tickets_xmlui_s,
            "metadata": metadata,
            "items_patches": items_patches,
        })

    def _get(self, service='', node='', max_items=10, item_ids=None, sub_id=None,
             extra="", profile_key=C.PROF_KEY_NONE):
        extra = data_format.deserialise(extra)
        client, service, node, max_items, extra, sub_id = self._s.prepare_bridge_get(
            service, node, max_items, sub_id, extra, profile_key)
        d = self.get(client, service, node or None, max_items, item_ids, sub_id or None,
                     extra.rsm_request, extra.extra)
        d.addCallback(self.serialise)
        return d

    @defer.inlineCallbacks
    def get(self, client, service=None, node=None, max_items=None, item_ids=None,
            sub_id=None, rsm_request=None, extra=None):
        """Retrieve merge requests and convert them to XMLUI

        @param extra(XEP-0060.parse, None): can have following keys:
            - parse(bool): if True, also return the list of parsed request data
        other params are the same as for [TICKETS._get]
        @return (tuple[list[unicode], dict, list[dict[unicode, unicode]]]): tuple with
            - XMLUI of the tickets, like [TICKETS._get]
            - node metadata
            - list of parsed request data (if extra['parse'] is set, else empty list)
        """
        if not node:
            node = self.namespace
        if extra is None:
            extra = {}
        # XXX: Q&D way to get list for labels when displaying them, but text when we
        #      have to modify them
        if C.bool(extra.get('labels_as_list', C.BOOL_FALSE)):
            filters = {'labels': self._s.textbox_2_list_filter}
        else:
            filters = {}
        tickets_xmlui, metadata = yield defer.ensureDeferred(
            self._s.get_data_form_items(
                client,
                service,
                node,
                max_items=max_items,
                item_ids=item_ids,
                sub_id=sub_id,
                rsm_request=rsm_request,
                extra=extra,
                form_ns=APP_NS_MERGE_REQUESTS,
                filters=filters)
        )
        parsed_patches = []
        if extra.get('parse', False):
            for ticket in tickets_xmlui:
                request_type = ticket.named_widgets[FIELD_DATA_TYPE].value
                request_data = ticket.named_widgets[FIELD_DATA].value
                parsed_data = yield self.parse_data(request_type, request_data)
                parsed_patches.append(parsed_data)
        defer.returnValue((tickets_xmlui, metadata, parsed_patches))

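    # Usage sketch (hypothetical caller, e.g. another plugin): retrieving requests
    # together with their decoded patch series by setting the 'parse' flag in extra:
    #
    #     tickets, metadata, patches = await self.host.plugins["MERGE_REQUESTS"].get(
    #         client, service, extra={"parse": True})
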
    def _set(self, service, node, repository, method, values, schema=None, item_id=None,
             extra="", profile_key=C.PROF_KEY_NONE):
        client, service, node, schema, item_id, extra = self._s.prepare_bridge_set(
            service, node, schema, item_id, extra, profile_key)
        d = defer.ensureDeferred(
            self.set(
                client, service, node, repository, method, values, schema,
                item_id or None, extra, deserialise=True
            )
        )
        d.addCallback(lambda ret: ret or '')
        return d

    async def set(self, client, service, node, repository, method='auto', values=None,
                  schema=None, item_id=None, extra=None, deserialise=False):
        """Publish a ticket

        @param service(None, jid.JID): Pubsub service to use
        @param node(unicode, None): Pubsub node to use
            None to use default tickets node
        @param repository(unicode): path to the repository where the code stands
        @param method(unicode): name of one of the registered handlers,
            or "auto" to try autodetection.
        other arguments are same as for [TICKETS.set]
        @return (unicode): id of the created item
        """
        if not node:
            node = self.namespace
        if values is None:
            values = {}
        if extra is None:
            extra = {}
        update = extra.get('update', False)
        if not repository and not update:
            # in case of update, we may re-use former patches data
            # so repository is not mandatory
            raise exceptions.DataError(_("repository must be specified"))

        if FIELD_DATA in values:
            raise exceptions.DataError(_("{field} is set by backend, you must not set "
                                         "it in frontend").format(field=FIELD_DATA))

        if repository:
            if method == 'auto':
                for name in self._handlers_list:
                    handler = self._handlers[name].handler
                    can_handle = await handler.check(repository)
                    if can_handle:
                        log.info(_("{name} handler will be used").format(name=name))
                        break
                else:
                    log.warning(_("repository {path} can't be handled by any installed "
                                  "handler").format(path=repository))
                    raise exceptions.NotFound(_("no handler for this repository has "
                                                "been found"))
            else:
                try:
                    handler = self._handlers[method].handler
                except KeyError:
                    raise exceptions.NotFound(_("No handler of this name found"))

            data = await handler.export(repository)
            if not data.strip():
                raise exceptions.DataError(_('export data is empty, do you have any '
                                             'changes to send?'))

            if not values.get('title') or not values.get('body'):
                patches = handler.parse(data, values.get(FIELD_DATA_TYPE))
                commits_msg = patches[-1][self.META_COMMIT_MSG]
                msg_lines = commits_msg.splitlines()
                if not values.get('title'):
                    values['title'] = msg_lines[0]
                if not values.get('body'):
                    ts = self.host.plugins['TEXT_SYNTAXES']
                    xhtml = await ts.convert(
                        '\n'.join(msg_lines[1:]),
                        syntax_from=ts.SYNTAX_TEXT,
                        syntax_to=ts.SYNTAX_XHTML,
                        profile=client.profile)
                    values['body'] = '<div xmlns="{ns}">{xhtml}</div>'.format(
                        ns=C.NS_XHTML, xhtml=xhtml)

            values[FIELD_DATA] = data

        item_id = await self._t.set(client, service, node, values, schema, item_id, extra,
                                    deserialise, form_ns=APP_NS_MERGE_REQUESTS)
        return item_id
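
    # Usage sketch (hypothetical caller): publishing a merge request from a local
    # checkout, autodetecting the handler and letting title/body be filled from the
    # last commit message:
    #
    #     item_id = await self.host.plugins["MERGE_REQUESTS"].set(
    #         client, service, None, "/path/to/repository", method="auto", values={})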

    def _parse_data(self, data_type, data):
        d = self.parse_data(data_type, data)
        d.addCallback(lambda parsed_patches: [
            {key: str(value) for key, value in patch.items()} for patch in parsed_patches
        ])
        return d

    def parse_data(self, data_type, data):
        """Parse merge request data according to its type

        @param data_type(unicode): type of the data to parse
        @param data(unicode): data to parse
        @return(list[dict[unicode, unicode]]): parsed data
            keys of the dictionaries are self.META_* or keys specific to the handler
        @raise NotFound: no handler can parse this data_type
        """
        try:
            handler = self._type_handlers[data_type]
        except KeyError:
            raise exceptions.NotFound(_('No handler can handle data type "{type}"')
                                      .format(type=data_type))
        return defer.maybeDeferred(handler.handler.parse, data, data_type)

    def _import(self, repository, item_id, service=None, node=None, extra=None,
                profile_key=C.PROF_KEY_NONE):
        client = self.host.get_client(profile_key)
        service = jid.JID(service) if service else None
        d = self.import_request(client, repository, item_id, service, node or None,
                                extra=extra or None)
        return d

    @defer.inlineCallbacks
    def import_request(self, client, repository, item, service=None, node=None,
                       extra=None):
        """Import a merge request into the specified repository

        @param repository(unicode): path to the repository where the code stands
        """
        if not node:
            node = self.namespace
        tickets_xmlui, metadata = yield defer.ensureDeferred(
            self._s.get_data_form_items(
                client,
                service,
                node,
                max_items=1,
                item_ids=[item],
                form_ns=APP_NS_MERGE_REQUESTS)
        )
        ticket_xmlui = tickets_xmlui[0]
        data = ticket_xmlui.named_widgets[FIELD_DATA].value
        data_type = ticket_xmlui.named_widgets[FIELD_DATA_TYPE].value
        try:
            handler = self._type_handlers[data_type]
        except KeyError:
            raise exceptions.NotFound(_('No handler found to import {data_type}')
                                      .format(data_type=data_type))
        log.info(_("Importing patch [{item_id}] using {name} handler").format(
            item_id=item,
            name=handler.name))
        yield handler.handler.import_(repository, data, data_type, item, service, node,
                                      extra)