comparison libervia/backend/plugins/plugin_misc_merge_requests.py @ 4270:0d7bb4df2343

Reformatted code base using black.
author Goffi <goffi@goffi.org>
date Wed, 19 Jun 2024 18:44:57 +0200
parents 4b842c1fb686
children
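
The change below is a mechanical reformatting: single-quoted strings become double-quoted, trailing commas are added, and long signatures and calls are exploded to one argument per line. As a rough sketch of how such a pass can be reproduced or checked (the exact black version and project configuration used for this commit are not shown here, so the default 88-column mode is an assumption), black can be run as a library on the file:

from pathlib import Path

import black

# Hypothetical check: compare the on-disk file with what black would produce.
# black.Mode() uses black's defaults (88-column lines); the project may
# configure something different in its pyproject.toml.
path = Path("libervia/backend/plugins/plugin_misc_merge_requests.py")
source = path.read_text()
formatted = black.format_str(source, mode=black.Mode())
print("already formatted" if formatted == source else "would be reformatted")
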
comparison
--- 4269:64a85ce8be70
+++ 4270:0d7bb4df2343
@@ -27 +27 @@
 from libervia.backend.core.log import getLogger


 log = getLogger(__name__)

-APP_NS_MERGE_REQUESTS = 'org.salut-a-toi.merge_requests:0'
+APP_NS_MERGE_REQUESTS = "org.salut-a-toi.merge_requests:0"

 PLUGIN_INFO = {
     C.PI_NAME: _("Merge requests management"),
     C.PI_IMPORT_NAME: "MERGE_REQUESTS",
     C.PI_TYPE: "EXP",
     C.PI_PROTOCOLS: [],
     C.PI_DEPENDENCIES: ["XEP-0060", "XEP-0346", "LISTS", "TEXT_SYNTAXES"],
     C.PI_MAIN: "MergeRequests",
     C.PI_HANDLER: "no",
-    C.PI_DESCRIPTION: _("""Merge requests management plugin""")
+    C.PI_DESCRIPTION: _("""Merge requests management plugin"""),
 }

-FIELD_DATA_TYPE = 'type'
-FIELD_DATA = 'request_data'
+FIELD_DATA_TYPE = "type"
+FIELD_DATA = "request_data"


-MergeRequestHandler = namedtuple("MergeRequestHandler", ['name',
-                                                         'handler',
-                                                         'data_types',
-                                                         'short_desc',
-                                                         'priority'])
+MergeRequestHandler = namedtuple(
+    "MergeRequestHandler", ["name", "handler", "data_types", "short_desc", "priority"]
+)


 class MergeRequests(object):
-    META_AUTHOR = 'author'
-    META_EMAIL = 'email'
-    META_TIMESTAMP = 'timestamp'
-    META_HASH = 'hash'
-    META_PARENT_HASH = 'parent_hash'
-    META_COMMIT_MSG = 'commit_msg'
-    META_DIFF = 'diff'
+    META_AUTHOR = "author"
+    META_EMAIL = "email"
+    META_TIMESTAMP = "timestamp"
+    META_HASH = "hash"
+    META_PARENT_HASH = "parent_hash"
+    META_COMMIT_MSG = "commit_msg"
+    META_DIFF = "diff"
     # index of the diff in the whole data
     # needed to retrieve comments location
-    META_DIFF_IDX = 'diff_idx'
+    META_DIFF_IDX = "diff_idx"

     def __init__(self, host):
         log.info(_("Merge requests plugin initialization"))
         self.host = host
         self._s = self.host.plugins["XEP-0346"]
         self.namespace = self._s.get_submitted_ns(APP_NS_MERGE_REQUESTS)
-        host.register_namespace('merge_requests', self.namespace)
+        host.register_namespace("merge_requests", self.namespace)
         self._p = self.host.plugins["XEP-0060"]
         self._t = self.host.plugins["LISTS"]
         self._handlers = {}
         self._handlers_list = []  # handlers sorted by priority
         self._type_handlers = {}  # data type => handler map
-        host.bridge.add_method("merge_requests_get", ".plugin",
-                               in_sign='ssiassss', out_sign='s',
-                               method=self._get,
-                               async_=True
-                               )
-        host.bridge.add_method("merge_request_set", ".plugin",
-                               in_sign='ssssa{sas}ssss', out_sign='s',
-                               method=self._set,
-                               async_=True)
-        host.bridge.add_method("merge_requests_schema_get", ".plugin",
-                               in_sign='sss', out_sign='s',
-                               method=lambda service, nodeIdentifier, profile_key:
-                                   self._s._get_ui_schema(service,
-                                                          nodeIdentifier,
-                                                          default_node=self.namespace,
-                                                          profile_key=profile_key),
-                               async_=True)
-        host.bridge.add_method("merge_request_parse_data", ".plugin",
-                               in_sign='ss', out_sign='aa{ss}',
-                               method=self._parse_data,
-                               async_=True)
-        host.bridge.add_method("merge_requests_import", ".plugin",
-                               in_sign='ssssa{ss}s', out_sign='',
-                               method=self._import,
-                               async_=True
-                               )
+        host.bridge.add_method(
+            "merge_requests_get",
+            ".plugin",
+            in_sign="ssiassss",
+            out_sign="s",
+            method=self._get,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "merge_request_set",
+            ".plugin",
+            in_sign="ssssa{sas}ssss",
+            out_sign="s",
+            method=self._set,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "merge_requests_schema_get",
+            ".plugin",
+            in_sign="sss",
+            out_sign="s",
+            method=lambda service, nodeIdentifier, profile_key: self._s._get_ui_schema(
+                service,
+                nodeIdentifier,
+                default_node=self.namespace,
+                profile_key=profile_key,
+            ),
+            async_=True,
+        )
+        host.bridge.add_method(
+            "merge_request_parse_data",
+            ".plugin",
+            in_sign="ss",
+            out_sign="aa{ss}",
+            method=self._parse_data,
+            async_=True,
+        )
+        host.bridge.add_method(
+            "merge_requests_import",
+            ".plugin",
+            in_sign="ssssa{ss}s",
+            out_sign="",
+            method=self._import,
+            async_=True,
+        )

     def register(self, name, handler, data_types, short_desc, priority=0):
         """register an merge request handler

         @param name(unicode): name of the handler
@@ -116 +133 @@
             - title: title of the commit message (first line)
             - body: body of the commit message
         @aram data_types(list[unicode]): data types that his handler can generate or parse
         """
         if name in self._handlers:
-            raise exceptions.ConflictError(_("a handler with name {name} already "
-                                             "exists!").format(name = name))
-        self._handlers[name] = MergeRequestHandler(name,
-                                                   handler,
-                                                   data_types,
-                                                   short_desc,
-                                                   priority)
+            raise exceptions.ConflictError(
+                _("a handler with name {name} already " "exists!").format(name=name)
+            )
+        self._handlers[name] = MergeRequestHandler(
+            name, handler, data_types, short_desc, priority
+        )
         self._handlers_list.append(name)
         self._handlers_list.sort(key=lambda name: self._handlers[name].priority)
         if isinstance(data_types, str):
             data_types = [data_types]
         for data_type in data_types:
             if data_type in self._type_handlers:
-                log.warning(_('merge requests of type {type} are already handled by '
-                              '{old_handler}, ignoring {new_handler}').format(
-                    type = data_type,
-                    old_handler = self._type_handlers[data_type].name,
-                    new_handler = name))
+                log.warning(
+                    _(
+                        "merge requests of type {type} are already handled by "
+                        "{old_handler}, ignoring {new_handler}"
+                    ).format(
+                        type=data_type,
+                        old_handler=self._type_handlers[data_type].name,
+                        new_handler=name,
+                    )
+                )
                 continue
             self._type_handlers[data_type] = self._handlers[name]

     def serialise(self, get_data):
         tickets_xmlui, metadata, items_patches = get_data
         tickets_xmlui_s, metadata = self._p.trans_items_data((tickets_xmlui, metadata))
-        return data_format.serialise({
-            "items": tickets_xmlui_s,
-            "metadata": metadata,
-            "items_patches": items_patches,
-        })
+        return data_format.serialise(
+            {
+                "items": tickets_xmlui_s,
+                "metadata": metadata,
+                "items_patches": items_patches,
+            }
+        )

-    def _get(self, service='', node='', max_items=10, item_ids=None, sub_id=None,
-             extra="", profile_key=C.PROF_KEY_NONE):
+    def _get(
+        self,
+        service="",
+        node="",
+        max_items=10,
+        item_ids=None,
+        sub_id=None,
+        extra="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         extra = data_format.deserialise(extra)
         client, service, node, max_items, extra, sub_id = self._s.prepare_bridge_get(
-            service, node, max_items, sub_id, extra, profile_key)
-        d = self.get(client, service, node or None, max_items, item_ids, sub_id or None,
-                     extra.rsm_request, extra.extra)
+            service, node, max_items, sub_id, extra, profile_key
+        )
+        d = self.get(
+            client,
+            service,
+            node or None,
+            max_items,
+            item_ids,
+            sub_id or None,
+            extra.rsm_request,
+            extra.extra,
+        )
         d.addCallback(self.serialise)
         return d

     @defer.inlineCallbacks
-    def get(self, client, service=None, node=None, max_items=None, item_ids=None,
-            sub_id=None, rsm_request=None, extra=None):
+    def get(
+        self,
+        client,
+        service=None,
+        node=None,
+        max_items=None,
+        item_ids=None,
+        sub_id=None,
+        rsm_request=None,
+        extra=None,
+    ):
         """Retrieve merge requests and convert them to XMLUI

         @param extra(XEP-0060.parse, None): can have following keys:
             - update(bool): if True, will return list of parsed request data
         other params are the same as for [TICKETS._get]
@@ -175 +224 @@
             node = self.namespace
         if extra is None:
             extra = {}
         # XXX: Q&D way to get list for labels when displaying them, but text when we
         # have to modify them
-        if C.bool(extra.get('labels_as_list', C.BOOL_FALSE)):
-            filters = {'labels': self._s.textbox_2_list_filter}
+        if C.bool(extra.get("labels_as_list", C.BOOL_FALSE)):
+            filters = {"labels": self._s.textbox_2_list_filter}
         else:
             filters = {}
         tickets_xmlui, metadata = yield defer.ensureDeferred(
             self._s.get_data_form_items(
                 client,
@@ -190 +239 @@
                 item_ids=item_ids,
                 sub_id=sub_id,
                 rsm_request=rsm_request,
                 extra=extra,
                 form_ns=APP_NS_MERGE_REQUESTS,
-                filters = filters)
+                filters=filters,
+            )
         )
         parsed_patches = []
-        if extra.get('parse', False):
+        if extra.get("parse", False):
             for ticket in tickets_xmlui:
                 request_type = ticket.named_widgets[FIELD_DATA_TYPE].value
                 request_data = ticket.named_widgets[FIELD_DATA].value
                 parsed_data = yield self.parse_data(request_type, request_data)
                 parsed_patches.append(parsed_data)
         defer.returnValue((tickets_xmlui, metadata, parsed_patches))

-    def _set(self, service, node, repository, method, values, schema=None, item_id=None,
-             extra="", profile_key=C.PROF_KEY_NONE):
+    def _set(
+        self,
+        service,
+        node,
+        repository,
+        method,
+        values,
+        schema=None,
+        item_id=None,
+        extra="",
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client, service, node, schema, item_id, extra = self._s.prepare_bridge_set(
-            service, node, schema, item_id, extra, profile_key)
+            service, node, schema, item_id, extra, profile_key
+        )
         d = defer.ensureDeferred(
             self.set(
-                client, service, node, repository, method, values, schema,
-                item_id or None, extra, deserialise=True
-            )
-        )
-        d.addCallback(lambda ret: ret or '')
+                client,
+                service,
+                node,
+                repository,
+                method,
+                values,
+                schema,
+                item_id or None,
+                extra,
+                deserialise=True,
+            )
+        )
+        d.addCallback(lambda ret: ret or "")
         return d

-    async def set(self, client, service, node, repository, method='auto', values=None,
-                  schema=None, item_id=None, extra=None, deserialise=False):
+    async def set(
+        self,
+        client,
+        service,
+        node,
+        repository,
+        method="auto",
+        values=None,
+        schema=None,
+        item_id=None,
+        extra=None,
+        deserialise=False,
+    ):
         """Publish a tickets

         @param service(None, jid.JID): Pubsub service to use
         @param node(unicode, None): Pubsub node to use
             None to use default tickets node
@@ -231 +311 @@
         """
         if not node:
             node = self.namespace
         if values is None:
             values = {}
-        update = extra.get('update', False)
+        update = extra.get("update", False)
         if not repository and not update:
             # in case of update, we may re-user former patches data
             # so repository is not mandatory
             raise exceptions.DataError(_("repository must be specified"))

         if FIELD_DATA in values:
-            raise exceptions.DataError(_("{field} is set by backend, you must not set "
-                                         "it in frontend").format(field = FIELD_DATA))
+            raise exceptions.DataError(
+                _("{field} is set by backend, you must not set " "it in frontend").format(
+                    field=FIELD_DATA
+                )
+            )

         if repository:
-            if method == 'auto':
+            if method == "auto":
                 for name in self._handlers_list:
                     handler = self._handlers[name].handler
                     can_handle = await handler.check(repository)
                     if can_handle:
                         log.info(_("{name} handler will be used").format(name=name))
                         break
                 else:
-                    log.warning(_("repository {path} can't be handled by any installed "
-                                  "handler").format(
-                        path = repository))
-                    raise exceptions.NotFound(_("no handler for this repository has "
-                                                "been found"))
+                    log.warning(
+                        _(
+                            "repository {path} can't be handled by any installed "
+                            "handler"
+                        ).format(path=repository)
+                    )
+                    raise exceptions.NotFound(
+                        _("no handler for this repository has " "been found")
+                    )
             else:
                 try:
                     handler = self._handlers[name].handler
                 except KeyError:
                     raise exceptions.NotFound(_("No handler of this name found"))

         data = await handler.export(repository)
         if not data.strip():
-            raise exceptions.DataError(_('export data is empty, do you have any '
-                                         'change to send?'))
+            raise exceptions.DataError(
+                _("export data is empty, do you have any " "change to send?")
+            )

-        if not values.get('title') or not values.get('body'):
+        if not values.get("title") or not values.get("body"):
             patches = handler.parse(data, values.get(FIELD_DATA_TYPE))
             commits_msg = patches[-1][self.META_COMMIT_MSG]
             msg_lines = commits_msg.splitlines()
-            if not values.get('title'):
-                values['title'] = msg_lines[0]
-            if not values.get('body'):
-                ts = self.host.plugins['TEXT_SYNTAXES']
+            if not values.get("title"):
+                values["title"] = msg_lines[0]
+            if not values.get("body"):
+                ts = self.host.plugins["TEXT_SYNTAXES"]
                 xhtml = await ts.convert(
-                    '\n'.join(msg_lines[1:]),
-                    syntax_from = ts.SYNTAX_TEXT,
-                    syntax_to = ts.SYNTAX_XHTML,
-                    profile = client.profile)
-                values['body'] = '<div xmlns="{ns}">{xhtml}</div>'.format(
-                    ns=C.NS_XHTML, xhtml=xhtml)
+                    "\n".join(msg_lines[1:]),
+                    syntax_from=ts.SYNTAX_TEXT,
+                    syntax_to=ts.SYNTAX_XHTML,
+                    profile=client.profile,
+                )
+                values["body"] = '<div xmlns="{ns}">{xhtml}</div>'.format(
+                    ns=C.NS_XHTML, xhtml=xhtml
+                )

         values[FIELD_DATA] = data

-        item_id = await self._t.set(client, service, node, values, schema, item_id, extra,
-                                    deserialise, form_ns=APP_NS_MERGE_REQUESTS)
+        item_id = await self._t.set(
+            client,
+            service,
+            node,
+            values,
+            schema,
+            item_id,
+            extra,
+            deserialise,
+            form_ns=APP_NS_MERGE_REQUESTS,
+        )
         return item_id

     def _parse_data(self, data_type, data):
         d = self.parse_data(data_type, data)
-        d.addCallback(lambda parsed_patches:
-            {key: str(value) for key, value in parsed_patches.items()})
+        d.addCallback(
+            lambda parsed_patches: {
+                key: str(value) for key, value in parsed_patches.items()
+            }
+        )
         return d

     def parse_data(self, data_type, data):
         """Parse a merge request data according to type

@@ -306 +408 @@
         @raise NotFound: no handler can parse this data_type
         """
         try:
             handler = self._type_handlers[data_type]
         except KeyError:
-            raise exceptions.NotFound(_('No handler can handle data type "{type}"')
-                                      .format(type=data_type))
+            raise exceptions.NotFound(
+                _('No handler can handle data type "{type}"').format(type=data_type)
+            )
         return defer.maybeDeferred(handler.handler.parse, data, data_type)

-    def _import(self, repository, item_id, service=None, node=None, extra=None,
-                profile_key=C.PROF_KEY_NONE):
+    def _import(
+        self,
+        repository,
+        item_id,
+        service=None,
+        node=None,
+        extra=None,
+        profile_key=C.PROF_KEY_NONE,
+    ):
         client = self.host.get_client(profile_key)
         service = jid.JID(service) if service else None
-        d = self.import_request(client, repository, item_id, service, node or None,
-                                extra=extra or None)
+        d = self.import_request(
+            client, repository, item_id, service, node or None, extra=extra or None
+        )
         return d

     @defer.inlineCallbacks
-    def import_request(self, client, repository, item, service=None, node=None,
-                       extra=None):
+    def import_request(
+        self, client, repository, item, service=None, node=None, extra=None
+    ):
         """import a merge request in specified directory

         @param repository(unicode): path to the repository where the code stands
         """
         if not node:
@@ -334 +446 @@
                 client,
                 service,
                 node,
                 max_items=1,
                 item_ids=[item],
-                form_ns=APP_NS_MERGE_REQUESTS)
+                form_ns=APP_NS_MERGE_REQUESTS,
+            )
         )
         ticket_xmlui = tickets_xmlui[0]
         data = ticket_xmlui.named_widgets[FIELD_DATA].value
         data_type = ticket_xmlui.named_widgets[FIELD_DATA_TYPE].value
         try:
             handler = self._type_handlers[data_type]
         except KeyError:
-            raise exceptions.NotFound(_('No handler found to import {data_type}')
-                                      .format(data_type=data_type))
-        log.info(_("Importing patch [{item_id}] using {name} handler").format(
-            item_id = item,
-            name = handler.name))
-        yield handler.handler.import_(repository, data, data_type, item, service, node,
-                                      extra)
+            raise exceptions.NotFound(
+                _("No handler found to import {data_type}").format(data_type=data_type)
+            )
+        log.info(
+            _("Importing patch [{item_id}] using {name} handler").format(
+                item_id=item, name=handler.name
+            )
+        )
+        yield handler.handler.import_(
+            repository, data, data_type, item, service, node, extra
+        )