Mercurial > libervia-backend
comparison libervia/backend/plugins/plugin_misc_identity.py @ 4270:0d7bb4df2343
Reformatted code base using black.
| author | Goffi <goffi@goffi.org> |
|---|---|
| date | Wed, 19 Jun 2024 18:44:57 +0200 |
| parents | 5f2d496c633f |
| children | ff88a807852d |
comparison
equal
deleted
inserted
replaced
| 4269:64a85ce8be70 | 4270:0d7bb4df2343 |
|---|---|
39 from PIL import Image | 39 from PIL import Image |
40 except: | 40 except: |
41 raise exceptions.MissingModule( | 41 raise exceptions.MissingModule( |
42 "Missing module pillow, please download/install it from https://python-pillow.github.io" | 42 "Missing module pillow, please download/install it from https://python-pillow.github.io" |
43 ) | 43 ) |
44 | |
45 | 44 |
46 | 45 |
47 log = getLogger(__name__) | 46 log = getLogger(__name__) |
48 | 47 |
49 | 48 |
84 "update_data_filter": self.avatar_update_data_filter, | 83 "update_data_filter": self.avatar_update_data_filter, |
85 # we store the metadata in database, to restore it on next connection | 84 # we store the metadata in database, to restore it on next connection |
86 # (it is stored only for roster entities) | 85 # (it is stored only for roster entities) |
87 "store": True, | 86 "store": True, |
88 "store_serialisation": self._avatar_ser, | 87 "store_serialisation": self._avatar_ser, |
89 "store_deserialisation": self._avatar_deser | 88 "store_deserialisation": self._avatar_deser, |
90 }, | 89 }, |
91 "nicknames": { | 90 "nicknames": { |
92 "type": list, | 91 "type": list, |
93 # accumulate all nicknames from all callbacks in a list instead | 92 # accumulate all nicknames from all callbacks in a list instead |
94 # of returning only the data from the first successful callback | 93 # of returning only the data from the first successful callback |
101 "description": { | 100 "description": { |
102 "type": str, | 101 "type": str, |
103 "get_all": True, | 102 "get_all": True, |
104 "get_post_treatment": self.description_get_post_treatment, | 103 "get_post_treatment": self.description_get_post_treatment, |
105 "store": True, | 104 "store": True, |
106 } | 105 }, |
107 } | 106 } |
108 host.trigger.add("roster_update", self._roster_update_trigger) | 107 host.trigger.add("roster_update", self._roster_update_trigger) |
109 host.memory.set_signal_on_update("avatar") | 108 host.memory.set_signal_on_update("avatar") |
110 host.memory.set_signal_on_update("nicknames") | 109 host.memory.set_signal_on_update("nicknames") |
111 host.bridge.add_method( | 110 host.bridge.add_method( |
159 | 158 |
160 async def profile_connecting(self, client): | 159 async def profile_connecting(self, client): |
161 client._identity_update_lock = [] | 160 client._identity_update_lock = [] |
162 # we restore known identities from database | 161 # we restore known identities from database |
163 client._identity_storage = persistent.LazyPersistentBinaryDict( | 162 client._identity_storage = persistent.LazyPersistentBinaryDict( |
164 "identity", client.profile) | 163 "identity", client.profile |
164 ) | |
165 | 165 |
166 stored_data = await client._identity_storage.all() | 166 stored_data = await client._identity_storage.all() |
167 | 167 |
168 to_delete = [] | 168 to_delete = [] |
169 | 169 |
170 for key, value in stored_data.items(): | 170 for key, value in stored_data.items(): |
171 entity_s, name = key.split('\n') | 171 entity_s, name = key.split("\n") |
172 try: | 172 try: |
173 metadata = self.metadata[name] | 173 metadata = self.metadata[name] |
174 except KeyError: | 174 except KeyError: |
175 log.debug(f"removing {key} from storage: not an allowed metadata name") | 175 log.debug(f"removing {key} from storage: not an allowed metadata name") |
176 to_delete.append(key) | 176 to_delete.append(key) |
179 deser_method = metadata.get("store_deserialisation") | 179 deser_method = metadata.get("store_deserialisation") |
180 if deser_method is not None: | 180 if deser_method is not None: |
181 value = deser_method(value) | 181 value = deser_method(value) |
182 entity = jid.JID(entity_s) | 182 entity = jid.JID(entity_s) |
183 | 183 |
184 if name == 'avatar': | 184 if name == "avatar": |
185 if value is not None: | 185 if value is not None: |
186 try: | 186 try: |
187 cache_uid = value['cache_uid'] | 187 cache_uid = value["cache_uid"] |
188 if not cache_uid: | 188 if not cache_uid: |
189 raise ValueError | 189 raise ValueError |
190 filename = value['filename'] | 190 filename = value["filename"] |
191 if not filename: | 191 if not filename: |
192 raise ValueError | 192 raise ValueError |
193 except (ValueError, KeyError): | 193 except (ValueError, KeyError): |
194 log.warning( | 194 log.warning( |
195 f"invalid data for {entity} avatar, it will be deleted: " | 195 f"invalid data for {entity} avatar, it will be deleted: " |
196 f"{value}") | 196 f"{value}" |
197 ) | |
197 to_delete.append(key) | 198 to_delete.append(key) |
198 continue | 199 continue |
199 cache = self.host.common_cache.get_metadata(cache_uid) | 200 cache = self.host.common_cache.get_metadata(cache_uid) |
200 if cache is None: | 201 if cache is None: |
201 log.debug( | 202 log.debug( |
202 f"purging avatar for {entity}: it is not in cache anymore") | 203 f"purging avatar for {entity}: it is not in cache anymore" |
204 ) | |
203 to_delete.append(key) | 205 to_delete.append(key) |
204 continue | 206 continue |
205 | 207 |
206 self.host.memory.update_entity_data( | 208 self.host.memory.update_entity_data(client, entity, name, value, silent=True) |
207 client, entity, name, value, silent=True | |
208 ) | |
209 | 209 |
210 for key in to_delete: | 210 for key in to_delete: |
211 await client._identity_storage.adel(key) | 211 await client._identity_storage.adel(key) |
212 | 212 |
213 def _roster_update_trigger(self, client, roster_item): | 213 def _roster_update_trigger(self, client, roster_item): |
217 f"roster nickname has been updated to {roster_item.name!r} for " | 217 f"roster nickname has been updated to {roster_item.name!r} for " |
218 f"{roster_item.jid}" | 218 f"{roster_item.jid}" |
219 ) | 219 ) |
220 defer.ensureDeferred( | 220 defer.ensureDeferred( |
221 self.update( | 221 self.update( |
222 client, | 222 client, IMPORT_NAME, "nicknames", [roster_item.name], roster_item.jid |
223 IMPORT_NAME, | |
224 "nicknames", | |
225 [roster_item.name], | |
226 roster_item.jid | |
227 ) | 223 ) |
228 ) | 224 ) |
229 return True | 225 return True |
230 | 226 |
231 def register( | 227 def register( |
232 self, | 228 self, |
233 origin: str, | 229 origin: str, |
234 metadata_name: str, | 230 metadata_name: str, |
235 cb_get: Union[Coroutine, defer.Deferred], | 231 cb_get: Union[Coroutine, defer.Deferred], |
236 cb_set: Union[Coroutine, defer.Deferred], | 232 cb_set: Union[Coroutine, defer.Deferred], |
237 priority: int=0): | 233 priority: int = 0, |
234 ): | |
238 """Register callbacks to handle identity metadata | 235 """Register callbacks to handle identity metadata |
239 | 236 |
240 @param origin: namespace of the plugin managing this metadata | 237 @param origin: namespace of the plugin managing this metadata |
241 @param metadata_name: name of metadata can be: | 238 @param metadata_name: name of metadata can be: |
242 - avatar | 239 - avatar |
249 methods with bigger priorities will be called first | 246 methods with bigger priorities will be called first |
250 """ | 247 """ |
251 if not metadata_name in self.metadata.keys(): | 248 if not metadata_name in self.metadata.keys(): |
252 raise ValueError(f"Invalid metadata_name: {metadata_name!r}") | 249 raise ValueError(f"Invalid metadata_name: {metadata_name!r}") |
253 callback = Callback(origin=origin, get=cb_get, set=cb_set, priority=priority) | 250 callback = Callback(origin=origin, get=cb_get, set=cb_set, priority=priority) |
254 cb_list = self.metadata[metadata_name].setdefault('callbacks', []) | 251 cb_list = self.metadata[metadata_name].setdefault("callbacks", []) |
255 cb_list.append(callback) | 252 cb_list.append(callback) |
256 cb_list.sort(key=lambda c: c.priority, reverse=True) | 253 cb_list.sort(key=lambda c: c.priority, reverse=True) |
257 | 254 |
258 def get_identity_jid(self, client, peer_jid): | 255 def get_identity_jid(self, client, peer_jid): |
259 """Return jid to use to set identity metadata | 256 """Return jid to use to set identity metadata |
274 """Check that type used for a metadata is the one declared in self.metadata""" | 271 """Check that type used for a metadata is the one declared in self.metadata""" |
275 value_type = self.metadata[metadata_name]["type"] | 272 value_type = self.metadata[metadata_name]["type"] |
276 if not isinstance(value, value_type): | 273 if not isinstance(value, value_type): |
277 raise ValueError( | 274 raise ValueError( |
278 f"{value} has wrong type: it is {type(value)} while {value_type} was " | 275 f"{value} has wrong type: it is {type(value)} while {value_type} was " |
279 f"expected") | 276 f"expected" |
277 ) | |
280 | 278 |
281 def get_field_type(self, metadata_name: str) -> str: | 279 def get_field_type(self, metadata_name: str) -> str: |
282 """Return the type the requested field | 280 """Return the type the requested field |
283 | 281 |
284 @param metadata_name: name of the field to check | 282 @param metadata_name: name of the field to check |
285 @raise KeyError: the request field doesn't exist | 283 @raise KeyError: the request field doesn't exist |
286 """ | 284 """ |
287 return self.metadata[metadata_name]["type"] | 285 return self.metadata[metadata_name]["type"] |
288 | 286 |
289 async def get( | 287 async def get( |
290 self, | 288 self, |
291 client: SatXMPPEntity, | 289 client: SatXMPPEntity, |
292 metadata_name: str, | 290 metadata_name: str, |
293 entity: Optional[jid.JID], | 291 entity: Optional[jid.JID], |
294 use_cache: bool=True, | 292 use_cache: bool = True, |
295 prefilled_values: Optional[Dict[str, Any]]=None | 293 prefilled_values: Optional[Dict[str, Any]] = None, |
296 ): | 294 ): |
297 """Retrieve identity metadata of an entity | 295 """Retrieve identity metadata of an entity |
298 | 296 |
299 if metadata is already in cache, it is returned. Otherwise, registered callbacks | 297 if metadata is already in cache, it is returned. Otherwise, registered callbacks |
300 will be tried in priority order (bigger to lower) | 298 will be tried in priority order (bigger to lower) |
301 @param metadata_name: name of the metadata | 299 @param metadata_name: name of the metadata |
309 entity = self.get_identity_jid(client, entity) | 307 entity = self.get_identity_jid(client, entity) |
310 try: | 308 try: |
311 metadata = self.metadata[metadata_name] | 309 metadata = self.metadata[metadata_name] |
312 except KeyError: | 310 except KeyError: |
313 raise ValueError(f"Invalid metadata name: {metadata_name!r}") | 311 raise ValueError(f"Invalid metadata name: {metadata_name!r}") |
314 get_all = metadata.get('get_all', False) | 312 get_all = metadata.get("get_all", False) |
315 if use_cache: | 313 if use_cache: |
316 try: | 314 try: |
317 data = self.host.memory.get_entity_datum( | 315 data = self.host.memory.get_entity_datum(client, entity, metadata_name) |
318 client, entity, metadata_name) | |
319 except (KeyError, exceptions.UnknownEntityError): | 316 except (KeyError, exceptions.UnknownEntityError): |
320 pass | 317 pass |
321 else: | 318 else: |
322 return data | 319 return data |
323 | 320 |
324 try: | 321 try: |
325 callbacks = metadata['callbacks'] | 322 callbacks = metadata["callbacks"] |
326 except KeyError: | 323 except KeyError: |
327 log.warning(_("No callback registered for {metadata_name}") | 324 log.warning( |
328 .format(metadata_name=metadata_name)) | 325 _("No callback registered for {metadata_name}").format( |
326 metadata_name=metadata_name | |
327 ) | |
328 ) | |
329 return [] if get_all else None | 329 return [] if get_all else None |
330 | 330 |
331 if get_all: | 331 if get_all: |
332 all_data = [] | 332 all_data = [] |
333 elif prefilled_values is not None: | 333 elif prefilled_values is not None: |
334 raise exceptions.InternalError( | 334 raise exceptions.InternalError( |
335 "prefilled_values can only be used when `get_all` is set") | 335 "prefilled_values can only be used when `get_all` is set" |
336 ) | |
336 | 337 |
337 for callback in callbacks: | 338 for callback in callbacks: |
338 try: | 339 try: |
339 if prefilled_values is not None and callback.origin in prefilled_values: | 340 if prefilled_values is not None and callback.origin in prefilled_values: |
340 data = prefilled_values[callback.origin] | 341 data = prefilled_values[callback.origin] |
341 log.debug( | 342 log.debug( |
342 f"using prefilled values {data!r} for {metadata_name} with " | 343 f"using prefilled values {data!r} for {metadata_name} with " |
343 f"{callback.origin}") | 344 f"{callback.origin}" |
345 ) | |
344 else: | 346 else: |
345 data = await defer.ensureDeferred(callback.get(client, entity)) | 347 data = await defer.ensureDeferred(callback.get(client, entity)) |
346 except exceptions.CancelError: | 348 except exceptions.CancelError: |
347 continue | 349 continue |
348 except Exception as e: | 350 except Exception as e: |
349 log.warning( | 351 log.warning( |
350 _("Error while trying to get {metadata_name} with {callback}: {e}") | 352 _( |
351 .format(callback=callback.get, metadata_name=metadata_name, e=e)) | 353 "Error while trying to get {metadata_name} with {callback}: {e}" |
354 ).format(callback=callback.get, metadata_name=metadata_name, e=e) | |
355 ) | |
352 else: | 356 else: |
353 if data: | 357 if data: |
354 self.check_type(metadata_name, data) | 358 self.check_type(metadata_name, data) |
355 if get_all: | 359 if get_all: |
356 if isinstance(data, list): | 360 if isinstance(data, list): |
367 | 371 |
368 post_treatment = metadata.get("get_post_treatment") | 372 post_treatment = metadata.get("get_post_treatment") |
369 if post_treatment is not None: | 373 if post_treatment is not None: |
370 data = await utils.as_deferred(post_treatment, client, entity, data) | 374 data = await utils.as_deferred(post_treatment, client, entity, data) |
371 | 375 |
372 self.host.memory.update_entity_data( | 376 self.host.memory.update_entity_data(client, entity, metadata_name, data) |
373 client, entity, metadata_name, data) | 377 |
374 | 378 if metadata.get("store", False): |
375 if metadata.get('store', False): | |
376 if data is not None: | 379 if data is not None: |
377 ser_method = metadata.get("store_serialisation") | 380 ser_method = metadata.get("store_serialisation") |
378 if ser_method is not None: | 381 if ser_method is not None: |
379 data = ser_method(data) | 382 data = ser_method(data) |
380 key = f"{entity}\n{metadata_name}" | 383 key = f"{entity}\n{metadata_name}" |
399 if data_filter is not None: | 402 if data_filter is not None: |
400 data = await utils.as_deferred(data_filter, client, entity, data) | 403 data = await utils.as_deferred(data_filter, client, entity, data) |
401 self.check_type(metadata_name, data) | 404 self.check_type(metadata_name, data) |
402 | 405 |
403 try: | 406 try: |
404 callbacks = metadata['callbacks'] | 407 callbacks = metadata["callbacks"] |
405 except KeyError: | 408 except KeyError: |
406 log.warning(_("No callback registered for {metadata_name}") | 409 log.warning( |
407 .format(metadata_name=metadata_name)) | 410 _("No callback registered for {metadata_name}").format( |
411 metadata_name=metadata_name | |
412 ) | |
413 ) | |
408 return exceptions.FeatureNotFound(f"Can't set {metadata_name} for {entity}") | 414 return exceptions.FeatureNotFound(f"Can't set {metadata_name} for {entity}") |
409 | 415 |
410 for callback in callbacks: | 416 for callback in callbacks: |
411 try: | 417 try: |
412 await defer.ensureDeferred(callback.set(client, data, entity)) | 418 await defer.ensureDeferred(callback.set(client, data, entity)) |
413 except exceptions.CancelError: | 419 except exceptions.CancelError: |
414 continue | 420 continue |
415 except Exception as e: | 421 except Exception as e: |
416 log.warning( | 422 log.warning( |
417 _("Error while trying to set {metadata_name} with {callback}: {e}") | 423 _( |
418 .format(callback=callback.set, metadata_name=metadata_name, e=e)) | 424 "Error while trying to set {metadata_name} with {callback}: {e}" |
425 ).format(callback=callback.set, metadata_name=metadata_name, e=e) | |
426 ) | |
419 else: | 427 else: |
420 break | 428 break |
421 else: | 429 else: |
422 raise exceptions.FeatureNotFound(f"Can't set {metadata_name} for {entity}") | 430 raise exceptions.FeatureNotFound(f"Can't set {metadata_name} for {entity}") |
423 | 431 |
429 self, | 437 self, |
430 client: SatXMPPEntity, | 438 client: SatXMPPEntity, |
431 origin: str, | 439 origin: str, |
432 metadata_name: str, | 440 metadata_name: str, |
433 data: Any, | 441 data: Any, |
434 entity: Optional[jid.JID] | 442 entity: Optional[jid.JID], |
435 ): | 443 ): |
436 """Update a metadata in cache | 444 """Update a metadata in cache |
437 | 445 |
438 This method may be called by plugins when an identity metadata is available. | 446 This method may be called by plugins when an identity metadata is available. |
439 @param origin: namespace of the plugin which is source of the metadata | 447 @param origin: namespace of the plugin which is source of the metadata |
443 log.debug(f"update is locked for {entity}'s {metadata_name}") | 451 log.debug(f"update is locked for {entity}'s {metadata_name}") |
444 return | 452 return |
445 metadata = self.metadata[metadata_name] | 453 metadata = self.metadata[metadata_name] |
446 | 454 |
447 try: | 455 try: |
448 cached_data = self.host.memory.get_entity_datum( | 456 cached_data = self.host.memory.get_entity_datum(client, entity, metadata_name) |
449 client, entity, metadata_name) | |
450 except (KeyError, exceptions.UnknownEntityError): | 457 except (KeyError, exceptions.UnknownEntityError): |
451 # metadata is not cached, we do the update | 458 # metadata is not cached, we do the update |
452 pass | 459 pass |
453 else: | 460 else: |
454 # metadata is cached, we check if the new value differs from the cached one | 461 # metadata is cached, we check if the new value differs from the cached one |
459 | 466 |
460 if data is None: | 467 if data is None: |
461 if cached_data is None: | 468 if cached_data is None: |
462 log.debug( | 469 log.debug( |
463 f"{metadata_name} for {entity} is already disabled, nothing to " | 470 f"{metadata_name} for {entity} is already disabled, nothing to " |
464 f"do") | 471 f"do" |
472 ) | |
465 return | 473 return |
466 elif cached_data is None: | 474 elif cached_data is None: |
467 pass | 475 pass |
468 elif not update_is_new_data(client, entity, cached_data, data): | 476 elif not update_is_new_data(client, entity, cached_data, data): |
469 log.debug( | 477 log.debug( |
470 f"{metadata_name} for {entity} is already in cache, nothing to " | 478 f"{metadata_name} for {entity} is already in cache, nothing to " f"do" |
471 f"do") | 479 ) |
472 return | 480 return |
473 | 481 |
474 # we can't use the cache, so we do the update | 482 # we can't use the cache, so we do the update |
475 | 483 |
476 log.debug(f"updating {metadata_name} for {entity}") | 484 log.debug(f"updating {metadata_name} for {entity}") |
477 | 485 |
478 if metadata.get('get_all', False): | 486 if metadata.get("get_all", False): |
479 # get_all is set, meaning that we have to check all plugins | 487 # get_all is set, meaning that we have to check all plugins |
480 # so we first delete current cache | 488 # so we first delete current cache |
481 try: | 489 try: |
482 self.host.memory.del_entity_datum(client, entity, metadata_name) | 490 self.host.memory.del_entity_datum(client, entity, metadata_name) |
483 except (KeyError, exceptions.UnknownEntityError): | 491 except (KeyError, exceptions.UnknownEntityError): |
489 await self.get(client, metadata_name, entity, prefilled_values={origin: data}) | 497 await self.get(client, metadata_name, entity, prefilled_values={origin: data}) |
490 client._identity_update_lock.remove((entity, metadata_name)) | 498 client._identity_update_lock.remove((entity, metadata_name)) |
491 return | 499 return |
492 | 500 |
493 if data is not None: | 501 if data is not None: |
494 data_filter = metadata['update_data_filter'] | 502 data_filter = metadata["update_data_filter"] |
495 if data_filter is not None: | 503 if data_filter is not None: |
496 data = await utils.as_deferred(data_filter, client, entity, data) | 504 data = await utils.as_deferred(data_filter, client, entity, data) |
497 self.check_type(metadata_name, data) | 505 self.check_type(metadata_name, data) |
498 | 506 |
499 self.host.memory.update_entity_data(client, entity, metadata_name, data) | 507 self.host.memory.update_entity_data(client, entity, metadata_name, data) |
500 | 508 |
501 if metadata.get('store', False): | 509 if metadata.get("store", False): |
502 key = f"{entity}\n{metadata_name}" | 510 key = f"{entity}\n{metadata_name}" |
503 if data is not None: | 511 if data is not None: |
504 ser_method = metadata.get("store_serialisation") | 512 ser_method = metadata.get("store_serialisation") |
505 if ser_method is not None: | 513 if ser_method is not None: |
506 data = ser_method(data) | 514 data = ser_method(data) |
517 return d | 525 return d |
518 | 526 |
519 def _set_avatar(self, file_path, entity, profile_key=C.PROF_KEY_NONE): | 527 def _set_avatar(self, file_path, entity, profile_key=C.PROF_KEY_NONE): |
520 client = self.host.get_client(profile_key) | 528 client = self.host.get_client(profile_key) |
521 entity = jid.JID(entity) if entity else None | 529 entity = jid.JID(entity) if entity else None |
522 return defer.ensureDeferred( | 530 return defer.ensureDeferred(self.set(client, "avatar", file_path, entity)) |
523 self.set(client, "avatar", file_path, entity)) | 531 |
524 | 532 def _blocking_cache_avatar(self, source: str, avatar_data: dict[str, Any]): |
525 def _blocking_cache_avatar( | |
526 self, | |
527 source: str, | |
528 avatar_data: dict[str, Any] | |
529 ): | |
530 """This method is executed in a separated thread""" | 533 """This method is executed in a separated thread""" |
531 if avatar_data["media_type"] == "image/svg+xml": | 534 if avatar_data["media_type"] == "image/svg+xml": |
532 # for vector image, we save directly | 535 # for vector image, we save directly |
533 img_buf = open(avatar_data["path"], "rb") | 536 img_buf = open(avatar_data["path"], "rb") |
534 else: | 537 else: |
560 media_type = avatar_data["media_type"] | 563 media_type = avatar_data["media_type"] |
561 avatar_data["base64"] = image_b64 = b64encode(img_buf.read()).decode() | 564 avatar_data["base64"] = image_b64 = b64encode(img_buf.read()).decode() |
562 img_buf.seek(0) | 565 img_buf.seek(0) |
563 image_hash = hashlib.sha1(img_buf.read()).hexdigest() | 566 image_hash = hashlib.sha1(img_buf.read()).hexdigest() |
564 img_buf.seek(0) | 567 img_buf.seek(0) |
565 with self.host.common_cache.cache_data( | 568 with self.host.common_cache.cache_data(source, image_hash, media_type) as f: |
566 source, image_hash, media_type | |
567 ) as f: | |
568 f.write(img_buf.read()) | 569 f.write(img_buf.read()) |
569 avatar_data['path'] = Path(f.name) | 570 avatar_data["path"] = Path(f.name) |
570 avatar_data['filename'] = avatar_data['path'].name | 571 avatar_data["filename"] = avatar_data["path"].name |
571 avatar_data['cache_uid'] = image_hash | 572 avatar_data["cache_uid"] = image_hash |
572 | 573 |
573 async def cache_avatar(self, source: str, avatar_data: Dict[str, Any]) -> None: | 574 async def cache_avatar(self, source: str, avatar_data: Dict[str, Any]) -> None: |
574 """Resize if necessary and cache avatar | 575 """Resize if necessary and cache avatar |
575 | 576 |
576 @param source: source importing the avatar (usually it is plugin's import name), | 577 @param source: source importing the avatar (usually it is plugin's import name), |
588 """Convert avatar file path to dict data""" | 589 """Convert avatar file path to dict data""" |
589 file_path = Path(file_path) | 590 file_path = Path(file_path) |
590 if not file_path.is_file(): | 591 if not file_path.is_file(): |
591 raise ValueError(f"There is no file at {file_path} to use as avatar") | 592 raise ValueError(f"There is no file at {file_path} to use as avatar") |
592 avatar_data = { | 593 avatar_data = { |
593 'path': file_path, | 594 "path": file_path, |
594 'filename': file_path.name, | 595 "filename": file_path.name, |
595 'media_type': image.guess_type(file_path), | 596 "media_type": image.guess_type(file_path), |
596 } | 597 } |
597 media_type = avatar_data['media_type'] | 598 media_type = avatar_data["media_type"] |
598 if media_type is None: | 599 if media_type is None: |
599 raise ValueError(f"Can't identify type of image at {file_path}") | 600 raise ValueError(f"Can't identify type of image at {file_path}") |
600 if not media_type.startswith('image/'): | 601 if not media_type.startswith("image/"): |
601 raise ValueError(f"File at {file_path} doesn't appear to be an image") | 602 raise ValueError(f"File at {file_path} doesn't appear to be an image") |
602 await self.cache_avatar(IMPORT_NAME, avatar_data) | 603 await self.cache_avatar(IMPORT_NAME, avatar_data) |
603 return avatar_data | 604 return avatar_data |
604 | 605 |
605 async def avatar_set_post_treatment(self, client, entity, avatar_data): | 606 async def avatar_set_post_treatment(self, client, entity, avatar_data): |
606 """Update our own avatar""" | 607 """Update our own avatar""" |
607 await self.update(client, IMPORT_NAME, "avatar", avatar_data, entity) | 608 await self.update(client, IMPORT_NAME, "avatar", avatar_data, entity) |
608 | 609 |
609 def avatar_build_metadata( | 610 def avatar_build_metadata( |
610 self, | 611 self, |
611 path: Path, | 612 path: Path, |
612 media_type: Optional[str] = None, | 613 media_type: Optional[str] = None, |
613 cache_uid: Optional[str] = None | 614 cache_uid: Optional[str] = None, |
614 ) -> Optional[Dict[str, Union[str, Path, None]]]: | 615 ) -> Optional[Dict[str, Union[str, Path, None]]]: |
615 """Helper method to generate avatar metadata | 616 """Helper method to generate avatar metadata |
616 | 617 |
617 @param path(str, Path, None): path to avatar file | 618 @param path(str, Path, None): path to avatar file |
618 avatar file must be in cache | 619 avatar file must be in cache |
637 "media_type": media_type, | 638 "media_type": media_type, |
638 "cache_uid": cache_uid, | 639 "cache_uid": cache_uid, |
639 } | 640 } |
640 | 641 |
641 def avatar_update_is_new_data(self, client, entity, cached_data, new_data): | 642 def avatar_update_is_new_data(self, client, entity, cached_data, new_data): |
642 return new_data['path'] != cached_data['path'] | 643 return new_data["path"] != cached_data["path"] |
643 | 644 |
644 async def avatar_update_data_filter(self, client, entity, data): | 645 async def avatar_update_data_filter(self, client, entity, data): |
645 if not isinstance(data, dict): | 646 if not isinstance(data, dict): |
646 raise ValueError(f"Invalid data type ({type(data)}), a dict is expected") | 647 raise ValueError(f"Invalid data type ({type(data)}), a dict is expected") |
647 mandatory_keys = {'path', 'filename', 'cache_uid'} | 648 mandatory_keys = {"path", "filename", "cache_uid"} |
648 if not data.keys() >= mandatory_keys: | 649 if not data.keys() >= mandatory_keys: |
649 raise ValueError(f"missing avatar data keys: {mandatory_keys - data.keys()}") | 650 raise ValueError(f"missing avatar data keys: {mandatory_keys - data.keys()}") |
650 return data | 651 return data |
651 | 652 |
652 def _avatar_ser(self, data: dict) -> dict: | 653 def _avatar_ser(self, data: dict) -> dict: |
700 | 701 |
701 def nicknames_update_is_new_data(self, client, entity, cached_data, new_nicknames): | 702 def nicknames_update_is_new_data(self, client, entity, cached_data, new_nicknames): |
702 return not set(new_nicknames).issubset(cached_data) | 703 return not set(new_nicknames).issubset(cached_data) |
703 | 704 |
704 async def description_get_post_treatment( | 705 async def description_get_post_treatment( |
705 self, | 706 self, client: SatXMPPEntity, entity: jid.JID, plugin_description: List[str] |
706 client: SatXMPPEntity, | |
707 entity: jid.JID, | |
708 plugin_description: List[str] | |
709 ) -> str: | 707 ) -> str: |
710 """Join all descriptions in a unique string""" | 708 """Join all descriptions in a unique string""" |
711 return '\n'.join(plugin_description) | 709 return "\n".join(plugin_description) |
712 | 710 |
713 def _get_identity(self, entity_s, metadata_filter, use_cache, profile): | 711 def _get_identity(self, entity_s, metadata_filter, use_cache, profile): |
714 entity = jid.JID(entity_s) | 712 entity = jid.JID(entity_s) |
715 client = self.host.get_client(profile) | 713 client = self.host.get_client(profile) |
716 d = defer.ensureDeferred( | 714 d = defer.ensureDeferred( |
717 self.get_identity(client, entity, metadata_filter, use_cache)) | 715 self.get_identity(client, entity, metadata_filter, use_cache) |
716 ) | |
718 d.addCallback(data_format.serialise) | 717 d.addCallback(data_format.serialise) |
719 return d | 718 return d |
720 | 719 |
721 async def get_identity( | 720 async def get_identity( |
722 self, | 721 self, |
723 client: SatXMPPEntity, | 722 client: SatXMPPEntity, |
724 entity: Optional[jid.JID] = None, | 723 entity: Optional[jid.JID] = None, |
725 metadata_filter: Optional[List[str]] = None, | 724 metadata_filter: Optional[List[str]] = None, |
726 use_cache: bool = True | 725 use_cache: bool = True, |
727 ) -> Dict[str, Any]: | 726 ) -> Dict[str, Any]: |
728 """Retrieve identity of an entity | 727 """Retrieve identity of an entity |
729 | 728 |
730 @param entity: entity to check | 729 @param entity: entity to check |
731 @param metadata_filter: if not None or empty, only return | 730 @param metadata_filter: if not None or empty, only return |
741 else: | 740 else: |
742 metadata_names = metadata_filter | 741 metadata_names = metadata_filter |
743 | 742 |
744 for metadata_name in metadata_names: | 743 for metadata_name in metadata_names: |
745 id_data[metadata_name] = await self.get( | 744 id_data[metadata_name] = await self.get( |
746 client, metadata_name, entity, use_cache) | 745 client, metadata_name, entity, use_cache |
746 ) | |
747 | 747 |
748 return id_data | 748 return id_data |
749 | 749 |
750 def _get_identities(self, entities_s, metadata_filter, profile): | 750 def _get_identities(self, entities_s, metadata_filter, profile): |
751 entities = [jid.JID(e) for e in entities_s] | 751 entities = [jid.JID(e) for e in entities_s] |
752 client = self.host.get_client(profile) | 752 client = self.host.get_client(profile) |
753 d = defer.ensureDeferred(self.get_identities(client, entities, metadata_filter)) | 753 d = defer.ensureDeferred(self.get_identities(client, entities, metadata_filter)) |
754 d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()})) | 754 d.addCallback(lambda d: data_format.serialise({str(j): i for j, i in d.items()})) |
755 return d | 755 return d |
756 | 756 |
757 async def get_identities( | 757 async def get_identities( |
758 self, | 758 self, |
759 client: SatXMPPEntity, | 759 client: SatXMPPEntity, |
790 return identities | 790 return identities |
791 | 791 |
792 def _get_base_identities(self, profile_key): | 792 def _get_base_identities(self, profile_key): |
793 client = self.host.get_client(profile_key) | 793 client = self.host.get_client(profile_key) |
794 d = defer.ensureDeferred(self.get_base_identities(client)) | 794 d = defer.ensureDeferred(self.get_base_identities(client)) |
795 d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()})) | 795 d.addCallback(lambda d: data_format.serialise({str(j): i for j, i in d.items()})) |
796 return d | 796 return d |
797 | 797 |
798 async def get_base_identities( | 798 async def get_base_identities( |
799 self, | 799 self, |
800 client: SatXMPPEntity, | 800 client: SatXMPPEntity, |
807 if client.is_component: | 807 if client.is_component: |
808 entities = [client.jid.userhostJID()] | 808 entities = [client.jid.userhostJID()] |
809 else: | 809 else: |
810 entities = client.roster.get_jids() + [client.jid.userhostJID()] | 810 entities = client.roster.get_jids() + [client.jid.userhostJID()] |
811 | 811 |
812 return await self.get_identities( | 812 return await self.get_identities(client, entities, ["avatar", "nicknames"]) |
813 client, | |
814 entities, | |
815 ['avatar', 'nicknames'] | |
816 ) | |
817 | 813 |
818 def _set_identity(self, id_data_s, profile): | 814 def _set_identity(self, id_data_s, profile): |
819 client = self.host.get_client(profile) | 815 client = self.host.get_client(profile) |
820 id_data = data_format.deserialise(id_data_s) | 816 id_data = data_format.deserialise(id_data_s) |
821 return defer.ensureDeferred(self.set_identity(client, id_data)) | 817 return defer.ensureDeferred(self.set_identity(client, id_data)) |
825 | 821 |
826 @param id_data(dict): data to update, key can be one of self.metadata keys | 822 @param id_data(dict): data to update, key can be one of self.metadata keys |
827 """ | 823 """ |
828 if not id_data.keys() <= self.metadata.keys(): | 824 if not id_data.keys() <= self.metadata.keys(): |
829 raise ValueError( | 825 raise ValueError( |
830 f"Invalid metadata names: {id_data.keys() - self.metadata.keys()}") | 826 f"Invalid metadata names: {id_data.keys() - self.metadata.keys()}" |
827 ) | |
831 for metadata_name, data in id_data.items(): | 828 for metadata_name, data in id_data.items(): |
832 try: | 829 try: |
833 await self.set(client, metadata_name, data) | 830 await self.set(client, metadata_name, data) |
834 except Exception as e: | 831 except Exception as e: |
835 log.warning( | 832 log.warning( |
836 _("Can't set metadata {metadata_name!r}: {reason}") | 833 _("Can't set metadata {metadata_name!r}: {reason}").format( |
837 .format(metadata_name=metadata_name, reason=e)) | 834 metadata_name=metadata_name, reason=e |
835 ) | |
836 ) |