Mercurial > libervia-backend

comparison: libervia/backend/plugins/plugin_misc_identity.py @ 4071:4b842c1fb686

refactoring: renamed `sat` package to `libervia.backend`

author    Goffi <goffi@goffi.org>
date      Fri, 02 Jun 2023 11:49:51 +0200
parents   sat/plugins/plugin_misc_identity.py@524856bd7b19
children  5f2d496c633f

comparison of 4070:d10748475025 with 4071:4b842c1fb686
#!/usr/bin/env python3

# Copyright (C) 2009-2021 Jérôme Poisson (goffi@goffi.org)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from collections import namedtuple
import io
from pathlib import Path
from base64 import b64encode
import hashlib
from typing import Any, Coroutine, Dict, List, Optional, Union

from twisted.internet import defer, threads
from twisted.words.protocols.jabber import jid

from libervia.backend.core import exceptions
from libervia.backend.core.constants import Const as C
from libervia.backend.core.i18n import _
from libervia.backend.core.log import getLogger
from libervia.backend.core.xmpp import SatXMPPEntity
from libervia.backend.memory import persistent
from libervia.backend.tools import image
from libervia.backend.tools import utils
from libervia.backend.tools.common import data_format

try:
    from PIL import Image
except ImportError:
    raise exceptions.MissingModule(
        "Missing module pillow, please download/install it from https://python-pillow.github.io"
    )



log = getLogger(__name__)


IMPORT_NAME = "IDENTITY"


PLUGIN_INFO = {
    C.PI_NAME: "Identity Plugin",
    C.PI_IMPORT_NAME: IMPORT_NAME,
    C.PI_TYPE: C.PLUG_TYPE_MISC,
    C.PI_MODES: C.PLUG_MODE_BOTH,
    C.PI_PROTOCOLS: [],
    C.PI_DEPENDENCIES: [],
    C.PI_RECOMMENDATIONS: ["XEP-0045"],
    C.PI_MAIN: "Identity",
    C.PI_HANDLER: "no",
    C.PI_DESCRIPTION: _("""Identity manager"""),
}

Callback = namedtuple("Callback", ("origin", "get", "set", "priority"))
AVATAR_DIM = (128, 128)


class Identity:

    def __init__(self, host):
        log.info(_("Plugin Identity initialization"))
        self.host = host
        self._m = host.plugins.get("XEP-0045")
        self.metadata = {
            "avatar": {
                "type": dict,
                # convert avatar path to avatar metadata (and check validity)
                "set_data_filter": self.avatar_set_data_filter,
                # update profile avatar, so all frontends are aware
                "set_post_treatment": self.avatar_set_post_treatment,
                "update_is_new_data": self.avatar_update_is_new_data,
                "update_data_filter": self.avatar_update_data_filter,
                # we store the metadata in database, to restore it on next connection
                # (it is stored only for roster entities)
                "store": True,
            },
            "nicknames": {
                "type": list,
                # accumulate all nicknames from all callbacks in a list instead
                # of returning only the data from the first successful callback
                "get_all": True,
                # append nicknames from roster, resource, etc.
                "get_post_treatment": self.nicknames_get_post_treatment,
                "update_is_new_data": self.nicknames_update_is_new_data,
                "store": True,
            },
            "description": {
                "type": str,
                "get_all": True,
                "get_post_treatment": self.description_get_post_treatment,
                "store": True,
            }
        }
        host.trigger.add("roster_update", self._roster_update_trigger)
        host.memory.set_signal_on_update("avatar")
        host.memory.set_signal_on_update("nicknames")
        host.bridge.add_method(
            "identity_get",
            ".plugin",
            in_sign="sasbs",
            out_sign="s",
            method=self._get_identity,
            async_=True,
        )
        host.bridge.add_method(
            "identities_get",
            ".plugin",
            in_sign="asass",
            out_sign="s",
            method=self._get_identities,
            async_=True,
        )
        host.bridge.add_method(
            "identities_base_get",
            ".plugin",
            in_sign="s",
            out_sign="s",
            method=self._get_base_identities,
            async_=True,
        )
        host.bridge.add_method(
            "identity_set",
            ".plugin",
            in_sign="ss",
            out_sign="",
            method=self._set_identity,
            async_=True,
        )
        host.bridge.add_method(
            "avatar_get",
            ".plugin",
            in_sign="sbs",
            out_sign="s",
            method=self._getAvatar,
            async_=True,
        )
        host.bridge.add_method(
            "avatar_set",
            ".plugin",
            in_sign="sss",
            out_sign="",
            method=self._set_avatar,
            async_=True,
        )

    async def profile_connecting(self, client):
        client._identity_update_lock = []
        # we restore known identities from database
        client._identity_storage = persistent.LazyPersistentBinaryDict(
            "identity", client.profile)

        stored_data = await client._identity_storage.all()

        to_delete = []

        for key, value in stored_data.items():
            entity_s, name = key.split('\n')
            if name not in self.metadata.keys():
                log.debug(f"removing {key} from storage: not an allowed metadata name")
                to_delete.append(key)
                continue
            entity = jid.JID(entity_s)

            if name == 'avatar':
                if value is not None:
                    try:
                        cache_uid = value['cache_uid']
                        if not cache_uid:
                            raise ValueError
                        filename = value['filename']
                        if not filename:
                            raise ValueError
                    except (ValueError, KeyError):
                        log.warning(
                            f"invalid data for {entity} avatar, it will be deleted: "
                            f"{value}")
                        to_delete.append(key)
                        continue
                    cache = self.host.common_cache.get_metadata(cache_uid)
                    if cache is None:
                        log.debug(
                            f"purging avatar for {entity}: it is not in cache anymore")
                        to_delete.append(key)
                        continue

            self.host.memory.update_entity_data(
                client, entity, name, value, silent=True
            )

        for key in to_delete:
            await client._identity_storage.adel(key)

    def _roster_update_trigger(self, client, roster_item):
        old_item = client.roster.get_item(roster_item.jid)
        if old_item is None or old_item.name != roster_item.name:
            log.debug(
                f"roster nickname has been updated to {roster_item.name!r} for "
                f"{roster_item.jid}"
            )
            defer.ensureDeferred(
                self.update(
                    client,
                    IMPORT_NAME,
                    "nicknames",
                    [roster_item.name],
                    roster_item.jid
                )
            )
        return True

    def register(
        self,
        origin: str,
        metadata_name: str,
        cb_get: Union[Coroutine, defer.Deferred],
        cb_set: Union[Coroutine, defer.Deferred],
        priority: int=0):
        """Register callbacks to handle identity metadata

        @param origin: namespace of the plugin managing this metadata
        @param metadata_name: name of the metadata, can be:
            - avatar
            - nicknames
            - description
        @param cb_get: method to retrieve a metadata
            the method will be called with client and entity as arguments.
        @param cb_set: method to set a metadata
            the method will be called with client, value to set, and entity as
            arguments.
        @param priority: priority of this method for the given metadata.
            methods with higher priorities will be called first
        """
        if metadata_name not in self.metadata.keys():
            raise ValueError(f"Invalid metadata_name: {metadata_name!r}")
        callback = Callback(origin=origin, get=cb_get, set=cb_set, priority=priority)
        cb_list = self.metadata[metadata_name].setdefault('callbacks', [])
        cb_list.append(callback)
        cb_list.sort(key=lambda c: c.priority, reverse=True)
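
    # Illustrative sketch (not in the original module): a plugin handling avatars
    # would typically register its callbacks at initialisation, with `identity`
    # being this plugin's instance and `self._get_avatar`/`self._set_avatar`
    # hypothetical coroutines of the calling plugin:
    #
    #     identity.register(
    #         "MY_PLUGIN",  # the calling plugin's own import name
    #         "avatar",
    #         self._get_avatar,
    #         self._set_avatar,
    #         priority=1000,
    #     )
    #
    # Callbacks with the highest priority are tried first by get() and set() below.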

    def get_identity_jid(self, client, peer_jid):
        """Return jid to use to set identity metadata

        if it's a jid of a room occupant, the full jid will be used
        otherwise the bare jid will be used
        if None, the bare jid of the profile will be used
        @return (jid.JID): jid to use for identity metadata
        """
        if peer_jid is None:
            return client.jid.userhostJID()
        if self._m is None:
            return peer_jid.userhostJID()
        else:
            return self._m.get_bare_or_full(client, peer_jid)

    def check_type(self, metadata_name, value):
        """Check that type used for a metadata is the one declared in self.metadata"""
        value_type = self.metadata[metadata_name]["type"]
        if not isinstance(value, value_type):
            raise ValueError(
                f"{value} has wrong type: it is {type(value)} while {value_type} was "
                f"expected")

    def get_field_type(self, metadata_name: str) -> type:
        """Return the type of the requested field

        @param metadata_name: name of the field to check
        @raise KeyError: the requested field doesn't exist
        """
        return self.metadata[metadata_name]["type"]

    async def get(
        self,
        client: SatXMPPEntity,
        metadata_name: str,
        entity: Optional[jid.JID],
        use_cache: bool=True,
        prefilled_values: Optional[Dict[str, Any]]=None
    ):
        """Retrieve identity metadata of an entity

        if metadata is already in cache, it is returned. Otherwise, registered callbacks
        will be tried in priority order (higher to lower)
        @param metadata_name: name of the metadata
            must be one of self.metadata keys
            the name will also be used as entity data name in host.memory
        @param entity: entity for which the metadata is requested
            None to use profile's jid
        @param use_cache: if False, cache won't be checked
        @param prefilled_values: map of origin => value to use when `get_all` is set
        """
        entity = self.get_identity_jid(client, entity)
        try:
            metadata = self.metadata[metadata_name]
        except KeyError:
            raise ValueError(f"Invalid metadata name: {metadata_name!r}")
        get_all = metadata.get('get_all', False)
        if use_cache:
            try:
                data = self.host.memory.get_entity_datum(
                    client, entity, metadata_name)
            except (KeyError, exceptions.UnknownEntityError):
                pass
            else:
                return data

        try:
            callbacks = metadata['callbacks']
        except KeyError:
            log.warning(_("No callback registered for {metadata_name}")
                        .format(metadata_name=metadata_name))
            return [] if get_all else None

        if get_all:
            all_data = []
        elif prefilled_values is not None:
            raise exceptions.InternalError(
                "prefilled_values can only be used when `get_all` is set")

        for callback in callbacks:
            try:
                if prefilled_values is not None and callback.origin in prefilled_values:
                    data = prefilled_values[callback.origin]
                    log.debug(
                        f"using prefilled values {data!r} for {metadata_name} with "
                        f"{callback.origin}")
                else:
                    data = await defer.ensureDeferred(callback.get(client, entity))
            except exceptions.CancelError:
                continue
            except Exception as e:
                log.warning(
                    _("Error while trying to get {metadata_name} with {callback}: {e}")
                    .format(callback=callback.get, metadata_name=metadata_name, e=e))
            else:
                if data:
                    self.check_type(metadata_name, data)
                    if get_all:
                        if isinstance(data, list):
                            all_data.extend(data)
                        else:
                            all_data.append(data)
                    else:
                        break
        else:
            data = None

        if get_all:
            data = all_data

        post_treatment = metadata.get("get_post_treatment")
        if post_treatment is not None:
            data = await utils.as_deferred(post_treatment, client, entity, data)

        self.host.memory.update_entity_data(
            client, entity, metadata_name, data)

        if metadata.get('store', False):
            key = f"{entity}\n{metadata_name}"
            await client._identity_storage.aset(key, data)

        return data
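
    # Illustrative call (sketch only): another plugin retrieving the nicknames of
    # a contact while bypassing the cache could do something like:
    #
    #     nicknames = await self.host.plugins["IDENTITY"].get(
    #         client, "nicknames", peer_jid, use_cache=False
    #     )
    #
    # where `peer_jid` is a jid.JID and `client` the profile's SatXMPPEntity.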

    async def set(self, client, metadata_name, data, entity=None):
        """Set identity metadata for an entity

        Registered callbacks will be tried in priority order (higher to lower)
        @param metadata_name(str): name of the metadata
            must be one of self.metadata keys
            the name will also be used to set entity data in host.memory
        @param data(object): value to set
        @param entity(jid.JID, None): entity for which the metadata is set
            None to use profile's jid
        """
        entity = self.get_identity_jid(client, entity)
        metadata = self.metadata[metadata_name]
        data_filter = metadata.get("set_data_filter")
        if data_filter is not None:
            data = await utils.as_deferred(data_filter, client, entity, data)
        self.check_type(metadata_name, data)

        try:
            callbacks = metadata['callbacks']
        except KeyError:
            log.warning(_("No callback registered for {metadata_name}")
                        .format(metadata_name=metadata_name))
            raise exceptions.FeatureNotFound(f"Can't set {metadata_name} for {entity}")

        for callback in callbacks:
            try:
                await defer.ensureDeferred(callback.set(client, data, entity))
            except exceptions.CancelError:
                continue
            except Exception as e:
                log.warning(
                    _("Error while trying to set {metadata_name} with {callback}: {e}")
                    .format(callback=callback.set, metadata_name=metadata_name, e=e))
            else:
                break
        else:
            raise exceptions.FeatureNotFound(f"Can't set {metadata_name} for {entity}")

        post_treatment = metadata.get("set_post_treatment")
        if post_treatment is not None:
            await utils.as_deferred(post_treatment, client, entity, data)

    async def update(
        self,
        client: SatXMPPEntity,
        origin: str,
        metadata_name: str,
        data: Any,
        entity: Optional[jid.JID]
    ):
        """Update a metadata in cache

        This method may be called by plugins when an identity metadata is available.
        @param origin: namespace of the plugin which is source of the metadata
        """
        entity = self.get_identity_jid(client, entity)
        if (entity, metadata_name) in client._identity_update_lock:
            log.debug(f"update is locked for {entity}'s {metadata_name}")
            return
        metadata = self.metadata[metadata_name]

        try:
            cached_data = self.host.memory.get_entity_datum(
                client, entity, metadata_name)
        except (KeyError, exceptions.UnknownEntityError):
            # metadata is not cached, we do the update
            pass
        else:
            # metadata is cached, we check if the new value differs from the cached one
            try:
                update_is_new_data = metadata["update_is_new_data"]
            except KeyError:
                update_is_new_data = self.default_update_is_new_data

            if data is None:
                if cached_data is None:
                    log.debug(
                        f"{metadata_name} for {entity} is already disabled, nothing to "
                        f"do")
                    return
            elif cached_data is None:
                pass
            elif not update_is_new_data(client, entity, cached_data, data):
                log.debug(
                    f"{metadata_name} for {entity} is already in cache, nothing to "
                    f"do")
                return

        # we can't use the cache, so we do the update

        log.debug(f"updating {metadata_name} for {entity}")

        if metadata.get('get_all', False):
            # get_all is set, meaning that we have to check all plugins
            # so we first delete current cache
            try:
                self.host.memory.del_entity_datum(client, entity, metadata_name)
            except (KeyError, exceptions.UnknownEntityError):
                pass
            # then fill it again by calling get, which will retrieve all values
            # we lock update to avoid infinite recursions (update can be called during
            # get callbacks)
            client._identity_update_lock.append((entity, metadata_name))
            await self.get(client, metadata_name, entity, prefilled_values={origin: data})
            client._identity_update_lock.remove((entity, metadata_name))
            return

        if data is not None:
            data_filter = metadata['update_data_filter']
            if data_filter is not None:
                data = await utils.as_deferred(data_filter, client, entity, data)
            self.check_type(metadata_name, data)

        self.host.memory.update_entity_data(client, entity, metadata_name, data)

        if metadata.get('store', False):
            key = f"{entity}\n{metadata_name}"
            await client._identity_storage.aset(key, data)
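
    # Sketch of the expected call pattern (illustrative, hypothetical plugin name):
    # a plugin receiving new avatar metadata from the network would push it into
    # the cache with:
    #
    #     await self.host.plugins["IDENTITY"].update(
    #         client, "MY_PLUGIN", "avatar", avatar_metadata, from_jid
    #     )
    #
    # For `get_all` metadata such as "nicknames", update() re-runs get() with the
    # new value pre-filled for the calling plugin, while the (entity, metadata_name)
    # lock above prevents get() callbacks from re-entering update() recursively.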

    def default_update_is_new_data(self, client, entity, cached_data, new_data):
        return new_data != cached_data

    def _getAvatar(self, entity, use_cache, profile):
        client = self.host.get_client(profile)
        entity = jid.JID(entity) if entity else None
        d = defer.ensureDeferred(self.get(client, "avatar", entity, use_cache))
        d.addCallback(lambda data: data_format.serialise(data))
        return d

    def _set_avatar(self, file_path, entity, profile_key=C.PROF_KEY_NONE):
        client = self.host.get_client(profile_key)
        entity = jid.JID(entity) if entity else None
        return defer.ensureDeferred(
            self.set(client, "avatar", file_path, entity))

    def _blocking_cache_avatar(
        self,
        source: str,
        avatar_data: dict[str, Any]
    ):
        """This method is executed in a separated thread"""
        if avatar_data["media_type"] == "image/svg+xml":
            # for vector image, we save directly
            img_buf = open(avatar_data["path"], "rb")
        else:
            # for bitmap image, we check size and resize if necessary
            try:
                img = Image.open(avatar_data["path"])
            except IOError as e:
                raise exceptions.DataError(f"Can't open image: {e}")

            if img.size != AVATAR_DIM:
                img.thumbnail(AVATAR_DIM)
                if img.size[0] != img.size[1]:  # we need to crop first
                    left, upper = (0, 0)
                    right, lower = img.size
                    offset = abs(right - lower) / 2
                    if right == min(img.size):
                        upper += offset
                        lower -= offset
                    else:
                        left += offset
                        right -= offset
                    img = img.crop((left, upper, right, lower))
            img_buf = io.BytesIO()
            # PNG is well supported among clients, so we convert to this format
            img.save(img_buf, "PNG")
            img_buf.seek(0)
            avatar_data["media_type"] = "image/png"

        media_type = avatar_data["media_type"]
        avatar_data["base64"] = image_b64 = b64encode(img_buf.read()).decode()
        img_buf.seek(0)
        image_hash = hashlib.sha1(img_buf.read()).hexdigest()
        img_buf.seek(0)
        with self.host.common_cache.cache_data(
            source, image_hash, media_type
        ) as f:
            f.write(img_buf.read())
            avatar_data['path'] = Path(f.name)
            avatar_data['filename'] = avatar_data['path'].name
        avatar_data['cache_uid'] = image_hash

    async def cache_avatar(self, source: str, avatar_data: Dict[str, Any]) -> None:
        """Resize if necessary and cache avatar

        @param source: source importing the avatar (usually it is plugin's import name),
            will be used in cache metadata
        @param avatar_data: avatar metadata as built by [avatar_set_data_filter]
            will be updated with following keys:
                path: updated path using cached file
                filename: updated filename using cached file
                base64: resized and base64 encoded avatar
                cache_uid: SHA1 hash used as cache unique ID
        """
        await threads.deferToThread(self._blocking_cache_avatar, source, avatar_data)
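
    # For reference (illustrative values only): after cache_avatar() returns,
    # avatar_data has been completed roughly as follows:
    #
    #     {
    #         "path": Path("<file in common cache>"),
    #         "filename": "<name of the cached file>",
    #         "media_type": "image/png",   # bitmaps are converted to PNG
    #         "base64": "iVBORw0KGgo...",  # resized avatar, base64 encoded
    #         "cache_uid": "<sha1 of the cached bytes>",
    #     }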

    async def avatar_set_data_filter(self, client, entity, file_path):
        """Convert avatar file path to dict data"""
        file_path = Path(file_path)
        if not file_path.is_file():
            raise ValueError(f"There is no file at {file_path} to use as avatar")
        avatar_data = {
            'path': file_path,
            'filename': file_path.name,
            'media_type': image.guess_type(file_path),
        }
        media_type = avatar_data['media_type']
        if media_type is None:
            raise ValueError(f"Can't identify type of image at {file_path}")
        if not media_type.startswith('image/'):
            raise ValueError(f"File at {file_path} doesn't appear to be an image")
        await self.cache_avatar(IMPORT_NAME, avatar_data)
        return avatar_data

    async def avatar_set_post_treatment(self, client, entity, avatar_data):
        """Update our own avatar"""
        await self.update(client, IMPORT_NAME, "avatar", avatar_data, entity)

    def avatar_build_metadata(
        self,
        path: Path,
        media_type: Optional[str] = None,
        cache_uid: Optional[str] = None
    ) -> Optional[Dict[str, Union[str, Path, None]]]:
        """Helper method to generate avatar metadata

        @param path(str, Path, None): path to avatar file
            avatar file must be in cache
            None if avatar is explicitly not set
        @param media_type(str, None): type of the avatar file (MIME type)
        @param cache_uid(str, None): UID of avatar in cache
        @return (dict, None): avatar metadata
            None if avatar is not set
        """
        if path is None:
            return None
        else:
            if cache_uid is None:
                raise ValueError("cache_uid must be set if path is set")
            path = Path(path)
            if media_type is None:
                media_type = image.guess_type(path)

            return {
                "path": path,
                "filename": path.name,
                "media_type": media_type,
                "cache_uid": cache_uid,
            }
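
    # Illustrative use (sketch, hypothetical variable names): a plugin which has
    # just stored a received avatar in the common cache can build the metadata
    # expected by update() with:
    #
    #     metadata = self.avatar_build_metadata(
    #         cached_path, media_type=cached_media_type, cache_uid=avatar_hash
    #     )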

    def avatar_update_is_new_data(self, client, entity, cached_data, new_data):
        return new_data['path'] != cached_data['path']

    async def avatar_update_data_filter(self, client, entity, data):
        if not isinstance(data, dict):
            raise ValueError(f"Invalid data type ({type(data)}), a dict is expected")
        mandatory_keys = {'path', 'filename', 'cache_uid'}
        if not data.keys() >= mandatory_keys:
            raise ValueError(f"missing avatar data keys: {mandatory_keys - data.keys()}")
        return data

    async def nicknames_get_post_treatment(self, client, entity, plugin_nicknames):
        """Prepend nicknames from core locations + set default nickname

        Nicknames are checked from many locations, so there is always at least
        one nickname. The first nickname of the list should be used in priority.
        Nicknames are appended in this order:
            - roster name, then plugin-set nicknames
            - if no nickname is found, the user part of the jid is used, or the
              bare jid if there is no user part.
        For MUC, the room nick is always put first.
        """
        nicknames = []

        # for MUC we add resource
        if entity.resource:
            # get_identity_jid keeps the resource only if the entity is a MUC room
            # occupant jid
            nicknames.append(entity.resource)

        # we first check roster (if we are not in a component)
        if not client.is_component:
            roster_item = client.roster.get_item(entity.userhostJID())
            if roster_item is not None and roster_item.name:
                # user set name has priority over entity set name
                nicknames.append(roster_item.name)

        nicknames.extend(plugin_nicknames)

        if not nicknames:
            if entity.user:
                nicknames.append(entity.user.capitalize())
            else:
                nicknames.append(entity.userhost())

        # we remove duplicates while preserving order with dict
        return list(dict.fromkeys(nicknames))
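
    # Illustrative example: for a bare-jid contact named "Louise" in the roster,
    # with a plugin callback returning ["Louise", "Lou"], the returned list is
    # ["Louise", "Lou"]: the roster name comes first and the duplicate plugin
    # value is dropped by dict.fromkeys() while order is preserved.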

    def nicknames_update_is_new_data(self, client, entity, cached_data, new_nicknames):
        return not set(new_nicknames).issubset(cached_data)

    async def description_get_post_treatment(
        self,
        client: SatXMPPEntity,
        entity: jid.JID,
        plugin_description: List[str]
    ) -> str:
        """Join all descriptions in a unique string"""
        return '\n'.join(plugin_description)

    def _get_identity(self, entity_s, metadata_filter, use_cache, profile):
        entity = jid.JID(entity_s)
        client = self.host.get_client(profile)
        d = defer.ensureDeferred(
            self.get_identity(client, entity, metadata_filter, use_cache))
        d.addCallback(data_format.serialise)
        return d

    async def get_identity(
        self,
        client: SatXMPPEntity,
        entity: Optional[jid.JID] = None,
        metadata_filter: Optional[List[str]] = None,
        use_cache: bool = True
    ) -> Dict[str, Any]:
        """Retrieve identity of an entity

        @param entity: entity to check
        @param metadata_filter: if not None or empty, only return
            metadata in this filter
        @param use_cache: if False, cache won't be checked
            should be True most of the time, to avoid useless network requests
        @return: identity data
        """
        id_data = {}

        if not metadata_filter:
            metadata_names = self.metadata.keys()
        else:
            metadata_names = metadata_filter

        for metadata_name in metadata_names:
            id_data[metadata_name] = await self.get(
                client, metadata_name, entity, use_cache)

        return id_data

    def _get_identities(self, entities_s, metadata_filter, profile):
        entities = [jid.JID(e) for e in entities_s]
        client = self.host.get_client(profile)
        d = defer.ensureDeferred(self.get_identities(client, entities, metadata_filter))
        d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()}))
        return d

    async def get_identities(
        self,
        client: SatXMPPEntity,
        entities: List[jid.JID],
        metadata_filter: Optional[List[str]] = None,
    ) -> dict:
        """Retrieve several identities at once

        @param entities: entities from which identities must be retrieved
        @param metadata_filter: same as for [get_identity]
        @return: identities metadata where key is jid
            if an error happens while retrieving an entity's identity, it won't be
            present in the result (and a warning will be logged)
        """
        identities = {}
        get_identity_list = []
        for entity_jid in entities:
            get_identity_list.append(
                defer.ensureDeferred(
                    self.get_identity(
                        client,
                        entity=entity_jid,
                        metadata_filter=metadata_filter,
                    )
                )
            )
        identities_result = await defer.DeferredList(get_identity_list)
        for idx, (success, identity) in enumerate(identities_result):
            entity_jid = entities[idx]
            if not success:
                log.warning(f"Can't get identity for {entity_jid}")
            else:
                identities[entity_jid] = identity
        return identities
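
    # Illustrative call (sketch): fetching avatars and nicknames for several
    # contacts at once; entities whose retrieval fails are simply skipped:
    #
    #     identities = await self.get_identities(
    #         client,
    #         [jid.JID("louise@example.net"), jid.JID("pierre@example.org")],
    #         metadata_filter=["avatar", "nicknames"],
    #     )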

    def _get_base_identities(self, profile_key):
        client = self.host.get_client(profile_key)
        d = defer.ensureDeferred(self.get_base_identities(client))
        d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()}))
        return d

    async def get_base_identities(
        self,
        client: SatXMPPEntity,
    ) -> dict:
        """Retrieve identities for entities in roster + own identity"""
        if client.is_component:
            entities = [client.jid.userhostJID()]
        else:
            entities = client.roster.get_jids() + [client.jid.userhostJID()]

        return await self.get_identities(
            client,
            entities,
            ['avatar', 'nicknames']
        )

    def _set_identity(self, id_data_s, profile):
        client = self.host.get_client(profile)
        id_data = data_format.deserialise(id_data_s)
        return defer.ensureDeferred(self.set_identity(client, id_data))

    async def set_identity(self, client, id_data):
        """Update profile's identity

        @param id_data(dict): data to update, key can be one of self.metadata keys
        """
        if not id_data.keys() <= self.metadata.keys():
            raise ValueError(
                f"Invalid metadata names: {id_data.keys() - self.metadata.keys()}")
        for metadata_name, data in id_data.items():
            try:
                await self.set(client, metadata_name, data)
            except Exception as e:
                log.warning(
                    _("Can't set metadata {metadata_name!r}: {reason}")
                    .format(metadata_name=metadata_name, reason=e))
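
# Illustrative note (not part of the original module): frontends reach
# set_identity() through the "identity_set" bridge method registered in
# __init__, passing a data_format serialised mapping such as
# {"nicknames": ["Louise"], "description": "Hello!"} for the given profile.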