Mercurial > libervia-backend
comparison sat/plugins/plugin_misc_identity.py @ 4037:524856bd7b19
massive refactoring to switch from camelCase to snake_case:
historically, Libervia (formerly SàT) was using camelCase, as allowed by PEP8 when working
with pre-PEP8 code, in order to use the same coding style as in Twisted.
However, snake_case is more readable and it's better to follow PEP8 best practices, so it
has been decided to move to full snake_case. Because Libervia has a huge codebase, this
ended with an ugly mix of camelCase and snake_case.
To fix that, this patch does a big refactoring by renaming every function and method
(including bridge) that are not coming from Twisted or Wokkel, to use fully snake_case.
This is a massive change, and may result in some bugs.
author | Goffi <goffi@goffi.org> |
---|---|
date | Sat, 08 Apr 2023 13:54:42 +0200 |
parents | 998c5318230f |
children |
comparison
equal
deleted
inserted
replaced
4036:c4464d7ae97b | 4037:524856bd7b19 |
---|---|
75 self._m = host.plugins.get("XEP-0045") | 75 self._m = host.plugins.get("XEP-0045") |
76 self.metadata = { | 76 self.metadata = { |
77 "avatar": { | 77 "avatar": { |
78 "type": dict, | 78 "type": dict, |
79 # convert avatar path to avatar metadata (and check validity) | 79 # convert avatar path to avatar metadata (and check validity) |
80 "set_data_filter": self.avatarSetDataFilter, | 80 "set_data_filter": self.avatar_set_data_filter, |
81 # update profile avatar, so all frontends are aware | 81 # update profile avatar, so all frontends are aware |
82 "set_post_treatment": self.avatarSetPostTreatment, | 82 "set_post_treatment": self.avatar_set_post_treatment, |
83 "update_is_new_data": self.avatarUpdateIsNewData, | 83 "update_is_new_data": self.avatar_update_is_new_data, |
84 "update_data_filter": self.avatarUpdateDataFilter, | 84 "update_data_filter": self.avatar_update_data_filter, |
85 # we store the metadata in database, to restore it on next connection | 85 # we store the metadata in database, to restore it on next connection |
86 # (it is stored only for roster entities) | 86 # (it is stored only for roster entities) |
87 "store": True, | 87 "store": True, |
88 }, | 88 }, |
89 "nicknames": { | 89 "nicknames": { |
90 "type": list, | 90 "type": list, |
91 # accumulate all nicknames from all callbacks in a list instead | 91 # accumulate all nicknames from all callbacks in a list instead |
92 # of returning only the data from the first successful callback | 92 # of returning only the data from the first successful callback |
93 "get_all": True, | 93 "get_all": True, |
94 # append nicknames from roster, resource, etc. | 94 # append nicknames from roster, resource, etc. |
95 "get_post_treatment": self.nicknamesGetPostTreatment, | 95 "get_post_treatment": self.nicknames_get_post_treatment, |
96 "update_is_new_data": self.nicknamesUpdateIsNewData, | 96 "update_is_new_data": self.nicknames_update_is_new_data, |
97 "store": True, | 97 "store": True, |
98 }, | 98 }, |
99 "description": { | 99 "description": { |
100 "type": str, | 100 "type": str, |
101 "get_all": True, | 101 "get_all": True, |
102 "get_post_treatment": self.descriptionGetPostTreatment, | 102 "get_post_treatment": self.description_get_post_treatment, |
103 "store": True, | 103 "store": True, |
104 } | 104 } |
105 } | 105 } |
106 host.trigger.add("roster_update", self._rosterUpdateTrigger) | 106 host.trigger.add("roster_update", self._roster_update_trigger) |
107 host.memory.setSignalOnUpdate("avatar") | 107 host.memory.set_signal_on_update("avatar") |
108 host.memory.setSignalOnUpdate("nicknames") | 108 host.memory.set_signal_on_update("nicknames") |
109 host.bridge.addMethod( | 109 host.bridge.add_method( |
110 "identityGet", | 110 "identity_get", |
111 ".plugin", | 111 ".plugin", |
112 in_sign="sasbs", | 112 in_sign="sasbs", |
113 out_sign="s", | 113 out_sign="s", |
114 method=self._getIdentity, | 114 method=self._get_identity, |
115 async_=True, | 115 async_=True, |
116 ) | 116 ) |
117 host.bridge.addMethod( | 117 host.bridge.add_method( |
118 "identitiesGet", | 118 "identities_get", |
119 ".plugin", | 119 ".plugin", |
120 in_sign="asass", | 120 in_sign="asass", |
121 out_sign="s", | 121 out_sign="s", |
122 method=self._getIdentities, | 122 method=self._get_identities, |
123 async_=True, | 123 async_=True, |
124 ) | 124 ) |
125 host.bridge.addMethod( | 125 host.bridge.add_method( |
126 "identitiesBaseGet", | 126 "identities_base_get", |
127 ".plugin", | 127 ".plugin", |
128 in_sign="s", | 128 in_sign="s", |
129 out_sign="s", | 129 out_sign="s", |
130 method=self._getBaseIdentities, | 130 method=self._get_base_identities, |
131 async_=True, | 131 async_=True, |
132 ) | 132 ) |
133 host.bridge.addMethod( | 133 host.bridge.add_method( |
134 "identitySet", | 134 "identity_set", |
135 ".plugin", | 135 ".plugin", |
136 in_sign="ss", | 136 in_sign="ss", |
137 out_sign="", | 137 out_sign="", |
138 method=self._setIdentity, | 138 method=self._set_identity, |
139 async_=True, | 139 async_=True, |
140 ) | 140 ) |
141 host.bridge.addMethod( | 141 host.bridge.add_method( |
142 "avatarGet", | 142 "avatar_get", |
143 ".plugin", | 143 ".plugin", |
144 in_sign="sbs", | 144 in_sign="sbs", |
145 out_sign="s", | 145 out_sign="s", |
146 method=self._getAvatar, | 146 method=self._getAvatar, |
147 async_=True, | 147 async_=True, |
148 ) | 148 ) |
149 host.bridge.addMethod( | 149 host.bridge.add_method( |
150 "avatarSet", | 150 "avatar_set", |
151 ".plugin", | 151 ".plugin", |
152 in_sign="sss", | 152 in_sign="sss", |
153 out_sign="", | 153 out_sign="", |
154 method=self._setAvatar, | 154 method=self._set_avatar, |
155 async_=True, | 155 async_=True, |
156 ) | 156 ) |
157 | 157 |
158 async def profileConnecting(self, client): | 158 async def profile_connecting(self, client): |
159 client._identity_update_lock = [] | 159 client._identity_update_lock = [] |
160 # we restore known identities from database | 160 # we restore known identities from database |
161 client._identity_storage = persistent.LazyPersistentBinaryDict( | 161 client._identity_storage = persistent.LazyPersistentBinaryDict( |
162 "identity", client.profile) | 162 "identity", client.profile) |
163 | 163 |
186 log.warning( | 186 log.warning( |
187 f"invalid data for {entity} avatar, it will be deleted: " | 187 f"invalid data for {entity} avatar, it will be deleted: " |
188 f"{value}") | 188 f"{value}") |
189 to_delete.append(key) | 189 to_delete.append(key) |
190 continue | 190 continue |
191 cache = self.host.common_cache.getMetadata(cache_uid) | 191 cache = self.host.common_cache.get_metadata(cache_uid) |
192 if cache is None: | 192 if cache is None: |
193 log.debug( | 193 log.debug( |
194 f"purging avatar for {entity}: it is not in cache anymore") | 194 f"purging avatar for {entity}: it is not in cache anymore") |
195 to_delete.append(key) | 195 to_delete.append(key) |
196 continue | 196 continue |
197 | 197 |
198 self.host.memory.updateEntityData( | 198 self.host.memory.update_entity_data( |
199 client, entity, name, value, silent=True | 199 client, entity, name, value, silent=True |
200 ) | 200 ) |
201 | 201 |
202 for key in to_delete: | 202 for key in to_delete: |
203 await client._identity_storage.adel(key) | 203 await client._identity_storage.adel(key) |
204 | 204 |
205 def _rosterUpdateTrigger(self, client, roster_item): | 205 def _roster_update_trigger(self, client, roster_item): |
206 old_item = client.roster.getItem(roster_item.jid) | 206 old_item = client.roster.get_item(roster_item.jid) |
207 if old_item is None or old_item.name != roster_item.name: | 207 if old_item is None or old_item.name != roster_item.name: |
208 log.debug( | 208 log.debug( |
209 f"roster nickname has been updated to {roster_item.name!r} for " | 209 f"roster nickname has been updated to {roster_item.name!r} for " |
210 f"{roster_item.jid}" | 210 f"{roster_item.jid}" |
211 ) | 211 ) |
245 callback = Callback(origin=origin, get=cb_get, set=cb_set, priority=priority) | 245 callback = Callback(origin=origin, get=cb_get, set=cb_set, priority=priority) |
246 cb_list = self.metadata[metadata_name].setdefault('callbacks', []) | 246 cb_list = self.metadata[metadata_name].setdefault('callbacks', []) |
247 cb_list.append(callback) | 247 cb_list.append(callback) |
248 cb_list.sort(key=lambda c: c.priority, reverse=True) | 248 cb_list.sort(key=lambda c: c.priority, reverse=True) |
249 | 249 |
250 def getIdentityJid(self, client, peer_jid): | 250 def get_identity_jid(self, client, peer_jid): |
251 """Return jid to use to set identity metadata | 251 """Return jid to use to set identity metadata |
252 | 252 |
253 if it's a jid of a room occupant, full jid will be used | 253 if it's a jid of a room occupant, full jid will be used |
254 otherwise bare jid will be used | 254 otherwise bare jid will be used |
255 if None, bare jid of profile will be used | 255 if None, bare jid of profile will be used |
258 if peer_jid is None: | 258 if peer_jid is None: |
259 return client.jid.userhostJID() | 259 return client.jid.userhostJID() |
260 if self._m is None: | 260 if self._m is None: |
261 return peer_jid.userhostJID() | 261 return peer_jid.userhostJID() |
262 else: | 262 else: |
263 return self._m.getBareOrFull(client, peer_jid) | 263 return self._m.get_bare_or_full(client, peer_jid) |
264 | 264 |
265 def checkType(self, metadata_name, value): | 265 def check_type(self, metadata_name, value): |
266 """Check that type used for a metadata is the one declared in self.metadata""" | 266 """Check that type used for a metadata is the one declared in self.metadata""" |
267 value_type = self.metadata[metadata_name]["type"] | 267 value_type = self.metadata[metadata_name]["type"] |
268 if not isinstance(value, value_type): | 268 if not isinstance(value, value_type): |
269 raise ValueError( | 269 raise ValueError( |
270 f"{value} has wrong type: it is {type(value)} while {value_type} was " | 270 f"{value} has wrong type: it is {type(value)} while {value_type} was " |
271 f"expected") | 271 f"expected") |
272 | 272 |
273 def getFieldType(self, metadata_name: str) -> str: | 273 def get_field_type(self, metadata_name: str) -> str: |
274 """Return the type the requested field | 274 """Return the type the requested field |
275 | 275 |
276 @param metadata_name: name of the field to check | 276 @param metadata_name: name of the field to check |
277 @raise KeyError: the request field doesn't exist | 277 @raise KeyError: the request field doesn't exist |
278 """ | 278 """ |
296 @param entity: entity for which avatar is requested | 296 @param entity: entity for which avatar is requested |
297 None to use profile's jid | 297 None to use profile's jid |
298 @param use_cache: if False, cache won't be checked | 298 @param use_cache: if False, cache won't be checked |
299 @param prefilled_values: map of origin => value to use when `get_all` is set | 299 @param prefilled_values: map of origin => value to use when `get_all` is set |
300 """ | 300 """ |
301 entity = self.getIdentityJid(client, entity) | 301 entity = self.get_identity_jid(client, entity) |
302 try: | 302 try: |
303 metadata = self.metadata[metadata_name] | 303 metadata = self.metadata[metadata_name] |
304 except KeyError: | 304 except KeyError: |
305 raise ValueError(f"Invalid metadata name: {metadata_name!r}") | 305 raise ValueError(f"Invalid metadata name: {metadata_name!r}") |
306 get_all = metadata.get('get_all', False) | 306 get_all = metadata.get('get_all', False) |
307 if use_cache: | 307 if use_cache: |
308 try: | 308 try: |
309 data = self.host.memory.getEntityDatum( | 309 data = self.host.memory.get_entity_datum( |
310 client, entity, metadata_name) | 310 client, entity, metadata_name) |
311 except (KeyError, exceptions.UnknownEntityError): | 311 except (KeyError, exceptions.UnknownEntityError): |
312 pass | 312 pass |
313 else: | 313 else: |
314 return data | 314 return data |
341 log.warning( | 341 log.warning( |
342 _("Error while trying to get {metadata_name} with {callback}: {e}") | 342 _("Error while trying to get {metadata_name} with {callback}: {e}") |
343 .format(callback=callback.get, metadata_name=metadata_name, e=e)) | 343 .format(callback=callback.get, metadata_name=metadata_name, e=e)) |
344 else: | 344 else: |
345 if data: | 345 if data: |
346 self.checkType(metadata_name, data) | 346 self.check_type(metadata_name, data) |
347 if get_all: | 347 if get_all: |
348 if isinstance(data, list): | 348 if isinstance(data, list): |
349 all_data.extend(data) | 349 all_data.extend(data) |
350 else: | 350 else: |
351 all_data.append(data) | 351 all_data.append(data) |
357 if get_all: | 357 if get_all: |
358 data = all_data | 358 data = all_data |
359 | 359 |
360 post_treatment = metadata.get("get_post_treatment") | 360 post_treatment = metadata.get("get_post_treatment") |
361 if post_treatment is not None: | 361 if post_treatment is not None: |
362 data = await utils.asDeferred(post_treatment, client, entity, data) | 362 data = await utils.as_deferred(post_treatment, client, entity, data) |
363 | 363 |
364 self.host.memory.updateEntityData( | 364 self.host.memory.update_entity_data( |
365 client, entity, metadata_name, data) | 365 client, entity, metadata_name, data) |
366 | 366 |
367 if metadata.get('store', False): | 367 if metadata.get('store', False): |
368 key = f"{entity}\n{metadata_name}" | 368 key = f"{entity}\n{metadata_name}" |
369 await client._identity_storage.aset(key, data) | 369 await client._identity_storage.aset(key, data) |
379 the name will also be used to set entity data in host.memory | 379 the name will also be used to set entity data in host.memory |
380 @param data(object): value to set | 380 @param data(object): value to set |
381 @param entity(jid.JID, None): entity for which avatar is requested | 381 @param entity(jid.JID, None): entity for which avatar is requested |
382 None to use profile's jid | 382 None to use profile's jid |
383 """ | 383 """ |
384 entity = self.getIdentityJid(client, entity) | 384 entity = self.get_identity_jid(client, entity) |
385 metadata = self.metadata[metadata_name] | 385 metadata = self.metadata[metadata_name] |
386 data_filter = metadata.get("set_data_filter") | 386 data_filter = metadata.get("set_data_filter") |
387 if data_filter is not None: | 387 if data_filter is not None: |
388 data = await utils.asDeferred(data_filter, client, entity, data) | 388 data = await utils.as_deferred(data_filter, client, entity, data) |
389 self.checkType(metadata_name, data) | 389 self.check_type(metadata_name, data) |
390 | 390 |
391 try: | 391 try: |
392 callbacks = metadata['callbacks'] | 392 callbacks = metadata['callbacks'] |
393 except KeyError: | 393 except KeyError: |
394 log.warning(_("No callback registered for {metadata_name}") | 394 log.warning(_("No callback registered for {metadata_name}") |
409 else: | 409 else: |
410 raise exceptions.FeatureNotFound(f"Can't set {metadata_name} for {entity}") | 410 raise exceptions.FeatureNotFound(f"Can't set {metadata_name} for {entity}") |
411 | 411 |
412 post_treatment = metadata.get("set_post_treatment") | 412 post_treatment = metadata.get("set_post_treatment") |
413 if post_treatment is not None: | 413 if post_treatment is not None: |
414 await utils.asDeferred(post_treatment, client, entity, data) | 414 await utils.as_deferred(post_treatment, client, entity, data) |
415 | 415 |
416 async def update( | 416 async def update( |
417 self, | 417 self, |
418 client: SatXMPPEntity, | 418 client: SatXMPPEntity, |
419 origin: str, | 419 origin: str, |
424 """Update a metadata in cache | 424 """Update a metadata in cache |
425 | 425 |
426 This method may be called by plugins when an identity metadata is available. | 426 This method may be called by plugins when an identity metadata is available. |
427 @param origin: namespace of the plugin which is source of the metadata | 427 @param origin: namespace of the plugin which is source of the metadata |
428 """ | 428 """ |
429 entity = self.getIdentityJid(client, entity) | 429 entity = self.get_identity_jid(client, entity) |
430 if (entity, metadata_name) in client._identity_update_lock: | 430 if (entity, metadata_name) in client._identity_update_lock: |
431 log.debug(f"update is locked for {entity}'s {metadata_name}") | 431 log.debug(f"update is locked for {entity}'s {metadata_name}") |
432 return | 432 return |
433 metadata = self.metadata[metadata_name] | 433 metadata = self.metadata[metadata_name] |
434 | 434 |
435 try: | 435 try: |
436 cached_data = self.host.memory.getEntityDatum( | 436 cached_data = self.host.memory.get_entity_datum( |
437 client, entity, metadata_name) | 437 client, entity, metadata_name) |
438 except (KeyError, exceptions.UnknownEntityError): | 438 except (KeyError, exceptions.UnknownEntityError): |
439 # metadata is not cached, we do the update | 439 # metadata is not cached, we do the update |
440 pass | 440 pass |
441 else: | 441 else: |
442 # metadata is cached, we check if the new value differs from the cached one | 442 # metadata is cached, we check if the new value differs from the cached one |
443 try: | 443 try: |
444 update_is_new_data = metadata["update_is_new_data"] | 444 update_is_new_data = metadata["update_is_new_data"] |
445 except KeyError: | 445 except KeyError: |
446 update_is_new_data = self.defaultUpdateIsNewData | 446 update_is_new_data = self.default_update_is_new_data |
447 | 447 |
448 if data is None: | 448 if data is None: |
449 if cached_data is None: | 449 if cached_data is None: |
450 log.debug( | 450 log.debug( |
451 f"{metadata_name} for {entity} is already disabled, nothing to " | 451 f"{metadata_name} for {entity} is already disabled, nothing to " |
465 | 465 |
466 if metadata.get('get_all', False): | 466 if metadata.get('get_all', False): |
467 # get_all is set, meaning that we have to check all plugins | 467 # get_all is set, meaning that we have to check all plugins |
468 # so we first delete current cache | 468 # so we first delete current cache |
469 try: | 469 try: |
470 self.host.memory.delEntityDatum(client, entity, metadata_name) | 470 self.host.memory.del_entity_datum(client, entity, metadata_name) |
471 except (KeyError, exceptions.UnknownEntityError): | 471 except (KeyError, exceptions.UnknownEntityError): |
472 pass | 472 pass |
473 # then fill it again by calling get, which will retrieve all values | 473 # then fill it again by calling get, which will retrieve all values |
474 # we lock update to avoid infinite recursions (update can be called during | 474 # we lock update to avoid infinite recursions (update can be called during |
475 # get callbacks) | 475 # get callbacks) |
479 return | 479 return |
480 | 480 |
481 if data is not None: | 481 if data is not None: |
482 data_filter = metadata['update_data_filter'] | 482 data_filter = metadata['update_data_filter'] |
483 if data_filter is not None: | 483 if data_filter is not None: |
484 data = await utils.asDeferred(data_filter, client, entity, data) | 484 data = await utils.as_deferred(data_filter, client, entity, data) |
485 self.checkType(metadata_name, data) | 485 self.check_type(metadata_name, data) |
486 | 486 |
487 self.host.memory.updateEntityData(client, entity, metadata_name, data) | 487 self.host.memory.update_entity_data(client, entity, metadata_name, data) |
488 | 488 |
489 if metadata.get('store', False): | 489 if metadata.get('store', False): |
490 key = f"{entity}\n{metadata_name}" | 490 key = f"{entity}\n{metadata_name}" |
491 await client._identity_storage.aset(key, data) | 491 await client._identity_storage.aset(key, data) |
492 | 492 |
493 def defaultUpdateIsNewData(self, client, entity, cached_data, new_data): | 493 def default_update_is_new_data(self, client, entity, cached_data, new_data): |
494 return new_data != cached_data | 494 return new_data != cached_data |
495 | 495 |
496 def _getAvatar(self, entity, use_cache, profile): | 496 def _getAvatar(self, entity, use_cache, profile): |
497 client = self.host.getClient(profile) | 497 client = self.host.get_client(profile) |
498 entity = jid.JID(entity) if entity else None | 498 entity = jid.JID(entity) if entity else None |
499 d = defer.ensureDeferred(self.get(client, "avatar", entity, use_cache)) | 499 d = defer.ensureDeferred(self.get(client, "avatar", entity, use_cache)) |
500 d.addCallback(lambda data: data_format.serialise(data)) | 500 d.addCallback(lambda data: data_format.serialise(data)) |
501 return d | 501 return d |
502 | 502 |
503 def _setAvatar(self, file_path, entity, profile_key=C.PROF_KEY_NONE): | 503 def _set_avatar(self, file_path, entity, profile_key=C.PROF_KEY_NONE): |
504 client = self.host.getClient(profile_key) | 504 client = self.host.get_client(profile_key) |
505 entity = jid.JID(entity) if entity else None | 505 entity = jid.JID(entity) if entity else None |
506 return defer.ensureDeferred( | 506 return defer.ensureDeferred( |
507 self.set(client, "avatar", file_path, entity)) | 507 self.set(client, "avatar", file_path, entity)) |
508 | 508 |
509 def _blockingCacheAvatar( | 509 def _blocking_cache_avatar( |
510 self, | 510 self, |
511 source: str, | 511 source: str, |
512 avatar_data: dict[str, Any] | 512 avatar_data: dict[str, Any] |
513 ): | 513 ): |
514 """This method is executed in a separated thread""" | 514 """This method is executed in a separated thread""" |
544 media_type = avatar_data["media_type"] | 544 media_type = avatar_data["media_type"] |
545 avatar_data["base64"] = image_b64 = b64encode(img_buf.read()).decode() | 545 avatar_data["base64"] = image_b64 = b64encode(img_buf.read()).decode() |
546 img_buf.seek(0) | 546 img_buf.seek(0) |
547 image_hash = hashlib.sha1(img_buf.read()).hexdigest() | 547 image_hash = hashlib.sha1(img_buf.read()).hexdigest() |
548 img_buf.seek(0) | 548 img_buf.seek(0) |
549 with self.host.common_cache.cacheData( | 549 with self.host.common_cache.cache_data( |
550 source, image_hash, media_type | 550 source, image_hash, media_type |
551 ) as f: | 551 ) as f: |
552 f.write(img_buf.read()) | 552 f.write(img_buf.read()) |
553 avatar_data['path'] = Path(f.name) | 553 avatar_data['path'] = Path(f.name) |
554 avatar_data['filename'] = avatar_data['path'].name | 554 avatar_data['filename'] = avatar_data['path'].name |
555 avatar_data['cache_uid'] = image_hash | 555 avatar_data['cache_uid'] = image_hash |
556 | 556 |
557 async def cacheAvatar(self, source: str, avatar_data: Dict[str, Any]) -> None: | 557 async def cache_avatar(self, source: str, avatar_data: Dict[str, Any]) -> None: |
558 """Resize if necessary and cache avatar | 558 """Resize if necessary and cache avatar |
559 | 559 |
560 @param source: source importing the avatar (usually it is plugin's import name), | 560 @param source: source importing the avatar (usually it is plugin's import name), |
561 will be used in cache metadata | 561 will be used in cache metadata |
562 @param avatar_data: avatar metadata as build by [avatarSetDataFilter] | 562 @param avatar_data: avatar metadata as build by [avatar_set_data_filter] |
563 will be updated with following keys: | 563 will be updated with following keys: |
564 path: updated path using cached file | 564 path: updated path using cached file |
565 filename: updated filename using cached file | 565 filename: updated filename using cached file |
566 base64: resized and base64 encoded avatar | 566 base64: resized and base64 encoded avatar |
567 cache_uid: SHA1 hash used as cache unique ID | 567 cache_uid: SHA1 hash used as cache unique ID |
568 """ | 568 """ |
569 await threads.deferToThread(self._blockingCacheAvatar, source, avatar_data) | 569 await threads.deferToThread(self._blocking_cache_avatar, source, avatar_data) |
570 | 570 |
571 async def avatarSetDataFilter(self, client, entity, file_path): | 571 async def avatar_set_data_filter(self, client, entity, file_path): |
572 """Convert avatar file path to dict data""" | 572 """Convert avatar file path to dict data""" |
573 file_path = Path(file_path) | 573 file_path = Path(file_path) |
574 if not file_path.is_file(): | 574 if not file_path.is_file(): |
575 raise ValueError(f"There is no file at {file_path} to use as avatar") | 575 raise ValueError(f"There is no file at {file_path} to use as avatar") |
576 avatar_data = { | 576 avatar_data = { |
581 media_type = avatar_data['media_type'] | 581 media_type = avatar_data['media_type'] |
582 if media_type is None: | 582 if media_type is None: |
583 raise ValueError(f"Can't identify type of image at {file_path}") | 583 raise ValueError(f"Can't identify type of image at {file_path}") |
584 if not media_type.startswith('image/'): | 584 if not media_type.startswith('image/'): |
585 raise ValueError(f"File at {file_path} doesn't appear to be an image") | 585 raise ValueError(f"File at {file_path} doesn't appear to be an image") |
586 await self.cacheAvatar(IMPORT_NAME, avatar_data) | 586 await self.cache_avatar(IMPORT_NAME, avatar_data) |
587 return avatar_data | 587 return avatar_data |
588 | 588 |
589 async def avatarSetPostTreatment(self, client, entity, avatar_data): | 589 async def avatar_set_post_treatment(self, client, entity, avatar_data): |
590 """Update our own avatar""" | 590 """Update our own avatar""" |
591 await self.update(client, IMPORT_NAME, "avatar", avatar_data, entity) | 591 await self.update(client, IMPORT_NAME, "avatar", avatar_data, entity) |
592 | 592 |
593 def avatarBuildMetadata( | 593 def avatar_build_metadata( |
594 self, | 594 self, |
595 path: Path, | 595 path: Path, |
596 media_type: Optional[str] = None, | 596 media_type: Optional[str] = None, |
597 cache_uid: Optional[str] = None | 597 cache_uid: Optional[str] = None |
598 ) -> Optional[Dict[str, Union[str, Path, None]]]: | 598 ) -> Optional[Dict[str, Union[str, Path, None]]]: |
620 "filename": path.name, | 620 "filename": path.name, |
621 "media_type": media_type, | 621 "media_type": media_type, |
622 "cache_uid": cache_uid, | 622 "cache_uid": cache_uid, |
623 } | 623 } |
624 | 624 |
625 def avatarUpdateIsNewData(self, client, entity, cached_data, new_data): | 625 def avatar_update_is_new_data(self, client, entity, cached_data, new_data): |
626 return new_data['path'] != cached_data['path'] | 626 return new_data['path'] != cached_data['path'] |
627 | 627 |
628 async def avatarUpdateDataFilter(self, client, entity, data): | 628 async def avatar_update_data_filter(self, client, entity, data): |
629 if not isinstance(data, dict): | 629 if not isinstance(data, dict): |
630 raise ValueError(f"Invalid data type ({type(data)}), a dict is expected") | 630 raise ValueError(f"Invalid data type ({type(data)}), a dict is expected") |
631 mandatory_keys = {'path', 'filename', 'cache_uid'} | 631 mandatory_keys = {'path', 'filename', 'cache_uid'} |
632 if not data.keys() >= mandatory_keys: | 632 if not data.keys() >= mandatory_keys: |
633 raise ValueError(f"missing avatar data keys: {mandatory_keys - data.keys()}") | 633 raise ValueError(f"missing avatar data keys: {mandatory_keys - data.keys()}") |
634 return data | 634 return data |
635 | 635 |
636 async def nicknamesGetPostTreatment(self, client, entity, plugin_nicknames): | 636 async def nicknames_get_post_treatment(self, client, entity, plugin_nicknames): |
637 """Prepend nicknames from core locations + set default nickname | 637 """Prepend nicknames from core locations + set default nickname |
638 | 638 |
639 nicknames are checked from many locations, there is always at least | 639 nicknames are checked from many locations, there is always at least |
640 one nickname. First nickname of the list can be used in priority. | 640 one nickname. First nickname of the list can be used in priority. |
641 Nicknames are appended in this order: | 641 Nicknames are appended in this order: |
646 """ | 646 """ |
647 nicknames = [] | 647 nicknames = [] |
648 | 648 |
649 # for MUC we add resource | 649 # for MUC we add resource |
650 if entity.resource: | 650 if entity.resource: |
651 # getIdentityJid let the resource only if the entity is a MUC room | 651 # get_identity_jid let the resource only if the entity is a MUC room |
652 # occupant jid | 652 # occupant jid |
653 nicknames.append(entity.resource) | 653 nicknames.append(entity.resource) |
654 | 654 |
655 # we first check roster (if we are not in a component) | 655 # we first check roster (if we are not in a component) |
656 if not client.is_component: | 656 if not client.is_component: |
657 roster_item = client.roster.getItem(entity.userhostJID()) | 657 roster_item = client.roster.get_item(entity.userhostJID()) |
658 if roster_item is not None and roster_item.name: | 658 if roster_item is not None and roster_item.name: |
659 # user set name has priority over entity set name | 659 # user set name has priority over entity set name |
660 nicknames.append(roster_item.name) | 660 nicknames.append(roster_item.name) |
661 | 661 |
662 nicknames.extend(plugin_nicknames) | 662 nicknames.extend(plugin_nicknames) |
668 nicknames.append(entity.userhost()) | 668 nicknames.append(entity.userhost()) |
669 | 669 |
670 # we remove duplicates while preserving order with dict | 670 # we remove duplicates while preserving order with dict |
671 return list(dict.fromkeys(nicknames)) | 671 return list(dict.fromkeys(nicknames)) |
672 | 672 |
673 def nicknamesUpdateIsNewData(self, client, entity, cached_data, new_nicknames): | 673 def nicknames_update_is_new_data(self, client, entity, cached_data, new_nicknames): |
674 return not set(new_nicknames).issubset(cached_data) | 674 return not set(new_nicknames).issubset(cached_data) |
675 | 675 |
676 async def descriptionGetPostTreatment( | 676 async def description_get_post_treatment( |
677 self, | 677 self, |
678 client: SatXMPPEntity, | 678 client: SatXMPPEntity, |
679 entity: jid.JID, | 679 entity: jid.JID, |
680 plugin_description: List[str] | 680 plugin_description: List[str] |
681 ) -> str: | 681 ) -> str: |
682 """Join all descriptions in a unique string""" | 682 """Join all descriptions in a unique string""" |
683 return '\n'.join(plugin_description) | 683 return '\n'.join(plugin_description) |
684 | 684 |
685 def _getIdentity(self, entity_s, metadata_filter, use_cache, profile): | 685 def _get_identity(self, entity_s, metadata_filter, use_cache, profile): |
686 entity = jid.JID(entity_s) | 686 entity = jid.JID(entity_s) |
687 client = self.host.getClient(profile) | 687 client = self.host.get_client(profile) |
688 d = defer.ensureDeferred( | 688 d = defer.ensureDeferred( |
689 self.getIdentity(client, entity, metadata_filter, use_cache)) | 689 self.get_identity(client, entity, metadata_filter, use_cache)) |
690 d.addCallback(data_format.serialise) | 690 d.addCallback(data_format.serialise) |
691 return d | 691 return d |
692 | 692 |
693 async def getIdentity( | 693 async def get_identity( |
694 self, | 694 self, |
695 client: SatXMPPEntity, | 695 client: SatXMPPEntity, |
696 entity: Optional[jid.JID] = None, | 696 entity: Optional[jid.JID] = None, |
697 metadata_filter: Optional[List[str]] = None, | 697 metadata_filter: Optional[List[str]] = None, |
698 use_cache: bool = True | 698 use_cache: bool = True |
717 id_data[metadata_name] = await self.get( | 717 id_data[metadata_name] = await self.get( |
718 client, metadata_name, entity, use_cache) | 718 client, metadata_name, entity, use_cache) |
719 | 719 |
720 return id_data | 720 return id_data |
721 | 721 |
722 def _getIdentities(self, entities_s, metadata_filter, profile): | 722 def _get_identities(self, entities_s, metadata_filter, profile): |
723 entities = [jid.JID(e) for e in entities_s] | 723 entities = [jid.JID(e) for e in entities_s] |
724 client = self.host.getClient(profile) | 724 client = self.host.get_client(profile) |
725 d = defer.ensureDeferred(self.getIdentities(client, entities, metadata_filter)) | 725 d = defer.ensureDeferred(self.get_identities(client, entities, metadata_filter)) |
726 d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()})) | 726 d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()})) |
727 return d | 727 return d |
728 | 728 |
729 async def getIdentities( | 729 async def get_identities( |
730 self, | 730 self, |
731 client: SatXMPPEntity, | 731 client: SatXMPPEntity, |
732 entities: List[jid.JID], | 732 entities: List[jid.JID], |
733 metadata_filter: Optional[List[str]] = None, | 733 metadata_filter: Optional[List[str]] = None, |
734 ) -> dict: | 734 ) -> dict: |
735 """Retrieve several identities at once | 735 """Retrieve several identities at once |
736 | 736 |
737 @param entities: entities from which identities must be retrieved | 737 @param entities: entities from which identities must be retrieved |
738 @param metadata_filter: same as for [getIdentity] | 738 @param metadata_filter: same as for [get_identity] |
739 @return: identities metadata where key is jid | 739 @return: identities metadata where key is jid |
740 if an error happens while retrieve a jid entity, it won't be present in the | 740 if an error happens while retrieve a jid entity, it won't be present in the |
741 result (and a warning will be logged) | 741 result (and a warning will be logged) |
742 """ | 742 """ |
743 identities = {} | 743 identities = {} |
744 get_identity_list = [] | 744 get_identity_list = [] |
745 for entity_jid in entities: | 745 for entity_jid in entities: |
746 get_identity_list.append( | 746 get_identity_list.append( |
747 defer.ensureDeferred( | 747 defer.ensureDeferred( |
748 self.getIdentity( | 748 self.get_identity( |
749 client, | 749 client, |
750 entity=entity_jid, | 750 entity=entity_jid, |
751 metadata_filter=metadata_filter, | 751 metadata_filter=metadata_filter, |
752 ) | 752 ) |
753 ) | 753 ) |
759 log.warning(f"Can't get identity for {entity_jid}") | 759 log.warning(f"Can't get identity for {entity_jid}") |
760 else: | 760 else: |
761 identities[entity_jid] = identity | 761 identities[entity_jid] = identity |
762 return identities | 762 return identities |
763 | 763 |
764 def _getBaseIdentities(self, profile_key): | 764 def _get_base_identities(self, profile_key): |
765 client = self.host.getClient(profile_key) | 765 client = self.host.get_client(profile_key) |
766 d = defer.ensureDeferred(self.getBaseIdentities(client)) | 766 d = defer.ensureDeferred(self.get_base_identities(client)) |
767 d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()})) | 767 d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()})) |
768 return d | 768 return d |
769 | 769 |
770 async def getBaseIdentities( | 770 async def get_base_identities( |
771 self, | 771 self, |
772 client: SatXMPPEntity, | 772 client: SatXMPPEntity, |
773 ) -> dict: | 773 ) -> dict: |
774 """Retrieve identities for entities in roster + own identity + invitations | 774 """Retrieve identities for entities in roster + own identity + invitations |
775 | 775 |
777 | 777 |
778 """ | 778 """ |
779 if client.is_component: | 779 if client.is_component: |
780 entities = [client.jid.userhostJID()] | 780 entities = [client.jid.userhostJID()] |
781 else: | 781 else: |
782 entities = client.roster.getJids() + [client.jid.userhostJID()] | 782 entities = client.roster.get_jids() + [client.jid.userhostJID()] |
783 | 783 |
784 return await self.getIdentities( | 784 return await self.get_identities( |
785 client, | 785 client, |
786 entities, | 786 entities, |
787 ['avatar', 'nicknames'] | 787 ['avatar', 'nicknames'] |
788 ) | 788 ) |
789 | 789 |
790 def _setIdentity(self, id_data_s, profile): | 790 def _set_identity(self, id_data_s, profile): |
791 client = self.host.getClient(profile) | 791 client = self.host.get_client(profile) |
792 id_data = data_format.deserialise(id_data_s) | 792 id_data = data_format.deserialise(id_data_s) |
793 return defer.ensureDeferred(self.setIdentity(client, id_data)) | 793 return defer.ensureDeferred(self.set_identity(client, id_data)) |
794 | 794 |
795 async def setIdentity(self, client, id_data): | 795 async def set_identity(self, client, id_data): |
796 """Update profile's identity | 796 """Update profile's identity |
797 | 797 |
798 @param id_data(dict): data to update, key can be one of self.metadata keys | 798 @param id_data(dict): data to update, key can be one of self.metadata keys |
799 """ | 799 """ |
800 if not id_data.keys() <= self.metadata.keys(): | 800 if not id_data.keys() <= self.metadata.keys(): |