Mercurial > libervia-backend
diff sat/plugins/plugin_misc_identity.py @ 4037:524856bd7b19
massive refactoring to switch from camelCase to snake_case:
historically, Libervia (formerly SàT) used camelCase, as allowed by PEP8 when working with
pre-PEP8 code, in order to keep the same coding style as Twisted.
However, snake_case is more readable and it's better to follow PEP8 best practices, so it
has been decided to move to full snake_case. Because Libervia has a huge codebase, this
ended up with an ugly mix of camelCase and snake_case.
To fix that, this patch does a big refactoring by renaming every function and method
(including bridge) that are not coming from Twisted or Wokkel, to use fully snake_case.
This is a massive change, and may result in some bugs.
author | Goffi <goffi@goffi.org> |
---|---|
date | Sat, 08 Apr 2023 13:54:42 +0200 |
parents | 998c5318230f |
children |
line wrap: on
line diff
--- a/sat/plugins/plugin_misc_identity.py Fri Apr 07 15:18:39 2023 +0200 +++ b/sat/plugins/plugin_misc_identity.py Sat Apr 08 13:54:42 2023 +0200 @@ -77,11 +77,11 @@ "avatar": { "type": dict, # convert avatar path to avatar metadata (and check validity) - "set_data_filter": self.avatarSetDataFilter, + "set_data_filter": self.avatar_set_data_filter, # update profile avatar, so all frontends are aware - "set_post_treatment": self.avatarSetPostTreatment, - "update_is_new_data": self.avatarUpdateIsNewData, - "update_data_filter": self.avatarUpdateDataFilter, + "set_post_treatment": self.avatar_set_post_treatment, + "update_is_new_data": self.avatar_update_is_new_data, + "update_data_filter": self.avatar_update_data_filter, # we store the metadata in database, to restore it on next connection # (it is stored only for roster entities) "store": True, @@ -92,70 +92,70 @@ # of returning only the data from the first successful callback "get_all": True, # append nicknames from roster, resource, etc. 
- "get_post_treatment": self.nicknamesGetPostTreatment, - "update_is_new_data": self.nicknamesUpdateIsNewData, + "get_post_treatment": self.nicknames_get_post_treatment, + "update_is_new_data": self.nicknames_update_is_new_data, "store": True, }, "description": { "type": str, "get_all": True, - "get_post_treatment": self.descriptionGetPostTreatment, + "get_post_treatment": self.description_get_post_treatment, "store": True, } } - host.trigger.add("roster_update", self._rosterUpdateTrigger) - host.memory.setSignalOnUpdate("avatar") - host.memory.setSignalOnUpdate("nicknames") - host.bridge.addMethod( - "identityGet", + host.trigger.add("roster_update", self._roster_update_trigger) + host.memory.set_signal_on_update("avatar") + host.memory.set_signal_on_update("nicknames") + host.bridge.add_method( + "identity_get", ".plugin", in_sign="sasbs", out_sign="s", - method=self._getIdentity, + method=self._get_identity, async_=True, ) - host.bridge.addMethod( - "identitiesGet", + host.bridge.add_method( + "identities_get", ".plugin", in_sign="asass", out_sign="s", - method=self._getIdentities, + method=self._get_identities, async_=True, ) - host.bridge.addMethod( - "identitiesBaseGet", + host.bridge.add_method( + "identities_base_get", ".plugin", in_sign="s", out_sign="s", - method=self._getBaseIdentities, + method=self._get_base_identities, async_=True, ) - host.bridge.addMethod( - "identitySet", + host.bridge.add_method( + "identity_set", ".plugin", in_sign="ss", out_sign="", - method=self._setIdentity, + method=self._set_identity, async_=True, ) - host.bridge.addMethod( - "avatarGet", + host.bridge.add_method( + "avatar_get", ".plugin", in_sign="sbs", out_sign="s", method=self._getAvatar, async_=True, ) - host.bridge.addMethod( - "avatarSet", + host.bridge.add_method( + "avatar_set", ".plugin", in_sign="sss", out_sign="", - method=self._setAvatar, + method=self._set_avatar, async_=True, ) - async def profileConnecting(self, client): + async def profile_connecting(self, 
client): client._identity_update_lock = [] # we restore known identities from database client._identity_storage = persistent.LazyPersistentBinaryDict( @@ -188,22 +188,22 @@ f"{value}") to_delete.append(key) continue - cache = self.host.common_cache.getMetadata(cache_uid) + cache = self.host.common_cache.get_metadata(cache_uid) if cache is None: log.debug( f"purging avatar for {entity}: it is not in cache anymore") to_delete.append(key) continue - self.host.memory.updateEntityData( + self.host.memory.update_entity_data( client, entity, name, value, silent=True ) for key in to_delete: await client._identity_storage.adel(key) - def _rosterUpdateTrigger(self, client, roster_item): - old_item = client.roster.getItem(roster_item.jid) + def _roster_update_trigger(self, client, roster_item): + old_item = client.roster.get_item(roster_item.jid) if old_item is None or old_item.name != roster_item.name: log.debug( f"roster nickname has been updated to {roster_item.name!r} for " @@ -247,7 +247,7 @@ cb_list.append(callback) cb_list.sort(key=lambda c: c.priority, reverse=True) - def getIdentityJid(self, client, peer_jid): + def get_identity_jid(self, client, peer_jid): """Return jid to use to set identity metadata if it's a jid of a room occupant, full jid will be used @@ -260,9 +260,9 @@ if self._m is None: return peer_jid.userhostJID() else: - return self._m.getBareOrFull(client, peer_jid) + return self._m.get_bare_or_full(client, peer_jid) - def checkType(self, metadata_name, value): + def check_type(self, metadata_name, value): """Check that type used for a metadata is the one declared in self.metadata""" value_type = self.metadata[metadata_name]["type"] if not isinstance(value, value_type): @@ -270,7 +270,7 @@ f"{value} has wrong type: it is {type(value)} while {value_type} was " f"expected") - def getFieldType(self, metadata_name: str) -> str: + def get_field_type(self, metadata_name: str) -> str: """Return the type the requested field @param metadata_name: name of the 
field to check @@ -298,7 +298,7 @@ @param use_cache: if False, cache won't be checked @param prefilled_values: map of origin => value to use when `get_all` is set """ - entity = self.getIdentityJid(client, entity) + entity = self.get_identity_jid(client, entity) try: metadata = self.metadata[metadata_name] except KeyError: @@ -306,7 +306,7 @@ get_all = metadata.get('get_all', False) if use_cache: try: - data = self.host.memory.getEntityDatum( + data = self.host.memory.get_entity_datum( client, entity, metadata_name) except (KeyError, exceptions.UnknownEntityError): pass @@ -343,7 +343,7 @@ .format(callback=callback.get, metadata_name=metadata_name, e=e)) else: if data: - self.checkType(metadata_name, data) + self.check_type(metadata_name, data) if get_all: if isinstance(data, list): all_data.extend(data) @@ -359,9 +359,9 @@ post_treatment = metadata.get("get_post_treatment") if post_treatment is not None: - data = await utils.asDeferred(post_treatment, client, entity, data) + data = await utils.as_deferred(post_treatment, client, entity, data) - self.host.memory.updateEntityData( + self.host.memory.update_entity_data( client, entity, metadata_name, data) if metadata.get('store', False): @@ -381,12 +381,12 @@ @param entity(jid.JID, None): entity for which avatar is requested None to use profile's jid """ - entity = self.getIdentityJid(client, entity) + entity = self.get_identity_jid(client, entity) metadata = self.metadata[metadata_name] data_filter = metadata.get("set_data_filter") if data_filter is not None: - data = await utils.asDeferred(data_filter, client, entity, data) - self.checkType(metadata_name, data) + data = await utils.as_deferred(data_filter, client, entity, data) + self.check_type(metadata_name, data) try: callbacks = metadata['callbacks'] @@ -411,7 +411,7 @@ post_treatment = metadata.get("set_post_treatment") if post_treatment is not None: - await utils.asDeferred(post_treatment, client, entity, data) + await utils.as_deferred(post_treatment, 
client, entity, data) async def update( self, @@ -426,14 +426,14 @@ This method may be called by plugins when an identity metadata is available. @param origin: namespace of the plugin which is source of the metadata """ - entity = self.getIdentityJid(client, entity) + entity = self.get_identity_jid(client, entity) if (entity, metadata_name) in client._identity_update_lock: log.debug(f"update is locked for {entity}'s {metadata_name}") return metadata = self.metadata[metadata_name] try: - cached_data = self.host.memory.getEntityDatum( + cached_data = self.host.memory.get_entity_datum( client, entity, metadata_name) except (KeyError, exceptions.UnknownEntityError): # metadata is not cached, we do the update @@ -443,7 +443,7 @@ try: update_is_new_data = metadata["update_is_new_data"] except KeyError: - update_is_new_data = self.defaultUpdateIsNewData + update_is_new_data = self.default_update_is_new_data if data is None: if cached_data is None: @@ -467,7 +467,7 @@ # get_all is set, meaning that we have to check all plugins # so we first delete current cache try: - self.host.memory.delEntityDatum(client, entity, metadata_name) + self.host.memory.del_entity_datum(client, entity, metadata_name) except (KeyError, exceptions.UnknownEntityError): pass # then fill it again by calling get, which will retrieve all values @@ -481,32 +481,32 @@ if data is not None: data_filter = metadata['update_data_filter'] if data_filter is not None: - data = await utils.asDeferred(data_filter, client, entity, data) - self.checkType(metadata_name, data) + data = await utils.as_deferred(data_filter, client, entity, data) + self.check_type(metadata_name, data) - self.host.memory.updateEntityData(client, entity, metadata_name, data) + self.host.memory.update_entity_data(client, entity, metadata_name, data) if metadata.get('store', False): key = f"{entity}\n{metadata_name}" await client._identity_storage.aset(key, data) - def defaultUpdateIsNewData(self, client, entity, cached_data, new_data): + 
def default_update_is_new_data(self, client, entity, cached_data, new_data): return new_data != cached_data def _getAvatar(self, entity, use_cache, profile): - client = self.host.getClient(profile) + client = self.host.get_client(profile) entity = jid.JID(entity) if entity else None d = defer.ensureDeferred(self.get(client, "avatar", entity, use_cache)) d.addCallback(lambda data: data_format.serialise(data)) return d - def _setAvatar(self, file_path, entity, profile_key=C.PROF_KEY_NONE): - client = self.host.getClient(profile_key) + def _set_avatar(self, file_path, entity, profile_key=C.PROF_KEY_NONE): + client = self.host.get_client(profile_key) entity = jid.JID(entity) if entity else None return defer.ensureDeferred( self.set(client, "avatar", file_path, entity)) - def _blockingCacheAvatar( + def _blocking_cache_avatar( self, source: str, avatar_data: dict[str, Any] @@ -546,7 +546,7 @@ img_buf.seek(0) image_hash = hashlib.sha1(img_buf.read()).hexdigest() img_buf.seek(0) - with self.host.common_cache.cacheData( + with self.host.common_cache.cache_data( source, image_hash, media_type ) as f: f.write(img_buf.read()) @@ -554,21 +554,21 @@ avatar_data['filename'] = avatar_data['path'].name avatar_data['cache_uid'] = image_hash - async def cacheAvatar(self, source: str, avatar_data: Dict[str, Any]) -> None: + async def cache_avatar(self, source: str, avatar_data: Dict[str, Any]) -> None: """Resize if necessary and cache avatar @param source: source importing the avatar (usually it is plugin's import name), will be used in cache metadata - @param avatar_data: avatar metadata as build by [avatarSetDataFilter] + @param avatar_data: avatar metadata as build by [avatar_set_data_filter] will be updated with following keys: path: updated path using cached file filename: updated filename using cached file base64: resized and base64 encoded avatar cache_uid: SHA1 hash used as cache unique ID """ - await threads.deferToThread(self._blockingCacheAvatar, source, avatar_data) + 
await threads.deferToThread(self._blocking_cache_avatar, source, avatar_data) - async def avatarSetDataFilter(self, client, entity, file_path): + async def avatar_set_data_filter(self, client, entity, file_path): """Convert avatar file path to dict data""" file_path = Path(file_path) if not file_path.is_file(): @@ -583,14 +583,14 @@ raise ValueError(f"Can't identify type of image at {file_path}") if not media_type.startswith('image/'): raise ValueError(f"File at {file_path} doesn't appear to be an image") - await self.cacheAvatar(IMPORT_NAME, avatar_data) + await self.cache_avatar(IMPORT_NAME, avatar_data) return avatar_data - async def avatarSetPostTreatment(self, client, entity, avatar_data): + async def avatar_set_post_treatment(self, client, entity, avatar_data): """Update our own avatar""" await self.update(client, IMPORT_NAME, "avatar", avatar_data, entity) - def avatarBuildMetadata( + def avatar_build_metadata( self, path: Path, media_type: Optional[str] = None, @@ -622,10 +622,10 @@ "cache_uid": cache_uid, } - def avatarUpdateIsNewData(self, client, entity, cached_data, new_data): + def avatar_update_is_new_data(self, client, entity, cached_data, new_data): return new_data['path'] != cached_data['path'] - async def avatarUpdateDataFilter(self, client, entity, data): + async def avatar_update_data_filter(self, client, entity, data): if not isinstance(data, dict): raise ValueError(f"Invalid data type ({type(data)}), a dict is expected") mandatory_keys = {'path', 'filename', 'cache_uid'} @@ -633,7 +633,7 @@ raise ValueError(f"missing avatar data keys: {mandatory_keys - data.keys()}") return data - async def nicknamesGetPostTreatment(self, client, entity, plugin_nicknames): + async def nicknames_get_post_treatment(self, client, entity, plugin_nicknames): """Prepend nicknames from core locations + set default nickname nicknames are checked from many locations, there is always at least @@ -648,13 +648,13 @@ # for MUC we add resource if entity.resource: - # 
getIdentityJid let the resource only if the entity is a MUC room + # get_identity_jid let the resource only if the entity is a MUC room # occupant jid nicknames.append(entity.resource) # we first check roster (if we are not in a component) if not client.is_component: - roster_item = client.roster.getItem(entity.userhostJID()) + roster_item = client.roster.get_item(entity.userhostJID()) if roster_item is not None and roster_item.name: # user set name has priority over entity set name nicknames.append(roster_item.name) @@ -670,10 +670,10 @@ # we remove duplicates while preserving order with dict return list(dict.fromkeys(nicknames)) - def nicknamesUpdateIsNewData(self, client, entity, cached_data, new_nicknames): + def nicknames_update_is_new_data(self, client, entity, cached_data, new_nicknames): return not set(new_nicknames).issubset(cached_data) - async def descriptionGetPostTreatment( + async def description_get_post_treatment( self, client: SatXMPPEntity, entity: jid.JID, @@ -682,15 +682,15 @@ """Join all descriptions in a unique string""" return '\n'.join(plugin_description) - def _getIdentity(self, entity_s, metadata_filter, use_cache, profile): + def _get_identity(self, entity_s, metadata_filter, use_cache, profile): entity = jid.JID(entity_s) - client = self.host.getClient(profile) + client = self.host.get_client(profile) d = defer.ensureDeferred( - self.getIdentity(client, entity, metadata_filter, use_cache)) + self.get_identity(client, entity, metadata_filter, use_cache)) d.addCallback(data_format.serialise) return d - async def getIdentity( + async def get_identity( self, client: SatXMPPEntity, entity: Optional[jid.JID] = None, @@ -719,14 +719,14 @@ return id_data - def _getIdentities(self, entities_s, metadata_filter, profile): + def _get_identities(self, entities_s, metadata_filter, profile): entities = [jid.JID(e) for e in entities_s] - client = self.host.getClient(profile) - d = defer.ensureDeferred(self.getIdentities(client, entities, 
metadata_filter)) + client = self.host.get_client(profile) + d = defer.ensureDeferred(self.get_identities(client, entities, metadata_filter)) d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()})) return d - async def getIdentities( + async def get_identities( self, client: SatXMPPEntity, entities: List[jid.JID], @@ -735,7 +735,7 @@ """Retrieve several identities at once @param entities: entities from which identities must be retrieved - @param metadata_filter: same as for [getIdentity] + @param metadata_filter: same as for [get_identity] @return: identities metadata where key is jid if an error happens while retrieve a jid entity, it won't be present in the result (and a warning will be logged) @@ -745,7 +745,7 @@ for entity_jid in entities: get_identity_list.append( defer.ensureDeferred( - self.getIdentity( + self.get_identity( client, entity=entity_jid, metadata_filter=metadata_filter, @@ -761,13 +761,13 @@ identities[entity_jid] = identity return identities - def _getBaseIdentities(self, profile_key): - client = self.host.getClient(profile_key) - d = defer.ensureDeferred(self.getBaseIdentities(client)) + def _get_base_identities(self, profile_key): + client = self.host.get_client(profile_key) + d = defer.ensureDeferred(self.get_base_identities(client)) d.addCallback(lambda d: data_format.serialise({str(j):i for j, i in d.items()})) return d - async def getBaseIdentities( + async def get_base_identities( self, client: SatXMPPEntity, ) -> dict: @@ -779,20 +779,20 @@ if client.is_component: entities = [client.jid.userhostJID()] else: - entities = client.roster.getJids() + [client.jid.userhostJID()] + entities = client.roster.get_jids() + [client.jid.userhostJID()] - return await self.getIdentities( + return await self.get_identities( client, entities, ['avatar', 'nicknames'] ) - def _setIdentity(self, id_data_s, profile): - client = self.host.getClient(profile) + def _set_identity(self, id_data_s, profile): + client = 
self.host.get_client(profile) id_data = data_format.deserialise(id_data_s) - return defer.ensureDeferred(self.setIdentity(client, id_data)) + return defer.ensureDeferred(self.set_identity(client, id_data)) - async def setIdentity(self, client, id_data): + async def set_identity(self, client, id_data): """Update profile's identity @param id_data(dict): data to update, key can be one of self.metadata keys