Mercurial > libervia-backend
diff sat/memory/sqla.py @ 4037:524856bd7b19
massive refactoring to switch from camelCase to snake_case:
Historically, Libervia (formerly SàT) used camelCase, as permitted by PEP 8 for
pre-PEP8 codebases, in order to follow the same coding style as Twisted.
However, snake_case is more readable, and it is better to follow PEP 8 best practices,
so it has been decided to move to full snake_case. Because Libervia has a huge codebase,
this had resulted in an ugly mix of camelCase and snake_case.
To fix that, this patch does a big refactoring, renaming every function and method
(including the bridge) that does not come from Twisted or Wokkel, so that they use
snake_case throughout. This is a massive change and may result in some bugs.
author | Goffi <goffi@goffi.org> |
---|---|
date | Sat, 08 Apr 2023 13:54:42 +0200 |
parents | 1a77e1f866f9 |
children |
line wrap: on
line diff
--- a/sat/memory/sqla.py Fri Apr 07 15:18:39 2023 +0200 +++ b/sat/memory/sqla.py Sat Apr 08 13:54:42 2023 +0200 @@ -122,10 +122,10 @@ # profile id to component entry point self.components: Dict[int, str] = {} - def getProfileById(self, profile_id): + def get_profile_by_id(self, profile_id): return self.profiles.get(profile_id) - async def migrateApply(self, *args: str, log_output: bool = False) -> None: + async def migrate_apply(self, *args: str, log_output: bool = False) -> None: """Do a migration command Commands are applied by running Alembic in a subprocess. @@ -167,7 +167,7 @@ await conn.run_sync(Base.metadata.create_all) log.debug("stamping the database") - await self.migrateApply("stamp", "head") + await self.migrate_apply("stamp", "head") log.debug("stamping done") def _check_db_is_up_to_date(self, conn: Connection) -> bool: @@ -193,14 +193,14 @@ else: log.info("Database needs to be updated") log.info("updating…") - await self.migrateApply("upgrade", "head", log_output=True) + await self.migrate_apply("upgrade", "head", log_output=True) log.info("Database is now up-to-date") @aio async def initialise(self) -> None: log.info(_("Connecting database")) - db_config = sqla_config.getDbConfig() + db_config = sqla_config.get_db_config() engine = create_async_engine( db_config["url"], future=True, @@ -288,31 +288,31 @@ ## Profiles - def getProfilesList(self) -> List[str]: + def get_profiles_list(self) -> List[str]: """"Return list of all registered profiles""" return list(self.profiles.keys()) - def hasProfile(self, profile_name: str) -> bool: + def has_profile(self, profile_name: str) -> bool: """return True if profile_name exists @param profile_name: name of the profile to check """ return profile_name in self.profiles - def profileIsComponent(self, profile_name: str) -> bool: + def profile_is_component(self, profile_name: str) -> bool: try: return self.profiles[profile_name] in self.components except KeyError: raise exceptions.NotFound("the requested profile 
doesn't exists") - def getEntryPoint(self, profile_name: str) -> str: + def get_entry_point(self, profile_name: str) -> str: try: return self.components[self.profiles[profile_name]] except KeyError: raise exceptions.NotFound("the requested profile doesn't exists or is not a component") @aio - async def createProfile(self, name: str, component_ep: Optional[str] = None) -> None: + async def create_profile(self, name: str, component_ep: Optional[str] = None) -> None: """Create a new profile @param name: name of the profile @@ -331,7 +331,7 @@ return profile @aio - async def deleteProfile(self, name: str) -> None: + async def delete_profile(self, name: str) -> None: """Delete profile @param name: name of the profile @@ -349,7 +349,7 @@ ## Params @aio - async def loadGenParams(self, params_gen: dict) -> None: + async def load_gen_params(self, params_gen: dict) -> None: """Load general parameters @param params_gen: dictionary to fill @@ -361,7 +361,7 @@ params_gen[(p.category, p.name)] = p.value @aio - async def loadIndParams(self, params_ind: dict, profile: str) -> None: + async def load_ind_params(self, params_ind: dict, profile: str) -> None: """Load individual parameters @param params_ind: dictionary to fill @@ -376,7 +376,7 @@ params_ind[(p.category, p.name)] = p.value @aio - async def getIndParam(self, category: str, name: str, profile: str) -> Optional[str]: + async def get_ind_param(self, category: str, name: str, profile: str) -> Optional[str]: """Ask database for the value of one specific individual parameter @param category: category of the parameter @@ -395,7 +395,7 @@ return result.scalar_one_or_none() @aio - async def getIndParamValues(self, category: str, name: str) -> Dict[str, str]: + async def get_ind_param_values(self, category: str, name: str) -> Dict[str, str]: """Ask database for the individual values of a parameter for all profiles @param category: category of the parameter @@ -414,7 +414,7 @@ return {param.profile.name: param.value for param in 
result.scalars()} @aio - async def setGenParam(self, category: str, name: str, value: Optional[str]) -> None: + async def set_gen_param(self, category: str, name: str, value: Optional[str]) -> None: """Save the general parameters in database @param category: category of the parameter @@ -436,7 +436,7 @@ await session.commit() @aio - async def setIndParam( + async def set_ind_param( self, category:str, name: str, @@ -489,7 +489,7 @@ return History.source == jid_.userhost() @aio - async def historyGet( + async def history_get( self, from_jid: Optional[jid.JID], to_jid: Optional[jid.JID], @@ -509,7 +509,7 @@ - None for unlimited @param between: confound source and dest (ignore the direction) @param filters: pattern to filter the history results - @return: list of messages as in [messageNew], minus the profile which is already + @return: list of messages as in [message_new], minus the profile which is already known. """ # we have to set a default value to profile because it's last argument @@ -634,7 +634,7 @@ return [h.as_tuple() for h in result] @aio - async def addToHistory(self, data: dict, profile: str) -> None: + async def add_to_history(self, data: dict, profile: str) -> None: """Store a new message in history @param data: message data as build by SatMessageProtocol.onMessage @@ -682,7 +682,7 @@ ## Private values - def _getPrivateClass(self, binary, profile): + def _get_private_class(self, binary, profile): """Get ORM class to use for private values""" if profile is None: return PrivateGenBin if binary else PrivateGen @@ -691,7 +691,7 @@ @aio - async def getPrivates( + async def get_privates( self, namespace:str, keys: Optional[Iterable[str]] = None, @@ -714,7 +714,7 @@ f"{' binary' if binary else ''} private values from database for namespace " f"{namespace}{f' with keys {keys!r}' if keys is not None else ''}" ) - cls = self._getPrivateClass(binary, profile) + cls = self._get_private_class(binary, profile) stmt = select(cls).filter_by(namespace=namespace) if 
keys: stmt = stmt.where(cls.key.in_(list(keys))) @@ -725,7 +725,7 @@ return {p.key: p.value for p in result.scalars()} @aio - async def setPrivateValue( + async def set_private_value( self, namespace: str, key:str, @@ -743,7 +743,7 @@ @param profile: profile to use for individual value if None, it's a general value """ - cls = self._getPrivateClass(binary, profile) + cls = self._get_private_class(binary, profile) values = { "namespace": namespace, @@ -768,7 +768,7 @@ await session.commit() @aio - async def delPrivateValue( + async def del_private_value( self, namespace: str, key: str, @@ -783,7 +783,7 @@ @param profile: profile to use for individual value if None, it's a general value """ - cls = self._getPrivateClass(binary, profile) + cls = self._get_private_class(binary, profile) stmt = delete(cls).filter_by(namespace=namespace, key=key) @@ -795,7 +795,7 @@ await session.commit() @aio - async def delPrivateNamespace( + async def del_private_namespace( self, namespace: str, binary: bool = False, @@ -805,9 +805,9 @@ Be really cautious when you use this method, as all data with given namespace are removed. 
- Params are the same as for delPrivateValue + Params are the same as for del_private_value """ - cls = self._getPrivateClass(binary, profile) + cls = self._get_private_class(binary, profile) stmt = delete(cls).filter_by(namespace=namespace) @@ -821,7 +821,7 @@ ## Files @aio - async def getFiles( + async def get_files( self, client: Optional[SatXMPPEntity], file_id: Optional[str] = None, @@ -852,7 +852,7 @@ @param projection: name of columns to retrieve None to retrieve all @param unique: if True will remove duplicates - other params are the same as for [setFile] + other params are the same as for [set_file] @return: files corresponding to filters """ if projection is None: @@ -910,7 +910,7 @@ return [dict(r) for r in result] @aio - async def setFile( + async def set_file( self, client: SatXMPPEntity, name: str, @@ -987,7 +987,7 @@ )) @aio - async def fileGetUsedSpace(self, client: SatXMPPEntity, owner: jid.JID) -> int: + async def file_get_used_space(self, client: SatXMPPEntity, owner: jid.JID) -> int: async with self.session() as session: result = await session.execute( select(sum_(File.size)).filter_by( @@ -998,7 +998,7 @@ return result.scalar_one_or_none() or 0 @aio - async def fileDelete(self, file_id: str) -> None: + async def file_delete(self, file_id: str) -> None: """Delete file metadata from the database @param file_id: id of the file to delete @@ -1010,7 +1010,7 @@ await session.commit() @aio - async def fileUpdate( + async def file_update( self, file_id: str, column: str, @@ -1068,7 +1068,7 @@ ) @aio - async def getPubsubNode( + async def get_pubsub_node( self, client: SatXMPPEntity, service: jid.JID, @@ -1085,7 +1085,7 @@ @param with_items: retrieve items in the same query @param with_subscriptions: retrieve subscriptions in the same query @param create: if the node doesn't exist in DB, create it - @param create_kwargs: keyword arguments to use with ``setPubsubNode`` if the node + @param create_kwargs: keyword arguments to use with ``set_pubsub_node`` 
if the node needs to be created. """ async with self.session() as session: @@ -1112,15 +1112,15 @@ if create_kwargs is None: create_kwargs = {} try: - return await as_future(self.setPubsubNode( + return await as_future(self.set_pubsub_node( client, service, name, **create_kwargs )) except IntegrityError as e: if "unique" in str(e.orig).lower(): # the node may already exist, if it has been created just after - # getPubsubNode above + # get_pubsub_node above log.debug("ignoring UNIQUE constraint error") - cached_node = await as_future(self.getPubsubNode( + cached_node = await as_future(self.get_pubsub_node( client, service, name, @@ -1133,7 +1133,7 @@ return ret @aio - async def setPubsubNode( + async def set_pubsub_node( self, client: SatXMPPEntity, service: jid.JID, @@ -1159,7 +1159,7 @@ return node @aio - async def updatePubsubNodeSyncState( + async def update_pubsub_node_sync_state( self, node: PubsubNode, state: SyncState @@ -1176,7 +1176,7 @@ ) @aio - async def deletePubsubNode( + async def delete_pubsub_node( self, profiles: Optional[List[str]], services: Optional[List[jid.JID]], @@ -1207,7 +1207,7 @@ await session.commit() @aio - async def cachePubsubItems( + async def cache_pubsub_items( self, client: SatXMPPEntity, node: PubsubNode, @@ -1240,7 +1240,7 @@ await session.commit() @aio - async def deletePubsubItems( + async def delete_pubsub_items( self, node: PubsubNode, items_names: Optional[List[str]] = None @@ -1264,7 +1264,7 @@ await session.commit() @aio - async def purgePubsubItems( + async def purge_pubsub_items( self, services: Optional[List[jid.JID]] = None, names: Optional[List[str]] = None, @@ -1313,7 +1313,7 @@ await session.commit() @aio - async def getItems( + async def get_items( self, node: PubsubNode, max_items: Optional[int] = None, @@ -1352,7 +1352,7 @@ metadata = { "service": node.service, "node": node.name, - "uri": uri.buildXMPPUri( + "uri": uri.build_xmpp_uri( "pubsub", path=node.service.full(), node=node.name, @@ -1487,7 +1487,7 @@ 
result.reverse() return result, metadata - def _getSqlitePath( + def _get_sqlite_path( self, path: List[Union[str, int]] ) -> str: @@ -1495,7 +1495,7 @@ return f"${''.join(f'[{p}]' if isinstance(p, int) else f'.{p}' for p in path)}" @aio - async def searchPubsubItems( + async def search_pubsub_items( self, query: dict, ) -> Tuple[List[PubsubItem]]: @@ -1626,7 +1626,7 @@ op_attr = OP_MAP[operator] except KeyError: raise ValueError(f"invalid operator: {operator!r}") - sqlite_path = self._getSqlitePath(path) + sqlite_path = self._get_sqlite_path(path) if operator in ("overlap", "ioverlap", "disjoint", "idisjoint"): col = literal_column("json_each.value") if operator[0] == "i": @@ -1683,7 +1683,7 @@ raise NotImplementedError(f"Unknown {order!r} order") else: # we have a JSON path - # sqlite_path = self._getSqlitePath(path) + # sqlite_path = self._get_sqlite_path(path) col = PubsubItem.parsed[path] direction = order_data.get("direction", "ASC").lower() if not direction in ("asc", "desc"):