Mercurial > libervia-backend
diff libervia/backend/memory/sqla_mapping.py @ 4130:02f0adc745c6
core: notifications implementation, first draft:
add a new table for notifications, and methods/bridge methods to manipulate them.
author | Goffi <goffi@goffi.org> |
---|---|
date | Mon, 16 Oct 2023 17:29:31 +0200 |
parents | 684ba556a617 |
children | 23d21daed216 |
line wrap: on
line diff
class Profiles(dict):
    """Mapping of profile name → profile id with a synchronised reverse index.

    The reverse mapping (:attr:`id_to_profile`, profile id → profile name) is
    kept up to date on every mutation so that a profile id can be resolved
    back to its name in O(1) without scanning the dict.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # reverse index: profile id → profile name
        self.id_to_profile = {v: k for k, v in self.items()}

    def __setitem__(self, key, value):
        super().__setitem__(key, value)
        self.id_to_profile[value] = key

    def __delitem__(self, key):
        # remove the reverse entry first, while self[key] is still available
        del self.id_to_profile[self[key]]
        super().__delitem__(key)

    def update(self, *args, **kwargs):
        super().update(*args, **kwargs)
        # rebuild wholesale: update() may both add and overwrite entries
        self.id_to_profile = {v: k for k, v in self.items()}

    def clear(self):
        super().clear()
        self.id_to_profile.clear()

    def pop(self, key, *default):
        # FIX: pop() was not overridden in the first draft, leaving a stale
        # entry in id_to_profile after a profile was popped.
        if key in self:
            del self.id_to_profile[self[key]]
        return super().pop(key, *default)


# module-level cache of known profiles (filled by the storage layer)
profiles = Profiles()


def get_profile_by_id(profile_id):
    """Return the profile name for ``profile_id``, or None if unknown."""
    return profiles.id_to_profile.get(profile_id)
class NotificationType(enum.Enum):
    """Category of a notification (used to route/filter notifications)."""

    chat = "chat"
    blog = "blog"
    calendar = "calendar"
    file = "file"
    call = "call"
    service = "service"
    other = "other"


class NotificationStatus(enum.Enum):
    """Read state of a notification."""

    new = "new"
    read = "read"


class NotificationPriority(enum.IntEnum):
    """Notification priority levels; IntEnum so priorities compare/sort numerically."""

    LOW = 10
    MEDIUM = 20
    HIGH = 30
    URGENT = 40
"message": {m.language or '': m.message for m in self.messages}, - "subject": {m.language or '': m.subject for m in self.subjects}, + "message": {m.language or "": m.message for m in self.messages}, + "subject": {m.language or "": m.subject for m in self.subjects}, "type": self.type, "extra": extra, "timestamp": self.timestamp, @@ -311,8 +365,14 @@ def as_tuple(self): d = self.serialise() return ( - d['uid'], d['timestamp'], d['from'], d['to'], d['message'], d['subject'], - d['type'], d['extra'] + d["uid"], + d["timestamp"], + d["from"], + d["to"], + d["message"], + d["subject"], + d["type"], + d["extra"], ) @staticmethod @@ -336,9 +396,7 @@ class Message(Base): __tablename__ = "message" - __table_args__ = ( - Index("message__history_uid", "history_uid"), - ) + __table_args__ = (Index("message__history_uid", "history_uid"),) id = Column( Integer, @@ -358,16 +416,14 @@ def __repr__(self): lang_str = f"[{self.language}]" if self.language else "" - msg = f"{self.message[:20]}…" if len(self.message)>20 else self.message + msg = f"{self.message[:20]}…" if len(self.message) > 20 else self.message content = f"{lang_str}{msg}" return f"Message<{content}>" class Subject(Base): __tablename__ = "subject" - __table_args__ = ( - Index("subject__history_uid", "history_uid"), - ) + __table_args__ = (Index("subject__history_uid", "history_uid"),) id = Column( Integer, @@ -387,16 +443,14 @@ def __repr__(self): lang_str = f"[{self.language}]" if self.language else "" - msg = f"{self.subject[:20]}…" if len(self.subject)>20 else self.subject + msg = f"{self.subject[:20]}…" if len(self.subject) > 20 else self.subject content = f"{lang_str}{msg}" return f"Subject<{content}>" class Thread(Base): __tablename__ = "thread" - __table_args__ = ( - Index("thread__history_uid", "history_uid"), - ) + __table_args__ = (Index("thread__history_uid", "history_uid"),) id = Column( Integer, @@ -412,6 +466,51 @@ return f"Thread<{self.thread_id} [parent: {self.parent_id}]>" +class Notification(Base): + 
__tablename__ = "notifications" + __table_args__ = (Index("notifications_profile_id_status", "profile_id", "status"),) + + id = Column(Integer, primary_key=True, autoincrement=True) + timestamp = Column(Float, nullable=False, default=time.time) + expire_at = Column(Float, nullable=True) + + profile_id = Column(ForeignKey("profiles.id", ondelete="CASCADE"), index=True, nullable=True) + profile = relationship("Profile") + + type = Column(Enum(NotificationType), nullable=False) + + title = Column(Text, nullable=True) + body_plain = Column(Text, nullable=False) + body_rich = Column(Text, nullable=True) + + requires_action = Column(Boolean, default=False) + priority = Column(Integer, default=NotificationPriority.MEDIUM.value) + + extra_data = Column(JSON) + status = Column(Enum(NotificationStatus), default=NotificationStatus.new) + + def serialise(self) -> dict[str, str | float | bool | int | dict]: + """ + Serialises the Notification instance to a dictionary. + """ + result = {} + for column in self.__table__.columns: + value = getattr(self, column.name) + if value is not None: + if column.name in ("type", "status"): + result[column.name] = value.name + elif column.name == "id": + result[column.name] = str(value) + elif column.name == "profile_id": + if value is None: + result["profile"] = C.PROF_KEY_ALL + else: + result["profile"] = get_profile_by_id(value) + else: + result[column.name] = value + return result + + class ParamGen(Base): __tablename__ = "param_gen" @@ -425,9 +524,7 @@ category = Column(Text, primary_key=True) name = Column(Text, primary_key=True) - profile_id = Column( - ForeignKey("profiles.id", ondelete="CASCADE"), primary_key=True - ) + profile_id = Column(ForeignKey("profiles.id", ondelete="CASCADE"), primary_key=True) value = Column(Text) profile = relationship("Profile", back_populates="params") @@ -446,9 +543,7 @@ namespace = Column(Text, primary_key=True) key = Column(Text, primary_key=True) - profile_id = Column( - ForeignKey("profiles.id", 
ondelete="CASCADE"), primary_key=True - ) + profile_id = Column(ForeignKey("profiles.id", ondelete="CASCADE"), primary_key=True) value = Column(Text) profile = relationship("Profile", back_populates="private_data") @@ -467,9 +562,7 @@ namespace = Column(Text, primary_key=True) key = Column(Text, primary_key=True) - profile_id = Column( - ForeignKey("profiles.id", ondelete="CASCADE"), primary_key=True - ) + profile_id = Column(ForeignKey("profiles.id", ondelete="CASCADE"), primary_key=True) value = Column(LegacyPickle) profile = relationship("Profile", back_populates="private_bin_data") @@ -484,8 +577,8 @@ "profile_id", "owner", "media_type", - "media_subtype" - ) + "media_subtype", + ), ) id = Column(Text, primary_key=True) @@ -493,11 +586,7 @@ version = Column(Text, primary_key=True) parent = Column(Text, nullable=False) type = Column( - Enum( - "file", "directory", - name="file_type", - create_constraint=True - ), + Enum("file", "directory", name="file_type", create_constraint=True), nullable=False, server_default="file", ) @@ -520,14 +609,10 @@ class PubsubNode(Base): __tablename__ = "pubsub_nodes" - __table_args__ = ( - UniqueConstraint("profile_id", "service", "name"), - ) + __table_args__ = (UniqueConstraint("profile_id", "service", "name"),) id = Column(Integer, primary_key=True) - profile_id = Column( - ForeignKey("profiles.id", ondelete="CASCADE") - ) + profile_id = Column(ForeignKey("profiles.id", ondelete="CASCADE")) service = Column(JID) name = Column(Text, nullable=False) subscribed = Column( @@ -540,19 +625,11 @@ name="sync_state", create_constraint=True, ), - nullable=True - ) - sync_state_updated = Column( - Float, - nullable=False, - default=time.time() + nullable=True, ) - type_ = Column( - Text, name="type", nullable=True - ) - subtype = Column( - Text, nullable=True - ) + sync_state_updated = Column(Float, nullable=False, default=time.time()) + type_ = Column(Text, name="type", nullable=True) + subtype = Column(Text, nullable=True) extra = 
Column(JSON) items = relationship("PubsubItem", back_populates="node", passive_deletes=True) @@ -567,10 +644,9 @@ Used by components managing a pubsub service """ + __tablename__ = "pubsub_subs" - __table_args__ = ( - UniqueConstraint("node_id", "subscriber"), - ) + __table_args__ = (UniqueConstraint("node_id", "subscriber"),) id = Column(Integer, primary_key=True) node_id = Column(ForeignKey("pubsub_nodes.id", ondelete="CASCADE"), nullable=False) @@ -581,7 +657,7 @@ name="state", create_constraint=True, ), - nullable=True + nullable=True, ) node = relationship("PubsubNode", back_populates="subscriptions") @@ -589,9 +665,7 @@ class PubsubItem(Base): __tablename__ = "pubsub_items" - __table_args__ = ( - UniqueConstraint("node_id", "name"), - ) + __table_args__ = (UniqueConstraint("node_id", "name"),) id = Column(Integer, primary_key=True) node_id = Column(ForeignKey("pubsub_nodes.id", ondelete="CASCADE"), nullable=False) name = Column(Text, nullable=False) @@ -607,6 +681,7 @@ # create + @event.listens_for(PubsubItem.__table__, "after_create") def fts_create(target, connection, **kw): """Full-Text Search table creation""" @@ -626,13 +701,15 @@ " INSERT INTO pubsub_items_fts(pubsub_items_fts, rowid, data) VALUES" "('delete', old.id, old.data);" " INSERT INTO pubsub_items_fts(rowid, data) VALUES(new.id, new.data);" - "END" + "END", ] for q in queries: connection.execute(DDL(q)) + # drop + @event.listens_for(PubsubItem.__table__, "before_drop") def fts_drop(target, connection, **kw): "Full-Text Search table drop" ""