comparison sat/memory/sqla_mapping.py @ 3663:162866ca4be7

db (schema): create virtual table for FTS + migration rel 361
author Goffi <goffi@goffi.org>
date Wed, 08 Sep 2021 17:58:48 +0200
parents 05db744f194f
children 658ddbabaf36
comparing 3661:cbb988a6f507 with 3663:162866ca4be7
 21  from datetime import datetime
 22  import time
 23  import enum
 24  from sqlalchemy import (
 25      MetaData, Column, Integer, Text, Float, Boolean, DateTime, Enum, JSON, ForeignKey,
-26      UniqueConstraint, Index
+26      UniqueConstraint, Index, DDL, event
 27  )
 28
 29  from sqlalchemy.orm import declarative_base, relationship
 30  from sqlalchemy.types import TypeDecorator
 31  from sqlalchemy.sql.functions import now
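The only change in this block is the import of DDL and event, which the new Full-Text Search hooks at the end of the file rely on. As an aside (not part of this changeset), SQLAlchemy can also express the dialect check declaratively through DDL.execute_if(); a minimal sketch of that equivalent pattern, on a hypothetical throwaway table:

    from sqlalchemy import Column, DDL, Integer, MetaData, Table, Text, event

    metadata = MetaData()

    # hypothetical table, only here to illustrate the hook mechanism
    some_table = Table(
        "some_table", metadata,
        Column("id", Integer, primary_key=True),
        Column("data", Text),
    )

    # attach raw DDL to the table's "after_create" event; execute_if()
    # restricts it to SQLite, which the listeners below check by hand instead
    event.listen(
        some_table,
        "after_create",
        DDL(
            "CREATE VIRTUAL TABLE some_table_fts "
            "USING fts5(data, content=some_table, content_rowid=id)"
        ).execute_if(dialect="sqlite"),
    )

Both forms guard the FTS5 statements behind the SQLite dialect; the changeset itself uses the explicit engine-name test shown below.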
 54      IN_PROGRESS = 1
 55      #: synchronisation is done
 56      COMPLETED = 2
 57      #: something wrong happened during synchronisation, won't sync
 58      ERROR = 3
-59      #: synchronisation won't be done even if a syncing analyser match
+59      #: synchronisation won't be done even if a syncing analyser matches
 60      NO_SYNC = 4
 61
 62
 63  class LegacyPickle(TypeDecorator):
 64      """Handle troubles with data pickled by former version of SàT
 527      created = Column(DateTime, nullable=False, server_default=now())
 528      updated = Column(DateTime, nullable=False, server_default=now(), onupdate=now())
 529      parsed = Column(JSON)
 530
 531      node = relationship("PubsubNode", back_populates="items")
+532
+533
+534  ## Full-Text Search
+535
+536  # create
+537
+538  @event.listens_for(PubsubItem.__table__, "after_create")
+539  def fts_create(target, connection, **kw):
+540      """Full-Text Search table creation"""
+541      if connection.engine.name == "sqlite":
+542          # Using SQLite FTS5
+543          queries = [
+544              "CREATE VIRTUAL TABLE pubsub_items_fts "
+545              "USING fts5(data, content=pubsub_items, content_rowid=id)",
+546              "CREATE TRIGGER pubsub_items_fts_sync_ins AFTER INSERT ON pubsub_items BEGIN"
+547              " INSERT INTO pubsub_items_fts(rowid, data) VALUES (new.id, new.data);"
+548              "END",
+549              "CREATE TRIGGER pubsub_items_fts_sync_del AFTER DELETE ON pubsub_items BEGIN"
+550              " INSERT INTO pubsub_items_fts(pubsub_items_fts, rowid, data) "
+551              "VALUES('delete', old.id, old.data);"
+552              "END",
+553              "CREATE TRIGGER pubsub_items_fts_sync_upd AFTER UPDATE ON pubsub_items BEGIN"
+554              " INSERT INTO pubsub_items_fts(pubsub_items_fts, rowid, data) VALUES"
+555              "('delete', old.id, old.data);"
+556              " INSERT INTO pubsub_items_fts(rowid, data) VALUES(new.id, new.data);"
+557              "END"
+558          ]
+559          for q in queries:
+560              connection.execute(DDL(q))
+561
+562  # drop
+563
+564  @event.listens_for(PubsubItem.__table__, "before_drop")
+565  def fts_drop(target, connection, **kw):
+566      """Full-Text Search table drop"""
+567      if connection.engine.name == "sqlite":
+568          # Using SQLite FTS5
+569          queries = [
+570              "DROP TRIGGER IF EXISTS pubsub_items_fts_sync_ins",
+571              "DROP TRIGGER IF EXISTS pubsub_items_fts_sync_del",
+572              "DROP TRIGGER IF EXISTS pubsub_items_fts_sync_upd",
+573              "DROP TABLE IF EXISTS pubsub_items_fts",
+574          ]
+575          for q in queries:
+576              connection.execute(DDL(q))
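To see what these hooks give the storage layer, here is a rough, self-contained sketch (not code from this repository): it uses a stripped-down stand-in for pubsub_items with only id and data columns, an in-memory SQLite engine, and only the insert trigger, so it approximates rather than reproduces the module. metadata.create_all() fires the after_create listener, the trigger keeps pubsub_items_fts in step with ordinary INSERTs, and FTS5's MATCH can then search item payloads:

    from sqlalchemy import (
        Column, DDL, Integer, MetaData, Table, Text, create_engine, event, text
    )

    metadata = MetaData()

    # stripped-down stand-in for pubsub_items: only the columns FTS needs
    items = Table(
        "pubsub_items", metadata,
        Column("id", Integer, primary_key=True),
        Column("data", Text, nullable=False),
    )

    @event.listens_for(items, "after_create")
    def fts_create(target, connection, **kw):
        # same pattern as above: FTS5 external-content table plus a sync trigger
        if connection.engine.name == "sqlite":
            connection.execute(DDL(
                "CREATE VIRTUAL TABLE pubsub_items_fts "
                "USING fts5(data, content=pubsub_items, content_rowid=id)"
            ))
            connection.execute(DDL(
                "CREATE TRIGGER pubsub_items_fts_sync_ins AFTER INSERT ON pubsub_items"
                " BEGIN"
                " INSERT INTO pubsub_items_fts(rowid, data) VALUES (new.id, new.data);"
                " END"
            ))

    engine = create_engine("sqlite://")   # in-memory database
    metadata.create_all(engine)           # fires fts_create through "after_create"

    with engine.begin() as conn:
        conn.execute(items.insert(), [
            {"data": "<item><body>hello world</body></item>"},
            {"data": "<item><body>something else</body></item>"},
        ])
        # the trigger has mirrored both rows into pubsub_items_fts, so MATCH works
        rows = conn.execute(text(
            "SELECT i.id, i.data FROM pubsub_items AS i"
            " JOIN pubsub_items_fts ON pubsub_items_fts.rowid = i.id"
            " WHERE pubsub_items_fts MATCH :q"
        ), {"q": "hello"}).all()
        print(rows)  # only the first item matches

The SELECT here is only illustrative; the actual search queries, and the companion "rel 361" migration mentioned in the commit message, live outside this file.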