comparison libervia/backend/plugins/plugin_xep_0373.py @ 4212:5f2d496c633f

core: get rid of `pickle`: the use of `pickle` to serialise data was a technical legacy that caused trouble for database storage, for updates (if a serialised class changed, an update could break) and for security (`pickle` can lead to code execution). This patch removes all use of `pickle` in favour of JSON, notably:
- for caching data, a Pydantic model is now used instead
- for the SQLAlchemy model, `LegacyPickle` is replaced by JSON serialisation
- in XEP-0373, the `PublicKeyMetadata` class was being serialised; new `to_dict` and `from_dict` methods have been implemented to handle its serialisation
- new methods to (de)serialise data can now be specified with Identity data types; this is notably used to (de)serialise the `path` of avatars
A migration script has been created to convert data (for upgrade or downgrade), with special care for the XEP-0373 case. Depending on the size of the database, this migration script can take a long time to run.
rel 443
author Goffi <goffi@goffi.org>
date Fri, 23 Feb 2024 13:31:04 +0100
parents 7c5654c54fed
children b53b6dc1f929
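For context, here is a minimal sketch of the kind of per-value conversion the migration mentioned above has to perform for the XEP-0373 data: the stored public key metadata goes from a pickled set of NamedTuples to a JSON list of dicts with ISO-8601 timestamps. The helper names and the local PublicKeyMetadata definition below are illustrative assumptions, not the actual migration script.

    # Illustrative sketch only, not the real migration code.
    import json
    import pickle
    from datetime import datetime
    from typing import NamedTuple

    class PublicKeyMetadata(NamedTuple):
        fingerprint: str
        timestamp: datetime

    def upgrade_value(raw: bytes) -> str:
        """Pickled set of PublicKeyMetadata -> JSON list of dicts."""
        return json.dumps([
            {"fingerprint": pkm.fingerprint, "timestamp": pkm.timestamp.isoformat()}
            for pkm in pickle.loads(raw)
        ])

    def downgrade_value(raw: str) -> bytes:
        """JSON list of dicts -> pickled set of PublicKeyMetadata (for downgrade)."""
        return pickle.dumps({
            PublicKeyMetadata(d["fingerprint"], datetime.fromisoformat(d["timestamp"]))
            for d in json.loads(raw)
        })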
18 18
19 from abc import ABC, abstractmethod 19 from abc import ABC, abstractmethod
20 import base64 20 import base64
21 from datetime import datetime, timezone 21 from datetime import datetime, timezone
22 import enum 22 import enum
23 import json
23 import secrets 24 import secrets
24 import string 25 import string
25 from typing import Any, Dict, Iterable, List, Literal, Optional, Set, Tuple, cast 26 from typing import Any, Dict, Iterable, List, Literal, Optional, Set, Tuple, cast
26 from xml.sax.saxutils import quoteattr 27 from xml.sax.saxutils import quoteattr
27 28
28 from typing_extensions import Final, NamedTuple, Never, assert_never 29 from typing import Final, NamedTuple, Never, assert_never
29 from wokkel import muc, pubsub 30 from wokkel import muc, pubsub
30 from wokkel.disco import DiscoFeature, DiscoInfo 31 from wokkel.disco import DiscoFeature, DiscoInfo
31 import xmlschema 32 import xmlschema
32 33
33 from libervia.backend.core import exceptions 34 from libervia.backend.core import exceptions
810 811
811 class PublicKeyMetadata(NamedTuple): 812 class PublicKeyMetadata(NamedTuple):
812 """ 813 """
813 Metadata about a published public key. 814 Metadata about a published public key.
814 """ 815 """
815
816 fingerprint: str 816 fingerprint: str
817 timestamp: datetime 817 timestamp: datetime
818
819 def to_dict(self) -> dict:
820 # Convert the instance to a dictionary and handle datetime serialization
821 data = self._asdict()
822 data['timestamp'] = self.timestamp.isoformat()
823 return data
824
825 @staticmethod
826 def from_dict(data: dict) -> 'PublicKeyMetadata':
827 # Load a serialised dictionary
828 data['timestamp'] = datetime.fromisoformat(data['timestamp'])
829 return PublicKeyMetadata(**data)
818 830
819 831
820 @enum.unique 832 @enum.unique
821 class TrustLevel(enum.Enum): 833 class TrustLevel(enum.Enum):
822 """ 834 """
1100 timestamp=parse_datetime(cast(str, pubkey_metadata_elt["date"])) 1112 timestamp=parse_datetime(cast(str, pubkey_metadata_elt["date"]))
1101 ) for pubkey_metadata_elt in pubkey_metadata_elts } 1113 ) for pubkey_metadata_elt in pubkey_metadata_elts }
1102 1114
1103 storage_key = STR_KEY_PUBLIC_KEYS_METADATA.format(sender.userhost()) 1115 storage_key = STR_KEY_PUBLIC_KEYS_METADATA.format(sender.userhost())
1104 1116
1105 local_public_keys_metadata = cast( 1117 local_public_keys_metadata = {
1106 Set[PublicKeyMetadata], 1118 PublicKeyMetadata.from_dict(pkm)
1107 await self.__storage[profile].get(storage_key, set()) 1119 for pkm in await self.__storage[profile].get(storage_key, [])
1108 ) 1120 }
1109 1121
1110 unchanged_keys = new_public_keys_metadata & local_public_keys_metadata 1122 unchanged_keys = new_public_keys_metadata & local_public_keys_metadata
1111 changed_or_new_keys = new_public_keys_metadata - unchanged_keys 1123 changed_or_new_keys = new_public_keys_metadata - unchanged_keys
1112 available_keys = self.list_public_keys(client, sender) 1124 available_keys = self.list_public_keys(client, sender)
1113 1125
1147 timestamp=datetime.now(timezone.utc) 1159 timestamp=datetime.now(timezone.utc)
1148 )) 1160 ))
1149 1161
1150 await self.publish_public_keys_list(client, new_public_keys_metadata) 1162 await self.publish_public_keys_list(client, new_public_keys_metadata)
1151 1163
1152 await self.__storage[profile].force(storage_key, new_public_keys_metadata) 1164 await self.__storage[profile].force(
1165 storage_key,
1166 [pkm.to_dict() for pkm in new_public_keys_metadata]
1167 )
1153 1168
1154 def list_public_keys(self, client: SatXMPPClient, jid: jid.JID) -> Set[GPGPublicKey]: 1169 def list_public_keys(self, client: SatXMPPClient, jid: jid.JID) -> Set[GPGPublicKey]:
1155 """List GPG public keys available for a JID. 1170 """List GPG public keys available for a JID.
1156 1171
1157 @param client: The client to perform this operation with. 1172 @param client: The client to perform this operation with.
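With this change, the value handed to persistent storage is plain JSON-serialisable data instead of a set of NamedTuples; roughly, the stored list looks like the following (fingerprints and timestamps are illustrative):

    # Shape of the value now passed to storage under STR_KEY_PUBLIC_KEYS_METADATA
    [
        {"fingerprint": "0123ABCD...", "timestamp": "2024-02-23T12:31:04+00:00"},
        {"fingerprint": "4567EF01...", "timestamp": "2023-11-05T09:12:47+00:00"},
    ]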
1189 1204
1190 await self.publish_public_key(client, secret_key.public_key) 1205 await self.publish_public_key(client, secret_key.public_key)
1191 1206
1192 storage_key = STR_KEY_PUBLIC_KEYS_METADATA.format(client.jid.userhost()) 1207 storage_key = STR_KEY_PUBLIC_KEYS_METADATA.format(client.jid.userhost())
1193 1208
1194 public_keys_list = cast( 1209 public_keys_list = {
1195 Set[PublicKeyMetadata], 1210 PublicKeyMetadata.from_dict(pkm)
1196 await self.__storage[client.profile].get(storage_key, set()) 1211 for pkm in await self.__storage[client.profile].get(storage_key, [])
1197 ) 1212 }
1198 1213
1199 public_keys_list.add(PublicKeyMetadata( 1214 public_keys_list.add(PublicKeyMetadata(
1200 fingerprint=secret_key.public_key.fingerprint, 1215 fingerprint=secret_key.public_key.fingerprint,
1201 timestamp=datetime.now(timezone.utc) 1216 timestamp=datetime.now(timezone.utc)
1202 )) 1217 ))
1506 1521
1507 available_public_keys = self.list_public_keys(client, entity_jid) 1522 available_public_keys = self.list_public_keys(client, entity_jid)
1508 1523
1509 storage_key = STR_KEY_PUBLIC_KEYS_METADATA.format(entity_jid.userhost()) 1524 storage_key = STR_KEY_PUBLIC_KEYS_METADATA.format(entity_jid.userhost())
1510 1525
1511 public_keys_metadata = cast( 1526 public_keys_metadata = {
1512 Set[PublicKeyMetadata], 1527 PublicKeyMetadata.from_dict(pkm)
1513 await self.__storage[client.profile].get(storage_key, set()) 1528 for pkm in await self.__storage[client.profile].get(storage_key, [])
1514 ) 1529 }
1515 if not public_keys_metadata: 1530 if not public_keys_metadata:
1516 public_keys_metadata = await self.download_public_keys_list( 1531 public_keys_metadata = await self.download_public_keys_list(
1517 client, entity_jid 1532 client, entity_jid
1518 ) 1533 )
1519 if not public_keys_metadata: 1534 if not public_keys_metadata:
1520 raise exceptions.NotFound( 1535 raise exceptions.NotFound(
1521 f"Can't find public keys for {entity_jid}" 1536 f"Can't find public keys for {entity_jid}"
1522 ) 1537 )
1523 else: 1538 else:
1524 await self.__storage[client.profile].aset( 1539 await self.__storage[client.profile].aset(
1525 storage_key, public_keys_metadata 1540 storage_key,
1541 [pkm.to_dict() for pkm in public_keys_metadata]
1526 ) 1542 )
1527 1543
1528 1544
1529 missing_keys = set(filter(lambda public_key_metadata: all( 1545 missing_keys = set(filter(lambda public_key_metadata: all(
1530 public_key_metadata.fingerprint != public_key.fingerprint 1546 public_key_metadata.fingerprint != public_key.fingerprint