libervia-backend: comparison of libervia/backend/plugins/plugin_misc_identity.py @ 4212:5f2d496c633f
core: get rid of `pickle`:
Use of `pickle` to serialise data was a technical legacy that caused trouble for database
storage, for updates (if a class was serialised, a later change to that class could break
the update), and for security (unpickling untrusted data can lead to code execution).
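As a minimal illustration of the security point (not code from this repository): `pickle.loads()` will call whatever a payload's `__reduce__` returns, so merely loading untrusted data is enough to execute code.

```python
import pickle

class Evil:
    def __reduce__(self):
        # pickle calls the returned callable with the given arguments while
        # deserialising, so loading the payload alone runs code.
        return (print, ("arbitrary call made during pickle.loads()",))

payload = pickle.dumps(Evil())
pickle.loads(payload)  # prints the message: deserialisation triggered a call
```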
This patch removes all use of Pickle in favour of JSON, notably:
- for caching data, a Pydantic model is now used instead
- for SQLAlchemy models, `LegacyPickle` is replaced by JSON serialisation
- in XEP-0373, instances of the `PublicKeyMetadata` class were serialised; new `from_dict`
  and `to_dict` methods have been implemented to handle serialisation (an illustrative
  sketch of this pattern follows the list).
- new methods to (de)serialise data can now be specified for Identity data types; they are
  notably used to (de)serialise the `path` of avatars.
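As referenced in the XEP-0373 item above, the following is an illustrative sketch of the `to_dict`/`from_dict` pattern, not code from the repository; the field names and types are assumptions made for the example:

```python
from dataclasses import dataclass
from datetime import datetime, timezone


@dataclass
class PublicKeyMetadataSketch:
    # Hypothetical fields; the real XEP-0373 class may differ.
    fingerprint: str
    timestamp: datetime

    def to_dict(self) -> dict:
        # Only JSON-friendly types: the datetime becomes an ISO 8601 string.
        return {
            "fingerprint": self.fingerprint,
            "timestamp": self.timestamp.isoformat(),
        }

    @classmethod
    def from_dict(cls, data: dict) -> "PublicKeyMetadataSketch":
        return cls(
            fingerprint=data["fingerprint"],
            timestamp=datetime.fromisoformat(data["timestamp"]),
        )


# Round trip through plain dicts (and thus JSON) instead of pickle.
meta = PublicKeyMetadataSketch("DEADBEEF", datetime.now(timezone.utc))
assert PublicKeyMetadataSketch.from_dict(meta.to_dict()) == meta
```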
A migration script has been created to convert data (for upgrade or downgrade), with
special care for the XEP-0373 case (an illustrative sketch of the conversion follows
below). Depending on the size of the database, this migration script can take a long time
to run.
rel 443
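The migration script itself is not part of this file's diff; the following is a rough, hypothetical sketch of the core pickle-to-JSON conversion it needs to perform (the real script also handles downgrade and the XEP-0373 special case):

```python
import json
import pickle


def pickled_to_json(blob: bytes) -> str:
    """Hypothetical helper: convert one legacy pickled value to JSON text.

    The actual migration also supports the reverse direction and
    special-cases some serialised classes; this only shows the general idea.
    """
    value = pickle.loads(blob)
    return json.dumps(value, ensure_ascii=False)


# Example round trip on a value similar to what the identity cache stores.
legacy_blob = pickle.dumps({"nicknames": ["Louise"]})
print(pickled_to_json(legacy_blob))  # {"nicknames": ["Louise"]}
```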
author    Goffi <goffi@goffi.org>
date      Fri, 23 Feb 2024 13:31:04 +0100
parents   4b842c1fb686
children  0d7bb4df2343
comparison: 4211:be89ab1cbca4 → 4212:5f2d496c633f
83 "update_is_new_data": self.avatar_update_is_new_data, | 83 "update_is_new_data": self.avatar_update_is_new_data, |
84 "update_data_filter": self.avatar_update_data_filter, | 84 "update_data_filter": self.avatar_update_data_filter, |
85 # we store the metadata in database, to restore it on next connection | 85 # we store the metadata in database, to restore it on next connection |
86 # (it is stored only for roster entities) | 86 # (it is stored only for roster entities) |
87 "store": True, | 87 "store": True, |
88 "store_serialisation": self._avatar_ser, | |
89 "store_deserialisation": self._avatar_deser | |
88 }, | 90 }, |
89 "nicknames": { | 91 "nicknames": { |
90 "type": list, | 92 "type": list, |
91 # accumulate all nicknames from all callbacks in a list instead | 93 # accumulate all nicknames from all callbacks in a list instead |
92 # of returning only the data from the first successful callback | 94 # of returning only the data from the first successful callback |
@@ -165,14 +167,20 @@
 
         to_delete = []
 
         for key, value in stored_data.items():
             entity_s, name = key.split('\n')
-            if name not in self.metadata.keys():
+            try:
+                metadata = self.metadata[name]
+            except KeyError:
                 log.debug(f"removing {key} from storage: not an allowed metadata name")
                 to_delete.append(key)
                 continue
+            if value is not None:
+                deser_method = metadata.get("store_deserialisation")
+                if deser_method is not None:
+                    value = deser_method(value)
             entity = jid.JID(entity_s)
 
             if name == 'avatar':
                 if value is not None:
                     try:
@@ -363,10 +371,14 @@
 
         self.host.memory.update_entity_data(
             client, entity, metadata_name, data)
 
         if metadata.get('store', False):
+            if data is not None:
+                ser_method = metadata.get("store_serialisation")
+                if ser_method is not None:
+                    data = ser_method(data)
             key = f"{entity}\n{metadata_name}"
             await client._identity_storage.aset(key, data)
 
         return data
 
@@ -486,10 +498,14 @@
 
         self.host.memory.update_entity_data(client, entity, metadata_name, data)
 
         if metadata.get('store', False):
             key = f"{entity}\n{metadata_name}"
+            if data is not None:
+                ser_method = metadata.get("store_serialisation")
+                if ser_method is not None:
+                    data = ser_method(data)
             await client._identity_storage.aset(key, data)
 
     def default_update_is_new_data(self, client, entity, cached_data, new_data):
         return new_data != cached_data
 
@@ -629,10 +645,22 @@
         if not isinstance(data, dict):
             raise ValueError(f"Invalid data type ({type(data)}), a dict is expected")
         mandatory_keys = {'path', 'filename', 'cache_uid'}
         if not data.keys() >= mandatory_keys:
             raise ValueError(f"missing avatar data keys: {mandatory_keys - data.keys()}")
+        return data
+
+    def _avatar_ser(self, data: dict) -> dict:
+        if data.get("path"):
+            # Path instance can't be stored
+            data = data.copy()
+            data["path"] = str(data["path"])
+        return data
+
+    def _avatar_deser(self, data: dict) -> dict:
+        if data.get("path"):
+            data["path"] = Path(data["path"])
         return data
 
     async def nicknames_get_post_treatment(self, client, entity, plugin_nicknames):
         """Prepend nicknames from core locations + set default nickname
 
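Taken together, the hunks above add optional `store_serialisation` / `store_deserialisation` hooks so that JSON-unfriendly values (here, the avatar `path`) survive storage. Below is a small, self-contained illustration of that round trip; the values are made up for the example, only the keys mirror the mandatory ones checked in the diff:

```python
import json
from pathlib import Path

avatar = {
    "path": Path("/var/cache/avatars/abc123"),
    "filename": "abc123.png",
    "cache_uid": "abc123",
}

# store_serialisation step: make the value JSON-safe before it is written.
serialised = json.dumps({**avatar, "path": str(avatar["path"])})

# store_deserialisation step: restore the Path instance when reloading.
restored = json.loads(serialised)
restored["path"] = Path(restored["path"])
assert restored == avatar
```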